author    Jason A. Donenfeld <Jason@zx2c4.com>  2009-10-19 20:20:09 -0400
committer Jason A. Donenfeld <Jason@zx2c4.com>  2009-10-19 20:20:09 -0400
commit    2d6dd2c5ade3f5fad3e2257dce52a6e188fe7535 (patch)
tree      da9c93d2f87df6d2b688a455a31e69859117ba1e /google_appengine/google/appengine
Initial import.
Diffstat (limited to 'google_appengine/google/appengine')
-rwxr-xr-x  google_appengine/google/appengine/__init__.py  16
-rw-r--r--  google_appengine/google/appengine/__init__.pyc  bin 0 -> 152 bytes
-rwxr-xr-x  google_appengine/google/appengine/api/__init__.py  16
-rw-r--r--  google_appengine/google/appengine/api/__init__.pyc  bin 0 -> 156 bytes
-rw-r--r--  google_appengine/google/appengine/api/api_base_pb.py  582
-rw-r--r--  google_appengine/google/appengine/api/api_base_pb.pyc  bin 0 -> 26242 bytes
-rwxr-xr-x  google_appengine/google/appengine/api/apiproxy_rpc.py  150
-rw-r--r--  google_appengine/google/appengine/api/apiproxy_rpc.pyc  bin 0 -> 5367 bytes
-rwxr-xr-x  google_appengine/google/appengine/api/apiproxy_stub.py  80
-rw-r--r--  google_appengine/google/appengine/api/apiproxy_stub.pyc  bin 0 -> 2862 bytes
-rwxr-xr-x  google_appengine/google/appengine/api/apiproxy_stub_map.py  470
-rw-r--r--  google_appengine/google/appengine/api/apiproxy_stub_map.pyc  bin 0 -> 19038 bytes
-rwxr-xr-x  google_appengine/google/appengine/api/app_logging.py  99
-rwxr-xr-x  google_appengine/google/appengine/api/appinfo.py  430
-rw-r--r--  google_appengine/google/appengine/api/appinfo.pyc  bin 0 -> 13746 bytes
-rwxr-xr-x  google_appengine/google/appengine/api/appinfo_errors.py  46
-rw-r--r--  google_appengine/google/appengine/api/appinfo_errors.pyc  bin 0 -> 2564 bytes
-rwxr-xr-x  google_appengine/google/appengine/api/capabilities/__init__.py  172
-rw-r--r--  google_appengine/google/appengine/api/capabilities/__init__.pyc  bin 0 -> 5952 bytes
-rw-r--r--  google_appengine/google/appengine/api/capabilities/capability_service_pb.py  366
-rw-r--r--  google_appengine/google/appengine/api/capabilities/capability_service_pb.pyc  bin 0 -> 18033 bytes
-rwxr-xr-x  google_appengine/google/appengine/api/capabilities/capability_stub.py  53
-rw-r--r--  google_appengine/google/appengine/api/capabilities/capability_stub.pyc  bin 0 -> 1762 bytes
-rwxr-xr-x  google_appengine/google/appengine/api/croninfo.py  132
-rw-r--r--  google_appengine/google/appengine/api/croninfo.pyc  bin 0 -> 4778 bytes
-rwxr-xr-x  google_appengine/google/appengine/api/datastore.py  2170
-rw-r--r--  google_appengine/google/appengine/api/datastore.pyc  bin 0 -> 73429 bytes
-rwxr-xr-x  google_appengine/google/appengine/api/datastore_admin.py  213
-rw-r--r--  google_appengine/google/appengine/api/datastore_admin.pyc  bin 0 -> 7403 bytes
-rwxr-xr-x  google_appengine/google/appengine/api/datastore_entities.py  343
-rwxr-xr-x  google_appengine/google/appengine/api/datastore_errors.py  105
-rw-r--r--  google_appengine/google/appengine/api/datastore_errors.pyc  bin 0 -> 6056 bytes
-rwxr-xr-x  google_appengine/google/appengine/api/datastore_file_stub.py  1061
-rw-r--r--  google_appengine/google/appengine/api/datastore_file_stub.pyc  bin 0 -> 36485 bytes
-rwxr-xr-x  google_appengine/google/appengine/api/datastore_types.py  1788
-rw-r--r--  google_appengine/google/appengine/api/datastore_types.pyc  bin 0 -> 64168 bytes
-rwxr-xr-x  google_appengine/google/appengine/api/images/__init__.py  827
-rw-r--r--  google_appengine/google/appengine/api/images/__init__.pyc  bin 0 -> 29034 bytes
-rwxr-xr-x  google_appengine/google/appengine/api/images/images_not_implemented_stub.py  36
-rw-r--r--  google_appengine/google/appengine/api/images/images_not_implemented_stub.pyc  bin 0 -> 1326 bytes
-rw-r--r--  google_appengine/google/appengine/api/images/images_service_pb.py  1988
-rw-r--r--  google_appengine/google/appengine/api/images/images_service_pb.pyc  bin 0 -> 92677 bytes
-rwxr-xr-x  google_appengine/google/appengine/api/images/images_stub.py  411
-rw-r--r--  google_appengine/google/appengine/api/images/images_stub.pyc  bin 0 -> 13183 bytes
-rwxr-xr-x  google_appengine/google/appengine/api/labs/__init__.py  16
-rw-r--r--  google_appengine/google/appengine/api/labs/__init__.pyc  bin 0 -> 161 bytes
-rw-r--r--  google_appengine/google/appengine/api/labs/taskqueue/__init__.py  20
-rw-r--r--  google_appengine/google/appengine/api/labs/taskqueue/__init__.pyc  bin 0 -> 258 bytes
-rwxr-xr-x  google_appengine/google/appengine/api/labs/taskqueue/taskqueue.py  633
-rw-r--r--  google_appengine/google/appengine/api/labs/taskqueue/taskqueue.pyc  bin 0 -> 25279 bytes
-rw-r--r--  google_appengine/google/appengine/api/labs/taskqueue/taskqueue_service_pb.py  1645
-rw-r--r--  google_appengine/google/appengine/api/labs/taskqueue/taskqueue_service_pb.pyc  bin 0 -> 80247 bytes
-rwxr-xr-x  google_appengine/google/appengine/api/labs/taskqueue/taskqueue_stub.py  327
-rw-r--r--  google_appengine/google/appengine/api/labs/taskqueue/taskqueue_stub.pyc  bin 0 -> 10766 bytes
-rwxr-xr-x  google_appengine/google/appengine/api/mail.py  1127
-rw-r--r--  google_appengine/google/appengine/api/mail.pyc  bin 0 -> 36719 bytes
-rwxr-xr-x  google_appengine/google/appengine/api/mail_errors.py  55
-rw-r--r--  google_appengine/google/appengine/api/mail_errors.pyc  bin 0 -> 3365 bytes
-rw-r--r--  google_appengine/google/appengine/api/mail_service_pb.py  584
-rw-r--r--  google_appengine/google/appengine/api/mail_service_pb.pyc  bin 0 -> 26434 bytes
-rwxr-xr-x  google_appengine/google/appengine/api/mail_stub.py  233
-rw-r--r--  google_appengine/google/appengine/api/mail_stub.pyc  bin 0 -> 7980 bytes
-rwxr-xr-x  google_appengine/google/appengine/api/memcache/__init__.py  931
-rw-r--r--  google_appengine/google/appengine/api/memcache/__init__.pyc  bin 0 -> 35018 bytes
-rw-r--r--  google_appengine/google/appengine/api/memcache/memcache_service_pb.py  2002
-rw-r--r--  google_appengine/google/appengine/api/memcache/memcache_service_pb.pyc  bin 0 -> 96235 bytes
-rwxr-xr-x  google_appengine/google/appengine/api/memcache/memcache_stub.py  293
-rw-r--r--  google_appengine/google/appengine/api/memcache/memcache_stub.pyc  bin 0 -> 10214 bytes
-rwxr-xr-x  google_appengine/google/appengine/api/namespace_manager/__init__.py  75
-rw-r--r--  google_appengine/google/appengine/api/namespace_manager/__init__.pyc  bin 0 -> 2413 bytes
-rwxr-xr-x  google_appengine/google/appengine/api/queueinfo.py  143
-rw-r--r--  google_appengine/google/appengine/api/queueinfo.pyc  bin 0 -> 4721 bytes
-rwxr-xr-x  google_appengine/google/appengine/api/quota.py  71
-rwxr-xr-x  google_appengine/google/appengine/api/urlfetch.py  361
-rw-r--r--  google_appengine/google/appengine/api/urlfetch.pyc  bin 0 -> 12400 bytes
-rwxr-xr-x  google_appengine/google/appengine/api/urlfetch_errors.py  60
-rw-r--r--  google_appengine/google/appengine/api/urlfetch_errors.pyc  bin 0 -> 2546 bytes
-rw-r--r--  google_appengine/google/appengine/api/urlfetch_service_pb.py  823
-rw-r--r--  google_appengine/google/appengine/api/urlfetch_service_pb.pyc  bin 0 -> 37893 bytes
-rwxr-xr-x  google_appengine/google/appengine/api/urlfetch_stub.py  270
-rw-r--r--  google_appengine/google/appengine/api/urlfetch_stub.pyc  bin 0 -> 8446 bytes
-rw-r--r--  google_appengine/google/appengine/api/user_service_pb.py  491
-rw-r--r--  google_appengine/google/appengine/api/user_service_pb.pyc  bin 0 -> 22376 bytes
-rwxr-xr-x  google_appengine/google/appengine/api/user_service_stub.py  106
-rw-r--r--  google_appengine/google/appengine/api/user_service_stub.pyc  bin 0 -> 3831 bytes
-rwxr-xr-x  google_appengine/google/appengine/api/users.py  230
-rw-r--r--  google_appengine/google/appengine/api/users.pyc  bin 0 -> 8366 bytes
-rwxr-xr-x  google_appengine/google/appengine/api/validation.py  928
-rw-r--r--  google_appengine/google/appengine/api/validation.pyc  bin 0 -> 35134 bytes
-rwxr-xr-x  google_appengine/google/appengine/api/xmpp/__init__.py  332
-rw-r--r--  google_appengine/google/appengine/api/xmpp/__init__.pyc  bin 0 -> 11264 bytes
-rw-r--r--  google_appengine/google/appengine/api/xmpp/xmpp_service_pb.py  826
-rw-r--r--  google_appengine/google/appengine/api/xmpp/xmpp_service_pb.pyc  bin 0 -> 37790 bytes
-rwxr-xr-x  google_appengine/google/appengine/api/xmpp/xmpp_service_stub.py  154
-rw-r--r--  google_appengine/google/appengine/api/xmpp/xmpp_service_stub.pyc  bin 0 -> 4894 bytes
-rwxr-xr-x  google_appengine/google/appengine/api/yaml_builder.py  432
-rw-r--r--  google_appengine/google/appengine/api/yaml_builder.pyc  bin 0 -> 15897 bytes
-rwxr-xr-x  google_appengine/google/appengine/api/yaml_errors.py  96
-rw-r--r--  google_appengine/google/appengine/api/yaml_errors.pyc  bin 0 -> 4937 bytes
-rwxr-xr-x  google_appengine/google/appengine/api/yaml_listener.py  218
-rw-r--r--  google_appengine/google/appengine/api/yaml_listener.pyc  bin 0 -> 9436 bytes
-rwxr-xr-x  google_appengine/google/appengine/api/yaml_object.py  294
-rw-r--r--  google_appengine/google/appengine/api/yaml_object.pyc  bin 0 -> 10844 bytes
-rwxr-xr-x  google_appengine/google/appengine/base/__init__.py  16
-rw-r--r--  google_appengine/google/appengine/base/__init__.pyc  bin 0 -> 157 bytes
-rw-r--r--  google_appengine/google/appengine/base/capabilities_pb.py  451
-rw-r--r--  google_appengine/google/appengine/base/capabilities_pb.pyc  bin 0 -> 20294 bytes
-rwxr-xr-x  google_appengine/google/appengine/cron/GrocLexer.py  1669
-rw-r--r--  google_appengine/google/appengine/cron/GrocLexer.pyc  bin 0 -> 22472 bytes
-rwxr-xr-x  google_appengine/google/appengine/cron/GrocParser.py  1008
-rw-r--r--  google_appengine/google/appengine/cron/GrocParser.pyc  bin 0 -> 18129 bytes
-rwxr-xr-x  google_appengine/google/appengine/cron/__init__.py  17
-rw-r--r--  google_appengine/google/appengine/cron/__init__.pyc  bin 0 -> 222 bytes
-rwxr-xr-x  google_appengine/google/appengine/cron/groc.py  74
-rw-r--r--  google_appengine/google/appengine/cron/groc.pyc  bin 0 -> 2413 bytes
-rwxr-xr-x  google_appengine/google/appengine/cron/groctimespecification.py  304
-rw-r--r--  google_appengine/google/appengine/cron/groctimespecification.pyc  bin 0 -> 10574 bytes
-rwxr-xr-x  google_appengine/google/appengine/datastore/__init__.py  16
-rw-r--r--  google_appengine/google/appengine/datastore/__init__.pyc  bin 0 -> 162 bytes
-rwxr-xr-x  google_appengine/google/appengine/datastore/action_pb.py  24
-rw-r--r--  google_appengine/google/appengine/datastore/action_pb.pyc  bin 0 -> 615 bytes
-rwxr-xr-x  google_appengine/google/appengine/datastore/datastore_index.py  438
-rw-r--r--  google_appengine/google/appengine/datastore/datastore_index.pyc  bin 0 -> 14075 bytes
-rw-r--r--  google_appengine/google/appengine/datastore/datastore_pb.py  4673
-rw-r--r--  google_appengine/google/appengine/datastore/datastore_pb.pyc  bin 0 -> 214173 bytes
-rwxr-xr-x  google_appengine/google/appengine/datastore/datastore_v3_pb.py  26
-rw-r--r--  google_appengine/google/appengine/datastore/datastore_v3_pb.pyc  bin 0 -> 533 bytes
-rw-r--r--  google_appengine/google/appengine/datastore/entity_pb.py  2599
-rw-r--r--  google_appengine/google/appengine/datastore/entity_pb.pyc  bin 0 -> 117476 bytes
-rwxr-xr-x  google_appengine/google/appengine/dist/__init__.py  36
-rw-r--r--  google_appengine/google/appengine/dist/__init__.pyc  bin 0 -> 487 bytes
-rwxr-xr-x  google_appengine/google/appengine/dist/_library.py  284
-rw-r--r--  google_appengine/google/appengine/dist/_library.pyc  bin 0 -> 9435 bytes
-rwxr-xr-x  google_appengine/google/appengine/dist/ftplib.py  16
-rwxr-xr-x  google_appengine/google/appengine/dist/httplib.py  388
-rw-r--r--  google_appengine/google/appengine/dist/httplib.pyc  bin 0 -> 15982 bytes
-rwxr-xr-x  google_appengine/google/appengine/dist/neo_cgi.py  16
-rwxr-xr-x  google_appengine/google/appengine/dist/py_imp.py  142
-rw-r--r--  google_appengine/google/appengine/dist/py_imp.pyc  bin 0 -> 5838 bytes
-rwxr-xr-x  google_appengine/google/appengine/dist/py_zipimport.py  291
-rw-r--r--  google_appengine/google/appengine/dist/py_zipimport.pyc  bin 0 -> 10960 bytes
-rwxr-xr-x  google_appengine/google/appengine/dist/select.py  16
-rwxr-xr-x  google_appengine/google/appengine/dist/socket.py  45
-rw-r--r--  google_appengine/google/appengine/dist/socket.pyc  bin 0 -> 1618 bytes
-rwxr-xr-x  google_appengine/google/appengine/dist/subprocess.py  16
-rwxr-xr-x  google_appengine/google/appengine/dist/tempfile.py  65
-rw-r--r--  google_appengine/google/appengine/dist/tempfile.pyc  bin 0 -> 1621 bytes
-rwxr-xr-x  google_appengine/google/appengine/ext/__init__.py  16
-rw-r--r--  google_appengine/google/appengine/ext/__init__.pyc  bin 0 -> 156 bytes
-rwxr-xr-x  google_appengine/google/appengine/ext/admin/__init__.py  1297
-rw-r--r--  google_appengine/google/appengine/ext/admin/templates/base.html  96
-rw-r--r--  google_appengine/google/appengine/ext/admin/templates/cron.html  85
-rwxr-xr-x  google_appengine/google/appengine/ext/admin/templates/css/ae.css  170
-rwxr-xr-x  google_appengine/google/appengine/ext/admin/templates/css/base.css  2
-rw-r--r--  google_appengine/google/appengine/ext/admin/templates/css/cron.css  26
-rw-r--r--  google_appengine/google/appengine/ext/admin/templates/css/datastore.css  71
-rw-r--r--  google_appengine/google/appengine/ext/admin/templates/css/form.css  20
-rw-r--r--  google_appengine/google/appengine/ext/admin/templates/css/inboundmail.css  19
-rw-r--r--  google_appengine/google/appengine/ext/admin/templates/css/memcache.css  54
-rwxr-xr-x  google_appengine/google/appengine/ext/admin/templates/css/nav.css  88
-rw-r--r--  google_appengine/google/appengine/ext/admin/templates/css/pager.css  7
-rw-r--r--  google_appengine/google/appengine/ext/admin/templates/css/queues.css  26
-rw-r--r--  google_appengine/google/appengine/ext/admin/templates/css/tasks.css  26
-rw-r--r--  google_appengine/google/appengine/ext/admin/templates/css/xmpp.css  19
-rw-r--r--  google_appengine/google/appengine/ext/admin/templates/datastore.html  183
-rw-r--r--  google_appengine/google/appengine/ext/admin/templates/datastore_edit.html  162
-rwxr-xr-x  google_appengine/google/appengine/ext/admin/templates/images/google.gif  bin 0 -> 1470 bytes
-rw-r--r--  google_appengine/google/appengine/ext/admin/templates/inboundmail.html  158
-rw-r--r--  google_appengine/google/appengine/ext/admin/templates/interactive-output.html  36
-rw-r--r--  google_appengine/google/appengine/ext/admin/templates/interactive.html  104
-rw-r--r--  google_appengine/google/appengine/ext/admin/templates/js/multipart_form_data.js  125
-rw-r--r--  google_appengine/google/appengine/ext/admin/templates/js/rfc822_date.js  70
-rw-r--r--  google_appengine/google/appengine/ext/admin/templates/js/webhook.js  87
-rw-r--r--  google_appengine/google/appengine/ext/admin/templates/memcache.html  119
-rw-r--r--  google_appengine/google/appengine/ext/admin/templates/pager.html  9
-rw-r--r--  google_appengine/google/appengine/ext/admin/templates/queues.html  75
-rw-r--r--  google_appengine/google/appengine/ext/admin/templates/tasks.html  103
-rw-r--r--  google_appengine/google/appengine/ext/admin/templates/xmpp.html  234
-rwxr-xr-x  google_appengine/google/appengine/ext/bulkload/__init__.py  435
-rwxr-xr-x  google_appengine/google/appengine/ext/bulkload/constants.py  24
-rwxr-xr-x  google_appengine/google/appengine/ext/db/__init__.py  2959
-rw-r--r--  google_appengine/google/appengine/ext/db/__init__.pyc  bin 0 -> 107352 bytes
-rwxr-xr-x  google_appengine/google/appengine/ext/db/djangoforms.py  886
-rwxr-xr-x  google_appengine/google/appengine/ext/db/polymodel.py  355
-rw-r--r--  google_appengine/google/appengine/ext/db/polymodel.pyc  bin 0 -> 13673 bytes
-rwxr-xr-x  google_appengine/google/appengine/ext/deferred/__init__.py  22
-rwxr-xr-x  google_appengine/google/appengine/ext/deferred/deferred.py  267
-rwxr-xr-x  google_appengine/google/appengine/ext/ereporter/__init__.py  18
-rwxr-xr-x  google_appengine/google/appengine/ext/ereporter/ereporter.py  261
-rwxr-xr-x  google_appengine/google/appengine/ext/ereporter/report_generator.py  184
-rw-r--r--  google_appengine/google/appengine/ext/ereporter/templates/report.html  15
-rwxr-xr-x  google_appengine/google/appengine/ext/gql/__init__.py  1151
-rw-r--r--  google_appengine/google/appengine/ext/gql/__init__.pyc  bin 0 -> 41358 bytes
-rwxr-xr-x  google_appengine/google/appengine/ext/key_range/__init__.py  570
-rw-r--r--  google_appengine/google/appengine/ext/key_range/__init__.pyc  bin 0 -> 19668 bytes
-rwxr-xr-x  google_appengine/google/appengine/ext/preload/__init__.py  213
-rw-r--r--  google_appengine/google/appengine/ext/remote_api/__init__.py  16
-rw-r--r--  google_appengine/google/appengine/ext/remote_api/__init__.pyc  bin 0 -> 167 bytes
-rwxr-xr-x  google_appengine/google/appengine/ext/remote_api/handler.py  319
-rw-r--r--  google_appengine/google/appengine/ext/remote_api/remote_api_pb.py  809
-rw-r--r--  google_appengine/google/appengine/ext/remote_api/remote_api_pb.pyc  bin 0 -> 37288 bytes
-rwxr-xr-x  google_appengine/google/appengine/ext/remote_api/remote_api_stub.py  499
-rw-r--r--  google_appengine/google/appengine/ext/remote_api/remote_api_stub.pyc  bin 0 -> 20575 bytes
-rwxr-xr-x  google_appengine/google/appengine/ext/remote_api/throttle.py  637
-rw-r--r--  google_appengine/google/appengine/ext/remote_api/throttle.pyc  bin 0 -> 24719 bytes
-rwxr-xr-x  google_appengine/google/appengine/ext/search/__init__.py  420
-rwxr-xr-x  google_appengine/google/appengine/ext/webapp/__init__.py  580
-rw-r--r--  google_appengine/google/appengine/ext/webapp/__init__.pyc  bin 0 -> 22445 bytes
-rwxr-xr-x  google_appengine/google/appengine/ext/webapp/mail_handlers.py  78
-rwxr-xr-x  google_appengine/google/appengine/ext/webapp/template.py  219
-rw-r--r--  google_appengine/google/appengine/ext/webapp/template.pyc  bin 0 -> 7270 bytes
-rwxr-xr-x  google_appengine/google/appengine/ext/webapp/util.py  90
-rw-r--r--  google_appengine/google/appengine/ext/webapp/util.pyc  bin 0 -> 3085 bytes
-rwxr-xr-x  google_appengine/google/appengine/ext/webapp/xmpp_handlers.py  119
-rwxr-xr-x  google_appengine/google/appengine/ext/zipserve/__init__.py  173
-rwxr-xr-x  google_appengine/google/appengine/runtime/__init__.py  33
-rw-r--r--  google_appengine/google/appengine/runtime/__init__.pyc  bin 0 -> 768 bytes
-rwxr-xr-x  google_appengine/google/appengine/runtime/apiproxy.py  184
-rw-r--r--  google_appengine/google/appengine/runtime/apiproxy.pyc  bin 0 -> 6433 bytes
-rwxr-xr-x  google_appengine/google/appengine/runtime/apiproxy_errors.py  84
-rw-r--r--  google_appengine/google/appengine/runtime/apiproxy_errors.pyc  bin 0 -> 4918 bytes
-rwxr-xr-x  google_appengine/google/appengine/tools/__init__.py  16
-rw-r--r--  google_appengine/google/appengine/tools/__init__.pyc  bin 0 -> 158 bytes
-rwxr-xr-x  google_appengine/google/appengine/tools/adaptive_thread_pool.py  460
-rw-r--r--  google_appengine/google/appengine/tools/adaptive_thread_pool.pyc  bin 0 -> 17325 bytes
-rwxr-xr-x  google_appengine/google/appengine/tools/appcfg.py  2525
-rw-r--r--  google_appengine/google/appengine/tools/appcfg.pyc  bin 0 -> 91877 bytes
-rwxr-xr-x  google_appengine/google/appengine/tools/appengine_rpc.py  435
-rw-r--r--  google_appengine/google/appengine/tools/appengine_rpc.pyc  bin 0 -> 16715 bytes
-rwxr-xr-x  google_appengine/google/appengine/tools/bulkload_client.py  297
-rwxr-xr-x  google_appengine/google/appengine/tools/bulkloader.py  3827
-rw-r--r--  google_appengine/google/appengine/tools/bulkloader.pyc  bin 0 -> 135278 bytes
-rwxr-xr-x  google_appengine/google/appengine/tools/dev_appserver.py  3542
-rw-r--r--  google_appengine/google/appengine/tools/dev_appserver.pyc  bin 0 -> 119171 bytes
-rwxr-xr-x  google_appengine/google/appengine/tools/dev_appserver_index.py  277
-rw-r--r--  google_appengine/google/appengine/tools/dev_appserver_index.pyc  bin 0 -> 8685 bytes
-rwxr-xr-x  google_appengine/google/appengine/tools/dev_appserver_info.py  160
-rwxr-xr-x  google_appengine/google/appengine/tools/dev_appserver_login.py  297
-rw-r--r--  google_appengine/google/appengine/tools/dev_appserver_login.pyc  bin 0 -> 8740 bytes
-rwxr-xr-x  google_appengine/google/appengine/tools/dev_appserver_main.py  498
-rwxr-xr-x  google_appengine/google/appengine/tools/os_compat.py  46
-rw-r--r--  google_appengine/google/appengine/tools/os_compat.pyc  bin 0 -> 1180 bytes
-rwxr-xr-x  google_appengine/google/appengine/tools/remote_api_shell.py  94
-rwxr-xr-x  google_appengine/google/appengine/tools/requeue.py  219
-rw-r--r--  google_appengine/google/appengine/tools/requeue.pyc  bin 0 -> 7934 bytes
245 files changed, 65760 insertions, 0 deletions
diff --git a/google_appengine/google/appengine/__init__.py b/google_appengine/google/appengine/__init__.py
new file mode 100755
index 0000000..c33ae80
--- /dev/null
+++ b/google_appengine/google/appengine/__init__.py
@@ -0,0 +1,16 @@
+#!/usr/bin/env python
+#
+# Copyright 2007 Google Inc.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
diff --git a/google_appengine/google/appengine/__init__.pyc b/google_appengine/google/appengine/__init__.pyc
new file mode 100644
index 0000000..3215a40
--- /dev/null
+++ b/google_appengine/google/appengine/__init__.pyc
Binary files differ
diff --git a/google_appengine/google/appengine/api/__init__.py b/google_appengine/google/appengine/api/__init__.py
new file mode 100755
index 0000000..c33ae80
--- /dev/null
+++ b/google_appengine/google/appengine/api/__init__.py
@@ -0,0 +1,16 @@
+#!/usr/bin/env python
+#
+# Copyright 2007 Google Inc.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
diff --git a/google_appengine/google/appengine/api/__init__.pyc b/google_appengine/google/appengine/api/__init__.pyc
new file mode 100644
index 0000000..874041c
--- /dev/null
+++ b/google_appengine/google/appengine/api/__init__.pyc
Binary files differ
diff --git a/google_appengine/google/appengine/api/api_base_pb.py b/google_appengine/google/appengine/api/api_base_pb.py
new file mode 100644
index 0000000..aa30190
--- /dev/null
+++ b/google_appengine/google/appengine/api/api_base_pb.py
@@ -0,0 +1,582 @@
+#!/usr/bin/env python
+#
+# Copyright 2007 Google Inc.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
+
+from google.net.proto import ProtocolBuffer
+import array
+import dummy_thread as thread
+
+__pychecker__ = """maxreturns=0 maxbranches=0 no-callinit
+ unusednames=printElemNumber,debug_strs no-special"""
+
+class StringProto(ProtocolBuffer.ProtocolMessage):
+ has_value_ = 0
+ value_ = ""
+
+ def __init__(self, contents=None):
+ if contents is not None: self.MergeFromString(contents)
+
+ def value(self): return self.value_
+
+ def set_value(self, x):
+ self.has_value_ = 1
+ self.value_ = x
+
+ def clear_value(self):
+ if self.has_value_:
+ self.has_value_ = 0
+ self.value_ = ""
+
+ def has_value(self): return self.has_value_
+
+
+ def MergeFrom(self, x):
+ assert x is not self
+ if (x.has_value()): self.set_value(x.value())
+
+ def Equals(self, x):
+ if x is self: return 1
+ if self.has_value_ != x.has_value_: return 0
+ if self.has_value_ and self.value_ != x.value_: return 0
+ return 1
+
+ def IsInitialized(self, debug_strs=None):
+ initialized = 1
+ if (not self.has_value_):
+ initialized = 0
+ if debug_strs is not None:
+ debug_strs.append('Required field: value not set.')
+ return initialized
+
+ def ByteSize(self):
+ n = 0
+ n += self.lengthString(len(self.value_))
+ return n + 1
+
+ def Clear(self):
+ self.clear_value()
+
+ def OutputUnchecked(self, out):
+ out.putVarInt32(10)
+ out.putPrefixedString(self.value_)
+
+ def TryMerge(self, d):
+ while d.avail() > 0:
+ tt = d.getVarInt32()
+ if tt == 10:
+ self.set_value(d.getPrefixedString())
+ continue
+ if (tt == 0): raise ProtocolBuffer.ProtocolBufferDecodeError
+ d.skipData(tt)
+
+
+ def __str__(self, prefix="", printElemNumber=0):
+ res=""
+ if self.has_value_: res+=prefix+("value: %s\n" % self.DebugFormatString(self.value_))
+ return res
+
+
+ def _BuildTagLookupTable(sparse, maxtag, default=None):
+ return tuple([sparse.get(i, default) for i in xrange(0, 1+maxtag)])
+
+ kvalue = 1
+
+ _TEXT = _BuildTagLookupTable({
+ 0: "ErrorCode",
+ 1: "value",
+ }, 1)
+
+ _TYPES = _BuildTagLookupTable({
+ 0: ProtocolBuffer.Encoder.NUMERIC,
+ 1: ProtocolBuffer.Encoder.STRING,
+ }, 1, ProtocolBuffer.Encoder.MAX_TYPE)
+
+ _STYLE = """"""
+ _STYLE_CONTENT_TYPE = """"""
+class Integer32Proto(ProtocolBuffer.ProtocolMessage):
+ has_value_ = 0
+ value_ = 0
+
+ def __init__(self, contents=None):
+ if contents is not None: self.MergeFromString(contents)
+
+ def value(self): return self.value_
+
+ def set_value(self, x):
+ self.has_value_ = 1
+ self.value_ = x
+
+ def clear_value(self):
+ if self.has_value_:
+ self.has_value_ = 0
+ self.value_ = 0
+
+ def has_value(self): return self.has_value_
+
+
+ def MergeFrom(self, x):
+ assert x is not self
+ if (x.has_value()): self.set_value(x.value())
+
+ def Equals(self, x):
+ if x is self: return 1
+ if self.has_value_ != x.has_value_: return 0
+ if self.has_value_ and self.value_ != x.value_: return 0
+ return 1
+
+ def IsInitialized(self, debug_strs=None):
+ initialized = 1
+ if (not self.has_value_):
+ initialized = 0
+ if debug_strs is not None:
+ debug_strs.append('Required field: value not set.')
+ return initialized
+
+ def ByteSize(self):
+ n = 0
+ n += self.lengthVarInt64(self.value_)
+ return n + 1
+
+ def Clear(self):
+ self.clear_value()
+
+ def OutputUnchecked(self, out):
+ out.putVarInt32(8)
+ out.putVarInt32(self.value_)
+
+ def TryMerge(self, d):
+ while d.avail() > 0:
+ tt = d.getVarInt32()
+ if tt == 8:
+ self.set_value(d.getVarInt32())
+ continue
+ if (tt == 0): raise ProtocolBuffer.ProtocolBufferDecodeError
+ d.skipData(tt)
+
+
+ def __str__(self, prefix="", printElemNumber=0):
+ res=""
+ if self.has_value_: res+=prefix+("value: %s\n" % self.DebugFormatInt32(self.value_))
+ return res
+
+
+ def _BuildTagLookupTable(sparse, maxtag, default=None):
+ return tuple([sparse.get(i, default) for i in xrange(0, 1+maxtag)])
+
+ kvalue = 1
+
+ _TEXT = _BuildTagLookupTable({
+ 0: "ErrorCode",
+ 1: "value",
+ }, 1)
+
+ _TYPES = _BuildTagLookupTable({
+ 0: ProtocolBuffer.Encoder.NUMERIC,
+ 1: ProtocolBuffer.Encoder.NUMERIC,
+ }, 1, ProtocolBuffer.Encoder.MAX_TYPE)
+
+ _STYLE = """"""
+ _STYLE_CONTENT_TYPE = """"""
+class Integer64Proto(ProtocolBuffer.ProtocolMessage):
+ has_value_ = 0
+ value_ = 0
+
+ def __init__(self, contents=None):
+ if contents is not None: self.MergeFromString(contents)
+
+ def value(self): return self.value_
+
+ def set_value(self, x):
+ self.has_value_ = 1
+ self.value_ = x
+
+ def clear_value(self):
+ if self.has_value_:
+ self.has_value_ = 0
+ self.value_ = 0
+
+ def has_value(self): return self.has_value_
+
+
+ def MergeFrom(self, x):
+ assert x is not self
+ if (x.has_value()): self.set_value(x.value())
+
+ def Equals(self, x):
+ if x is self: return 1
+ if self.has_value_ != x.has_value_: return 0
+ if self.has_value_ and self.value_ != x.value_: return 0
+ return 1
+
+ def IsInitialized(self, debug_strs=None):
+ initialized = 1
+ if (not self.has_value_):
+ initialized = 0
+ if debug_strs is not None:
+ debug_strs.append('Required field: value not set.')
+ return initialized
+
+ def ByteSize(self):
+ n = 0
+ n += self.lengthVarInt64(self.value_)
+ return n + 1
+
+ def Clear(self):
+ self.clear_value()
+
+ def OutputUnchecked(self, out):
+ out.putVarInt32(8)
+ out.putVarInt64(self.value_)
+
+ def TryMerge(self, d):
+ while d.avail() > 0:
+ tt = d.getVarInt32()
+ if tt == 8:
+ self.set_value(d.getVarInt64())
+ continue
+ if (tt == 0): raise ProtocolBuffer.ProtocolBufferDecodeError
+ d.skipData(tt)
+
+
+ def __str__(self, prefix="", printElemNumber=0):
+ res=""
+ if self.has_value_: res+=prefix+("value: %s\n" % self.DebugFormatInt64(self.value_))
+ return res
+
+
+ def _BuildTagLookupTable(sparse, maxtag, default=None):
+ return tuple([sparse.get(i, default) for i in xrange(0, 1+maxtag)])
+
+ kvalue = 1
+
+ _TEXT = _BuildTagLookupTable({
+ 0: "ErrorCode",
+ 1: "value",
+ }, 1)
+
+ _TYPES = _BuildTagLookupTable({
+ 0: ProtocolBuffer.Encoder.NUMERIC,
+ 1: ProtocolBuffer.Encoder.NUMERIC,
+ }, 1, ProtocolBuffer.Encoder.MAX_TYPE)
+
+ _STYLE = """"""
+ _STYLE_CONTENT_TYPE = """"""
+class BoolProto(ProtocolBuffer.ProtocolMessage):
+ has_value_ = 0
+ value_ = 0
+
+ def __init__(self, contents=None):
+ if contents is not None: self.MergeFromString(contents)
+
+ def value(self): return self.value_
+
+ def set_value(self, x):
+ self.has_value_ = 1
+ self.value_ = x
+
+ def clear_value(self):
+ if self.has_value_:
+ self.has_value_ = 0
+ self.value_ = 0
+
+ def has_value(self): return self.has_value_
+
+
+ def MergeFrom(self, x):
+ assert x is not self
+ if (x.has_value()): self.set_value(x.value())
+
+ def Equals(self, x):
+ if x is self: return 1
+ if self.has_value_ != x.has_value_: return 0
+ if self.has_value_ and self.value_ != x.value_: return 0
+ return 1
+
+ def IsInitialized(self, debug_strs=None):
+ initialized = 1
+ if (not self.has_value_):
+ initialized = 0
+ if debug_strs is not None:
+ debug_strs.append('Required field: value not set.')
+ return initialized
+
+ def ByteSize(self):
+ n = 0
+ return n + 2
+
+ def Clear(self):
+ self.clear_value()
+
+ def OutputUnchecked(self, out):
+ out.putVarInt32(8)
+ out.putBoolean(self.value_)
+
+ def TryMerge(self, d):
+ while d.avail() > 0:
+ tt = d.getVarInt32()
+ if tt == 8:
+ self.set_value(d.getBoolean())
+ continue
+ if (tt == 0): raise ProtocolBuffer.ProtocolBufferDecodeError
+ d.skipData(tt)
+
+
+ def __str__(self, prefix="", printElemNumber=0):
+ res=""
+ if self.has_value_: res+=prefix+("value: %s\n" % self.DebugFormatBool(self.value_))
+ return res
+
+
+ def _BuildTagLookupTable(sparse, maxtag, default=None):
+ return tuple([sparse.get(i, default) for i in xrange(0, 1+maxtag)])
+
+ kvalue = 1
+
+ _TEXT = _BuildTagLookupTable({
+ 0: "ErrorCode",
+ 1: "value",
+ }, 1)
+
+ _TYPES = _BuildTagLookupTable({
+ 0: ProtocolBuffer.Encoder.NUMERIC,
+ 1: ProtocolBuffer.Encoder.NUMERIC,
+ }, 1, ProtocolBuffer.Encoder.MAX_TYPE)
+
+ _STYLE = """"""
+ _STYLE_CONTENT_TYPE = """"""
+class DoubleProto(ProtocolBuffer.ProtocolMessage):
+ has_value_ = 0
+ value_ = 0.0
+
+ def __init__(self, contents=None):
+ if contents is not None: self.MergeFromString(contents)
+
+ def value(self): return self.value_
+
+ def set_value(self, x):
+ self.has_value_ = 1
+ self.value_ = x
+
+ def clear_value(self):
+ if self.has_value_:
+ self.has_value_ = 0
+ self.value_ = 0.0
+
+ def has_value(self): return self.has_value_
+
+
+ def MergeFrom(self, x):
+ assert x is not self
+ if (x.has_value()): self.set_value(x.value())
+
+ def Equals(self, x):
+ if x is self: return 1
+ if self.has_value_ != x.has_value_: return 0
+ if self.has_value_ and self.value_ != x.value_: return 0
+ return 1
+
+ def IsInitialized(self, debug_strs=None):
+ initialized = 1
+ if (not self.has_value_):
+ initialized = 0
+ if debug_strs is not None:
+ debug_strs.append('Required field: value not set.')
+ return initialized
+
+ def ByteSize(self):
+ n = 0
+ return n + 9
+
+ def Clear(self):
+ self.clear_value()
+
+ def OutputUnchecked(self, out):
+ out.putVarInt32(9)
+ out.putDouble(self.value_)
+
+ def TryMerge(self, d):
+ while d.avail() > 0:
+ tt = d.getVarInt32()
+ if tt == 9:
+ self.set_value(d.getDouble())
+ continue
+ if (tt == 0): raise ProtocolBuffer.ProtocolBufferDecodeError
+ d.skipData(tt)
+
+
+ def __str__(self, prefix="", printElemNumber=0):
+ res=""
+ if self.has_value_: res+=prefix+("value: %s\n" % self.DebugFormat(self.value_))
+ return res
+
+
+ def _BuildTagLookupTable(sparse, maxtag, default=None):
+ return tuple([sparse.get(i, default) for i in xrange(0, 1+maxtag)])
+
+ kvalue = 1
+
+ _TEXT = _BuildTagLookupTable({
+ 0: "ErrorCode",
+ 1: "value",
+ }, 1)
+
+ _TYPES = _BuildTagLookupTable({
+ 0: ProtocolBuffer.Encoder.NUMERIC,
+ 1: ProtocolBuffer.Encoder.DOUBLE,
+ }, 1, ProtocolBuffer.Encoder.MAX_TYPE)
+
+ _STYLE = """"""
+ _STYLE_CONTENT_TYPE = """"""
+class BytesProto(ProtocolBuffer.ProtocolMessage):
+ has_value_ = 0
+ value_ = ""
+
+ def __init__(self, contents=None):
+ if contents is not None: self.MergeFromString(contents)
+
+ def value(self): return self.value_
+
+ def set_value(self, x):
+ self.has_value_ = 1
+ self.value_ = x
+
+ def clear_value(self):
+ if self.has_value_:
+ self.has_value_ = 0
+ self.value_ = ""
+
+ def has_value(self): return self.has_value_
+
+
+ def MergeFrom(self, x):
+ assert x is not self
+ if (x.has_value()): self.set_value(x.value())
+
+ def Equals(self, x):
+ if x is self: return 1
+ if self.has_value_ != x.has_value_: return 0
+ if self.has_value_ and self.value_ != x.value_: return 0
+ return 1
+
+ def IsInitialized(self, debug_strs=None):
+ initialized = 1
+ if (not self.has_value_):
+ initialized = 0
+ if debug_strs is not None:
+ debug_strs.append('Required field: value not set.')
+ return initialized
+
+ def ByteSize(self):
+ n = 0
+ n += self.lengthString(len(self.value_))
+ return n + 1
+
+ def Clear(self):
+ self.clear_value()
+
+ def OutputUnchecked(self, out):
+ out.putVarInt32(10)
+ out.putPrefixedString(self.value_)
+
+ def TryMerge(self, d):
+ while d.avail() > 0:
+ tt = d.getVarInt32()
+ if tt == 10:
+ self.set_value(d.getPrefixedString())
+ continue
+ if (tt == 0): raise ProtocolBuffer.ProtocolBufferDecodeError
+ d.skipData(tt)
+
+
+ def __str__(self, prefix="", printElemNumber=0):
+ res=""
+ if self.has_value_: res+=prefix+("value: %s\n" % self.DebugFormatString(self.value_))
+ return res
+
+
+ def _BuildTagLookupTable(sparse, maxtag, default=None):
+ return tuple([sparse.get(i, default) for i in xrange(0, 1+maxtag)])
+
+ kvalue = 1
+
+ _TEXT = _BuildTagLookupTable({
+ 0: "ErrorCode",
+ 1: "value",
+ }, 1)
+
+ _TYPES = _BuildTagLookupTable({
+ 0: ProtocolBuffer.Encoder.NUMERIC,
+ 1: ProtocolBuffer.Encoder.STRING,
+ }, 1, ProtocolBuffer.Encoder.MAX_TYPE)
+
+ _STYLE = """"""
+ _STYLE_CONTENT_TYPE = """"""
+class VoidProto(ProtocolBuffer.ProtocolMessage):
+
+ def __init__(self, contents=None):
+ pass
+ if contents is not None: self.MergeFromString(contents)
+
+
+ def MergeFrom(self, x):
+ assert x is not self
+
+ def Equals(self, x):
+ if x is self: return 1
+ return 1
+
+ def IsInitialized(self, debug_strs=None):
+ initialized = 1
+ return initialized
+
+ def ByteSize(self):
+ n = 0
+ return n + 0
+
+ def Clear(self):
+ pass
+
+ def OutputUnchecked(self, out):
+ pass
+
+ def TryMerge(self, d):
+ while d.avail() > 0:
+ tt = d.getVarInt32()
+ if (tt == 0): raise ProtocolBuffer.ProtocolBufferDecodeError
+ d.skipData(tt)
+
+
+ def __str__(self, prefix="", printElemNumber=0):
+ res=""
+ return res
+
+
+ def _BuildTagLookupTable(sparse, maxtag, default=None):
+ return tuple([sparse.get(i, default) for i in xrange(0, 1+maxtag)])
+
+
+ _TEXT = _BuildTagLookupTable({
+ 0: "ErrorCode",
+ }, 0)
+
+ _TYPES = _BuildTagLookupTable({
+ 0: ProtocolBuffer.Encoder.NUMERIC,
+ }, 0, ProtocolBuffer.Encoder.MAX_TYPE)
+
+ _STYLE = """"""
+ _STYLE_CONTENT_TYPE = """"""
+
+__all__ = ['StringProto','Integer32Proto','Integer64Proto','BoolProto','DoubleProto','BytesProto','VoidProto']
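
The generated message classes above all share one accessor pattern: each field
gets value()/set_value()/clear_value()/has_value() methods plus structural
operations such as MergeFrom, Equals, and IsInitialized. A minimal sketch of
that pattern in use, relying only on methods visible in api_base_pb.py above:

    from google.appengine.api import api_base_pb

    req = api_base_pb.StringProto()
    req.set_value('hello')           # sets has_value_ and stores the string
    assert req.has_value()

    copy = api_base_pb.StringProto()
    copy.MergeFrom(req)              # copies any set fields from req
    assert copy.Equals(req)          # field-by-field comparison returns 1

    copy.clear_value()               # back to the default "" with has_value_ = 0
    assert not copy.IsInitialized()  # 'value' is a required field
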
diff --git a/google_appengine/google/appengine/api/api_base_pb.pyc b/google_appengine/google/appengine/api/api_base_pb.pyc
new file mode 100644
index 0000000..fbbb0fc
--- /dev/null
+++ b/google_appengine/google/appengine/api/api_base_pb.pyc
Binary files differ
diff --git a/google_appengine/google/appengine/api/apiproxy_rpc.py b/google_appengine/google/appengine/api/apiproxy_rpc.py
new file mode 100755
index 0000000..2ac8923
--- /dev/null
+++ b/google_appengine/google/appengine/api/apiproxy_rpc.py
@@ -0,0 +1,150 @@
+#!/usr/bin/env python
+#
+# Copyright 2007 Google Inc.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
+
+"""Base class for implementing RPC of API proxy stubs."""
+
+
+
+
+
+import sys
+
+
+class RPC(object):
+ """Base class for implementing RPC of API proxy stubs.
+
+  To implement an RPC that makes a real asynchronous API call:
+ - Extend this class.
+ - Override _MakeCallImpl and/or _WaitImpl to do a real asynchronous call.
+ """
+
+ IDLE = 0
+ RUNNING = 1
+ FINISHING = 2
+
+ def __init__(self, package=None, call=None, request=None, response=None,
+ callback=None, deadline=None, stub=None):
+ """Constructor for the RPC object.
+
+ All arguments are optional, and simply set members on the class.
+    These data members will be overridden by values passed to MakeCall.
+
+ Args:
+ package: string, the package for the call
+ call: string, the call within the package
+ request: ProtocolMessage instance, appropriate for the arguments
+ response: ProtocolMessage instance, appropriate for the response
+ callback: callable, called when call is complete
+ deadline: A double specifying the deadline for this call as the number of
+ seconds from the current time. Ignored if non-positive.
+ stub: APIProxyStub instance, used in default _WaitImpl to do real call
+ """
+ self.__exception = None
+ self.__state = RPC.IDLE
+ self.__traceback = None
+
+ self.package = package
+ self.call = call
+ self.request = request
+ self.response = response
+ self.callback = callback
+ self.deadline = deadline
+ self.stub = stub
+ self.cpu_usage_mcycles = 0
+
+ def MakeCall(self, package=None, call=None, request=None, response=None,
+ callback=None, deadline=None):
+ """Makes an asynchronous (i.e. non-blocking) API call within the
+ specified package for the specified call method.
+
+    It calls _MakeCallImpl to do the real work.
+
+ Args:
+ Same as constructor; see __init__.
+
+ Raises:
+ TypeError or AssertionError if an argument is of an invalid type.
+      AssertionError or RuntimeError if an RPC is already in use.
+ """
+ self.callback = callback or self.callback
+ self.package = package or self.package
+ self.call = call or self.call
+ self.request = request or self.request
+ self.response = response or self.response
+ self.deadline = deadline or self.deadline
+
+ assert self.__state is RPC.IDLE, ('RPC for %s.%s has already been started' %
+ (self.package, self.call))
+ assert self.callback is None or callable(self.callback)
+ self._MakeCallImpl()
+
+ def Wait(self):
+ """Waits on the API call associated with this RPC."""
+ rpc_completed = self._WaitImpl()
+
+    assert rpc_completed, ('RPC for %s.%s was not completed, and no other '
+                           'exception was raised' % (self.package, self.call))
+
+ def CheckSuccess(self):
+ """If there was an exception, raise it now.
+
+ Raises:
+ Exception of the API call or the callback, if any.
+ """
+ if self.exception and self.__traceback:
+ raise self.exception.__class__, self.exception, self.__traceback
+ elif self.exception:
+ raise self.exception
+
+ @property
+ def exception(self):
+ return self.__exception
+
+ @property
+ def state(self):
+ return self.__state
+
+ def _MakeCallImpl(self):
+ """Override this method to implement a real asynchronous call rpc."""
+ self.__state = RPC.RUNNING
+
+ def _WaitImpl(self):
+ """Override this method to implement a real asynchronous call rpc.
+
+ Returns:
+ True if the async call was completed successfully.
+ """
+ try:
+ try:
+ self.stub.MakeSyncCall(self.package, self.call,
+ self.request, self.response)
+ except Exception, e:
+ self.__exception = e
+ finally:
+ self.__state = RPC.FINISHING
+ self.__Callback()
+
+ return True
+
+ def __Callback(self):
+ if self.callback:
+ try:
+ self.callback()
+ except:
+ exc_class, self.__exception, self.__traceback = sys.exc_info()
+ self.__exception._appengine_apiproxy_rpc = self
+ raise
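
As the RPC class docstring notes, a concrete implementation extends the class
and overrides _MakeCallImpl and/or _WaitImpl. A minimal sketch under that
recipe (LoggingRPC is a hypothetical subclass, not part of this commit; it
merely wraps the default behavior shown above):

    from google.appengine.api import apiproxy_rpc

    class LoggingRPC(apiproxy_rpc.RPC):
      """RPC subclass that logs state transitions around the base behavior."""

      def _MakeCallImpl(self):
        print 'starting %s.%s' % (self.package, self.call)
        apiproxy_rpc.RPC._MakeCallImpl(self)  # moves state to RUNNING

      def _WaitImpl(self):
        # The base implementation performs the call synchronously via
        # self.stub.MakeSyncCall and moves state to FINISHING.
        completed = apiproxy_rpc.RPC._WaitImpl(self)
        print 'finished %s.%s' % (self.package, self.call)
        return completed
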
diff --git a/google_appengine/google/appengine/api/apiproxy_rpc.pyc b/google_appengine/google/appengine/api/apiproxy_rpc.pyc
new file mode 100644
index 0000000..da77a36
--- /dev/null
+++ b/google_appengine/google/appengine/api/apiproxy_rpc.pyc
Binary files differ
diff --git a/google_appengine/google/appengine/api/apiproxy_stub.py b/google_appengine/google/appengine/api/apiproxy_stub.py
new file mode 100755
index 0000000..5104ab2
--- /dev/null
+++ b/google_appengine/google/appengine/api/apiproxy_stub.py
@@ -0,0 +1,80 @@
+#!/usr/bin/env python
+#
+# Copyright 2007 Google Inc.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
+
+"""Base class for implementing API proxy stubs."""
+
+
+
+
+
+from google.appengine.api import apiproxy_rpc
+from google.appengine.runtime import apiproxy_errors
+
+
+MAX_REQUEST_SIZE = 1 << 20
+
+
+class APIProxyStub(object):
+ """Base class for implementing API proxy stub classes.
+
+ To implement an API proxy stub:
+ - Extend this class.
+ - Override __init__ to pass in appropriate default service name.
+ - Implement service methods as _Dynamic_<method>(request, response).
+ """
+
+ def __init__(self, service_name, max_request_size=MAX_REQUEST_SIZE):
+ """Constructor.
+
+ Args:
+ service_name: Service name expected for all calls.
+      max_request_size: int, maximum allowable size of the incoming request. An
+ apiproxy_errors.RequestTooLargeError will be raised if the inbound
+ request exceeds this size. Default is 1 MB.
+ """
+ self.__service_name = service_name
+ self.__max_request_size = max_request_size
+
+ def CreateRPC(self):
+ """Creates RPC object instance.
+
+ Returns:
+      an instance of RPC.
+ """
+ return apiproxy_rpc.RPC(stub=self)
+
+ def MakeSyncCall(self, service, call, request, response):
+ """The main RPC entry point.
+
+ Args:
+      service: Must be the name provided to service_name of the constructor.
+      call: A string representing the rpc to make. Must be part of
+        the underlying service's methods and implemented by _Dynamic_<call>.
+ request: A protocol buffer of the type corresponding to 'call'.
+ response: A protocol buffer of the type corresponding to 'call'.
+ """
+ assert service == self.__service_name, ('Expected "%s" service name, '
+ 'was "%s"' % (self.__service_name,
+ service))
+ if request.ByteSize() > self.__max_request_size:
+ raise apiproxy_errors.RequestTooLargeError(
+ 'The request to API call %s.%s() was too large.' % (service, call))
+ messages = []
+ assert request.IsInitialized(messages), messages
+
+ method = getattr(self, '_Dynamic_' + call)
+ method(request, response)
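
Following the recipe in the APIProxyStub docstring, a service stub passes its
service name to the base constructor and implements each RPC method as
_Dynamic_<method>, which MakeSyncCall looks up by name. A minimal sketch
(EchoServiceStub and the 'echo' service are illustrative assumptions, not
part of the SDK):

    from google.appengine.api import apiproxy_stub

    class EchoServiceStub(apiproxy_stub.APIProxyStub):
      """Trivial stub whose single 'Echo' method copies request to response."""

      def __init__(self):
        apiproxy_stub.APIProxyStub.__init__(self, 'echo')

      def _Dynamic_Echo(self, request, response):
        # MakeSyncCall('echo', 'Echo', request, response) dispatches here.
        response.set_value(request.value())
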
diff --git a/google_appengine/google/appengine/api/apiproxy_stub.pyc b/google_appengine/google/appengine/api/apiproxy_stub.pyc
new file mode 100644
index 0000000..41e7a0c
--- /dev/null
+++ b/google_appengine/google/appengine/api/apiproxy_stub.pyc
Binary files differ
diff --git a/google_appengine/google/appengine/api/apiproxy_stub_map.py b/google_appengine/google/appengine/api/apiproxy_stub_map.py
new file mode 100755
index 0000000..716498f
--- /dev/null
+++ b/google_appengine/google/appengine/api/apiproxy_stub_map.py
@@ -0,0 +1,470 @@
+#!/usr/bin/env python
+#
+# Copyright 2007 Google Inc.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
+
+"""Container of APIProxy stubs for more convenient unittesting.
+
+Classes/variables/functions defined here:
+ APIProxyStubMap: container of APIProxy stubs.
+ apiproxy: global instance of an APIProxyStubMap.
+ MakeSyncCall: APIProxy entry point.
+ UserRPC: User-visible class wrapping asynchronous RPCs.
+"""
+
+
+
+
+
+import inspect
+import sys
+
+from google.appengine.api import apiproxy_rpc
+
+
+def CreateRPC(service):
+ """Creates a RPC instance for the given service.
+
+ The instance is suitable for talking to remote services.
+ Each RPC instance can be used only once, and should not be reused.
+
+ Args:
+ service: string representing which service to call.
+
+ Returns:
+ the rpc object.
+
+ Raises:
+ AssertionError or RuntimeError if the stub for service doesn't supply a
+ CreateRPC method.
+ """
+ stub = apiproxy.GetStub(service)
+ assert stub, 'No api proxy found for service "%s"' % service
+  assert hasattr(stub, 'CreateRPC'), ('The service "%s" doesn\'t have '
+                                      'a CreateRPC method.' % service)
+ return stub.CreateRPC()
+
+
+def MakeSyncCall(service, call, request, response):
+ """The APIProxy entry point for a synchronous API call.
+
+ Args:
+ service: string representing which service to call
+ call: string representing which function to call
+ request: protocol buffer for the request
+ response: protocol buffer for the response
+
+ Raises:
+ apiproxy_errors.Error or a subclass.
+ """
+ apiproxy.MakeSyncCall(service, call, request, response)
+
+
+class ListOfHooks(object):
+ """An ordered collection of hooks for a particular API call.
+
+ A hook is a function that has exactly the same signature as
+  a service stub. It will be called before or after an api call is
+  executed, depending on whether this list is for precall or postcall hooks.
+ Hooks can be used for debugging purposes (check certain
+ pre- or postconditions on api calls) or to apply patches to protocol
+ buffers before/after a call gets submitted.
+ """
+
+ def __init__(self):
+ """Constructor."""
+
+ self.__content = []
+
+ self.__unique_keys = set()
+
+ def __len__(self):
+ """Returns the amount of elements in the collection."""
+ return self.__content.__len__()
+
+ def __Insert(self, index, key, function, service=None):
+ """Appends a hook at a certain position in the list.
+
+ Args:
+ index: the index of where to insert the function
+ key: a unique key (within the module) for this particular function.
+ If something from the same module with the same key is already
+ registered, nothing will be added.
+ function: the hook to be added.
+ service: optional argument that restricts the hook to a particular api
+
+ Returns:
+ True if the collection was modified.
+ """
+ unique_key = (key, inspect.getmodule(function))
+ if unique_key in self.__unique_keys:
+ return False
+ num_args = len(inspect.getargspec(function)[0])
+ if (inspect.ismethod(function)):
+ num_args -= 1
+ self.__content.insert(index, (key, function, service, num_args))
+ self.__unique_keys.add(unique_key)
+ return True
+
+ def Append(self, key, function, service=None):
+ """Appends a hook at the end of the list.
+
+ Args:
+ key: a unique key (within the module) for this particular function.
+ If something from the same module with the same key is already
+ registered, nothing will be added.
+ function: the hook to be added.
+ service: optional argument that restricts the hook to a particular api
+
+ Returns:
+ True if the collection was modified.
+ """
+ return self.__Insert(len(self), key, function, service)
+
+ def Push(self, key, function, service=None):
+ """Inserts a hook at the beginning of the list.
+
+ Args:
+ key: a unique key (within the module) for this particular function.
+ If something from the same module with the same key is already
+ registered, nothing will be added.
+ function: the hook to be added.
+ service: optional argument that restricts the hook to a particular api
+
+ Returns:
+ True if the collection was modified.
+ """
+ return self.__Insert(0, key, function, service)
+
+ def Clear(self):
+ """Removes all hooks from the list (useful for unit tests)."""
+ self.__content = []
+ self.__unique_keys = set()
+
+ def Call(self, service, call, request, response, rpc=None):
+ """Invokes all hooks in this collection.
+
+ Args:
+ service: string representing which service to call
+ call: string representing which function to call
+ request: protocol buffer for the request
+ response: protocol buffer for the response
+ rpc: optional RPC used to make this call
+ """
+ for key, function, srv, num_args in self.__content:
+ if srv is None or srv == service:
+ if num_args == 5:
+ function(service, call, request, response, rpc)
+ else:
+ function(service, call, request, response)
+
+
+class APIProxyStubMap(object):
+ """Container of APIProxy stubs for more convenient unittesting.
+
+ Stubs may be either trivial implementations of APIProxy services (e.g.
+ DatastoreFileStub, UserServiceStub) or "real" implementations.
+
+ For unittests, we may want to mix and match real and trivial implementations
+ of services in order to better focus testing on individual service
+ implementations. To achieve this, we allow the client to attach stubs to
+ service names, as well as define a default stub to be used if no specific
+ matching stub is identified.
+ """
+
+
+ def __init__(self, default_stub=None):
+ """Constructor.
+
+ Args:
+ default_stub: optional stub
+
+ 'default_stub' will be used whenever no specific matching stub is found.
+ """
+ self.__stub_map = {}
+ self.__default_stub = default_stub
+ self.__precall_hooks = ListOfHooks()
+ self.__postcall_hooks = ListOfHooks()
+
+ def GetPreCallHooks(self):
+ """Gets a collection for all precall hooks."""
+ return self.__precall_hooks
+
+ def GetPostCallHooks(self):
+ """Gets a collection for all precall hooks."""
+ return self.__postcall_hooks
+
+ def RegisterStub(self, service, stub):
+ """Register the provided stub for the specified service.
+
+ Args:
+ service: string
+ stub: stub
+ """
+ assert not self.__stub_map.has_key(service), repr(service)
+ self.__stub_map[service] = stub
+
+ if service == 'datastore':
+ self.RegisterStub('datastore_v3', stub)
+
+ def GetStub(self, service):
+ """Retrieve the stub registered for the specified service.
+
+ Args:
+ service: string
+
+ Returns:
+ stub
+
+ Returns the stub registered for 'service', and returns the default stub
+ if no such stub is found.
+ """
+ return self.__stub_map.get(service, self.__default_stub)
+
+ def MakeSyncCall(self, service, call, request, response):
+ """The APIProxy entry point.
+
+ Args:
+ service: string representing which service to call
+ call: string representing which function to call
+ request: protocol buffer for the request
+ response: protocol buffer for the response
+
+ Raises:
+ apiproxy_errors.Error or a subclass.
+ """
+ stub = self.GetStub(service)
+ assert stub, 'No api proxy found for service "%s"' % service
+ if hasattr(stub, 'CreateRPC'):
+ rpc = stub.CreateRPC()
+ self.__precall_hooks.Call(service, call, request, response, rpc)
+ rpc.MakeCall(service, call, request, response)
+ rpc.Wait()
+ rpc.CheckSuccess()
+ self.__postcall_hooks.Call(service, call, request, response, rpc)
+ else:
+ self.__precall_hooks.Call(service, call, request, response)
+ stub.MakeSyncCall(service, call, request, response)
+ self.__postcall_hooks.Call(service, call, request, response)
+
+
+class UserRPC(object):
+ """Wrapper class for asynchronous RPC.
+
+ Simplest low-level usage pattern:
+
+ rpc = UserRPC('service', [deadline], [callback])
+ rpc.make_call('method', request, response)
+ .
+ .
+ .
+ rpc.wait()
+ rpc.check_success()
+
+ However, a service module normally provides a wrapper so that the
+ typical usage pattern becomes more like this:
+
+ from google.appengine.api import service
+ rpc = service.create_rpc([deadline], [callback])
+ service.make_method_call(rpc, [service-specific-args])
+ .
+ .
+ .
+ rpc.wait()
+ result = rpc.get_result()
+
+ The service.make_method_call() function sets a service- and method-
+ specific hook function that is called by rpc.get_result() with the
+ rpc object as its first argument, and service-specific value as its
+ second argument. The hook function should call rpc.check_success()
+ and then extract the user-level result from the rpc.result
+ protobuffer. Additional arguments may be passed from
+ make_method_call() to the get_result hook via the second argument.
+ """
+
+ __method = None
+ __get_result_hook = None
+ __user_data = None
+ __postcall_hooks_called = False
+
+ def __init__(self, service, deadline=None, callback=None):
+ """Constructor.
+
+ Args:
+ service: The service name.
+ deadline: Optional deadline. Default depends on the implementation.
+ callback: Optional argument-less callback function.
+ """
+ self.__service = service
+ self.__rpc = CreateRPC(service)
+ self.__rpc.deadline = deadline
+ self.__rpc.callback = callback
+
+ @property
+ def service(self):
+ """Return the service name."""
+ return self.__service
+
+ @property
+ def method(self):
+ """Return the method name."""
+ return self.__method
+
+ @property
+ def deadline(self):
+ """Return the deadline, if set explicitly (otherwise None)."""
+ return self.__rpc.deadline
+
+ def __get_callback(self):
+ """Return the callback attribute, a function without arguments.
+
+ This attribute can also be assigned to. For example, the
+ following code calls some_other_function(rpc) when the RPC is
+ complete:
+
+ rpc = service.create_rpc()
+ rpc.callback = lambda: some_other_function(rpc)
+ service.make_method_call(rpc)
+ rpc.wait()
+ """
+ return self.__rpc.callback
+ def __set_callback(self, callback):
+ """Set the callback function."""
+ self.__rpc.callback = callback
+ callback = property(__get_callback, __set_callback)
+
+ @property
+ def request(self):
+ """Return the request protocol buffer object."""
+ return self.__rpc.request
+
+ @property
+ def response(self):
+ """Return the response protocol buffer object."""
+ return self.__rpc.response
+
+ @property
+ def state(self):
+ """Return the RPC state.
+
+ Possible values are attributes of apiproxy_rpc.RPC: IDLE, RUNNING,
+ FINISHING.
+ """
+ return self.__rpc.state
+
+ @property
+ def get_result_hook(self):
+ """Return the get-result hook function."""
+ return self.__get_result_hook
+
+ @property
+ def user_data(self):
+ """Return the user data for the hook function."""
+ return self.__user_data
+
+ def make_call(self, method, request, response,
+ get_result_hook=None, user_data=None):
+ """Initiate a call.
+
+ Args:
+ method: The method name.
+ request: The request protocol buffer.
+ response: The response protocol buffer.
+ get_result_hook: Optional get-result hook function. If not None,
+ this must be a function with exactly one argument, the RPC
+ object (self). Its return value is returned from get_result().
+ user_data: Optional additional arbitrary data for the get-result
+ hook function. This can be accessed as rpc.user_data. The
+ type of this value is up to the service module.
+
+ This function may only be called once per RPC object. It sends
+ the request to the remote server, but does not wait for a
+ response. This allows concurrent execution of the remote call and
+ further local processing (e.g., making additional remote calls).
+
+ Before the call is initiated, the precall hooks are called.
+ """
+ assert self.__rpc.state == apiproxy_rpc.RPC.IDLE, repr(self.state)
+ self.__method = method
+ self.__get_result_hook = get_result_hook
+ self.__user_data = user_data
+ apiproxy.GetPreCallHooks().Call(
+ self.__service, method, request, response, self.__rpc)
+ self.__rpc.MakeCall(self.__service, method, request, response)
+
+ def wait(self):
+ """Wait for the call to complete, and call callbacks.
+
+ This is the only time callback functions may be called. (However,
+ note that check_success() and get_result() call wait().) Waiting
+ for one RPC may cause callbacks for other RPCs to be called.
+ Callback functions may call check_success() and get_result().
+
+ Callbacks are called without arguments; if a callback needs access
+ to the RPC object a Python nested function (a.k.a. closure) or a
+ bound method may be used. To facilitate this, the callback may be
+ assigned after the RPC object is created (but before make_call()
+ is called).
+
+ Note: don't confuse callbacks with get-result hooks or precall
+ and postcall hooks.
+ """
+ assert self.__rpc.state != apiproxy_rpc.RPC.IDLE, repr(self.state)
+ if self.__rpc.state == apiproxy_rpc.RPC.RUNNING:
+ self.__rpc.Wait()
+ assert self.__rpc.state == apiproxy_rpc.RPC.FINISHING, repr(self.state)
+
+ def check_success(self):
+ """Check for success of the RPC, possibly raising an exception.
+
+ This function should be called at least once per RPC. If wait()
+ hasn't been called yet, it is called first. If the RPC caused
+ an exceptional condition, an exception will be raised here.
+ The first time check_success() is called, the postcall hooks
+ are called.
+ """
+ self.wait()
+ self.__rpc.CheckSuccess()
+ if not self.__postcall_hooks_called:
+ self.__postcall_hooks_called = True
+ apiproxy.GetPostCallHooks().Call(self.__service, self.__method,
+ self.request, self.response, self.__rpc)
+
+ def get_result(self):
+ """Get the result of the RPC, or possibly raise an exception.
+
+ This implies a call to check_success(). If a get-result hook was
+ passed to make_call(), that hook is responsible for calling
+ check_success(), and the return value of the hook is returned.
+ Otherwise, check_success() is called directly and None is
+ returned.
+ """
+ if self.__get_result_hook is None:
+ self.check_success()
+ return None
+ else:
+ return self.__get_result_hook(self)
+
+
+def GetDefaultAPIProxy():
+ try:
+ runtime = __import__('google.appengine.runtime', globals(), locals(),
+ ['apiproxy'])
+ return APIProxyStubMap(runtime.apiproxy)
+ except (AttributeError, ImportError):
+ return APIProxyStubMap()
+
+
+apiproxy = GetDefaultAPIProxy()
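+
+
+# Illustrative sketch (editor's addition, not part of the original file).
+# The docstrings above describe the intended calling sequence: issue the
+# call, keep working locally, then wait. Using the same hypothetical
+# `service` wrapper and `make_method_call` names as the callback docstring:
+#
+#   rpc = service.create_rpc()
+#   rpc.callback = lambda: logging.info('RPC to %s done', rpc.service)
+#   service.make_method_call(rpc)   # issues the request, returns immediately
+#   ... other local work, or more RPCs ...
+#   rpc.wait()                      # callbacks run here
+#   rpc.check_success()             # raises on failure, runs postcall hooks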
diff --git a/google_appengine/google/appengine/api/apiproxy_stub_map.pyc b/google_appengine/google/appengine/api/apiproxy_stub_map.pyc
new file mode 100644
index 0000000..5d889f2
--- /dev/null
+++ b/google_appengine/google/appengine/api/apiproxy_stub_map.pyc
Binary files differ
diff --git a/google_appengine/google/appengine/api/app_logging.py b/google_appengine/google/appengine/api/app_logging.py
new file mode 100755
index 0000000..e576d37
--- /dev/null
+++ b/google_appengine/google/appengine/api/app_logging.py
@@ -0,0 +1,99 @@
+#!/usr/bin/env python
+#
+# Copyright 2007 Google Inc.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
+
+"""Logging utilities for use by applications.
+
+Classes defined here:
+ AppLogsHandler: StreamHandler subclass
+"""
+
+
+
+
+
+import logging
+import sys
+import types
+
+
+NEWLINE_REPLACEMENT = "\0"
+
+
+class AppLogsHandler(logging.StreamHandler):
+ """Logging handler that will direct output to a persistent store of
+ application logs.
+
+ This handler will output log statements to stderr. This handler is
+ automatically initialized and attached to the Python common logging library.
+ """
+
+
+
+
+ def __init__(self, stream=None):
+ """Constructor.
+
+ Args:
+ stream: Destination for output. Optional; defaults to sys.stderr.
+ """
+ logging.StreamHandler.__init__(self, stream)
+
+ def close(self):
+ """Closes the stream.
+
+ This implementation is based on the implementation of FileHandler.close()."""
+ self.flush()
+ self.stream.close()
+ logging.StreamHandler.close(self)
+
+ def emit(self, record):
+ """Emit a record.
+
+ This implementation is based on the implementation of
+ StreamHandler.emit()."""
+ try:
+ message = self._AppLogsMessage(record)
+ if isinstance(message, unicode):
+ message = message.encode("UTF-8")
+ self.stream.write(message)
+ self.flush()
+ except (KeyboardInterrupt, SystemExit):
+ raise
+ except:
+ self.handleError(record)
+
+ def _AppLogsMessage(self, record):
+ """Converts the log record into a log line."""
+
+ message = self.format(record).replace("\n", NEWLINE_REPLACEMENT)
+ return "LOG %d %d %s\n" % (self._AppLogsLevel(record.levelno),
+ long(record.created * 1000 * 1000),
+ message)
+
+ def _AppLogsLevel(self, level):
+ """Converts the logging level used in Python to the API logging level"""
+ if level >= logging.CRITICAL:
+ return 4
+ elif level >= logging.ERROR:
+ return 3
+ elif level >= logging.WARNING:
+ return 2
+ elif level >= logging.INFO:
+ return 1
+ else:
+ return 0
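+
+
+# Illustrative sketch (editor's addition, not part of the original file).
+# Once attached to the root logger, the handler writes records to stderr in
+# the "LOG <level> <usec timestamp> <message>" form built by _AppLogsMessage,
+# with the Python level mapped down to 0-4 by _AppLogsLevel. For a warning
+# logged at Unix time 1255996809.5:
+#
+#   handler = AppLogsHandler()
+#   logging.getLogger().addHandler(handler)
+#   logging.warning('disk almost full')
+#   # -> "LOG 2 1255996809500000 disk almost full\n" on stderr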
diff --git a/google_appengine/google/appengine/api/appinfo.py b/google_appengine/google/appengine/api/appinfo.py
new file mode 100755
index 0000000..6ab406c
--- /dev/null
+++ b/google_appengine/google/appengine/api/appinfo.py
@@ -0,0 +1,430 @@
+#!/usr/bin/env python
+#
+# Copyright 2007 Google Inc.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
+
+"""AppInfo tools.
+
+Library for working with AppInfo records in memory, and for storing and
+loading them from configuration files.
+"""
+
+
+
+
+
+import re
+
+from google.appengine.api import appinfo_errors
+from google.appengine.api import validation
+from google.appengine.api import yaml_builder
+from google.appengine.api import yaml_listener
+from google.appengine.api import yaml_object
+
+
+_URL_REGEX = r'(?!\^)/|\.|(\(.).*(?!\$).'
+_FILES_REGEX = r'(?!\^).*(?!\$).'
+
+_DELTA_REGEX = r'([1-9][0-9]*)([DdHhMm]|[sS]?)'
+_EXPIRATION_REGEX = r'\s*(%s)(\s+%s)*\s*' % (_DELTA_REGEX, _DELTA_REGEX)
+
+_SERVICE_RE_STRING = r'(mail|xmpp_message)'
+
+_EXPIRATION_CONVERSIONS = {
+ 'd': 60 * 60 * 24,
+ 'h': 60 * 60,
+ 'm': 60,
+ 's': 1,
+}
+
+APP_ID_MAX_LEN = 100
+MAJOR_VERSION_ID_MAX_LEN = 100
+MAX_URL_MAPS = 100
+
+APPLICATION_RE_STRING = r'(?!-)[a-z\d\-]{1,%d}' % APP_ID_MAX_LEN
+VERSION_RE_STRING = r'(?!-)[a-z\d\-]{1,%d}' % MAJOR_VERSION_ID_MAX_LEN
+
+RUNTIME_RE_STRING = r'[a-z]{1,30}'
+
+API_VERSION_RE_STRING = r'[\w.]{1,32}'
+
+HANDLER_STATIC_FILES = 'static_files'
+HANDLER_STATIC_DIR = 'static_dir'
+HANDLER_SCRIPT = 'script'
+
+LOGIN_OPTIONAL = 'optional'
+LOGIN_REQUIRED = 'required'
+LOGIN_ADMIN = 'admin'
+
+SECURE_HTTP = 'never'
+SECURE_HTTPS = 'always'
+SECURE_HTTP_OR_HTTPS = 'optional'
+
+REQUIRE_MATCHING_FILE = 'require_matching_file'
+
+DEFAULT_SKIP_FILES = (r'^(.*/)?('
+ r'(app\.yaml)|'
+ r'(app\.yml)|'
+ r'(index\.yaml)|'
+ r'(index\.yml)|'
+ r'(#.*#)|'
+ r'(.*~)|'
+ r'(.*\.py[co])|'
+ r'(.*/RCS/.*)|'
+ r'(\..*)|'
+ r')$')
+
+LOGIN = 'login'
+SECURE = 'secure'
+URL = 'url'
+STATIC_FILES = 'static_files'
+UPLOAD = 'upload'
+STATIC_DIR = 'static_dir'
+MIME_TYPE = 'mime_type'
+SCRIPT = 'script'
+EXPIRATION = 'expiration'
+
+APPLICATION = 'application'
+VERSION = 'version'
+RUNTIME = 'runtime'
+API_VERSION = 'api_version'
+HANDLERS = 'handlers'
+DEFAULT_EXPIRATION = 'default_expiration'
+SKIP_FILES = 'skip_files'
+SERVICES = 'inbound_services'
+
+
+class URLMap(validation.Validated):
+ """Mapping from URLs to handlers.
+
+ This class acts like something of a union type. Its purpose is to
+ describe a mapping between a set of URLs and their handlers. What
+ handler type a given instance has is determined by which handler-id
+ attribute is used.
+
+ Each mapping can have one and only one handler type. Attempting to
+ use more than one handler-id attribute will cause an UnknownHandlerType
+ to be raised during validation. Failure to provide any handler-id
+ attributes will cause MissingHandlerType to be raised during validation.
+
+ The regular expression used by the url field will be used to match against
+ the entire URL path and query string of the request. This means that
+ partial maps will not be matched. Specifying a url, say /admin, is the
+ same as matching against the regular expression '^/admin$'. Don't begin
+ your matching url with ^ or end it with $; such regular expressions
+ won't be accepted and will raise ValueError.
+
+ Attributes:
+ login: Whether or not login is required to access this URL. Defaults to
+ 'optional'.
+ secure: Restriction on the protocol which can be used to serve
+ this URL/handler (HTTP, HTTPS or either).
+ url: Regular expression used to fully match against the request URL's path.
+ See Special Cases for using static_dir.
+ static_files: Handler id attribute that maps URL to the appropriate
+ file. Can use regex backreferences to groups matched in url.
+ upload: Regular expression used by the application configuration
+ program to know which files are uploaded as blobs. It's very
+ difficult to determine this using just the url and static_files
+ so this attribute must be included. Required when defining a
+ static_files mapping.
+ A matching file name must fully match against the upload regex, similar
+ to how url is matched against the request path. Do not begin upload
+ with ^ or end it with $.
+ static_dir: Handler id that maps the provided url to a sub-directory
+ within the application directory. See Special Cases.
+ mime_type: When used with static_files and static_dir, the mime-type
+ of files served from those directories is overridden with this
+ value.
+ script: Handler id that maps URLs to a script handler within the
+ application directory that will run using CGI.
+ expiration: When used with static files and directories, the time delta to
+ use for cache expiration. Has the form '4d 5h 30m 15s', where each letter
+ signifies days, hours, minutes, and seconds, respectively. The 's' for
+ seconds may be omitted. Only one amount is required; combining
+ multiple amounts is optional. Example good values: '10', '1d 6h',
+ '1h 30m', '7d 7d 7d', '5m 30'.
+
+ Special cases:
+ When defining a static_dir handler, do not use a regular expression
+ in the url attribute. Both the url and static_dir attributes are
+ automatically mapped to these equivalents:
+
+ <url>/(.*)
+ <static_dir>/\1
+
+ For example:
+
+ url: /images
+ static_dir: images_folder
+
+ Is the same as this static_files declaration:
+
+ url: /images/(.*)
+ static_files: images/\1
+ upload: images/(.*)
+ """
+
+ ATTRIBUTES = {
+
+ URL: validation.Optional(_URL_REGEX),
+ LOGIN: validation.Options(LOGIN_OPTIONAL,
+ LOGIN_REQUIRED,
+ LOGIN_ADMIN,
+ default=LOGIN_OPTIONAL),
+
+ SECURE: validation.Options(SECURE_HTTP,
+ SECURE_HTTPS,
+ SECURE_HTTP_OR_HTTPS,
+ default=SECURE_HTTP),
+
+
+
+ HANDLER_STATIC_FILES: validation.Optional(_FILES_REGEX),
+ UPLOAD: validation.Optional(_FILES_REGEX),
+
+
+ HANDLER_STATIC_DIR: validation.Optional(_FILES_REGEX),
+
+
+ MIME_TYPE: validation.Optional(str),
+ EXPIRATION: validation.Optional(_EXPIRATION_REGEX),
+
+
+ HANDLER_SCRIPT: validation.Optional(_FILES_REGEX),
+
+ REQUIRE_MATCHING_FILE: validation.Optional(bool),
+ }
+
+ COMMON_FIELDS = set([URL, LOGIN, SECURE])
+
+ ALLOWED_FIELDS = {
+ HANDLER_STATIC_FILES: (MIME_TYPE, UPLOAD, EXPIRATION,
+ REQUIRE_MATCHING_FILE),
+ HANDLER_STATIC_DIR: (MIME_TYPE, EXPIRATION, REQUIRE_MATCHING_FILE),
+ HANDLER_SCRIPT: (),
+ }
+
+ def GetHandler(self):
+ """Get handler for mapping.
+
+ Returns:
+ Value of the handler (determined by handler id attribute).
+ """
+ return getattr(self, self.GetHandlerType())
+
+ def GetHandlerType(self):
+ """Get handler type of mapping.
+
+ Returns:
+ Handler type determined by which handler id attribute is set.
+
+ Raises:
+ UnknownHandlerType when none of the handler id attributes are set.
+
+ UnexpectedHandlerAttribute when an unexpected attribute
+ is set for the discovered handler type.
+
+ HandlerTypeMissingAttribute when the handler is missing a
+ required attribute for its handler type.
+ """
+ for id_field in URLMap.ALLOWED_FIELDS.iterkeys():
+ if getattr(self, id_field) is not None:
+ mapping_type = id_field
+ break
+ else:
+ raise appinfo_errors.UnknownHandlerType(
+ 'Unknown url handler type.\n%s' % str(self))
+
+ allowed_fields = URLMap.ALLOWED_FIELDS[mapping_type]
+
+ for attribute in self.ATTRIBUTES.iterkeys():
+ if (getattr(self, attribute) is not None and
+ not (attribute in allowed_fields or
+ attribute in URLMap.COMMON_FIELDS or
+ attribute == mapping_type)):
+ raise appinfo_errors.UnexpectedHandlerAttribute(
+ 'Unexpected attribute "%s" for mapping type %s.' %
+ (attribute, mapping_type))
+
+ if mapping_type == HANDLER_STATIC_FILES and not self.upload:
+ raise appinfo_errors.MissingHandlerAttribute(
+ 'Missing "%s" attribute for URL "%s".' % (UPLOAD, self.url))
+
+ return mapping_type
+
+ def CheckInitialized(self):
+ """Adds additional checking to make sure handler has correct fields.
+
+ In addition to normal ValidatedCheck calls GetHandlerType
+ which validates all the handler fields are configured
+ properly.
+
+ Raises:
+ UnknownHandlerType when none of the handler id attributes are set.
+
+ UnexpectedHandlerAttribute when an unexpected attribute
+ is set for the discovered handler type.
+
+ HandlerTypeMissingAttribute when the handler is missing a
+ required attribute for its handler type.
+ """
+ super(URLMap, self).CheckInitialized()
+ self.GetHandlerType()
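+
+
+# Illustrative sketch (editor's addition, not part of the original file).
+# Assuming validation.Validated accepts its ATTRIBUTES as constructor keyword
+# arguments (as the yaml_object builder supplies them), a static_files mapping
+# can be built and inspected directly:
+#
+#   mapping = URLMap(url='/images/(.*)',
+#                    static_files=r'images/\1',
+#                    upload='images/(.*)')
+#   mapping.GetHandlerType()  # -> 'static_files'
+#   mapping.GetHandler()      # -> 'images/\\1'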
+
+
+class AppInfoExternal(validation.Validated):
+ """Class representing users application info.
+
+ This class is passed to a yaml_object builder to provide the validation
+ for the application information file format parser.
+
+ Attributes:
+ application: Unique identifier for application.
+ version: Application's major version number.
+ runtime: Runtime used by application.
+ api_version: Which version of APIs to use.
+ handlers: List of URL handlers.
+ default_expiration: Default time delta to use for cache expiration for
+ all static files, unless they have their own specific 'expiration' set.
+ See the URLMap.expiration field's documentation for more information.
+ skip_files: An re object. Files that match this regular expression will
+ not be uploaded by appcfg.py. For example:
+ skip_files: |
+ .svn.*|
+ #.*#
+ """
+
+ ATTRIBUTES = {
+
+
+ APPLICATION: APPLICATION_RE_STRING,
+ VERSION: VERSION_RE_STRING,
+ RUNTIME: RUNTIME_RE_STRING,
+
+
+ API_VERSION: API_VERSION_RE_STRING,
+ HANDLERS: validation.Optional(validation.Repeated(URLMap)),
+
+ SERVICES: validation.Optional(validation.Repeated(
+ validation.Regex(_SERVICE_RE_STRING))),
+ DEFAULT_EXPIRATION: validation.Optional(_EXPIRATION_REGEX),
+ SKIP_FILES: validation.RegexStr(default=DEFAULT_SKIP_FILES)
+ }
+
+ def CheckInitialized(self):
+ """Ensures that at least one url mapping is provided.
+
+ Raises:
+ MissingURLMapping when no URLMap objects are present in the object.
+ TooManyURLMappings when there are too many URLMap entries.
+ """
+ super(AppInfoExternal, self).CheckInitialized()
+ if not self.handlers:
+ raise appinfo_errors.MissingURLMapping(
+ 'No URLMap entries found in application configuration')
+ if len(self.handlers) > MAX_URL_MAPS:
+ raise appinfo_errors.TooManyURLMappings(
+ 'Found more than %d URLMap entries in application configuration' %
+ MAX_URL_MAPS)
+
+
+def LoadSingleAppInfo(app_info):
+ """Load a single AppInfo object where one and only one is expected.
+
+ Args:
+ app_info: A file-like object or string. If it is a string, parse it as
+ a configuration file. If it is a file-like object, read in data and
+ parse.
+
+ Returns:
+ An instance of AppInfoExternal as loaded from a YAML file.
+
+ Raises:
+ ValueError: if a specified service is not valid.
+ EmptyConfigurationFile: when there are no documents in the YAML file.
+ MultipleConfigurationFile: when there is more than one document in the
+ YAML file.
+ """
+ builder = yaml_object.ObjectBuilder(AppInfoExternal)
+ handler = yaml_builder.BuilderHandler(builder)
+ listener = yaml_listener.EventListener(handler)
+ listener.Parse(app_info)
+
+ app_infos = handler.GetResults()
+ if len(app_infos) < 1:
+ raise appinfo_errors.EmptyConfigurationFile()
+ if len(app_infos) > 1:
+ raise appinfo_errors.MultipleConfigurationFile()
+ return app_infos[0]
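+
+
+# Illustrative sketch (editor's addition, not part of the original file).
+# A minimal well-formed configuration parses into an AppInfoExternal whose
+# attributes mirror the YAML fields:
+#
+#   info = LoadSingleAppInfo('application: myapp\n'
+#                            'version: live\n'
+#                            'runtime: python\n'
+#                            'api_version: 1\n'
+#                            'handlers:\n'
+#                            '- url: /.*\n'
+#                            '  script: main.py\n')
+#   info.application          # -> 'myapp'
+#   info.handlers[0].script   # -> 'main.py'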
+
+
+def ParseExpiration(expiration):
+ """Parses an expiration delta string.
+
+ Args:
+ expiration: String that matches _DELTA_REGEX.
+
+ Returns:
+ Time delta in seconds.
+ """
+ delta = 0
+ for match in re.finditer(_DELTA_REGEX, expiration):
+ amount = int(match.group(1))
+ units = _EXPIRATION_CONVERSIONS.get(match.group(2).lower(), 1)
+ delta += amount * units
+ return delta
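+
+
+# For example (editor's note): '1d 6h' is 1*86400 + 6*3600 = 108000 seconds,
+# and a bare number such as '30' defaults to seconds:
+#
+#   ParseExpiration('1d 6h')  # -> 108000
+#   ParseExpiration('30')     # -> 30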
+
+
+
+_file_path_positive_re = re.compile(r'^[ 0-9a-zA-Z\._\+/\$-]{1,256}$')
+
+_file_path_negative_1_re = re.compile(r'\.\.|^\./|\.$|/\./|^-|^_ah/')
+
+_file_path_negative_2_re = re.compile(r'//|/$')
+
+_file_path_negative_3_re = re.compile(r'^ | $|/ | /')
+
+
+def ValidFilename(filename):
+ """Determines if filename is valid.
+
+ filename must be a valid pathname.
+ - It must contain only letters, numbers, _, +, /, $, ., and -.
+ - It must be no more than 256 chars.
+ - It must not contain "/./", "/../", or "//".
+ - It must not end in "/".
+ - All spaces must be in the middle of a directory or file name.
+
+ Args:
+ filename: The filename to validate.
+
+ Returns:
+ An error string if the filename is invalid. Returns '' if the filename
+ is valid.
+ """
+ if _file_path_positive_re.match(filename) is None:
+ return 'Invalid character in filename: %s' % filename
+ if _file_path_negative_1_re.search(filename) is not None:
+ return ('Filename cannot contain "." or ".." '
+ 'or start with "-" or "_ah/": %s' %
+ filename)
+ if _file_path_negative_2_re.search(filename) is not None:
+ return 'Filename cannot have trailing / or contain //: %s' % filename
+ if _file_path_negative_3_re.search(filename) is not None:
+ return 'Any spaces must be in the middle of a filename: %s' % filename
+ return ''
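+
+
+# Illustrative sketch (editor's addition, not part of the original file).
+# ValidFilename reports problems as strings rather than raising, so callers
+# test for the empty string:
+#
+#   ValidFilename('static/img/logo.png')  # -> '' (valid)
+#   ValidFilename('static//logo.png')     # -> 'Filename cannot have ...'
+#   ValidFilename('../secrets.txt')       # -> 'Filename cannot contain ...'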
diff --git a/google_appengine/google/appengine/api/appinfo.pyc b/google_appengine/google/appengine/api/appinfo.pyc
new file mode 100644
index 0000000..af39ab3
--- /dev/null
+++ b/google_appengine/google/appengine/api/appinfo.pyc
Binary files differ
diff --git a/google_appengine/google/appengine/api/appinfo_errors.py b/google_appengine/google/appengine/api/appinfo_errors.py
new file mode 100755
index 0000000..a79c623
--- /dev/null
+++ b/google_appengine/google/appengine/api/appinfo_errors.py
@@ -0,0 +1,46 @@
+#!/usr/bin/env python
+#
+# Copyright 2007 Google Inc.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
+
+"""Errors used in the Python appinfo API, used by app developers."""
+
+
+
+
+
+class Error(Exception):
+ """Base datastore AppInfo type."""
+
+class EmptyConfigurationFile(Error):
+ """Tried to load empty configuration file"""
+
+class MultipleConfigurationFile(Error):
+ """Tried to load configuration file with multiple AppInfo objects"""
+
+class UnknownHandlerType(Error):
+ """Raised when it is not possible to determine URL mapping type."""
+
+class UnexpectedHandlerAttribute(Error):
+ """Raised when a handler type has an attribute that it does not use."""
+
+class MissingHandlerAttribute(Error):
+ """Raised when a handler is missing an attribute required by its type."""
+
+class MissingURLMapping(Error):
+ """Raised when there are no URL mappings in external appinfo."""
+
+class TooManyURLMappings(Error):
+ """Raised when there are too many URL mappings in external appinfo."""
diff --git a/google_appengine/google/appengine/api/appinfo_errors.pyc b/google_appengine/google/appengine/api/appinfo_errors.pyc
new file mode 100644
index 0000000..3207355
--- /dev/null
+++ b/google_appengine/google/appengine/api/appinfo_errors.pyc
Binary files differ
diff --git a/google_appengine/google/appengine/api/capabilities/__init__.py b/google_appengine/google/appengine/api/capabilities/__init__.py
new file mode 100755
index 0000000..f672cbb
--- /dev/null
+++ b/google_appengine/google/appengine/api/capabilities/__init__.py
@@ -0,0 +1,172 @@
+#!/usr/bin/env python
+#
+# Copyright 2007 Google Inc.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
+
+"""Allows applications to identify API outages and scheduled downtime.
+
+Some examples:
+ def StoreUploadedProfileImage(self):
+ uploaded_image = self.request.get('img')
+ # If the images API is unavailable, we'll just skip the resize.
+ if CapabilitySet('images').is_enabled():
+ uploaded_image = images.resize(uploaded_image, 64, 64)
+ store(uploaded_image)
+
+ def RenderHTMLForm(self):
+ datastore_readonly = CapabilitySet('datastore_v3', capabilities=['write'])
+ if not datastore_readonly.will_remain_enabled_for(60):
+ # self.response.out('<p>Not accepting submissions right now: %s</p>' %
+ # datastore_readonly.admin_message())
+ # ...render form with form elements disabled...
+ else:
+ # ...render form normally...
+
+ Individual API wrapper modules should expose CapabilitySet objects
+ for users rather than relying on users to create them. They may
+ also create convenience methods (e.g. db.IsReadOnly()) that delegate
+ to the relevant CapabilitySet.
+
+Classes defined here:
+ CapabilitySet: encapsulates one or more capabilities, allows introspection.
+ UnknownCapabilityError: thrown when an unknown capability is requested.
+"""
+
+
+
+
+
+from google.appengine.api.capabilities import capability_service_pb
+from google.appengine.base import capabilities_pb
+from google.appengine.api import apiproxy_stub_map
+
+
+IsEnabledRequest = capability_service_pb.IsEnabledRequest
+IsEnabledResponse = capability_service_pb.IsEnabledResponse
+CapabilityConfig = capabilities_pb.CapabilityConfig
+
+
+class UnknownCapabilityError(Exception):
+ """An unknown capability was requested."""
+
+
+class CapabilitySet(object):
+ """Encapsulates one or more capabilities.
+
+ Capabilities can either be named explicitly, or inferred from the
+ list of methods provided. If no capabilities or methods are
+ provided, this will check whether the entire package is enabled.
+ """
+ def __init__(self, package, capabilities=None, methods=None,
+ stub_map=apiproxy_stub_map):
+ """Constructor.
+
+ Args:
+ package: string; the name of the API package to check.
+ capabilities: list of strings
+ methods: list of strings
+ """
+ if capabilities is None:
+ capabilities = []
+ if methods is None:
+ methods = []
+ self._package = package
+ self._capabilities = ['*'] + capabilities
+ self._methods = methods
+ self._stub_map = stub_map
+
+ def is_enabled(self):
+ """Tests whether the capabilities is currently enabled.
+
+ Returns:
+ True if API calls that require these capabilities will succeed.
+
+ Raises:
+ UnknownCapabilityError, if a specified capability was not recognized.
+ """
+ config = self._get_status()
+ return config.summary_status() in (IsEnabledResponse.ENABLED,
+ IsEnabledResponse.SCHEDULED_FUTURE,
+ IsEnabledResponse.SCHEDULED_NOW)
+
+ def will_remain_enabled_for(self, time=60):
+ """Returns true if it will remain enabled for the specified amount of time.
+
+ Args:
+ time: Number of seconds in the future to look ahead when checking for
+ scheduled downtime.
+
+ Returns:
+ True if there is no scheduled downtime for the specified capability
+ within the amount of time specified.
+
+ Raises:
+ UnknownCapabilityError, if a specified capability was not recognized.
+ """
+ config = self._get_status()
+
+ status = config.summary_status()
+ if status == IsEnabledResponse.ENABLED:
+ return True
+ elif status == IsEnabledResponse.SCHEDULED_NOW:
+ return False
+ elif status == IsEnabledResponse.SCHEDULED_FUTURE:
+ if config.has_time_until_scheduled():
+ return config.time_until_scheduled() >= time
+ else:
+ return True
+ elif status == IsEnabledResponse.DISABLED:
+ return False
+ else:
+ return False
+
+ def admin_message(self):
+ """Get any administrator notice messages for these capabilities.
+
+ Returns:
+ A string containing one or more admin messages, or an empty string.
+
+ Raises:
+ UnknownCapabilityError, if a specified capability was not recognized.
+ """
+ message_list = []
+ for config in self._get_status().config_list():
+ message = config.admin_message()
+ if message and message not in message_list:
+ message_list.append(message)
+ return ' '.join(message_list)
+
+ def _get_status(self):
+ """Get an IsEnabledResponse for the capabilities listed.
+
+ Returns:
+ IsEnabledResponse for the specified capabilities.
+
+ Raises:
+ UnknownCapabilityError: If an unknown capability was requested.
+ """
+ req = IsEnabledRequest()
+ req.set_package(self._package)
+ for capability in self._capabilities:
+ req.add_capability(capability)
+ for method in self._methods:
+ req.add_call(method)
+
+ resp = capability_service_pb.IsEnabledResponse()
+ self._stub_map.MakeSyncCall('capability_service', 'IsEnabled', req, resp)
+
+ if resp.summary_status() == IsEnabledResponse.UNKNOWN:
+ raise UnknownCapabilityError()
+
+ return resp
diff --git a/google_appengine/google/appengine/api/capabilities/__init__.pyc b/google_appengine/google/appengine/api/capabilities/__init__.pyc
new file mode 100644
index 0000000..c8ac026
--- /dev/null
+++ b/google_appengine/google/appengine/api/capabilities/__init__.pyc
Binary files differ
diff --git a/google_appengine/google/appengine/api/capabilities/capability_service_pb.py b/google_appengine/google/appengine/api/capabilities/capability_service_pb.py
new file mode 100644
index 0000000..9f9ba29
--- /dev/null
+++ b/google_appengine/google/appengine/api/capabilities/capability_service_pb.py
@@ -0,0 +1,366 @@
+#!/usr/bin/env python
+#
+# Copyright 2007 Google Inc.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
+
+from google.net.proto import ProtocolBuffer
+import array
+import dummy_thread as thread
+
+__pychecker__ = """maxreturns=0 maxbranches=0 no-callinit
+ unusednames=printElemNumber,debug_strs no-special"""
+
+from google.appengine.base.capabilities_pb import CapabilityConfig
+class IsEnabledRequest(ProtocolBuffer.ProtocolMessage):
+ has_package_ = 0
+ package_ = ""
+
+ def __init__(self, contents=None):
+ self.capability_ = []
+ self.call_ = []
+ if contents is not None: self.MergeFromString(contents)
+
+ def package(self): return self.package_
+
+ def set_package(self, x):
+ self.has_package_ = 1
+ self.package_ = x
+
+ def clear_package(self):
+ if self.has_package_:
+ self.has_package_ = 0
+ self.package_ = ""
+
+ def has_package(self): return self.has_package_
+
+ def capability_size(self): return len(self.capability_)
+ def capability_list(self): return self.capability_
+
+ def capability(self, i):
+ return self.capability_[i]
+
+ def set_capability(self, i, x):
+ self.capability_[i] = x
+
+ def add_capability(self, x):
+ self.capability_.append(x)
+
+ def clear_capability(self):
+ self.capability_ = []
+
+ def call_size(self): return len(self.call_)
+ def call_list(self): return self.call_
+
+ def call(self, i):
+ return self.call_[i]
+
+ def set_call(self, i, x):
+ self.call_[i] = x
+
+ def add_call(self, x):
+ self.call_.append(x)
+
+ def clear_call(self):
+ self.call_ = []
+
+
+ def MergeFrom(self, x):
+ assert x is not self
+ if (x.has_package()): self.set_package(x.package())
+ for i in xrange(x.capability_size()): self.add_capability(x.capability(i))
+ for i in xrange(x.call_size()): self.add_call(x.call(i))
+
+ def Equals(self, x):
+ if x is self: return 1
+ if self.has_package_ != x.has_package_: return 0
+ if self.has_package_ and self.package_ != x.package_: return 0
+ if len(self.capability_) != len(x.capability_): return 0
+ for e1, e2 in zip(self.capability_, x.capability_):
+ if e1 != e2: return 0
+ if len(self.call_) != len(x.call_): return 0
+ for e1, e2 in zip(self.call_, x.call_):
+ if e1 != e2: return 0
+ return 1
+
+ def IsInitialized(self, debug_strs=None):
+ initialized = 1
+ if (not self.has_package_):
+ initialized = 0
+ if debug_strs is not None:
+ debug_strs.append('Required field: package not set.')
+ return initialized
+
+ def ByteSize(self):
+ n = 0
+ n += self.lengthString(len(self.package_))
+ n += 1 * len(self.capability_)
+ for i in xrange(len(self.capability_)): n += self.lengthString(len(self.capability_[i]))
+ n += 1 * len(self.call_)
+ for i in xrange(len(self.call_)): n += self.lengthString(len(self.call_[i]))
+ return n + 1
+
+ def Clear(self):
+ self.clear_package()
+ self.clear_capability()
+ self.clear_call()
+
+ def OutputUnchecked(self, out):
+ out.putVarInt32(10)
+ out.putPrefixedString(self.package_)
+ for i in xrange(len(self.capability_)):
+ out.putVarInt32(18)
+ out.putPrefixedString(self.capability_[i])
+ for i in xrange(len(self.call_)):
+ out.putVarInt32(26)
+ out.putPrefixedString(self.call_[i])
+
+ def TryMerge(self, d):
+ while d.avail() > 0:
+ tt = d.getVarInt32()
+ if tt == 10:
+ self.set_package(d.getPrefixedString())
+ continue
+ if tt == 18:
+ self.add_capability(d.getPrefixedString())
+ continue
+ if tt == 26:
+ self.add_call(d.getPrefixedString())
+ continue
+ if (tt == 0): raise ProtocolBuffer.ProtocolBufferDecodeError
+ d.skipData(tt)
+
+
+ def __str__(self, prefix="", printElemNumber=0):
+ res=""
+ if self.has_package_: res+=prefix+("package: %s\n" % self.DebugFormatString(self.package_))
+ cnt=0
+ for e in self.capability_:
+ elm=""
+ if printElemNumber: elm="(%d)" % cnt
+ res+=prefix+("capability%s: %s\n" % (elm, self.DebugFormatString(e)))
+ cnt+=1
+ cnt=0
+ for e in self.call_:
+ elm=""
+ if printElemNumber: elm="(%d)" % cnt
+ res+=prefix+("call%s: %s\n" % (elm, self.DebugFormatString(e)))
+ cnt+=1
+ return res
+
+
+ def _BuildTagLookupTable(sparse, maxtag, default=None):
+ return tuple([sparse.get(i, default) for i in xrange(0, 1+maxtag)])
+
+ kpackage = 1
+ kcapability = 2
+ kcall = 3
+
+ _TEXT = _BuildTagLookupTable({
+ 0: "ErrorCode",
+ 1: "package",
+ 2: "capability",
+ 3: "call",
+ }, 3)
+
+ _TYPES = _BuildTagLookupTable({
+ 0: ProtocolBuffer.Encoder.NUMERIC,
+ 1: ProtocolBuffer.Encoder.STRING,
+ 2: ProtocolBuffer.Encoder.STRING,
+ 3: ProtocolBuffer.Encoder.STRING,
+ }, 3, ProtocolBuffer.Encoder.MAX_TYPE)
+
+ _STYLE = """"""
+ _STYLE_CONTENT_TYPE = """"""
+class IsEnabledResponse(ProtocolBuffer.ProtocolMessage):
+
+ ENABLED = 1
+ SCHEDULED_FUTURE = 2
+ SCHEDULED_NOW = 3
+ DISABLED = 4
+ UNKNOWN = 5
+
+ _SummaryStatus_NAMES = {
+ 1: "ENABLED",
+ 2: "SCHEDULED_FUTURE",
+ 3: "SCHEDULED_NOW",
+ 4: "DISABLED",
+ 5: "UNKNOWN",
+ }
+
+ def SummaryStatus_Name(cls, x): return cls._SummaryStatus_NAMES.get(x, "")
+ SummaryStatus_Name = classmethod(SummaryStatus_Name)
+
+ has_summary_status_ = 0
+ summary_status_ = 0
+ has_time_until_scheduled_ = 0
+ time_until_scheduled_ = 0
+
+ def __init__(self, contents=None):
+ self.config_ = []
+ if contents is not None: self.MergeFromString(contents)
+
+ def summary_status(self): return self.summary_status_
+
+ def set_summary_status(self, x):
+ self.has_summary_status_ = 1
+ self.summary_status_ = x
+
+ def clear_summary_status(self):
+ if self.has_summary_status_:
+ self.has_summary_status_ = 0
+ self.summary_status_ = 0
+
+ def has_summary_status(self): return self.has_summary_status_
+
+ def time_until_scheduled(self): return self.time_until_scheduled_
+
+ def set_time_until_scheduled(self, x):
+ self.has_time_until_scheduled_ = 1
+ self.time_until_scheduled_ = x
+
+ def clear_time_until_scheduled(self):
+ if self.has_time_until_scheduled_:
+ self.has_time_until_scheduled_ = 0
+ self.time_until_scheduled_ = 0
+
+ def has_time_until_scheduled(self): return self.has_time_until_scheduled_
+
+ def config_size(self): return len(self.config_)
+ def config_list(self): return self.config_
+
+ def config(self, i):
+ return self.config_[i]
+
+ def mutable_config(self, i):
+ return self.config_[i]
+
+ def add_config(self):
+ x = CapabilityConfig()
+ self.config_.append(x)
+ return x
+
+ def clear_config(self):
+ self.config_ = []
+
+ def MergeFrom(self, x):
+ assert x is not self
+ if (x.has_summary_status()): self.set_summary_status(x.summary_status())
+ if (x.has_time_until_scheduled()): self.set_time_until_scheduled(x.time_until_scheduled())
+ for i in xrange(x.config_size()): self.add_config().CopyFrom(x.config(i))
+
+ def Equals(self, x):
+ if x is self: return 1
+ if self.has_summary_status_ != x.has_summary_status_: return 0
+ if self.has_summary_status_ and self.summary_status_ != x.summary_status_: return 0
+ if self.has_time_until_scheduled_ != x.has_time_until_scheduled_: return 0
+ if self.has_time_until_scheduled_ and self.time_until_scheduled_ != x.time_until_scheduled_: return 0
+ if len(self.config_) != len(x.config_): return 0
+ for e1, e2 in zip(self.config_, x.config_):
+ if e1 != e2: return 0
+ return 1
+
+ def IsInitialized(self, debug_strs=None):
+ initialized = 1
+ if (not self.has_summary_status_):
+ initialized = 0
+ if debug_strs is not None:
+ debug_strs.append('Required field: summary_status not set.')
+ for p in self.config_:
+ if not p.IsInitialized(debug_strs): initialized=0
+ return initialized
+
+ def ByteSize(self):
+ n = 0
+ n += self.lengthVarInt64(self.summary_status_)
+ if (self.has_time_until_scheduled_): n += 1 + self.lengthVarInt64(self.time_until_scheduled_)
+ n += 1 * len(self.config_)
+ for i in xrange(len(self.config_)): n += self.lengthString(self.config_[i].ByteSize())
+ return n + 1
+
+ def Clear(self):
+ self.clear_summary_status()
+ self.clear_time_until_scheduled()
+ self.clear_config()
+
+ def OutputUnchecked(self, out):
+ out.putVarInt32(8)
+ out.putVarInt32(self.summary_status_)
+ if (self.has_time_until_scheduled_):
+ out.putVarInt32(16)
+ out.putVarInt64(self.time_until_scheduled_)
+ for i in xrange(len(self.config_)):
+ out.putVarInt32(26)
+ out.putVarInt32(self.config_[i].ByteSize())
+ self.config_[i].OutputUnchecked(out)
+
+ def TryMerge(self, d):
+ while d.avail() > 0:
+ tt = d.getVarInt32()
+ if tt == 8:
+ self.set_summary_status(d.getVarInt32())
+ continue
+ if tt == 16:
+ self.set_time_until_scheduled(d.getVarInt64())
+ continue
+ if tt == 26:
+ length = d.getVarInt32()
+ tmp = ProtocolBuffer.Decoder(d.buffer(), d.pos(), d.pos() + length)
+ d.skip(length)
+ self.add_config().TryMerge(tmp)
+ continue
+ if (tt == 0): raise ProtocolBuffer.ProtocolBufferDecodeError
+ d.skipData(tt)
+
+
+ def __str__(self, prefix="", printElemNumber=0):
+ res=""
+ if self.has_summary_status_: res+=prefix+("summary_status: %s\n" % self.DebugFormatInt32(self.summary_status_))
+ if self.has_time_until_scheduled_: res+=prefix+("time_until_scheduled: %s\n" % self.DebugFormatInt64(self.time_until_scheduled_))
+ cnt=0
+ for e in self.config_:
+ elm=""
+ if printElemNumber: elm="(%d)" % cnt
+ res+=prefix+("config%s <\n" % elm)
+ res+=e.__str__(prefix + " ", printElemNumber)
+ res+=prefix+">\n"
+ cnt+=1
+ return res
+
+
+ def _BuildTagLookupTable(sparse, maxtag, default=None):
+ return tuple([sparse.get(i, default) for i in xrange(0, 1+maxtag)])
+
+ ksummary_status = 1
+ ktime_until_scheduled = 2
+ kconfig = 3
+
+ _TEXT = _BuildTagLookupTable({
+ 0: "ErrorCode",
+ 1: "summary_status",
+ 2: "time_until_scheduled",
+ 3: "config",
+ }, 3)
+
+ _TYPES = _BuildTagLookupTable({
+ 0: ProtocolBuffer.Encoder.NUMERIC,
+ 1: ProtocolBuffer.Encoder.NUMERIC,
+ 2: ProtocolBuffer.Encoder.NUMERIC,
+ 3: ProtocolBuffer.Encoder.STRING,
+ }, 3, ProtocolBuffer.Encoder.MAX_TYPE)
+
+ _STYLE = """"""
+ _STYLE_CONTENT_TYPE = """"""
+
+__all__ = ['IsEnabledRequest','IsEnabledResponse']
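+
+# Illustrative sketch (editor's addition, not part of the original file).
+# The generated classes follow the usual ProtocolBuffer accessor pattern; a
+# query for the whole datastore package could be built with:
+#
+#   req = IsEnabledRequest()
+#   req.set_package('datastore_v3')
+#   req.add_capability('*')
+#   req.IsInitialized()  # -> 1, since the required 'package' field is set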
diff --git a/google_appengine/google/appengine/api/capabilities/capability_service_pb.pyc b/google_appengine/google/appengine/api/capabilities/capability_service_pb.pyc
new file mode 100644
index 0000000..d1a68c2
--- /dev/null
+++ b/google_appengine/google/appengine/api/capabilities/capability_service_pb.pyc
Binary files differ
diff --git a/google_appengine/google/appengine/api/capabilities/capability_stub.py b/google_appengine/google/appengine/api/capabilities/capability_stub.py
new file mode 100755
index 0000000..6d33d7e
--- /dev/null
+++ b/google_appengine/google/appengine/api/capabilities/capability_stub.py
@@ -0,0 +1,53 @@
+#!/usr/bin/env python
+#
+# Copyright 2007 Google Inc.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
+
+"""Stub version of the capability service API, everything is always enabled."""
+
+
+
+from google.appengine.api import apiproxy_stub
+from google.appengine.api import capabilities
+
+IsEnabledRequest = capabilities.IsEnabledRequest
+IsEnabledResponse = capabilities.IsEnabledResponse
+CapabilityConfig = capabilities.CapabilityConfig
+
+class CapabilityServiceStub(apiproxy_stub.APIProxyStub):
+ """Python only capability service stub."""
+
+ def __init__(self, service_name='capability_service'):
+ """Constructor.
+
+ Args:
+ service_name: Service name expected for all calls.
+ """
+ super(CapabilityServiceStub, self).__init__(service_name)
+
+
+ def _Dynamic_IsEnabled(self, request, response):
+ """Implementation of CapabilityService::IsEnabled().
+
+ Args:
+ request: An IsEnabledRequest.
+ response: An IsEnabledResponse.
+ """
+ response.set_summary_status(IsEnabledResponse.ENABLED)
+
+ default_config = response.add_config()
+ default_config.set_package('')
+ default_config.set_capability('')
+ default_config.set_status(CapabilityConfig.ENABLED)
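+
+
+# Illustrative sketch (editor's addition, not part of the original file).
+# For local testing the stub is registered with the API proxy, after which
+# every capability query reports ENABLED:
+#
+#   from google.appengine.api import apiproxy_stub_map
+#   apiproxy_stub_map.apiproxy.RegisterStub('capability_service',
+#                                           CapabilityServiceStub())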
diff --git a/google_appengine/google/appengine/api/capabilities/capability_stub.pyc b/google_appengine/google/appengine/api/capabilities/capability_stub.pyc
new file mode 100644
index 0000000..6336e60
--- /dev/null
+++ b/google_appengine/google/appengine/api/capabilities/capability_stub.pyc
Binary files differ
diff --git a/google_appengine/google/appengine/api/croninfo.py b/google_appengine/google/appengine/api/croninfo.py
new file mode 100755
index 0000000..0eab26e
--- /dev/null
+++ b/google_appengine/google/appengine/api/croninfo.py
@@ -0,0 +1,132 @@
+#!/usr/bin/env python
+#
+# Copyright 2007 Google Inc.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
+
+"""CronInfo tools.
+
+A library for working with CronInfo records, describing cron entries for an
+application. Supports loading the records from yaml.
+"""
+
+
+
+import logging
+import sys
+import traceback
+
+try:
+ import pytz
+except ImportError:
+ pytz = None
+
+from google.appengine.cron import groc
+from google.appengine.api import validation
+from google.appengine.api import yaml_builder
+from google.appengine.api import yaml_listener
+from google.appengine.api import yaml_object
+
+_URL_REGEX = r'^/.*$'
+_TIMEZONE_REGEX = r'^.{0,100}$'
+_DESCRIPTION_REGEX = r'^.{0,499}$'
+
+
+class GrocValidator(validation.Validator):
+ """Checks that a schedule is in valid groc format."""
+
+ def Validate(self, value):
+ """Validates a schedule."""
+ if value is None:
+ raise validation.MissingAttribute('schedule must be specified')
+ if not isinstance(value, basestring):
+ raise TypeError('schedule must be a string, not \'%r\''%type(value))
+ schedule = groc.CreateParser(value)
+ try:
+ schedule.timespec()
+ except groc.GrocException, e:
+ raise validation.ValidationError('schedule \'%s\' failed to parse: %s'%(
+ value, e.args[0]))
+ return value
+
+
+class TimezoneValidator(validation.Validator):
+ """Checks that a timezone can be correctly parsed and is known."""
+
+ def Validate(self, value):
+ """Validates a timezone."""
+ if value is None:
+ return
+ if not isinstance(value, basestring):
+ raise TypeError('timezone must be a string, not \'%r\'' % type(value))
+ if pytz is None:
+ return value
+ try:
+ pytz.timezone(value)
+ except pytz.UnknownTimeZoneError:
+ raise validation.ValidationError('timezone \'%s\' is unknown' % value)
+ except IOError:
+ return value
+ except:
+ unused_e, v, t = sys.exc_info()
+ logging.warning('pytz raised an unexpected error: %s.\n' % (v) +
+ 'Traceback:\n' + '\n'.join(traceback.format_tb(t)))
+ raise
+ return value
+
+
+CRON = 'cron'
+
+URL = 'url'
+SCHEDULE = 'schedule'
+TIMEZONE = 'timezone'
+DESCRIPTION = 'description'
+
+
+class MalformedCronfigurationFile(Exception):
+ """Configuration file for Cron is malformed."""
+ pass
+
+
+class CronEntry(validation.Validated):
+ """A cron entry describes a single cron job."""
+ ATTRIBUTES = {
+ URL: _URL_REGEX,
+ SCHEDULE: GrocValidator(),
+ TIMEZONE: TimezoneValidator(),
+ DESCRIPTION: validation.Optional(_DESCRIPTION_REGEX)
+ }
+
+
+class CronInfoExternal(validation.Validated):
+ """CronInfoExternal describes all cron entries for an application."""
+ ATTRIBUTES = {
+ CRON: validation.Optional(validation.Repeated(CronEntry))
+ }
+
+
+def LoadSingleCron(cron_info):
+ """Load a cron.yaml file or string and return a CronInfoExternal object."""
+ builder = yaml_object.ObjectBuilder(CronInfoExternal)
+ handler = yaml_builder.BuilderHandler(builder)
+ listener = yaml_listener.EventListener(handler)
+ listener.Parse(cron_info)
+
+ cron_info = handler.GetResults()
+ if len(cron_info) < 1:
+ raise MalformedCronfigurationFile('Empty cron configuration.')
+ if len(cron_info) > 1:
+ raise MalformedCronfigurationFile('Multiple cron sections '
+ 'in configuration.')
+ return cron_info[0]
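+
+
+# Illustrative sketch (editor's addition, not part of the original file).
+# A cron.yaml string with one entry parses into a CronInfoExternal whose
+# `cron` attribute is a list of CronEntry objects:
+#
+#   info = LoadSingleCron('cron:\n'
+#                         '- url: /tasks/summary\n'
+#                         '  schedule: every 24 hours\n')
+#   info.cron[0].url  # -> '/tasks/summary'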
diff --git a/google_appengine/google/appengine/api/croninfo.pyc b/google_appengine/google/appengine/api/croninfo.pyc
new file mode 100644
index 0000000..5540fb8
--- /dev/null
+++ b/google_appengine/google/appengine/api/croninfo.pyc
Binary files differ
diff --git a/google_appengine/google/appengine/api/datastore.py b/google_appengine/google/appengine/api/datastore.py
new file mode 100755
index 0000000..6931db8
--- /dev/null
+++ b/google_appengine/google/appengine/api/datastore.py
@@ -0,0 +1,2170 @@
+#!/usr/bin/env python
+#
+# Copyright 2007 Google Inc.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
+
+"""The Python datastore API used by app developers.
+
+Defines Entity, Query, and Iterator classes, as well as methods for all of the
+datastore's calls. Also defines conversions between the Python classes and
+their PB counterparts.
+
+The datastore errors are defined in the datastore_errors module. They live in
+a separate module only to avoid circular imports: datastore imports
+datastore_types, which needs BadValueError, so the errors can't be defined in
+datastore itself.
+"""
+
+
+
+
+
+
+import heapq
+import itertools
+import logging
+import re
+import string
+import sys
+import traceback
+from xml.sax import saxutils
+
+from google.appengine.api import api_base_pb
+from google.appengine.api import apiproxy_stub_map
+from google.appengine.api import datastore_errors
+from google.appengine.api import datastore_types
+from google.appengine.datastore import datastore_index
+from google.appengine.datastore import datastore_pb
+from google.appengine.runtime import apiproxy_errors
+from google.appengine.datastore import entity_pb
+
+try:
+ from google.appengine.api.labs.taskqueue import taskqueue_service_pb
+except ImportError:
+ from google.appengine.api.taskqueue import taskqueue_service_pb
+
+MAX_ALLOWABLE_QUERIES = 30
+
+DEFAULT_TRANSACTION_RETRIES = 3
+
+_MAX_INDEXED_PROPERTIES = 5000
+
+_MAX_ID_BATCH_SIZE = 1000 * 1000 * 1000
+
+Key = datastore_types.Key
+typename = datastore_types.typename
+
+_txes = {}
+
+
+def NormalizeAndTypeCheck(arg, types):
+ """Normalizes and type checks the given argument.
+
+ Args:
+ arg: an instance, tuple, list, iterator, or generator of the given type(s)
+ types: allowed type or tuple of types
+
+ Returns:
+ A (list, bool) tuple. The list is a normalized, shallow copy of the
+ argument. The boolean is True if the argument was a sequence, False
+ if it was a single object.
+
+ Raises:
+ AssertionError: types includes list or tuple.
+ BadArgumentError: arg is not an instance or sequence of one of the given
+ types.
+ """
+ if not isinstance(types, (list, tuple)):
+ types = (types,)
+
+ assert list not in types and tuple not in types
+
+ if isinstance(arg, types):
+ return ([arg], False)
+ else:
+ try:
+ for val in arg:
+ if not isinstance(val, types):
+ raise datastore_errors.BadArgumentError(
+ 'Expected one of %s; received %s (a %s).' %
+ (types, val, typename(val)))
+ except TypeError:
+ raise datastore_errors.BadArgumentError(
+ 'Expected an instance or sequence of %s; received %s (a %s).' %
+ (types, arg, typename(arg)))
+
+ return (list(arg), True)
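+
+
+# Illustrative sketch (editor's addition, not part of the original file).
+# The boolean in the returned tuple lets callers mirror the shape of the
+# original argument in their own return value:
+#
+#   NormalizeAndTypeCheck('a', basestring)         # -> (['a'], False)
+#   NormalizeAndTypeCheck(['a', 'b'], basestring)  # -> (['a', 'b'], True)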
+
+
+def NormalizeAndTypeCheckKeys(keys):
+ """Normalizes and type checks that the given argument is a valid key or keys.
+
+ A wrapper around NormalizeAndTypeCheck() that accepts strings, Keys, and
+ Entities, and normalizes to Keys.
+
+ Args:
+ keys: a Key or sequence of Keys
+
+ Returns:
+ A (list of Keys, bool) tuple. See NormalizeAndTypeCheck.
+
+ Raises:
+ BadArgumentError: arg is not an instance or sequence of one of the given
+ types.
+ """
+ keys, multiple = NormalizeAndTypeCheck(keys, (basestring, Entity, Key))
+
+ keys = [_GetCompleteKeyOrError(key) for key in keys]
+
+ return (keys, multiple)
+
+
+def Put(entities):
+ """Store one or more entities in the datastore.
+
+ The entities may be new or previously existing. For new entities, Put() will
+ fill in the app id and key assigned by the datastore.
+
+ If the argument is a single Entity, a single Key will be returned. If the
+ argument is a list of Entity, a list of Keys will be returned.
+
+ Args:
+ entities: Entity or list of Entities
+
+ Returns:
+ Key or list of Keys
+
+ Raises:
+ TransactionFailedError, if the Put could not be committed.
+ """
+ entities, multiple = NormalizeAndTypeCheck(entities, Entity)
+
+ if multiple and not entities:
+ return []
+
+ for entity in entities:
+ if not entity.kind() or not entity.app_id_namespace():
+ raise datastore_errors.BadRequestError(
+ 'App and kind must not be empty, in entity: %s' % entity)
+
+ req = datastore_pb.PutRequest()
+ req.entity_list().extend([e._ToPb() for e in entities])
+
+ keys = [e.key() for e in entities]
+ tx = _MaybeSetupTransaction(req, keys)
+
+ resp = datastore_pb.PutResponse()
+ try:
+ apiproxy_stub_map.MakeSyncCall('datastore_v3', 'Put', req, resp)
+ except apiproxy_errors.ApplicationError, err:
+ raise _ToDatastoreError(err)
+
+ keys = resp.key_list()
+ num_keys = len(keys)
+ num_entities = len(entities)
+ if num_keys != num_entities:
+ raise datastore_errors.InternalError(
+ 'Put accepted %d entities but returned %d keys.' %
+ (num_entities, num_keys))
+
+ for entity, key in zip(entities, keys):
+ entity._Entity__key._Key__reference.CopyFrom(key)
+
+ if tx:
+ tx.entity_group = entities[0].entity_group()
+
+ if multiple:
+ return [Key._FromPb(k) for k in keys]
+ else:
+ return Key._FromPb(resp.key(0))
+
+
+def Get(keys):
+ """Retrieves one or more entities from the datastore.
+
+ Retrieves the entity or entities with the given key(s) from the datastore
+ and returns them as fully populated Entity objects, as defined below. If
+ there is an error, raises a subclass of datastore_errors.Error.
+
+ If keys is a single key or string, an Entity will be returned, or
+ EntityNotFoundError will be raised if no existing entity matches the key.
+
+ However, if keys is a list or tuple, a list of entities will be returned
+ that corresponds to the sequence of keys. It will include entities for keys
+ that were found and None placeholders for keys that were not found.
+
+ Args:
+ # the primary key(s) of the entity(ies) to retrieve
+ keys: Key or string or list of Keys or strings
+
+ Returns:
+ Entity or list of Entity objects
+ """
+ keys, multiple = NormalizeAndTypeCheckKeys(keys)
+
+ if multiple and not keys:
+ return []
+ req = datastore_pb.GetRequest()
+ req.key_list().extend([key._Key__reference for key in keys])
+ _MaybeSetupTransaction(req, keys)
+
+ resp = datastore_pb.GetResponse()
+ try:
+ apiproxy_stub_map.MakeSyncCall('datastore_v3', 'Get', req, resp)
+ except apiproxy_errors.ApplicationError, err:
+ raise _ToDatastoreError(err)
+
+ entities = []
+ for group in resp.entity_list():
+ if group.has_entity():
+ entities.append(Entity._FromPb(group.entity()))
+ else:
+ entities.append(None)
+
+ if multiple:
+ return entities
+ else:
+ if entities[0] is None:
+ raise datastore_errors.EntityNotFoundError()
+ return entities[0]
+
+
+def Delete(keys):
+ """Deletes one or more entities from the datastore. Use with care!
+
+ Deletes the given entity(ies) from the datastore. You can only delete
+ entities from your app. If there is an error, raises a subclass of
+ datastore_errors.Error.
+
+ Args:
+ # the primary key(s) of the entity(ies) to delete
+ keys: Key or string or list of Keys or strings
+
+ Raises:
+ TransactionFailedError, if the Delete could not be committed.
+ """
+ keys, multiple = NormalizeAndTypeCheckKeys(keys)
+
+ if multiple and not keys:
+ return
+
+ req = datastore_pb.DeleteRequest()
+ req.key_list().extend([key._Key__reference for key in keys])
+
+ tx = _MaybeSetupTransaction(req, keys)
+
+ resp = datastore_pb.DeleteResponse()
+ try:
+ apiproxy_stub_map.MakeSyncCall('datastore_v3', 'Delete', req, resp)
+ except apiproxy_errors.ApplicationError, err:
+ raise _ToDatastoreError(err)
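+
+
+# Illustrative sketch (editor's addition, not part of the original file).
+# Put/Get/Delete give the usual round trip; Get mirrors the shape of its
+# argument the same way Put does:
+#
+#   entity = Entity('Greeting')
+#   entity['content'] = 'hello'
+#   key = Put(entity)   # single Entity in -> single Key out
+#   same = Get(key)     # -> an Entity
+#   Delete(key)
+#   Get([key])          # -> [None] once the entity is gone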
+
+
+class Entity(dict):
+ """A datastore entity.
+
+ Includes read-only accessors for app id, kind, and primary key. Also
+ provides dictionary-style access to properties.
+ """
+ def __init__(self, kind, parent=None, _app=None, name=None, id=None,
+ unindexed_properties=[], _namespace=None):
+ """Constructor. Takes the kind and transaction root, which cannot be
+ changed after the entity is constructed, and an optional parent. Raises
+ BadArgumentError or BadKeyError if kind is invalid or parent is not an
+ existing Entity or Key in the datastore.
+
+ Args:
+ # this entity's kind
+ kind: string
+ # if provided, this entity's parent. Its key must be complete.
+ parent: Entity or Key
+ # if provided, this entity's name.
+ name: string
+ # if provided, this entity's id.
+ id: integer
+ # if provided, a sequence of property names that should not be indexed
+ # by the built-in single property indices.
+ unindexed_properties: list or tuple of strings
+ """
+ ref = entity_pb.Reference()
+ _app_namespace = datastore_types.ResolveAppIdNamespace(_app, _namespace)
+ ref.set_app(_app_namespace.to_encoded())
+
+ datastore_types.ValidateString(kind, 'kind',
+ datastore_errors.BadArgumentError)
+ if parent is not None:
+ parent = _GetCompleteKeyOrError(parent)
+ if _app_namespace != parent.app_id_namespace():
+ raise datastore_errors.BadArgumentError(
+ " %s doesn't match parent's app_namespace %s" %
+ (_app_namespace, parent.app_id_namespace()))
+ ref.CopyFrom(parent._Key__reference)
+
+ last_path = ref.mutable_path().add_element()
+ last_path.set_type(kind.encode('utf-8'))
+
+ if name is not None and id is not None:
+ raise datastore_errors.BadArgumentError(
+ "Cannot set both name and id on an Entity")
+
+ if name is not None:
+ datastore_types.ValidateString(name, 'name')
+ last_path.set_name(name.encode('utf-8'))
+
+ if id is not None:
+ datastore_types.ValidateInteger(id, 'id')
+ last_path.set_id(id)
+
+ unindexed_properties, multiple = NormalizeAndTypeCheck(unindexed_properties, basestring)
+ if not multiple:
+ raise datastore_errors.BadArgumentError(
+ 'unindexed_properties must be a sequence; received %s (a %s).' %
+ (unindexed_properties, typename(unindexed_properties)))
+ for prop in unindexed_properties:
+ datastore_types.ValidateProperty(prop, None)
+ self.__unindexed_properties = frozenset(unindexed_properties)
+
+ self.__key = Key._FromPb(ref)
+
+ def app(self):
+ """Returns the name of the application that created this entity, a
+ string or None if not set.
+ """
+ return self.__key.app()
+
+ def namespace(self):
+ """Returns the namespace of this entity, a string or None.
+ """
+ return self.__key.namespace()
+
+ def app_id_namespace(self):
+ """Returns the AppIdNamespace of this entity or None if not set.
+ """
+ return self.__key.app_id_namespace()
+
+ def kind(self):
+ """Returns this entity's kind, a string.
+ """
+ return self.__key.kind()
+
+ def is_saved(self):
+ """Returns if this entity has been saved to the datastore
+ """
+ last_path = self.__key._Key__reference.path().element_list()[-1]
+ return ((last_path.has_name() ^ last_path.has_id()) and
+ self.__key.has_id_or_name())
+
+ def key(self):
+ """Returns this entity's primary key, a Key instance.
+ """
+ return self.__key
+
+ def parent(self):
+ """Returns this entity's parent, as a Key. If this entity has no parent,
+ returns None.
+ """
+ return self.key().parent()
+
+ def entity_group(self):
+ """Returns this entity's entity group as a Key.
+
+ Note that the returned Key will be incomplete if this is a root entity
+ and its key is incomplete.
+ """
+ return self.key().entity_group()
+
+ def unindexed_properties(self):
+ """Returns this entity's unindexed properties, as a frozenset of strings."""
+ return getattr(self, '_Entity__unindexed_properties', [])
+
+ def __setitem__(self, name, value):
+ """Implements the [] operator. Used to set property value(s).
+
+ If the property name is the empty string or not a string, raises
+ BadPropertyError. If the value is not a supported type, raises
+ BadValueError.
+ """
+ datastore_types.ValidateProperty(name, value)
+ dict.__setitem__(self, name, value)
+
+ def setdefault(self, name, value):
+ """If the property exists, returns its value. Otherwise sets it to value.
+
+ If the property name is the empty string or not a string, raises
+ BadPropertyError. If the value is not a supported type, raises
+ BadValueError.
+ """
+ datastore_types.ValidateProperty(name, value)
+ return dict.setdefault(self, name, value)
+
+ def update(self, other):
+ """Updates this entity's properties from the values in other.
+
+ If any property name is the empty string or not a string, raises
+ BadPropertyError. If any value is not a supported type, raises
+ BadValueError.
+ """
+ for name, value in other.items():
+ self.__setitem__(name, value)
+
+ def copy(self):
+ """The copy method is not supported.
+ """
+ raise NotImplementedError('Entity does not support the copy() method.')
+
+ def ToXml(self):
+ """Returns an XML representation of this entity. Atom and gd:namespace
+ properties are converted to XML according to their respective schemas. For
+ more information, see:
+
+ http://www.atomenabled.org/developers/syndication/
+ http://code.google.com/apis/gdata/common-elements.html
+
+ This is *not* optimized. It shouldn't be used anywhere near code that's
+ performance-critical.
+ """
+ xml = u'<entity kind=%s' % saxutils.quoteattr(self.kind())
+ if self.__key.has_id_or_name():
+ xml += ' key=%s' % saxutils.quoteattr(str(self.__key))
+ xml += '>'
+ if self.__key.has_id_or_name():
+ xml += '\n <key>%s</key>' % self.__key.ToTagUri()
+
+ properties = self.keys()
+ if properties:
+ properties.sort()
+ xml += '\n ' + '\n '.join(self._PropertiesToXml(properties))
+
+ xml += '\n</entity>\n'
+ return xml
+
+ def _PropertiesToXml(self, properties):
+ """ Returns a list of the XML representations of each of the given
+ properties. Ignores properties that don't exist in this entity.
+
+    Args:
+ properties: string or list of strings
+
+ Returns:
+ list of strings
+ """
+ xml_properties = []
+
+ for propname in properties:
+ if not self.has_key(propname):
+ continue
+
+ propname_xml = saxutils.quoteattr(propname)
+
+ values = self[propname]
+ if not isinstance(values, list):
+ values = [values]
+
+ proptype = datastore_types.PropertyTypeName(values[0])
+ proptype_xml = saxutils.quoteattr(proptype)
+
+ escaped_values = self._XmlEscapeValues(propname)
+ open_tag = u'<property name=%s type=%s>' % (propname_xml, proptype_xml)
+ close_tag = u'</property>'
+ xml_properties += [open_tag + val + close_tag for val in escaped_values]
+
+ return xml_properties
+
+ def _XmlEscapeValues(self, property):
+ """ Returns a list of the XML-escaped string values for the given property.
+ Raises an AssertionError if the property doesn't exist.
+
+    Args:
+ property: string
+
+ Returns:
+ list of strings
+ """
+ assert self.has_key(property)
+ xml = []
+
+ values = self[property]
+ if not isinstance(values, list):
+ values = [values]
+
+ for val in values:
+ if hasattr(val, 'ToXml'):
+ xml.append(val.ToXml())
+ else:
+ if val is None:
+ xml.append('')
+ else:
+ xml.append(saxutils.escape(unicode(val)))
+
+ return xml
+
+ def ToPb(self):
+ """Converts this Entity to its protocol buffer representation.
+
+ Returns:
+ entity_pb.Entity
+ """
+ return self._ToPb(False)
+
+ def _ToPb(self, mark_key_as_saved=True):
+ """Converts this Entity to its protocol buffer representation. Not
+ intended to be used by application developers.
+
+ Returns:
+ entity_pb.Entity
+ """
+
+ pb = entity_pb.EntityProto()
+ pb.mutable_key().CopyFrom(self.key()._ToPb())
+ last_path = pb.key().path().element_list()[-1]
+ if mark_key_as_saved and last_path.has_name() and last_path.has_id():
+ last_path.clear_id()
+
+ group = pb.mutable_entity_group()
+ if self.__key.has_id_or_name():
+ root = pb.key().path().element(0)
+ group.add_element().CopyFrom(root)
+
+ properties = self.items()
+ properties.sort()
+    for (name, values) in properties:
+      props = datastore_types.ToPropertyPb(name, values)
+      if not isinstance(props, list):
+        props = [props]
+
+      sample = values
+      if isinstance(sample, list):
+        sample = values[0]
+
+      if (isinstance(sample, datastore_types._RAW_PROPERTY_TYPES) or
+          name in self.unindexed_properties()):
+        pb.raw_property_list().extend(props)
+      else:
+        pb.property_list().extend(props)
+
+ if pb.property_size() > _MAX_INDEXED_PROPERTIES:
+ raise datastore_errors.BadRequestError(
+ 'Too many indexed properties for entity %r.' % self.key())
+
+ return pb
+
+ @staticmethod
+ def FromPb(pb):
+ """Static factory method. Returns the Entity representation of the
+    given protocol buffer (entity_pb.EntityProto).
+
+ Args:
+      pb: entity_pb.EntityProto or str encoding of an entity_pb.EntityProto
+
+ Returns:
+ Entity: the Entity representation of pb
+ """
+ if isinstance(pb, str):
+ real_pb = entity_pb.EntityProto()
+ real_pb.ParseFromString(pb)
+ pb = real_pb
+
+ return Entity._FromPb(pb, require_valid_key=False)
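+
+  # Round-trip sketch: FromPb accepts either the proto or its serialized
+  # string form (Encode() is assumed here from the proto runtime).
+  #
+  #   pb = entity.ToPb()
+  #   same = Entity.FromPb(pb)
+  #   also_same = Entity.FromPb(pb.Encode())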
+
+ @staticmethod
+ def _FromPb(pb, require_valid_key=True):
+ """Static factory method. Returns the Entity representation of the
+    given protocol buffer (entity_pb.EntityProto). Not intended to be used by
+ application developers.
+
+ The Entity PB's key must be complete. If it isn't, an AssertionError is
+ raised.
+
+ Args:
+ # a protocol buffer Entity
+      pb: entity_pb.EntityProto
+
+ Returns:
+ # the Entity representation of the argument
+ Entity
+ """
+ assert pb.key().path().element_size() > 0
+
+ last_path = pb.key().path().element_list()[-1]
+ if require_valid_key:
+ assert last_path.has_id() ^ last_path.has_name()
+ if last_path.has_id():
+ assert last_path.id() != 0
+ else:
+ assert last_path.has_name()
+ assert last_path.name()
+
+ unindexed_properties = [p.name() for p in pb.raw_property_list()]
+
+ e = Entity(unicode(last_path.type().decode('utf-8')),
+ unindexed_properties=unindexed_properties)
+ ref = e.__key._Key__reference
+ ref.CopyFrom(pb.key())
+
+ temporary_values = {}
+
+ for prop_list in (pb.property_list(), pb.raw_property_list()):
+ for prop in prop_list:
+        try:
+          value = datastore_types.FromPropertyPb(prop)
+        except (AssertionError, AttributeError, TypeError, ValueError), err:
+          raise datastore_errors.Error(
+            'Property %s is corrupt in the datastore. %s: %s' %
+            (prop.name(), err.__class__.__name__, err))
+
+ multiple = prop.multiple()
+ if multiple:
+ value = [value]
+
+ name = prop.name()
+ cur_value = temporary_values.get(name)
+ if cur_value is None:
+ temporary_values[name] = value
+ elif not multiple:
+ raise datastore_errors.Error(
+ 'Property %s is corrupt in the datastore; it has multiple '
+ 'values, but is not marked as multiply valued.' % name)
+ else:
+ cur_value.extend(value)
+
+ for name, value in temporary_values.iteritems():
+ decoded_name = unicode(name.decode('utf-8'))
+
+ datastore_types.ValidateReadProperty(decoded_name, value)
+
+ dict.__setitem__(e, decoded_name, value)
+
+ return e
+
+
+class Query(dict):
+ """A datastore query.
+
+ (Instead of this, consider using appengine.ext.gql.Query! It provides a
+ query language interface on top of the same functionality.)
+
+ Queries are used to retrieve entities that match certain criteria, including
+ app id, kind, and property filters. Results may also be sorted by properties.
+
+ App id and kind are required. Only entities from the given app, of the given
+ type, are returned. If an ancestor is set, with Ancestor(), only entities
+ with that ancestor are returned.
+
+ Property filters are used to provide criteria based on individual property
+ values. A filter compares a specific property in each entity to a given
+ value or list of possible values.
+
+ An entity is returned if its property values match *all* of the query's
+ filters. In other words, filters are combined with AND, not OR. If an
+ entity does not have a value for a property used in a filter, it is not
+ returned.
+
+ Property filters map filter strings of the form '<property name> <operator>'
+ to filter values. Use dictionary accessors to set property filters, like so:
+
+ > query = Query('Person')
+ > query['name ='] = 'Ryan'
+ > query['age >='] = 21
+
+  This query returns all Person entities where the name property is 'Ryan'
+  and the age property is at least 21.
+
+ Another way to build this query is:
+
+ > query = Query('Person')
+ > query.update({'name =': 'Ryan', 'age >=': 21})
+
+ The supported operators are =, >, <, >=, and <=. Only one inequality
+ filter may be used per query. Any number of equals filters may be used in
+ a single Query.
+
+ A filter value may be a list or tuple of values. This is interpreted as
+ multiple filters with the same filter string and different values, all ANDed
+ together. For example, this query returns everyone with the tags "google"
+ and "app engine":
+
+ > Query('Person', {'tag =': ('google', 'app engine')})
+
+ Result entities can be returned in different orders. Use the Order()
+ method to specify properties that results will be sorted by, and in which
+ direction.
+
+ Note that filters and orderings may be provided at any time before the query
+ is run. When the query is fully specified, Run() runs the query and returns
+ an iterator. The query results can be accessed through the iterator.
+
+ A query object may be reused after it's been run. Its filters and
+ orderings can be changed to create a modified query.
+
+ If you know how many result entities you need, use Get() to fetch them:
+
+ > query = Query('Person', {'age >': 21})
+ > for person in query.Get(4):
+ > print 'I have four pints left. Have one on me, %s!' % person['name']
+
+ If you don't know how many results you need, or if you need them all, you
+ can get an iterator over the results by calling Run():
+
+ > for person in Query('Person', {'age >': 21}).Run():
+ > print 'Have a pint on me, %s!' % person['name']
+
+ Get() is more efficient than Run(), so use Get() whenever possible.
+
+ Finally, the Count() method returns the number of result entities matched by
+ the query. The returned count is cached; successive Count() calls will not
+ re-scan the datastore unless the query is changed.
+ """
+ ASCENDING = datastore_pb.Query_Order.ASCENDING
+ DESCENDING = datastore_pb.Query_Order.DESCENDING
+
+ ORDER_FIRST = datastore_pb.Query.ORDER_FIRST
+ ANCESTOR_FIRST = datastore_pb.Query.ANCESTOR_FIRST
+ FILTER_FIRST = datastore_pb.Query.FILTER_FIRST
+
+ OPERATORS = {'<': datastore_pb.Query_Filter.LESS_THAN,
+ '<=': datastore_pb.Query_Filter.LESS_THAN_OR_EQUAL,
+ '>': datastore_pb.Query_Filter.GREATER_THAN,
+ '>=': datastore_pb.Query_Filter.GREATER_THAN_OR_EQUAL,
+ '=': datastore_pb.Query_Filter.EQUAL,
+ '==': datastore_pb.Query_Filter.EQUAL,
+ }
+ INEQUALITY_OPERATORS = frozenset(['<', '<=', '>', '>='])
+ FILTER_REGEX = re.compile(
+      r'^\s*([^\s]+)(\s+(%s)\s*)?$' % '|'.join(OPERATORS.keys()),
+ re.IGNORECASE | re.UNICODE)
+
+ __kind = None
+ __app = None
+ __orderings = None
+ __cached_count = None
+ __hint = None
+ __ancestor = None
+
+ __filter_order = None
+ __filter_counter = 0
+
+ __inequality_prop = None
+ __inequality_count = 0
+
+ def __init__(self, kind=None, filters={}, _app=None, keys_only=False,
+ _namespace=None):
+ """Constructor.
+
+ Raises BadArgumentError if kind is not a string. Raises BadValueError or
+ BadFilterError if filters is not a dictionary of valid filters.
+
+ Args:
+ # kind is required. filters is optional; if provided, it's used
+ # as an initial set of property filters. keys_only defaults to False.
+ kind: string
+ filters: dict
+ keys_only: boolean
+ """
+ if kind is not None:
+ datastore_types.ValidateString(kind, 'kind',
+ datastore_errors.BadArgumentError)
+
+ self.__kind = kind
+ self.__orderings = []
+ self.__filter_order = {}
+ self.update(filters)
+
+ self.__app = datastore_types.ResolveAppIdNamespace(_app,
+ _namespace).to_encoded()
+ self.__keys_only = keys_only
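+
+  # Construction sketch: filters can be passed to the constructor or added
+  # later through the dict interface; the two forms below are equivalent.
+  #
+  #   q = Query('Person', {'age >=': 21})
+  #
+  #   q = Query('Person')
+  #   q['age >='] = 21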
+
+ def Order(self, *orderings):
+ """Specify how the query results should be sorted.
+
+ Result entities will be sorted by the first property argument, then by the
+ second, and so on. For example, this:
+
+ > query = Query('Person')
+ > query.Order('bday', ('age', Query.DESCENDING))
+
+ sorts everyone in order of their birthday, starting with January 1.
+ People with the same birthday are sorted by age, oldest to youngest.
+
+ The direction for each sort property may be provided; if omitted, it
+ defaults to ascending.
+
+ Order() may be called multiple times. Each call resets the sort order
+ from scratch.
+
+ If an inequality filter exists in this Query it must be the first property
+ passed to Order. Any number of sort orders may be used after the
+    inequality filter property. Without inequality filters, any number of
+    sort orders on different properties may be specified.
+
+ Entities with multiple values for an order property are sorted by their
+ lowest value.
+
+ Note that a sort order implies an existence filter! In other words,
+ Entities without the sort order property are filtered out, and *not*
+ included in the query results.
+
+    If the sort order property has different types in different entities -
+    i.e., if bob['id'] is an int and fred['id'] is a string - the entities
+    will be grouped first by the property type, then sorted within type. No
+    attempt is made to compare property values across types.
+
+ Raises BadArgumentError if any argument is of the wrong format.
+
+ Args:
+ # the properties to sort by, in sort order. each argument may be either a
+ # string or (string, direction) 2-tuple.
+
+ Returns:
+ # this query
+ Query
+ """
+ orderings = list(orderings)
+
+    for i, order in enumerate(orderings):
+ if not (isinstance(order, basestring) or
+ (isinstance(order, tuple) and len(order) in [2, 3])):
+ raise datastore_errors.BadArgumentError(
+ 'Order() expects strings or 2- or 3-tuples; received %s (a %s). ' %
+ (order, typename(order)))
+
+ if isinstance(order, basestring):
+ order = (order,)
+
+ datastore_types.ValidateString(order[0], 'sort order property',
+ datastore_errors.BadArgumentError)
+ property = order[0]
+
+ direction = order[-1]
+ if direction not in (Query.ASCENDING, Query.DESCENDING):
+ if len(order) == 3:
+ raise datastore_errors.BadArgumentError(
+ 'Order() expects Query.ASCENDING or DESCENDING; received %s' %
+ str(direction))
+ direction = Query.ASCENDING
+
+ if (self.__kind is None and
+ (property != datastore_types._KEY_SPECIAL_PROPERTY or
+ direction != Query.ASCENDING)):
+ raise datastore_errors.BadArgumentError(
+ 'Only %s ascending orders are supported on kindless queries' %
+ datastore_types._KEY_SPECIAL_PROPERTY)
+
+ orderings[i] = (property, direction)
+
+ if (orderings and self.__inequality_prop and
+ orderings[0][0] != self.__inequality_prop):
+ raise datastore_errors.BadArgumentError(
+ 'First ordering property must be the same as inequality filter '
+ 'property, if specified for this query; received %s, expected %s' %
+ (orderings[0][0], self.__inequality_prop))
+
+ self.__orderings = orderings
+ return self
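+
+  # Ordering sketch: with an inequality filter, that property must be the
+  # first sort order, per the check above.
+  #
+  #   q = Query('Person', {'age >': 21})
+  #   q.Order('age', ('name', Query.DESCENDING))   # OK
+  #   q.Order('name')                              # raises BadArgumentError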
+
+ def Hint(self, hint):
+ """Sets a hint for how this query should run.
+
+ The query hint gives us information about how best to execute your query.
+ Currently, we can only do one index scan, so the query hint should be used
+    to indicate which index we should scan against.
+
+ Use FILTER_FIRST if your first filter will only match a few results. In
+ this case, it will be most efficient to scan against the index for this
+ property, load the results into memory, and apply the remaining filters
+ and sort orders there.
+
+ Similarly, use ANCESTOR_FIRST if the query's ancestor only has a few
+ descendants. In this case, it will be most efficient to scan all entities
+ below the ancestor and load them into memory first.
+
+ Use ORDER_FIRST if the query has a sort order and the result set is large
+ or you only plan to fetch the first few results. In that case, we
+ shouldn't try to load all of the results into memory; instead, we should
+ scan the index for this property, which is in sorted order.
+
+ Note that hints are currently ignored in the v3 datastore!
+
+    Args:
+ one of datastore.Query.[ORDER_FIRST, ANCESTOR_FIRST, FILTER_FIRST]
+
+ Returns:
+ # this query
+ Query
+ """
+ if hint not in [self.ORDER_FIRST, self.ANCESTOR_FIRST, self.FILTER_FIRST]:
+ raise datastore_errors.BadArgumentError(
+ 'Query hint must be ORDER_FIRST, ANCESTOR_FIRST, or FILTER_FIRST.')
+
+ self.__hint = hint
+ return self
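+
+  # Hint sketch, e.g. when the ancestor has few descendants (post_key is a
+  # hypothetical complete Key):
+  #
+  #   q = Query('Comment').Ancestor(post_key).Hint(Query.ANCESTOR_FIRST)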
+
+ def Ancestor(self, ancestor):
+ """Sets an ancestor for this query.
+
+ This restricts the query to only return result entities that are descended
+ from a given entity. In other words, all of the results will have the
+    ancestor as their parent, or parent's parent, and so on.
+
+    Raises BadArgumentError or BadKeyError if ancestor is not an existing
+    Entity or Key in the datastore.
+
+ Args:
+ # the key must be complete
+ ancestor: Entity or Key
+
+ Returns:
+ # this query
+ Query
+ """
+ self.__ancestor = _GetCompleteKeyOrError(ancestor)
+ return self
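+
+  # Ancestor sketch (folder_key is a hypothetical complete Key):
+  #
+  #   q = Query('Document').Ancestor(folder_key)
+  #   q['title ='] = 'readme'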
+
+ def IsKeysOnly(self):
+ """Returns True if this query is keys only, false otherwise."""
+ return self.__keys_only
+
+ def Run(self):
+ """Runs this query.
+
+ If a filter string is invalid, raises BadFilterError. If a filter value is
+ invalid, raises BadValueError. If an IN filter is provided, and a sort
+ order on another property is provided, raises BadQueryError.
+
+ If you know in advance how many results you want, use Get() instead. It's
+ more efficient.
+
+ Returns:
+ # an iterator that provides access to the query results
+ Iterator
+ """
+ return self._Run()
+
+ def _Run(self, limit=None, offset=None,
+ prefetch_count=None, next_count=None):
+ """Runs this query, with an optional result limit and an optional offset.
+
+ Identical to Run, with the extra optional limit, offset, prefetch_count,
+ next_count parameters. These parameters must be integers >= 0.
+
+ This is not intended to be used by application developers. Use Get()
+ instead!
+ """
+ pb = self._ToPb(limit, offset, prefetch_count)
+ result = datastore_pb.QueryResult()
+
+ try:
+ apiproxy_stub_map.MakeSyncCall('datastore_v3', 'RunQuery', pb, result)
+ except apiproxy_errors.ApplicationError, err:
+ try:
+ _ToDatastoreError(err)
+ except datastore_errors.NeedIndexError, exc:
+ yaml = datastore_index.IndexYamlForQuery(
+ *datastore_index.CompositeIndexForQuery(pb)[1:-1])
+ raise datastore_errors.NeedIndexError(
+ str(exc) + '\nThis query needs this index:\n' + yaml)
+
+ return Iterator(result, batch_size=next_count)
+
+ def Get(self, limit, offset=0):
+ """Fetches and returns a maximum number of results from the query.
+
+ This method fetches and returns a list of resulting entities that matched
+ the query. If the query specified a sort order, entities are returned in
+ that order. Otherwise, the order is undefined.
+
+ The limit argument specifies the maximum number of entities to return. If
+ it's greater than the number of remaining entities, all of the remaining
+ entities are returned. In that case, the length of the returned list will
+ be smaller than limit.
+
+ The offset argument specifies the number of entities that matched the
+ query criteria to skip before starting to return results. The limit is
+ applied after the offset, so if you provide a limit of 10 and an offset of 5
+ and your query matches 20 records, the records whose index is 0 through 4
+ will be skipped and the records whose index is 5 through 14 will be
+ returned.
+
+ The results are always returned as a list. If there are no results left,
+ an empty list is returned.
+
+ If you know in advance how many results you want, this method is more
+ efficient than Run(), since it fetches all of the results at once. (The
+    datastore backend sets the limit on the underlying scan, which makes the
+    scan significantly faster.)
+
+ Args:
+      # the maximum number of entities to return
+      limit: int or long
+      # the number of entities to skip
+      offset: int or long
+
+ Returns:
+ # a list of entities
+ [Entity, ...]
+ """
+ if not isinstance(limit, (int, long)) or limit <= 0:
+ raise datastore_errors.BadArgumentError(
+ 'Argument to Get named \'limit\' must be an int greater than 0; '
+ 'received %s (a %s)' % (limit, typename(limit)))
+
+ if not isinstance(offset, (int, long)) or offset < 0:
+ raise datastore_errors.BadArgumentError(
+ 'Argument to Get named \'offset\' must be an int greater than or '
+ 'equal to 0; received %s (a %s)' % (offset, typename(offset)))
+
+ return self._Run(limit=limit, offset=offset,
+ prefetch_count=limit)._Get(limit)
+
+ def Count(self, limit=None):
+ """Returns the number of entities that this query matches. The returned
+ count is cached; successive Count() calls will not re-scan the datastore
+ unless the query is changed.
+
+ Args:
+      limit: int or long. If there are more results than this, stop short
+        and just return this number. Providing this argument makes the
+        count operation more efficient.
+ Returns:
+ The number of results.
+ """
+ if self.__cached_count:
+ return self.__cached_count
+
+ resp = api_base_pb.Integer64Proto()
+ try:
+ apiproxy_stub_map.MakeSyncCall('datastore_v3', 'Count',
+ self._ToPb(limit=limit), resp)
+ except apiproxy_errors.ApplicationError, err:
+ raise _ToDatastoreError(err)
+ else:
+ self.__cached_count = resp.value()
+
+ return self.__cached_count
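+
+  # Count sketch: the second call is served from the cached count.
+  #
+  #   q = Query('Person')
+  #   n = q.Count(limit=100)   # RPC; returns at most 100
+  #   n = q.Count(limit=100)   # cached, no RPC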
+
+ def __iter__(self):
+ raise NotImplementedError(
+ 'Query objects should not be used as iterators. Call Run() first.')
+
+ def __setitem__(self, filter, value):
+ """Implements the [] operator. Used to set filters.
+
+ If the filter string is empty or not a string, raises BadFilterError. If
+ the value is not a supported type, raises BadValueError.
+ """
+ if isinstance(value, tuple):
+ value = list(value)
+
+ datastore_types.ValidateProperty(' ', value, read_only=True)
+ match = self._CheckFilter(filter, value)
+ property = match.group(1)
+ operator = match.group(3)
+
+ dict.__setitem__(self, filter, value)
+
+ if operator in self.INEQUALITY_OPERATORS:
+ if self.__inequality_prop is None:
+ self.__inequality_prop = property
+ else:
+ assert self.__inequality_prop == property
+ self.__inequality_count += 1
+
+ if filter not in self.__filter_order:
+ self.__filter_order[filter] = self.__filter_counter
+ self.__filter_counter += 1
+
+ self.__cached_count = None
+
+ def setdefault(self, filter, value):
+ """If the filter exists, returns its value. Otherwise sets it to value.
+
+    If the filter string is empty or not a string, raises BadFilterError.
+    If the value is not a supported type, raises BadValueError.
+ """
+ datastore_types.ValidateProperty(' ', value)
+ self._CheckFilter(filter, value)
+ self.__cached_count = None
+ return dict.setdefault(self, filter, value)
+
+ def __delitem__(self, filter):
+ """Implements the del [] operator. Used to remove filters.
+ """
+ dict.__delitem__(self, filter)
+ del self.__filter_order[filter]
+ self.__cached_count = None
+
+ match = Query.FILTER_REGEX.match(filter)
+ property = match.group(1)
+ operator = match.group(3)
+
+ if operator in self.INEQUALITY_OPERATORS:
+ assert self.__inequality_count >= 1
+ assert property == self.__inequality_prop
+ self.__inequality_count -= 1
+ if self.__inequality_count == 0:
+ self.__inequality_prop = None
+
+ def update(self, other):
+ """Updates this query's filters from the ones in other.
+
+ If any filter string is invalid, raises BadFilterError. If any value is
+ not a supported type, raises BadValueError.
+ """
+ for filter, value in other.items():
+ self.__setitem__(filter, value)
+
+ def copy(self):
+ """The copy method is not supported.
+ """
+ raise NotImplementedError('Query does not support the copy() method.')
+
+ def _CheckFilter(self, filter, values):
+ """Type check a filter string and list of values.
+
+ Raises BadFilterError if the filter string is empty, not a string, or
+ invalid. Raises BadValueError if the value type is not supported.
+
+ Args:
+ filter: String containing the filter text.
+ values: List of associated filter values.
+
+ Returns:
+ re.MatchObject (never None) that matches the 'filter'. Group 1 is the
+ property name, group 3 is the operator. (Group 2 is unused.)
+ """
+ try:
+ match = Query.FILTER_REGEX.match(filter)
+ if not match:
+ raise datastore_errors.BadFilterError(
+ 'Could not parse filter string: %s' % str(filter))
+ except TypeError:
+ raise datastore_errors.BadFilterError(
+ 'Could not parse filter string: %s' % str(filter))
+
+ property = match.group(1)
+ operator = match.group(3)
+ if operator is None:
+ operator = '='
+
+ if isinstance(values, tuple):
+ values = list(values)
+ elif not isinstance(values, list):
+ values = [values]
+ if isinstance(values[0], datastore_types._RAW_PROPERTY_TYPES):
+ raise datastore_errors.BadValueError(
+ 'Filtering on %s properties is not supported.' % typename(values[0]))
+
+ if operator in self.INEQUALITY_OPERATORS:
+ if self.__inequality_prop and property != self.__inequality_prop:
+ raise datastore_errors.BadFilterError(
+ 'Only one property per query may have inequality filters (%s).' %
+ ', '.join(self.INEQUALITY_OPERATORS))
+ elif len(self.__orderings) >= 1 and self.__orderings[0][0] != property:
+ raise datastore_errors.BadFilterError(
+ 'Inequality operators (%s) must be on the same property as the '
+ 'first sort order, if any sort orders are supplied' %
+ ', '.join(self.INEQUALITY_OPERATORS))
+
+ if (self.__kind is None and
+ property != datastore_types._KEY_SPECIAL_PROPERTY):
+ raise datastore_errors.BadFilterError(
+ 'Only %s filters are allowed on kindless queries.' %
+ datastore_types._KEY_SPECIAL_PROPERTY)
+
+ if property in datastore_types._SPECIAL_PROPERTIES:
+ if property == datastore_types._KEY_SPECIAL_PROPERTY:
+ for value in values:
+ if not isinstance(value, Key):
+ raise datastore_errors.BadFilterError(
+ '%s filter value must be a Key; received %s (a %s)' %
+ (datastore_types._KEY_SPECIAL_PROPERTY, value, typename(value)))
+
+ return match
+
+ def _ToPb(self, limit=None, offset=None, count=None):
+ """Converts this Query to its protocol buffer representation. Not
+ intended to be used by application developers. Enforced by hiding the
+ datastore_pb classes.
+
+ Args:
+ # an upper bound on the number of results returned by the query.
+ limit: int
+ # number of results that match the query to skip. limit is applied
+ # after the offset is fulfilled
+ offset: int
+ # the requested initial batch size
+ count: int
+
+ Returns:
+ # the PB representation of this Query
+ datastore_pb.Query
+
+ Raises:
+ BadRequestError if called inside a transaction and the query does not
+ include an ancestor.
+ """
+
+ if not self.__ancestor and _CurrentTransactionKey():
+ raise datastore_errors.BadRequestError(
+ 'Only ancestor queries are allowed inside transactions.')
+
+ pb = datastore_pb.Query()
+ _MaybeSetupTransaction(pb, [self.__ancestor])
+
+ if self.__kind is not None:
+ pb.set_kind(self.__kind.encode('utf-8'))
+ pb.set_keys_only(bool(self.__keys_only))
+ if self.__app:
+ pb.set_app(self.__app.encode('utf-8'))
+ if limit is not None:
+ pb.set_limit(limit)
+ if offset is not None:
+ pb.set_offset(offset)
+ if count is not None:
+ pb.set_count(count)
+ if self.__ancestor:
+ pb.mutable_ancestor().CopyFrom(self.__ancestor._Key__reference)
+
+ if ((self.__hint == self.ORDER_FIRST and self.__orderings) or
+ (self.__hint == self.ANCESTOR_FIRST and self.__ancestor) or
+ (self.__hint == self.FILTER_FIRST and len(self) > 0)):
+ pb.set_hint(self.__hint)
+
+ ordered_filters = [(i, f) for f, i in self.__filter_order.iteritems()]
+ ordered_filters.sort()
+
+ for i, filter_str in ordered_filters:
+ if filter_str not in self:
+ continue
+
+ values = self[filter_str]
+ match = self._CheckFilter(filter_str, values)
+ name = match.group(1)
+
+ props = datastore_types.ToPropertyPb(name, values)
+ if not isinstance(props, list):
+ props = [props]
+
+ op = match.group(3)
+ if op is None:
+ op = '='
+
+ for prop in props:
+ filter = pb.add_filter()
+ filter.set_op(self.OPERATORS[op])
+ filter.add_property().CopyFrom(prop)
+
+ for property, direction in self.__orderings:
+ order = pb.add_order()
+ order.set_property(property.encode('utf-8'))
+ order.set_direction(direction)
+
+ return pb
+
+
+def AllocateIds(model_key, size):
+ """Allocates a range of IDs of size for the key defined by model_key
+
+ Allocates a range of IDs in the datastore such that those IDs will not
+ be automatically assigned to new entities. You can only allocate IDs
+ for model keys from your app. If there is an error, raises a subclass of
+ datastore_errors.Error.
+
+ Args:
+    model_key: Key or string to serve as a model specifying the ID sequence
+      in which to allocate IDs
+    size: int or long, the number of IDs to allocate
+
+ Returns:
+ (start, end) of the allocated range, inclusive.
+ """
+ keys, multiple = NormalizeAndTypeCheckKeys(model_key)
+
+ if len(keys) > 1:
+ raise datastore_errors.BadArgumentError(
+ 'Cannot allocate IDs for more than one model key at a time')
+
+ if size > _MAX_ID_BATCH_SIZE:
+ raise datastore_errors.BadArgumentError(
+ 'Cannot allocate more than %s ids at a time' % _MAX_ID_BATCH_SIZE)
+
+ req = datastore_pb.AllocateIdsRequest()
+ req.mutable_model_key().CopyFrom(keys[0]._Key__reference)
+ req.set_size(size)
+
+ resp = datastore_pb.AllocateIdsResponse()
+ try:
+ apiproxy_stub_map.MakeSyncCall('datastore_v3', 'AllocateIds', req, resp)
+ except apiproxy_errors.ApplicationError, err:
+ raise _ToDatastoreError(err)
+
+ return resp.start(), resp.end()
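+
+# Usage sketch (Key.from_path is assumed from datastore_types; any key that
+# models the desired ID sequence works):
+#
+#   start, end = AllocateIds(Key.from_path('Person', 1), 10)
+#   assert end - start + 1 == 10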
+
+
+class MultiQuery(Query):
+ """Class representing a query which requires multiple datastore queries.
+
+ This class is actually a subclass of datastore.Query as it is intended to act
+ like a normal Query object (supporting the same interface).
+
+  Does not support keys-only queries, since it needs whole entities in order
+  to merge sort them. (That's not strictly true if there are no sort orders,
+  or if the sort order is on __key__, but allowing keys-only queries in those
+  cases and not in others would be confusing.)
+ """
+
+ def __init__(self, bound_queries, orderings):
+ if len(bound_queries) > MAX_ALLOWABLE_QUERIES:
+ raise datastore_errors.BadArgumentError(
+ 'Cannot satisfy query -- too many subqueries (max: %d, got %d).'
+ ' Probable cause: too many IN/!= filters in query.' %
+ (MAX_ALLOWABLE_QUERIES, len(bound_queries)))
+
+ for query in bound_queries:
+ if query.IsKeysOnly():
+ raise datastore_errors.BadQueryError(
+ 'MultiQuery does not support keys_only.')
+
+ self.__bound_queries = bound_queries
+ self.__orderings = orderings
+
+ def __str__(self):
+ res = 'MultiQuery: '
+ for query in self.__bound_queries:
+ res = '%s %s' % (res, str(query))
+ return res
+
+ def Get(self, limit, offset=0):
+ """Get results of the query with a limit on the number of results.
+
+ Args:
+ limit: maximum number of values to return.
+ offset: offset requested -- if nonzero, this will override the offset in
+ the original query
+
+ Returns:
+      A list of entities with at most "limit" entries (fewer if the query
+      completes before reading limit values).
+ """
+ count = 1
+ result = []
+
+ iterator = self.Run()
+
+ try:
+      for _ in xrange(offset):
+        iterator.next()
+ except StopIteration:
+ pass
+
+ try:
+ while count <= limit:
+ val = iterator.next()
+ result.append(val)
+ count += 1
+ except StopIteration:
+ pass
+ return result
+
+ class SortOrderEntity(object):
+ """Allow entity comparisons using provided orderings.
+
+ The iterator passed to the constructor is eventually consumed via
+    calls to GetNext(), which generate new SortOrderEntity instances with
+    the same orderings.
+ """
+
+ def __init__(self, entity_iterator, orderings):
+ """Ctor.
+
+ Args:
+ entity_iterator: an iterator of entities which will be wrapped.
+ orderings: an iterable of (identifier, order) pairs. order
+ should be either Query.ASCENDING or Query.DESCENDING.
+ """
+ self.__entity_iterator = entity_iterator
+ self.__entity = None
+ self.__min_max_value_cache = {}
+ try:
+ self.__entity = entity_iterator.next()
+ except StopIteration:
+ pass
+ else:
+ self.__orderings = orderings
+
+ def __str__(self):
+ return str(self.__entity)
+
+ def GetEntity(self):
+ """Gets the wrapped entity."""
+ return self.__entity
+
+ def GetNext(self):
+ """Wrap and return the next entity.
+
+ The entity is retrieved from the iterator given at construction time.
+ """
+ return MultiQuery.SortOrderEntity(self.__entity_iterator,
+ self.__orderings)
+
+ def CmpProperties(self, that):
+ """Compare two entities and return their relative order.
+
+      Compares self to that based on the current sort orderings. Returns
+      negative, zero, or positive depending on whether self is less than,
+      equal to, or greater than that. The comparison behaves as if all
+      values were to be placed in ascending order (highest value last).
+      Only the sort orderings are used to compare; keys are ignored.
+
+ Args:
+ that: SortOrderEntity
+
+ Returns:
+ Negative if self < that
+ Zero if self == that
+ Positive if self > that
+ """
+ if not self.__entity:
+ return cmp(self.__entity, that.__entity)
+
+ for (identifier, order) in self.__orderings:
+ value1 = self.__GetValueForId(self, identifier, order)
+ value2 = self.__GetValueForId(that, identifier, order)
+
+ result = cmp(value1, value2)
+ if order == Query.DESCENDING:
+ result = -result
+ if result:
+ return result
+ return 0
+
+ def __GetValueForId(self, sort_order_entity, identifier, sort_order):
+ value = _GetPropertyValue(sort_order_entity.__entity, identifier)
+ entity_key = sort_order_entity.__entity.key()
+ if (entity_key, identifier) in self.__min_max_value_cache:
+ value = self.__min_max_value_cache[(entity_key, identifier)]
+ elif isinstance(value, list):
+ if sort_order == Query.DESCENDING:
+ value = min(value)
+ else:
+ value = max(value)
+ self.__min_max_value_cache[(entity_key, identifier)] = value
+
+ return value
+
+ def __cmp__(self, that):
+ """Compare self to that w.r.t. values defined in the sort order.
+
+ Compare an entity with another, using sort-order first, then the key
+ order to break ties. This can be used in a heap to have faster min-value
+ lookup.
+
+ Args:
+ that: other entity to compare to
+ Returns:
+ negative: if self is less than that in sort order
+ zero: if self is equal to that in sort order
+ positive: if self is greater than that in sort order
+ """
+ property_compare = self.CmpProperties(that)
+ if property_compare:
+ return property_compare
+ else:
+ return cmp(self.__entity.key(), that.__entity.key())
+
+ def Run(self):
+ """Return an iterable output with all results in order."""
+ results = []
+ count = 1
+ log_level = logging.DEBUG - 1
+ for bound_query in self.__bound_queries:
+      logging.log(log_level, 'Running query #%i', count)
+ results.append(bound_query.Run())
+ count += 1
+
+ def IterateResults(results):
+ """Iterator function to return all results in sorted order.
+
+ Iterate over the array of results, yielding the next element, in
+ sorted order. This function is destructive (results will be empty
+ when the operation is complete).
+
+ Args:
+ results: list of result iterators to merge and iterate through
+
+ Yields:
+ The next result in sorted order.
+ """
+ result_heap = []
+ for result in results:
+ heap_value = MultiQuery.SortOrderEntity(result, self.__orderings)
+ if heap_value.GetEntity():
+ heapq.heappush(result_heap, heap_value)
+
+ used_keys = set()
+
+ while result_heap:
+ top_result = heapq.heappop(result_heap)
+
+        if top_result.GetEntity().key() not in used_keys:
+          yield top_result.GetEntity()
+
+        used_keys.add(top_result.GetEntity().key())
+
+        results_to_push = []
+        while result_heap:
+          next_result = heapq.heappop(result_heap)
+          if cmp(top_result, next_result):
+            results_to_push.append(next_result)
+            break
+          else:
+            results_to_push.append(next_result.GetNext())
+ results_to_push.append(top_result.GetNext())
+
+ for popped_result in results_to_push:
+ if popped_result.GetEntity():
+ heapq.heappush(result_heap, popped_result)
+
+ return IterateResults(results)
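+
+  # MultiQuery sketch: results of the subqueries are merge-sorted via the
+  # heap above and de-duplicated by key.
+  #
+  #   q1 = Query('Person', {'tag =': 'google'})
+  #   q2 = Query('Person', {'tag =': 'app engine'})
+  #   for person in MultiQuery([q1, q2], []).Run():
+  #     pass  # entities matching either tag, each yielded once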
+
+ def Count(self, limit=None):
+ """Return the number of matched entities for this query.
+
+ Will return the de-duplicated count of results. Will call the more
+ efficient Get() function if a limit is given.
+
+ Args:
+ limit: maximum number of entries to count (for any result > limit, return
+ limit).
+ Returns:
+ count of the number of entries returned.
+ """
+ if limit is None:
+ count = 0
+ for i in self.Run():
+ count += 1
+ return count
+ else:
+ return len(self.Get(limit))
+
+ def __setitem__(self, query_filter, value):
+ """Add a new filter by setting it on all subqueries.
+
+ If any of the setting operations raise an exception, the ones
+ that succeeded are undone and the exception is propagated
+ upward.
+
+ Args:
+ query_filter: a string of the form "property operand".
+ value: the value that the given property is compared against.
+ """
+ saved_items = []
+ for index, query in enumerate(self.__bound_queries):
+ saved_items.append(query.get(query_filter, None))
+ try:
+ query[query_filter] = value
+ except:
+ for q, old_value in itertools.izip(self.__bound_queries[:index],
+ saved_items):
+ if old_value is not None:
+ q[query_filter] = old_value
+ else:
+ del q[query_filter]
+ raise
+
+ def __delitem__(self, query_filter):
+ """Delete a filter by deleting it from all subqueries.
+
+ If a KeyError is raised during the attempt, it is ignored, unless
+ every subquery raised a KeyError. If any other exception is
+ raised, any deletes will be rolled back.
+
+ Args:
+ query_filter: the filter to delete.
+
+ Raises:
+ KeyError: No subquery had an entry containing query_filter.
+ """
+ subquery_count = len(self.__bound_queries)
+ keyerror_count = 0
+ saved_items = []
+ for index, query in enumerate(self.__bound_queries):
+ try:
+ saved_items.append(query.get(query_filter, None))
+ del query[query_filter]
+ except KeyError:
+ keyerror_count += 1
+ except:
+ for q, old_value in itertools.izip(self.__bound_queries[:index],
+ saved_items):
+ if old_value is not None:
+ q[query_filter] = old_value
+ raise
+
+ if keyerror_count == subquery_count:
+ raise KeyError(query_filter)
+
+ def __iter__(self):
+ return iter(self.__bound_queries)
+
+
+class Iterator(object):
+ """An iterator over the results of a datastore query.
+
+ Iterators are used to access the results of a Query. An iterator is
+ obtained by building a Query, then calling Run() on it.
+
+ Iterator implements Python's iterator protocol, so results can be accessed
+ with the for and in statements:
+
+ > it = Query('Person').Run()
+ > for person in it:
+ > print 'Hi, %s!' % person['name']
+ """
+ def __init__(self, query_result_pb, batch_size=None):
+ self.__cursor = query_result_pb.cursor()
+ self.__keys_only = query_result_pb.keys_only()
+ self.__batch_size = batch_size
+ self.__buffer = self._ProcessQueryResult(query_result_pb)
+
+ def _Get(self, count):
+ """Gets the next count result(s) of the query.
+
+ Not intended to be used by application developers. Use the python
+ iterator protocol instead.
+
+    This method uses _Next to return the next entities or keys from the list of
+ matching results. If the query specified a sort order, results are returned
+ in that order. Otherwise, the order is undefined.
+
+ The argument, count, specifies the number of results to return. However, the
+ length of the returned list may be smaller than count. This is the case only
+ if count is greater than the number of remaining results.
+
+ The results are always returned as a list. If there are no results left,
+ an empty list is returned.
+
+ Args:
+ # the number of results to return; must be >= 1
+ count: int or long
+
+ Returns:
+ # a list of entities or keys
+ [Entity or Key, ...]
+ """
+ entity_list = self._Next(count)
+ while len(entity_list) < count and self.__more_results:
+      next_results = self._Next(count - len(entity_list))
+ if not next_results:
+ break
+ entity_list += next_results
+    return entity_list
+
+ def _Next(self, count=None):
+ """Returns the next batch of results.
+
+ Not intended to be used by application developers. Use the python
+ iterator protocol instead.
+
+ This method returns the next entities or keys from the list of matching
+ results. If the query specified a sort order, results are returned in that
+ order. Otherwise, the order is undefined.
+
+ The optional argument, count, specifies the number of results to return.
+ However, the length of the returned list may be smaller than count. This is
+ the case if count is greater than the number of remaining results or the
+ size of the remaining results exceeds the RPC buffer limit. Use _Get to
+    ensure all possible entities are retrieved.
+
+ If the count is omitted, the datastore backend decides how many entities to
+ send.
+
+ There is an internal buffer for use with the next() method. If this buffer
+ is not empty, up to 'count' values are removed from this buffer and
+ returned. It's best not to mix _Next() and next().
+
+ The results are always returned as a list. If there are no results left,
+ an empty list is returned.
+
+ Args:
+ # the number of results to return; must be >= 1
+ count: int or long or None
+
+ Returns:
+ # a list of entities or keys
+ [Entity or Key, ...]
+ """
+ if count is not None and (not isinstance(count, (int, long)) or count <= 0):
+ raise datastore_errors.BadArgumentError(
+ 'Argument to _Next must be an int greater than 0; received %s (a %s)' %
+ (count, typename(count)))
+
+ if self.__buffer:
+ if count is None:
+ entity_list = self.__buffer
+ self.__buffer = []
+ return entity_list
+ elif count <= len(self.__buffer):
+ entity_list = self.__buffer[:count]
+ del self.__buffer[:count]
+ return entity_list
+ else:
+ entity_list = self.__buffer
+ self.__buffer = []
+ count -= len(entity_list)
+ else:
+ entity_list = []
+
+ if not self.__more_results:
+ return entity_list
+
+ req = datastore_pb.NextRequest()
+ if count is not None:
+ req.set_count(count)
+ req.mutable_cursor().CopyFrom(self.__cursor)
+ result = datastore_pb.QueryResult()
+ try:
+ apiproxy_stub_map.MakeSyncCall('datastore_v3', 'Next', req, result)
+ except apiproxy_errors.ApplicationError, err:
+ raise _ToDatastoreError(err)
+
+ return entity_list + self._ProcessQueryResult(result)
+
+ def _ProcessQueryResult(self, result):
+ """Returns all results from datastore_pb.QueryResult and updates
+    self.__more_results.
+
+ Not intended to be used by application developers. Use the python
+ iterator protocol instead.
+
+ The results are always returned as a list. If there are no results left,
+ an empty list is returned.
+
+ Args:
+ # the instance of datastore_pb.QueryResult to be stored
+ result: datastore_pb.QueryResult
+
+ Returns:
+ # a list of entities or keys
+ [Entity or Key, ...]
+ """
+ self.__more_results = result.more_results()
+
+ if self.__keys_only:
+ return [Key._FromPb(e.key()) for e in result.result_list()]
+ else:
+ return [Entity._FromPb(e) for e in result.result_list()]
+
+ def next(self):
+ if not self.__buffer:
+ self.__buffer = self._Next(self.__batch_size)
+ try:
+ return self.__buffer.pop(0)
+ except IndexError:
+ raise StopIteration
+
+  def __iter__(self):
+    return self
+
+
+class _Transaction(object):
+ """Encapsulates a transaction currently in progress.
+
+ If we know the entity group for this transaction, it's stored in the
+ entity_group attribute, which is set by RunInTransaction().
+
+  modified_keys is a set containing the Keys of all entities modified (i.e. put
+ or deleted) in this transaction. If an entity is modified more than once, a
+ BadRequestError is raised.
+ """
+ def __init__(self, handle):
+ """Initializes the transaction.
+
+ Args:
+ handle: a datastore_pb.Transaction returned by a BeginTransaction call
+ """
+ assert isinstance(handle, datastore_pb.Transaction)
+ explanation = []
+ assert handle.IsInitialized(explanation), explanation
+
+ self.handle = handle
+ self.entity_group = None
+    self.modified_keys = set()
+
+
+def RunInTransaction(function, *args, **kwargs):
+ """Runs a function inside a datastore transaction.
+
+  Runs the user-provided function inside a transaction, retrying up to the
+  default number of times.
+
+ Args:
+ # a function to be run inside the transaction
+ function: callable
+ # positional arguments to pass to the function
+ args: variable number of any type
+
+ Returns:
+ the function's return value, if any
+
+ Raises:
+ TransactionFailedError, if the transaction could not be committed.
+ """
+ return RunInTransactionCustomRetries(
+ DEFAULT_TRANSACTION_RETRIES, function, *args, **kwargs)
+
+
+def RunInTransactionCustomRetries(retries, function, *args, **kwargs):
+ """Runs a function inside a datastore transaction.
+
+ Runs the user-provided function inside a full-featured, ACID datastore
+ transaction. Every Put, Get, and Delete call in the function is made within
+ the transaction. All entities involved in these calls must belong to the
+ same entity group. Queries are not supported.
+
+ The trailing arguments are passed to the function as positional arguments.
+ If the function returns a value, that value will be returned by
+ RunInTransaction. Otherwise, it will return None.
+
+ The function may raise any exception to roll back the transaction instead of
+ committing it. If this happens, the transaction will be rolled back and the
+ exception will be re-raised up to RunInTransaction's caller.
+
+ If you want to roll back intentionally, but don't have an appropriate
+ exception to raise, you can raise an instance of datastore_errors.Rollback.
+ It will cause a rollback, but will *not* be re-raised up to the caller.
+
+ The function may be run more than once, so it should be idempotent. It
+ should avoid side effects, and it shouldn't have *any* side effects that
+ aren't safe to occur multiple times. This includes modifying the arguments,
+ since they persist across invocations of the function. However, this doesn't
+ include Put, Get, and Delete calls, of course.
+
+ Example usage:
+
+ > def decrement(key, amount=1):
+ > counter = datastore.Get(key)
+ > counter['count'] -= amount
+ > if counter['count'] < 0: # don't let the counter go negative
+ > raise datastore_errors.Rollback()
+ > datastore.Put(counter)
+ >
+  > counter = datastore.Query('Counter', {'name =': 'foo'}).Get(1)[0]
+ > datastore.RunInTransaction(decrement, counter.key(), amount=5)
+
+ Transactions satisfy the traditional ACID properties. They are:
+
+ - Atomic. All of a transaction's operations are executed or none of them are.
+
+ - Consistent. The datastore's state is consistent before and after a
+ transaction, whether it committed or rolled back. Invariants such as
+ "every entity has a primary key" are preserved.
+
+ - Isolated. Transactions operate on a snapshot of the datastore. Other
+    datastore operations do not see intermediate effects of the transaction;
+ they only see its effects after it has committed.
+
+ - Durable. On commit, all writes are persisted to the datastore.
+
+ Nested transactions are not supported.
+
+ Args:
+ # number of retries
+ retries: integer
+ # a function to be run inside the transaction
+ function: callable
+ # positional arguments to pass to the function
+ args: variable number of any type
+
+ Returns:
+ the function's return value, if any
+
+ Raises:
+ TransactionFailedError, if the transaction could not be committed.
+ """
+
+ if _CurrentTransactionKey():
+ raise datastore_errors.BadRequestError(
+ 'Nested transactions are not supported.')
+
+ if retries < 0:
+ raise datastore_errors.BadRequestError(
+        'Number of retries should be a non-negative number.')
+
+ tx_key = None
+
+ try:
+ tx_key = _NewTransactionKey()
+
+    for _ in range(retries + 1):
+ handle = datastore_pb.Transaction()
+ try:
+ apiproxy_stub_map.MakeSyncCall('datastore_v3', 'BeginTransaction',
+ api_base_pb.VoidProto(), handle)
+ except apiproxy_errors.ApplicationError, err:
+ raise _ToDatastoreError(err)
+
+ tx = _Transaction(handle)
+ _txes[tx_key] = tx
+
+ try:
+ result = function(*args, **kwargs)
+ except:
+ original_exception = sys.exc_info()
+
+ try:
+ resp = api_base_pb.VoidProto()
+ apiproxy_stub_map.MakeSyncCall('datastore_v3', 'Rollback',
+ tx.handle, resp)
+ except:
+ exc_info = sys.exc_info()
+ logging.info('Exception sending Rollback:\n' +
+ ''.join(traceback.format_exception(*exc_info)))
+
+ type, value, trace = original_exception
+ if type is datastore_errors.Rollback:
+ return
+ else:
+ raise type, value, trace
+
+ try:
+ resp = datastore_pb.CommitResponse()
+ apiproxy_stub_map.MakeSyncCall('datastore_v3', 'Commit',
+ tx.handle, resp)
+ except apiproxy_errors.ApplicationError, err:
+ if (err.application_error ==
+ datastore_pb.Error.CONCURRENT_TRANSACTION):
+ logging.warning('Transaction collision for entity group with '
+ 'key %r. Retrying...', tx.entity_group)
+ tx.handle = None
+ tx.entity_group = None
+ continue
+ else:
+ raise _ToDatastoreError(err)
+
+ return result
+
+ raise datastore_errors.TransactionFailedError(
+ 'The transaction could not be committed. Please try again.')
+
+ finally:
+ if tx_key in _txes:
+ del _txes[tx_key]
+ del tx_key
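+
+# Usage sketch: same contract as RunInTransaction, with an explicit retry
+# count (decrement and counter come from the docstring example above).
+#
+#   datastore.RunInTransactionCustomRetries(5, decrement, counter.key(),
+#                                           amount=5)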
+
+
+def _MaybeSetupTransaction(request, keys):
+ """If we're in a transaction, validates and populates it in the request.
+
+ If we're currently inside a transaction, this records the entity group,
+ checks that the keys are all in that entity group, and populates the
+ transaction handle in the request.
+
+ Raises BadRequestError if the entity has a different entity group than the
+ current transaction.
+
+ Args:
+ request: GetRequest, PutRequest, DeleteRequest, or Query
+ keys: sequence of Keys
+
+ Returns:
+ _Transaction if we're inside a transaction, otherwise None
+ """
+ assert isinstance(request, (datastore_pb.GetRequest, datastore_pb.PutRequest,
+ datastore_pb.DeleteRequest, datastore_pb.Query,
+ taskqueue_service_pb.TaskQueueAddRequest,
+ )), request.__class__
+ tx_key = None
+
+ try:
+ tx_key = _CurrentTransactionKey()
+ if tx_key:
+ tx = _txes[tx_key]
+
+ groups = [k.entity_group() for k in keys]
+ if tx.entity_group:
+ expected_group = tx.entity_group
+ elif groups:
+ expected_group = groups[0]
+ else:
+ expected_group = None
+
+ for group in groups:
+        if (group != expected_group or
+            (not group.has_id_or_name() and group is not expected_group)):
+ raise _DifferentEntityGroupError(expected_group, group)
+
+ if not tx.entity_group and group.has_id_or_name():
+ tx.entity_group = group
+
+ assert tx.handle.IsInitialized()
+ request.mutable_transaction().CopyFrom(tx.handle)
+
+ return tx
+
+ finally:
+ del tx_key
+
+
+def _DifferentEntityGroupError(a, b):
+ """Raises a BadRequestError that says the given entity groups are different.
+
+ Includes the two entity groups in the message, formatted more clearly and
+ concisely than repr(Key).
+
+ Args:
+ a, b are both Keys that represent entity groups.
+ """
+ def id_or_name(key):
+ if key.name():
+ return 'name=%r' % key.name()
+ else:
+ return 'id=%r' % key.id()
+
+ raise datastore_errors.BadRequestError(
+ 'Cannot operate on different entity groups in a transaction: '
+ '(kind=%r, %s) and (kind=%r, %s).' % (a.kind(), id_or_name(a),
+ b.kind(), id_or_name(b)))
+
+
+def _FindTransactionFrameInStack():
+ """Walks the stack to find a RunInTransaction() call.
+
+ Returns:
+ # this is the RunInTransactionCustomRetries() frame record, if found
+ frame record or None
+ """
+ frame = sys._getframe()
+ filename = frame.f_code.co_filename
+
+ frame = frame.f_back.f_back
+ while frame:
+ if (frame.f_code.co_filename == filename and
+ frame.f_code.co_name == 'RunInTransactionCustomRetries'):
+ return frame
+ frame = frame.f_back
+
+ return None
+
+_CurrentTransactionKey = _FindTransactionFrameInStack
+
+_NewTransactionKey = sys._getframe
+
+
+def _GetCompleteKeyOrError(arg):
+ """Expects an Entity or a Key, and returns the corresponding Key. Raises
+ BadArgumentError or BadKeyError if arg is a different type or is incomplete.
+
+ Args:
+ arg: Entity or Key
+
+ Returns:
+ Key
+ """
+ if isinstance(arg, Key):
+ key = arg
+ elif isinstance(arg, basestring):
+ key = Key(arg)
+ elif isinstance(arg, Entity):
+ key = arg.key()
+  else:
+    raise datastore_errors.BadArgumentError(
+        'Expects argument to be an Entity or Key; received %s (a %s).' %
+        (arg, typename(arg)))
+ assert isinstance(key, Key)
+
+ if not key.has_id_or_name():
+ raise datastore_errors.BadKeyError('Key %r is not complete.' % key)
+
+ return key
+
+
+def _GetPropertyValue(entity, property):
+ """Returns an entity's value for a given property name.
+
+ Handles special properties like __key__ as well as normal properties.
+
+ Args:
+ entity: datastore.Entity
+ property: str; the property name
+
+ Returns:
+ property value. For __key__, a datastore_types.Key.
+
+ Raises:
+ KeyError, if the entity does not have the given property.
+ """
+ if property in datastore_types._SPECIAL_PROPERTIES:
+ assert property == datastore_types._KEY_SPECIAL_PROPERTY
+ return entity.key()
+ else:
+ return entity[property]
+
+
+def _AddOrAppend(dictionary, key, value):
+ """Adds the value to the existing values in the dictionary, if any.
+
+ If dictionary[key] doesn't exist, sets dictionary[key] to value.
+
+ If dictionary[key] is not a list, sets dictionary[key] to [old_value, value].
+
+ If dictionary[key] is a list, appends value to that list.
+
+ Args:
+ dictionary: a dict
+ key, value: anything
+ """
+ if key in dictionary:
+ existing_value = dictionary[key]
+ if isinstance(existing_value, list):
+ existing_value.append(value)
+ else:
+ dictionary[key] = [existing_value, value]
+ else:
+ dictionary[key] = value
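+
+# Behavior sketch:
+#
+#   d = {}
+#   _AddOrAppend(d, 'x', 1)   # d == {'x': 1}
+#   _AddOrAppend(d, 'x', 2)   # d == {'x': [1, 2]}
+#   _AddOrAppend(d, 'x', 3)   # d == {'x': [1, 2, 3]}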
+
+
+def _ToDatastoreError(err):
+ """Converts an apiproxy.ApplicationError to an error in datastore_errors.
+
+ Args:
+ err: apiproxy.ApplicationError
+
+  Raises:
+ a subclass of datastore_errors.Error
+ """
+ errors = {
+ datastore_pb.Error.BAD_REQUEST: datastore_errors.BadRequestError,
+ datastore_pb.Error.CONCURRENT_TRANSACTION:
+ datastore_errors.TransactionFailedError,
+ datastore_pb.Error.INTERNAL_ERROR: datastore_errors.InternalError,
+ datastore_pb.Error.NEED_INDEX: datastore_errors.NeedIndexError,
+ datastore_pb.Error.TIMEOUT: datastore_errors.Timeout,
+ }
+
+ if err.application_error in errors:
+ raise errors[err.application_error](err.error_detail)
+ else:
+ raise datastore_errors.Error(err.error_detail)
diff --git a/google_appengine/google/appengine/api/datastore.pyc b/google_appengine/google/appengine/api/datastore.pyc
new file mode 100644
index 0000000..9056a21
--- /dev/null
+++ b/google_appengine/google/appengine/api/datastore.pyc
Binary files differ
diff --git a/google_appengine/google/appengine/api/datastore_admin.py b/google_appengine/google/appengine/api/datastore_admin.py
new file mode 100755
index 0000000..da2b6c7
--- /dev/null
+++ b/google_appengine/google/appengine/api/datastore_admin.py
@@ -0,0 +1,213 @@
+#!/usr/bin/env python
+#
+# Copyright 2007 Google Inc.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
+
+"""The Python datastore admin API for managing indices and schemas.
+"""
+
+
+from google.appengine.api import api_base_pb
+from google.appengine.api import apiproxy_stub_map
+from google.appengine.api import datastore
+from google.appengine.api import datastore_errors
+from google.appengine.api import datastore_types
+from google.appengine.datastore import datastore_index
+from google.appengine.datastore import datastore_pb
+from google.appengine.runtime import apiproxy_errors
+from google.appengine.datastore import entity_pb
+
+
+_DIRECTION_MAP = {
+ 'asc': entity_pb.Index_Property.ASCENDING,
+ 'ascending': entity_pb.Index_Property.ASCENDING,
+ 'desc': entity_pb.Index_Property.DESCENDING,
+ 'descending': entity_pb.Index_Property.DESCENDING,
+ }
+
+
+def GetSchema(_app=None, properties=True, start_kind=None, end_kind=None):
+ """Infers an app's schema from the entities in the datastore.
+
+ Note that the PropertyValue PBs in the returned EntityProtos are empty
+ placeholders, so they may cause problems if you try to convert them to
+  Python values with e.g. datastore_types. In particular, user values will
+ throw UserNotFoundError because their email and auth domain fields will be
+ empty.
+
+ Args:
+ properties: boolean, whether to include property names and types
+ start_kind, end_kind: optional range endpoints for the kinds to return,
+ compared lexicographically
+
+ Returns:
+ list of entity_pb.EntityProto, with kind and property names and types
+ """
+ req = datastore_pb.GetSchemaRequest()
+ req.set_app(datastore_types.ResolveAppId(_app))
+ req.set_properties(properties)
+ if start_kind is not None:
+ req.set_start_kind(start_kind)
+ if end_kind is not None:
+ req.set_end_kind(end_kind)
+ resp = datastore_pb.Schema()
+
+ _Call('GetSchema', req, resp)
+ return resp.kind_list()
+
+
+def GetIndices(_app=None):
+ """Fetches all composite indices in the datastore for this app.
+
+ Returns:
+ list of entity_pb.CompositeIndex
+ """
+ req = api_base_pb.StringProto()
+ req.set_value(datastore_types.ResolveAppId(_app))
+ resp = datastore_pb.CompositeIndices()
+ try:
+ apiproxy_stub_map.MakeSyncCall('datastore_v3', 'GetIndices', req, resp)
+ except apiproxy_errors.ApplicationError, err:
+ raise datastore._ToDatastoreError(err)
+
+ return resp.index_list()
+
+
+def CreateIndex(index):
+ """Creates a new composite index in the datastore for this app.
+
+ Args:
+ index: entity_pb.CompositeIndex
+
+ Returns:
+ int, the id allocated to the index
+ """
+ resp = api_base_pb.Integer64Proto()
+ _Call('CreateIndex', index, resp)
+ return resp.value()
+
+
+def UpdateIndex(index):
+ """Updates an index's status. The entire index definition must be present.
+
+ Args:
+ index: entity_pb.CompositeIndex
+ """
+ _Call('UpdateIndex', index, api_base_pb.VoidProto())
+
+
+def DeleteIndex(index):
+ """Deletes an index. The entire index definition must be present.
+
+ Args:
+ index: entity_pb.CompositeIndex
+ """
+ _Call('DeleteIndex', index, api_base_pb.VoidProto())
+
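+# A lifecycle sketch for the calls above: a freshly created index starts
+# WRITE_ONLY and is later promoted to READ_WRITE.  `index` is a hypothetical
+# entity_pb.CompositeIndex with id 0, e.g. from IndexDefinitionToProto below:
+#
+#   index.set_id(CreateIndex(index))
+#   index.set_state(entity_pb.CompositeIndex.READ_WRITE)
+#   UpdateIndex(index)
+#   DeleteIndex(index)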
+
+def _Call(call, req, resp):
+ """Generic method for making a datastore API call.
+
+ Args:
+ call: string, the name of the RPC call
+    req: the request PB. If the app_id field is not set, it defaults to
+      the local app.
+ resp: the response PB
+ """
+ if hasattr(req, 'app_id'):
+ req.set_app_id(datastore_types.ResolveAppId(req.app_id(), 'req.app_id()'))
+
+ try:
+ apiproxy_stub_map.MakeSyncCall('datastore_v3', call, req, resp)
+ except apiproxy_errors.ApplicationError, err:
+ raise datastore._ToDatastoreError(err)
+
+
+def IndexDefinitionToProto(app_id, index_definition):
+ """Transform individual Index definition to protocol buffer.
+
+ Args:
+ app_id: Application id for new protocol buffer CompositeIndex.
+ index_definition: datastore_index.Index object to transform.
+
+ Returns:
+ New entity_pb.CompositeIndex with default values set and index
+ information filled in.
+ """
+ proto = entity_pb.CompositeIndex()
+
+ proto.set_app_id(app_id)
+ proto.set_id(0)
+ proto.set_state(entity_pb.CompositeIndex.WRITE_ONLY)
+
+ definition_proto = proto.mutable_definition()
+ definition_proto.set_entity_type(index_definition.kind)
+ definition_proto.set_ancestor(index_definition.ancestor)
+
+ if index_definition.properties is not None:
+ for prop in index_definition.properties:
+ prop_proto = definition_proto.add_property()
+ prop_proto.set_name(prop.name)
+ prop_proto.set_direction(_DIRECTION_MAP[prop.direction])
+
+ return proto
+
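+# For example (a sketch; 'Greeting' and 'date' are hypothetical names),
+# building the proto for a single-property descending index:
+#
+#   definition = datastore_index.Index(
+#       kind='Greeting', ancestor=False,
+#       properties=[datastore_index.Property(name='date', direction='desc')])
+#   proto = IndexDefinitionToProto('myapp', definition)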
+
+def IndexDefinitionsToProtos(app_id, index_definitions):
+ """Transform multiple index definitions to composite index records
+
+ Args:
+ app_id: Application id for new protocol buffer CompositeIndex.
+ index_definition: A list of datastore_index.Index objects to transform.
+
+ Returns:
+ A list of tranformed entity_pb.Compositeindex entities with default values
+ set and index information filled in.
+ """
+ return [IndexDefinitionToProto(app_id, index)
+ for index in index_definitions]
+
+
+def ProtoToIndexDefinition(proto):
+ """Transform individual index protocol buffer to index definition.
+
+ Args:
+ proto: An instance of entity_pb.CompositeIndex to transform.
+
+ Returns:
+ A new instance of datastore_index.Index.
+ """
+ properties = []
+ proto_index = proto.definition()
+ for prop_proto in proto_index.property_list():
+ prop_definition = datastore_index.Property(name=prop_proto.name())
+ if prop_proto.direction() == entity_pb.Index_Property.DESCENDING:
+ prop_definition.direction = 'descending'
+ properties.append(prop_definition)
+
+ index = datastore_index.Index(kind=proto_index.entity_type(),
+ properties=properties)
+ if proto_index.ancestor():
+ index.ancestor = True
+ return index
+
+
+def ProtosToIndexDefinitions(protos):
+  """Transforms multiple index protocol buffers to index definitions.
+
+  Args:
+    protos: A list of entity_pb.CompositeIndex records to transform.
+
+  Returns:
+    A list of datastore_index.Index objects.
+  """
+  return [ProtoToIndexDefinition(definition) for definition in protos]
diff --git a/google_appengine/google/appengine/api/datastore_admin.pyc b/google_appengine/google/appengine/api/datastore_admin.pyc
new file mode 100644
index 0000000..302bf52
--- /dev/null
+++ b/google_appengine/google/appengine/api/datastore_admin.pyc
Binary files differ
diff --git a/google_appengine/google/appengine/api/datastore_entities.py b/google_appengine/google/appengine/api/datastore_entities.py
new file mode 100755
index 0000000..93ffdb5
--- /dev/null
+++ b/google_appengine/google/appengine/api/datastore_entities.py
@@ -0,0 +1,343 @@
+#!/usr/bin/env python
+#
+# Copyright 2007 Google Inc.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
+
+"""Classes for common kinds, including Contact, Message, and Event.
+
+Most of these kinds are based on the gd namespace "kinds" from GData:
+
+ http://code.google.com/apis/gdata/common-elements.html
+"""
+
+
+
+
+
+import types
+import urlparse
+from xml.sax import saxutils
+from google.appengine.datastore import datastore_pb
+from google.appengine.api import datastore
+from google.appengine.api import datastore_errors
+from google.appengine.api import datastore_types
+
+class GdKind(datastore.Entity):
+ """ A base class for gd namespace kinds.
+
+ This class contains common logic for all gd namespace kinds. For example,
+ this class translates datastore (app id, kind, key) tuples to tag:
+ URIs appropriate for use in <key> tags.
+ """
+
+ HEADER = u"""<entry xmlns:gd='http://schemas.google.com/g/2005'>
+ <category scheme='http://schemas.google.com/g/2005#kind'
+ term='http://schemas.google.com/g/2005#%s' />"""
+ FOOTER = u"""
+</entry>"""
+
+ _kind_properties = set()
+ _contact_properties = set()
+
+ def __init__(self, kind, title, kind_properties, contact_properties=[]):
+ """ Ctor.
+
+ title is the name of this particular entity, e.g. Bob Jones or Mom's
+ Birthday Party.
+
+ kind_properties is a list of property names that should be included in
+ this entity's XML encoding as first-class XML elements, instead of
+ <property> elements. 'title' and 'content' are added to kind_properties
+ automatically, and may not appear in contact_properties.
+
+ contact_properties is a list of property names that are Keys that point to
+ Contact entities, and should be included in this entity's XML encoding as
+ <gd:who> elements. If a property name is included in both kind_properties
+ and contact_properties, it is treated as a Contact property.
+
+ Args:
+ kind: string
+ title: string
+ kind_properties: list of strings
+      contact_properties: list of strings
+ """
+ datastore.Entity.__init__(self, kind)
+
+ if not isinstance(title, types.StringTypes):
+ raise datastore_errors.BadValueError(
+ 'Expected a string for title; received %s (a %s).' %
+ (title, datastore_types.typename(title)))
+ self['title'] = title
+ self['content'] = ''
+
+ self._contact_properties = set(contact_properties)
+ assert not self._contact_properties.intersection(self.keys())
+
+ self._kind_properties = set(kind_properties) - self._contact_properties
+ self._kind_properties.add('title')
+ self._kind_properties.add('content')
+
+ def _KindPropertiesToXml(self):
+ """ Convert the properties that are part of this gd kind to XML. For
+ testability, the XML elements in the output are sorted alphabetically
+ by property name.
+
+ Returns:
+ string # the XML representation of the gd kind properties
+ """
+ properties = self._kind_properties.intersection(set(self.keys()))
+
+ xml = u''
+ for prop in sorted(properties):
+ prop_xml = saxutils.quoteattr(prop)[1:-1]
+
+ value = self[prop]
+      has_toxml = (hasattr(value, 'ToXml') or
+                   (isinstance(value, list) and value and
+                    hasattr(value[0], 'ToXml')))
+
+ for val in self._XmlEscapeValues(prop):
+ if has_toxml:
+ xml += '\n %s' % val
+ else:
+ xml += '\n <%s>%s</%s>' % (prop_xml, val, prop_xml)
+
+ return xml
+
+
+ def _ContactPropertiesToXml(self):
+ """ Convert this kind's Contact properties kind to XML. For testability,
+ the XML elements in the output are sorted alphabetically by property name.
+
+ Returns:
+ string # the XML representation of the Contact properties
+ """
+ properties = self._contact_properties.intersection(set(self.keys()))
+
+ xml = u''
+ for prop in sorted(properties):
+ values = self[prop]
+ if not isinstance(values, list):
+ values = [values]
+
+ for value in values:
+ assert isinstance(value, datastore_types.Key)
+ xml += """
+ <gd:who rel="http://schemas.google.com/g/2005#%s.%s>
+ <gd:entryLink href="%s" />
+ </gd:who>""" % (self.kind().lower(), prop, value.ToTagUri())
+
+ return xml
+
+
+ def _LeftoverPropertiesToXml(self):
+ """ Convert all of this entity's properties that *aren't* part of this gd
+ kind to XML.
+
+ Returns:
+ string # the XML representation of the leftover properties
+ """
+ leftovers = set(self.keys())
+ leftovers -= self._kind_properties
+ leftovers -= self._contact_properties
+ if leftovers:
+ return u'\n ' + '\n '.join(self._PropertiesToXml(leftovers))
+ else:
+ return u''
+
+ def ToXml(self):
+ """ Returns an XML representation of this entity, as a string.
+ """
+ xml = GdKind.HEADER % self.kind().lower()
+ xml += self._KindPropertiesToXml()
+ xml += self._ContactPropertiesToXml()
+ xml += self._LeftoverPropertiesToXml()
+ xml += GdKind.FOOTER
+ return xml
+
+
+class Message(GdKind):
+ """A message, such as an email, a discussion group posting, or a comment.
+
+ Includes the message title, contents, participants, and other properties.
+
+ This is the gd Message kind. See:
+ http://code.google.com/apis/gdata/common-elements.html#gdMessageKind
+
+ These properties are meaningful. They are all optional.
+
+ property name property type meaning
+ -------------------------------------
+ title string message subject
+ content string message body
+ from Contact* sender
+ to Contact* primary recipient
+ cc Contact* CC recipient
+ bcc Contact* BCC recipient
+ reply-to Contact* intended recipient of replies
+ link Link* attachment
+ category Category* tag or label associated with this message
+ geoPt GeoPt* geographic location the message was posted from
+ rating Rating* message rating, as defined by the application
+
+ * means this property may be repeated.
+
+ The Contact properties should be Keys of Contact entities. They are
+ represented in the XML encoding as linked <gd:who> elements.
+ """
+ KIND_PROPERTIES = ['title', 'content', 'link', 'category', 'geoPt', 'rating']
+ CONTACT_PROPERTIES = ['from', 'to', 'cc', 'bcc', 'reply-to']
+
+ def __init__(self, title, kind='Message'):
+ GdKind.__init__(self, kind, title, Message.KIND_PROPERTIES,
+ Message.CONTACT_PROPERTIES)
+
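+# A usage sketch ('sender_key' is a hypothetical Key of a stored Contact):
+#
+#   msg = Message('Lunch?')
+#   msg['content'] = 'Meet at noon.'
+#   msg['from'] = sender_key
+#   datastore.Put(msg)
+#   print msg.ToXml()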
+
+class Event(GdKind):
+ """A calendar event.
+
+ Includes the event title, description, location, organizer, start and end
+ time, and other details.
+
+ This is the gd Event kind. See:
+ http://code.google.com/apis/gdata/common-elements.html#gdEventKind
+
+ These properties are meaningful. They are all optional.
+
+ property name property type meaning
+ -------------------------------------
+ title string event name
+ content string event description
+ author string the organizer's name
+ where string* human-readable location (not a GeoPt)
+ startTime timestamp start time
+ endTime timestamp end time
+ eventStatus string one of the Event.Status values
+ link Link* page with more information
+ category Category* tag or label associated with this event
+ attendee Contact* attendees and other related people
+
+ * means this property may be repeated.
+
+ The Contact properties should be Keys of Contact entities. They are
+ represented in the XML encoding as linked <gd:who> elements.
+ """
+ KIND_PROPERTIES = ['title', 'content', 'author', 'where', 'startTime',
+ 'endTime', 'eventStatus', 'link', 'category']
+ CONTACT_PROPERTIES = ['attendee']
+
+ class Status:
+ CONFIRMED = 'confirmed'
+ TENTATIVE = 'tentative'
+ CANCELED = 'canceled'
+
+ def __init__(self, title, kind='Event'):
+ GdKind.__init__(self, kind, title, Event.KIND_PROPERTIES,
+ Event.CONTACT_PROPERTIES)
+
+ def ToXml(self):
+ """ Override GdKind.ToXml() to special-case author, gd:where, gd:when, and
+ gd:eventStatus.
+ """
+ xml = GdKind.HEADER % self.kind().lower()
+
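+    # Emit only the generic gd properties (title, content, link, category)
+    # here; author, where, when, and eventStatus are special-cased below.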
+ self._kind_properties = set(Contact.KIND_PROPERTIES)
+ xml += self._KindPropertiesToXml()
+
+ if 'author' in self:
+ xml += """
+ <author><name>%s</name></author>""" % self['author']
+
+ if 'eventStatus' in self:
+ xml += """
+ <gd:eventStatus value="http://schemas.google.com/g/2005#event.%s" />""" % (
+ self['eventStatus'])
+
+ if 'where' in self:
+ lines = ['<gd:where valueString="%s" />' % val
+ for val in self._XmlEscapeValues('where')]
+ xml += '\n ' + '\n '.join(lines)
+
+    xml += '\n  <gd:when'
+ for key in ['startTime', 'endTime']:
+ if key in self:
+ xml += ' %s="%s"' % (key, self[key].isoformat())
+ xml += ' />'
+
+ self._kind_properties.update(['author', 'where', 'startTime', 'endTime',
+ 'eventStatus'])
+ xml += self._ContactPropertiesToXml()
+ xml += self._LeftoverPropertiesToXml()
+ xml += GdKind.FOOTER
+ return xml
+
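+# A usage sketch; startTime and endTime must be datetimes, since ToXml()
+# calls isoformat() on them:
+#
+#   import datetime
+#   party = Event("Mom's Birthday Party")
+#   party['where'] = 'Our house'
+#   party['startTime'] = datetime.datetime(2009, 10, 19, 18, 0)
+#   party['eventStatus'] = Event.Status.CONFIRMED
+#   print party.ToXml()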
+
+class Contact(GdKind):
+ """A contact: a person, a venue such as a club or a restaurant, or an
+ organization.
+
+ This is the gd Contact kind. See:
+ http://code.google.com/apis/gdata/common-elements.html#gdContactKind
+
+ Most of the information about the contact is in the <gd:contactSection>
+ element; see the reference section for that element for details.
+
+ These properties are meaningful. They are all optional.
+
+ property name property type meaning
+ -------------------------------------
+ title string contact's name
+ content string notes
+ email Email* email address
+ geoPt GeoPt* geographic location
+ im IM* IM address
+ phoneNumber Phonenumber* phone number
+ postalAddress PostalAddress* mailing address
+ link Link* link to more information
+ category Category* tag or label associated with this contact
+
+ * means this property may be repeated.
+ """
+ CONTACT_SECTION_HEADER = """
+ <gd:contactSection>"""
+ CONTACT_SECTION_FOOTER = """
+ </gd:contactSection>"""
+
+ KIND_PROPERTIES = ['title', 'content', 'link', 'category']
+
+ CONTACT_SECTION_PROPERTIES = ['email', 'geoPt', 'im', 'phoneNumber',
+ 'postalAddress']
+
+ def __init__(self, title, kind='Contact'):
+ GdKind.__init__(self, kind, title, Contact.KIND_PROPERTIES)
+
+ def ToXml(self):
+ """ Override GdKind.ToXml() to put some properties inside a
+ gd:contactSection.
+ """
+ xml = GdKind.HEADER % self.kind().lower()
+
+ self._kind_properties = set(Contact.KIND_PROPERTIES)
+ xml += self._KindPropertiesToXml()
+
+ xml += Contact.CONTACT_SECTION_HEADER
+ self._kind_properties = set(Contact.CONTACT_SECTION_PROPERTIES)
+ xml += self._KindPropertiesToXml()
+ xml += Contact.CONTACT_SECTION_FOOTER
+
+ self._kind_properties.update(Contact.KIND_PROPERTIES)
+ xml += self._LeftoverPropertiesToXml()
+ xml += GdKind.FOOTER
+ return xml
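+
+
+# A usage sketch; the Email value is illustrative:
+#
+#   bob = Contact('Bob Jones')
+#   bob['email'] = datastore_types.Email('bob@example.com')
+#   print bob.ToXml()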
diff --git a/google_appengine/google/appengine/api/datastore_errors.py b/google_appengine/google/appengine/api/datastore_errors.py
new file mode 100755
index 0000000..ff53ba2
--- /dev/null
+++ b/google_appengine/google/appengine/api/datastore_errors.py
@@ -0,0 +1,105 @@
+#!/usr/bin/env python
+#
+# Copyright 2007 Google Inc.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
+
+"""Errors used in the Python datastore API."""
+
+
+
+
+
+
+class Error(Exception):
+ """Base datastore error type.
+ """
+
+class BadValueError(Error):
+ """Raised by Entity.__setitem__(), Query.__setitem__(), Get(), and others
+ when a property value or filter value is invalid.
+ """
+
+class BadPropertyError(Error):
+ """Raised by Entity.__setitem__() when a property name isn't a string.
+ """
+
+class BadRequestError(Error):
+ """Raised by datastore calls when the parameter(s) are invalid.
+ """
+
+class EntityNotFoundError(Error):
+ """DEPRECATED: Raised by Get() when the requested entity is not found.
+ """
+
+class BadArgumentError(Error):
+ """Raised by Query.Order(), Iterator.Next(), and others when they're
+ passed an invalid argument.
+ """
+
+class QueryNotFoundError(Error):
+ """DEPRECATED: Raised by Iterator methods when the Iterator is invalid. This
+ should not happen during normal usage; it protects against malicious users
+ and system errors.
+ """
+
+class TransactionNotFoundError(Error):
+ """DEPRECATED: Raised by RunInTransaction. This is an internal error; you
+ should not see this.
+ """
+
+class Rollback(Error):
+ """May be raised by transaction functions when they want to roll back
+ instead of committing. Note that *any* exception raised by a transaction
+ function will cause a rollback. This is purely for convenience. See
+ datastore.RunInTransaction for details.
+ """
+
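+# A sketch of Rollback in use (the transaction function and 'paid' flag are
+# hypothetical; see datastore.RunInTransaction):
+#
+#   def txn(key):
+#     entity = datastore.Get(key)
+#     if entity['paid']:
+#       raise Rollback()
+#     ...
+#   datastore.RunInTransaction(txn, key)
+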
+class TransactionFailedError(Error):
+ """Raised by RunInTransaction methods when the transaction could not be
+ committed, even after retrying. This is usually due to high contention.
+ """
+
+class BadFilterError(Error):
+ """Raised by Query.__setitem__() and Query.Run() when a filter string is
+ invalid.
+ """
+ def __init__(self, filter):
+ self.filter = filter
+
+ def __str__(self):
+ return (u'BadFilterError: invalid filter: %s.' % self.filter)
+
+class BadQueryError(Error):
+ """Raised by Query when a query or query string is invalid.
+ """
+
+class BadKeyError(Error):
+ """Raised by Key.__str__ when the key is invalid.
+ """
+
+class InternalError(Error):
+ """An internal datastore error. Please report this to Google.
+ """
+
+class NeedIndexError(Error):
+ """No matching index was found for a query that requires an index. Check
+ the Indexes page in the Admin Console and your index.yaml file.
+ """
+
+class Timeout(Error):
+ """The datastore operation timed out. This can happen when you attempt to
+ put, get, or delete too many entities or an entity with too many properties,
+ or if the datastore is overloaded or having trouble.
+ """
diff --git a/google_appengine/google/appengine/api/datastore_errors.pyc b/google_appengine/google/appengine/api/datastore_errors.pyc
new file mode 100644
index 0000000..4f947c1
--- /dev/null
+++ b/google_appengine/google/appengine/api/datastore_errors.pyc
Binary files differ
diff --git a/google_appengine/google/appengine/api/datastore_file_stub.py b/google_appengine/google/appengine/api/datastore_file_stub.py
new file mode 100755
index 0000000..ebd47fe
--- /dev/null
+++ b/google_appengine/google/appengine/api/datastore_file_stub.py
@@ -0,0 +1,1061 @@
+#!/usr/bin/env python
+#
+# Copyright 2007 Google Inc.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
+
+"""
+In-memory persistent stub for the Python datastore API. Gets, queries,
+and searches are implemented as in-memory scans over all entities.
+
+Stores entities across sessions as pickled proto bufs in a single file. On
+startup, all entities are read from the file and loaded into memory. On
+every Put(), the file is wiped and all entities are written from scratch.
+Clients can also manually Read() and Write() the file themselves.
+
+Transactions are serialized through __tx_lock. Each transaction acquires it
+when it begins and releases it when it commits or rolls back. This is
+important, since there are other member variables like __tx_snapshot that are
+per-transaction, so they should only be used by one tx at a time.
+"""
+
+
+
+
+
+
+import datetime
+import logging
+import md5
+import os
+import struct
+import sys
+import tempfile
+import threading
+import warnings
+
+import cPickle as pickle
+
+from google.appengine.api import api_base_pb
+from google.appengine.api import apiproxy_stub
+from google.appengine.api import datastore
+from google.appengine.api import datastore_admin
+from google.appengine.api import datastore_errors
+from google.appengine.api import datastore_types
+from google.appengine.api import users
+from google.appengine.datastore import datastore_pb
+from google.appengine.datastore import datastore_index
+from google.appengine.runtime import apiproxy_errors
+from google.net.proto import ProtocolBuffer
+from google.appengine.datastore import entity_pb
+
+warnings.filterwarnings('ignore', 'tempnam is a potential security risk')
+
+
+entity_pb.Reference.__hash__ = lambda self: hash(self.Encode())
+datastore_pb.Query.__hash__ = lambda self: hash(self.Encode())
+
+
+_MAXIMUM_RESULTS = 1000
+
+
+_MAX_QUERY_OFFSET = 1000
+
+
+_MAX_QUERY_COMPONENTS = 100
+
+_BATCH_SIZE = 20
+
+class _StoredEntity(object):
+ """Simple wrapper around an entity stored by the stub.
+
+ Public properties:
+ protobuf: Native protobuf Python object, entity_pb.EntityProto.
+ encoded_protobuf: Encoded binary representation of above protobuf.
+ native: datastore.Entity instance.
+ """
+
+ def __init__(self, entity):
+ """Create a _StoredEntity object and store an entity.
+
+ Args:
+ entity: entity_pb.EntityProto to store.
+ """
+ self.protobuf = entity
+
+ self.encoded_protobuf = entity.Encode()
+
+ self.native = datastore.Entity._FromPb(entity)
+
+
+class _Cursor(object):
+ """A query cursor.
+
+ Public properties:
+ cursor: the integer cursor
+ count: the original total number of results
+ keys_only: whether the query is keys_only
+
+ Class attributes:
+ _next_cursor: the next cursor to allocate
+ _next_cursor_lock: protects _next_cursor
+ """
+ _next_cursor = 1
+ _next_cursor_lock = threading.Lock()
+
+ def __init__(self, results, keys_only):
+ """Constructor.
+
+ Args:
+ # the query results, in order, such that pop(0) is the next result
+ results: list of entity_pb.EntityProto
+ keys_only: integer
+ """
+ self.__results = results
+ self.count = len(results)
+ self.keys_only = keys_only
+
+ self._next_cursor_lock.acquire()
+ try:
+ self.cursor = _Cursor._next_cursor
+ _Cursor._next_cursor += 1
+ finally:
+ self._next_cursor_lock.release()
+
+ def PopulateQueryResult(self, result, count):
+ """Populates a QueryResult with this cursor and the given number of results.
+
+ Args:
+ result: datastore_pb.QueryResult
+ count: integer
+ """
+ result.mutable_cursor().set_cursor(self.cursor)
+ result.set_keys_only(self.keys_only)
+
+ results_pbs = [r._ToPb() for r in self.__results[:count]]
+ result.result_list().extend(results_pbs)
+ del self.__results[:count]
+
+ result.set_more_results(len(self.__results) > 0)
+
+
+class DatastoreFileStub(apiproxy_stub.APIProxyStub):
+ """ Persistent stub for the Python datastore API.
+
+ Stores all entities in memory, and persists them to a file as pickled
+ protocol buffers. A DatastoreFileStub instance handles a single app's data
+ and is backed by files on disk.
+ """
+
+ _PROPERTY_TYPE_TAGS = {
+ datastore_types.Blob: entity_pb.PropertyValue.kstringValue,
+ bool: entity_pb.PropertyValue.kbooleanValue,
+ datastore_types.Category: entity_pb.PropertyValue.kstringValue,
+ datetime.datetime: entity_pb.PropertyValue.kint64Value,
+ datastore_types.Email: entity_pb.PropertyValue.kstringValue,
+ float: entity_pb.PropertyValue.kdoubleValue,
+ datastore_types.GeoPt: entity_pb.PropertyValue.kPointValueGroup,
+ datastore_types.IM: entity_pb.PropertyValue.kstringValue,
+ int: entity_pb.PropertyValue.kint64Value,
+ datastore_types.Key: entity_pb.PropertyValue.kReferenceValueGroup,
+ datastore_types.Link: entity_pb.PropertyValue.kstringValue,
+ long: entity_pb.PropertyValue.kint64Value,
+ datastore_types.PhoneNumber: entity_pb.PropertyValue.kstringValue,
+ datastore_types.PostalAddress: entity_pb.PropertyValue.kstringValue,
+ datastore_types.Rating: entity_pb.PropertyValue.kint64Value,
+ str: entity_pb.PropertyValue.kstringValue,
+ datastore_types.Text: entity_pb.PropertyValue.kstringValue,
+ type(None): 0,
+ unicode: entity_pb.PropertyValue.kstringValue,
+ users.User: entity_pb.PropertyValue.kUserValueGroup,
+ }
+
+ WRITE_ONLY = entity_pb.CompositeIndex.WRITE_ONLY
+ READ_WRITE = entity_pb.CompositeIndex.READ_WRITE
+ DELETED = entity_pb.CompositeIndex.DELETED
+ ERROR = entity_pb.CompositeIndex.ERROR
+
+ _INDEX_STATE_TRANSITIONS = {
+ WRITE_ONLY: frozenset((READ_WRITE, DELETED, ERROR)),
+ READ_WRITE: frozenset((DELETED,)),
+ ERROR: frozenset((DELETED,)),
+ DELETED: frozenset((ERROR,)),
+ }
+
+ def __init__(self,
+ app_id,
+ datastore_file,
+ history_file,
+ require_indexes=False,
+ service_name='datastore_v3',
+ trusted=False):
+ """Constructor.
+
+ Initializes and loads the datastore from the backing files, if they exist.
+
+ Args:
+ app_id: string
+      datastore_file: string, stores all entities across sessions. Pass None
+          to run without a backing file.
+      history_file: string, stores query history. Pass None to skip it, as
+          with datastore_file.
+ require_indexes: bool, default False. If True, composite indexes must
+ exist in index.yaml for queries that need them.
+ service_name: Service name expected for all calls.
+ trusted: bool, default False. If True, this stub allows an app to
+ access the data of another app.
+ """
+ super(DatastoreFileStub, self).__init__(service_name)
+
+
+ assert isinstance(app_id, basestring) and app_id != ''
+ self.__app_id = app_id
+ self.__datastore_file = datastore_file
+ self.__history_file = history_file
+ self.SetTrusted(trusted)
+
+ self.__entities = {}
+
+ self.__schema_cache = {}
+
+ self.__tx_snapshot = {}
+
+ self.__queries = {}
+
+ self.__transactions = {}
+
+ self.__indexes = {}
+ self.__require_indexes = require_indexes
+
+ self.__query_history = {}
+
+ self.__next_id = 1
+ self.__next_tx_handle = 1
+ self.__next_index_id = 1
+ self.__id_lock = threading.Lock()
+ self.__tx_handle_lock = threading.Lock()
+ self.__index_id_lock = threading.Lock()
+ self.__tx_lock = threading.Lock()
+ self.__entities_lock = threading.Lock()
+ self.__file_lock = threading.Lock()
+ self.__indexes_lock = threading.Lock()
+
+ self.Read()
+
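+  # A typical wiring sketch (file paths are illustrative; this is roughly how
+  # the dev_appserver registers the stub):
+  #
+  #   stub = DatastoreFileStub('myapp', '/tmp/myapp.datastore',
+  #                            '/tmp/myapp.history')
+  #   apiproxy_stub_map.apiproxy.RegisterStub('datastore_v3', stub)
+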
+ def Clear(self):
+ """ Clears the datastore by deleting all currently stored entities and
+ queries. """
+ self.__entities = {}
+ self.__queries = {}
+ self.__transactions = {}
+ self.__query_history = {}
+ self.__schema_cache = {}
+
+ def SetTrusted(self, trusted):
+ """Set/clear the trusted bit in the stub.
+
+ This bit indicates that the app calling the stub is trusted. A
+ trusted app can write to datastores of other apps.
+
+ Args:
+ trusted: boolean.
+ """
+ self.__trusted = trusted
+
+ def __ValidateAppId(self, app_id):
+ """Verify that this is the stub for app_id.
+
+ Args:
+ app_id: An application ID.
+
+ Raises:
+ datastore_errors.BadRequestError: if this is not the stub for app_id.
+ """
+ if not self.__trusted and app_id != self.__app_id:
+ raise datastore_errors.BadRequestError(
+ 'app %s cannot access app %s\'s data' % (self.__app_id, app_id))
+
+ def __ValidateKey(self, key):
+ """Validate this key.
+
+ Args:
+ key: entity_pb.Reference
+
+ Raises:
+ datastore_errors.BadRequestError: if the key is invalid
+ """
+ assert isinstance(key, entity_pb.Reference)
+
+ self.__ValidateAppId(key.app())
+
+ for elem in key.path().element_list():
+ if elem.has_id() == elem.has_name():
+ raise datastore_errors.BadRequestError(
+ 'each key path element should have id or name but not both: %r' % key)
+
+ def _AppIdNamespaceKindForKey(self, key):
+ """ Get (app, kind) tuple from given key.
+
+ The (app, kind) tuple is used as an index into several internal
+ dictionaries, e.g. __entities.
+
+ Args:
+ key: entity_pb.Reference
+
+ Returns:
+ Tuple (app, kind), both are unicode strings.
+ """
+ last_path = key.path().element_list()[-1]
+ return key.app(), last_path.type()
+
+ def _StoreEntity(self, entity):
+ """ Store the given entity.
+
+ Args:
+ entity: entity_pb.EntityProto
+ """
+ key = entity.key()
+ app_kind = self._AppIdNamespaceKindForKey(key)
+ if app_kind not in self.__entities:
+ self.__entities[app_kind] = {}
+ self.__entities[app_kind][key] = _StoredEntity(entity)
+
+ if app_kind in self.__schema_cache:
+ del self.__schema_cache[app_kind]
+
+ READ_PB_EXCEPTIONS = (ProtocolBuffer.ProtocolBufferDecodeError, LookupError,
+ TypeError, ValueError)
+ READ_ERROR_MSG = ('Data in %s is corrupt or a different version. '
+ 'Try running with the --clear_datastore flag.\n%r')
+ READ_PY250_MSG = ('Are you using FloatProperty and/or GeoPtProperty? '
+ 'Unfortunately loading float values from the datastore '
+ 'file does not work with Python 2.5.0. '
+ 'Please upgrade to a newer Python 2.5 release or use '
+ 'the --clear_datastore flag.\n')
+
+ def Read(self):
+ """ Reads the datastore and history files into memory.
+
+ The in-memory query history is cleared, but the datastore is *not*
+ cleared; the entities in the files are merged into the entities in memory.
+ If you want them to overwrite the in-memory datastore, call Clear() before
+ calling Read().
+
+ If the datastore file contains an entity with the same app name, kind, and
+ key as an entity already in the datastore, the entity from the file
+ overwrites the entity in the datastore.
+
+ Also sets __next_id to one greater than the highest id allocated so far.
+ """
+ if self.__datastore_file and self.__datastore_file != '/dev/null':
+ for encoded_entity in self.__ReadPickled(self.__datastore_file):
+ try:
+ entity = entity_pb.EntityProto(encoded_entity)
+ except self.READ_PB_EXCEPTIONS, e:
+ raise datastore_errors.InternalError(self.READ_ERROR_MSG %
+ (self.__datastore_file, e))
+ except struct.error, e:
+ if (sys.version_info[0:3] == (2, 5, 0)
+ and e.message.startswith('unpack requires a string argument')):
+ raise datastore_errors.InternalError(self.READ_PY250_MSG +
+ self.READ_ERROR_MSG %
+ (self.__datastore_file, e))
+ else:
+ raise
+
+ self._StoreEntity(entity)
+
+ last_path = entity.key().path().element_list()[-1]
+ if last_path.has_id() and last_path.id() >= self.__next_id:
+ self.__next_id = last_path.id() + 1
+
+ self.__query_history = {}
+ for encoded_query, count in self.__ReadPickled(self.__history_file):
+ try:
+ query_pb = datastore_pb.Query(encoded_query)
+ except self.READ_PB_EXCEPTIONS, e:
+ raise datastore_errors.InternalError(self.READ_ERROR_MSG %
+ (self.__history_file, e))
+
+ if query_pb in self.__query_history:
+ self.__query_history[query_pb] += count
+ else:
+ self.__query_history[query_pb] = count
+
+ def Write(self):
+ """ Writes out the datastore and history files. Be careful! If the files
+ already exist, this method overwrites them!
+ """
+ self.__WriteDatastore()
+ self.__WriteHistory()
+
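+  # A round-trip sketch: Write() persists the in-memory state; Clear()
+  # followed by Read() then reloads strictly from the backing files:
+  #
+  #   stub.Write()
+  #   stub.Clear()
+  #   stub.Read()
+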
+ def __WriteDatastore(self):
+ """ Writes out the datastore file. Be careful! If the file already exist,
+ this method overwrites it!
+ """
+ if self.__datastore_file and self.__datastore_file != '/dev/null':
+ encoded = []
+ for kind_dict in self.__entities.values():
+ for entity in kind_dict.values():
+ encoded.append(entity.encoded_protobuf)
+
+ self.__WritePickled(encoded, self.__datastore_file)
+
+ def __WriteHistory(self):
+ """ Writes out the history file. Be careful! If the file already exist,
+ this method overwrites it!
+ """
+ if self.__history_file and self.__history_file != '/dev/null':
+ encoded = [(query.Encode(), count)
+ for query, count in self.__query_history.items()]
+
+ self.__WritePickled(encoded, self.__history_file)
+
+ def __ReadPickled(self, filename):
+ """Reads a pickled object from the given file and returns it.
+ """
+ self.__file_lock.acquire()
+
+ try:
+ try:
+ if filename and filename != '/dev/null' and os.path.isfile(filename):
+ return pickle.load(open(filename, 'rb'))
+ else:
+ logging.warning('Could not read datastore data from %s', filename)
+ except (AttributeError, LookupError, ImportError, NameError, TypeError,
+ ValueError, struct.error, pickle.PickleError), e:
+ raise datastore_errors.InternalError(
+ 'Could not read data from %s. Try running with the '
+ '--clear_datastore flag. Cause:\n%r' % (filename, e))
+ finally:
+ self.__file_lock.release()
+
+ return []
+
+ def __WritePickled(self, obj, filename, openfile=file):
+ """Pickles the object and writes it to the given file.
+ """
+ if not filename or filename == '/dev/null' or not obj:
+ return
+
+ tmpfile = openfile(os.tempnam(os.path.dirname(filename)), 'wb')
+
+ pickler = pickle.Pickler(tmpfile, protocol=1)
+ pickler.fast = True
+ pickler.dump(obj)
+
+ tmpfile.close()
+
+ self.__file_lock.acquire()
+ try:
+ try:
+ os.rename(tmpfile.name, filename)
+ except OSError:
+ try:
+ os.remove(filename)
+ except:
+ pass
+ os.rename(tmpfile.name, filename)
+ finally:
+ self.__file_lock.release()
+
+ def MakeSyncCall(self, service, call, request, response):
+ """ The main RPC entry point. service must be 'datastore_v3'.
+ """
+ self.assertPbIsInitialized(request)
+ super(DatastoreFileStub, self).MakeSyncCall(service,
+ call,
+ request,
+ response)
+ self.assertPbIsInitialized(response)
+
+ def assertPbIsInitialized(self, pb):
+ """Raises an exception if the given PB is not initialized and valid."""
+ explanation = []
+ assert pb.IsInitialized(explanation), explanation
+ pb.Encode()
+
+ def QueryHistory(self):
+ """Returns a dict that maps Query PBs to times they've been run.
+ """
+ return dict((pb, times) for pb, times in self.__query_history.items()
+ if pb.app() == self.__app_id)
+
+ def _Dynamic_Put(self, put_request, put_response):
+ clones = []
+ for entity in put_request.entity_list():
+ self.__ValidateKey(entity.key())
+
+ clone = entity_pb.EntityProto()
+ clone.CopyFrom(entity)
+
+ for property in clone.property_list():
+ if property.value().has_uservalue():
+ uid = md5.new(property.value().uservalue().email().lower()).digest()
+ uid = '1' + ''.join(['%02d' % ord(x) for x in uid])[:20]
+ property.mutable_value().mutable_uservalue().set_obfuscated_gaiaid(
+ uid)
+
+ clones.append(clone)
+
+ assert clone.has_key()
+ assert clone.key().path().element_size() > 0
+
+ last_path = clone.key().path().element_list()[-1]
+ if last_path.id() == 0 and not last_path.has_name():
+ self.__id_lock.acquire()
+ last_path.set_id(self.__next_id)
+ self.__next_id += 1
+ self.__id_lock.release()
+
+ assert clone.entity_group().element_size() == 0
+ group = clone.mutable_entity_group()
+ root = clone.key().path().element(0)
+ group.add_element().CopyFrom(root)
+
+ else:
+ assert (clone.has_entity_group() and
+ clone.entity_group().element_size() > 0)
+
+ self.__entities_lock.acquire()
+
+ try:
+ for clone in clones:
+ self._StoreEntity(clone)
+ finally:
+ self.__entities_lock.release()
+
+ if not put_request.has_transaction():
+ self.__WriteDatastore()
+
+ put_response.key_list().extend([c.key() for c in clones])
+
+
+ def _Dynamic_Get(self, get_request, get_response):
+ if get_request.has_transaction():
+ entities = self.__tx_snapshot
+ else:
+ entities = self.__entities
+
+ for key in get_request.key_list():
+ self.__ValidateAppId(key.app())
+ app_kind = self._AppIdNamespaceKindForKey(key)
+
+ group = get_response.add_entity()
+ try:
+ entity = entities[app_kind][key].protobuf
+ except KeyError:
+ entity = None
+
+ if entity:
+ group.mutable_entity().CopyFrom(entity)
+
+
+ def _Dynamic_Delete(self, delete_request, delete_response):
+ self.__entities_lock.acquire()
+ try:
+ for key in delete_request.key_list():
+ self.__ValidateAppId(key.app())
+ app_kind = self._AppIdNamespaceKindForKey(key)
+ try:
+ del self.__entities[app_kind][key]
+ if not self.__entities[app_kind]:
+ del self.__entities[app_kind]
+
+ del self.__schema_cache[app_kind]
+ except KeyError:
+ pass
+
+ if not delete_request.has_transaction():
+ self.__WriteDatastore()
+ finally:
+ self.__entities_lock.release()
+
+
+ def _Dynamic_RunQuery(self, query, query_result):
+ if not self.__tx_lock.acquire(False):
+ if not query.has_ancestor():
+ raise apiproxy_errors.ApplicationError(
+ datastore_pb.Error.BAD_REQUEST,
+ 'Only ancestor queries are allowed inside transactions.')
+ entities = self.__tx_snapshot
+ else:
+ entities = self.__entities
+ self.__tx_lock.release()
+
+ app_id_namespace = datastore_types.parse_app_id_namespace(query.app())
+ app_id = app_id_namespace.app_id()
+ self.__ValidateAppId(app_id)
+
+ if query.has_offset() and query.offset() > _MAX_QUERY_OFFSET:
+ raise apiproxy_errors.ApplicationError(
+ datastore_pb.Error.BAD_REQUEST, 'Too big query offset.')
+
+ num_components = len(query.filter_list()) + len(query.order_list())
+ if query.has_ancestor():
+ num_components += 1
+ if num_components > _MAX_QUERY_COMPONENTS:
+ raise apiproxy_errors.ApplicationError(
+ datastore_pb.Error.BAD_REQUEST,
+          ('query is too large. may not have more than %s filters,'
+           ' sort orders, and ancestor constraints in total'
+           % _MAX_QUERY_COMPONENTS))
+
+ (filters, orders) = datastore_index.Normalize(query.filter_list(),
+ query.order_list())
+
+ if self.__require_indexes:
+ required, kind, ancestor, props, num_eq_filters = datastore_index.CompositeIndexForQuery(query)
+ if required:
+ required_key = kind, ancestor, props
+ indexes = self.__indexes.get(app_id)
+ if not indexes:
+ raise apiproxy_errors.ApplicationError(
+ datastore_pb.Error.NEED_INDEX,
+ "This query requires a composite index, but none are defined. "
+ "You must create an index.yaml file in your application root.")
+ eq_filters_set = set(props[:num_eq_filters])
+ remaining_filters = props[num_eq_filters:]
+ for index in indexes:
+ definition = datastore_admin.ProtoToIndexDefinition(index)
+ index_key = datastore_index.IndexToKey(definition)
+ if required_key == index_key:
+ break
+ if num_eq_filters > 1 and (kind, ancestor) == index_key[:2]:
+ this_props = index_key[2]
+ this_eq_filters_set = set(this_props[:num_eq_filters])
+ this_remaining_filters = this_props[num_eq_filters:]
+ if (eq_filters_set == this_eq_filters_set and
+ remaining_filters == this_remaining_filters):
+ break
+ else:
+ raise apiproxy_errors.ApplicationError(
+ datastore_pb.Error.NEED_INDEX,
+ "This query requires a composite index that is not defined. "
+ "You must update the index.yaml file in your application root.")
+
+ try:
+ query.set_app(app_id_namespace.to_encoded())
+ if query.has_kind():
+ results = entities[app_id_namespace.to_encoded(), query.kind()].values()
+ results = [entity.native for entity in results]
+ else:
+ results = []
+ for key in entities:
+ if key[0] == app_id_namespace.to_encoded():
+ results += [entity.native for entity in entities[key].values()]
+ except KeyError:
+ results = []
+
+ if query.has_ancestor():
+ ancestor_path = query.ancestor().path().element_list()
+ def is_descendant(entity):
+ path = entity.key()._Key__reference.path().element_list()
+ return path[:len(ancestor_path)] == ancestor_path
+ results = filter(is_descendant, results)
+
+ operators = {datastore_pb.Query_Filter.LESS_THAN: '<',
+ datastore_pb.Query_Filter.LESS_THAN_OR_EQUAL: '<=',
+ datastore_pb.Query_Filter.GREATER_THAN: '>',
+ datastore_pb.Query_Filter.GREATER_THAN_OR_EQUAL: '>=',
+ datastore_pb.Query_Filter.EQUAL: '==',
+ }
+
+ def has_prop_indexed(entity, prop):
+ """Returns True if prop is in the entity and is indexed."""
+ if prop in datastore_types._SPECIAL_PROPERTIES:
+ return True
+ elif prop in entity.unindexed_properties():
+ return False
+
+ values = entity.get(prop, [])
+ if not isinstance(values, (tuple, list)):
+ values = [values]
+
+ for value in values:
+ if type(value) not in datastore_types._RAW_PROPERTY_TYPES:
+ return True
+ return False
+
+ for filt in filters:
+ assert filt.op() != datastore_pb.Query_Filter.IN
+
+ prop = filt.property(0).name().decode('utf-8')
+ op = operators[filt.op()]
+
+ filter_val_list = [datastore_types.FromPropertyPb(filter_prop)
+ for filter_prop in filt.property_list()]
+
+ def passes_filter(entity):
+ """Returns True if the entity passes the filter, False otherwise.
+
+ The filter being evaluated is filt, the current filter that we're on
+ in the list of filters in the query.
+ """
+ if not has_prop_indexed(entity, prop):
+ return False
+
+ try:
+ entity_vals = datastore._GetPropertyValue(entity, prop)
+ except KeyError:
+ entity_vals = []
+
+ if not isinstance(entity_vals, list):
+ entity_vals = [entity_vals]
+
+ for fixed_entity_val in entity_vals:
+ for filter_val in filter_val_list:
+ fixed_entity_type = self._PROPERTY_TYPE_TAGS.get(
+ fixed_entity_val.__class__)
+ filter_type = self._PROPERTY_TYPE_TAGS.get(filter_val.__class__)
+ if fixed_entity_type == filter_type:
+ comp = u'%r %s %r' % (fixed_entity_val, op, filter_val)
+ elif op != '==':
+ comp = '%r %s %r' % (fixed_entity_type, op, filter_type)
+ else:
+ continue
+
+ logging.log(logging.DEBUG - 1,
+ 'Evaling filter expression "%s"', comp)
+
+ try:
+ ret = eval(comp)
+ if ret and ret != NotImplementedError:
+ return True
+ except TypeError:
+ pass
+
+ return False
+
+ results = filter(passes_filter, results)
+
+ for order in orders:
+ prop = order.property().decode('utf-8')
+ results = [entity for entity in results if has_prop_indexed(entity, prop)]
+
+ def order_compare_entities(a, b):
+ """ Return a negative, zero or positive number depending on whether
+ entity a is considered smaller than, equal to, or larger than b,
+ according to the query's orderings. """
+ cmped = 0
+ for o in orders:
+ prop = o.property().decode('utf-8')
+
+ reverse = (o.direction() is datastore_pb.Query_Order.DESCENDING)
+
+ a_val = datastore._GetPropertyValue(a, prop)
+ if isinstance(a_val, list):
+ a_val = sorted(a_val, order_compare_properties, reverse=reverse)[0]
+
+ b_val = datastore._GetPropertyValue(b, prop)
+ if isinstance(b_val, list):
+ b_val = sorted(b_val, order_compare_properties, reverse=reverse)[0]
+
+ cmped = order_compare_properties(a_val, b_val)
+
+ if o.direction() is datastore_pb.Query_Order.DESCENDING:
+ cmped = -cmped
+
+ if cmped != 0:
+ return cmped
+
+ if cmped == 0:
+ return cmp(a.key(), b.key())
+
+ def order_compare_properties(x, y):
+ """Return a negative, zero or positive number depending on whether
+ property value x is considered smaller than, equal to, or larger than
+ property value y. If x and y are different types, they're compared based
+ on the type ordering used in the real datastore, which is based on the
+ tag numbers in the PropertyValue PB.
+ """
+ if isinstance(x, datetime.datetime):
+ x = datastore_types.DatetimeToTimestamp(x)
+ if isinstance(y, datetime.datetime):
+ y = datastore_types.DatetimeToTimestamp(y)
+
+ x_type = self._PROPERTY_TYPE_TAGS.get(x.__class__)
+ y_type = self._PROPERTY_TYPE_TAGS.get(y.__class__)
+
+ if x_type == y_type:
+ try:
+ return cmp(x, y)
+ except TypeError:
+ return 0
+ else:
+ return cmp(x_type, y_type)
+
+ results.sort(order_compare_entities)
+
+ offset = 0
+ limit = len(results)
+ if query.has_offset():
+ offset = query.offset()
+ if query.has_limit():
+ limit = query.limit()
+ if limit > _MAXIMUM_RESULTS:
+ limit = _MAXIMUM_RESULTS
+ results = results[offset:limit + offset]
+
+ clone = datastore_pb.Query()
+ clone.CopyFrom(query)
+ clone.clear_hint()
+ if clone in self.__query_history:
+ self.__query_history[clone] += 1
+ else:
+ self.__query_history[clone] = 1
+ self.__WriteHistory()
+
+ cursor = _Cursor(results, query.keys_only())
+ self.__queries[cursor.cursor] = cursor
+
+ if query.has_count():
+ count = query.count()
+ elif query.has_limit():
+ count = query.limit()
+ else:
+ count = _BATCH_SIZE
+
+ cursor.PopulateQueryResult(query_result, count)
+
+ def _Dynamic_Next(self, next_request, query_result):
+ cursor_handle = next_request.cursor().cursor()
+
+ try:
+ cursor = self.__queries[cursor_handle]
+ except KeyError:
+ raise apiproxy_errors.ApplicationError(
+ datastore_pb.Error.BAD_REQUEST, 'Cursor %d not found' % cursor_handle)
+
+ count = _BATCH_SIZE
+ if next_request.has_count():
+ count = next_request.count()
+ cursor.PopulateQueryResult(query_result, count)
+
+ def _Dynamic_Count(self, query, integer64proto):
+ self.__ValidateAppId(query.app())
+ query_result = datastore_pb.QueryResult()
+ self._Dynamic_RunQuery(query, query_result)
+ cursor = query_result.cursor().cursor()
+ integer64proto.set_value(self.__queries[cursor].count)
+ del self.__queries[cursor]
+
+ def _Dynamic_BeginTransaction(self, request, transaction):
+ self.__tx_handle_lock.acquire()
+ handle = self.__next_tx_handle
+ self.__next_tx_handle += 1
+ self.__tx_handle_lock.release()
+
+ self.__transactions[handle] = None
+ transaction.set_handle(handle)
+
+ self.__tx_lock.acquire()
+ snapshot = [(app_kind, dict(entities))
+ for app_kind, entities in self.__entities.items()]
+ self.__tx_snapshot = dict(snapshot)
+
+ def _Dynamic_Commit(self, transaction, transaction_response):
+ if not self.__transactions.has_key(transaction.handle()):
+ raise apiproxy_errors.ApplicationError(
+ datastore_pb.Error.BAD_REQUEST,
+ 'Transaction handle %d not found' % transaction.handle())
+
+ self.__tx_snapshot = {}
+ try:
+ self.__WriteDatastore()
+ finally:
+ self.__tx_lock.release()
+
+ def _Dynamic_Rollback(self, transaction, transaction_response):
+ if not self.__transactions.has_key(transaction.handle()):
+ raise apiproxy_errors.ApplicationError(
+ datastore_pb.Error.BAD_REQUEST,
+ 'Transaction handle %d not found' % transaction.handle())
+
+ self.__entities = self.__tx_snapshot
+ self.__tx_snapshot = {}
+ self.__tx_lock.release()
+
+ def _Dynamic_GetSchema(self, req, schema):
+ app_str = req.app()
+ self.__ValidateAppId(app_str)
+
+ kinds = []
+
+ for app, kind in self.__entities:
+ if (app != app_str or
+ (req.has_start_kind() and kind < req.start_kind()) or
+ (req.has_end_kind() and kind > req.end_kind())):
+ continue
+
+ app_kind = (app, kind)
+ if app_kind in self.__schema_cache:
+ kinds.append(self.__schema_cache[app_kind])
+ continue
+
+ kind_pb = entity_pb.EntityProto()
+ kind_pb.mutable_key().set_app('')
+ kind_pb.mutable_key().mutable_path().add_element().set_type(kind)
+ kind_pb.mutable_entity_group()
+
+ props = {}
+
+ for entity in self.__entities[app_kind].values():
+ for prop in entity.protobuf.property_list():
+ if prop.name() not in props:
+ props[prop.name()] = entity_pb.PropertyValue()
+ props[prop.name()].MergeFrom(prop.value())
+
+ for value_pb in props.values():
+ if value_pb.has_int64value():
+ value_pb.set_int64value(0)
+ if value_pb.has_booleanvalue():
+ value_pb.set_booleanvalue(False)
+ if value_pb.has_stringvalue():
+ value_pb.set_stringvalue('none')
+ if value_pb.has_doublevalue():
+ value_pb.set_doublevalue(0.0)
+ if value_pb.has_pointvalue():
+ value_pb.mutable_pointvalue().set_x(0.0)
+ value_pb.mutable_pointvalue().set_y(0.0)
+ if value_pb.has_uservalue():
+ value_pb.mutable_uservalue().set_gaiaid(0)
+ value_pb.mutable_uservalue().set_email('none')
+ value_pb.mutable_uservalue().set_auth_domain('none')
+ value_pb.mutable_uservalue().clear_nickname()
+ value_pb.mutable_uservalue().clear_obfuscated_gaiaid()
+ if value_pb.has_referencevalue():
+ value_pb.clear_referencevalue()
+ value_pb.mutable_referencevalue().set_app('none')
+ pathelem = value_pb.mutable_referencevalue().add_pathelement()
+ pathelem.set_type('none')
+ pathelem.set_name('none')
+
+ for name, value_pb in props.items():
+ prop_pb = kind_pb.add_property()
+ prop_pb.set_name(name)
+ prop_pb.set_multiple(False)
+ prop_pb.mutable_value().CopyFrom(value_pb)
+
+ kinds.append(kind_pb)
+ self.__schema_cache[app_kind] = kind_pb
+
+ for kind_pb in kinds:
+ kind = schema.add_kind()
+ kind.CopyFrom(kind_pb)
+ if not req.properties():
+ kind.clear_property()
+
+ schema.set_more_results(False)
+
+ def _Dynamic_AllocateIds(self, allocate_ids_request, allocate_ids_response):
+ model_key = allocate_ids_request.model_key()
+ size = allocate_ids_request.size()
+
+ self.__ValidateAppId(model_key.app())
+
+ try:
+ self.__id_lock.acquire()
+ start = self.__next_id
+ self.__next_id += size
+ end = self.__next_id - 1
+ finally:
+ self.__id_lock.release()
+
+ allocate_ids_response.set_start(start)
+ allocate_ids_response.set_end(end)
+
+ def _Dynamic_CreateIndex(self, index, id_response):
+ self.__ValidateAppId(index.app_id())
+ if index.id() != 0:
+ raise apiproxy_errors.ApplicationError(datastore_pb.Error.BAD_REQUEST,
+ 'New index id must be 0.')
+ elif self.__FindIndex(index):
+ raise apiproxy_errors.ApplicationError(datastore_pb.Error.BAD_REQUEST,
+ 'Index already exists.')
+
+ self.__index_id_lock.acquire()
+ index.set_id(self.__next_index_id)
+ id_response.set_value(self.__next_index_id)
+ self.__next_index_id += 1
+ self.__index_id_lock.release()
+
+ clone = entity_pb.CompositeIndex()
+ clone.CopyFrom(index)
+ app = index.app_id()
+ clone.set_app_id(app)
+
+ self.__indexes_lock.acquire()
+ try:
+ if app not in self.__indexes:
+ self.__indexes[app] = []
+ self.__indexes[app].append(clone)
+ finally:
+ self.__indexes_lock.release()
+
+ def _Dynamic_GetIndices(self, app_str, composite_indices):
+ self.__ValidateAppId(app_str.value())
+ composite_indices.index_list().extend(
+ self.__indexes.get(app_str.value(), []))
+
+ def _Dynamic_UpdateIndex(self, index, void):
+ self.__ValidateAppId(index.app_id())
+ stored_index = self.__FindIndex(index)
+ if not stored_index:
+ raise apiproxy_errors.ApplicationError(datastore_pb.Error.BAD_REQUEST,
+ "Index doesn't exist.")
+ elif (index.state() != stored_index.state() and
+ index.state() not in self._INDEX_STATE_TRANSITIONS[stored_index.state()]):
+ raise apiproxy_errors.ApplicationError(
+ datastore_pb.Error.BAD_REQUEST,
+ "cannot move index state from %s to %s" %
+ (entity_pb.CompositeIndex.State_Name(stored_index.state()),
+ (entity_pb.CompositeIndex.State_Name(index.state()))))
+
+ self.__indexes_lock.acquire()
+ try:
+ stored_index.set_state(index.state())
+ finally:
+ self.__indexes_lock.release()
+
+ def _Dynamic_DeleteIndex(self, index, void):
+ self.__ValidateAppId(index.app_id())
+ stored_index = self.__FindIndex(index)
+ if not stored_index:
+ raise apiproxy_errors.ApplicationError(datastore_pb.Error.BAD_REQUEST,
+ "Index doesn't exist.")
+
+ app = index.app_id()
+ self.__indexes_lock.acquire()
+ try:
+ self.__indexes[app].remove(stored_index)
+ finally:
+ self.__indexes_lock.release()
+
+ def __FindIndex(self, index):
+ """Finds an existing index by definition.
+
+ Args:
+      index: entity_pb.CompositeIndex
+
+ Returns:
+ entity_pb.CompositeIndex, if it exists; otherwise None
+ """
+ app = index.app_id()
+ self.__ValidateAppId(app)
+ if app in self.__indexes:
+ for stored_index in self.__indexes[app]:
+ if index.definition() == stored_index.definition():
+ return stored_index
+
+ return None
diff --git a/google_appengine/google/appengine/api/datastore_file_stub.pyc b/google_appengine/google/appengine/api/datastore_file_stub.pyc
new file mode 100644
index 0000000..2efca54
--- /dev/null
+++ b/google_appengine/google/appengine/api/datastore_file_stub.pyc
Binary files differ
diff --git a/google_appengine/google/appengine/api/datastore_types.py b/google_appengine/google/appengine/api/datastore_types.py
new file mode 100755
index 0000000..c2d1d5f
--- /dev/null
+++ b/google_appengine/google/appengine/api/datastore_types.py
@@ -0,0 +1,1788 @@
+#!/usr/bin/env python
+#
+# Copyright 2007 Google Inc.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
+
+"""Higher-level, semantic data types for the datastore. These types
+are expected to be set as attributes of Entities. See "Supported Data Types"
+in the API Guide.
+
+Most of these types are based on XML elements from Atom and GData elements
+from the atom and gd namespaces. For more information, see:
+
+ http://www.atomenabled.org/developers/syndication/
+ http://code.google.com/apis/gdata/common-elements.html
+
+The namespace schemas are:
+
+ http://www.w3.org/2005/Atom
+ http://schemas.google.com/g/2005
+"""
+
+
+
+
+
+import base64
+import calendar
+import datetime
+import os
+import re
+import string
+import time
+import urlparse
+from xml.sax import saxutils
+from google.appengine.datastore import datastore_pb
+from google.appengine.api import datastore_errors
+from google.appengine.api import users
+from google.appengine.api import namespace_manager
+from google.net.proto import ProtocolBuffer
+from google.appengine.datastore import entity_pb
+
+_MAX_STRING_LENGTH = 500
+
+_MAX_LINK_PROPERTY_LENGTH = 2083
+
+RESERVED_PROPERTY_NAME = re.compile('^__.*__$')
+
+_KEY_SPECIAL_PROPERTY = '__key__'
+_SPECIAL_PROPERTIES = frozenset([_KEY_SPECIAL_PROPERTY])
+
+_NAMESPACE_SEPARATOR = '!'
+
+class UtcTzinfo(datetime.tzinfo):
+ def utcoffset(self, dt): return datetime.timedelta(0)
+ def dst(self, dt): return datetime.timedelta(0)
+ def tzname(self, dt): return 'UTC'
+ def __repr__(self): return 'datastore_types.UTC'
+
+UTC = UtcTzinfo()
+
+
+def typename(obj):
+ """Returns the type of obj as a string. More descriptive and specific than
+ type(obj), and safe for any object, unlike __class__."""
+ if hasattr(obj, '__class__'):
+ return getattr(obj, '__class__').__name__
+ else:
+ return type(obj).__name__
+
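+# For example:
+#
+#   typename(42)    -> 'int'
+#   typename(u'hi') -> 'unicode'
+#   typename(None)  -> 'NoneType'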
+
+def ValidateString(value,
+ name='unused',
+ exception=datastore_errors.BadValueError,
+ max_len=_MAX_STRING_LENGTH,
+ empty_ok=False):
+ """Raises an exception if value is not a valid string or a subclass thereof.
+
+ A string is valid if it's not empty, no more than _MAX_STRING_LENGTH bytes,
+ and not a Blob. The exception type can be specified with the exception
+ argument; it defaults to BadValueError.
+
+ Args:
+ value: the value to validate.
+ name: the name of this value; used in the exception message.
+ exception: the type of exception to raise.
+ max_len: the maximum allowed length, in bytes.
+ empty_ok: allow empty value.
+ """
+ if value is None and empty_ok:
+ return
+ if not isinstance(value, basestring) or isinstance(value, Blob):
+ raise exception('%s should be a string; received %s (a %s):' %
+ (name, value, typename(value)))
+ if not value and not empty_ok:
+ raise exception('%s must not be empty.' % name)
+
+ if len(value.encode('utf-8')) > max_len:
+ raise exception('%s must be under %d bytes.' % (name, max_len))
+
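+# For example:
+#
+#   ValidateString('Bob', name='title')    # passes
+#   ValidateString('', name='title')       # raises BadValueError
+#   ValidateString(None, empty_ok=True)    # passes and returns None
+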
+def ValidateInteger(value,
+ name='unused',
+ exception=datastore_errors.BadValueError,
+ empty_ok=False,
+ zero_ok=False,
+ negative_ok=False):
+ """Raises an exception if value is not a valid integer.
+
+  A value is valid if it is an int and, unless the corresponding *_ok
+  argument allows it, is not None, zero, or negative.
+ The exception type can be specified with the exception argument;
+ it defaults to BadValueError.
+
+ Args:
+ value: the value to validate.
+ name: the name of this value; used in the exception message.
+ exception: the type of exception to raise.
+ empty_ok: allow None value.
+ zero_ok: allow zero value.
+ negative_ok: allow negative value.
+ """
+ if value is None and empty_ok:
+ return
+ if not isinstance(value, int):
+ raise exception('%s should be an integer; received %s (a %s).' %
+ (name, value, typename(value)))
+ if not value and not zero_ok:
+ raise exception('%s must not be 0 (zero)' % name)
+ if value < 0 and not negative_ok:
+ raise exception('%s must not be negative.' % name)
+
+def ResolveAppId(app, name='_app'):
+ """Validate app id, providing a default.
+
+ If the argument is None, $APPLICATION_ID is substituted.
+
+ Args:
+ app: The app id argument value to be validated.
+ name: The argument name, for error messages.
+
+ Returns:
+ The value of app, or the substituted default. Always a non-empty string.
+
+ Raises:
+ BadArgumentError if the value is empty or not a string.
+ """
+ if app is None:
+ app = os.environ.get('APPLICATION_ID', '')
+ ValidateString(app, '_app', datastore_errors.BadArgumentError)
+ return app
+
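+# For example, in a request environment where APPLICATION_ID is set to
+# 'myapp':
+#
+#   ResolveAppId(None)     -> 'myapp'
+#   ResolveAppId('other')  -> 'other'
+#   ResolveAppId('')       -> raises BadArgumentError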
+
+class AppIdNamespace(object):
+ """Combined AppId and Namespace
+
+ An identifier that combines the application identifier and the
+ namespace.
+ """
+ __app_id = None
+ __namespace = None
+
+ def __init__(self, app_id, namespace):
+ """Constructor. Creates a AppIdNamespace from two strings.
+
+ Args:
+ app_id: application identifier string
+ namespace: namespace identifier string
+ Raises:
+ BadArgumentError if the values contain
+ the _NAMESPACE_SEPARATOR character (!) or
+ the app_id is empty.
+ """
+ self.__app_id = app_id
+ if namespace:
+ self.__namespace = namespace
+ else:
+ self.__namespace = None
+ ValidateString(self.__app_id, 'app_id', datastore_errors.BadArgumentError)
+ ValidateString(self.__namespace,
+ 'namespace', datastore_errors.BadArgumentError,
+ empty_ok=True)
+ if _NAMESPACE_SEPARATOR in self.__app_id:
+ raise datastore_errors.BadArgumentError(
+ 'app_id must not contain a "%s"' % _NAMESPACE_SEPARATOR)
+ if self.__namespace and _NAMESPACE_SEPARATOR in self.__namespace:
+ raise datastore_errors.BadArgumentError(
+ 'namespace must not contain a "%s"' % _NAMESPACE_SEPARATOR)
+
+ def __cmp__(self, other):
+ """Returns negative, zero, or positive when comparing two AppIdNamespace.
+
+ Args:
+ other: AppIdNamespace to compare to.
+
+ Returns:
+ Negative if self is less than "other"
+ Zero if "other" is equal to self
+ Positive if self is greater than "other"
+ """
+ if not isinstance(other, AppIdNamespace):
+ return cmp(id(self), id(other))
+ return cmp((self.__app_id, self.__namespace),
+ (other.__app_id, other.__namespace))
+
+ def to_encoded(self):
+ """Returns this AppIdNamespace's string equivalent
+
+ i.e. "app!namespace"
+ """
+ if not self.__namespace:
+ return self.__app_id
+ else:
+ return self.__app_id + _NAMESPACE_SEPARATOR + self.__namespace
+
+ def app_id(self):
+ """Returns the AppId portion of this AppIdNamespace.
+ """
+ return self.__app_id
+
+ def namespace(self):
+ """Returns the namespace portion of this AppIdNamespace.
+ """
+ return self.__namespace
+
+
+def PartitionString(value, separator):
+ """Equivalent to python2.5 str.partition()
+ TODO(gmariani) use str.partition() when python 2.5 is adopted.
+
+ Args:
+ value: String to be partitioned
+ separator: Separator string
+ """
+ index = value.find(separator)
+ if index == -1:
+ return (value, '', value[0:0])
+ else:
+ return (value[0:index], separator, value[index+len(separator):])
+
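+# Illustrative examples, matching str.partition() semantics:
+#
+#   PartitionString('myapp!dev', '!') == ('myapp', '!', 'dev')
+#   PartitionString('myapp', '!') == ('myapp', '', '')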
+
+def parse_app_id_namespace(app_id_namespace):
+ """
+ An app_id_namespace string is valid if it's not empty, and contains
+ at most one namespace separator ('!'). Also, an app_id_namespace
+ with an empty namespace must not contain a namespace separator.
+
+ Args:
+ app_id_namespace: an encoded app_id_namespace.
+ Raises exception if format of app_id_namespace is invalid.
+ """
+ if not app_id_namespace:
+ raise datastore_errors.BadArgumentError(
+ 'app_id_namespace must be non empty')
+ parts = PartitionString(app_id_namespace, _NAMESPACE_SEPARATOR)
+ if parts[1] == _NAMESPACE_SEPARATOR:
+ if not parts[2]:
+ raise datastore_errors.BadArgumentError(
+ 'app_id_namespace must not contain a "%s" if the namespace is empty' %
+ _NAMESPACE_SEPARATOR)
+ if parts[2]:
+ return AppIdNamespace(parts[0], parts[2])
+ return AppIdNamespace(parts[0], None)
+
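+# Illustrative round trips:
+#
+#   parse_app_id_namespace('myapp!dev').to_encoded() == 'myapp!dev'
+#   parse_app_id_namespace('myapp').namespace() is None
+#   parse_app_id_namespace('myapp!')   # raises BadArgumentError
+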
+def ResolveAppIdNamespace(
+ app_id=None, namespace=None, app_id_namespace=None):
+ """Validate an app id/namespace and substitute default values.
+
+ If the argument is None, $APPLICATION_ID!$NAMESPACE is substituted.
+
+ Args:
+ app_id: The app id argument value to be validated.
+ namespace: The namespace argument value to be validated.
+ app_id_namespace: An AppId/Namespace pair
+
+ Returns:
+ An AppIdNamespace object initialized with AppId and Namespace.
+
+ Raises:
+ BadArgumentError if the value is empty or not a string.
+ """
+ if app_id_namespace is None:
+ if app_id is None:
+ app_id = os.environ.get('APPLICATION_ID', '')
+ if namespace is None:
+ namespace = namespace_manager.get_request_namespace()
+ else:
+ if app_id is not None:
+ raise datastore_errors.BadArgumentError(
+ 'app_id is overspecified. Cannot define app_id_namespace and app_id')
+ if namespace is not None:
+ raise datastore_errors.BadArgumentError(
+ 'namespace is overspecified. '
+ 'Cannot define app_id_namespace and namespace')
+ return parse_app_id_namespace(app_id_namespace)
+
+ return AppIdNamespace(app_id, namespace)
+
+
+class Key(object):
+ """The primary key for a datastore entity.
+
+ A datastore GUID. A Key instance uniquely identifies an entity across all
+ apps, and includes all information necessary to fetch the entity from the
+ datastore with Get().
+
+ Key implements __hash__, and key instances are immutable, so Keys may be
+ used in sets and as dictionary keys.
+ """
+ __reference = None
+
+ def __init__(self, encoded=None):
+ """Constructor. Creates a Key from a string.
+
+ Args:
+ # a base64-encoded primary key, generated by Key.__str__
+ encoded: str
+ """
+ if encoded is not None:
+ if not isinstance(encoded, basestring):
+ try:
+ repr_encoded = repr(encoded)
+ except:
+ repr_encoded = "<couldn't encode>"
+ raise datastore_errors.BadArgumentError(
+ 'Key() expects a string; received %s (a %s).' %
+ (repr_encoded, typename(encoded)))
+ try:
+ modulo = len(encoded) % 4
+ if modulo != 0:
+ encoded += ('=' * (4 - modulo))
+
+ encoded_pb = base64.urlsafe_b64decode(str(encoded))
+ self.__reference = entity_pb.Reference(encoded_pb)
+ assert self.__reference.IsInitialized()
+
+ except (AssertionError, TypeError), e:
+ raise datastore_errors.BadKeyError(
+ 'Invalid string key %s. Details: %s' % (encoded, e))
+ except Exception, e:
+ if e.__class__.__name__ == 'ProtocolBufferDecodeError':
+ raise datastore_errors.BadKeyError('Invalid string key %s.' % encoded)
+ else:
+ raise
+ else:
+ self.__reference = entity_pb.Reference()
+
+ def to_path(self):
+ """Construct the "path" of this key as a list.
+
+ Returns:
+ A list [kind_1, id_or_name_1, ..., kind_n, id_or_name_n] of the key path.
+
+ Raises:
+ datastore_errors.BadKeyError if this key does not have a valid path.
+ """
+ path = []
+ for path_element in self.__reference.path().element_list():
+ path.append(path_element.type().decode('utf-8'))
+ if path_element.has_name():
+ path.append(path_element.name().decode('utf-8'))
+ elif path_element.has_id():
+ path.append(path_element.id())
+ else:
+ raise datastore_errors.BadKeyError('Incomplete key found in to_path')
+ return path
+
+ @staticmethod
+ def from_path(*args, **kwds):
+ """Static method to construct a Key out of a "path" (kind, id or name, ...).
+
+ This is useful when an application wants to use just the id or name portion
+ of a key in e.g. a URL, where the rest of the URL provides enough context to
+ fill in the rest, i.e. the app id (always implicit), the entity kind, and
+ possibly an ancestor key. Since ids and names are usually small, they're
+ more attractive for use in end-user-visible URLs than the full string
+ representation of a key.
+
+ Args:
+ kind: the entity kind (a str or unicode instance)
+ id_or_name: the id (an int or long) or name (a str or unicode instance)
+
+ Additional positional arguments are allowed and should be
+ alternating kind and id/name.
+
+ Keyword args:
+ parent: optional parent Key; default None.
+
+ Returns:
+ A new Key instance whose .kind() and .id() or .name() methods return
+ the *last* kind and id or name positional arguments passed.
+
+ Raises:
+ BadArgumentError for invalid arguments.
+ BadKeyError if the parent key is incomplete.
+ """
+ parent = kwds.pop('parent', None)
+ _app_id_namespace_obj = ResolveAppIdNamespace(
+ kwds.pop('_app', None),
+ kwds.pop('_namespace', None),
+ kwds.pop('_app_id_namespace', None))
+
+ if kwds:
+ raise datastore_errors.BadArgumentError(
+ 'Excess keyword arguments ' + repr(kwds))
+
+ if not args or len(args) % 2:
+ raise datastore_errors.BadArgumentError(
+ 'A non-zero even number of positional arguments is required '
+ '(kind, id or name, kind, id or name, ...); received %s' % repr(args))
+
+ if parent is not None:
+ if not isinstance(parent, Key):
+ raise datastore_errors.BadArgumentError(
+ 'Expected None or a Key as parent; received %r (a %s).' %
+ (parent, typename(parent)))
+ if not parent.has_id_or_name():
+ raise datastore_errors.BadKeyError(
+ 'The parent Key is incomplete.')
+ if _app_id_namespace_obj != parent.app_id_namespace():
+ raise datastore_errors.BadArgumentError(
+ 'The app_id/namespace arguments (%r) should match '
+ 'parent.app_id_namespace().to_encoded() (%s)' %
+ (_app_id_namespace_obj, parent.app_id_namespace()))
+
+ key = Key()
+ ref = key.__reference
+ if parent is not None:
+ ref.CopyFrom(parent.__reference)
+ else:
+ ref.set_app(_app_id_namespace_obj.to_encoded())
+
+ path = ref.mutable_path()
+ for i in xrange(0, len(args), 2):
+ kind, id_or_name = args[i:i+2]
+ if isinstance(kind, basestring):
+ kind = kind.encode('utf-8')
+ else:
+ raise datastore_errors.BadArgumentError(
+ 'Expected a string kind as argument %d; received %r (a %s).' %
+ (i + 1, kind, typename(kind)))
+ elem = path.add_element()
+ elem.set_type(kind)
+ if isinstance(id_or_name, (int, long)):
+ elem.set_id(id_or_name)
+ elif isinstance(id_or_name, basestring):
+ ValidateString(id_or_name, 'name')
+ elem.set_name(id_or_name.encode('utf-8'))
+ else:
+ raise datastore_errors.BadArgumentError(
+ 'Expected an integer id or string name as argument %d; '
+ 'received %r (a %s).' % (i + 2, id_or_name, typename(id_or_name)))
+
+ assert ref.IsInitialized()
+ return key
+
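+ # Illustrative sketch (assumes the APPLICATION_ID environment variable is
+ # set, as it is at runtime):
+ #
+ #   k = Key.from_path('Parent', 1, 'Child', 'fluffy')
+ #   k.kind() == u'Child' and k.name() == u'fluffy'
+ #   k.parent().id() == 1
+ #   Key(str(k)) == k   # keys round-trip through the string encoding
+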
+ def app(self):
+ """Returns this entity's app id, a string."""
+ if self.__reference.app():
+ return self.app_id_namespace().app_id().decode('utf-8')
+ else:
+ return None
+
+ def namespace(self):
+ """Returns this entity's app id, a string."""
+ if self.__reference.app():
+ return self.app_id_namespace().namespace().decode('utf-8')
+ else:
+ return None
+
+ def app_id_namespace(self):
+ """Returns this entity's app id/namespace, an appIdNamespace object."""
+ if self.__reference.app():
+ return parse_app_id_namespace(self.__reference.app())
+ else:
+ return None
+
+ def kind(self):
+ """Returns this entity's kind, as a string."""
+ if self.__reference.path().element_size() > 0:
+ encoded = self.__reference.path().element_list()[-1].type()
+ return unicode(encoded.decode('utf-8'))
+ else:
+ return None
+
+ def id(self):
+ """Returns this entity's id, or None if it doesn't have one."""
+ elems = self.__reference.path().element_list()
+ if elems and elems[-1].has_id() and elems[-1].id():
+ return elems[-1].id()
+ else:
+ return None
+
+ def name(self):
+ """Returns this entity's name, or None if it doesn't have one."""
+ elems = self.__reference.path().element_list()
+ if elems and elems[-1].has_name() and elems[-1].name():
+ return elems[-1].name().decode('utf-8')
+ else:
+ return None
+
+ def id_or_name(self):
+ """Returns this entity's id or name, whichever it has, or None."""
+ if self.id() is not None:
+ return self.id()
+ else:
+ return self.name()
+
+ def has_id_or_name(self):
+ """Returns True if this entity has an id or name, False otherwise.
+ """
+ return self.id_or_name() is not None
+
+ def parent(self):
+ """Returns this entity's parent, as a Key. If this entity has no parent,
+ returns None."""
+ if self.__reference.path().element_size() > 1:
+ parent = Key()
+ parent.__reference.CopyFrom(self.__reference)
+ parent.__reference.path().element_list().pop()
+ return parent
+ else:
+ return None
+
+ def ToTagUri(self):
+ """Returns a tag: URI for this entity for use in XML output.
+
+ Foreign keys for entities may be represented in XML output as tag URIs.
+ RFC 4151 describes the tag URI scheme. From http://taguri.org/:
+
+ The tag algorithm lets people mint - create - identifiers that no one
+ else using the same algorithm could ever mint. It is simple enough to do
+ in your head, and the resulting identifiers can be easy to read, write,
+ and remember. The identifiers conform to the URI (URL) Syntax.
+
+ Tag URIs for entities use the app's auth domain and the date that the URI
+ is generated. The namespace-specific part is <kind>[<key>].
+
+ For example, here is the tag URI for a Kitten with the key "Fluffy" in the
+ catsinsinks app:
+
+ tag:catsinsinks.googleapps.com,2006-08-29:Kitten[Fluffy]
+
+ Raises a BadKeyError if this entity's key is incomplete.
+ """
+ if not self.has_id_or_name():
+ raise datastore_errors.BadKeyError(
+ 'ToTagUri() called for an entity with an incomplete key.')
+
+ return u'tag:%s.%s,%s:%s[%s]' % (
+ saxutils.escape(self.app_id_namespace().to_encoded()),
+ os.environ['AUTH_DOMAIN'],
+ datetime.date.today().isoformat(),
+ saxutils.escape(self.kind()),
+ saxutils.escape(str(self)))
+
+ ToXml = ToTagUri
+
+ def entity_group(self):
+ """Returns this key's entity group as a Key.
+
+ Note that the returned Key will be incomplete if this Key is for a root
+ entity and it is incomplete.
+ """
+ group = Key._FromPb(self.__reference)
+ del group.__reference.path().element_list()[1:]
+ return group
+
+ @staticmethod
+ def _FromPb(pb):
+ """Static factory method. Creates a Key from an entity_pb.Reference.
+
+ Not intended to be used by application developers. Enforced by hiding the
+ entity_pb classes.
+
+ Args:
+ pb: entity_pb.Reference
+ """
+ if not isinstance(pb, entity_pb.Reference):
+ raise datastore_errors.BadArgumentError(
+ 'Key constructor takes an entity_pb.Reference; received %s (a %s).' %
+ (pb, typename(pb)))
+
+ key = Key()
+ key.__reference = entity_pb.Reference()
+ key.__reference.CopyFrom(pb)
+ return key
+
+ def _ToPb(self):
+ """Converts this Key to its protocol buffer representation.
+
+ Not intended to be used by application developers. Enforced by hiding the
+ entity_pb classes.
+
+ Returns:
+ # the Reference PB representation of this Key
+ entity_pb.Reference
+ """
+ pb = entity_pb.Reference()
+ pb.CopyFrom(self.__reference)
+ if not self.has_id_or_name():
+ pb.mutable_path().element_list()[-1].set_id(0)
+
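+ # These decodes are validity checks: they raise UnicodeDecodeError if the
+ # app id or any kind is not valid UTF-8; the decoded values are discarded.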
+ pb.app().decode('utf-8')
+ for pathelem in pb.path().element_list():
+ pathelem.type().decode('utf-8')
+
+ return pb
+
+ def __str__(self):
+ """Encodes this Key as an opaque string.
+
+ Returns a string representation of this key, suitable for use in HTML,
+ URLs, and other similar use cases. If the entity's key is incomplete,
+ raises a BadKeyError.
+
+ Unfortunately, this string encoding isn't particularly compact, and its
+ length varies with the length of the path. If you want a shorter identifier
+ and you know the kind and parent (if any) ahead of time, consider using just
+ the entity's id or name.
+
+ Returns:
+ string
+ """
+ if self.has_id_or_name():
+ encoded = base64.urlsafe_b64encode(self.__reference.Encode())
+ return encoded.replace('=', '')
+ else:
+ raise datastore_errors.BadKeyError(
+ 'Cannot string encode an incomplete key!\n%s' % self.__reference)
+
+ def __repr__(self):
+ """Returns an eval()able string representation of this key.
+
+ Returns a Python string of the form 'datastore_types.Key.from_path(...)'
+ that can be used to recreate this key.
+
+ Returns:
+ string
+ """
+ args = []
+ for elem in self.__reference.path().element_list():
+ args.append(repr(elem.type().decode('utf-8')))
+ if elem.has_name():
+ args.append(repr(elem.name().decode('utf-8')))
+ else:
+ args.append(repr(elem.id()))
+
+ args.append('_app_id_namespace=%r' % self.__reference.app().decode('utf-8'))
+ return u'datastore_types.Key.from_path(%s)' % ', '.join(args)
+
+ def __cmp__(self, other):
+ """Returns negative, zero, or positive when comparing two keys.
+
+ TODO(ryanb): for API v2, we should change this to make incomplete keys, ie
+ keys without an id or name, not equal to any other keys.
+
+ Args:
+ other: Key to compare to.
+
+ Returns:
+ Negative if self is less than "other"
+ Zero if "other" is equal to self
+ Positive if self is greater than "other"
+ """
+ if not isinstance(other, Key):
+ return -2
+
+ self_args = []
+ other_args = []
+
+ self_args.append(self.__reference.app())
+ other_args.append(other.__reference.app())
+
+ for elem in self.__reference.path().element_list():
+ self_args.append(elem.type())
+ if elem.has_name():
+ self_args.append(elem.name())
+ else:
+ self_args.append(elem.id())
+
+ for elem in other.__reference.path().element_list():
+ other_args.append(elem.type())
+ if elem.has_name():
+ other_args.append(elem.name())
+ else:
+ other_args.append(elem.id())
+
+ for self_component, other_component in zip(self_args, other_args):
+ comparison = cmp(self_component, other_component)
+ if comparison != 0:
+ return comparison
+
+ return cmp(len(self_args), len(other_args))
+
+ def __hash__(self):
+ """Returns a 32-bit integer hash of this key.
+
+ Implements Python's hash protocol so that Keys may be used in sets and as
+ dictionary keys.
+
+ Returns:
+ int
+ """
+ return hash(self.__str__())
+
+
+class Category(unicode):
+ """A tag, ie a descriptive word or phrase. Entities may be tagged by users,
+ and later returned by a queries for that tag. Tags can also be used for
+ ranking results (frequency), photo captions, clustering, activity, etc.
+
+ Here's a more in-depth description: http://www.zeldman.com/daily/0405d.shtml
+
+ This is the Atom "category" element. In XML output, the tag is provided as
+ the term attribute. See:
+ http://www.atomenabled.org/developers/syndication/#category
+
+ Raises BadValueError if tag is not a string or subtype.
+ """
+ TERM = 'user-tag'
+
+ def __init__(self, tag):
+ super(Category, self).__init__()
+ ValidateString(tag, 'tag')
+
+ def ToXml(self):
+ return u'<category term="%s" label=%s />' % (Category.TERM,
+ saxutils.quoteattr(self))
+
+
+class Link(unicode):
+ """A fully qualified URL. Usually http: scheme, but may also be file:, ftp:,
+ news:, among others.
+
+ If you have email (mailto:) or instant messaging (aim:, xmpp:) links,
+ consider using the Email or IM classes instead.
+
+ This is the Atom "link" element. In XML output, the link is provided as the
+ href attribute. See:
+ http://www.atomenabled.org/developers/syndication/#link
+
+ Raises BadValueError if link is not a fully qualified, well-formed URL.
+ """
+ def __init__(self, link):
+ super(Link, self).__init__()
+ ValidateString(link, 'link', max_len=_MAX_LINK_PROPERTY_LENGTH)
+
+ scheme, domain, path, params, query, fragment = urlparse.urlparse(link)
+ if (not scheme or (scheme != 'file' and not domain) or
+ (scheme == 'file' and not path)):
+ raise datastore_errors.BadValueError('Invalid URL: %s' % link)
+
+ def ToXml(self):
+ return u'<link href=%s />' % saxutils.quoteattr(self)
+
+
+class Email(unicode):
+ """An RFC2822 email address. Makes no attempt at validation; apart from
+ checking MX records, email address validation is a rathole.
+
+ This is the gd:email element. In XML output, the email address is provided as
+ the address attribute. See:
+ http://code.google.com/apis/gdata/common-elements.html#gdEmail
+
+ Raises BadValueError if email is not a valid email address.
+ """
+ def __init__(self, email):
+ super(Email, self).__init__()
+ ValidateString(email, 'email')
+
+ def ToXml(self):
+ return u'<gd:email address=%s />' % saxutils.quoteattr(self)
+
+
+class GeoPt(object):
+ """A geographical point, specified by floating-point latitude and longitude
+ coordinates. Often used to integrate with mapping sites like Google Maps.
+ May also be used as ICBM coordinates.
+
+ This is the georss:point element. In XML output, the coordinates are
+ provided as the lat and lon attributes. See: http://georss.org/
+
+ Serializes to '<lat>,<lon>'. Raises BadValueError if it's passed an invalid
+ serialized string, or if lat and lon are not valid floating-point values in
+ the ranges [-90, 90] and [-180, 180], respectively.
+ """
+ lat = None
+ lon = None
+
+ def __init__(self, lat, lon=None):
+ if lon is None:
+ try:
+ split = lat.split(',')
+ lat, lon = split
+ except (AttributeError, ValueError):
+ raise datastore_errors.BadValueError(
+ 'Expected a "lat,long" formatted string; received %s (a %s).' %
+ (lat, typename(lat)))
+
+ try:
+ lat = float(lat)
+ lon = float(lon)
+ if abs(lat) > 90:
+ raise datastore_errors.BadValueError(
+ 'Latitude must be between -90 and 90; received %f' % lat)
+ if abs(lon) > 180:
+ raise datastore_errors.BadValueError(
+ 'Longitude must be between -180 and 180; received %f' % lon)
+ except (TypeError, ValueError):
+ raise datastore_errors.BadValueError(
+ 'Expected floats for lat and long; received %s (a %s) and %s (a %s).' %
+ (lat, typename(lat), lon, typename(lon)))
+
+ self.lat = lat
+ self.lon = lon
+
+ def __cmp__(self, other):
+ if not isinstance(other, GeoPt):
+ try:
+ other = GeoPt(other)
+ except datastore_errors.BadValueError:
+ return NotImplemented
+
+ lat_cmp = cmp(self.lat, other.lat)
+ if lat_cmp != 0:
+ return lat_cmp
+ else:
+ return cmp(self.lon, other.lon)
+
+ def __hash__(self):
+ """Returns a 32-bit integer hash of this point.
+
+ Implements Python's hash protocol so that GeoPts may be used in sets and
+ as dictionary keys.
+
+ Returns:
+ int
+ """
+ return hash((self.lat, self.lon))
+
+ def __repr__(self):
+ """Returns an eval()able string representation of this GeoPt.
+
+ The returned string is of the form 'datastore_types.GeoPt([lat], [lon])'.
+
+ Returns:
+ string
+ """
+ return 'datastore_types.GeoPt(%r, %r)' % (self.lat, self.lon)
+
+ def __unicode__(self):
+ return u'%s,%s' % (unicode(self.lat), unicode(self.lon))
+
+ __str__ = __unicode__
+
+ def ToXml(self):
+ return u'<georss:point>%s %s</georss:point>' % (unicode(self.lat),
+ unicode(self.lon))
+
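+# Illustrative examples:
+#
+#   GeoPt(48.86, 2.35) == GeoPt('48.86,2.35')   # both forms are accepted
+#   str(GeoPt(48.86, 2.35)) == '48.86,2.35'
+#   GeoPt(100.0, 0.0)   # raises BadValueError; latitude out of range
+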
+
+class IM(object):
+ """An instant messaging handle. Includes both an address and its protocol.
+ The protocol value is either a standard IM scheme or a URL identifying the
+ IM network for the protocol. Possible values include:
+
+ Value Description
+ sip SIP/SIMPLE
+ unknown Unknown or unspecified
+ xmpp XMPP/Jabber
+ http://aim.com/ AIM
+ http://icq.com/ ICQ
+ http://talk.google.com/ Google Talk
+ http://messenger.msn.com/ MSN Messenger
+ http://messenger.yahoo.com/ Yahoo Messenger
+ http://sametime.com/ Lotus Sametime
+ http://gadu-gadu.pl/ Gadu-Gadu
+
+ This is the gd:im element. In XML output, the address and protocol are
+ provided as the address and protocol attributes, respectively. See:
+ http://code.google.com/apis/gdata/common-elements.html#gdIm
+
+ Serializes to '<protocol> <address>'. Raises BadValueError if protocol is
+ not a standard IM scheme or a URL.
+ """
+ PROTOCOLS = [ 'sip', 'unknown', 'xmpp' ]
+
+ protocol = None
+ address = None
+
+ def __init__(self, protocol, address=None):
+ if address is None:
+ try:
+ split = protocol.split(' ')
+ protocol, address = split
+ except (AttributeError, ValueError):
+ raise datastore_errors.BadValueError(
+ 'Expected string of format "protocol address"; received %s' %
+ str(protocol))
+
+ ValidateString(address, 'address')
+ if protocol not in self.PROTOCOLS:
+ Link(protocol)
+
+ self.address = address
+ self.protocol = protocol
+
+ def __cmp__(self, other):
+ if not isinstance(other, IM):
+ try:
+ other = IM(other)
+ except datastore_errors.BadValueError:
+ return NotImplemented
+
+ return cmp((self.address, self.protocol),
+ (other.address, other.protocol))
+
+ def __repr__(self):
+ """Returns an eval()able string representation of this IM.
+
+ The returned string is of the form:
+
+ datastore_types.IM('address', 'protocol')
+
+ Returns:
+ string
+ """
+ return 'datastore_types.IM(%r, %r)' % (self.protocol, self.address)
+
+ def __unicode__(self):
+ return u'%s %s' % (self.protocol, self.address)
+
+ __str__ = __unicode__
+
+ def ToXml(self):
+ return (u'<gd:im protocol=%s address=%s />' %
+ (saxutils.quoteattr(self.protocol),
+ saxutils.quoteattr(self.address)))
+
+ def __len__(self):
+ return len(unicode(self))
+
+
+class PhoneNumber(unicode):
+ """A human-readable phone number or address.
+
+ No validation is performed. Phone numbers have many different formats -
+ local, long distance, domestic, international, internal extension, TTY,
+ VOIP, SMS, and alternative networks like Skype, XFire and Roger Wilco. They
+ all have their own numbering and addressing formats.
+
+ This is the gd:phoneNumber element. In XML output, the phone number is
+ provided as the text of the element. See:
+ http://code.google.com/apis/gdata/common-elements.html#gdPhoneNumber
+
+ Raises BadValueError if phone is not a string or subtype.
+ """
+ def __init__(self, phone):
+ super(PhoneNumber, self).__init__()
+ ValidateString(phone, 'phone')
+
+ def ToXml(self):
+ return u'<gd:phoneNumber>%s</gd:phoneNumber>' % saxutils.escape(self)
+
+
+class PostalAddress(unicode):
+ """A human-readable mailing address. Again, mailing address formats vary
+ widely, so no validation is performed.
+
+ This is the gd:postalAddress element. In XML output, the address is provided
+ as the text of the element. See:
+ http://code.google.com/apis/gdata/common-elements.html#gdPostalAddress
+
+ Raises BadValueError if address is not a string or subtype.
+ """
+ def __init__(self, address):
+ super(PostalAddress, self).__init__()
+ ValidateString(address, 'address')
+
+ def ToXml(self):
+ return u'<gd:postalAddress>%s</gd:postalAddress>' % saxutils.escape(self)
+
+
+class Rating(long):
+ """A user-provided integer rating for a piece of content. Normalized to a
+ 0-100 scale.
+
+ This is the gd:rating element. In XML output, the rating is provided
+ as the text of the element. See:
+ http://code.google.com/apis/gdata/common-elements.html#gdRating
+
+ Serializes to the decimal string representation of the rating. Raises
+ BadValueError if the rating is not an integer in the range [0, 100].
+ """
+ MIN = 0
+ MAX = 100
+
+ def __init__(self, rating):
+ super(Rating, self).__init__()
+ if isinstance(rating, float) or isinstance(rating, complex):
+ raise datastore_errors.BadValueError(
+ 'Expected int or long; received %s (a %s).' %
+ (rating, typename(rating)))
+
+ try:
+ if long(rating) < Rating.MIN or long(rating) > Rating.MAX:
+ raise datastore_errors.BadValueError()
+ except ValueError:
+ raise datastore_errors.BadValueError(
+ 'Expected int or long; received %s (a %s).' %
+ (rating, typename(rating)))
+
+ def ToXml(self):
+ return (u'<gd:rating value="%d" min="%d" max="%d" />' %
+ (self, Rating.MIN, Rating.MAX))
+
+
+class Text(unicode):
+ """A long string type.
+
+ Strings of any length can be stored in the datastore using this
+ type. It behaves identically to the Python unicode type, except for
+ the constructor, which only accepts str and unicode arguments.
+ """
+
+ def __new__(cls, arg=None, encoding=None):
+ """Constructor.
+
+ We only accept unicode and str instances, the latter with encoding.
+
+ Args:
+ arg: optional unicode or str instance; default u''
+ encoding: optional encoding; disallowed when isinstance(arg, unicode),
+ defaults to 'ascii' when isinstance(arg, str).
+ """
+ if arg is None:
+ arg = u''
+ if isinstance(arg, unicode):
+ if encoding is not None:
+ raise TypeError('Text() with a unicode argument '
+ 'should not specify an encoding')
+ return super(Text, cls).__new__(cls, arg)
+
+ if isinstance(arg, str):
+ if encoding is None:
+ encoding = 'ascii'
+ return super(Text, cls).__new__(cls, arg, encoding)
+
+ raise TypeError('Text() argument should be str or unicode, not %s' %
+ type(arg).__name__)
+
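+# Illustrative examples:
+#
+#   Text('caf\xc3\xa9', encoding='utf-8') == u'caf\xe9'
+#   Text(u'caf\xe9') == u'caf\xe9'
+#   Text(u'x', encoding='utf-8')   # raises TypeError
+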
+class Blob(str):
+ """A blob type, appropriate for storing binary data of any length.
+
+ This behaves identically to the Python str type, except for the
+ constructor, which only accepts str arguments.
+ """
+
+ def __new__(cls, arg=None):
+ """Constructor.
+
+ We only accept str instances.
+
+ Args:
+ arg: optional str instance (default '')
+ """
+ if arg is None:
+ arg = ''
+ if isinstance(arg, str):
+ return super(Blob, cls).__new__(cls, arg)
+
+ raise TypeError('Blob() argument should be str instance, not %s' %
+ type(arg).__name__)
+
+ def ToXml(self):
+ """Output a blob as XML.
+
+ Returns:
+ Base64 encoded version of itself for safe insertion into an XML document.
+ """
+ encoded = base64.urlsafe_b64encode(self)
+ return saxutils.escape(encoded)
+
+class ByteString(str):
+ """A byte-string type, appropriate for storing short amounts of indexed data.
+
+ This behaves identically to Blob, except it's used only for short, indexed
+ byte strings.
+ """
+
+ def __new__(cls, arg=None):
+ """Constructor.
+
+ We only accept str instances.
+
+ Args:
+ arg: optional str instance (default '')
+ """
+ if arg is None:
+ arg = ''
+ if isinstance(arg, str):
+ return super(ByteString, cls).__new__(cls, arg)
+
+ raise TypeError('ByteString() argument should be str instance, not %s' %
+ type(arg).__name__)
+
+ def ToXml(self):
+ """Output a ByteString as XML.
+
+ Returns:
+ Base64 encoded version of itself for safe insertion into an XML document.
+ """
+ encoded = base64.urlsafe_b64encode(self)
+ return saxutils.escape(encoded)
+
+
+class BlobKey(object):
+ """Key used to identify a blob in Blobstore.
+
+ This object wraps a string that gets used internally by the Blobstore API
+ to identify application blobs. The BlobKey corresponds to the entity name
+ of the underlying BlobReference entity. The structure of the key is:
+
+ _<blob-key>
+
+ This class is exposed in the API in both google.appengine.ext.db and
+ google.appengine.ext.blobstore.
+ """
+
+ def __init__(self, blob_key):
+ """Constructor.
+
+ Used to convert a string to a BlobKey. Normally used internally by
+ Blobstore API.
+
+ Args:
+ blob_key: Key name of BlobReference that this key belongs to.
+ """
+ self.__blob_key = blob_key
+
+ def __str__(self):
+ """Convert to string."""
+ return self.__blob_key
+
+ def __repr__(self):
+ """Returns an eval()able string representation of this key.
+
+ Returns a Python string of the form 'datastore_types.BlobKey(...)'
+ that can be used to recreate this key.
+
+ Returns:
+ string
+ """
+ return '%s.%s(%r)' % (type(self).__module__,
+ type(self).__name__,
+ self.__blob_key)
+
+ def __cmp__(self, other):
+ if type(other) is type(self):
+ return cmp(str(self), str(other))
+ elif isinstance(other, basestring):
+ return cmp(self.__blob_key, other)
+ else:
+ return NotImplemented
+
+ def __hash__(self):
+ return hash(self.__blob_key)
+
+ def ToXml(self):
+ return str(self)
+
+
+_PROPERTY_MEANINGS = {
+ Blob: entity_pb.Property.BLOB,
+ ByteString: entity_pb.Property.BYTESTRING,
+ Text: entity_pb.Property.TEXT,
+ datetime.datetime: entity_pb.Property.GD_WHEN,
+ Category: entity_pb.Property.ATOM_CATEGORY,
+ Link: entity_pb.Property.ATOM_LINK,
+ Email: entity_pb.Property.GD_EMAIL,
+ GeoPt: entity_pb.Property.GEORSS_POINT,
+ IM: entity_pb.Property.GD_IM,
+ PhoneNumber: entity_pb.Property.GD_PHONENUMBER,
+ PostalAddress: entity_pb.Property.GD_POSTALADDRESS,
+ Rating: entity_pb.Property.GD_RATING,
+ BlobKey: entity_pb.Property.BLOBKEY,
+}
+
+_PROPERTY_TYPES = frozenset([
+ Blob,
+ ByteString,
+ bool,
+ Category,
+ datetime.datetime,
+ Email,
+ float,
+ GeoPt,
+ IM,
+ int,
+ Key,
+ Link,
+ long,
+ PhoneNumber,
+ PostalAddress,
+ Rating,
+ str,
+ Text,
+ type(None),
+ unicode,
+ users.User,
+ BlobKey,
+])
+
+_RAW_PROPERTY_TYPES = (Blob, Text)
+
+def ValidatePropertyInteger(name, value):
+ """Raises an exception if the supplied integer is invalid.
+
+ Args:
+ name: Name of the property this is for.
+ value: Integer value.
+
+ Raises:
+ OverflowError if the value does not fit within a signed int64.
+ """
+ if not (-0x8000000000000000 <= value <= 0x7fffffffffffffff):
+ raise OverflowError('%d is out of bounds for int64' % value)
+
+
+def ValidateStringLength(name, value, max_len):
+ """Raises an exception if the supplied string is too long.
+
+ Args:
+ name: Name of the property this is for.
+ value: String value.
+ max_len: Maximum length the string may be.
+
+ Raises:
+ datastore_errors.BadValueError if the value is longer than max_len.
+ """
+ if len(value) > max_len:
+ raise datastore_errors.BadValueError(
+ 'Property %s is %d bytes long; it must be %d or less. '
+ 'Consider Text instead, which can store strings of any length.' %
+ (name, len(value), max_len))
+
+
+def ValidatePropertyString(name, value):
+ """Validates the length of an indexed string property.
+
+ Args:
+ name: Name of the property this is for.
+ value: String value.
+ """
+ ValidateStringLength(name, value, max_len=_MAX_STRING_LENGTH)
+
+
+def ValidatePropertyLink(name, value):
+ """Validates the length of an indexed Link property.
+
+ Args:
+ name: Name of the property this is for.
+ value: String value.
+ """
+ ValidateStringLength(name, value, max_len=_MAX_LINK_PROPERTY_LENGTH)
+
+
+def ValidatePropertyNothing(name, value):
+ """No-op validation function.
+
+ Args:
+ name: Name of the property this is for.
+ value: Not used.
+ """
+ pass
+
+
+def ValidatePropertyKey(name, value):
+ """Raises an exception if the supplied datastore.Key instance is invalid.
+
+ Args:
+ name: Name of the property this is for.
+ value: A datastore.Key instance.
+
+ Raises:
+ datastore_errors.BadValueError if the value is invalid.
+ """
+ if not value.has_id_or_name():
+ raise datastore_errors.BadValueError(
+ 'Incomplete key found for reference property %s.' % name)
+
+
+_VALIDATE_PROPERTY_VALUES = {
+ Blob: ValidatePropertyNothing,
+ ByteString: ValidatePropertyString,
+ bool: ValidatePropertyNothing,
+ Category: ValidatePropertyString,
+ datetime.datetime: ValidatePropertyNothing,
+ Email: ValidatePropertyString,
+ float: ValidatePropertyNothing,
+ GeoPt: ValidatePropertyNothing,
+ IM: ValidatePropertyString,
+ int: ValidatePropertyInteger,
+ Key: ValidatePropertyKey,
+ Link: ValidatePropertyLink,
+ long: ValidatePropertyInteger,
+ PhoneNumber: ValidatePropertyString,
+ PostalAddress: ValidatePropertyString,
+ Rating: ValidatePropertyInteger,
+ str: ValidatePropertyString,
+ Text: ValidatePropertyNothing,
+ type(None): ValidatePropertyNothing,
+ unicode: ValidatePropertyString,
+ users.User: ValidatePropertyNothing,
+ BlobKey: ValidatePropertyString,
+}
+
+assert set(_VALIDATE_PROPERTY_VALUES.iterkeys()) == _PROPERTY_TYPES
+
+
+def ValidateProperty(name, values, read_only=False):
+ """Helper function for validating property values.
+
+ Args:
+ name: Name of the property this is for.
+ values: Value or list of values for the property, as Python native types.
+ read_only: if True, reserved property names are allowed.
+
+ Raises:
+ BadPropertyError if the property name is invalid. BadValueError if the
+ property did not validate correctly or the value was an empty list. Other
+ exception types (like OverflowError) if the property value does not meet
+ type-specific criteria.
+ """
+ ValidateString(name, 'property name', datastore_errors.BadPropertyError)
+
+ if not read_only and RESERVED_PROPERTY_NAME.match(name):
+ raise datastore_errors.BadPropertyError(
+ '%s is a reserved property name.' % name)
+
+ values_type = type(values)
+
+ if values_type is tuple:
+ raise datastore_errors.BadValueError(
+ 'May not use tuple property value; property %s is %s.' %
+ (name, repr(values)))
+
+ if values_type is not list:
+ values = [values]
+
+ if not values:
+ raise datastore_errors.BadValueError(
+ 'May not use the empty list as a property value; property %s is %s.' %
+ (name, repr(values)))
+
+ try:
+ for v in values:
+ prop_validator = _VALIDATE_PROPERTY_VALUES.get(v.__class__)
+ if prop_validator is None:
+ raise datastore_errors.BadValueError(
+ 'Unsupported type for property %s: %s' % (name, v.__class__))
+ prop_validator(name, v)
+
+ except (KeyError, ValueError, TypeError, IndexError, AttributeError), msg:
+ raise datastore_errors.BadValueError(
+ 'Error type checking values for property %s: %s' % (name, msg))
+
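+# Illustrative examples:
+#
+#   ValidateProperty('tags', [u'a', u'b'])   # OK; a list marks a multi-value
+#   ValidateProperty('tags', (u'a',))   # raises BadValueError; tuples rejected
+#   ValidateProperty('tags', [])   # raises BadValueError; empty list rejected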
+
+ValidateReadProperty = ValidateProperty
+
+
+def PackBlob(name, value, pbvalue):
+ """Packs a Blob property into a entity_pb.PropertyValue.
+
+ Args:
+ name: The name of the property as a string.
+ value: A Blob instance.
+ pbvalue: The entity_pb.PropertyValue to pack this value into.
+ """
+ pbvalue.set_stringvalue(value)
+
+
+def PackString(name, value, pbvalue):
+ """Packs a string-typed property into a entity_pb.PropertyValue.
+
+ Args:
+ name: The name of the property as a string.
+ value: A string, unicode, or string-like value instance.
+ pbvalue: The entity_pb.PropertyValue to pack this value into.
+ """
+ pbvalue.set_stringvalue(unicode(value).encode('utf-8'))
+
+
+def PackDatetime(name, value, pbvalue):
+ """Packs a datetime-typed property into a entity_pb.PropertyValue.
+
+ Args:
+ name: The name of the property as a string.
+ value: A datetime.datetime instance.
+ pbvalue: The entity_pb.PropertyValue to pack this value into.
+ """
+ pbvalue.set_int64value(DatetimeToTimestamp(value))
+
+
+def DatetimeToTimestamp(value):
+ """Converts a datetime.datetime to microseconds since the epoch, as a float.
+ Args:
+ value: datetime.datetime
+
+ Returns: value as a long
+ """
+ if value.tzinfo:
+ value = value.astimezone(UTC)
+ return long(calendar.timegm(value.timetuple()) * 1000000L) + value.microsecond
+
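+# Worked example: one second and one microsecond past the epoch.
+#
+#   DatetimeToTimestamp(datetime.datetime(1970, 1, 1, 0, 0, 1, 1)) == 1000001L
+#
+# Timezone-aware datetimes are converted to UTC before the conversion.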
+
+def PackGeoPt(name, value, pbvalue):
+ """Packs a GeoPt property into a entity_pb.PropertyValue.
+
+ Args:
+ name: The name of the property as a string.
+ value: A GeoPt instance.
+ pbvalue: The entity_pb.PropertyValue to pack this value into.
+ """
+ pbvalue.mutable_pointvalue().set_x(value.lat)
+ pbvalue.mutable_pointvalue().set_y(value.lon)
+
+
+def PackUser(name, value, pbvalue):
+ """Packs a User property into a entity_pb.PropertyValue.
+
+ Args:
+ name: The name of the property as a string.
+ value: A users.User instance.
+ pbvalue: The entity_pb.PropertyValue to pack this value into.
+ """
+ pbvalue.mutable_uservalue().set_email(value.email().encode('utf-8'))
+ pbvalue.mutable_uservalue().set_auth_domain(
+ value.auth_domain().encode('utf-8'))
+ pbvalue.mutable_uservalue().set_gaiaid(0)
+
+ if value.user_id() is not None:
+ pbvalue.mutable_uservalue().set_obfuscated_gaiaid(
+ value.user_id().encode('utf-8'))
+
+
+def PackKey(name, value, pbvalue):
+ """Packs a reference property into a entity_pb.PropertyValue.
+
+ Args:
+ name: The name of the property as a string.
+ value: A Key instance.
+ pbvalue: The entity_pb.PropertyValue to pack this value into.
+ """
+ ref = value._Key__reference
+ pbvalue.mutable_referencevalue().set_app(ref.app())
+ for elem in ref.path().element_list():
+ pbvalue.mutable_referencevalue().add_pathelement().CopyFrom(elem)
+
+
+def PackBool(name, value, pbvalue):
+ """Packs a boolean property into a entity_pb.PropertyValue.
+
+ Args:
+ name: The name of the property as a string.
+ value: A boolean instance.
+ pbvalue: The entity_pb.PropertyValue to pack this value into.
+ """
+ pbvalue.set_booleanvalue(value)
+
+
+def PackInteger(name, value, pbvalue):
+ """Packs an integer property into a entity_pb.PropertyValue.
+
+ Args:
+ name: The name of the property as a string.
+ value: An int or long instance.
+ pbvalue: The entity_pb.PropertyValue to pack this value into.
+ """
+ pbvalue.set_int64value(value)
+
+
+def PackFloat(name, value, pbvalue):
+ """Packs a float property into a entity_pb.PropertyValue.
+
+ Args:
+ name: The name of the property as a string.
+ value: A float instance.
+ pbvalue: The entity_pb.PropertyValue to pack this value into.
+ """
+ pbvalue.set_doublevalue(value)
+
+
+_PACK_PROPERTY_VALUES = {
+ Blob: PackBlob,
+ ByteString: PackBlob,
+ bool: PackBool,
+ Category: PackString,
+ datetime.datetime: PackDatetime,
+ Email: PackString,
+ float: PackFloat,
+ GeoPt: PackGeoPt,
+ IM: PackString,
+ int: PackInteger,
+ Key: PackKey,
+ Link: PackString,
+ long: PackInteger,
+ PhoneNumber: PackString,
+ PostalAddress: PackString,
+ Rating: PackInteger,
+ str: PackString,
+ Text: PackString,
+ type(None): lambda name, value, pbvalue: None,
+ unicode: PackString,
+ users.User: PackUser,
+ BlobKey: PackString,
+}
+
+assert set(_PACK_PROPERTY_VALUES.iterkeys()) == _PROPERTY_TYPES
+
+
+def ToPropertyPb(name, values):
+ """Creates type-specific entity_pb.PropertyValues.
+
+ Determines the type and meaning of the PropertyValue based on the Python
+ type of the input value(s).
+
+ NOTE: This function does not validate anything!
+
+ Args:
+ name: string or unicode; the property name
+ values: The values for this property, either a single one or a list of them.
+ All values must be a supported type. Lists of values must all be of the
+ same type.
+
+ Returns:
+ An entity_pb.Property instance, or a list of them if values is a list.
+ """
+ encoded_name = name.encode('utf-8')
+
+ values_type = type(values)
+ if values_type is list:
+ multiple = True
+ else:
+ multiple = False
+ values = [values]
+
+ pbs = []
+ for v in values:
+ pb = entity_pb.Property()
+ pb.set_name(encoded_name)
+ pb.set_multiple(multiple)
+
+ meaning = _PROPERTY_MEANINGS.get(v.__class__)
+ if meaning is not None:
+ pb.set_meaning(meaning)
+
+ pack_prop = _PACK_PROPERTY_VALUES[v.__class__]
+ pack_prop(name, v, pb.mutable_value())
+ pbs.append(pb)
+
+ if multiple:
+ return pbs
+ else:
+ return pbs[0]
+
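+# Illustrative examples:
+#
+#   ToPropertyPb('title', u'hello')      # a single entity_pb.Property
+#   ToPropertyPb('tags', [u'a', u'b'])   # a list of two, with multiple=True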
+
+def FromReferenceProperty(value):
+ """Converts a reference PropertyValue to a Key.
+
+ Args:
+ value: entity_pb.PropertyValue
+
+ Returns:
+ Key
+
+ Raises:
+ AssertionError if the value is not a reference PropertyValue.
+ """
+ assert isinstance(value, entity_pb.PropertyValue)
+ assert value.has_referencevalue()
+ ref = value.referencevalue()
+
+ key = Key()
+ key_ref = key._Key__reference
+ key_ref.set_app(ref.app())
+
+ for pathelem in ref.pathelement_list():
+ key_ref.mutable_path().add_element().CopyFrom(pathelem)
+
+ return key
+
+
+_EPOCH = datetime.datetime.utcfromtimestamp(0)
+
+_PROPERTY_CONVERSIONS = {
+ entity_pb.Property.GD_WHEN:
+ lambda val: _EPOCH + datetime.timedelta(microseconds=val),
+ entity_pb.Property.ATOM_CATEGORY: Category,
+ entity_pb.Property.ATOM_LINK: Link,
+ entity_pb.Property.GD_EMAIL: Email,
+ entity_pb.Property.GD_IM: IM,
+ entity_pb.Property.GD_PHONENUMBER: PhoneNumber,
+ entity_pb.Property.GD_POSTALADDRESS: PostalAddress,
+ entity_pb.Property.GD_RATING: Rating,
+ entity_pb.Property.BLOB: Blob,
+ entity_pb.Property.BYTESTRING: ByteString,
+ entity_pb.Property.TEXT: Text,
+ entity_pb.Property.BLOBKEY: BlobKey,
+}
+
+
+def FromPropertyPb(pb):
+ """Converts a property PB to a python value.
+
+ Args:
+ pb: entity_pb.Property
+
+ Returns:
+ # return type is determined by the type of the argument
+ string, int, bool, double, users.User, or one of the atom or gd types
+ """
+ pbval = pb.value()
+ meaning = pb.meaning()
+
+ if pbval.has_stringvalue():
+ value = pbval.stringvalue()
+ if meaning not in (entity_pb.Property.BLOB, entity_pb.Property.BYTESTRING):
+ value = unicode(value.decode('utf-8'))
+ elif pbval.has_int64value():
+ value = long(pbval.int64value())
+ elif pbval.has_booleanvalue():
+ value = bool(pbval.booleanvalue())
+ elif pbval.has_doublevalue():
+ value = pbval.doublevalue()
+ elif pbval.has_referencevalue():
+ value = FromReferenceProperty(pbval)
+ elif pbval.has_pointvalue():
+ value = GeoPt(pbval.pointvalue().x(), pbval.pointvalue().y())
+ elif pbval.has_uservalue():
+ email = unicode(pbval.uservalue().email().decode('utf-8'))
+ auth_domain = unicode(pbval.uservalue().auth_domain().decode('utf-8'))
+ obfuscated_gaiaid = pbval.uservalue().obfuscated_gaiaid().decode('utf-8')
+ obfuscated_gaiaid = unicode(obfuscated_gaiaid)
+ value = users.User(email=email,
+ _auth_domain=auth_domain,
+ _user_id=obfuscated_gaiaid)
+ else:
+ value = None
+
+ try:
+ if pb.has_meaning() and meaning in _PROPERTY_CONVERSIONS:
+ conversion = _PROPERTY_CONVERSIONS[meaning]
+ value = conversion(value)
+ except (KeyError, ValueError, IndexError, TypeError, AttributeError), msg:
+ raise datastore_errors.BadValueError(
+ 'Error converting pb: %s\nException was: %s' % (pb, msg))
+
+ return value
+
+
+def PropertyTypeName(value):
+ """Returns the name of the type of the given property value, as a string.
+
+ Raises BadValueError if the value is not a valid property type.
+
+ Args:
+ value: any valid property value
+
+ Returns:
+ string
+ """
+ if value.__class__ in _PROPERTY_MEANINGS:
+ meaning = _PROPERTY_MEANINGS[value.__class__]
+ name = entity_pb.Property._Meaning_NAMES[meaning]
+ return name.lower().replace('_', ':')
+ elif isinstance(value, basestring):
+ return 'string'
+ elif isinstance(value, users.User):
+ return 'user'
+ elif isinstance(value, long):
+ return 'int'
+ elif value is None:
+ return 'null'
+ else:
+ return typename(value).lower()
+
+_PROPERTY_TYPE_STRINGS = {
+ 'string': unicode,
+ 'bool': bool,
+ 'int': long,
+ 'null': type(None),
+ 'float': float,
+ 'key': Key,
+ 'blob': Blob,
+ 'bytestring': ByteString,
+ 'text': Text,
+ 'user': users.User,
+ 'atom:category': Category,
+ 'atom:link': Link,
+ 'gd:email': Email,
+ 'gd:when': datetime.datetime,
+ 'georss:point': GeoPt,
+ 'gd:im': IM,
+ 'gd:phonenumber': PhoneNumber,
+ 'gd:postaladdress': PostalAddress,
+ 'gd:rating': Rating,
+ 'blobkey': BlobKey,
+ }
+
+
+def FromPropertyTypeName(type_name):
+ """Returns the python type given a type name.
+
+ Args:
+ type_name: A string representation of a datastore type name.
+
+ Returns:
+ A python type.
+ """
+ return _PROPERTY_TYPE_STRINGS[type_name]
+
+
+def PropertyValueFromString(type_,
+ value_string,
+ _auth_domain=None):
+ """Returns an instance of a property value given a type and string value.
+
+ The reverse of this method is just str() and type() of the python value.
+
+ Note that this does *not* support non-UTC offsets in ISO 8601-formatted
+ datetime strings, e.g. the -08:00 suffix in '2002-12-25 00:00:00-08:00'.
+ It only supports -00:00 and +00:00 suffixes, which are UTC.
+
+ Args:
+ type_: A python class.
+ value_string: A string representation of the value of the property.
+
+ Returns:
+ An instance of 'type'.
+
+ Raises:
+ ValueError if type_ is datetime and value_string has a timezone offset.
+ """
+ if type_ == datetime.datetime:
+ value_string = value_string.strip()
+ if value_string[-6] in ('+', '-'):
+ if value_string[-5:] == '00:00':
+ value_string = value_string[:-6]
+ else:
+ raise ValueError('Non-UTC offsets in datetimes are not supported.')
+
+ split = value_string.split('.')
+ iso_date = split[0]
+ microseconds = 0
+ if len(split) > 1:
+ microseconds = int(split[1])
+
+ time_struct = time.strptime(iso_date, '%Y-%m-%d %H:%M:%S')[0:6]
+ value = datetime.datetime(*(time_struct + (microseconds,)))
+ return value
+ elif type_ == Rating:
+ return Rating(int(value_string))
+ elif type_ == bool:
+ return value_string == 'True'
+ elif type_ == users.User:
+ return users.User(value_string, _auth_domain)
+ elif type_ == type(None):
+ return None
+ return type_(value_string)
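+
+# Illustrative round trips (the inverse direction is just str() on the value):
+#
+#   PropertyValueFromString(datetime.datetime, '2002-12-25 00:00:00')
+#   PropertyValueFromString(bool, 'True') == True
+#   PropertyValueFromString(long, '42') == 42L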
diff --git a/google_appengine/google/appengine/api/datastore_types.pyc b/google_appengine/google/appengine/api/datastore_types.pyc
new file mode 100644
index 0000000..5c19ce2
--- /dev/null
+++ b/google_appengine/google/appengine/api/datastore_types.pyc
Binary files differ
diff --git a/google_appengine/google/appengine/api/images/__init__.py b/google_appengine/google/appengine/api/images/__init__.py
new file mode 100755
index 0000000..757afcf
--- /dev/null
+++ b/google_appengine/google/appengine/api/images/__init__.py
@@ -0,0 +1,827 @@
+#!/usr/bin/env python
+#
+# Copyright 2007 Google Inc.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
+
+"""Image manipulation API.
+
+Classes defined in this module:
+ Image: class used to encapsulate image information and transformations for
+ that image.
+
+ The current manipulations that are available are resize, rotate,
+ horizontal_flip, vertical_flip, crop and im_feeling_lucky.
+
+ It should be noted that each transform can only be called once per image
+ per execute_transforms() call.
+"""
+
+
+
+import struct
+
+from google.appengine.api import apiproxy_stub_map
+from google.appengine.api.images import images_service_pb
+from google.appengine.runtime import apiproxy_errors
+
+
+JPEG = images_service_pb.OutputSettings.JPEG
+PNG = images_service_pb.OutputSettings.PNG
+
+OUTPUT_ENCODING_TYPES = frozenset([JPEG, PNG])
+
+TOP_LEFT = images_service_pb.CompositeImageOptions.TOP_LEFT
+TOP_CENTER = images_service_pb.CompositeImageOptions.TOP
+TOP_RIGHT = images_service_pb.CompositeImageOptions.TOP_RIGHT
+CENTER_LEFT = images_service_pb.CompositeImageOptions.LEFT
+CENTER_CENTER = images_service_pb.CompositeImageOptions.CENTER
+CENTER_RIGHT = images_service_pb.CompositeImageOptions.RIGHT
+BOTTOM_LEFT = images_service_pb.CompositeImageOptions.BOTTOM_LEFT
+BOTTOM_CENTER = images_service_pb.CompositeImageOptions.BOTTOM
+BOTTOM_RIGHT = images_service_pb.CompositeImageOptions.BOTTOM_RIGHT
+
+ANCHOR_TYPES = frozenset([TOP_LEFT, TOP_CENTER, TOP_RIGHT, CENTER_LEFT,
+ CENTER_CENTER, CENTER_RIGHT, BOTTOM_LEFT,
+ BOTTOM_CENTER, BOTTOM_RIGHT])
+
+MAX_TRANSFORMS_PER_REQUEST = 10
+
+MAX_COMPOSITES_PER_REQUEST = 16
+
+
+class Error(Exception):
+ """Base error class for this module."""
+
+
+class TransformationError(Error):
+ """Error while attempting to transform the image."""
+
+
+class BadRequestError(Error):
+ """The parameters given had something wrong with them."""
+
+
+class NotImageError(Error):
+ """The image data given is not recognizable as an image."""
+
+
+class BadImageError(Error):
+ """The image data given is corrupt."""
+
+
+class LargeImageError(Error):
+ """The image data given is too large to process."""
+
+
+class Image(object):
+ """Image object to manipulate."""
+
+ def __init__(self, image_data):
+ """Constructor.
+
+ Args:
+ image_data: str, image data in string form.
+
+ Raises:
+ NotImageError if the given data is empty.
+ """
+ if not image_data:
+ raise NotImageError("Empty image data.")
+
+ self._image_data = image_data
+ self._transforms = []
+ self._width = None
+ self._height = None
+
+ def _check_transform_limits(self):
+ """Ensure some simple limits on the number of transforms allowed.
+
+ Raises:
+ BadRequestError if MAX_TRANSFORMS_PER_REQUEST transforms have already been
+ requested for this image
+ """
+ if len(self._transforms) >= MAX_TRANSFORMS_PER_REQUEST:
+ raise BadRequestError("%d transforms have already been requested on this "
+ "image." % MAX_TRANSFORMS_PER_REQUEST)
+
+ def _update_dimensions(self):
+ """Updates the width and height fields of the image.
+
+ Raises:
+ NotImageError if the image data is not an image.
+ BadImageError if the image data is corrupt.
+ """
+ size = len(self._image_data)
+ if size >= 6 and self._image_data.startswith("GIF"):
+ self._update_gif_dimensions()
+ elif size >= 8 and self._image_data.startswith("\x89PNG\x0D\x0A\x1A\x0A"):
+ self._update_png_dimensions()
+ elif size >= 2 and self._image_data.startswith("\xff\xD8"):
+ self._update_jpeg_dimensions()
+ elif (size >= 8 and (self._image_data.startswith("II\x2a\x00") or
+ self._image_data.startswith("MM\x00\x2a"))):
+ self._update_tiff_dimensions()
+ elif size >= 2 and self._image_data.startswith("BM"):
+ self._update_bmp_dimensions()
+ elif size >= 4 and self._image_data.startswith("\x00\x00\x01\x00"):
+ self._update_ico_dimensions()
+ else:
+ raise NotImageError("Unrecognized image format")
+
+ def _update_gif_dimensions(self):
+ """Updates the width and height fields of the gif image.
+
+ Raises:
+ BadImageError if the image string is not a valid gif image.
+ """
+ size = len(self._image_data)
+ if size >= 10:
+ self._width, self._height = struct.unpack("<HH", self._image_data[6:10])
+ else:
+ raise BadImageError("Corrupt GIF format")
+
+ def _update_png_dimensions(self):
+ """Updates the width and height fields of the png image.
+
+ Raises:
+ BadImageError if the image string is not a valid png image.
+ """
+ size = len(self._image_data)
+ if size >= 24 and self._image_data[12:16] == "IHDR":
+ self._width, self._height = struct.unpack(">II", self._image_data[16:24])
+ else:
+ raise BadImageError("Corrupt PNG format")
+
+ def _update_jpeg_dimensions(self):
+ """Updates the width and height fields of the jpeg image.
+
+ Raises:
+ BadImageError if the image string is not a valid jpeg image.
+ """
+ size = len(self._image_data)
+ offset = 2
+ while offset < size:
+ while offset < size and ord(self._image_data[offset]) != 0xFF:
+ offset += 1
+ while offset < size and ord(self._image_data[offset]) == 0xFF:
+ offset += 1
+ if (offset < size and ord(self._image_data[offset]) & 0xF0 == 0xC0 and
+ ord(self._image_data[offset]) != 0xC4):
+ offset += 4
+ if offset + 4 <= size:
+ self._height, self._width = struct.unpack(
+ ">HH",
+ self._image_data[offset:offset + 4])
+ break
+ else:
+ raise BadImageError("Corrupt JPEG format")
+ elif offset + 3 <= size:
+ offset += 1
+ offset += struct.unpack(">H", self._image_data[offset:offset + 2])[0]
+ else:
+ raise BadImageError("Corrupt JPEG format")
+ if self._height is None or self._width is None:
+ raise BadImageError("Corrupt JPEG format")
+
+ def _update_tiff_dimensions(self):
+ """Updates the width and height fields of the tiff image.
+
+ Raises:
+ BadImageError if the image string is not a valid tiff image.
+ """
+ size = len(self._image_data)
+ if self._image_data.startswith("II"):
+ endianness = "<"
+ else:
+ endianness = ">"
+ ifd_offset = struct.unpack(endianness + "I", self._image_data[4:8])[0]
+ if ifd_offset + 14 <= size:
+ ifd_size = struct.unpack(
+ endianness + "H",
+ self._image_data[ifd_offset:ifd_offset + 2])[0]
+ ifd_offset += 2
+ for unused_i in range(0, ifd_size):
+ if ifd_offset + 12 <= size:
+ tag = struct.unpack(
+ endianness + "H",
+ self._image_data[ifd_offset:ifd_offset + 2])[0]
+ if tag == 0x100 or tag == 0x101:
+ value_type = struct.unpack(
+ endianness + "H",
+ self._image_data[ifd_offset + 2:ifd_offset + 4])[0]
+ if value_type == 3:
+ format = endianness + "H"
+ end_offset = ifd_offset + 10
+ elif value_type == 4:
+ format = endianness + "I"
+ end_offset = ifd_offset + 12
+ else:
+ format = endianness + "B"
+ end_offset = ifd_offset + 9
+ if tag == 0x100:
+ self._width = struct.unpack(
+ format,
+ self._image_data[ifd_offset + 8:end_offset])[0]
+ if self._height is not None:
+ break
+ else:
+ self._height = struct.unpack(
+ format,
+ self._image_data[ifd_offset + 8:end_offset])[0]
+ if self._width is not None:
+ break
+ ifd_offset += 12
+ else:
+ raise BadImageError("Corrupt TIFF format")
+ if self._width is None or self._height is None:
+ raise BadImageError("Corrupt TIFF format")
+
+ def _update_bmp_dimensions(self):
+ """Updates the width and height fields of the bmp image.
+
+ Raises:
+ BadImageError if the image string is not a valid bmp image.
+ """
+ size = len(self._image_data)
+ if size >= 18:
+ header_length = struct.unpack("<I", self._image_data[14:18])[0]
+ if ((header_length == 40 or header_length == 108 or
+ header_length == 124 or header_length == 64) and size >= 26):
+ self._width, self._height = struct.unpack("<II",
+ self._image_data[18:26])
+ elif header_length == 12 and size >= 22:
+ self._width, self._height = struct.unpack("<HH",
+ self._image_data[18:22])
+ else:
+ raise BadImageError("Corrupt BMP format")
+ else:
+ raise BadImageError("Corrupt BMP format")
+
+ def _update_ico_dimensions(self):
+ """Updates the width and height fields of the ico image.
+
+ Raises:
+ BadImageError if the image string is not a valid ico image.
+ """
+ size = len(self._image_data)
+ if size >= 8:
+ self._width, self._height = struct.unpack("<BB", self._image_data[6:8])
+ if not self._width:
+ self._width = 256
+ if not self._height:
+ self._height = 256
+ else:
+ raise BadImageError("Corrupt ICO format")
+
+ def resize(self, width=0, height=0):
+ """Resize the image maintaining the aspect ratio.
+
+ If both width and height are specified, the more restricting of the two
+ values will be used when resizing the photo. The maximum dimension allowed
+ for both width and height is 4000 pixels.
+
+ Args:
+ width: int, width (in pixels) to change the image width to.
+ height: int, height (in pixels) to change the image height to.
+
+ Raises:
+ TypeError when width or height is not an 'int' or 'long' type.
+ BadRequestError when there is something wrong with the given height or
+ width or if MAX_TRANSFORMS_PER_REQUEST transforms have already been
+ requested on this image.
+ """
+ if (not isinstance(width, (int, long)) or
+ not isinstance(height, (int, long))):
+ raise TypeError("Width and height must be integers.")
+ if width < 0 or height < 0:
+ raise BadRequestError("Width and height must be >= 0.")
+
+ if not width and not height:
+ raise BadRequestError("At least one of width or height must be > 0.")
+
+ if width > 4000 or height > 4000:
+ raise BadRequestError("Both width and height must be <= 4000.")
+
+ self._check_transform_limits()
+
+ transform = images_service_pb.Transform()
+ transform.set_width(width)
+ transform.set_height(height)
+
+ self._transforms.append(transform)
+
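+ # Illustrative usage (png_data is a hypothetical str of encoded image
+ # bytes; execute_transforms() is defined below):
+ #
+ #   img = Image(png_data)
+ #   img.resize(width=200)   # height scales to preserve the aspect ratio
+ #   thumbnail = img.execute_transforms(output_encoding=PNG)
+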
+ def rotate(self, degrees):
+ """Rotate an image a given number of degrees clockwise.
+
+ Args:
+ degrees: int, must be a multiple of 90.
+
+ Raises:
+ TypeError when degrees is not an 'int' or 'long' type.
+ BadRequestError when there is something wrong with the given degrees or
+ if MAX_TRANSFORMS_PER_REQUEST transforms have already been requested.
+ """
+ if not isinstance(degrees, (int, long)):
+ raise TypeError("Degrees must be integers.")
+
+ if degrees % 90 != 0:
+      raise BadRequestError("degrees argument must be a multiple of 90.")
+
+ degrees = degrees % 360
+
+ self._check_transform_limits()
+
+ transform = images_service_pb.Transform()
+ transform.set_rotate(degrees)
+
+ self._transforms.append(transform)
+
+ def horizontal_flip(self):
+ """Flip the image horizontally.
+
+ Raises:
+ BadRequestError if MAX_TRANSFORMS_PER_REQUEST transforms have already been
+ requested on the image.
+ """
+ self._check_transform_limits()
+
+ transform = images_service_pb.Transform()
+ transform.set_horizontal_flip(True)
+
+ self._transforms.append(transform)
+
+ def vertical_flip(self):
+ """Flip the image vertically.
+
+ Raises:
+ BadRequestError if MAX_TRANSFORMS_PER_REQUEST transforms have already been
+ requested on the image.
+ """
+ self._check_transform_limits()
+ transform = images_service_pb.Transform()
+ transform.set_vertical_flip(True)
+
+ self._transforms.append(transform)
+
+ def _validate_crop_arg(self, val, val_name):
+ """Validate the given value of a Crop() method argument.
+
+ Args:
+ val: float, value of the argument.
+ val_name: str, name of the argument.
+
+ Raises:
+ TypeError if the args are not of type 'float'.
+ BadRequestError when there is something wrong with the given bounding box.
+ """
+ if type(val) != float:
+ raise TypeError("arg '%s' must be of type 'float'." % val_name)
+
+ if not (0 <= val <= 1.0):
+ raise BadRequestError("arg '%s' must be between 0.0 and 1.0 "
+ "(inclusive)" % val_name)
+
+ def crop(self, left_x, top_y, right_x, bottom_y):
+ """Crop the image.
+
+    The four arguments are scaling factors that describe the bounding box
+    which will crop the image. The upper left point of the bounding box will
+    be at (left_x*image_width, top_y*image_height) and the lower right point
+    will be at (right_x*image_width, bottom_y*image_height).
+
+ Args:
+ left_x: float value between 0.0 and 1.0 (inclusive).
+ top_y: float value between 0.0 and 1.0 (inclusive).
+ right_x: float value between 0.0 and 1.0 (inclusive).
+ bottom_y: float value between 0.0 and 1.0 (inclusive).
+
+ Raises:
+ TypeError if the args are not of type 'float'.
+ BadRequestError when there is something wrong with the given bounding box
+ or if MAX_TRANSFORMS_PER_REQUEST transforms have already been requested
+ for this image.
+ """
+ self._validate_crop_arg(left_x, "left_x")
+ self._validate_crop_arg(top_y, "top_y")
+ self._validate_crop_arg(right_x, "right_x")
+ self._validate_crop_arg(bottom_y, "bottom_y")
+
+ if left_x >= right_x:
+ raise BadRequestError("left_x must be less than right_x")
+ if top_y >= bottom_y:
+ raise BadRequestError("top_y must be less than bottom_y")
+
+ self._check_transform_limits()
+
+ transform = images_service_pb.Transform()
+ transform.set_crop_left_x(left_x)
+ transform.set_crop_top_y(top_y)
+ transform.set_crop_right_x(right_x)
+ transform.set_crop_bottom_y(bottom_y)
+
+ self._transforms.append(transform)
+
+ def im_feeling_lucky(self):
+ """Automatically adjust image contrast and color levels.
+
+ This is similar to the "I'm Feeling Lucky" button in Picasa.
+
+ Raises:
+ BadRequestError if MAX_TRANSFORMS_PER_REQUEST transforms have already
+ been requested for this image.
+ """
+ self._check_transform_limits()
+ transform = images_service_pb.Transform()
+ transform.set_autolevels(True)
+
+ self._transforms.append(transform)
+
+ def execute_transforms(self, output_encoding=PNG):
+ """Perform transformations on given image.
+
+ Args:
+ output_encoding: A value from OUTPUT_ENCODING_TYPES.
+
+ Returns:
+ str, image data after the transformations have been performed on it.
+
+ Raises:
+ BadRequestError when there is something wrong with the request
+ specifications.
+ NotImageError when the image data given is not an image.
+ BadImageError when the image data given is corrupt.
+ LargeImageError when the image data given is too large to process.
+      TransformationError when something goes wrong during image manipulation.
+ Error when something unknown, but bad, happens.
+ """
+ if output_encoding not in OUTPUT_ENCODING_TYPES:
+ raise BadRequestError("Output encoding type not in recognized set "
+ "%s" % OUTPUT_ENCODING_TYPES)
+
+ if not self._transforms:
+ raise BadRequestError("Must specify at least one transformation.")
+
+ request = images_service_pb.ImagesTransformRequest()
+ response = images_service_pb.ImagesTransformResponse()
+
+ request.mutable_image().set_content(self._image_data)
+
+ for transform in self._transforms:
+ request.add_transform().CopyFrom(transform)
+
+ request.mutable_output().set_mime_type(output_encoding)
+
+ try:
+ apiproxy_stub_map.MakeSyncCall("images",
+ "Transform",
+ request,
+ response)
+ except apiproxy_errors.ApplicationError, e:
+ if (e.application_error ==
+ images_service_pb.ImagesServiceError.BAD_TRANSFORM_DATA):
+ raise BadRequestError()
+ elif (e.application_error ==
+ images_service_pb.ImagesServiceError.NOT_IMAGE):
+ raise NotImageError()
+ elif (e.application_error ==
+ images_service_pb.ImagesServiceError.BAD_IMAGE_DATA):
+ raise BadImageError()
+ elif (e.application_error ==
+ images_service_pb.ImagesServiceError.IMAGE_TOO_LARGE):
+ raise LargeImageError()
+ elif (e.application_error ==
+ images_service_pb.ImagesServiceError.UNSPECIFIED_ERROR):
+ raise TransformationError()
+ else:
+ raise Error()
+
+ self._image_data = response.image().content()
+ self._transforms = []
+ self._width = None
+ self._height = None
+ return self._image_data
+
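+  # A minimal usage sketch (the variable names here are illustrative, not
+  # part of the API). Transforms are only queued locally; nothing is sent
+  # to the images service until execute_transforms() makes the RPC:
+  #
+  #   img = Image(jpeg_bytes)
+  #   img.resize(width=80, height=100)
+  #   img.im_feeling_lucky()
+  #   thumbnail = img.execute_transforms(output_encoding=JPEG)
+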
+ @property
+ def width(self):
+ """Gets the width of the image."""
+ if self._width is None:
+ self._update_dimensions()
+ return self._width
+
+ @property
+ def height(self):
+ """Gets the height of the image."""
+ if self._height is None:
+ self._update_dimensions()
+ return self._height
+
+ def histogram(self):
+ """Calculates the histogram of the image.
+
+    Returns:
+      3 256-element lists containing the number of occurrences of each value
+      of each color in the order RGB, as described at
+      http://en.wikipedia.org/wiki/Color_histogram for N = 256; i.e. the
+      first value of the first list is the number of pixels with a red value
+      of 0, the second the number with a red value of 1, and so on.
+
+ Raises:
+ NotImageError when the image data given is not an image.
+ BadImageError when the image data given is corrupt.
+ LargeImageError when the image data given is too large to process.
+ Error when something unknown, but bad, happens.
+ """
+ request = images_service_pb.ImagesHistogramRequest()
+ response = images_service_pb.ImagesHistogramResponse()
+
+ request.mutable_image().set_content(self._image_data)
+ try:
+ apiproxy_stub_map.MakeSyncCall("images",
+ "Histogram",
+ request,
+ response)
+ except apiproxy_errors.ApplicationError, e:
+ if (e.application_error ==
+ images_service_pb.ImagesServiceError.NOT_IMAGE):
+ raise NotImageError()
+ elif (e.application_error ==
+ images_service_pb.ImagesServiceError.BAD_IMAGE_DATA):
+ raise BadImageError()
+ elif (e.application_error ==
+ images_service_pb.ImagesServiceError.IMAGE_TOO_LARGE):
+ raise LargeImageError()
+ else:
+ raise Error()
+ histogram = response.histogram()
+ return [histogram.red_list(),
+ histogram.green_list(),
+ histogram.blue_list()]
+
+
+def resize(image_data, width=0, height=0, output_encoding=PNG):
+ """Resize a given image file maintaining the aspect ratio.
+
+  If both width and height are specified, the more restrictive of the two
+  values will be used when resizing the image. The maximum dimension allowed
+  for both width and height is 4000 pixels.
+
+ Args:
+ image_data: str, source image data.
+ width: int, width (in pixels) to change the image width to.
+ height: int, height (in pixels) to change the image height to.
+ output_encoding: a value from OUTPUT_ENCODING_TYPES.
+
+ Raises:
+    TypeError when width or height is not of type 'int' or 'long'.
+ BadRequestError when there is something wrong with the given height or
+ width.
+    Error when something went wrong with the call. See
+    Image.execute_transforms for more details.
+ """
+ image = Image(image_data)
+ image.resize(width, height)
+ return image.execute_transforms(output_encoding=output_encoding)
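+
+# One-shot convenience sketch (assuming image_data holds a valid image):
+#
+#   thumbnail = resize(image_data, width=120)
+#
+# yields a 120-pixel-wide result scaled to preserve the aspect ratio;
+# passing both width and height uses the more restrictive of the two.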
+
+
+def rotate(image_data, degrees, output_encoding=PNG):
+ """Rotate a given image a given number of degrees clockwise.
+
+ Args:
+ image_data: str, source image data.
+ degrees: value from ROTATE_DEGREE_VALUES.
+ output_encoding: a value from OUTPUT_ENCODING_TYPES.
+
+ Raises:
+    TypeError when degrees is not of type 'int' or 'long'.
+ BadRequestError when there is something wrong with the given degrees.
+    Error when something went wrong with the call. See
+    Image.execute_transforms for more details.
+ """
+ image = Image(image_data)
+ image.rotate(degrees)
+ return image.execute_transforms(output_encoding=output_encoding)
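+
+# Example: rotate(image_data, 90) turns the image 90 degrees clockwise.
+# Negative multiples of 90 are normalized modulo 360, so
+# rotate(image_data, -90) behaves like rotate(image_data, 270).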
+
+
+def horizontal_flip(image_data, output_encoding=PNG):
+ """Flip the image horizontally.
+
+ Args:
+ image_data: str, source image data.
+ output_encoding: a value from OUTPUT_ENCODING_TYPES.
+
+ Raises:
+    Error when something went wrong with the call. See
+    Image.execute_transforms for more details.
+ """
+ image = Image(image_data)
+ image.horizontal_flip()
+ return image.execute_transforms(output_encoding=output_encoding)
+
+
+def vertical_flip(image_data, output_encoding=PNG):
+ """Flip the image vertically.
+
+ Args:
+ image_data: str, source image data.
+ output_encoding: a value from OUTPUT_ENCODING_TYPES.
+
+ Raises:
+    Error when something went wrong with the call. See
+    Image.execute_transforms for more details.
+ """
+ image = Image(image_data)
+ image.vertical_flip()
+ return image.execute_transforms(output_encoding=output_encoding)
+
+
+def crop(image_data, left_x, top_y, right_x, bottom_y, output_encoding=PNG):
+ """Crop the given image.
+
+  The four arguments are scaling factors that describe the bounding box
+  which will crop the image. The upper left point of the bounding box will
+  be at (left_x*image_width, top_y*image_height) and the lower right point
+  will be at (right_x*image_width, bottom_y*image_height).
+
+ Args:
+ image_data: str, source image data.
+ left_x: float value between 0.0 and 1.0 (inclusive).
+ top_y: float value between 0.0 and 1.0 (inclusive).
+ right_x: float value between 0.0 and 1.0 (inclusive).
+ bottom_y: float value between 0.0 and 1.0 (inclusive).
+ output_encoding: a value from OUTPUT_ENCODING_TYPES.
+
+ Raises:
+ TypeError if the args are not of type 'float'.
+ BadRequestError when there is something wrong with the given bounding box.
+    Error when something went wrong with the call. See
+    Image.execute_transforms for more details.
+ """
+ image = Image(image_data)
+ image.crop(left_x, top_y, right_x, bottom_y)
+ return image.execute_transforms(output_encoding=output_encoding)
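+
+# Example: keep the central half of the image in each dimension. The crop
+# box is given as fractions of the source dimensions, not pixels:
+#
+#   cropped = crop(image_data, 0.25, 0.25, 0.75, 0.75)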
+
+
+def im_feeling_lucky(image_data, output_encoding=PNG):
+ """Automatically adjust image levels.
+
+ This is similar to the "I'm Feeling Lucky" button in Picasa.
+
+ Args:
+ image_data: str, source image data.
+ output_encoding: a value from OUTPUT_ENCODING_TYPES.
+
+ Raises:
+    Error when something went wrong with the call. See
+    Image.execute_transforms for more details.
+ """
+ image = Image(image_data)
+ image.im_feeling_lucky()
+ return image.execute_transforms(output_encoding=output_encoding)
+
+
+def composite(inputs, width, height, color=0, output_encoding=PNG):
+ """Composite one or more images onto a canvas.
+
+ Args:
+ inputs: a list of tuples (image_data, x_offset, y_offset, opacity, anchor)
+ where
+ image_data: str, source image data.
+      x_offset: x offset in pixels from the anchor position.
+      y_offset: y offset in pixels from the anchor position.
+      opacity: opacity of the image specified as a float in range [0.0, 1.0].
+      anchor: anchoring point from ANCHOR_TYPES. The anchor point of the
+      image is aligned with the same anchor point of the canvas; e.g.
+      TOP_RIGHT would place the top right corner of the image at the top
+      right corner of the canvas and then apply the x and y offsets.
+ width: canvas width in pixels.
+ height: canvas height in pixels.
+ color: canvas background color encoded as a 32 bit unsigned int where each
+ color channel is represented by one byte in order ARGB.
+ output_encoding: a value from OUTPUT_ENCODING_TYPES.
+
+ Returns:
+ str, image data of the composited image.
+
+ Raises:
+    TypeError if width, height, color, x_offset or y_offset are not of type
+    'int' or 'long', or if opacity is not a 'float'.
+    BadRequestError if more than MAX_COMPOSITES_PER_REQUEST compositions have
+    been requested, if the canvas width or height is greater than 4000 or
+    less than or equal to 0, if the color is invalid, or if for any
+    composition option the opacity is outside the range [0.0, 1.0] or the
+    anchor is invalid.
+ """
+ if (not isinstance(width, (int, long)) or
+ not isinstance(height, (int, long)) or
+ not isinstance(color, (int, long))):
+ raise TypeError("Width, height and color must be integers.")
+ if output_encoding not in OUTPUT_ENCODING_TYPES:
+ raise BadRequestError("Output encoding type '%s' not in recognized set "
+ "%s" % (output_encoding, OUTPUT_ENCODING_TYPES))
+
+ if not inputs:
+ raise BadRequestError("Must provide at least one input")
+ if len(inputs) > MAX_COMPOSITES_PER_REQUEST:
+ raise BadRequestError("A maximum of %d composition operations can be"
+ "performed in a single request" %
+ MAX_COMPOSITES_PER_REQUEST)
+
+ if width <= 0 or height <= 0:
+ raise BadRequestError("Width and height must be > 0.")
+ if width > 4000 or height > 4000:
+ raise BadRequestError("Width and height must be <= 4000.")
+
+ if color > 0xffffffff or color < 0:
+ raise BadRequestError("Invalid color")
+ if color >= 0x80000000:
+ color -= 0x100000000
+
+ image_map = {}
+
+ request = images_service_pb.ImagesCompositeRequest()
+ response = images_service_pb.ImagesTransformResponse()
+ for (image, x, y, opacity, anchor) in inputs:
+ if not image:
+ raise BadRequestError("Each input must include an image")
+ if (not isinstance(x, (int, long)) or
+ not isinstance(y, (int, long)) or
+ not isinstance(opacity, (float))):
+      raise TypeError("x_offset, y_offset must be integers and opacity must "
+                      "be a float")
+    if x > 4000 or x < -4000:
+      raise BadRequestError("x_offset must be in range [-4000, 4000]")
+    if y > 4000 or y < -4000:
+      raise BadRequestError("y_offset must be in range [-4000, 4000]")
+ if opacity < 0 or opacity > 1:
+ raise BadRequestError("Opacity must be in the range 0.0 to 1.0")
+ if anchor not in ANCHOR_TYPES:
+ raise BadRequestError("Anchor type '%s' not in recognized set %s" %
+ (anchor, ANCHOR_TYPES))
+ if image not in image_map:
+ image_map[image] = request.image_size()
+ request.add_image().set_content(image)
+
+ option = request.add_options()
+ option.set_x_offset(x)
+ option.set_y_offset(y)
+ option.set_opacity(opacity)
+ option.set_anchor(anchor)
+ option.set_source_index(image_map[image])
+
+ request.mutable_canvas().mutable_output().set_mime_type(output_encoding)
+ request.mutable_canvas().set_width(width)
+ request.mutable_canvas().set_height(height)
+ request.mutable_canvas().set_color(color)
+
+ try:
+ apiproxy_stub_map.MakeSyncCall("images",
+ "Composite",
+ request,
+ response)
+ except apiproxy_errors.ApplicationError, e:
+ if (e.application_error ==
+ images_service_pb.ImagesServiceError.BAD_TRANSFORM_DATA):
+ raise BadRequestError()
+ elif (e.application_error ==
+ images_service_pb.ImagesServiceError.NOT_IMAGE):
+ raise NotImageError()
+ elif (e.application_error ==
+ images_service_pb.ImagesServiceError.BAD_IMAGE_DATA):
+ raise BadImageError()
+ elif (e.application_error ==
+ images_service_pb.ImagesServiceError.IMAGE_TOO_LARGE):
+ raise LargeImageError()
+ elif (e.application_error ==
+ images_service_pb.ImagesServiceError.UNSPECIFIED_ERROR):
+ raise TransformationError()
+ else:
+ raise Error()
+
+ return response.image().content()
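+
+# A minimal composite sketch (logo_data is an illustrative name): pin one
+# image to the top right corner of a 640x480 canvas at half opacity. The
+# canvas color is 32-bit unsigned ARGB, so opaque white is 0xffffffff:
+#
+#   result = composite([(logo_data, 0, 0, 0.5, TOP_RIGHT)],
+#                      640, 480, color=0xffffffff)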
+
+
+def histogram(image_data):
+ """Calculates the histogram of the given image.
+
+ Args:
+ image_data: str, source image data.
+
+  Returns:
+    3 256-element lists containing the number of occurrences of each value
+    of each color in the order RGB.
+
+ Raises:
+ NotImageError when the image data given is not an image.
+ BadImageError when the image data given is corrupt.
+ LargeImageError when the image data given is too large to process.
+ Error when something unknown, but bad, happens.
+ """
+ image = Image(image_data)
+ return image.histogram()
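+
+# Example (a sketch; image_data is assumed to hold a valid image): the
+# pixel count can be recovered from any single channel list, since every
+# pixel contributes one count per channel:
+#
+#   red, green, blue = histogram(image_data)
+#   num_pixels = sum(red)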
diff --git a/google_appengine/google/appengine/api/images/__init__.pyc b/google_appengine/google/appengine/api/images/__init__.pyc
new file mode 100644
index 0000000..40ef57a
--- /dev/null
+++ b/google_appengine/google/appengine/api/images/__init__.pyc
Binary files differ
diff --git a/google_appengine/google/appengine/api/images/images_not_implemented_stub.py b/google_appengine/google/appengine/api/images/images_not_implemented_stub.py
new file mode 100755
index 0000000..30f6159
--- /dev/null
+++ b/google_appengine/google/appengine/api/images/images_not_implemented_stub.py
@@ -0,0 +1,36 @@
+#!/usr/bin/env python
+#
+# Copyright 2007 Google Inc.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
+
+"""A NotImplemented Images API stub for when the PIL library is not found."""
+
+
+
+class ImagesNotImplementedServiceStub(object):
+ """Stub version of images API which raises a NotImplementedError."""
+
+ def MakeSyncCall(self, service, call, request, response):
+ """Main entry point.
+
+ Args:
+ service: str, must be 'images'.
+ call: str, name of the RPC to make, must be part of ImagesService.
+ request: pb object, corresponding args to the 'call' argument.
+ response: pb object, return value for the 'call' argument.
+ """
+ raise NotImplementedError("Unable to find the Python PIL library. Please "
+ "view the SDK documentation for details about "
+ "installing PIL on your system.")
diff --git a/google_appengine/google/appengine/api/images/images_not_implemented_stub.pyc b/google_appengine/google/appengine/api/images/images_not_implemented_stub.pyc
new file mode 100644
index 0000000..6885635
--- /dev/null
+++ b/google_appengine/google/appengine/api/images/images_not_implemented_stub.pyc
Binary files differ
diff --git a/google_appengine/google/appengine/api/images/images_service_pb.py b/google_appengine/google/appengine/api/images/images_service_pb.py
new file mode 100644
index 0000000..927040c
--- /dev/null
+++ b/google_appengine/google/appengine/api/images/images_service_pb.py
@@ -0,0 +1,1988 @@
+#!/usr/bin/env python
+#
+# Copyright 2007 Google Inc.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
+
+from google.net.proto import ProtocolBuffer
+import array
+import dummy_thread as thread
+
+__pychecker__ = """maxreturns=0 maxbranches=0 no-callinit
+ unusednames=printElemNumber,debug_strs no-special"""
+
+class ImagesServiceError(ProtocolBuffer.ProtocolMessage):
+
+ UNSPECIFIED_ERROR = 1
+ BAD_TRANSFORM_DATA = 2
+ NOT_IMAGE = 3
+ BAD_IMAGE_DATA = 4
+ IMAGE_TOO_LARGE = 5
+
+ _ErrorCode_NAMES = {
+ 1: "UNSPECIFIED_ERROR",
+ 2: "BAD_TRANSFORM_DATA",
+ 3: "NOT_IMAGE",
+ 4: "BAD_IMAGE_DATA",
+ 5: "IMAGE_TOO_LARGE",
+ }
+
+ def ErrorCode_Name(cls, x): return cls._ErrorCode_NAMES.get(x, "")
+ ErrorCode_Name = classmethod(ErrorCode_Name)
+
+
+ def __init__(self, contents=None):
+ if contents is not None: self.MergeFromString(contents)
+
+
+ def MergeFrom(self, x):
+ assert x is not self
+
+ def Equals(self, x):
+ if x is self: return 1
+ return 1
+
+ def IsInitialized(self, debug_strs=None):
+ initialized = 1
+ return initialized
+
+ def ByteSize(self):
+ n = 0
+ return n + 0
+
+ def Clear(self):
+ pass
+
+ def OutputUnchecked(self, out):
+ pass
+
+ def TryMerge(self, d):
+ while d.avail() > 0:
+ tt = d.getVarInt32()
+ if (tt == 0): raise ProtocolBuffer.ProtocolBufferDecodeError
+ d.skipData(tt)
+
+
+ def __str__(self, prefix="", printElemNumber=0):
+ res=""
+ return res
+
+
+ def _BuildTagLookupTable(sparse, maxtag, default=None):
+ return tuple([sparse.get(i, default) for i in xrange(0, 1+maxtag)])
+
+
+ _TEXT = _BuildTagLookupTable({
+ 0: "ErrorCode",
+ }, 0)
+
+ _TYPES = _BuildTagLookupTable({
+ 0: ProtocolBuffer.Encoder.NUMERIC,
+ }, 0, ProtocolBuffer.Encoder.MAX_TYPE)
+
+ _STYLE = """"""
+ _STYLE_CONTENT_TYPE = """"""
+class ImagesServiceTransform(ProtocolBuffer.ProtocolMessage):
+
+ RESIZE = 1
+ ROTATE = 2
+ HORIZONTAL_FLIP = 3
+ VERTICAL_FLIP = 4
+ CROP = 5
+ IM_FEELING_LUCKY = 6
+
+ _Type_NAMES = {
+ 1: "RESIZE",
+ 2: "ROTATE",
+ 3: "HORIZONTAL_FLIP",
+ 4: "VERTICAL_FLIP",
+ 5: "CROP",
+ 6: "IM_FEELING_LUCKY",
+ }
+
+ def Type_Name(cls, x): return cls._Type_NAMES.get(x, "")
+ Type_Name = classmethod(Type_Name)
+
+
+ def __init__(self, contents=None):
+ if contents is not None: self.MergeFromString(contents)
+
+
+ def MergeFrom(self, x):
+ assert x is not self
+
+ def Equals(self, x):
+ if x is self: return 1
+ return 1
+
+ def IsInitialized(self, debug_strs=None):
+ initialized = 1
+ return initialized
+
+ def ByteSize(self):
+ n = 0
+ return n + 0
+
+ def Clear(self):
+ pass
+
+ def OutputUnchecked(self, out):
+ pass
+
+ def TryMerge(self, d):
+ while d.avail() > 0:
+ tt = d.getVarInt32()
+ if (tt == 0): raise ProtocolBuffer.ProtocolBufferDecodeError
+ d.skipData(tt)
+
+
+ def __str__(self, prefix="", printElemNumber=0):
+ res=""
+ return res
+
+
+ def _BuildTagLookupTable(sparse, maxtag, default=None):
+ return tuple([sparse.get(i, default) for i in xrange(0, 1+maxtag)])
+
+
+ _TEXT = _BuildTagLookupTable({
+ 0: "ErrorCode",
+ }, 0)
+
+ _TYPES = _BuildTagLookupTable({
+ 0: ProtocolBuffer.Encoder.NUMERIC,
+ }, 0, ProtocolBuffer.Encoder.MAX_TYPE)
+
+ _STYLE = """"""
+ _STYLE_CONTENT_TYPE = """"""
+class Transform(ProtocolBuffer.ProtocolMessage):
+ has_width_ = 0
+ width_ = 0
+ has_height_ = 0
+ height_ = 0
+ has_rotate_ = 0
+ rotate_ = 0
+ has_horizontal_flip_ = 0
+ horizontal_flip_ = 0
+ has_vertical_flip_ = 0
+ vertical_flip_ = 0
+ has_crop_left_x_ = 0
+ crop_left_x_ = 0.0
+ has_crop_top_y_ = 0
+ crop_top_y_ = 0.0
+ has_crop_right_x_ = 0
+ crop_right_x_ = 1.0
+ has_crop_bottom_y_ = 0
+ crop_bottom_y_ = 1.0
+ has_autolevels_ = 0
+ autolevels_ = 0
+
+ def __init__(self, contents=None):
+ if contents is not None: self.MergeFromString(contents)
+
+ def width(self): return self.width_
+
+ def set_width(self, x):
+ self.has_width_ = 1
+ self.width_ = x
+
+ def clear_width(self):
+ if self.has_width_:
+ self.has_width_ = 0
+ self.width_ = 0
+
+ def has_width(self): return self.has_width_
+
+ def height(self): return self.height_
+
+ def set_height(self, x):
+ self.has_height_ = 1
+ self.height_ = x
+
+ def clear_height(self):
+ if self.has_height_:
+ self.has_height_ = 0
+ self.height_ = 0
+
+ def has_height(self): return self.has_height_
+
+ def rotate(self): return self.rotate_
+
+ def set_rotate(self, x):
+ self.has_rotate_ = 1
+ self.rotate_ = x
+
+ def clear_rotate(self):
+ if self.has_rotate_:
+ self.has_rotate_ = 0
+ self.rotate_ = 0
+
+ def has_rotate(self): return self.has_rotate_
+
+ def horizontal_flip(self): return self.horizontal_flip_
+
+ def set_horizontal_flip(self, x):
+ self.has_horizontal_flip_ = 1
+ self.horizontal_flip_ = x
+
+ def clear_horizontal_flip(self):
+ if self.has_horizontal_flip_:
+ self.has_horizontal_flip_ = 0
+ self.horizontal_flip_ = 0
+
+ def has_horizontal_flip(self): return self.has_horizontal_flip_
+
+ def vertical_flip(self): return self.vertical_flip_
+
+ def set_vertical_flip(self, x):
+ self.has_vertical_flip_ = 1
+ self.vertical_flip_ = x
+
+ def clear_vertical_flip(self):
+ if self.has_vertical_flip_:
+ self.has_vertical_flip_ = 0
+ self.vertical_flip_ = 0
+
+ def has_vertical_flip(self): return self.has_vertical_flip_
+
+ def crop_left_x(self): return self.crop_left_x_
+
+ def set_crop_left_x(self, x):
+ self.has_crop_left_x_ = 1
+ self.crop_left_x_ = x
+
+ def clear_crop_left_x(self):
+ if self.has_crop_left_x_:
+ self.has_crop_left_x_ = 0
+ self.crop_left_x_ = 0.0
+
+ def has_crop_left_x(self): return self.has_crop_left_x_
+
+ def crop_top_y(self): return self.crop_top_y_
+
+ def set_crop_top_y(self, x):
+ self.has_crop_top_y_ = 1
+ self.crop_top_y_ = x
+
+ def clear_crop_top_y(self):
+ if self.has_crop_top_y_:
+ self.has_crop_top_y_ = 0
+ self.crop_top_y_ = 0.0
+
+ def has_crop_top_y(self): return self.has_crop_top_y_
+
+ def crop_right_x(self): return self.crop_right_x_
+
+ def set_crop_right_x(self, x):
+ self.has_crop_right_x_ = 1
+ self.crop_right_x_ = x
+
+ def clear_crop_right_x(self):
+ if self.has_crop_right_x_:
+ self.has_crop_right_x_ = 0
+ self.crop_right_x_ = 1.0
+
+ def has_crop_right_x(self): return self.has_crop_right_x_
+
+ def crop_bottom_y(self): return self.crop_bottom_y_
+
+ def set_crop_bottom_y(self, x):
+ self.has_crop_bottom_y_ = 1
+ self.crop_bottom_y_ = x
+
+ def clear_crop_bottom_y(self):
+ if self.has_crop_bottom_y_:
+ self.has_crop_bottom_y_ = 0
+ self.crop_bottom_y_ = 1.0
+
+ def has_crop_bottom_y(self): return self.has_crop_bottom_y_
+
+ def autolevels(self): return self.autolevels_
+
+ def set_autolevels(self, x):
+ self.has_autolevels_ = 1
+ self.autolevels_ = x
+
+ def clear_autolevels(self):
+ if self.has_autolevels_:
+ self.has_autolevels_ = 0
+ self.autolevels_ = 0
+
+ def has_autolevels(self): return self.has_autolevels_
+
+
+ def MergeFrom(self, x):
+ assert x is not self
+ if (x.has_width()): self.set_width(x.width())
+ if (x.has_height()): self.set_height(x.height())
+ if (x.has_rotate()): self.set_rotate(x.rotate())
+ if (x.has_horizontal_flip()): self.set_horizontal_flip(x.horizontal_flip())
+ if (x.has_vertical_flip()): self.set_vertical_flip(x.vertical_flip())
+ if (x.has_crop_left_x()): self.set_crop_left_x(x.crop_left_x())
+ if (x.has_crop_top_y()): self.set_crop_top_y(x.crop_top_y())
+ if (x.has_crop_right_x()): self.set_crop_right_x(x.crop_right_x())
+ if (x.has_crop_bottom_y()): self.set_crop_bottom_y(x.crop_bottom_y())
+ if (x.has_autolevels()): self.set_autolevels(x.autolevels())
+
+ def Equals(self, x):
+ if x is self: return 1
+ if self.has_width_ != x.has_width_: return 0
+ if self.has_width_ and self.width_ != x.width_: return 0
+ if self.has_height_ != x.has_height_: return 0
+ if self.has_height_ and self.height_ != x.height_: return 0
+ if self.has_rotate_ != x.has_rotate_: return 0
+ if self.has_rotate_ and self.rotate_ != x.rotate_: return 0
+ if self.has_horizontal_flip_ != x.has_horizontal_flip_: return 0
+ if self.has_horizontal_flip_ and self.horizontal_flip_ != x.horizontal_flip_: return 0
+ if self.has_vertical_flip_ != x.has_vertical_flip_: return 0
+ if self.has_vertical_flip_ and self.vertical_flip_ != x.vertical_flip_: return 0
+ if self.has_crop_left_x_ != x.has_crop_left_x_: return 0
+ if self.has_crop_left_x_ and self.crop_left_x_ != x.crop_left_x_: return 0
+ if self.has_crop_top_y_ != x.has_crop_top_y_: return 0
+ if self.has_crop_top_y_ and self.crop_top_y_ != x.crop_top_y_: return 0
+ if self.has_crop_right_x_ != x.has_crop_right_x_: return 0
+ if self.has_crop_right_x_ and self.crop_right_x_ != x.crop_right_x_: return 0
+ if self.has_crop_bottom_y_ != x.has_crop_bottom_y_: return 0
+ if self.has_crop_bottom_y_ and self.crop_bottom_y_ != x.crop_bottom_y_: return 0
+ if self.has_autolevels_ != x.has_autolevels_: return 0
+ if self.has_autolevels_ and self.autolevels_ != x.autolevels_: return 0
+ return 1
+
+ def IsInitialized(self, debug_strs=None):
+ initialized = 1
+ return initialized
+
+ def ByteSize(self):
+ n = 0
+ if (self.has_width_): n += 1 + self.lengthVarInt64(self.width_)
+ if (self.has_height_): n += 1 + self.lengthVarInt64(self.height_)
+ if (self.has_rotate_): n += 1 + self.lengthVarInt64(self.rotate_)
+ if (self.has_horizontal_flip_): n += 2
+ if (self.has_vertical_flip_): n += 2
+ if (self.has_crop_left_x_): n += 5
+ if (self.has_crop_top_y_): n += 5
+ if (self.has_crop_right_x_): n += 5
+ if (self.has_crop_bottom_y_): n += 5
+ if (self.has_autolevels_): n += 2
+ return n + 0
+
+ def Clear(self):
+ self.clear_width()
+ self.clear_height()
+ self.clear_rotate()
+ self.clear_horizontal_flip()
+ self.clear_vertical_flip()
+ self.clear_crop_left_x()
+ self.clear_crop_top_y()
+ self.clear_crop_right_x()
+ self.clear_crop_bottom_y()
+ self.clear_autolevels()
+
+ def OutputUnchecked(self, out):
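+    # Each putVarInt32(N) below emits a protocol buffer wire tag, where
+    # N = (field_number << 3) | wire_type; e.g. 8 is field 1 as a varint
+    # and 53 is field 6 as a 32-bit float (wire type 5).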
+ if (self.has_width_):
+ out.putVarInt32(8)
+ out.putVarInt32(self.width_)
+ if (self.has_height_):
+ out.putVarInt32(16)
+ out.putVarInt32(self.height_)
+ if (self.has_rotate_):
+ out.putVarInt32(24)
+ out.putVarInt32(self.rotate_)
+ if (self.has_horizontal_flip_):
+ out.putVarInt32(32)
+ out.putBoolean(self.horizontal_flip_)
+ if (self.has_vertical_flip_):
+ out.putVarInt32(40)
+ out.putBoolean(self.vertical_flip_)
+ if (self.has_crop_left_x_):
+ out.putVarInt32(53)
+ out.putFloat(self.crop_left_x_)
+ if (self.has_crop_top_y_):
+ out.putVarInt32(61)
+ out.putFloat(self.crop_top_y_)
+ if (self.has_crop_right_x_):
+ out.putVarInt32(69)
+ out.putFloat(self.crop_right_x_)
+ if (self.has_crop_bottom_y_):
+ out.putVarInt32(77)
+ out.putFloat(self.crop_bottom_y_)
+ if (self.has_autolevels_):
+ out.putVarInt32(80)
+ out.putBoolean(self.autolevels_)
+
+ def TryMerge(self, d):
+ while d.avail() > 0:
+ tt = d.getVarInt32()
+ if tt == 8:
+ self.set_width(d.getVarInt32())
+ continue
+ if tt == 16:
+ self.set_height(d.getVarInt32())
+ continue
+ if tt == 24:
+ self.set_rotate(d.getVarInt32())
+ continue
+ if tt == 32:
+ self.set_horizontal_flip(d.getBoolean())
+ continue
+ if tt == 40:
+ self.set_vertical_flip(d.getBoolean())
+ continue
+ if tt == 53:
+ self.set_crop_left_x(d.getFloat())
+ continue
+ if tt == 61:
+ self.set_crop_top_y(d.getFloat())
+ continue
+ if tt == 69:
+ self.set_crop_right_x(d.getFloat())
+ continue
+ if tt == 77:
+ self.set_crop_bottom_y(d.getFloat())
+ continue
+ if tt == 80:
+ self.set_autolevels(d.getBoolean())
+ continue
+ if (tt == 0): raise ProtocolBuffer.ProtocolBufferDecodeError
+ d.skipData(tt)
+
+
+ def __str__(self, prefix="", printElemNumber=0):
+ res=""
+ if self.has_width_: res+=prefix+("width: %s\n" % self.DebugFormatInt32(self.width_))
+ if self.has_height_: res+=prefix+("height: %s\n" % self.DebugFormatInt32(self.height_))
+ if self.has_rotate_: res+=prefix+("rotate: %s\n" % self.DebugFormatInt32(self.rotate_))
+ if self.has_horizontal_flip_: res+=prefix+("horizontal_flip: %s\n" % self.DebugFormatBool(self.horizontal_flip_))
+ if self.has_vertical_flip_: res+=prefix+("vertical_flip: %s\n" % self.DebugFormatBool(self.vertical_flip_))
+ if self.has_crop_left_x_: res+=prefix+("crop_left_x: %s\n" % self.DebugFormatFloat(self.crop_left_x_))
+ if self.has_crop_top_y_: res+=prefix+("crop_top_y: %s\n" % self.DebugFormatFloat(self.crop_top_y_))
+ if self.has_crop_right_x_: res+=prefix+("crop_right_x: %s\n" % self.DebugFormatFloat(self.crop_right_x_))
+ if self.has_crop_bottom_y_: res+=prefix+("crop_bottom_y: %s\n" % self.DebugFormatFloat(self.crop_bottom_y_))
+ if self.has_autolevels_: res+=prefix+("autolevels: %s\n" % self.DebugFormatBool(self.autolevels_))
+ return res
+
+
+ def _BuildTagLookupTable(sparse, maxtag, default=None):
+ return tuple([sparse.get(i, default) for i in xrange(0, 1+maxtag)])
+
+ kwidth = 1
+ kheight = 2
+ krotate = 3
+ khorizontal_flip = 4
+ kvertical_flip = 5
+ kcrop_left_x = 6
+ kcrop_top_y = 7
+ kcrop_right_x = 8
+ kcrop_bottom_y = 9
+ kautolevels = 10
+
+ _TEXT = _BuildTagLookupTable({
+ 0: "ErrorCode",
+ 1: "width",
+ 2: "height",
+ 3: "rotate",
+ 4: "horizontal_flip",
+ 5: "vertical_flip",
+ 6: "crop_left_x",
+ 7: "crop_top_y",
+ 8: "crop_right_x",
+ 9: "crop_bottom_y",
+ 10: "autolevels",
+ }, 10)
+
+ _TYPES = _BuildTagLookupTable({
+ 0: ProtocolBuffer.Encoder.NUMERIC,
+ 1: ProtocolBuffer.Encoder.NUMERIC,
+ 2: ProtocolBuffer.Encoder.NUMERIC,
+ 3: ProtocolBuffer.Encoder.NUMERIC,
+ 4: ProtocolBuffer.Encoder.NUMERIC,
+ 5: ProtocolBuffer.Encoder.NUMERIC,
+ 6: ProtocolBuffer.Encoder.FLOAT,
+ 7: ProtocolBuffer.Encoder.FLOAT,
+ 8: ProtocolBuffer.Encoder.FLOAT,
+ 9: ProtocolBuffer.Encoder.FLOAT,
+ 10: ProtocolBuffer.Encoder.NUMERIC,
+ }, 10, ProtocolBuffer.Encoder.MAX_TYPE)
+
+ _STYLE = """"""
+ _STYLE_CONTENT_TYPE = """"""
+class ImageData(ProtocolBuffer.ProtocolMessage):
+ has_content_ = 0
+ content_ = ""
+
+ def __init__(self, contents=None):
+ if contents is not None: self.MergeFromString(contents)
+
+ def content(self): return self.content_
+
+ def set_content(self, x):
+ self.has_content_ = 1
+ self.content_ = x
+
+ def clear_content(self):
+ if self.has_content_:
+ self.has_content_ = 0
+ self.content_ = ""
+
+ def has_content(self): return self.has_content_
+
+
+ def MergeFrom(self, x):
+ assert x is not self
+ if (x.has_content()): self.set_content(x.content())
+
+ def Equals(self, x):
+ if x is self: return 1
+ if self.has_content_ != x.has_content_: return 0
+ if self.has_content_ and self.content_ != x.content_: return 0
+ return 1
+
+ def IsInitialized(self, debug_strs=None):
+ initialized = 1
+ if (not self.has_content_):
+ initialized = 0
+ if debug_strs is not None:
+ debug_strs.append('Required field: content not set.')
+ return initialized
+
+ def ByteSize(self):
+ n = 0
+ n += self.lengthString(len(self.content_))
+ return n + 1
+
+ def Clear(self):
+ self.clear_content()
+
+ def OutputUnchecked(self, out):
+ out.putVarInt32(10)
+ out.putPrefixedString(self.content_)
+
+ def TryMerge(self, d):
+ while d.avail() > 0:
+ tt = d.getVarInt32()
+ if tt == 10:
+ self.set_content(d.getPrefixedString())
+ continue
+ if (tt == 0): raise ProtocolBuffer.ProtocolBufferDecodeError
+ d.skipData(tt)
+
+
+ def __str__(self, prefix="", printElemNumber=0):
+ res=""
+ if self.has_content_: res+=prefix+("content: %s\n" % self.DebugFormatString(self.content_))
+ return res
+
+
+ def _BuildTagLookupTable(sparse, maxtag, default=None):
+ return tuple([sparse.get(i, default) for i in xrange(0, 1+maxtag)])
+
+ kcontent = 1
+
+ _TEXT = _BuildTagLookupTable({
+ 0: "ErrorCode",
+ 1: "content",
+ }, 1)
+
+ _TYPES = _BuildTagLookupTable({
+ 0: ProtocolBuffer.Encoder.NUMERIC,
+ 1: ProtocolBuffer.Encoder.STRING,
+ }, 1, ProtocolBuffer.Encoder.MAX_TYPE)
+
+ _STYLE = """"""
+ _STYLE_CONTENT_TYPE = """"""
+class OutputSettings(ProtocolBuffer.ProtocolMessage):
+
+ PNG = 0
+ JPEG = 1
+
+ _MIME_TYPE_NAMES = {
+ 0: "PNG",
+ 1: "JPEG",
+ }
+
+ def MIME_TYPE_Name(cls, x): return cls._MIME_TYPE_NAMES.get(x, "")
+ MIME_TYPE_Name = classmethod(MIME_TYPE_Name)
+
+ has_mime_type_ = 0
+ mime_type_ = 0
+
+ def __init__(self, contents=None):
+ if contents is not None: self.MergeFromString(contents)
+
+ def mime_type(self): return self.mime_type_
+
+ def set_mime_type(self, x):
+ self.has_mime_type_ = 1
+ self.mime_type_ = x
+
+ def clear_mime_type(self):
+ if self.has_mime_type_:
+ self.has_mime_type_ = 0
+ self.mime_type_ = 0
+
+ def has_mime_type(self): return self.has_mime_type_
+
+
+ def MergeFrom(self, x):
+ assert x is not self
+ if (x.has_mime_type()): self.set_mime_type(x.mime_type())
+
+ def Equals(self, x):
+ if x is self: return 1
+ if self.has_mime_type_ != x.has_mime_type_: return 0
+ if self.has_mime_type_ and self.mime_type_ != x.mime_type_: return 0
+ return 1
+
+ def IsInitialized(self, debug_strs=None):
+ initialized = 1
+ return initialized
+
+ def ByteSize(self):
+ n = 0
+ if (self.has_mime_type_): n += 1 + self.lengthVarInt64(self.mime_type_)
+ return n + 0
+
+ def Clear(self):
+ self.clear_mime_type()
+
+ def OutputUnchecked(self, out):
+ if (self.has_mime_type_):
+ out.putVarInt32(8)
+ out.putVarInt32(self.mime_type_)
+
+ def TryMerge(self, d):
+ while d.avail() > 0:
+ tt = d.getVarInt32()
+ if tt == 8:
+ self.set_mime_type(d.getVarInt32())
+ continue
+ if (tt == 0): raise ProtocolBuffer.ProtocolBufferDecodeError
+ d.skipData(tt)
+
+
+ def __str__(self, prefix="", printElemNumber=0):
+ res=""
+ if self.has_mime_type_: res+=prefix+("mime_type: %s\n" % self.DebugFormatInt32(self.mime_type_))
+ return res
+
+
+ def _BuildTagLookupTable(sparse, maxtag, default=None):
+ return tuple([sparse.get(i, default) for i in xrange(0, 1+maxtag)])
+
+ kmime_type = 1
+
+ _TEXT = _BuildTagLookupTable({
+ 0: "ErrorCode",
+ 1: "mime_type",
+ }, 1)
+
+ _TYPES = _BuildTagLookupTable({
+ 0: ProtocolBuffer.Encoder.NUMERIC,
+ 1: ProtocolBuffer.Encoder.NUMERIC,
+ }, 1, ProtocolBuffer.Encoder.MAX_TYPE)
+
+ _STYLE = """"""
+ _STYLE_CONTENT_TYPE = """"""
+class ImagesTransformRequest(ProtocolBuffer.ProtocolMessage):
+ has_image_ = 0
+ has_output_ = 0
+
+ def __init__(self, contents=None):
+ self.image_ = ImageData()
+ self.transform_ = []
+ self.output_ = OutputSettings()
+ if contents is not None: self.MergeFromString(contents)
+
+ def image(self): return self.image_
+
+ def mutable_image(self): self.has_image_ = 1; return self.image_
+
+ def clear_image(self):self.has_image_ = 0; self.image_.Clear()
+
+ def has_image(self): return self.has_image_
+
+ def transform_size(self): return len(self.transform_)
+ def transform_list(self): return self.transform_
+
+ def transform(self, i):
+ return self.transform_[i]
+
+ def mutable_transform(self, i):
+ return self.transform_[i]
+
+ def add_transform(self):
+ x = Transform()
+ self.transform_.append(x)
+ return x
+
+ def clear_transform(self):
+ self.transform_ = []
+ def output(self): return self.output_
+
+ def mutable_output(self): self.has_output_ = 1; return self.output_
+
+ def clear_output(self):self.has_output_ = 0; self.output_.Clear()
+
+ def has_output(self): return self.has_output_
+
+
+ def MergeFrom(self, x):
+ assert x is not self
+ if (x.has_image()): self.mutable_image().MergeFrom(x.image())
+ for i in xrange(x.transform_size()): self.add_transform().CopyFrom(x.transform(i))
+ if (x.has_output()): self.mutable_output().MergeFrom(x.output())
+
+ def Equals(self, x):
+ if x is self: return 1
+ if self.has_image_ != x.has_image_: return 0
+ if self.has_image_ and self.image_ != x.image_: return 0
+ if len(self.transform_) != len(x.transform_): return 0
+ for e1, e2 in zip(self.transform_, x.transform_):
+ if e1 != e2: return 0
+ if self.has_output_ != x.has_output_: return 0
+ if self.has_output_ and self.output_ != x.output_: return 0
+ return 1
+
+ def IsInitialized(self, debug_strs=None):
+ initialized = 1
+ if (not self.has_image_):
+ initialized = 0
+ if debug_strs is not None:
+ debug_strs.append('Required field: image not set.')
+ elif not self.image_.IsInitialized(debug_strs): initialized = 0
+ for p in self.transform_:
+ if not p.IsInitialized(debug_strs): initialized=0
+ if (not self.has_output_):
+ initialized = 0
+ if debug_strs is not None:
+ debug_strs.append('Required field: output not set.')
+ elif not self.output_.IsInitialized(debug_strs): initialized = 0
+ return initialized
+
+ def ByteSize(self):
+ n = 0
+ n += self.lengthString(self.image_.ByteSize())
+ n += 1 * len(self.transform_)
+ for i in xrange(len(self.transform_)): n += self.lengthString(self.transform_[i].ByteSize())
+ n += self.lengthString(self.output_.ByteSize())
+ return n + 2
+
+ def Clear(self):
+ self.clear_image()
+ self.clear_transform()
+ self.clear_output()
+
+ def OutputUnchecked(self, out):
+ out.putVarInt32(10)
+ out.putVarInt32(self.image_.ByteSize())
+ self.image_.OutputUnchecked(out)
+ for i in xrange(len(self.transform_)):
+ out.putVarInt32(18)
+ out.putVarInt32(self.transform_[i].ByteSize())
+ self.transform_[i].OutputUnchecked(out)
+ out.putVarInt32(26)
+ out.putVarInt32(self.output_.ByteSize())
+ self.output_.OutputUnchecked(out)
+
+ def TryMerge(self, d):
+ while d.avail() > 0:
+ tt = d.getVarInt32()
+ if tt == 10:
+ length = d.getVarInt32()
+ tmp = ProtocolBuffer.Decoder(d.buffer(), d.pos(), d.pos() + length)
+ d.skip(length)
+ self.mutable_image().TryMerge(tmp)
+ continue
+ if tt == 18:
+ length = d.getVarInt32()
+ tmp = ProtocolBuffer.Decoder(d.buffer(), d.pos(), d.pos() + length)
+ d.skip(length)
+ self.add_transform().TryMerge(tmp)
+ continue
+ if tt == 26:
+ length = d.getVarInt32()
+ tmp = ProtocolBuffer.Decoder(d.buffer(), d.pos(), d.pos() + length)
+ d.skip(length)
+ self.mutable_output().TryMerge(tmp)
+ continue
+ if (tt == 0): raise ProtocolBuffer.ProtocolBufferDecodeError
+ d.skipData(tt)
+
+
+ def __str__(self, prefix="", printElemNumber=0):
+ res=""
+ if self.has_image_:
+ res+=prefix+"image <\n"
+ res+=self.image_.__str__(prefix + " ", printElemNumber)
+ res+=prefix+">\n"
+ cnt=0
+ for e in self.transform_:
+ elm=""
+ if printElemNumber: elm="(%d)" % cnt
+ res+=prefix+("transform%s <\n" % elm)
+ res+=e.__str__(prefix + " ", printElemNumber)
+ res+=prefix+">\n"
+ cnt+=1
+ if self.has_output_:
+ res+=prefix+"output <\n"
+ res+=self.output_.__str__(prefix + " ", printElemNumber)
+ res+=prefix+">\n"
+ return res
+
+
+ def _BuildTagLookupTable(sparse, maxtag, default=None):
+ return tuple([sparse.get(i, default) for i in xrange(0, 1+maxtag)])
+
+ kimage = 1
+ ktransform = 2
+ koutput = 3
+
+ _TEXT = _BuildTagLookupTable({
+ 0: "ErrorCode",
+ 1: "image",
+ 2: "transform",
+ 3: "output",
+ }, 3)
+
+ _TYPES = _BuildTagLookupTable({
+ 0: ProtocolBuffer.Encoder.NUMERIC,
+ 1: ProtocolBuffer.Encoder.STRING,
+ 2: ProtocolBuffer.Encoder.STRING,
+ 3: ProtocolBuffer.Encoder.STRING,
+ }, 3, ProtocolBuffer.Encoder.MAX_TYPE)
+
+ _STYLE = """"""
+ _STYLE_CONTENT_TYPE = """"""
+class ImagesTransformResponse(ProtocolBuffer.ProtocolMessage):
+ has_image_ = 0
+
+ def __init__(self, contents=None):
+ self.image_ = ImageData()
+ if contents is not None: self.MergeFromString(contents)
+
+ def image(self): return self.image_
+
+ def mutable_image(self): self.has_image_ = 1; return self.image_
+
+ def clear_image(self):self.has_image_ = 0; self.image_.Clear()
+
+ def has_image(self): return self.has_image_
+
+
+ def MergeFrom(self, x):
+ assert x is not self
+ if (x.has_image()): self.mutable_image().MergeFrom(x.image())
+
+ def Equals(self, x):
+ if x is self: return 1
+ if self.has_image_ != x.has_image_: return 0
+ if self.has_image_ and self.image_ != x.image_: return 0
+ return 1
+
+ def IsInitialized(self, debug_strs=None):
+ initialized = 1
+ if (not self.has_image_):
+ initialized = 0
+ if debug_strs is not None:
+ debug_strs.append('Required field: image not set.')
+ elif not self.image_.IsInitialized(debug_strs): initialized = 0
+ return initialized
+
+ def ByteSize(self):
+ n = 0
+ n += self.lengthString(self.image_.ByteSize())
+ return n + 1
+
+ def Clear(self):
+ self.clear_image()
+
+ def OutputUnchecked(self, out):
+ out.putVarInt32(10)
+ out.putVarInt32(self.image_.ByteSize())
+ self.image_.OutputUnchecked(out)
+
+ def TryMerge(self, d):
+ while d.avail() > 0:
+ tt = d.getVarInt32()
+ if tt == 10:
+ length = d.getVarInt32()
+ tmp = ProtocolBuffer.Decoder(d.buffer(), d.pos(), d.pos() + length)
+ d.skip(length)
+ self.mutable_image().TryMerge(tmp)
+ continue
+ if (tt == 0): raise ProtocolBuffer.ProtocolBufferDecodeError
+ d.skipData(tt)
+
+
+ def __str__(self, prefix="", printElemNumber=0):
+ res=""
+ if self.has_image_:
+ res+=prefix+"image <\n"
+ res+=self.image_.__str__(prefix + " ", printElemNumber)
+ res+=prefix+">\n"
+ return res
+
+
+ def _BuildTagLookupTable(sparse, maxtag, default=None):
+ return tuple([sparse.get(i, default) for i in xrange(0, 1+maxtag)])
+
+ kimage = 1
+
+ _TEXT = _BuildTagLookupTable({
+ 0: "ErrorCode",
+ 1: "image",
+ }, 1)
+
+ _TYPES = _BuildTagLookupTable({
+ 0: ProtocolBuffer.Encoder.NUMERIC,
+ 1: ProtocolBuffer.Encoder.STRING,
+ }, 1, ProtocolBuffer.Encoder.MAX_TYPE)
+
+ _STYLE = """"""
+ _STYLE_CONTENT_TYPE = """"""
+class CompositeImageOptions(ProtocolBuffer.ProtocolMessage):
+
+ TOP_LEFT = 0
+ TOP = 1
+ TOP_RIGHT = 2
+ LEFT = 3
+ CENTER = 4
+ RIGHT = 5
+ BOTTOM_LEFT = 6
+ BOTTOM = 7
+ BOTTOM_RIGHT = 8
+
+ _ANCHOR_NAMES = {
+ 0: "TOP_LEFT",
+ 1: "TOP",
+ 2: "TOP_RIGHT",
+ 3: "LEFT",
+ 4: "CENTER",
+ 5: "RIGHT",
+ 6: "BOTTOM_LEFT",
+ 7: "BOTTOM",
+ 8: "BOTTOM_RIGHT",
+ }
+
+ def ANCHOR_Name(cls, x): return cls._ANCHOR_NAMES.get(x, "")
+ ANCHOR_Name = classmethod(ANCHOR_Name)
+
+ has_source_index_ = 0
+ source_index_ = 0
+ has_x_offset_ = 0
+ x_offset_ = 0
+ has_y_offset_ = 0
+ y_offset_ = 0
+ has_opacity_ = 0
+ opacity_ = 0.0
+ has_anchor_ = 0
+ anchor_ = 0
+
+ def __init__(self, contents=None):
+ if contents is not None: self.MergeFromString(contents)
+
+ def source_index(self): return self.source_index_
+
+ def set_source_index(self, x):
+ self.has_source_index_ = 1
+ self.source_index_ = x
+
+ def clear_source_index(self):
+ if self.has_source_index_:
+ self.has_source_index_ = 0
+ self.source_index_ = 0
+
+ def has_source_index(self): return self.has_source_index_
+
+ def x_offset(self): return self.x_offset_
+
+ def set_x_offset(self, x):
+ self.has_x_offset_ = 1
+ self.x_offset_ = x
+
+ def clear_x_offset(self):
+ if self.has_x_offset_:
+ self.has_x_offset_ = 0
+ self.x_offset_ = 0
+
+ def has_x_offset(self): return self.has_x_offset_
+
+ def y_offset(self): return self.y_offset_
+
+ def set_y_offset(self, x):
+ self.has_y_offset_ = 1
+ self.y_offset_ = x
+
+ def clear_y_offset(self):
+ if self.has_y_offset_:
+ self.has_y_offset_ = 0
+ self.y_offset_ = 0
+
+ def has_y_offset(self): return self.has_y_offset_
+
+ def opacity(self): return self.opacity_
+
+ def set_opacity(self, x):
+ self.has_opacity_ = 1
+ self.opacity_ = x
+
+ def clear_opacity(self):
+ if self.has_opacity_:
+ self.has_opacity_ = 0
+ self.opacity_ = 0.0
+
+ def has_opacity(self): return self.has_opacity_
+
+ def anchor(self): return self.anchor_
+
+ def set_anchor(self, x):
+ self.has_anchor_ = 1
+ self.anchor_ = x
+
+ def clear_anchor(self):
+ if self.has_anchor_:
+ self.has_anchor_ = 0
+ self.anchor_ = 0
+
+ def has_anchor(self): return self.has_anchor_
+
+
+ def MergeFrom(self, x):
+ assert x is not self
+ if (x.has_source_index()): self.set_source_index(x.source_index())
+ if (x.has_x_offset()): self.set_x_offset(x.x_offset())
+ if (x.has_y_offset()): self.set_y_offset(x.y_offset())
+ if (x.has_opacity()): self.set_opacity(x.opacity())
+ if (x.has_anchor()): self.set_anchor(x.anchor())
+
+ def Equals(self, x):
+ if x is self: return 1
+ if self.has_source_index_ != x.has_source_index_: return 0
+ if self.has_source_index_ and self.source_index_ != x.source_index_: return 0
+ if self.has_x_offset_ != x.has_x_offset_: return 0
+ if self.has_x_offset_ and self.x_offset_ != x.x_offset_: return 0
+ if self.has_y_offset_ != x.has_y_offset_: return 0
+ if self.has_y_offset_ and self.y_offset_ != x.y_offset_: return 0
+ if self.has_opacity_ != x.has_opacity_: return 0
+ if self.has_opacity_ and self.opacity_ != x.opacity_: return 0
+ if self.has_anchor_ != x.has_anchor_: return 0
+ if self.has_anchor_ and self.anchor_ != x.anchor_: return 0
+ return 1
+
+ def IsInitialized(self, debug_strs=None):
+ initialized = 1
+ if (not self.has_source_index_):
+ initialized = 0
+ if debug_strs is not None:
+ debug_strs.append('Required field: source_index not set.')
+ if (not self.has_x_offset_):
+ initialized = 0
+ if debug_strs is not None:
+ debug_strs.append('Required field: x_offset not set.')
+ if (not self.has_y_offset_):
+ initialized = 0
+ if debug_strs is not None:
+ debug_strs.append('Required field: y_offset not set.')
+ if (not self.has_opacity_):
+ initialized = 0
+ if debug_strs is not None:
+ debug_strs.append('Required field: opacity not set.')
+ if (not self.has_anchor_):
+ initialized = 0
+ if debug_strs is not None:
+ debug_strs.append('Required field: anchor not set.')
+ return initialized
+
+ def ByteSize(self):
+ n = 0
+ n += self.lengthVarInt64(self.source_index_)
+ n += self.lengthVarInt64(self.x_offset_)
+ n += self.lengthVarInt64(self.y_offset_)
+ n += self.lengthVarInt64(self.anchor_)
+ return n + 9
+
+ def Clear(self):
+ self.clear_source_index()
+ self.clear_x_offset()
+ self.clear_y_offset()
+ self.clear_opacity()
+ self.clear_anchor()
+
+ def OutputUnchecked(self, out):
+ out.putVarInt32(8)
+ out.putVarInt32(self.source_index_)
+ out.putVarInt32(16)
+ out.putVarInt32(self.x_offset_)
+ out.putVarInt32(24)
+ out.putVarInt32(self.y_offset_)
+ out.putVarInt32(37)
+ out.putFloat(self.opacity_)
+ out.putVarInt32(40)
+ out.putVarInt32(self.anchor_)
+
+ def TryMerge(self, d):
+ while d.avail() > 0:
+ tt = d.getVarInt32()
+ if tt == 8:
+ self.set_source_index(d.getVarInt32())
+ continue
+ if tt == 16:
+ self.set_x_offset(d.getVarInt32())
+ continue
+ if tt == 24:
+ self.set_y_offset(d.getVarInt32())
+ continue
+ if tt == 37:
+ self.set_opacity(d.getFloat())
+ continue
+ if tt == 40:
+ self.set_anchor(d.getVarInt32())
+ continue
+ if (tt == 0): raise ProtocolBuffer.ProtocolBufferDecodeError
+ d.skipData(tt)
+
+
+ def __str__(self, prefix="", printElemNumber=0):
+ res=""
+ if self.has_source_index_: res+=prefix+("source_index: %s\n" % self.DebugFormatInt32(self.source_index_))
+ if self.has_x_offset_: res+=prefix+("x_offset: %s\n" % self.DebugFormatInt32(self.x_offset_))
+ if self.has_y_offset_: res+=prefix+("y_offset: %s\n" % self.DebugFormatInt32(self.y_offset_))
+ if self.has_opacity_: res+=prefix+("opacity: %s\n" % self.DebugFormatFloat(self.opacity_))
+ if self.has_anchor_: res+=prefix+("anchor: %s\n" % self.DebugFormatInt32(self.anchor_))
+ return res
+
+
+ def _BuildTagLookupTable(sparse, maxtag, default=None):
+ return tuple([sparse.get(i, default) for i in xrange(0, 1+maxtag)])
+
+ ksource_index = 1
+ kx_offset = 2
+ ky_offset = 3
+ kopacity = 4
+ kanchor = 5
+
+ _TEXT = _BuildTagLookupTable({
+ 0: "ErrorCode",
+ 1: "source_index",
+ 2: "x_offset",
+ 3: "y_offset",
+ 4: "opacity",
+ 5: "anchor",
+ }, 5)
+
+ _TYPES = _BuildTagLookupTable({
+ 0: ProtocolBuffer.Encoder.NUMERIC,
+ 1: ProtocolBuffer.Encoder.NUMERIC,
+ 2: ProtocolBuffer.Encoder.NUMERIC,
+ 3: ProtocolBuffer.Encoder.NUMERIC,
+ 4: ProtocolBuffer.Encoder.FLOAT,
+ 5: ProtocolBuffer.Encoder.NUMERIC,
+ }, 5, ProtocolBuffer.Encoder.MAX_TYPE)
+
+ _STYLE = """"""
+ _STYLE_CONTENT_TYPE = """"""
+class ImagesCanvas(ProtocolBuffer.ProtocolMessage):
+ has_width_ = 0
+ width_ = 0
+ has_height_ = 0
+ height_ = 0
+ has_output_ = 0
+ has_color_ = 0
+ color_ = -1
+
+ def __init__(self, contents=None):
+ self.output_ = OutputSettings()
+ if contents is not None: self.MergeFromString(contents)
+
+ def width(self): return self.width_
+
+ def set_width(self, x):
+ self.has_width_ = 1
+ self.width_ = x
+
+ def clear_width(self):
+ if self.has_width_:
+ self.has_width_ = 0
+ self.width_ = 0
+
+ def has_width(self): return self.has_width_
+
+ def height(self): return self.height_
+
+ def set_height(self, x):
+ self.has_height_ = 1
+ self.height_ = x
+
+ def clear_height(self):
+ if self.has_height_:
+ self.has_height_ = 0
+ self.height_ = 0
+
+ def has_height(self): return self.has_height_
+
+ def output(self): return self.output_
+
+ def mutable_output(self): self.has_output_ = 1; return self.output_
+
+ def clear_output(self):self.has_output_ = 0; self.output_.Clear()
+
+ def has_output(self): return self.has_output_
+
+ def color(self): return self.color_
+
+ def set_color(self, x):
+ self.has_color_ = 1
+ self.color_ = x
+
+ def clear_color(self):
+ if self.has_color_:
+ self.has_color_ = 0
+ self.color_ = -1
+
+ def has_color(self): return self.has_color_
+
+
+ def MergeFrom(self, x):
+ assert x is not self
+ if (x.has_width()): self.set_width(x.width())
+ if (x.has_height()): self.set_height(x.height())
+ if (x.has_output()): self.mutable_output().MergeFrom(x.output())
+ if (x.has_color()): self.set_color(x.color())
+
+ def Equals(self, x):
+ if x is self: return 1
+ if self.has_width_ != x.has_width_: return 0
+ if self.has_width_ and self.width_ != x.width_: return 0
+ if self.has_height_ != x.has_height_: return 0
+ if self.has_height_ and self.height_ != x.height_: return 0
+ if self.has_output_ != x.has_output_: return 0
+ if self.has_output_ and self.output_ != x.output_: return 0
+ if self.has_color_ != x.has_color_: return 0
+ if self.has_color_ and self.color_ != x.color_: return 0
+ return 1
+
+ def IsInitialized(self, debug_strs=None):
+ initialized = 1
+ if (not self.has_width_):
+ initialized = 0
+ if debug_strs is not None:
+ debug_strs.append('Required field: width not set.')
+ if (not self.has_height_):
+ initialized = 0
+ if debug_strs is not None:
+ debug_strs.append('Required field: height not set.')
+ if (not self.has_output_):
+ initialized = 0
+ if debug_strs is not None:
+ debug_strs.append('Required field: output not set.')
+ elif not self.output_.IsInitialized(debug_strs): initialized = 0
+ return initialized
+
+ def ByteSize(self):
+ n = 0
+ n += self.lengthVarInt64(self.width_)
+ n += self.lengthVarInt64(self.height_)
+ n += self.lengthString(self.output_.ByteSize())
+ if (self.has_color_): n += 1 + self.lengthVarInt64(self.color_)
+ return n + 3
+
+ def Clear(self):
+ self.clear_width()
+ self.clear_height()
+ self.clear_output()
+ self.clear_color()
+
+ def OutputUnchecked(self, out):
+ out.putVarInt32(8)
+ out.putVarInt32(self.width_)
+ out.putVarInt32(16)
+ out.putVarInt32(self.height_)
+ out.putVarInt32(26)
+ out.putVarInt32(self.output_.ByteSize())
+ self.output_.OutputUnchecked(out)
+ if (self.has_color_):
+ out.putVarInt32(32)
+ out.putVarInt32(self.color_)
+
+ def TryMerge(self, d):
+ while d.avail() > 0:
+ tt = d.getVarInt32()
+ if tt == 8:
+ self.set_width(d.getVarInt32())
+ continue
+ if tt == 16:
+ self.set_height(d.getVarInt32())
+ continue
+ if tt == 26:
+ length = d.getVarInt32()
+ tmp = ProtocolBuffer.Decoder(d.buffer(), d.pos(), d.pos() + length)
+ d.skip(length)
+ self.mutable_output().TryMerge(tmp)
+ continue
+ if tt == 32:
+ self.set_color(d.getVarInt32())
+ continue
+ if (tt == 0): raise ProtocolBuffer.ProtocolBufferDecodeError
+ d.skipData(tt)
+
+
+ def __str__(self, prefix="", printElemNumber=0):
+ res=""
+ if self.has_width_: res+=prefix+("width: %s\n" % self.DebugFormatInt32(self.width_))
+ if self.has_height_: res+=prefix+("height: %s\n" % self.DebugFormatInt32(self.height_))
+ if self.has_output_:
+ res+=prefix+"output <\n"
+ res+=self.output_.__str__(prefix + " ", printElemNumber)
+ res+=prefix+">\n"
+ if self.has_color_: res+=prefix+("color: %s\n" % self.DebugFormatInt32(self.color_))
+ return res
+
+
+ def _BuildTagLookupTable(sparse, maxtag, default=None):
+ return tuple([sparse.get(i, default) for i in xrange(0, 1+maxtag)])
+
+ kwidth = 1
+ kheight = 2
+ koutput = 3
+ kcolor = 4
+
+ _TEXT = _BuildTagLookupTable({
+ 0: "ErrorCode",
+ 1: "width",
+ 2: "height",
+ 3: "output",
+ 4: "color",
+ }, 4)
+
+ _TYPES = _BuildTagLookupTable({
+ 0: ProtocolBuffer.Encoder.NUMERIC,
+ 1: ProtocolBuffer.Encoder.NUMERIC,
+ 2: ProtocolBuffer.Encoder.NUMERIC,
+ 3: ProtocolBuffer.Encoder.STRING,
+ 4: ProtocolBuffer.Encoder.NUMERIC,
+ }, 4, ProtocolBuffer.Encoder.MAX_TYPE)
+
+ _STYLE = """"""
+ _STYLE_CONTENT_TYPE = """"""
+class ImagesCompositeRequest(ProtocolBuffer.ProtocolMessage):
+ has_canvas_ = 0
+
+ def __init__(self, contents=None):
+ self.image_ = []
+ self.options_ = []
+ self.canvas_ = ImagesCanvas()
+ if contents is not None: self.MergeFromString(contents)
+
+ def image_size(self): return len(self.image_)
+ def image_list(self): return self.image_
+
+ def image(self, i):
+ return self.image_[i]
+
+ def mutable_image(self, i):
+ return self.image_[i]
+
+ def add_image(self):
+ x = ImageData()
+ self.image_.append(x)
+ return x
+
+ def clear_image(self):
+ self.image_ = []
+ def options_size(self): return len(self.options_)
+ def options_list(self): return self.options_
+
+ def options(self, i):
+ return self.options_[i]
+
+ def mutable_options(self, i):
+ return self.options_[i]
+
+ def add_options(self):
+ x = CompositeImageOptions()
+ self.options_.append(x)
+ return x
+
+ def clear_options(self):
+ self.options_ = []
+ def canvas(self): return self.canvas_
+
+ def mutable_canvas(self): self.has_canvas_ = 1; return self.canvas_
+
+ def clear_canvas(self):self.has_canvas_ = 0; self.canvas_.Clear()
+
+ def has_canvas(self): return self.has_canvas_
+
+
+ def MergeFrom(self, x):
+ assert x is not self
+ for i in xrange(x.image_size()): self.add_image().CopyFrom(x.image(i))
+ for i in xrange(x.options_size()): self.add_options().CopyFrom(x.options(i))
+ if (x.has_canvas()): self.mutable_canvas().MergeFrom(x.canvas())
+
+ def Equals(self, x):
+ if x is self: return 1
+ if len(self.image_) != len(x.image_): return 0
+ for e1, e2 in zip(self.image_, x.image_):
+ if e1 != e2: return 0
+ if len(self.options_) != len(x.options_): return 0
+ for e1, e2 in zip(self.options_, x.options_):
+ if e1 != e2: return 0
+ if self.has_canvas_ != x.has_canvas_: return 0
+ if self.has_canvas_ and self.canvas_ != x.canvas_: return 0
+ return 1
+
+ def IsInitialized(self, debug_strs=None):
+ initialized = 1
+ for p in self.image_:
+ if not p.IsInitialized(debug_strs): initialized=0
+ for p in self.options_:
+ if not p.IsInitialized(debug_strs): initialized=0
+ if (not self.has_canvas_):
+ initialized = 0
+ if debug_strs is not None:
+ debug_strs.append('Required field: canvas not set.')
+ elif not self.canvas_.IsInitialized(debug_strs): initialized = 0
+ return initialized
+
+ def ByteSize(self):
+ n = 0
+ n += 1 * len(self.image_)
+ for i in xrange(len(self.image_)): n += self.lengthString(self.image_[i].ByteSize())
+ n += 1 * len(self.options_)
+ for i in xrange(len(self.options_)): n += self.lengthString(self.options_[i].ByteSize())
+ n += self.lengthString(self.canvas_.ByteSize())
+ return n + 1
+
+ def Clear(self):
+ self.clear_image()
+ self.clear_options()
+ self.clear_canvas()
+
+ def OutputUnchecked(self, out):
+ for i in xrange(len(self.image_)):
+ out.putVarInt32(10)
+ out.putVarInt32(self.image_[i].ByteSize())
+ self.image_[i].OutputUnchecked(out)
+ for i in xrange(len(self.options_)):
+ out.putVarInt32(18)
+ out.putVarInt32(self.options_[i].ByteSize())
+ self.options_[i].OutputUnchecked(out)
+ out.putVarInt32(26)
+ out.putVarInt32(self.canvas_.ByteSize())
+ self.canvas_.OutputUnchecked(out)
+
+ def TryMerge(self, d):
+ while d.avail() > 0:
+ tt = d.getVarInt32()
+ if tt == 10:
+ length = d.getVarInt32()
+ tmp = ProtocolBuffer.Decoder(d.buffer(), d.pos(), d.pos() + length)
+ d.skip(length)
+ self.add_image().TryMerge(tmp)
+ continue
+ if tt == 18:
+ length = d.getVarInt32()
+ tmp = ProtocolBuffer.Decoder(d.buffer(), d.pos(), d.pos() + length)
+ d.skip(length)
+ self.add_options().TryMerge(tmp)
+ continue
+ if tt == 26:
+ length = d.getVarInt32()
+ tmp = ProtocolBuffer.Decoder(d.buffer(), d.pos(), d.pos() + length)
+ d.skip(length)
+ self.mutable_canvas().TryMerge(tmp)
+ continue
+ if (tt == 0): raise ProtocolBuffer.ProtocolBufferDecodeError
+ d.skipData(tt)
+
+
+ def __str__(self, prefix="", printElemNumber=0):
+ res=""
+ cnt=0
+ for e in self.image_:
+ elm=""
+ if printElemNumber: elm="(%d)" % cnt
+ res+=prefix+("image%s <\n" % elm)
+ res+=e.__str__(prefix + " ", printElemNumber)
+ res+=prefix+">\n"
+ cnt+=1
+ cnt=0
+ for e in self.options_:
+ elm=""
+ if printElemNumber: elm="(%d)" % cnt
+ res+=prefix+("options%s <\n" % elm)
+ res+=e.__str__(prefix + " ", printElemNumber)
+ res+=prefix+">\n"
+ cnt+=1
+ if self.has_canvas_:
+ res+=prefix+"canvas <\n"
+ res+=self.canvas_.__str__(prefix + " ", printElemNumber)
+ res+=prefix+">\n"
+ return res
+
+
+ def _BuildTagLookupTable(sparse, maxtag, default=None):
+ return tuple([sparse.get(i, default) for i in xrange(0, 1+maxtag)])
+
+ kimage = 1
+ koptions = 2
+ kcanvas = 3
+
+ _TEXT = _BuildTagLookupTable({
+ 0: "ErrorCode",
+ 1: "image",
+ 2: "options",
+ 3: "canvas",
+ }, 3)
+
+ _TYPES = _BuildTagLookupTable({
+ 0: ProtocolBuffer.Encoder.NUMERIC,
+ 1: ProtocolBuffer.Encoder.STRING,
+ 2: ProtocolBuffer.Encoder.STRING,
+ 3: ProtocolBuffer.Encoder.STRING,
+ }, 3, ProtocolBuffer.Encoder.MAX_TYPE)
+
+ _STYLE = """"""
+ _STYLE_CONTENT_TYPE = """"""
+class ImagesCompositeResponse(ProtocolBuffer.ProtocolMessage):
+ has_image_ = 0
+
+ def __init__(self, contents=None):
+ self.image_ = ImageData()
+ if contents is not None: self.MergeFromString(contents)
+
+ def image(self): return self.image_
+
+ def mutable_image(self): self.has_image_ = 1; return self.image_
+
+ def clear_image(self):self.has_image_ = 0; self.image_.Clear()
+
+ def has_image(self): return self.has_image_
+
+
+ def MergeFrom(self, x):
+ assert x is not self
+ if (x.has_image()): self.mutable_image().MergeFrom(x.image())
+
+ def Equals(self, x):
+ if x is self: return 1
+ if self.has_image_ != x.has_image_: return 0
+ if self.has_image_ and self.image_ != x.image_: return 0
+ return 1
+
+ def IsInitialized(self, debug_strs=None):
+ initialized = 1
+ if (not self.has_image_):
+ initialized = 0
+ if debug_strs is not None:
+ debug_strs.append('Required field: image not set.')
+ elif not self.image_.IsInitialized(debug_strs): initialized = 0
+ return initialized
+
+ def ByteSize(self):
+ n = 0
+ n += self.lengthString(self.image_.ByteSize())
+ return n + 1
+
+ def Clear(self):
+ self.clear_image()
+
+ def OutputUnchecked(self, out):
+ out.putVarInt32(10)
+ out.putVarInt32(self.image_.ByteSize())
+ self.image_.OutputUnchecked(out)
+
+ def TryMerge(self, d):
+ while d.avail() > 0:
+ tt = d.getVarInt32()
+ if tt == 10:
+ length = d.getVarInt32()
+ tmp = ProtocolBuffer.Decoder(d.buffer(), d.pos(), d.pos() + length)
+ d.skip(length)
+ self.mutable_image().TryMerge(tmp)
+ continue
+ if (tt == 0): raise ProtocolBuffer.ProtocolBufferDecodeError
+ d.skipData(tt)
+
+
+ def __str__(self, prefix="", printElemNumber=0):
+ res=""
+ if self.has_image_:
+ res+=prefix+"image <\n"
+ res+=self.image_.__str__(prefix + " ", printElemNumber)
+ res+=prefix+">\n"
+ return res
+
+
+ def _BuildTagLookupTable(sparse, maxtag, default=None):
+ return tuple([sparse.get(i, default) for i in xrange(0, 1+maxtag)])
+
+ kimage = 1
+
+ _TEXT = _BuildTagLookupTable({
+ 0: "ErrorCode",
+ 1: "image",
+ }, 1)
+
+ _TYPES = _BuildTagLookupTable({
+ 0: ProtocolBuffer.Encoder.NUMERIC,
+ 1: ProtocolBuffer.Encoder.STRING,
+ }, 1, ProtocolBuffer.Encoder.MAX_TYPE)
+
+ _STYLE = """"""
+ _STYLE_CONTENT_TYPE = """"""
+class ImagesHistogramRequest(ProtocolBuffer.ProtocolMessage):
+ has_image_ = 0
+
+ def __init__(self, contents=None):
+ self.image_ = ImageData()
+ if contents is not None: self.MergeFromString(contents)
+
+ def image(self): return self.image_
+
+ def mutable_image(self): self.has_image_ = 1; return self.image_
+
+ def clear_image(self):self.has_image_ = 0; self.image_.Clear()
+
+ def has_image(self): return self.has_image_
+
+
+ def MergeFrom(self, x):
+ assert x is not self
+ if (x.has_image()): self.mutable_image().MergeFrom(x.image())
+
+ def Equals(self, x):
+ if x is self: return 1
+ if self.has_image_ != x.has_image_: return 0
+ if self.has_image_ and self.image_ != x.image_: return 0
+ return 1
+
+ def IsInitialized(self, debug_strs=None):
+ initialized = 1
+ if (not self.has_image_):
+ initialized = 0
+ if debug_strs is not None:
+ debug_strs.append('Required field: image not set.')
+ elif not self.image_.IsInitialized(debug_strs): initialized = 0
+ return initialized
+
+ def ByteSize(self):
+ n = 0
+ n += self.lengthString(self.image_.ByteSize())
+ return n + 1
+
+ def Clear(self):
+ self.clear_image()
+
+ def OutputUnchecked(self, out):
+ out.putVarInt32(10)
+ out.putVarInt32(self.image_.ByteSize())
+ self.image_.OutputUnchecked(out)
+
+ def TryMerge(self, d):
+ while d.avail() > 0:
+ tt = d.getVarInt32()
+ if tt == 10:
+ length = d.getVarInt32()
+ tmp = ProtocolBuffer.Decoder(d.buffer(), d.pos(), d.pos() + length)
+ d.skip(length)
+ self.mutable_image().TryMerge(tmp)
+ continue
+ if (tt == 0): raise ProtocolBuffer.ProtocolBufferDecodeError
+ d.skipData(tt)
+
+
+ def __str__(self, prefix="", printElemNumber=0):
+ res=""
+ if self.has_image_:
+ res+=prefix+"image <\n"
+ res+=self.image_.__str__(prefix + " ", printElemNumber)
+ res+=prefix+">\n"
+ return res
+
+
+ def _BuildTagLookupTable(sparse, maxtag, default=None):
+ return tuple([sparse.get(i, default) for i in xrange(0, 1+maxtag)])
+
+ kimage = 1
+
+ _TEXT = _BuildTagLookupTable({
+ 0: "ErrorCode",
+ 1: "image",
+ }, 1)
+
+ _TYPES = _BuildTagLookupTable({
+ 0: ProtocolBuffer.Encoder.NUMERIC,
+ 1: ProtocolBuffer.Encoder.STRING,
+ }, 1, ProtocolBuffer.Encoder.MAX_TYPE)
+
+ _STYLE = """"""
+ _STYLE_CONTENT_TYPE = """"""
+class ImagesHistogram(ProtocolBuffer.ProtocolMessage):
+
+ def __init__(self, contents=None):
+ self.red_ = []
+ self.green_ = []
+ self.blue_ = []
+ if contents is not None: self.MergeFromString(contents)
+
+ def red_size(self): return len(self.red_)
+ def red_list(self): return self.red_
+
+ def red(self, i):
+ return self.red_[i]
+
+ def set_red(self, i, x):
+ self.red_[i] = x
+
+ def add_red(self, x):
+ self.red_.append(x)
+
+ def clear_red(self):
+ self.red_ = []
+
+ def green_size(self): return len(self.green_)
+ def green_list(self): return self.green_
+
+ def green(self, i):
+ return self.green_[i]
+
+ def set_green(self, i, x):
+ self.green_[i] = x
+
+ def add_green(self, x):
+ self.green_.append(x)
+
+ def clear_green(self):
+ self.green_ = []
+
+ def blue_size(self): return len(self.blue_)
+ def blue_list(self): return self.blue_
+
+ def blue(self, i):
+ return self.blue_[i]
+
+ def set_blue(self, i, x):
+ self.blue_[i] = x
+
+ def add_blue(self, x):
+ self.blue_.append(x)
+
+ def clear_blue(self):
+ self.blue_ = []
+
+
+ def MergeFrom(self, x):
+ assert x is not self
+ for i in xrange(x.red_size()): self.add_red(x.red(i))
+ for i in xrange(x.green_size()): self.add_green(x.green(i))
+ for i in xrange(x.blue_size()): self.add_blue(x.blue(i))
+
+ def Equals(self, x):
+ if x is self: return 1
+ if len(self.red_) != len(x.red_): return 0
+ for e1, e2 in zip(self.red_, x.red_):
+ if e1 != e2: return 0
+ if len(self.green_) != len(x.green_): return 0
+ for e1, e2 in zip(self.green_, x.green_):
+ if e1 != e2: return 0
+ if len(self.blue_) != len(x.blue_): return 0
+ for e1, e2 in zip(self.blue_, x.blue_):
+ if e1 != e2: return 0
+ return 1
+
+ def IsInitialized(self, debug_strs=None):
+ initialized = 1
+ return initialized
+
+ def ByteSize(self):
+ n = 0
+ n += 1 * len(self.red_)
+ for i in xrange(len(self.red_)): n += self.lengthVarInt64(self.red_[i])
+ n += 1 * len(self.green_)
+ for i in xrange(len(self.green_)): n += self.lengthVarInt64(self.green_[i])
+ n += 1 * len(self.blue_)
+ for i in xrange(len(self.blue_)): n += self.lengthVarInt64(self.blue_[i])
+ return n + 0
+
+ def Clear(self):
+ self.clear_red()
+ self.clear_green()
+ self.clear_blue()
+
+ def OutputUnchecked(self, out):
+ for i in xrange(len(self.red_)):
+ out.putVarInt32(8)
+ out.putVarInt32(self.red_[i])
+ for i in xrange(len(self.green_)):
+ out.putVarInt32(16)
+ out.putVarInt32(self.green_[i])
+ for i in xrange(len(self.blue_)):
+ out.putVarInt32(24)
+ out.putVarInt32(self.blue_[i])
+
+ def TryMerge(self, d):
+ while d.avail() > 0:
+ tt = d.getVarInt32()
+ if tt == 8:
+ self.add_red(d.getVarInt32())
+ continue
+ if tt == 16:
+ self.add_green(d.getVarInt32())
+ continue
+ if tt == 24:
+ self.add_blue(d.getVarInt32())
+ continue
+ if (tt == 0): raise ProtocolBuffer.ProtocolBufferDecodeError
+ d.skipData(tt)
+
+
+ def __str__(self, prefix="", printElemNumber=0):
+ res=""
+ cnt=0
+ for e in self.red_:
+ elm=""
+ if printElemNumber: elm="(%d)" % cnt
+ res+=prefix+("red%s: %s\n" % (elm, self.DebugFormatInt32(e)))
+ cnt+=1
+ cnt=0
+ for e in self.green_:
+ elm=""
+ if printElemNumber: elm="(%d)" % cnt
+ res+=prefix+("green%s: %s\n" % (elm, self.DebugFormatInt32(e)))
+ cnt+=1
+ cnt=0
+ for e in self.blue_:
+ elm=""
+ if printElemNumber: elm="(%d)" % cnt
+ res+=prefix+("blue%s: %s\n" % (elm, self.DebugFormatInt32(e)))
+ cnt+=1
+ return res
+
+
+ def _BuildTagLookupTable(sparse, maxtag, default=None):
+ return tuple([sparse.get(i, default) for i in xrange(0, 1+maxtag)])
+
+ kred = 1
+ kgreen = 2
+ kblue = 3
+
+ _TEXT = _BuildTagLookupTable({
+ 0: "ErrorCode",
+ 1: "red",
+ 2: "green",
+ 3: "blue",
+ }, 3)
+
+ _TYPES = _BuildTagLookupTable({
+ 0: ProtocolBuffer.Encoder.NUMERIC,
+ 1: ProtocolBuffer.Encoder.NUMERIC,
+ 2: ProtocolBuffer.Encoder.NUMERIC,
+ 3: ProtocolBuffer.Encoder.NUMERIC,
+ }, 3, ProtocolBuffer.Encoder.MAX_TYPE)
+
+ _STYLE = """"""
+ _STYLE_CONTENT_TYPE = """"""
+class ImagesHistogramResponse(ProtocolBuffer.ProtocolMessage):
+ has_histogram_ = 0
+
+ def __init__(self, contents=None):
+ self.histogram_ = ImagesHistogram()
+ if contents is not None: self.MergeFromString(contents)
+
+ def histogram(self): return self.histogram_
+
+ def mutable_histogram(self): self.has_histogram_ = 1; return self.histogram_
+
+ def clear_histogram(self):self.has_histogram_ = 0; self.histogram_.Clear()
+
+ def has_histogram(self): return self.has_histogram_
+
+
+ def MergeFrom(self, x):
+ assert x is not self
+ if (x.has_histogram()): self.mutable_histogram().MergeFrom(x.histogram())
+
+ def Equals(self, x):
+ if x is self: return 1
+ if self.has_histogram_ != x.has_histogram_: return 0
+ if self.has_histogram_ and self.histogram_ != x.histogram_: return 0
+ return 1
+
+ def IsInitialized(self, debug_strs=None):
+ initialized = 1
+ if (not self.has_histogram_):
+ initialized = 0
+ if debug_strs is not None:
+ debug_strs.append('Required field: histogram not set.')
+ elif not self.histogram_.IsInitialized(debug_strs): initialized = 0
+ return initialized
+
+ def ByteSize(self):
+ n = 0
+ n += self.lengthString(self.histogram_.ByteSize())
+ return n + 1
+
+ def Clear(self):
+ self.clear_histogram()
+
+ def OutputUnchecked(self, out):
+ out.putVarInt32(10)
+ out.putVarInt32(self.histogram_.ByteSize())
+ self.histogram_.OutputUnchecked(out)
+
+ def TryMerge(self, d):
+ while d.avail() > 0:
+ tt = d.getVarInt32()
+ if tt == 10:
+ length = d.getVarInt32()
+ tmp = ProtocolBuffer.Decoder(d.buffer(), d.pos(), d.pos() + length)
+ d.skip(length)
+ self.mutable_histogram().TryMerge(tmp)
+ continue
+ if (tt == 0): raise ProtocolBuffer.ProtocolBufferDecodeError
+ d.skipData(tt)
+
+
+ def __str__(self, prefix="", printElemNumber=0):
+ res=""
+ if self.has_histogram_:
+ res+=prefix+"histogram <\n"
+ res+=self.histogram_.__str__(prefix + " ", printElemNumber)
+ res+=prefix+">\n"
+ return res
+
+
+ def _BuildTagLookupTable(sparse, maxtag, default=None):
+ return tuple([sparse.get(i, default) for i in xrange(0, 1+maxtag)])
+
+ khistogram = 1
+
+ _TEXT = _BuildTagLookupTable({
+ 0: "ErrorCode",
+ 1: "histogram",
+ }, 1)
+
+ _TYPES = _BuildTagLookupTable({
+ 0: ProtocolBuffer.Encoder.NUMERIC,
+ 1: ProtocolBuffer.Encoder.STRING,
+ }, 1, ProtocolBuffer.Encoder.MAX_TYPE)
+
+ _STYLE = """"""
+ _STYLE_CONTENT_TYPE = """"""
+
+__all__ = ['ImagesServiceError','ImagesServiceTransform','Transform','ImageData','OutputSettings','ImagesTransformRequest','ImagesTransformResponse','CompositeImageOptions','ImagesCanvas','ImagesCompositeRequest','ImagesCompositeResponse','ImagesHistogramRequest','ImagesHistogram','ImagesHistogramResponse']
diff --git a/google_appengine/google/appengine/api/images/images_service_pb.pyc b/google_appengine/google/appengine/api/images/images_service_pb.pyc
new file mode 100644
index 0000000..6b98746
--- /dev/null
+++ b/google_appengine/google/appengine/api/images/images_service_pb.pyc
Binary files differ
diff --git a/google_appengine/google/appengine/api/images/images_stub.py b/google_appengine/google/appengine/api/images/images_stub.py
new file mode 100755
index 0000000..d89f47e
--- /dev/null
+++ b/google_appengine/google/appengine/api/images/images_stub.py
@@ -0,0 +1,411 @@
+#!/usr/bin/env python
+#
+# Copyright 2007 Google Inc.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
+
+"""Stub version of the images API."""
+
+
+
+import logging
+import StringIO
+
+try:
+ import PIL
+ from PIL import _imaging
+ from PIL import Image
+except ImportError:
+ import _imaging
+ import Image
+
+from google.appengine.api import apiproxy_stub
+from google.appengine.api import images
+from google.appengine.api.images import images_service_pb
+from google.appengine.runtime import apiproxy_errors
+
+
+def _ArgbToRgbaTuple(argb):
+ """Convert from a single ARGB value to a tuple containing RGBA.
+
+ Args:
+ argb: Signed 32 bit integer containing an ARGB value.
+
+ Returns:
+ RGBA tuple.
+ """
+ unsigned_argb = argb % 0x100000000
+ return ((unsigned_argb >> 16) & 0xFF,
+ (unsigned_argb >> 8) & 0xFF,
+ unsigned_argb & 0xFF,
+ (unsigned_argb >> 24) & 0xFF)
+
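A worked example of the unpacking above: the modulo maps a possibly negative (signed 32-bit) input onto its unsigned equivalent before the shifts pick out the channels.

    # 0x80FF0000 is ARGB for half-opaque red: alpha 0x80, red 0xFF.
    assert _ArgbToRgbaTuple(0x80FF0000) == (255, 0, 0, 128)
    # The same bit pattern as a signed 32-bit integer gives an identical result.
    assert _ArgbToRgbaTuple(-2130771968) == (255, 0, 0, 128)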
+
+class ImagesServiceStub(apiproxy_stub.APIProxyStub):
+ """Stub version of images API to be used with the dev_appserver."""
+
+ def __init__(self, service_name='images'):
+ """Preloads PIL to load all modules in the unhardened environment.
+
+ Args:
+ service_name: Service name expected for all calls.
+ """
+ super(ImagesServiceStub, self).__init__(service_name)
+ Image.init()
+
+ def _Dynamic_Composite(self, request, response):
+ """Implementation of ImagesService::Composite.
+
+    Based on the documentation of the PIL library at
+ http://www.pythonware.com/library/pil/handbook/index.htm
+
+ Args:
+ request: ImagesCompositeRequest, contains image request info.
+ response: ImagesCompositeResponse, contains transformed image.
+ """
+    width = request.canvas().width()
+    height = request.canvas().height()
+    if (not width or width > 4000 or
+        not height or height > 4000):
+      raise apiproxy_errors.ApplicationError(
+          images_service_pb.ImagesServiceError.BAD_TRANSFORM_DATA)
+    color = _ArgbToRgbaTuple(request.canvas().color())
+    canvas = Image.new("RGBA", (width, height), color)
+    sources = []
+ if not request.image_size():
+ raise apiproxy_errors.ApplicationError(
+ images_service_pb.ImagesServiceError.BAD_TRANSFORM_DATA)
+ if not request.options_size():
+ raise apiproxy_errors.ApplicationError(
+ images_service_pb.ImagesServiceError.BAD_TRANSFORM_DATA)
+ if request.options_size() > images.MAX_COMPOSITES_PER_REQUEST:
+ raise apiproxy_errors.ApplicationError(
+ images_service_pb.ImagesServiceError.BAD_TRANSFORM_DATA)
+ for image in request.image_list():
+ sources.append(self._OpenImage(image.content()))
+
+ for options in request.options_list():
+ if (options.anchor() < images.TOP_LEFT or
+ options.anchor() > images.BOTTOM_RIGHT):
+ raise apiproxy_errors.ApplicationError(
+ images_service_pb.ImagesServiceError.BAD_TRANSFORM_DATA)
+ if options.source_index() >= len(sources) or options.source_index() < 0:
+ raise apiproxy_errors.ApplicationError(
+ images_service_pb.ImagesServiceError.BAD_TRANSFORM_DATA)
+ if options.opacity() < 0 or options.opacity() > 1:
+ raise apiproxy_errors.ApplicationError(
+ images_service_pb.ImagesServiceError.BAD_TRANSFORM_DATA)
+ source = sources[options.source_index()]
+ x_anchor = (options.anchor() % 3) * 0.5
+ y_anchor = (options.anchor() / 3) * 0.5
+ x_offset = int(options.x_offset() + x_anchor * (width - source.size[0]))
+ y_offset = int(options.y_offset() + y_anchor * (height - source.size[1]))
+ alpha = options.opacity() * 255
+ mask = Image.new("L", source.size, alpha)
+ canvas.paste(source, (x_offset, y_offset), mask)
+ response_value = self._EncodeImage(canvas, request.canvas().output())
+ response.mutable_image().set_content(response_value)
+
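The anchor arithmetic in the loop above treats the nine anchor constants as a 3x3 grid: anchor % 3 picks the column and integer division anchor / 3 picks the row, each scaled to 0.0, 0.5 or 1.0. A worked example, assuming (as the range check implies) that TOP_LEFT through BOTTOM_RIGHT are the integers 0 through 8:

    # Center-center (anchor 4): a 100x50 source on a 400x200 canvas, no offsets.
    anchor = 4
    x_offset = int((anchor % 3) * 0.5 * (400 - 100))  # (4 % 3) * 0.5 = 0.5 -> 150
    y_offset = int((anchor / 3) * 0.5 * (200 - 50))   # 4 / 3 == 1 in Python 2 -> 75
    assert (x_offset, y_offset) == (150, 75)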
+ def _Dynamic_Histogram(self, request, response):
+ """Trivial implementation of ImagesService::Histogram.
+
+    Based on the documentation of the PIL library at
+ http://www.pythonware.com/library/pil/handbook/index.htm
+
+ Args:
+ request: ImagesHistogramRequest, contains the image.
+ response: ImagesHistogramResponse, contains histogram of the image.
+ """
+ image = self._OpenImage(request.image().content())
+ img_format = image.format
+ if img_format not in ("BMP", "GIF", "ICO", "JPEG", "PNG", "TIFF"):
+ raise apiproxy_errors.ApplicationError(
+ images_service_pb.ImagesServiceError.NOT_IMAGE)
+ image = image.convert("RGBA")
+ red = [0] * 256
+ green = [0] * 256
+ blue = [0] * 256
+ for pixel in image.getdata():
+ red[int((pixel[0] * pixel[3]) / 255)] += 1
+ green[int((pixel[1] * pixel[3]) / 255)] += 1
+ blue[int((pixel[2] * pixel[3]) / 255)] += 1
+ histogram = response.mutable_histogram()
+ for value in red:
+ histogram.add_red(value)
+ for value in green:
+ histogram.add_green(value)
+ for value in blue:
+ histogram.add_blue(value)
+
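Note that this builds an alpha-weighted histogram: each channel value is scaled by the pixel's alpha before being binned, so fully transparent pixels all land in bin 0. For a single pixel:

    pixel = (255, 0, 0, 128)                    # RGBA: half-opaque pure red
    red_bin = int((pixel[0] * pixel[3]) / 255)  # 255 * 128 / 255 == 128
    assert red_bin == 128                       # binned at intensity 128, not 255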
+ def _Dynamic_Transform(self, request, response):
+ """Trivial implementation of ImagesService::Transform.
+
+    Based on the documentation of the PIL library at
+ http://www.pythonware.com/library/pil/handbook/index.htm
+
+ Args:
+ request: ImagesTransformRequest, contains image request info.
+ response: ImagesTransformResponse, contains transformed image.
+ """
+ original_image = self._OpenImage(request.image().content())
+
+ new_image = self._ProcessTransforms(original_image,
+ request.transform_list())
+
+ response_value = self._EncodeImage(new_image, request.output())
+ response.mutable_image().set_content(response_value)
+
+ def _EncodeImage(self, image, output_encoding):
+ """Encode the given image and return it in string form.
+
+ Args:
+ image: PIL Image object, image to encode.
+ output_encoding: ImagesTransformRequest.OutputSettings object.
+
+ Returns:
+ str with encoded image information in given encoding format.
+ """
+ image_string = StringIO.StringIO()
+
+ image_encoding = "PNG"
+
+ if (output_encoding.mime_type() == images_service_pb.OutputSettings.JPEG):
+ image_encoding = "JPEG"
+
+ image = image.convert("RGB")
+
+ image.save(image_string, image_encoding)
+
+ return image_string.getvalue()
+
+ def _OpenImage(self, image):
+ """Opens an image provided as a string.
+
+ Args:
+ image: image data to be opened
+
+ Raises:
+ apiproxy_errors.ApplicationError if the image cannot be opened or if it
+      is in an unsupported format.
+
+ Returns:
+ Image containing the image data passed in.
+ """
+ if not image:
+ raise apiproxy_errors.ApplicationError(
+ images_service_pb.ImagesServiceError.NOT_IMAGE)
+
+ image = StringIO.StringIO(image)
+ try:
+ image = Image.open(image)
+ except IOError:
+ raise apiproxy_errors.ApplicationError(
+ images_service_pb.ImagesServiceError.BAD_IMAGE_DATA)
+
+ img_format = image.format
+ if img_format not in ("BMP", "GIF", "ICO", "JPEG", "PNG", "TIFF"):
+ raise apiproxy_errors.ApplicationError(
+ images_service_pb.ImagesServiceError.NOT_IMAGE)
+ return image
+
+ def _ValidateCropArg(self, arg):
+ """Check an argument for the Crop transform.
+
+ Args:
+ arg: float, argument to Crop transform to check.
+
+ Raises:
+ apiproxy_errors.ApplicationError on problem with argument.
+ """
+ if not isinstance(arg, float):
+ raise apiproxy_errors.ApplicationError(
+ images_service_pb.ImagesServiceError.BAD_TRANSFORM_DATA)
+
+ if not (0 <= arg <= 1.0):
+ raise apiproxy_errors.ApplicationError(
+ images_service_pb.ImagesServiceError.BAD_TRANSFORM_DATA)
+
+ def _CalculateNewDimensions(self,
+ current_width,
+ current_height,
+ req_width,
+ req_height):
+ """Get new resize dimensions keeping the current aspect ratio.
+
+    This uses the more restrictive of the two requested values to determine
+    the new dimensions.
+
+ Args:
+ current_width: int, current width of the image.
+ current_height: int, current height of the image.
+ req_width: int, requested new width of the image.
+ req_height: int, requested new height of the image.
+
+ Returns:
+      Tuple (width, height) of ints giving the new dimensions.
+ """
+
+ width_ratio = float(req_width) / current_width
+ height_ratio = float(req_height) / current_height
+
+ if req_width == 0 or (width_ratio > height_ratio and req_height != 0):
+ return int(height_ratio * current_width), req_height
+ else:
+ return req_width, int(width_ratio * current_height)
+
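A worked example of the ratio selection (a sketch that assumes PIL is importable so the module loads): fitting a 400x300 image into 200x200 gives width_ratio = 0.5 and height_ratio ~ 0.667; width is the tighter constraint, so both dimensions scale by 0.5.

    stub = ImagesServiceStub()
    # width_ratio (0.5) is not greater than height_ratio (~0.667), so the
    # else-branch returns (req_width, width_ratio * current_height).
    assert stub._CalculateNewDimensions(400, 300, 200, 200) == (200, 150)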
+ def _Resize(self, image, transform):
+ """Use PIL to resize the given image with the given transform.
+
+ Args:
+ image: PIL.Image.Image object to resize.
+ transform: images_service_pb.Transform to use when resizing.
+
+ Returns:
+ PIL.Image.Image with transforms performed on it.
+
+ Raises:
+      apiproxy_errors.ApplicationError if the resize data given is bad.
+ """
+ width = 0
+ height = 0
+
+ if transform.has_width():
+ width = transform.width()
+ if width < 0 or 4000 < width:
+ raise apiproxy_errors.ApplicationError(
+ images_service_pb.ImagesServiceError.BAD_TRANSFORM_DATA)
+
+ if transform.has_height():
+ height = transform.height()
+ if height < 0 or 4000 < height:
+ raise apiproxy_errors.ApplicationError(
+ images_service_pb.ImagesServiceError.BAD_TRANSFORM_DATA)
+
+ current_width, current_height = image.size
+ new_width, new_height = self._CalculateNewDimensions(current_width,
+ current_height,
+ width,
+ height)
+
+ return image.resize((new_width, new_height), Image.ANTIALIAS)
+
+ def _Rotate(self, image, transform):
+ """Use PIL to rotate the given image with the given transform.
+
+ Args:
+ image: PIL.Image.Image object to rotate.
+ transform: images_service_pb.Transform to use when rotating.
+
+ Returns:
+ PIL.Image.Image with transforms performed on it.
+
+ Raises:
+      apiproxy_errors.ApplicationError if the rotate data given is bad.
+ """
+ degrees = transform.rotate()
+ if degrees < 0 or degrees % 90 != 0:
+ raise apiproxy_errors.ApplicationError(
+ images_service_pb.ImagesServiceError.BAD_TRANSFORM_DATA)
+ degrees %= 360
+
+ degrees = 360 - degrees
+ return image.rotate(degrees)
+
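The 360 - degrees flip exists because PIL's Image.rotate() turns counter-clockwise while the images API specifies clockwise rotation; a request for 90 degrees clockwise therefore becomes a 270-degree counter-clockwise PIL rotation.

    requested = 90                         # clockwise, as the API defines it
    pil_degrees = 360 - (requested % 360)
    assert pil_degrees == 270              # image.rotate(270) == 90 degrees clockwise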
+ def _Crop(self, image, transform):
+ """Use PIL to crop the given image with the given transform.
+
+ Args:
+ image: PIL.Image.Image object to crop.
+ transform: images_service_pb.Transform to use when cropping.
+
+ Returns:
+ PIL.Image.Image with transforms performed on it.
+
+ Raises:
+      apiproxy_errors.ApplicationError if the crop data given is bad.
+ """
+ left_x = 0.0
+ top_y = 0.0
+ right_x = 1.0
+ bottom_y = 1.0
+
+ if transform.has_crop_left_x():
+ left_x = transform.crop_left_x()
+ self._ValidateCropArg(left_x)
+
+ if transform.has_crop_top_y():
+ top_y = transform.crop_top_y()
+ self._ValidateCropArg(top_y)
+
+ if transform.has_crop_right_x():
+ right_x = transform.crop_right_x()
+ self._ValidateCropArg(right_x)
+
+ if transform.has_crop_bottom_y():
+ bottom_y = transform.crop_bottom_y()
+ self._ValidateCropArg(bottom_y)
+
+ width, height = image.size
+
+    box = (int(left_x * width),
+           int(top_y * height),
+           int(right_x * width),
+           int(bottom_y * height))
+
+ return image.crop(box)
+
+ def _ProcessTransforms(self, image, transforms):
+ """Execute PIL operations based on transform values.
+
+ Args:
+ image: PIL.Image.Image instance, image to manipulate.
+      transforms: list of ImagesTransformRequest.Transform objects.
+
+ Returns:
+ PIL.Image.Image with transforms performed on it.
+
+ Raises:
+      apiproxy_errors.ApplicationError if more than
+      images.MAX_TRANSFORMS_PER_REQUEST transforms are supplied.
+ """
+ new_image = image
+ if len(transforms) > images.MAX_TRANSFORMS_PER_REQUEST:
+ raise apiproxy_errors.ApplicationError(
+ images_service_pb.ImagesServiceError.BAD_TRANSFORM_DATA)
+ for transform in transforms:
+ if transform.has_width() or transform.has_height():
+ new_image = self._Resize(new_image, transform)
+
+ elif transform.has_rotate():
+ new_image = self._Rotate(new_image, transform)
+
+ elif transform.has_horizontal_flip():
+ new_image = new_image.transpose(Image.FLIP_LEFT_RIGHT)
+
+ elif transform.has_vertical_flip():
+ new_image = new_image.transpose(Image.FLIP_TOP_BOTTOM)
+
+ elif (transform.has_crop_left_x() or
+ transform.has_crop_top_y() or
+ transform.has_crop_right_x() or
+ transform.has_crop_bottom_y()):
+ new_image = self._Crop(new_image, transform)
+
+ elif transform.has_autolevels():
+ logging.info("I'm Feeling Lucky autolevels will be visible once this "
+ "application is deployed.")
+ else:
+      logging.warn("Found no transformations to perform.")
+
+ return new_image
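To see the stub end to end, here is a minimal sketch of wiring it into the API proxy the way the development server does. The 'logo.png' path is a placeholder, and the resize() call assumes the public helper exported by the images package:

    from google.appengine.api import apiproxy_stub_map
    from google.appengine.api import images
    from google.appengine.api.images import images_stub

    apiproxy_stub_map.apiproxy = apiproxy_stub_map.APIProxyStubMap()
    apiproxy_stub_map.apiproxy.RegisterStub('images',
                                            images_stub.ImagesServiceStub())

    png_bytes = open('logo.png', 'rb').read()              # placeholder input
    thumb = images.resize(png_bytes, width=32, height=32)  # served by the stub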
diff --git a/google_appengine/google/appengine/api/images/images_stub.pyc b/google_appengine/google/appengine/api/images/images_stub.pyc
new file mode 100644
index 0000000..a29f50c
--- /dev/null
+++ b/google_appengine/google/appengine/api/images/images_stub.pyc
Binary files differ
diff --git a/google_appengine/google/appengine/api/labs/__init__.py b/google_appengine/google/appengine/api/labs/__init__.py
new file mode 100755
index 0000000..c33ae80
--- /dev/null
+++ b/google_appengine/google/appengine/api/labs/__init__.py
@@ -0,0 +1,16 @@
+#!/usr/bin/env python
+#
+# Copyright 2007 Google Inc.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
diff --git a/google_appengine/google/appengine/api/labs/__init__.pyc b/google_appengine/google/appengine/api/labs/__init__.pyc
new file mode 100644
index 0000000..3557233
--- /dev/null
+++ b/google_appengine/google/appengine/api/labs/__init__.pyc
Binary files differ
diff --git a/google_appengine/google/appengine/api/labs/taskqueue/__init__.py b/google_appengine/google/appengine/api/labs/taskqueue/__init__.py
new file mode 100644
index 0000000..cf9ea5a
--- /dev/null
+++ b/google_appengine/google/appengine/api/labs/taskqueue/__init__.py
@@ -0,0 +1,20 @@
+#!/usr/bin/env python
+#
+# Copyright 2007 Google Inc.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
+
+"""Task Queue API module."""
+
+from taskqueue import *
diff --git a/google_appengine/google/appengine/api/labs/taskqueue/__init__.pyc b/google_appengine/google/appengine/api/labs/taskqueue/__init__.pyc
new file mode 100644
index 0000000..a9ca241
--- /dev/null
+++ b/google_appengine/google/appengine/api/labs/taskqueue/__init__.pyc
Binary files differ
diff --git a/google_appengine/google/appengine/api/labs/taskqueue/taskqueue.py b/google_appengine/google/appengine/api/labs/taskqueue/taskqueue.py
new file mode 100755
index 0000000..733df36
--- /dev/null
+++ b/google_appengine/google/appengine/api/labs/taskqueue/taskqueue.py
@@ -0,0 +1,633 @@
+#!/usr/bin/env python
+#
+# Copyright 2007 Google Inc.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
+
+"""Task Queue API.
+
+Enables an application to queue background work for itself. Work is done through
+webhooks that process tasks pushed from a queue. Tasks will execute in
+best-effort order of ETA. Webhooks that fail will cause tasks to be retried at a
+later time. Multiple queues may exist with independent throttling controls.
+
+Webhook URLs may be specified directly for Tasks, or the default URL scheme
+may be used, which will translate Task names into URLs relative to a Queue's
+base path. A default queue is also provided for simple usage.
+"""
+
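In terms of the API defined below, typical usage looks like this sketch (the '/work' URL and the 'background' queue name are illustrative):

    from google.appengine.api.labs.taskqueue import taskqueue

    # POST to /work on the default queue roughly 30 seconds from now.
    taskqueue.add(url='/work', params={'key': 'value'}, countdown=30)

    # Or build the Task yourself and target a named queue.
    task = taskqueue.Task(url='/work', method='GET', params={'id': 7})
    taskqueue.Queue('background').add(task)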
+
+
+import calendar
+import datetime
+import re
+import time
+import urllib
+import urlparse
+
+import taskqueue_service_pb
+
+from google.appengine.api import apiproxy_stub_map
+from google.appengine.api import urlfetch
+from google.appengine.runtime import apiproxy_errors
+
+
+class Error(Exception):
+ """Base-class for exceptions in this module."""
+
+
+class UnknownQueueError(Error):
+ """The queue specified is unknown."""
+
+
+class TransientError(Error):
+ """There was a transient error while accessing the queue.
+
+  Please try again later.
+ """
+
+
+class InternalError(Error):
+ """There was an internal error while accessing this queue.
+
+ If this problem continues, please contact the App Engine team through
+ our support forum with a description of your problem.
+ """
+
+
+class InvalidTaskError(Error):
+ """The task's parameters, headers, or method is invalid."""
+
+
+class InvalidTaskNameError(InvalidTaskError):
+ """The task's name is invalid."""
+
+
+class TaskTooLargeError(InvalidTaskError):
+ """The task is too large with its headers and payload."""
+
+
+class TaskAlreadyExistsError(InvalidTaskError):
+ """Task already exists. It has not yet run."""
+
+
+class TombstonedTaskError(InvalidTaskError):
+ """Task has been tombstoned."""
+
+
+class InvalidUrlError(InvalidTaskError):
+ """The task's relative URL is invalid."""
+
+
+class BadTaskStateError(Error):
+ """The task is in the wrong state for the requested operation."""
+
+
+class InvalidQueueError(Error):
+ """The Queue's configuration is invalid."""
+
+
+class InvalidQueueNameError(InvalidQueueError):
+ """The Queue's name is invalid."""
+
+
+class _RelativeUrlError(Error):
+ """The relative URL supplied is invalid."""
+
+
+class PermissionDeniedError(Error):
+ """The requested operation is not allowed for this app."""
+
+
+MAX_QUEUE_NAME_LENGTH = 100
+
+MAX_TASK_NAME_LENGTH = 500
+
+MAX_TASK_SIZE_BYTES = 10 * (2 ** 10)
+
+MAX_URL_LENGTH = 2083
+
+_DEFAULT_QUEUE = 'default'
+
+_DEFAULT_QUEUE_PATH = '/_ah/queue'
+
+_METHOD_MAP = {
+ 'GET': taskqueue_service_pb.TaskQueueAddRequest.GET,
+ 'POST': taskqueue_service_pb.TaskQueueAddRequest.POST,
+ 'HEAD': taskqueue_service_pb.TaskQueueAddRequest.HEAD,
+ 'PUT': taskqueue_service_pb.TaskQueueAddRequest.PUT,
+ 'DELETE': taskqueue_service_pb.TaskQueueAddRequest.DELETE,
+}
+
+_NON_POST_METHODS = frozenset(['GET', 'HEAD', 'PUT', 'DELETE'])
+
+_BODY_METHODS = frozenset(['POST', 'PUT'])
+
+_TASK_NAME_PATTERN = r'^[a-zA-Z0-9-]{1,%s}$' % MAX_TASK_NAME_LENGTH
+
+_TASK_NAME_RE = re.compile(_TASK_NAME_PATTERN)
+
+_QUEUE_NAME_PATTERN = r'^[a-zA-Z0-9-]{1,%s}$' % MAX_QUEUE_NAME_LENGTH
+
+_QUEUE_NAME_RE = re.compile(_QUEUE_NAME_PATTERN)
+
+
+class _UTCTimeZone(datetime.tzinfo):
+ """UTC timezone."""
+
+ ZERO = datetime.timedelta(0)
+
+ def utcoffset(self, dt):
+ return self.ZERO
+
+ def dst(self, dt):
+ return self.ZERO
+
+ def tzname(self, dt):
+ return 'UTC'
+
+
+_UTC = _UTCTimeZone()
+
+
+def _parse_relative_url(relative_url):
+ """Parses a relative URL and splits it into its path and query string.
+
+ Args:
+ relative_url: The relative URL, starting with a '/'.
+
+ Returns:
+ Tuple (path, query) where:
+ path: The path in the relative URL.
+ query: The query string in the URL without the '?' character.
+
+ Raises:
+    _RelativeUrlError if the relative_url is invalid for any reason.
+ """
+ if not relative_url:
+ raise _RelativeUrlError('Relative URL is empty')
+ (scheme, netloc, path, query, fragment) = urlparse.urlsplit(relative_url)
+ if scheme or netloc:
+ raise _RelativeUrlError('Relative URL may not have a scheme or location')
+ if fragment:
+ raise _RelativeUrlError('Relative URL may not specify a fragment')
+ if not path or path[0] != '/':
+ raise _RelativeUrlError('Relative URL path must start with "/"')
+ return path, query
+
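For instance, a well-formed relative URL splits into path and query, while anything with a scheme, network location, fragment, or non-rooted path is rejected:

    assert _parse_relative_url('/work?key=value') == ('/work', 'key=value')
    # Each of these raises _RelativeUrlError:
    #   _parse_relative_url('http://example.com/work')   (has a scheme)
    #   _parse_relative_url('/work#section')             (has a fragment)
    #   _parse_relative_url('work')                      (missing leading '/')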
+
+def _flatten_params(params):
+ """Converts a dictionary of parameters to a list of parameters.
+
+ Any unicode strings in keys or values will be encoded as UTF-8.
+
+ Args:
+ params: Dictionary mapping parameter keys to values. Values will be
+ converted to a string and added to the list as tuple (key, value). If
+        a value is iterable and not a string, each contained value will be
+ added as a separate (key, value) tuple.
+
+ Returns:
+ List of (key, value) tuples.
+ """
+ def get_string(value):
+ if isinstance(value, unicode):
+ return unicode(value).encode('utf-8')
+ else:
+ return str(value)
+
+ param_list = []
+ for key, value in params.iteritems():
+ key = get_string(key)
+ if isinstance(value, basestring):
+ param_list.append((key, get_string(value)))
+ else:
+ try:
+ iterator = iter(value)
+ except TypeError:
+ param_list.append((key, str(value)))
+ else:
+ param_list.extend((key, get_string(v)) for v in iterator)
+
+ return param_list
+
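As an illustration, iterable values fan out into repeated (key, value) pairs and everything is coerced to a byte string; dictionary iteration order is unspecified, so the result is compared as a set:

    params = {'tag': ['a', 'b'], 'id': 7, u'note': u'caf\xe9'}
    expected = set([('tag', 'a'), ('tag', 'b'), ('id', '7'),
                    ('note', 'caf\xc3\xa9')])          # UTF-8 encoded bytes
    assert set(_flatten_params(params)) == expected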
+
+class Task(object):
+ """Represents a single Task on a queue."""
+
+ __CONSTRUCTOR_KWARGS = frozenset([
+ 'countdown', 'eta', 'headers', 'method', 'name', 'params', 'url'])
+
+ def __init__(self, payload=None, **kwargs):
+ """Initializer.
+
+ All parameters are optional.
+
+ Args:
+ payload: The payload data for this Task that will be delivered to the
+ webhook as the HTTP request body. This is only allowed for POST and PUT
+ methods.
+ countdown: Time in seconds into the future that this Task should execute.
+ Defaults to zero.
+ eta: Absolute time when the Task should execute. May not be specified
+ if 'countdown' is also supplied.
+ headers: Dictionary of headers to pass to the webhook. Values in the
+ dictionary may be iterable to indicate repeated header fields.
+ method: Method to use when accessing the webhook. Defaults to 'POST'.
+ name: Name to give the Task; if not specified, a name will be
+ auto-generated when added to a queue and assigned to this object. Must
+ match the _TASK_NAME_PATTERN regular expression.
+ params: Dictionary of parameters to use for this Task. For POST requests
+ these params will be encoded as 'application/x-www-form-urlencoded' and
+ set to the payload. For all other methods, the parameters will be
+ converted to a query string. May not be specified if the URL already
+ contains a query string.
+ url: Relative URL where the webhook that should handle this task is
+ located for this application. May have a query string unless this is
+ a POST method.
+
+ Raises:
+ InvalidTaskError if any of the parameters are invalid;
+ InvalidTaskNameError if the task name is invalid; InvalidUrlError if
+ the task URL is invalid or too long; TaskTooLargeError if the task with
+ its payload is too large.
+ """
+ args_diff = set(kwargs.iterkeys()) - self.__CONSTRUCTOR_KWARGS
+ if args_diff:
+ raise TypeError('Invalid arguments: %s' % ', '.join(args_diff))
+
+ self.__name = kwargs.get('name')
+ if self.__name and not _TASK_NAME_RE.match(self.__name):
+ raise InvalidTaskNameError(
+ 'Task name does not match expression "%s"; found %s' %
+ (_TASK_NAME_PATTERN, self.__name))
+
+ self.__default_url, self.__relative_url, query = Task.__determine_url(
+ kwargs.get('url', ''))
+ self.__headers = urlfetch._CaselessDict()
+ self.__headers.update(kwargs.get('headers', {}))
+ self.__method = kwargs.get('method', 'POST').upper()
+ self.__payload = None
+ params = kwargs.get('params', {})
+
+ if query and params:
+ raise InvalidTaskError('Query string and parameters both present; '
+ 'only one of these may be supplied')
+
+ if self.__method == 'POST':
+ if payload and params:
+ raise InvalidTaskError('Message body and parameters both present for '
+ 'POST method; only one of these may be supplied')
+ elif query:
+ raise InvalidTaskError('POST method may not have a query string; '
+ 'use the "params" keyword argument instead')
+ elif params:
+ self.__payload = Task.__encode_params(params)
+ self.__headers.setdefault(
+ 'content-type', 'application/x-www-form-urlencoded')
+ elif payload is not None:
+ self.__payload = Task.__convert_payload(payload, self.__headers)
+ elif self.__method in _NON_POST_METHODS:
+ if payload and self.__method not in _BODY_METHODS:
+ raise InvalidTaskError('Payload may only be specified for methods %s' %
+ ', '.join(_BODY_METHODS))
+ if payload:
+ self.__payload = Task.__convert_payload(payload, self.__headers)
+ if params:
+ query = Task.__encode_params(params)
+ if query:
+ self.__relative_url = '%s?%s' % (self.__relative_url, query)
+ else:
+ raise InvalidTaskError('Invalid method: %s' % self.__method)
+
+ self.__headers_list = _flatten_params(self.__headers)
+ self.__eta = Task.__determine_eta(
+ kwargs.get('eta'), kwargs.get('countdown'))
+ self.__enqueued = False
+
+ if self.size > MAX_TASK_SIZE_BYTES:
+ raise TaskTooLargeError('Task size must be less than %d; found %d' %
+ (MAX_TASK_SIZE_BYTES, self.size))
+
+ @staticmethod
+ def __determine_url(relative_url):
+ """Determines the URL of a task given a relative URL and a name.
+
+ Args:
+ relative_url: The relative URL for the Task.
+
+ Returns:
+ Tuple (default_url, relative_url, query) where:
+ default_url: True if this Task is using the default URL scheme;
+ False otherwise.
+ relative_url: String containing the relative URL for this Task.
+ query: The query string for this task.
+
+ Raises:
+ InvalidUrlError if the relative_url is invalid.
+ """
+ if not relative_url:
+ default_url, query = True, ''
+ else:
+ default_url = False
+ try:
+ relative_url, query = _parse_relative_url(relative_url)
+ except _RelativeUrlError, e:
+ raise InvalidUrlError(e)
+
+ if len(relative_url) > MAX_URL_LENGTH:
+ raise InvalidUrlError(
+ 'Task URL must be less than %d characters; found %d' %
+ (MAX_URL_LENGTH, len(relative_url)))
+
+ return (default_url, relative_url, query)
+
+ @staticmethod
+ def __determine_eta(eta=None, countdown=None, now=datetime.datetime.now):
+ """Determines the ETA for a task.
+
+ If 'eta' and 'countdown' are both None, the current time will be used.
+ Otherwise, only one of them may be specified.
+
+ Args:
+ eta: A datetime.datetime specifying the absolute ETA or None
+      countdown: Number of seconds into the future from now at which the ETA
+        should be set.
+
+ Returns:
+ A datetime in the UTC timezone containing the ETA.
+
+ Raises:
+ InvalidTaskError if the parameters are invalid.
+ """
+ if eta is not None and countdown is not None:
+ raise InvalidTaskError('May not use a countdown and ETA together')
+ elif eta is not None:
+ if not isinstance(eta, datetime.datetime):
+ raise InvalidTaskError('ETA must be a datetime.datetime instance')
+ elif countdown is not None:
+ try:
+ countdown = float(countdown)
+ except ValueError:
+ raise InvalidTaskError('Countdown must be a number')
+ else:
+ eta = now() + datetime.timedelta(seconds=countdown)
+ else:
+ eta = now()
+
+ if eta.tzinfo is None:
+ eta = eta.replace(tzinfo=_UTC)
+ return eta.astimezone(_UTC)
+
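Concretely, a naive eta is assumed to already be in UTC, a countdown becomes an absolute ETA relative to now, and the result always comes back timezone-aware. A sketch using the enclosing Task class:

    import datetime

    t = Task(url='/work', eta=datetime.datetime(2009, 10, 19, 12, 0, 0))
    assert t.eta.tzname() == 'UTC'        # naive input is treated as UTC

    t2 = Task(url='/work', countdown=60)  # eta resolves to now + 60s, in UTC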
+ @staticmethod
+ def __encode_params(params):
+ """URL-encodes a list of parameters.
+
+ Args:
+ params: Dictionary of parameters, possibly with iterable values.
+
+ Returns:
+ URL-encoded version of the params, ready to be added to a query string or
+ POST body.
+ """
+ return urllib.urlencode(_flatten_params(params))
+
+ @staticmethod
+ def __convert_payload(payload, headers):
+ """Converts a Task payload into UTF-8 and sets headers if necessary.
+
+ Args:
+ payload: The payload data to convert.
+ headers: Dictionary of headers.
+
+ Returns:
+ The payload as a non-unicode string.
+
+ Raises:
+ InvalidTaskError if the payload is not a string or unicode instance.
+ """
+ if isinstance(payload, unicode):
+ headers.setdefault('content-type', 'text/plain; charset=utf-8')
+ payload = payload.encode('utf-8')
+ elif not isinstance(payload, str):
+ raise InvalidTaskError(
+ 'Task payloads must be strings; invalid payload: %r' % payload)
+ return payload
+
+ @property
+ def on_queue_url(self):
+ """Returns True if this Task will run on the queue's URL."""
+ return self.__default_url
+
+ @property
+ def eta(self):
+ """Returns an datetime corresponding to when this Task will execute."""
+ return self.__eta
+
+ @property
+ def headers(self):
+ """Returns a copy of the headers for this Task."""
+ return self.__headers.copy()
+
+ @property
+ def method(self):
+ """Returns the method to use for this Task."""
+ return self.__method
+
+ @property
+ def name(self):
+ """Returns the name of this Task.
+
+ Will be None if using auto-assigned Task names and this Task has not yet
+ been added to a Queue.
+ """
+ return self.__name
+
+ @property
+ def payload(self):
+ """Returns the payload for this task, which may be None."""
+ return self.__payload
+
+ @property
+ def size(self):
+ """Returns the size of this task in bytes."""
+    HEADER_SEPARATOR = len(': \r\n')
+    header_size = sum((len(key) + len(value) + HEADER_SEPARATOR)
+ for key, value in self.__headers_list)
+ return (len(self.__method) + len(self.__payload or '') +
+ len(self.__relative_url) + header_size)
+
+ @property
+ def url(self):
+ """Returns the relative URL for this Task."""
+ return self.__relative_url
+
+ @property
+ def was_enqueued(self):
+ """Returns True if this Task has been enqueued.
+
+ Note: This will not check if this task already exists in the queue.
+ """
+ return self.__enqueued
+
+ def add(self, queue_name=_DEFAULT_QUEUE, transactional=True):
+ """Adds this Task to a queue. See Queue.add."""
+ return Queue(queue_name).add(self, transactional=transactional)
+
+
+class Queue(object):
+ """Represents a Queue."""
+
+ def __init__(self, name=_DEFAULT_QUEUE):
+ """Initializer.
+
+ Args:
+ name: Name of this queue. If not supplied, defaults to the default queue.
+
+ Raises:
+ InvalidQueueNameError if the queue name is invalid.
+ """
+ if not _QUEUE_NAME_RE.match(name):
+ raise InvalidQueueNameError(
+ 'Queue name does not match pattern "%s"; found %s' %
+ (_QUEUE_NAME_PATTERN, name))
+ self.__name = name
+ self.__url = '%s/%s' % (_DEFAULT_QUEUE_PATH, self.__name)
+
+ def add(self, task, transactional=True):
+ """Adds a Task to this Queue.
+
+ Args:
+ task: The Task to add.
+      transactional: If False, adds the task to the queue regardless of the
+        enclosing transaction's success or failure. (optional)
+
+ Returns:
+ The Task that was supplied to this method.
+
+ Raises:
+ BadTaskStateError if the Task has already been added to a queue.
+ Error-subclass on application errors.
+ """
+ if task.was_enqueued:
+ raise BadTaskStateError('Task has already been enqueued')
+
+ request = taskqueue_service_pb.TaskQueueAddRequest()
+ response = taskqueue_service_pb.TaskQueueAddResponse()
+
+ adjusted_url = task.url
+ if task.on_queue_url:
+ adjusted_url = self.__url + task.url
+
+
+ request.set_queue_name(self.__name)
+    request.set_eta_usec(int(calendar.timegm(task.eta.utctimetuple())) * 10**6)
+ request.set_method(_METHOD_MAP.get(task.method))
+ request.set_url(adjusted_url)
+
+ if task.name:
+ request.set_task_name(task.name)
+ else:
+ request.set_task_name('')
+
+ if task.payload:
+ request.set_body(task.payload)
+ for key, value in _flatten_params(task.headers):
+ header = request.add_header()
+ header.set_key(key)
+ header.set_value(value)
+
+ if transactional:
+ from google.appengine.api import datastore
+ datastore._MaybeSetupTransaction(request, [])
+
+ call_tuple = ('taskqueue', 'Add', request, response)
+ apiproxy_stub_map.apiproxy.GetPreCallHooks().Call(*call_tuple)
+ try:
+ apiproxy_stub_map.MakeSyncCall(*call_tuple)
+ except apiproxy_errors.ApplicationError, e:
+ self.__TranslateError(e)
+ else:
+ apiproxy_stub_map.apiproxy.GetPostCallHooks().Call(*call_tuple)
+
+ if response.has_chosen_task_name():
+ task._Task__name = response.chosen_task_name()
+ task._Task__enqueued = True
+ return task
+
+ @property
+ def name(self):
+ """Returns the name of this queue."""
+ return self.__name
+
+ @staticmethod
+ def __TranslateError(error):
+ """Translates a TaskQueueServiceError into an exception.
+
+ Args:
+ error: Value from TaskQueueServiceError enum.
+
+ Raises:
+ The corresponding Exception sub-class for that error code.
+ """
+ if (error.application_error ==
+ taskqueue_service_pb.TaskQueueServiceError.UNKNOWN_QUEUE):
+ raise UnknownQueueError(error.error_detail)
+ elif (error.application_error ==
+ taskqueue_service_pb.TaskQueueServiceError.TRANSIENT_ERROR):
+ raise TransientError(error.error_detail)
+ elif (error.application_error ==
+ taskqueue_service_pb.TaskQueueServiceError.INTERNAL_ERROR):
+ raise InternalError(error.error_detail)
+ elif (error.application_error ==
+ taskqueue_service_pb.TaskQueueServiceError.TASK_TOO_LARGE):
+ raise TaskTooLargeError(error.error_detail)
+ elif (error.application_error ==
+ taskqueue_service_pb.TaskQueueServiceError.INVALID_TASK_NAME):
+ raise InvalidTaskNameError(error.error_detail)
+ elif (error.application_error ==
+ taskqueue_service_pb.TaskQueueServiceError.INVALID_QUEUE_NAME):
+ raise InvalidQueueNameError(error.error_detail)
+ elif (error.application_error ==
+ taskqueue_service_pb.TaskQueueServiceError.INVALID_URL):
+ raise InvalidUrlError(error.error_detail)
+ elif (error.application_error ==
+ taskqueue_service_pb.TaskQueueServiceError.INVALID_QUEUE_RATE):
+ raise InvalidQueueError(error.error_detail)
+ elif (error.application_error ==
+ taskqueue_service_pb.TaskQueueServiceError.PERMISSION_DENIED):
+ raise PermissionDeniedError(error.error_detail)
+ elif (error.application_error ==
+ taskqueue_service_pb.TaskQueueServiceError.TASK_ALREADY_EXISTS):
+ raise TaskAlreadyExistsError(error.error_detail)
+ elif (error.application_error ==
+ taskqueue_service_pb.TaskQueueServiceError.TOMBSTONED_TASK):
+ raise TombstonedTaskError(error.error_detail)
+ elif (error.application_error ==
+ taskqueue_service_pb.TaskQueueServiceError.INVALID_ETA):
+ raise InvalidTaskError(error.error_detail)
+ else:
+ raise Error('Application error %s: %s' %
+ (error.application_error, error.error_detail))
+
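The net effect of __TranslateError is that callers of Queue.add catch typed exceptions rather than inspecting raw application errors. A sketch, assuming the named queue would be configured in queue.yaml:

    from google.appengine.api.labs.taskqueue import taskqueue

    try:
        taskqueue.Queue('nightly-reports').add(taskqueue.Task(url='/report'))
    except taskqueue.UnknownQueueError:
        pass  # the named queue is not configured
    except taskqueue.TransientError:
        pass  # safe to retry the add later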
+
+def add(*args, **kwargs):
+ """Convenience method will create a Task and add it to the default queue.
+
+ Args:
+ *args, **kwargs: Passed to the Task constructor.
+
+ Returns:
+ The Task that was added to the queue.
+ """
+ return Task(*args, **kwargs).add()
diff --git a/google_appengine/google/appengine/api/labs/taskqueue/taskqueue.pyc b/google_appengine/google/appengine/api/labs/taskqueue/taskqueue.pyc
new file mode 100644
index 0000000..ebb0b63
--- /dev/null
+++ b/google_appengine/google/appengine/api/labs/taskqueue/taskqueue.pyc
Binary files differ
diff --git a/google_appengine/google/appengine/api/labs/taskqueue/taskqueue_service_pb.py b/google_appengine/google/appengine/api/labs/taskqueue/taskqueue_service_pb.py
new file mode 100644
index 0000000..1038974
--- /dev/null
+++ b/google_appengine/google/appengine/api/labs/taskqueue/taskqueue_service_pb.py
@@ -0,0 +1,1645 @@
+#!/usr/bin/env python
+#
+# Copyright 2007 Google Inc.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
+
+from google.net.proto import ProtocolBuffer
+import array
+import dummy_thread as thread
+
+__pychecker__ = """maxreturns=0 maxbranches=0 no-callinit
+ unusednames=printElemNumber,debug_strs no-special"""
+
+from google.appengine.datastore.datastore_v3_pb import *
+class TaskQueueServiceError(ProtocolBuffer.ProtocolMessage):
+
+ OK = 0
+ UNKNOWN_QUEUE = 1
+ TRANSIENT_ERROR = 2
+ INTERNAL_ERROR = 3
+ TASK_TOO_LARGE = 4
+ INVALID_TASK_NAME = 5
+ INVALID_QUEUE_NAME = 6
+ INVALID_URL = 7
+ INVALID_QUEUE_RATE = 8
+ PERMISSION_DENIED = 9
+ TASK_ALREADY_EXISTS = 10
+ TOMBSTONED_TASK = 11
+ INVALID_ETA = 12
+ INVALID_REQUEST = 13
+
+ _ErrorCode_NAMES = {
+ 0: "OK",
+ 1: "UNKNOWN_QUEUE",
+ 2: "TRANSIENT_ERROR",
+ 3: "INTERNAL_ERROR",
+ 4: "TASK_TOO_LARGE",
+ 5: "INVALID_TASK_NAME",
+ 6: "INVALID_QUEUE_NAME",
+ 7: "INVALID_URL",
+ 8: "INVALID_QUEUE_RATE",
+ 9: "PERMISSION_DENIED",
+ 10: "TASK_ALREADY_EXISTS",
+ 11: "TOMBSTONED_TASK",
+ 12: "INVALID_ETA",
+ 13: "INVALID_REQUEST",
+ }
+
+ def ErrorCode_Name(cls, x): return cls._ErrorCode_NAMES.get(x, "")
+ ErrorCode_Name = classmethod(ErrorCode_Name)
+
+
+ def __init__(self, contents=None):
+ pass
+ if contents is not None: self.MergeFromString(contents)
+
+
+ def MergeFrom(self, x):
+ assert x is not self
+
+ def Equals(self, x):
+ if x is self: return 1
+ return 1
+
+ def IsInitialized(self, debug_strs=None):
+ initialized = 1
+ return initialized
+
+ def ByteSize(self):
+ n = 0
+ return n + 0
+
+ def Clear(self):
+ pass
+
+ def OutputUnchecked(self, out):
+ pass
+
+ def TryMerge(self, d):
+ while d.avail() > 0:
+ tt = d.getVarInt32()
+ if (tt == 0): raise ProtocolBuffer.ProtocolBufferDecodeError
+ d.skipData(tt)
+
+
+ def __str__(self, prefix="", printElemNumber=0):
+ res=""
+ return res
+
+
+ def _BuildTagLookupTable(sparse, maxtag, default=None):
+ return tuple([sparse.get(i, default) for i in xrange(0, 1+maxtag)])
+
+
+ _TEXT = _BuildTagLookupTable({
+ 0: "ErrorCode",
+ }, 0)
+
+ _TYPES = _BuildTagLookupTable({
+ 0: ProtocolBuffer.Encoder.NUMERIC,
+ }, 0, ProtocolBuffer.Encoder.MAX_TYPE)
+
+ _STYLE = """"""
+ _STYLE_CONTENT_TYPE = """"""
+class TaskQueueAddRequest_Header(ProtocolBuffer.ProtocolMessage):
+ has_key_ = 0
+ key_ = ""
+ has_value_ = 0
+ value_ = ""
+
+ def __init__(self, contents=None):
+ if contents is not None: self.MergeFromString(contents)
+
+ def key(self): return self.key_
+
+ def set_key(self, x):
+ self.has_key_ = 1
+ self.key_ = x
+
+ def clear_key(self):
+ if self.has_key_:
+ self.has_key_ = 0
+ self.key_ = ""
+
+ def has_key(self): return self.has_key_
+
+ def value(self): return self.value_
+
+ def set_value(self, x):
+ self.has_value_ = 1
+ self.value_ = x
+
+ def clear_value(self):
+ if self.has_value_:
+ self.has_value_ = 0
+ self.value_ = ""
+
+ def has_value(self): return self.has_value_
+
+
+ def MergeFrom(self, x):
+ assert x is not self
+ if (x.has_key()): self.set_key(x.key())
+ if (x.has_value()): self.set_value(x.value())
+
+ def Equals(self, x):
+ if x is self: return 1
+ if self.has_key_ != x.has_key_: return 0
+ if self.has_key_ and self.key_ != x.key_: return 0
+ if self.has_value_ != x.has_value_: return 0
+ if self.has_value_ and self.value_ != x.value_: return 0
+ return 1
+
+ def IsInitialized(self, debug_strs=None):
+ initialized = 1
+ if (not self.has_key_):
+ initialized = 0
+ if debug_strs is not None:
+ debug_strs.append('Required field: key not set.')
+ if (not self.has_value_):
+ initialized = 0
+ if debug_strs is not None:
+ debug_strs.append('Required field: value not set.')
+ return initialized
+
+ def ByteSize(self):
+ n = 0
+ n += self.lengthString(len(self.key_))
+ n += self.lengthString(len(self.value_))
+ return n + 2
+
+ def Clear(self):
+ self.clear_key()
+ self.clear_value()
+
+ def OutputUnchecked(self, out):
+ out.putVarInt32(58)
+ out.putPrefixedString(self.key_)
+ out.putVarInt32(66)
+ out.putPrefixedString(self.value_)
+
+ def TryMerge(self, d):
+ while 1:
+ tt = d.getVarInt32()
+ if tt == 52: break
+ if tt == 58:
+ self.set_key(d.getPrefixedString())
+ continue
+ if tt == 66:
+ self.set_value(d.getPrefixedString())
+ continue
+ if (tt == 0): raise ProtocolBuffer.ProtocolBufferDecodeError
+ d.skipData(tt)
+
+
+ def __str__(self, prefix="", printElemNumber=0):
+ res=""
+ if self.has_key_: res+=prefix+("key: %s\n" % self.DebugFormatString(self.key_))
+ if self.has_value_: res+=prefix+("value: %s\n" % self.DebugFormatString(self.value_))
+ return res
+
+class TaskQueueAddRequest(ProtocolBuffer.ProtocolMessage):
+
+ GET = 1
+ POST = 2
+ HEAD = 3
+ PUT = 4
+ DELETE = 5
+
+ _RequestMethod_NAMES = {
+ 1: "GET",
+ 2: "POST",
+ 3: "HEAD",
+ 4: "PUT",
+ 5: "DELETE",
+ }
+
+ def RequestMethod_Name(cls, x): return cls._RequestMethod_NAMES.get(x, "")
+ RequestMethod_Name = classmethod(RequestMethod_Name)
+
+ has_queue_name_ = 0
+ queue_name_ = ""
+ has_task_name_ = 0
+ task_name_ = ""
+ has_eta_usec_ = 0
+ eta_usec_ = 0
+ has_method_ = 0
+ method_ = 2
+ has_url_ = 0
+ url_ = ""
+ has_body_ = 0
+ body_ = ""
+ has_transaction_ = 0
+ transaction_ = None
+
+ def __init__(self, contents=None):
+ self.header_ = []
+ self.lazy_init_lock_ = thread.allocate_lock()
+ if contents is not None: self.MergeFromString(contents)
+
+ def queue_name(self): return self.queue_name_
+
+ def set_queue_name(self, x):
+ self.has_queue_name_ = 1
+ self.queue_name_ = x
+
+ def clear_queue_name(self):
+ if self.has_queue_name_:
+ self.has_queue_name_ = 0
+ self.queue_name_ = ""
+
+ def has_queue_name(self): return self.has_queue_name_
+
+ def task_name(self): return self.task_name_
+
+ def set_task_name(self, x):
+ self.has_task_name_ = 1
+ self.task_name_ = x
+
+ def clear_task_name(self):
+ if self.has_task_name_:
+ self.has_task_name_ = 0
+ self.task_name_ = ""
+
+ def has_task_name(self): return self.has_task_name_
+
+ def eta_usec(self): return self.eta_usec_
+
+ def set_eta_usec(self, x):
+ self.has_eta_usec_ = 1
+ self.eta_usec_ = x
+
+ def clear_eta_usec(self):
+ if self.has_eta_usec_:
+ self.has_eta_usec_ = 0
+ self.eta_usec_ = 0
+
+ def has_eta_usec(self): return self.has_eta_usec_
+
+ def method(self): return self.method_
+
+ def set_method(self, x):
+ self.has_method_ = 1
+ self.method_ = x
+
+ def clear_method(self):
+ if self.has_method_:
+ self.has_method_ = 0
+ self.method_ = 2
+
+ def has_method(self): return self.has_method_
+
+ def url(self): return self.url_
+
+ def set_url(self, x):
+ self.has_url_ = 1
+ self.url_ = x
+
+ def clear_url(self):
+ if self.has_url_:
+ self.has_url_ = 0
+ self.url_ = ""
+
+ def has_url(self): return self.has_url_
+
+ def header_size(self): return len(self.header_)
+ def header_list(self): return self.header_
+
+ def header(self, i):
+ return self.header_[i]
+
+ def mutable_header(self, i):
+ return self.header_[i]
+
+ def add_header(self):
+ x = TaskQueueAddRequest_Header()
+ self.header_.append(x)
+ return x
+
+ def clear_header(self):
+ self.header_ = []
+ def body(self): return self.body_
+
+ def set_body(self, x):
+ self.has_body_ = 1
+ self.body_ = x
+
+ def clear_body(self):
+ if self.has_body_:
+ self.has_body_ = 0
+ self.body_ = ""
+
+ def has_body(self): return self.has_body_
+
+ def transaction(self):
+ if self.transaction_ is None:
+ self.lazy_init_lock_.acquire()
+ try:
+ if self.transaction_ is None: self.transaction_ = Transaction()
+ finally:
+ self.lazy_init_lock_.release()
+ return self.transaction_
+
+ def mutable_transaction(self): self.has_transaction_ = 1; return self.transaction()
+
+ def clear_transaction(self):
+ if self.has_transaction_:
+ self.has_transaction_ = 0;
+ if self.transaction_ is not None: self.transaction_.Clear()
+
+ def has_transaction(self): return self.has_transaction_
+
+
+ def MergeFrom(self, x):
+ assert x is not self
+ if (x.has_queue_name()): self.set_queue_name(x.queue_name())
+ if (x.has_task_name()): self.set_task_name(x.task_name())
+ if (x.has_eta_usec()): self.set_eta_usec(x.eta_usec())
+ if (x.has_method()): self.set_method(x.method())
+ if (x.has_url()): self.set_url(x.url())
+ for i in xrange(x.header_size()): self.add_header().CopyFrom(x.header(i))
+ if (x.has_body()): self.set_body(x.body())
+ if (x.has_transaction()): self.mutable_transaction().MergeFrom(x.transaction())
+
+ def Equals(self, x):
+ if x is self: return 1
+ if self.has_queue_name_ != x.has_queue_name_: return 0
+ if self.has_queue_name_ and self.queue_name_ != x.queue_name_: return 0
+ if self.has_task_name_ != x.has_task_name_: return 0
+ if self.has_task_name_ and self.task_name_ != x.task_name_: return 0
+ if self.has_eta_usec_ != x.has_eta_usec_: return 0
+ if self.has_eta_usec_ and self.eta_usec_ != x.eta_usec_: return 0
+ if self.has_method_ != x.has_method_: return 0
+ if self.has_method_ and self.method_ != x.method_: return 0
+ if self.has_url_ != x.has_url_: return 0
+ if self.has_url_ and self.url_ != x.url_: return 0
+ if len(self.header_) != len(x.header_): return 0
+ for e1, e2 in zip(self.header_, x.header_):
+ if e1 != e2: return 0
+ if self.has_body_ != x.has_body_: return 0
+ if self.has_body_ and self.body_ != x.body_: return 0
+ if self.has_transaction_ != x.has_transaction_: return 0
+ if self.has_transaction_ and self.transaction_ != x.transaction_: return 0
+ return 1
+
+ def IsInitialized(self, debug_strs=None):
+ initialized = 1
+ if (not self.has_queue_name_):
+ initialized = 0
+ if debug_strs is not None:
+ debug_strs.append('Required field: queue_name not set.')
+ if (not self.has_task_name_):
+ initialized = 0
+ if debug_strs is not None:
+ debug_strs.append('Required field: task_name not set.')
+ if (not self.has_eta_usec_):
+ initialized = 0
+ if debug_strs is not None:
+ debug_strs.append('Required field: eta_usec not set.')
+ if (not self.has_url_):
+ initialized = 0
+ if debug_strs is not None:
+ debug_strs.append('Required field: url not set.')
+ for p in self.header_:
+ if not p.IsInitialized(debug_strs): initialized=0
+ if (self.has_transaction_ and not self.transaction_.IsInitialized(debug_strs)): initialized = 0
+ return initialized
+
+ def ByteSize(self):
+ n = 0
+ n += self.lengthString(len(self.queue_name_))
+ n += self.lengthString(len(self.task_name_))
+ n += self.lengthVarInt64(self.eta_usec_)
+ if (self.has_method_): n += 1 + self.lengthVarInt64(self.method_)
+ n += self.lengthString(len(self.url_))
+ n += 2 * len(self.header_)
+ for i in xrange(len(self.header_)): n += self.header_[i].ByteSize()
+ if (self.has_body_): n += 1 + self.lengthString(len(self.body_))
+ if (self.has_transaction_): n += 1 + self.lengthString(self.transaction_.ByteSize())
+ return n + 4
+
+ def Clear(self):
+ self.clear_queue_name()
+ self.clear_task_name()
+ self.clear_eta_usec()
+ self.clear_method()
+ self.clear_url()
+ self.clear_header()
+ self.clear_body()
+ self.clear_transaction()
+
+ def OutputUnchecked(self, out):
+ out.putVarInt32(10)
+ out.putPrefixedString(self.queue_name_)
+ out.putVarInt32(18)
+ out.putPrefixedString(self.task_name_)
+ out.putVarInt32(24)
+ out.putVarInt64(self.eta_usec_)
+ out.putVarInt32(34)
+ out.putPrefixedString(self.url_)
+ if (self.has_method_):
+ out.putVarInt32(40)
+ out.putVarInt32(self.method_)
+ for i in xrange(len(self.header_)):
+ out.putVarInt32(51)
+ self.header_[i].OutputUnchecked(out)
+ out.putVarInt32(52)
+ if (self.has_body_):
+ out.putVarInt32(74)
+ out.putPrefixedString(self.body_)
+ if (self.has_transaction_):
+ out.putVarInt32(82)
+ out.putVarInt32(self.transaction_.ByteSize())
+ self.transaction_.OutputUnchecked(out)
+
+ def TryMerge(self, d):
+ while d.avail() > 0:
+ tt = d.getVarInt32()
+ if tt == 10:
+ self.set_queue_name(d.getPrefixedString())
+ continue
+ if tt == 18:
+ self.set_task_name(d.getPrefixedString())
+ continue
+ if tt == 24:
+ self.set_eta_usec(d.getVarInt64())
+ continue
+ if tt == 34:
+ self.set_url(d.getPrefixedString())
+ continue
+ if tt == 40:
+ self.set_method(d.getVarInt32())
+ continue
+ if tt == 51:
+ self.add_header().TryMerge(d)
+ continue
+ if tt == 74:
+ self.set_body(d.getPrefixedString())
+ continue
+ if tt == 82:
+ length = d.getVarInt32()
+ tmp = ProtocolBuffer.Decoder(d.buffer(), d.pos(), d.pos() + length)
+ d.skip(length)
+ self.mutable_transaction().TryMerge(tmp)
+ continue
+ if (tt == 0): raise ProtocolBuffer.ProtocolBufferDecodeError
+ d.skipData(tt)
+
+
+ def __str__(self, prefix="", printElemNumber=0):
+ res=""
+ if self.has_queue_name_: res+=prefix+("queue_name: %s\n" % self.DebugFormatString(self.queue_name_))
+ if self.has_task_name_: res+=prefix+("task_name: %s\n" % self.DebugFormatString(self.task_name_))
+ if self.has_eta_usec_: res+=prefix+("eta_usec: %s\n" % self.DebugFormatInt64(self.eta_usec_))
+ if self.has_method_: res+=prefix+("method: %s\n" % self.DebugFormatInt32(self.method_))
+ if self.has_url_: res+=prefix+("url: %s\n" % self.DebugFormatString(self.url_))
+ cnt=0
+ for e in self.header_:
+ elm=""
+ if printElemNumber: elm="(%d)" % cnt
+ res+=prefix+("Header%s {\n" % elm)
+ res+=e.__str__(prefix + " ", printElemNumber)
+ res+=prefix+"}\n"
+ cnt+=1
+ if self.has_body_: res+=prefix+("body: %s\n" % self.DebugFormatString(self.body_))
+ if self.has_transaction_:
+ res+=prefix+"transaction <\n"
+ res+=self.transaction_.__str__(prefix + " ", printElemNumber)
+ res+=prefix+">\n"
+ return res
+
+
+ def _BuildTagLookupTable(sparse, maxtag, default=None):
+ return tuple([sparse.get(i, default) for i in xrange(0, 1+maxtag)])
+
+ kqueue_name = 1
+ ktask_name = 2
+ keta_usec = 3
+ kmethod = 5
+ kurl = 4
+ kHeaderGroup = 6
+ kHeaderkey = 7
+ kHeadervalue = 8
+ kbody = 9
+ ktransaction = 10
+
+ _TEXT = _BuildTagLookupTable({
+ 0: "ErrorCode",
+ 1: "queue_name",
+ 2: "task_name",
+ 3: "eta_usec",
+ 4: "url",
+ 5: "method",
+ 6: "Header",
+ 7: "key",
+ 8: "value",
+ 9: "body",
+ 10: "transaction",
+ }, 10)
+
+ _TYPES = _BuildTagLookupTable({
+ 0: ProtocolBuffer.Encoder.NUMERIC,
+ 1: ProtocolBuffer.Encoder.STRING,
+ 2: ProtocolBuffer.Encoder.STRING,
+ 3: ProtocolBuffer.Encoder.NUMERIC,
+ 4: ProtocolBuffer.Encoder.STRING,
+ 5: ProtocolBuffer.Encoder.NUMERIC,
+ 6: ProtocolBuffer.Encoder.STARTGROUP,
+ 7: ProtocolBuffer.Encoder.STRING,
+ 8: ProtocolBuffer.Encoder.STRING,
+ 9: ProtocolBuffer.Encoder.STRING,
+ 10: ProtocolBuffer.Encoder.STRING,
+ }, 10, ProtocolBuffer.Encoder.MAX_TYPE)
+
+ _STYLE = """"""
+ _STYLE_CONTENT_TYPE = """"""
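+
+# Example (illustrative values; not generated code): every message class in
+# this module follows the accessor pattern shown above. Populating a request
+# using only the accessors defined on TaskQueueAddRequest looks like:
+#
+#   req = TaskQueueAddRequest()
+#   req.set_queue_name('default')    # queue_name, task_name, eta_usec and
+#   req.set_task_name('task1')       # url are required per IsInitialized()
+#   req.set_eta_usec(0)
+#   req.set_url('/work')
+#   header = req.add_header()        # repeated group: Header { key, value }
+#   header.set_key('Host')
+#   header.set_value('example.com')
+#   assert req.IsInitialized([])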
+class TaskQueueAddResponse(ProtocolBuffer.ProtocolMessage):
+ has_chosen_task_name_ = 0
+ chosen_task_name_ = ""
+
+ def __init__(self, contents=None):
+ if contents is not None: self.MergeFromString(contents)
+
+ def chosen_task_name(self): return self.chosen_task_name_
+
+ def set_chosen_task_name(self, x):
+ self.has_chosen_task_name_ = 1
+ self.chosen_task_name_ = x
+
+ def clear_chosen_task_name(self):
+ if self.has_chosen_task_name_:
+ self.has_chosen_task_name_ = 0
+ self.chosen_task_name_ = ""
+
+ def has_chosen_task_name(self): return self.has_chosen_task_name_
+
+
+ def MergeFrom(self, x):
+ assert x is not self
+ if (x.has_chosen_task_name()): self.set_chosen_task_name(x.chosen_task_name())
+
+ def Equals(self, x):
+ if x is self: return 1
+ if self.has_chosen_task_name_ != x.has_chosen_task_name_: return 0
+ if self.has_chosen_task_name_ and self.chosen_task_name_ != x.chosen_task_name_: return 0
+ return 1
+
+ def IsInitialized(self, debug_strs=None):
+ initialized = 1
+ return initialized
+
+ def ByteSize(self):
+ n = 0
+ if (self.has_chosen_task_name_): n += 1 + self.lengthString(len(self.chosen_task_name_))
+ return n + 0
+
+ def Clear(self):
+ self.clear_chosen_task_name()
+
+ def OutputUnchecked(self, out):
+ if (self.has_chosen_task_name_):
+ out.putVarInt32(10)
+ out.putPrefixedString(self.chosen_task_name_)
+
+ def TryMerge(self, d):
+ while d.avail() > 0:
+ tt = d.getVarInt32()
+ if tt == 10:
+ self.set_chosen_task_name(d.getPrefixedString())
+ continue
+ if (tt == 0): raise ProtocolBuffer.ProtocolBufferDecodeError
+ d.skipData(tt)
+
+
+ def __str__(self, prefix="", printElemNumber=0):
+ res=""
+ if self.has_chosen_task_name_: res+=prefix+("chosen_task_name: %s\n" % self.DebugFormatString(self.chosen_task_name_))
+ return res
+
+
+ def _BuildTagLookupTable(sparse, maxtag, default=None):
+ return tuple([sparse.get(i, default) for i in xrange(0, 1+maxtag)])
+
+ kchosen_task_name = 1
+
+ _TEXT = _BuildTagLookupTable({
+ 0: "ErrorCode",
+ 1: "chosen_task_name",
+ }, 1)
+
+ _TYPES = _BuildTagLookupTable({
+ 0: ProtocolBuffer.Encoder.NUMERIC,
+ 1: ProtocolBuffer.Encoder.STRING,
+ }, 1, ProtocolBuffer.Encoder.MAX_TYPE)
+
+ _STYLE = """"""
+ _STYLE_CONTENT_TYPE = """"""
+class TaskQueueUpdateQueueRequest(ProtocolBuffer.ProtocolMessage):
+ has_app_id_ = 0
+ app_id_ = ""
+ has_queue_name_ = 0
+ queue_name_ = ""
+ has_bucket_refill_per_second_ = 0
+ bucket_refill_per_second_ = 0.0
+ has_bucket_capacity_ = 0
+ bucket_capacity_ = 0
+ has_user_specified_rate_ = 0
+ user_specified_rate_ = ""
+
+ def __init__(self, contents=None):
+ if contents is not None: self.MergeFromString(contents)
+
+ def app_id(self): return self.app_id_
+
+ def set_app_id(self, x):
+ self.has_app_id_ = 1
+ self.app_id_ = x
+
+ def clear_app_id(self):
+ if self.has_app_id_:
+ self.has_app_id_ = 0
+ self.app_id_ = ""
+
+ def has_app_id(self): return self.has_app_id_
+
+ def queue_name(self): return self.queue_name_
+
+ def set_queue_name(self, x):
+ self.has_queue_name_ = 1
+ self.queue_name_ = x
+
+ def clear_queue_name(self):
+ if self.has_queue_name_:
+ self.has_queue_name_ = 0
+ self.queue_name_ = ""
+
+ def has_queue_name(self): return self.has_queue_name_
+
+ def bucket_refill_per_second(self): return self.bucket_refill_per_second_
+
+ def set_bucket_refill_per_second(self, x):
+ self.has_bucket_refill_per_second_ = 1
+ self.bucket_refill_per_second_ = x
+
+ def clear_bucket_refill_per_second(self):
+ if self.has_bucket_refill_per_second_:
+ self.has_bucket_refill_per_second_ = 0
+ self.bucket_refill_per_second_ = 0.0
+
+ def has_bucket_refill_per_second(self): return self.has_bucket_refill_per_second_
+
+ def bucket_capacity(self): return self.bucket_capacity_
+
+ def set_bucket_capacity(self, x):
+ self.has_bucket_capacity_ = 1
+ self.bucket_capacity_ = x
+
+ def clear_bucket_capacity(self):
+ if self.has_bucket_capacity_:
+ self.has_bucket_capacity_ = 0
+ self.bucket_capacity_ = 0
+
+ def has_bucket_capacity(self): return self.has_bucket_capacity_
+
+ def user_specified_rate(self): return self.user_specified_rate_
+
+ def set_user_specified_rate(self, x):
+ self.has_user_specified_rate_ = 1
+ self.user_specified_rate_ = x
+
+ def clear_user_specified_rate(self):
+ if self.has_user_specified_rate_:
+ self.has_user_specified_rate_ = 0
+ self.user_specified_rate_ = ""
+
+ def has_user_specified_rate(self): return self.has_user_specified_rate_
+
+
+ def MergeFrom(self, x):
+ assert x is not self
+ if (x.has_app_id()): self.set_app_id(x.app_id())
+ if (x.has_queue_name()): self.set_queue_name(x.queue_name())
+ if (x.has_bucket_refill_per_second()): self.set_bucket_refill_per_second(x.bucket_refill_per_second())
+ if (x.has_bucket_capacity()): self.set_bucket_capacity(x.bucket_capacity())
+ if (x.has_user_specified_rate()): self.set_user_specified_rate(x.user_specified_rate())
+
+ def Equals(self, x):
+ if x is self: return 1
+ if self.has_app_id_ != x.has_app_id_: return 0
+ if self.has_app_id_ and self.app_id_ != x.app_id_: return 0
+ if self.has_queue_name_ != x.has_queue_name_: return 0
+ if self.has_queue_name_ and self.queue_name_ != x.queue_name_: return 0
+ if self.has_bucket_refill_per_second_ != x.has_bucket_refill_per_second_: return 0
+ if self.has_bucket_refill_per_second_ and self.bucket_refill_per_second_ != x.bucket_refill_per_second_: return 0
+ if self.has_bucket_capacity_ != x.has_bucket_capacity_: return 0
+ if self.has_bucket_capacity_ and self.bucket_capacity_ != x.bucket_capacity_: return 0
+ if self.has_user_specified_rate_ != x.has_user_specified_rate_: return 0
+ if self.has_user_specified_rate_ and self.user_specified_rate_ != x.user_specified_rate_: return 0
+ return 1
+
+ def IsInitialized(self, debug_strs=None):
+ initialized = 1
+ if (not self.has_app_id_):
+ initialized = 0
+ if debug_strs is not None:
+ debug_strs.append('Required field: app_id not set.')
+ if (not self.has_queue_name_):
+ initialized = 0
+ if debug_strs is not None:
+ debug_strs.append('Required field: queue_name not set.')
+ if (not self.has_bucket_refill_per_second_):
+ initialized = 0
+ if debug_strs is not None:
+ debug_strs.append('Required field: bucket_refill_per_second not set.')
+ if (not self.has_bucket_capacity_):
+ initialized = 0
+ if debug_strs is not None:
+ debug_strs.append('Required field: bucket_capacity not set.')
+ return initialized
+
+ def ByteSize(self):
+ n = 0
+ n += self.lengthString(len(self.app_id_))
+ n += self.lengthString(len(self.queue_name_))
+ n += self.lengthVarInt64(self.bucket_capacity_)
+ if (self.has_user_specified_rate_): n += 1 + self.lengthString(len(self.user_specified_rate_))
+ return n + 12
+
+ def Clear(self):
+ self.clear_app_id()
+ self.clear_queue_name()
+ self.clear_bucket_refill_per_second()
+ self.clear_bucket_capacity()
+ self.clear_user_specified_rate()
+
+ def OutputUnchecked(self, out):
+ out.putVarInt32(10)
+ out.putPrefixedString(self.app_id_)
+ out.putVarInt32(18)
+ out.putPrefixedString(self.queue_name_)
+ out.putVarInt32(25)
+ out.putDouble(self.bucket_refill_per_second_)
+ out.putVarInt32(32)
+ out.putVarInt32(self.bucket_capacity_)
+ if (self.has_user_specified_rate_):
+ out.putVarInt32(42)
+ out.putPrefixedString(self.user_specified_rate_)
+
+ def TryMerge(self, d):
+ while d.avail() > 0:
+ tt = d.getVarInt32()
+ if tt == 10:
+ self.set_app_id(d.getPrefixedString())
+ continue
+ if tt == 18:
+ self.set_queue_name(d.getPrefixedString())
+ continue
+ if tt == 25:
+ self.set_bucket_refill_per_second(d.getDouble())
+ continue
+ if tt == 32:
+ self.set_bucket_capacity(d.getVarInt32())
+ continue
+ if tt == 42:
+ self.set_user_specified_rate(d.getPrefixedString())
+ continue
+ if (tt == 0): raise ProtocolBuffer.ProtocolBufferDecodeError
+ d.skipData(tt)
+
+
+ def __str__(self, prefix="", printElemNumber=0):
+ res=""
+ if self.has_app_id_: res+=prefix+("app_id: %s\n" % self.DebugFormatString(self.app_id_))
+ if self.has_queue_name_: res+=prefix+("queue_name: %s\n" % self.DebugFormatString(self.queue_name_))
+ if self.has_bucket_refill_per_second_: res+=prefix+("bucket_refill_per_second: %s\n" % self.DebugFormat(self.bucket_refill_per_second_))
+ if self.has_bucket_capacity_: res+=prefix+("bucket_capacity: %s\n" % self.DebugFormatInt32(self.bucket_capacity_))
+ if self.has_user_specified_rate_: res+=prefix+("user_specified_rate: %s\n" % self.DebugFormatString(self.user_specified_rate_))
+ return res
+
+
+ def _BuildTagLookupTable(sparse, maxtag, default=None):
+ return tuple([sparse.get(i, default) for i in xrange(0, 1+maxtag)])
+
+ kapp_id = 1
+ kqueue_name = 2
+ kbucket_refill_per_second = 3
+ kbucket_capacity = 4
+ kuser_specified_rate = 5
+
+ _TEXT = _BuildTagLookupTable({
+ 0: "ErrorCode",
+ 1: "app_id",
+ 2: "queue_name",
+ 3: "bucket_refill_per_second",
+ 4: "bucket_capacity",
+ 5: "user_specified_rate",
+ }, 5)
+
+ _TYPES = _BuildTagLookupTable({
+ 0: ProtocolBuffer.Encoder.NUMERIC,
+ 1: ProtocolBuffer.Encoder.STRING,
+ 2: ProtocolBuffer.Encoder.STRING,
+ 3: ProtocolBuffer.Encoder.DOUBLE,
+ 4: ProtocolBuffer.Encoder.NUMERIC,
+ 5: ProtocolBuffer.Encoder.STRING,
+ }, 5, ProtocolBuffer.Encoder.MAX_TYPE)
+
+ _STYLE = """"""
+ _STYLE_CONTENT_TYPE = """"""
+class TaskQueueUpdateQueueResponse(ProtocolBuffer.ProtocolMessage):
+
+ def __init__(self, contents=None):
+ pass
+ if contents is not None: self.MergeFromString(contents)
+
+
+ def MergeFrom(self, x):
+ assert x is not self
+
+ def Equals(self, x):
+ if x is self: return 1
+ return 1
+
+ def IsInitialized(self, debug_strs=None):
+ initialized = 1
+ return initialized
+
+ def ByteSize(self):
+ n = 0
+ return n + 0
+
+ def Clear(self):
+ pass
+
+ def OutputUnchecked(self, out):
+ pass
+
+ def TryMerge(self, d):
+ while d.avail() > 0:
+ tt = d.getVarInt32()
+ if (tt == 0): raise ProtocolBuffer.ProtocolBufferDecodeError
+ d.skipData(tt)
+
+
+ def __str__(self, prefix="", printElemNumber=0):
+ res=""
+ return res
+
+
+ def _BuildTagLookupTable(sparse, maxtag, default=None):
+ return tuple([sparse.get(i, default) for i in xrange(0, 1+maxtag)])
+
+
+ _TEXT = _BuildTagLookupTable({
+ 0: "ErrorCode",
+ }, 0)
+
+ _TYPES = _BuildTagLookupTable({
+ 0: ProtocolBuffer.Encoder.NUMERIC,
+ }, 0, ProtocolBuffer.Encoder.MAX_TYPE)
+
+ _STYLE = """"""
+ _STYLE_CONTENT_TYPE = """"""
+class TaskQueueFetchQueuesRequest(ProtocolBuffer.ProtocolMessage):
+ has_app_id_ = 0
+ app_id_ = ""
+ has_max_rows_ = 0
+ max_rows_ = 0
+
+ def __init__(self, contents=None):
+ if contents is not None: self.MergeFromString(contents)
+
+ def app_id(self): return self.app_id_
+
+ def set_app_id(self, x):
+ self.has_app_id_ = 1
+ self.app_id_ = x
+
+ def clear_app_id(self):
+ if self.has_app_id_:
+ self.has_app_id_ = 0
+ self.app_id_ = ""
+
+ def has_app_id(self): return self.has_app_id_
+
+ def max_rows(self): return self.max_rows_
+
+ def set_max_rows(self, x):
+ self.has_max_rows_ = 1
+ self.max_rows_ = x
+
+ def clear_max_rows(self):
+ if self.has_max_rows_:
+ self.has_max_rows_ = 0
+ self.max_rows_ = 0
+
+ def has_max_rows(self): return self.has_max_rows_
+
+
+ def MergeFrom(self, x):
+ assert x is not self
+ if (x.has_app_id()): self.set_app_id(x.app_id())
+ if (x.has_max_rows()): self.set_max_rows(x.max_rows())
+
+ def Equals(self, x):
+ if x is self: return 1
+ if self.has_app_id_ != x.has_app_id_: return 0
+ if self.has_app_id_ and self.app_id_ != x.app_id_: return 0
+ if self.has_max_rows_ != x.has_max_rows_: return 0
+ if self.has_max_rows_ and self.max_rows_ != x.max_rows_: return 0
+ return 1
+
+ def IsInitialized(self, debug_strs=None):
+ initialized = 1
+ if (not self.has_app_id_):
+ initialized = 0
+ if debug_strs is not None:
+ debug_strs.append('Required field: app_id not set.')
+ if (not self.has_max_rows_):
+ initialized = 0
+ if debug_strs is not None:
+ debug_strs.append('Required field: max_rows not set.')
+ return initialized
+
+ def ByteSize(self):
+ n = 0
+ n += self.lengthString(len(self.app_id_))
+ n += self.lengthVarInt64(self.max_rows_)
+ return n + 2
+
+ def Clear(self):
+ self.clear_app_id()
+ self.clear_max_rows()
+
+ def OutputUnchecked(self, out):
+ out.putVarInt32(10)
+ out.putPrefixedString(self.app_id_)
+ out.putVarInt32(16)
+ out.putVarInt32(self.max_rows_)
+
+ def TryMerge(self, d):
+ while d.avail() > 0:
+ tt = d.getVarInt32()
+ if tt == 10:
+ self.set_app_id(d.getPrefixedString())
+ continue
+ if tt == 16:
+ self.set_max_rows(d.getVarInt32())
+ continue
+ if (tt == 0): raise ProtocolBuffer.ProtocolBufferDecodeError
+ d.skipData(tt)
+
+
+ def __str__(self, prefix="", printElemNumber=0):
+ res=""
+ if self.has_app_id_: res+=prefix+("app_id: %s\n" % self.DebugFormatString(self.app_id_))
+ if self.has_max_rows_: res+=prefix+("max_rows: %s\n" % self.DebugFormatInt32(self.max_rows_))
+ return res
+
+
+ def _BuildTagLookupTable(sparse, maxtag, default=None):
+ return tuple([sparse.get(i, default) for i in xrange(0, 1+maxtag)])
+
+ kapp_id = 1
+ kmax_rows = 2
+
+ _TEXT = _BuildTagLookupTable({
+ 0: "ErrorCode",
+ 1: "app_id",
+ 2: "max_rows",
+ }, 2)
+
+ _TYPES = _BuildTagLookupTable({
+ 0: ProtocolBuffer.Encoder.NUMERIC,
+ 1: ProtocolBuffer.Encoder.STRING,
+ 2: ProtocolBuffer.Encoder.NUMERIC,
+ }, 2, ProtocolBuffer.Encoder.MAX_TYPE)
+
+ _STYLE = """"""
+ _STYLE_CONTENT_TYPE = """"""
+class TaskQueueFetchQueuesResponse_Queue(ProtocolBuffer.ProtocolMessage):
+ has_queue_name_ = 0
+ queue_name_ = ""
+ has_bucket_refill_per_second_ = 0
+ bucket_refill_per_second_ = 0.0
+ has_bucket_capacity_ = 0
+ bucket_capacity_ = 0.0
+ has_user_specified_rate_ = 0
+ user_specified_rate_ = ""
+
+ def __init__(self, contents=None):
+ if contents is not None: self.MergeFromString(contents)
+
+ def queue_name(self): return self.queue_name_
+
+ def set_queue_name(self, x):
+ self.has_queue_name_ = 1
+ self.queue_name_ = x
+
+ def clear_queue_name(self):
+ if self.has_queue_name_:
+ self.has_queue_name_ = 0
+ self.queue_name_ = ""
+
+ def has_queue_name(self): return self.has_queue_name_
+
+ def bucket_refill_per_second(self): return self.bucket_refill_per_second_
+
+ def set_bucket_refill_per_second(self, x):
+ self.has_bucket_refill_per_second_ = 1
+ self.bucket_refill_per_second_ = x
+
+ def clear_bucket_refill_per_second(self):
+ if self.has_bucket_refill_per_second_:
+ self.has_bucket_refill_per_second_ = 0
+ self.bucket_refill_per_second_ = 0.0
+
+ def has_bucket_refill_per_second(self): return self.has_bucket_refill_per_second_
+
+ def bucket_capacity(self): return self.bucket_capacity_
+
+ def set_bucket_capacity(self, x):
+ self.has_bucket_capacity_ = 1
+ self.bucket_capacity_ = x
+
+ def clear_bucket_capacity(self):
+ if self.has_bucket_capacity_:
+ self.has_bucket_capacity_ = 0
+ self.bucket_capacity_ = 0.0
+
+ def has_bucket_capacity(self): return self.has_bucket_capacity_
+
+ def user_specified_rate(self): return self.user_specified_rate_
+
+ def set_user_specified_rate(self, x):
+ self.has_user_specified_rate_ = 1
+ self.user_specified_rate_ = x
+
+ def clear_user_specified_rate(self):
+ if self.has_user_specified_rate_:
+ self.has_user_specified_rate_ = 0
+ self.user_specified_rate_ = ""
+
+ def has_user_specified_rate(self): return self.has_user_specified_rate_
+
+
+ def MergeFrom(self, x):
+ assert x is not self
+ if (x.has_queue_name()): self.set_queue_name(x.queue_name())
+ if (x.has_bucket_refill_per_second()): self.set_bucket_refill_per_second(x.bucket_refill_per_second())
+ if (x.has_bucket_capacity()): self.set_bucket_capacity(x.bucket_capacity())
+ if (x.has_user_specified_rate()): self.set_user_specified_rate(x.user_specified_rate())
+
+ def Equals(self, x):
+ if x is self: return 1
+ if self.has_queue_name_ != x.has_queue_name_: return 0
+ if self.has_queue_name_ and self.queue_name_ != x.queue_name_: return 0
+ if self.has_bucket_refill_per_second_ != x.has_bucket_refill_per_second_: return 0
+ if self.has_bucket_refill_per_second_ and self.bucket_refill_per_second_ != x.bucket_refill_per_second_: return 0
+ if self.has_bucket_capacity_ != x.has_bucket_capacity_: return 0
+ if self.has_bucket_capacity_ and self.bucket_capacity_ != x.bucket_capacity_: return 0
+ if self.has_user_specified_rate_ != x.has_user_specified_rate_: return 0
+ if self.has_user_specified_rate_ and self.user_specified_rate_ != x.user_specified_rate_: return 0
+ return 1
+
+ def IsInitialized(self, debug_strs=None):
+ initialized = 1
+ if (not self.has_queue_name_):
+ initialized = 0
+ if debug_strs is not None:
+ debug_strs.append('Required field: queue_name not set.')
+ if (not self.has_bucket_refill_per_second_):
+ initialized = 0
+ if debug_strs is not None:
+ debug_strs.append('Required field: bucket_refill_per_second not set.')
+ if (not self.has_bucket_capacity_):
+ initialized = 0
+ if debug_strs is not None:
+ debug_strs.append('Required field: bucket_capacity not set.')
+ return initialized
+
+ def ByteSize(self):
+ n = 0
+ n += self.lengthString(len(self.queue_name_))
+ if (self.has_user_specified_rate_): n += 1 + self.lengthString(len(self.user_specified_rate_))
+ return n + 19
+
+ def Clear(self):
+ self.clear_queue_name()
+ self.clear_bucket_refill_per_second()
+ self.clear_bucket_capacity()
+ self.clear_user_specified_rate()
+
+ def OutputUnchecked(self, out):
+ out.putVarInt32(18)
+ out.putPrefixedString(self.queue_name_)
+ out.putVarInt32(25)
+ out.putDouble(self.bucket_refill_per_second_)
+ out.putVarInt32(33)
+ out.putDouble(self.bucket_capacity_)
+ if (self.has_user_specified_rate_):
+ out.putVarInt32(42)
+ out.putPrefixedString(self.user_specified_rate_)
+
+ def TryMerge(self, d):
+ while 1:
+ tt = d.getVarInt32()
+ if tt == 12: break
+ if tt == 18:
+ self.set_queue_name(d.getPrefixedString())
+ continue
+ if tt == 25:
+ self.set_bucket_refill_per_second(d.getDouble())
+ continue
+ if tt == 33:
+ self.set_bucket_capacity(d.getDouble())
+ continue
+ if tt == 42:
+ self.set_user_specified_rate(d.getPrefixedString())
+ continue
+ if (tt == 0): raise ProtocolBuffer.ProtocolBufferDecodeError
+ d.skipData(tt)
+
+
+ def __str__(self, prefix="", printElemNumber=0):
+ res=""
+ if self.has_queue_name_: res+=prefix+("queue_name: %s\n" % self.DebugFormatString(self.queue_name_))
+ if self.has_bucket_refill_per_second_: res+=prefix+("bucket_refill_per_second: %s\n" % self.DebugFormat(self.bucket_refill_per_second_))
+ if self.has_bucket_capacity_: res+=prefix+("bucket_capacity: %s\n" % self.DebugFormat(self.bucket_capacity_))
+ if self.has_user_specified_rate_: res+=prefix+("user_specified_rate: %s\n" % self.DebugFormatString(self.user_specified_rate_))
+ return res
+
+class TaskQueueFetchQueuesResponse(ProtocolBuffer.ProtocolMessage):
+
+ def __init__(self, contents=None):
+ self.queue_ = []
+ if contents is not None: self.MergeFromString(contents)
+
+ def queue_size(self): return len(self.queue_)
+ def queue_list(self): return self.queue_
+
+ def queue(self, i):
+ return self.queue_[i]
+
+ def mutable_queue(self, i):
+ return self.queue_[i]
+
+ def add_queue(self):
+ x = TaskQueueFetchQueuesResponse_Queue()
+ self.queue_.append(x)
+ return x
+
+ def clear_queue(self):
+ self.queue_ = []
+
+ def MergeFrom(self, x):
+ assert x is not self
+ for i in xrange(x.queue_size()): self.add_queue().CopyFrom(x.queue(i))
+
+ def Equals(self, x):
+ if x is self: return 1
+ if len(self.queue_) != len(x.queue_): return 0
+ for e1, e2 in zip(self.queue_, x.queue_):
+ if e1 != e2: return 0
+ return 1
+
+ def IsInitialized(self, debug_strs=None):
+ initialized = 1
+ for p in self.queue_:
+ if not p.IsInitialized(debug_strs): initialized=0
+ return initialized
+
+ def ByteSize(self):
+ n = 0
+ n += 2 * len(self.queue_)
+ for i in xrange(len(self.queue_)): n += self.queue_[i].ByteSize()
+ return n + 0
+
+ def Clear(self):
+ self.clear_queue()
+
+ def OutputUnchecked(self, out):
+ for i in xrange(len(self.queue_)):
+ out.putVarInt32(11)
+ self.queue_[i].OutputUnchecked(out)
+ out.putVarInt32(12)
+
+ def TryMerge(self, d):
+ while d.avail() > 0:
+ tt = d.getVarInt32()
+ if tt == 11:
+ self.add_queue().TryMerge(d)
+ continue
+ if (tt == 0): raise ProtocolBuffer.ProtocolBufferDecodeError
+ d.skipData(tt)
+
+
+ def __str__(self, prefix="", printElemNumber=0):
+ res=""
+ cnt=0
+ for e in self.queue_:
+ elm=""
+ if printElemNumber: elm="(%d)" % cnt
+ res+=prefix+("Queue%s {\n" % elm)
+ res+=e.__str__(prefix + " ", printElemNumber)
+ res+=prefix+"}\n"
+ cnt+=1
+ return res
+
+
+ def _BuildTagLookupTable(sparse, maxtag, default=None):
+ return tuple([sparse.get(i, default) for i in xrange(0, 1+maxtag)])
+
+ kQueueGroup = 1
+ kQueuequeue_name = 2
+ kQueuebucket_refill_per_second = 3
+ kQueuebucket_capacity = 4
+ kQueueuser_specified_rate = 5
+
+ _TEXT = _BuildTagLookupTable({
+ 0: "ErrorCode",
+ 1: "Queue",
+ 2: "queue_name",
+ 3: "bucket_refill_per_second",
+ 4: "bucket_capacity",
+ 5: "user_specified_rate",
+ }, 5)
+
+ _TYPES = _BuildTagLookupTable({
+ 0: ProtocolBuffer.Encoder.NUMERIC,
+ 1: ProtocolBuffer.Encoder.STARTGROUP,
+ 2: ProtocolBuffer.Encoder.STRING,
+ 3: ProtocolBuffer.Encoder.DOUBLE,
+ 4: ProtocolBuffer.Encoder.DOUBLE,
+ 5: ProtocolBuffer.Encoder.STRING,
+ }, 5, ProtocolBuffer.Encoder.MAX_TYPE)
+
+ _STYLE = """"""
+ _STYLE_CONTENT_TYPE = """"""
+class TaskQueueFetchQueueStatsRequest(ProtocolBuffer.ProtocolMessage):
+ has_app_id_ = 0
+ app_id_ = ""
+ has_max_num_tasks_ = 0
+ max_num_tasks_ = 0
+
+ def __init__(self, contents=None):
+ self.queue_name_ = []
+ if contents is not None: self.MergeFromString(contents)
+
+ def app_id(self): return self.app_id_
+
+ def set_app_id(self, x):
+ self.has_app_id_ = 1
+ self.app_id_ = x
+
+ def clear_app_id(self):
+ if self.has_app_id_:
+ self.has_app_id_ = 0
+ self.app_id_ = ""
+
+ def has_app_id(self): return self.has_app_id_
+
+ def queue_name_size(self): return len(self.queue_name_)
+ def queue_name_list(self): return self.queue_name_
+
+ def queue_name(self, i):
+ return self.queue_name_[i]
+
+ def set_queue_name(self, i, x):
+ self.queue_name_[i] = x
+
+ def add_queue_name(self, x):
+ self.queue_name_.append(x)
+
+ def clear_queue_name(self):
+ self.queue_name_ = []
+
+ def max_num_tasks(self): return self.max_num_tasks_
+
+ def set_max_num_tasks(self, x):
+ self.has_max_num_tasks_ = 1
+ self.max_num_tasks_ = x
+
+ def clear_max_num_tasks(self):
+ if self.has_max_num_tasks_:
+ self.has_max_num_tasks_ = 0
+ self.max_num_tasks_ = 0
+
+ def has_max_num_tasks(self): return self.has_max_num_tasks_
+
+
+ def MergeFrom(self, x):
+ assert x is not self
+ if (x.has_app_id()): self.set_app_id(x.app_id())
+ for i in xrange(x.queue_name_size()): self.add_queue_name(x.queue_name(i))
+ if (x.has_max_num_tasks()): self.set_max_num_tasks(x.max_num_tasks())
+
+ def Equals(self, x):
+ if x is self: return 1
+ if self.has_app_id_ != x.has_app_id_: return 0
+ if self.has_app_id_ and self.app_id_ != x.app_id_: return 0
+ if len(self.queue_name_) != len(x.queue_name_): return 0
+ for e1, e2 in zip(self.queue_name_, x.queue_name_):
+ if e1 != e2: return 0
+ if self.has_max_num_tasks_ != x.has_max_num_tasks_: return 0
+ if self.has_max_num_tasks_ and self.max_num_tasks_ != x.max_num_tasks_: return 0
+ return 1
+
+ def IsInitialized(self, debug_strs=None):
+ initialized = 1
+ if (not self.has_app_id_):
+ initialized = 0
+ if debug_strs is not None:
+ debug_strs.append('Required field: app_id not set.')
+ if (not self.has_max_num_tasks_):
+ initialized = 0
+ if debug_strs is not None:
+ debug_strs.append('Required field: max_num_tasks not set.')
+ return initialized
+
+ def ByteSize(self):
+ n = 0
+ n += self.lengthString(len(self.app_id_))
+ n += 1 * len(self.queue_name_)
+ for i in xrange(len(self.queue_name_)): n += self.lengthString(len(self.queue_name_[i]))
+ n += self.lengthVarInt64(self.max_num_tasks_)
+ return n + 2
+
+ def Clear(self):
+ self.clear_app_id()
+ self.clear_queue_name()
+ self.clear_max_num_tasks()
+
+ def OutputUnchecked(self, out):
+ out.putVarInt32(10)
+ out.putPrefixedString(self.app_id_)
+ for i in xrange(len(self.queue_name_)):
+ out.putVarInt32(18)
+ out.putPrefixedString(self.queue_name_[i])
+ out.putVarInt32(24)
+ out.putVarInt32(self.max_num_tasks_)
+
+ def TryMerge(self, d):
+ while d.avail() > 0:
+ tt = d.getVarInt32()
+ if tt == 10:
+ self.set_app_id(d.getPrefixedString())
+ continue
+ if tt == 18:
+ self.add_queue_name(d.getPrefixedString())
+ continue
+ if tt == 24:
+ self.set_max_num_tasks(d.getVarInt32())
+ continue
+ if (tt == 0): raise ProtocolBuffer.ProtocolBufferDecodeError
+ d.skipData(tt)
+
+
+ def __str__(self, prefix="", printElemNumber=0):
+ res=""
+ if self.has_app_id_: res+=prefix+("app_id: %s\n" % self.DebugFormatString(self.app_id_))
+ cnt=0
+ for e in self.queue_name_:
+ elm=""
+ if printElemNumber: elm="(%d)" % cnt
+ res+=prefix+("queue_name%s: %s\n" % (elm, self.DebugFormatString(e)))
+ cnt+=1
+ if self.has_max_num_tasks_: res+=prefix+("max_num_tasks: %s\n" % self.DebugFormatInt32(self.max_num_tasks_))
+ return res
+
+
+ def _BuildTagLookupTable(sparse, maxtag, default=None):
+ return tuple([sparse.get(i, default) for i in xrange(0, 1+maxtag)])
+
+ kapp_id = 1
+ kqueue_name = 2
+ kmax_num_tasks = 3
+
+ _TEXT = _BuildTagLookupTable({
+ 0: "ErrorCode",
+ 1: "app_id",
+ 2: "queue_name",
+ 3: "max_num_tasks",
+ }, 3)
+
+ _TYPES = _BuildTagLookupTable({
+ 0: ProtocolBuffer.Encoder.NUMERIC,
+ 1: ProtocolBuffer.Encoder.STRING,
+ 2: ProtocolBuffer.Encoder.STRING,
+ 3: ProtocolBuffer.Encoder.NUMERIC,
+ }, 3, ProtocolBuffer.Encoder.MAX_TYPE)
+
+ _STYLE = """"""
+ _STYLE_CONTENT_TYPE = """"""
+class TaskQueueFetchQueueStatsResponse_QueueStats(ProtocolBuffer.ProtocolMessage):
+ has_num_tasks_ = 0
+ num_tasks_ = 0
+ has_oldest_eta_usec_ = 0
+ oldest_eta_usec_ = 0
+
+ def __init__(self, contents=None):
+ if contents is not None: self.MergeFromString(contents)
+
+ def num_tasks(self): return self.num_tasks_
+
+ def set_num_tasks(self, x):
+ self.has_num_tasks_ = 1
+ self.num_tasks_ = x
+
+ def clear_num_tasks(self):
+ if self.has_num_tasks_:
+ self.has_num_tasks_ = 0
+ self.num_tasks_ = 0
+
+ def has_num_tasks(self): return self.has_num_tasks_
+
+ def oldest_eta_usec(self): return self.oldest_eta_usec_
+
+ def set_oldest_eta_usec(self, x):
+ self.has_oldest_eta_usec_ = 1
+ self.oldest_eta_usec_ = x
+
+ def clear_oldest_eta_usec(self):
+ if self.has_oldest_eta_usec_:
+ self.has_oldest_eta_usec_ = 0
+ self.oldest_eta_usec_ = 0
+
+ def has_oldest_eta_usec(self): return self.has_oldest_eta_usec_
+
+
+ def MergeFrom(self, x):
+ assert x is not self
+ if (x.has_num_tasks()): self.set_num_tasks(x.num_tasks())
+ if (x.has_oldest_eta_usec()): self.set_oldest_eta_usec(x.oldest_eta_usec())
+
+ def Equals(self, x):
+ if x is self: return 1
+ if self.has_num_tasks_ != x.has_num_tasks_: return 0
+ if self.has_num_tasks_ and self.num_tasks_ != x.num_tasks_: return 0
+ if self.has_oldest_eta_usec_ != x.has_oldest_eta_usec_: return 0
+ if self.has_oldest_eta_usec_ and self.oldest_eta_usec_ != x.oldest_eta_usec_: return 0
+ return 1
+
+ def IsInitialized(self, debug_strs=None):
+ initialized = 1
+ if (not self.has_num_tasks_):
+ initialized = 0
+ if debug_strs is not None:
+ debug_strs.append('Required field: num_tasks not set.')
+ if (not self.has_oldest_eta_usec_):
+ initialized = 0
+ if debug_strs is not None:
+ debug_strs.append('Required field: oldest_eta_usec not set.')
+ return initialized
+
+ def ByteSize(self):
+ n = 0
+ n += self.lengthVarInt64(self.num_tasks_)
+ n += self.lengthVarInt64(self.oldest_eta_usec_)
+ return n + 2
+
+ def Clear(self):
+ self.clear_num_tasks()
+ self.clear_oldest_eta_usec()
+
+ def OutputUnchecked(self, out):
+ out.putVarInt32(16)
+ out.putVarInt32(self.num_tasks_)
+ out.putVarInt32(24)
+ out.putVarInt64(self.oldest_eta_usec_)
+
+ def TryMerge(self, d):
+ while 1:
+ tt = d.getVarInt32()
+ if tt == 12: break
+ if tt == 16:
+ self.set_num_tasks(d.getVarInt32())
+ continue
+ if tt == 24:
+ self.set_oldest_eta_usec(d.getVarInt64())
+ continue
+ if (tt == 0): raise ProtocolBuffer.ProtocolBufferDecodeError
+ d.skipData(tt)
+
+
+ def __str__(self, prefix="", printElemNumber=0):
+ res=""
+ if self.has_num_tasks_: res+=prefix+("num_tasks: %s\n" % self.DebugFormatInt32(self.num_tasks_))
+ if self.has_oldest_eta_usec_: res+=prefix+("oldest_eta_usec: %s\n" % self.DebugFormatInt64(self.oldest_eta_usec_))
+ return res
+
+class TaskQueueFetchQueueStatsResponse(ProtocolBuffer.ProtocolMessage):
+
+ def __init__(self, contents=None):
+ self.queuestats_ = []
+ if contents is not None: self.MergeFromString(contents)
+
+ def queuestats_size(self): return len(self.queuestats_)
+ def queuestats_list(self): return self.queuestats_
+
+ def queuestats(self, i):
+ return self.queuestats_[i]
+
+ def mutable_queuestats(self, i):
+ return self.queuestats_[i]
+
+ def add_queuestats(self):
+ x = TaskQueueFetchQueueStatsResponse_QueueStats()
+ self.queuestats_.append(x)
+ return x
+
+ def clear_queuestats(self):
+ self.queuestats_ = []
+
+ def MergeFrom(self, x):
+ assert x is not self
+ for i in xrange(x.queuestats_size()): self.add_queuestats().CopyFrom(x.queuestats(i))
+
+ def Equals(self, x):
+ if x is self: return 1
+ if len(self.queuestats_) != len(x.queuestats_): return 0
+ for e1, e2 in zip(self.queuestats_, x.queuestats_):
+ if e1 != e2: return 0
+ return 1
+
+ def IsInitialized(self, debug_strs=None):
+ initialized = 1
+ for p in self.queuestats_:
+ if not p.IsInitialized(debug_strs): initialized=0
+ return initialized
+
+ def ByteSize(self):
+ n = 0
+ n += 2 * len(self.queuestats_)
+ for i in xrange(len(self.queuestats_)): n += self.queuestats_[i].ByteSize()
+ return n + 0
+
+ def Clear(self):
+ self.clear_queuestats()
+
+ def OutputUnchecked(self, out):
+ for i in xrange(len(self.queuestats_)):
+ out.putVarInt32(11)
+ self.queuestats_[i].OutputUnchecked(out)
+ out.putVarInt32(12)
+
+ def TryMerge(self, d):
+ while d.avail() > 0:
+ tt = d.getVarInt32()
+ if tt == 11:
+ self.add_queuestats().TryMerge(d)
+ continue
+ if (tt == 0): raise ProtocolBuffer.ProtocolBufferDecodeError
+ d.skipData(tt)
+
+
+ def __str__(self, prefix="", printElemNumber=0):
+ res=""
+ cnt=0
+ for e in self.queuestats_:
+ elm=""
+ if printElemNumber: elm="(%d)" % cnt
+ res+=prefix+("QueueStats%s {\n" % elm)
+ res+=e.__str__(prefix + " ", printElemNumber)
+ res+=prefix+"}\n"
+ cnt+=1
+ return res
+
+
+ def _BuildTagLookupTable(sparse, maxtag, default=None):
+ return tuple([sparse.get(i, default) for i in xrange(0, 1+maxtag)])
+
+ kQueueStatsGroup = 1
+ kQueueStatsnum_tasks = 2
+ kQueueStatsoldest_eta_usec = 3
+
+ _TEXT = _BuildTagLookupTable({
+ 0: "ErrorCode",
+ 1: "QueueStats",
+ 2: "num_tasks",
+ 3: "oldest_eta_usec",
+ }, 3)
+
+ _TYPES = _BuildTagLookupTable({
+ 0: ProtocolBuffer.Encoder.NUMERIC,
+ 1: ProtocolBuffer.Encoder.STARTGROUP,
+ 2: ProtocolBuffer.Encoder.NUMERIC,
+ 3: ProtocolBuffer.Encoder.NUMERIC,
+ }, 3, ProtocolBuffer.Encoder.MAX_TYPE)
+
+ _STYLE = """"""
+ _STYLE_CONTENT_TYPE = """"""
+
+__all__ = ['TaskQueueServiceError','TaskQueueAddRequest','TaskQueueAddRequest_Header','TaskQueueAddResponse','TaskQueueUpdateQueueRequest','TaskQueueUpdateQueueResponse','TaskQueueFetchQueuesRequest','TaskQueueFetchQueuesResponse','TaskQueueFetchQueuesResponse_Queue','TaskQueueFetchQueueStatsRequest','TaskQueueFetchQueueStatsResponse','TaskQueueFetchQueueStatsResponse_QueueStats']
diff --git a/google_appengine/google/appengine/api/labs/taskqueue/taskqueue_service_pb.pyc b/google_appengine/google/appengine/api/labs/taskqueue/taskqueue_service_pb.pyc
new file mode 100644
index 0000000..e0c961a
--- /dev/null
+++ b/google_appengine/google/appengine/api/labs/taskqueue/taskqueue_service_pb.pyc
Binary files differ
diff --git a/google_appengine/google/appengine/api/labs/taskqueue/taskqueue_stub.py b/google_appengine/google/appengine/api/labs/taskqueue/taskqueue_stub.py
new file mode 100755
index 0000000..dfe5e16
--- /dev/null
+++ b/google_appengine/google/appengine/api/labs/taskqueue/taskqueue_stub.py
@@ -0,0 +1,327 @@
+#!/usr/bin/env python
+#
+# Copyright 2007 Google Inc.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
+
+"""Stub version of the Task Queue API.
+
+This stub only stores tasks; it doesn't actually run them. It also validates
+the tasks by checking their queue name against the application's queue.yaml.
+
+As well as implementing Task Queue API functions, the stub exposes various other
+functions that are used by the dev_appserver's admin console to display the
+application's queues and tasks.
+"""
+
+
+
+import base64
+import datetime
+import os
+
+import taskqueue_service_pb
+
+from google.appengine.api import apiproxy_stub
+from google.appengine.api import queueinfo
+from google.appengine.api import urlfetch
+from google.appengine.runtime import apiproxy_errors
+
+
+DEFAULT_RATE = '5.00/s'
+
+DEFAULT_BUCKET_SIZE = 5
+
+MAX_ETA_DELTA_DAYS = 30
+
+
+def _ParseQueueYaml(unused_self, root_path):
+ """Loads the queue.yaml file and parses it.
+
+ Args:
+ unused_self: Allows this function to be bound to a class member. Not used.
+    root_path: Directory containing queue.yaml.
+
+ Returns:
+    None if queue.yaml doesn't exist, otherwise a queueinfo.QueueEntry object
+    populated from the queue.yaml.
+ """
+ if root_path is None:
+ return None
+ for queueyaml in ('queue.yaml', 'queue.yml'):
+ try:
+ fh = open(os.path.join(root_path, queueyaml), 'r')
+ except IOError:
+ continue
+ try:
+ queue_info = queueinfo.LoadSingleQueue(fh)
+ return queue_info
+ finally:
+ fh.close()
+ return None
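+
+# Illustrative queue.yaml shape accepted by the parser above (attribute
+# names match those read by GetQueues() below; the queue name and rate are
+# example values):
+#
+#   queue:
+#   - name: update-queue
+#     rate: 1/s
+#     bucket_size: 5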
+
+
+def _CompareTasksByEta(a, b):
+ """Python sort comparator for tasks by estimated time of arrival (ETA).
+
+ Args:
+ a: A taskqueue_service_pb.TaskQueueAddRequest.
+ b: A taskqueue_service_pb.TaskQueueAddRequest.
+
+ Returns:
+ Standard 1/0/-1 comparison result.
+ """
+ if a.eta_usec() > b.eta_usec():
+ return 1
+ if a.eta_usec() < b.eta_usec():
+ return -1
+ return 0
+
+
+def _FormatEta(eta_usec):
+ """Formats a task ETA as a date string in UTC."""
+  eta = datetime.datetime.utcfromtimestamp(eta_usec/1000000)
+ return eta.strftime('%Y/%m/%d %H:%M:%S')
+
+
+def _EtaDelta(eta_usec):
+ """Formats a task ETA as a relative time string."""
+  eta = datetime.datetime.utcfromtimestamp(eta_usec/1000000)
+ now = datetime.datetime.utcnow()
+ if eta > now:
+ return str(eta - now) + ' from now'
+ else:
+ return str(now - eta) + ' ago'
+
+
+class TaskQueueServiceStub(apiproxy_stub.APIProxyStub):
+ """Python only task queue service stub.
+
+ This stub does not attempt to automatically execute tasks. Instead, it
+ stores them for display on a console. The user may manually execute the
+ tasks from the console.
+ """
+
+ queue_yaml_parser = _ParseQueueYaml
+
+ def __init__(self, service_name='taskqueue', root_path=None):
+ """Constructor.
+
+ Args:
+ service_name: Service name expected for all calls.
+ root_path: Root path to the directory of the application which may contain
+ a queue.yaml file. If None, then it's assumed no queue.yaml file is
+ available.
+ """
+ super(TaskQueueServiceStub, self).__init__(service_name)
+ self._taskqueues = {}
+ self._next_task_id = 1
+ self._root_path = root_path
+
+ def _Dynamic_Add(self, request, response):
+ """Local implementation of the Add RPC in TaskQueueService.
+
+ Must adhere to the '_Dynamic_' naming convention for stubbing to work.
+ See taskqueue_service.proto for a full description of the RPC.
+
+ Args:
+ request: A taskqueue_service_pb.TaskQueueAddRequest.
+ response: A taskqueue_service_pb.TaskQueueAddResponse.
+ """
+ if request.eta_usec() < 0:
+ raise apiproxy_errors.ApplicationError(
+ taskqueue_service_pb.TaskQueueServiceError.INVALID_ETA)
+
+ eta = datetime.datetime.utcfromtimestamp(request.eta_usec() / 1e6)
+ max_eta = (datetime.datetime.utcnow() +
+ datetime.timedelta(days=MAX_ETA_DELTA_DAYS))
+ if eta > max_eta:
+ raise apiproxy_errors.ApplicationError(
+ taskqueue_service_pb.TaskQueueServiceError.INVALID_ETA)
+
+ if not self._IsValidQueue(request.queue_name()):
+ raise apiproxy_errors.ApplicationError(
+ taskqueue_service_pb.TaskQueueServiceError.UNKNOWN_QUEUE)
+
+ if not request.task_name():
+ request.set_task_name('task%d' % self._next_task_id)
+ response.set_chosen_task_name(request.task_name())
+ self._next_task_id += 1
+
+ tasks = self._taskqueues.setdefault(request.queue_name(), [])
+ for task in tasks:
+ if task.task_name() == request.task_name():
+ raise apiproxy_errors.ApplicationError(
+ taskqueue_service_pb.TaskQueueServiceError.TASK_ALREADY_EXISTS)
+ tasks.append(request)
+ tasks.sort(_CompareTasksByEta)
+
+ def _IsValidQueue(self, queue_name):
+ """Determines whether a queue is valid, i.e. tasks can be added to it.
+
+ Valid queues are the 'default' queue, plus any queues in the queue.yaml
+ file.
+
+ Args:
+ queue_name: the name of the queue to validate.
+
+ Returns:
+ True iff queue is valid.
+ """
+ if queue_name == 'default':
+ return True
+ queue_info = self.queue_yaml_parser(self._root_path)
+ if queue_info and queue_info.queue:
+ for entry in queue_info.queue:
+ if entry.name == queue_name:
+ return True
+ return False
+
+ def GetQueues(self):
+    """Gets all of the application's queues.
+
+ Returns:
+ A list of dictionaries, where each dictionary contains one queue's
+ attributes. E.g.:
+ [{'name': 'some-queue',
+ 'max_rate': '1/s',
+ 'bucket_size': 5,
+ 'oldest_task': '2009/02/02 05:37:42',
+ 'eta_delta': '0:00:06.342511 ago',
+ 'tasks_in_queue': 12}, ...]
+ """
+ queues = []
+ queue_info = self.queue_yaml_parser(self._root_path)
+ has_default = False
+ if queue_info and queue_info.queue:
+ for entry in queue_info.queue:
+ if entry.name == 'default':
+ has_default = True
+ queue = {}
+ queues.append(queue)
+ queue['name'] = entry.name
+ queue['max_rate'] = entry.rate
+ if entry.bucket_size:
+ queue['bucket_size'] = entry.bucket_size
+ else:
+ queue['bucket_size'] = DEFAULT_BUCKET_SIZE
+
+ tasks = self._taskqueues.setdefault(entry.name, [])
+ if tasks:
+ queue['oldest_task'] = _FormatEta(tasks[0].eta_usec())
+ queue['eta_delta'] = _EtaDelta(tasks[0].eta_usec())
+ else:
+ queue['oldest_task'] = ''
+ queue['tasks_in_queue'] = len(tasks)
+
+ if not has_default:
+ queue = {}
+ queues.append(queue)
+ queue['name'] = 'default'
+ queue['max_rate'] = DEFAULT_RATE
+ queue['bucket_size'] = DEFAULT_BUCKET_SIZE
+
+ tasks = self._taskqueues.get('default', [])
+ if tasks:
+ queue['oldest_task'] = _FormatEta(tasks[0].eta_usec())
+ queue['eta_delta'] = _EtaDelta(tasks[0].eta_usec())
+ else:
+ queue['oldest_task'] = ''
+ queue['tasks_in_queue'] = len(tasks)
+ return queues
+
+ def GetTasks(self, queue_name):
+ """Gets a queue's tasks.
+
+ Args:
+      queue_name: The name of the queue to return tasks for.
+
+ Returns:
+ A list of dictionaries, where each dictionary contains one task's
+ attributes. E.g.
+ [{'name': 'task-123',
+ 'url': '/update',
+ 'method': 'GET',
+ 'eta': '2009/02/02 05:37:42',
+ 'eta_delta': '0:00:06.342511 ago',
+ 'body': '',
+ 'headers': {'X-AppEngine-QueueName': 'update-queue',
+ 'X-AppEngine-TaskName': 'task-123',
+ 'X-AppEngine-TaskRetryCount': '0',
+ 'X-AppEngine-Development-Payload': '1',
+ 'Content-Length': 0,
+                      'Content-Type': 'application/octet-stream'}, ...]
+
+ Raises:
+ ValueError: A task request contains an unknown HTTP method type.
+ """
+ tasks = self._taskqueues.get(queue_name, [])
+ result_tasks = []
+ for task_request in tasks:
+ task = {}
+ result_tasks.append(task)
+ task['name'] = task_request.task_name()
+ task['url'] = task_request.url()
+ method = task_request.method()
+ if method == taskqueue_service_pb.TaskQueueAddRequest.GET:
+ task['method'] = 'GET'
+ elif method == taskqueue_service_pb.TaskQueueAddRequest.POST:
+ task['method'] = 'POST'
+ elif method == taskqueue_service_pb.TaskQueueAddRequest.HEAD:
+ task['method'] = 'HEAD'
+ elif method == taskqueue_service_pb.TaskQueueAddRequest.PUT:
+ task['method'] = 'PUT'
+ elif method == taskqueue_service_pb.TaskQueueAddRequest.DELETE:
+ task['method'] = 'DELETE'
+ else:
+ raise ValueError('Unexpected method: %d' % method)
+
+ task['eta'] = _FormatEta(task_request.eta_usec())
+ task['eta_delta'] = _EtaDelta(task_request.eta_usec())
+ task['body'] = base64.b64encode(task_request.body())
+ headers = urlfetch._CaselessDict()
+ task['headers'] = headers
+ for req_header in task_request.header_list():
+ headers[req_header.key()] = req_header.value()
+
+ headers['X-AppEngine-QueueName'] = queue_name
+ headers['X-AppEngine-TaskName'] = task['name']
+ headers['X-AppEngine-TaskRetryCount'] = '0'
+ headers['X-AppEngine-Development-Payload'] = '1'
+ headers['Content-Length'] = len(task['body'])
+ headers['Content-Type'] = headers.get(
+ 'Content-Type', 'application/octet-stream')
+
+ return result_tasks
+
+ def DeleteTask(self, queue_name, task_name):
+ """Deletes a task from a queue.
+
+ Args:
+ queue_name: the name of the queue to delete the task from.
+ task_name: the name of the task to delete.
+ """
+ tasks = self._taskqueues.get(queue_name, [])
+ for task in tasks:
+ if task.task_name() == task_name:
+ tasks.remove(task)
+ return
+
+ def FlushQueue(self, queue_name):
+ """Removes all tasks from a queue.
+
+ Args:
+ queue_name: the name of the queue to remove tasks from.
+ """
+ self._taskqueues[queue_name] = []
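+
+
+if __name__ == '__main__':
+  # Minimal smoke-test sketch, not part of the SDK proper; the queue name,
+  # URL and printed text are illustrative.
+  from google.appengine.api import apiproxy_stub_map
+  apiproxy_stub_map.apiproxy.RegisterStub(
+      'taskqueue', TaskQueueServiceStub(root_path=None))
+  add_request = taskqueue_service_pb.TaskQueueAddRequest()
+  add_request.set_queue_name('default')
+  add_request.set_task_name('')
+  add_request.set_eta_usec(0)
+  add_request.set_url('/work')
+  add_response = taskqueue_service_pb.TaskQueueAddResponse()
+  apiproxy_stub_map.MakeSyncCall('taskqueue', 'Add', add_request, add_response)
+  print 'Added task: %s' % add_response.chosen_task_name()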
diff --git a/google_appengine/google/appengine/api/labs/taskqueue/taskqueue_stub.pyc b/google_appengine/google/appengine/api/labs/taskqueue/taskqueue_stub.pyc
new file mode 100644
index 0000000..d23fecb
--- /dev/null
+++ b/google_appengine/google/appengine/api/labs/taskqueue/taskqueue_stub.pyc
Binary files differ
diff --git a/google_appengine/google/appengine/api/mail.py b/google_appengine/google/appengine/api/mail.py
new file mode 100755
index 0000000..ca6df88
--- /dev/null
+++ b/google_appengine/google/appengine/api/mail.py
@@ -0,0 +1,1127 @@
+#!/usr/bin/env python
+#
+# Copyright 2007 Google Inc.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
+
+"""Sends email on behalf of application.
+
+Provides functions for application developers to provide email services
+for their applications. Also provides a few utility methods.
+"""
+
+
+
+
+
+
+import email
+from email import MIMEBase
+from email import MIMEMultipart
+from email import MIMEText
+import logging
+
+from google.appengine.api import api_base_pb
+from google.appengine.api import apiproxy_stub_map
+from google.appengine.api import mail_service_pb
+from google.appengine.api import users
+from google.appengine.api.mail_errors import *
+from google.appengine.runtime import apiproxy_errors
+
+
+
+ERROR_MAP = {
+ mail_service_pb.MailServiceError.BAD_REQUEST:
+ BadRequestError,
+
+ mail_service_pb.MailServiceError.UNAUTHORIZED_SENDER:
+ InvalidSenderError,
+
+ mail_service_pb.MailServiceError.INVALID_ATTACHMENT_TYPE:
+ InvalidAttachmentTypeError,
+}
+
+
+EXTENSION_MIME_MAP = {
+ 'asc': 'text/plain',
+ 'bmp': 'image/x-ms-bmp',
+ 'css': 'text/css',
+ 'csv': 'text/csv',
+ 'diff': 'text/plain',
+ 'gif': 'image/gif',
+ 'htm': 'text/html',
+ 'html': 'text/html',
+ 'ics': 'text/calendar',
+ 'jpe': 'image/jpeg',
+ 'jpeg': 'image/jpeg',
+ 'jpg': 'image/jpeg',
+ 'pdf': 'application/pdf',
+ 'png': 'image/png',
+ 'pot': 'text/plain',
+ 'rss': 'text/rss+xml',
+ 'text': 'text/plain',
+ 'tif': 'image/tiff',
+ 'tiff': 'image/tiff',
+ 'txt': 'text/plain',
+ 'vcf': 'text/directory',
+ 'wbmp': 'image/vnd.wap.wbmp',
+ }
+
+EXTENSION_WHITELIST = frozenset(EXTENSION_MIME_MAP.iterkeys())
+
+
+def invalid_email_reason(email_address, field):
+ """Determine reason why email is invalid.
+
+ Args:
+ email_address: Email to check.
+ field: Field that is invalid.
+
+ Returns:
+ String indicating invalid email reason if there is one,
+ else None.
+ """
+ if email_address is None:
+ return 'None email address for %s.' % field
+
+ if isinstance(email_address, users.User):
+ email_address = email_address.email()
+ if not isinstance(email_address, basestring):
+ return 'Invalid email address type for %s.' % field
+ stripped_address = email_address.strip()
+ if not stripped_address:
+ return 'Empty email address for %s.' % field
+ return None
+
+InvalidEmailReason = invalid_email_reason
+
+
+def is_email_valid(email_address):
+  """Determine if email is valid.
+
+ Args:
+ email_address: Email to check.
+
+ Returns:
+ True if email is valid, else False.
+ """
+ return invalid_email_reason(email_address, '') is None
+
+IsEmailValid = is_email_valid
+
+
+def check_email_valid(email_address, field):
+ """Check that email is valid.
+
+ Args:
+ email_address: Email to check.
+ field: Field to check.
+
+ Raises:
+ InvalidEmailError if email_address is invalid.
+ """
+ reason = invalid_email_reason(email_address, field)
+ if reason is not None:
+ raise InvalidEmailError(reason)
+
+CheckEmailValid = check_email_valid
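+
+
+# Illustrative behavior of the validators above (addresses are examples):
+#
+#   is_email_valid('alice@example.com')   # True
+#   is_email_valid('   ')                 # False: empty after stripping
+#   check_email_valid(None, 'sender')     # raises InvalidEmailError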
+
+
+def _email_check_and_list(emails, field):
+  """Validate email addresses and return them as a sequence.
+
+  Args:
+    emails: Single email or list of emails.
+    field: Field name to use in validation error messages.
+
+ Returns:
+ Sequence of email addresses.
+
+ Raises:
+ InvalidEmailError if any email addresses are invalid.
+ """
+  if isinstance(emails, basestring):
+    check_email_valid(emails, field)
+  else:
+    for address in iter(emails):
+      check_email_valid(address, field)
+  return _email_sequence(emails)
+
+
+def _email_sequence(emails):
+ """Forces email to be sequenceable type.
+
+ Iterable values are returned as is. This function really just wraps the case
+ where there is a single email string.
+
+ Args:
+ emails: Emails (or email) to coerce to sequence.
+
+ Returns:
+ Single tuple with email in it if only one email string provided,
+ else returns emails as is.
+ """
+ if isinstance(emails, basestring):
+ return emails,
+ return emails
+
+
+def _attachment_sequence(attachments):
+ """Forces attachments to be sequenceable type.
+
+ Iterable values are returned as is. This function really just wraps the case
+ where there is a single attachment.
+
+ Args:
+ attachments: Attachments (or attachment) to coerce to sequence.
+
+ Returns:
+ Single tuple with attachment tuple in it if only one attachment provided,
+ else returns attachments as is.
+ """
+ if len(attachments) == 2 and isinstance(attachments[0], basestring):
+ return attachments,
+ return attachments
+
+
+def _parse_mime_message(mime_message):
+ """Helper function converts a mime_message in to email.Message.Message.
+
+ Args:
+ mime_message: MIME Message, string or file containing mime message.
+
+ Returns:
+ Instance of email.Message.Message. Will return mime_message if already
+ an instance.
+ """
+ if isinstance(mime_message, email.Message.Message):
+ return mime_message
+ elif isinstance(mime_message, basestring):
+ return email.message_from_string(mime_message)
+ else:
+ return email.message_from_file(mime_message)
+
+
+def send_mail(sender,
+ to,
+ subject,
+ body,
+ make_sync_call=apiproxy_stub_map.MakeSyncCall,
+ **kw):
+ """Sends mail on behalf of application.
+
+ Args:
+    sender: Sender email address as it appears in the 'from' email line.
+ to: List of 'to' addresses or a single address.
+ subject: Message subject string.
+ body: Body of type text/plain.
+ make_sync_call: Function used to make sync call to API proxy.
+ kw: Keyword arguments compatible with EmailMessage keyword based
+ constructor.
+
+ Raises:
+ InvalidEmailError when invalid email address provided.
+ """
+ kw['sender'] = sender
+ kw['to'] = to
+ kw['subject'] = subject
+ kw['body'] = body
+ message = EmailMessage(**kw)
+ message.send(make_sync_call)
+
+SendMail = send_mail
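+
+
+# Minimal usage sketch (addresses, subject and body are illustrative):
+#
+#   send_mail(sender='admin@example.com',
+#             to='user@example.com',
+#             subject='Hello',
+#             body='Plain text body.')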
+
+
+def send_mail_to_admins(sender,
+ subject,
+ body,
+ make_sync_call=apiproxy_stub_map.MakeSyncCall,
+ **kw):
+ """Sends mail to admins on behalf of application.
+
+ Args:
+ sender: Sender email address as appears in the 'from' email line.
+ subject: Message subject string.
+ body: Body of type text/plain.
+ make_sync_call: Function used to make sync call to API proxy.
+ kw: Keyword arguments compatible with EmailMessage keyword based
+ constructor.
+
+ Raises:
+ InvalidEmailError when invalid email address provided.
+ """
+ kw['sender'] = sender
+ kw['subject'] = subject
+ kw['body'] = body
+ message = AdminEmailMessage(**kw)
+ message.send(make_sync_call)
+
+SendMailToAdmins = send_mail_to_admins
+
+
+def _GetMimeType(file_name):
+ """Determine mime-type from file name.
+
+ Parses file name and determines mime-type based on extension map.
+
+ This method is not part of the public API and should not be used by
+ applications.
+
+ Args:
+ file_name: File to determine extension for.
+
+ Returns:
+ Mime-type associated with file extension.
+
+ Raises:
+    InvalidAttachmentTypeError when the file name of an attachment has no
+      extension or the extension is not supported.
+ """
+ extension_index = file_name.rfind('.')
+ if extension_index == -1:
+ raise InvalidAttachmentTypeError(
+ "File '%s' does not have an extension" % file_name)
+ extension = file_name[extension_index + 1:]
+ mime_type = EXTENSION_MIME_MAP.get(extension, None)
+ if mime_type is None:
+ raise InvalidAttachmentTypeError(
+ "Extension '%s' is not supported." % extension)
+ return mime_type
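+
+
+# A minimal sketch (illustrative only, not part of the public API).  The
+# exact contents of EXTENSION_MIME_MAP are defined earlier in this module;
+# the 'jpg' -> 'image/jpeg' mapping is an assumption here.
+def _example_get_mime_type():
+  assert _GetMimeType('photo.jpg') == 'image/jpeg'
+  try:
+    _GetMimeType('no_extension')
+  except InvalidAttachmentTypeError:
+    pass  # Raised because the file name has no extension.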
+
+
+def mail_message_to_mime_message(protocol_message):
+ """Generate a MIMEMultitype message from protocol buffer.
+
+ Generates a complete MIME multi-part email object from a MailMessage
+ protocol buffer. The body fields are sent as individual alternatives
+ if they are both present, otherwise, only one body part is sent.
+
+ Multiple entry email fields such as 'To', 'Cc' and 'Bcc' are converted
+ to a list of comma separated email addresses.
+
+ Args:
+ protocol_message: Message PB to convert to MIMEMultitype.
+
+ Returns:
+ MIMEMultitype representing the provided MailMessage.
+
+ Raises:
+    InvalidAttachmentTypeError when the file name of an attachment has no
+      extension or the extension is not supported.
+ """
+ parts = []
+ if protocol_message.has_textbody():
+ parts.append(MIMEText.MIMEText(protocol_message.textbody()))
+ if protocol_message.has_htmlbody():
+ parts.append(MIMEText.MIMEText(protocol_message.htmlbody(),
+ _subtype='html'))
+
+ if len(parts) == 1:
+ payload = parts
+ else:
+ payload = [MIMEMultipart.MIMEMultipart('alternative', _subparts=parts)]
+
+ result = MIMEMultipart.MIMEMultipart(_subparts=payload)
+ for attachment in protocol_message.attachment_list():
+ file_name = attachment.filename()
+ mime_type = _GetMimeType(file_name)
+ maintype, subtype = mime_type.split('/')
+ mime_attachment = MIMEBase.MIMEBase(maintype, subtype)
+ mime_attachment.add_header('Content-Disposition',
+ 'attachment',
+ filename=attachment.filename())
+ mime_attachment.set_payload(attachment.data())
+ result.attach(mime_attachment)
+
+ if protocol_message.to_size():
+ result['To'] = ', '.join(protocol_message.to_list())
+ if protocol_message.cc_size():
+ result['Cc'] = ', '.join(protocol_message.cc_list())
+ if protocol_message.bcc_size():
+ result['Bcc'] = ', '.join(protocol_message.bcc_list())
+
+ result['From'] = protocol_message.sender()
+ result['Reply-To'] = protocol_message.replyto()
+ result['Subject'] = protocol_message.subject()
+
+ return result
+
+MailMessageToMIMEMessage = mail_message_to_mime_message
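+
+
+# A minimal sketch (illustrative only): building a MailMessage protocol
+# buffer by hand and converting it to a MIME message.  Field values are
+# hypothetical.
+def _example_mail_message_to_mime():
+  proto = mail_service_pb.MailMessage()
+  proto.set_sender('sender@example.com')
+  proto.add_to('recipient@example.com')
+  proto.set_subject('hello')
+  proto.set_textbody('plain body')
+  mime = mail_message_to_mime_message(proto)
+  assert mime['To'] == 'recipient@example.com'
+  assert mime['Subject'] == 'hello'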
+
+
+def _to_str(value):
+ """Helper function to make sure unicode values converted to utf-8.
+
+ Args:
+ value: str or unicode to convert to utf-8.
+
+ Returns:
+ UTF-8 encoded str of value, otherwise value unchanged.
+ """
+ if isinstance(value, unicode):
+ return value.encode('utf-8')
+ return value
+
+
+class EncodedPayload(object):
+ """Wrapper for a payload that contains encoding information.
+
+  When an email is received, it is usually encoded using a certain
+  character set, and then possibly further encoded using a transfer
+  encoding in that character set.  Most of the time, it is possible
+ to decode the encoded payload as is, however, in the case where it
+ is not, the encoded payload and the original encoding information
+ must be preserved.
+
+ Attributes:
+ payload: The original encoded payload.
+ charset: The character set of the encoded payload. None means use
+ default character set.
+ encoding: The transfer encoding of the encoded payload. None means
+ content not encoded.
+ """
+
+ def __init__(self, payload, charset=None, encoding=None):
+ """Constructor.
+
+ Args:
+ payload: Maps to attribute of the same name.
+ charset: Maps to attribute of the same name.
+ encoding: Maps to attribute of the same name.
+ """
+ self.payload = payload
+ self.charset = charset
+ self.encoding = encoding
+
+ def decode(self):
+ """Attempt to decode the encoded data.
+
+    Attempt to use Python's codec library to decode the payload.  All
+ exceptions are passed back to the caller.
+
+ Returns:
+ Binary or unicode version of payload content.
+ """
+ payload = self.payload
+
+ if self.encoding and self.encoding.lower() != '7bit':
+ try:
+        payload = payload.decode(self.encoding)
+      except LookupError:
+        raise UnknownEncodingError('Unknown encoding %s.' % self.encoding)
+ except (Exception, Error), e:
+ raise PayloadEncodingError('Could not decode payload: %s' % e)
+
+ if self.charset and str(self.charset).lower() != '7bit':
+ try:
+        payload = payload.decode(str(self.charset))
+ except LookupError:
+ raise UnknownCharsetError('Unknown charset %s.' % self.charset)
+ except (Exception, Error), e:
+        raise PayloadEncodingError('Could not read characters: %s' % e)
+
+ return payload
+
+ def __eq__(self, other):
+ """Equality operator.
+
+    Args:
+      other: The other EncodedPayload object to compare with.  Comparison
+        with other object types is not implemented.
+
+    Returns:
+      True if payloads and encodings are equal, else False.
+ """
+ if isinstance(other, EncodedPayload):
+ return (self.payload == other.payload and
+ self.charset == other.charset and
+ self.encoding == other.encoding)
+ else:
+ return NotImplemented
+
+ def copy_to(self, mime_message):
+ """Copy contents to MIME message payload.
+
+    If no content transfer encoding is specified, and the character set does
+    not equal the overall message encoding, the payload will be base64
+    encoded.
+
+ Args:
+ mime_message: Message instance to receive new payload.
+ """
+ if self.encoding:
+ mime_message['content-transfer-encoding'] = self.encoding
+ mime_message.set_payload(self.payload, self.charset)
+
+ def to_mime_message(self):
+ """Convert to MIME message.
+
+ Returns:
+ MIME message instance of payload.
+ """
+ mime_message = email.Message.Message()
+ self.copy_to(mime_message)
+ return mime_message
+
+ def __str__(self):
+ """String representation of encoded message.
+
+ Returns:
+ MIME encoded representation of encoded payload as an independent message.
+ """
+ return str(self.to_mime_message())
+
+ def __repr__(self):
+ """Basic representation of encoded payload.
+
+ Returns:
+ Payload itself is represented by its hash value.
+ """
+ result = '<EncodedPayload payload=#%d' % hash(self.payload)
+ if self.charset:
+ result += ' charset=%s' % self.charset
+ if self.encoding:
+ result += ' encoding=%s' % self.encoding
+ return result + '>'
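+
+
+# A minimal sketch (illustrative only): decoding a base64, ascii-encoded
+# payload.  The values are hypothetical.
+def _example_encoded_payload():
+  encoded = EncodedPayload('aGVsbG8gd29ybGQ=', charset='ascii',
+                           encoding='base64')
+  assert encoded.decode() == u'hello world'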
+
+
+class _EmailMessageBase(object):
+ """Base class for email API service objects.
+
+ Subclasses must define a class variable called _API_CALL with the name
+ of its underlying mail sending API call.
+ """
+
+ PROPERTIES = set([
+ 'sender',
+ 'reply_to',
+ 'subject',
+ 'body',
+ 'html',
+ 'attachments',
+ ])
+
+ PROPERTIES.update(('to', 'cc', 'bcc'))
+
+ def __init__(self, mime_message=None, **kw):
+ """Initialize Email message.
+
+ Creates new MailMessage protocol buffer and initializes it with any
+ keyword arguments.
+
+ Args:
+      mime_message: MIME message to initialize from.  If it is an instance
+        of email.Message.Message, it will be stored as the original message.
+ kw: List of keyword properties as defined by PROPERTIES.
+ """
+ if mime_message:
+ mime_message = _parse_mime_message(mime_message)
+ self.update_from_mime_message(mime_message)
+ self.__original = mime_message
+
+ self.initialize(**kw)
+
+ @property
+ def original(self):
+ """Get original MIME message from which values were set."""
+ return self.__original
+
+ def initialize(self, **kw):
+ """Keyword initialization.
+
+ Used to set all fields of the email message using keyword arguments.
+
+ Args:
+ kw: List of keyword properties as defined by PROPERTIES.
+ """
+ for name, value in kw.iteritems():
+ setattr(self, name, value)
+
+ def Initialize(self, **kw):
+ self.initialize(**kw)
+
+ def check_initialized(self):
+ """Check if EmailMessage is properly initialized.
+
+ Test used to determine if EmailMessage meets basic requirements
+ for being used with the mail API. This means that the following
+ fields must be set or have at least one value in the case of
+ multi value fields:
+
+ - Subject must be set.
+ - A recipient must be specified.
+ - Must contain a body.
+ - All bodies and attachments must decode properly.
+
+ This check does not include determining if the sender is actually
+ authorized to send email for the application.
+
+ Raises:
+ Appropriate exception for initialization failure.
+
+ InvalidAttachmentTypeError: Use of incorrect attachment type.
+ MissingRecipientsError: No recipients specified in to, cc or bcc.
+ MissingSenderError: No sender specified.
+ MissingSubjectError: Subject is not specified.
+ MissingBodyError: No body specified.
+ PayloadEncodingError: Payload is not properly encoded.
+ UnknownEncodingError: Payload has unknown encoding.
+ UnknownCharsetError: Payload has unknown character set.
+ """
+ if not hasattr(self, 'sender'):
+ raise MissingSenderError()
+ if not hasattr(self, 'subject'):
+ raise MissingSubjectError()
+
+ found_body = False
+
+ try:
+ body = self.body
+ except AttributeError:
+ pass
+ else:
+ if isinstance(body, EncodedPayload):
+ body.decode()
+ found_body = True
+
+ try:
+ html = self.html
+ except AttributeError:
+ pass
+ else:
+ if isinstance(html, EncodedPayload):
+ html.decode()
+ found_body = True
+
+ if not found_body:
+ raise MissingBodyError()
+
+ if hasattr(self, 'attachments'):
+ for file_name, data in _attachment_sequence(self.attachments):
+ _GetMimeType(file_name)
+
+ if isinstance(data, EncodedPayload):
+ data.decode()
+
+ def CheckInitialized(self):
+ self.check_initialized()
+
+ def is_initialized(self):
+ """Determine if EmailMessage is properly initialized.
+
+ Returns:
+      True if message is properly initialized, otherwise False.
+ """
+ try:
+ self.check_initialized()
+ return True
+ except Error:
+ return False
+
+ def IsInitialized(self):
+ return self.is_initialized()
+
+ def ToProto(self):
+ """Convert mail message to protocol message.
+
+ Unicode strings are converted to UTF-8 for all fields.
+
+    This method is overridden by EmailMessage to support the recipient fields.
+
+ Returns:
+ MailMessage protocol version of mail message.
+
+ Raises:
+      Passes through decoding errors that occur when decoding
+      EncodedPayload objects.
+ """
+ self.check_initialized()
+ message = mail_service_pb.MailMessage()
+ message.set_sender(_to_str(self.sender))
+
+ if hasattr(self, 'reply_to'):
+ message.set_replyto(_to_str(self.reply_to))
+ message.set_subject(_to_str(self.subject))
+
+ if hasattr(self, 'body'):
+ body = self.body
+ if isinstance(body, EncodedPayload):
+ body = body.decode()
+ message.set_textbody(_to_str(body))
+ if hasattr(self, 'html'):
+ html = self.html
+ if isinstance(html, EncodedPayload):
+ html = html.decode()
+ message.set_htmlbody(_to_str(html))
+
+ if hasattr(self, 'attachments'):
+ for file_name, data in _attachment_sequence(self.attachments):
+ if isinstance(data, EncodedPayload):
+ data = data.decode()
+ attachment = message.add_attachment()
+ attachment.set_filename(_to_str(file_name))
+ attachment.set_data(_to_str(data))
+ return message
+
+ def to_mime_message(self):
+ """Generate a MIMEMultitype message from EmailMessage.
+
+    Calls mail_message_to_mime_message after converting self to a protocol
+    buffer.  The protocol buffer is better at handling corner cases
+    than the EmailMessage class.
+
+ Returns:
+ MIMEMultitype representing the provided MailMessage.
+
+ Raises:
+ Appropriate exception for initialization failure.
+
+ InvalidAttachmentTypeError: Use of incorrect attachment type.
+ MissingSenderError: No sender specified.
+ MissingSubjectError: Subject is not specified.
+ MissingBodyError: No body specified.
+ """
+ return mail_message_to_mime_message(self.ToProto())
+
+ def ToMIMEMessage(self):
+ return self.to_mime_message()
+
+ def send(self, make_sync_call=apiproxy_stub_map.MakeSyncCall):
+ """Send email message.
+
+ Send properly initialized email message via email API.
+
+ Args:
+ make_sync_call: Method which will make synchronous call to api proxy.
+
+ Raises:
+ Errors defined in this file above.
+ """
+ message = self.ToProto()
+ response = api_base_pb.VoidProto()
+
+ try:
+ make_sync_call('mail', self._API_CALL, message, response)
+ except apiproxy_errors.ApplicationError, e:
+ if e.application_error in ERROR_MAP:
+ raise ERROR_MAP[e.application_error](e.error_detail)
+ raise e
+
+ def Send(self, *args, **kwds):
+ self.send(*args, **kwds)
+
+ def _check_attachment(self, attachment):
+ file_name, data = attachment
+    if not (isinstance(file_name, basestring) and
+            isinstance(data, basestring)):
+      raise TypeError('Attachment must be a (file_name, data) pair of '
+                      'string types.')
+
+ def _check_attachments(self, attachments):
+ """Checks values going to attachment field.
+
+    Mainly used to check type safety of the values.  Each value of the list
+    must be a pair of the form (file_name, data), and both values must be
+    a string type.
+
+ Args:
+ attachments: Collection of attachment tuples.
+
+ Raises:
+ TypeError if values are not string type.
+ """
+ if len(attachments) == 2 and isinstance(attachments[0], basestring):
+ self._check_attachment(attachments)
+ else:
+ for attachment in attachments:
+ self._check_attachment(attachment)
+
+ def __setattr__(self, attr, value):
+ """Property setting access control.
+
+ Controls write access to email fields.
+
+ Args:
+ attr: Attribute to access.
+ value: New value for field.
+
+ Raises:
+ ValueError: If provided with an empty field.
+ AttributeError: If not an allowed assignment field.
+ """
+ if not attr.startswith('_EmailMessageBase'):
+ if attr in ['sender', 'reply_to']:
+ check_email_valid(value, attr)
+
+ if not value:
+ raise ValueError('May not set empty value for \'%s\'' % attr)
+
+ if attr not in self.PROPERTIES:
+ raise AttributeError('\'EmailMessage\' has no attribute \'%s\'' % attr)
+
+ if attr == 'attachments':
+ self._check_attachments(value)
+
+ super(_EmailMessageBase, self).__setattr__(attr, value)
+
+ def _add_body(self, content_type, payload):
+ """Add body to email from payload.
+
+ Will overwrite any existing default plain or html body.
+
+ Args:
+ content_type: Content-type of body.
+ payload: Payload to store body as.
+ """
+ if content_type == 'text/plain':
+ self.body = payload
+ elif content_type == 'text/html':
+ self.html = payload
+
+ def _update_payload(self, mime_message):
+ """Update payload of mail message from mime_message.
+
+    This function works recursively when it receives a multipart body.
+    If it receives a non-multipart mime object, it will determine whether
+    it is an attachment by checking whether it has a filename.  Attachments
+    and bodies are then wrapped in EncodedPayload with the correct charsets
+    and encodings.
+
+ Args:
+ mime_message: A Message MIME email object.
+ """
+ payload = mime_message.get_payload()
+
+ if payload:
+ if mime_message.get_content_maintype() == 'multipart':
+ for alternative in payload:
+ self._update_payload(alternative)
+ else:
+ filename = mime_message.get_param('filename',
+ header='content-disposition')
+ if not filename:
+ filename = mime_message.get_param('name')
+
+ payload = EncodedPayload(payload,
+ mime_message.get_charset(),
+ mime_message['content-transfer-encoding'])
+
+ if filename:
+ try:
+ attachments = self.attachments
+ except AttributeError:
+ self.attachments = (filename, payload)
+ else:
+ if isinstance(attachments[0], basestring):
+ self.attachments = [attachments]
+ attachments = self.attachments
+ attachments.append((filename, payload))
+ else:
+ self._add_body(mime_message.get_content_type(), payload)
+
+ def update_from_mime_message(self, mime_message):
+ """Copy information from a mime message.
+
+ Set information of instance to values of mime message. This method
+ will only copy values that it finds. Any missing values will not
+ be copied, nor will they overwrite old values with blank values.
+
+ This object is not guaranteed to be initialized after this call.
+
+ Args:
+ mime_message: email.Message instance to copy information from.
+
+ Returns:
+ MIME Message instance of mime_message argument.
+ """
+ mime_message = _parse_mime_message(mime_message)
+
+ sender = mime_message['from']
+ if sender:
+ self.sender = sender
+
+ reply_to = mime_message['reply-to']
+ if reply_to:
+ self.reply_to = reply_to
+
+ subject = mime_message['subject']
+ if subject:
+ self.subject = subject
+
+ self._update_payload(mime_message)
+
+ def bodies(self, content_type=None):
+ """Iterate over all bodies.
+
+ Yields:
+ Tuple (content_type, payload) for html and body in that order.
+ """
+ if (not content_type or
+ content_type == 'text' or
+ content_type == 'text/html'):
+ try:
+ yield 'text/html', self.html
+ except AttributeError:
+ pass
+
+ if (not content_type or
+ content_type == 'text' or
+ content_type == 'text/plain'):
+ try:
+ yield 'text/plain', self.body
+ except AttributeError:
+ pass
+
+
+class EmailMessage(_EmailMessageBase):
+ """Main interface to email API service.
+
+ This class is used to programmatically build an email message to send via
+ the Mail API. The usage is to construct an instance, populate its fields
+ and call Send().
+
+ Example Usage:
+ An EmailMessage can be built completely by the constructor.
+
+ EmailMessage(sender='sender@nowhere.com',
+ to='recipient@nowhere.com',
+ subject='a subject',
+ body='This is an email to you').Send()
+
+ It might be desirable for an application to build an email in different
+ places throughout the code. For this, EmailMessage is mutable.
+
+ message = EmailMessage()
+ message.sender = 'sender@nowhere.com'
+ message.to = ['recipient1@nowhere.com', 'recipient2@nowhere.com']
+ message.subject = 'a subject'
+    message.body = 'This is an email to you'
+ message.check_initialized()
+ message.send()
+ """
+
+ _API_CALL = 'Send'
+ PROPERTIES = set(_EmailMessageBase.PROPERTIES)
+
+ def check_initialized(self):
+ """Provide additional checks to ensure recipients have been specified.
+
+ Raises:
+      MissingRecipientsError when no recipients specified in to, cc or bcc.
+ """
+ if (not hasattr(self, 'to') and
+ not hasattr(self, 'cc') and
+ not hasattr(self, 'bcc')):
+ raise MissingRecipientsError()
+ super(EmailMessage, self).check_initialized()
+
+ def CheckInitialized(self):
+ self.check_initialized()
+
+ def ToProto(self):
+ """Does addition conversion of recipient fields to protocol buffer.
+
+ Returns:
+      MailMessage protocol version of mail message including recipient fields.
+ """
+ message = super(EmailMessage, self).ToProto()
+
+ for attribute, adder in (('to', message.add_to),
+ ('cc', message.add_cc),
+ ('bcc', message.add_bcc)):
+ if hasattr(self, attribute):
+ for address in _email_sequence(getattr(self, attribute)):
+ adder(_to_str(address))
+ return message
+
+ def __setattr__(self, attr, value):
+ """Provides additional checks on recipient fields."""
+ if attr in ['to', 'cc', 'bcc']:
+ if isinstance(value, basestring):
+ check_email_valid(value, attr)
+ else:
+ for address in value:
+ check_email_valid(address, attr)
+
+ super(EmailMessage, self).__setattr__(attr, value)
+
+ def update_from_mime_message(self, mime_message):
+ """Copy information from a mime message.
+
+ Update fields for recipients.
+
+ Args:
+ mime_message: email.Message instance to copy information from.
+ """
+ mime_message = _parse_mime_message(mime_message)
+ super(EmailMessage, self).update_from_mime_message(mime_message)
+
+ to = mime_message.get_all('to')
+ if to:
+ if len(to) == 1:
+ self.to = to[0]
+ else:
+ self.to = to
+
+ cc = mime_message.get_all('cc')
+ if cc:
+ if len(cc) == 1:
+ self.cc = cc[0]
+ else:
+ self.cc = cc
+
+ bcc = mime_message.get_all('bcc')
+ if bcc:
+ if len(bcc) == 1:
+ self.bcc = bcc[0]
+ else:
+ self.bcc = bcc
+
+
+class AdminEmailMessage(_EmailMessageBase):
+ """Interface to sending email messages to all admins via the amil API.
+
+ This class is used to programmatically build an admin email message to send
+ via the Mail API. The usage is to construct an instance, populate its fields
+ and call Send().
+
+ Unlike the normal email message, addresses in the recipient fields are
+ ignored and not used for sending.
+
+ Example Usage:
+ An AdminEmailMessage can be built completely by the constructor.
+
+ AdminEmailMessage(sender='sender@nowhere.com',
+ subject='a subject',
+ body='This is an email to you').Send()
+
+ It might be desirable for an application to build an admin email in
+ different places throughout the code. For this, AdminEmailMessage is
+ mutable.
+
+ message = AdminEmailMessage()
+ message.sender = 'sender@nowhere.com'
+ message.subject = 'a subject'
+    message.body = 'This is an email to you'
+ message.check_initialized()
+ message.send()
+ """
+
+ _API_CALL = 'SendToAdmins'
+ __UNUSED_PROPERTIES = set(('to', 'cc', 'bcc'))
+
+ def __setattr__(self, attr, value):
+ if attr in self.__UNUSED_PROPERTIES:
+ logging.warning('\'%s\' is not a valid property to set '
+ 'for AdminEmailMessage. It is unused.', attr)
+ super(AdminEmailMessage, self).__setattr__(attr, value)
+
+
+class InboundEmailMessage(EmailMessage):
+ """Parsed email object as recevied from external source.
+
+ Has a date field and can store any number of additional bodies. These
+ additional attributes make the email more flexible as required for
+ incoming mail, where the developer has less control over the content.
+
+ Example Usage:
+
+ # Read mail message from CGI input.
+ message = InboundEmailMessage(sys.stdin.read())
+ logging.info('Received email message from %s at %s',
+ message.sender,
+ message.date)
+ enriched_body = list(message.bodies('text/enriched'))[0]
+ ... Do something with body ...
+ """
+
+ __HEADER_PROPERTIES = {'date': 'date',
+ 'message_id': 'message-id',
+ }
+
+ PROPERTIES = frozenset(_EmailMessageBase.PROPERTIES |
+ set(('alternate_bodies',)) |
+ set(__HEADER_PROPERTIES.iterkeys()))
+
+ def update_from_mime_message(self, mime_message):
+ """Update values from MIME message.
+
+ Copies over date values.
+
+ Args:
+ mime_message: email.Message instance to copy information from.
+ """
+ mime_message = _parse_mime_message(mime_message)
+ super(InboundEmailMessage, self).update_from_mime_message(mime_message)
+
+ for property, header in InboundEmailMessage.__HEADER_PROPERTIES.iteritems():
+ value = mime_message[header]
+ if value:
+ setattr(self, property, value)
+
+ def _add_body(self, content_type, payload):
+ """Add body to inbound message.
+
+    Method is overridden to handle incoming messages that have more than one
+    plain or html body or any unidentified bodies.
+
+ This method will not overwrite existing html and body values. This means
+ that when updating, the text and html bodies that are first in the MIME
+ document order are assigned to the body and html properties.
+
+ Args:
+ content_type: Content-type of additional body.
+ payload: Content of additional body.
+ """
+ if (content_type == 'text/plain' and not hasattr(self, 'body') or
+ content_type == 'text/html' and not hasattr(self, 'html')):
+ super(InboundEmailMessage, self)._add_body(content_type, payload)
+ else:
+ try:
+ alternate_bodies = self.alternate_bodies
+ except AttributeError:
+ alternate_bodies = self.alternate_bodies = [(content_type, payload)]
+ else:
+ alternate_bodies.append((content_type, payload))
+
+ def bodies(self, content_type=None):
+ """Iterate over all bodies.
+
+ Args:
+ content_type: Content type to filter on. Allows selection of only
+ specific types of content. Can be just the base type of the content
+ type. For example:
+ content_type = 'text/html' # Matches only HTML content.
+ content_type = 'text' # Matches text of any kind.
+
+ Yields:
+ Tuple (content_type, payload) for all bodies of message, including body,
+ html and all alternate_bodies in that order.
+ """
+ main_bodies = super(InboundEmailMessage, self).bodies(content_type)
+ for payload_type, payload in main_bodies:
+ yield payload_type, payload
+
+ partial_type = bool(content_type and content_type.find('/') < 0)
+
+ try:
+ for payload_type, payload in self.alternate_bodies:
+ if content_type:
+ if partial_type:
+ match_type = payload_type.split('/')[0]
+ else:
+ match_type = payload_type
+ match = match_type == content_type
+ else:
+ match = True
+
+ if match:
+ yield payload_type, payload
+ except AttributeError:
+ pass
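+
+
+# A minimal sketch (illustrative only): filtering bodies of a parsed inbound
+# message by content type.  The raw message text is hypothetical.
+def _example_inbound_bodies():
+  raw = ('From: sender@example.com\n'
+         'Subject: hi\n'
+         '\n'
+         'plain body\n')
+  message = InboundEmailMessage(raw)
+  for content_type, payload in message.bodies('text/plain'):
+    # Each payload is an EncodedPayload; decode() yields its text.
+    assert payload.decode() == 'plain body\n'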
diff --git a/google_appengine/google/appengine/api/mail.pyc b/google_appengine/google/appengine/api/mail.pyc
new file mode 100644
index 0000000..2a8a69c
--- /dev/null
+++ b/google_appengine/google/appengine/api/mail.pyc
Binary files differ
diff --git a/google_appengine/google/appengine/api/mail_errors.py b/google_appengine/google/appengine/api/mail_errors.py
new file mode 100755
index 0000000..6d2b9c3
--- /dev/null
+++ b/google_appengine/google/appengine/api/mail_errors.py
@@ -0,0 +1,55 @@
+#!/usr/bin/env python
+#
+# Copyright 2007 Google Inc.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
+
+"""Exceptions raised my mail API."""
+
+
+class Error(Exception):
+ """Base Mail error type."""
+
+class BadRequestError(Error):
+ """Email is not valid."""
+
+class InvalidSenderError(Error):
+ """Sender is not a permitted to send mail for this application."""
+
+class InvalidEmailError(Error):
+ """Bad email set on an email field."""
+
+class InvalidAttachmentTypeError(Error):
+ """Invalid file type for attachments. We don't send viruses!"""
+
+class MissingRecipientsError(Error):
+ """No recipients specified in message."""
+
+class MissingSenderError(Error):
+ """No sender specified in message."""
+
+class MissingSubjectError(Error):
+ """Subject not specified in message."""
+
+class MissingBodyError(Error):
+ """No body specified in message."""
+
+class PayloadEncodingError(Error):
+ """Unknown payload encoding."""
+
+class UnknownEncodingError(PayloadEncodingError):
+ """Raised when encoding is not known."""
+
+class UnknownCharsetError(PayloadEncodingError):
+ """Raised when charset is not known."""
diff --git a/google_appengine/google/appengine/api/mail_errors.pyc b/google_appengine/google/appengine/api/mail_errors.pyc
new file mode 100644
index 0000000..78f8b20
--- /dev/null
+++ b/google_appengine/google/appengine/api/mail_errors.pyc
Binary files differ
diff --git a/google_appengine/google/appengine/api/mail_service_pb.py b/google_appengine/google/appengine/api/mail_service_pb.py
new file mode 100644
index 0000000..1b608ea
--- /dev/null
+++ b/google_appengine/google/appengine/api/mail_service_pb.py
@@ -0,0 +1,584 @@
+#!/usr/bin/env python
+#
+# Copyright 2007 Google Inc.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
+
+from google.net.proto import ProtocolBuffer
+import array
+import dummy_thread as thread
+
+__pychecker__ = """maxreturns=0 maxbranches=0 no-callinit
+ unusednames=printElemNumber,debug_strs no-special"""
+
+from google.appengine.api.api_base_pb import *
+class MailServiceError(ProtocolBuffer.ProtocolMessage):
+
+ OK = 0
+ INTERNAL_ERROR = 1
+ BAD_REQUEST = 2
+ UNAUTHORIZED_SENDER = 3
+ INVALID_ATTACHMENT_TYPE = 4
+
+ _ErrorCode_NAMES = {
+ 0: "OK",
+ 1: "INTERNAL_ERROR",
+ 2: "BAD_REQUEST",
+ 3: "UNAUTHORIZED_SENDER",
+ 4: "INVALID_ATTACHMENT_TYPE",
+ }
+
+ def ErrorCode_Name(cls, x): return cls._ErrorCode_NAMES.get(x, "")
+ ErrorCode_Name = classmethod(ErrorCode_Name)
+
+
+  def __init__(self, contents=None):
+    if contents is not None: self.MergeFromString(contents)
+
+
+ def MergeFrom(self, x):
+ assert x is not self
+
+ def Equals(self, x):
+ if x is self: return 1
+ return 1
+
+ def IsInitialized(self, debug_strs=None):
+ initialized = 1
+ return initialized
+
+ def ByteSize(self):
+ n = 0
+ return n + 0
+
+ def Clear(self):
+ pass
+
+ def OutputUnchecked(self, out):
+ pass
+
+ def TryMerge(self, d):
+ while d.avail() > 0:
+ tt = d.getVarInt32()
+ if (tt == 0): raise ProtocolBuffer.ProtocolBufferDecodeError
+ d.skipData(tt)
+
+
+ def __str__(self, prefix="", printElemNumber=0):
+ res=""
+ return res
+
+
+ def _BuildTagLookupTable(sparse, maxtag, default=None):
+ return tuple([sparse.get(i, default) for i in xrange(0, 1+maxtag)])
+
+
+ _TEXT = _BuildTagLookupTable({
+ 0: "ErrorCode",
+ }, 0)
+
+ _TYPES = _BuildTagLookupTable({
+ 0: ProtocolBuffer.Encoder.NUMERIC,
+ }, 0, ProtocolBuffer.Encoder.MAX_TYPE)
+
+ _STYLE = """"""
+ _STYLE_CONTENT_TYPE = """"""
+class MailAttachment(ProtocolBuffer.ProtocolMessage):
+ has_filename_ = 0
+ filename_ = ""
+ has_data_ = 0
+ data_ = ""
+
+ def __init__(self, contents=None):
+ if contents is not None: self.MergeFromString(contents)
+
+ def filename(self): return self.filename_
+
+ def set_filename(self, x):
+ self.has_filename_ = 1
+ self.filename_ = x
+
+ def clear_filename(self):
+ if self.has_filename_:
+ self.has_filename_ = 0
+ self.filename_ = ""
+
+ def has_filename(self): return self.has_filename_
+
+ def data(self): return self.data_
+
+ def set_data(self, x):
+ self.has_data_ = 1
+ self.data_ = x
+
+ def clear_data(self):
+ if self.has_data_:
+ self.has_data_ = 0
+ self.data_ = ""
+
+ def has_data(self): return self.has_data_
+
+
+ def MergeFrom(self, x):
+ assert x is not self
+ if (x.has_filename()): self.set_filename(x.filename())
+ if (x.has_data()): self.set_data(x.data())
+
+ def Equals(self, x):
+ if x is self: return 1
+ if self.has_filename_ != x.has_filename_: return 0
+ if self.has_filename_ and self.filename_ != x.filename_: return 0
+ if self.has_data_ != x.has_data_: return 0
+ if self.has_data_ and self.data_ != x.data_: return 0
+ return 1
+
+ def IsInitialized(self, debug_strs=None):
+ initialized = 1
+ if (not self.has_filename_):
+ initialized = 0
+ if debug_strs is not None:
+ debug_strs.append('Required field: filename not set.')
+ if (not self.has_data_):
+ initialized = 0
+ if debug_strs is not None:
+ debug_strs.append('Required field: data not set.')
+ return initialized
+
+ def ByteSize(self):
+ n = 0
+ n += self.lengthString(len(self.filename_))
+ n += self.lengthString(len(self.data_))
+ return n + 2
+
+ def Clear(self):
+ self.clear_filename()
+ self.clear_data()
+
+ def OutputUnchecked(self, out):
+ out.putVarInt32(10)
+ out.putPrefixedString(self.filename_)
+ out.putVarInt32(18)
+ out.putPrefixedString(self.data_)
+
+ def TryMerge(self, d):
+ while d.avail() > 0:
+ tt = d.getVarInt32()
+ if tt == 10:
+ self.set_filename(d.getPrefixedString())
+ continue
+ if tt == 18:
+ self.set_data(d.getPrefixedString())
+ continue
+ if (tt == 0): raise ProtocolBuffer.ProtocolBufferDecodeError
+ d.skipData(tt)
+
+
+ def __str__(self, prefix="", printElemNumber=0):
+ res=""
+ if self.has_filename_: res+=prefix+("FileName: %s\n" % self.DebugFormatString(self.filename_))
+ if self.has_data_: res+=prefix+("Data: %s\n" % self.DebugFormatString(self.data_))
+ return res
+
+
+ def _BuildTagLookupTable(sparse, maxtag, default=None):
+ return tuple([sparse.get(i, default) for i in xrange(0, 1+maxtag)])
+
+ kFileName = 1
+ kData = 2
+
+ _TEXT = _BuildTagLookupTable({
+ 0: "ErrorCode",
+ 1: "FileName",
+ 2: "Data",
+ }, 2)
+
+ _TYPES = _BuildTagLookupTable({
+ 0: ProtocolBuffer.Encoder.NUMERIC,
+ 1: ProtocolBuffer.Encoder.STRING,
+ 2: ProtocolBuffer.Encoder.STRING,
+ }, 2, ProtocolBuffer.Encoder.MAX_TYPE)
+
+ _STYLE = """"""
+ _STYLE_CONTENT_TYPE = """"""
+class MailMessage(ProtocolBuffer.ProtocolMessage):
+ has_sender_ = 0
+ sender_ = ""
+ has_replyto_ = 0
+ replyto_ = ""
+ has_subject_ = 0
+ subject_ = ""
+ has_textbody_ = 0
+ textbody_ = ""
+ has_htmlbody_ = 0
+ htmlbody_ = ""
+
+ def __init__(self, contents=None):
+ self.to_ = []
+ self.cc_ = []
+ self.bcc_ = []
+ self.attachment_ = []
+ if contents is not None: self.MergeFromString(contents)
+
+ def sender(self): return self.sender_
+
+ def set_sender(self, x):
+ self.has_sender_ = 1
+ self.sender_ = x
+
+ def clear_sender(self):
+ if self.has_sender_:
+ self.has_sender_ = 0
+ self.sender_ = ""
+
+ def has_sender(self): return self.has_sender_
+
+ def replyto(self): return self.replyto_
+
+ def set_replyto(self, x):
+ self.has_replyto_ = 1
+ self.replyto_ = x
+
+ def clear_replyto(self):
+ if self.has_replyto_:
+ self.has_replyto_ = 0
+ self.replyto_ = ""
+
+ def has_replyto(self): return self.has_replyto_
+
+ def to_size(self): return len(self.to_)
+ def to_list(self): return self.to_
+
+ def to(self, i):
+ return self.to_[i]
+
+ def set_to(self, i, x):
+ self.to_[i] = x
+
+ def add_to(self, x):
+ self.to_.append(x)
+
+ def clear_to(self):
+ self.to_ = []
+
+ def cc_size(self): return len(self.cc_)
+ def cc_list(self): return self.cc_
+
+ def cc(self, i):
+ return self.cc_[i]
+
+ def set_cc(self, i, x):
+ self.cc_[i] = x
+
+ def add_cc(self, x):
+ self.cc_.append(x)
+
+ def clear_cc(self):
+ self.cc_ = []
+
+ def bcc_size(self): return len(self.bcc_)
+ def bcc_list(self): return self.bcc_
+
+ def bcc(self, i):
+ return self.bcc_[i]
+
+ def set_bcc(self, i, x):
+ self.bcc_[i] = x
+
+ def add_bcc(self, x):
+ self.bcc_.append(x)
+
+ def clear_bcc(self):
+ self.bcc_ = []
+
+ def subject(self): return self.subject_
+
+ def set_subject(self, x):
+ self.has_subject_ = 1
+ self.subject_ = x
+
+ def clear_subject(self):
+ if self.has_subject_:
+ self.has_subject_ = 0
+ self.subject_ = ""
+
+ def has_subject(self): return self.has_subject_
+
+ def textbody(self): return self.textbody_
+
+ def set_textbody(self, x):
+ self.has_textbody_ = 1
+ self.textbody_ = x
+
+ def clear_textbody(self):
+ if self.has_textbody_:
+ self.has_textbody_ = 0
+ self.textbody_ = ""
+
+ def has_textbody(self): return self.has_textbody_
+
+ def htmlbody(self): return self.htmlbody_
+
+ def set_htmlbody(self, x):
+ self.has_htmlbody_ = 1
+ self.htmlbody_ = x
+
+ def clear_htmlbody(self):
+ if self.has_htmlbody_:
+ self.has_htmlbody_ = 0
+ self.htmlbody_ = ""
+
+ def has_htmlbody(self): return self.has_htmlbody_
+
+ def attachment_size(self): return len(self.attachment_)
+ def attachment_list(self): return self.attachment_
+
+ def attachment(self, i):
+ return self.attachment_[i]
+
+ def mutable_attachment(self, i):
+ return self.attachment_[i]
+
+ def add_attachment(self):
+ x = MailAttachment()
+ self.attachment_.append(x)
+ return x
+
+ def clear_attachment(self):
+ self.attachment_ = []
+
+ def MergeFrom(self, x):
+ assert x is not self
+ if (x.has_sender()): self.set_sender(x.sender())
+ if (x.has_replyto()): self.set_replyto(x.replyto())
+ for i in xrange(x.to_size()): self.add_to(x.to(i))
+ for i in xrange(x.cc_size()): self.add_cc(x.cc(i))
+ for i in xrange(x.bcc_size()): self.add_bcc(x.bcc(i))
+ if (x.has_subject()): self.set_subject(x.subject())
+ if (x.has_textbody()): self.set_textbody(x.textbody())
+ if (x.has_htmlbody()): self.set_htmlbody(x.htmlbody())
+ for i in xrange(x.attachment_size()): self.add_attachment().CopyFrom(x.attachment(i))
+
+ def Equals(self, x):
+ if x is self: return 1
+ if self.has_sender_ != x.has_sender_: return 0
+ if self.has_sender_ and self.sender_ != x.sender_: return 0
+ if self.has_replyto_ != x.has_replyto_: return 0
+ if self.has_replyto_ and self.replyto_ != x.replyto_: return 0
+ if len(self.to_) != len(x.to_): return 0
+ for e1, e2 in zip(self.to_, x.to_):
+ if e1 != e2: return 0
+ if len(self.cc_) != len(x.cc_): return 0
+ for e1, e2 in zip(self.cc_, x.cc_):
+ if e1 != e2: return 0
+ if len(self.bcc_) != len(x.bcc_): return 0
+ for e1, e2 in zip(self.bcc_, x.bcc_):
+ if e1 != e2: return 0
+ if self.has_subject_ != x.has_subject_: return 0
+ if self.has_subject_ and self.subject_ != x.subject_: return 0
+ if self.has_textbody_ != x.has_textbody_: return 0
+ if self.has_textbody_ and self.textbody_ != x.textbody_: return 0
+ if self.has_htmlbody_ != x.has_htmlbody_: return 0
+ if self.has_htmlbody_ and self.htmlbody_ != x.htmlbody_: return 0
+ if len(self.attachment_) != len(x.attachment_): return 0
+ for e1, e2 in zip(self.attachment_, x.attachment_):
+ if e1 != e2: return 0
+ return 1
+
+ def IsInitialized(self, debug_strs=None):
+ initialized = 1
+ if (not self.has_sender_):
+ initialized = 0
+ if debug_strs is not None:
+ debug_strs.append('Required field: sender not set.')
+ if (not self.has_subject_):
+ initialized = 0
+ if debug_strs is not None:
+ debug_strs.append('Required field: subject not set.')
+ for p in self.attachment_:
+ if not p.IsInitialized(debug_strs): initialized=0
+ return initialized
+
+ def ByteSize(self):
+ n = 0
+ n += self.lengthString(len(self.sender_))
+ if (self.has_replyto_): n += 1 + self.lengthString(len(self.replyto_))
+ n += 1 * len(self.to_)
+ for i in xrange(len(self.to_)): n += self.lengthString(len(self.to_[i]))
+ n += 1 * len(self.cc_)
+ for i in xrange(len(self.cc_)): n += self.lengthString(len(self.cc_[i]))
+ n += 1 * len(self.bcc_)
+ for i in xrange(len(self.bcc_)): n += self.lengthString(len(self.bcc_[i]))
+ n += self.lengthString(len(self.subject_))
+ if (self.has_textbody_): n += 1 + self.lengthString(len(self.textbody_))
+ if (self.has_htmlbody_): n += 1 + self.lengthString(len(self.htmlbody_))
+ n += 1 * len(self.attachment_)
+ for i in xrange(len(self.attachment_)): n += self.lengthString(self.attachment_[i].ByteSize())
+ return n + 2
+
+ def Clear(self):
+ self.clear_sender()
+ self.clear_replyto()
+ self.clear_to()
+ self.clear_cc()
+ self.clear_bcc()
+ self.clear_subject()
+ self.clear_textbody()
+ self.clear_htmlbody()
+ self.clear_attachment()
+
+ def OutputUnchecked(self, out):
+ out.putVarInt32(10)
+ out.putPrefixedString(self.sender_)
+ if (self.has_replyto_):
+ out.putVarInt32(18)
+ out.putPrefixedString(self.replyto_)
+ for i in xrange(len(self.to_)):
+ out.putVarInt32(26)
+ out.putPrefixedString(self.to_[i])
+ for i in xrange(len(self.cc_)):
+ out.putVarInt32(34)
+ out.putPrefixedString(self.cc_[i])
+ for i in xrange(len(self.bcc_)):
+ out.putVarInt32(42)
+ out.putPrefixedString(self.bcc_[i])
+ out.putVarInt32(50)
+ out.putPrefixedString(self.subject_)
+ if (self.has_textbody_):
+ out.putVarInt32(58)
+ out.putPrefixedString(self.textbody_)
+ if (self.has_htmlbody_):
+ out.putVarInt32(66)
+ out.putPrefixedString(self.htmlbody_)
+ for i in xrange(len(self.attachment_)):
+ out.putVarInt32(74)
+ out.putVarInt32(self.attachment_[i].ByteSize())
+ self.attachment_[i].OutputUnchecked(out)
+
+ def TryMerge(self, d):
+ while d.avail() > 0:
+ tt = d.getVarInt32()
+ if tt == 10:
+ self.set_sender(d.getPrefixedString())
+ continue
+ if tt == 18:
+ self.set_replyto(d.getPrefixedString())
+ continue
+ if tt == 26:
+ self.add_to(d.getPrefixedString())
+ continue
+ if tt == 34:
+ self.add_cc(d.getPrefixedString())
+ continue
+ if tt == 42:
+ self.add_bcc(d.getPrefixedString())
+ continue
+ if tt == 50:
+ self.set_subject(d.getPrefixedString())
+ continue
+ if tt == 58:
+ self.set_textbody(d.getPrefixedString())
+ continue
+ if tt == 66:
+ self.set_htmlbody(d.getPrefixedString())
+ continue
+ if tt == 74:
+ length = d.getVarInt32()
+ tmp = ProtocolBuffer.Decoder(d.buffer(), d.pos(), d.pos() + length)
+ d.skip(length)
+ self.add_attachment().TryMerge(tmp)
+ continue
+ if (tt == 0): raise ProtocolBuffer.ProtocolBufferDecodeError
+ d.skipData(tt)
+
+
+ def __str__(self, prefix="", printElemNumber=0):
+ res=""
+ if self.has_sender_: res+=prefix+("Sender: %s\n" % self.DebugFormatString(self.sender_))
+ if self.has_replyto_: res+=prefix+("ReplyTo: %s\n" % self.DebugFormatString(self.replyto_))
+ cnt=0
+ for e in self.to_:
+ elm=""
+ if printElemNumber: elm="(%d)" % cnt
+ res+=prefix+("To%s: %s\n" % (elm, self.DebugFormatString(e)))
+ cnt+=1
+ cnt=0
+ for e in self.cc_:
+ elm=""
+ if printElemNumber: elm="(%d)" % cnt
+ res+=prefix+("Cc%s: %s\n" % (elm, self.DebugFormatString(e)))
+ cnt+=1
+ cnt=0
+ for e in self.bcc_:
+ elm=""
+ if printElemNumber: elm="(%d)" % cnt
+ res+=prefix+("Bcc%s: %s\n" % (elm, self.DebugFormatString(e)))
+ cnt+=1
+ if self.has_subject_: res+=prefix+("Subject: %s\n" % self.DebugFormatString(self.subject_))
+ if self.has_textbody_: res+=prefix+("TextBody: %s\n" % self.DebugFormatString(self.textbody_))
+ if self.has_htmlbody_: res+=prefix+("HtmlBody: %s\n" % self.DebugFormatString(self.htmlbody_))
+ cnt=0
+ for e in self.attachment_:
+ elm=""
+ if printElemNumber: elm="(%d)" % cnt
+ res+=prefix+("Attachment%s <\n" % elm)
+ res+=e.__str__(prefix + " ", printElemNumber)
+ res+=prefix+">\n"
+ cnt+=1
+ return res
+
+
+ def _BuildTagLookupTable(sparse, maxtag, default=None):
+ return tuple([sparse.get(i, default) for i in xrange(0, 1+maxtag)])
+
+ kSender = 1
+ kReplyTo = 2
+ kTo = 3
+ kCc = 4
+ kBcc = 5
+ kSubject = 6
+ kTextBody = 7
+ kHtmlBody = 8
+ kAttachment = 9
+
+ _TEXT = _BuildTagLookupTable({
+ 0: "ErrorCode",
+ 1: "Sender",
+ 2: "ReplyTo",
+ 3: "To",
+ 4: "Cc",
+ 5: "Bcc",
+ 6: "Subject",
+ 7: "TextBody",
+ 8: "HtmlBody",
+ 9: "Attachment",
+ }, 9)
+
+ _TYPES = _BuildTagLookupTable({
+ 0: ProtocolBuffer.Encoder.NUMERIC,
+ 1: ProtocolBuffer.Encoder.STRING,
+ 2: ProtocolBuffer.Encoder.STRING,
+ 3: ProtocolBuffer.Encoder.STRING,
+ 4: ProtocolBuffer.Encoder.STRING,
+ 5: ProtocolBuffer.Encoder.STRING,
+ 6: ProtocolBuffer.Encoder.STRING,
+ 7: ProtocolBuffer.Encoder.STRING,
+ 8: ProtocolBuffer.Encoder.STRING,
+ 9: ProtocolBuffer.Encoder.STRING,
+ }, 9, ProtocolBuffer.Encoder.MAX_TYPE)
+
+ _STYLE = """"""
+ _STYLE_CONTENT_TYPE = """"""
+
+__all__ = ['MailServiceError','MailAttachment','MailMessage']
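+
+# A minimal sketch (illustrative only, not part of the generated module):
+# building a MailMessage and round-tripping it through its wire encoding.
+# Encode() and the contents constructor argument are inherited from
+# ProtocolBuffer.ProtocolMessage; field values are hypothetical.
+def _example_mail_message_roundtrip():
+  message = MailMessage()
+  message.set_sender('sender@example.com')
+  message.add_to('recipient@example.com')
+  message.set_subject('hello')
+  assert message.IsInitialized()
+  decoded = MailMessage(message.Encode())
+  assert decoded.Equals(message)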
diff --git a/google_appengine/google/appengine/api/mail_service_pb.pyc b/google_appengine/google/appengine/api/mail_service_pb.pyc
new file mode 100644
index 0000000..c60ce72
--- /dev/null
+++ b/google_appengine/google/appengine/api/mail_service_pb.pyc
Binary files differ
diff --git a/google_appengine/google/appengine/api/mail_stub.py b/google_appengine/google/appengine/api/mail_stub.py
new file mode 100755
index 0000000..151ea76
--- /dev/null
+++ b/google_appengine/google/appengine/api/mail_stub.py
@@ -0,0 +1,233 @@
+#!/usr/bin/env python
+#
+# Copyright 2007 Google Inc.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
+
+"""Stub version of the mail API, writes email to logs and can optionally
+send real email via SMTP or sendmail."""
+
+
+
+
+
+from email import MIMEBase
+from email import MIMEMultipart
+from email import MIMEText
+import logging
+import mail
+import mimetypes
+import subprocess
+import smtplib
+
+from google.appengine.api import apiproxy_stub
+
+
+class MailServiceStub(apiproxy_stub.APIProxyStub):
+ """Python only mail service stub.
+
+  By default this stub does not actually send email; it merely logs a
+  description of the email to the developer console.  It can relay real
+  email via SMTP or sendmail when configured to do so.
+
+ Args:
+ host: Host of SMTP server to use. Blank disables sending SMTP.
+ port: Port of SMTP server to use.
+ user: User to log in to SMTP server as.
+ password: Password for SMTP server user.
+ """
+
+ def __init__(self,
+ host=None,
+ port=25,
+ user='',
+ password='',
+ enable_sendmail=False,
+ show_mail_body=False,
+ service_name='mail'):
+ """Constructor.
+
+ Args:
+ host: Host of SMTP mail server.
+      port: Port of SMTP mail server.
+ user: Sending user of SMTP mail.
+ password: SMTP password.
+ enable_sendmail: Whether sendmail enabled or not.
+ show_mail_body: Whether to show mail body in log.
+ service_name: Service name expected for all calls.
+ """
+ super(MailServiceStub, self).__init__(service_name)
+ self._smtp_host = host
+ self._smtp_port = port
+ self._smtp_user = user
+ self._smtp_password = password
+ self._enable_sendmail = enable_sendmail
+ self._show_mail_body = show_mail_body
+
+ def _GenerateLog(self, method, message, log):
+ """Generate a list of log messages representing sent mail.
+
+ Args:
+      method: Name of the mail API method being logged (for example 'Send').
+      message: Message to write to log.
+ log: Log function of type string -> None
+ """
+ log('MailService.%s' % method)
+ log(' From: %s' % message.sender())
+
+ for address in message.to_list():
+ log(' To: %s' % address)
+ for address in message.cc_list():
+ log(' Cc: %s' % address)
+ for address in message.bcc_list():
+ log(' Bcc: %s' % address)
+
+ if message.replyto():
+ log(' Reply-to: %s' % message.replyto())
+
+ log(' Subject: %s' % message.subject())
+
+ if message.has_textbody():
+ log(' Body:')
+ log(' Content-type: text/plain')
+ log(' Data length: %d' % len(message.textbody()))
+ if self._show_mail_body:
+ log('-----\n' + message.textbody() + '\n-----')
+
+ if message.has_htmlbody():
+ log(' Body:')
+ log(' Content-type: text/html')
+ log(' Data length: %d' % len(message.htmlbody()))
+ if self._show_mail_body:
+ log('-----\n' + message.htmlbody() + '\n-----')
+
+ for attachment in message.attachment_list():
+ log(' Attachment:')
+ log(' File name: %s' % attachment.filename())
+ log(' Data length: %s' % len(attachment.data()))
+
+ def _SendSMTP(self, mime_message, smtp_lib=smtplib.SMTP):
+ """Send MIME message via SMTP.
+
+    Connects to the SMTP server and sends the MIME message.  If a user is
+    supplied, it will try to log in to that server to send as an
+    authenticated sender.  Does not currently support encryption.
+
+ Args:
+ mime_message: MimeMessage to send. Create using ToMIMEMessage.
+ smtp_lib: Class of SMTP library. Used for dependency injection.
+ """
+ smtp = smtp_lib()
+ try:
+ smtp.connect(self._smtp_host, self._smtp_port)
+ if self._smtp_user:
+ smtp.login(self._smtp_user, self._smtp_password)
+
+ tos = ', '.join([mime_message[to] for to in ['To', 'Cc', 'Bcc']
+ if mime_message[to]])
+ smtp.sendmail(mime_message['From'], tos, str(mime_message))
+ finally:
+ smtp.quit()
+
+ def _SendSendmail(self, mime_message,
+ popen=subprocess.Popen,
+ sendmail_command='sendmail'):
+ """Send MIME message via sendmail, if exists on computer.
+
+ Attempts to send email via sendmail. Any IO failure, including
+ the program not being found is ignored.
+
+ Args:
+ mime_message: MimeMessage to send. Create using ToMIMEMessage.
+ popen: popen function to create a new sub-process.
+ """
+ try:
+ tos = [mime_message[to] for to in ['To', 'Cc', 'Bcc'] if mime_message[to]]
+ sendmail_command = '%s %s' % (sendmail_command, ' '.join(tos))
+
+ try:
+ child = popen(sendmail_command,
+ shell=True,
+ stdin=subprocess.PIPE,
+ stdout=subprocess.PIPE)
+ except (IOError, OSError), e:
+ logging.error('Unable to open pipe to sendmail')
+ raise
+ try:
+ child.stdin.write(str(mime_message))
+ child.stdin.close()
+ finally:
+ while child.poll() is None:
+ child.stdout.read(100)
+ child.stdout.close()
+ except (IOError, OSError), e:
+ logging.error('Error sending mail using sendmail: ' + str(e))
+
+ def _Send(self, request, response, log=logging.info,
+ smtp_lib=smtplib.SMTP,
+ popen=subprocess.Popen,
+ sendmail_command='sendmail'):
+ """Implementation of MailServer::Send().
+
+ Logs email message. Contents of attachments are not shown, only
+ their sizes. If SMTP is configured, will send via SMTP, else
+ will use Sendmail if it is installed.
+
+ Args:
+ request: The message to send, a SendMailRequest.
+ response: The send response, a SendMailResponse.
+ log: Log function to send log information. Used for dependency
+ injection.
+ smtp_lib: Class of SMTP library. Used for dependency injection.
+      popen: popen function to use for creating a new sub-process.
+        Used for dependency injection.
+      sendmail_command: Name of the sendmail command to use.  Used for
+        dependency injection.
+ """
+ self._GenerateLog('Send', request, log)
+
+ if self._smtp_host and self._enable_sendmail:
+ log('Both SMTP and sendmail are enabled. Ignoring sendmail.')
+
+ mime_message = mail.MailMessageToMIMEMessage(request)
+ if self._smtp_host:
+ self._SendSMTP(mime_message, smtp_lib)
+ elif self._enable_sendmail:
+ self._SendSendmail(mime_message, popen, sendmail_command)
+ else:
+ logging.info('You are not currently sending out real email. '
+ 'If you have sendmail installed you can use it '
+ 'by using the server with --enable_sendmail')
+
+ _Dynamic_Send = _Send
+
+ def _SendToAdmins(self, request, response, log=logging.info):
+ """Implementation of MailServer::SendToAdmins().
+
+ Logs email message. Contents of attachments are not shown, only
+ their sizes.
+
+ Given the difficulty of determining who the actual sender
+ is, Sendmail and SMTP are disabled for this action.
+
+ Args:
+ request: The message to send, a SendMailRequest.
+ response: The send response, a SendMailResponse.
+ log: Log function to send log information. Used for dependency
+ injection.
+ """
+ self._GenerateLog('SendToAdmins', request, log)
+
+ if self._smtp_host and self._enable_sendmail:
+ log('Both SMTP and sendmail are enabled. Ignoring sendmail.')
+
+ _Dynamic_SendToAdmins = _SendToAdmins
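+
+
+# A minimal sketch (illustrative only, not part of this module): registering
+# the stub so that mail API calls made through apiproxy_stub_map are routed
+# to it.  The configuration values are hypothetical.
+def _example_register_stub():
+  from google.appengine.api import apiproxy_stub_map
+  apiproxy_stub_map.apiproxy.RegisterStub(
+      'mail', MailServiceStub(show_mail_body=True))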
diff --git a/google_appengine/google/appengine/api/mail_stub.pyc b/google_appengine/google/appengine/api/mail_stub.pyc
new file mode 100644
index 0000000..1f7e646
--- /dev/null
+++ b/google_appengine/google/appengine/api/mail_stub.pyc
Binary files differ
diff --git a/google_appengine/google/appengine/api/memcache/__init__.py b/google_appengine/google/appengine/api/memcache/__init__.py
new file mode 100755
index 0000000..1f23cb6
--- /dev/null
+++ b/google_appengine/google/appengine/api/memcache/__init__.py
@@ -0,0 +1,931 @@
+#!/usr/bin/env python
+#
+# Copyright 2007 Google Inc.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
+
+"""Memcache API.
+
+Provides a memcached-like API to application developers to store
+data in memory when reliable storage via the DataStore API isn't
+required and higher performance is desired.
+"""
+
+
+
+import cStringIO
+import math
+import pickle
+import types
+import sha
+
+from google.appengine.api import api_base_pb
+from google.appengine.api import apiproxy_stub_map
+from google.appengine.api import namespace_manager
+from google.appengine.api.memcache import memcache_service_pb
+from google.appengine.runtime import apiproxy_errors
+
+MemcacheSetResponse = memcache_service_pb.MemcacheSetResponse
+MemcacheSetRequest = memcache_service_pb.MemcacheSetRequest
+
+MemcacheGetResponse = memcache_service_pb.MemcacheGetResponse
+MemcacheGetRequest = memcache_service_pb.MemcacheGetRequest
+
+MemcacheDeleteResponse = memcache_service_pb.MemcacheDeleteResponse
+MemcacheDeleteRequest = memcache_service_pb.MemcacheDeleteRequest
+
+MemcacheIncrementResponse = memcache_service_pb.MemcacheIncrementResponse
+MemcacheIncrementRequest = memcache_service_pb.MemcacheIncrementRequest
+
+MemcacheFlushResponse = memcache_service_pb.MemcacheFlushResponse
+MemcacheFlushRequest = memcache_service_pb.MemcacheFlushRequest
+
+MemcacheStatsRequest = memcache_service_pb.MemcacheStatsRequest
+MemcacheStatsResponse = memcache_service_pb.MemcacheStatsResponse
+
+DELETE_NETWORK_FAILURE = 0
+DELETE_ITEM_MISSING = 1
+DELETE_SUCCESSFUL = 2
+
+MAX_KEY_SIZE = 250
+MAX_VALUE_SIZE = 10 ** 6
+
+STAT_HITS = 'hits'
+STAT_MISSES = 'misses'
+STAT_BYTE_HITS = 'byte_hits'
+STAT_ITEMS = 'items'
+STAT_BYTES = 'bytes'
+STAT_OLDEST_ITEM_AGES = 'oldest_item_age'
+
+FLAG_TYPE_MASK = 7
+FLAG_COMPRESSED = 1 << 3
+
+TYPE_STR = 0
+TYPE_UNICODE = 1
+TYPE_PICKLED = 2
+TYPE_INT = 3
+TYPE_LONG = 4
+TYPE_BOOL = 5
+
+
+def _key_string(key, key_prefix='', server_to_user_dict=None):
+ """Utility function to handle different ways of requesting keys.
+
+ Args:
+ key: Either a string or tuple of (shard_number, string). In Google App
+ Engine the sharding is automatic so the shard number is ignored.
+ To memcache, the key is just bytes (no defined encoding).
+ key_prefix: Optional string prefix to prepend to key.
+ server_to_user_dict: Optional dictionary to populate with a mapping of
+ server-side key (which includes the key_prefix) to user-supplied key
+ (which does not have the prefix).
+
+ Returns:
+ The key as a non-unicode string prepended with key_prefix. This is
+ the key sent to and stored by the server. If the resulting key is
+ longer then MAX_KEY_SIZE, it will be hashed with sha1 and will be
+ replaced with the hex representation of the said hash.
+
+ Raises:
+ TypeError: If provided key isn't a string or tuple of (int, string)
+ or key_prefix or server_to_user_dict are of the wrong type.
+ """
+ if type(key) is types.TupleType:
+ key = key[1]
+ if not isinstance(key, basestring):
+ raise TypeError('Key must be a string instance, received %r' % key)
+ if not isinstance(key_prefix, basestring):
+ raise TypeError('key_prefix must be a string instance, received %r' %
+ key_prefix)
+
+ server_key = key_prefix + key
+ if isinstance(server_key, unicode):
+ server_key = server_key.encode('utf-8')
+
+ if len(server_key) > MAX_KEY_SIZE:
+ server_key = sha.new(server_key).hexdigest()
+
+ if server_to_user_dict is not None:
+ if not isinstance(server_to_user_dict, dict):
+      raise TypeError('server_to_user_dict must be a dict instance, '
+                      'received %r' % server_to_user_dict)
+ server_to_user_dict[server_key] = key
+
+ return server_key
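+
+
+# A minimal sketch (illustrative only): how keys are normalized.  The values
+# are hypothetical.
+def _example_key_string():
+  mapping = {}
+  server_key = _key_string(u'counter', key_prefix='app1:',
+                           server_to_user_dict=mapping)
+  assert server_key == 'app1:counter'
+  assert mapping[server_key] == u'counter'
+  # Shard numbers in (shard, key) tuples are ignored.
+  assert _key_string((3, 'counter')) == 'counter'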
+
+
+def _validate_encode_value(value, do_pickle):
+ """Utility function to validate and encode server keys and values.
+
+ Args:
+ value: Value to store in memcache. If it's a string, it will get passed
+ along as-is. If it's a unicode string, it will be marked appropriately,
+ such that retrievals will yield a unicode value. If it's any other data
+ type, this function will attempt to pickle the data and then store the
+ serialized result, unpickling it upon retrieval.
+ do_pickle: Callable that takes an object and returns a non-unicode
+ string containing the pickled object.
+
+ Returns:
+ Tuple (stored_value, flags) where:
+ stored_value: The value as a non-unicode string that should be stored
+ in memcache.
+ flags: An integer with bits set from the FLAG_* constants in this file
+ to indicate the encoding of the key and value.
+
+ Raises:
+ ValueError: If the encoded value is too large.
+ pickle.PicklingError: If the value is not a string and could not be pickled.
+ RuntimeError: If a complicated data structure could not be pickled due to
+ too many levels of recursion in its composition.
+ """
+ flags = 0
+ stored_value = value
+
+ if isinstance(value, str):
+ pass
+ elif isinstance(value, unicode):
+ stored_value = value.encode('utf-8')
+ flags |= TYPE_UNICODE
+ elif isinstance(value, bool):
+ stored_value = str(int(value))
+ flags |= TYPE_BOOL
+ elif isinstance(value, int):
+ stored_value = str(value)
+ flags |= TYPE_INT
+ elif isinstance(value, long):
+ stored_value = str(value)
+ flags |= TYPE_LONG
+ else:
+ stored_value = do_pickle(value)
+ flags |= TYPE_PICKLED
+
+
+ if len(stored_value) > MAX_VALUE_SIZE:
+ raise ValueError('Values may not be more than %d bytes in length; '
+ 'received %d bytes' % (MAX_VALUE_SIZE, len(stored_value)))
+
+ return (stored_value, flags)
+
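+
+# Editor's sketch (hypothetical, not part of the original SDK): runs each
+# supported value type through _validate_encode_value(); pickle.dumps stands
+# in for the Client's configured pickler, and FLAG_TYPE_MASK/TYPE_* are the
+# module constants used below. Note that bool is tested before int above,
+# since bool is a subclass of int.
+def _example_encode_values():
+  samples = ['raw', u'caf\xe9', True, 42, 123456789L, {'nested': [1, 2]}]
+  results = []
+  for sample in samples:
+    stored_value, flags = _validate_encode_value(sample, pickle.dumps)
+    results.append((flags & FLAG_TYPE_MASK, stored_value))
+  return results
+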
+
+def _decode_value(stored_value, flags, do_unpickle):
+ """Utility function for decoding values retrieved from memcache.
+
+ Args:
+ stored_value: The value as a non-unicode string that was stored.
+ flags: An integer with bits set from the FLAG_* constants in this file
+ that indicate the encoding of the key and value.
+ do_unpickle: Callable that takes a non-unicode string object that contains
+ a pickled object and returns the pickled object.
+
+ Returns:
+ The original object that was stored, be it a normal string, a unicode
+ string, int, long, or a Python object that was pickled.
+
+ Raises:
+ pickle.UnpicklingError: If the value could not be unpickled.
+ """
+ assert isinstance(stored_value, str)
+ assert isinstance(flags, (int, long))
+
+ type_number = flags & FLAG_TYPE_MASK
+ value = stored_value
+
+
+ if type_number == TYPE_STR:
+ return value
+ elif type_number == TYPE_UNICODE:
+ return value.decode('utf-8')
+ elif type_number == TYPE_PICKLED:
+ return do_unpickle(value)
+ elif type_number == TYPE_BOOL:
+ return bool(int(value))
+ elif type_number == TYPE_INT:
+ return int(value)
+ elif type_number == TYPE_LONG:
+ return long(value)
+ else:
+ assert False, "Unknown stored type"
+ assert False, "Shouldn't get here."
+
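+
+# Editor's sketch (hypothetical): a round trip through the encode/decode
+# pair above, showing that the stored flags recover the original Python
+# type. pickle.dumps/pickle.loads stand in for the Client's callables.
+def _example_round_trip(value):
+  stored_value, flags = _validate_encode_value(value, pickle.dumps)
+  return _decode_value(stored_value, flags, pickle.loads)
+
+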
+class Client(object):
+ """Memcache client object, through which one invokes all memcache operations.
+
+ Several methods are no-ops to retain source-level compatibility
+ with the existing popular Python memcache library.
+
+ Any method that takes a 'key' argument will accept that key as a string
+ (unicode or not) or a tuple of (hash_value, string) where the hash_value,
+ normally used for sharding onto a memcache instance, is instead ignored, as
+ Google App Engine deals with the sharding transparently. Keys in memcache are
+ just bytes, without a specified encoding. All such methods may raise TypeError
+  if provided a bogus key value, and ValueError if the key is too large.
+
+ Any method that takes a 'value' argument will accept as that value any
+ string (unicode or not), int, long, or pickle-able Python object, including
+ all native types. You'll get back from the cache the same type that you
+ originally put in.
+ """
+
+ def __init__(self, servers=None, debug=0,
+ pickleProtocol=pickle.HIGHEST_PROTOCOL,
+ pickler=pickle.Pickler,
+ unpickler=pickle.Unpickler,
+ pload=None,
+ pid=None,
+ make_sync_call=apiproxy_stub_map.MakeSyncCall):
+ """Create a new Client object.
+
+ No parameters are required.
+
+ Arguments:
+ servers: Ignored; only for compatibility.
+ debug: Ignored; only for compatibility.
+ pickleProtocol: Pickle protocol to use for pickling the object.
+ pickler: pickle.Pickler sub-class to use for pickling.
+ unpickler: pickle.Unpickler sub-class to use for unpickling.
+ pload: Callable to use for retrieving objects by persistent id.
+      pid: Callable used to determine the persistent id for objects, if any.
+ make_sync_call: Function to use to make an App Engine service call.
+ Used for testing.
+ """
+ self._pickle_data = cStringIO.StringIO()
+ self._pickler_instance = pickler(self._pickle_data,
+ protocol=pickleProtocol)
+ self._unpickler_instance = unpickler(self._pickle_data)
+ if pid is not None:
+ self._pickler_instance.persistent_id = pid
+ if pload is not None:
+ self._unpickler_instance.persistent_load = pload
+
+ def DoPickle(value):
+ self._pickle_data.truncate(0)
+ self._pickler_instance.clear_memo()
+ self._pickler_instance.dump(value)
+ return self._pickle_data.getvalue()
+ self._do_pickle = DoPickle
+
+ def DoUnpickle(value):
+ self._pickle_data.truncate(0)
+ self._pickle_data.write(value)
+ self._pickle_data.seek(0)
+ self._unpickler_instance.memo.clear()
+ return self._unpickler_instance.load()
+ self._do_unpickle = DoUnpickle
+
+ self._make_sync_call = make_sync_call
+
+ def set_servers(self, servers):
+ """Sets the pool of memcache servers used by the client.
+
+ This is purely a compatibility method. In Google App Engine, it's a no-op.
+ """
+ pass
+
+ def disconnect_all(self):
+ """Closes all connections to memcache servers.
+
+ This is purely a compatibility method. In Google App Engine, it's a no-op.
+ """
+ pass
+
+ def forget_dead_hosts(self):
+ """Resets all servers to the alive status.
+
+ This is purely a compatibility method. In Google App Engine, it's a no-op.
+ """
+ pass
+
+ def debuglog(self):
+ """Logging function for debugging information.
+
+ This is purely a compatibility method. In Google App Engine, it's a no-op.
+ """
+ pass
+
+ def get_stats(self):
+ """Gets memcache statistics for this application.
+
+    All of these statistics may reset due to various transient conditions;
+    they reflect the best information available at the time of the call.
+
+ Returns:
+ Dictionary mapping statistic names to associated values. Statistics and
+ their associated meanings:
+
+ hits: Number of cache get requests resulting in a cache hit.
+ misses: Number of cache get requests resulting in a cache miss.
+ byte_hits: Sum of bytes transferred on get requests. Rolls over to
+ zero on overflow.
+ items: Number of key/value pairs in the cache.
+ bytes: Total size of all items in the cache.
+ oldest_item_age: How long in seconds since the oldest item in the
+ cache was accessed. Effectively, this indicates how long a new
+ item will survive in the cache without being accessed. This is
+ _not_ the amount of time that has elapsed since the item was
+ created.
+
+ On error, returns None.
+ """
+ request = MemcacheStatsRequest()
+ response = MemcacheStatsResponse()
+ try:
+ self._make_sync_call('memcache', 'Stats', request, response)
+ except apiproxy_errors.Error:
+ return None
+
+ if not response.has_stats():
+ return {
+ STAT_HITS: 0,
+ STAT_MISSES: 0,
+ STAT_BYTE_HITS: 0,
+ STAT_ITEMS: 0,
+ STAT_BYTES: 0,
+ STAT_OLDEST_ITEM_AGES: 0,
+ }
+
+ stats = response.stats()
+ return {
+ STAT_HITS: stats.hits(),
+ STAT_MISSES: stats.misses(),
+ STAT_BYTE_HITS: stats.byte_hits(),
+ STAT_ITEMS: stats.items(),
+ STAT_BYTES: stats.bytes(),
+ STAT_OLDEST_ITEM_AGES: stats.oldest_item_age(),
+ }
+
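+  # Editor's sketch (hypothetical helper, not part of the original API):
+  # shows how callers can consume get_stats(), including the None it
+  # returns on RPC errors.
+  def _example_hit_ratio(self):
+    """Returns the fraction of gets served from cache, or None on error."""
+    stats = self.get_stats()
+    if stats is None:
+      return None
+    lookups = stats[STAT_HITS] + stats[STAT_MISSES]
+    if not lookups:
+      return 0.0
+    return float(stats[STAT_HITS]) / lookups
+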
+ def flush_all(self):
+ """Deletes everything in memcache.
+
+ Returns:
+ True on success, False on RPC or server error.
+ """
+ request = MemcacheFlushRequest()
+ response = MemcacheFlushResponse()
+ try:
+ self._make_sync_call('memcache', 'FlushAll', request, response)
+ except apiproxy_errors.Error:
+ return False
+ return True
+
+ def get(self, key, namespace=None):
+ """Looks up a single key in memcache.
+
+ If you have multiple items to load, though, it's much more efficient
+ to use get_multi() instead, which loads them in one bulk operation,
+ reducing the networking latency that'd otherwise be required to do
+ many serialized get() operations.
+
+ Args:
+ key: The key in memcache to look up. See docs on Client
+ for details of format.
+ namespace: a string specifying an optional namespace to use in
+ the request.
+
+ Returns:
+ The value of the key, if found in memcache, else None.
+ """
+ request = MemcacheGetRequest()
+ request.add_key(_key_string(key))
+ namespace_manager._add_name_space(request, namespace)
+ response = MemcacheGetResponse()
+ try:
+ self._make_sync_call('memcache', 'Get', request, response)
+ except apiproxy_errors.Error:
+ return None
+
+ if not response.item_size():
+ return None
+
+ return _decode_value(response.item(0).value(),
+ response.item(0).flags(),
+ self._do_unpickle)
+
+ def get_multi(self, keys, key_prefix='', namespace=None):
+ """Looks up multiple keys from memcache in one operation.
+
+ This is the recommended way to do bulk loads.
+
+ Args:
+ keys: List of keys to look up. Keys may be strings or
+ tuples of (hash_value, string). Google App Engine
+ does the sharding and hashing automatically, though, so the hash
+ value is ignored. To memcache, keys are just series of bytes,
+ and not in any particular encoding.
+ key_prefix: Prefix to prepend to all keys when talking to the server;
+ not included in the returned dictionary.
+ namespace: a string specifying an optional namespace to use in
+ the request.
+
+ Returns:
+ A dictionary of the keys and values that were present in memcache.
+ Even if the key_prefix was specified, that key_prefix won't be on
+ the keys in the returned dictionary.
+ """
+ request = MemcacheGetRequest()
+ namespace_manager._add_name_space(request, namespace)
+ response = MemcacheGetResponse()
+ user_key = {}
+ for key in keys:
+ request.add_key(_key_string(key, key_prefix, user_key))
+ try:
+ self._make_sync_call('memcache', 'Get', request, response)
+ except apiproxy_errors.Error:
+ return {}
+
+ return_value = {}
+ for returned_item in response.item_list():
+ value = _decode_value(returned_item.value(), returned_item.flags(),
+ self._do_unpickle)
+ return_value[user_key[returned_item.key()]] = value
+ return return_value
+
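+  # Editor's sketch (hypothetical helper): the bulk-load pattern that
+  # get_multi() recommends; note the returned dictionary is keyed by the
+  # user-supplied keys, without the key_prefix.
+  def _example_bulk_lookup(self, user_ids):
+    keys = [str(uid) for uid in user_ids]
+    found = self.get_multi(keys, key_prefix='profile:')
+    missing = [uid for uid in user_ids if str(uid) not in found]
+    return found, missing
+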
+ def delete(self, key, seconds=0, namespace=None):
+ """Deletes a key from memcache.
+
+ Args:
+      key: Key to delete. See docs on Client for details.
+ seconds: Optional number of seconds to make deleted items 'locked'
+ for 'add' operations. Value can be a delta from current time (up to
+ 1 month), or an absolute Unix epoch time. Defaults to 0, which means
+ items can be immediately added. With or without this option,
+ a 'set' operation will always work. Float values will be rounded up to
+ the nearest whole second.
+ namespace: a string specifying an optional namespace to use in
+ the request.
+
+ Returns:
+ DELETE_NETWORK_FAILURE (0) on network failure,
+ DELETE_ITEM_MISSING (1) if the server tried to delete the item but
+ didn't have it, or
+ DELETE_SUCCESSFUL (2) if the item was actually deleted.
+ This can be used as a boolean value, where a network failure is the
+ only bad condition.
+ """
+ if not isinstance(seconds, (int, long, float)):
+ raise TypeError('Delete timeout must be a number.')
+ if seconds < 0:
+ raise ValueError('Delete timeout must be non-negative.')
+
+ request = MemcacheDeleteRequest()
+ namespace_manager._add_name_space(request, namespace)
+ response = MemcacheDeleteResponse()
+
+ delete_item = request.add_item()
+ delete_item.set_key(_key_string(key))
+ delete_item.set_delete_time(int(math.ceil(seconds)))
+ try:
+ self._make_sync_call('memcache', 'Delete', request, response)
+ except apiproxy_errors.Error:
+ return DELETE_NETWORK_FAILURE
+ assert response.delete_status_size() == 1, 'Unexpected status size.'
+
+ if response.delete_status(0) == MemcacheDeleteResponse.DELETED:
+ return DELETE_SUCCESSFUL
+ elif response.delete_status(0) == MemcacheDeleteResponse.NOT_FOUND:
+ return DELETE_ITEM_MISSING
+ assert False, 'Unexpected deletion status code.'
+
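+  # Editor's sketch (hypothetical helper): the three delete() status codes
+  # collapse to a boolean because DELETE_NETWORK_FAILURE (0) is the only
+  # falsy one; the 10-second lock blocks add() but never set().
+  def _example_delete_with_lock(self, key):
+    return self.delete(key, seconds=10) != DELETE_NETWORK_FAILURE
+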
+ def delete_multi(self, keys, seconds=0, key_prefix='', namespace=None):
+ """Delete multiple keys at once.
+
+ Args:
+ keys: List of keys to delete.
+ seconds: Optional number of seconds to make deleted items 'locked'
+ for 'add' operations. Value can be a delta from current time (up to
+ 1 month), or an absolute Unix epoch time. Defaults to 0, which means
+ items can be immediately added. With or without this option,
+ a 'set' operation will always work. Float values will be rounded up to
+ the nearest whole second.
+ key_prefix: Prefix to put on all keys when sending specified
+ keys to memcache. See docs for get_multi() and set_multi().
+ namespace: a string specifying an optional namespace to use in
+ the request.
+
+ Returns:
+ True if all operations completed successfully. False if one
+ or more failed to complete.
+ """
+ if not isinstance(seconds, (int, long, float)):
+ raise TypeError('Delete timeout must be a number.')
+ if seconds < 0:
+ raise ValueError('Delete timeout must not be negative.')
+
+ request = MemcacheDeleteRequest()
+ namespace_manager._add_name_space(request, namespace)
+ response = MemcacheDeleteResponse()
+
+ for key in keys:
+ delete_item = request.add_item()
+ delete_item.set_key(_key_string(key, key_prefix=key_prefix))
+ delete_item.set_delete_time(int(math.ceil(seconds)))
+ try:
+ self._make_sync_call('memcache', 'Delete', request, response)
+ except apiproxy_errors.Error:
+ return False
+ return True
+
+ def set(self, key, value, time=0, min_compress_len=0, namespace=None):
+ """Sets a key's value, regardless of previous contents in cache.
+
+ Unlike add() and replace(), this method always sets (or
+ overwrites) the value in memcache, regardless of previous
+ contents.
+
+ Args:
+ key: Key to set. See docs on Client for details.
+ value: Value to set. Any type. If complex, will be pickled.
+ time: Optional expiration time, either relative number of seconds
+ from current time (up to 1 month), or an absolute Unix epoch time.
+ By default, items never expire, though items may be evicted due to
+ memory pressure. Float values will be rounded up to the nearest
+ whole second.
+ min_compress_len: Ignored option for compatibility.
+ namespace: a string specifying an optional namespace to use in
+ the request.
+
+ Returns:
+ True if set. False on error.
+ """
+ return self._set_with_policy(MemcacheSetRequest.SET, key, value, time=time,
+ namespace=namespace)
+
+ def add(self, key, value, time=0, min_compress_len=0, namespace=None):
+ """Sets a key's value, iff item is not already in memcache.
+
+ Args:
+ key: Key to set. See docs on Client for details.
+ value: Value to set. Any type. If complex, will be pickled.
+ time: Optional expiration time, either relative number of seconds
+ from current time (up to 1 month), or an absolute Unix epoch time.
+ By default, items never expire, though items may be evicted due to
+ memory pressure. Float values will be rounded up to the nearest
+ whole second.
+ min_compress_len: Ignored option for compatibility.
+ namespace: a string specifying an optional namespace to use in
+ the request.
+
+ Returns:
+ True if added. False on error.
+ """
+ return self._set_with_policy(MemcacheSetRequest.ADD, key, value, time=time,
+ namespace=namespace)
+
+ def replace(self, key, value, time=0, min_compress_len=0, namespace=None):
+ """Replaces a key's value, failing if item isn't already in memcache.
+
+ Args:
+ key: Key to set. See docs on Client for details.
+ value: Value to set. Any type. If complex, will be pickled.
+ time: Optional expiration time, either relative number of seconds
+ from current time (up to 1 month), or an absolute Unix epoch time.
+ By default, items never expire, though items may be evicted due to
+ memory pressure. Float values will be rounded up to the nearest
+ whole second.
+ min_compress_len: Ignored option for compatibility.
+ namespace: a string specifying an optional namespace to use in
+ the request.
+
+ Returns:
+ True if replaced. False on RPC error or cache miss.
+ """
+ return self._set_with_policy(MemcacheSetRequest.REPLACE,
+ key, value, time=time, namespace=namespace)
+
+ def _set_with_policy(self, policy, key, value, time=0, namespace=None):
+ """Sets a single key with a specified policy.
+
+ Helper function for set(), add(), and replace().
+
+ Args:
+ policy: One of MemcacheSetRequest.SET, .ADD, or .REPLACE.
+ key: Key to add, set, or replace. See docs on Client for details.
+ value: Value to set.
+ time: Expiration time, defaulting to 0 (never expiring).
+ namespace: a string specifying an optional namespace to use in
+ the request.
+
+ Returns:
+ True if stored, False on RPC error or policy error, e.g. a replace
+ that failed due to the item not already existing, or an add
+      that failed due to the item already existing.
+ """
+ if not isinstance(time, (int, long, float)):
+ raise TypeError('Expiration must be a number.')
+ if time < 0:
+ raise ValueError('Expiration must not be negative.')
+
+ request = MemcacheSetRequest()
+ item = request.add_item()
+ item.set_key(_key_string(key))
+ stored_value, flags = _validate_encode_value(value, self._do_pickle)
+ item.set_value(stored_value)
+ item.set_flags(flags)
+ item.set_set_policy(policy)
+ item.set_expiration_time(int(math.ceil(time)))
+ namespace_manager._add_name_space(request, namespace)
+ response = MemcacheSetResponse()
+ try:
+ self._make_sync_call('memcache', 'Set', request, response)
+ except apiproxy_errors.Error:
+ return False
+ if response.set_status_size() != 1:
+ return False
+ return response.set_status(0) == MemcacheSetResponse.STORED
+
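+  # Editor's sketch (hypothetical helper): contrasts the three write
+  # policies built on _set_with_policy(). add() fails once the key exists,
+  # replace() fails until it does, and set() succeeds either way.
+  def _example_write_policies(self, key):
+    created = self.add(key, 'first')
+    replaced = self.replace(key, 'second')
+    forced = self.set(key, 'third')
+    return created, replaced, forced
+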
+ def _set_multi_with_policy(self, policy, mapping, time=0, key_prefix='',
+ namespace=None):
+ """Set multiple keys with a specified policy.
+
+ Helper function for set_multi(), add_multi(), and replace_multi(). This
+ reduces the network latency of doing many requests in serial.
+
+ Args:
+ policy: One of MemcacheSetRequest.SET, ADD, or REPLACE.
+ mapping: Dictionary of keys to values.
+ time: Optional expiration time, either relative number of seconds
+ from current time (up to 1 month), or an absolute Unix epoch time.
+ By default, items never expire, though items may be evicted due to
+ memory pressure. Float values will be rounded up to the nearest
+ whole second.
+      key_prefix: Prefix to prepend to all keys.
+ namespace: a string specifying an optional namespace to use in
+ the request.
+
+ Returns:
+ A list of keys whose values were NOT set. On total success,
+ this list should be empty. On network/RPC/server errors,
+ a list of all input keys is returned; in this case the keys
+ may or may not have been updated.
+ """
+ if not isinstance(time, (int, long, float)):
+ raise TypeError('Expiration must be a number.')
+ if time < 0.0:
+ raise ValueError('Expiration must not be negative.')
+
+ request = MemcacheSetRequest()
+ user_key = {}
+ server_keys = []
+ for key, value in mapping.iteritems():
+ server_key = _key_string(key, key_prefix, user_key)
+ stored_value, flags = _validate_encode_value(value, self._do_pickle)
+ server_keys.append(server_key)
+
+ item = request.add_item()
+ item.set_key(server_key)
+ item.set_value(stored_value)
+ item.set_flags(flags)
+ item.set_set_policy(policy)
+ item.set_expiration_time(int(math.ceil(time)))
+ namespace_manager._add_name_space(request, namespace)
+
+ response = MemcacheSetResponse()
+ try:
+ self._make_sync_call('memcache', 'Set', request, response)
+ except apiproxy_errors.Error:
+ return user_key.values()
+
+ assert response.set_status_size() == len(server_keys)
+
+ unset_list = []
+ for server_key, set_status in zip(server_keys, response.set_status_list()):
+ if set_status != MemcacheSetResponse.STORED:
+ unset_list.append(user_key[server_key])
+
+ return unset_list
+
+ def set_multi(self, mapping, time=0, key_prefix='', min_compress_len=0,
+ namespace=None):
+ """Set multiple keys' values at once, regardless of previous contents.
+
+ Args:
+ mapping: Dictionary of keys to values.
+ time: Optional expiration time, either relative number of seconds
+ from current time (up to 1 month), or an absolute Unix epoch time.
+ By default, items never expire, though items may be evicted due to
+ memory pressure. Float values will be rounded up to the nearest
+ whole second.
+      key_prefix: Prefix to prepend to all keys.
+ min_compress_len: Unimplemented compatibility option.
+ namespace: a string specifying an optional namespace to use in
+ the request.
+
+ Returns:
+ A list of keys whose values were NOT set. On total success,
+ this list should be empty.
+ """
+ return self._set_multi_with_policy(MemcacheSetRequest.SET, mapping,
+ time=time, key_prefix=key_prefix,
+ namespace=namespace)
+
+ def add_multi(self, mapping, time=0, key_prefix='', min_compress_len=0,
+ namespace=None):
+ """Set multiple keys' values iff items are not already in memcache.
+
+ Args:
+ mapping: Dictionary of keys to values.
+ time: Optional expiration time, either relative number of seconds
+ from current time (up to 1 month), or an absolute Unix epoch time.
+ By default, items never expire, though items may be evicted due to
+ memory pressure. Float values will be rounded up to the nearest
+ whole second.
+      key_prefix: Prefix to prepend to all keys.
+ min_compress_len: Unimplemented compatibility option.
+ namespace: a string specifying an optional namespace to use in
+ the request.
+
+ Returns:
+ A list of keys whose values were NOT set because they did not already
+ exist in memcache. On total success, this list should be empty.
+ """
+ return self._set_multi_with_policy(MemcacheSetRequest.ADD, mapping,
+ time=time, key_prefix=key_prefix,
+ namespace=namespace)
+
+ def replace_multi(self, mapping, time=0, key_prefix='', min_compress_len=0,
+ namespace=None):
+ """Replace multiple keys' values, failing if the items aren't in memcache.
+
+ Args:
+ mapping: Dictionary of keys to values.
+ time: Optional expiration time, either relative number of seconds
+ from current time (up to 1 month), or an absolute Unix epoch time.
+ By default, items never expire, though items may be evicted due to
+ memory pressure. Float values will be rounded up to the nearest
+ whole second.
+      key_prefix: Prefix to prepend to all keys.
+ min_compress_len: Unimplemented compatibility option.
+ namespace: a string specifying an optional namespace to use in
+ the request.
+
+ Returns:
+ A list of keys whose values were NOT set because they already existed
+ in memcache. On total success, this list should be empty.
+ """
+ return self._set_multi_with_policy(MemcacheSetRequest.REPLACE, mapping,
+ time=time, key_prefix=key_prefix,
+ namespace=namespace)
+
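+  # Editor's sketch (hypothetical helper): the *_multi() methods return the
+  # keys that were NOT stored, which makes a single retry pass trivial.
+  def _example_bulk_store(self, mapping):
+    failed = self.set_multi(mapping, time=3600, key_prefix='profile:')
+    if failed:
+      retry = dict((key, mapping[key]) for key in failed)
+      failed = self.set_multi(retry, time=3600, key_prefix='profile:')
+    return failed
+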
+ def incr(self, key, delta=1, namespace=None, initial_value=None):
+ """Atomically increments a key's value.
+
+    Internally, the value is an unsigned 64-bit integer. Memcache
+ doesn't check 64-bit overflows. The value, if too large, will
+ wrap around.
+
+ Unless an initial_value is specified, the key must already exist
+ in the cache to be incremented. To initialize a counter, either
+ specify initial_value or set() it to the initial value, as an
+ ASCII decimal integer. Future get()s of the key, post-increment,
+    will still return an ASCII decimal value.
+
+ Args:
+ key: Key to increment. See Client's docstring for details.
+ delta: Non-negative integer value (int or long) to increment key by,
+ defaulting to 1.
+ namespace: a string specifying an optional namespace to use in
+ the request.
+ initial_value: initial value to put in the cache, if it doesn't
+ already exist. The default value, None, will not create a cache
+ entry if it doesn't already exist.
+
+ Returns:
+ New long integer value, or None if key was not in the cache, could not
+ be incremented for any other reason, or a network/RPC/server error
+ occurred.
+
+ Raises:
+      ValueError: If delta is negative.
+ TypeError: If delta isn't an int or long.
+ """
+ return self._incrdecr(key, False, delta, namespace=namespace,
+ initial_value=initial_value)
+
+ def decr(self, key, delta=1, namespace=None, initial_value=None):
+ """Atomically decrements a key's value.
+
+    Internally, the value is an unsigned 64-bit integer. Memcache
+ caps decrementing below zero to zero.
+
+ The key must already exist in the cache to be decremented. See
+ docs on incr() for details.
+
+ Args:
+ key: Key to decrement. See Client's docstring for details.
+ delta: Non-negative integer value (int or long) to decrement key by,
+ defaulting to 1.
+ namespace: a string specifying an optional namespace to use in
+ the request.
+ initial_value: initial value to put in the cache, if it doesn't
+ already exist. The default value, None, will not create a cache
+ entry if it doesn't already exist.
+
+ Returns:
+ New long integer value, or None if key wasn't in cache and couldn't
+ be decremented, or a network/RPC/server error occurred.
+
+ Raises:
+      ValueError: If delta is negative.
+ TypeError: If delta isn't an int or long.
+ """
+ return self._incrdecr(key, True, delta, namespace=namespace,
+ initial_value=initial_value)
+
+ def _incrdecr(self, key, is_negative, delta, namespace=None,
+ initial_value=None):
+ """Increment or decrement a key by a provided delta.
+
+ Args:
+ key: Key to increment or decrement.
+ is_negative: Boolean, if this is a decrement.
+ delta: Non-negative integer amount (int or long) to increment
+ or decrement by.
+ namespace: a string specifying an optional namespace to use in
+ the request.
+ initial_value: initial value to put in the cache, if it doesn't
+ already exist. The default value, None, will not create a cache
+ entry if it doesn't already exist.
+
+ Returns:
+ New long integer value, or None on cache miss or network/RPC/server
+ error.
+
+ Raises:
+ ValueError: If delta is negative.
+ TypeError: If delta isn't an int or long.
+ """
+ if not isinstance(delta, (int, long)):
+ raise TypeError('Delta must be an integer or long, received %r' % delta)
+ if delta < 0:
+ raise ValueError('Delta must not be negative.')
+
+ request = MemcacheIncrementRequest()
+ namespace_manager._add_name_space(request, namespace)
+ response = MemcacheIncrementResponse()
+ request.set_key(_key_string(key))
+ request.set_delta(delta)
+ if is_negative:
+ request.set_direction(MemcacheIncrementRequest.DECREMENT)
+ else:
+ request.set_direction(MemcacheIncrementRequest.INCREMENT)
+ if initial_value is not None:
+ request.set_initial_value(long(initial_value))
+
+ try:
+ self._make_sync_call('memcache', 'Increment', request, response)
+ except apiproxy_errors.Error:
+ return None
+
+ if response.has_new_value():
+ return response.new_value()
+ return None
+
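+  # Editor's sketch (hypothetical helper): with initial_value, incr()
+  # doubles as create-or-increment, avoiding a separate add() call.
+  def _example_count_event(self, name):
+    return self.incr('counter:' + name, initial_value=0)
+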
+
+_CLIENT = None
+
+
+def setup_client(client_obj):
+ """Sets the Client object instance to use for all module-level methods.
+
+  Use this method if you want to have custom persistent_id() or
+ persistent_load() functions associated with your client.
+
+ Args:
+ client_obj: Instance of the memcache.Client object.
+ """
+ global _CLIENT
+ var_dict = globals()
+
+ _CLIENT = client_obj
+ var_dict['set_servers'] = _CLIENT.set_servers
+ var_dict['disconnect_all'] = _CLIENT.disconnect_all
+ var_dict['forget_dead_hosts'] = _CLIENT.forget_dead_hosts
+ var_dict['debuglog'] = _CLIENT.debuglog
+ var_dict['get'] = _CLIENT.get
+ var_dict['get_multi'] = _CLIENT.get_multi
+ var_dict['set'] = _CLIENT.set
+ var_dict['set_multi'] = _CLIENT.set_multi
+ var_dict['add'] = _CLIENT.add
+ var_dict['add_multi'] = _CLIENT.add_multi
+ var_dict['replace'] = _CLIENT.replace
+ var_dict['replace_multi'] = _CLIENT.replace_multi
+ var_dict['delete'] = _CLIENT.delete
+ var_dict['delete_multi'] = _CLIENT.delete_multi
+ var_dict['incr'] = _CLIENT.incr
+ var_dict['decr'] = _CLIENT.decr
+ var_dict['flush_all'] = _CLIENT.flush_all
+ var_dict['get_stats'] = _CLIENT.get_stats
+
+
+setup_client(Client())
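+
+
+# Editor's sketch (hypothetical, not part of the SDK): installing a Client
+# with custom pickling hooks so the module-level functions above delegate to
+# it. my_persistent_id and my_persistent_load are assumed application
+# callbacks, not real SDK symbols.
+def _example_setup_custom_client(my_persistent_id, my_persistent_load):
+  setup_client(Client(pid=my_persistent_id, pload=my_persistent_load))
+  set('greeting', u'hello')
+  return get('greeting')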
diff --git a/google_appengine/google/appengine/api/memcache/__init__.pyc b/google_appengine/google/appengine/api/memcache/__init__.pyc
new file mode 100644
index 0000000..2e2cfef
--- /dev/null
+++ b/google_appengine/google/appengine/api/memcache/__init__.pyc
Binary files differ
diff --git a/google_appengine/google/appengine/api/memcache/memcache_service_pb.py b/google_appengine/google/appengine/api/memcache/memcache_service_pb.py
new file mode 100644
index 0000000..8d499b2
--- /dev/null
+++ b/google_appengine/google/appengine/api/memcache/memcache_service_pb.py
@@ -0,0 +1,2002 @@
+#!/usr/bin/env python
+#
+# Copyright 2007 Google Inc.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
+
+from google.net.proto import ProtocolBuffer
+import array
+import dummy_thread as thread
+
+__pychecker__ = """maxreturns=0 maxbranches=0 no-callinit
+ unusednames=printElemNumber,debug_strs no-special"""
+
+class MemcacheServiceError(ProtocolBuffer.ProtocolMessage):
+
+ OK = 0
+ UNSPECIFIED_ERROR = 1
+
+ _ErrorCode_NAMES = {
+ 0: "OK",
+ 1: "UNSPECIFIED_ERROR",
+ }
+
+ def ErrorCode_Name(cls, x): return cls._ErrorCode_NAMES.get(x, "")
+ ErrorCode_Name = classmethod(ErrorCode_Name)
+
+
+ def __init__(self, contents=None):
+ if contents is not None: self.MergeFromString(contents)
+
+
+ def MergeFrom(self, x):
+ assert x is not self
+
+ def Equals(self, x):
+ if x is self: return 1
+ return 1
+
+ def IsInitialized(self, debug_strs=None):
+ initialized = 1
+ return initialized
+
+ def ByteSize(self):
+ n = 0
+ return n + 0
+
+ def Clear(self):
+ pass
+
+ def OutputUnchecked(self, out):
+ pass
+
+ def TryMerge(self, d):
+ while d.avail() > 0:
+ tt = d.getVarInt32()
+ if (tt == 0): raise ProtocolBuffer.ProtocolBufferDecodeError
+ d.skipData(tt)
+
+
+ def __str__(self, prefix="", printElemNumber=0):
+ res=""
+ return res
+
+
+ def _BuildTagLookupTable(sparse, maxtag, default=None):
+ return tuple([sparse.get(i, default) for i in xrange(0, 1+maxtag)])
+
+
+ _TEXT = _BuildTagLookupTable({
+ 0: "ErrorCode",
+ }, 0)
+
+ _TYPES = _BuildTagLookupTable({
+ 0: ProtocolBuffer.Encoder.NUMERIC,
+ }, 0, ProtocolBuffer.Encoder.MAX_TYPE)
+
+ _STYLE = """"""
+ _STYLE_CONTENT_TYPE = """"""
+class MemcacheGetRequest(ProtocolBuffer.ProtocolMessage):
+ has_name_space_ = 0
+ name_space_ = ""
+
+ def __init__(self, contents=None):
+ self.key_ = []
+ if contents is not None: self.MergeFromString(contents)
+
+ def key_size(self): return len(self.key_)
+ def key_list(self): return self.key_
+
+ def key(self, i):
+ return self.key_[i]
+
+ def set_key(self, i, x):
+ self.key_[i] = x
+
+ def add_key(self, x):
+ self.key_.append(x)
+
+ def clear_key(self):
+ self.key_ = []
+
+ def name_space(self): return self.name_space_
+
+ def set_name_space(self, x):
+ self.has_name_space_ = 1
+ self.name_space_ = x
+
+ def clear_name_space(self):
+ if self.has_name_space_:
+ self.has_name_space_ = 0
+ self.name_space_ = ""
+
+ def has_name_space(self): return self.has_name_space_
+
+
+ def MergeFrom(self, x):
+ assert x is not self
+ for i in xrange(x.key_size()): self.add_key(x.key(i))
+ if (x.has_name_space()): self.set_name_space(x.name_space())
+
+ def Equals(self, x):
+ if x is self: return 1
+ if len(self.key_) != len(x.key_): return 0
+ for e1, e2 in zip(self.key_, x.key_):
+ if e1 != e2: return 0
+ if self.has_name_space_ != x.has_name_space_: return 0
+ if self.has_name_space_ and self.name_space_ != x.name_space_: return 0
+ return 1
+
+ def IsInitialized(self, debug_strs=None):
+ initialized = 1
+ return initialized
+
+ def ByteSize(self):
+ n = 0
+ n += 1 * len(self.key_)
+ for i in xrange(len(self.key_)): n += self.lengthString(len(self.key_[i]))
+ if (self.has_name_space_): n += 1 + self.lengthString(len(self.name_space_))
+ return n + 0
+
+ def Clear(self):
+ self.clear_key()
+ self.clear_name_space()
+
+ def OutputUnchecked(self, out):
+ for i in xrange(len(self.key_)):
+ out.putVarInt32(10)
+ out.putPrefixedString(self.key_[i])
+ if (self.has_name_space_):
+ out.putVarInt32(18)
+ out.putPrefixedString(self.name_space_)
+
+ def TryMerge(self, d):
+ while d.avail() > 0:
+ tt = d.getVarInt32()
+ if tt == 10:
+ self.add_key(d.getPrefixedString())
+ continue
+ if tt == 18:
+ self.set_name_space(d.getPrefixedString())
+ continue
+ if (tt == 0): raise ProtocolBuffer.ProtocolBufferDecodeError
+ d.skipData(tt)
+
+
+ def __str__(self, prefix="", printElemNumber=0):
+ res=""
+ cnt=0
+ for e in self.key_:
+ elm=""
+ if printElemNumber: elm="(%d)" % cnt
+ res+=prefix+("key%s: %s\n" % (elm, self.DebugFormatString(e)))
+ cnt+=1
+ if self.has_name_space_: res+=prefix+("name_space: %s\n" % self.DebugFormatString(self.name_space_))
+ return res
+
+
+ def _BuildTagLookupTable(sparse, maxtag, default=None):
+ return tuple([sparse.get(i, default) for i in xrange(0, 1+maxtag)])
+
+ kkey = 1
+ kname_space = 2
+
+ _TEXT = _BuildTagLookupTable({
+ 0: "ErrorCode",
+ 1: "key",
+ 2: "name_space",
+ }, 2)
+
+ _TYPES = _BuildTagLookupTable({
+ 0: ProtocolBuffer.Encoder.NUMERIC,
+ 1: ProtocolBuffer.Encoder.STRING,
+ 2: ProtocolBuffer.Encoder.STRING,
+ }, 2, ProtocolBuffer.Encoder.MAX_TYPE)
+
+ _STYLE = """"""
+ _STYLE_CONTENT_TYPE = """"""
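+# Editor's sketch (hypothetical, not part of the generated code): building a
+# MemcacheGetRequest by hand with the accessors above; ByteSize() computes
+# the same wire layout that OutputUnchecked() emits.
+def _example_build_get_request():
+  request = MemcacheGetRequest()
+  request.add_key('first-key')
+  request.add_key('second-key')
+  request.set_name_space('demo')
+  assert request.key_size() == 2
+  return request.ByteSize()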
+class MemcacheGetResponse_Item(ProtocolBuffer.ProtocolMessage):
+ has_key_ = 0
+ key_ = ""
+ has_value_ = 0
+ value_ = ""
+ has_flags_ = 0
+ flags_ = 0
+
+ def __init__(self, contents=None):
+ if contents is not None: self.MergeFromString(contents)
+
+ def key(self): return self.key_
+
+ def set_key(self, x):
+ self.has_key_ = 1
+ self.key_ = x
+
+ def clear_key(self):
+ if self.has_key_:
+ self.has_key_ = 0
+ self.key_ = ""
+
+ def has_key(self): return self.has_key_
+
+ def value(self): return self.value_
+
+ def set_value(self, x):
+ self.has_value_ = 1
+ self.value_ = x
+
+ def clear_value(self):
+ if self.has_value_:
+ self.has_value_ = 0
+ self.value_ = ""
+
+ def has_value(self): return self.has_value_
+
+ def flags(self): return self.flags_
+
+ def set_flags(self, x):
+ self.has_flags_ = 1
+ self.flags_ = x
+
+ def clear_flags(self):
+ if self.has_flags_:
+ self.has_flags_ = 0
+ self.flags_ = 0
+
+ def has_flags(self): return self.has_flags_
+
+
+ def MergeFrom(self, x):
+ assert x is not self
+ if (x.has_key()): self.set_key(x.key())
+ if (x.has_value()): self.set_value(x.value())
+ if (x.has_flags()): self.set_flags(x.flags())
+
+ def Equals(self, x):
+ if x is self: return 1
+ if self.has_key_ != x.has_key_: return 0
+ if self.has_key_ and self.key_ != x.key_: return 0
+ if self.has_value_ != x.has_value_: return 0
+ if self.has_value_ and self.value_ != x.value_: return 0
+ if self.has_flags_ != x.has_flags_: return 0
+ if self.has_flags_ and self.flags_ != x.flags_: return 0
+ return 1
+
+ def IsInitialized(self, debug_strs=None):
+ initialized = 1
+ if (not self.has_key_):
+ initialized = 0
+ if debug_strs is not None:
+ debug_strs.append('Required field: key not set.')
+ if (not self.has_value_):
+ initialized = 0
+ if debug_strs is not None:
+ debug_strs.append('Required field: value not set.')
+ return initialized
+
+ def ByteSize(self):
+ n = 0
+ n += self.lengthString(len(self.key_))
+ n += self.lengthString(len(self.value_))
+ if (self.has_flags_): n += 5
+ return n + 2
+
+ def Clear(self):
+ self.clear_key()
+ self.clear_value()
+ self.clear_flags()
+
+ def OutputUnchecked(self, out):
+ out.putVarInt32(18)
+ out.putPrefixedString(self.key_)
+ out.putVarInt32(26)
+ out.putPrefixedString(self.value_)
+ if (self.has_flags_):
+ out.putVarInt32(37)
+ out.put32(self.flags_)
+
+ def TryMerge(self, d):
+ while 1:
+ tt = d.getVarInt32()
+ if tt == 12: break
+ if tt == 18:
+ self.set_key(d.getPrefixedString())
+ continue
+ if tt == 26:
+ self.set_value(d.getPrefixedString())
+ continue
+ if tt == 37:
+ self.set_flags(d.get32())
+ continue
+ if (tt == 0): raise ProtocolBuffer.ProtocolBufferDecodeError
+ d.skipData(tt)
+
+
+ def __str__(self, prefix="", printElemNumber=0):
+ res=""
+ if self.has_key_: res+=prefix+("key: %s\n" % self.DebugFormatString(self.key_))
+ if self.has_value_: res+=prefix+("value: %s\n" % self.DebugFormatString(self.value_))
+ if self.has_flags_: res+=prefix+("flags: %s\n" % self.DebugFormatFixed32(self.flags_))
+ return res
+
+class MemcacheGetResponse(ProtocolBuffer.ProtocolMessage):
+
+ def __init__(self, contents=None):
+ self.item_ = []
+ if contents is not None: self.MergeFromString(contents)
+
+ def item_size(self): return len(self.item_)
+ def item_list(self): return self.item_
+
+ def item(self, i):
+ return self.item_[i]
+
+ def mutable_item(self, i):
+ return self.item_[i]
+
+ def add_item(self):
+ x = MemcacheGetResponse_Item()
+ self.item_.append(x)
+ return x
+
+ def clear_item(self):
+ self.item_ = []
+
+ def MergeFrom(self, x):
+ assert x is not self
+ for i in xrange(x.item_size()): self.add_item().CopyFrom(x.item(i))
+
+ def Equals(self, x):
+ if x is self: return 1
+ if len(self.item_) != len(x.item_): return 0
+ for e1, e2 in zip(self.item_, x.item_):
+ if e1 != e2: return 0
+ return 1
+
+ def IsInitialized(self, debug_strs=None):
+ initialized = 1
+ for p in self.item_:
+ if not p.IsInitialized(debug_strs): initialized=0
+ return initialized
+
+ def ByteSize(self):
+ n = 0
+ n += 2 * len(self.item_)
+ for i in xrange(len(self.item_)): n += self.item_[i].ByteSize()
+ return n + 0
+
+ def Clear(self):
+ self.clear_item()
+
+ def OutputUnchecked(self, out):
+ for i in xrange(len(self.item_)):
+ out.putVarInt32(11)
+ self.item_[i].OutputUnchecked(out)
+ out.putVarInt32(12)
+
+ def TryMerge(self, d):
+ while d.avail() > 0:
+ tt = d.getVarInt32()
+ if tt == 11:
+ self.add_item().TryMerge(d)
+ continue
+ if (tt == 0): raise ProtocolBuffer.ProtocolBufferDecodeError
+ d.skipData(tt)
+
+
+ def __str__(self, prefix="", printElemNumber=0):
+ res=""
+ cnt=0
+ for e in self.item_:
+ elm=""
+ if printElemNumber: elm="(%d)" % cnt
+ res+=prefix+("Item%s {\n" % elm)
+ res+=e.__str__(prefix + " ", printElemNumber)
+ res+=prefix+"}\n"
+ cnt+=1
+ return res
+
+
+ def _BuildTagLookupTable(sparse, maxtag, default=None):
+ return tuple([sparse.get(i, default) for i in xrange(0, 1+maxtag)])
+
+ kItemGroup = 1
+ kItemkey = 2
+ kItemvalue = 3
+ kItemflags = 4
+
+ _TEXT = _BuildTagLookupTable({
+ 0: "ErrorCode",
+ 1: "Item",
+ 2: "key",
+ 3: "value",
+ 4: "flags",
+ }, 4)
+
+ _TYPES = _BuildTagLookupTable({
+ 0: ProtocolBuffer.Encoder.NUMERIC,
+ 1: ProtocolBuffer.Encoder.STARTGROUP,
+ 2: ProtocolBuffer.Encoder.STRING,
+ 3: ProtocolBuffer.Encoder.STRING,
+ 4: ProtocolBuffer.Encoder.FLOAT,
+ }, 4, ProtocolBuffer.Encoder.MAX_TYPE)
+
+ _STYLE = """"""
+ _STYLE_CONTENT_TYPE = """"""
+class MemcacheSetRequest_Item(ProtocolBuffer.ProtocolMessage):
+ has_key_ = 0
+ key_ = ""
+ has_value_ = 0
+ value_ = ""
+ has_flags_ = 0
+ flags_ = 0
+ has_set_policy_ = 0
+ set_policy_ = 1
+ has_expiration_time_ = 0
+ expiration_time_ = 0
+
+ def __init__(self, contents=None):
+ if contents is not None: self.MergeFromString(contents)
+
+ def key(self): return self.key_
+
+ def set_key(self, x):
+ self.has_key_ = 1
+ self.key_ = x
+
+ def clear_key(self):
+ if self.has_key_:
+ self.has_key_ = 0
+ self.key_ = ""
+
+ def has_key(self): return self.has_key_
+
+ def value(self): return self.value_
+
+ def set_value(self, x):
+ self.has_value_ = 1
+ self.value_ = x
+
+ def clear_value(self):
+ if self.has_value_:
+ self.has_value_ = 0
+ self.value_ = ""
+
+ def has_value(self): return self.has_value_
+
+ def flags(self): return self.flags_
+
+ def set_flags(self, x):
+ self.has_flags_ = 1
+ self.flags_ = x
+
+ def clear_flags(self):
+ if self.has_flags_:
+ self.has_flags_ = 0
+ self.flags_ = 0
+
+ def has_flags(self): return self.has_flags_
+
+ def set_policy(self): return self.set_policy_
+
+ def set_set_policy(self, x):
+ self.has_set_policy_ = 1
+ self.set_policy_ = x
+
+ def clear_set_policy(self):
+ if self.has_set_policy_:
+ self.has_set_policy_ = 0
+ self.set_policy_ = 1
+
+ def has_set_policy(self): return self.has_set_policy_
+
+ def expiration_time(self): return self.expiration_time_
+
+ def set_expiration_time(self, x):
+ self.has_expiration_time_ = 1
+ self.expiration_time_ = x
+
+ def clear_expiration_time(self):
+ if self.has_expiration_time_:
+ self.has_expiration_time_ = 0
+ self.expiration_time_ = 0
+
+ def has_expiration_time(self): return self.has_expiration_time_
+
+
+ def MergeFrom(self, x):
+ assert x is not self
+ if (x.has_key()): self.set_key(x.key())
+ if (x.has_value()): self.set_value(x.value())
+ if (x.has_flags()): self.set_flags(x.flags())
+ if (x.has_set_policy()): self.set_set_policy(x.set_policy())
+ if (x.has_expiration_time()): self.set_expiration_time(x.expiration_time())
+
+ def Equals(self, x):
+ if x is self: return 1
+ if self.has_key_ != x.has_key_: return 0
+ if self.has_key_ and self.key_ != x.key_: return 0
+ if self.has_value_ != x.has_value_: return 0
+ if self.has_value_ and self.value_ != x.value_: return 0
+ if self.has_flags_ != x.has_flags_: return 0
+ if self.has_flags_ and self.flags_ != x.flags_: return 0
+ if self.has_set_policy_ != x.has_set_policy_: return 0
+ if self.has_set_policy_ and self.set_policy_ != x.set_policy_: return 0
+ if self.has_expiration_time_ != x.has_expiration_time_: return 0
+ if self.has_expiration_time_ and self.expiration_time_ != x.expiration_time_: return 0
+ return 1
+
+ def IsInitialized(self, debug_strs=None):
+ initialized = 1
+ if (not self.has_key_):
+ initialized = 0
+ if debug_strs is not None:
+ debug_strs.append('Required field: key not set.')
+ if (not self.has_value_):
+ initialized = 0
+ if debug_strs is not None:
+ debug_strs.append('Required field: value not set.')
+ return initialized
+
+ def ByteSize(self):
+ n = 0
+ n += self.lengthString(len(self.key_))
+ n += self.lengthString(len(self.value_))
+ if (self.has_flags_): n += 5
+ if (self.has_set_policy_): n += 1 + self.lengthVarInt64(self.set_policy_)
+ if (self.has_expiration_time_): n += 5
+ return n + 2
+
+ def Clear(self):
+ self.clear_key()
+ self.clear_value()
+ self.clear_flags()
+ self.clear_set_policy()
+ self.clear_expiration_time()
+
+ def OutputUnchecked(self, out):
+ out.putVarInt32(18)
+ out.putPrefixedString(self.key_)
+ out.putVarInt32(26)
+ out.putPrefixedString(self.value_)
+ if (self.has_flags_):
+ out.putVarInt32(37)
+ out.put32(self.flags_)
+ if (self.has_set_policy_):
+ out.putVarInt32(40)
+ out.putVarInt32(self.set_policy_)
+ if (self.has_expiration_time_):
+ out.putVarInt32(53)
+ out.put32(self.expiration_time_)
+
+ def TryMerge(self, d):
+ while 1:
+ tt = d.getVarInt32()
+ if tt == 12: break
+ if tt == 18:
+ self.set_key(d.getPrefixedString())
+ continue
+ if tt == 26:
+ self.set_value(d.getPrefixedString())
+ continue
+ if tt == 37:
+ self.set_flags(d.get32())
+ continue
+ if tt == 40:
+ self.set_set_policy(d.getVarInt32())
+ continue
+ if tt == 53:
+ self.set_expiration_time(d.get32())
+ continue
+ if (tt == 0): raise ProtocolBuffer.ProtocolBufferDecodeError
+ d.skipData(tt)
+
+
+ def __str__(self, prefix="", printElemNumber=0):
+ res=""
+ if self.has_key_: res+=prefix+("key: %s\n" % self.DebugFormatString(self.key_))
+ if self.has_value_: res+=prefix+("value: %s\n" % self.DebugFormatString(self.value_))
+ if self.has_flags_: res+=prefix+("flags: %s\n" % self.DebugFormatFixed32(self.flags_))
+ if self.has_set_policy_: res+=prefix+("set_policy: %s\n" % self.DebugFormatInt32(self.set_policy_))
+ if self.has_expiration_time_: res+=prefix+("expiration_time: %s\n" % self.DebugFormatFixed32(self.expiration_time_))
+ return res
+
+class MemcacheSetRequest(ProtocolBuffer.ProtocolMessage):
+
+ SET = 1
+ ADD = 2
+ REPLACE = 3
+
+ _SetPolicy_NAMES = {
+ 1: "SET",
+ 2: "ADD",
+ 3: "REPLACE",
+ }
+
+ def SetPolicy_Name(cls, x): return cls._SetPolicy_NAMES.get(x, "")
+ SetPolicy_Name = classmethod(SetPolicy_Name)
+
+ has_name_space_ = 0
+ name_space_ = ""
+
+ def __init__(self, contents=None):
+ self.item_ = []
+ if contents is not None: self.MergeFromString(contents)
+
+ def item_size(self): return len(self.item_)
+ def item_list(self): return self.item_
+
+ def item(self, i):
+ return self.item_[i]
+
+ def mutable_item(self, i):
+ return self.item_[i]
+
+ def add_item(self):
+ x = MemcacheSetRequest_Item()
+ self.item_.append(x)
+ return x
+
+ def clear_item(self):
+ self.item_ = []
+ def name_space(self): return self.name_space_
+
+ def set_name_space(self, x):
+ self.has_name_space_ = 1
+ self.name_space_ = x
+
+ def clear_name_space(self):
+ if self.has_name_space_:
+ self.has_name_space_ = 0
+ self.name_space_ = ""
+
+ def has_name_space(self): return self.has_name_space_
+
+
+ def MergeFrom(self, x):
+ assert x is not self
+ for i in xrange(x.item_size()): self.add_item().CopyFrom(x.item(i))
+ if (x.has_name_space()): self.set_name_space(x.name_space())
+
+ def Equals(self, x):
+ if x is self: return 1
+ if len(self.item_) != len(x.item_): return 0
+ for e1, e2 in zip(self.item_, x.item_):
+ if e1 != e2: return 0
+ if self.has_name_space_ != x.has_name_space_: return 0
+ if self.has_name_space_ and self.name_space_ != x.name_space_: return 0
+ return 1
+
+ def IsInitialized(self, debug_strs=None):
+ initialized = 1
+ for p in self.item_:
+ if not p.IsInitialized(debug_strs): initialized=0
+ return initialized
+
+ def ByteSize(self):
+ n = 0
+ n += 2 * len(self.item_)
+ for i in xrange(len(self.item_)): n += self.item_[i].ByteSize()
+ if (self.has_name_space_): n += 1 + self.lengthString(len(self.name_space_))
+ return n + 0
+
+ def Clear(self):
+ self.clear_item()
+ self.clear_name_space()
+
+ def OutputUnchecked(self, out):
+ for i in xrange(len(self.item_)):
+ out.putVarInt32(11)
+ self.item_[i].OutputUnchecked(out)
+ out.putVarInt32(12)
+ if (self.has_name_space_):
+ out.putVarInt32(58)
+ out.putPrefixedString(self.name_space_)
+
+ def TryMerge(self, d):
+ while d.avail() > 0:
+ tt = d.getVarInt32()
+ if tt == 11:
+ self.add_item().TryMerge(d)
+ continue
+ if tt == 58:
+ self.set_name_space(d.getPrefixedString())
+ continue
+ if (tt == 0): raise ProtocolBuffer.ProtocolBufferDecodeError
+ d.skipData(tt)
+
+
+ def __str__(self, prefix="", printElemNumber=0):
+ res=""
+ cnt=0
+ for e in self.item_:
+ elm=""
+ if printElemNumber: elm="(%d)" % cnt
+ res+=prefix+("Item%s {\n" % elm)
+ res+=e.__str__(prefix + " ", printElemNumber)
+ res+=prefix+"}\n"
+ cnt+=1
+ if self.has_name_space_: res+=prefix+("name_space: %s\n" % self.DebugFormatString(self.name_space_))
+ return res
+
+
+ def _BuildTagLookupTable(sparse, maxtag, default=None):
+ return tuple([sparse.get(i, default) for i in xrange(0, 1+maxtag)])
+
+ kItemGroup = 1
+ kItemkey = 2
+ kItemvalue = 3
+ kItemflags = 4
+ kItemset_policy = 5
+ kItemexpiration_time = 6
+ kname_space = 7
+
+ _TEXT = _BuildTagLookupTable({
+ 0: "ErrorCode",
+ 1: "Item",
+ 2: "key",
+ 3: "value",
+ 4: "flags",
+ 5: "set_policy",
+ 6: "expiration_time",
+ 7: "name_space",
+ }, 7)
+
+ _TYPES = _BuildTagLookupTable({
+ 0: ProtocolBuffer.Encoder.NUMERIC,
+ 1: ProtocolBuffer.Encoder.STARTGROUP,
+ 2: ProtocolBuffer.Encoder.STRING,
+ 3: ProtocolBuffer.Encoder.STRING,
+ 4: ProtocolBuffer.Encoder.FLOAT,
+ 5: ProtocolBuffer.Encoder.NUMERIC,
+ 6: ProtocolBuffer.Encoder.FLOAT,
+ 7: ProtocolBuffer.Encoder.STRING,
+ }, 7, ProtocolBuffer.Encoder.MAX_TYPE)
+
+ _STYLE = """"""
+ _STYLE_CONTENT_TYPE = """"""
+class MemcacheSetResponse(ProtocolBuffer.ProtocolMessage):
+
+ STORED = 1
+ NOT_STORED = 2
+ ERROR = 3
+
+ _SetStatusCode_NAMES = {
+ 1: "STORED",
+ 2: "NOT_STORED",
+ 3: "ERROR",
+ }
+
+ def SetStatusCode_Name(cls, x): return cls._SetStatusCode_NAMES.get(x, "")
+ SetStatusCode_Name = classmethod(SetStatusCode_Name)
+
+
+ def __init__(self, contents=None):
+ self.set_status_ = []
+ if contents is not None: self.MergeFromString(contents)
+
+ def set_status_size(self): return len(self.set_status_)
+ def set_status_list(self): return self.set_status_
+
+ def set_status(self, i):
+ return self.set_status_[i]
+
+ def set_set_status(self, i, x):
+ self.set_status_[i] = x
+
+ def add_set_status(self, x):
+ self.set_status_.append(x)
+
+ def clear_set_status(self):
+ self.set_status_ = []
+
+
+ def MergeFrom(self, x):
+ assert x is not self
+ for i in xrange(x.set_status_size()): self.add_set_status(x.set_status(i))
+
+ def Equals(self, x):
+ if x is self: return 1
+ if len(self.set_status_) != len(x.set_status_): return 0
+ for e1, e2 in zip(self.set_status_, x.set_status_):
+ if e1 != e2: return 0
+ return 1
+
+ def IsInitialized(self, debug_strs=None):
+ initialized = 1
+ return initialized
+
+ def ByteSize(self):
+ n = 0
+ n += 1 * len(self.set_status_)
+ for i in xrange(len(self.set_status_)): n += self.lengthVarInt64(self.set_status_[i])
+ return n + 0
+
+ def Clear(self):
+ self.clear_set_status()
+
+ def OutputUnchecked(self, out):
+ for i in xrange(len(self.set_status_)):
+ out.putVarInt32(8)
+ out.putVarInt32(self.set_status_[i])
+
+ def TryMerge(self, d):
+ while d.avail() > 0:
+ tt = d.getVarInt32()
+ if tt == 8:
+ self.add_set_status(d.getVarInt32())
+ continue
+ if (tt == 0): raise ProtocolBuffer.ProtocolBufferDecodeError
+ d.skipData(tt)
+
+
+ def __str__(self, prefix="", printElemNumber=0):
+ res=""
+ cnt=0
+ for e in self.set_status_:
+ elm=""
+ if printElemNumber: elm="(%d)" % cnt
+ res+=prefix+("set_status%s: %s\n" % (elm, self.DebugFormatInt32(e)))
+ cnt+=1
+ return res
+
+
+ def _BuildTagLookupTable(sparse, maxtag, default=None):
+ return tuple([sparse.get(i, default) for i in xrange(0, 1+maxtag)])
+
+ kset_status = 1
+
+ _TEXT = _BuildTagLookupTable({
+ 0: "ErrorCode",
+ 1: "set_status",
+ }, 1)
+
+ _TYPES = _BuildTagLookupTable({
+ 0: ProtocolBuffer.Encoder.NUMERIC,
+ 1: ProtocolBuffer.Encoder.NUMERIC,
+ }, 1, ProtocolBuffer.Encoder.MAX_TYPE)
+
+ _STYLE = """"""
+ _STYLE_CONTENT_TYPE = """"""
+class MemcacheDeleteRequest_Item(ProtocolBuffer.ProtocolMessage):
+ has_key_ = 0
+ key_ = ""
+ has_delete_time_ = 0
+ delete_time_ = 0
+
+ def __init__(self, contents=None):
+ if contents is not None: self.MergeFromString(contents)
+
+ def key(self): return self.key_
+
+ def set_key(self, x):
+ self.has_key_ = 1
+ self.key_ = x
+
+ def clear_key(self):
+ if self.has_key_:
+ self.has_key_ = 0
+ self.key_ = ""
+
+ def has_key(self): return self.has_key_
+
+ def delete_time(self): return self.delete_time_
+
+ def set_delete_time(self, x):
+ self.has_delete_time_ = 1
+ self.delete_time_ = x
+
+ def clear_delete_time(self):
+ if self.has_delete_time_:
+ self.has_delete_time_ = 0
+ self.delete_time_ = 0
+
+ def has_delete_time(self): return self.has_delete_time_
+
+
+ def MergeFrom(self, x):
+ assert x is not self
+ if (x.has_key()): self.set_key(x.key())
+ if (x.has_delete_time()): self.set_delete_time(x.delete_time())
+
+ def Equals(self, x):
+ if x is self: return 1
+ if self.has_key_ != x.has_key_: return 0
+ if self.has_key_ and self.key_ != x.key_: return 0
+ if self.has_delete_time_ != x.has_delete_time_: return 0
+ if self.has_delete_time_ and self.delete_time_ != x.delete_time_: return 0
+ return 1
+
+ def IsInitialized(self, debug_strs=None):
+ initialized = 1
+ if (not self.has_key_):
+ initialized = 0
+ if debug_strs is not None:
+ debug_strs.append('Required field: key not set.')
+ return initialized
+
+ def ByteSize(self):
+ n = 0
+ n += self.lengthString(len(self.key_))
+ if (self.has_delete_time_): n += 5
+ return n + 1
+
+ def Clear(self):
+ self.clear_key()
+ self.clear_delete_time()
+
+ def OutputUnchecked(self, out):
+ out.putVarInt32(18)
+ out.putPrefixedString(self.key_)
+ if (self.has_delete_time_):
+ out.putVarInt32(29)
+ out.put32(self.delete_time_)
+
+ def TryMerge(self, d):
+ while 1:
+ tt = d.getVarInt32()
+ if tt == 12: break
+ if tt == 18:
+ self.set_key(d.getPrefixedString())
+ continue
+ if tt == 29:
+ self.set_delete_time(d.get32())
+ continue
+ if (tt == 0): raise ProtocolBuffer.ProtocolBufferDecodeError
+ d.skipData(tt)
+
+
+ def __str__(self, prefix="", printElemNumber=0):
+ res=""
+ if self.has_key_: res+=prefix+("key: %s\n" % self.DebugFormatString(self.key_))
+ if self.has_delete_time_: res+=prefix+("delete_time: %s\n" % self.DebugFormatFixed32(self.delete_time_))
+ return res
+
+class MemcacheDeleteRequest(ProtocolBuffer.ProtocolMessage):
+ has_name_space_ = 0
+ name_space_ = ""
+
+ def __init__(self, contents=None):
+ self.item_ = []
+ if contents is not None: self.MergeFromString(contents)
+
+ def item_size(self): return len(self.item_)
+ def item_list(self): return self.item_
+
+ def item(self, i):
+ return self.item_[i]
+
+ def mutable_item(self, i):
+ return self.item_[i]
+
+ def add_item(self):
+ x = MemcacheDeleteRequest_Item()
+ self.item_.append(x)
+ return x
+
+ def clear_item(self):
+ self.item_ = []
+ def name_space(self): return self.name_space_
+
+ def set_name_space(self, x):
+ self.has_name_space_ = 1
+ self.name_space_ = x
+
+ def clear_name_space(self):
+ if self.has_name_space_:
+ self.has_name_space_ = 0
+ self.name_space_ = ""
+
+ def has_name_space(self): return self.has_name_space_
+
+
+ def MergeFrom(self, x):
+ assert x is not self
+ for i in xrange(x.item_size()): self.add_item().CopyFrom(x.item(i))
+ if (x.has_name_space()): self.set_name_space(x.name_space())
+
+ def Equals(self, x):
+ if x is self: return 1
+ if len(self.item_) != len(x.item_): return 0
+ for e1, e2 in zip(self.item_, x.item_):
+ if e1 != e2: return 0
+ if self.has_name_space_ != x.has_name_space_: return 0
+ if self.has_name_space_ and self.name_space_ != x.name_space_: return 0
+ return 1
+
+ def IsInitialized(self, debug_strs=None):
+ initialized = 1
+ for p in self.item_:
+ if not p.IsInitialized(debug_strs): initialized=0
+ return initialized
+
+ def ByteSize(self):
+ n = 0
+ n += 2 * len(self.item_)
+ for i in xrange(len(self.item_)): n += self.item_[i].ByteSize()
+ if (self.has_name_space_): n += 1 + self.lengthString(len(self.name_space_))
+ return n + 0
+
+ def Clear(self):
+ self.clear_item()
+ self.clear_name_space()
+
+ def OutputUnchecked(self, out):
+ for i in xrange(len(self.item_)):
+ out.putVarInt32(11)
+ self.item_[i].OutputUnchecked(out)
+ out.putVarInt32(12)
+ if (self.has_name_space_):
+ out.putVarInt32(34)
+ out.putPrefixedString(self.name_space_)
+
+ def TryMerge(self, d):
+ while d.avail() > 0:
+ tt = d.getVarInt32()
+ if tt == 11:
+ self.add_item().TryMerge(d)
+ continue
+ if tt == 34:
+ self.set_name_space(d.getPrefixedString())
+ continue
+ if (tt == 0): raise ProtocolBuffer.ProtocolBufferDecodeError
+ d.skipData(tt)
+
+
+ def __str__(self, prefix="", printElemNumber=0):
+ res=""
+ cnt=0
+ for e in self.item_:
+ elm=""
+ if printElemNumber: elm="(%d)" % cnt
+ res+=prefix+("Item%s {\n" % elm)
+ res+=e.__str__(prefix + " ", printElemNumber)
+ res+=prefix+"}\n"
+ cnt+=1
+ if self.has_name_space_: res+=prefix+("name_space: %s\n" % self.DebugFormatString(self.name_space_))
+ return res
+
+
+ def _BuildTagLookupTable(sparse, maxtag, default=None):
+ return tuple([sparse.get(i, default) for i in xrange(0, 1+maxtag)])
+
+ kItemGroup = 1
+ kItemkey = 2
+ kItemdelete_time = 3
+ kname_space = 4
+
+ _TEXT = _BuildTagLookupTable({
+ 0: "ErrorCode",
+ 1: "Item",
+ 2: "key",
+ 3: "delete_time",
+ 4: "name_space",
+ }, 4)
+
+ _TYPES = _BuildTagLookupTable({
+ 0: ProtocolBuffer.Encoder.NUMERIC,
+ 1: ProtocolBuffer.Encoder.STARTGROUP,
+ 2: ProtocolBuffer.Encoder.STRING,
+ 3: ProtocolBuffer.Encoder.FLOAT,
+ 4: ProtocolBuffer.Encoder.STRING,
+ }, 4, ProtocolBuffer.Encoder.MAX_TYPE)
+
+ _STYLE = """"""
+ _STYLE_CONTENT_TYPE = """"""
+class MemcacheDeleteResponse(ProtocolBuffer.ProtocolMessage):
+
+ DELETED = 1
+ NOT_FOUND = 2
+
+ _DeleteStatusCode_NAMES = {
+ 1: "DELETED",
+ 2: "NOT_FOUND",
+ }
+
+ def DeleteStatusCode_Name(cls, x): return cls._DeleteStatusCode_NAMES.get(x, "")
+ DeleteStatusCode_Name = classmethod(DeleteStatusCode_Name)
+
+
+ def __init__(self, contents=None):
+ self.delete_status_ = []
+ if contents is not None: self.MergeFromString(contents)
+
+ def delete_status_size(self): return len(self.delete_status_)
+ def delete_status_list(self): return self.delete_status_
+
+ def delete_status(self, i):
+ return self.delete_status_[i]
+
+ def set_delete_status(self, i, x):
+ self.delete_status_[i] = x
+
+ def add_delete_status(self, x):
+ self.delete_status_.append(x)
+
+ def clear_delete_status(self):
+ self.delete_status_ = []
+
+
+ def MergeFrom(self, x):
+ assert x is not self
+ for i in xrange(x.delete_status_size()): self.add_delete_status(x.delete_status(i))
+
+ def Equals(self, x):
+ if x is self: return 1
+ if len(self.delete_status_) != len(x.delete_status_): return 0
+ for e1, e2 in zip(self.delete_status_, x.delete_status_):
+ if e1 != e2: return 0
+ return 1
+
+ def IsInitialized(self, debug_strs=None):
+ initialized = 1
+ return initialized
+
+ def ByteSize(self):
+ n = 0
+ n += 1 * len(self.delete_status_)
+ for i in xrange(len(self.delete_status_)): n += self.lengthVarInt64(self.delete_status_[i])
+ return n + 0
+
+ def Clear(self):
+ self.clear_delete_status()
+
+ def OutputUnchecked(self, out):
+ for i in xrange(len(self.delete_status_)):
+ out.putVarInt32(8)
+ out.putVarInt32(self.delete_status_[i])
+
+ def TryMerge(self, d):
+ while d.avail() > 0:
+ tt = d.getVarInt32()
+ if tt == 8:
+ self.add_delete_status(d.getVarInt32())
+ continue
+ if (tt == 0): raise ProtocolBuffer.ProtocolBufferDecodeError
+ d.skipData(tt)
+
+
+ def __str__(self, prefix="", printElemNumber=0):
+ res=""
+ cnt=0
+ for e in self.delete_status_:
+ elm=""
+ if printElemNumber: elm="(%d)" % cnt
+ res+=prefix+("delete_status%s: %s\n" % (elm, self.DebugFormatInt32(e)))
+ cnt+=1
+ return res
+
+
+ def _BuildTagLookupTable(sparse, maxtag, default=None):
+ return tuple([sparse.get(i, default) for i in xrange(0, 1+maxtag)])
+
+ kdelete_status = 1
+
+ _TEXT = _BuildTagLookupTable({
+ 0: "ErrorCode",
+ 1: "delete_status",
+ }, 1)
+
+ _TYPES = _BuildTagLookupTable({
+ 0: ProtocolBuffer.Encoder.NUMERIC,
+ 1: ProtocolBuffer.Encoder.NUMERIC,
+ }, 1, ProtocolBuffer.Encoder.MAX_TYPE)
+
+ _STYLE = """"""
+ _STYLE_CONTENT_TYPE = """"""
+class MemcacheIncrementRequest(ProtocolBuffer.ProtocolMessage):
+
+ INCREMENT = 1
+ DECREMENT = 2
+
+ _Direction_NAMES = {
+ 1: "INCREMENT",
+ 2: "DECREMENT",
+ }
+
+ def Direction_Name(cls, x): return cls._Direction_NAMES.get(x, "")
+ Direction_Name = classmethod(Direction_Name)
+
+ has_key_ = 0
+ key_ = ""
+ has_name_space_ = 0
+ name_space_ = ""
+ has_delta_ = 0
+ delta_ = 1
+ has_direction_ = 0
+ direction_ = 1
+ has_initial_value_ = 0
+ initial_value_ = 0
+
+ def __init__(self, contents=None):
+ if contents is not None: self.MergeFromString(contents)
+
+ def key(self): return self.key_
+
+ def set_key(self, x):
+ self.has_key_ = 1
+ self.key_ = x
+
+ def clear_key(self):
+ if self.has_key_:
+ self.has_key_ = 0
+ self.key_ = ""
+
+ def has_key(self): return self.has_key_
+
+ def name_space(self): return self.name_space_
+
+ def set_name_space(self, x):
+ self.has_name_space_ = 1
+ self.name_space_ = x
+
+ def clear_name_space(self):
+ if self.has_name_space_:
+ self.has_name_space_ = 0
+ self.name_space_ = ""
+
+ def has_name_space(self): return self.has_name_space_
+
+ def delta(self): return self.delta_
+
+ def set_delta(self, x):
+ self.has_delta_ = 1
+ self.delta_ = x
+
+ def clear_delta(self):
+ if self.has_delta_:
+ self.has_delta_ = 0
+ self.delta_ = 1
+
+ def has_delta(self): return self.has_delta_
+
+ def direction(self): return self.direction_
+
+ def set_direction(self, x):
+ self.has_direction_ = 1
+ self.direction_ = x
+
+ def clear_direction(self):
+ if self.has_direction_:
+ self.has_direction_ = 0
+ self.direction_ = 1
+
+ def has_direction(self): return self.has_direction_
+
+ def initial_value(self): return self.initial_value_
+
+ def set_initial_value(self, x):
+ self.has_initial_value_ = 1
+ self.initial_value_ = x
+
+ def clear_initial_value(self):
+ if self.has_initial_value_:
+ self.has_initial_value_ = 0
+ self.initial_value_ = 0
+
+ def has_initial_value(self): return self.has_initial_value_
+
+
+ def MergeFrom(self, x):
+ assert x is not self
+ if (x.has_key()): self.set_key(x.key())
+ if (x.has_name_space()): self.set_name_space(x.name_space())
+ if (x.has_delta()): self.set_delta(x.delta())
+ if (x.has_direction()): self.set_direction(x.direction())
+ if (x.has_initial_value()): self.set_initial_value(x.initial_value())
+
+ def Equals(self, x):
+ if x is self: return 1
+ if self.has_key_ != x.has_key_: return 0
+ if self.has_key_ and self.key_ != x.key_: return 0
+ if self.has_name_space_ != x.has_name_space_: return 0
+ if self.has_name_space_ and self.name_space_ != x.name_space_: return 0
+ if self.has_delta_ != x.has_delta_: return 0
+ if self.has_delta_ and self.delta_ != x.delta_: return 0
+ if self.has_direction_ != x.has_direction_: return 0
+ if self.has_direction_ and self.direction_ != x.direction_: return 0
+ if self.has_initial_value_ != x.has_initial_value_: return 0
+ if self.has_initial_value_ and self.initial_value_ != x.initial_value_: return 0
+ return 1
+
+ def IsInitialized(self, debug_strs=None):
+ initialized = 1
+ if (not self.has_key_):
+ initialized = 0
+ if debug_strs is not None:
+ debug_strs.append('Required field: key not set.')
+ return initialized
+
+ def ByteSize(self):
+ n = 0
+ n += self.lengthString(len(self.key_))
+ if (self.has_name_space_): n += 1 + self.lengthString(len(self.name_space_))
+ if (self.has_delta_): n += 1 + self.lengthVarInt64(self.delta_)
+ if (self.has_direction_): n += 1 + self.lengthVarInt64(self.direction_)
+ if (self.has_initial_value_): n += 1 + self.lengthVarInt64(self.initial_value_)
+ return n + 1
+
+ def Clear(self):
+ self.clear_key()
+ self.clear_name_space()
+ self.clear_delta()
+ self.clear_direction()
+ self.clear_initial_value()
+
+ def OutputUnchecked(self, out):
+ out.putVarInt32(10)
+ out.putPrefixedString(self.key_)
+ if (self.has_delta_):
+ out.putVarInt32(16)
+ out.putVarUint64(self.delta_)
+ if (self.has_direction_):
+ out.putVarInt32(24)
+ out.putVarInt32(self.direction_)
+ if (self.has_name_space_):
+ out.putVarInt32(34)
+ out.putPrefixedString(self.name_space_)
+ if (self.has_initial_value_):
+ out.putVarInt32(40)
+ out.putVarUint64(self.initial_value_)
+
+ def TryMerge(self, d):
+ while d.avail() > 0:
+ tt = d.getVarInt32()
+ if tt == 10:
+ self.set_key(d.getPrefixedString())
+ continue
+ if tt == 16:
+ self.set_delta(d.getVarUint64())
+ continue
+ if tt == 24:
+ self.set_direction(d.getVarInt32())
+ continue
+ if tt == 34:
+ self.set_name_space(d.getPrefixedString())
+ continue
+ if tt == 40:
+ self.set_initial_value(d.getVarUint64())
+ continue
+ if (tt == 0): raise ProtocolBuffer.ProtocolBufferDecodeError
+ d.skipData(tt)
+
+
+ def __str__(self, prefix="", printElemNumber=0):
+ res=""
+ if self.has_key_: res+=prefix+("key: %s\n" % self.DebugFormatString(self.key_))
+ if self.has_name_space_: res+=prefix+("name_space: %s\n" % self.DebugFormatString(self.name_space_))
+ if self.has_delta_: res+=prefix+("delta: %s\n" % self.DebugFormatInt64(self.delta_))
+ if self.has_direction_: res+=prefix+("direction: %s\n" % self.DebugFormatInt32(self.direction_))
+ if self.has_initial_value_: res+=prefix+("initial_value: %s\n" % self.DebugFormatInt64(self.initial_value_))
+ return res
+
+
+ def _BuildTagLookupTable(sparse, maxtag, default=None):
+ return tuple([sparse.get(i, default) for i in xrange(0, 1+maxtag)])
+
+ kkey = 1
+ kname_space = 4
+ kdelta = 2
+ kdirection = 3
+ kinitial_value = 5
+
+ _TEXT = _BuildTagLookupTable({
+ 0: "ErrorCode",
+ 1: "key",
+ 2: "delta",
+ 3: "direction",
+ 4: "name_space",
+ 5: "initial_value",
+ }, 5)
+
+ _TYPES = _BuildTagLookupTable({
+ 0: ProtocolBuffer.Encoder.NUMERIC,
+ 1: ProtocolBuffer.Encoder.STRING,
+ 2: ProtocolBuffer.Encoder.NUMERIC,
+ 3: ProtocolBuffer.Encoder.NUMERIC,
+ 4: ProtocolBuffer.Encoder.STRING,
+ 5: ProtocolBuffer.Encoder.NUMERIC,
+ }, 5, ProtocolBuffer.Encoder.MAX_TYPE)
+
+ _STYLE = """"""
+ _STYLE_CONTENT_TYPE = """"""
+class MemcacheIncrementResponse(ProtocolBuffer.ProtocolMessage):
+ has_new_value_ = 0
+ new_value_ = 0
+
+ def __init__(self, contents=None):
+ if contents is not None: self.MergeFromString(contents)
+
+ def new_value(self): return self.new_value_
+
+ def set_new_value(self, x):
+ self.has_new_value_ = 1
+ self.new_value_ = x
+
+ def clear_new_value(self):
+ if self.has_new_value_:
+ self.has_new_value_ = 0
+ self.new_value_ = 0
+
+ def has_new_value(self): return self.has_new_value_
+
+
+ def MergeFrom(self, x):
+ assert x is not self
+ if (x.has_new_value()): self.set_new_value(x.new_value())
+
+ def Equals(self, x):
+ if x is self: return 1
+ if self.has_new_value_ != x.has_new_value_: return 0
+ if self.has_new_value_ and self.new_value_ != x.new_value_: return 0
+ return 1
+
+ def IsInitialized(self, debug_strs=None):
+ initialized = 1
+ return initialized
+
+ def ByteSize(self):
+ n = 0
+ if (self.has_new_value_): n += 1 + self.lengthVarInt64(self.new_value_)
+ return n + 0
+
+ def Clear(self):
+ self.clear_new_value()
+
+ def OutputUnchecked(self, out):
+ if (self.has_new_value_):
+ out.putVarInt32(8)
+ out.putVarUint64(self.new_value_)
+
+ def TryMerge(self, d):
+ while d.avail() > 0:
+ tt = d.getVarInt32()
+ if tt == 8:
+ self.set_new_value(d.getVarUint64())
+ continue
+ if (tt == 0): raise ProtocolBuffer.ProtocolBufferDecodeError
+ d.skipData(tt)
+
+
+ def __str__(self, prefix="", printElemNumber=0):
+ res=""
+ if self.has_new_value_: res+=prefix+("new_value: %s\n" % self.DebugFormatInt64(self.new_value_))
+ return res
+
+
+ def _BuildTagLookupTable(sparse, maxtag, default=None):
+ return tuple([sparse.get(i, default) for i in xrange(0, 1+maxtag)])
+
+ knew_value = 1
+
+ _TEXT = _BuildTagLookupTable({
+ 0: "ErrorCode",
+ 1: "new_value",
+ }, 1)
+
+ _TYPES = _BuildTagLookupTable({
+ 0: ProtocolBuffer.Encoder.NUMERIC,
+ 1: ProtocolBuffer.Encoder.NUMERIC,
+ }, 1, ProtocolBuffer.Encoder.MAX_TYPE)
+
+ _STYLE = """"""
+ _STYLE_CONTENT_TYPE = """"""
+class MemcacheFlushRequest(ProtocolBuffer.ProtocolMessage):
+
+ def __init__(self, contents=None):
+ pass
+ if contents is not None: self.MergeFromString(contents)
+
+
+ def MergeFrom(self, x):
+ assert x is not self
+
+ def Equals(self, x):
+ if x is self: return 1
+ return 1
+
+ def IsInitialized(self, debug_strs=None):
+ initialized = 1
+ return initialized
+
+ def ByteSize(self):
+ n = 0
+ return n + 0
+
+ def Clear(self):
+ pass
+
+ def OutputUnchecked(self, out):
+ pass
+
+ def TryMerge(self, d):
+ while d.avail() > 0:
+ tt = d.getVarInt32()
+ if (tt == 0): raise ProtocolBuffer.ProtocolBufferDecodeError
+ d.skipData(tt)
+
+
+ def __str__(self, prefix="", printElemNumber=0):
+ res=""
+ return res
+
+
+ def _BuildTagLookupTable(sparse, maxtag, default=None):
+ return tuple([sparse.get(i, default) for i in xrange(0, 1+maxtag)])
+
+
+ _TEXT = _BuildTagLookupTable({
+ 0: "ErrorCode",
+ }, 0)
+
+ _TYPES = _BuildTagLookupTable({
+ 0: ProtocolBuffer.Encoder.NUMERIC,
+ }, 0, ProtocolBuffer.Encoder.MAX_TYPE)
+
+ _STYLE = """"""
+ _STYLE_CONTENT_TYPE = """"""
+class MemcacheFlushResponse(ProtocolBuffer.ProtocolMessage):
+
+ def __init__(self, contents=None):
+ pass
+ if contents is not None: self.MergeFromString(contents)
+
+
+ def MergeFrom(self, x):
+ assert x is not self
+
+ def Equals(self, x):
+ if x is self: return 1
+ return 1
+
+ def IsInitialized(self, debug_strs=None):
+ initialized = 1
+ return initialized
+
+ def ByteSize(self):
+ n = 0
+ return n + 0
+
+ def Clear(self):
+ pass
+
+ def OutputUnchecked(self, out):
+ pass
+
+ def TryMerge(self, d):
+ while d.avail() > 0:
+ tt = d.getVarInt32()
+ if (tt == 0): raise ProtocolBuffer.ProtocolBufferDecodeError
+ d.skipData(tt)
+
+
+ def __str__(self, prefix="", printElemNumber=0):
+ res=""
+ return res
+
+
+ def _BuildTagLookupTable(sparse, maxtag, default=None):
+ return tuple([sparse.get(i, default) for i in xrange(0, 1+maxtag)])
+
+
+ _TEXT = _BuildTagLookupTable({
+ 0: "ErrorCode",
+ }, 0)
+
+ _TYPES = _BuildTagLookupTable({
+ 0: ProtocolBuffer.Encoder.NUMERIC,
+ }, 0, ProtocolBuffer.Encoder.MAX_TYPE)
+
+ _STYLE = """"""
+ _STYLE_CONTENT_TYPE = """"""
+class MemcacheStatsRequest(ProtocolBuffer.ProtocolMessage):
+
+ def __init__(self, contents=None):
+ pass
+ if contents is not None: self.MergeFromString(contents)
+
+
+ def MergeFrom(self, x):
+ assert x is not self
+
+ def Equals(self, x):
+ if x is self: return 1
+ return 1
+
+ def IsInitialized(self, debug_strs=None):
+ initialized = 1
+ return initialized
+
+ def ByteSize(self):
+ n = 0
+ return n + 0
+
+ def Clear(self):
+ pass
+
+ def OutputUnchecked(self, out):
+ pass
+
+ def TryMerge(self, d):
+ while d.avail() > 0:
+ tt = d.getVarInt32()
+ if (tt == 0): raise ProtocolBuffer.ProtocolBufferDecodeError
+ d.skipData(tt)
+
+
+ def __str__(self, prefix="", printElemNumber=0):
+ res=""
+ return res
+
+
+ def _BuildTagLookupTable(sparse, maxtag, default=None):
+ return tuple([sparse.get(i, default) for i in xrange(0, 1+maxtag)])
+
+
+ _TEXT = _BuildTagLookupTable({
+ 0: "ErrorCode",
+ }, 0)
+
+ _TYPES = _BuildTagLookupTable({
+ 0: ProtocolBuffer.Encoder.NUMERIC,
+ }, 0, ProtocolBuffer.Encoder.MAX_TYPE)
+
+ _STYLE = """"""
+ _STYLE_CONTENT_TYPE = """"""
+class MergedNamespaceStats(ProtocolBuffer.ProtocolMessage):
+ has_hits_ = 0
+ hits_ = 0
+ has_misses_ = 0
+ misses_ = 0
+ has_byte_hits_ = 0
+ byte_hits_ = 0
+ has_items_ = 0
+ items_ = 0
+ has_bytes_ = 0
+ bytes_ = 0
+ has_oldest_item_age_ = 0
+ oldest_item_age_ = 0
+
+ def __init__(self, contents=None):
+ if contents is not None: self.MergeFromString(contents)
+
+ def hits(self): return self.hits_
+
+ def set_hits(self, x):
+ self.has_hits_ = 1
+ self.hits_ = x
+
+ def clear_hits(self):
+ if self.has_hits_:
+ self.has_hits_ = 0
+ self.hits_ = 0
+
+ def has_hits(self): return self.has_hits_
+
+ def misses(self): return self.misses_
+
+ def set_misses(self, x):
+ self.has_misses_ = 1
+ self.misses_ = x
+
+ def clear_misses(self):
+ if self.has_misses_:
+ self.has_misses_ = 0
+ self.misses_ = 0
+
+ def has_misses(self): return self.has_misses_
+
+ def byte_hits(self): return self.byte_hits_
+
+ def set_byte_hits(self, x):
+ self.has_byte_hits_ = 1
+ self.byte_hits_ = x
+
+ def clear_byte_hits(self):
+ if self.has_byte_hits_:
+ self.has_byte_hits_ = 0
+ self.byte_hits_ = 0
+
+ def has_byte_hits(self): return self.has_byte_hits_
+
+ def items(self): return self.items_
+
+ def set_items(self, x):
+ self.has_items_ = 1
+ self.items_ = x
+
+ def clear_items(self):
+ if self.has_items_:
+ self.has_items_ = 0
+ self.items_ = 0
+
+ def has_items(self): return self.has_items_
+
+ def bytes(self): return self.bytes_
+
+ def set_bytes(self, x):
+ self.has_bytes_ = 1
+ self.bytes_ = x
+
+ def clear_bytes(self):
+ if self.has_bytes_:
+ self.has_bytes_ = 0
+ self.bytes_ = 0
+
+ def has_bytes(self): return self.has_bytes_
+
+ def oldest_item_age(self): return self.oldest_item_age_
+
+ def set_oldest_item_age(self, x):
+ self.has_oldest_item_age_ = 1
+ self.oldest_item_age_ = x
+
+ def clear_oldest_item_age(self):
+ if self.has_oldest_item_age_:
+ self.has_oldest_item_age_ = 0
+ self.oldest_item_age_ = 0
+
+ def has_oldest_item_age(self): return self.has_oldest_item_age_
+
+
+ def MergeFrom(self, x):
+ assert x is not self
+ if (x.has_hits()): self.set_hits(x.hits())
+ if (x.has_misses()): self.set_misses(x.misses())
+ if (x.has_byte_hits()): self.set_byte_hits(x.byte_hits())
+ if (x.has_items()): self.set_items(x.items())
+ if (x.has_bytes()): self.set_bytes(x.bytes())
+ if (x.has_oldest_item_age()): self.set_oldest_item_age(x.oldest_item_age())
+
+ def Equals(self, x):
+ if x is self: return 1
+ if self.has_hits_ != x.has_hits_: return 0
+ if self.has_hits_ and self.hits_ != x.hits_: return 0
+ if self.has_misses_ != x.has_misses_: return 0
+ if self.has_misses_ and self.misses_ != x.misses_: return 0
+ if self.has_byte_hits_ != x.has_byte_hits_: return 0
+ if self.has_byte_hits_ and self.byte_hits_ != x.byte_hits_: return 0
+ if self.has_items_ != x.has_items_: return 0
+ if self.has_items_ and self.items_ != x.items_: return 0
+ if self.has_bytes_ != x.has_bytes_: return 0
+ if self.has_bytes_ and self.bytes_ != x.bytes_: return 0
+ if self.has_oldest_item_age_ != x.has_oldest_item_age_: return 0
+ if self.has_oldest_item_age_ and self.oldest_item_age_ != x.oldest_item_age_: return 0
+ return 1
+
+ def IsInitialized(self, debug_strs=None):
+ initialized = 1
+ if (not self.has_hits_):
+ initialized = 0
+ if debug_strs is not None:
+ debug_strs.append('Required field: hits not set.')
+ if (not self.has_misses_):
+ initialized = 0
+ if debug_strs is not None:
+ debug_strs.append('Required field: misses not set.')
+ if (not self.has_byte_hits_):
+ initialized = 0
+ if debug_strs is not None:
+ debug_strs.append('Required field: byte_hits not set.')
+ if (not self.has_items_):
+ initialized = 0
+ if debug_strs is not None:
+ debug_strs.append('Required field: items not set.')
+ if (not self.has_bytes_):
+ initialized = 0
+ if debug_strs is not None:
+ debug_strs.append('Required field: bytes not set.')
+ if (not self.has_oldest_item_age_):
+ initialized = 0
+ if debug_strs is not None:
+ debug_strs.append('Required field: oldest_item_age not set.')
+ return initialized
+
+ def ByteSize(self):
+ n = 0
+ n += self.lengthVarInt64(self.hits_)
+ n += self.lengthVarInt64(self.misses_)
+ n += self.lengthVarInt64(self.byte_hits_)
+ n += self.lengthVarInt64(self.items_)
+ n += self.lengthVarInt64(self.bytes_)
+ return n + 10
+
+ def Clear(self):
+ self.clear_hits()
+ self.clear_misses()
+ self.clear_byte_hits()
+ self.clear_items()
+ self.clear_bytes()
+ self.clear_oldest_item_age()
+
+ def OutputUnchecked(self, out):
+ out.putVarInt32(8)
+ out.putVarUint64(self.hits_)
+ out.putVarInt32(16)
+ out.putVarUint64(self.misses_)
+ out.putVarInt32(24)
+ out.putVarUint64(self.byte_hits_)
+ out.putVarInt32(32)
+ out.putVarUint64(self.items_)
+ out.putVarInt32(40)
+ out.putVarUint64(self.bytes_)
+ out.putVarInt32(53)
+ out.put32(self.oldest_item_age_)
+
+ def TryMerge(self, d):
+ while d.avail() > 0:
+ tt = d.getVarInt32()
+ if tt == 8:
+ self.set_hits(d.getVarUint64())
+ continue
+ if tt == 16:
+ self.set_misses(d.getVarUint64())
+ continue
+ if tt == 24:
+ self.set_byte_hits(d.getVarUint64())
+ continue
+ if tt == 32:
+ self.set_items(d.getVarUint64())
+ continue
+ if tt == 40:
+ self.set_bytes(d.getVarUint64())
+ continue
+ if tt == 53:
+ self.set_oldest_item_age(d.get32())
+ continue
+ if (tt == 0): raise ProtocolBuffer.ProtocolBufferDecodeError
+ d.skipData(tt)
+
+
+ def __str__(self, prefix="", printElemNumber=0):
+ res=""
+ if self.has_hits_: res+=prefix+("hits: %s\n" % self.DebugFormatInt64(self.hits_))
+ if self.has_misses_: res+=prefix+("misses: %s\n" % self.DebugFormatInt64(self.misses_))
+ if self.has_byte_hits_: res+=prefix+("byte_hits: %s\n" % self.DebugFormatInt64(self.byte_hits_))
+ if self.has_items_: res+=prefix+("items: %s\n" % self.DebugFormatInt64(self.items_))
+ if self.has_bytes_: res+=prefix+("bytes: %s\n" % self.DebugFormatInt64(self.bytes_))
+ if self.has_oldest_item_age_: res+=prefix+("oldest_item_age: %s\n" % self.DebugFormatFixed32(self.oldest_item_age_))
+ return res
+
+
+ def _BuildTagLookupTable(sparse, maxtag, default=None):
+ return tuple([sparse.get(i, default) for i in xrange(0, 1+maxtag)])
+
+ khits = 1
+ kmisses = 2
+ kbyte_hits = 3
+ kitems = 4
+ kbytes = 5
+ koldest_item_age = 6
+
+ _TEXT = _BuildTagLookupTable({
+ 0: "ErrorCode",
+ 1: "hits",
+ 2: "misses",
+ 3: "byte_hits",
+ 4: "items",
+ 5: "bytes",
+ 6: "oldest_item_age",
+ }, 6)
+
+ _TYPES = _BuildTagLookupTable({
+ 0: ProtocolBuffer.Encoder.NUMERIC,
+ 1: ProtocolBuffer.Encoder.NUMERIC,
+ 2: ProtocolBuffer.Encoder.NUMERIC,
+ 3: ProtocolBuffer.Encoder.NUMERIC,
+ 4: ProtocolBuffer.Encoder.NUMERIC,
+ 5: ProtocolBuffer.Encoder.NUMERIC,
+ 6: ProtocolBuffer.Encoder.FLOAT,
+ }, 6, ProtocolBuffer.Encoder.MAX_TYPE)
+
+ _STYLE = """"""
+ _STYLE_CONTENT_TYPE = """"""
+class MemcacheStatsResponse(ProtocolBuffer.ProtocolMessage):
+ has_stats_ = 0
+ stats_ = None
+
+ def __init__(self, contents=None):
+ self.lazy_init_lock_ = thread.allocate_lock()
+ if contents is not None: self.MergeFromString(contents)
+
+ def stats(self):
+ if self.stats_ is None:
+ self.lazy_init_lock_.acquire()
+ try:
+ if self.stats_ is None: self.stats_ = MergedNamespaceStats()
+ finally:
+ self.lazy_init_lock_.release()
+ return self.stats_
+
+ def mutable_stats(self): self.has_stats_ = 1; return self.stats()
+
+ def clear_stats(self):
+ if self.has_stats_:
+ self.has_stats_ = 0;
+ if self.stats_ is not None: self.stats_.Clear()
+
+ def has_stats(self): return self.has_stats_
+
+
+ def MergeFrom(self, x):
+ assert x is not self
+ if (x.has_stats()): self.mutable_stats().MergeFrom(x.stats())
+
+ def Equals(self, x):
+ if x is self: return 1
+ if self.has_stats_ != x.has_stats_: return 0
+ if self.has_stats_ and self.stats_ != x.stats_: return 0
+ return 1
+
+ def IsInitialized(self, debug_strs=None):
+ initialized = 1
+ if (self.has_stats_ and not self.stats_.IsInitialized(debug_strs)): initialized = 0
+ return initialized
+
+ def ByteSize(self):
+ n = 0
+ if (self.has_stats_): n += 1 + self.lengthString(self.stats_.ByteSize())
+ return n + 0
+
+ def Clear(self):
+ self.clear_stats()
+
+ def OutputUnchecked(self, out):
+ if (self.has_stats_):
+ out.putVarInt32(10)
+ out.putVarInt32(self.stats_.ByteSize())
+ self.stats_.OutputUnchecked(out)
+
+ def TryMerge(self, d):
+ while d.avail() > 0:
+ tt = d.getVarInt32()
+ if tt == 10:
+ length = d.getVarInt32()
+ tmp = ProtocolBuffer.Decoder(d.buffer(), d.pos(), d.pos() + length)
+ d.skip(length)
+ self.mutable_stats().TryMerge(tmp)
+ continue
+ if (tt == 0): raise ProtocolBuffer.ProtocolBufferDecodeError
+ d.skipData(tt)
+
+
+ def __str__(self, prefix="", printElemNumber=0):
+ res=""
+ if self.has_stats_:
+ res+=prefix+"stats <\n"
+ res+=self.stats_.__str__(prefix + " ", printElemNumber)
+ res+=prefix+">\n"
+ return res
+
+
+ def _BuildTagLookupTable(sparse, maxtag, default=None):
+ return tuple([sparse.get(i, default) for i in xrange(0, 1+maxtag)])
+
+ kstats = 1
+
+ _TEXT = _BuildTagLookupTable({
+ 0: "ErrorCode",
+ 1: "stats",
+ }, 1)
+
+ _TYPES = _BuildTagLookupTable({
+ 0: ProtocolBuffer.Encoder.NUMERIC,
+ 1: ProtocolBuffer.Encoder.STRING,
+ }, 1, ProtocolBuffer.Encoder.MAX_TYPE)
+
+ _STYLE = """"""
+ _STYLE_CONTENT_TYPE = """"""
+
+__all__ = ['MemcacheServiceError','MemcacheGetRequest','MemcacheGetResponse','MemcacheGetResponse_Item','MemcacheSetRequest','MemcacheSetRequest_Item','MemcacheSetResponse','MemcacheDeleteRequest','MemcacheDeleteRequest_Item','MemcacheDeleteResponse','MemcacheIncrementRequest','MemcacheIncrementResponse','MemcacheFlushRequest','MemcacheFlushResponse','MemcacheStatsRequest','MergedNamespaceStats','MemcacheStatsResponse']
diff --git a/google_appengine/google/appengine/api/memcache/memcache_service_pb.pyc b/google_appengine/google/appengine/api/memcache/memcache_service_pb.pyc
new file mode 100644
index 0000000..e7f4872
--- /dev/null
+++ b/google_appengine/google/appengine/api/memcache/memcache_service_pb.pyc
Binary files differ
diff --git a/google_appengine/google/appengine/api/memcache/memcache_stub.py b/google_appengine/google/appengine/api/memcache/memcache_stub.py
new file mode 100755
index 0000000..8d03bf2
--- /dev/null
+++ b/google_appengine/google/appengine/api/memcache/memcache_stub.py
@@ -0,0 +1,293 @@
+#!/usr/bin/env python
+#
+# Copyright 2007 Google Inc.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
+
+"""Stub version of the memcache API, keeping all data in process memory."""
+
+
+
+import logging
+import time
+
+from google.appengine.api import apiproxy_stub
+from google.appengine.api import memcache
+from google.appengine.api.memcache import memcache_service_pb
+
+MemcacheSetResponse = memcache_service_pb.MemcacheSetResponse
+MemcacheSetRequest = memcache_service_pb.MemcacheSetRequest
+MemcacheIncrementRequest = memcache_service_pb.MemcacheIncrementRequest
+MemcacheDeleteResponse = memcache_service_pb.MemcacheDeleteResponse
+
+
+class CacheEntry(object):
+ """An entry in the cache."""
+
+ def __init__(self, value, expiration, flags, gettime):
+ """Initializer.
+
+ Args:
+ value: String containing the data for this entry.
+ expiration: Number containing the expiration time or offset in seconds
+ for this entry.
+ flags: Opaque flags used by the memcache implementation.
+ gettime: Used for testing. Function that works like time.time().
+ """
+ assert isinstance(value, basestring)
+ assert len(value) <= memcache.MAX_VALUE_SIZE
+ assert isinstance(expiration, (int, long))
+
+ self._gettime = gettime
+ self.value = value
+ self.flags = flags
+ self.created_time = self._gettime()
+ self.will_expire = expiration != 0
+ self.locked = False
+ self._SetExpiration(expiration)
+
+ def _SetExpiration(self, expiration):
+ """Sets the expiration for this entry.
+
+ Args:
+ expiration: Number containing the expiration time or offset in seconds
+ for this entry. If expiration is above one month, then it's considered
+ an absolute time since the UNIX epoch.
+ """
+ if expiration > (86400 * 30):
+ self.expiration_time = expiration
+ else:
+ self.expiration_time = self._gettime() + expiration
+
+ def CheckExpired(self):
+ """Returns True if this entry has expired; False otherwise."""
+ return self.will_expire and self._gettime() >= self.expiration_time
+
+ def ExpireAndLock(self, timeout):
+ """Marks this entry as deleted and locks it for the expiration time.
+
+ Used to implement memcache's delete timeout behavior.
+
+ Args:
+ timeout: Parameter originally passed to memcache.delete or
+ memcache.delete_multi to control deletion timeout.
+ """
+ self.will_expire = True
+ self.locked = True
+ self._SetExpiration(timeout)
+
+ def CheckLocked(self):
+ """Returns True if this entry was deleted but has not yet timed out."""
+ return self.locked and not self.CheckExpired()
+
+
+class MemcacheServiceStub(apiproxy_stub.APIProxyStub):
+ """Python only memcache service stub.
+
+ This stub keeps all data in the local process's memory rather than in any
+ external server.
+ """
+
+ def __init__(self, gettime=time.time, service_name='memcache'):
+ """Initializer.
+
+ Args:
+ gettime: time.time()-like function used for testing.
+ service_name: Service name expected for all calls.
+ """
+ super(MemcacheServiceStub, self).__init__(service_name)
+ self._gettime = gettime
+ self._ResetStats()
+
+ self._the_cache = {}
+
+ def _ResetStats(self):
+ """Resets statistics information."""
+ self._hits = 0
+ self._misses = 0
+ self._byte_hits = 0
+ self._cache_creation_time = self._gettime()
+
+ def _GetKey(self, namespace, key):
+ """Retrieves a CacheEntry from the cache if it hasn't expired.
+
+ Does not take deletion timeout into account.
+
+ Args:
+ namespace: The namespace that keys are stored under.
+ key: The key to retrieve from the cache.
+
+ Returns:
+ The corresponding CacheEntry instance, or None if it was not found or
+ has already expired.
+ """
+ namespace_dict = self._the_cache.get(namespace, None)
+ if namespace_dict is None:
+ return None
+ entry = namespace_dict.get(key, None)
+ if entry is None:
+ return None
+ elif entry.CheckExpired():
+ del namespace_dict[key]
+ return None
+ else:
+ return entry
+
+ def _Dynamic_Get(self, request, response):
+ """Implementation of MemcacheService::Get().
+
+ Args:
+ request: A MemcacheGetRequest.
+ response: A MemcacheGetResponse.
+ """
+ namespace = request.name_space()
+ keys = set(request.key_list())
+ for key in keys:
+ entry = self._GetKey(namespace, key)
+ if entry is None or entry.CheckLocked():
+ self._misses += 1
+ continue
+ self._hits += 1
+ self._byte_hits += len(entry.value)
+ item = response.add_item()
+ item.set_key(key)
+ item.set_value(entry.value)
+ item.set_flags(entry.flags)
+
+ def _Dynamic_Set(self, request, response):
+ """Implementation of MemcacheService::Set().
+
+ Args:
+ request: A MemcacheSetRequest.
+ response: A MemcacheSetResponse.
+ """
+ namespace = request.name_space()
+ for item in request.item_list():
+ key = item.key()
+ set_policy = item.set_policy()
+ old_entry = self._GetKey(namespace, key)
+
+ set_status = MemcacheSetResponse.NOT_STORED
+ if ((set_policy == MemcacheSetRequest.SET) or
+ (set_policy == MemcacheSetRequest.ADD and old_entry is None) or
+ (set_policy == MemcacheSetRequest.REPLACE and old_entry is not None)):
+
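+ # A delete-locked entry (deleted with a timeout that has not yet
+ # elapsed) may only be overwritten by an unconditional SET.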
+ if (old_entry is None or
+ set_policy == MemcacheSetRequest.SET
+ or not old_entry.CheckLocked()):
+ if namespace not in self._the_cache:
+ self._the_cache[namespace] = {}
+ self._the_cache[namespace][key] = CacheEntry(item.value(),
+ item.expiration_time(),
+ item.flags(),
+ gettime=self._gettime)
+ set_status = MemcacheSetResponse.STORED
+
+ response.add_set_status(set_status)
+
+ def _Dynamic_Delete(self, request, response):
+ """Implementation of MemcacheService::Delete().
+
+ Args:
+ request: A MemcacheDeleteRequest.
+ response: A MemcacheDeleteResponse.
+ """
+ namespace = request.name_space()
+ for item in request.item_list():
+ key = item.key()
+ entry = self._GetKey(namespace, key)
+
+ delete_status = MemcacheDeleteResponse.DELETED
+ if entry is None:
+ delete_status = MemcacheDeleteResponse.NOT_FOUND
+ elif item.delete_time() == 0:
+ del self._the_cache[namespace][key]
+ else:
+ entry.ExpireAndLock(item.delete_time())
+
+ response.add_delete_status(delete_status)
+
+ def _Dynamic_Increment(self, request, response):
+ """Implementation of MemcacheService::Increment().
+
+ Args:
+ request: A MemcacheIncrementRequest.
+ response: A MemcacheIncrementResponse.
+ """
+ namespace = request.name_space()
+ key = request.key()
+ entry = self._GetKey(namespace, key)
+ if entry is None:
+ if not request.has_initial_value():
+ return
+ if namespace not in self._the_cache:
+ self._the_cache[namespace] = {}
+ self._the_cache[namespace][key] = CacheEntry(str(request.initial_value()),
+ expiration=0,
+ flags=0,
+ gettime=self._gettime)
+ entry = self._GetKey(namespace, key)
+ assert entry is not None
+
+ try:
+ old_value = long(entry.value)
+ if old_value < 0:
+ raise ValueError
+ except ValueError:
+ logging.error('Increment/decrement failed: Could not interpret '
+ 'value for key = "%s" as an unsigned integer.', key)
+ return
+
+ delta = request.delta()
+ if request.direction() == MemcacheIncrementRequest.DECREMENT:
+ delta = -delta
+
+ new_value = old_value + delta
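+ # This stub clamps results outside the unsigned 64-bit range to zero.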
+ if not (0 <= new_value < 2**64):
+ new_value = 0
+
+ entry.value = str(new_value)
+ response.set_new_value(new_value)
+
+ def _Dynamic_FlushAll(self, request, response):
+ """Implementation of MemcacheService::FlushAll().
+
+ Args:
+ request: A MemcacheFlushRequest.
+ response: A MemcacheFlushResponse.
+ """
+ self._the_cache.clear()
+ self._ResetStats()
+
+ def _Dynamic_Stats(self, request, response):
+ """Implementation of MemcacheService::Stats().
+
+ Args:
+ request: A MemcacheStatsRequest.
+ response: A MemcacheStatsResponse.
+ """
+ stats = response.mutable_stats()
+ stats.set_hits(self._hits)
+ stats.set_misses(self._misses)
+ stats.set_byte_hits(self._byte_hits)
+ items = 0
+ total_bytes = 0
+ for namespace in self._the_cache.itervalues():
+ items += len(namespace)
+ for entry in namespace.itervalues():
+ total_bytes += len(entry.value)
+ stats.set_items(items)
+ stats.set_bytes(total_bytes)
+
+ stats.set_oldest_item_age(self._gettime() - self._cache_creation_time)
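+
+
+# A minimal usage sketch (not part of the SDK source): tests typically
+# register this stub so that memcache API calls stay in process. The
+# APIProxyStubMap and RegisterStub names come from apiproxy_stub_map in
+# this SDK; the surrounding test scaffolding is assumed.
+#
+# from google.appengine.api import apiproxy_stub_map
+# from google.appengine.api.memcache import memcache_stub
+#
+# apiproxy_stub_map.apiproxy = apiproxy_stub_map.APIProxyStubMap()
+# apiproxy_stub_map.apiproxy.RegisterStub(
+# 'memcache', memcache_stub.MemcacheServiceStub())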
diff --git a/google_appengine/google/appengine/api/memcache/memcache_stub.pyc b/google_appengine/google/appengine/api/memcache/memcache_stub.pyc
new file mode 100644
index 0000000..d16bb1c
--- /dev/null
+++ b/google_appengine/google/appengine/api/memcache/memcache_stub.pyc
Binary files differ
diff --git a/google_appengine/google/appengine/api/namespace_manager/__init__.py b/google_appengine/google/appengine/api/namespace_manager/__init__.py
new file mode 100755
index 0000000..43e68af
--- /dev/null
+++ b/google_appengine/google/appengine/api/namespace_manager/__init__.py
@@ -0,0 +1,75 @@
+#!/usr/bin/env python
+#
+# Copyright 2007 Google Inc.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
+
+"""Control the namespacing system used by various APIs.
+
+Each API call can specify an alternate namespace, but the functions
+here can be used to change the default namespace. The default is set
+before user code begins executing.
+"""
+
+
+
+import os
+
+ENV_DEFAULT_NAMESPACE = 'HTTP_X_APPENGINE_DEFAULT_NAMESPACE'
+ENV_CURRENT_NAMESPACE = '__INTERNAL_CURRENT_NAMESPACE'
+
+
+def set_request_namespace(namespace):
+ """Set the default namespace to use for future calls, for this request only.
+
+ Args:
+ namespace: A string naming the new namespace to use. The empty
+ string specifies the root namespace for this app.
+ """
+ os.environ[ENV_CURRENT_NAMESPACE] = namespace
+
+
+def get_request_namespace():
+ """Get the name of the current default namespace.
+
+ The empty string indicates that the root namespace is the default.
+ """
+ return os.getenv(ENV_CURRENT_NAMESPACE, '')
+
+
+def _enable_request_namespace():
+ """Automatically enable namespace to default for domain.
+
+ Calling this function will automatically default the namespace to the
+ chosen Google Apps domain for the current request.
+ """
+ if ENV_CURRENT_NAMESPACE not in os.environ:
+ if ENV_DEFAULT_NAMESPACE in os.environ:
+ os.environ[ENV_CURRENT_NAMESPACE] = os.environ[ENV_DEFAULT_NAMESPACE]
+ else:
+ os.environ[ENV_CURRENT_NAMESPACE] = ''
+
+
+def _add_name_space(request, namespace=None):
+ """Add a name_space field to a request.
+
+ Args:
+ request: A protocol buffer supporting the set_name_space() operation.
+ namespace: The name of the namespace part. If None, use the
+ default namespace.
+ """
+ if namespace is None:
+ request.set_name_space(get_request_namespace())
+ else:
+ request.set_name_space(namespace)
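+
+
+# A hedged example of the intended flow; the namespace string below is
+# illustrative:
+#
+# set_request_namespace('my-namespace') # scope later API calls
+# assert get_request_namespace() == 'my-namespace'
+# set_request_namespace('') # back to the root namespace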
diff --git a/google_appengine/google/appengine/api/namespace_manager/__init__.pyc b/google_appengine/google/appengine/api/namespace_manager/__init__.pyc
new file mode 100644
index 0000000..5bb0673
--- /dev/null
+++ b/google_appengine/google/appengine/api/namespace_manager/__init__.pyc
Binary files differ
diff --git a/google_appengine/google/appengine/api/queueinfo.py b/google_appengine/google/appengine/api/queueinfo.py
new file mode 100755
index 0000000..bdaa358
--- /dev/null
+++ b/google_appengine/google/appengine/api/queueinfo.py
@@ -0,0 +1,143 @@
+#!/usr/bin/env python
+#
+# Copyright 2007 Google Inc.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
+
+"""QueueInfo tools.
+
+A library for working with QueueInfo records, describing task queue entries
+for an application. Supports loading the records from queue.yaml.
+
+A queue has two required parameters and one optional one. The required
+parameters are 'name' (must be unique for an appid) and 'rate' (the rate
+at which jobs in the queue are run). There is an optional parameter
+'bucket_size' that will allow tokens to be 'saved up' (for more on the
+algorithm, see http://en.wikipedia.org/wiki/Token_Bucket). rate is expressed
+as number/unit, with number being an int or a float, and unit being one of
+'s' (seconds), 'm' (minutes), 'h' (hours) or 'd' (days). bucket_size is
+an integer.
+
+An example of the use of bucket_size and rate: the free email quota is 2000/d,
+and the maximum you can send in a single minute is 11. So we can define a
+queue for sending email like this:
+
+queue:
+- name: mail_queue
+ rate: 2000/d
+ bucket_size: 10
+
+If this queue had been idle for a while before some jobs were submitted to it,
+the first 10 jobs submitted would be run immediately, and subsequent ones
+would be run once every 43 seconds or so (86400/2000). The limit of 2000 per
+day would still apply.
+"""
+
+
+
+from google.appengine.api import validation
+from google.appengine.api import yaml_builder
+from google.appengine.api import yaml_listener
+from google.appengine.api import yaml_object
+
+_NAME_REGEX = r'^[A-Za-z0-9-]{0,499}$'
+_RATE_REGEX = r'^(0|[0-9]+(\.[0-9]*)?/[smhd])'
+
+QUEUE = 'queue'
+
+NAME = 'name'
+RATE = 'rate'
+BUCKET_SIZE = 'bucket_size'
+
+
+class MalformedQueueConfiguration(Exception):
+ """Configuration file for Task Queue is malformed."""
+
+
+class QueueEntry(validation.Validated):
+ """A queue entry describes a single task queue."""
+ ATTRIBUTES = {
+ NAME: _NAME_REGEX,
+ RATE: _RATE_REGEX,
+ BUCKET_SIZE: validation.Optional(validation.TYPE_INT),
+ }
+
+
+class QueueInfoExternal(validation.Validated):
+ """QueueInfoExternal describes all queue entries for an application."""
+ ATTRIBUTES = {
+ QUEUE: validation.Optional(validation.Repeated(QueueEntry))
+ }
+
+
+def LoadSingleQueue(queue_info):
+ """Load a queue.yaml file or string and return a QueueInfoExternal object.
+
+ Args:
+ queue_info: the contents of a queue.yaml file, as a string.
+
+ Returns:
+ A QueueInfoExternal object.
+ """
+ builder = yaml_object.ObjectBuilder(QueueInfoExternal)
+ handler = yaml_builder.BuilderHandler(builder)
+ listener = yaml_listener.EventListener(handler)
+ listener.Parse(queue_info)
+
+ queue_info = handler.GetResults()
+ if len(queue_info) < 1:
+ raise MalformedQueueConfiguration('Empty queue configuration.')
+ if len(queue_info) > 1:
+ raise MalformedQueueConfiguration('Multiple queue: sections '
+ 'in configuration.')
+ return queue_info[0]
+
+
+def ParseRate(rate):
+ """Parses a rate string in the form number/unit, or the literal 0.
+
+ The unit is one of s (seconds), m (minutes), h (hours) or d (days).
+
+ Args:
+ rate: the rate string.
+
+ Returns:
+ A floating point number representing the rate in tasks per second.
+
+ Raises:
+ MalformedQueueConfiguration: if the rate is invalid.
+ """
+ if rate == "0":
+ return 0.0
+ elements = rate.split('/')
+ if len(elements) != 2:
+ raise MalformedQueueConfiguration('Rate "%s" is invalid.' % rate)
+ number, unit = elements
+ try:
+ number = float(number)
+ except ValueError:
+ raise MalformedQueueConfiguration('Rate "%s" is invalid:'
+ ' "%s" is not a number.' %
+ (rate, number))
+ if unit not in 'smhd':
+ raise MalformedQueueConfiguration('Rate "%s" is invalid:'
+ ' "%s" is not one of s, m, h, d.' %
+ (rate, unit))
+ if unit == 's':
+ return number
+ if unit == 'm':
+ return number/60
+ if unit == 'h':
+ return number/(60 * 60)
+ if unit == 'd':
+ return number/(24 * 60 * 60)
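+
+
+# Illustrative conversions (a sketch, not part of the SDK source):
+#
+# ParseRate('2000/d') # ~0.0231 tasks/second (2000 / 86400)
+# ParseRate('10/m') # ~0.1667 tasks/second
+# ParseRate('0') # 0.0, i.e. a paused queue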
diff --git a/google_appengine/google/appengine/api/queueinfo.pyc b/google_appengine/google/appengine/api/queueinfo.pyc
new file mode 100644
index 0000000..74dd348
--- /dev/null
+++ b/google_appengine/google/appengine/api/queueinfo.pyc
Binary files differ
diff --git a/google_appengine/google/appengine/api/quota.py b/google_appengine/google/appengine/api/quota.py
new file mode 100755
index 0000000..3168eb2
--- /dev/null
+++ b/google_appengine/google/appengine/api/quota.py
@@ -0,0 +1,71 @@
+#!/usr/bin/env python
+#
+# Copyright 2007 Google Inc.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
+
+"""Access to quota usage for this application."""
+
+
+
+
+try:
+ from google3.apphosting.runtime import _apphosting_runtime___python__apiproxy
+except ImportError:
+ _apphosting_runtime___python__apiproxy = None
+
+def get_request_cpu_usage():
+ """Get the amount of CPU used so far for the current request.
+
+ Returns the number of megacycles used so far for the current
+ request. Does not include CPU used by API calls.
+
+ Always returns 0 when used in the dev_appserver.
+ """
+
+ if _apphosting_runtime___python__apiproxy:
+ return _apphosting_runtime___python__apiproxy.get_request_cpu_usage()
+ return 0
+
+def get_request_api_cpu_usage():
+ """Get the amount of CPU used so far by API calls during the current request.
+
+ Returns the number of megacycles used so far by API calls for the current
+ request. Does not include CPU used by code in the request itself.
+
+ Always returns 0 when used in the dev_appserver.
+ """
+
+ if _apphosting_runtime___python__apiproxy:
+ return _apphosting_runtime___python__apiproxy.get_request_api_cpu_usage()
+ return 0
+
+MCYCLES_PER_SECOND = 1200.0
+"""Megacycles to CPU seconds. Convert by using a 1.2 GHz 64-bit x86 CPU."""
+
+def megacycles_to_cpu_seconds(mcycles):
+ """Convert an input value in megacycles to CPU-seconds.
+
+ Returns a double representing the CPU-seconds the input megacycle value
+ converts to.
+ """
+ return mcycles / MCYCLES_PER_SECOND
+
+def cpu_seconds_to_megacycles(cpu_secs):
+ """Convert an input value in CPU-seconds to megacycles.
+
+ Returns an integer representing the megacycles the input CPU-seconds value
+ converts to.
+ """
+ return int(cpu_secs * MCYCLES_PER_SECOND)
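+
+
+# Worked example (a sketch only): at the assumed rate of 1200 megacycles
+# per CPU second, the two helpers are inverses up to int() truncation:
+#
+# megacycles_to_cpu_seconds(600) # -> 0.5
+# cpu_seconds_to_megacycles(0.5) # -> 600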
diff --git a/google_appengine/google/appengine/api/urlfetch.py b/google_appengine/google/appengine/api/urlfetch.py
new file mode 100755
index 0000000..8d9e836
--- /dev/null
+++ b/google_appengine/google/appengine/api/urlfetch.py
@@ -0,0 +1,361 @@
+#!/usr/bin/env python
+#
+# Copyright 2007 Google Inc.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
+
+"""URL downloading API.
+
+Methods defined in this module:
+ Fetch(): fetches a given URL using an HTTP GET or POST
+"""
+
+
+
+
+
+import os
+import UserDict
+import urllib2
+import urlparse
+
+from google.appengine.api import apiproxy_stub_map
+from google.appengine.api import urlfetch_service_pb
+from google.appengine.api.urlfetch_errors import *
+from google.appengine.runtime import apiproxy_errors
+
+MAX_REDIRECTS = 5
+
+GET = 1
+POST = 2
+HEAD = 3
+PUT = 4
+DELETE = 5
+
+
+_URL_STRING_MAP = {
+ 'GET': GET,
+ 'POST': POST,
+ 'HEAD': HEAD,
+ 'PUT': PUT,
+ 'DELETE': DELETE,
+}
+
+
+_VALID_METHODS = frozenset(_URL_STRING_MAP.values())
+
+
+class _CaselessDict(UserDict.IterableUserDict):
+ """Case insensitive dictionary.
+
+ This class was lifted from os.py and slightly modified.
+ """
+
+ def __init__(self):
+ UserDict.IterableUserDict.__init__(self)
+ self.caseless_keys = {}
+
+ def __setitem__(self, key, item):
+ """Set dictionary item.
+
+ Args:
+ key: Key of new item. Key is case insensitive, so "d['Key'] = value "
+ will replace previous values set by "d['key'] = old_value".
+ item: Item to store.
+ """
+ caseless_key = key.lower()
+ if caseless_key in self.caseless_keys:
+ del self.data[self.caseless_keys[caseless_key]]
+ self.caseless_keys[caseless_key] = key
+ self.data[key] = item
+
+ def __getitem__(self, key):
+ """Get dictionary item.
+
+ Args:
+ key: Key of item to get. Key is case insensitive, so "d['Key']" is the
+ same as "d['key']".
+
+ Returns:
+ Item associated with key.
+ """
+ return self.data[self.caseless_keys[key.lower()]]
+
+ def __delitem__(self, key):
+ """Remove item from dictionary.
+
+ Args:
+ key: Key of item to remove. Key is case insensitive, so "del d['Key']" is
+ the same as "del d['key']"
+ """
+ caseless_key = key.lower()
+ del self.data[self.caseless_keys[caseless_key]]
+ del self.caseless_keys[caseless_key]
+
+ def has_key(self, key):
+ """Determine if dictionary has item with specific key.
+
+ Args:
+ key: Key to check for presence. Key is case insensitive, so
+ "d.has_key('Key')" evaluates to the same value as "d.has_key('key')".
+
+ Returns:
+ True if dictionary contains key, else False.
+ """
+ return key.lower() in self.caseless_keys
+
+ def __contains__(self, key):
+ """Same as 'has_key', but used for 'in' operator.'"""
+ return self.has_key(key)
+
+ def get(self, key, failobj=None):
+ """Get dictionary item, defaulting to another value if it does not exist.
+
+ Args:
+ key: Key of item to get. Key is case insensitive, so "d['Key']" is the
+ same as "d['key']".
+ failobj: Value to return if key not in dictionary.
+ """
+ try:
+ cased_key = self.caseless_keys[key.lower()]
+ except KeyError:
+ return failobj
+ return self.data[cased_key]
+
+ def update(self, dict=None, **kwargs):
+ """Update dictionary using values from another dictionary and keywords.
+
+ Args:
+ dict: Dictionary to update from.
+ kwargs: Keyword arguments to update from.
+ """
+ if dict:
+ try:
+ keys = dict.keys()
+ except AttributeError:
+ for k, v in dict:
+ self[k] = v
+ else:
+ for k in keys:
+ self[k] = dict[k]
+ if kwargs:
+ self.update(kwargs)
+
+ def copy(self):
+ """Make a shallow, case sensitive copy of self."""
+ return dict(self)
+
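+# Illustration (a sketch only): response headers are stored in a
+# _CaselessDict, so lookups ignore case:
+#
+# d = _CaselessDict()
+# d['Content-Type'] = 'text/html'
+# d['content-type'] # -> 'text/html'
+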
+
+def _is_fetching_self(url, method):
+ """Checks if the fetch is for the same URL from which it originated.
+
+ Args:
+ url: str, The URL being fetched.
+ method: value from _VALID_METHODS.
+
+ Returns:
+ boolean indicating whether or not it seems that the app is trying to fetch
+ itself.
+ """
+ if (method != GET or
+ "HTTP_HOST" not in os.environ or
+ "PATH_INFO" not in os.environ):
+ return False
+
+ scheme, host_port, path, query, fragment = urlparse.urlsplit(url)
+
+ if host_port == os.environ['HTTP_HOST']:
+ current_path = urllib2.unquote(os.environ['PATH_INFO'])
+ desired_path = urllib2.unquote(path)
+
+ if (current_path == desired_path or
+ (current_path in ('', '/') and desired_path in ('', '/'))):
+ return True
+
+ return False
+
+
+def create_rpc(deadline=None, callback=None):
+ """Creates an RPC object for use with the urlfetch API.
+
+ Args:
+ deadline: Optional deadline in seconds for the operation; the default
+ is a system-specific deadline (typically 5 seconds).
+ callback: Optional callable to invoke on completion.
+
+ Returns:
+ An apiproxy_stub_map.UserRPC object specialized for this service.
+ """
+ return apiproxy_stub_map.UserRPC('urlfetch', deadline, callback)
+
+
+def fetch(url, payload=None, method=GET, headers={},
+ allow_truncated=False, follow_redirects=True,
+ deadline=None):
+ """Fetches the given HTTP URL, blocking until the result is returned.
+
+ Other optional parameters are:
+ method: GET, POST, HEAD, PUT, or DELETE
+ payload: POST or PUT payload (implies method is not GET, HEAD, or DELETE).
+ This is ignored if the method is not POST or PUT.
+ headers: dictionary of HTTP headers to send with the request
+ allow_truncated: if true, truncate large responses and return them without
+ error. Otherwise, ResponseTooLargeError is raised when a response is
+ truncated.
+ follow_redirects: if true (the default), redirects are
+ transparently followed and the response (if less than 5
+ redirects) contains the final destination's payload and the
+ response status is 200. You lose, however, the redirect chain
+ information. If false, you see the HTTP response yourself,
+ including the 'Location' header, and redirects are not
+ followed.
+ deadline: deadline in seconds for the operation.
+
+ We use an HTTP/1.1-compliant proxy to fetch the result.
+
+ The returned data structure has the following fields:
+ content: string containing the response from the server
+ status_code: HTTP status code returned by the server
+ headers: dictionary of headers returned by the server
+
+ If the URL is an empty string or obviously invalid, we throw an
+ urlfetch.InvalidURLError. If the server cannot be contacted, we throw a
+ urlfetch.DownloadError. Note that HTTP errors are returned as a part
+ of the returned structure, so HTTP errors like 404 do not result in an
+ exception.
+ """
+ rpc = create_rpc(deadline=deadline)
+ make_fetch_call(rpc, url, payload, method, headers,
+ allow_truncated, follow_redirects)
+ return rpc.get_result()
+
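+# A hedged sketch of the asynchronous pattern enabled by create_rpc() and
+# make_fetch_call(); the URL and the process() handler are illustrative:
+#
+# rpc = create_rpc(deadline=10)
+# make_fetch_call(rpc, 'http://example.com/')
+# ... other work happens here ...
+# result = rpc.get_result() # may raise DownloadError, etc.
+# if result.status_code == 200:
+# process(result.content)
+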
+
+def make_fetch_call(rpc, url, payload=None, method=GET, headers={},
+ allow_truncated=False, follow_redirects=True):
+ """Executes the RPC call to fetch a given HTTP URL.
+
+ The first argument is a UserRPC instance. See urlfetch.fetch for a
+ thorough description of remaining arguments.
+ """
+ assert rpc.service == 'urlfetch', repr(rpc.service)
+ if isinstance(method, basestring):
+ method = method.upper()
+ method = _URL_STRING_MAP.get(method, method)
+ if method not in _VALID_METHODS:
+ raise InvalidMethodError('Invalid method %s.' % str(method))
+
+ if _is_fetching_self(url, method):
+ raise InvalidURLError("App cannot fetch the same URL as the one used for "
+ "the request.")
+
+ request = urlfetch_service_pb.URLFetchRequest()
+ response = urlfetch_service_pb.URLFetchResponse()
+ request.set_url(url)
+
+ if method == GET:
+ request.set_method(urlfetch_service_pb.URLFetchRequest.GET)
+ elif method == POST:
+ request.set_method(urlfetch_service_pb.URLFetchRequest.POST)
+ elif method == HEAD:
+ request.set_method(urlfetch_service_pb.URLFetchRequest.HEAD)
+ elif method == PUT:
+ request.set_method(urlfetch_service_pb.URLFetchRequest.PUT)
+ elif method == DELETE:
+ request.set_method(urlfetch_service_pb.URLFetchRequest.DELETE)
+
+ if payload and (method == POST or method == PUT):
+ request.set_payload(payload)
+
+ for key, value in headers.iteritems():
+ header_proto = request.add_header()
+ header_proto.set_key(key)
+ header_proto.set_value(str(value))
+
+ request.set_followredirects(follow_redirects)
+
+ if rpc.deadline is not None:
+ request.set_deadline(rpc.deadline)
+
+ rpc.make_call('Fetch', request, response, _get_fetch_result, allow_truncated)
+
+
+def _get_fetch_result(rpc):
+ """Check success, handle exceptions, and return converted RPC result.
+
+ This method waits for the RPC if it has not yet finished, and calls the
+ post-call hooks on the first invocation.
+
+ Args:
+ rpc: A UserRPC object.
+
+ Raises:
+ InvalidURLError if the url was invalid.
+ DownloadError if there was a problem fetching the url.
+ ResponseTooLargeError if the response was either truncated (and
+ allow_truncated=False was passed to make_fetch_call()), or if it
+ was too big for us to download.
+
+ Returns:
+ A _URLFetchResult object.
+ """
+ assert rpc.service == 'urlfetch', repr(rpc.service)
+ assert rpc.method == 'Fetch', repr(rpc.method)
+ try:
+ rpc.check_success()
+ except apiproxy_errors.ApplicationError, err:
+ if (err.application_error ==
+ urlfetch_service_pb.URLFetchServiceError.INVALID_URL):
+ raise InvalidURLError(str(err))
+ if (err.application_error ==
+ urlfetch_service_pb.URLFetchServiceError.UNSPECIFIED_ERROR):
+ raise DownloadError(str(err))
+ if (err.application_error ==
+ urlfetch_service_pb.URLFetchServiceError.FETCH_ERROR):
+ raise DownloadError(str(err))
+ if (err.application_error ==
+ urlfetch_service_pb.URLFetchServiceError.RESPONSE_TOO_LARGE):
+ raise ResponseTooLargeError(None)
+ if (err.application_error ==
+ urlfetch_service_pb.URLFetchServiceError.DEADLINE_EXCEEDED):
+ raise DownloadError(str(err))
+ raise err
+
+ response = rpc.response
+ allow_truncated = rpc.user_data
+ result = _URLFetchResult(response)
+ if response.contentwastruncated() and not allow_truncated:
+ raise ResponseTooLargeError(result)
+ return result
+
+
+Fetch = fetch
+
+
+class _URLFetchResult(object):
+ """A Pythonic representation of our fetch response protocol buffer.
+ """
+
+ def __init__(self, response_proto):
+ """Constructor.
+
+ Args:
+ response_proto: the URLFetchResponse proto buffer to wrap.
+ """
+ self.__pb = response_proto
+ self.content = response_proto.content()
+ self.status_code = response_proto.statuscode()
+ self.content_was_truncated = response_proto.contentwastruncated()
+ self.headers = _CaselessDict()
+ for header_proto in response_proto.header_list():
+ self.headers[header_proto.key()] = header_proto.value()
diff --git a/google_appengine/google/appengine/api/urlfetch.pyc b/google_appengine/google/appengine/api/urlfetch.pyc
new file mode 100644
index 0000000..3b53f6e
--- /dev/null
+++ b/google_appengine/google/appengine/api/urlfetch.pyc
Binary files differ
diff --git a/google_appengine/google/appengine/api/urlfetch_errors.py b/google_appengine/google/appengine/api/urlfetch_errors.py
new file mode 100755
index 0000000..e71ca5d
--- /dev/null
+++ b/google_appengine/google/appengine/api/urlfetch_errors.py
@@ -0,0 +1,60 @@
+#!/usr/bin/env python
+#
+# Copyright 2007 Google Inc.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
+"""Errors used in the urlfetch API
+developers.
+"""
+
+
+
+
+
+
+class Error(Exception):
+ """Base URL fetcher error type."""
+
+
+class InvalidURLError(Error):
+ """Raised when the URL given is empty or invalid.
+
+ Only http: and https: URLs are allowed. The maximum URL length
+ allowed is 2048 characters. The login/pass portion is not
+ allowed. In deployed applications, only ports 80 and 443 for http
+ and https respectively are allowed.
+ """
+
+
+class DownloadError(Error):
+ """Raised when the we could not fetch the URL for any reason.
+
+ Note that this exception is only raised when we could not contact the
+ server. HTTP errors (e.g., 404) are returned as the status_code field
+ in the return value of Fetch, and no exception is raised.
+ """
+
+
+class ResponseTooLargeError(Error):
+ """Raised when the response was too large and was truncated."""
+ def __init__(self, response):
+ self.response = response
+
+
+class InvalidMethodError(Error):
+ """Raised when an invalid value for 'method' is provided"""
+
diff --git a/google_appengine/google/appengine/api/urlfetch_errors.pyc b/google_appengine/google/appengine/api/urlfetch_errors.pyc
new file mode 100644
index 0000000..1d41770
--- /dev/null
+++ b/google_appengine/google/appengine/api/urlfetch_errors.pyc
Binary files differ
diff --git a/google_appengine/google/appengine/api/urlfetch_service_pb.py b/google_appengine/google/appengine/api/urlfetch_service_pb.py
new file mode 100644
index 0000000..bf513a3
--- /dev/null
+++ b/google_appengine/google/appengine/api/urlfetch_service_pb.py
@@ -0,0 +1,823 @@
+#!/usr/bin/env python
+#
+# Copyright 2007 Google Inc.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
+
+from google.net.proto import ProtocolBuffer
+import array
+import dummy_thread as thread
+
+__pychecker__ = """maxreturns=0 maxbranches=0 no-callinit
+ unusednames=printElemNumber,debug_strs no-special"""
+
+class URLFetchServiceError(ProtocolBuffer.ProtocolMessage):
+
+ OK = 0
+ INVALID_URL = 1
+ FETCH_ERROR = 2
+ UNSPECIFIED_ERROR = 3
+ RESPONSE_TOO_LARGE = 4
+ DEADLINE_EXCEEDED = 5
+
+ _ErrorCode_NAMES = {
+ 0: "OK",
+ 1: "INVALID_URL",
+ 2: "FETCH_ERROR",
+ 3: "UNSPECIFIED_ERROR",
+ 4: "RESPONSE_TOO_LARGE",
+ 5: "DEADLINE_EXCEEDED",
+ }
+
+ def ErrorCode_Name(cls, x): return cls._ErrorCode_NAMES.get(x, "")
+ ErrorCode_Name = classmethod(ErrorCode_Name)
+
+
+ def __init__(self, contents=None):
+ pass
+ if contents is not None: self.MergeFromString(contents)
+
+
+ def MergeFrom(self, x):
+ assert x is not self
+
+ def Equals(self, x):
+ if x is self: return 1
+ return 1
+
+ def IsInitialized(self, debug_strs=None):
+ initialized = 1
+ return initialized
+
+ def ByteSize(self):
+ n = 0
+ return n + 0
+
+ def Clear(self):
+ pass
+
+ def OutputUnchecked(self, out):
+ pass
+
+ def TryMerge(self, d):
+ while d.avail() > 0:
+ tt = d.getVarInt32()
+ if (tt == 0): raise ProtocolBuffer.ProtocolBufferDecodeError
+ d.skipData(tt)
+
+
+ def __str__(self, prefix="", printElemNumber=0):
+ res=""
+ return res
+
+
+ def _BuildTagLookupTable(sparse, maxtag, default=None):
+ return tuple([sparse.get(i, default) for i in xrange(0, 1+maxtag)])
+
+
+ _TEXT = _BuildTagLookupTable({
+ 0: "ErrorCode",
+ }, 0)
+
+ _TYPES = _BuildTagLookupTable({
+ 0: ProtocolBuffer.Encoder.NUMERIC,
+ }, 0, ProtocolBuffer.Encoder.MAX_TYPE)
+
+ _STYLE = """"""
+ _STYLE_CONTENT_TYPE = """"""
+class URLFetchRequest_Header(ProtocolBuffer.ProtocolMessage):
+ has_key_ = 0
+ key_ = ""
+ has_value_ = 0
+ value_ = ""
+
+ def __init__(self, contents=None):
+ if contents is not None: self.MergeFromString(contents)
+
+ def key(self): return self.key_
+
+ def set_key(self, x):
+ self.has_key_ = 1
+ self.key_ = x
+
+ def clear_key(self):
+ if self.has_key_:
+ self.has_key_ = 0
+ self.key_ = ""
+
+ def has_key(self): return self.has_key_
+
+ def value(self): return self.value_
+
+ def set_value(self, x):
+ self.has_value_ = 1
+ self.value_ = x
+
+ def clear_value(self):
+ if self.has_value_:
+ self.has_value_ = 0
+ self.value_ = ""
+
+ def has_value(self): return self.has_value_
+
+
+ def MergeFrom(self, x):
+ assert x is not self
+ if (x.has_key()): self.set_key(x.key())
+ if (x.has_value()): self.set_value(x.value())
+
+ def Equals(self, x):
+ if x is self: return 1
+ if self.has_key_ != x.has_key_: return 0
+ if self.has_key_ and self.key_ != x.key_: return 0
+ if self.has_value_ != x.has_value_: return 0
+ if self.has_value_ and self.value_ != x.value_: return 0
+ return 1
+
+ def IsInitialized(self, debug_strs=None):
+ initialized = 1
+ if (not self.has_key_):
+ initialized = 0
+ if debug_strs is not None:
+ debug_strs.append('Required field: key not set.')
+ if (not self.has_value_):
+ initialized = 0
+ if debug_strs is not None:
+ debug_strs.append('Required field: value not set.')
+ return initialized
+
+ def ByteSize(self):
+ n = 0
+ n += self.lengthString(len(self.key_))
+ n += self.lengthString(len(self.value_))
+ return n + 2
+
+ def Clear(self):
+ self.clear_key()
+ self.clear_value()
+
+ def OutputUnchecked(self, out):
+ out.putVarInt32(34)
+ out.putPrefixedString(self.key_)
+ out.putVarInt32(42)
+ out.putPrefixedString(self.value_)
+
+ def TryMerge(self, d):
+ while 1:
+ tt = d.getVarInt32()
+ if tt == 28: break
+ if tt == 34:
+ self.set_key(d.getPrefixedString())
+ continue
+ if tt == 42:
+ self.set_value(d.getPrefixedString())
+ continue
+ if (tt == 0): raise ProtocolBuffer.ProtocolBufferDecodeError
+ d.skipData(tt)
+
+
+ def __str__(self, prefix="", printElemNumber=0):
+ res=""
+ if self.has_key_: res+=prefix+("Key: %s\n" % self.DebugFormatString(self.key_))
+ if self.has_value_: res+=prefix+("Value: %s\n" % self.DebugFormatString(self.value_))
+ return res
+
+class URLFetchRequest(ProtocolBuffer.ProtocolMessage):
+
+ GET = 1
+ POST = 2
+ HEAD = 3
+ PUT = 4
+ DELETE = 5
+
+ _RequestMethod_NAMES = {
+ 1: "GET",
+ 2: "POST",
+ 3: "HEAD",
+ 4: "PUT",
+ 5: "DELETE",
+ }
+
+ def RequestMethod_Name(cls, x): return cls._RequestMethod_NAMES.get(x, "")
+ RequestMethod_Name = classmethod(RequestMethod_Name)
+
+ has_method_ = 0
+ method_ = 0
+ has_url_ = 0
+ url_ = ""
+ has_payload_ = 0
+ payload_ = ""
+ has_followredirects_ = 0
+ followredirects_ = 1
+ has_deadline_ = 0
+ deadline_ = 0.0
+
+ def __init__(self, contents=None):
+ self.header_ = []
+ if contents is not None: self.MergeFromString(contents)
+
+ def method(self): return self.method_
+
+ def set_method(self, x):
+ self.has_method_ = 1
+ self.method_ = x
+
+ def clear_method(self):
+ if self.has_method_:
+ self.has_method_ = 0
+ self.method_ = 0
+
+ def has_method(self): return self.has_method_
+
+ def url(self): return self.url_
+
+ def set_url(self, x):
+ self.has_url_ = 1
+ self.url_ = x
+
+ def clear_url(self):
+ if self.has_url_:
+ self.has_url_ = 0
+ self.url_ = ""
+
+ def has_url(self): return self.has_url_
+
+ def header_size(self): return len(self.header_)
+ def header_list(self): return self.header_
+
+ def header(self, i):
+ return self.header_[i]
+
+ def mutable_header(self, i):
+ return self.header_[i]
+
+ def add_header(self):
+ x = URLFetchRequest_Header()
+ self.header_.append(x)
+ return x
+
+ def clear_header(self):
+ self.header_ = []
+ def payload(self): return self.payload_
+
+ def set_payload(self, x):
+ self.has_payload_ = 1
+ self.payload_ = x
+
+ def clear_payload(self):
+ if self.has_payload_:
+ self.has_payload_ = 0
+ self.payload_ = ""
+
+ def has_payload(self): return self.has_payload_
+
+ def followredirects(self): return self.followredirects_
+
+ def set_followredirects(self, x):
+ self.has_followredirects_ = 1
+ self.followredirects_ = x
+
+ def clear_followredirects(self):
+ if self.has_followredirects_:
+ self.has_followredirects_ = 0
+ self.followredirects_ = 1
+
+ def has_followredirects(self): return self.has_followredirects_
+
+ def deadline(self): return self.deadline_
+
+ def set_deadline(self, x):
+ self.has_deadline_ = 1
+ self.deadline_ = x
+
+ def clear_deadline(self):
+ if self.has_deadline_:
+ self.has_deadline_ = 0
+ self.deadline_ = 0.0
+
+ def has_deadline(self): return self.has_deadline_
+
+
+ def MergeFrom(self, x):
+ assert x is not self
+ if (x.has_method()): self.set_method(x.method())
+ if (x.has_url()): self.set_url(x.url())
+ for i in xrange(x.header_size()): self.add_header().CopyFrom(x.header(i))
+ if (x.has_payload()): self.set_payload(x.payload())
+ if (x.has_followredirects()): self.set_followredirects(x.followredirects())
+ if (x.has_deadline()): self.set_deadline(x.deadline())
+
+ def Equals(self, x):
+ if x is self: return 1
+ if self.has_method_ != x.has_method_: return 0
+ if self.has_method_ and self.method_ != x.method_: return 0
+ if self.has_url_ != x.has_url_: return 0
+ if self.has_url_ and self.url_ != x.url_: return 0
+ if len(self.header_) != len(x.header_): return 0
+ for e1, e2 in zip(self.header_, x.header_):
+ if e1 != e2: return 0
+ if self.has_payload_ != x.has_payload_: return 0
+ if self.has_payload_ and self.payload_ != x.payload_: return 0
+ if self.has_followredirects_ != x.has_followredirects_: return 0
+ if self.has_followredirects_ and self.followredirects_ != x.followredirects_: return 0
+ if self.has_deadline_ != x.has_deadline_: return 0
+ if self.has_deadline_ and self.deadline_ != x.deadline_: return 0
+ return 1
+
+ def IsInitialized(self, debug_strs=None):
+ initialized = 1
+ if (not self.has_method_):
+ initialized = 0
+ if debug_strs is not None:
+ debug_strs.append('Required field: method not set.')
+ if (not self.has_url_):
+ initialized = 0
+ if debug_strs is not None:
+ debug_strs.append('Required field: url not set.')
+ for p in self.header_:
+ if not p.IsInitialized(debug_strs): initialized=0
+ return initialized
+
+ def ByteSize(self):
+ n = 0
+ n += self.lengthVarInt64(self.method_)
+ n += self.lengthString(len(self.url_))
+ n += 2 * len(self.header_)
+ for i in xrange(len(self.header_)): n += self.header_[i].ByteSize()
+ if (self.has_payload_): n += 1 + self.lengthString(len(self.payload_))
+ if (self.has_followredirects_): n += 2
+ if (self.has_deadline_): n += 9
+ return n + 2
+
+ def Clear(self):
+ self.clear_method()
+ self.clear_url()
+ self.clear_header()
+ self.clear_payload()
+ self.clear_followredirects()
+ self.clear_deadline()
+
+ def OutputUnchecked(self, out):
+ out.putVarInt32(8)
+ out.putVarInt32(self.method_)
+ out.putVarInt32(18)
+ out.putPrefixedString(self.url_)
+ for i in xrange(len(self.header_)):
+ out.putVarInt32(27)
+ self.header_[i].OutputUnchecked(out)
+ out.putVarInt32(28)
+ if (self.has_payload_):
+ out.putVarInt32(50)
+ out.putPrefixedString(self.payload_)
+ if (self.has_followredirects_):
+ out.putVarInt32(56)
+ out.putBoolean(self.followredirects_)
+ if (self.has_deadline_):
+ out.putVarInt32(65)
+ out.putDouble(self.deadline_)
+
+ def TryMerge(self, d):
+ while d.avail() > 0:
+ tt = d.getVarInt32()
+ if tt == 8:
+ self.set_method(d.getVarInt32())
+ continue
+ if tt == 18:
+ self.set_url(d.getPrefixedString())
+ continue
+ if tt == 27:
+ self.add_header().TryMerge(d)
+ continue
+ if tt == 50:
+ self.set_payload(d.getPrefixedString())
+ continue
+ if tt == 56:
+ self.set_followredirects(d.getBoolean())
+ continue
+ if tt == 65:
+ self.set_deadline(d.getDouble())
+ continue
+ if (tt == 0): raise ProtocolBuffer.ProtocolBufferDecodeError
+ d.skipData(tt)
+
+
+ def __str__(self, prefix="", printElemNumber=0):
+ res=""
+ if self.has_method_: res+=prefix+("Method: %s\n" % self.DebugFormatInt32(self.method_))
+ if self.has_url_: res+=prefix+("Url: %s\n" % self.DebugFormatString(self.url_))
+ cnt=0
+ for e in self.header_:
+ elm=""
+ if printElemNumber: elm="(%d)" % cnt
+ res+=prefix+("Header%s {\n" % elm)
+ res+=e.__str__(prefix + " ", printElemNumber)
+ res+=prefix+"}\n"
+ cnt+=1
+ if self.has_payload_: res+=prefix+("Payload: %s\n" % self.DebugFormatString(self.payload_))
+ if self.has_followredirects_: res+=prefix+("FollowRedirects: %s\n" % self.DebugFormatBool(self.followredirects_))
+ if self.has_deadline_: res+=prefix+("Deadline: %s\n" % self.DebugFormat(self.deadline_))
+ return res
+
+
+ def _BuildTagLookupTable(sparse, maxtag, default=None):
+ return tuple([sparse.get(i, default) for i in xrange(0, 1+maxtag)])
+
+ kMethod = 1
+ kUrl = 2
+ kHeaderGroup = 3
+ kHeaderKey = 4
+ kHeaderValue = 5
+ kPayload = 6
+ kFollowRedirects = 7
+ kDeadline = 8
+
+ _TEXT = _BuildTagLookupTable({
+ 0: "ErrorCode",
+ 1: "Method",
+ 2: "Url",
+ 3: "Header",
+ 4: "Key",
+ 5: "Value",
+ 6: "Payload",
+ 7: "FollowRedirects",
+ 8: "Deadline",
+ }, 8)
+
+ _TYPES = _BuildTagLookupTable({
+ 0: ProtocolBuffer.Encoder.NUMERIC,
+ 1: ProtocolBuffer.Encoder.NUMERIC,
+ 2: ProtocolBuffer.Encoder.STRING,
+ 3: ProtocolBuffer.Encoder.STARTGROUP,
+ 4: ProtocolBuffer.Encoder.STRING,
+ 5: ProtocolBuffer.Encoder.STRING,
+ 6: ProtocolBuffer.Encoder.STRING,
+ 7: ProtocolBuffer.Encoder.NUMERIC,
+ 8: ProtocolBuffer.Encoder.DOUBLE,
+ }, 8, ProtocolBuffer.Encoder.MAX_TYPE)
+
+ _STYLE = """"""
+ _STYLE_CONTENT_TYPE = """"""
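+
+# Illustrative sketch (not part of the generated file): the message classes
+# in this module follow the ProtocolBuffer.ProtocolMessage interface, e.g.
+#
+#   req = URLFetchRequest()
+#   req.set_method(URLFetchRequest.GET)
+#   req.set_url('http://example.com/')
+#   assert req.IsInitialized()
+#   data = req.Encode()              # serialized wire bytes
+#   round_trip = URLFetchRequest(data)
+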
+class URLFetchResponse_Header(ProtocolBuffer.ProtocolMessage):
+ has_key_ = 0
+ key_ = ""
+ has_value_ = 0
+ value_ = ""
+
+ def __init__(self, contents=None):
+ if contents is not None: self.MergeFromString(contents)
+
+ def key(self): return self.key_
+
+ def set_key(self, x):
+ self.has_key_ = 1
+ self.key_ = x
+
+ def clear_key(self):
+ if self.has_key_:
+ self.has_key_ = 0
+ self.key_ = ""
+
+ def has_key(self): return self.has_key_
+
+ def value(self): return self.value_
+
+ def set_value(self, x):
+ self.has_value_ = 1
+ self.value_ = x
+
+ def clear_value(self):
+ if self.has_value_:
+ self.has_value_ = 0
+ self.value_ = ""
+
+ def has_value(self): return self.has_value_
+
+
+ def MergeFrom(self, x):
+ assert x is not self
+ if (x.has_key()): self.set_key(x.key())
+ if (x.has_value()): self.set_value(x.value())
+
+ def Equals(self, x):
+ if x is self: return 1
+ if self.has_key_ != x.has_key_: return 0
+ if self.has_key_ and self.key_ != x.key_: return 0
+ if self.has_value_ != x.has_value_: return 0
+ if self.has_value_ and self.value_ != x.value_: return 0
+ return 1
+
+ def IsInitialized(self, debug_strs=None):
+ initialized = 1
+ if (not self.has_key_):
+ initialized = 0
+ if debug_strs is not None:
+ debug_strs.append('Required field: key not set.')
+ if (not self.has_value_):
+ initialized = 0
+ if debug_strs is not None:
+ debug_strs.append('Required field: value not set.')
+ return initialized
+
+ def ByteSize(self):
+ n = 0
+ n += self.lengthString(len(self.key_))
+ n += self.lengthString(len(self.value_))
+ return n + 2
+
+ def Clear(self):
+ self.clear_key()
+ self.clear_value()
+
+ def OutputUnchecked(self, out):
+ out.putVarInt32(34)
+ out.putPrefixedString(self.key_)
+ out.putVarInt32(42)
+ out.putPrefixedString(self.value_)
+
+ def TryMerge(self, d):
+ while 1:
+ tt = d.getVarInt32()
+ if tt == 28: break
+ if tt == 34:
+ self.set_key(d.getPrefixedString())
+ continue
+ if tt == 42:
+ self.set_value(d.getPrefixedString())
+ continue
+ if (tt == 0): raise ProtocolBuffer.ProtocolBufferDecodeError
+ d.skipData(tt)
+
+
+ def __str__(self, prefix="", printElemNumber=0):
+ res=""
+ if self.has_key_: res+=prefix+("Key: %s\n" % self.DebugFormatString(self.key_))
+ if self.has_value_: res+=prefix+("Value: %s\n" % self.DebugFormatString(self.value_))
+ return res
+
+class URLFetchResponse(ProtocolBuffer.ProtocolMessage):
+ has_content_ = 0
+ content_ = ""
+ has_statuscode_ = 0
+ statuscode_ = 0
+ has_contentwastruncated_ = 0
+ contentwastruncated_ = 0
+ has_externalbytessent_ = 0
+ externalbytessent_ = 0
+ has_externalbytesreceived_ = 0
+ externalbytesreceived_ = 0
+
+ def __init__(self, contents=None):
+ self.header_ = []
+ if contents is not None: self.MergeFromString(contents)
+
+ def content(self): return self.content_
+
+ def set_content(self, x):
+ self.has_content_ = 1
+ self.content_ = x
+
+ def clear_content(self):
+ if self.has_content_:
+ self.has_content_ = 0
+ self.content_ = ""
+
+ def has_content(self): return self.has_content_
+
+ def statuscode(self): return self.statuscode_
+
+ def set_statuscode(self, x):
+ self.has_statuscode_ = 1
+ self.statuscode_ = x
+
+ def clear_statuscode(self):
+ if self.has_statuscode_:
+ self.has_statuscode_ = 0
+ self.statuscode_ = 0
+
+ def has_statuscode(self): return self.has_statuscode_
+
+ def header_size(self): return len(self.header_)
+ def header_list(self): return self.header_
+
+ def header(self, i):
+ return self.header_[i]
+
+ def mutable_header(self, i):
+ return self.header_[i]
+
+ def add_header(self):
+ x = URLFetchResponse_Header()
+ self.header_.append(x)
+ return x
+
+ def clear_header(self):
+ self.header_ = []
+ def contentwastruncated(self): return self.contentwastruncated_
+
+ def set_contentwastruncated(self, x):
+ self.has_contentwastruncated_ = 1
+ self.contentwastruncated_ = x
+
+ def clear_contentwastruncated(self):
+ if self.has_contentwastruncated_:
+ self.has_contentwastruncated_ = 0
+ self.contentwastruncated_ = 0
+
+ def has_contentwastruncated(self): return self.has_contentwastruncated_
+
+ def externalbytessent(self): return self.externalbytessent_
+
+ def set_externalbytessent(self, x):
+ self.has_externalbytessent_ = 1
+ self.externalbytessent_ = x
+
+ def clear_externalbytessent(self):
+ if self.has_externalbytessent_:
+ self.has_externalbytessent_ = 0
+ self.externalbytessent_ = 0
+
+ def has_externalbytessent(self): return self.has_externalbytessent_
+
+ def externalbytesreceived(self): return self.externalbytesreceived_
+
+ def set_externalbytesreceived(self, x):
+ self.has_externalbytesreceived_ = 1
+ self.externalbytesreceived_ = x
+
+ def clear_externalbytesreceived(self):
+ if self.has_externalbytesreceived_:
+ self.has_externalbytesreceived_ = 0
+ self.externalbytesreceived_ = 0
+
+ def has_externalbytesreceived(self): return self.has_externalbytesreceived_
+
+
+ def MergeFrom(self, x):
+ assert x is not self
+ if (x.has_content()): self.set_content(x.content())
+ if (x.has_statuscode()): self.set_statuscode(x.statuscode())
+ for i in xrange(x.header_size()): self.add_header().CopyFrom(x.header(i))
+ if (x.has_contentwastruncated()): self.set_contentwastruncated(x.contentwastruncated())
+ if (x.has_externalbytessent()): self.set_externalbytessent(x.externalbytessent())
+ if (x.has_externalbytesreceived()): self.set_externalbytesreceived(x.externalbytesreceived())
+
+ def Equals(self, x):
+ if x is self: return 1
+ if self.has_content_ != x.has_content_: return 0
+ if self.has_content_ and self.content_ != x.content_: return 0
+ if self.has_statuscode_ != x.has_statuscode_: return 0
+ if self.has_statuscode_ and self.statuscode_ != x.statuscode_: return 0
+ if len(self.header_) != len(x.header_): return 0
+ for e1, e2 in zip(self.header_, x.header_):
+ if e1 != e2: return 0
+ if self.has_contentwastruncated_ != x.has_contentwastruncated_: return 0
+ if self.has_contentwastruncated_ and self.contentwastruncated_ != x.contentwastruncated_: return 0
+ if self.has_externalbytessent_ != x.has_externalbytessent_: return 0
+ if self.has_externalbytessent_ and self.externalbytessent_ != x.externalbytessent_: return 0
+ if self.has_externalbytesreceived_ != x.has_externalbytesreceived_: return 0
+ if self.has_externalbytesreceived_ and self.externalbytesreceived_ != x.externalbytesreceived_: return 0
+ return 1
+
+ def IsInitialized(self, debug_strs=None):
+ initialized = 1
+ if (not self.has_statuscode_):
+ initialized = 0
+ if debug_strs is not None:
+ debug_strs.append('Required field: statuscode not set.')
+ for p in self.header_:
+ if not p.IsInitialized(debug_strs): initialized=0
+ return initialized
+
+ def ByteSize(self):
+ n = 0
+ if (self.has_content_): n += 1 + self.lengthString(len(self.content_))
+ n += self.lengthVarInt64(self.statuscode_)
+ n += 2 * len(self.header_)
+ for i in xrange(len(self.header_)): n += self.header_[i].ByteSize()
+ if (self.has_contentwastruncated_): n += 2
+ if (self.has_externalbytessent_): n += 1 + self.lengthVarInt64(self.externalbytessent_)
+ if (self.has_externalbytesreceived_): n += 1 + self.lengthVarInt64(self.externalbytesreceived_)
+ return n + 1
+
+ def Clear(self):
+ self.clear_content()
+ self.clear_statuscode()
+ self.clear_header()
+ self.clear_contentwastruncated()
+ self.clear_externalbytessent()
+ self.clear_externalbytesreceived()
+
+ def OutputUnchecked(self, out):
+ if (self.has_content_):
+ out.putVarInt32(10)
+ out.putPrefixedString(self.content_)
+ out.putVarInt32(16)
+ out.putVarInt32(self.statuscode_)
+ for i in xrange(len(self.header_)):
+ out.putVarInt32(27)
+ self.header_[i].OutputUnchecked(out)
+ out.putVarInt32(28)
+ if (self.has_contentwastruncated_):
+ out.putVarInt32(48)
+ out.putBoolean(self.contentwastruncated_)
+ if (self.has_externalbytessent_):
+ out.putVarInt32(56)
+ out.putVarInt64(self.externalbytessent_)
+ if (self.has_externalbytesreceived_):
+ out.putVarInt32(64)
+ out.putVarInt64(self.externalbytesreceived_)
+
+ def TryMerge(self, d):
+ while d.avail() > 0:
+ tt = d.getVarInt32()
+ if tt == 10:
+ self.set_content(d.getPrefixedString())
+ continue
+ if tt == 16:
+ self.set_statuscode(d.getVarInt32())
+ continue
+ if tt == 27:
+ self.add_header().TryMerge(d)
+ continue
+ if tt == 48:
+ self.set_contentwastruncated(d.getBoolean())
+ continue
+ if tt == 56:
+ self.set_externalbytessent(d.getVarInt64())
+ continue
+ if tt == 64:
+ self.set_externalbytesreceived(d.getVarInt64())
+ continue
+ if (tt == 0): raise ProtocolBuffer.ProtocolBufferDecodeError
+ d.skipData(tt)
+
+
+ def __str__(self, prefix="", printElemNumber=0):
+ res=""
+ if self.has_content_: res+=prefix+("Content: %s\n" % self.DebugFormatString(self.content_))
+ if self.has_statuscode_: res+=prefix+("StatusCode: %s\n" % self.DebugFormatInt32(self.statuscode_))
+ cnt=0
+ for e in self.header_:
+ elm=""
+ if printElemNumber: elm="(%d)" % cnt
+ res+=prefix+("Header%s {\n" % elm)
+ res+=e.__str__(prefix + " ", printElemNumber)
+ res+=prefix+"}\n"
+ cnt+=1
+ if self.has_contentwastruncated_: res+=prefix+("ContentWasTruncated: %s\n" % self.DebugFormatBool(self.contentwastruncated_))
+ if self.has_externalbytessent_: res+=prefix+("ExternalBytesSent: %s\n" % self.DebugFormatInt64(self.externalbytessent_))
+ if self.has_externalbytesreceived_: res+=prefix+("ExternalBytesReceived: %s\n" % self.DebugFormatInt64(self.externalbytesreceived_))
+ return res
+
+
+ def _BuildTagLookupTable(sparse, maxtag, default=None):
+ return tuple([sparse.get(i, default) for i in xrange(0, 1+maxtag)])
+
+ kContent = 1
+ kStatusCode = 2
+ kHeaderGroup = 3
+ kHeaderKey = 4
+ kHeaderValue = 5
+ kContentWasTruncated = 6
+ kExternalBytesSent = 7
+ kExternalBytesReceived = 8
+
+ _TEXT = _BuildTagLookupTable({
+ 0: "ErrorCode",
+ 1: "Content",
+ 2: "StatusCode",
+ 3: "Header",
+ 4: "Key",
+ 5: "Value",
+ 6: "ContentWasTruncated",
+ 7: "ExternalBytesSent",
+ 8: "ExternalBytesReceived",
+ }, 8)
+
+ _TYPES = _BuildTagLookupTable({
+ 0: ProtocolBuffer.Encoder.NUMERIC,
+ 1: ProtocolBuffer.Encoder.STRING,
+ 2: ProtocolBuffer.Encoder.NUMERIC,
+ 3: ProtocolBuffer.Encoder.STARTGROUP,
+ 4: ProtocolBuffer.Encoder.STRING,
+ 5: ProtocolBuffer.Encoder.STRING,
+ 6: ProtocolBuffer.Encoder.NUMERIC,
+ 7: ProtocolBuffer.Encoder.NUMERIC,
+ 8: ProtocolBuffer.Encoder.NUMERIC,
+ }, 8, ProtocolBuffer.Encoder.MAX_TYPE)
+
+ _STYLE = """"""
+ _STYLE_CONTENT_TYPE = """"""
+
+__all__ = ['URLFetchServiceError','URLFetchRequest','URLFetchRequest_Header','URLFetchResponse','URLFetchResponse_Header']
diff --git a/google_appengine/google/appengine/api/urlfetch_service_pb.pyc b/google_appengine/google/appengine/api/urlfetch_service_pb.pyc
new file mode 100644
index 0000000..0c0d0e1
--- /dev/null
+++ b/google_appengine/google/appengine/api/urlfetch_service_pb.pyc
Binary files differ
diff --git a/google_appengine/google/appengine/api/urlfetch_stub.py b/google_appengine/google/appengine/api/urlfetch_stub.py
new file mode 100755
index 0000000..d317401
--- /dev/null
+++ b/google_appengine/google/appengine/api/urlfetch_stub.py
@@ -0,0 +1,270 @@
+#!/usr/bin/env python
+#
+# Copyright 2007 Google Inc.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
+
+"""Stub version of the urlfetch API, based on httplib."""
+
+
+
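+# Illustrative note (not in the original file): the development server wires
+# this stub into the API proxy roughly as follows, after which
+# urlfetch.fetch() calls are served by URLFetchServiceStub._Dynamic_Fetch():
+#
+#   from google.appengine.api import apiproxy_stub_map
+#   from google.appengine.api import urlfetch_stub
+#   apiproxy_stub_map.apiproxy.RegisterStub(
+#       'urlfetch', urlfetch_stub.URLFetchServiceStub())
+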
+import gzip
+import httplib
+import logging
+import socket
+import StringIO
+import urllib
+import urlparse
+
+from google.appengine.api import apiproxy_stub
+from google.appengine.api import urlfetch
+from google.appengine.api import urlfetch_errors
+from google.appengine.api import urlfetch_service_pb
+from google.appengine.runtime import apiproxy_errors
+
+
+MAX_RESPONSE_SIZE = 2 ** 24
+
+MAX_REDIRECTS = urlfetch.MAX_REDIRECTS
+
+REDIRECT_STATUSES = frozenset([
+ httplib.MOVED_PERMANENTLY,
+ httplib.FOUND,
+ httplib.SEE_OTHER,
+ httplib.TEMPORARY_REDIRECT,
+])
+
+PORTS_ALLOWED_IN_PRODUCTION = (
+ None, '80', '443', '4443', '8080', '8081', '8082', '8083', '8084', '8085',
+ '8086', '8087', '8088', '8089', '8188', '8444', '8990')
+
+_API_CALL_DEADLINE = 5.0
+
+
+_UNTRUSTED_REQUEST_HEADERS = frozenset([
+ 'content-length',
+ 'host',
+ 'vary',
+ 'via',
+ 'x-forwarded-for',
+])
+
+class URLFetchServiceStub(apiproxy_stub.APIProxyStub):
+ """Stub version of the urlfetch API to be used with apiproxy_stub_map."""
+
+ def __init__(self, service_name='urlfetch'):
+ """Initializer.
+
+ Args:
+ service_name: Service name expected for all calls.
+ """
+ super(URLFetchServiceStub, self).__init__(service_name)
+
+ def _Dynamic_Fetch(self, request, response):
+ """Trivial implementation of URLFetchService::Fetch().
+
+ Args:
+ request: the fetch to perform, a URLFetchRequest
+ response: the fetch response, a URLFetchResponse
+ """
+ (protocol, host, path, parameters, query, fragment) = urlparse.urlparse(request.url())
+
+ payload = None
+ if request.method() == urlfetch_service_pb.URLFetchRequest.GET:
+ method = 'GET'
+ elif request.method() == urlfetch_service_pb.URLFetchRequest.POST:
+ method = 'POST'
+ payload = request.payload()
+ elif request.method() == urlfetch_service_pb.URLFetchRequest.HEAD:
+ method = 'HEAD'
+ elif request.method() == urlfetch_service_pb.URLFetchRequest.PUT:
+ method = 'PUT'
+ payload = request.payload()
+ elif request.method() == urlfetch_service_pb.URLFetchRequest.DELETE:
+ method = 'DELETE'
+ else:
+ logging.error('Invalid method: %s', request.method())
+ raise apiproxy_errors.ApplicationError(
+ urlfetch_service_pb.URLFetchServiceError.UNSPECIFIED_ERROR)
+
+ if not (protocol == 'http' or protocol == 'https'):
+ logging.error('Invalid protocol: %s', protocol)
+ raise apiproxy_errors.ApplicationError(
+ urlfetch_service_pb.URLFetchServiceError.INVALID_URL)
+
+ if not host:
+ logging.error('Missing host.')
+ raise apiproxy_errors.ApplicationError(
+ urlfetch_service_pb.URLFetchServiceError.FETCH_ERROR)
+
+ sanitized_headers = self._SanitizeHttpHeaders(_UNTRUSTED_REQUEST_HEADERS,
+ request.header_list())
+ request.clear_header()
+ request.header_list().extend(sanitized_headers)
+ deadline = _API_CALL_DEADLINE
+ if request.has_deadline():
+ deadline = request.deadline()
+
+ self._RetrieveURL(request.url(), payload, method,
+ request.header_list(), response,
+ follow_redirects=request.followredirects(),
+ deadline=deadline)
+
+ def _RetrieveURL(self, url, payload, method, headers, response,
+ follow_redirects=True, deadline=_API_CALL_DEADLINE):
+ """Retrieves a URL.
+
+ Args:
+ url: String containing the URL to access.
+ payload: Request payload to send, if any; None if no payload.
+ method: HTTP method to use (e.g., 'GET')
+ headers: List of additional header objects to use for the request.
+ response: Response object
+ follow_redirects: optional setting (defaulting to True) for whether or not
+ we should transparently follow redirects (up to MAX_REDIRECTS)
+ deadline: Number of seconds to wait for the urlfetch to finish.
+
+ Raises:
+      apiproxy_errors.ApplicationError with error code FETCH_ERROR
+      in cases where:
+ - MAX_REDIRECTS is exceeded
+ - The protocol of the redirected URL is bad or missing.
+ """
+ last_protocol = ''
+ last_host = ''
+
+ for redirect_number in xrange(MAX_REDIRECTS + 1):
+ parsed = urlparse.urlparse(url)
+ protocol, host, path, parameters, query, fragment = parsed
+
+ port = urllib.splitport(urllib.splituser(host)[1])[1]
+
+ if port not in PORTS_ALLOWED_IN_PRODUCTION:
+ logging.warning(
+          'urlfetch received %s; port %s is not allowed in production!' %
+ (url, port))
+
+ if protocol and not host:
+ logging.error('Missing host on redirect; target url is %s' % url)
+ raise apiproxy_errors.ApplicationError(
+ urlfetch_service_pb.URLFetchServiceError.FETCH_ERROR)
+
+ if not host and not protocol:
+ host = last_host
+ protocol = last_protocol
+
+ adjusted_headers = {
+ 'User-Agent':
+ 'AppEngine-Google; (+http://code.google.com/appengine)',
+ 'Host': host,
+ 'Accept-Encoding': 'gzip',
+ }
+ if payload is not None:
+ adjusted_headers['Content-Length'] = len(payload)
+ if method == 'POST' and payload:
+ adjusted_headers['Content-Type'] = 'application/x-www-form-urlencoded'
+
+ for header in headers:
+        if header.key().lower() == 'user-agent':
+ adjusted_headers['User-Agent'] = (
+ '%s %s' %
+ (header.value(), adjusted_headers['User-Agent']))
+ else:
+ adjusted_headers[header.key().title()] = header.value()
+
+ logging.debug('Making HTTP request: host = %s, '
+ 'url = %s, payload = %s, headers = %s',
+ host, url, payload, adjusted_headers)
+ try:
+ if protocol == 'http':
+ connection = httplib.HTTPConnection(host)
+ elif protocol == 'https':
+ connection = httplib.HTTPSConnection(host)
+ else:
+ error_msg = 'Redirect specified invalid protocol: "%s"' % protocol
+ logging.error(error_msg)
+ raise apiproxy_errors.ApplicationError(
+ urlfetch_service_pb.URLFetchServiceError.FETCH_ERROR, error_msg)
+
+ last_protocol = protocol
+ last_host = host
+
+ if query != '':
+ full_path = path + '?' + query
+ else:
+ full_path = path
+
+ orig_timeout = socket.getdefaulttimeout()
+ try:
+ socket.setdefaulttimeout(deadline)
+ connection.request(method, full_path, payload, adjusted_headers)
+ http_response = connection.getresponse()
+ if method == 'HEAD':
+ http_response_data = ''
+ else:
+ http_response_data = http_response.read()
+ finally:
+ socket.setdefaulttimeout(orig_timeout)
+ connection.close()
+ except (httplib.error, socket.error, IOError), e:
+ raise apiproxy_errors.ApplicationError(
+ urlfetch_service_pb.URLFetchServiceError.FETCH_ERROR, str(e))
+
+ if http_response.status in REDIRECT_STATUSES and follow_redirects:
+ url = http_response.getheader('Location', None)
+ if url is None:
+ error_msg = 'Redirecting response was missing "Location" header'
+ logging.error(error_msg)
+ raise apiproxy_errors.ApplicationError(
+ urlfetch_service_pb.URLFetchServiceError.FETCH_ERROR, error_msg)
+ else:
+ response.set_statuscode(http_response.status)
+ if http_response.getheader('content-encoding') == 'gzip':
+ gzip_stream = StringIO.StringIO(http_response_data)
+ gzip_file = gzip.GzipFile(fileobj=gzip_stream)
+ http_response_data = gzip_file.read()
+ response.set_content(http_response_data[:MAX_RESPONSE_SIZE])
+ for header_key, header_value in http_response.getheaders():
+ if (header_key.lower() == 'content-encoding' and
+ header_value == 'gzip'):
+ continue
+ if header_key.lower() == 'content-length':
+ header_value = str(len(response.content()))
+ header_proto = response.add_header()
+ header_proto.set_key(header_key)
+ header_proto.set_value(header_value)
+
+ if len(http_response_data) > MAX_RESPONSE_SIZE:
+ response.set_contentwastruncated(True)
+
+ break
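+
+    # The 'else' clause below belongs to the 'for' loop above: it runs only
+    # if the loop finishes without hitting 'break', i.e. when all
+    # MAX_REDIRECTS + 1 attempts ended in yet another redirect.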
+ else:
+ error_msg = 'Too many repeated redirects'
+ logging.error(error_msg)
+ raise apiproxy_errors.ApplicationError(
+ urlfetch_service_pb.URLFetchServiceError.FETCH_ERROR, error_msg)
+
+ def _SanitizeHttpHeaders(self, untrusted_headers, headers):
+ """Cleans "unsafe" headers from the HTTP request/response.
+
+ Args:
+      untrusted_headers: set of lowercase untrusted header names
+      headers: list of header objects exposing key() and value() accessors
+
+    Returns:
+      A generator over the headers whose names are not in untrusted_headers.
+    """
+ prohibited_headers = [h.key() for h in headers
+ if h.key().lower() in untrusted_headers]
+ if prohibited_headers:
+ logging.warn('Stripped prohibited headers from URLFetch request: %s',
+ prohibited_headers)
+ return (h for h in headers if h.key().lower() not in untrusted_headers)
diff --git a/google_appengine/google/appengine/api/urlfetch_stub.pyc b/google_appengine/google/appengine/api/urlfetch_stub.pyc
new file mode 100644
index 0000000..136ea22
--- /dev/null
+++ b/google_appengine/google/appengine/api/urlfetch_stub.pyc
Binary files differ
diff --git a/google_appengine/google/appengine/api/user_service_pb.py b/google_appengine/google/appengine/api/user_service_pb.py
new file mode 100644
index 0000000..1fe799b
--- /dev/null
+++ b/google_appengine/google/appengine/api/user_service_pb.py
@@ -0,0 +1,491 @@
+#!/usr/bin/env python
+#
+# Copyright 2007 Google Inc.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
+
+from google.net.proto import ProtocolBuffer
+import array
+import dummy_thread as thread
+
+__pychecker__ = """maxreturns=0 maxbranches=0 no-callinit
+ unusednames=printElemNumber,debug_strs no-special"""
+
+from google.appengine.api.api_base_pb import *
+class UserServiceError(ProtocolBuffer.ProtocolMessage):
+
+ OK = 0
+ REDIRECT_URL_TOO_LONG = 1
+ NOT_ALLOWED = 2
+
+ _ErrorCode_NAMES = {
+ 0: "OK",
+ 1: "REDIRECT_URL_TOO_LONG",
+ 2: "NOT_ALLOWED",
+ }
+
+ def ErrorCode_Name(cls, x): return cls._ErrorCode_NAMES.get(x, "")
+ ErrorCode_Name = classmethod(ErrorCode_Name)
+
+
+  def __init__(self, contents=None):
+    if contents is not None: self.MergeFromString(contents)
+
+
+ def MergeFrom(self, x):
+ assert x is not self
+
+ def Equals(self, x):
+ if x is self: return 1
+ return 1
+
+ def IsInitialized(self, debug_strs=None):
+ initialized = 1
+ return initialized
+
+ def ByteSize(self):
+ n = 0
+ return n + 0
+
+ def Clear(self):
+ pass
+
+ def OutputUnchecked(self, out):
+ pass
+
+ def TryMerge(self, d):
+ while d.avail() > 0:
+ tt = d.getVarInt32()
+ if (tt == 0): raise ProtocolBuffer.ProtocolBufferDecodeError
+ d.skipData(tt)
+
+
+ def __str__(self, prefix="", printElemNumber=0):
+ res=""
+ return res
+
+
+ def _BuildTagLookupTable(sparse, maxtag, default=None):
+ return tuple([sparse.get(i, default) for i in xrange(0, 1+maxtag)])
+
+
+ _TEXT = _BuildTagLookupTable({
+ 0: "ErrorCode",
+ }, 0)
+
+ _TYPES = _BuildTagLookupTable({
+ 0: ProtocolBuffer.Encoder.NUMERIC,
+ }, 0, ProtocolBuffer.Encoder.MAX_TYPE)
+
+ _STYLE = """"""
+ _STYLE_CONTENT_TYPE = """"""
+class CreateLoginURLRequest(ProtocolBuffer.ProtocolMessage):
+ has_destination_url_ = 0
+ destination_url_ = ""
+ has_auth_domain_ = 0
+ auth_domain_ = ""
+
+ def __init__(self, contents=None):
+ if contents is not None: self.MergeFromString(contents)
+
+ def destination_url(self): return self.destination_url_
+
+ def set_destination_url(self, x):
+ self.has_destination_url_ = 1
+ self.destination_url_ = x
+
+ def clear_destination_url(self):
+ if self.has_destination_url_:
+ self.has_destination_url_ = 0
+ self.destination_url_ = ""
+
+ def has_destination_url(self): return self.has_destination_url_
+
+ def auth_domain(self): return self.auth_domain_
+
+ def set_auth_domain(self, x):
+ self.has_auth_domain_ = 1
+ self.auth_domain_ = x
+
+ def clear_auth_domain(self):
+ if self.has_auth_domain_:
+ self.has_auth_domain_ = 0
+ self.auth_domain_ = ""
+
+ def has_auth_domain(self): return self.has_auth_domain_
+
+
+ def MergeFrom(self, x):
+ assert x is not self
+ if (x.has_destination_url()): self.set_destination_url(x.destination_url())
+ if (x.has_auth_domain()): self.set_auth_domain(x.auth_domain())
+
+ def Equals(self, x):
+ if x is self: return 1
+ if self.has_destination_url_ != x.has_destination_url_: return 0
+ if self.has_destination_url_ and self.destination_url_ != x.destination_url_: return 0
+ if self.has_auth_domain_ != x.has_auth_domain_: return 0
+ if self.has_auth_domain_ and self.auth_domain_ != x.auth_domain_: return 0
+ return 1
+
+ def IsInitialized(self, debug_strs=None):
+ initialized = 1
+ if (not self.has_destination_url_):
+ initialized = 0
+ if debug_strs is not None:
+ debug_strs.append('Required field: destination_url not set.')
+ return initialized
+
+ def ByteSize(self):
+ n = 0
+ n += self.lengthString(len(self.destination_url_))
+ if (self.has_auth_domain_): n += 1 + self.lengthString(len(self.auth_domain_))
+ return n + 1
+
+ def Clear(self):
+ self.clear_destination_url()
+ self.clear_auth_domain()
+
+ def OutputUnchecked(self, out):
+ out.putVarInt32(10)
+ out.putPrefixedString(self.destination_url_)
+ if (self.has_auth_domain_):
+ out.putVarInt32(18)
+ out.putPrefixedString(self.auth_domain_)
+
+ def TryMerge(self, d):
+ while d.avail() > 0:
+ tt = d.getVarInt32()
+ if tt == 10:
+ self.set_destination_url(d.getPrefixedString())
+ continue
+ if tt == 18:
+ self.set_auth_domain(d.getPrefixedString())
+ continue
+ if (tt == 0): raise ProtocolBuffer.ProtocolBufferDecodeError
+ d.skipData(tt)
+
+
+ def __str__(self, prefix="", printElemNumber=0):
+ res=""
+ if self.has_destination_url_: res+=prefix+("destination_url: %s\n" % self.DebugFormatString(self.destination_url_))
+ if self.has_auth_domain_: res+=prefix+("auth_domain: %s\n" % self.DebugFormatString(self.auth_domain_))
+ return res
+
+
+ def _BuildTagLookupTable(sparse, maxtag, default=None):
+ return tuple([sparse.get(i, default) for i in xrange(0, 1+maxtag)])
+
+ kdestination_url = 1
+ kauth_domain = 2
+
+ _TEXT = _BuildTagLookupTable({
+ 0: "ErrorCode",
+ 1: "destination_url",
+ 2: "auth_domain",
+ }, 2)
+
+ _TYPES = _BuildTagLookupTable({
+ 0: ProtocolBuffer.Encoder.NUMERIC,
+ 1: ProtocolBuffer.Encoder.STRING,
+ 2: ProtocolBuffer.Encoder.STRING,
+ }, 2, ProtocolBuffer.Encoder.MAX_TYPE)
+
+ _STYLE = """"""
+ _STYLE_CONTENT_TYPE = """"""
+class CreateLoginURLResponse(ProtocolBuffer.ProtocolMessage):
+ has_login_url_ = 0
+ login_url_ = ""
+
+ def __init__(self, contents=None):
+ if contents is not None: self.MergeFromString(contents)
+
+ def login_url(self): return self.login_url_
+
+ def set_login_url(self, x):
+ self.has_login_url_ = 1
+ self.login_url_ = x
+
+ def clear_login_url(self):
+ if self.has_login_url_:
+ self.has_login_url_ = 0
+ self.login_url_ = ""
+
+ def has_login_url(self): return self.has_login_url_
+
+
+ def MergeFrom(self, x):
+ assert x is not self
+ if (x.has_login_url()): self.set_login_url(x.login_url())
+
+ def Equals(self, x):
+ if x is self: return 1
+ if self.has_login_url_ != x.has_login_url_: return 0
+ if self.has_login_url_ and self.login_url_ != x.login_url_: return 0
+ return 1
+
+ def IsInitialized(self, debug_strs=None):
+ initialized = 1
+ if (not self.has_login_url_):
+ initialized = 0
+ if debug_strs is not None:
+ debug_strs.append('Required field: login_url not set.')
+ return initialized
+
+ def ByteSize(self):
+ n = 0
+ n += self.lengthString(len(self.login_url_))
+ return n + 1
+
+ def Clear(self):
+ self.clear_login_url()
+
+ def OutputUnchecked(self, out):
+ out.putVarInt32(10)
+ out.putPrefixedString(self.login_url_)
+
+ def TryMerge(self, d):
+ while d.avail() > 0:
+ tt = d.getVarInt32()
+ if tt == 10:
+ self.set_login_url(d.getPrefixedString())
+ continue
+ if (tt == 0): raise ProtocolBuffer.ProtocolBufferDecodeError
+ d.skipData(tt)
+
+
+ def __str__(self, prefix="", printElemNumber=0):
+ res=""
+ if self.has_login_url_: res+=prefix+("login_url: %s\n" % self.DebugFormatString(self.login_url_))
+ return res
+
+
+ def _BuildTagLookupTable(sparse, maxtag, default=None):
+ return tuple([sparse.get(i, default) for i in xrange(0, 1+maxtag)])
+
+ klogin_url = 1
+
+ _TEXT = _BuildTagLookupTable({
+ 0: "ErrorCode",
+ 1: "login_url",
+ }, 1)
+
+ _TYPES = _BuildTagLookupTable({
+ 0: ProtocolBuffer.Encoder.NUMERIC,
+ 1: ProtocolBuffer.Encoder.STRING,
+ }, 1, ProtocolBuffer.Encoder.MAX_TYPE)
+
+ _STYLE = """"""
+ _STYLE_CONTENT_TYPE = """"""
+class CreateLogoutURLRequest(ProtocolBuffer.ProtocolMessage):
+ has_destination_url_ = 0
+ destination_url_ = ""
+ has_auth_domain_ = 0
+ auth_domain_ = ""
+
+ def __init__(self, contents=None):
+ if contents is not None: self.MergeFromString(contents)
+
+ def destination_url(self): return self.destination_url_
+
+ def set_destination_url(self, x):
+ self.has_destination_url_ = 1
+ self.destination_url_ = x
+
+ def clear_destination_url(self):
+ if self.has_destination_url_:
+ self.has_destination_url_ = 0
+ self.destination_url_ = ""
+
+ def has_destination_url(self): return self.has_destination_url_
+
+ def auth_domain(self): return self.auth_domain_
+
+ def set_auth_domain(self, x):
+ self.has_auth_domain_ = 1
+ self.auth_domain_ = x
+
+ def clear_auth_domain(self):
+ if self.has_auth_domain_:
+ self.has_auth_domain_ = 0
+ self.auth_domain_ = ""
+
+ def has_auth_domain(self): return self.has_auth_domain_
+
+
+ def MergeFrom(self, x):
+ assert x is not self
+ if (x.has_destination_url()): self.set_destination_url(x.destination_url())
+ if (x.has_auth_domain()): self.set_auth_domain(x.auth_domain())
+
+ def Equals(self, x):
+ if x is self: return 1
+ if self.has_destination_url_ != x.has_destination_url_: return 0
+ if self.has_destination_url_ and self.destination_url_ != x.destination_url_: return 0
+ if self.has_auth_domain_ != x.has_auth_domain_: return 0
+ if self.has_auth_domain_ and self.auth_domain_ != x.auth_domain_: return 0
+ return 1
+
+ def IsInitialized(self, debug_strs=None):
+ initialized = 1
+ if (not self.has_destination_url_):
+ initialized = 0
+ if debug_strs is not None:
+ debug_strs.append('Required field: destination_url not set.')
+ return initialized
+
+ def ByteSize(self):
+ n = 0
+ n += self.lengthString(len(self.destination_url_))
+ if (self.has_auth_domain_): n += 1 + self.lengthString(len(self.auth_domain_))
+ return n + 1
+
+ def Clear(self):
+ self.clear_destination_url()
+ self.clear_auth_domain()
+
+ def OutputUnchecked(self, out):
+ out.putVarInt32(10)
+ out.putPrefixedString(self.destination_url_)
+ if (self.has_auth_domain_):
+ out.putVarInt32(18)
+ out.putPrefixedString(self.auth_domain_)
+
+ def TryMerge(self, d):
+ while d.avail() > 0:
+ tt = d.getVarInt32()
+ if tt == 10:
+ self.set_destination_url(d.getPrefixedString())
+ continue
+ if tt == 18:
+ self.set_auth_domain(d.getPrefixedString())
+ continue
+ if (tt == 0): raise ProtocolBuffer.ProtocolBufferDecodeError
+ d.skipData(tt)
+
+
+ def __str__(self, prefix="", printElemNumber=0):
+ res=""
+ if self.has_destination_url_: res+=prefix+("destination_url: %s\n" % self.DebugFormatString(self.destination_url_))
+ if self.has_auth_domain_: res+=prefix+("auth_domain: %s\n" % self.DebugFormatString(self.auth_domain_))
+ return res
+
+
+ def _BuildTagLookupTable(sparse, maxtag, default=None):
+ return tuple([sparse.get(i, default) for i in xrange(0, 1+maxtag)])
+
+ kdestination_url = 1
+ kauth_domain = 2
+
+ _TEXT = _BuildTagLookupTable({
+ 0: "ErrorCode",
+ 1: "destination_url",
+ 2: "auth_domain",
+ }, 2)
+
+ _TYPES = _BuildTagLookupTable({
+ 0: ProtocolBuffer.Encoder.NUMERIC,
+ 1: ProtocolBuffer.Encoder.STRING,
+ 2: ProtocolBuffer.Encoder.STRING,
+ }, 2, ProtocolBuffer.Encoder.MAX_TYPE)
+
+ _STYLE = """"""
+ _STYLE_CONTENT_TYPE = """"""
+class CreateLogoutURLResponse(ProtocolBuffer.ProtocolMessage):
+ has_logout_url_ = 0
+ logout_url_ = ""
+
+ def __init__(self, contents=None):
+ if contents is not None: self.MergeFromString(contents)
+
+ def logout_url(self): return self.logout_url_
+
+ def set_logout_url(self, x):
+ self.has_logout_url_ = 1
+ self.logout_url_ = x
+
+ def clear_logout_url(self):
+ if self.has_logout_url_:
+ self.has_logout_url_ = 0
+ self.logout_url_ = ""
+
+ def has_logout_url(self): return self.has_logout_url_
+
+
+ def MergeFrom(self, x):
+ assert x is not self
+ if (x.has_logout_url()): self.set_logout_url(x.logout_url())
+
+ def Equals(self, x):
+ if x is self: return 1
+ if self.has_logout_url_ != x.has_logout_url_: return 0
+ if self.has_logout_url_ and self.logout_url_ != x.logout_url_: return 0
+ return 1
+
+ def IsInitialized(self, debug_strs=None):
+ initialized = 1
+ if (not self.has_logout_url_):
+ initialized = 0
+ if debug_strs is not None:
+ debug_strs.append('Required field: logout_url not set.')
+ return initialized
+
+ def ByteSize(self):
+ n = 0
+ n += self.lengthString(len(self.logout_url_))
+ return n + 1
+
+ def Clear(self):
+ self.clear_logout_url()
+
+ def OutputUnchecked(self, out):
+ out.putVarInt32(10)
+ out.putPrefixedString(self.logout_url_)
+
+ def TryMerge(self, d):
+ while d.avail() > 0:
+ tt = d.getVarInt32()
+ if tt == 10:
+ self.set_logout_url(d.getPrefixedString())
+ continue
+ if (tt == 0): raise ProtocolBuffer.ProtocolBufferDecodeError
+ d.skipData(tt)
+
+
+ def __str__(self, prefix="", printElemNumber=0):
+ res=""
+ if self.has_logout_url_: res+=prefix+("logout_url: %s\n" % self.DebugFormatString(self.logout_url_))
+ return res
+
+
+ def _BuildTagLookupTable(sparse, maxtag, default=None):
+ return tuple([sparse.get(i, default) for i in xrange(0, 1+maxtag)])
+
+ klogout_url = 1
+
+ _TEXT = _BuildTagLookupTable({
+ 0: "ErrorCode",
+ 1: "logout_url",
+ }, 1)
+
+ _TYPES = _BuildTagLookupTable({
+ 0: ProtocolBuffer.Encoder.NUMERIC,
+ 1: ProtocolBuffer.Encoder.STRING,
+ }, 1, ProtocolBuffer.Encoder.MAX_TYPE)
+
+ _STYLE = """"""
+ _STYLE_CONTENT_TYPE = """"""
+
+__all__ = ['UserServiceError','CreateLoginURLRequest','CreateLoginURLResponse','CreateLogoutURLRequest','CreateLogoutURLResponse']
diff --git a/google_appengine/google/appengine/api/user_service_pb.pyc b/google_appengine/google/appengine/api/user_service_pb.pyc
new file mode 100644
index 0000000..2d478ee
--- /dev/null
+++ b/google_appengine/google/appengine/api/user_service_pb.pyc
Binary files differ
diff --git a/google_appengine/google/appengine/api/user_service_stub.py b/google_appengine/google/appengine/api/user_service_stub.py
new file mode 100755
index 0000000..d1542e1
--- /dev/null
+++ b/google_appengine/google/appengine/api/user_service_stub.py
@@ -0,0 +1,106 @@
+#!/usr/bin/env python
+#
+# Copyright 2007 Google Inc.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
+
+"""Trivial implementation of the UserService."""
+
+
+import os
+import urllib
+import urlparse
+from google.appengine.api import apiproxy_stub
+from google.appengine.api import user_service_pb
+
+
+_DEFAULT_LOGIN_URL = 'https://www.google.com/accounts/Login?continue=%s'
+_DEFAULT_LOGOUT_URL = 'https://www.google.com/accounts/Logout?continue=%s'
+
+
+class UserServiceStub(apiproxy_stub.APIProxyStub):
+ """Trivial implementation of the UserService."""
+
+ def __init__(self,
+ login_url=_DEFAULT_LOGIN_URL,
+ logout_url=_DEFAULT_LOGOUT_URL,
+ service_name='user'):
+ """Initializer.
+
+ Args:
+ login_url: String containing the URL to use for logging in.
+ logout_url: String containing the URL to use for logging out.
+ service_name: Service name expected for all calls.
+
+ Note: Both the login_url and logout_url arguments must contain one format
+ parameter, which will be replaced with the continuation URL where the user
+ should be redirected after log-in or log-out has been completed.
+ """
+ super(UserServiceStub, self).__init__(service_name)
+ self.__num_requests = 0
+ self._login_url = login_url
+ self._logout_url = logout_url
+
+ os.environ['AUTH_DOMAIN'] = 'gmail.com'
+
+ def num_requests(self):
+ return self.__num_requests
+
+ def _Dynamic_CreateLoginURL(self, request, response):
+ """Trivial implementation of UserService.CreateLoginURL().
+
+ Args:
+      request: a CreateLoginURLRequest carrying the post-login destination URL
+      response: a CreateLoginURLResponse that receives the login URL
+ """
+ self.__num_requests += 1
+ response.set_login_url(
+ self._login_url %
+ urllib.quote(self._AddHostToContinueURL(request.destination_url())))
+
+ def _Dynamic_CreateLogoutURL(self, request, response):
+ """Trivial implementation of UserService.CreateLogoutURL().
+
+ Args:
+      request: a CreateLogoutURLRequest carrying the post-logout destination URL
+      response: a CreateLogoutURLResponse that receives the logout URL
+ """
+ self.__num_requests += 1
+ response.set_logout_url(
+ self._logout_url %
+ urllib.quote(self._AddHostToContinueURL(request.destination_url())))
+
+ def _AddHostToContinueURL(self, continue_url):
+ """Adds the request host to the continue url if no host is specified.
+
+ Args:
+ continue_url: the URL which may or may not have a host specified
+
+ Returns:
+ string
+ """
+ (protocol, host, path, parameters, query, fragment) = urlparse.urlparse(continue_url, 'http')
+
+ if host:
+ return continue_url
+
+ host = os.environ['SERVER_NAME']
+ if os.environ['SERVER_PORT'] != '80':
+ host = host + ":" + os.environ['SERVER_PORT']
+
+ if path == '':
+ path = '/'
+
+ return urlparse.urlunparse(
+ (protocol, host, path, parameters, query, fragment))
diff --git a/google_appengine/google/appengine/api/user_service_stub.pyc b/google_appengine/google/appengine/api/user_service_stub.pyc
new file mode 100644
index 0000000..e5083cd
--- /dev/null
+++ b/google_appengine/google/appengine/api/user_service_stub.pyc
Binary files differ
diff --git a/google_appengine/google/appengine/api/users.py b/google_appengine/google/appengine/api/users.py
new file mode 100755
index 0000000..3577510
--- /dev/null
+++ b/google_appengine/google/appengine/api/users.py
@@ -0,0 +1,230 @@
+#!/usr/bin/env python
+#
+# Copyright 2007 Google Inc.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
+
+"""Python datastore class User to be used as a datastore data type.
+
+Classes defined here:
+ User: object representing a user.
+ Error: base exception type
+ UserNotFoundError: UserService exception
+ RedirectTooLongError: UserService exception
+ NotAllowedError: UserService exception
+"""
+
+
+
+
+
+
+import os
+from google.appengine.api import apiproxy_stub_map
+from google.appengine.api import user_service_pb
+from google.appengine.runtime import apiproxy_errors
+
+
+class Error(Exception):
+ """Base User error type."""
+
+
+class UserNotFoundError(Error):
+ """Raised by User.__init__() when there's no email argument and no user is
+ logged in."""
+
+
+class RedirectTooLongError(Error):
+ """Raised by UserService calls if the generated redirect URL was too long.
+ """
+
+
+class NotAllowedError(Error):
+ """Raised by UserService calls if the requested redirect URL is not allowed.
+ """
+
+
+class User(object):
+ """A user.
+
+ We provide the email address, nickname, auth domain, and id for a user.
+
+ A nickname is a human-readable string which uniquely identifies a Google
+ user, akin to a username. It will be an email address for some users, but
+ not all.
+ """
+
+
+ __user_id = None
+
+ def __init__(self, email=None, _auth_domain=None, _user_id=None):
+ """Constructor.
+
+ Args:
+ email: An optional string of the user's email address. It defaults to
+ the current user's email address.
+
+ Raises:
+ UserNotFoundError: Raised if the user is not logged in and the email
+ argument is empty.
+ """
+ if _auth_domain is None:
+ _auth_domain = os.environ.get('AUTH_DOMAIN')
+ else:
+ assert email is not None
+
+ assert _auth_domain
+
+ if email is None:
+ assert 'USER_EMAIL' in os.environ
+ email = os.environ['USER_EMAIL']
+ if _user_id is None and 'USER_ID' in os.environ:
+ _user_id = os.environ['USER_ID']
+
+ if not email:
+ raise UserNotFoundError
+
+ self.__email = email
+ self.__auth_domain = _auth_domain
+ self.__user_id = _user_id or None
+
+ def nickname(self):
+ """Return this user's nickname.
+
+ The nickname will be a unique, human readable identifier for this user
+ with respect to this application. It will be an email address for some
+ users, but not all.
+ """
+ if (self.__email and self.__auth_domain and
+ self.__email.endswith('@' + self.__auth_domain)):
+ suffix_len = len(self.__auth_domain) + 1
+ return self.__email[:-suffix_len]
+ else:
+ return self.__email
+
+ def email(self):
+ """Return this user's email address."""
+ return self.__email
+
+ def user_id(self):
+ """Return either a permanent unique identifying string or None.
+
+    If the email address was set explicitly, this will return None.
+ """
+ return self.__user_id
+
+ def auth_domain(self):
+ """Return this user's auth domain."""
+ return self.__auth_domain
+
+ def __unicode__(self):
+ return unicode(self.nickname())
+
+ def __str__(self):
+ return str(self.nickname())
+
+ def __repr__(self):
+ if self.__user_id:
+ return "users.User(email='%s',_user_id='%s')" % (self.email(),
+ self.user_id())
+ else:
+ return "users.User(email='%s')" % self.email()
+
+ def __hash__(self):
+ return hash((self.__email, self.__auth_domain))
+
+ def __cmp__(self, other):
+ if not isinstance(other, User):
+ return NotImplemented
+ return cmp((self.__email, self.__auth_domain),
+ (other.__email, other.__auth_domain))
+
+
+def create_login_url(dest_url):
+ """Computes the login URL for this request and specified destination URL.
+
+ Args:
+ dest_url: String that is the desired final destination URL for the user
+ once login is complete. If 'dest_url' does not have a host
+ specified, we will use the host from the current request.
+
+ Returns:
+ string
+ """
+ req = user_service_pb.CreateLoginURLRequest()
+ resp = user_service_pb.CreateLoginURLResponse()
+ req.set_destination_url(dest_url)
+ try:
+ apiproxy_stub_map.MakeSyncCall('user', 'CreateLoginURL', req, resp)
+ except apiproxy_errors.ApplicationError, e:
+ if (e.application_error ==
+ user_service_pb.UserServiceError.REDIRECT_URL_TOO_LONG):
+ raise RedirectTooLongError
+ elif (e.application_error ==
+ user_service_pb.UserServiceError.NOT_ALLOWED):
+ raise NotAllowedError
+ else:
+ raise e
+ return resp.login_url()
+
+CreateLoginURL = create_login_url
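+
+# Illustrative usage (assuming a webapp RequestHandler as 'self'):
+#
+#   if get_current_user() is None:
+#     self.redirect(create_login_url(self.request.uri))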
+
+
+def create_logout_url(dest_url):
+ """Computes the logout URL for this request and specified destination URL.
+
+ Args:
+ dest_url: String that is the desired final destination URL for the user
+ once logout is complete. If 'dest_url' does not have a host
+ specified, we will use the host from the current request.
+
+ Returns:
+ string
+ """
+ req = user_service_pb.CreateLogoutURLRequest()
+ resp = user_service_pb.CreateLogoutURLResponse()
+ req.set_destination_url(dest_url)
+ try:
+ apiproxy_stub_map.MakeSyncCall('user', 'CreateLogoutURL', req, resp)
+ except apiproxy_errors.ApplicationError, e:
+ if (e.application_error ==
+ user_service_pb.UserServiceError.REDIRECT_URL_TOO_LONG):
+ raise RedirectTooLongError
+ else:
+ raise e
+ return resp.logout_url()
+
+CreateLogoutURL = create_logout_url
+
+
+def get_current_user():
+ try:
+ return User()
+ except UserNotFoundError:
+ return None
+
+GetCurrentUser = get_current_user
+
+
+def is_current_user_admin():
+ """Return true if the user making this request is an admin for this
+ application, false otherwise.
+
+ We specifically make this a separate function, and not a member function of
+ the User class, because admin status is not persisted in the datastore. It
+ only exists for the user making this request right now.
+ """
+ return (os.environ.get('USER_IS_ADMIN', '0')) == '1'
+
+IsCurrentUserAdmin = is_current_user_admin
diff --git a/google_appengine/google/appengine/api/users.pyc b/google_appengine/google/appengine/api/users.pyc
new file mode 100644
index 0000000..365ddb8
--- /dev/null
+++ b/google_appengine/google/appengine/api/users.pyc
Binary files differ
diff --git a/google_appengine/google/appengine/api/validation.py b/google_appengine/google/appengine/api/validation.py
new file mode 100755
index 0000000..00833e6
--- /dev/null
+++ b/google_appengine/google/appengine/api/validation.py
@@ -0,0 +1,928 @@
+#!/usr/bin/env python
+#
+# Copyright 2007 Google Inc.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
+
+"""Validation tools for generic object structures.
+
+This library is used for defining classes with constrained attributes.
+Attributes are defined on the class which contains them using validators.
+Although validators can be defined by any client of this library, a number
+of standard validators are provided here.
+
+Validators can be any callable that takes a single parameter which checks
+the new value before it is assigned to the attribute. Validators are
+permitted to modify a received value so that it is appropriate for the
+attribute definition. For example, using int as a validator will cast
+a correctly formatted string to a number, or raise an exception if it
+cannot. This is not recommended, however; the correct way to use a
+validator that ensures the correct type is to use the Type validator.
+
+This validation library is mainly intended for use with the YAML object
+builder. See yaml_object.py.
+"""
+
+
+
+
+
+import re
+
+import google
+import yaml
+
+
+class Error(Exception):
+ """Base class for all package errors."""
+
+
+class AttributeDefinitionError(Error):
+ """An error occurred in the definition of class attributes."""
+
+
+class ValidationError(Error):
+ """Base class for raising exceptions during validation."""
+
+ def __init__(self, message, cause=None):
+ """Initialize exception."""
+ if hasattr(cause, 'args') and cause.args:
+ Error.__init__(self, message, *cause.args)
+ else:
+ Error.__init__(self, message)
+ self.message = message
+ self.cause = cause
+
+ def __str__(self):
+ return str(self.message)
+
+
+class MissingAttribute(ValidationError):
+ """Raised when a required attribute is missing from object."""
+
+
+def AsValidator(validator):
+ """Wrap various types as instances of a validator.
+
+ Used to allow shorthand for common validator types. It
+ converts the following types to the following Validators.
+
+ strings -> Regex
+ type -> Type
+ collection -> Options
+  Validator -> Itself!
+
+ Args:
+ validator: Object to wrap in a validator.
+
+ Returns:
+ Validator instance that wraps the given value.
+
+ Raises:
+ AttributeDefinitionError if validator is not one of the above described
+ types.
+ """
+ if isinstance(validator, (str, unicode)):
+ return Regex(validator, type(validator))
+ if isinstance(validator, type):
+ return Type(validator)
+ if isinstance(validator, (list, tuple, set)):
+ return Options(*tuple(validator))
+ if isinstance(validator, Validator):
+ return validator
+ else:
+ raise AttributeDefinitionError('%s is not a valid validator' %
+ str(validator))
+
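+# For example (illustrative): AsValidator(r'a+') yields a Regex validator,
+# AsValidator(int) a Type validator, AsValidator(['a', 'b']) an Options
+# validator, and an existing Validator instance is returned unchanged.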
+
+class Validated(object):
+ """Base class for other classes that require validation.
+
+  A class which intends to use validated fields should be derived from
+ this class. Each class should define an 'ATTRIBUTES' class variable which
+ should be a map from attribute name to its validator. For example:
+
+ class Story(Validated):
+ ATTRIBUTES = {'title': Type(str),
+ 'authors': Repeated(Type(str)),
+ 'isbn': Optional(Type(str)),
+ 'pages': Type(int),
+ }
+
+ Attributes that are not listed under ATTRIBUTES work like normal and are
+ not validated upon assignment.
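+
+  A usage sketch (the values below are hypothetical):
+
+    story = Story(title='Example', authors=['A. Author'], pages=4)
+    story.CheckInitialized()  # Raises if a required attribute is unset.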
+ """
+
+ ATTRIBUTES = None
+
+ def __init__(self, **attributes):
+ """Constructor for Validated classes.
+
+ This constructor can optionally assign values to the class via its
+ keyword arguments.
+
+ Raises:
+      AttributeDefinitionError when class instance is missing its ATTRIBUTES
+      definition or when ATTRIBUTES is of the wrong type.
+ """
+ if not isinstance(self.ATTRIBUTES, dict):
+ raise AttributeDefinitionError(
+          'The class %s does not define an ATTRIBUTES variable.'
+ % self.__class__)
+
+ for key in self.ATTRIBUTES.keys():
+ object.__setattr__(self, key, self.GetAttribute(key).default)
+
+ self.Set(**attributes)
+
+ @classmethod
+  def GetAttribute(cls, key):
+ """Safely get the underlying attribute definition as a Validator.
+
+ Args:
+ key: Name of attribute to get.
+
+ Returns:
+ Validator associated with key or attribute value wrapped in a
+ validator.
+ """
+    return AsValidator(cls.ATTRIBUTES[key])
+
+ def Set(self, **attributes):
+ """Set multiple values on Validated instance.
+
+    This method can only be used to assign validated attributes.
+
+ Args:
+ attributes: Attributes to set on object.
+
+ Raises:
+ ValidationError when no validated attribute exists on class.
+ """
+ for key, value in attributes.iteritems():
+ if key not in self.ATTRIBUTES:
+ raise ValidationError('Class \'%s\' does not have attribute \'%s\''
+ % (self.__class__, key))
+ setattr(self, key, value)
+
+ def CheckInitialized(self):
+ """Checks that all required fields are initialized.
+
+ Since an instance of Validated starts off in an uninitialized state, it
+ is sometimes necessary to check that it has been fully initialized.
+ The main problem this solves is how to validate that an instance has
+ all of its required fields set. By default, Validator classes do not
+ allow None, but all attributes are initialized to None when instantiated.
+
+ Raises:
+ Exception relevant to the kind of validation. The type of the exception
+ is determined by the validator. Typically this will be ValueError or
+ TypeError.
+ """
+ for key in self.ATTRIBUTES.iterkeys():
+ try:
+ self.GetAttribute(key)(getattr(self, key))
+ except MissingAttribute, e:
+ e.message = "Missing required value '%s'." % key
+ raise e
+
+
+ def __setattr__(self, key, value):
+ """Set attribute.
+
+ Setting a value on an object of this type will only work for attributes
+ defined in ATTRIBUTES. To make other assignments possible it is necessary
+ to override this method in subclasses.
+
+ It is important that assignment is restricted in this way because
+ this validation is used as validation for parsing. Absent this restriction
+ it would be possible for method names to be overwritten.
+
+ Args:
+ key: Name of attribute to set.
+      value: Attribute's new value.
+
+    Raises:
+      ValidationError when trying to assign to an attribute that does not
+      exist.
+ """
+
+ if key in self.ATTRIBUTES:
+ value = self.GetAttribute(key)(value)
+ object.__setattr__(self, key, value)
+ else:
+ raise ValidationError('Class \'%s\' does not have attribute \'%s\''
+ % (self.__class__, key))
+
+ def __str__(self):
+ """Formatted view of validated object and nested values."""
+ return repr(self)
+
+ def __repr__(self):
+ """Formatted view of validated object and nested values."""
+ values = [(attr, getattr(self, attr)) for attr in self.ATTRIBUTES]
+ dent = ' '
+ value_list = []
+ for attr, value in values:
+ value_list.append('\n%s%s=%s' % (dent, attr, value))
+
+ return "<%s %s\n%s>" % (self.__class__.__name__, ' '.join(value_list), dent)
+
+ def __eq__(self, other):
+ """Equality operator.
+
+ Comparison is done by comparing all attribute values to those in the other
+ instance. Objects which are not of the same type are not equal.
+
+ Args:
+ other: Other object to compare against.
+
+ Returns:
+ True if validated objects are equal, else False.
+ """
+ if type(self) != type(other):
+ return False
+ for key in self.ATTRIBUTES.iterkeys():
+ if getattr(self, key) != getattr(other, key):
+ return False
+ return True
+
+ def __ne__(self, other):
+ """Inequality operator."""
+ return not self.__eq__(other)
+
+ def __hash__(self):
+ """Hash function for using Validated objects in sets and maps.
+
+ Hash is done by hashing all keys and values and xor'ing them together.
+
+ Returns:
+ Hash of validated object.
+ """
+ result = 0
+ for key in self.ATTRIBUTES.iterkeys():
+ value = getattr(self, key)
+ if isinstance(value, list):
+ value = tuple(value)
+ result = result ^ hash(key) ^ hash(value)
+ return result
+
+ @staticmethod
+ def _ToValue(validator, value):
+ """Convert any value to simplified collections and basic types.
+
+ Args:
+ validator: An instance of Validator that corresponds with 'value'.
+ May also be 'str' or 'int' if those were used instead of a full
+ Validator.
+ value: Value to convert to simplified collections.
+
+ Returns:
+ The value as a dictionary if it is a Validated object.
+ A list of items converted to simplified collections if value is a list
+ or a tuple.
+ Otherwise, just the value.
+ """
+ if isinstance(value, Validated):
+ return value.ToDict()
+ elif isinstance(value, (list, tuple)):
+ return [Validated._ToValue(validator, item) for item in value]
+ else:
+ if isinstance(validator, Validator):
+ return validator.ToValue(value)
+ return value
+
+ def ToDict(self):
+ """Convert Validated object to a dictionary.
+
+ Recursively traverses all of its elements and converts everything to
+ simplified collections.
+
+ Returns:
+      A dict of all attributes defined in this class's ATTRIBUTES mapped
+      to their values.  This structure is recursive in that Validated objects
+ that are referenced by this object and in lists are also converted to
+ dicts.
+ """
+ result = {}
+ for name, validator in self.ATTRIBUTES.iteritems():
+ value = getattr(self, name)
+ if not(isinstance(validator, Validator) and value == validator.default):
+ result[name] = Validated._ToValue(validator, value)
+ return result
+
+ def ToYAML(self):
+ """Print validated object as simplified YAML.
+
+ Returns:
+ Object as a simplified YAML string compatible with parsing using the
+ SafeLoader.
+ """
+ return yaml.dump(self.ToDict(),
+ default_flow_style=False,
+ Dumper=yaml.SafeDumper)
+
+
+
+class Validator(object):
+ """Validator base class.
+
+ Though any callable can be used as a validator, this class encapsulates the
+ case when a specific validator needs to hold a particular state or
+ configuration.
+
+  To implement a Validator subclass, override the Validate method.
+
+ This class is permitted to change the ultimate value that is set to the
+ attribute if there is a reasonable way to perform the conversion.
+ """
+
+ expected_type = object
+
+ def __init__(self, default=None):
+ """Constructor.
+
+ Args:
+ default: Default assignment is made during initialization and will
+ not pass through validation.
+ """
+ self.default = default
+
+ def __call__(self, value):
+ """Main interface to validator is call mechanism."""
+ return self.Validate(value)
+
+ def Validate(self, value):
+ """Override this method to customize sub-class behavior.
+
+ Args:
+ value: Value to validate.
+
+ Returns:
+ Value if value is valid, or a valid representation of value.
+ """
+ return value
+
+ def ToValue(self, value):
+ """Convert 'value' to a simplified collection or basic type.
+
+ Subclasses of Validator should override this method when the dumped
+ representation of 'value' is not simply <type>(value) (e.g. a regex).
+
+ Args:
+ value: An object of the same type that was returned from Validate().
+
+ Returns:
+ An instance of a builtin type (e.g. int, str, dict, etc). By default
+ it returns 'value' unmodified.
+ """
+ return value
+
+
+class Type(Validator):
+ """Verifies property is of expected type.
+
+ Can optionally convert value if it is not of the expected type.
+
+ It is possible to specify a required field of a specific type in shorthand
+  by merely providing the type.  This shorthand is slightly less efficient
+  than providing an explicit Type validator, but the difference is not
+  significant unless parsing a large amount of information:
+
+ class Person(Validated):
+ ATTRIBUTES = {'name': unicode,
+ 'age': int,
+ }
+
+ However, in most instances it is best to use the type constants:
+
+ class Person(Validated):
+    ATTRIBUTES = {'name': TYPE_UNICODE,
+                  'age': TYPE_INT,
+ }
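+
+  A conversion sketch (illustrative):
+
+    Type(int)('5')     # Returns 5; well-formed strings are converted.
+    Type(int)('five')  # Raises ValidationError: int('five') fails.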
+ """
+
+ def __init__(self, expected_type, convert=True, default=None):
+ """Initialize Type validator.
+
+ Args:
+ expected_type: Type that attribute should validate against.
+ convert: Cause conversion if value is not the right type.
+ Conversion is done by calling the constructor of the type
+ with the value as its first parameter.
+ """
+ super(Type, self).__init__(default)
+ self.expected_type = expected_type
+ self.convert = convert
+
+ def Validate(self, value):
+ """Validate that value is correct type.
+
+ Args:
+ value: Value to validate.
+
+ Returns:
+ None if value is None, value if value is of correct type, converted
+ value if the validator is configured to convert.
+
+ Raises:
+ ValidationError if value is not of the right type and validator
+ is not configured to convert.
+ """
+ if not isinstance(value, self.expected_type):
+ if value is not None and self.convert:
+ try:
+ return self.expected_type(value)
+ except ValueError, e:
+ raise ValidationError('Type conversion failed for value \'%s\'.'
+ % value,
+ e)
+ except TypeError, e:
+ raise ValidationError('Expected value of type %s, but got \'%s\'.'
+ % (self.expected_type, value))
+ else:
+ raise MissingAttribute('Missing value is required.')
+ else:
+ return value
+
+
+TYPE_BOOL = Type(bool)
+TYPE_INT = Type(int)
+TYPE_LONG = Type(long)
+TYPE_STR = Type(str)
+TYPE_UNICODE = Type(unicode)
+TYPE_FLOAT = Type(float)
+
+
+class Options(Validator):
+ """Limit field based on pre-determined values.
+
+ Options are used to make sure an enumerated set of values are the only
+ one permitted for assignment. It is possible to define aliases which
+ map multiple string values to a single original. An example of usage:
+
+    class ZooAnimal(Validated):
+ ATTRIBUTES = {
+ 'name': str,
+ 'kind': Options('platypus', # No aliases
+ ('rhinoceros', ['rhino']), # One alias
+ ('canine', ('dog', 'puppy')), # Two aliases
+                      ),
+    }
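+
+  A usage sketch (hypothetical values):
+
+    ZooAnimal(name='Rex', kind='dog').kind == 'canine'  # Alias maps back.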
+ """
+
+ def __init__(self, *options, **kw):
+ """Initialize options.
+
+ Args:
+ options: List of allowed values.
+ """
+ if 'default' in kw:
+ default = kw['default']
+ else:
+ default = None
+
+ alias_map = {}
+ def AddAlias(alias, original):
+ """Set new alias on alias_map.
+
+ Raises:
+        AttributeDefinitionError when option already exists or if alias is
+        not of type str.
+ """
+ if not isinstance(alias, str):
+ raise AttributeDefinitionError(
+ 'All option values must be of type str.')
+ elif alias in alias_map:
+ raise AttributeDefinitionError(
+ "Option '%s' already defined for options property." % alias)
+ alias_map[alias] = original
+
+ for option in options:
+ if isinstance(option, str):
+ AddAlias(option, option)
+
+ elif isinstance(option, (list, tuple)):
+ if len(option) != 2:
+ raise AttributeDefinitionError("Alias is defined as a list of tuple "
+ "with two items. The first is the "
+ "original option, while the second "
+ "is a list or tuple of str aliases.\n"
+ "\n Example:\n"
+ " ('original', ('alias1', "
+ "'alias2'")
+ original, aliases = option
+ AddAlias(original, original)
+ if not isinstance(aliases, (list, tuple)):
+ raise AttributeDefinitionError('Alias lists must be a list or tuple')
+
+ for alias in aliases:
+ AddAlias(alias, original)
+
+ else:
+ raise AttributeDefinitionError("All options must be of type str "
+ "or of the form (str, [str...]).")
+ super(Options, self).__init__(default)
+ self.options = alias_map
+
+ def Validate(self, value):
+ """Validate options.
+
+ Returns:
+ Original value for provided alias.
+
+ Raises:
+ ValidationError when value is not one of predefined values.
+ """
+ if value is None:
+ raise ValidationError('Value for options field must not be None.')
+ value = str(value)
+ if value not in self.options:
+ raise ValidationError('Value \'%s\' not in %s.'
+ % (value, self.options))
+ return self.options[value]
+
+
+class Optional(Validator):
+ """Definition of optional attributes.
+
+ Optional values are attributes which can be set to None or left
+ unset. All values in a basic Validated class are set to None
+ at initialization. Failure to assign to non-optional values
+ will result in a validation error when calling CheckInitialized.
+ """
+
+ def __init__(self, validator, default=None):
+ """Initializer.
+
+ This constructor will make a few guesses about the value passed in
+ as the validator:
+
+ - If the validator argument is a type, it automatically creates a Type
+ validator around it.
+
+ - If the validator argument is a list or tuple, it automatically
+ creates an Options validator around it.
+
+ Args:
+ validator: Optional validation condition.
+
+ Raises:
+ AttributeDefinitionError if validator is not callable.
+ """
+ self.validator = AsValidator(validator)
+ self.expected_type = self.validator.expected_type
+ self.default = default
+
+ def Validate(self, value):
+ """Optionally require a value.
+
+    Normal validators do not accept None.  This validator accepts None on
+    behalf of the contained validator.
+
+ Args:
+ value: Value to be validated as optional.
+
+ Returns:
+ None if value is None, else results of contained validation.
+ """
+ if value is None:
+ return None
+ return self.validator(value)
+
+
+class Regex(Validator):
+ """Regular expression validator.
+
+ Regular expression validator always converts value to string. Note that
+ matches must be exact. Partial matches will not validate. For example:
+
+ class ClassDescr(Validated):
+ ATTRIBUTES = { 'name': Regex(r'[a-zA-Z_][a-zA-Z_0-9]*'),
+ 'parent': Type(type),
+ }
+
+ Alternatively, any attribute that is defined as a string is automatically
+ interpreted to be of type Regex. It is possible to specify unicode regex
+ strings as well. This approach is slightly less efficient, but usually
+ is not significant unless parsing large amounts of data:
+
+ class ClassDescr(Validated):
+ ATTRIBUTES = { 'name': r'[a-zA-Z_][a-zA-Z_0-9]*',
+ 'parent': Type(type),
+ }
+
+ # This will raise a ValidationError exception.
+    ClassDescr(name='AName with space', parent=AnotherClass)
+ """
+
+ def __init__(self, regex, string_type=unicode, default=None):
+ """Initialized regex validator.
+
+ Args:
+ regex: Regular expression string to use for comparison.
+
+ Raises:
+ AttributeDefinitionError if string_type is not a kind of string.
+ """
+ super(Regex, self).__init__(default)
+ if (not issubclass(string_type, basestring) or
+ string_type is basestring):
+ raise AttributeDefinitionError(
+ 'Regex fields must be a string type not %s.' % str(string_type))
+ if isinstance(regex, basestring):
+ self.re = re.compile('^%s$' % regex)
+ else:
+ raise AttributeDefinitionError(
+ 'Regular expression must be string. Found %s.' % str(regex))
+
+ self.expected_type = string_type
+
+ def Validate(self, value):
+ """Does validation of a string against a regular expression.
+
+ Args:
+ value: String to match against regular expression.
+
+ Raises:
+ ValidationError when value does not match regular expression or
+ when value does not match provided string type.
+ """
+ if issubclass(self.expected_type, str):
+ cast_value = TYPE_STR(value)
+ else:
+ cast_value = TYPE_UNICODE(value)
+
+ if self.re.match(cast_value) is None:
+ raise ValidationError('Value \'%s\' does not match expression \'%s\''
+ % (value, self.re.pattern))
+ return cast_value
+
+
+class _RegexStrValue(object):
+ """Simulates the regex object to support recomplation when necessary.
+
+ Used by the RegexStr class to dynamically build and recompile regular
+ expression attributes of a validated object. This object replaces the normal
+ object returned from re.compile which is immutable.
+
+  When the value of this object is a string, that string is simply used as the
+  regular expression when recompilation is needed.  If the state of this object
+  is a list of strings, the strings are joined into a single 'or' expression.
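+
+  For example (illustrative), the list ['ab', 'cd'] is built into the
+  single pattern '(?:ab)|(?:cd)'.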
+ """
+
+ def __init__(self, attribute, value):
+ """Initialize recompilable regex value.
+
+ Args:
+ attribute: Attribute validator associated with this regex value.
+ value: Initial underlying python value for regex string. Either a single
+ regex string or a list of regex strings.
+ """
+ self.__attribute = attribute
+ self.__value = value
+ self.__regex = None
+
+ def __AsString(self, value):
+ """Convert a value to appropriate string.
+
+ Returns:
+ String version of value with all carriage returns and line feeds removed.
+ """
+ if issubclass(self.__attribute.expected_type, str):
+ cast_value = TYPE_STR(value)
+ else:
+ cast_value = TYPE_UNICODE(value)
+
+ cast_value = cast_value.replace('\n', '')
+ cast_value = cast_value.replace('\r', '')
+ return cast_value
+
+ def __BuildRegex(self):
+ """Build regex string from state.
+
+ Returns:
+      String version of regular expression.  Sequence values are constructed
+      as a larger regular expression in which each regex in the list is
+      joined with all the others as a single 'or' expression.
+ """
+ if isinstance(self.__value, list):
+ value_list = self.__value
+ sequence = True
+ else:
+ value_list = [self.__value]
+ sequence = False
+
+ regex_list = []
+ for item in value_list:
+ regex_list.append(self.__AsString(item))
+
+ if sequence:
+ return '|'.join('(?:%s)' % item for item in regex_list)
+ else:
+ return regex_list[0]
+
+ def __Compile(self):
+ """Build regular expression object from state.
+
+ Returns:
+ Compiled regular expression based on internal value.
+ """
+ regex = self.__BuildRegex()
+ try:
+ return re.compile(regex)
+ except re.error, e:
+ raise ValidationError('Value \'%s\' does not compile: %s' % (regex, e), e)
+
+ @property
+ def regex(self):
+ """Compiled regular expression as described by underlying value."""
+ return self.__Compile()
+
+ def match(self, value):
+ """Match against internal regular expression.
+
+    Returns:
+      Match object if value matches the underlying expression, else None.
+ """
+ return re.match(self.__BuildRegex(), value)
+
+ def Validate(self):
+ """Ensure that regex string compiles."""
+ self.__Compile()
+
+ def __str__(self):
+ """Regular expression string as described by underlying value."""
+ return self.__BuildRegex()
+
+ def __eq__(self, other):
+ """Comparison against other regular expression string values."""
+ if isinstance(other, _RegexStrValue):
+ return self.__BuildRegex() == other.__BuildRegex()
+ return str(self) == other
+
+ def __ne__(self, other):
+ """Inequality operator for regular expression string value."""
+ return not self.__eq__(other)
+
+
+class RegexStr(Validator):
+ """Validates that a string can compile as a regex without errors.
+
+ Use this validator when the value of a field should be a regex. That
+ means that the value must be a string that can be compiled by re.compile().
+ The attribute will then be a compiled re object.
+ """
+
+ def __init__(self, string_type=unicode, default=None):
+ """Initialized regex validator.
+
+ Raises:
+ AttributeDefinitionError if string_type is not a kind of string.
+ """
+ if default is not None:
+ default = _RegexStrValue(self, default)
+ re.compile(str(default))
+ super(RegexStr, self).__init__(default)
+ if (not issubclass(string_type, basestring) or
+ string_type is basestring):
+ raise AttributeDefinitionError(
+ 'RegexStr fields must be a string type not %s.' % str(string_type))
+
+ self.expected_type = string_type
+
+ def Validate(self, value):
+ """Validates that the string compiles as a regular expression.
+
+ Because the regular expression might have been expressed as a multiline
+ string, this function also strips newlines out of value.
+
+ Args:
+ value: String to compile as a regular expression.
+
+    Raises:
+      ValidationError when value does not compile as a regular expression or
+      when value does not match the provided string type.
+ """
+ if isinstance(value, _RegexStrValue):
+ return value
+ value = _RegexStrValue(self, value)
+ value.Validate()
+ return value
+
+ def ToValue(self, value):
+ """Returns the RE pattern for this validator."""
+ return str(value)
+
+
+class Range(Validator):
+ """Validates that numbers fall within the correct range.
+
+ In theory this class can be emulated using Options, however error
+ messages generated from that class will not be very intelligible.
+ This class essentially does the same thing, but knows the intended
+ integer range.
+
+ Also, this range class supports floats and other types that implement
+ ordinality.
+
+ The range is inclusive, meaning 3 is considered in the range
+ in Range(1,3).
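+
+  A usage sketch (illustrative):
+
+    Range(1, 3).Validate(3)  # Returns 3; bounds are inclusive.
+    Range(1, 3).Validate(4)  # Raises ValidationError: out of range.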
+ """
+
+ def __init__(self, minimum, maximum, range_type=int, default=None):
+ """Initializer for range.
+
+ Args:
+ minimum: Minimum for attribute.
+ maximum: Maximum for attribute.
+ range_type: Type of field. Defaults to int.
+ """
+ super(Range, self).__init__(default)
+ if not isinstance(minimum, range_type):
+ raise AttributeDefinitionError(
+ 'Minimum value must be of type %s, instead it is %s (%s).' %
+ (str(range_type), str(type(minimum)), str(minimum)))
+ if not isinstance(maximum, range_type):
+ raise AttributeDefinitionError(
+ 'Maximum value must be of type %s, instead it is %s (%s).' %
+ (str(range_type), str(type(maximum)), str(maximum)))
+
+ self.minimum = minimum
+ self.maximum = maximum
+ self.expected_type = range_type
+ self._type_validator = Type(range_type)
+
+ def Validate(self, value):
+ """Validate that value is within range.
+
+ Validates against range-type then checks the range.
+
+ Args:
+ value: Value to validate.
+
+ Raises:
+      ValidationError when value is out of range or when it is not of the
+      correct range type.
+ """
+ cast_value = self._type_validator.Validate(value)
+ if cast_value < self.minimum or cast_value > self.maximum:
+ raise ValidationError('Value \'%s\' is out of range %s - %s'
+ % (str(value),
+ str(self.minimum),
+ str(self.maximum)))
+ return cast_value
+
+
+class Repeated(Validator):
+ """Repeated field validator.
+
+  Indicates that the attribute is expected to be a repeated value, i.e.,
+  a sequence.  This adds additional validation over just Type(list)
+ in that it retains information about what can be stored in the list by
+ use of its constructor field.
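+
+  A usage sketch (illustrative):
+
+    Repeated(TYPE_STR).Validate(['a', 'b'])  # Returns the list unchanged.
+    Repeated(TYPE_STR).Validate('ab')        # Raises ValidationError.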
+ """
+
+ def __init__(self, constructor, default=None):
+ """Initializer for repeated field.
+
+ Args:
+ constructor: Type used for verifying elements of sequence attribute.
+ """
+ super(Repeated, self).__init__(default)
+ self.constructor = constructor
+ self.expected_type = list
+
+ def Validate(self, value):
+ """Do validation of sequence.
+
+ Value must be a list and all elements must be of type 'constructor'.
+
+ Args:
+ value: Value to validate.
+
+ Raises:
+      ValidationError if value is None, not a list, or one of its elements
+      is of the wrong type.
+ """
+ if not isinstance(value, list):
+      raise ValidationError('Repeated fields must be sequences, '
+                            'but found \'%s\'.' % value)
+
+ for item in value:
+ if isinstance(self.constructor, Validator):
+ item = self.constructor.Validate(item)
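+        # Note: the validated item is not written back into the list;
+        # Repeated checks elements but does not convert them in place.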
+ elif not isinstance(item, self.constructor):
+ raise ValidationError('Repeated items must be %s, but found \'%s\'.'
+ % (str(self.constructor), str(item)))
+
+ return value
diff --git a/google_appengine/google/appengine/api/validation.pyc b/google_appengine/google/appengine/api/validation.pyc
new file mode 100644
index 0000000..ccfed3e
--- /dev/null
+++ b/google_appengine/google/appengine/api/validation.pyc
Binary files differ
diff --git a/google_appengine/google/appengine/api/xmpp/__init__.py b/google_appengine/google/appengine/api/xmpp/__init__.py
new file mode 100755
index 0000000..8cc477a
--- /dev/null
+++ b/google_appengine/google/appengine/api/xmpp/__init__.py
@@ -0,0 +1,332 @@
+#!/usr/bin/env python
+#
+# Copyright 2007 Google Inc.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
+
+"""XMPP API.
+
+This module allows App Engine apps to interact with a bot representing that app
+on the Google Talk network.
+
+Functions defined in this module:
+ get_presence: Gets the presence for a JID.
+ send_message: Sends a chat message to any number of JIDs.
+ send_invite: Sends an invitation to chat to a JID.
+
+Classes defined in this module:
+ Message: A class to encapsulate received messages.
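+
+A short usage sketch (the JID below is hypothetical):
+
+  from google.appengine.api import xmpp
+
+  if xmpp.get_presence('user@gmail.com'):
+    xmpp.send_message('user@gmail.com', 'Hello!')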
+"""
+
+
+
+from google.appengine.api import apiproxy_stub_map
+from google.appengine.api.xmpp import xmpp_service_pb
+from google.appengine.runtime import apiproxy_errors
+
+
+NO_ERROR = xmpp_service_pb.XmppMessageResponse.NO_ERROR
+INVALID_JID = xmpp_service_pb.XmppMessageResponse.INVALID_JID
+OTHER_ERROR = xmpp_service_pb.XmppMessageResponse.OTHER_ERROR
+
+
+MESSAGE_TYPE_NONE = ""
+MESSAGE_TYPE_CHAT = "chat"
+MESSAGE_TYPE_ERROR = "error"
+MESSAGE_TYPE_GROUPCHAT = "groupchat"
+MESSAGE_TYPE_HEADLINE = "headline"
+MESSAGE_TYPE_NORMAL = "normal"
+
+_VALID_MESSAGE_TYPES = frozenset([MESSAGE_TYPE_NONE, MESSAGE_TYPE_CHAT,
+ MESSAGE_TYPE_ERROR, MESSAGE_TYPE_GROUPCHAT,
+ MESSAGE_TYPE_HEADLINE, MESSAGE_TYPE_NORMAL])
+
+
+class Error(Exception):
+ """Base error class for this module."""
+
+
+class InvalidJidError(Error):
+ """Error that indicates a request for an invalid JID."""
+
+
+class InvalidTypeError(Error):
+ """Error that indicates a send message request has an invalid type."""
+
+
+class InvalidXmlError(Error):
+ """Error that indicates a send message request has invalid XML."""
+
+
+class NoBodyError(Error):
+ """Error that indicates a send message request has no body."""
+
+
+class InvalidMessageError(Error):
+ """Error that indicates a received message was invalid or incomplete."""
+
+
+def get_presence(jid, from_jid=None):
+ """Gets the presence for a JID.
+
+ Args:
+ jid: The JID of the contact whose presence is requested.
+    from_jid: The optional custom JID to use for sending.  Currently, the
+      default is <appid>@appspot.com; that address may also be passed
+      explicitly.  Custom JIDs can be of the form
+      <anything>@<appid>.appspotchat.com.
+
+ Returns:
+ bool, Whether the user is online.
+
+ Raises:
+ InvalidJidError if any of the JIDs passed are invalid.
+ Error if an unspecified error happens processing the request.
+ """
+ if not jid:
+ raise InvalidJidError()
+
+ request = xmpp_service_pb.PresenceRequest()
+ response = xmpp_service_pb.PresenceResponse()
+
+ request.set_jid(_to_str(jid))
+ if from_jid:
+ request.set_from_jid(_to_str(from_jid))
+
+ try:
+ apiproxy_stub_map.MakeSyncCall("xmpp",
+ "GetPresence",
+ request,
+ response)
+ except apiproxy_errors.ApplicationError, e:
+ if (e.application_error ==
+ xmpp_service_pb.XmppServiceError.INVALID_JID):
+ raise InvalidJidError()
+ else:
+ raise Error()
+
+ return bool(response.is_available())
+
+
+def send_invite(jid, from_jid=None):
+ """Sends an invitation to chat to a JID.
+
+ Args:
+ jid: The JID of the contact to invite.
+    from_jid: The optional custom JID to use for sending.  Currently, the
+      default is <appid>@appspot.com; that address may also be passed
+      explicitly.  Custom JIDs can be of the form
+      <anything>@<appid>.appspotchat.com.
+
+ Raises:
+ InvalidJidError if the JID passed is invalid.
+ Error if an unspecified error happens processing the request.
+ """
+ if not jid:
+ raise InvalidJidError()
+
+ request = xmpp_service_pb.XmppInviteRequest()
+ response = xmpp_service_pb.XmppInviteResponse()
+
+ request.set_jid(_to_str(jid))
+ if from_jid:
+ request.set_from_jid(_to_str(from_jid))
+
+ try:
+ apiproxy_stub_map.MakeSyncCall("xmpp",
+ "SendInvite",
+ request,
+ response)
+ except apiproxy_errors.ApplicationError, e:
+ if (e.application_error ==
+ xmpp_service_pb.XmppServiceError.INVALID_JID):
+ raise InvalidJidError()
+ else:
+ raise Error()
+
+ return
+
+
+def send_message(jids, body, from_jid=None, message_type=MESSAGE_TYPE_CHAT,
+ raw_xml=False):
+ """Sends a chat message to a list of JIDs.
+
+ Args:
+    jids: A list of JIDs to send the message to, or a single JID to send the
+      message to.
+    body: The body of the message.
+    from_jid: The optional custom JID to use for sending.  Currently, the
+      default is <appid>@appspot.com; that address may also be passed
+      explicitly.  Custom JIDs can be of the form
+      <anything>@<appid>.appspotchat.com.
+ message_type: Optional type of the message. Should be one of the types
+ specified in RFC 3921, section 2.1.1. An empty string will result in a
+ message stanza without a type attribute. For convenience, all of the
+ valid types are in the MESSAGE_TYPE_* constants in this file. The
+      default is MESSAGE_TYPE_CHAT. Anything else will raise an exception.
+ raw_xml: Optionally specifies that the body should be interpreted as XML. If
+ this is false, the contents of the body will be escaped and placed inside
+ of a body element inside of the message. If this is true, the contents
+ will be made children of the message.
+
+ Returns:
+ list, A list of statuses, one for each JID, corresponding to the result of
+ sending the message to that JID. Or, if a single JID was passed in,
+ returns the status directly.
+
+ Raises:
+ InvalidJidError if there is no valid JID in the list.
+ InvalidTypeError if the type argument is invalid.
+ InvalidXmlError if the body is malformed XML and raw_xml is True.
+ NoBodyError if there is no body.
+ Error if another error occurs processing the request.
+ """
+ request = xmpp_service_pb.XmppMessageRequest()
+ response = xmpp_service_pb.XmppMessageResponse()
+
+ if not body:
+ raise NoBodyError()
+
+ if not jids:
+ raise InvalidJidError()
+
+  if message_type not in _VALID_MESSAGE_TYPES:
+ raise InvalidTypeError()
+
+ single_jid = False
+ if isinstance(jids, basestring):
+ single_jid = True
+ jids = [jids]
+
+ for jid in jids:
+ if not jid:
+ raise InvalidJidError()
+ request.add_jid(_to_str(jid))
+
+ request.set_body(_to_str(body))
+ request.set_type(_to_str(message_type))
+ request.set_raw_xml(raw_xml)
+ if from_jid:
+ request.set_from_jid(_to_str(from_jid))
+
+ try:
+ apiproxy_stub_map.MakeSyncCall("xmpp",
+ "SendMessage",
+ request,
+ response)
+ except apiproxy_errors.ApplicationError, e:
+ if (e.application_error ==
+ xmpp_service_pb.XmppServiceError.INVALID_JID):
+ raise InvalidJidError()
+ elif (e.application_error ==
+ xmpp_service_pb.XmppServiceError.INVALID_TYPE):
+ raise InvalidTypeError()
+ elif (e.application_error ==
+ xmpp_service_pb.XmppServiceError.INVALID_XML):
+ raise InvalidXmlError()
+ elif (e.application_error ==
+ xmpp_service_pb.XmppServiceError.NO_BODY):
+ raise NoBodyError()
+ raise Error()
+
+ if single_jid:
+ return response.status_list()[0]
+ return response.status_list()
+
+
+class Message(object):
+ """Encapsulates an XMPP message received by the application."""
+
+ def __init__(self, vars):
+ """Constructs a new XMPP Message from an HTTP request.
+
+ Args:
+ vars: A dict-like object to extract message arguments from.
+ """
+ try:
+ self.__sender = vars["from"]
+ self.__to = vars["to"]
+ self.__body = vars["body"]
+ except KeyError, e:
+ raise InvalidMessageError(e[0])
+ self.__command = None
+ self.__arg = None
+
+ @property
+ def sender(self):
+ return self.__sender
+
+ @property
+ def to(self):
+ return self.__to
+
+ @property
+ def body(self):
+ return self.__body
+
+ def __parse_command(self):
+    if self.__arg is not None:
+ return
+
+ body = self.__body
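+    # Treat a leading backslash like a leading slash so that both
+    # '\command arg' and '/command arg' are parsed as commands.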
+ if body.startswith('\\'):
+ body = '/' + body[1:]
+
+ self.__arg = ''
+ if body.startswith('/'):
+ parts = body.split(' ', 1)
+ self.__command = parts[0][1:]
+ if len(parts) > 1:
+ self.__arg = parts[1].strip()
+ else:
+ self.__arg = self.__body.strip()
+
+ @property
+ def command(self):
+ self.__parse_command()
+ return self.__command
+
+ @property
+ def arg(self):
+ self.__parse_command()
+ return self.__arg
+
+ def reply(self, body, message_type=MESSAGE_TYPE_CHAT, raw_xml=False,
+ send_message=send_message):
+ """Convenience function to reply to a message.
+
+ Args:
+      body: str: The body of the message.
+ message_type, raw_xml: As per send_message.
+ send_message: Used for testing.
+
+ Returns:
+ A status code as per send_message.
+
+ Raises:
+ See send_message.
+ """
+ return send_message([self.sender], body, from_jid=self.to,
+ message_type=message_type, raw_xml=raw_xml)
+
+
+def _to_str(value):
+ """Helper function to make sure unicode values converted to utf-8
+
+ Args:
+ value: str or unicode to convert to utf-8.
+
+ Returns:
+    The UTF-8 encoded str of value if it is unicode; otherwise value unchanged.
+ """
+ if isinstance(value, unicode):
+ return value.encode('utf-8')
+ return value
diff --git a/google_appengine/google/appengine/api/xmpp/__init__.pyc b/google_appengine/google/appengine/api/xmpp/__init__.pyc
new file mode 100644
index 0000000..fd06892
--- /dev/null
+++ b/google_appengine/google/appengine/api/xmpp/__init__.pyc
Binary files differ
diff --git a/google_appengine/google/appengine/api/xmpp/xmpp_service_pb.py b/google_appengine/google/appengine/api/xmpp/xmpp_service_pb.py
new file mode 100644
index 0000000..f77e50b
--- /dev/null
+++ b/google_appengine/google/appengine/api/xmpp/xmpp_service_pb.py
@@ -0,0 +1,826 @@
+#!/usr/bin/env python
+#
+# Copyright 2007 Google Inc.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
+
+from google.net.proto import ProtocolBuffer
+import array
+import dummy_thread as thread
+
+__pychecker__ = """maxreturns=0 maxbranches=0 no-callinit
+ unusednames=printElemNumber,debug_strs no-special"""
+
+class XmppServiceError(ProtocolBuffer.ProtocolMessage):
+
+ UNSPECIFIED_ERROR = 1
+ INVALID_JID = 2
+ NO_BODY = 3
+ INVALID_XML = 4
+ INVALID_TYPE = 5
+
+ _ErrorCode_NAMES = {
+ 1: "UNSPECIFIED_ERROR",
+ 2: "INVALID_JID",
+ 3: "NO_BODY",
+ 4: "INVALID_XML",
+ 5: "INVALID_TYPE",
+ }
+
+ def ErrorCode_Name(cls, x): return cls._ErrorCode_NAMES.get(x, "")
+ ErrorCode_Name = classmethod(ErrorCode_Name)
+
+
+ def __init__(self, contents=None):
+ pass
+ if contents is not None: self.MergeFromString(contents)
+
+
+ def MergeFrom(self, x):
+ assert x is not self
+
+ def Equals(self, x):
+ if x is self: return 1
+ return 1
+
+ def IsInitialized(self, debug_strs=None):
+ initialized = 1
+ return initialized
+
+ def ByteSize(self):
+ n = 0
+ return n + 0
+
+ def Clear(self):
+ pass
+
+ def OutputUnchecked(self, out):
+ pass
+
+ def TryMerge(self, d):
+ while d.avail() > 0:
+ tt = d.getVarInt32()
+ if (tt == 0): raise ProtocolBuffer.ProtocolBufferDecodeError
+ d.skipData(tt)
+
+
+ def __str__(self, prefix="", printElemNumber=0):
+ res=""
+ return res
+
+
+ def _BuildTagLookupTable(sparse, maxtag, default=None):
+ return tuple([sparse.get(i, default) for i in xrange(0, 1+maxtag)])
+
+
+ _TEXT = _BuildTagLookupTable({
+ 0: "ErrorCode",
+ }, 0)
+
+ _TYPES = _BuildTagLookupTable({
+ 0: ProtocolBuffer.Encoder.NUMERIC,
+ }, 0, ProtocolBuffer.Encoder.MAX_TYPE)
+
+ _STYLE = """"""
+ _STYLE_CONTENT_TYPE = """"""
+class PresenceRequest(ProtocolBuffer.ProtocolMessage):
+ has_jid_ = 0
+ jid_ = ""
+ has_from_jid_ = 0
+ from_jid_ = ""
+
+ def __init__(self, contents=None):
+ if contents is not None: self.MergeFromString(contents)
+
+ def jid(self): return self.jid_
+
+ def set_jid(self, x):
+ self.has_jid_ = 1
+ self.jid_ = x
+
+ def clear_jid(self):
+ if self.has_jid_:
+ self.has_jid_ = 0
+ self.jid_ = ""
+
+ def has_jid(self): return self.has_jid_
+
+ def from_jid(self): return self.from_jid_
+
+ def set_from_jid(self, x):
+ self.has_from_jid_ = 1
+ self.from_jid_ = x
+
+ def clear_from_jid(self):
+ if self.has_from_jid_:
+ self.has_from_jid_ = 0
+ self.from_jid_ = ""
+
+ def has_from_jid(self): return self.has_from_jid_
+
+
+ def MergeFrom(self, x):
+ assert x is not self
+ if (x.has_jid()): self.set_jid(x.jid())
+ if (x.has_from_jid()): self.set_from_jid(x.from_jid())
+
+ def Equals(self, x):
+ if x is self: return 1
+ if self.has_jid_ != x.has_jid_: return 0
+ if self.has_jid_ and self.jid_ != x.jid_: return 0
+ if self.has_from_jid_ != x.has_from_jid_: return 0
+ if self.has_from_jid_ and self.from_jid_ != x.from_jid_: return 0
+ return 1
+
+ def IsInitialized(self, debug_strs=None):
+ initialized = 1
+ if (not self.has_jid_):
+ initialized = 0
+ if debug_strs is not None:
+ debug_strs.append('Required field: jid not set.')
+ return initialized
+
+ def ByteSize(self):
+ n = 0
+ n += self.lengthString(len(self.jid_))
+ if (self.has_from_jid_): n += 1 + self.lengthString(len(self.from_jid_))
+ return n + 1
+
+ def Clear(self):
+ self.clear_jid()
+ self.clear_from_jid()
+
+ def OutputUnchecked(self, out):
+ out.putVarInt32(10)
+ out.putPrefixedString(self.jid_)
+ if (self.has_from_jid_):
+ out.putVarInt32(18)
+ out.putPrefixedString(self.from_jid_)
+
+ def TryMerge(self, d):
+ while d.avail() > 0:
+ tt = d.getVarInt32()
+ if tt == 10:
+ self.set_jid(d.getPrefixedString())
+ continue
+ if tt == 18:
+ self.set_from_jid(d.getPrefixedString())
+ continue
+ if (tt == 0): raise ProtocolBuffer.ProtocolBufferDecodeError
+ d.skipData(tt)
+
+
+ def __str__(self, prefix="", printElemNumber=0):
+ res=""
+ if self.has_jid_: res+=prefix+("jid: %s\n" % self.DebugFormatString(self.jid_))
+ if self.has_from_jid_: res+=prefix+("from_jid: %s\n" % self.DebugFormatString(self.from_jid_))
+ return res
+
+
+ def _BuildTagLookupTable(sparse, maxtag, default=None):
+ return tuple([sparse.get(i, default) for i in xrange(0, 1+maxtag)])
+
+ kjid = 1
+ kfrom_jid = 2
+
+ _TEXT = _BuildTagLookupTable({
+ 0: "ErrorCode",
+ 1: "jid",
+ 2: "from_jid",
+ }, 2)
+
+ _TYPES = _BuildTagLookupTable({
+ 0: ProtocolBuffer.Encoder.NUMERIC,
+ 1: ProtocolBuffer.Encoder.STRING,
+ 2: ProtocolBuffer.Encoder.STRING,
+ }, 2, ProtocolBuffer.Encoder.MAX_TYPE)
+
+ _STYLE = """"""
+ _STYLE_CONTENT_TYPE = """"""
+class PresenceResponse(ProtocolBuffer.ProtocolMessage):
+
+ NORMAL = 0
+ AWAY = 1
+ DO_NOT_DISTURB = 2
+ CHAT = 3
+ EXTENDED_AWAY = 4
+
+ _SHOW_NAMES = {
+ 0: "NORMAL",
+ 1: "AWAY",
+ 2: "DO_NOT_DISTURB",
+ 3: "CHAT",
+ 4: "EXTENDED_AWAY",
+ }
+
+ def SHOW_Name(cls, x): return cls._SHOW_NAMES.get(x, "")
+ SHOW_Name = classmethod(SHOW_Name)
+
+ has_is_available_ = 0
+ is_available_ = 0
+ has_presence_ = 0
+ presence_ = 0
+
+ def __init__(self, contents=None):
+ if contents is not None: self.MergeFromString(contents)
+
+ def is_available(self): return self.is_available_
+
+ def set_is_available(self, x):
+ self.has_is_available_ = 1
+ self.is_available_ = x
+
+ def clear_is_available(self):
+ if self.has_is_available_:
+ self.has_is_available_ = 0
+ self.is_available_ = 0
+
+ def has_is_available(self): return self.has_is_available_
+
+ def presence(self): return self.presence_
+
+ def set_presence(self, x):
+ self.has_presence_ = 1
+ self.presence_ = x
+
+ def clear_presence(self):
+ if self.has_presence_:
+ self.has_presence_ = 0
+ self.presence_ = 0
+
+ def has_presence(self): return self.has_presence_
+
+
+ def MergeFrom(self, x):
+ assert x is not self
+ if (x.has_is_available()): self.set_is_available(x.is_available())
+ if (x.has_presence()): self.set_presence(x.presence())
+
+ def Equals(self, x):
+ if x is self: return 1
+ if self.has_is_available_ != x.has_is_available_: return 0
+ if self.has_is_available_ and self.is_available_ != x.is_available_: return 0
+ if self.has_presence_ != x.has_presence_: return 0
+ if self.has_presence_ and self.presence_ != x.presence_: return 0
+ return 1
+
+ def IsInitialized(self, debug_strs=None):
+ initialized = 1
+ if (not self.has_is_available_):
+ initialized = 0
+ if debug_strs is not None:
+ debug_strs.append('Required field: is_available not set.')
+ return initialized
+
+ def ByteSize(self):
+ n = 0
+ if (self.has_presence_): n += 1 + self.lengthVarInt64(self.presence_)
+ return n + 2
+
+ def Clear(self):
+ self.clear_is_available()
+ self.clear_presence()
+
+ def OutputUnchecked(self, out):
+ out.putVarInt32(8)
+ out.putBoolean(self.is_available_)
+ if (self.has_presence_):
+ out.putVarInt32(16)
+ out.putVarInt32(self.presence_)
+
+ def TryMerge(self, d):
+ while d.avail() > 0:
+ tt = d.getVarInt32()
+ if tt == 8:
+ self.set_is_available(d.getBoolean())
+ continue
+ if tt == 16:
+ self.set_presence(d.getVarInt32())
+ continue
+ if (tt == 0): raise ProtocolBuffer.ProtocolBufferDecodeError
+ d.skipData(tt)
+
+
+ def __str__(self, prefix="", printElemNumber=0):
+ res=""
+ if self.has_is_available_: res+=prefix+("is_available: %s\n" % self.DebugFormatBool(self.is_available_))
+ if self.has_presence_: res+=prefix+("presence: %s\n" % self.DebugFormatInt32(self.presence_))
+ return res
+
+
+ def _BuildTagLookupTable(sparse, maxtag, default=None):
+ return tuple([sparse.get(i, default) for i in xrange(0, 1+maxtag)])
+
+ kis_available = 1
+ kpresence = 2
+
+ _TEXT = _BuildTagLookupTable({
+ 0: "ErrorCode",
+ 1: "is_available",
+ 2: "presence",
+ }, 2)
+
+ _TYPES = _BuildTagLookupTable({
+ 0: ProtocolBuffer.Encoder.NUMERIC,
+ 1: ProtocolBuffer.Encoder.NUMERIC,
+ 2: ProtocolBuffer.Encoder.NUMERIC,
+ }, 2, ProtocolBuffer.Encoder.MAX_TYPE)
+
+ _STYLE = """"""
+ _STYLE_CONTENT_TYPE = """"""
+class XmppMessageRequest(ProtocolBuffer.ProtocolMessage):
+ has_body_ = 0
+ body_ = ""
+ has_raw_xml_ = 0
+ raw_xml_ = 0
+ has_type_ = 0
+ type_ = "chat"
+ has_from_jid_ = 0
+ from_jid_ = ""
+
+ def __init__(self, contents=None):
+ self.jid_ = []
+ if contents is not None: self.MergeFromString(contents)
+
+ def jid_size(self): return len(self.jid_)
+ def jid_list(self): return self.jid_
+
+ def jid(self, i):
+ return self.jid_[i]
+
+ def set_jid(self, i, x):
+ self.jid_[i] = x
+
+ def add_jid(self, x):
+ self.jid_.append(x)
+
+ def clear_jid(self):
+ self.jid_ = []
+
+ def body(self): return self.body_
+
+ def set_body(self, x):
+ self.has_body_ = 1
+ self.body_ = x
+
+ def clear_body(self):
+ if self.has_body_:
+ self.has_body_ = 0
+ self.body_ = ""
+
+ def has_body(self): return self.has_body_
+
+ def raw_xml(self): return self.raw_xml_
+
+ def set_raw_xml(self, x):
+ self.has_raw_xml_ = 1
+ self.raw_xml_ = x
+
+ def clear_raw_xml(self):
+ if self.has_raw_xml_:
+ self.has_raw_xml_ = 0
+ self.raw_xml_ = 0
+
+ def has_raw_xml(self): return self.has_raw_xml_
+
+ def type(self): return self.type_
+
+ def set_type(self, x):
+ self.has_type_ = 1
+ self.type_ = x
+
+ def clear_type(self):
+ if self.has_type_:
+ self.has_type_ = 0
+ self.type_ = "chat"
+
+ def has_type(self): return self.has_type_
+
+ def from_jid(self): return self.from_jid_
+
+ def set_from_jid(self, x):
+ self.has_from_jid_ = 1
+ self.from_jid_ = x
+
+ def clear_from_jid(self):
+ if self.has_from_jid_:
+ self.has_from_jid_ = 0
+ self.from_jid_ = ""
+
+ def has_from_jid(self): return self.has_from_jid_
+
+
+ def MergeFrom(self, x):
+ assert x is not self
+ for i in xrange(x.jid_size()): self.add_jid(x.jid(i))
+ if (x.has_body()): self.set_body(x.body())
+ if (x.has_raw_xml()): self.set_raw_xml(x.raw_xml())
+ if (x.has_type()): self.set_type(x.type())
+ if (x.has_from_jid()): self.set_from_jid(x.from_jid())
+
+ def Equals(self, x):
+ if x is self: return 1
+ if len(self.jid_) != len(x.jid_): return 0
+ for e1, e2 in zip(self.jid_, x.jid_):
+ if e1 != e2: return 0
+ if self.has_body_ != x.has_body_: return 0
+ if self.has_body_ and self.body_ != x.body_: return 0
+ if self.has_raw_xml_ != x.has_raw_xml_: return 0
+ if self.has_raw_xml_ and self.raw_xml_ != x.raw_xml_: return 0
+ if self.has_type_ != x.has_type_: return 0
+ if self.has_type_ and self.type_ != x.type_: return 0
+ if self.has_from_jid_ != x.has_from_jid_: return 0
+ if self.has_from_jid_ and self.from_jid_ != x.from_jid_: return 0
+ return 1
+
+ def IsInitialized(self, debug_strs=None):
+ initialized = 1
+ if (not self.has_body_):
+ initialized = 0
+ if debug_strs is not None:
+ debug_strs.append('Required field: body not set.')
+ return initialized
+
+ def ByteSize(self):
+ n = 0
+ n += 1 * len(self.jid_)
+ for i in xrange(len(self.jid_)): n += self.lengthString(len(self.jid_[i]))
+ n += self.lengthString(len(self.body_))
+ if (self.has_raw_xml_): n += 2
+ if (self.has_type_): n += 1 + self.lengthString(len(self.type_))
+ if (self.has_from_jid_): n += 1 + self.lengthString(len(self.from_jid_))
+ return n + 1
+
+ def Clear(self):
+ self.clear_jid()
+ self.clear_body()
+ self.clear_raw_xml()
+ self.clear_type()
+ self.clear_from_jid()
+
+ def OutputUnchecked(self, out):
+ for i in xrange(len(self.jid_)):
+ out.putVarInt32(10)
+ out.putPrefixedString(self.jid_[i])
+ out.putVarInt32(18)
+ out.putPrefixedString(self.body_)
+ if (self.has_raw_xml_):
+ out.putVarInt32(24)
+ out.putBoolean(self.raw_xml_)
+ if (self.has_type_):
+ out.putVarInt32(34)
+ out.putPrefixedString(self.type_)
+ if (self.has_from_jid_):
+ out.putVarInt32(42)
+ out.putPrefixedString(self.from_jid_)
+
+ def TryMerge(self, d):
+ while d.avail() > 0:
+ tt = d.getVarInt32()
+ if tt == 10:
+ self.add_jid(d.getPrefixedString())
+ continue
+ if tt == 18:
+ self.set_body(d.getPrefixedString())
+ continue
+ if tt == 24:
+ self.set_raw_xml(d.getBoolean())
+ continue
+ if tt == 34:
+ self.set_type(d.getPrefixedString())
+ continue
+ if tt == 42:
+ self.set_from_jid(d.getPrefixedString())
+ continue
+ if (tt == 0): raise ProtocolBuffer.ProtocolBufferDecodeError
+ d.skipData(tt)
+
+
+ def __str__(self, prefix="", printElemNumber=0):
+ res=""
+ cnt=0
+ for e in self.jid_:
+ elm=""
+ if printElemNumber: elm="(%d)" % cnt
+ res+=prefix+("jid%s: %s\n" % (elm, self.DebugFormatString(e)))
+ cnt+=1
+ if self.has_body_: res+=prefix+("body: %s\n" % self.DebugFormatString(self.body_))
+ if self.has_raw_xml_: res+=prefix+("raw_xml: %s\n" % self.DebugFormatBool(self.raw_xml_))
+ if self.has_type_: res+=prefix+("type: %s\n" % self.DebugFormatString(self.type_))
+ if self.has_from_jid_: res+=prefix+("from_jid: %s\n" % self.DebugFormatString(self.from_jid_))
+ return res
+
+
+ def _BuildTagLookupTable(sparse, maxtag, default=None):
+ return tuple([sparse.get(i, default) for i in xrange(0, 1+maxtag)])
+
+ kjid = 1
+ kbody = 2
+ kraw_xml = 3
+ ktype = 4
+ kfrom_jid = 5
+
+ _TEXT = _BuildTagLookupTable({
+ 0: "ErrorCode",
+ 1: "jid",
+ 2: "body",
+ 3: "raw_xml",
+ 4: "type",
+ 5: "from_jid",
+ }, 5)
+
+ _TYPES = _BuildTagLookupTable({
+ 0: ProtocolBuffer.Encoder.NUMERIC,
+ 1: ProtocolBuffer.Encoder.STRING,
+ 2: ProtocolBuffer.Encoder.STRING,
+ 3: ProtocolBuffer.Encoder.NUMERIC,
+ 4: ProtocolBuffer.Encoder.STRING,
+ 5: ProtocolBuffer.Encoder.STRING,
+ }, 5, ProtocolBuffer.Encoder.MAX_TYPE)
+
+ _STYLE = """"""
+ _STYLE_CONTENT_TYPE = """"""
+class XmppMessageResponse(ProtocolBuffer.ProtocolMessage):
+
+ NO_ERROR = 0
+ INVALID_JID = 1
+ OTHER_ERROR = 2
+
+ _XmppMessageStatus_NAMES = {
+ 0: "NO_ERROR",
+ 1: "INVALID_JID",
+ 2: "OTHER_ERROR",
+ }
+
+ def XmppMessageStatus_Name(cls, x): return cls._XmppMessageStatus_NAMES.get(x, "")
+ XmppMessageStatus_Name = classmethod(XmppMessageStatus_Name)
+
+
+ def __init__(self, contents=None):
+ self.status_ = []
+ if contents is not None: self.MergeFromString(contents)
+
+ def status_size(self): return len(self.status_)
+ def status_list(self): return self.status_
+
+ def status(self, i):
+ return self.status_[i]
+
+ def set_status(self, i, x):
+ self.status_[i] = x
+
+ def add_status(self, x):
+ self.status_.append(x)
+
+ def clear_status(self):
+ self.status_ = []
+
+
+ def MergeFrom(self, x):
+ assert x is not self
+ for i in xrange(x.status_size()): self.add_status(x.status(i))
+
+ def Equals(self, x):
+ if x is self: return 1
+ if len(self.status_) != len(x.status_): return 0
+ for e1, e2 in zip(self.status_, x.status_):
+ if e1 != e2: return 0
+ return 1
+
+ def IsInitialized(self, debug_strs=None):
+ initialized = 1
+ return initialized
+
+ def ByteSize(self):
+ n = 0
+ n += 1 * len(self.status_)
+ for i in xrange(len(self.status_)): n += self.lengthVarInt64(self.status_[i])
+ return n + 0
+
+ def Clear(self):
+ self.clear_status()
+
+ def OutputUnchecked(self, out):
+ for i in xrange(len(self.status_)):
+ out.putVarInt32(8)
+ out.putVarInt32(self.status_[i])
+
+ def TryMerge(self, d):
+ while d.avail() > 0:
+ tt = d.getVarInt32()
+ if tt == 8:
+ self.add_status(d.getVarInt32())
+ continue
+ if (tt == 0): raise ProtocolBuffer.ProtocolBufferDecodeError
+ d.skipData(tt)
+
+
+ def __str__(self, prefix="", printElemNumber=0):
+ res=""
+ cnt=0
+ for e in self.status_:
+ elm=""
+ if printElemNumber: elm="(%d)" % cnt
+ res+=prefix+("status%s: %s\n" % (elm, self.DebugFormatInt32(e)))
+ cnt+=1
+ return res
+
+
+ def _BuildTagLookupTable(sparse, maxtag, default=None):
+ return tuple([sparse.get(i, default) for i in xrange(0, 1+maxtag)])
+
+ kstatus = 1
+
+ _TEXT = _BuildTagLookupTable({
+ 0: "ErrorCode",
+ 1: "status",
+ }, 1)
+
+ _TYPES = _BuildTagLookupTable({
+ 0: ProtocolBuffer.Encoder.NUMERIC,
+ 1: ProtocolBuffer.Encoder.NUMERIC,
+ }, 1, ProtocolBuffer.Encoder.MAX_TYPE)
+
+ _STYLE = """"""
+ _STYLE_CONTENT_TYPE = """"""
+class XmppInviteRequest(ProtocolBuffer.ProtocolMessage):
+ has_jid_ = 0
+ jid_ = ""
+ has_from_jid_ = 0
+ from_jid_ = ""
+
+ def __init__(self, contents=None):
+ if contents is not None: self.MergeFromString(contents)
+
+ def jid(self): return self.jid_
+
+ def set_jid(self, x):
+ self.has_jid_ = 1
+ self.jid_ = x
+
+ def clear_jid(self):
+ if self.has_jid_:
+ self.has_jid_ = 0
+ self.jid_ = ""
+
+ def has_jid(self): return self.has_jid_
+
+ def from_jid(self): return self.from_jid_
+
+ def set_from_jid(self, x):
+ self.has_from_jid_ = 1
+ self.from_jid_ = x
+
+ def clear_from_jid(self):
+ if self.has_from_jid_:
+ self.has_from_jid_ = 0
+ self.from_jid_ = ""
+
+ def has_from_jid(self): return self.has_from_jid_
+
+
+ def MergeFrom(self, x):
+ assert x is not self
+ if (x.has_jid()): self.set_jid(x.jid())
+ if (x.has_from_jid()): self.set_from_jid(x.from_jid())
+
+ def Equals(self, x):
+ if x is self: return 1
+ if self.has_jid_ != x.has_jid_: return 0
+ if self.has_jid_ and self.jid_ != x.jid_: return 0
+ if self.has_from_jid_ != x.has_from_jid_: return 0
+ if self.has_from_jid_ and self.from_jid_ != x.from_jid_: return 0
+ return 1
+
+ def IsInitialized(self, debug_strs=None):
+ initialized = 1
+ if (not self.has_jid_):
+ initialized = 0
+ if debug_strs is not None:
+ debug_strs.append('Required field: jid not set.')
+ return initialized
+
+ def ByteSize(self):
+ n = 0
+ n += self.lengthString(len(self.jid_))
+ if (self.has_from_jid_): n += 1 + self.lengthString(len(self.from_jid_))
+ return n + 1
+
+ def Clear(self):
+ self.clear_jid()
+ self.clear_from_jid()
+
+ def OutputUnchecked(self, out):
+ out.putVarInt32(10)
+ out.putPrefixedString(self.jid_)
+ if (self.has_from_jid_):
+ out.putVarInt32(18)
+ out.putPrefixedString(self.from_jid_)
+
+ def TryMerge(self, d):
+ while d.avail() > 0:
+ tt = d.getVarInt32()
+ if tt == 10:
+ self.set_jid(d.getPrefixedString())
+ continue
+ if tt == 18:
+ self.set_from_jid(d.getPrefixedString())
+ continue
+ if (tt == 0): raise ProtocolBuffer.ProtocolBufferDecodeError
+ d.skipData(tt)
+
+
+ def __str__(self, prefix="", printElemNumber=0):
+ res=""
+ if self.has_jid_: res+=prefix+("jid: %s\n" % self.DebugFormatString(self.jid_))
+ if self.has_from_jid_: res+=prefix+("from_jid: %s\n" % self.DebugFormatString(self.from_jid_))
+ return res
+
+
+ def _BuildTagLookupTable(sparse, maxtag, default=None):
+ return tuple([sparse.get(i, default) for i in xrange(0, 1+maxtag)])
+
+ kjid = 1
+ kfrom_jid = 2
+
+ _TEXT = _BuildTagLookupTable({
+ 0: "ErrorCode",
+ 1: "jid",
+ 2: "from_jid",
+ }, 2)
+
+ _TYPES = _BuildTagLookupTable({
+ 0: ProtocolBuffer.Encoder.NUMERIC,
+ 1: ProtocolBuffer.Encoder.STRING,
+ 2: ProtocolBuffer.Encoder.STRING,
+ }, 2, ProtocolBuffer.Encoder.MAX_TYPE)
+
+ _STYLE = """"""
+ _STYLE_CONTENT_TYPE = """"""
+class XmppInviteResponse(ProtocolBuffer.ProtocolMessage):
+
+ def __init__(self, contents=None):
+ pass
+ if contents is not None: self.MergeFromString(contents)
+
+
+ def MergeFrom(self, x):
+ assert x is not self
+
+ def Equals(self, x):
+ if x is self: return 1
+ return 1
+
+ def IsInitialized(self, debug_strs=None):
+ initialized = 1
+ return initialized
+
+ def ByteSize(self):
+ n = 0
+ return n + 0
+
+ def Clear(self):
+ pass
+
+ def OutputUnchecked(self, out):
+ pass
+
+ def TryMerge(self, d):
+ while d.avail() > 0:
+ tt = d.getVarInt32()
+ if (tt == 0): raise ProtocolBuffer.ProtocolBufferDecodeError
+ d.skipData(tt)
+
+
+ def __str__(self, prefix="", printElemNumber=0):
+ res=""
+ return res
+
+
+ def _BuildTagLookupTable(sparse, maxtag, default=None):
+ return tuple([sparse.get(i, default) for i in xrange(0, 1+maxtag)])
+
+
+ _TEXT = _BuildTagLookupTable({
+ 0: "ErrorCode",
+ }, 0)
+
+ _TYPES = _BuildTagLookupTable({
+ 0: ProtocolBuffer.Encoder.NUMERIC,
+ }, 0, ProtocolBuffer.Encoder.MAX_TYPE)
+
+ _STYLE = """"""
+ _STYLE_CONTENT_TYPE = """"""
+
+__all__ = ['XmppServiceError','PresenceRequest','PresenceResponse','XmppMessageRequest','XmppMessageResponse','XmppInviteRequest','XmppInviteResponse']
diff --git a/google_appengine/google/appengine/api/xmpp/xmpp_service_pb.pyc b/google_appengine/google/appengine/api/xmpp/xmpp_service_pb.pyc
new file mode 100644
index 0000000..6fc90d3
--- /dev/null
+++ b/google_appengine/google/appengine/api/xmpp/xmpp_service_pb.pyc
Binary files differ
diff --git a/google_appengine/google/appengine/api/xmpp/xmpp_service_stub.py b/google_appengine/google/appengine/api/xmpp/xmpp_service_stub.py
new file mode 100755
index 0000000..b97dd86
--- /dev/null
+++ b/google_appengine/google/appengine/api/xmpp/xmpp_service_stub.py
@@ -0,0 +1,154 @@
+#!/usr/bin/env python
+#
+# Copyright 2007 Google Inc.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
+
+"""Stub version of the XMPP API, writes messages to logs."""
+
+
+
+
+
+import logging
+import os
+
+from google.appengine.api import apiproxy_stub
+from google.appengine.api import xmpp
+from google.appengine.api.xmpp import xmpp_service_pb
+
+
+class XmppServiceStub(apiproxy_stub.APIProxyStub):
+ """Python only xmpp service stub.
+
+ This stub does not use an XMPP network. It prints messages to the console
+ instead of sending any stanzas.
+ """
+
+ def __init__(self, log=logging.info, service_name='xmpp'):
+ """Initializer.
+
+ Args:
+ log: A logger, used for dependency injection.
+ service_name: Service name expected for all calls.
+ """
+ super(XmppServiceStub, self).__init__(service_name)
+ self.log = log
+
+ def _Dynamic_GetPresence(self, request, response):
+ """Implementation of XmppService::GetPresence.
+
+ Returns online if the first character of the JID comes before 'm' in the
+ alphabet, otherwise returns offline.
+
+ Args:
+ request: A PresenceRequest.
+ response: A PresenceResponse.
+ """
+ jid = request.jid()
+ self._GetFrom(request.from_jid())
+ if jid[0] < 'm':
+ response.set_is_available(True)
+ else:
+ response.set_is_available(False)
+
+ def _Dynamic_SendMessage(self, request, response):
+ """Implementation of XmppService::SendMessage.
+
+ Args:
+ request: An XmppMessageRequest.
+      response: An XmppMessageResponse.
+ """
+ from_jid = self._GetFrom(request.from_jid())
+ self.log('Sending an XMPP Message:')
+ self.log(' From:')
+ self.log(' ' + from_jid)
+ self.log(' Body:')
+ self.log(' ' + request.body())
+ self.log(' Type:')
+ self.log(' ' + request.type())
+ self.log(' Raw Xml:')
+ self.log(' ' + str(request.raw_xml()))
+ self.log(' To JIDs:')
+ for jid in request.jid_list():
+ self.log(' ' + jid)
+
+ for jid in request.jid_list():
+ response.add_status(xmpp_service_pb.XmppMessageResponse.NO_ERROR)
+
+ def _Dynamic_SendInvite(self, request, response):
+ """Implementation of XmppService::SendInvite.
+
+ Args:
+ request: An XmppInviteRequest.
+      response: An XmppInviteResponse.
+ """
+ from_jid = self._GetFrom(request.from_jid())
+ self.log('Sending an XMPP Invite:')
+ self.log(' From:')
+ self.log(' ' + from_jid)
+ self.log(' To: ' + request.jid())
+
+ def _GetFrom(self, requested):
+ """Validates that the from JID is valid.
+
+ Args:
+ requested: The requested from JID.
+
+ Returns:
+ string, The from JID.
+
+ Raises:
+ xmpp.InvalidJidError if the requested JID is invalid.
+ """
+
+ appid = os.environ.get('APPLICATION_ID', '')
+    if requested is None or requested == '':
+ return appid + '@appspot.com/bot'
+
+ node, domain, resource = ('', '', '')
+ at = requested.find('@')
+ if at == -1:
+ self.log('Invalid From JID: No \'@\' character found. JID: %s', requested)
+ raise xmpp.InvalidJidError()
+
+ node = requested[:at]
+ rest = requested[at+1:]
+
+ if rest.find('@') > -1:
+ self.log('Invalid From JID: Second \'@\' character found. JID: %s',
+ requested)
+ raise xmpp.InvalidJidError()
+
+ slash = rest.find('/')
+ if slash == -1:
+ domain = rest
+ resource = 'bot'
+ else:
+ domain = rest[:slash]
+ resource = rest[slash+1:]
+
+ if resource.find('/') > -1:
+ self.log('Invalid From JID: Second \'/\' character found. JID: %s',
+ requested)
+ raise xmpp.InvalidJidError()
+
+ if domain == 'appspot.com' and node == appid:
+ return node + '@' + domain + '/' + resource
+ elif domain == appid + '.appspotchat.com':
+ return node + '@' + domain + '/' + resource
+
+ self.log('Invalid From JID: Must be appid@appspot.com[/resource] or '
+ 'node@appid.appspotchat.com[/resource]. JID: %s', requested)
+ raise xmpp.InvalidJidError()
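+
+
+# Illustrative usage sketch (commented out): in the dev server a stub like
+# this is registered with the API proxy so that 'xmpp' calls are routed here.
+# The wiring below assumes the RegisterStub interface described in
+# apiproxy_stub_map from this same import.
+#
+#   from google.appengine.api import apiproxy_stub_map
+#   from google.appengine.api.xmpp import xmpp_service_stub
+#
+#   apiproxy_stub_map.apiproxy.RegisterStub(
+#       'xmpp', xmpp_service_stub.XmppServiceStub())
+#
+# Once installed, xmpp.send_message(['user@example.com'], 'hi') writes the
+# stanza to the log instead of using a real XMPP network.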
diff --git a/google_appengine/google/appengine/api/xmpp/xmpp_service_stub.pyc b/google_appengine/google/appengine/api/xmpp/xmpp_service_stub.pyc
new file mode 100644
index 0000000..8a26f65
--- /dev/null
+++ b/google_appengine/google/appengine/api/xmpp/xmpp_service_stub.pyc
Binary files differ
diff --git a/google_appengine/google/appengine/api/yaml_builder.py b/google_appengine/google/appengine/api/yaml_builder.py
new file mode 100755
index 0000000..71e730c
--- /dev/null
+++ b/google_appengine/google/appengine/api/yaml_builder.py
@@ -0,0 +1,432 @@
+#!/usr/bin/env python
+#
+# Copyright 2007 Google Inc.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
+
+"""PyYAML event builder handler
+
+Receives events from YAML listener and forwards them to a builder
+object so that it can construct a properly structured object.
+"""
+
+
+
+
+
+from google.appengine.api import yaml_errors
+from google.appengine.api import yaml_listener
+
+import yaml
+
+_TOKEN_DOCUMENT = 'document'
+_TOKEN_SEQUENCE = 'sequence'
+_TOKEN_MAPPING = 'mapping'
+_TOKEN_KEY = 'key'
+_TOKEN_VALUES = frozenset((
+ _TOKEN_DOCUMENT,
+ _TOKEN_SEQUENCE,
+ _TOKEN_MAPPING,
+ _TOKEN_KEY))
+
+
+class Builder(object):
+ """Interface for building documents and type from YAML events.
+
+ Implement this interface to create a new builder. Builders are
+ passed to the BuilderHandler and used as a factory and assembler
+ for creating concrete representations of YAML files.
+ """
+
+ def BuildDocument(self):
+ """Build new document.
+
+ The object built by this method becomes the top level entity
+ that the builder handler constructs. The actual type is
+ determined by the sub-class of the Builder class and can essentially
+ be any type at all. This method is always called when the parser
+ encounters the start of a new document.
+
+ Returns:
+ New object instance representing concrete document which is
+ returned to user via BuilderHandler.GetResults().
+ """
+
+ def InitializeDocument(self, document, value):
+ """Initialize document with value from top level of document.
+
+ This method is called when the root document element is encountered at
+ the top level of a YAML document. It should get called immediately
+ after BuildDocument.
+
+ Receiving the None value indicates the empty document.
+
+ Args:
+ document: Document as constructed in BuildDocument.
+ value: Scalar value to initialize the document with.
+ """
+
+ def BuildMapping(self, top_value):
+ """Build a new mapping representation.
+
+ Called when StartMapping event received. Type of object is determined
+ by Builder sub-class.
+
+ Args:
+      top_value: Object which will be the new mapping's parent. Will be the
+        object returned from a previous call to BuildMapping or BuildSequence.
+
+ Returns:
+ Instance of new object that represents a mapping type in target model.
+ """
+
+ def EndMapping(self, top_value, mapping):
+ """Previously constructed mapping scope is at an end.
+
+ Called when the end of a mapping block is encountered. Useful for
+ additional clean up or end of scope validation.
+
+ Args:
+ top_value: Value which is parent of the mapping.
+ mapping: Mapping which is at the end of its scope.
+ """
+
+ def BuildSequence(self, top_value):
+ """Build a new sequence representation.
+
+ Called when StartSequence event received. Type of object is determined
+ by Builder sub-class.
+
+ Args:
+      top_value: Object which will be the new sequence's parent. Will be the
+        object returned from a previous call to BuildMapping or BuildSequence.
+
+ Returns:
+ Instance of new object that represents a sequence type in target model.
+ """
+
+ def EndSequence(self, top_value, sequence):
+ """Previously constructed sequence scope is at an end.
+
+ Called when the end of a sequence block is encountered. Useful for
+ additional clean up or end of scope validation.
+
+ Args:
+ top_value: Value which is parent of the sequence.
+ sequence: Sequence which is at the end of its scope.
+ """
+
+ def MapTo(self, subject, key, value):
+ """Map value to a mapping representation.
+
+ Implementation is defined by sub-class of Builder.
+
+ Args:
+ subject: Object that represents mapping. Value returned from
+ BuildMapping.
+ key: Key used to map value to subject. Can be any scalar value.
+ value: Value which is mapped to subject. Can be any kind of value.
+ """
+
+ def AppendTo(self, subject, value):
+ """Append value to a sequence representation.
+
+ Implementation is defined by sub-class of Builder.
+
+ Args:
+      subject: Object that represents sequence. Value returned from
+        BuildSequence.
+ value: Value to be appended to subject. Can be any kind of value.
+ """
+
+
+class BuilderHandler(yaml_listener.EventHandler):
+ """PyYAML event handler used to build objects.
+
+ Maintains state information as it receives parse events so that object
+ nesting is maintained. Uses provided builder object to construct and
+ assemble objects as it goes.
+
+ As it receives events from the YAML parser, it builds a stack of data
+ representing structural tokens. As the scope of documents, mappings
+ and sequences end, those token, value pairs are popped from the top of
+ the stack so that the original scope can resume processing.
+
+  A special case is made for the _KEY token. It represents a temporary
+  value which only occurs inside mappings. It is immediately popped off
+  the stack when its associated value is encountered in the parse stream.
+  It is necessary to do this because the YAML parser does not combine
+  key and value information into a single event.
+ """
+
+ def __init__(self, builder):
+ """Initialization for builder handler.
+
+ Args:
+ builder: Instance of Builder class.
+
+ Raises:
+ ListenerConfigurationError when builder is not a Builder class.
+ """
+ if not isinstance(builder, Builder):
+ raise yaml_errors.ListenerConfigurationError(
+          'Must provide builder of type yaml_builder.Builder')
+ self._builder = builder
+ self._stack = None
+ self._top = None
+ self._results = []
+
+ def _Push(self, token, value):
+ """Push values to stack at start of nesting.
+
+ When a new object scope is beginning, will push the token (type of scope)
+ along with the new objects value, the latter of which is provided through
+ the various build methods of the builder.
+
+ Args:
+ token: Token indicating the type of scope which is being created; must
+ belong to _TOKEN_VALUES.
+ value: Value to associate with given token. Construction of value is
+ determined by the builder provided to this handler at construction.
+ """
+ self._top = (token, value)
+ self._stack.append(self._top)
+
+ def _Pop(self):
+ """Pop values from stack at end of nesting.
+
+ Called to indicate the end of a nested scope.
+
+ Returns:
+ Previously pushed value at the top of the stack.
+ """
+ assert self._stack != [] and self._stack is not None
+ token, value = self._stack.pop()
+ if self._stack:
+ self._top = self._stack[-1]
+ else:
+ self._top = None
+ return value
+
+ def _HandleAnchor(self, event):
+ """Handle anchor attached to event.
+
+ Currently will raise an error if anchor is used. Anchors are used to
+ define a document wide tag to a given value (scalar, mapping or sequence).
+
+ Args:
+ event: Event which may have anchor property set.
+
+ Raises:
+ NotImplementedError if event attempts to use an anchor.
+ """
+ if hasattr(event, 'anchor') and event.anchor is not None:
+      raise NotImplementedError('Anchors not supported in this handler')
+
+ def _HandleValue(self, value):
+ """Handle given value based on state of parser
+
+ This method handles the various values that are created by the builder
+ at the beginning of scope events (such as mappings and sequences) or
+ when a scalar value is received.
+
+    Method is called when the handler receives a Scalar, MappingStart or
+    SequenceStart event.
+
+ Args:
+ value: Value received as scalar value or newly constructed mapping or
+ sequence instance.
+
+ Raises:
+ InternalError if the building process encounters an unexpected token.
+ This is an indication of an implementation error in BuilderHandler.
+ """
+ token, top_value = self._top
+
+ if token == _TOKEN_KEY:
+ key = self._Pop()
+ mapping_token, mapping = self._top
+ assert _TOKEN_MAPPING == mapping_token
+ self._builder.MapTo(mapping, key, value)
+
+ elif token == _TOKEN_MAPPING:
+ self._Push(_TOKEN_KEY, value)
+
+ elif token == _TOKEN_SEQUENCE:
+ self._builder.AppendTo(top_value, value)
+
+ elif token == _TOKEN_DOCUMENT:
+ self._builder.InitializeDocument(top_value, value)
+
+ else:
+ raise yaml_errors.InternalError('Unrecognized builder token:\n%s' % token)
+
+ def StreamStart(self, event, loader):
+ """Initializes internal state of handler
+
+ Args:
+ event: Ignored.
+ """
+ assert self._stack is None
+ self._stack = []
+ self._top = None
+ self._results = []
+
+ def StreamEnd(self, event, loader):
+ """Cleans up internal state of handler after parsing
+
+ Args:
+ event: Ignored.
+ """
+ assert self._stack == [] and self._top is None
+ self._stack = None
+
+ def DocumentStart(self, event, loader):
+ """Build new document.
+
+    Pushes new document onto the stack.
+
+    Args:
+      event: Ignored.
+      loader: Ignored.
+    """
+ assert self._stack == []
+ self._Push(_TOKEN_DOCUMENT, self._builder.BuildDocument())
+
+ def DocumentEnd(self, event, loader):
+ """End of document.
+
+ Args:
+ event: Ignored.
+ """
+ assert self._top[0] == _TOKEN_DOCUMENT
+ self._results.append(self._Pop())
+
+ def Alias(self, event, loader):
+ """Not implemented yet.
+
+ Args:
+ event: Ignored.
+ """
+ raise NotImplementedError('Anchors not supported in this handler')
+
+ def Scalar(self, event, loader):
+ """Handle scalar value
+
+ Since scalars are simple values that are passed directly in by the
+ parser, handle like any value with no additional processing.
+
+    Of course, key values will be handled specially. A key value is recognized
+ when the top token is _TOKEN_MAPPING.
+
+ Args:
+ event: Event containing scalar value.
+ """
+ self._HandleAnchor(event)
+ if event.tag is None and self._top[0] != _TOKEN_MAPPING:
+ try:
+ tag = loader.resolve(yaml.nodes.ScalarNode,
+ event.value, event.implicit)
+ except IndexError:
+ tag = loader.DEFAULT_SCALAR_TAG
+ else:
+ tag = event.tag
+
+ if tag is None:
+ value = event.value
+ else:
+ node = yaml.nodes.ScalarNode(tag,
+ event.value,
+ event.start_mark,
+ event.end_mark,
+ event.style)
+ value = loader.construct_object(node)
+ self._HandleValue(value)
+
+ def SequenceStart(self, event, loader):
+ """Start of sequence scope
+
+ Create a new sequence from the builder and then handle in the context
+ of its parent.
+
+ Args:
+ event: SequenceStartEvent generated by loader.
+ loader: Loader that generated event.
+ """
+ self._HandleAnchor(event)
+ token, parent = self._top
+
+ if token == _TOKEN_KEY:
+ token, parent = self._stack[-2]
+ sequence = self._builder.BuildSequence(parent)
+ self._HandleValue(sequence)
+ self._Push(_TOKEN_SEQUENCE, sequence)
+
+ def SequenceEnd(self, event, loader):
+ """End of sequence.
+
+ Args:
+      event: Ignored.
+ loader: Ignored.
+ """
+ assert self._top[0] == _TOKEN_SEQUENCE
+ end_object = self._Pop()
+ top_value = self._top[1]
+ self._builder.EndSequence(top_value, end_object)
+
+ def MappingStart(self, event, loader):
+ """Start of mapping scope.
+
+ Create a mapping from builder and then handle in the context of its
+ parent.
+
+ Args:
+ event: MappingStartEvent generated by loader.
+ loader: Loader that generated event.
+ """
+ self._HandleAnchor(event)
+ token, parent = self._top
+
+ if token == _TOKEN_KEY:
+ token, parent = self._stack[-2]
+ mapping = self._builder.BuildMapping(parent)
+ self._HandleValue(mapping)
+ self._Push(_TOKEN_MAPPING, mapping)
+
+ def MappingEnd(self, event, loader):
+ """End of mapping
+
+ Args:
+ event: Ignored.
+ loader: Ignored.
+ """
+ assert self._top[0] == _TOKEN_MAPPING
+ end_object = self._Pop()
+ top_value = self._top[1]
+ self._builder.EndMapping(top_value, end_object)
+
+ def GetResults(self):
+ """Get results of document stream processing.
+
+    This method can be invoked after fully parsing the entire YAML file
+    to retrieve the constructed contents of the YAML file. Called after
+    StreamEnd.
+
+ Returns:
+ A tuple of all document objects that were parsed from YAML stream.
+
+ Raises:
+ InternalError if the builder stack is not empty by the end of parsing.
+ """
+ if self._stack is not None:
+ raise yaml_errors.InternalError('Builder stack is not empty.')
+ return tuple(self._results)
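+
+
+# Illustrative usage sketch (commented out): a Builder that assembles plain
+# Python dicts and lists, wired through BuilderHandler and the EventListener
+# from yaml_listener. Only interfaces defined in these modules are used.
+#
+#   from google.appengine.api import yaml_listener
+#
+#   class PlainBuilder(Builder):
+#     def BuildDocument(self):
+#       return []                        # one-slot holder for the root value
+#     def InitializeDocument(self, document, value):
+#       document.append(value)           # keep a reference; value fills later
+#     def BuildMapping(self, top_value):
+#       return {}
+#     def BuildSequence(self, top_value):
+#       return []
+#     def MapTo(self, subject, key, value):
+#       subject[key] = value
+#     def AppendTo(self, subject, value):
+#       subject.append(value)
+#
+#   handler = BuilderHandler(PlainBuilder())
+#   yaml_listener.EventListener(handler).Parse('a: [1, 2]')
+#   print handler.GetResults()           # ([{'a': [1, 2]}],)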
diff --git a/google_appengine/google/appengine/api/yaml_builder.pyc b/google_appengine/google/appengine/api/yaml_builder.pyc
new file mode 100644
index 0000000..713f7b2
--- /dev/null
+++ b/google_appengine/google/appengine/api/yaml_builder.pyc
Binary files differ
diff --git a/google_appengine/google/appengine/api/yaml_errors.py b/google_appengine/google/appengine/api/yaml_errors.py
new file mode 100755
index 0000000..6896e2c
--- /dev/null
+++ b/google_appengine/google/appengine/api/yaml_errors.py
@@ -0,0 +1,96 @@
+#!/usr/bin/env python
+#
+# Copyright 2007 Google Inc.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
+
+"""Errors used in the YAML API, which is used by app developers."""
+
+
+
+class Error(Exception):
+ """Base datastore yaml error type."""
+
+
+class ProtocolBufferParseError(Error):
+  """Error in protocol buffer parsing."""
+
+
+class EmptyConfigurationFile(Error):
+ """Tried to load empty configuration file."""
+
+
+class MultipleConfigurationFile(Error):
+ """Tried to load configuration file with multiple objects."""
+
+
+class UnexpectedAttribute(Error):
+ """Raised when an unexpected attribute is encounted."""
+
+
+class DuplicateAttribute(Error):
+ """Generated when an attribute is assigned to twice."""
+
+
+class ListenerConfigurationError(Error):
+ """Generated when there is a parsing problem due to configuration."""
+
+
+class IllegalEvent(Error):
+ """Raised when an unexpected event type is received by listener."""
+
+
+class InternalError(Error):
+ """Raised when an internal implementation error is detected."""
+
+
+class EventListenerError(Error):
+ """Top level exception raised by YAML listener.
+
+ Any exception raised within the process of parsing a YAML file via an
+ EventListener is caught and wrapped in an EventListenerError. The causing
+ exception is maintained, but additional useful information is saved which
+  can be used for reporting to users.
+
+ Attributes:
+ cause: The original exception which caused the EventListenerError.
+ """
+
+ def __init__(self, cause):
+ """Initialize event-listener error."""
+ if hasattr(cause, 'args') and cause.args:
+ Error.__init__(self, *cause.args)
+ else:
+ Error.__init__(self, str(cause))
+ self.cause = cause
+
+
+class EventListenerYAMLError(EventListenerError):
+ """Generated specifically for yaml.error.YAMLError."""
+
+
+class EventError(EventListenerError):
+ """Generated specifically when an error occurs in event handler.
+
+ Attributes:
+ cause: The original exception which caused the EventListenerError.
+    event: Event being handled when exception occurred.
+ """
+
+ def __init__(self, cause, event):
+ """Initialize event-listener error."""
+ EventListenerError.__init__(self, cause)
+ self.event = event
+
+ def __str__(self):
+ return '%s\n%s' % (self.cause, self.event.start_mark)
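+
+
+# Illustrative usage sketch (commented out): the wrapper reuses the cause's
+# args, so str() of the wrapper matches the original exception.
+#
+#   try:
+#     raise ValueError('bad scalar')
+#   except ValueError, e:
+#     wrapped = EventListenerError(e)
+#   assert wrapped.cause is e
+#   assert str(wrapped) == 'bad scalar'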
diff --git a/google_appengine/google/appengine/api/yaml_errors.pyc b/google_appengine/google/appengine/api/yaml_errors.pyc
new file mode 100644
index 0000000..a89c146
--- /dev/null
+++ b/google_appengine/google/appengine/api/yaml_errors.pyc
Binary files differ
diff --git a/google_appengine/google/appengine/api/yaml_listener.py b/google_appengine/google/appengine/api/yaml_listener.py
new file mode 100755
index 0000000..e7d978f
--- /dev/null
+++ b/google_appengine/google/appengine/api/yaml_listener.py
@@ -0,0 +1,218 @@
+#!/usr/bin/env python
+#
+# Copyright 2007 Google Inc.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
+
+"""PyYAML event listener
+
+Contains class which interprets YAML events and forwards them to
+a handler object.
+"""
+
+
+from google.appengine.api import yaml_errors
+import yaml
+
+
+_EVENT_METHOD_MAP = {
+ yaml.events.StreamStartEvent: 'StreamStart',
+ yaml.events.StreamEndEvent: 'StreamEnd',
+ yaml.events.DocumentStartEvent: 'DocumentStart',
+ yaml.events.DocumentEndEvent: 'DocumentEnd',
+ yaml.events.AliasEvent: 'Alias',
+ yaml.events.ScalarEvent: 'Scalar',
+ yaml.events.SequenceStartEvent: 'SequenceStart',
+ yaml.events.SequenceEndEvent: 'SequenceEnd',
+ yaml.events.MappingStartEvent: 'MappingStart',
+ yaml.events.MappingEndEvent: 'MappingEnd',
+}
+
+
+class EventHandler(object):
+ """Handler interface for parsing YAML files.
+
+ Implement this interface to define specific YAML event handling class.
+ Implementing classes instances are passed to the constructor of
+ EventListener to act as a receiver of YAML parse events.
+ """
+ def StreamStart(self, event, loader):
+ """Handle start of stream event"""
+
+ def StreamEnd(self, event, loader):
+ """Handle end of stream event"""
+
+ def DocumentStart(self, event, loader):
+ """Handle start of document event"""
+
+ def DocumentEnd(self, event, loader):
+ """Handle end of document event"""
+
+ def Alias(self, event, loader):
+ """Handle alias event"""
+
+ def Scalar(self, event, loader):
+ """Handle scalar event"""
+
+ def SequenceStart(self, event, loader):
+ """Handle start of sequence event"""
+
+ def SequenceEnd(self, event, loader):
+ """Handle end of sequence event"""
+
+ def MappingStart(self, event, loader):
+ """Handle start of mappping event"""
+
+ def MappingEnd(self, event, loader):
+ """Handle end of mapping event"""
+
+
+class EventListener(object):
+ """Helper class to re-map PyYAML events to method calls.
+
+ By default, PyYAML generates its events via a Python generator. This class
+ is a helper that iterates over the events from the PyYAML parser and forwards
+  them to a handler class in the form of method calls. For simplicity, the
+ underlying event is forwarded to the handler as a parameter to the call.
+
+ This object does not itself produce iterable objects, but is really a mapping
+ to a given handler instance.
+
+ Example use:
+
+    class PrintDocumentHandler(EventHandler):
+      def DocumentStart(self, event, loader):
+        print "A new document has been started"
+
+    EventListener(PrintDocumentHandler()).Parse('''
+      key1: value1
+      ---
+      key2: value2
+      ''')
+
+ >>> A new document has been started
+ A new document has been started
+
+ In the example above, the implemented handler class (PrintDocumentHandler)
+ has a single method which reports each time a new document is started within
+  a YAML file. It is not necessary to subclass EventListener; it merely
+  receives a PrintDocumentHandler instance. Every time a new document begins,
+  PrintDocumentHandler.DocumentStart is called with the PyYAML event passed
+  in as its parameter.
+ """
+
+ def __init__(self, event_handler):
+ """Initialize PyYAML event listener.
+
+ Constructs internal mapping directly from event type to method on actual
+ handler. This prevents reflection being used during actual parse time.
+
+ Args:
+ event_handler: Event handler that will receive mapped events. Must
+ implement at least one appropriate handler method named from
+ the values of the _EVENT_METHOD_MAP.
+
+ Raises:
+ ListenerConfigurationError if event_handler is not an EventHandler.
+ """
+ if not isinstance(event_handler, EventHandler):
+ raise yaml_errors.ListenerConfigurationError(
+ 'Must provide event handler of type yaml_listener.EventHandler')
+ self._event_method_map = {}
+ for event, method in _EVENT_METHOD_MAP.iteritems():
+ self._event_method_map[event] = getattr(event_handler, method)
+
+ def HandleEvent(self, event, loader=None):
+ """Handle individual PyYAML event.
+
+ Args:
+      event: Event to forward to the mapped handler method.
+      loader: Loader that generated the event; passed through to the handler.
+
+    Raises:
+      IllegalEvent when an unrecognized or unsupported event type is received.
+ """
+ if event.__class__ not in _EVENT_METHOD_MAP:
+ raise yaml_errors.IllegalEvent(
+ "%s is not a valid PyYAML class" % event.__class__.__name__)
+ if event.__class__ in self._event_method_map:
+ self._event_method_map[event.__class__](event, loader)
+
+ def _HandleEvents(self, events):
+ """Iterate over all events and send them to handler.
+
+    This method is not meant to be called from outside this class; use it
+    directly only in tests.
+
+    Args:
+      events: Iterator or generator containing (event, loader) pairs to
+        process.
+
+    Raises:
+      EventListenerYAMLError when a yaml.error.YAMLError is raised.
+ EventError when an exception occurs during the handling of an event.
+ """
+ for event in events:
+ try:
+ self.HandleEvent(*event)
+ except Exception, e:
+ event_object, loader = event
+ raise yaml_errors.EventError(e, event_object)
+
+ def _GenerateEventParameters(self,
+ stream,
+ loader_class=yaml.loader.SafeLoader):
+ """Creates a generator that yields event, loader parameter pairs.
+
+ For use as parameters to HandleEvent method for use by Parse method.
+ During testing, _GenerateEventParameters is simulated by allowing
+ the harness to pass in a list of pairs as the parameter.
+
+ A list of (event, loader) pairs must be passed to _HandleEvents otherwise
+ it is not possible to pass the loader instance to the handler.
+
+ Also responsible for instantiating the loader from the Loader
+ parameter.
+
+ Args:
+ stream: String document or open file object to process as per the
+ yaml.parse method. Any object that implements a 'read()' method which
+ returns a string document will work.
+      loader_class: Loader class to use as per the yaml.parse method. Used
+        to instantiate a new yaml.loader instance.
+
+    Yields:
+      Tuple (event, loader) where:
+        event: Event emitted by the PyYAML loader.
+        loader: Loader that emitted the event.
+ """
+ assert loader_class is not None
+ try:
+ loader = loader_class(stream)
+ while loader.check_event():
+ yield (loader.get_event(), loader)
+ except yaml.error.YAMLError, e:
+ raise yaml_errors.EventListenerYAMLError(e)
+
+ def Parse(self, stream, loader_class=yaml.loader.SafeLoader):
+ """Call YAML parser to generate and handle all events.
+
+    Calls PyYAML parser and sends resulting generator to the _HandleEvents
+    method for processing.
+
+ Args:
+ stream: String document or open file object to process as per the
+ yaml.parse method. Any object that implements a 'read()' method which
+ returns a string document will work with the YAML parser.
+ loader_class: Used for dependency injection.
+ """
+ self._HandleEvents(self._GenerateEventParameters(stream, loader_class))
diff --git a/google_appengine/google/appengine/api/yaml_listener.pyc b/google_appengine/google/appengine/api/yaml_listener.pyc
new file mode 100644
index 0000000..5e0a8e3
--- /dev/null
+++ b/google_appengine/google/appengine/api/yaml_listener.pyc
Binary files differ
diff --git a/google_appengine/google/appengine/api/yaml_object.py b/google_appengine/google/appengine/api/yaml_object.py
new file mode 100755
index 0000000..767f1f3
--- /dev/null
+++ b/google_appengine/google/appengine/api/yaml_object.py
@@ -0,0 +1,294 @@
+#!/usr/bin/env python
+#
+# Copyright 2007 Google Inc.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
+
+"""Builder for mapping YAML documents to object instances.
+
+ObjectBuilder is responsible for mapping a YAML document to classes defined
+using the validation mechanism (see google.appengine.api.validation.py).
+"""
+
+
+
+
+
+from google.appengine.api import validation
+from google.appengine.api import yaml_listener
+from google.appengine.api import yaml_builder
+from google.appengine.api import yaml_errors
+
+import yaml
+
+
+class _ObjectMapper(object):
+ """Wrapper used for mapping attributes from a yaml file to an object.
+
+ This wrapper is required because objects do not know what property they are
+  associated with at creation time, and therefore cannot be instantiated
+ with the correct class until they are mapped to their parents.
+ """
+
+ def __init__(self):
+ """Object mapper starts off with empty value."""
+ self.value = None
+ self.seen = set()
+
+ def set_value(self, value):
+ """Set value of instance to map to.
+
+ Args:
+ value: Instance that this mapper maps to.
+ """
+ self.value = value
+
+  def see(self, key):
+    """Record key as seen, raising DuplicateAttribute if already seen."""
+    if key in self.seen:
+      raise yaml_errors.DuplicateAttribute("Duplicate attribute '%s'." % key)
+    self.seen.add(key)
+
+
+class _ObjectSequencer(object):
+ """Wrapper used for building sequences from a yaml file to a list.
+
+ This wrapper is required because objects do not know what property they are
+  associated with at creation time, and therefore cannot be instantiated
+ with the correct class until they are mapped to their parents.
+ """
+
+ def __init__(self):
+ """Object sequencer starts off with empty value."""
+ self.value = []
+ self.constructor = None
+
+ def set_constructor(self, constructor):
+ """Set object used for constructing new sequence instances.
+
+ Args:
+ constructor: Callable which can accept no arguments. Must return
+ an instance of the appropriate class for the container.
+ """
+ self.constructor = constructor
+
+
+class ObjectBuilder(yaml_builder.Builder):
+ """Builder used for constructing validated objects.
+
+ Given a class that implements validation.Validated, it will parse a YAML
+ document and attempt to build an instance of the class. It does so by mapping
+ YAML keys to Python attributes. ObjectBuilder will only map YAML fields
+  to attributes defined in the Validated subclass's 'ATTRIBUTES' definition.
+  Lists are mapped to validation.Repeated attributes and maps are mapped to
+  validation.Type properties.
+
+ For a YAML map to be compatible with a class, the class must have a
+ constructor that can be called with no parameters. If the provided type
+  does not have such a constructor, a parse-time error will occur.
+ """
+
+ def __init__(self, default_class):
+ """Initialize validated object builder.
+
+ Args:
+ default_class: Class that is instantiated upon the detection of a new
+ document. An instance of this class will act as the document itself.
+ """
+ self.default_class = default_class
+
+ def _GetRepeated(self, attribute):
+ """Get the ultimate type of a repeated validator.
+
+ Looks for an instance of validation.Repeated, returning its constructor.
+
+ Args:
+ attribute: Repeated validator attribute to find type for.
+
+ Returns:
+      The constructor of the Repeated validator if found, otherwise object.
+ """
+ if isinstance(attribute, validation.Optional):
+ attribute = attribute.validator
+ if isinstance(attribute, validation.Repeated):
+ return attribute.constructor
+ return object
+
+ def BuildDocument(self):
+ """Instantiate new root validated object.
+
+ Returns:
+ New instance of validated object.
+ """
+ return self.default_class()
+
+ def BuildMapping(self, top_value):
+ """New instance of object mapper for opening map scope.
+
+ Args:
+ top_value: Parent of nested object.
+
+ Returns:
+ New instance of object mapper.
+ """
+ result = _ObjectMapper()
+ if isinstance(top_value, self.default_class):
+ result.value = top_value
+ return result
+
+ def EndMapping(self, top_value, mapping):
+ """When leaving scope, makes sure new object is initialized.
+
+ This method is mainly for picking up on any missing required attributes.
+
+ Args:
+ top_value: Parent of closing mapping object.
+ mapping: _ObjectMapper instance that is leaving scope.
+ """
+ try:
+ mapping.value.CheckInitialized()
+ except validation.ValidationError:
+ raise
+ except Exception, e:
+ try:
+ error_str = str(e)
+ except Exception:
+ error_str = '<unknown>'
+
+ raise validation.ValidationError("Invalid object:\n%s" % error_str, e)
+
+ def BuildSequence(self, top_value):
+ """New instance of object sequence.
+
+ Args:
+ top_value: Object that contains the new sequence.
+
+ Returns:
+ A new _ObjectSequencer instance.
+ """
+ return _ObjectSequencer()
+
+ def MapTo(self, subject, key, value):
+ """Map key-value pair to an objects attribute.
+
+ Args:
+ subject: _ObjectMapper of object that will receive new attribute.
+ key: Key of attribute.
+ value: Value of new attribute.
+
+ Raises:
+ UnexpectedAttribute when the key is not a validated attribute of
+ the subject value class.
+ """
+ assert subject.value is not None
+ if key not in subject.value.ATTRIBUTES:
+ raise yaml_errors.UnexpectedAttribute(
+ 'Unexpected attribute \'%s\' for object of type %s.' %
+ (key, str(subject.value.__class__)))
+
+ if isinstance(value, _ObjectMapper):
+ value.set_value(subject.value.GetAttribute(key).expected_type())
+ value = value.value
+ elif isinstance(value, _ObjectSequencer):
+ value.set_constructor(self._GetRepeated(subject.value.ATTRIBUTES[key]))
+ value = value.value
+
+ subject.see(key)
+ try:
+ setattr(subject.value, key, value)
+ except validation.ValidationError, e:
+ try:
+ error_str = str(e)
+ except Exception:
+ error_str = '<unknown>'
+
+ try:
+ value_str = str(value)
+ except Exception:
+ value_str = '<unknown>'
+
+ e.message = ("Unable to assign value '%s' to attribute '%s':\n%s" %
+ (value_str, key, error_str))
+ raise e
+ except Exception, e:
+ try:
+ error_str = str(e)
+ except Exception:
+ error_str = '<unknown>'
+
+ try:
+ value_str = str(value)
+ except Exception:
+ value_str = '<unknown>'
+
+ message = ("Unable to assign value '%s' to attribute '%s':\n%s" %
+ (value_str, key, error_str))
+ raise validation.ValidationError(message, e)
+
+ def AppendTo(self, subject, value):
+ """Append a value to a sequence.
+
+ Args:
+      subject: _ObjectSequencer that is receiving new value.
+ value: Value that is being appended to sequence.
+ """
+ if isinstance(value, _ObjectMapper):
+ value.set_value(subject.constructor())
+ subject.value.append(value.value)
+ else:
+ subject.value.append(value)
+
+
+def BuildObjects(default_class, stream, loader=yaml.loader.SafeLoader):
+ """Build objects from stream.
+
+ Handles the basic case of loading all the objects from a stream.
+
+ Args:
+ default_class: Class that is instantiated upon the detection of a new
+ document. An instance of this class will act as the document itself.
+ stream: String document or open file object to process as per the
+ yaml.parse method. Any object that implements a 'read()' method which
+ returns a string document will work with the YAML parser.
+    loader: Loader class used for dependency injection.
+
+  Returns:
+    Tuple of default_class instances parsed from the stream.
+ """
+ builder = ObjectBuilder(default_class)
+ handler = yaml_builder.BuilderHandler(builder)
+ listener = yaml_listener.EventListener(handler)
+
+ listener.Parse(stream, loader)
+ return handler.GetResults()
+
+
+def BuildSingleObject(default_class, stream, loader=yaml.loader.SafeLoader):
+ """Build object from stream.
+
+ Handles the basic case of loading a single object from a stream.
+
+ Args:
+ default_class: Class that is instantiated upon the detection of a new
+ document. An instance of this class will act as the document itself.
+ stream: String document or open file object to process as per the
+ yaml.parse method. Any object that implements a 'read()' method which
+ returns a string document will work with the YAML parser.
+    loader: Loader class used for dependency injection.
+
+  Returns:
+    Single instance of default_class parsed from the stream.
+
+  Raises:
+    EmptyConfigurationFile when the stream contains no documents.
+    MultipleConfigurationFile when the stream contains more than one
+      document.
+  """
+ definitions = BuildObjects(default_class, stream, loader)
+
+ if len(definitions) < 1:
+ raise yaml_errors.EmptyConfigurationFile()
+ if len(definitions) > 1:
+ raise yaml_errors.MultipleConfigurationFile()
+ return definitions[0]
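+
+
+# Illustrative usage sketch (commented out): loading a single document into
+# a validated class. The Greeting class and its ATTRIBUTES schema are
+# hypothetical assumptions about the validation module's API.
+#
+#   from google.appengine.api import validation
+#
+#   class Greeting(validation.Validated):
+#     ATTRIBUTES = {'message': validation.Type(str)}   # hypothetical schema
+#
+#   greeting = BuildSingleObject(Greeting, 'message: hello')
+#   print greeting.message               # hello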
diff --git a/google_appengine/google/appengine/api/yaml_object.pyc b/google_appengine/google/appengine/api/yaml_object.pyc
new file mode 100644
index 0000000..7ec78cd
--- /dev/null
+++ b/google_appengine/google/appengine/api/yaml_object.pyc
Binary files differ
diff --git a/google_appengine/google/appengine/base/__init__.py b/google_appengine/google/appengine/base/__init__.py
new file mode 100755
index 0000000..c33ae80
--- /dev/null
+++ b/google_appengine/google/appengine/base/__init__.py
@@ -0,0 +1,16 @@
+#!/usr/bin/env python
+#
+# Copyright 2007 Google Inc.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
diff --git a/google_appengine/google/appengine/base/__init__.pyc b/google_appengine/google/appengine/base/__init__.pyc
new file mode 100644
index 0000000..ea4e353
--- /dev/null
+++ b/google_appengine/google/appengine/base/__init__.pyc
Binary files differ
diff --git a/google_appengine/google/appengine/base/capabilities_pb.py b/google_appengine/google/appengine/base/capabilities_pb.py
new file mode 100644
index 0000000..c0434ef
--- /dev/null
+++ b/google_appengine/google/appengine/base/capabilities_pb.py
@@ -0,0 +1,451 @@
+#!/usr/bin/env python
+#
+# Copyright 2007 Google Inc.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
+
+from google.net.proto import ProtocolBuffer
+import array
+import dummy_thread as thread
+
+__pychecker__ = """maxreturns=0 maxbranches=0 no-callinit
+ unusednames=printElemNumber,debug_strs no-special"""
+
+class CapabilityConfigList(ProtocolBuffer.ProtocolMessage):
+ has_default_config_ = 0
+ default_config_ = None
+
+ def __init__(self, contents=None):
+ self.config_ = []
+ self.lazy_init_lock_ = thread.allocate_lock()
+ if contents is not None: self.MergeFromString(contents)
+
+ def config_size(self): return len(self.config_)
+ def config_list(self): return self.config_
+
+ def config(self, i):
+ return self.config_[i]
+
+ def mutable_config(self, i):
+ return self.config_[i]
+
+ def add_config(self):
+ x = CapabilityConfig()
+ self.config_.append(x)
+ return x
+
+ def clear_config(self):
+ self.config_ = []
+ def default_config(self):
+ if self.default_config_ is None:
+ self.lazy_init_lock_.acquire()
+ try:
+ if self.default_config_ is None: self.default_config_ = CapabilityConfig()
+ finally:
+ self.lazy_init_lock_.release()
+ return self.default_config_
+
+ def mutable_default_config(self): self.has_default_config_ = 1; return self.default_config()
+
+ def clear_default_config(self):
+ if self.has_default_config_:
+ self.has_default_config_ = 0;
+ if self.default_config_ is not None: self.default_config_.Clear()
+
+ def has_default_config(self): return self.has_default_config_
+
+
+ def MergeFrom(self, x):
+ assert x is not self
+ for i in xrange(x.config_size()): self.add_config().CopyFrom(x.config(i))
+ if (x.has_default_config()): self.mutable_default_config().MergeFrom(x.default_config())
+
+ def Equals(self, x):
+ if x is self: return 1
+ if len(self.config_) != len(x.config_): return 0
+ for e1, e2 in zip(self.config_, x.config_):
+ if e1 != e2: return 0
+ if self.has_default_config_ != x.has_default_config_: return 0
+ if self.has_default_config_ and self.default_config_ != x.default_config_: return 0
+ return 1
+
+ def IsInitialized(self, debug_strs=None):
+ initialized = 1
+ for p in self.config_:
+ if not p.IsInitialized(debug_strs): initialized=0
+ if (self.has_default_config_ and not self.default_config_.IsInitialized(debug_strs)): initialized = 0
+ return initialized
+
+ def ByteSize(self):
+ n = 0
+ n += 1 * len(self.config_)
+ for i in xrange(len(self.config_)): n += self.lengthString(self.config_[i].ByteSize())
+ if (self.has_default_config_): n += 1 + self.lengthString(self.default_config_.ByteSize())
+ return n + 0
+
+ def Clear(self):
+ self.clear_config()
+ self.clear_default_config()
+
+ def OutputUnchecked(self, out):
+ for i in xrange(len(self.config_)):
+ out.putVarInt32(10)
+ out.putVarInt32(self.config_[i].ByteSize())
+ self.config_[i].OutputUnchecked(out)
+ if (self.has_default_config_):
+ out.putVarInt32(18)
+ out.putVarInt32(self.default_config_.ByteSize())
+ self.default_config_.OutputUnchecked(out)
+
+ def TryMerge(self, d):
+ while d.avail() > 0:
+ tt = d.getVarInt32()
+ if tt == 10:
+ length = d.getVarInt32()
+ tmp = ProtocolBuffer.Decoder(d.buffer(), d.pos(), d.pos() + length)
+ d.skip(length)
+ self.add_config().TryMerge(tmp)
+ continue
+ if tt == 18:
+ length = d.getVarInt32()
+ tmp = ProtocolBuffer.Decoder(d.buffer(), d.pos(), d.pos() + length)
+ d.skip(length)
+ self.mutable_default_config().TryMerge(tmp)
+ continue
+ if (tt == 0): raise ProtocolBuffer.ProtocolBufferDecodeError
+ d.skipData(tt)
+
+
+ def __str__(self, prefix="", printElemNumber=0):
+ res=""
+ cnt=0
+ for e in self.config_:
+ elm=""
+ if printElemNumber: elm="(%d)" % cnt
+ res+=prefix+("config%s <\n" % elm)
+ res+=e.__str__(prefix + " ", printElemNumber)
+ res+=prefix+">\n"
+ cnt+=1
+ if self.has_default_config_:
+ res+=prefix+"default_config <\n"
+ res+=self.default_config_.__str__(prefix + " ", printElemNumber)
+ res+=prefix+">\n"
+ return res
+
+
+ def _BuildTagLookupTable(sparse, maxtag, default=None):
+ return tuple([sparse.get(i, default) for i in xrange(0, 1+maxtag)])
+
+ kconfig = 1
+ kdefault_config = 2
+
+ _TEXT = _BuildTagLookupTable({
+ 0: "ErrorCode",
+ 1: "config",
+ 2: "default_config",
+ }, 2)
+
+ _TYPES = _BuildTagLookupTable({
+ 0: ProtocolBuffer.Encoder.NUMERIC,
+ 1: ProtocolBuffer.Encoder.STRING,
+ 2: ProtocolBuffer.Encoder.STRING,
+ }, 2, ProtocolBuffer.Encoder.MAX_TYPE)
+
+ _STYLE = """"""
+ _STYLE_CONTENT_TYPE = """"""
+class CapabilityConfig(ProtocolBuffer.ProtocolMessage):
+
+ ENABLED = 1
+ SCHEDULED = 2
+ DISABLED = 3
+ UNKNOWN = 4
+
+ _Status_NAMES = {
+ 1: "ENABLED",
+ 2: "SCHEDULED",
+ 3: "DISABLED",
+ 4: "UNKNOWN",
+ }
+
+ def Status_Name(cls, x): return cls._Status_NAMES.get(x, "")
+ Status_Name = classmethod(Status_Name)
+
+ has_package_ = 0
+ package_ = ""
+ has_capability_ = 0
+ capability_ = ""
+ has_status_ = 0
+ status_ = 4
+ has_scheduled_time_ = 0
+ scheduled_time_ = ""
+ has_internal_message_ = 0
+ internal_message_ = ""
+ has_admin_message_ = 0
+ admin_message_ = ""
+ has_error_message_ = 0
+ error_message_ = ""
+
+ def __init__(self, contents=None):
+ if contents is not None: self.MergeFromString(contents)
+
+ def package(self): return self.package_
+
+ def set_package(self, x):
+ self.has_package_ = 1
+ self.package_ = x
+
+ def clear_package(self):
+ if self.has_package_:
+ self.has_package_ = 0
+ self.package_ = ""
+
+ def has_package(self): return self.has_package_
+
+ def capability(self): return self.capability_
+
+ def set_capability(self, x):
+ self.has_capability_ = 1
+ self.capability_ = x
+
+ def clear_capability(self):
+ if self.has_capability_:
+ self.has_capability_ = 0
+ self.capability_ = ""
+
+ def has_capability(self): return self.has_capability_
+
+ def status(self): return self.status_
+
+ def set_status(self, x):
+ self.has_status_ = 1
+ self.status_ = x
+
+ def clear_status(self):
+ if self.has_status_:
+ self.has_status_ = 0
+ self.status_ = 4
+
+ def has_status(self): return self.has_status_
+
+ def scheduled_time(self): return self.scheduled_time_
+
+ def set_scheduled_time(self, x):
+ self.has_scheduled_time_ = 1
+ self.scheduled_time_ = x
+
+ def clear_scheduled_time(self):
+ if self.has_scheduled_time_:
+ self.has_scheduled_time_ = 0
+ self.scheduled_time_ = ""
+
+ def has_scheduled_time(self): return self.has_scheduled_time_
+
+ def internal_message(self): return self.internal_message_
+
+ def set_internal_message(self, x):
+ self.has_internal_message_ = 1
+ self.internal_message_ = x
+
+ def clear_internal_message(self):
+ if self.has_internal_message_:
+ self.has_internal_message_ = 0
+ self.internal_message_ = ""
+
+ def has_internal_message(self): return self.has_internal_message_
+
+ def admin_message(self): return self.admin_message_
+
+ def set_admin_message(self, x):
+ self.has_admin_message_ = 1
+ self.admin_message_ = x
+
+ def clear_admin_message(self):
+ if self.has_admin_message_:
+ self.has_admin_message_ = 0
+ self.admin_message_ = ""
+
+ def has_admin_message(self): return self.has_admin_message_
+
+ def error_message(self): return self.error_message_
+
+ def set_error_message(self, x):
+ self.has_error_message_ = 1
+ self.error_message_ = x
+
+ def clear_error_message(self):
+ if self.has_error_message_:
+ self.has_error_message_ = 0
+ self.error_message_ = ""
+
+ def has_error_message(self): return self.has_error_message_
+
+
+ def MergeFrom(self, x):
+ assert x is not self
+ if (x.has_package()): self.set_package(x.package())
+ if (x.has_capability()): self.set_capability(x.capability())
+ if (x.has_status()): self.set_status(x.status())
+ if (x.has_scheduled_time()): self.set_scheduled_time(x.scheduled_time())
+ if (x.has_internal_message()): self.set_internal_message(x.internal_message())
+ if (x.has_admin_message()): self.set_admin_message(x.admin_message())
+ if (x.has_error_message()): self.set_error_message(x.error_message())
+
+ def Equals(self, x):
+ if x is self: return 1
+ if self.has_package_ != x.has_package_: return 0
+ if self.has_package_ and self.package_ != x.package_: return 0
+ if self.has_capability_ != x.has_capability_: return 0
+ if self.has_capability_ and self.capability_ != x.capability_: return 0
+ if self.has_status_ != x.has_status_: return 0
+ if self.has_status_ and self.status_ != x.status_: return 0
+ if self.has_scheduled_time_ != x.has_scheduled_time_: return 0
+ if self.has_scheduled_time_ and self.scheduled_time_ != x.scheduled_time_: return 0
+ if self.has_internal_message_ != x.has_internal_message_: return 0
+ if self.has_internal_message_ and self.internal_message_ != x.internal_message_: return 0
+ if self.has_admin_message_ != x.has_admin_message_: return 0
+ if self.has_admin_message_ and self.admin_message_ != x.admin_message_: return 0
+ if self.has_error_message_ != x.has_error_message_: return 0
+ if self.has_error_message_ and self.error_message_ != x.error_message_: return 0
+ return 1
+
+ def IsInitialized(self, debug_strs=None):
+ initialized = 1
+ if (not self.has_package_):
+ initialized = 0
+ if debug_strs is not None:
+ debug_strs.append('Required field: package not set.')
+ if (not self.has_capability_):
+ initialized = 0
+ if debug_strs is not None:
+ debug_strs.append('Required field: capability not set.')
+ return initialized
+
+ def ByteSize(self):
+ n = 0
+ n += self.lengthString(len(self.package_))
+ n += self.lengthString(len(self.capability_))
+ if (self.has_status_): n += 1 + self.lengthVarInt64(self.status_)
+ if (self.has_scheduled_time_): n += 1 + self.lengthString(len(self.scheduled_time_))
+ if (self.has_internal_message_): n += 1 + self.lengthString(len(self.internal_message_))
+ if (self.has_admin_message_): n += 1 + self.lengthString(len(self.admin_message_))
+ if (self.has_error_message_): n += 1 + self.lengthString(len(self.error_message_))
+ return n + 2
+
+ def Clear(self):
+ self.clear_package()
+ self.clear_capability()
+ self.clear_status()
+ self.clear_scheduled_time()
+ self.clear_internal_message()
+ self.clear_admin_message()
+ self.clear_error_message()
+
+ def OutputUnchecked(self, out):
+ out.putVarInt32(10)
+ out.putPrefixedString(self.package_)
+ out.putVarInt32(18)
+ out.putPrefixedString(self.capability_)
+ if (self.has_status_):
+ out.putVarInt32(24)
+ out.putVarInt32(self.status_)
+ if (self.has_internal_message_):
+ out.putVarInt32(34)
+ out.putPrefixedString(self.internal_message_)
+ if (self.has_admin_message_):
+ out.putVarInt32(42)
+ out.putPrefixedString(self.admin_message_)
+ if (self.has_error_message_):
+ out.putVarInt32(50)
+ out.putPrefixedString(self.error_message_)
+ if (self.has_scheduled_time_):
+ out.putVarInt32(58)
+ out.putPrefixedString(self.scheduled_time_)
+
+ def TryMerge(self, d):
+ while d.avail() > 0:
+ tt = d.getVarInt32()
+ if tt == 10:
+ self.set_package(d.getPrefixedString())
+ continue
+ if tt == 18:
+ self.set_capability(d.getPrefixedString())
+ continue
+ if tt == 24:
+ self.set_status(d.getVarInt32())
+ continue
+ if tt == 34:
+ self.set_internal_message(d.getPrefixedString())
+ continue
+ if tt == 42:
+ self.set_admin_message(d.getPrefixedString())
+ continue
+ if tt == 50:
+ self.set_error_message(d.getPrefixedString())
+ continue
+ if tt == 58:
+ self.set_scheduled_time(d.getPrefixedString())
+ continue
+ if (tt == 0): raise ProtocolBuffer.ProtocolBufferDecodeError
+ d.skipData(tt)
+
+
+ def __str__(self, prefix="", printElemNumber=0):
+ res=""
+ if self.has_package_: res+=prefix+("package: %s\n" % self.DebugFormatString(self.package_))
+ if self.has_capability_: res+=prefix+("capability: %s\n" % self.DebugFormatString(self.capability_))
+ if self.has_status_: res+=prefix+("status: %s\n" % self.DebugFormatInt32(self.status_))
+ if self.has_scheduled_time_: res+=prefix+("scheduled_time: %s\n" % self.DebugFormatString(self.scheduled_time_))
+ if self.has_internal_message_: res+=prefix+("internal_message: %s\n" % self.DebugFormatString(self.internal_message_))
+ if self.has_admin_message_: res+=prefix+("admin_message: %s\n" % self.DebugFormatString(self.admin_message_))
+ if self.has_error_message_: res+=prefix+("error_message: %s\n" % self.DebugFormatString(self.error_message_))
+ return res
+
+
+ def _BuildTagLookupTable(sparse, maxtag, default=None):
+ return tuple([sparse.get(i, default) for i in xrange(0, 1+maxtag)])
+
+ kpackage = 1
+ kcapability = 2
+ kstatus = 3
+ kscheduled_time = 7
+ kinternal_message = 4
+ kadmin_message = 5
+ kerror_message = 6
+
+ _TEXT = _BuildTagLookupTable({
+ 0: "ErrorCode",
+ 1: "package",
+ 2: "capability",
+ 3: "status",
+ 4: "internal_message",
+ 5: "admin_message",
+ 6: "error_message",
+ 7: "scheduled_time",
+ }, 7)
+
+ _TYPES = _BuildTagLookupTable({
+ 0: ProtocolBuffer.Encoder.NUMERIC,
+ 1: ProtocolBuffer.Encoder.STRING,
+ 2: ProtocolBuffer.Encoder.STRING,
+ 3: ProtocolBuffer.Encoder.NUMERIC,
+ 4: ProtocolBuffer.Encoder.STRING,
+ 5: ProtocolBuffer.Encoder.STRING,
+ 6: ProtocolBuffer.Encoder.STRING,
+ 7: ProtocolBuffer.Encoder.STRING,
+ }, 7, ProtocolBuffer.Encoder.MAX_TYPE)
+
+ _STYLE = """"""
+ _STYLE_CONTENT_TYPE = """"""
+
+__all__ = ['CapabilityConfigList','CapabilityConfig']
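+
+# Illustrative usage sketch (commented out), exercising only accessors
+# defined above; 'datastore_v3' and 'write' are example values:
+#
+#   config = CapabilityConfig()
+#   config.set_package('datastore_v3')
+#   config.set_capability('write')
+#   config.set_status(CapabilityConfig.ENABLED)
+#   assert config.IsInitialized()        # package and capability are required
+#   print config                         # text dump via __str__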
diff --git a/google_appengine/google/appengine/base/capabilities_pb.pyc b/google_appengine/google/appengine/base/capabilities_pb.pyc
new file mode 100644
index 0000000..c6a3645
--- /dev/null
+++ b/google_appengine/google/appengine/base/capabilities_pb.pyc
Binary files differ
diff --git a/google_appengine/google/appengine/cron/GrocLexer.py b/google_appengine/google/appengine/cron/GrocLexer.py
new file mode 100755
index 0000000..7224334
--- /dev/null
+++ b/google_appengine/google/appengine/cron/GrocLexer.py
@@ -0,0 +1,1669 @@
+#!/usr/bin/env python
+#
+# Copyright 2007 Google Inc.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
+
+import sys
+from antlr3 import *
+from antlr3.compat import set, frozenset
+
+
+HIDDEN = BaseRecognizer.HIDDEN
+
+THIRD=12
+SEPTEMBER=35
+FOURTH=13
+SECOND=11
+WEDNESDAY=21
+NOVEMBER=37
+SATURDAY=24
+JULY=33
+APRIL=30
+DIGITS=8
+OCTOBER=36
+MAY=31
+EVERY=6
+FEBRUARY=28
+MONDAY=19
+SUNDAY=25
+DAY=18
+JUNE=32
+OF=4
+MARCH=29
+EOF=-1
+JANUARY=27
+MONTH=26
+FRIDAY=23
+MINUTES=17
+FIFTH=14
+TIME=5
+WS=40
+QUARTER=39
+THURSDAY=22
+COMMA=9
+DECEMBER=38
+AUGUST=34
+DIGIT=7
+TUESDAY=20
+HOURS=16
+FOURTH_OR_FIFTH=15
+FIRST=10
+
+
+class GrocLexer(Lexer):
+
+ grammarFileName = "Groc.g"
+ antlr_version = version_str_to_tuple("3.1.1")
+ antlr_version_str = "3.1.1"
+
+ def __init__(self, input=None, state=None):
+ if state is None:
+ state = RecognizerSharedState()
+ Lexer.__init__(self, input, state)
+
+ self.dfa25 = self.DFA25(
+ self, 25,
+ eot = self.DFA25_eot,
+ eof = self.DFA25_eof,
+ min = self.DFA25_min,
+ max = self.DFA25_max,
+ accept = self.DFA25_accept,
+ special = self.DFA25_special,
+ transition = self.DFA25_transition
+ )
+
+
+
+
+
+
+ def mTIME(self, ):
+
+ try:
+ _type = TIME
+ _channel = DEFAULT_CHANNEL
+
+ pass
+ alt1 = 4
+ LA1 = self.input.LA(1)
+ if LA1 == 48:
+ LA1_1 = self.input.LA(2)
+
+ if (LA1_1 == 58) :
+ alt1 = 1
+ elif ((48 <= LA1_1 <= 57)) :
+ alt1 = 2
+ else:
+ nvae = NoViableAltException("", 1, 1, self.input)
+
+ raise nvae
+
+ elif LA1 == 49:
+ LA1_2 = self.input.LA(2)
+
+ if (LA1_2 == 58) :
+ alt1 = 1
+ elif ((48 <= LA1_2 <= 57)) :
+ alt1 = 3
+ else:
+ nvae = NoViableAltException("", 1, 2, self.input)
+
+ raise nvae
+
+ elif LA1 == 50:
+ LA1_3 = self.input.LA(2)
+
+ if ((48 <= LA1_3 <= 51)) :
+ alt1 = 4
+ elif (LA1_3 == 58) :
+ alt1 = 1
+ else:
+ nvae = NoViableAltException("", 1, 3, self.input)
+
+ raise nvae
+
+ elif LA1 == 51 or LA1 == 52 or LA1 == 53 or LA1 == 54 or LA1 == 55 or LA1 == 56 or LA1 == 57:
+ alt1 = 1
+ else:
+ nvae = NoViableAltException("", 1, 0, self.input)
+
+ raise nvae
+
+ if alt1 == 1:
+ pass
+ self.mDIGIT()
+
+
+ elif alt1 == 2:
+ pass
+ pass
+ self.match(48)
+ self.mDIGIT()
+
+
+
+
+
+ elif alt1 == 3:
+ pass
+ pass
+ self.match(49)
+ self.mDIGIT()
+
+
+
+
+
+ elif alt1 == 4:
+ pass
+ pass
+ self.match(50)
+ self.matchRange(48, 51)
+
+
+
+
+
+
+ self.match(58)
+ pass
+ self.matchRange(48, 53)
+ self.mDIGIT()
+
+
+
+
+
+
+ self._state.type = _type
+ self._state.channel = _channel
+
+ finally:
+
+ pass
+
+
+
+
+ def mFIRST(self, ):
+
+ try:
+ _type = FIRST
+ _channel = DEFAULT_CHANNEL
+
+ pass
+ alt2 = 2
+ LA2_0 = self.input.LA(1)
+
+ if (LA2_0 == 49) :
+ alt2 = 1
+ elif (LA2_0 == 102) :
+ alt2 = 2
+ else:
+ nvae = NoViableAltException("", 2, 0, self.input)
+
+ raise nvae
+
+ if alt2 == 1:
+ pass
+ self.match("1st")
+
+
+ elif alt2 == 2:
+ pass
+ self.match("first")
+
+
+
+
+
+
+ self._state.type = _type
+ self._state.channel = _channel
+
+ finally:
+
+ pass
+
+
+
+
+ def mSECOND(self, ):
+
+ try:
+ _type = SECOND
+ _channel = DEFAULT_CHANNEL
+
+ pass
+ alt3 = 2
+ LA3_0 = self.input.LA(1)
+
+ if (LA3_0 == 50) :
+ alt3 = 1
+ elif (LA3_0 == 115) :
+ alt3 = 2
+ else:
+ nvae = NoViableAltException("", 3, 0, self.input)
+
+ raise nvae
+
+ if alt3 == 1:
+ pass
+ self.match("2nd")
+
+
+ elif alt3 == 2:
+ pass
+ self.match("second")
+
+
+
+
+
+
+ self._state.type = _type
+ self._state.channel = _channel
+
+ finally:
+
+ pass
+
+
+
+
+ def mTHIRD(self, ):
+
+ try:
+ _type = THIRD
+ _channel = DEFAULT_CHANNEL
+
+ pass
+ alt4 = 2
+ LA4_0 = self.input.LA(1)
+
+ if (LA4_0 == 51) :
+ alt4 = 1
+ elif (LA4_0 == 116) :
+ alt4 = 2
+ else:
+ nvae = NoViableAltException("", 4, 0, self.input)
+
+ raise nvae
+
+ if alt4 == 1:
+ pass
+ self.match("3rd")
+
+
+ elif alt4 == 2:
+ pass
+ self.match("third")
+
+
+
+
+
+
+ self._state.type = _type
+ self._state.channel = _channel
+
+ finally:
+
+ pass
+
+
+
+
+ def mFOURTH(self, ):
+
+ try:
+ _type = FOURTH
+ _channel = DEFAULT_CHANNEL
+
+ pass
+ pass
+ self.match("4th")
+
+
+
+
+
+
+ self._state.type = _type
+ self._state.channel = _channel
+
+ finally:
+
+ pass
+
+
+
+
+ def mFIFTH(self, ):
+
+ try:
+ _type = FIFTH
+ _channel = DEFAULT_CHANNEL
+
+ pass
+ pass
+ self.match("5th")
+
+
+
+
+
+
+ self._state.type = _type
+ self._state.channel = _channel
+
+ finally:
+
+ pass
+
+
+
+
+ def mFOURTH_OR_FIFTH(self, ):
+
+ try:
+ _type = FOURTH_OR_FIFTH
+ _channel = DEFAULT_CHANNEL
+
+ pass
+ alt5 = 2
+ LA5_0 = self.input.LA(1)
+
+ if (LA5_0 == 102) :
+ LA5_1 = self.input.LA(2)
+
+ if (LA5_1 == 111) :
+ alt5 = 1
+ elif (LA5_1 == 105) :
+ alt5 = 2
+ else:
+ nvae = NoViableAltException("", 5, 1, self.input)
+
+ raise nvae
+
+ else:
+ nvae = NoViableAltException("", 5, 0, self.input)
+
+ raise nvae
+
+ if alt5 == 1:
+ pass
+ pass
+ self.match("fourth")
+ _type = FOURTH;
+
+
+
+
+
+ elif alt5 == 2:
+ pass
+ pass
+ self.match("fifth")
+ _type = FIFTH;
+
+
+
+
+
+
+
+
+
+ self._state.type = _type
+ self._state.channel = _channel
+
+ finally:
+
+ pass
+
+
+
+
+ def mDAY(self, ):
+
+ try:
+ _type = DAY
+ _channel = DEFAULT_CHANNEL
+
+ pass
+ self.match("day")
+
+
+
+ self._state.type = _type
+ self._state.channel = _channel
+
+ finally:
+
+ pass
+
+
+
+
+ def mMONDAY(self, ):
+
+ try:
+ _type = MONDAY
+ _channel = DEFAULT_CHANNEL
+
+ pass
+ self.match("mon")
+ alt6 = 2
+ LA6_0 = self.input.LA(1)
+
+ if (LA6_0 == 100) :
+ alt6 = 1
+ if alt6 == 1:
+ pass
+ self.match("day")
+
+
+
+
+
+
+ self._state.type = _type
+ self._state.channel = _channel
+
+ finally:
+
+ pass
+
+
+
+
+ def mTUESDAY(self, ):
+
+ try:
+ _type = TUESDAY
+ _channel = DEFAULT_CHANNEL
+
+ pass
+ self.match("tue")
+ alt7 = 2
+ LA7_0 = self.input.LA(1)
+
+ if (LA7_0 == 115) :
+ alt7 = 1
+ if alt7 == 1:
+ pass
+ self.match("sday")
+
+
+
+
+
+
+ self._state.type = _type
+ self._state.channel = _channel
+
+ finally:
+
+ pass
+
+
+
+
+ def mWEDNESDAY(self, ):
+
+ try:
+ _type = WEDNESDAY
+ _channel = DEFAULT_CHANNEL
+
+ pass
+ self.match("wed")
+ alt8 = 2
+ LA8_0 = self.input.LA(1)
+
+ if (LA8_0 == 110) :
+ alt8 = 1
+ if alt8 == 1:
+ pass
+ self.match("nesday")
+
+
+
+
+
+
+ self._state.type = _type
+ self._state.channel = _channel
+
+ finally:
+
+ pass
+
+
+
+
+ def mTHURSDAY(self, ):
+
+ try:
+ _type = THURSDAY
+ _channel = DEFAULT_CHANNEL
+
+ pass
+ self.match("thu")
+ alt9 = 2
+ LA9_0 = self.input.LA(1)
+
+ if (LA9_0 == 114) :
+ alt9 = 1
+ if alt9 == 1:
+ pass
+ self.match("rsday")
+
+
+
+
+
+
+ self._state.type = _type
+ self._state.channel = _channel
+
+ finally:
+
+ pass
+
+
+
+
+ def mFRIDAY(self, ):
+
+ try:
+ _type = FRIDAY
+ _channel = DEFAULT_CHANNEL
+
+ pass
+ self.match("fri")
+ alt10 = 2
+ LA10_0 = self.input.LA(1)
+
+ if (LA10_0 == 100) :
+ alt10 = 1
+ if alt10 == 1:
+ pass
+ self.match("day")
+
+
+
+
+
+
+ self._state.type = _type
+ self._state.channel = _channel
+
+ finally:
+
+ pass
+
+
+
+
+ def mSATURDAY(self, ):
+
+ try:
+ _type = SATURDAY
+ _channel = DEFAULT_CHANNEL
+
+ pass
+ self.match("sat")
+ alt11 = 2
+ LA11_0 = self.input.LA(1)
+
+ if (LA11_0 == 117) :
+ alt11 = 1
+ if alt11 == 1:
+ pass
+ self.match("urday")
+
+
+
+
+
+
+ self._state.type = _type
+ self._state.channel = _channel
+
+ finally:
+
+ pass
+
+
+
+
+ def mSUNDAY(self, ):
+
+ try:
+ _type = SUNDAY
+ _channel = DEFAULT_CHANNEL
+
+ pass
+ self.match("sun")
+ alt12 = 2
+ LA12_0 = self.input.LA(1)
+
+ if (LA12_0 == 100) :
+ alt12 = 1
+ if alt12 == 1:
+ pass
+ self.match("day")
+
+
+
+
+
+
+ self._state.type = _type
+ self._state.channel = _channel
+
+ finally:
+
+ pass
+
+
+
+
+ def mJANUARY(self, ):
+
+ try:
+ _type = JANUARY
+ _channel = DEFAULT_CHANNEL
+
+ pass
+ self.match("jan")
+ alt13 = 2
+ LA13_0 = self.input.LA(1)
+
+ if (LA13_0 == 117) :
+ alt13 = 1
+ if alt13 == 1:
+ pass
+ self.match("uary")
+
+
+
+
+
+
+ self._state.type = _type
+ self._state.channel = _channel
+
+ finally:
+
+ pass
+
+
+
+
+ def mFEBRUARY(self, ):
+
+ try:
+ _type = FEBRUARY
+ _channel = DEFAULT_CHANNEL
+
+ pass
+ self.match("feb")
+ alt14 = 2
+ LA14_0 = self.input.LA(1)
+
+ if (LA14_0 == 114) :
+ alt14 = 1
+ if alt14 == 1:
+ pass
+ self.match("ruary")
+
+
+
+
+
+
+ self._state.type = _type
+ self._state.channel = _channel
+
+ finally:
+
+ pass
+
+
+
+
+ def mMARCH(self, ):
+
+ try:
+ _type = MARCH
+ _channel = DEFAULT_CHANNEL
+
+ pass
+ self.match("mar")
+ alt15 = 2
+ LA15_0 = self.input.LA(1)
+
+ if (LA15_0 == 99) :
+ alt15 = 1
+ if alt15 == 1:
+ pass
+ self.match("ch")
+
+
+
+
+
+
+ self._state.type = _type
+ self._state.channel = _channel
+
+ finally:
+
+ pass
+
+
+
+
+ def mAPRIL(self, ):
+
+ try:
+ _type = APRIL
+ _channel = DEFAULT_CHANNEL
+
+ pass
+ self.match("apr")
+ alt16 = 2
+ LA16_0 = self.input.LA(1)
+
+ if (LA16_0 == 105) :
+ alt16 = 1
+ if alt16 == 1:
+ pass
+ self.match("il")
+
+
+
+
+
+
+ self._state.type = _type
+ self._state.channel = _channel
+
+ finally:
+
+ pass
+
+
+
+
+ def mMAY(self, ):
+
+ try:
+ _type = MAY
+ _channel = DEFAULT_CHANNEL
+
+ pass
+ self.match("may")
+
+
+
+ self._state.type = _type
+ self._state.channel = _channel
+
+ finally:
+
+ pass
+
+
+
+
+ def mJUNE(self, ):
+
+ try:
+ _type = JUNE
+ _channel = DEFAULT_CHANNEL
+
+ pass
+ self.match("jun")
+ alt17 = 2
+ LA17_0 = self.input.LA(1)
+
+ if (LA17_0 == 101) :
+ alt17 = 1
+ if alt17 == 1:
+ pass
+ self.match(101)
+
+
+
+
+
+
+ self._state.type = _type
+ self._state.channel = _channel
+
+ finally:
+
+ pass
+
+
+
+
+ def mJULY(self, ):
+
+ try:
+ _type = JULY
+ _channel = DEFAULT_CHANNEL
+
+ pass
+ self.match("jul")
+ alt18 = 2
+ LA18_0 = self.input.LA(1)
+
+ if (LA18_0 == 121) :
+ alt18 = 1
+ if alt18 == 1:
+ pass
+ self.match(121)
+
+
+
+
+
+
+ self._state.type = _type
+ self._state.channel = _channel
+
+ finally:
+
+ pass
+
+
+
+
+ def mAUGUST(self, ):
+
+ try:
+ _type = AUGUST
+ _channel = DEFAULT_CHANNEL
+
+ pass
+ self.match("aug")
+ alt19 = 2
+ LA19_0 = self.input.LA(1)
+
+ if (LA19_0 == 117) :
+ alt19 = 1
+ if alt19 == 1:
+ pass
+ self.match("ust")
+
+
+
+
+
+
+ self._state.type = _type
+ self._state.channel = _channel
+
+ finally:
+
+ pass
+
+
+
+
+ def mSEPTEMBER(self, ):
+
+ try:
+ _type = SEPTEMBER
+ _channel = DEFAULT_CHANNEL
+
+ pass
+ self.match("sep")
+ alt20 = 2
+ LA20_0 = self.input.LA(1)
+
+ if (LA20_0 == 116) :
+ alt20 = 1
+ if alt20 == 1:
+ pass
+ self.match("tember")
+
+
+
+
+
+
+ self._state.type = _type
+ self._state.channel = _channel
+
+ finally:
+
+ pass
+
+
+
+
+ def mOCTOBER(self, ):
+
+ try:
+ _type = OCTOBER
+ _channel = DEFAULT_CHANNEL
+
+ pass
+ self.match("oct")
+ alt21 = 2
+ LA21_0 = self.input.LA(1)
+
+ if (LA21_0 == 111) :
+ alt21 = 1
+ if alt21 == 1:
+ pass
+ self.match("ober")
+
+
+
+
+
+
+ self._state.type = _type
+ self._state.channel = _channel
+
+ finally:
+
+ pass
+
+
+
+
+ def mNOVEMBER(self, ):
+
+ try:
+ _type = NOVEMBER
+ _channel = DEFAULT_CHANNEL
+
+ pass
+ self.match("nov")
+ alt22 = 2
+ LA22_0 = self.input.LA(1)
+
+ if (LA22_0 == 101) :
+ alt22 = 1
+ if alt22 == 1:
+ pass
+ self.match("ember")
+
+
+
+
+
+
+ self._state.type = _type
+ self._state.channel = _channel
+
+ finally:
+
+ pass
+
+
+
+
+ def mDECEMBER(self, ):
+
+ try:
+ _type = DECEMBER
+ _channel = DEFAULT_CHANNEL
+
+ pass
+ self.match("dec")
+ alt23 = 2
+ LA23_0 = self.input.LA(1)
+
+ if (LA23_0 == 101) :
+ alt23 = 1
+ if alt23 == 1:
+ pass
+ self.match("ember")
+
+
+
+
+
+
+ self._state.type = _type
+ self._state.channel = _channel
+
+ finally:
+
+ pass
+
+
+
+
+ def mMONTH(self, ):
+
+ try:
+ _type = MONTH
+ _channel = DEFAULT_CHANNEL
+
+ pass
+ pass
+ self.match("month")
+
+
+
+
+
+
+ self._state.type = _type
+ self._state.channel = _channel
+
+ finally:
+
+ pass
+
+
+
+
+ def mQUARTER(self, ):
+
+ try:
+ _type = QUARTER
+ _channel = DEFAULT_CHANNEL
+
+ pass
+ pass
+ self.match("quarter")
+
+
+
+
+
+
+ self._state.type = _type
+ self._state.channel = _channel
+
+ finally:
+
+ pass
+
+
+
+
+ def mEVERY(self, ):
+
+ try:
+ _type = EVERY
+ _channel = DEFAULT_CHANNEL
+
+ pass
+ pass
+ self.match("every")
+
+
+
+
+
+
+ self._state.type = _type
+ self._state.channel = _channel
+
+ finally:
+
+ pass
+
+
+
+
+ def mHOURS(self, ):
+
+ try:
+ _type = HOURS
+ _channel = DEFAULT_CHANNEL
+
+ pass
+ pass
+ self.match("hours")
+
+
+
+
+
+
+ self._state.type = _type
+ self._state.channel = _channel
+
+ finally:
+
+ pass
+
+
+
+
+ def mMINUTES(self, ):
+
+ try:
+ _type = MINUTES
+ _channel = DEFAULT_CHANNEL
+
+ pass
+ alt24 = 2
+ LA24_0 = self.input.LA(1)
+
+ if (LA24_0 == 109) :
+ LA24_1 = self.input.LA(2)
+
+ if (LA24_1 == 105) :
+ LA24_2 = self.input.LA(3)
+
+ if (LA24_2 == 110) :
+ LA24_3 = self.input.LA(4)
+
+ if (LA24_3 == 115) :
+ alt24 = 1
+ elif (LA24_3 == 117) :
+ alt24 = 2
+ else:
+ nvae = NoViableAltException("", 24, 3, self.input)
+
+ raise nvae
+
+ else:
+ nvae = NoViableAltException("", 24, 2, self.input)
+
+ raise nvae
+
+ else:
+ nvae = NoViableAltException("", 24, 1, self.input)
+
+ raise nvae
+
+ else:
+ nvae = NoViableAltException("", 24, 0, self.input)
+
+ raise nvae
+
+ if alt24 == 1:
+ pass
+ self.match("mins")
+
+
+ elif alt24 == 2:
+ pass
+ self.match("minutes")
+
+
+
+
+
+
+ self._state.type = _type
+ self._state.channel = _channel
+
+ finally:
+
+ pass
+
+
+
+
+ def mCOMMA(self, ):
+
+ try:
+ _type = COMMA
+ _channel = DEFAULT_CHANNEL
+
+ pass
+ pass
+ self.match(44)
+
+
+
+
+
+
+ self._state.type = _type
+ self._state.channel = _channel
+
+ finally:
+
+ pass
+
+
+
+
+ def mOF(self, ):
+
+ try:
+ _type = OF
+ _channel = DEFAULT_CHANNEL
+
+ pass
+ pass
+ self.match("of")
+
+
+
+
+
+
+ self._state.type = _type
+ self._state.channel = _channel
+
+ finally:
+
+ pass
+
+
+
+
+ def mWS(self, ):
+
+ try:
+ _type = WS
+ _channel = DEFAULT_CHANNEL
+
+ pass
+ if (9 <= self.input.LA(1) <= 10) or self.input.LA(1) == 13 or self.input.LA(1) == 32:
+ self.input.consume()
+ else:
+ mse = MismatchedSetException(None, self.input)
+ self.recover(mse)
+ raise mse
+
+ _channel=HIDDEN;
+
+
+
+ self._state.type = _type
+ self._state.channel = _channel
+
+ finally:
+
+ pass
+
+
+
+
+ def mDIGIT(self, ):
+
+ try:
+ _type = DIGIT
+ _channel = DEFAULT_CHANNEL
+
+ pass
+ pass
+ self.matchRange(48, 57)
+
+
+
+
+
+
+ self._state.type = _type
+ self._state.channel = _channel
+
+ finally:
+
+ pass
+
+
+
+
+ def mDIGITS(self, ):
+
+ try:
+ _type = DIGITS
+ _channel = DEFAULT_CHANNEL
+
+ pass
+ pass
+ self.mDIGIT()
+ self.mDIGIT()
+
+
+
+
+
+
+ self._state.type = _type
+ self._state.channel = _channel
+
+ finally:
+
+ pass
+
+
+
+
+ def mTokens(self):
+ alt25 = 37
+ alt25 = self.dfa25.predict(self.input)
+ if alt25 == 1:
+ pass
+ self.mTIME()
+
+
+ elif alt25 == 2:
+ pass
+ self.mFIRST()
+
+
+ elif alt25 == 3:
+ pass
+ self.mSECOND()
+
+
+ elif alt25 == 4:
+ pass
+ self.mTHIRD()
+
+
+ elif alt25 == 5:
+ pass
+ self.mFOURTH()
+
+
+ elif alt25 == 6:
+ pass
+ self.mFIFTH()
+
+
+ elif alt25 == 7:
+ pass
+ self.mFOURTH_OR_FIFTH()
+
+
+ elif alt25 == 8:
+ pass
+ self.mDAY()
+
+
+ elif alt25 == 9:
+ pass
+ self.mMONDAY()
+
+
+ elif alt25 == 10:
+ pass
+ self.mTUESDAY()
+
+
+ elif alt25 == 11:
+ pass
+ self.mWEDNESDAY()
+
+
+ elif alt25 == 12:
+ pass
+ self.mTHURSDAY()
+
+
+ elif alt25 == 13:
+ pass
+ self.mFRIDAY()
+
+
+ elif alt25 == 14:
+ pass
+ self.mSATURDAY()
+
+
+ elif alt25 == 15:
+ pass
+ self.mSUNDAY()
+
+
+ elif alt25 == 16:
+ pass
+ self.mJANUARY()
+
+
+ elif alt25 == 17:
+ pass
+ self.mFEBRUARY()
+
+
+ elif alt25 == 18:
+ pass
+ self.mMARCH()
+
+
+ elif alt25 == 19:
+ pass
+ self.mAPRIL()
+
+
+ elif alt25 == 20:
+ pass
+ self.mMAY()
+
+
+ elif alt25 == 21:
+ pass
+ self.mJUNE()
+
+
+ elif alt25 == 22:
+ pass
+ self.mJULY()
+
+
+ elif alt25 == 23:
+ pass
+ self.mAUGUST()
+
+
+ elif alt25 == 24:
+ pass
+ self.mSEPTEMBER()
+
+
+ elif alt25 == 25:
+ pass
+ self.mOCTOBER()
+
+
+ elif alt25 == 26:
+ pass
+ self.mNOVEMBER()
+
+
+ elif alt25 == 27:
+ pass
+ self.mDECEMBER()
+
+
+ elif alt25 == 28:
+ pass
+ self.mMONTH()
+
+
+ elif alt25 == 29:
+ pass
+ self.mQUARTER()
+
+
+ elif alt25 == 30:
+ pass
+ self.mEVERY()
+
+
+ elif alt25 == 31:
+ pass
+ self.mHOURS()
+
+
+ elif alt25 == 32:
+ pass
+ self.mMINUTES()
+
+
+ elif alt25 == 33:
+ pass
+ self.mCOMMA()
+
+
+ elif alt25 == 34:
+ pass
+ self.mOF()
+
+
+ elif alt25 == 35:
+ pass
+ self.mWS()
+
+
+ elif alt25 == 36:
+ pass
+ self.mDIGIT()
+
+
+ elif alt25 == 37:
+ pass
+ self.mDIGITS()
+
+
+
+
+
+
+
+
+ DFA25_eot = DFA.unpack(
+ u"\1\uffff\4\30\2\uffff\1\30\1\uffff\2\30\14\uffff\1\36\3\uffff\2"
+ u"\36\33\uffff\1\76\6\uffff"
+ )
+
+ DFA25_eof = DFA.unpack(
+ u"\77\uffff"
+ )
+
+ DFA25_min = DFA.unpack(
+ u"\1\11\4\60\1\145\1\141\1\60\1\150\2\60\2\141\1\uffff\1\141\1\160"
+ u"\1\143\6\uffff\1\72\3\uffff\2\72\3\uffff\1\146\3\uffff\1\143\3"
+ u"\uffff\1\151\4\uffff\1\156\1\162\2\uffff\1\154\6\uffff\1\164\6"
+ u"\uffff"
+ )
+
+ DFA25_max = DFA.unpack(
+ u"\1\167\1\72\1\163\1\156\2\162\1\165\1\164\1\165\1\164\1\72\1\145"
+ u"\1\157\1\uffff\2\165\1\146\6\uffff\1\72\3\uffff\2\72\3\uffff\1"
+ u"\162\3\uffff\1\160\3\uffff\1\165\4\uffff\1\156\1\171\2\uffff\1"
+ u"\156\6\uffff\1\164\6\uffff"
+ )
+
+ DFA25_accept = DFA.unpack(
+ u"\15\uffff\1\13\3\uffff\1\32\1\35\1\36\1\37\1\41\1\43\1\uffff\1"
+ u"\44\1\1\1\2\2\uffff\1\3\1\45\1\4\1\uffff\1\7\1\15\1\21\1\uffff"
+ u"\1\16\1\17\1\5\1\uffff\1\12\1\6\1\10\1\33\2\uffff\1\40\1\20\1\uffff"
+ u"\1\23\1\27\1\31\1\42\1\30\1\14\1\uffff\1\22\1\24\1\25\1\26\1\34"
+ u"\1\11"
+ )
+
+ DFA25_special = DFA.unpack(
+ u"\77\uffff"
+ )
+
+
+ DFA25_transition = [
+ DFA.unpack(u"\2\26\2\uffff\1\26\22\uffff\1\26\13\uffff\1\25\3\uffff"
+ u"\1\1\1\2\1\3\1\4\1\7\1\11\4\12\47\uffff\1\17\2\uffff\1\13\1\23"
+ u"\1\5\1\uffff\1\24\1\uffff\1\16\2\uffff\1\14\1\21\1\20\1\uffff\1"
+ u"\22\1\uffff\1\6\1\10\2\uffff\1\15"),
+ DFA.unpack(u"\12\27\1\31"),
+ DFA.unpack(u"\12\33\1\31\70\uffff\1\32"),
+ DFA.unpack(u"\4\34\6\36\1\31\63\uffff\1\35"),
+ DFA.unpack(u"\12\36\1\31\67\uffff\1\37"),
+ DFA.unpack(u"\1\43\3\uffff\1\40\5\uffff\1\41\2\uffff\1\42"),
+ DFA.unpack(u"\1\45\3\uffff\1\44\17\uffff\1\46"),
+ DFA.unpack(u"\12\36\1\31\71\uffff\1\47"),
+ DFA.unpack(u"\1\50\14\uffff\1\51"),
+ DFA.unpack(u"\12\36\1\31\71\uffff\1\52"),
+ DFA.unpack(u"\12\36\1\31"),
+ DFA.unpack(u"\1\53\3\uffff\1\54"),
+ DFA.unpack(u"\1\56\7\uffff\1\57\5\uffff\1\55"),
+ DFA.unpack(u""),
+ DFA.unpack(u"\1\60\23\uffff\1\61"),
+ DFA.unpack(u"\1\62\4\uffff\1\63"),
+ DFA.unpack(u"\1\64\2\uffff\1\65"),
+ DFA.unpack(u""),
+ DFA.unpack(u""),
+ DFA.unpack(u""),
+ DFA.unpack(u""),
+ DFA.unpack(u""),
+ DFA.unpack(u""),
+ DFA.unpack(u"\1\31"),
+ DFA.unpack(u""),
+ DFA.unpack(u""),
+ DFA.unpack(u""),
+ DFA.unpack(u"\1\31"),
+ DFA.unpack(u"\1\31"),
+ DFA.unpack(u""),
+ DFA.unpack(u""),
+ DFA.unpack(u""),
+ DFA.unpack(u"\1\41\13\uffff\1\32"),
+ DFA.unpack(u""),
+ DFA.unpack(u""),
+ DFA.unpack(u""),
+ DFA.unpack(u"\1\35\14\uffff\1\66"),
+ DFA.unpack(u""),
+ DFA.unpack(u""),
+ DFA.unpack(u""),
+ DFA.unpack(u"\1\37\13\uffff\1\67"),
+ DFA.unpack(u""),
+ DFA.unpack(u""),
+ DFA.unpack(u""),
+ DFA.unpack(u""),
+ DFA.unpack(u"\1\70"),
+ DFA.unpack(u"\1\71\6\uffff\1\72"),
+ DFA.unpack(u""),
+ DFA.unpack(u""),
+ DFA.unpack(u"\1\74\1\uffff\1\73"),
+ DFA.unpack(u""),
+ DFA.unpack(u""),
+ DFA.unpack(u""),
+ DFA.unpack(u""),
+ DFA.unpack(u""),
+ DFA.unpack(u""),
+ DFA.unpack(u"\1\75"),
+ DFA.unpack(u""),
+ DFA.unpack(u""),
+ DFA.unpack(u""),
+ DFA.unpack(u""),
+ DFA.unpack(u""),
+ DFA.unpack(u"")
+ ]
+
+
+ DFA25 = DFA
+
+
+
+
+def main(argv, stdin=sys.stdin, stdout=sys.stdout, stderr=sys.stderr):
+ from antlr3.main import LexerMain
+ main = LexerMain(GrocLexer)
+ main.stdin = stdin
+ main.stdout = stdout
+ main.stderr = stderr
+ main.execute(argv)
+
+
+if __name__ == '__main__':
+ main(sys.argv)
diff --git a/google_appengine/google/appengine/cron/GrocLexer.pyc b/google_appengine/google/appengine/cron/GrocLexer.pyc
new file mode 100644
index 0000000..c0bd09f
--- /dev/null
+++ b/google_appengine/google/appengine/cron/GrocLexer.pyc
Binary files differ
diff --git a/google_appengine/google/appengine/cron/GrocParser.py b/google_appengine/google/appengine/cron/GrocParser.py
new file mode 100755
index 0000000..b86cb4e
--- /dev/null
+++ b/google_appengine/google/appengine/cron/GrocParser.py
@@ -0,0 +1,1008 @@
+#!/usr/bin/env python
+#
+# Copyright 2007 Google Inc.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
+
+import sys
+from antlr3 import *
+from antlr3.compat import set, frozenset
+
+
+
+
+
+allOrdinals = set([1, 2, 3, 4, 5])
+numOrdinals = len(allOrdinals)
+
+
+
+
+HIDDEN = BaseRecognizer.HIDDEN
+
+THIRD=12
+SEPTEMBER=35
+FOURTH=13
+SECOND=11
+WEDNESDAY=21
+NOVEMBER=37
+SATURDAY=24
+JULY=33
+APRIL=30
+DIGITS=8
+OCTOBER=36
+MAY=31
+EVERY=6
+FEBRUARY=28
+MONDAY=19
+SUNDAY=25
+JUNE=32
+DAY=18
+MARCH=29
+OF=4
+EOF=-1
+JANUARY=27
+MONTH=26
+FRIDAY=23
+FIFTH=14
+MINUTES=17
+TIME=5
+WS=40
+QUARTER=39
+THURSDAY=22
+COMMA=9
+DECEMBER=38
+AUGUST=34
+DIGIT=7
+TUESDAY=20
+HOURS=16
+FIRST=10
+FOURTH_OR_FIFTH=15
+
+tokenNames = [
+ "<invalid>", "<EOR>", "<DOWN>", "<UP>",
+ "OF", "TIME", "EVERY", "DIGIT", "DIGITS", "COMMA", "FIRST", "SECOND",
+ "THIRD", "FOURTH", "FIFTH", "FOURTH_OR_FIFTH", "HOURS", "MINUTES", "DAY",
+ "MONDAY", "TUESDAY", "WEDNESDAY", "THURSDAY", "FRIDAY", "SATURDAY",
+ "SUNDAY", "MONTH", "JANUARY", "FEBRUARY", "MARCH", "APRIL", "MAY", "JUNE",
+ "JULY", "AUGUST", "SEPTEMBER", "OCTOBER", "NOVEMBER", "DECEMBER", "QUARTER",
+ "WS"
+]
+
+
+
+
+class GrocParser(Parser):
+ grammarFileName = "Groc.g"
+ antlr_version = version_str_to_tuple("3.1.1")
+ antlr_version_str = "3.1.1"
+ tokenNames = tokenNames
+
+ def __init__(self, input, state=None):
+ if state is None:
+ state = RecognizerSharedState()
+
+ Parser.__init__(self, input, state)
+
+
+ self.dfa3 = self.DFA3(
+ self, 3,
+ eot = self.DFA3_eot,
+ eof = self.DFA3_eof,
+ min = self.DFA3_min,
+ max = self.DFA3_max,
+ accept = self.DFA3_accept,
+ special = self.DFA3_special,
+ transition = self.DFA3_transition
+ )
+
+
+
+
+ self.ordinal_set = set()
+ self.weekday_set = set()
+ self.month_set = set()
+ self.time_string = '';
+ self.interval_mins = 0;
+ self.period_string = '';
+
+
+
+
+
+
+
+
+
+
+ valuesDict = {
+ SUNDAY: 0,
+ FIRST: 1,
+ MONDAY: 1,
+ JANUARY: 1,
+ TUESDAY: 2,
+ SECOND: 2,
+ FEBRUARY: 2,
+ WEDNESDAY: 3,
+ THIRD: 3,
+ MARCH: 3,
+ THURSDAY: 4,
+ FOURTH: 4,
+ APRIL: 4,
+ FRIDAY: 5,
+ FIFTH: 5,
+ MAY: 5,
+ SATURDAY: 6,
+ JUNE: 6,
+ JULY: 7,
+ AUGUST: 8,
+ SEPTEMBER: 9,
+ OCTOBER: 10,
+ NOVEMBER: 11,
+ DECEMBER: 12,
+ }
+
+ def ValueOf(self, token_type):
+ return self.valuesDict.get(token_type, -1)
+
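+  # For example, ValueOf(WEDNESDAY) == 3 and ValueOf(FOURTH) == 4; token
+  # types with no calendar value (COMMA, OF, ...) map to -1.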
+
+
+
+ def timespec(self, ):
+
+ try:
+ try:
+ pass
+ alt1 = 2
+ LA1_0 = self.input.LA(1)
+
+ if (LA1_0 == EVERY) :
+ LA1_1 = self.input.LA(2)
+
+ if ((DIGIT <= LA1_1 <= DIGITS)) :
+ alt1 = 2
+ elif ((DAY <= LA1_1 <= SUNDAY)) :
+ alt1 = 1
+ else:
+ nvae = NoViableAltException("", 1, 1, self.input)
+
+ raise nvae
+
+ elif ((FIRST <= LA1_0 <= FOURTH_OR_FIFTH)) :
+ alt1 = 1
+ else:
+ nvae = NoViableAltException("", 1, 0, self.input)
+
+ raise nvae
+
+ if alt1 == 1:
+ pass
+ self._state.following.append(self.FOLLOW_specifictime_in_timespec44)
+ self.specifictime()
+
+ self._state.following.pop()
+
+
+ elif alt1 == 2:
+ pass
+ self._state.following.append(self.FOLLOW_interval_in_timespec48)
+ self.interval()
+
+ self._state.following.pop()
+
+
+
+
+
+
+
+ except RecognitionException, re:
+ self.reportError(re)
+ self.recover(self.input, re)
+ finally:
+
+ pass
+
+ return
+
+
+
+ def specifictime(self, ):
+
+ TIME1 = None
+
+ try:
+ try:
+ pass
+ pass
+ alt3 = 2
+ alt3 = self.dfa3.predict(self.input)
+ if alt3 == 1:
+ pass
+ pass
+ pass
+ self._state.following.append(self.FOLLOW_ordinals_in_specifictime69)
+ self.ordinals()
+
+ self._state.following.pop()
+ self._state.following.append(self.FOLLOW_weekdays_in_specifictime71)
+ self.weekdays()
+
+ self._state.following.pop()
+
+
+
+ self.match(self.input, OF, self.FOLLOW_OF_in_specifictime74)
+ alt2 = 2
+ LA2_0 = self.input.LA(1)
+
+ if ((MONTH <= LA2_0 <= DECEMBER)) :
+ alt2 = 1
+ elif ((FIRST <= LA2_0 <= THIRD) or LA2_0 == QUARTER) :
+ alt2 = 2
+ else:
+ nvae = NoViableAltException("", 2, 0, self.input)
+
+ raise nvae
+
+ if alt2 == 1:
+ pass
+ self._state.following.append(self.FOLLOW_monthspec_in_specifictime77)
+ self.monthspec()
+
+ self._state.following.pop()
+
+
+ elif alt2 == 2:
+ pass
+ self._state.following.append(self.FOLLOW_quarterspec_in_specifictime79)
+ self.quarterspec()
+
+ self._state.following.pop()
+
+
+
+
+
+
+
+
+ elif alt3 == 2:
+ pass
+ pass
+ self._state.following.append(self.FOLLOW_ordinals_in_specifictime96)
+ self.ordinals()
+
+ self._state.following.pop()
+ self._state.following.append(self.FOLLOW_weekdays_in_specifictime98)
+ self.weekdays()
+
+ self._state.following.pop()
+ self.month_set = set(range(1,13))
+
+
+
+
+
+
+ TIME1=self.match(self.input, TIME, self.FOLLOW_TIME_in_specifictime112)
+ self.time_string = TIME1.text
+
+
+
+
+
+
+
+ except RecognitionException, re:
+ self.reportError(re)
+ self.recover(self.input, re)
+ finally:
+
+ pass
+
+ return
+
+
+
+ def interval(self, ):
+
+ intervalnum = None
+ period2 = None
+
+
+ try:
+ try:
+ pass
+ pass
+ self.match(self.input, EVERY, self.FOLLOW_EVERY_in_interval131)
+ intervalnum = self.input.LT(1)
+ if (DIGIT <= self.input.LA(1) <= DIGITS):
+ self.input.consume()
+ self._state.errorRecovery = False
+
+ else:
+ mse = MismatchedSetException(None, self.input)
+ raise mse
+
+
+
+ self.interval_mins = int(intervalnum.text)
+
+ self._state.following.append(self.FOLLOW_period_in_interval157)
+ period2 = self.period()
+
+ self._state.following.pop()
+
+ if ((period2 is not None) and [self.input.toString(period2.start,period2.stop)] or [None])[0] == "hours":
+ self.period_string = "hours"
+ else:
+ self.period_string = "minutes"
+
+
+
+
+
+
+
+
+ except RecognitionException, re:
+ self.reportError(re)
+ self.recover(self.input, re)
+ finally:
+
+ pass
+
+ return
+
+
+
+ def ordinals(self, ):
+
+ try:
+ try:
+ pass
+ alt5 = 2
+ LA5_0 = self.input.LA(1)
+
+ if (LA5_0 == EVERY) :
+ alt5 = 1
+ elif ((FIRST <= LA5_0 <= FOURTH_OR_FIFTH)) :
+ alt5 = 2
+ else:
+ nvae = NoViableAltException("", 5, 0, self.input)
+
+ raise nvae
+
+ if alt5 == 1:
+ pass
+ self.match(self.input, EVERY, self.FOLLOW_EVERY_in_ordinals176)
+ self.ordinal_set = self.ordinal_set.union(allOrdinals)
+
+
+ elif alt5 == 2:
+ pass
+ pass
+ self._state.following.append(self.FOLLOW_ordinal_in_ordinals192)
+ self.ordinal()
+
+ self._state.following.pop()
+ while True:
+ alt4 = 2
+ LA4_0 = self.input.LA(1)
+
+ if (LA4_0 == COMMA) :
+ alt4 = 1
+
+
+ if alt4 == 1:
+ pass
+ self.match(self.input, COMMA, self.FOLLOW_COMMA_in_ordinals195)
+ self._state.following.append(self.FOLLOW_ordinal_in_ordinals197)
+ self.ordinal()
+
+ self._state.following.pop()
+
+
+ else:
+ break
+
+
+
+
+
+
+
+
+
+
+
+
+ except RecognitionException, re:
+ self.reportError(re)
+ self.recover(self.input, re)
+ finally:
+
+ pass
+
+ return
+
+
+
+ def ordinal(self, ):
+
+ ord = None
+
+ try:
+ try:
+ pass
+ ord = self.input.LT(1)
+ if (FIRST <= self.input.LA(1) <= FOURTH_OR_FIFTH):
+ self.input.consume()
+ self._state.errorRecovery = False
+
+ else:
+ mse = MismatchedSetException(None, self.input)
+ raise mse
+
+
+
+ self.ordinal_set.add(self.ValueOf(ord.type));
+
+
+
+
+
+ except RecognitionException, re:
+ self.reportError(re)
+ self.recover(self.input, re)
+ finally:
+
+ pass
+
+ return
+
+
+ class period_return(ParserRuleReturnScope):
+ def __init__(self):
+ ParserRuleReturnScope.__init__(self)
+
+
+
+
+
+ def period(self, ):
+
+ retval = self.period_return()
+ retval.start = self.input.LT(1)
+
+ try:
+ try:
+ pass
+ if (HOURS <= self.input.LA(1) <= MINUTES):
+ self.input.consume()
+ self._state.errorRecovery = False
+
+ else:
+ mse = MismatchedSetException(None, self.input)
+ raise mse
+
+
+
+
+
+ retval.stop = self.input.LT(-1)
+
+
+ except RecognitionException, re:
+ self.reportError(re)
+ self.recover(self.input, re)
+ finally:
+
+ pass
+
+ return retval
+
+
+
+ def weekdays(self, ):
+
+ try:
+ try:
+ pass
+ alt7 = 2
+ LA7_0 = self.input.LA(1)
+
+ if (LA7_0 == DAY) :
+ alt7 = 1
+ elif ((MONDAY <= LA7_0 <= SUNDAY)) :
+ alt7 = 2
+ else:
+ nvae = NoViableAltException("", 7, 0, self.input)
+
+ raise nvae
+
+ if alt7 == 1:
+ pass
+ self.match(self.input, DAY, self.FOLLOW_DAY_in_weekdays280)
+
+ self.weekday_set = set([self.ValueOf(SUNDAY), self.ValueOf(MONDAY),
+ self.ValueOf(TUESDAY), self.ValueOf(WEDNESDAY),
+ self.ValueOf(THURSDAY), self.ValueOf(FRIDAY),
+ self.ValueOf(SATURDAY), self.ValueOf(SUNDAY)])
+
+
+
+ elif alt7 == 2:
+ pass
+ pass
+ self._state.following.append(self.FOLLOW_weekday_in_weekdays288)
+ self.weekday()
+
+ self._state.following.pop()
+ while True:
+ alt6 = 2
+ LA6_0 = self.input.LA(1)
+
+ if (LA6_0 == COMMA) :
+ alt6 = 1
+
+
+ if alt6 == 1:
+ pass
+ self.match(self.input, COMMA, self.FOLLOW_COMMA_in_weekdays291)
+ self._state.following.append(self.FOLLOW_weekday_in_weekdays293)
+ self.weekday()
+
+ self._state.following.pop()
+
+
+ else:
+ break
+
+
+
+
+
+
+
+
+
+
+
+
+ except RecognitionException, re:
+ self.reportError(re)
+ self.recover(self.input, re)
+ finally:
+
+ pass
+
+ return
+
+
+
+ def weekday(self, ):
+
+ dayname = None
+
+ try:
+ try:
+ pass
+ dayname = self.input.LT(1)
+ if (MONDAY <= self.input.LA(1) <= SUNDAY):
+ self.input.consume()
+ self._state.errorRecovery = False
+
+ else:
+ mse = MismatchedSetException(None, self.input)
+ raise mse
+
+
+
+ self.weekday_set.add(self.ValueOf(dayname.type))
+
+
+
+
+
+ except RecognitionException, re:
+ self.reportError(re)
+ self.recover(self.input, re)
+ finally:
+
+ pass
+
+ return
+
+
+
+ def monthspec(self, ):
+
+ try:
+ try:
+ pass
+ alt8 = 2
+ LA8_0 = self.input.LA(1)
+
+ if (LA8_0 == MONTH) :
+ alt8 = 1
+ elif ((JANUARY <= LA8_0 <= DECEMBER)) :
+ alt8 = 2
+ else:
+ nvae = NoViableAltException("", 8, 0, self.input)
+
+ raise nvae
+
+ if alt8 == 1:
+ pass
+ self.match(self.input, MONTH, self.FOLLOW_MONTH_in_monthspec373)
+
+ self.month_set = self.month_set.union(set([
+ self.ValueOf(JANUARY), self.ValueOf(FEBRUARY), self.ValueOf(MARCH),
+ self.ValueOf(APRIL), self.ValueOf(MAY), self.ValueOf(JUNE),
+ self.ValueOf(JULY), self.ValueOf(AUGUST), self.ValueOf(SEPTEMBER),
+ self.ValueOf(OCTOBER), self.ValueOf(NOVEMBER),
+ self.ValueOf(DECEMBER)]))
+
+
+
+ elif alt8 == 2:
+ pass
+ self._state.following.append(self.FOLLOW_months_in_monthspec383)
+ self.months()
+
+ self._state.following.pop()
+
+
+
+
+
+
+
+ except RecognitionException, re:
+ self.reportError(re)
+ self.recover(self.input, re)
+ finally:
+
+ pass
+
+ return
+
+
+
+ def months(self, ):
+
+ try:
+ try:
+ pass
+ pass
+ self._state.following.append(self.FOLLOW_month_in_months400)
+ self.month()
+
+ self._state.following.pop()
+ while True:
+ alt9 = 2
+ LA9_0 = self.input.LA(1)
+
+ if (LA9_0 == COMMA) :
+ alt9 = 1
+
+
+ if alt9 == 1:
+ pass
+ self.match(self.input, COMMA, self.FOLLOW_COMMA_in_months403)
+ self._state.following.append(self.FOLLOW_month_in_months405)
+ self.month()
+
+ self._state.following.pop()
+
+
+ else:
+ break
+
+
+
+
+
+
+
+
+
+ except RecognitionException, re:
+ self.reportError(re)
+ self.recover(self.input, re)
+ finally:
+
+ pass
+
+ return
+
+
+
+ def month(self, ):
+
+ monthname = None
+
+ try:
+ try:
+ pass
+ monthname = self.input.LT(1)
+ if (JANUARY <= self.input.LA(1) <= DECEMBER):
+ self.input.consume()
+ self._state.errorRecovery = False
+
+ else:
+ mse = MismatchedSetException(None, self.input)
+ raise mse
+
+
+ self.month_set.add(self.ValueOf(monthname.type));
+
+
+
+
+ except RecognitionException, re:
+ self.reportError(re)
+ self.recover(self.input, re)
+ finally:
+
+ pass
+
+ return
+
+
+
+ def quarterspec(self, ):
+
+ try:
+ try:
+ pass
+ alt10 = 2
+ LA10_0 = self.input.LA(1)
+
+ if (LA10_0 == QUARTER) :
+ alt10 = 1
+ elif ((FIRST <= LA10_0 <= THIRD)) :
+ alt10 = 2
+ else:
+ nvae = NoViableAltException("", 10, 0, self.input)
+
+ raise nvae
+
+ if alt10 == 1:
+ pass
+ self.match(self.input, QUARTER, self.FOLLOW_QUARTER_in_quarterspec497)
+
+ self.month_set = self.month_set.union(set([
+ self.ValueOf(JANUARY), self.ValueOf(APRIL), self.ValueOf(JULY),
+ self.ValueOf(OCTOBER)]))
+
+
+ elif alt10 == 2:
+ pass
+ pass
+ self._state.following.append(self.FOLLOW_quarter_ordinals_in_quarterspec509)
+ self.quarter_ordinals()
+
+ self._state.following.pop()
+ self.match(self.input, MONTH, self.FOLLOW_MONTH_in_quarterspec511)
+ self.match(self.input, OF, self.FOLLOW_OF_in_quarterspec513)
+ self.match(self.input, QUARTER, self.FOLLOW_QUARTER_in_quarterspec515)
+
+
+
+
+
+
+
+
+
+
+ except RecognitionException, re:
+ self.reportError(re)
+ self.recover(self.input, re)
+ finally:
+
+ pass
+
+ return
+
+
+
+ def quarter_ordinals(self, ):
+
+ try:
+ try:
+ pass
+ pass
+ self._state.following.append(self.FOLLOW_month_of_quarter_ordinal_in_quarter_ordinals534)
+ self.month_of_quarter_ordinal()
+
+ self._state.following.pop()
+ while True:
+ alt11 = 2
+ LA11_0 = self.input.LA(1)
+
+ if (LA11_0 == COMMA) :
+ alt11 = 1
+
+
+ if alt11 == 1:
+ pass
+ self.match(self.input, COMMA, self.FOLLOW_COMMA_in_quarter_ordinals537)
+ self._state.following.append(self.FOLLOW_month_of_quarter_ordinal_in_quarter_ordinals539)
+ self.month_of_quarter_ordinal()
+
+ self._state.following.pop()
+
+
+ else:
+ break
+
+
+
+
+
+
+
+
+
+ except RecognitionException, re:
+ self.reportError(re)
+ self.recover(self.input, re)
+ finally:
+
+ pass
+
+ return
+
+
+
+ def month_of_quarter_ordinal(self, ):
+
+ offset = None
+
+ try:
+ try:
+ pass
+ offset = self.input.LT(1)
+ if (FIRST <= self.input.LA(1) <= THIRD):
+ self.input.consume()
+ self._state.errorRecovery = False
+
+ else:
+ mse = MismatchedSetException(None, self.input)
+ raise mse
+
+
+
+ jOffset = self.ValueOf(offset.type) - 1
+ self.month_set = self.month_set.union(set([
+ jOffset + self.ValueOf(JANUARY), jOffset + self.ValueOf(APRIL),
+ jOffset + self.ValueOf(JULY), jOffset + self.ValueOf(OCTOBER)]))
+
+
+
+
+ except RecognitionException, re:
+ self.reportError(re)
+ self.recover(self.input, re)
+ finally:
+
+ pass
+
+ return
+
+
+
+
+
+
+ DFA3_eot = DFA.unpack(
+ u"\13\uffff"
+ )
+
+ DFA3_eof = DFA.unpack(
+ u"\13\uffff"
+ )
+
+ DFA3_min = DFA.unpack(
+ u"\1\6\1\22\1\11\2\4\1\12\2\uffff\1\23\1\11\1\4"
+ )
+
+ DFA3_max = DFA.unpack(
+ u"\1\17\2\31\1\5\1\11\1\17\2\uffff\2\31\1\11"
+ )
+
+ DFA3_accept = DFA.unpack(
+ u"\6\uffff\1\1\1\2\3\uffff"
+ )
+
+ DFA3_special = DFA.unpack(
+ u"\13\uffff"
+ )
+
+
+ DFA3_transition = [
+ DFA.unpack(u"\1\1\3\uffff\6\2"),
+ DFA.unpack(u"\1\3\7\4"),
+ DFA.unpack(u"\1\5\10\uffff\1\3\7\4"),
+ DFA.unpack(u"\1\6\1\7"),
+ DFA.unpack(u"\1\6\1\7\3\uffff\1\10"),
+ DFA.unpack(u"\6\11"),
+ DFA.unpack(u""),
+ DFA.unpack(u""),
+ DFA.unpack(u"\7\12"),
+ DFA.unpack(u"\1\5\10\uffff\1\3\7\4"),
+ DFA.unpack(u"\1\6\1\7\3\uffff\1\10")
+ ]
+
+
+ DFA3 = DFA
+
+
+ FOLLOW_specifictime_in_timespec44 = frozenset([1])
+ FOLLOW_interval_in_timespec48 = frozenset([1])
+ FOLLOW_ordinals_in_specifictime69 = frozenset([18, 19, 20, 21, 22, 23, 24, 25])
+ FOLLOW_weekdays_in_specifictime71 = frozenset([4])
+ FOLLOW_OF_in_specifictime74 = frozenset([10, 11, 12, 26, 27, 28, 29, 30, 31, 32, 33, 34, 35, 36, 37, 38, 39])
+ FOLLOW_monthspec_in_specifictime77 = frozenset([5])
+ FOLLOW_quarterspec_in_specifictime79 = frozenset([5])
+ FOLLOW_ordinals_in_specifictime96 = frozenset([18, 19, 20, 21, 22, 23, 24, 25])
+ FOLLOW_weekdays_in_specifictime98 = frozenset([5])
+ FOLLOW_TIME_in_specifictime112 = frozenset([1])
+ FOLLOW_EVERY_in_interval131 = frozenset([7, 8])
+ FOLLOW_set_in_interval141 = frozenset([16, 17])
+ FOLLOW_period_in_interval157 = frozenset([1])
+ FOLLOW_EVERY_in_ordinals176 = frozenset([1])
+ FOLLOW_ordinal_in_ordinals192 = frozenset([1, 9])
+ FOLLOW_COMMA_in_ordinals195 = frozenset([10, 11, 12, 13, 14, 15])
+ FOLLOW_ordinal_in_ordinals197 = frozenset([1, 9])
+ FOLLOW_set_in_ordinal218 = frozenset([1])
+ FOLLOW_set_in_period257 = frozenset([1])
+ FOLLOW_DAY_in_weekdays280 = frozenset([1])
+ FOLLOW_weekday_in_weekdays288 = frozenset([1, 9])
+ FOLLOW_COMMA_in_weekdays291 = frozenset([18, 19, 20, 21, 22, 23, 24, 25])
+ FOLLOW_weekday_in_weekdays293 = frozenset([1, 9])
+ FOLLOW_set_in_weekday314 = frozenset([1])
+ FOLLOW_MONTH_in_monthspec373 = frozenset([1])
+ FOLLOW_months_in_monthspec383 = frozenset([1])
+ FOLLOW_month_in_months400 = frozenset([1, 9])
+ FOLLOW_COMMA_in_months403 = frozenset([26, 27, 28, 29, 30, 31, 32, 33, 34, 35, 36, 37, 38])
+ FOLLOW_month_in_months405 = frozenset([1, 9])
+ FOLLOW_set_in_month424 = frozenset([1])
+ FOLLOW_QUARTER_in_quarterspec497 = frozenset([1])
+ FOLLOW_quarter_ordinals_in_quarterspec509 = frozenset([26])
+ FOLLOW_MONTH_in_quarterspec511 = frozenset([4])
+ FOLLOW_OF_in_quarterspec513 = frozenset([39])
+ FOLLOW_QUARTER_in_quarterspec515 = frozenset([1])
+ FOLLOW_month_of_quarter_ordinal_in_quarter_ordinals534 = frozenset([1, 9])
+ FOLLOW_COMMA_in_quarter_ordinals537 = frozenset([10, 11, 12, 26, 27, 28, 29, 30, 31, 32, 33, 34, 35, 36, 37, 38, 39])
+ FOLLOW_month_of_quarter_ordinal_in_quarter_ordinals539 = frozenset([1, 9])
+ FOLLOW_set_in_month_of_quarter_ordinal558 = frozenset([1])
+
+
+
+def main(argv, stdin=sys.stdin, stdout=sys.stdout, stderr=sys.stderr):
+ from antlr3.main import ParserMain
+ main = ParserMain("GrocLexer", GrocParser)
+ main.stdin = stdin
+ main.stdout = stdout
+ main.stderr = stderr
+ main.execute(argv)
+
+
+if __name__ == '__main__':
+ main(sys.argv)
diff --git a/google_appengine/google/appengine/cron/GrocParser.pyc b/google_appengine/google/appengine/cron/GrocParser.pyc
new file mode 100644
index 0000000..094d1b9
--- /dev/null
+++ b/google_appengine/google/appengine/cron/GrocParser.pyc
Binary files differ
diff --git a/google_appengine/google/appengine/cron/__init__.py b/google_appengine/google/appengine/cron/__init__.py
new file mode 100755
index 0000000..d5eed70
--- /dev/null
+++ b/google_appengine/google/appengine/cron/__init__.py
@@ -0,0 +1,17 @@
+#!/usr/bin/env python
+#
+# Copyright 2007 Google Inc.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
+"this file is needed to make this a package"
diff --git a/google_appengine/google/appengine/cron/__init__.pyc b/google_appengine/google/appengine/cron/__init__.pyc
new file mode 100644
index 0000000..1d7c118
--- /dev/null
+++ b/google_appengine/google/appengine/cron/__init__.pyc
Binary files differ
diff --git a/google_appengine/google/appengine/cron/groc.py b/google_appengine/google/appengine/cron/groc.py
new file mode 100755
index 0000000..373d56a
--- /dev/null
+++ b/google_appengine/google/appengine/cron/groc.py
@@ -0,0 +1,74 @@
+#!/usr/bin/env python
+#
+# Copyright 2007 Google Inc.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
+
+
+"""A wrapper around the generated Groc parser and lexer."""
+
+
+import google
+
+import antlr3
+
+import GrocLexer
+import GrocParser
+
+
+class GrocException(Exception):
+ """An error occurred while parsing the groc input string."""
+
+
+class GrocLexerWithErrors(GrocLexer.GrocLexer):
+ """An overridden Lexer that raises exceptions."""
+
+ def emitErrorMessage(self, msg):
+ """Raise an exception if the input fails to parse correctly.
+
+    Overrides the default implementation, which just prints the message to
+    stderr.
+
+ Arguments:
+ msg: the error message
+ Raises:
+ GrocException: always.
+ """
+ raise GrocException(msg)
+
+
+class GrocParserWithErrors(GrocParser.GrocParser):
+ """An overridden Parser that raises exceptions."""
+
+ def emitErrorMessage(self, msg):
+ """Raise an exception if the input fails to parse correctly.
+
+    Overrides the default implementation, which just prints the message to
+    stderr.
+
+ Arguments:
+ msg: the error message
+ Raises:
+ GrocException: always.
+ """
+ raise GrocException(msg)
+
+
+def CreateParser(parse_string):
+ """Creates a Groc Parser."""
+ input_string = antlr3.ANTLRStringStream(parse_string)
+ lexer = GrocLexerWithErrors(input_string)
+ tokens = antlr3.CommonTokenStream(lexer)
+ parser = GrocParserWithErrors(tokens)
+ return parser
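+
+
+# A minimal usage sketch (the schedule string below is just an example):
+#
+#   parser = CreateParser('every 2 hours')
+#   parser.timespec()
+#   print parser.interval_mins, parser.period_string  # -> 2 hours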
diff --git a/google_appengine/google/appengine/cron/groc.pyc b/google_appengine/google/appengine/cron/groc.pyc
new file mode 100644
index 0000000..a6770cf
--- /dev/null
+++ b/google_appengine/google/appengine/cron/groc.pyc
Binary files differ
diff --git a/google_appengine/google/appengine/cron/groctimespecification.py b/google_appengine/google/appengine/cron/groctimespecification.py
new file mode 100755
index 0000000..9acb7bf
--- /dev/null
+++ b/google_appengine/google/appengine/cron/groctimespecification.py
@@ -0,0 +1,304 @@
+#!/usr/bin/env python
+#
+# Copyright 2007 Google Inc.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
+
+
+"""Implementation of scheduling for Groc format schedules.
+
+A Groc schedule looks like '1st,2nd monday 9:00', or 'every 20 mins'. This
+module takes a parsed schedule (produced by Antlr) and creates objects that
+can produce times that match this schedule.
+
+A parsed schedule is one of two types - an Interval or a Specific Time.
+See the class docstrings for more.
+
+Extensions to be considered:
+
+ allowing a comma separated list of times to run
+ allowing the user to specify particular days of the month to run
+"""
+
+
+import calendar
+import datetime
+
+try:
+ import pytz
+except ImportError:
+ pytz = None
+
+import groc
+
+HOURS = 'hours'
+MINUTES = 'minutes'
+
+try:
+ from pytz import NonExistentTimeError
+ from pytz import AmbiguousTimeError
+except ImportError:
+ class NonExistentTimeError(Exception):
+ pass
+ class AmbiguousTimeError(Exception):
+ pass
+
+
+def GrocTimeSpecification(schedule):
+ """Factory function.
+
+ Turns a schedule specification into a TimeSpecification.
+
+ Arguments:
+ schedule: the schedule specification, as a string
+
+ Returns:
+ a TimeSpecification instance
+ """
+ parser = groc.CreateParser(schedule)
+ parser.timespec()
+
+ if parser.interval_mins:
+ return IntervalTimeSpecification(parser.interval_mins,
+ parser.period_string)
+ else:
+ return SpecificTimeSpecification(parser.ordinal_set, parser.weekday_set,
+ parser.month_set,
+ None,
+ parser.time_string)
+
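+# For instance, 'every 20 mins' produces IntervalTimeSpecification(20,
+# 'minutes'), while '1st monday of month 09:00' takes the specific-time
+# branch above.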
+
+class TimeSpecification(object):
+ """Base class for time specifications."""
+
+ def GetMatches(self, start, n):
+ """Returns the next n times that match the schedule, starting at time start.
+
+ Arguments:
+ start: a datetime to start from. Matches will start from after this time.
+ n: the number of matching times to return
+
+ Returns:
+ a list of n datetime objects
+ """
+ out = []
+ for _ in range(n):
+ start = self.GetMatch(start)
+ out.append(start)
+ return out
+
+ def GetMatch(self, start):
+ """Returns the next match after time start.
+
+ Must be implemented in subclasses.
+
+ Arguments:
+ start: a datetime to start with. Matches will start from this time.
+
+ Returns:
+ a datetime object
+ """
+ raise NotImplementedError
+
+
+class IntervalTimeSpecification(TimeSpecification):
+ """A time specification for a given interval.
+
+ An Interval type spec runs at the given fixed interval. It has two
+ attributes:
+ period - the type of interval, either "hours" or "minutes"
+ interval - the number of units of type period.
+ """
+
+ def __init__(self, interval, period):
+ super(IntervalTimeSpecification, self).__init__()
+ self.interval = interval
+ self.period = period
+
+ def GetMatch(self, t):
+ """Returns the next match after time 't'.
+
+ Arguments:
+ t: a datetime to start from. Matches will start from after this time.
+
+ Returns:
+ a datetime object
+ """
+ if self.period == HOURS:
+ return t + datetime.timedelta(hours=self.interval)
+ else:
+ return t + datetime.timedelta(minutes=self.interval)
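+
+# Sketch: IntervalTimeSpecification(20, MINUTES).GetMatches(t, 3) returns
+# [t + 20 minutes, t + 40 minutes, t + 60 minutes], since each match is fed
+# back in as the start of the next.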
+
+
+class SpecificTimeSpecification(TimeSpecification):
+ """Specific time specification.
+
+ A Specific interval is more complex, but defines a certain time to run and
+ the days that it should run. It has the following attributes:
+ time - the time of day to run, as "HH:MM"
+ ordinals - first, second, third &c, as a set of integers in 1..5
+ months - the months that this should run, as a set of integers in 1..12
+ weekdays - the days of the week that this should run, as a set of integers,
+ 0=Sunday, 6=Saturday
+ timezone - the optional timezone as a string for this specification.
+ Defaults to UTC - valid entries are things like Australia/Victoria
+ or PST8PDT.
+
+ A specific time schedule can be quite complex. A schedule could look like
+ this:
+ "1st,third sat,sun of jan,feb,mar 09:15"
+
+ In this case, ordinals would be {1,3}, weekdays {0,6}, months {1,2,3} and
+ time would be "09:15".
+ """
+
+ timezone = None
+
+ def __init__(self, ordinals=None, weekdays=None, months=None, monthdays=None,
+ timestr='00:00', timezone=None):
+    super(SpecificTimeSpecification, self).__init__()
+ if weekdays is not None and monthdays is not None:
+ raise ValueError("can't supply both monthdays and weekdays")
+ if ordinals is None:
+ self.ordinals = set(range(1, 6))
+ else:
+ self.ordinals = set(ordinals)
+
+ if weekdays is None:
+ self.weekdays = set(range(7))
+ else:
+ self.weekdays = set(weekdays)
+
+ if months is None:
+ self.months = set(range(1, 13))
+ else:
+ self.months = set(months)
+
+ if monthdays is None:
+ self.monthdays = set()
+ else:
+ self.monthdays = set(monthdays)
+ hourstr, minutestr = timestr.split(':')
+ self.time = datetime.time(int(hourstr), int(minutestr))
+ if timezone:
+ if pytz is None:
+ raise ValueError("need pytz in order to specify a timezone")
+ self.timezone = pytz.timezone(timezone)
+
+ def _MatchingDays(self, year, month):
+ """Returns matching days for the given year and month.
+
+ For the given year and month, return the days that match this instance's
+ day specification, based on the ordinals and weekdays.
+
+ Arguments:
+ year: the year as an integer
+ month: the month as an integer, in range 1-12
+
+ Returns:
+ a list of matching days, as ints in range 1-31
+ """
+ out_days = []
+ start_day, last_day = calendar.monthrange(year, month)
+ start_day = (start_day + 1) % 7
+ for ordinal in self.ordinals:
+ for weekday in self.weekdays:
+ day = ((weekday - start_day) % 7) + 1
+ day += 7 * (ordinal - 1)
+ if day <= last_day:
+ out_days.append(day)
+ return sorted(out_days)
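+
+  # Worked example: for January 2009, calendar.monthrange(2009, 1) == (3, 31),
+  # so start_day == (3 + 1) % 7 == 4 under the Sunday=0 convention used here.
+  # With ordinals == set([1]) and weekdays == set([0]) (first Sunday),
+  # day == ((0 - 4) % 7) + 1 == 4, and 2009-01-04 was indeed a Sunday.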
+
+ def _NextMonthGenerator(self, start, matches):
+ """Creates a generator that produces results from the set 'matches'.
+
+ Matches must be >= 'start'. If none match, the wrap counter is incremented,
+ and the result set is reset to the full set. Yields a 2-tuple of (match,
+ wrapcount).
+
+ Arguments:
+ start: first set of matches will be >= this value (an int)
+ matches: the set of potential matches (a sequence of ints)
+
+ Yields:
+ a two-tuple of (match, wrap counter). match is an int in range (1-12),
+      wrapcount is an int indicating how many times we've wrapped around.
+ """
+ potential = matches = sorted(matches)
+ after = start - 1
+ wrapcount = 0
+ while True:
+ potential = [x for x in potential if x > after]
+ if not potential:
+ wrapcount += 1
+ potential = matches
+ after = potential[0]
+ yield (after, wrapcount)
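+
+  # Example: _NextMonthGenerator(11, [1, 4, 7, 10]) yields (1, 1) first --
+  # no month in the set is >= 11, so it wraps into the next year -- and then
+  # (4, 1), (7, 1), (10, 1), (1, 2), and so on.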
+
+ def GetMatch(self, start):
+ """Returns the next time that matches the schedule after time start.
+
+ Arguments:
+ start: a UTC datetime to start from. Matches will start after this time
+
+ Returns:
+ a datetime object
+ """
+ start_time = start
+ if self.timezone and pytz is not None:
+ if not start_time.tzinfo:
+ start_time = pytz.utc.localize(start_time)
+ start_time = start_time.astimezone(self.timezone)
+ start_time = start_time.replace(tzinfo=None)
+ if self.months:
+ months = self._NextMonthGenerator(start_time.month, self.months)
+ while True:
+ month, yearwraps = months.next()
+ candidate_month = start_time.replace(day=1, month=month,
+ year=start_time.year + yearwraps)
+
+ if self.monthdays:
+ _, last_day = calendar.monthrange(candidate_month.year,
+ candidate_month.month)
+ day_matches = sorted(x for x in self.monthdays if x <= last_day)
+ else:
+ day_matches = self._MatchingDays(candidate_month.year, month)
+
+ if ((candidate_month.year, candidate_month.month)
+ == (start_time.year, start_time.month)):
+ day_matches = [x for x in day_matches if x >= start_time.day]
+ while (day_matches and day_matches[0] == start_time.day
+ and start_time.time() >= self.time):
+ day_matches.pop(0)
+ while day_matches:
+        out = candidate_month.replace(day=day_matches[0], hour=self.time.hour,
+                                      minute=self.time.minute, second=0,
+                                      microsecond=0)
+ if self.timezone and pytz is not None:
+ try:
+ out = self.timezone.localize(out, is_dst=None)
+ except AmbiguousTimeError:
+ out = self.timezone.localize(out)
+ except NonExistentTimeError:
+ for _ in range(24):
+ out = out.replace(minute=1) + datetime.timedelta(minutes=60)
+ try:
+ out = self.timezone.localize(out)
+ except NonExistentTimeError:
+ continue
+ break
+ out = out.astimezone(pytz.utc)
+ return out
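+
+  # Worked example: SpecificTimeSpecification(ordinals=[1], weekdays=[0],
+  # timestr='09:00').GetMatch(datetime.datetime(2009, 1, 1)) returns
+  # datetime.datetime(2009, 1, 4, 9, 0), the first Sunday of that month.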
diff --git a/google_appengine/google/appengine/cron/groctimespecification.pyc b/google_appengine/google/appengine/cron/groctimespecification.pyc
new file mode 100644
index 0000000..f030218
--- /dev/null
+++ b/google_appengine/google/appengine/cron/groctimespecification.pyc
Binary files differ
diff --git a/google_appengine/google/appengine/datastore/__init__.py b/google_appengine/google/appengine/datastore/__init__.py
new file mode 100755
index 0000000..c33ae80
--- /dev/null
+++ b/google_appengine/google/appengine/datastore/__init__.py
@@ -0,0 +1,16 @@
+#!/usr/bin/env python
+#
+# Copyright 2007 Google Inc.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
diff --git a/google_appengine/google/appengine/datastore/__init__.pyc b/google_appengine/google/appengine/datastore/__init__.pyc
new file mode 100644
index 0000000..c9d9f68
--- /dev/null
+++ b/google_appengine/google/appengine/datastore/__init__.pyc
Binary files differ
diff --git a/google_appengine/google/appengine/datastore/action_pb.py b/google_appengine/google/appengine/datastore/action_pb.py
new file mode 100755
index 0000000..dd97c6a
--- /dev/null
+++ b/google_appengine/google/appengine/datastore/action_pb.py
@@ -0,0 +1,24 @@
+#!/usr/bin/env python
+#
+# Copyright 2007 Google Inc.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
+
+from google.net.proto import ProtocolBuffer
+
+__pychecker__ = """maxreturns=0 maxbranches=0 no-callinit
+ unusednames=printElemNumber,debug_strs no-special"""
+
+class Action(ProtocolBuffer.ProtocolMessage):
+ pass
diff --git a/google_appengine/google/appengine/datastore/action_pb.pyc b/google_appengine/google/appengine/datastore/action_pb.pyc
new file mode 100644
index 0000000..8fc8717
--- /dev/null
+++ b/google_appengine/google/appengine/datastore/action_pb.pyc
Binary files differ
diff --git a/google_appengine/google/appengine/datastore/datastore_index.py b/google_appengine/google/appengine/datastore/datastore_index.py
new file mode 100755
index 0000000..3e60947
--- /dev/null
+++ b/google_appengine/google/appengine/datastore/datastore_index.py
@@ -0,0 +1,438 @@
+#!/usr/bin/env python
+#
+# Copyright 2007 Google Inc.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
+
+"""Primitives for dealing with datastore indexes.
+
+Example index.yaml file:
+------------------------
+
+indexes:
+
+- kind: Cat
+ ancestor: no
+ properties:
+ - name: name
+ - name: age
+ direction: desc
+
+- kind: Cat
+ properties:
+ - name: name
+ direction: ascending
+ - name: whiskers
+ direction: descending
+
+- kind: Store
+ ancestor: yes
+ properties:
+ - name: business
+ direction: asc
+ - name: owner
+ direction: asc
+"""
+
+
+
+
+
+from google.appengine.api import datastore_types
+from google.appengine.api import validation
+from google.appengine.api import yaml_errors
+from google.appengine.api import yaml_object
+from google.appengine.datastore import datastore_pb
+
+
+class Property(validation.Validated):
+ """Representation for an individual property of an index.
+
+ Attributes:
+ name: Name of attribute to sort by.
+ direction: Direction of sort.
+ """
+
+ ATTRIBUTES = {
+ 'name': validation.TYPE_STR,
+ 'direction': validation.Options(('asc', ('ascending',)),
+ ('desc', ('descending',)),
+ default='asc'),
+ }
+
+
+class Index(validation.Validated):
+ """Individual index definition.
+
+  The order of the properties determines a given index's sort priority.
+
+ Attributes:
+ kind: Datastore kind that index belongs to.
+ ancestors: Include ancestors in index.
+ properties: Properties to sort on.
+ """
+
+ ATTRIBUTES = {
+ 'kind': validation.TYPE_STR,
+ 'ancestor': validation.Type(bool, default=False),
+ 'properties': validation.Optional(validation.Repeated(Property)),
+ }
+
+
+class IndexDefinitions(validation.Validated):
+ """Top level for index definition file.
+
+ Attributes:
+ indexes: List of Index definitions.
+ """
+
+ ATTRIBUTES = {
+ 'indexes': validation.Optional(validation.Repeated(Index)),
+ }
+
+
+def ParseIndexDefinitions(document):
+ """Parse an individual index definitions document from string or stream.
+
+ Args:
+ document: Yaml document as a string or file-like stream.
+
+ Raises:
+ EmptyConfigurationFile when the configuration file is empty.
+ MultipleConfigurationFile when the configuration file contains more than
+ one document.
+
+ Returns:
+ Single parsed yaml file if one is defined, else None.
+ """
+ try:
+ return yaml_object.BuildSingleObject(IndexDefinitions, document)
+ except yaml_errors.EmptyConfigurationFile:
+ return None
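+
+
+# A hypothetical usage sketch (file name as in the example above):
+#
+#   defs = ParseIndexDefinitions(open('index.yaml'))
+#   if defs is not None and defs.indexes:
+#     for index in defs.indexes:
+#       print index.kind, index.ancestor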
+
+
+def ParseMultipleIndexDefinitions(document):
+ """Parse multiple index definitions documents from a string or stream.
+
+ Args:
+ document: Yaml document as a string or file-like stream.
+
+ Returns:
+    A list of datastore_index.IndexDefinitions objects, one for each document.
+ """
+ return yaml_object.BuildObjects(IndexDefinitions, document)
+
+
+def IndexDefinitionsToKeys(indexes):
+ """Convert IndexDefinitions to set of keys.
+
+ Args:
+ indexes: A datastore_index.IndexDefinitions instance, or None.
+
+ Returns:
+ A set of keys constructed from the argument, each key being a
+ tuple of the form (kind, ancestor, properties) where properties is
+ a tuple of (name, direction) pairs, direction being ASCENDING or
+ DESCENDING (the enums).
+ """
+ keyset = set()
+ if indexes is not None:
+ if indexes.indexes:
+ for index in indexes.indexes:
+ keyset.add(IndexToKey(index))
+ return keyset
+
+
+def IndexToKey(index):
+ """Convert Index to key.
+
+ Args:
+ index: A datastore_index.Index instance (not None!).
+
+ Returns:
+ A tuple of the form (kind, ancestor, properties) where properties
+ is a tuple of (name, direction) pairs, direction being ASCENDING
+ or DESCENDING (the enums).
+ """
+ props = []
+ if index.properties is not None:
+ for prop in index.properties:
+ if prop.direction == 'asc':
+ direction = ASCENDING
+ else:
+ direction = DESCENDING
+ props.append((prop.name, direction))
+ return index.kind, index.ancestor, tuple(props)
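+
+
+# For the first Cat index in the module docstring, IndexToKey returns
+# ('Cat', False, (('name', ASCENDING), ('age', DESCENDING))).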
+
+
+
+
+ASCENDING = datastore_pb.Query_Order.ASCENDING
+DESCENDING = datastore_pb.Query_Order.DESCENDING
+
+EQUALITY_OPERATORS = set((datastore_pb.Query_Filter.EQUAL,
+ ))
+INEQUALITY_OPERATORS = set((datastore_pb.Query_Filter.LESS_THAN,
+ datastore_pb.Query_Filter.LESS_THAN_OR_EQUAL,
+ datastore_pb.Query_Filter.GREATER_THAN,
+ datastore_pb.Query_Filter.GREATER_THAN_OR_EQUAL,
+ ))
+EXISTS_OPERATORS = set((datastore_pb.Query_Filter.EXISTS,
+ ))
+
+
+def Normalize(filters, orders):
+ """ Normalizes filter and order query components.
+
+ The resulting components have the same effect as the given components if used
+ in a query.
+
+ Returns:
+    (filters, orders): the reduced set of filters and orders.
+ """
+
+ for f in filters:
+ if f.op() == datastore_pb.Query_Filter.IN and f.property_size() == 1:
+      f.set_op(datastore_pb.Query_Filter.EQUAL)
+
+  eq_properties = set([f.property(0).name() for f in filters
+                       if f.op() == datastore_pb.Query_Filter.EQUAL])
+
+ remove_set = eq_properties.copy()
+ new_orders = []
+ for o in orders:
+ if o.property() not in remove_set:
+ remove_set.add(o.property())
+ new_orders.append(o)
+ orders = new_orders
+
+
+ if datastore_types._KEY_SPECIAL_PROPERTY in eq_properties:
+ orders = []
+
+ new_orders = []
+ for o in orders:
+ if o.property() == datastore_types._KEY_SPECIAL_PROPERTY:
+ new_orders.append(o)
+ break
+ new_orders.append(o)
+ orders = new_orders
+
+ return (filters, orders)
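+
+
+# Example: with a single equality filter on 'name' and orders
+# ['name' ASC, 'age' DESC], Normalize drops the order on 'name' (its value is
+# fixed by the filter) and keeps only the order on 'age'.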
+
+
+def RemoveNativelySupportedComponents(filters, orders):
+ """ Removes query components that are natively supported by the datastore.
+
+ The resulting filters and orders should not be used in an actual query.
+
+  Returns:
+    (filters, orders): the reduced set of filters and orders.
+ """
+ (filters, orders) = Normalize(filters, orders)
+
+ has_key_desc_order = False
+ if orders and orders[-1].property() == datastore_types._KEY_SPECIAL_PROPERTY:
+ if orders[-1].direction() == ASCENDING:
+ orders = orders[:-1]
+ else:
+ has_key_desc_order = True
+
+ if not has_key_desc_order:
+ for f in filters:
+ if (f.op() in INEQUALITY_OPERATORS and
+ f.property(0).name() != datastore_types._KEY_SPECIAL_PROPERTY):
+ break
+ else:
+ filters = [f for f in filters
+ if f.property(0).name() != datastore_types._KEY_SPECIAL_PROPERTY]
+
+ return (filters, orders)
+
+
+def CompositeIndexForQuery(query):
+ """Return the composite index needed for a query.
+
+ A query is translated into a tuple, as follows:
+
+ - The first item is the kind string, or None if we're not filtering
+ on kind (see below).
+
+ - The second item is a bool giving whether the query specifies an
+ ancestor.
+
+ - After that come (property, ASCENDING) pairs for those Filter
+ entries whose operator is EQUAL or IN. Since the order of these
+ doesn't matter, they are sorted by property name to normalize them
+ in order to avoid duplicates.
+
+ - After that comes at most one (property, ASCENDING) pair for a
+    Filter entry whose operator is one of the four inequalities. There
+ can be at most one of these.
+
+ - After that come all the (property, direction) pairs for the Order
+ entries, in the order given in the query. Exceptions:
+ (a) if there is a Filter entry with an inequality operator that matches
+ the first Order entry, the first order pair is omitted (or,
+ equivalently, in this case the inequality pair is omitted).
+ (b) if an Order entry corresponds to an equality filter, it is ignored
+ (since there will only ever be one value returned).
+ (c) if there is an equality filter on __key__ all orders are dropped
+ (since there will be at most one result returned).
+ (d) if there is an order on __key__ all further orders are dropped (since
+ keys are unique).
+ (e) orders on __key__ ASCENDING are dropped (since this is supported
+ natively by the datastore).
+
+ - Finally, if there are Filter entries whose operator is EXISTS, and
+ whose property names are not already listed, they are added, with
+ the direction set to ASCENDING.
+
+ This algorithm should consume all Filter and Order entries.
+
+ Additional notes:
+
+ - The low-level implementation allows queries that don't specify a
+ kind; but the Python API doesn't support this yet.
+
+ - If there's an inequality filter and one or more sort orders, the
+ first sort order *must* match the inequality filter.
+
+ - The following indexes are always built in and should be suppressed:
+ - query on kind only;
+ - query on kind and one filter *or* one order;
+ - query on ancestor only, without kind (not exposed in Python yet);
+ - query on kind and equality filters only, no order (with or without
+ ancestor).
+
+ - While the protocol buffer allows a Filter to contain multiple
+ properties, we don't use this. It is only needed for the IN operator
+ but this is (currently) handled on the client side, so in practice
+ each Filter is expected to have exactly one property.
+
+ Args:
+ query: A datastore_pb.Query instance.
+
+ Returns:
+ A tuple of the form (required, kind, ancestor, (prop1, prop2, ...), neq):
+ required: boolean, whether the index is required
+ kind: the kind or None;
+ ancestor: True if this is an ancestor query;
+ prop1, prop2, ...: tuples of the form (name, direction) where:
+ name: a property name;
+ direction: datastore_pb.Query_Order.ASCENDING or ...DESCENDING;
+ neq: the number of prop tuples corresponding to equality filters.
+ """
+ required = True
+
+ kind = query.kind()
+ ancestor = query.has_ancestor()
+ filters = query.filter_list()
+ orders = query.order_list()
+
+ for filter in filters:
+ assert filter.op() != datastore_pb.Query_Filter.IN, 'Filter.op()==IN'
+ nprops = len(filter.property_list())
+ assert nprops == 1, 'Filter has %s properties, expected 1' % nprops
+
+ if not kind:
+ required = False
+
+ (filters, orders) = RemoveNativelySupportedComponents(filters, orders)
+
+ eq_filters = [f for f in filters if f.op() in EQUALITY_OPERATORS]
+ ineq_filters = [f for f in filters if f.op() in INEQUALITY_OPERATORS]
+ exists_filters = [f for f in filters if f.op() in EXISTS_OPERATORS]
+ assert (len(eq_filters) + len(ineq_filters) +
+ len(exists_filters)) == len(filters), 'Not all filters used'
+
+ if (kind and not ineq_filters and not exists_filters and
+ not orders):
+ names = set(f.property(0).name() for f in eq_filters)
+ if not names.intersection(datastore_types._SPECIAL_PROPERTIES):
+ required = False
+
+ ineq_property = None
+ if ineq_filters:
+ ineq_property = ineq_filters[0].property(0).name()
+ for filter in ineq_filters:
+ assert filter.property(0).name() == ineq_property
+
+ props = []
+
+ for f in eq_filters:
+ prop = f.property(0)
+ props.append((prop.name(), ASCENDING))
+
+ props.sort()
+
+ if ineq_property:
+ if orders:
+ assert ineq_property == orders[0].property()
+ else:
+ props.append((ineq_property, ASCENDING))
+
+ for order in orders:
+ props.append((order.property(), order.direction()))
+
+ for filter in exists_filters:
+ prop = filter.property(0)
+ prop_name = prop.name()
+ for name, direction in props:
+ if name == prop_name:
+ break
+ else:
+ props.append((prop_name, ASCENDING))
+
+ if kind and not ancestor and len(props) <= 1:
+ required = False
+
+ if props:
+ prop, dir = props[0]
+    if prop in datastore_types._SPECIAL_PROPERTIES and dir == DESCENDING:
+ required = True
+
+ unique_names = set(name for name, dir in props)
+ if len(props) > 1 and len(unique_names) == 1:
+ required = False
+
+ return (required, kind, ancestor, tuple(props), len(eq_filters))
+
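+# Worked example (editorial): for a query with kind='Person', a filter
+# age >= 18 and orders [age ASC, name DESC], the algorithm above returns
+#   (True, 'Person', False, (('age', ASCENDING), ('name', DESCENDING)), 0)
+# i.e. a composite index on (age ASC, name DESC) is required, and none of
+# the prop tuples come from equality filters.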
+
+def IndexYamlForQuery(kind, ancestor, props):
+ """Return the composite index definition YAML needed for a query.
+
+  The arguments are the same as the corresponding elements of the tuple
+  returned by CompositeIndexForQuery, without the leading required element
+  and the trailing neq element.
+
+  Args:
+    kind: the kind or None.
+    ancestor: True if this is an ancestor query, False otherwise.
+    props: a sequence of (name, direction) tuples where:
+      name: a property name;
+      direction: datastore_pb.Query_Order.ASCENDING or ...DESCENDING.
+
+ Returns:
+ A string with the YAML for the composite index needed by the query.
+ """
+ yaml = []
+ yaml.append('- kind: %s' % kind)
+ if ancestor:
+ yaml.append(' ancestor: yes')
+ if props:
+ yaml.append(' properties:')
+ for name, direction in props:
+ yaml.append(' - name: %s' % name)
+ if direction == DESCENDING:
+ yaml.append(' direction: desc')
+ return '\n'.join(yaml)
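+
+# Example output (editorial): IndexYamlForQuery('Person', False,
+# [('age', ASCENDING), ('name', DESCENDING)]) yields:
+#   - kind: Person
+#     properties:
+#     - name: age
+#     - name: name
+#       direction: desc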
diff --git a/google_appengine/google/appengine/datastore/datastore_index.pyc b/google_appengine/google/appengine/datastore/datastore_index.pyc
new file mode 100644
index 0000000..e91e7be
--- /dev/null
+++ b/google_appengine/google/appengine/datastore/datastore_index.pyc
Binary files differ
diff --git a/google_appengine/google/appengine/datastore/datastore_pb.py b/google_appengine/google/appengine/datastore/datastore_pb.py
new file mode 100644
index 0000000..50ff5e7
--- /dev/null
+++ b/google_appengine/google/appengine/datastore/datastore_pb.py
@@ -0,0 +1,4673 @@
+#!/usr/bin/env python
+#
+# Copyright 2007 Google Inc.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
+
+from google.net.proto import ProtocolBuffer
+import array
+import dummy_thread as thread
+
+__pychecker__ = """maxreturns=0 maxbranches=0 no-callinit
+ unusednames=printElemNumber,debug_strs no-special"""
+
+from google.appengine.api.api_base_pb import Integer64Proto
+from google.appengine.api.api_base_pb import StringProto
+from google.appengine.api.api_base_pb import VoidProto
+from google.appengine.datastore.action_pb import Action
+from google.appengine.datastore.entity_pb import CompositeIndex
+from google.appengine.datastore.entity_pb import EntityProto
+from google.appengine.datastore.entity_pb import Index
+from google.appengine.datastore.entity_pb import Property
+from google.appengine.datastore.entity_pb import Path
+from google.appengine.datastore.entity_pb import Reference
+class Transaction(ProtocolBuffer.ProtocolMessage):
+ has_handle_ = 0
+ handle_ = 0
+
+ def __init__(self, contents=None):
+ if contents is not None: self.MergeFromString(contents)
+
+ def handle(self): return self.handle_
+
+ def set_handle(self, x):
+ self.has_handle_ = 1
+ self.handle_ = x
+
+ def clear_handle(self):
+ if self.has_handle_:
+ self.has_handle_ = 0
+ self.handle_ = 0
+
+ def has_handle(self): return self.has_handle_
+
+
+ def MergeFrom(self, x):
+ assert x is not self
+ if (x.has_handle()): self.set_handle(x.handle())
+
+ def Equals(self, x):
+ if x is self: return 1
+ if self.has_handle_ != x.has_handle_: return 0
+ if self.has_handle_ and self.handle_ != x.handle_: return 0
+ return 1
+
+ def IsInitialized(self, debug_strs=None):
+ initialized = 1
+ if (not self.has_handle_):
+ initialized = 0
+ if debug_strs is not None:
+ debug_strs.append('Required field: handle not set.')
+ return initialized
+
+ def ByteSize(self):
+ n = 0
+ return n + 9
+
+ def Clear(self):
+ self.clear_handle()
+
+ def OutputUnchecked(self, out):
+ out.putVarInt32(9)
+ out.put64(self.handle_)
+
+ def TryMerge(self, d):
+ while d.avail() > 0:
+ tt = d.getVarInt32()
+ if tt == 9:
+ self.set_handle(d.get64())
+ continue
+ if (tt == 0): raise ProtocolBuffer.ProtocolBufferDecodeError
+ d.skipData(tt)
+
+
+ def __str__(self, prefix="", printElemNumber=0):
+ res=""
+ if self.has_handle_: res+=prefix+("handle: %s\n" % self.DebugFormatFixed64(self.handle_))
+ return res
+
+
+ def _BuildTagLookupTable(sparse, maxtag, default=None):
+ return tuple([sparse.get(i, default) for i in xrange(0, 1+maxtag)])
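+  # (editorial note) Builds a dense, tag-indexed tuple from a sparse dict,
+  # e.g. _BuildTagLookupTable({1: "handle"}, 1) == (None, "handle").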
+
+ khandle = 1
+
+ _TEXT = _BuildTagLookupTable({
+ 0: "ErrorCode",
+ 1: "handle",
+ }, 1)
+
+ _TYPES = _BuildTagLookupTable({
+ 0: ProtocolBuffer.Encoder.NUMERIC,
+ 1: ProtocolBuffer.Encoder.DOUBLE,
+ }, 1, ProtocolBuffer.Encoder.MAX_TYPE)
+
+ _STYLE = """"""
+ _STYLE_CONTENT_TYPE = """"""
+class Query_Filter(ProtocolBuffer.ProtocolMessage):
+
+ LESS_THAN = 1
+ LESS_THAN_OR_EQUAL = 2
+ GREATER_THAN = 3
+ GREATER_THAN_OR_EQUAL = 4
+ EQUAL = 5
+ IN = 6
+ EXISTS = 7
+
+ _Operator_NAMES = {
+ 1: "LESS_THAN",
+ 2: "LESS_THAN_OR_EQUAL",
+ 3: "GREATER_THAN",
+ 4: "GREATER_THAN_OR_EQUAL",
+ 5: "EQUAL",
+ 6: "IN",
+ 7: "EXISTS",
+ }
+
+ def Operator_Name(cls, x): return cls._Operator_NAMES.get(x, "")
+ Operator_Name = classmethod(Operator_Name)
+
+ has_op_ = 0
+ op_ = 0
+
+ def __init__(self, contents=None):
+ self.property_ = []
+ if contents is not None: self.MergeFromString(contents)
+
+ def op(self): return self.op_
+
+ def set_op(self, x):
+ self.has_op_ = 1
+ self.op_ = x
+
+ def clear_op(self):
+ if self.has_op_:
+ self.has_op_ = 0
+ self.op_ = 0
+
+ def has_op(self): return self.has_op_
+
+ def property_size(self): return len(self.property_)
+ def property_list(self): return self.property_
+
+ def property(self, i):
+ return self.property_[i]
+
+ def mutable_property(self, i):
+ return self.property_[i]
+
+ def add_property(self):
+ x = Property()
+ self.property_.append(x)
+ return x
+
+ def clear_property(self):
+ self.property_ = []
+
+ def MergeFrom(self, x):
+ assert x is not self
+ if (x.has_op()): self.set_op(x.op())
+ for i in xrange(x.property_size()): self.add_property().CopyFrom(x.property(i))
+
+ def Equals(self, x):
+ if x is self: return 1
+ if self.has_op_ != x.has_op_: return 0
+ if self.has_op_ and self.op_ != x.op_: return 0
+ if len(self.property_) != len(x.property_): return 0
+ for e1, e2 in zip(self.property_, x.property_):
+ if e1 != e2: return 0
+ return 1
+
+ def IsInitialized(self, debug_strs=None):
+ initialized = 1
+ if (not self.has_op_):
+ initialized = 0
+ if debug_strs is not None:
+ debug_strs.append('Required field: op not set.')
+ for p in self.property_:
+ if not p.IsInitialized(debug_strs): initialized=0
+ return initialized
+
+ def ByteSize(self):
+ n = 0
+ n += self.lengthVarInt64(self.op_)
+ n += 1 * len(self.property_)
+ for i in xrange(len(self.property_)): n += self.lengthString(self.property_[i].ByteSize())
+ return n + 1
+
+ def Clear(self):
+ self.clear_op()
+ self.clear_property()
+
+ def OutputUnchecked(self, out):
+ out.putVarInt32(48)
+ out.putVarInt32(self.op_)
+ for i in xrange(len(self.property_)):
+ out.putVarInt32(114)
+ out.putVarInt32(self.property_[i].ByteSize())
+ self.property_[i].OutputUnchecked(out)
+
+ def TryMerge(self, d):
+ while 1:
+ tt = d.getVarInt32()
+ if tt == 36: break
+ if tt == 48:
+ self.set_op(d.getVarInt32())
+ continue
+ if tt == 114:
+ length = d.getVarInt32()
+ tmp = ProtocolBuffer.Decoder(d.buffer(), d.pos(), d.pos() + length)
+ d.skip(length)
+ self.add_property().TryMerge(tmp)
+ continue
+ if (tt == 0): raise ProtocolBuffer.ProtocolBufferDecodeError
+ d.skipData(tt)
+
+
+ def __str__(self, prefix="", printElemNumber=0):
+ res=""
+ if self.has_op_: res+=prefix+("op: %s\n" % self.DebugFormatInt32(self.op_))
+ cnt=0
+ for e in self.property_:
+ elm=""
+ if printElemNumber: elm="(%d)" % cnt
+ res+=prefix+("property%s <\n" % elm)
+ res+=e.__str__(prefix + " ", printElemNumber)
+ res+=prefix+">\n"
+ cnt+=1
+ return res
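+
+# (editorial note) Filter and Order are encoded as protocol-buffer *groups*
+# within Query: Query writes START_GROUP tags 35 and 75 before each, and
+# END_GROUP tags 36 and 76 after, which is why the TryMerge methods above
+# and below break on tags 36 and 76 instead of reading a length prefix.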
+
+class Query_Order(ProtocolBuffer.ProtocolMessage):
+
+ ASCENDING = 1
+ DESCENDING = 2
+
+ _Direction_NAMES = {
+ 1: "ASCENDING",
+ 2: "DESCENDING",
+ }
+
+ def Direction_Name(cls, x): return cls._Direction_NAMES.get(x, "")
+ Direction_Name = classmethod(Direction_Name)
+
+ has_property_ = 0
+ property_ = ""
+ has_direction_ = 0
+ direction_ = 1
+
+ def __init__(self, contents=None):
+ if contents is not None: self.MergeFromString(contents)
+
+ def property(self): return self.property_
+
+ def set_property(self, x):
+ self.has_property_ = 1
+ self.property_ = x
+
+ def clear_property(self):
+ if self.has_property_:
+ self.has_property_ = 0
+ self.property_ = ""
+
+ def has_property(self): return self.has_property_
+
+ def direction(self): return self.direction_
+
+ def set_direction(self, x):
+ self.has_direction_ = 1
+ self.direction_ = x
+
+ def clear_direction(self):
+ if self.has_direction_:
+ self.has_direction_ = 0
+ self.direction_ = 1
+
+ def has_direction(self): return self.has_direction_
+
+
+ def MergeFrom(self, x):
+ assert x is not self
+ if (x.has_property()): self.set_property(x.property())
+ if (x.has_direction()): self.set_direction(x.direction())
+
+ def Equals(self, x):
+ if x is self: return 1
+ if self.has_property_ != x.has_property_: return 0
+ if self.has_property_ and self.property_ != x.property_: return 0
+ if self.has_direction_ != x.has_direction_: return 0
+ if self.has_direction_ and self.direction_ != x.direction_: return 0
+ return 1
+
+ def IsInitialized(self, debug_strs=None):
+ initialized = 1
+ if (not self.has_property_):
+ initialized = 0
+ if debug_strs is not None:
+ debug_strs.append('Required field: property not set.')
+ return initialized
+
+ def ByteSize(self):
+ n = 0
+ n += self.lengthString(len(self.property_))
+ if (self.has_direction_): n += 1 + self.lengthVarInt64(self.direction_)
+ return n + 1
+
+ def Clear(self):
+ self.clear_property()
+ self.clear_direction()
+
+ def OutputUnchecked(self, out):
+ out.putVarInt32(82)
+ out.putPrefixedString(self.property_)
+ if (self.has_direction_):
+ out.putVarInt32(88)
+ out.putVarInt32(self.direction_)
+
+ def TryMerge(self, d):
+ while 1:
+ tt = d.getVarInt32()
+ if tt == 76: break
+ if tt == 82:
+ self.set_property(d.getPrefixedString())
+ continue
+ if tt == 88:
+ self.set_direction(d.getVarInt32())
+ continue
+ if (tt == 0): raise ProtocolBuffer.ProtocolBufferDecodeError
+ d.skipData(tt)
+
+
+ def __str__(self, prefix="", printElemNumber=0):
+ res=""
+ if self.has_property_: res+=prefix+("property: %s\n" % self.DebugFormatString(self.property_))
+ if self.has_direction_: res+=prefix+("direction: %s\n" % self.DebugFormatInt32(self.direction_))
+ return res
+
+class Query(ProtocolBuffer.ProtocolMessage):
+
+ ORDER_FIRST = 1
+ ANCESTOR_FIRST = 2
+ FILTER_FIRST = 3
+
+ _Hint_NAMES = {
+ 1: "ORDER_FIRST",
+ 2: "ANCESTOR_FIRST",
+ 3: "FILTER_FIRST",
+ }
+
+ def Hint_Name(cls, x): return cls._Hint_NAMES.get(x, "")
+ Hint_Name = classmethod(Hint_Name)
+
+ has_app_ = 0
+ app_ = ""
+ has_kind_ = 0
+ kind_ = ""
+ has_ancestor_ = 0
+ ancestor_ = None
+ has_search_query_ = 0
+ search_query_ = ""
+ has_hint_ = 0
+ hint_ = 0
+ has_count_ = 0
+ count_ = 0
+ has_offset_ = 0
+ offset_ = 0
+ has_limit_ = 0
+ limit_ = 0
+ has_require_perfect_plan_ = 0
+ require_perfect_plan_ = 0
+ has_keys_only_ = 0
+ keys_only_ = 0
+ has_transaction_ = 0
+ transaction_ = None
+ has_distinct_ = 0
+ distinct_ = 0
+ has_compile_ = 0
+ compile_ = 0
+
+ def __init__(self, contents=None):
+ self.filter_ = []
+ self.order_ = []
+ self.composite_index_ = []
+ self.lazy_init_lock_ = thread.allocate_lock()
+ if contents is not None: self.MergeFromString(contents)
+
+ def app(self): return self.app_
+
+ def set_app(self, x):
+ self.has_app_ = 1
+ self.app_ = x
+
+ def clear_app(self):
+ if self.has_app_:
+ self.has_app_ = 0
+ self.app_ = ""
+
+ def has_app(self): return self.has_app_
+
+ def kind(self): return self.kind_
+
+ def set_kind(self, x):
+ self.has_kind_ = 1
+ self.kind_ = x
+
+ def clear_kind(self):
+ if self.has_kind_:
+ self.has_kind_ = 0
+ self.kind_ = ""
+
+ def has_kind(self): return self.has_kind_
+
+ def ancestor(self):
+ if self.ancestor_ is None:
+ self.lazy_init_lock_.acquire()
+ try:
+ if self.ancestor_ is None: self.ancestor_ = Reference()
+ finally:
+ self.lazy_init_lock_.release()
+ return self.ancestor_
+
+ def mutable_ancestor(self): self.has_ancestor_ = 1; return self.ancestor()
+
+ def clear_ancestor(self):
+ if self.has_ancestor_:
+ self.has_ancestor_ = 0;
+ if self.ancestor_ is not None: self.ancestor_.Clear()
+
+ def has_ancestor(self): return self.has_ancestor_
+
+ def filter_size(self): return len(self.filter_)
+ def filter_list(self): return self.filter_
+
+ def filter(self, i):
+ return self.filter_[i]
+
+ def mutable_filter(self, i):
+ return self.filter_[i]
+
+ def add_filter(self):
+ x = Query_Filter()
+ self.filter_.append(x)
+ return x
+
+ def clear_filter(self):
+ self.filter_ = []
+ def search_query(self): return self.search_query_
+
+ def set_search_query(self, x):
+ self.has_search_query_ = 1
+ self.search_query_ = x
+
+ def clear_search_query(self):
+ if self.has_search_query_:
+ self.has_search_query_ = 0
+ self.search_query_ = ""
+
+ def has_search_query(self): return self.has_search_query_
+
+ def order_size(self): return len(self.order_)
+ def order_list(self): return self.order_
+
+ def order(self, i):
+ return self.order_[i]
+
+ def mutable_order(self, i):
+ return self.order_[i]
+
+ def add_order(self):
+ x = Query_Order()
+ self.order_.append(x)
+ return x
+
+ def clear_order(self):
+ self.order_ = []
+ def hint(self): return self.hint_
+
+ def set_hint(self, x):
+ self.has_hint_ = 1
+ self.hint_ = x
+
+ def clear_hint(self):
+ if self.has_hint_:
+ self.has_hint_ = 0
+ self.hint_ = 0
+
+ def has_hint(self): return self.has_hint_
+
+ def count(self): return self.count_
+
+ def set_count(self, x):
+ self.has_count_ = 1
+ self.count_ = x
+
+ def clear_count(self):
+ if self.has_count_:
+ self.has_count_ = 0
+ self.count_ = 0
+
+ def has_count(self): return self.has_count_
+
+ def offset(self): return self.offset_
+
+ def set_offset(self, x):
+ self.has_offset_ = 1
+ self.offset_ = x
+
+ def clear_offset(self):
+ if self.has_offset_:
+ self.has_offset_ = 0
+ self.offset_ = 0
+
+ def has_offset(self): return self.has_offset_
+
+ def limit(self): return self.limit_
+
+ def set_limit(self, x):
+ self.has_limit_ = 1
+ self.limit_ = x
+
+ def clear_limit(self):
+ if self.has_limit_:
+ self.has_limit_ = 0
+ self.limit_ = 0
+
+ def has_limit(self): return self.has_limit_
+
+ def composite_index_size(self): return len(self.composite_index_)
+ def composite_index_list(self): return self.composite_index_
+
+ def composite_index(self, i):
+ return self.composite_index_[i]
+
+ def mutable_composite_index(self, i):
+ return self.composite_index_[i]
+
+ def add_composite_index(self):
+ x = CompositeIndex()
+ self.composite_index_.append(x)
+ return x
+
+ def clear_composite_index(self):
+ self.composite_index_ = []
+ def require_perfect_plan(self): return self.require_perfect_plan_
+
+ def set_require_perfect_plan(self, x):
+ self.has_require_perfect_plan_ = 1
+ self.require_perfect_plan_ = x
+
+ def clear_require_perfect_plan(self):
+ if self.has_require_perfect_plan_:
+ self.has_require_perfect_plan_ = 0
+ self.require_perfect_plan_ = 0
+
+ def has_require_perfect_plan(self): return self.has_require_perfect_plan_
+
+ def keys_only(self): return self.keys_only_
+
+ def set_keys_only(self, x):
+ self.has_keys_only_ = 1
+ self.keys_only_ = x
+
+ def clear_keys_only(self):
+ if self.has_keys_only_:
+ self.has_keys_only_ = 0
+ self.keys_only_ = 0
+
+ def has_keys_only(self): return self.has_keys_only_
+
+ def transaction(self):
+ if self.transaction_ is None:
+ self.lazy_init_lock_.acquire()
+ try:
+ if self.transaction_ is None: self.transaction_ = Transaction()
+ finally:
+ self.lazy_init_lock_.release()
+ return self.transaction_
+
+ def mutable_transaction(self): self.has_transaction_ = 1; return self.transaction()
+
+ def clear_transaction(self):
+ if self.has_transaction_:
+ self.has_transaction_ = 0;
+ if self.transaction_ is not None: self.transaction_.Clear()
+
+ def has_transaction(self): return self.has_transaction_
+
+ def distinct(self): return self.distinct_
+
+ def set_distinct(self, x):
+ self.has_distinct_ = 1
+ self.distinct_ = x
+
+ def clear_distinct(self):
+ if self.has_distinct_:
+ self.has_distinct_ = 0
+ self.distinct_ = 0
+
+ def has_distinct(self): return self.has_distinct_
+
+ def compile(self): return self.compile_
+
+ def set_compile(self, x):
+ self.has_compile_ = 1
+ self.compile_ = x
+
+ def clear_compile(self):
+ if self.has_compile_:
+ self.has_compile_ = 0
+ self.compile_ = 0
+
+ def has_compile(self): return self.has_compile_
+
+
+ def MergeFrom(self, x):
+ assert x is not self
+ if (x.has_app()): self.set_app(x.app())
+ if (x.has_kind()): self.set_kind(x.kind())
+ if (x.has_ancestor()): self.mutable_ancestor().MergeFrom(x.ancestor())
+ for i in xrange(x.filter_size()): self.add_filter().CopyFrom(x.filter(i))
+ if (x.has_search_query()): self.set_search_query(x.search_query())
+ for i in xrange(x.order_size()): self.add_order().CopyFrom(x.order(i))
+ if (x.has_hint()): self.set_hint(x.hint())
+ if (x.has_count()): self.set_count(x.count())
+ if (x.has_offset()): self.set_offset(x.offset())
+ if (x.has_limit()): self.set_limit(x.limit())
+ for i in xrange(x.composite_index_size()): self.add_composite_index().CopyFrom(x.composite_index(i))
+ if (x.has_require_perfect_plan()): self.set_require_perfect_plan(x.require_perfect_plan())
+ if (x.has_keys_only()): self.set_keys_only(x.keys_only())
+ if (x.has_transaction()): self.mutable_transaction().MergeFrom(x.transaction())
+ if (x.has_distinct()): self.set_distinct(x.distinct())
+ if (x.has_compile()): self.set_compile(x.compile())
+
+ def Equals(self, x):
+ if x is self: return 1
+ if self.has_app_ != x.has_app_: return 0
+ if self.has_app_ and self.app_ != x.app_: return 0
+ if self.has_kind_ != x.has_kind_: return 0
+ if self.has_kind_ and self.kind_ != x.kind_: return 0
+ if self.has_ancestor_ != x.has_ancestor_: return 0
+ if self.has_ancestor_ and self.ancestor_ != x.ancestor_: return 0
+ if len(self.filter_) != len(x.filter_): return 0
+ for e1, e2 in zip(self.filter_, x.filter_):
+ if e1 != e2: return 0
+ if self.has_search_query_ != x.has_search_query_: return 0
+ if self.has_search_query_ and self.search_query_ != x.search_query_: return 0
+ if len(self.order_) != len(x.order_): return 0
+ for e1, e2 in zip(self.order_, x.order_):
+ if e1 != e2: return 0
+ if self.has_hint_ != x.has_hint_: return 0
+ if self.has_hint_ and self.hint_ != x.hint_: return 0
+ if self.has_count_ != x.has_count_: return 0
+ if self.has_count_ and self.count_ != x.count_: return 0
+ if self.has_offset_ != x.has_offset_: return 0
+ if self.has_offset_ and self.offset_ != x.offset_: return 0
+ if self.has_limit_ != x.has_limit_: return 0
+ if self.has_limit_ and self.limit_ != x.limit_: return 0
+ if len(self.composite_index_) != len(x.composite_index_): return 0
+ for e1, e2 in zip(self.composite_index_, x.composite_index_):
+ if e1 != e2: return 0
+ if self.has_require_perfect_plan_ != x.has_require_perfect_plan_: return 0
+ if self.has_require_perfect_plan_ and self.require_perfect_plan_ != x.require_perfect_plan_: return 0
+ if self.has_keys_only_ != x.has_keys_only_: return 0
+ if self.has_keys_only_ and self.keys_only_ != x.keys_only_: return 0
+ if self.has_transaction_ != x.has_transaction_: return 0
+ if self.has_transaction_ and self.transaction_ != x.transaction_: return 0
+ if self.has_distinct_ != x.has_distinct_: return 0
+ if self.has_distinct_ and self.distinct_ != x.distinct_: return 0
+ if self.has_compile_ != x.has_compile_: return 0
+ if self.has_compile_ and self.compile_ != x.compile_: return 0
+ return 1
+
+ def IsInitialized(self, debug_strs=None):
+ initialized = 1
+ if (not self.has_app_):
+ initialized = 0
+ if debug_strs is not None:
+ debug_strs.append('Required field: app not set.')
+ if (self.has_ancestor_ and not self.ancestor_.IsInitialized(debug_strs)): initialized = 0
+ for p in self.filter_:
+ if not p.IsInitialized(debug_strs): initialized=0
+ for p in self.order_:
+ if not p.IsInitialized(debug_strs): initialized=0
+ for p in self.composite_index_:
+ if not p.IsInitialized(debug_strs): initialized=0
+ if (self.has_transaction_ and not self.transaction_.IsInitialized(debug_strs)): initialized = 0
+ return initialized
+
+ def ByteSize(self):
+ n = 0
+ n += self.lengthString(len(self.app_))
+ if (self.has_kind_): n += 1 + self.lengthString(len(self.kind_))
+ if (self.has_ancestor_): n += 2 + self.lengthString(self.ancestor_.ByteSize())
+ n += 2 * len(self.filter_)
+ for i in xrange(len(self.filter_)): n += self.filter_[i].ByteSize()
+ if (self.has_search_query_): n += 1 + self.lengthString(len(self.search_query_))
+ n += 2 * len(self.order_)
+ for i in xrange(len(self.order_)): n += self.order_[i].ByteSize()
+ if (self.has_hint_): n += 2 + self.lengthVarInt64(self.hint_)
+ if (self.has_count_): n += 2 + self.lengthVarInt64(self.count_)
+ if (self.has_offset_): n += 1 + self.lengthVarInt64(self.offset_)
+ if (self.has_limit_): n += 2 + self.lengthVarInt64(self.limit_)
+ n += 2 * len(self.composite_index_)
+ for i in xrange(len(self.composite_index_)): n += self.lengthString(self.composite_index_[i].ByteSize())
+ if (self.has_require_perfect_plan_): n += 3
+ if (self.has_keys_only_): n += 3
+ if (self.has_transaction_): n += 2 + self.lengthString(self.transaction_.ByteSize())
+ if (self.has_distinct_): n += 3
+ if (self.has_compile_): n += 3
+ return n + 1
+
+ def Clear(self):
+ self.clear_app()
+ self.clear_kind()
+ self.clear_ancestor()
+ self.clear_filter()
+ self.clear_search_query()
+ self.clear_order()
+ self.clear_hint()
+ self.clear_count()
+ self.clear_offset()
+ self.clear_limit()
+ self.clear_composite_index()
+ self.clear_require_perfect_plan()
+ self.clear_keys_only()
+ self.clear_transaction()
+ self.clear_distinct()
+ self.clear_compile()
+
+ def OutputUnchecked(self, out):
+ out.putVarInt32(10)
+ out.putPrefixedString(self.app_)
+ if (self.has_kind_):
+ out.putVarInt32(26)
+ out.putPrefixedString(self.kind_)
+ for i in xrange(len(self.filter_)):
+ out.putVarInt32(35)
+ self.filter_[i].OutputUnchecked(out)
+ out.putVarInt32(36)
+ if (self.has_search_query_):
+ out.putVarInt32(66)
+ out.putPrefixedString(self.search_query_)
+ for i in xrange(len(self.order_)):
+ out.putVarInt32(75)
+ self.order_[i].OutputUnchecked(out)
+ out.putVarInt32(76)
+ if (self.has_offset_):
+ out.putVarInt32(96)
+ out.putVarInt32(self.offset_)
+ if (self.has_limit_):
+ out.putVarInt32(128)
+ out.putVarInt32(self.limit_)
+ if (self.has_ancestor_):
+ out.putVarInt32(138)
+ out.putVarInt32(self.ancestor_.ByteSize())
+ self.ancestor_.OutputUnchecked(out)
+ if (self.has_hint_):
+ out.putVarInt32(144)
+ out.putVarInt32(self.hint_)
+ for i in xrange(len(self.composite_index_)):
+ out.putVarInt32(154)
+ out.putVarInt32(self.composite_index_[i].ByteSize())
+ self.composite_index_[i].OutputUnchecked(out)
+ if (self.has_require_perfect_plan_):
+ out.putVarInt32(160)
+ out.putBoolean(self.require_perfect_plan_)
+ if (self.has_keys_only_):
+ out.putVarInt32(168)
+ out.putBoolean(self.keys_only_)
+ if (self.has_transaction_):
+ out.putVarInt32(178)
+ out.putVarInt32(self.transaction_.ByteSize())
+ self.transaction_.OutputUnchecked(out)
+ if (self.has_count_):
+ out.putVarInt32(184)
+ out.putVarInt32(self.count_)
+ if (self.has_distinct_):
+ out.putVarInt32(192)
+ out.putBoolean(self.distinct_)
+ if (self.has_compile_):
+ out.putVarInt32(200)
+ out.putBoolean(self.compile_)
+
+ def TryMerge(self, d):
+ while d.avail() > 0:
+ tt = d.getVarInt32()
+ if tt == 10:
+ self.set_app(d.getPrefixedString())
+ continue
+ if tt == 26:
+ self.set_kind(d.getPrefixedString())
+ continue
+ if tt == 35:
+ self.add_filter().TryMerge(d)
+ continue
+ if tt == 66:
+ self.set_search_query(d.getPrefixedString())
+ continue
+ if tt == 75:
+ self.add_order().TryMerge(d)
+ continue
+ if tt == 96:
+ self.set_offset(d.getVarInt32())
+ continue
+ if tt == 128:
+ self.set_limit(d.getVarInt32())
+ continue
+ if tt == 138:
+ length = d.getVarInt32()
+ tmp = ProtocolBuffer.Decoder(d.buffer(), d.pos(), d.pos() + length)
+ d.skip(length)
+ self.mutable_ancestor().TryMerge(tmp)
+ continue
+ if tt == 144:
+ self.set_hint(d.getVarInt32())
+ continue
+ if tt == 154:
+ length = d.getVarInt32()
+ tmp = ProtocolBuffer.Decoder(d.buffer(), d.pos(), d.pos() + length)
+ d.skip(length)
+ self.add_composite_index().TryMerge(tmp)
+ continue
+ if tt == 160:
+ self.set_require_perfect_plan(d.getBoolean())
+ continue
+ if tt == 168:
+ self.set_keys_only(d.getBoolean())
+ continue
+ if tt == 178:
+ length = d.getVarInt32()
+ tmp = ProtocolBuffer.Decoder(d.buffer(), d.pos(), d.pos() + length)
+ d.skip(length)
+ self.mutable_transaction().TryMerge(tmp)
+ continue
+ if tt == 184:
+ self.set_count(d.getVarInt32())
+ continue
+ if tt == 192:
+ self.set_distinct(d.getBoolean())
+ continue
+ if tt == 200:
+ self.set_compile(d.getBoolean())
+ continue
+ if (tt == 0): raise ProtocolBuffer.ProtocolBufferDecodeError
+ d.skipData(tt)
+
+
+ def __str__(self, prefix="", printElemNumber=0):
+ res=""
+ if self.has_app_: res+=prefix+("app: %s\n" % self.DebugFormatString(self.app_))
+ if self.has_kind_: res+=prefix+("kind: %s\n" % self.DebugFormatString(self.kind_))
+ if self.has_ancestor_:
+ res+=prefix+"ancestor <\n"
+ res+=self.ancestor_.__str__(prefix + " ", printElemNumber)
+ res+=prefix+">\n"
+ cnt=0
+ for e in self.filter_:
+ elm=""
+ if printElemNumber: elm="(%d)" % cnt
+ res+=prefix+("Filter%s {\n" % elm)
+ res+=e.__str__(prefix + " ", printElemNumber)
+ res+=prefix+"}\n"
+ cnt+=1
+ if self.has_search_query_: res+=prefix+("search_query: %s\n" % self.DebugFormatString(self.search_query_))
+ cnt=0
+ for e in self.order_:
+ elm=""
+ if printElemNumber: elm="(%d)" % cnt
+ res+=prefix+("Order%s {\n" % elm)
+ res+=e.__str__(prefix + " ", printElemNumber)
+ res+=prefix+"}\n"
+ cnt+=1
+ if self.has_hint_: res+=prefix+("hint: %s\n" % self.DebugFormatInt32(self.hint_))
+ if self.has_count_: res+=prefix+("count: %s\n" % self.DebugFormatInt32(self.count_))
+ if self.has_offset_: res+=prefix+("offset: %s\n" % self.DebugFormatInt32(self.offset_))
+ if self.has_limit_: res+=prefix+("limit: %s\n" % self.DebugFormatInt32(self.limit_))
+ cnt=0
+ for e in self.composite_index_:
+ elm=""
+ if printElemNumber: elm="(%d)" % cnt
+ res+=prefix+("composite_index%s <\n" % elm)
+ res+=e.__str__(prefix + " ", printElemNumber)
+ res+=prefix+">\n"
+ cnt+=1
+ if self.has_require_perfect_plan_: res+=prefix+("require_perfect_plan: %s\n" % self.DebugFormatBool(self.require_perfect_plan_))
+ if self.has_keys_only_: res+=prefix+("keys_only: %s\n" % self.DebugFormatBool(self.keys_only_))
+ if self.has_transaction_:
+ res+=prefix+"transaction <\n"
+ res+=self.transaction_.__str__(prefix + " ", printElemNumber)
+ res+=prefix+">\n"
+ if self.has_distinct_: res+=prefix+("distinct: %s\n" % self.DebugFormatBool(self.distinct_))
+ if self.has_compile_: res+=prefix+("compile: %s\n" % self.DebugFormatBool(self.compile_))
+ return res
+
+
+ def _BuildTagLookupTable(sparse, maxtag, default=None):
+ return tuple([sparse.get(i, default) for i in xrange(0, 1+maxtag)])
+
+ kapp = 1
+ kkind = 3
+ kancestor = 17
+ kFilterGroup = 4
+ kFilterop = 6
+ kFilterproperty = 14
+ ksearch_query = 8
+ kOrderGroup = 9
+ kOrderproperty = 10
+ kOrderdirection = 11
+ khint = 18
+ kcount = 23
+ koffset = 12
+ klimit = 16
+ kcomposite_index = 19
+ krequire_perfect_plan = 20
+ kkeys_only = 21
+ ktransaction = 22
+ kdistinct = 24
+ kcompile = 25
+
+ _TEXT = _BuildTagLookupTable({
+ 0: "ErrorCode",
+ 1: "app",
+ 3: "kind",
+ 4: "Filter",
+ 6: "op",
+ 8: "search_query",
+ 9: "Order",
+ 10: "property",
+ 11: "direction",
+ 12: "offset",
+ 14: "property",
+ 16: "limit",
+ 17: "ancestor",
+ 18: "hint",
+ 19: "composite_index",
+ 20: "require_perfect_plan",
+ 21: "keys_only",
+ 22: "transaction",
+ 23: "count",
+ 24: "distinct",
+ 25: "compile",
+ }, 25)
+
+ _TYPES = _BuildTagLookupTable({
+ 0: ProtocolBuffer.Encoder.NUMERIC,
+ 1: ProtocolBuffer.Encoder.STRING,
+ 3: ProtocolBuffer.Encoder.STRING,
+ 4: ProtocolBuffer.Encoder.STARTGROUP,
+ 6: ProtocolBuffer.Encoder.NUMERIC,
+ 8: ProtocolBuffer.Encoder.STRING,
+ 9: ProtocolBuffer.Encoder.STARTGROUP,
+ 10: ProtocolBuffer.Encoder.STRING,
+ 11: ProtocolBuffer.Encoder.NUMERIC,
+ 12: ProtocolBuffer.Encoder.NUMERIC,
+ 14: ProtocolBuffer.Encoder.STRING,
+ 16: ProtocolBuffer.Encoder.NUMERIC,
+ 17: ProtocolBuffer.Encoder.STRING,
+ 18: ProtocolBuffer.Encoder.NUMERIC,
+ 19: ProtocolBuffer.Encoder.STRING,
+ 20: ProtocolBuffer.Encoder.NUMERIC,
+ 21: ProtocolBuffer.Encoder.NUMERIC,
+ 22: ProtocolBuffer.Encoder.STRING,
+ 23: ProtocolBuffer.Encoder.NUMERIC,
+ 24: ProtocolBuffer.Encoder.NUMERIC,
+ 25: ProtocolBuffer.Encoder.NUMERIC,
+ }, 25, ProtocolBuffer.Encoder.MAX_TYPE)
+
+ _STYLE = """"""
+ _STYLE_CONTENT_TYPE = """"""
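+# Usage sketch (editorial; kept in comments so the module is unchanged):
+#   q = Query()
+#   q.set_app('myapp')                  # 'app' is the only required field
+#   q.set_kind('Person')
+#   order = q.add_order()
+#   order.set_property('age')
+#   order.set_direction(Query_Order.DESCENDING)
+#   data = q.Encode()   # Encode()/MergeFromString() come from ProtocolMessage
+#   assert Query(data).Equals(q)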
+class CompiledQuery_PrimaryScan(ProtocolBuffer.ProtocolMessage):
+ has_index_name_ = 0
+ index_name_ = ""
+ has_start_key_ = 0
+ start_key_ = ""
+ has_start_inclusive_ = 0
+ start_inclusive_ = 0
+ has_end_key_ = 0
+ end_key_ = ""
+ has_end_inclusive_ = 0
+ end_inclusive_ = 0
+
+ def __init__(self, contents=None):
+ if contents is not None: self.MergeFromString(contents)
+
+ def index_name(self): return self.index_name_
+
+ def set_index_name(self, x):
+ self.has_index_name_ = 1
+ self.index_name_ = x
+
+ def clear_index_name(self):
+ if self.has_index_name_:
+ self.has_index_name_ = 0
+ self.index_name_ = ""
+
+ def has_index_name(self): return self.has_index_name_
+
+ def start_key(self): return self.start_key_
+
+ def set_start_key(self, x):
+ self.has_start_key_ = 1
+ self.start_key_ = x
+
+ def clear_start_key(self):
+ if self.has_start_key_:
+ self.has_start_key_ = 0
+ self.start_key_ = ""
+
+ def has_start_key(self): return self.has_start_key_
+
+ def start_inclusive(self): return self.start_inclusive_
+
+ def set_start_inclusive(self, x):
+ self.has_start_inclusive_ = 1
+ self.start_inclusive_ = x
+
+ def clear_start_inclusive(self):
+ if self.has_start_inclusive_:
+ self.has_start_inclusive_ = 0
+ self.start_inclusive_ = 0
+
+ def has_start_inclusive(self): return self.has_start_inclusive_
+
+ def end_key(self): return self.end_key_
+
+ def set_end_key(self, x):
+ self.has_end_key_ = 1
+ self.end_key_ = x
+
+ def clear_end_key(self):
+ if self.has_end_key_:
+ self.has_end_key_ = 0
+ self.end_key_ = ""
+
+ def has_end_key(self): return self.has_end_key_
+
+ def end_inclusive(self): return self.end_inclusive_
+
+ def set_end_inclusive(self, x):
+ self.has_end_inclusive_ = 1
+ self.end_inclusive_ = x
+
+ def clear_end_inclusive(self):
+ if self.has_end_inclusive_:
+ self.has_end_inclusive_ = 0
+ self.end_inclusive_ = 0
+
+ def has_end_inclusive(self): return self.has_end_inclusive_
+
+
+ def MergeFrom(self, x):
+ assert x is not self
+ if (x.has_index_name()): self.set_index_name(x.index_name())
+ if (x.has_start_key()): self.set_start_key(x.start_key())
+ if (x.has_start_inclusive()): self.set_start_inclusive(x.start_inclusive())
+ if (x.has_end_key()): self.set_end_key(x.end_key())
+ if (x.has_end_inclusive()): self.set_end_inclusive(x.end_inclusive())
+
+ def Equals(self, x):
+ if x is self: return 1
+ if self.has_index_name_ != x.has_index_name_: return 0
+ if self.has_index_name_ and self.index_name_ != x.index_name_: return 0
+ if self.has_start_key_ != x.has_start_key_: return 0
+ if self.has_start_key_ and self.start_key_ != x.start_key_: return 0
+ if self.has_start_inclusive_ != x.has_start_inclusive_: return 0
+ if self.has_start_inclusive_ and self.start_inclusive_ != x.start_inclusive_: return 0
+ if self.has_end_key_ != x.has_end_key_: return 0
+ if self.has_end_key_ and self.end_key_ != x.end_key_: return 0
+ if self.has_end_inclusive_ != x.has_end_inclusive_: return 0
+ if self.has_end_inclusive_ and self.end_inclusive_ != x.end_inclusive_: return 0
+ return 1
+
+ def IsInitialized(self, debug_strs=None):
+ initialized = 1
+ return initialized
+
+ def ByteSize(self):
+ n = 0
+ if (self.has_index_name_): n += 1 + self.lengthString(len(self.index_name_))
+ if (self.has_start_key_): n += 1 + self.lengthString(len(self.start_key_))
+ if (self.has_start_inclusive_): n += 2
+ if (self.has_end_key_): n += 1 + self.lengthString(len(self.end_key_))
+ if (self.has_end_inclusive_): n += 2
+ return n + 0
+
+ def Clear(self):
+ self.clear_index_name()
+ self.clear_start_key()
+ self.clear_start_inclusive()
+ self.clear_end_key()
+ self.clear_end_inclusive()
+
+ def OutputUnchecked(self, out):
+ if (self.has_index_name_):
+ out.putVarInt32(18)
+ out.putPrefixedString(self.index_name_)
+ if (self.has_start_key_):
+ out.putVarInt32(26)
+ out.putPrefixedString(self.start_key_)
+ if (self.has_start_inclusive_):
+ out.putVarInt32(32)
+ out.putBoolean(self.start_inclusive_)
+ if (self.has_end_key_):
+ out.putVarInt32(42)
+ out.putPrefixedString(self.end_key_)
+ if (self.has_end_inclusive_):
+ out.putVarInt32(48)
+ out.putBoolean(self.end_inclusive_)
+
+ def TryMerge(self, d):
+ while 1:
+ tt = d.getVarInt32()
+ if tt == 12: break
+ if tt == 18:
+ self.set_index_name(d.getPrefixedString())
+ continue
+ if tt == 26:
+ self.set_start_key(d.getPrefixedString())
+ continue
+ if tt == 32:
+ self.set_start_inclusive(d.getBoolean())
+ continue
+ if tt == 42:
+ self.set_end_key(d.getPrefixedString())
+ continue
+ if tt == 48:
+ self.set_end_inclusive(d.getBoolean())
+ continue
+ if (tt == 0): raise ProtocolBuffer.ProtocolBufferDecodeError
+ d.skipData(tt)
+
+
+ def __str__(self, prefix="", printElemNumber=0):
+ res=""
+ if self.has_index_name_: res+=prefix+("index_name: %s\n" % self.DebugFormatString(self.index_name_))
+ if self.has_start_key_: res+=prefix+("start_key: %s\n" % self.DebugFormatString(self.start_key_))
+ if self.has_start_inclusive_: res+=prefix+("start_inclusive: %s\n" % self.DebugFormatBool(self.start_inclusive_))
+ if self.has_end_key_: res+=prefix+("end_key: %s\n" % self.DebugFormatString(self.end_key_))
+ if self.has_end_inclusive_: res+=prefix+("end_inclusive: %s\n" % self.DebugFormatBool(self.end_inclusive_))
+ return res
+
+class CompiledQuery_MergeJoinScan(ProtocolBuffer.ProtocolMessage):
+ has_index_name_ = 0
+ index_name_ = ""
+
+ def __init__(self, contents=None):
+ self.prefix_value_ = []
+ if contents is not None: self.MergeFromString(contents)
+
+ def index_name(self): return self.index_name_
+
+ def set_index_name(self, x):
+ self.has_index_name_ = 1
+ self.index_name_ = x
+
+ def clear_index_name(self):
+ if self.has_index_name_:
+ self.has_index_name_ = 0
+ self.index_name_ = ""
+
+ def has_index_name(self): return self.has_index_name_
+
+ def prefix_value_size(self): return len(self.prefix_value_)
+ def prefix_value_list(self): return self.prefix_value_
+
+ def prefix_value(self, i):
+ return self.prefix_value_[i]
+
+ def set_prefix_value(self, i, x):
+ self.prefix_value_[i] = x
+
+ def add_prefix_value(self, x):
+ self.prefix_value_.append(x)
+
+ def clear_prefix_value(self):
+ self.prefix_value_ = []
+
+
+ def MergeFrom(self, x):
+ assert x is not self
+ if (x.has_index_name()): self.set_index_name(x.index_name())
+ for i in xrange(x.prefix_value_size()): self.add_prefix_value(x.prefix_value(i))
+
+ def Equals(self, x):
+ if x is self: return 1
+ if self.has_index_name_ != x.has_index_name_: return 0
+ if self.has_index_name_ and self.index_name_ != x.index_name_: return 0
+ if len(self.prefix_value_) != len(x.prefix_value_): return 0
+ for e1, e2 in zip(self.prefix_value_, x.prefix_value_):
+ if e1 != e2: return 0
+ return 1
+
+ def IsInitialized(self, debug_strs=None):
+ initialized = 1
+ if (not self.has_index_name_):
+ initialized = 0
+ if debug_strs is not None:
+ debug_strs.append('Required field: index_name not set.')
+ return initialized
+
+ def ByteSize(self):
+ n = 0
+ n += self.lengthString(len(self.index_name_))
+ n += 1 * len(self.prefix_value_)
+ for i in xrange(len(self.prefix_value_)): n += self.lengthString(len(self.prefix_value_[i]))
+ return n + 1
+
+ def Clear(self):
+ self.clear_index_name()
+ self.clear_prefix_value()
+
+ def OutputUnchecked(self, out):
+ out.putVarInt32(66)
+ out.putPrefixedString(self.index_name_)
+ for i in xrange(len(self.prefix_value_)):
+ out.putVarInt32(74)
+ out.putPrefixedString(self.prefix_value_[i])
+
+ def TryMerge(self, d):
+ while 1:
+ tt = d.getVarInt32()
+ if tt == 60: break
+ if tt == 66:
+ self.set_index_name(d.getPrefixedString())
+ continue
+ if tt == 74:
+ self.add_prefix_value(d.getPrefixedString())
+ continue
+ if (tt == 0): raise ProtocolBuffer.ProtocolBufferDecodeError
+ d.skipData(tt)
+
+
+ def __str__(self, prefix="", printElemNumber=0):
+ res=""
+ if self.has_index_name_: res+=prefix+("index_name: %s\n" % self.DebugFormatString(self.index_name_))
+ cnt=0
+ for e in self.prefix_value_:
+ elm=""
+ if printElemNumber: elm="(%d)" % cnt
+ res+=prefix+("prefix_value%s: %s\n" % (elm, self.DebugFormatString(e)))
+ cnt+=1
+ return res
+
+class CompiledQuery_EntityFilter(ProtocolBuffer.ProtocolMessage):
+ has_distinct_ = 0
+ distinct_ = 0
+ has_offset_ = 0
+ offset_ = 0
+ has_limit_ = 0
+ limit_ = 0
+ has_kind_ = 0
+ kind_ = ""
+ has_ancestor_ = 0
+ ancestor_ = None
+
+ def __init__(self, contents=None):
+ self.lazy_init_lock_ = thread.allocate_lock()
+ if contents is not None: self.MergeFromString(contents)
+
+ def distinct(self): return self.distinct_
+
+ def set_distinct(self, x):
+ self.has_distinct_ = 1
+ self.distinct_ = x
+
+ def clear_distinct(self):
+ if self.has_distinct_:
+ self.has_distinct_ = 0
+ self.distinct_ = 0
+
+ def has_distinct(self): return self.has_distinct_
+
+ def offset(self): return self.offset_
+
+ def set_offset(self, x):
+ self.has_offset_ = 1
+ self.offset_ = x
+
+ def clear_offset(self):
+ if self.has_offset_:
+ self.has_offset_ = 0
+ self.offset_ = 0
+
+ def has_offset(self): return self.has_offset_
+
+ def limit(self): return self.limit_
+
+ def set_limit(self, x):
+ self.has_limit_ = 1
+ self.limit_ = x
+
+ def clear_limit(self):
+ if self.has_limit_:
+ self.has_limit_ = 0
+ self.limit_ = 0
+
+ def has_limit(self): return self.has_limit_
+
+ def kind(self): return self.kind_
+
+ def set_kind(self, x):
+ self.has_kind_ = 1
+ self.kind_ = x
+
+ def clear_kind(self):
+ if self.has_kind_:
+ self.has_kind_ = 0
+ self.kind_ = ""
+
+ def has_kind(self): return self.has_kind_
+
+ def ancestor(self):
+ if self.ancestor_ is None:
+ self.lazy_init_lock_.acquire()
+ try:
+ if self.ancestor_ is None: self.ancestor_ = Reference()
+ finally:
+ self.lazy_init_lock_.release()
+ return self.ancestor_
+
+ def mutable_ancestor(self): self.has_ancestor_ = 1; return self.ancestor()
+
+ def clear_ancestor(self):
+ if self.has_ancestor_:
+ self.has_ancestor_ = 0;
+ if self.ancestor_ is not None: self.ancestor_.Clear()
+
+ def has_ancestor(self): return self.has_ancestor_
+
+
+ def MergeFrom(self, x):
+ assert x is not self
+ if (x.has_distinct()): self.set_distinct(x.distinct())
+ if (x.has_offset()): self.set_offset(x.offset())
+ if (x.has_limit()): self.set_limit(x.limit())
+ if (x.has_kind()): self.set_kind(x.kind())
+ if (x.has_ancestor()): self.mutable_ancestor().MergeFrom(x.ancestor())
+
+ def Equals(self, x):
+ if x is self: return 1
+ if self.has_distinct_ != x.has_distinct_: return 0
+ if self.has_distinct_ and self.distinct_ != x.distinct_: return 0
+ if self.has_offset_ != x.has_offset_: return 0
+ if self.has_offset_ and self.offset_ != x.offset_: return 0
+ if self.has_limit_ != x.has_limit_: return 0
+ if self.has_limit_ and self.limit_ != x.limit_: return 0
+ if self.has_kind_ != x.has_kind_: return 0
+ if self.has_kind_ and self.kind_ != x.kind_: return 0
+ if self.has_ancestor_ != x.has_ancestor_: return 0
+ if self.has_ancestor_ and self.ancestor_ != x.ancestor_: return 0
+ return 1
+
+ def IsInitialized(self, debug_strs=None):
+ initialized = 1
+ if (self.has_ancestor_ and not self.ancestor_.IsInitialized(debug_strs)): initialized = 0
+ return initialized
+
+ def ByteSize(self):
+ n = 0
+ if (self.has_distinct_): n += 2
+ if (self.has_offset_): n += 1 + self.lengthVarInt64(self.offset_)
+ if (self.has_limit_): n += 2 + self.lengthVarInt64(self.limit_)
+ if (self.has_kind_): n += 2 + self.lengthString(len(self.kind_))
+ if (self.has_ancestor_): n += 2 + self.lengthString(self.ancestor_.ByteSize())
+ return n + 0
+
+ def Clear(self):
+ self.clear_distinct()
+ self.clear_offset()
+ self.clear_limit()
+ self.clear_kind()
+ self.clear_ancestor()
+
+ def OutputUnchecked(self, out):
+ if (self.has_distinct_):
+ out.putVarInt32(112)
+ out.putBoolean(self.distinct_)
+ if (self.has_offset_):
+ out.putVarInt32(120)
+ out.putVarInt32(self.offset_)
+ if (self.has_limit_):
+ out.putVarInt32(128)
+ out.putVarInt32(self.limit_)
+ if (self.has_kind_):
+ out.putVarInt32(138)
+ out.putPrefixedString(self.kind_)
+ if (self.has_ancestor_):
+ out.putVarInt32(146)
+ out.putVarInt32(self.ancestor_.ByteSize())
+ self.ancestor_.OutputUnchecked(out)
+
+ def TryMerge(self, d):
+ while 1:
+ tt = d.getVarInt32()
+ if tt == 108: break
+ if tt == 112:
+ self.set_distinct(d.getBoolean())
+ continue
+ if tt == 120:
+ self.set_offset(d.getVarInt32())
+ continue
+ if tt == 128:
+ self.set_limit(d.getVarInt32())
+ continue
+ if tt == 138:
+ self.set_kind(d.getPrefixedString())
+ continue
+ if tt == 146:
+ length = d.getVarInt32()
+ tmp = ProtocolBuffer.Decoder(d.buffer(), d.pos(), d.pos() + length)
+ d.skip(length)
+ self.mutable_ancestor().TryMerge(tmp)
+ continue
+ if (tt == 0): raise ProtocolBuffer.ProtocolBufferDecodeError
+ d.skipData(tt)
+
+
+ def __str__(self, prefix="", printElemNumber=0):
+ res=""
+ if self.has_distinct_: res+=prefix+("distinct: %s\n" % self.DebugFormatBool(self.distinct_))
+ if self.has_offset_: res+=prefix+("offset: %s\n" % self.DebugFormatInt32(self.offset_))
+ if self.has_limit_: res+=prefix+("limit: %s\n" % self.DebugFormatInt32(self.limit_))
+ if self.has_kind_: res+=prefix+("kind: %s\n" % self.DebugFormatString(self.kind_))
+ if self.has_ancestor_:
+ res+=prefix+"ancestor <\n"
+ res+=self.ancestor_.__str__(prefix + " ", printElemNumber)
+ res+=prefix+">\n"
+ return res
+
+class CompiledQuery(ProtocolBuffer.ProtocolMessage):
+ has_primaryscan_ = 0
+ has_offset_ = 0
+ offset_ = 0
+ has_limit_ = 0
+ limit_ = 0
+ has_keys_only_ = 0
+ keys_only_ = 0
+ has_entityfilter_ = 0
+ entityfilter_ = None
+
+ def __init__(self, contents=None):
+ self.primaryscan_ = CompiledQuery_PrimaryScan()
+ self.mergejoinscan_ = []
+ self.lazy_init_lock_ = thread.allocate_lock()
+ if contents is not None: self.MergeFromString(contents)
+
+ def primaryscan(self): return self.primaryscan_
+
+ def mutable_primaryscan(self): self.has_primaryscan_ = 1; return self.primaryscan_
+
+ def clear_primaryscan(self):self.has_primaryscan_ = 0; self.primaryscan_.Clear()
+
+ def has_primaryscan(self): return self.has_primaryscan_
+
+ def mergejoinscan_size(self): return len(self.mergejoinscan_)
+ def mergejoinscan_list(self): return self.mergejoinscan_
+
+ def mergejoinscan(self, i):
+ return self.mergejoinscan_[i]
+
+ def mutable_mergejoinscan(self, i):
+ return self.mergejoinscan_[i]
+
+ def add_mergejoinscan(self):
+ x = CompiledQuery_MergeJoinScan()
+ self.mergejoinscan_.append(x)
+ return x
+
+ def clear_mergejoinscan(self):
+ self.mergejoinscan_ = []
+ def offset(self): return self.offset_
+
+ def set_offset(self, x):
+ self.has_offset_ = 1
+ self.offset_ = x
+
+ def clear_offset(self):
+ if self.has_offset_:
+ self.has_offset_ = 0
+ self.offset_ = 0
+
+ def has_offset(self): return self.has_offset_
+
+ def limit(self): return self.limit_
+
+ def set_limit(self, x):
+ self.has_limit_ = 1
+ self.limit_ = x
+
+ def clear_limit(self):
+ if self.has_limit_:
+ self.has_limit_ = 0
+ self.limit_ = 0
+
+ def has_limit(self): return self.has_limit_
+
+ def keys_only(self): return self.keys_only_
+
+ def set_keys_only(self, x):
+ self.has_keys_only_ = 1
+ self.keys_only_ = x
+
+ def clear_keys_only(self):
+ if self.has_keys_only_:
+ self.has_keys_only_ = 0
+ self.keys_only_ = 0
+
+ def has_keys_only(self): return self.has_keys_only_
+
+ def entityfilter(self):
+ if self.entityfilter_ is None:
+ self.lazy_init_lock_.acquire()
+ try:
+ if self.entityfilter_ is None: self.entityfilter_ = CompiledQuery_EntityFilter()
+ finally:
+ self.lazy_init_lock_.release()
+ return self.entityfilter_
+
+ def mutable_entityfilter(self): self.has_entityfilter_ = 1; return self.entityfilter()
+
+ def clear_entityfilter(self):
+ if self.has_entityfilter_:
+ self.has_entityfilter_ = 0;
+ if self.entityfilter_ is not None: self.entityfilter_.Clear()
+
+ def has_entityfilter(self): return self.has_entityfilter_
+
+
+ def MergeFrom(self, x):
+ assert x is not self
+ if (x.has_primaryscan()): self.mutable_primaryscan().MergeFrom(x.primaryscan())
+ for i in xrange(x.mergejoinscan_size()): self.add_mergejoinscan().CopyFrom(x.mergejoinscan(i))
+ if (x.has_offset()): self.set_offset(x.offset())
+ if (x.has_limit()): self.set_limit(x.limit())
+ if (x.has_keys_only()): self.set_keys_only(x.keys_only())
+ if (x.has_entityfilter()): self.mutable_entityfilter().MergeFrom(x.entityfilter())
+
+ def Equals(self, x):
+ if x is self: return 1
+ if self.has_primaryscan_ != x.has_primaryscan_: return 0
+ if self.has_primaryscan_ and self.primaryscan_ != x.primaryscan_: return 0
+ if len(self.mergejoinscan_) != len(x.mergejoinscan_): return 0
+ for e1, e2 in zip(self.mergejoinscan_, x.mergejoinscan_):
+ if e1 != e2: return 0
+ if self.has_offset_ != x.has_offset_: return 0
+ if self.has_offset_ and self.offset_ != x.offset_: return 0
+ if self.has_limit_ != x.has_limit_: return 0
+ if self.has_limit_ and self.limit_ != x.limit_: return 0
+ if self.has_keys_only_ != x.has_keys_only_: return 0
+ if self.has_keys_only_ and self.keys_only_ != x.keys_only_: return 0
+ if self.has_entityfilter_ != x.has_entityfilter_: return 0
+ if self.has_entityfilter_ and self.entityfilter_ != x.entityfilter_: return 0
+ return 1
+
+ def IsInitialized(self, debug_strs=None):
+ initialized = 1
+ if (not self.has_primaryscan_):
+ initialized = 0
+ if debug_strs is not None:
+ debug_strs.append('Required field: primaryscan not set.')
+ elif not self.primaryscan_.IsInitialized(debug_strs): initialized = 0
+ for p in self.mergejoinscan_:
+ if not p.IsInitialized(debug_strs): initialized=0
+ if (not self.has_keys_only_):
+ initialized = 0
+ if debug_strs is not None:
+ debug_strs.append('Required field: keys_only not set.')
+ if (self.has_entityfilter_ and not self.entityfilter_.IsInitialized(debug_strs)): initialized = 0
+ return initialized
+
+ def ByteSize(self):
+ n = 0
+ n += self.primaryscan_.ByteSize()
+ n += 2 * len(self.mergejoinscan_)
+ for i in xrange(len(self.mergejoinscan_)): n += self.mergejoinscan_[i].ByteSize()
+ if (self.has_offset_): n += 1 + self.lengthVarInt64(self.offset_)
+ if (self.has_limit_): n += 1 + self.lengthVarInt64(self.limit_)
+ if (self.has_entityfilter_): n += 2 + self.entityfilter_.ByteSize()
+ return n + 4
+
+ def Clear(self):
+ self.clear_primaryscan()
+ self.clear_mergejoinscan()
+ self.clear_offset()
+ self.clear_limit()
+ self.clear_keys_only()
+ self.clear_entityfilter()
+
+ def OutputUnchecked(self, out):
+ out.putVarInt32(11)
+ self.primaryscan_.OutputUnchecked(out)
+ out.putVarInt32(12)
+ for i in xrange(len(self.mergejoinscan_)):
+ out.putVarInt32(59)
+ self.mergejoinscan_[i].OutputUnchecked(out)
+ out.putVarInt32(60)
+ if (self.has_offset_):
+ out.putVarInt32(80)
+ out.putVarInt32(self.offset_)
+ if (self.has_limit_):
+ out.putVarInt32(88)
+ out.putVarInt32(self.limit_)
+ out.putVarInt32(96)
+ out.putBoolean(self.keys_only_)
+ if (self.has_entityfilter_):
+ out.putVarInt32(107)
+ self.entityfilter_.OutputUnchecked(out)
+ out.putVarInt32(108)
+
+ def TryMerge(self, d):
+ while d.avail() > 0:
+ tt = d.getVarInt32()
+ if tt == 11:
+ self.mutable_primaryscan().TryMerge(d)
+ continue
+ if tt == 59:
+ self.add_mergejoinscan().TryMerge(d)
+ continue
+ if tt == 80:
+ self.set_offset(d.getVarInt32())
+ continue
+ if tt == 88:
+ self.set_limit(d.getVarInt32())
+ continue
+ if tt == 96:
+ self.set_keys_only(d.getBoolean())
+ continue
+ if tt == 107:
+ self.mutable_entityfilter().TryMerge(d)
+ continue
+ if (tt == 0): raise ProtocolBuffer.ProtocolBufferDecodeError
+ d.skipData(tt)
+
+
+ def __str__(self, prefix="", printElemNumber=0):
+ res=""
+ if self.has_primaryscan_:
+ res+=prefix+"PrimaryScan {\n"
+ res+=self.primaryscan_.__str__(prefix + " ", printElemNumber)
+ res+=prefix+"}\n"
+ cnt=0
+ for e in self.mergejoinscan_:
+ elm=""
+ if printElemNumber: elm="(%d)" % cnt
+ res+=prefix+("MergeJoinScan%s {\n" % elm)
+ res+=e.__str__(prefix + " ", printElemNumber)
+ res+=prefix+"}\n"
+ cnt+=1
+ if self.has_offset_: res+=prefix+("offset: %s\n" % self.DebugFormatInt32(self.offset_))
+ if self.has_limit_: res+=prefix+("limit: %s\n" % self.DebugFormatInt32(self.limit_))
+ if self.has_keys_only_: res+=prefix+("keys_only: %s\n" % self.DebugFormatBool(self.keys_only_))
+ if self.has_entityfilter_:
+ res+=prefix+"EntityFilter {\n"
+ res+=self.entityfilter_.__str__(prefix + " ", printElemNumber)
+ res+=prefix+"}\n"
+ return res
+
+
+ def _BuildTagLookupTable(sparse, maxtag, default=None):
+ return tuple([sparse.get(i, default) for i in xrange(0, 1+maxtag)])
+
+ kPrimaryScanGroup = 1
+ kPrimaryScanindex_name = 2
+ kPrimaryScanstart_key = 3
+ kPrimaryScanstart_inclusive = 4
+ kPrimaryScanend_key = 5
+ kPrimaryScanend_inclusive = 6
+ kMergeJoinScanGroup = 7
+ kMergeJoinScanindex_name = 8
+ kMergeJoinScanprefix_value = 9
+ koffset = 10
+ klimit = 11
+ kkeys_only = 12
+ kEntityFilterGroup = 13
+ kEntityFilterdistinct = 14
+ kEntityFilteroffset = 15
+ kEntityFilterlimit = 16
+ kEntityFilterkind = 17
+ kEntityFilterancestor = 18
+
+ _TEXT = _BuildTagLookupTable({
+ 0: "ErrorCode",
+ 1: "PrimaryScan",
+ 2: "index_name",
+ 3: "start_key",
+ 4: "start_inclusive",
+ 5: "end_key",
+ 6: "end_inclusive",
+ 7: "MergeJoinScan",
+ 8: "index_name",
+ 9: "prefix_value",
+ 10: "offset",
+ 11: "limit",
+ 12: "keys_only",
+ 13: "EntityFilter",
+ 14: "distinct",
+ 15: "offset",
+ 16: "limit",
+ 17: "kind",
+ 18: "ancestor",
+ }, 18)
+
+ _TYPES = _BuildTagLookupTable({
+ 0: ProtocolBuffer.Encoder.NUMERIC,
+ 1: ProtocolBuffer.Encoder.STARTGROUP,
+ 2: ProtocolBuffer.Encoder.STRING,
+ 3: ProtocolBuffer.Encoder.STRING,
+ 4: ProtocolBuffer.Encoder.NUMERIC,
+ 5: ProtocolBuffer.Encoder.STRING,
+ 6: ProtocolBuffer.Encoder.NUMERIC,
+ 7: ProtocolBuffer.Encoder.STARTGROUP,
+ 8: ProtocolBuffer.Encoder.STRING,
+ 9: ProtocolBuffer.Encoder.STRING,
+ 10: ProtocolBuffer.Encoder.NUMERIC,
+ 11: ProtocolBuffer.Encoder.NUMERIC,
+ 12: ProtocolBuffer.Encoder.NUMERIC,
+ 13: ProtocolBuffer.Encoder.STARTGROUP,
+ 14: ProtocolBuffer.Encoder.NUMERIC,
+ 15: ProtocolBuffer.Encoder.NUMERIC,
+ 16: ProtocolBuffer.Encoder.NUMERIC,
+ 17: ProtocolBuffer.Encoder.STRING,
+ 18: ProtocolBuffer.Encoder.STRING,
+ }, 18, ProtocolBuffer.Encoder.MAX_TYPE)
+
+ _STYLE = """"""
+ _STYLE_CONTENT_TYPE = """"""
+class RunCompiledQueryRequest(ProtocolBuffer.ProtocolMessage):
+ has_compiled_query_ = 0
+ has_original_query_ = 0
+ original_query_ = None
+ has_count_ = 0
+ count_ = 0
+
+ def __init__(self, contents=None):
+ self.compiled_query_ = CompiledQuery()
+ self.lazy_init_lock_ = thread.allocate_lock()
+ if contents is not None: self.MergeFromString(contents)
+
+ def compiled_query(self): return self.compiled_query_
+
+ def mutable_compiled_query(self): self.has_compiled_query_ = 1; return self.compiled_query_
+
+ def clear_compiled_query(self):self.has_compiled_query_ = 0; self.compiled_query_.Clear()
+
+ def has_compiled_query(self): return self.has_compiled_query_
+
+ def original_query(self):
+ if self.original_query_ is None:
+ self.lazy_init_lock_.acquire()
+ try:
+ if self.original_query_ is None: self.original_query_ = Query()
+ finally:
+ self.lazy_init_lock_.release()
+ return self.original_query_
+
+ def mutable_original_query(self): self.has_original_query_ = 1; return self.original_query()
+
+ def clear_original_query(self):
+ if self.has_original_query_:
+ self.has_original_query_ = 0;
+ if self.original_query_ is not None: self.original_query_.Clear()
+
+ def has_original_query(self): return self.has_original_query_
+
+ def count(self): return self.count_
+
+ def set_count(self, x):
+ self.has_count_ = 1
+ self.count_ = x
+
+ def clear_count(self):
+ if self.has_count_:
+ self.has_count_ = 0
+ self.count_ = 0
+
+ def has_count(self): return self.has_count_
+
+
+ def MergeFrom(self, x):
+ assert x is not self
+ if (x.has_compiled_query()): self.mutable_compiled_query().MergeFrom(x.compiled_query())
+ if (x.has_original_query()): self.mutable_original_query().MergeFrom(x.original_query())
+ if (x.has_count()): self.set_count(x.count())
+
+ def Equals(self, x):
+ if x is self: return 1
+ if self.has_compiled_query_ != x.has_compiled_query_: return 0
+ if self.has_compiled_query_ and self.compiled_query_ != x.compiled_query_: return 0
+ if self.has_original_query_ != x.has_original_query_: return 0
+ if self.has_original_query_ and self.original_query_ != x.original_query_: return 0
+ if self.has_count_ != x.has_count_: return 0
+ if self.has_count_ and self.count_ != x.count_: return 0
+ return 1
+
+ def IsInitialized(self, debug_strs=None):
+ initialized = 1
+ if (not self.has_compiled_query_):
+ initialized = 0
+ if debug_strs is not None:
+ debug_strs.append('Required field: compiled_query not set.')
+ elif not self.compiled_query_.IsInitialized(debug_strs): initialized = 0
+ if (self.has_original_query_ and not self.original_query_.IsInitialized(debug_strs)): initialized = 0
+ return initialized
+
+ def ByteSize(self):
+ n = 0
+ n += self.lengthString(self.compiled_query_.ByteSize())
+ if (self.has_original_query_): n += 1 + self.lengthString(self.original_query_.ByteSize())
+ if (self.has_count_): n += 1 + self.lengthVarInt64(self.count_)
+ return n + 1
+
+ def Clear(self):
+ self.clear_compiled_query()
+ self.clear_original_query()
+ self.clear_count()
+
+ def OutputUnchecked(self, out):
+ out.putVarInt32(10)
+ out.putVarInt32(self.compiled_query_.ByteSize())
+ self.compiled_query_.OutputUnchecked(out)
+ if (self.has_original_query_):
+ out.putVarInt32(18)
+ out.putVarInt32(self.original_query_.ByteSize())
+ self.original_query_.OutputUnchecked(out)
+ if (self.has_count_):
+ out.putVarInt32(24)
+ out.putVarInt32(self.count_)
+
+ def TryMerge(self, d):
+ while d.avail() > 0:
+ tt = d.getVarInt32()
+ if tt == 10:
+ length = d.getVarInt32()
+ tmp = ProtocolBuffer.Decoder(d.buffer(), d.pos(), d.pos() + length)
+ d.skip(length)
+ self.mutable_compiled_query().TryMerge(tmp)
+ continue
+ if tt == 18:
+ length = d.getVarInt32()
+ tmp = ProtocolBuffer.Decoder(d.buffer(), d.pos(), d.pos() + length)
+ d.skip(length)
+ self.mutable_original_query().TryMerge(tmp)
+ continue
+ if tt == 24:
+ self.set_count(d.getVarInt32())
+ continue
+ if (tt == 0): raise ProtocolBuffer.ProtocolBufferDecodeError
+ d.skipData(tt)
+
+
+ def __str__(self, prefix="", printElemNumber=0):
+ res=""
+ if self.has_compiled_query_:
+ res+=prefix+"compiled_query <\n"
+ res+=self.compiled_query_.__str__(prefix + " ", printElemNumber)
+ res+=prefix+">\n"
+ if self.has_original_query_:
+ res+=prefix+"original_query <\n"
+ res+=self.original_query_.__str__(prefix + " ", printElemNumber)
+ res+=prefix+">\n"
+ if self.has_count_: res+=prefix+("count: %s\n" % self.DebugFormatInt32(self.count_))
+ return res
+
+
+ def _BuildTagLookupTable(sparse, maxtag, default=None):
+ return tuple([sparse.get(i, default) for i in xrange(0, 1+maxtag)])
+
+ kcompiled_query = 1
+ koriginal_query = 2
+ kcount = 3
+
+ _TEXT = _BuildTagLookupTable({
+ 0: "ErrorCode",
+ 1: "compiled_query",
+ 2: "original_query",
+ 3: "count",
+ }, 3)
+
+ _TYPES = _BuildTagLookupTable({
+ 0: ProtocolBuffer.Encoder.NUMERIC,
+ 1: ProtocolBuffer.Encoder.STRING,
+ 2: ProtocolBuffer.Encoder.STRING,
+ 3: ProtocolBuffer.Encoder.NUMERIC,
+ }, 3, ProtocolBuffer.Encoder.MAX_TYPE)
+
+ _STYLE = """"""
+ _STYLE_CONTENT_TYPE = """"""
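+
+# Sketch, not generated output: the usual way the accessors above combine.
+# compiled_query is the only required field here; errors collects the names
+# of any required sub-fields still unset, per the file's debug_strs idiom.
+def _example_run_compiled_query_request(count=10):
+  req = RunCompiledQueryRequest()
+  req.mutable_compiled_query()    # marks the required message (field 1) present
+  req.set_count(count)            # optional result limit (field 3)
+  errors = []
+  req.IsInitialized(errors)       # appends 'Required field: ...' strings
+  return req, errors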
+class QueryExplanation(ProtocolBuffer.ProtocolMessage):
+ has_native_ancestor_ = 0
+ native_ancestor_ = 0
+ has_native_offset_ = 0
+ native_offset_ = 0
+ has_native_limit_ = 0
+ native_limit_ = 0
+
+ def __init__(self, contents=None):
+ self.native_index_ = []
+ if contents is not None: self.MergeFromString(contents)
+
+ def native_ancestor(self): return self.native_ancestor_
+
+ def set_native_ancestor(self, x):
+ self.has_native_ancestor_ = 1
+ self.native_ancestor_ = x
+
+ def clear_native_ancestor(self):
+ if self.has_native_ancestor_:
+ self.has_native_ancestor_ = 0
+ self.native_ancestor_ = 0
+
+ def has_native_ancestor(self): return self.has_native_ancestor_
+
+ def native_index_size(self): return len(self.native_index_)
+ def native_index_list(self): return self.native_index_
+
+ def native_index(self, i):
+ return self.native_index_[i]
+
+ def mutable_native_index(self, i):
+ return self.native_index_[i]
+
+ def add_native_index(self):
+ x = Index()
+ self.native_index_.append(x)
+ return x
+
+ def clear_native_index(self):
+ self.native_index_ = []
+ def native_offset(self): return self.native_offset_
+
+ def set_native_offset(self, x):
+ self.has_native_offset_ = 1
+ self.native_offset_ = x
+
+ def clear_native_offset(self):
+ if self.has_native_offset_:
+ self.has_native_offset_ = 0
+ self.native_offset_ = 0
+
+ def has_native_offset(self): return self.has_native_offset_
+
+ def native_limit(self): return self.native_limit_
+
+ def set_native_limit(self, x):
+ self.has_native_limit_ = 1
+ self.native_limit_ = x
+
+ def clear_native_limit(self):
+ if self.has_native_limit_:
+ self.has_native_limit_ = 0
+ self.native_limit_ = 0
+
+ def has_native_limit(self): return self.has_native_limit_
+
+
+ def MergeFrom(self, x):
+ assert x is not self
+ if (x.has_native_ancestor()): self.set_native_ancestor(x.native_ancestor())
+ for i in xrange(x.native_index_size()): self.add_native_index().CopyFrom(x.native_index(i))
+ if (x.has_native_offset()): self.set_native_offset(x.native_offset())
+ if (x.has_native_limit()): self.set_native_limit(x.native_limit())
+
+ def Equals(self, x):
+ if x is self: return 1
+ if self.has_native_ancestor_ != x.has_native_ancestor_: return 0
+ if self.has_native_ancestor_ and self.native_ancestor_ != x.native_ancestor_: return 0
+ if len(self.native_index_) != len(x.native_index_): return 0
+ for e1, e2 in zip(self.native_index_, x.native_index_):
+ if e1 != e2: return 0
+ if self.has_native_offset_ != x.has_native_offset_: return 0
+ if self.has_native_offset_ and self.native_offset_ != x.native_offset_: return 0
+ if self.has_native_limit_ != x.has_native_limit_: return 0
+ if self.has_native_limit_ and self.native_limit_ != x.native_limit_: return 0
+ return 1
+
+ def IsInitialized(self, debug_strs=None):
+ initialized = 1
+ for p in self.native_index_:
+ if not p.IsInitialized(debug_strs): initialized=0
+ return initialized
+
+ def ByteSize(self):
+ n = 0
+ if (self.has_native_ancestor_): n += 2
+ n += 1 * len(self.native_index_)
+ for i in xrange(len(self.native_index_)): n += self.lengthString(self.native_index_[i].ByteSize())
+ if (self.has_native_offset_): n += 1 + self.lengthVarInt64(self.native_offset_)
+ if (self.has_native_limit_): n += 1 + self.lengthVarInt64(self.native_limit_)
+ return n + 0
+
+ def Clear(self):
+ self.clear_native_ancestor()
+ self.clear_native_index()
+ self.clear_native_offset()
+ self.clear_native_limit()
+
+ def OutputUnchecked(self, out):
+ if (self.has_native_ancestor_):
+ out.putVarInt32(8)
+ out.putBoolean(self.native_ancestor_)
+ for i in xrange(len(self.native_index_)):
+ out.putVarInt32(18)
+ out.putVarInt32(self.native_index_[i].ByteSize())
+ self.native_index_[i].OutputUnchecked(out)
+ if (self.has_native_offset_):
+ out.putVarInt32(24)
+ out.putVarInt32(self.native_offset_)
+ if (self.has_native_limit_):
+ out.putVarInt32(32)
+ out.putVarInt32(self.native_limit_)
+
+ def TryMerge(self, d):
+ while d.avail() > 0:
+ tt = d.getVarInt32()
+ if tt == 8:
+ self.set_native_ancestor(d.getBoolean())
+ continue
+ if tt == 18:
+ length = d.getVarInt32()
+ tmp = ProtocolBuffer.Decoder(d.buffer(), d.pos(), d.pos() + length)
+ d.skip(length)
+ self.add_native_index().TryMerge(tmp)
+ continue
+ if tt == 24:
+ self.set_native_offset(d.getVarInt32())
+ continue
+ if tt == 32:
+ self.set_native_limit(d.getVarInt32())
+ continue
+ if (tt == 0): raise ProtocolBuffer.ProtocolBufferDecodeError
+ d.skipData(tt)
+
+
+ def __str__(self, prefix="", printElemNumber=0):
+ res=""
+ if self.has_native_ancestor_: res+=prefix+("native_ancestor: %s\n" % self.DebugFormatBool(self.native_ancestor_))
+ cnt=0
+ for e in self.native_index_:
+ elm=""
+ if printElemNumber: elm="(%d)" % cnt
+ res+=prefix+("native_index%s <\n" % elm)
+ res+=e.__str__(prefix + " ", printElemNumber)
+ res+=prefix+">\n"
+ cnt+=1
+ if self.has_native_offset_: res+=prefix+("native_offset: %s\n" % self.DebugFormatInt32(self.native_offset_))
+ if self.has_native_limit_: res+=prefix+("native_limit: %s\n" % self.DebugFormatInt32(self.native_limit_))
+ return res
+
+
+ def _BuildTagLookupTable(sparse, maxtag, default=None):
+ return tuple([sparse.get(i, default) for i in xrange(0, 1+maxtag)])
+
+ knative_ancestor = 1
+ knative_index = 2
+ knative_offset = 3
+ knative_limit = 4
+
+ _TEXT = _BuildTagLookupTable({
+ 0: "ErrorCode",
+ 1: "native_ancestor",
+ 2: "native_index",
+ 3: "native_offset",
+ 4: "native_limit",
+ }, 4)
+
+ _TYPES = _BuildTagLookupTable({
+ 0: ProtocolBuffer.Encoder.NUMERIC,
+ 1: ProtocolBuffer.Encoder.NUMERIC,
+ 2: ProtocolBuffer.Encoder.STRING,
+ 3: ProtocolBuffer.Encoder.NUMERIC,
+ 4: ProtocolBuffer.Encoder.NUMERIC,
+ }, 4, ProtocolBuffer.Encoder.MAX_TYPE)
+
+ _STYLE = """"""
+ _STYLE_CONTENT_TYPE = """"""
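+
+# Sketch, not generated output: the repeated native_index field follows the
+# file's standard pattern -- add_native_index() appends and returns a fresh
+# Index message (imported from entity_pb near the top of this file) for the
+# caller to fill in.
+def _example_query_explanation():
+  exp = QueryExplanation()
+  exp.set_native_ancestor(1)      # bool, emitted as varint tag 8
+  exp.add_native_index()          # appends an empty Index message
+  assert exp.native_index_size() == 1
+  return exp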
+class Cursor(ProtocolBuffer.ProtocolMessage):
+ has_cursor_ = 0
+ cursor_ = 0
+
+ def __init__(self, contents=None):
+ if contents is not None: self.MergeFromString(contents)
+
+ def cursor(self): return self.cursor_
+
+ def set_cursor(self, x):
+ self.has_cursor_ = 1
+ self.cursor_ = x
+
+ def clear_cursor(self):
+ if self.has_cursor_:
+ self.has_cursor_ = 0
+ self.cursor_ = 0
+
+ def has_cursor(self): return self.has_cursor_
+
+
+ def MergeFrom(self, x):
+ assert x is not self
+ if (x.has_cursor()): self.set_cursor(x.cursor())
+
+ def Equals(self, x):
+ if x is self: return 1
+ if self.has_cursor_ != x.has_cursor_: return 0
+ if self.has_cursor_ and self.cursor_ != x.cursor_: return 0
+ return 1
+
+ def IsInitialized(self, debug_strs=None):
+ initialized = 1
+ if (not self.has_cursor_):
+ initialized = 0
+ if debug_strs is not None:
+ debug_strs.append('Required field: cursor not set.')
+ return initialized
+
+ def ByteSize(self):
+ n = 0
+ return n + 9
+
+ def Clear(self):
+ self.clear_cursor()
+
+ def OutputUnchecked(self, out):
+ out.putVarInt32(9)
+ out.put64(self.cursor_)
+
+ def TryMerge(self, d):
+ while d.avail() > 0:
+ tt = d.getVarInt32()
+ if tt == 9:
+ self.set_cursor(d.get64())
+ continue
+ if (tt == 0): raise ProtocolBuffer.ProtocolBufferDecodeError
+ d.skipData(tt)
+
+
+ def __str__(self, prefix="", printElemNumber=0):
+ res=""
+ if self.has_cursor_: res+=prefix+("cursor: %s\n" % self.DebugFormatFixed64(self.cursor_))
+ return res
+
+
+ def _BuildTagLookupTable(sparse, maxtag, default=None):
+ return tuple([sparse.get(i, default) for i in xrange(0, 1+maxtag)])
+
+ kcursor = 1
+
+ _TEXT = _BuildTagLookupTable({
+ 0: "ErrorCode",
+ 1: "cursor",
+ }, 1)
+
+ _TYPES = _BuildTagLookupTable({
+ 0: ProtocolBuffer.Encoder.NUMERIC,
+ 1: ProtocolBuffer.Encoder.DOUBLE,
+ }, 1, ProtocolBuffer.Encoder.MAX_TYPE)
+
+ _STYLE = """"""
+ _STYLE_CONTENT_TYPE = """"""
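+
+# Note: cursor is a required fixed64, so ByteSize() above is constant --
+# one tag byte (varint 9 = field 1, wire type 1) plus eight payload bytes.
+def _example_cursor(value=0):
+  c = Cursor()
+  c.set_cursor(value)
+  assert c.ByteSize() == 9
+  return c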
+class Error(ProtocolBuffer.ProtocolMessage):
+
+ BAD_REQUEST = 1
+ CONCURRENT_TRANSACTION = 2
+ INTERNAL_ERROR = 3
+ NEED_INDEX = 4
+ TIMEOUT = 5
+ PERMISSION_DENIED = 6
+
+ _ErrorCode_NAMES = {
+ 1: "BAD_REQUEST",
+ 2: "CONCURRENT_TRANSACTION",
+ 3: "INTERNAL_ERROR",
+ 4: "NEED_INDEX",
+ 5: "TIMEOUT",
+ 6: "PERMISSION_DENIED",
+ }
+
+ def ErrorCode_Name(cls, x): return cls._ErrorCode_NAMES.get(x, "")
+ ErrorCode_Name = classmethod(ErrorCode_Name)
+
+
+ def __init__(self, contents=None):
+ pass
+ if contents is not None: self.MergeFromString(contents)
+
+
+ def MergeFrom(self, x):
+ assert x is not self
+
+ def Equals(self, x):
+ if x is self: return 1
+ return 1
+
+ def IsInitialized(self, debug_strs=None):
+ initialized = 1
+ return initialized
+
+ def ByteSize(self):
+ n = 0
+ return n + 0
+
+ def Clear(self):
+ pass
+
+ def OutputUnchecked(self, out):
+ pass
+
+ def TryMerge(self, d):
+ while d.avail() > 0:
+ tt = d.getVarInt32()
+ if (tt == 0): raise ProtocolBuffer.ProtocolBufferDecodeError
+ d.skipData(tt)
+
+
+ def __str__(self, prefix="", printElemNumber=0):
+ res=""
+ return res
+
+
+ def _BuildTagLookupTable(sparse, maxtag, default=None):
+ return tuple([sparse.get(i, default) for i in xrange(0, 1+maxtag)])
+
+
+ _TEXT = _BuildTagLookupTable({
+ 0: "ErrorCode",
+ }, 0)
+
+ _TYPES = _BuildTagLookupTable({
+ 0: ProtocolBuffer.Encoder.NUMERIC,
+ }, 0, ProtocolBuffer.Encoder.MAX_TYPE)
+
+ _STYLE = """"""
+ _STYLE_CONTENT_TYPE = """"""
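+
+# Note: Error carries no fields; it exists to namespace the ErrorCode enum.
+# ErrorCode_Name maps a numeric code back to its symbolic name ("" if
+# unknown), e.g. _example_error_name(Error.NEED_INDEX) == "NEED_INDEX".
+def _example_error_name(code):
+  return Error.ErrorCode_Name(code)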
+class Cost(ProtocolBuffer.ProtocolMessage):
+ has_index_writes_ = 0
+ index_writes_ = 0
+ has_index_write_bytes_ = 0
+ index_write_bytes_ = 0
+ has_entity_writes_ = 0
+ entity_writes_ = 0
+ has_entity_write_bytes_ = 0
+ entity_write_bytes_ = 0
+
+ def __init__(self, contents=None):
+ if contents is not None: self.MergeFromString(contents)
+
+ def index_writes(self): return self.index_writes_
+
+ def set_index_writes(self, x):
+ self.has_index_writes_ = 1
+ self.index_writes_ = x
+
+ def clear_index_writes(self):
+ if self.has_index_writes_:
+ self.has_index_writes_ = 0
+ self.index_writes_ = 0
+
+ def has_index_writes(self): return self.has_index_writes_
+
+ def index_write_bytes(self): return self.index_write_bytes_
+
+ def set_index_write_bytes(self, x):
+ self.has_index_write_bytes_ = 1
+ self.index_write_bytes_ = x
+
+ def clear_index_write_bytes(self):
+ if self.has_index_write_bytes_:
+ self.has_index_write_bytes_ = 0
+ self.index_write_bytes_ = 0
+
+ def has_index_write_bytes(self): return self.has_index_write_bytes_
+
+ def entity_writes(self): return self.entity_writes_
+
+ def set_entity_writes(self, x):
+ self.has_entity_writes_ = 1
+ self.entity_writes_ = x
+
+ def clear_entity_writes(self):
+ if self.has_entity_writes_:
+ self.has_entity_writes_ = 0
+ self.entity_writes_ = 0
+
+ def has_entity_writes(self): return self.has_entity_writes_
+
+ def entity_write_bytes(self): return self.entity_write_bytes_
+
+ def set_entity_write_bytes(self, x):
+ self.has_entity_write_bytes_ = 1
+ self.entity_write_bytes_ = x
+
+ def clear_entity_write_bytes(self):
+ if self.has_entity_write_bytes_:
+ self.has_entity_write_bytes_ = 0
+ self.entity_write_bytes_ = 0
+
+ def has_entity_write_bytes(self): return self.has_entity_write_bytes_
+
+
+ def MergeFrom(self, x):
+ assert x is not self
+ if (x.has_index_writes()): self.set_index_writes(x.index_writes())
+ if (x.has_index_write_bytes()): self.set_index_write_bytes(x.index_write_bytes())
+ if (x.has_entity_writes()): self.set_entity_writes(x.entity_writes())
+ if (x.has_entity_write_bytes()): self.set_entity_write_bytes(x.entity_write_bytes())
+
+ def Equals(self, x):
+ if x is self: return 1
+ if self.has_index_writes_ != x.has_index_writes_: return 0
+ if self.has_index_writes_ and self.index_writes_ != x.index_writes_: return 0
+ if self.has_index_write_bytes_ != x.has_index_write_bytes_: return 0
+ if self.has_index_write_bytes_ and self.index_write_bytes_ != x.index_write_bytes_: return 0
+ if self.has_entity_writes_ != x.has_entity_writes_: return 0
+ if self.has_entity_writes_ and self.entity_writes_ != x.entity_writes_: return 0
+ if self.has_entity_write_bytes_ != x.has_entity_write_bytes_: return 0
+ if self.has_entity_write_bytes_ and self.entity_write_bytes_ != x.entity_write_bytes_: return 0
+ return 1
+
+ def IsInitialized(self, debug_strs=None):
+ initialized = 1
+ return initialized
+
+ def ByteSize(self):
+ n = 0
+ if (self.has_index_writes_): n += 1 + self.lengthVarInt64(self.index_writes_)
+ if (self.has_index_write_bytes_): n += 1 + self.lengthVarInt64(self.index_write_bytes_)
+ if (self.has_entity_writes_): n += 1 + self.lengthVarInt64(self.entity_writes_)
+ if (self.has_entity_write_bytes_): n += 1 + self.lengthVarInt64(self.entity_write_bytes_)
+ return n + 0
+
+ def Clear(self):
+ self.clear_index_writes()
+ self.clear_index_write_bytes()
+ self.clear_entity_writes()
+ self.clear_entity_write_bytes()
+
+ def OutputUnchecked(self, out):
+ if (self.has_index_writes_):
+ out.putVarInt32(8)
+ out.putVarInt32(self.index_writes_)
+ if (self.has_index_write_bytes_):
+ out.putVarInt32(16)
+ out.putVarInt32(self.index_write_bytes_)
+ if (self.has_entity_writes_):
+ out.putVarInt32(24)
+ out.putVarInt32(self.entity_writes_)
+ if (self.has_entity_write_bytes_):
+ out.putVarInt32(32)
+ out.putVarInt32(self.entity_write_bytes_)
+
+ def TryMerge(self, d):
+ while d.avail() > 0:
+ tt = d.getVarInt32()
+ if tt == 8:
+ self.set_index_writes(d.getVarInt32())
+ continue
+ if tt == 16:
+ self.set_index_write_bytes(d.getVarInt32())
+ continue
+ if tt == 24:
+ self.set_entity_writes(d.getVarInt32())
+ continue
+ if tt == 32:
+ self.set_entity_write_bytes(d.getVarInt32())
+ continue
+ if (tt == 0): raise ProtocolBuffer.ProtocolBufferDecodeError
+ d.skipData(tt)
+
+
+ def __str__(self, prefix="", printElemNumber=0):
+ res=""
+ if self.has_index_writes_: res+=prefix+("index_writes: %s\n" % self.DebugFormatInt32(self.index_writes_))
+ if self.has_index_write_bytes_: res+=prefix+("index_write_bytes: %s\n" % self.DebugFormatInt32(self.index_write_bytes_))
+ if self.has_entity_writes_: res+=prefix+("entity_writes: %s\n" % self.DebugFormatInt32(self.entity_writes_))
+ if self.has_entity_write_bytes_: res+=prefix+("entity_write_bytes: %s\n" % self.DebugFormatInt32(self.entity_write_bytes_))
+ return res
+
+
+ def _BuildTagLookupTable(sparse, maxtag, default=None):
+ return tuple([sparse.get(i, default) for i in xrange(0, 1+maxtag)])
+
+ kindex_writes = 1
+ kindex_write_bytes = 2
+ kentity_writes = 3
+ kentity_write_bytes = 4
+
+ _TEXT = _BuildTagLookupTable({
+ 0: "ErrorCode",
+ 1: "index_writes",
+ 2: "index_write_bytes",
+ 3: "entity_writes",
+ 4: "entity_write_bytes",
+ }, 4)
+
+ _TYPES = _BuildTagLookupTable({
+ 0: ProtocolBuffer.Encoder.NUMERIC,
+ 1: ProtocolBuffer.Encoder.NUMERIC,
+ 2: ProtocolBuffer.Encoder.NUMERIC,
+ 3: ProtocolBuffer.Encoder.NUMERIC,
+ 4: ProtocolBuffer.Encoder.NUMERIC,
+ }, 4, ProtocolBuffer.Encoder.MAX_TYPE)
+
+ _STYLE = """"""
+ _STYLE_CONTENT_TYPE = """"""
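+
+# Sketch, not generated output: Cost is all optional varints, so a caller
+# only pays one tag byte plus a varint per field it actually sets.
+def _example_cost(index_writes=1, entity_writes=1):
+  cost = Cost()
+  cost.set_index_writes(index_writes)
+  cost.set_entity_writes(entity_writes)
+  return cost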
+class GetRequest(ProtocolBuffer.ProtocolMessage):
+ has_transaction_ = 0
+ transaction_ = None
+
+ def __init__(self, contents=None):
+ self.key_ = []
+ self.lazy_init_lock_ = thread.allocate_lock()
+ if contents is not None: self.MergeFromString(contents)
+
+ def key_size(self): return len(self.key_)
+ def key_list(self): return self.key_
+
+ def key(self, i):
+ return self.key_[i]
+
+ def mutable_key(self, i):
+ return self.key_[i]
+
+ def add_key(self):
+ x = Reference()
+ self.key_.append(x)
+ return x
+
+ def clear_key(self):
+ self.key_ = []
+ def transaction(self):
+ if self.transaction_ is None:
+ self.lazy_init_lock_.acquire()
+ try:
+ if self.transaction_ is None: self.transaction_ = Transaction()
+ finally:
+ self.lazy_init_lock_.release()
+ return self.transaction_
+
+ def mutable_transaction(self): self.has_transaction_ = 1; return self.transaction()
+
+ def clear_transaction(self):
+ if self.has_transaction_:
+ self.has_transaction_ = 0;
+ if self.transaction_ is not None: self.transaction_.Clear()
+
+ def has_transaction(self): return self.has_transaction_
+
+
+ def MergeFrom(self, x):
+ assert x is not self
+ for i in xrange(x.key_size()): self.add_key().CopyFrom(x.key(i))
+ if (x.has_transaction()): self.mutable_transaction().MergeFrom(x.transaction())
+
+ def Equals(self, x):
+ if x is self: return 1
+ if len(self.key_) != len(x.key_): return 0
+ for e1, e2 in zip(self.key_, x.key_):
+ if e1 != e2: return 0
+ if self.has_transaction_ != x.has_transaction_: return 0
+ if self.has_transaction_ and self.transaction_ != x.transaction_: return 0
+ return 1
+
+ def IsInitialized(self, debug_strs=None):
+ initialized = 1
+ for p in self.key_:
+ if not p.IsInitialized(debug_strs): initialized=0
+ if (self.has_transaction_ and not self.transaction_.IsInitialized(debug_strs)): initialized = 0
+ return initialized
+
+ def ByteSize(self):
+ n = 0
+ n += 1 * len(self.key_)
+ for i in xrange(len(self.key_)): n += self.lengthString(self.key_[i].ByteSize())
+ if (self.has_transaction_): n += 1 + self.lengthString(self.transaction_.ByteSize())
+ return n + 0
+
+ def Clear(self):
+ self.clear_key()
+ self.clear_transaction()
+
+ def OutputUnchecked(self, out):
+ for i in xrange(len(self.key_)):
+ out.putVarInt32(10)
+ out.putVarInt32(self.key_[i].ByteSize())
+ self.key_[i].OutputUnchecked(out)
+ if (self.has_transaction_):
+ out.putVarInt32(18)
+ out.putVarInt32(self.transaction_.ByteSize())
+ self.transaction_.OutputUnchecked(out)
+
+ def TryMerge(self, d):
+ while d.avail() > 0:
+ tt = d.getVarInt32()
+ if tt == 10:
+ length = d.getVarInt32()
+ tmp = ProtocolBuffer.Decoder(d.buffer(), d.pos(), d.pos() + length)
+ d.skip(length)
+ self.add_key().TryMerge(tmp)
+ continue
+ if tt == 18:
+ length = d.getVarInt32()
+ tmp = ProtocolBuffer.Decoder(d.buffer(), d.pos(), d.pos() + length)
+ d.skip(length)
+ self.mutable_transaction().TryMerge(tmp)
+ continue
+ if (tt == 0): raise ProtocolBuffer.ProtocolBufferDecodeError
+ d.skipData(tt)
+
+
+ def __str__(self, prefix="", printElemNumber=0):
+ res=""
+ cnt=0
+ for e in self.key_:
+ elm=""
+ if printElemNumber: elm="(%d)" % cnt
+ res+=prefix+("key%s <\n" % elm)
+ res+=e.__str__(prefix + " ", printElemNumber)
+ res+=prefix+">\n"
+ cnt+=1
+ if self.has_transaction_:
+ res+=prefix+"transaction <\n"
+ res+=self.transaction_.__str__(prefix + " ", printElemNumber)
+ res+=prefix+">\n"
+ return res
+
+
+ def _BuildTagLookupTable(sparse, maxtag, default=None):
+ return tuple([sparse.get(i, default) for i in xrange(0, 1+maxtag)])
+
+ kkey = 1
+ ktransaction = 2
+
+ _TEXT = _BuildTagLookupTable({
+ 0: "ErrorCode",
+ 1: "key",
+ 2: "transaction",
+ }, 2)
+
+ _TYPES = _BuildTagLookupTable({
+ 0: ProtocolBuffer.Encoder.NUMERIC,
+ 1: ProtocolBuffer.Encoder.STRING,
+ 2: ProtocolBuffer.Encoder.STRING,
+ }, 2, ProtocolBuffer.Encoder.MAX_TYPE)
+
+ _STYLE = """"""
+ _STYLE_CONTENT_TYPE = """"""
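+
+# Sketch, not generated output: a batch get names each entity with a
+# Reference (imported from entity_pb near the top of this file); the
+# transaction is lazily allocated, so calling mutable_transaction() is
+# what marks it present.
+def _example_get_request(n=3):
+  req = GetRequest()
+  for _ in xrange(n):
+    req.add_key()                 # each Reference is filled in by the caller
+  assert req.key_size() == n
+  return req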
+class GetResponse_Entity(ProtocolBuffer.ProtocolMessage):
+ has_entity_ = 0
+ entity_ = None
+
+ def __init__(self, contents=None):
+ self.lazy_init_lock_ = thread.allocate_lock()
+ if contents is not None: self.MergeFromString(contents)
+
+ def entity(self):
+ if self.entity_ is None:
+ self.lazy_init_lock_.acquire()
+ try:
+ if self.entity_ is None: self.entity_ = EntityProto()
+ finally:
+ self.lazy_init_lock_.release()
+ return self.entity_
+
+ def mutable_entity(self): self.has_entity_ = 1; return self.entity()
+
+ def clear_entity(self):
+ if self.has_entity_:
+ self.has_entity_ = 0;
+ if self.entity_ is not None: self.entity_.Clear()
+
+ def has_entity(self): return self.has_entity_
+
+
+ def MergeFrom(self, x):
+ assert x is not self
+ if (x.has_entity()): self.mutable_entity().MergeFrom(x.entity())
+
+ def Equals(self, x):
+ if x is self: return 1
+ if self.has_entity_ != x.has_entity_: return 0
+ if self.has_entity_ and self.entity_ != x.entity_: return 0
+ return 1
+
+ def IsInitialized(self, debug_strs=None):
+ initialized = 1
+ if (self.has_entity_ and not self.entity_.IsInitialized(debug_strs)): initialized = 0
+ return initialized
+
+ def ByteSize(self):
+ n = 0
+ if (self.has_entity_): n += 1 + self.lengthString(self.entity_.ByteSize())
+ return n + 0
+
+ def Clear(self):
+ self.clear_entity()
+
+ def OutputUnchecked(self, out):
+ if (self.has_entity_):
+ out.putVarInt32(18)
+ out.putVarInt32(self.entity_.ByteSize())
+ self.entity_.OutputUnchecked(out)
+
+ def TryMerge(self, d):
+ while 1:
+ tt = d.getVarInt32()
+ if tt == 12: break
+ if tt == 18:
+ length = d.getVarInt32()
+ tmp = ProtocolBuffer.Decoder(d.buffer(), d.pos(), d.pos() + length)
+ d.skip(length)
+ self.mutable_entity().TryMerge(tmp)
+ continue
+ if (tt == 0): raise ProtocolBuffer.ProtocolBufferDecodeError
+ d.skipData(tt)
+
+
+ def __str__(self, prefix="", printElemNumber=0):
+ res=""
+ if self.has_entity_:
+ res+=prefix+"entity <\n"
+ res+=self.entity_.__str__(prefix + " ", printElemNumber)
+ res+=prefix+">\n"
+ return res
+
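+# Note on the pair above/below: GetResponse_Entity is a protobuf *group*.
+# GetResponse.OutputUnchecked brackets every element between STARTGROUP
+# (varint 11 = field 1, wire type 3) and ENDGROUP (varint 12), which is why
+# GetResponse_Entity.TryMerge loops until it sees tag 12 instead of draining
+# the decoder, and why the group class has no _TEXT/_TYPES tables of its
+# own -- the enclosing GetResponse owns both tag slots.
+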
+class GetResponse(ProtocolBuffer.ProtocolMessage):
+
+ def __init__(self, contents=None):
+ self.entity_ = []
+ if contents is not None: self.MergeFromString(contents)
+
+ def entity_size(self): return len(self.entity_)
+ def entity_list(self): return self.entity_
+
+ def entity(self, i):
+ return self.entity_[i]
+
+ def mutable_entity(self, i):
+ return self.entity_[i]
+
+ def add_entity(self):
+ x = GetResponse_Entity()
+ self.entity_.append(x)
+ return x
+
+ def clear_entity(self):
+ self.entity_ = []
+
+ def MergeFrom(self, x):
+ assert x is not self
+ for i in xrange(x.entity_size()): self.add_entity().CopyFrom(x.entity(i))
+
+ def Equals(self, x):
+ if x is self: return 1
+ if len(self.entity_) != len(x.entity_): return 0
+ for e1, e2 in zip(self.entity_, x.entity_):
+ if e1 != e2: return 0
+ return 1
+
+ def IsInitialized(self, debug_strs=None):
+ initialized = 1
+ for p in self.entity_:
+ if not p.IsInitialized(debug_strs): initialized=0
+ return initialized
+
+ def ByteSize(self):
+ n = 0
+ n += 2 * len(self.entity_)
+ for i in xrange(len(self.entity_)): n += self.entity_[i].ByteSize()
+ return n + 0
+
+ def Clear(self):
+ self.clear_entity()
+
+ def OutputUnchecked(self, out):
+ for i in xrange(len(self.entity_)):
+ out.putVarInt32(11)
+ self.entity_[i].OutputUnchecked(out)
+ out.putVarInt32(12)
+
+ def TryMerge(self, d):
+ while d.avail() > 0:
+ tt = d.getVarInt32()
+ if tt == 11:
+ self.add_entity().TryMerge(d)
+ continue
+ if (tt == 0): raise ProtocolBuffer.ProtocolBufferDecodeError
+ d.skipData(tt)
+
+
+ def __str__(self, prefix="", printElemNumber=0):
+ res=""
+ cnt=0
+ for e in self.entity_:
+ elm=""
+ if printElemNumber: elm="(%d)" % cnt
+ res+=prefix+("Entity%s {\n" % elm)
+ res+=e.__str__(prefix + " ", printElemNumber)
+ res+=prefix+"}\n"
+ cnt+=1
+ return res
+
+
+ def _BuildTagLookupTable(sparse, maxtag, default=None):
+ return tuple([sparse.get(i, default) for i in xrange(0, 1+maxtag)])
+
+ kEntityGroup = 1
+ kEntityentity = 2
+
+ _TEXT = _BuildTagLookupTable({
+ 0: "ErrorCode",
+ 1: "Entity",
+ 2: "entity",
+ }, 2)
+
+ _TYPES = _BuildTagLookupTable({
+ 0: ProtocolBuffer.Encoder.NUMERIC,
+ 1: ProtocolBuffer.Encoder.STARTGROUP,
+ 2: ProtocolBuffer.Encoder.STRING,
+ }, 2, ProtocolBuffer.Encoder.MAX_TYPE)
+
+ _STYLE = """"""
+ _STYLE_CONTENT_TYPE = """"""
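+
+# Sketch, under an assumption about the service contract (not enforced by
+# this file): an Entity group whose has_entity() bit is unset is how the
+# backend reports a key that was not found.
+def _example_found_entities(resp):
+  found = []
+  for e in resp.entity_list():
+    if e.has_entity(): found.append(e.entity())
+  return found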
+class PutRequest(ProtocolBuffer.ProtocolMessage):
+ has_transaction_ = 0
+ transaction_ = None
+ has_trusted_ = 0
+ trusted_ = 0
+
+ def __init__(self, contents=None):
+ self.entity_ = []
+ self.composite_index_ = []
+ self.lazy_init_lock_ = thread.allocate_lock()
+ if contents is not None: self.MergeFromString(contents)
+
+ def entity_size(self): return len(self.entity_)
+ def entity_list(self): return self.entity_
+
+ def entity(self, i):
+ return self.entity_[i]
+
+ def mutable_entity(self, i):
+ return self.entity_[i]
+
+ def add_entity(self):
+ x = EntityProto()
+ self.entity_.append(x)
+ return x
+
+ def clear_entity(self):
+ self.entity_ = []
+ def transaction(self):
+ if self.transaction_ is None:
+ self.lazy_init_lock_.acquire()
+ try:
+ if self.transaction_ is None: self.transaction_ = Transaction()
+ finally:
+ self.lazy_init_lock_.release()
+ return self.transaction_
+
+ def mutable_transaction(self): self.has_transaction_ = 1; return self.transaction()
+
+ def clear_transaction(self):
+ if self.has_transaction_:
+ self.has_transaction_ = 0;
+ if self.transaction_ is not None: self.transaction_.Clear()
+
+ def has_transaction(self): return self.has_transaction_
+
+ def composite_index_size(self): return len(self.composite_index_)
+ def composite_index_list(self): return self.composite_index_
+
+ def composite_index(self, i):
+ return self.composite_index_[i]
+
+ def mutable_composite_index(self, i):
+ return self.composite_index_[i]
+
+ def add_composite_index(self):
+ x = CompositeIndex()
+ self.composite_index_.append(x)
+ return x
+
+ def clear_composite_index(self):
+ self.composite_index_ = []
+ def trusted(self): return self.trusted_
+
+ def set_trusted(self, x):
+ self.has_trusted_ = 1
+ self.trusted_ = x
+
+ def clear_trusted(self):
+ if self.has_trusted_:
+ self.has_trusted_ = 0
+ self.trusted_ = 0
+
+ def has_trusted(self): return self.has_trusted_
+
+
+ def MergeFrom(self, x):
+ assert x is not self
+ for i in xrange(x.entity_size()): self.add_entity().CopyFrom(x.entity(i))
+ if (x.has_transaction()): self.mutable_transaction().MergeFrom(x.transaction())
+ for i in xrange(x.composite_index_size()): self.add_composite_index().CopyFrom(x.composite_index(i))
+ if (x.has_trusted()): self.set_trusted(x.trusted())
+
+ def Equals(self, x):
+ if x is self: return 1
+ if len(self.entity_) != len(x.entity_): return 0
+ for e1, e2 in zip(self.entity_, x.entity_):
+ if e1 != e2: return 0
+ if self.has_transaction_ != x.has_transaction_: return 0
+ if self.has_transaction_ and self.transaction_ != x.transaction_: return 0
+ if len(self.composite_index_) != len(x.composite_index_): return 0
+ for e1, e2 in zip(self.composite_index_, x.composite_index_):
+ if e1 != e2: return 0
+ if self.has_trusted_ != x.has_trusted_: return 0
+ if self.has_trusted_ and self.trusted_ != x.trusted_: return 0
+ return 1
+
+ def IsInitialized(self, debug_strs=None):
+ initialized = 1
+ for p in self.entity_:
+ if not p.IsInitialized(debug_strs): initialized=0
+ if (self.has_transaction_ and not self.transaction_.IsInitialized(debug_strs)): initialized = 0
+ for p in self.composite_index_:
+ if not p.IsInitialized(debug_strs): initialized=0
+ return initialized
+
+ def ByteSize(self):
+ n = 0
+ n += 1 * len(self.entity_)
+ for i in xrange(len(self.entity_)): n += self.lengthString(self.entity_[i].ByteSize())
+ if (self.has_transaction_): n += 1 + self.lengthString(self.transaction_.ByteSize())
+ n += 1 * len(self.composite_index_)
+ for i in xrange(len(self.composite_index_)): n += self.lengthString(self.composite_index_[i].ByteSize())
+ if (self.has_trusted_): n += 2
+ return n + 0
+
+ def Clear(self):
+ self.clear_entity()
+ self.clear_transaction()
+ self.clear_composite_index()
+ self.clear_trusted()
+
+ def OutputUnchecked(self, out):
+ for i in xrange(len(self.entity_)):
+ out.putVarInt32(10)
+ out.putVarInt32(self.entity_[i].ByteSize())
+ self.entity_[i].OutputUnchecked(out)
+ if (self.has_transaction_):
+ out.putVarInt32(18)
+ out.putVarInt32(self.transaction_.ByteSize())
+ self.transaction_.OutputUnchecked(out)
+ for i in xrange(len(self.composite_index_)):
+ out.putVarInt32(26)
+ out.putVarInt32(self.composite_index_[i].ByteSize())
+ self.composite_index_[i].OutputUnchecked(out)
+ if (self.has_trusted_):
+ out.putVarInt32(32)
+ out.putBoolean(self.trusted_)
+
+ def TryMerge(self, d):
+ while d.avail() > 0:
+ tt = d.getVarInt32()
+ if tt == 10:
+ length = d.getVarInt32()
+ tmp = ProtocolBuffer.Decoder(d.buffer(), d.pos(), d.pos() + length)
+ d.skip(length)
+ self.add_entity().TryMerge(tmp)
+ continue
+ if tt == 18:
+ length = d.getVarInt32()
+ tmp = ProtocolBuffer.Decoder(d.buffer(), d.pos(), d.pos() + length)
+ d.skip(length)
+ self.mutable_transaction().TryMerge(tmp)
+ continue
+ if tt == 26:
+ length = d.getVarInt32()
+ tmp = ProtocolBuffer.Decoder(d.buffer(), d.pos(), d.pos() + length)
+ d.skip(length)
+ self.add_composite_index().TryMerge(tmp)
+ continue
+ if tt == 32:
+ self.set_trusted(d.getBoolean())
+ continue
+ if (tt == 0): raise ProtocolBuffer.ProtocolBufferDecodeError
+ d.skipData(tt)
+
+
+ def __str__(self, prefix="", printElemNumber=0):
+ res=""
+ cnt=0
+ for e in self.entity_:
+ elm=""
+ if printElemNumber: elm="(%d)" % cnt
+ res+=prefix+("entity%s <\n" % elm)
+ res+=e.__str__(prefix + " ", printElemNumber)
+ res+=prefix+">\n"
+ cnt+=1
+ if self.has_transaction_:
+ res+=prefix+"transaction <\n"
+ res+=self.transaction_.__str__(prefix + " ", printElemNumber)
+ res+=prefix+">\n"
+ cnt=0
+ for e in self.composite_index_:
+ elm=""
+ if printElemNumber: elm="(%d)" % cnt
+ res+=prefix+("composite_index%s <\n" % elm)
+ res+=e.__str__(prefix + " ", printElemNumber)
+ res+=prefix+">\n"
+ cnt+=1
+ if self.has_trusted_: res+=prefix+("trusted: %s\n" % self.DebugFormatBool(self.trusted_))
+ return res
+
+
+ def _BuildTagLookupTable(sparse, maxtag, default=None):
+ return tuple([sparse.get(i, default) for i in xrange(0, 1+maxtag)])
+
+ kentity = 1
+ ktransaction = 2
+ kcomposite_index = 3
+ ktrusted = 4
+
+ _TEXT = _BuildTagLookupTable({
+ 0: "ErrorCode",
+ 1: "entity",
+ 2: "transaction",
+ 3: "composite_index",
+ 4: "trusted",
+ }, 4)
+
+ _TYPES = _BuildTagLookupTable({
+ 0: ProtocolBuffer.Encoder.NUMERIC,
+ 1: ProtocolBuffer.Encoder.STRING,
+ 2: ProtocolBuffer.Encoder.STRING,
+ 3: ProtocolBuffer.Encoder.STRING,
+ 4: ProtocolBuffer.Encoder.NUMERIC,
+ }, 4, ProtocolBuffer.Encoder.MAX_TYPE)
+
+ _STYLE = """"""
+ _STYLE_CONTENT_TYPE = """"""
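+
+# Sketch, not generated output: a put carries whole EntityProto messages,
+# one length-prefixed record (wire tag 10) per entity; CopyFrom is the same
+# base-class call that MergeFrom above relies on.
+def _example_put_request(entities):
+  req = PutRequest()
+  for e in entities:              # e: an EntityProto
+    req.add_entity().CopyFrom(e)
+  return req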
+class PutResponse(ProtocolBuffer.ProtocolMessage):
+ has_cost_ = 0
+ cost_ = None
+
+ def __init__(self, contents=None):
+ self.key_ = []
+ self.lazy_init_lock_ = thread.allocate_lock()
+ if contents is not None: self.MergeFromString(contents)
+
+ def key_size(self): return len(self.key_)
+ def key_list(self): return self.key_
+
+ def key(self, i):
+ return self.key_[i]
+
+ def mutable_key(self, i):
+ return self.key_[i]
+
+ def add_key(self):
+ x = Reference()
+ self.key_.append(x)
+ return x
+
+ def clear_key(self):
+ self.key_ = []
+ def cost(self):
+ if self.cost_ is None:
+ self.lazy_init_lock_.acquire()
+ try:
+ if self.cost_ is None: self.cost_ = Cost()
+ finally:
+ self.lazy_init_lock_.release()
+ return self.cost_
+
+ def mutable_cost(self): self.has_cost_ = 1; return self.cost()
+
+ def clear_cost(self):
+ if self.has_cost_:
+ self.has_cost_ = 0;
+ if self.cost_ is not None: self.cost_.Clear()
+
+ def has_cost(self): return self.has_cost_
+
+
+ def MergeFrom(self, x):
+ assert x is not self
+ for i in xrange(x.key_size()): self.add_key().CopyFrom(x.key(i))
+ if (x.has_cost()): self.mutable_cost().MergeFrom(x.cost())
+
+ def Equals(self, x):
+ if x is self: return 1
+ if len(self.key_) != len(x.key_): return 0
+ for e1, e2 in zip(self.key_, x.key_):
+ if e1 != e2: return 0
+ if self.has_cost_ != x.has_cost_: return 0
+ if self.has_cost_ and self.cost_ != x.cost_: return 0
+ return 1
+
+ def IsInitialized(self, debug_strs=None):
+ initialized = 1
+ for p in self.key_:
+ if not p.IsInitialized(debug_strs): initialized=0
+ if (self.has_cost_ and not self.cost_.IsInitialized(debug_strs)): initialized = 0
+ return initialized
+
+ def ByteSize(self):
+ n = 0
+ n += 1 * len(self.key_)
+ for i in xrange(len(self.key_)): n += self.lengthString(self.key_[i].ByteSize())
+ if (self.has_cost_): n += 1 + self.lengthString(self.cost_.ByteSize())
+ return n + 0
+
+ def Clear(self):
+ self.clear_key()
+ self.clear_cost()
+
+ def OutputUnchecked(self, out):
+ for i in xrange(len(self.key_)):
+ out.putVarInt32(10)
+ out.putVarInt32(self.key_[i].ByteSize())
+ self.key_[i].OutputUnchecked(out)
+ if (self.has_cost_):
+ out.putVarInt32(18)
+ out.putVarInt32(self.cost_.ByteSize())
+ self.cost_.OutputUnchecked(out)
+
+ def TryMerge(self, d):
+ while d.avail() > 0:
+ tt = d.getVarInt32()
+ if tt == 10:
+ length = d.getVarInt32()
+ tmp = ProtocolBuffer.Decoder(d.buffer(), d.pos(), d.pos() + length)
+ d.skip(length)
+ self.add_key().TryMerge(tmp)
+ continue
+ if tt == 18:
+ length = d.getVarInt32()
+ tmp = ProtocolBuffer.Decoder(d.buffer(), d.pos(), d.pos() + length)
+ d.skip(length)
+ self.mutable_cost().TryMerge(tmp)
+ continue
+ if (tt == 0): raise ProtocolBuffer.ProtocolBufferDecodeError
+ d.skipData(tt)
+
+
+ def __str__(self, prefix="", printElemNumber=0):
+ res=""
+ cnt=0
+ for e in self.key_:
+ elm=""
+ if printElemNumber: elm="(%d)" % cnt
+ res+=prefix+("key%s <\n" % elm)
+ res+=e.__str__(prefix + " ", printElemNumber)
+ res+=prefix+">\n"
+ cnt+=1
+ if self.has_cost_:
+ res+=prefix+"cost <\n"
+ res+=self.cost_.__str__(prefix + " ", printElemNumber)
+ res+=prefix+">\n"
+ return res
+
+
+ def _BuildTagLookupTable(sparse, maxtag, default=None):
+ return tuple([sparse.get(i, default) for i in xrange(0, 1+maxtag)])
+
+ kkey = 1
+ kcost = 2
+
+ _TEXT = _BuildTagLookupTable({
+ 0: "ErrorCode",
+ 1: "key",
+ 2: "cost",
+ }, 2)
+
+ _TYPES = _BuildTagLookupTable({
+ 0: ProtocolBuffer.Encoder.NUMERIC,
+ 1: ProtocolBuffer.Encoder.STRING,
+ 2: ProtocolBuffer.Encoder.STRING,
+ }, 2, ProtocolBuffer.Encoder.MAX_TYPE)
+
+ _STYLE = """"""
+ _STYLE_CONTENT_TYPE = """"""
+class DeleteRequest(ProtocolBuffer.ProtocolMessage):
+ has_transaction_ = 0
+ transaction_ = None
+ has_trusted_ = 0
+ trusted_ = 0
+
+ def __init__(self, contents=None):
+ self.key_ = []
+ self.lazy_init_lock_ = thread.allocate_lock()
+ if contents is not None: self.MergeFromString(contents)
+
+ def key_size(self): return len(self.key_)
+ def key_list(self): return self.key_
+
+ def key(self, i):
+ return self.key_[i]
+
+ def mutable_key(self, i):
+ return self.key_[i]
+
+ def add_key(self):
+ x = Reference()
+ self.key_.append(x)
+ return x
+
+ def clear_key(self):
+ self.key_ = []
+ def transaction(self):
+ if self.transaction_ is None:
+ self.lazy_init_lock_.acquire()
+ try:
+ if self.transaction_ is None: self.transaction_ = Transaction()
+ finally:
+ self.lazy_init_lock_.release()
+ return self.transaction_
+
+ def mutable_transaction(self): self.has_transaction_ = 1; return self.transaction()
+
+ def clear_transaction(self):
+ if self.has_transaction_:
+ self.has_transaction_ = 0;
+ if self.transaction_ is not None: self.transaction_.Clear()
+
+ def has_transaction(self): return self.has_transaction_
+
+ def trusted(self): return self.trusted_
+
+ def set_trusted(self, x):
+ self.has_trusted_ = 1
+ self.trusted_ = x
+
+ def clear_trusted(self):
+ if self.has_trusted_:
+ self.has_trusted_ = 0
+ self.trusted_ = 0
+
+ def has_trusted(self): return self.has_trusted_
+
+
+ def MergeFrom(self, x):
+ assert x is not self
+ for i in xrange(x.key_size()): self.add_key().CopyFrom(x.key(i))
+ if (x.has_transaction()): self.mutable_transaction().MergeFrom(x.transaction())
+ if (x.has_trusted()): self.set_trusted(x.trusted())
+
+ def Equals(self, x):
+ if x is self: return 1
+ if len(self.key_) != len(x.key_): return 0
+ for e1, e2 in zip(self.key_, x.key_):
+ if e1 != e2: return 0
+ if self.has_transaction_ != x.has_transaction_: return 0
+ if self.has_transaction_ and self.transaction_ != x.transaction_: return 0
+ if self.has_trusted_ != x.has_trusted_: return 0
+ if self.has_trusted_ and self.trusted_ != x.trusted_: return 0
+ return 1
+
+ def IsInitialized(self, debug_strs=None):
+ initialized = 1
+ for p in self.key_:
+ if not p.IsInitialized(debug_strs): initialized=0
+ if (self.has_transaction_ and not self.transaction_.IsInitialized(debug_strs)): initialized = 0
+ return initialized
+
+ def ByteSize(self):
+ n = 0
+ n += 1 * len(self.key_)
+ for i in xrange(len(self.key_)): n += self.lengthString(self.key_[i].ByteSize())
+ if (self.has_transaction_): n += 1 + self.lengthString(self.transaction_.ByteSize())
+ if (self.has_trusted_): n += 2
+ return n + 0
+
+ def Clear(self):
+ self.clear_key()
+ self.clear_transaction()
+ self.clear_trusted()
+
+ def OutputUnchecked(self, out):
+ if (self.has_trusted_):
+ out.putVarInt32(32)
+ out.putBoolean(self.trusted_)
+ if (self.has_transaction_):
+ out.putVarInt32(42)
+ out.putVarInt32(self.transaction_.ByteSize())
+ self.transaction_.OutputUnchecked(out)
+ for i in xrange(len(self.key_)):
+ out.putVarInt32(50)
+ out.putVarInt32(self.key_[i].ByteSize())
+ self.key_[i].OutputUnchecked(out)
+
+ def TryMerge(self, d):
+ while d.avail() > 0:
+ tt = d.getVarInt32()
+ if tt == 32:
+ self.set_trusted(d.getBoolean())
+ continue
+ if tt == 42:
+ length = d.getVarInt32()
+ tmp = ProtocolBuffer.Decoder(d.buffer(), d.pos(), d.pos() + length)
+ d.skip(length)
+ self.mutable_transaction().TryMerge(tmp)
+ continue
+ if tt == 50:
+ length = d.getVarInt32()
+ tmp = ProtocolBuffer.Decoder(d.buffer(), d.pos(), d.pos() + length)
+ d.skip(length)
+ self.add_key().TryMerge(tmp)
+ continue
+ if (tt == 0): raise ProtocolBuffer.ProtocolBufferDecodeError
+ d.skipData(tt)
+
+
+ def __str__(self, prefix="", printElemNumber=0):
+ res=""
+ cnt=0
+ for e in self.key_:
+ elm=""
+ if printElemNumber: elm="(%d)" % cnt
+ res+=prefix+("key%s <\n" % elm)
+ res+=e.__str__(prefix + " ", printElemNumber)
+ res+=prefix+">\n"
+ cnt+=1
+ if self.has_transaction_:
+ res+=prefix+"transaction <\n"
+ res+=self.transaction_.__str__(prefix + " ", printElemNumber)
+ res+=prefix+">\n"
+ if self.has_trusted_: res+=prefix+("trusted: %s\n" % self.DebugFormatBool(self.trusted_))
+ return res
+
+
+ def _BuildTagLookupTable(sparse, maxtag, default=None):
+ return tuple([sparse.get(i, default) for i in xrange(0, 1+maxtag)])
+
+ kkey = 6
+ ktransaction = 5
+ ktrusted = 4
+
+ _TEXT = _BuildTagLookupTable({
+ 0: "ErrorCode",
+ 4: "trusted",
+ 5: "transaction",
+ 6: "key",
+ }, 6)
+
+ _TYPES = _BuildTagLookupTable({
+ 0: ProtocolBuffer.Encoder.NUMERIC,
+ 4: ProtocolBuffer.Encoder.NUMERIC,
+ 5: ProtocolBuffer.Encoder.STRING,
+ 6: ProtocolBuffer.Encoder.STRING,
+ }, 6, ProtocolBuffer.Encoder.MAX_TYPE)
+
+ _STYLE = """"""
+ _STYLE_CONTENT_TYPE = """"""
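+# Note: unlike its siblings, DeleteRequest numbers its fields 4..6 (trusted,
+# transaction, key), so OutputUnchecked above emits wire tags 32, 42 and 50
+# -- ascending field order rather than declaration order.
+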
+class DeleteResponse(ProtocolBuffer.ProtocolMessage):
+ has_cost_ = 0
+ cost_ = None
+
+ def __init__(self, contents=None):
+ self.lazy_init_lock_ = thread.allocate_lock()
+ if contents is not None: self.MergeFromString(contents)
+
+ def cost(self):
+ if self.cost_ is None:
+ self.lazy_init_lock_.acquire()
+ try:
+ if self.cost_ is None: self.cost_ = Cost()
+ finally:
+ self.lazy_init_lock_.release()
+ return self.cost_
+
+ def mutable_cost(self): self.has_cost_ = 1; return self.cost()
+
+ def clear_cost(self):
+ if self.has_cost_:
+ self.has_cost_ = 0;
+ if self.cost_ is not None: self.cost_.Clear()
+
+ def has_cost(self): return self.has_cost_
+
+
+ def MergeFrom(self, x):
+ assert x is not self
+ if (x.has_cost()): self.mutable_cost().MergeFrom(x.cost())
+
+ def Equals(self, x):
+ if x is self: return 1
+ if self.has_cost_ != x.has_cost_: return 0
+ if self.has_cost_ and self.cost_ != x.cost_: return 0
+ return 1
+
+ def IsInitialized(self, debug_strs=None):
+ initialized = 1
+ if (self.has_cost_ and not self.cost_.IsInitialized(debug_strs)): initialized = 0
+ return initialized
+
+ def ByteSize(self):
+ n = 0
+ if (self.has_cost_): n += 1 + self.lengthString(self.cost_.ByteSize())
+ return n + 0
+
+ def Clear(self):
+ self.clear_cost()
+
+ def OutputUnchecked(self, out):
+ if (self.has_cost_):
+ out.putVarInt32(10)
+ out.putVarInt32(self.cost_.ByteSize())
+ self.cost_.OutputUnchecked(out)
+
+ def TryMerge(self, d):
+ while d.avail() > 0:
+ tt = d.getVarInt32()
+ if tt == 10:
+ length = d.getVarInt32()
+ tmp = ProtocolBuffer.Decoder(d.buffer(), d.pos(), d.pos() + length)
+ d.skip(length)
+ self.mutable_cost().TryMerge(tmp)
+ continue
+ if (tt == 0): raise ProtocolBuffer.ProtocolBufferDecodeError
+ d.skipData(tt)
+
+
+ def __str__(self, prefix="", printElemNumber=0):
+ res=""
+ if self.has_cost_:
+ res+=prefix+"cost <\n"
+ res+=self.cost_.__str__(prefix + " ", printElemNumber)
+ res+=prefix+">\n"
+ return res
+
+
+ def _BuildTagLookupTable(sparse, maxtag, default=None):
+ return tuple([sparse.get(i, default) for i in xrange(0, 1+maxtag)])
+
+ kcost = 1
+
+ _TEXT = _BuildTagLookupTable({
+ 0: "ErrorCode",
+ 1: "cost",
+ }, 1)
+
+ _TYPES = _BuildTagLookupTable({
+ 0: ProtocolBuffer.Encoder.NUMERIC,
+ 1: ProtocolBuffer.Encoder.STRING,
+ }, 1, ProtocolBuffer.Encoder.MAX_TYPE)
+
+ _STYLE = """"""
+ _STYLE_CONTENT_TYPE = """"""
+class NextRequest(ProtocolBuffer.ProtocolMessage):
+ has_cursor_ = 0
+ has_count_ = 0
+ count_ = 0
+ has_compile_ = 0
+ compile_ = 0
+
+ def __init__(self, contents=None):
+ self.cursor_ = Cursor()
+ if contents is not None: self.MergeFromString(contents)
+
+ def cursor(self): return self.cursor_
+
+ def mutable_cursor(self): self.has_cursor_ = 1; return self.cursor_
+
+ def clear_cursor(self):self.has_cursor_ = 0; self.cursor_.Clear()
+
+ def has_cursor(self): return self.has_cursor_
+
+ def count(self): return self.count_
+
+ def set_count(self, x):
+ self.has_count_ = 1
+ self.count_ = x
+
+ def clear_count(self):
+ if self.has_count_:
+ self.has_count_ = 0
+ self.count_ = 0
+
+ def has_count(self): return self.has_count_
+
+ def compile(self): return self.compile_
+
+ def set_compile(self, x):
+ self.has_compile_ = 1
+ self.compile_ = x
+
+ def clear_compile(self):
+ if self.has_compile_:
+ self.has_compile_ = 0
+ self.compile_ = 0
+
+ def has_compile(self): return self.has_compile_
+
+
+ def MergeFrom(self, x):
+ assert x is not self
+ if (x.has_cursor()): self.mutable_cursor().MergeFrom(x.cursor())
+ if (x.has_count()): self.set_count(x.count())
+ if (x.has_compile()): self.set_compile(x.compile())
+
+ def Equals(self, x):
+ if x is self: return 1
+ if self.has_cursor_ != x.has_cursor_: return 0
+ if self.has_cursor_ and self.cursor_ != x.cursor_: return 0
+ if self.has_count_ != x.has_count_: return 0
+ if self.has_count_ and self.count_ != x.count_: return 0
+ if self.has_compile_ != x.has_compile_: return 0
+ if self.has_compile_ and self.compile_ != x.compile_: return 0
+ return 1
+
+ def IsInitialized(self, debug_strs=None):
+ initialized = 1
+ if (not self.has_cursor_):
+ initialized = 0
+ if debug_strs is not None:
+ debug_strs.append('Required field: cursor not set.')
+ elif not self.cursor_.IsInitialized(debug_strs): initialized = 0
+ return initialized
+
+ def ByteSize(self):
+ n = 0
+ n += self.lengthString(self.cursor_.ByteSize())
+ if (self.has_count_): n += 1 + self.lengthVarInt64(self.count_)
+ if (self.has_compile_): n += 2
+ return n + 1
+
+ def Clear(self):
+ self.clear_cursor()
+ self.clear_count()
+ self.clear_compile()
+
+ def OutputUnchecked(self, out):
+ out.putVarInt32(10)
+ out.putVarInt32(self.cursor_.ByteSize())
+ self.cursor_.OutputUnchecked(out)
+ if (self.has_count_):
+ out.putVarInt32(16)
+ out.putVarInt32(self.count_)
+ if (self.has_compile_):
+ out.putVarInt32(24)
+ out.putBoolean(self.compile_)
+
+ def TryMerge(self, d):
+ while d.avail() > 0:
+ tt = d.getVarInt32()
+ if tt == 10:
+ length = d.getVarInt32()
+ tmp = ProtocolBuffer.Decoder(d.buffer(), d.pos(), d.pos() + length)
+ d.skip(length)
+ self.mutable_cursor().TryMerge(tmp)
+ continue
+ if tt == 16:
+ self.set_count(d.getVarInt32())
+ continue
+ if tt == 24:
+ self.set_compile(d.getBoolean())
+ continue
+ if (tt == 0): raise ProtocolBuffer.ProtocolBufferDecodeError
+ d.skipData(tt)
+
+
+ def __str__(self, prefix="", printElemNumber=0):
+ res=""
+ if self.has_cursor_:
+ res+=prefix+"cursor <\n"
+ res+=self.cursor_.__str__(prefix + " ", printElemNumber)
+ res+=prefix+">\n"
+ if self.has_count_: res+=prefix+("count: %s\n" % self.DebugFormatInt32(self.count_))
+ if self.has_compile_: res+=prefix+("compile: %s\n" % self.DebugFormatBool(self.compile_))
+ return res
+
+
+ def _BuildTagLookupTable(sparse, maxtag, default=None):
+ return tuple([sparse.get(i, default) for i in xrange(0, 1+maxtag)])
+
+ kcursor = 1
+ kcount = 2
+ kcompile = 3
+
+ _TEXT = _BuildTagLookupTable({
+ 0: "ErrorCode",
+ 1: "cursor",
+ 2: "count",
+ 3: "compile",
+ }, 3)
+
+ _TYPES = _BuildTagLookupTable({
+ 0: ProtocolBuffer.Encoder.NUMERIC,
+ 1: ProtocolBuffer.Encoder.STRING,
+ 2: ProtocolBuffer.Encoder.NUMERIC,
+ 3: ProtocolBuffer.Encoder.NUMERIC,
+ }, 3, ProtocolBuffer.Encoder.MAX_TYPE)
+
+ _STYLE = """"""
+ _STYLE_CONTENT_TYPE = """"""
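+
+# Sketch, not generated output: fetching the next batch of an open query.
+# The cursor comes from a prior QueryResult and is the only required field.
+def _example_next_request(cursor, batch=20):
+  req = NextRequest()
+  req.mutable_cursor().MergeFrom(cursor)  # cursor: a Cursor instance
+  req.set_count(batch)                    # optional batch size (field 2)
+  return req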
+class QueryResult(ProtocolBuffer.ProtocolMessage):
+ has_cursor_ = 0
+ cursor_ = None
+ has_more_results_ = 0
+ more_results_ = 0
+ has_keys_only_ = 0
+ keys_only_ = 0
+ has_compiled_query_ = 0
+ compiled_query_ = None
+
+ def __init__(self, contents=None):
+ self.result_ = []
+ self.lazy_init_lock_ = thread.allocate_lock()
+ if contents is not None: self.MergeFromString(contents)
+
+ def cursor(self):
+ if self.cursor_ is None:
+ self.lazy_init_lock_.acquire()
+ try:
+ if self.cursor_ is None: self.cursor_ = Cursor()
+ finally:
+ self.lazy_init_lock_.release()
+ return self.cursor_
+
+ def mutable_cursor(self): self.has_cursor_ = 1; return self.cursor()
+
+ def clear_cursor(self):
+ if self.has_cursor_:
+ self.has_cursor_ = 0;
+ if self.cursor_ is not None: self.cursor_.Clear()
+
+ def has_cursor(self): return self.has_cursor_
+
+ def result_size(self): return len(self.result_)
+ def result_list(self): return self.result_
+
+ def result(self, i):
+ return self.result_[i]
+
+ def mutable_result(self, i):
+ return self.result_[i]
+
+ def add_result(self):
+ x = EntityProto()
+ self.result_.append(x)
+ return x
+
+ def clear_result(self):
+ self.result_ = []
+ def more_results(self): return self.more_results_
+
+ def set_more_results(self, x):
+ self.has_more_results_ = 1
+ self.more_results_ = x
+
+ def clear_more_results(self):
+ if self.has_more_results_:
+ self.has_more_results_ = 0
+ self.more_results_ = 0
+
+ def has_more_results(self): return self.has_more_results_
+
+ def keys_only(self): return self.keys_only_
+
+ def set_keys_only(self, x):
+ self.has_keys_only_ = 1
+ self.keys_only_ = x
+
+ def clear_keys_only(self):
+ if self.has_keys_only_:
+ self.has_keys_only_ = 0
+ self.keys_only_ = 0
+
+ def has_keys_only(self): return self.has_keys_only_
+
+ def compiled_query(self):
+ if self.compiled_query_ is None:
+ self.lazy_init_lock_.acquire()
+ try:
+ if self.compiled_query_ is None: self.compiled_query_ = CompiledQuery()
+ finally:
+ self.lazy_init_lock_.release()
+ return self.compiled_query_
+
+ def mutable_compiled_query(self): self.has_compiled_query_ = 1; return self.compiled_query()
+
+ def clear_compiled_query(self):
+ if self.has_compiled_query_:
+ self.has_compiled_query_ = 0;
+ if self.compiled_query_ is not None: self.compiled_query_.Clear()
+
+ def has_compiled_query(self): return self.has_compiled_query_
+
+
+ def MergeFrom(self, x):
+ assert x is not self
+ if (x.has_cursor()): self.mutable_cursor().MergeFrom(x.cursor())
+ for i in xrange(x.result_size()): self.add_result().CopyFrom(x.result(i))
+ if (x.has_more_results()): self.set_more_results(x.more_results())
+ if (x.has_keys_only()): self.set_keys_only(x.keys_only())
+ if (x.has_compiled_query()): self.mutable_compiled_query().MergeFrom(x.compiled_query())
+
+ def Equals(self, x):
+ if x is self: return 1
+ if self.has_cursor_ != x.has_cursor_: return 0
+ if self.has_cursor_ and self.cursor_ != x.cursor_: return 0
+ if len(self.result_) != len(x.result_): return 0
+ for e1, e2 in zip(self.result_, x.result_):
+ if e1 != e2: return 0
+ if self.has_more_results_ != x.has_more_results_: return 0
+ if self.has_more_results_ and self.more_results_ != x.more_results_: return 0
+ if self.has_keys_only_ != x.has_keys_only_: return 0
+ if self.has_keys_only_ and self.keys_only_ != x.keys_only_: return 0
+ if self.has_compiled_query_ != x.has_compiled_query_: return 0
+ if self.has_compiled_query_ and self.compiled_query_ != x.compiled_query_: return 0
+ return 1
+
+ def IsInitialized(self, debug_strs=None):
+ initialized = 1
+ if (self.has_cursor_ and not self.cursor_.IsInitialized(debug_strs)): initialized = 0
+ for p in self.result_:
+ if not p.IsInitialized(debug_strs): initialized=0
+ if (not self.has_more_results_):
+ initialized = 0
+ if debug_strs is not None:
+ debug_strs.append('Required field: more_results not set.')
+ if (self.has_compiled_query_ and not self.compiled_query_.IsInitialized(debug_strs)): initialized = 0
+ return initialized
+
+ def ByteSize(self):
+ n = 0
+ if (self.has_cursor_): n += 1 + self.lengthString(self.cursor_.ByteSize())
+ n += 1 * len(self.result_)
+ for i in xrange(len(self.result_)): n += self.lengthString(self.result_[i].ByteSize())
+ if (self.has_keys_only_): n += 2
+ if (self.has_compiled_query_): n += 1 + self.lengthString(self.compiled_query_.ByteSize())
+ return n + 2
+
+ def Clear(self):
+ self.clear_cursor()
+ self.clear_result()
+ self.clear_more_results()
+ self.clear_keys_only()
+ self.clear_compiled_query()
+
+ def OutputUnchecked(self, out):
+ if (self.has_cursor_):
+ out.putVarInt32(10)
+ out.putVarInt32(self.cursor_.ByteSize())
+ self.cursor_.OutputUnchecked(out)
+ for i in xrange(len(self.result_)):
+ out.putVarInt32(18)
+ out.putVarInt32(self.result_[i].ByteSize())
+ self.result_[i].OutputUnchecked(out)
+ out.putVarInt32(24)
+ out.putBoolean(self.more_results_)
+ if (self.has_keys_only_):
+ out.putVarInt32(32)
+ out.putBoolean(self.keys_only_)
+ if (self.has_compiled_query_):
+ out.putVarInt32(42)
+ out.putVarInt32(self.compiled_query_.ByteSize())
+ self.compiled_query_.OutputUnchecked(out)
+
+ def TryMerge(self, d):
+ while d.avail() > 0:
+ tt = d.getVarInt32()
+ if tt == 10:
+ length = d.getVarInt32()
+ tmp = ProtocolBuffer.Decoder(d.buffer(), d.pos(), d.pos() + length)
+ d.skip(length)
+ self.mutable_cursor().TryMerge(tmp)
+ continue
+ if tt == 18:
+ length = d.getVarInt32()
+ tmp = ProtocolBuffer.Decoder(d.buffer(), d.pos(), d.pos() + length)
+ d.skip(length)
+ self.add_result().TryMerge(tmp)
+ continue
+ if tt == 24:
+ self.set_more_results(d.getBoolean())
+ continue
+ if tt == 32:
+ self.set_keys_only(d.getBoolean())
+ continue
+ if tt == 42:
+ length = d.getVarInt32()
+ tmp = ProtocolBuffer.Decoder(d.buffer(), d.pos(), d.pos() + length)
+ d.skip(length)
+ self.mutable_compiled_query().TryMerge(tmp)
+ continue
+ if (tt == 0): raise ProtocolBuffer.ProtocolBufferDecodeError
+ d.skipData(tt)
+
+
+ def __str__(self, prefix="", printElemNumber=0):
+ res=""
+ if self.has_cursor_:
+ res+=prefix+"cursor <\n"
+ res+=self.cursor_.__str__(prefix + " ", printElemNumber)
+ res+=prefix+">\n"
+ cnt=0
+ for e in self.result_:
+ elm=""
+ if printElemNumber: elm="(%d)" % cnt
+ res+=prefix+("result%s <\n" % elm)
+ res+=e.__str__(prefix + " ", printElemNumber)
+ res+=prefix+">\n"
+ cnt+=1
+ if self.has_more_results_: res+=prefix+("more_results: %s\n" % self.DebugFormatBool(self.more_results_))
+ if self.has_keys_only_: res+=prefix+("keys_only: %s\n" % self.DebugFormatBool(self.keys_only_))
+ if self.has_compiled_query_:
+ res+=prefix+"compiled_query <\n"
+ res+=self.compiled_query_.__str__(prefix + " ", printElemNumber)
+ res+=prefix+">\n"
+ return res
+
+
+ def _BuildTagLookupTable(sparse, maxtag, default=None):
+ return tuple([sparse.get(i, default) for i in xrange(0, 1+maxtag)])
+
+ kcursor = 1
+ kresult = 2
+ kmore_results = 3
+ kkeys_only = 4
+ kcompiled_query = 5
+
+ _TEXT = _BuildTagLookupTable({
+ 0: "ErrorCode",
+ 1: "cursor",
+ 2: "result",
+ 3: "more_results",
+ 4: "keys_only",
+ 5: "compiled_query",
+ }, 5)
+
+ _TYPES = _BuildTagLookupTable({
+ 0: ProtocolBuffer.Encoder.NUMERIC,
+ 1: ProtocolBuffer.Encoder.STRING,
+ 2: ProtocolBuffer.Encoder.STRING,
+ 3: ProtocolBuffer.Encoder.NUMERIC,
+ 4: ProtocolBuffer.Encoder.NUMERIC,
+ 5: ProtocolBuffer.Encoder.STRING,
+ }, 5, ProtocolBuffer.Encoder.MAX_TYPE)
+
+ _STYLE = """"""
+ _STYLE_CONTENT_TYPE = """"""
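+
+# Sketch of the intended paging protocol; the RPC itself is left abstract
+# (fetch_next is a hypothetical callable that returns the next QueryResult).
+def _example_drain_results(result, fetch_next):
+  entities = list(result.result_list())
+  while result.more_results():
+    result = fetch_next(_example_next_request(result.cursor()))
+    entities.extend(result.result_list())
+  return entities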
+class GetSchemaRequest(ProtocolBuffer.ProtocolMessage):
+ has_app_ = 0
+ app_ = ""
+ has_start_kind_ = 0
+ start_kind_ = ""
+ has_end_kind_ = 0
+ end_kind_ = ""
+ has_properties_ = 0
+ properties_ = 1
+
+ def __init__(self, contents=None):
+ if contents is not None: self.MergeFromString(contents)
+
+ def app(self): return self.app_
+
+ def set_app(self, x):
+ self.has_app_ = 1
+ self.app_ = x
+
+ def clear_app(self):
+ if self.has_app_:
+ self.has_app_ = 0
+ self.app_ = ""
+
+ def has_app(self): return self.has_app_
+
+ def start_kind(self): return self.start_kind_
+
+ def set_start_kind(self, x):
+ self.has_start_kind_ = 1
+ self.start_kind_ = x
+
+ def clear_start_kind(self):
+ if self.has_start_kind_:
+ self.has_start_kind_ = 0
+ self.start_kind_ = ""
+
+ def has_start_kind(self): return self.has_start_kind_
+
+ def end_kind(self): return self.end_kind_
+
+ def set_end_kind(self, x):
+ self.has_end_kind_ = 1
+ self.end_kind_ = x
+
+ def clear_end_kind(self):
+ if self.has_end_kind_:
+ self.has_end_kind_ = 0
+ self.end_kind_ = ""
+
+ def has_end_kind(self): return self.has_end_kind_
+
+ def properties(self): return self.properties_
+
+ def set_properties(self, x):
+ self.has_properties_ = 1
+ self.properties_ = x
+
+ def clear_properties(self):
+ if self.has_properties_:
+ self.has_properties_ = 0
+ self.properties_ = 1
+
+ def has_properties(self): return self.has_properties_
+
+
+ def MergeFrom(self, x):
+ assert x is not self
+ if (x.has_app()): self.set_app(x.app())
+ if (x.has_start_kind()): self.set_start_kind(x.start_kind())
+ if (x.has_end_kind()): self.set_end_kind(x.end_kind())
+ if (x.has_properties()): self.set_properties(x.properties())
+
+ def Equals(self, x):
+ if x is self: return 1
+ if self.has_app_ != x.has_app_: return 0
+ if self.has_app_ and self.app_ != x.app_: return 0
+ if self.has_start_kind_ != x.has_start_kind_: return 0
+ if self.has_start_kind_ and self.start_kind_ != x.start_kind_: return 0
+ if self.has_end_kind_ != x.has_end_kind_: return 0
+ if self.has_end_kind_ and self.end_kind_ != x.end_kind_: return 0
+ if self.has_properties_ != x.has_properties_: return 0
+ if self.has_properties_ and self.properties_ != x.properties_: return 0
+ return 1
+
+ def IsInitialized(self, debug_strs=None):
+ initialized = 1
+ if (not self.has_app_):
+ initialized = 0
+ if debug_strs is not None:
+ debug_strs.append('Required field: app not set.')
+ return initialized
+
+ def ByteSize(self):
+ n = 0
+ n += self.lengthString(len(self.app_))
+ if (self.has_start_kind_): n += 1 + self.lengthString(len(self.start_kind_))
+ if (self.has_end_kind_): n += 1 + self.lengthString(len(self.end_kind_))
+ if (self.has_properties_): n += 2
+ return n + 1
+
+ def Clear(self):
+ self.clear_app()
+ self.clear_start_kind()
+ self.clear_end_kind()
+ self.clear_properties()
+
+ def OutputUnchecked(self, out):
+ out.putVarInt32(10)
+ out.putPrefixedString(self.app_)
+ if (self.has_start_kind_):
+ out.putVarInt32(18)
+ out.putPrefixedString(self.start_kind_)
+ if (self.has_end_kind_):
+ out.putVarInt32(26)
+ out.putPrefixedString(self.end_kind_)
+ if (self.has_properties_):
+ out.putVarInt32(32)
+ out.putBoolean(self.properties_)
+
+ def TryMerge(self, d):
+ while d.avail() > 0:
+ tt = d.getVarInt32()
+ if tt == 10:
+ self.set_app(d.getPrefixedString())
+ continue
+ if tt == 18:
+ self.set_start_kind(d.getPrefixedString())
+ continue
+ if tt == 26:
+ self.set_end_kind(d.getPrefixedString())
+ continue
+ if tt == 32:
+ self.set_properties(d.getBoolean())
+ continue
+ if (tt == 0): raise ProtocolBuffer.ProtocolBufferDecodeError
+ d.skipData(tt)
+
+
+ def __str__(self, prefix="", printElemNumber=0):
+ res=""
+ if self.has_app_: res+=prefix+("app: %s\n" % self.DebugFormatString(self.app_))
+ if self.has_start_kind_: res+=prefix+("start_kind: %s\n" % self.DebugFormatString(self.start_kind_))
+ if self.has_end_kind_: res+=prefix+("end_kind: %s\n" % self.DebugFormatString(self.end_kind_))
+ if self.has_properties_: res+=prefix+("properties: %s\n" % self.DebugFormatBool(self.properties_))
+ return res
+
+
+ def _BuildTagLookupTable(sparse, maxtag, default=None):
+ return tuple([sparse.get(i, default) for i in xrange(0, 1+maxtag)])
+
+ kapp = 1
+ kstart_kind = 2
+ kend_kind = 3
+ kproperties = 4
+
+ _TEXT = _BuildTagLookupTable({
+ 0: "ErrorCode",
+ 1: "app",
+ 2: "start_kind",
+ 3: "end_kind",
+ 4: "properties",
+ }, 4)
+
+ _TYPES = _BuildTagLookupTable({
+ 0: ProtocolBuffer.Encoder.NUMERIC,
+ 1: ProtocolBuffer.Encoder.STRING,
+ 2: ProtocolBuffer.Encoder.STRING,
+ 3: ProtocolBuffer.Encoder.STRING,
+ 4: ProtocolBuffer.Encoder.NUMERIC,
+ }, 4, ProtocolBuffer.Encoder.MAX_TYPE)
+
+ _STYLE = """"""
+ _STYLE_CONTENT_TYPE = """"""
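+# Illustrative usage -- a minimal sketch, not part of the generated file; the
+# app id 'my-app' and kind 'Author' are hypothetical:
+#
+#   req = GetSchemaRequest()
+#   req.set_app('my-app')          # required; IsInitialized() fails without it
+#   req.set_start_kind('Author')   # optional lower bound on the kind range
+#   data = req.Encode()            # serialized wire bytes
+#   assert GetSchemaRequest(data).Equals(req)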
+class Schema(ProtocolBuffer.ProtocolMessage):
+ has_more_results_ = 0
+ more_results_ = 0
+
+ def __init__(self, contents=None):
+ self.kind_ = []
+ if contents is not None: self.MergeFromString(contents)
+
+ def kind_size(self): return len(self.kind_)
+ def kind_list(self): return self.kind_
+
+ def kind(self, i):
+ return self.kind_[i]
+
+ def mutable_kind(self, i):
+ return self.kind_[i]
+
+ def add_kind(self):
+ x = EntityProto()
+ self.kind_.append(x)
+ return x
+
+ def clear_kind(self):
+ self.kind_ = []
+
+ def more_results(self): return self.more_results_
+
+ def set_more_results(self, x):
+ self.has_more_results_ = 1
+ self.more_results_ = x
+
+ def clear_more_results(self):
+ if self.has_more_results_:
+ self.has_more_results_ = 0
+ self.more_results_ = 0
+
+ def has_more_results(self): return self.has_more_results_
+
+
+ def MergeFrom(self, x):
+ assert x is not self
+ for i in xrange(x.kind_size()): self.add_kind().CopyFrom(x.kind(i))
+ if (x.has_more_results()): self.set_more_results(x.more_results())
+
+ def Equals(self, x):
+ if x is self: return 1
+ if len(self.kind_) != len(x.kind_): return 0
+ for e1, e2 in zip(self.kind_, x.kind_):
+ if e1 != e2: return 0
+ if self.has_more_results_ != x.has_more_results_: return 0
+ if self.has_more_results_ and self.more_results_ != x.more_results_: return 0
+ return 1
+
+ def IsInitialized(self, debug_strs=None):
+ initialized = 1
+ for p in self.kind_:
+ if not p.IsInitialized(debug_strs): initialized=0
+ return initialized
+
+ def ByteSize(self):
+ n = 0
+ n += 1 * len(self.kind_)
+ for i in xrange(len(self.kind_)): n += self.lengthString(self.kind_[i].ByteSize())
+ if (self.has_more_results_): n += 2
+ return n + 0
+
+ def Clear(self):
+ self.clear_kind()
+ self.clear_more_results()
+
+ def OutputUnchecked(self, out):
+ for i in xrange(len(self.kind_)):
+ out.putVarInt32(10)
+ out.putVarInt32(self.kind_[i].ByteSize())
+ self.kind_[i].OutputUnchecked(out)
+ if (self.has_more_results_):
+ out.putVarInt32(16)
+ out.putBoolean(self.more_results_)
+
+ def TryMerge(self, d):
+ while d.avail() > 0:
+ tt = d.getVarInt32()
+ if tt == 10:
+ length = d.getVarInt32()
+ tmp = ProtocolBuffer.Decoder(d.buffer(), d.pos(), d.pos() + length)
+ d.skip(length)
+ self.add_kind().TryMerge(tmp)
+ continue
+ if tt == 16:
+ self.set_more_results(d.getBoolean())
+ continue
+ if (tt == 0): raise ProtocolBuffer.ProtocolBufferDecodeError
+ d.skipData(tt)
+
+
+ def __str__(self, prefix="", printElemNumber=0):
+ res=""
+ cnt=0
+ for e in self.kind_:
+ elm=""
+ if printElemNumber: elm="(%d)" % cnt
+ res+=prefix+("kind%s <\n" % elm)
+ res+=e.__str__(prefix + " ", printElemNumber)
+ res+=prefix+">\n"
+ cnt+=1
+ if self.has_more_results_: res+=prefix+("more_results: %s\n" % self.DebugFormatBool(self.more_results_))
+ return res
+
+
+ def _BuildTagLookupTable(sparse, maxtag, default=None):
+ return tuple([sparse.get(i, default) for i in xrange(0, 1+maxtag)])
+
+ kkind = 1
+ kmore_results = 2
+
+ _TEXT = _BuildTagLookupTable({
+ 0: "ErrorCode",
+ 1: "kind",
+ 2: "more_results",
+ }, 2)
+
+ _TYPES = _BuildTagLookupTable({
+ 0: ProtocolBuffer.Encoder.NUMERIC,
+ 1: ProtocolBuffer.Encoder.STRING,
+ 2: ProtocolBuffer.Encoder.NUMERIC,
+ }, 2, ProtocolBuffer.Encoder.MAX_TYPE)
+
+ _STYLE = """"""
+ _STYLE_CONTENT_TYPE = """"""
+class AllocateIdsRequest(ProtocolBuffer.ProtocolMessage):
+ has_model_key_ = 0
+ has_size_ = 0
+ size_ = 0
+
+ def __init__(self, contents=None):
+ self.model_key_ = Reference()
+ if contents is not None: self.MergeFromString(contents)
+
+ def model_key(self): return self.model_key_
+
+ def mutable_model_key(self): self.has_model_key_ = 1; return self.model_key_
+
+ def clear_model_key(self):self.has_model_key_ = 0; self.model_key_.Clear()
+
+ def has_model_key(self): return self.has_model_key_
+
+ def size(self): return self.size_
+
+ def set_size(self, x):
+ self.has_size_ = 1
+ self.size_ = x
+
+ def clear_size(self):
+ if self.has_size_:
+ self.has_size_ = 0
+ self.size_ = 0
+
+ def has_size(self): return self.has_size_
+
+
+ def MergeFrom(self, x):
+ assert x is not self
+ if (x.has_model_key()): self.mutable_model_key().MergeFrom(x.model_key())
+ if (x.has_size()): self.set_size(x.size())
+
+ def Equals(self, x):
+ if x is self: return 1
+ if self.has_model_key_ != x.has_model_key_: return 0
+ if self.has_model_key_ and self.model_key_ != x.model_key_: return 0
+ if self.has_size_ != x.has_size_: return 0
+ if self.has_size_ and self.size_ != x.size_: return 0
+ return 1
+
+ def IsInitialized(self, debug_strs=None):
+ initialized = 1
+ if (not self.has_model_key_):
+ initialized = 0
+ if debug_strs is not None:
+ debug_strs.append('Required field: model_key not set.')
+ elif not self.model_key_.IsInitialized(debug_strs): initialized = 0
+ if (not self.has_size_):
+ initialized = 0
+ if debug_strs is not None:
+ debug_strs.append('Required field: size not set.')
+ return initialized
+
+ def ByteSize(self):
+ n = 0
+ n += self.lengthString(self.model_key_.ByteSize())
+ n += self.lengthVarInt64(self.size_)
+ return n + 2
+
+ def Clear(self):
+ self.clear_model_key()
+ self.clear_size()
+
+ def OutputUnchecked(self, out):
+ out.putVarInt32(10)
+ out.putVarInt32(self.model_key_.ByteSize())
+ self.model_key_.OutputUnchecked(out)
+ out.putVarInt32(16)
+ out.putVarInt64(self.size_)
+
+ def TryMerge(self, d):
+ while d.avail() > 0:
+ tt = d.getVarInt32()
+ if tt == 10:
+ length = d.getVarInt32()
+ tmp = ProtocolBuffer.Decoder(d.buffer(), d.pos(), d.pos() + length)
+ d.skip(length)
+ self.mutable_model_key().TryMerge(tmp)
+ continue
+ if tt == 16:
+ self.set_size(d.getVarInt64())
+ continue
+ if (tt == 0): raise ProtocolBuffer.ProtocolBufferDecodeError
+ d.skipData(tt)
+
+
+ def __str__(self, prefix="", printElemNumber=0):
+ res=""
+ if self.has_model_key_:
+ res+=prefix+"model_key <\n"
+ res+=self.model_key_.__str__(prefix + " ", printElemNumber)
+ res+=prefix+">\n"
+ if self.has_size_: res+=prefix+("size: %s\n" % self.DebugFormatInt64(self.size_))
+ return res
+
+
+ def _BuildTagLookupTable(sparse, maxtag, default=None):
+ return tuple([sparse.get(i, default) for i in xrange(0, 1+maxtag)])
+
+ kmodel_key = 1
+ ksize = 2
+
+ _TEXT = _BuildTagLookupTable({
+ 0: "ErrorCode",
+ 1: "model_key",
+ 2: "size",
+ }, 2)
+
+ _TYPES = _BuildTagLookupTable({
+ 0: ProtocolBuffer.Encoder.NUMERIC,
+ 1: ProtocolBuffer.Encoder.STRING,
+ 2: ProtocolBuffer.Encoder.NUMERIC,
+ }, 2, ProtocolBuffer.Encoder.MAX_TYPE)
+
+ _STYLE = """"""
+ _STYLE_CONTENT_TYPE = """"""
+class AllocateIdsResponse(ProtocolBuffer.ProtocolMessage):
+ has_start_ = 0
+ start_ = 0
+ has_end_ = 0
+ end_ = 0
+
+ def __init__(self, contents=None):
+ if contents is not None: self.MergeFromString(contents)
+
+ def start(self): return self.start_
+
+ def set_start(self, x):
+ self.has_start_ = 1
+ self.start_ = x
+
+ def clear_start(self):
+ if self.has_start_:
+ self.has_start_ = 0
+ self.start_ = 0
+
+ def has_start(self): return self.has_start_
+
+ def end(self): return self.end_
+
+ def set_end(self, x):
+ self.has_end_ = 1
+ self.end_ = x
+
+ def clear_end(self):
+ if self.has_end_:
+ self.has_end_ = 0
+ self.end_ = 0
+
+ def has_end(self): return self.has_end_
+
+
+ def MergeFrom(self, x):
+ assert x is not self
+ if (x.has_start()): self.set_start(x.start())
+ if (x.has_end()): self.set_end(x.end())
+
+ def Equals(self, x):
+ if x is self: return 1
+ if self.has_start_ != x.has_start_: return 0
+ if self.has_start_ and self.start_ != x.start_: return 0
+ if self.has_end_ != x.has_end_: return 0
+ if self.has_end_ and self.end_ != x.end_: return 0
+ return 1
+
+ def IsInitialized(self, debug_strs=None):
+ initialized = 1
+ if (not self.has_start_):
+ initialized = 0
+ if debug_strs is not None:
+ debug_strs.append('Required field: start not set.')
+ if (not self.has_end_):
+ initialized = 0
+ if debug_strs is not None:
+ debug_strs.append('Required field: end not set.')
+ return initialized
+
+ def ByteSize(self):
+ n = 0
+ n += self.lengthVarInt64(self.start_)
+ n += self.lengthVarInt64(self.end_)
+ return n + 2
+
+ def Clear(self):
+ self.clear_start()
+ self.clear_end()
+
+ def OutputUnchecked(self, out):
+ out.putVarInt32(8)
+ out.putVarInt64(self.start_)
+ out.putVarInt32(16)
+ out.putVarInt64(self.end_)
+
+ def TryMerge(self, d):
+ while d.avail() > 0:
+ tt = d.getVarInt32()
+ if tt == 8:
+ self.set_start(d.getVarInt64())
+ continue
+ if tt == 16:
+ self.set_end(d.getVarInt64())
+ continue
+ if (tt == 0): raise ProtocolBuffer.ProtocolBufferDecodeError
+ d.skipData(tt)
+
+
+ def __str__(self, prefix="", printElemNumber=0):
+ res=""
+ if self.has_start_: res+=prefix+("start: %s\n" % self.DebugFormatInt64(self.start_))
+ if self.has_end_: res+=prefix+("end: %s\n" % self.DebugFormatInt64(self.end_))
+ return res
+
+
+ def _BuildTagLookupTable(sparse, maxtag, default=None):
+ return tuple([sparse.get(i, default) for i in xrange(0, 1+maxtag)])
+
+ kstart = 1
+ kend = 2
+
+ _TEXT = _BuildTagLookupTable({
+ 0: "ErrorCode",
+ 1: "start",
+ 2: "end",
+ }, 2)
+
+ _TYPES = _BuildTagLookupTable({
+ 0: ProtocolBuffer.Encoder.NUMERIC,
+ 1: ProtocolBuffer.Encoder.NUMERIC,
+ 2: ProtocolBuffer.Encoder.NUMERIC,
+ }, 2, ProtocolBuffer.Encoder.MAX_TYPE)
+
+ _STYLE = """"""
+ _STYLE_CONTENT_TYPE = """"""
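+# Illustrative usage -- a sketch of an id-allocation round trip, assuming
+# google.appengine.api.apiproxy_stub_map is imported and a datastore stub is
+# registered; the app id and kind below are hypothetical:
+#
+#   req = AllocateIdsRequest()
+#   key = req.mutable_model_key()                # a Reference from entity_pb
+#   key.set_app('my-app')
+#   key.mutable_path().add_element().set_type('Author')
+#   req.set_size(10)                             # model_key and size are required
+#   resp = AllocateIdsResponse()
+#   apiproxy_stub_map.MakeSyncCall('datastore_v3', 'AllocateIds', req, resp)
+#   ids = range(resp.start(), resp.end() + 1)    # the allocated id block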
+class CompositeIndices(ProtocolBuffer.ProtocolMessage):
+
+ def __init__(self, contents=None):
+ self.index_ = []
+ if contents is not None: self.MergeFromString(contents)
+
+ def index_size(self): return len(self.index_)
+ def index_list(self): return self.index_
+
+ def index(self, i):
+ return self.index_[i]
+
+ def mutable_index(self, i):
+ return self.index_[i]
+
+ def add_index(self):
+ x = CompositeIndex()
+ self.index_.append(x)
+ return x
+
+ def clear_index(self):
+ self.index_ = []
+
+ def MergeFrom(self, x):
+ assert x is not self
+ for i in xrange(x.index_size()): self.add_index().CopyFrom(x.index(i))
+
+ def Equals(self, x):
+ if x is self: return 1
+ if len(self.index_) != len(x.index_): return 0
+ for e1, e2 in zip(self.index_, x.index_):
+ if e1 != e2: return 0
+ return 1
+
+ def IsInitialized(self, debug_strs=None):
+ initialized = 1
+ for p in self.index_:
+ if not p.IsInitialized(debug_strs): initialized=0
+ return initialized
+
+ def ByteSize(self):
+ n = 0
+ n += 1 * len(self.index_)
+ for i in xrange(len(self.index_)): n += self.lengthString(self.index_[i].ByteSize())
+ return n + 0
+
+ def Clear(self):
+ self.clear_index()
+
+ def OutputUnchecked(self, out):
+ for i in xrange(len(self.index_)):
+ out.putVarInt32(10)
+ out.putVarInt32(self.index_[i].ByteSize())
+ self.index_[i].OutputUnchecked(out)
+
+ def TryMerge(self, d):
+ while d.avail() > 0:
+ tt = d.getVarInt32()
+ if tt == 10:
+ length = d.getVarInt32()
+ tmp = ProtocolBuffer.Decoder(d.buffer(), d.pos(), d.pos() + length)
+ d.skip(length)
+ self.add_index().TryMerge(tmp)
+ continue
+ if (tt == 0): raise ProtocolBuffer.ProtocolBufferDecodeError
+ d.skipData(tt)
+
+
+ def __str__(self, prefix="", printElemNumber=0):
+ res=""
+ cnt=0
+ for e in self.index_:
+ elm=""
+ if printElemNumber: elm="(%d)" % cnt
+ res+=prefix+("index%s <\n" % elm)
+ res+=e.__str__(prefix + " ", printElemNumber)
+ res+=prefix+">\n"
+ cnt+=1
+ return res
+
+
+ def _BuildTagLookupTable(sparse, maxtag, default=None):
+ return tuple([sparse.get(i, default) for i in xrange(0, 1+maxtag)])
+
+ kindex = 1
+
+ _TEXT = _BuildTagLookupTable({
+ 0: "ErrorCode",
+ 1: "index",
+ }, 1)
+
+ _TYPES = _BuildTagLookupTable({
+ 0: ProtocolBuffer.Encoder.NUMERIC,
+ 1: ProtocolBuffer.Encoder.STRING,
+ }, 1, ProtocolBuffer.Encoder.MAX_TYPE)
+
+ _STYLE = """"""
+ _STYLE_CONTENT_TYPE = """"""
+class ActionRequest(ProtocolBuffer.ProtocolMessage):
+ has_transaction_ = 0
+ has_action_ = 0
+
+ def __init__(self, contents=None):
+ self.transaction_ = Transaction()
+ self.action_ = Action()
+ if contents is not None: self.MergeFromString(contents)
+
+ def transaction(self): return self.transaction_
+
+ def mutable_transaction(self): self.has_transaction_ = 1; return self.transaction_
+
+ def clear_transaction(self):self.has_transaction_ = 0; self.transaction_.Clear()
+
+ def has_transaction(self): return self.has_transaction_
+
+ def action(self): return self.action_
+
+ def mutable_action(self): self.has_action_ = 1; return self.action_
+
+ def clear_action(self):self.has_action_ = 0; self.action_.Clear()
+
+ def has_action(self): return self.has_action_
+
+
+ def MergeFrom(self, x):
+ assert x is not self
+ if (x.has_transaction()): self.mutable_transaction().MergeFrom(x.transaction())
+ if (x.has_action()): self.mutable_action().MergeFrom(x.action())
+
+ def Equals(self, x):
+ if x is self: return 1
+ if self.has_transaction_ != x.has_transaction_: return 0
+ if self.has_transaction_ and self.transaction_ != x.transaction_: return 0
+ if self.has_action_ != x.has_action_: return 0
+ if self.has_action_ and self.action_ != x.action_: return 0
+ return 1
+
+ def IsInitialized(self, debug_strs=None):
+ initialized = 1
+ if (not self.has_transaction_):
+ initialized = 0
+ if debug_strs is not None:
+ debug_strs.append('Required field: transaction not set.')
+ elif not self.transaction_.IsInitialized(debug_strs): initialized = 0
+ if (not self.has_action_):
+ initialized = 0
+ if debug_strs is not None:
+ debug_strs.append('Required field: action not set.')
+ elif not self.action_.IsInitialized(debug_strs): initialized = 0
+ return initialized
+
+ def ByteSize(self):
+ n = 0
+ n += self.lengthString(self.transaction_.ByteSize())
+ n += self.lengthString(self.action_.ByteSize())
+ return n + 2
+
+ def Clear(self):
+ self.clear_transaction()
+ self.clear_action()
+
+ def OutputUnchecked(self, out):
+ out.putVarInt32(10)
+ out.putVarInt32(self.transaction_.ByteSize())
+ self.transaction_.OutputUnchecked(out)
+ out.putVarInt32(18)
+ out.putVarInt32(self.action_.ByteSize())
+ self.action_.OutputUnchecked(out)
+
+ def TryMerge(self, d):
+ while d.avail() > 0:
+ tt = d.getVarInt32()
+ if tt == 10:
+ length = d.getVarInt32()
+ tmp = ProtocolBuffer.Decoder(d.buffer(), d.pos(), d.pos() + length)
+ d.skip(length)
+ self.mutable_transaction().TryMerge(tmp)
+ continue
+ if tt == 18:
+ length = d.getVarInt32()
+ tmp = ProtocolBuffer.Decoder(d.buffer(), d.pos(), d.pos() + length)
+ d.skip(length)
+ self.mutable_action().TryMerge(tmp)
+ continue
+ if (tt == 0): raise ProtocolBuffer.ProtocolBufferDecodeError
+ d.skipData(tt)
+
+
+ def __str__(self, prefix="", printElemNumber=0):
+ res=""
+ if self.has_transaction_:
+ res+=prefix+"transaction <\n"
+ res+=self.transaction_.__str__(prefix + " ", printElemNumber)
+ res+=prefix+">\n"
+ if self.has_action_:
+ res+=prefix+"action <\n"
+ res+=self.action_.__str__(prefix + " ", printElemNumber)
+ res+=prefix+">\n"
+ return res
+
+
+ def _BuildTagLookupTable(sparse, maxtag, default=None):
+ return tuple([sparse.get(i, default) for i in xrange(0, 1+maxtag)])
+
+ ktransaction = 1
+ kaction = 2
+
+ _TEXT = _BuildTagLookupTable({
+ 0: "ErrorCode",
+ 1: "transaction",
+ 2: "action",
+ }, 2)
+
+ _TYPES = _BuildTagLookupTable({
+ 0: ProtocolBuffer.Encoder.NUMERIC,
+ 1: ProtocolBuffer.Encoder.STRING,
+ 2: ProtocolBuffer.Encoder.STRING,
+ }, 2, ProtocolBuffer.Encoder.MAX_TYPE)
+
+ _STYLE = """"""
+ _STYLE_CONTENT_TYPE = """"""
+class ActionResponse(ProtocolBuffer.ProtocolMessage):
+
+ def __init__(self, contents=None):
+ if contents is not None: self.MergeFromString(contents)
+
+
+ def MergeFrom(self, x):
+ assert x is not self
+
+ def Equals(self, x):
+ if x is self: return 1
+ return 1
+
+ def IsInitialized(self, debug_strs=None):
+ initialized = 1
+ return initialized
+
+ def ByteSize(self):
+ n = 0
+ return n + 0
+
+ def Clear(self):
+ pass
+
+ def OutputUnchecked(self, out):
+ pass
+
+ def TryMerge(self, d):
+ while d.avail() > 0:
+ tt = d.getVarInt32()
+ if (tt == 0): raise ProtocolBuffer.ProtocolBufferDecodeError
+ d.skipData(tt)
+
+
+ def __str__(self, prefix="", printElemNumber=0):
+ res=""
+ return res
+
+
+ def _BuildTagLookupTable(sparse, maxtag, default=None):
+ return tuple([sparse.get(i, default) for i in xrange(0, 1+maxtag)])
+
+
+ _TEXT = _BuildTagLookupTable({
+ 0: "ErrorCode",
+ }, 0)
+
+ _TYPES = _BuildTagLookupTable({
+ 0: ProtocolBuffer.Encoder.NUMERIC,
+ }, 0, ProtocolBuffer.Encoder.MAX_TYPE)
+
+ _STYLE = """"""
+ _STYLE_CONTENT_TYPE = """"""
+class CommitResponse(ProtocolBuffer.ProtocolMessage):
+ has_cost_ = 0
+ cost_ = None
+
+ def __init__(self, contents=None):
+ self.lazy_init_lock_ = thread.allocate_lock()
+ if contents is not None: self.MergeFromString(contents)
+
+ def cost(self):
+ if self.cost_ is None:
+ self.lazy_init_lock_.acquire()
+ try:
+ if self.cost_ is None: self.cost_ = Cost()
+ finally:
+ self.lazy_init_lock_.release()
+ return self.cost_
+
+ def mutable_cost(self): self.has_cost_ = 1; return self.cost()
+
+ def clear_cost(self):
+ if self.has_cost_:
+ self.has_cost_ = 0
+ if self.cost_ is not None: self.cost_.Clear()
+
+ def has_cost(self): return self.has_cost_
+
+
+ def MergeFrom(self, x):
+ assert x is not self
+ if (x.has_cost()): self.mutable_cost().MergeFrom(x.cost())
+
+ def Equals(self, x):
+ if x is self: return 1
+ if self.has_cost_ != x.has_cost_: return 0
+ if self.has_cost_ and self.cost_ != x.cost_: return 0
+ return 1
+
+ def IsInitialized(self, debug_strs=None):
+ initialized = 1
+ if (self.has_cost_ and not self.cost_.IsInitialized(debug_strs)): initialized = 0
+ return initialized
+
+ def ByteSize(self):
+ n = 0
+ if (self.has_cost_): n += 1 + self.lengthString(self.cost_.ByteSize())
+ return n + 0
+
+ def Clear(self):
+ self.clear_cost()
+
+ def OutputUnchecked(self, out):
+ if (self.has_cost_):
+ out.putVarInt32(10)
+ out.putVarInt32(self.cost_.ByteSize())
+ self.cost_.OutputUnchecked(out)
+
+ def TryMerge(self, d):
+ while d.avail() > 0:
+ tt = d.getVarInt32()
+ if tt == 10:
+ length = d.getVarInt32()
+ tmp = ProtocolBuffer.Decoder(d.buffer(), d.pos(), d.pos() + length)
+ d.skip(length)
+ self.mutable_cost().TryMerge(tmp)
+ continue
+ if (tt == 0): raise ProtocolBuffer.ProtocolBufferDecodeError
+ d.skipData(tt)
+
+
+ def __str__(self, prefix="", printElemNumber=0):
+ res=""
+ if self.has_cost_:
+ res+=prefix+"cost <\n"
+ res+=self.cost_.__str__(prefix + " ", printElemNumber)
+ res+=prefix+">\n"
+ return res
+
+
+ def _BuildTagLookupTable(sparse, maxtag, default=None):
+ return tuple([sparse.get(i, default) for i in xrange(0, 1+maxtag)])
+
+ kcost = 1
+
+ _TEXT = _BuildTagLookupTable({
+ 0: "ErrorCode",
+ 1: "cost",
+ }, 1)
+
+ _TYPES = _BuildTagLookupTable({
+ 0: ProtocolBuffer.Encoder.NUMERIC,
+ 1: ProtocolBuffer.Encoder.STRING,
+ }, 1, ProtocolBuffer.Encoder.MAX_TYPE)
+
+ _STYLE = """"""
+ _STYLE_CONTENT_TYPE = """"""
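+# A note on the lazy-init pattern above -- an illustrative sketch, not
+# generated code: cost_ starts out as None and is only built, under
+# lazy_init_lock_, on first access, so an empty CommitResponse never
+# allocates a Cost instance:
+#
+#   resp = CommitResponse()
+#   assert not resp.has_cost()   # nothing allocated, nothing marked present
+#   resp.mutable_cost()          # first access builds the Cost() and sets the bit
+#   assert resp.has_cost()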
+
+__all__ = ['Transaction','Query','Query_Filter','Query_Order','CompiledQuery','CompiledQuery_PrimaryScan','CompiledQuery_MergeJoinScan','CompiledQuery_EntityFilter','RunCompiledQueryRequest','QueryExplanation','Cursor','Error','Cost','GetRequest','GetResponse','GetResponse_Entity','PutRequest','PutResponse','DeleteRequest','DeleteResponse','NextRequest','QueryResult','GetSchemaRequest','Schema','AllocateIdsRequest','AllocateIdsResponse','CompositeIndices','ActionRequest','ActionResponse','CommitResponse']
diff --git a/google_appengine/google/appengine/datastore/datastore_pb.pyc b/google_appengine/google/appengine/datastore/datastore_pb.pyc
new file mode 100644
index 0000000..7cd9465
--- /dev/null
+++ b/google_appengine/google/appengine/datastore/datastore_pb.pyc
Binary files differ
diff --git a/google_appengine/google/appengine/datastore/datastore_v3_pb.py b/google_appengine/google/appengine/datastore/datastore_v3_pb.py
new file mode 100755
index 0000000..627c7bf
--- /dev/null
+++ b/google_appengine/google/appengine/datastore/datastore_v3_pb.py
@@ -0,0 +1,26 @@
+#!/usr/bin/env python
+#
+# Copyright 2007 Google Inc.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
+
+"""The Python datastore protocol buffer definition.
+
+The Proto2 compiler expects generated file names to follow a specific
+pattern, which datastore_pb.py does not (it should have been named
+datastore_v3_pb.py). This file, which has the expected name, redirects to
+the legacy file.
+"""
+
+
+from google.appengine.datastore.datastore_pb import *
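+# Illustrative check -- a sketch assuming the SDK is on sys.path: both module
+# names resolve to the same generated classes, e.g.:
+#
+#   from google.appengine.datastore import datastore_pb, datastore_v3_pb
+#   assert datastore_v3_pb.GetSchemaRequest is datastore_pb.GetSchemaRequest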
diff --git a/google_appengine/google/appengine/datastore/datastore_v3_pb.pyc b/google_appengine/google/appengine/datastore/datastore_v3_pb.pyc
new file mode 100644
index 0000000..40ced82
--- /dev/null
+++ b/google_appengine/google/appengine/datastore/datastore_v3_pb.pyc
Binary files differ
diff --git a/google_appengine/google/appengine/datastore/entity_pb.py b/google_appengine/google/appengine/datastore/entity_pb.py
new file mode 100644
index 0000000..c6817bc
--- /dev/null
+++ b/google_appengine/google/appengine/datastore/entity_pb.py
@@ -0,0 +1,2599 @@
+#!/usr/bin/env python
+#
+# Copyright 2007 Google Inc.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
+
+from google.net.proto import ProtocolBuffer
+import array
+import dummy_thread as thread
+
+__pychecker__ = """maxreturns=0 maxbranches=0 no-callinit
+ unusednames=printElemNumber,debug_strs no-special"""
+
+class PropertyValue_ReferenceValuePathElement(ProtocolBuffer.ProtocolMessage):
+ has_type_ = 0
+ type_ = ""
+ has_id_ = 0
+ id_ = 0
+ has_name_ = 0
+ name_ = ""
+
+ def __init__(self, contents=None):
+ if contents is not None: self.MergeFromString(contents)
+
+ def type(self): return self.type_
+
+ def set_type(self, x):
+ self.has_type_ = 1
+ self.type_ = x
+
+ def clear_type(self):
+ if self.has_type_:
+ self.has_type_ = 0
+ self.type_ = ""
+
+ def has_type(self): return self.has_type_
+
+ def id(self): return self.id_
+
+ def set_id(self, x):
+ self.has_id_ = 1
+ self.id_ = x
+
+ def clear_id(self):
+ if self.has_id_:
+ self.has_id_ = 0
+ self.id_ = 0
+
+ def has_id(self): return self.has_id_
+
+ def name(self): return self.name_
+
+ def set_name(self, x):
+ self.has_name_ = 1
+ self.name_ = x
+
+ def clear_name(self):
+ if self.has_name_:
+ self.has_name_ = 0
+ self.name_ = ""
+
+ def has_name(self): return self.has_name_
+
+
+ def MergeFrom(self, x):
+ assert x is not self
+ if (x.has_type()): self.set_type(x.type())
+ if (x.has_id()): self.set_id(x.id())
+ if (x.has_name()): self.set_name(x.name())
+
+ def Equals(self, x):
+ if x is self: return 1
+ if self.has_type_ != x.has_type_: return 0
+ if self.has_type_ and self.type_ != x.type_: return 0
+ if self.has_id_ != x.has_id_: return 0
+ if self.has_id_ and self.id_ != x.id_: return 0
+ if self.has_name_ != x.has_name_: return 0
+ if self.has_name_ and self.name_ != x.name_: return 0
+ return 1
+
+ def IsInitialized(self, debug_strs=None):
+ initialized = 1
+ if (not self.has_type_):
+ initialized = 0
+ if debug_strs is not None:
+ debug_strs.append('Required field: type not set.')
+ return initialized
+
+ def ByteSize(self):
+ n = 0
+ n += self.lengthString(len(self.type_))
+ if (self.has_id_): n += 2 + self.lengthVarInt64(self.id_)
+ if (self.has_name_): n += 2 + self.lengthString(len(self.name_))
+ return n + 1
+
+ def Clear(self):
+ self.clear_type()
+ self.clear_id()
+ self.clear_name()
+
+ def OutputUnchecked(self, out):
+ out.putVarInt32(122)
+ out.putPrefixedString(self.type_)
+ if (self.has_id_):
+ out.putVarInt32(128)
+ out.putVarInt64(self.id_)
+ if (self.has_name_):
+ out.putVarInt32(138)
+ out.putPrefixedString(self.name_)
+
+ def TryMerge(self, d):
+ while 1:
+ tt = d.getVarInt32()
+ if tt == 116: break
+ if tt == 122:
+ self.set_type(d.getPrefixedString())
+ continue
+ if tt == 128:
+ self.set_id(d.getVarInt64())
+ continue
+ if tt == 138:
+ self.set_name(d.getPrefixedString())
+ continue
+ if (tt == 0): raise ProtocolBuffer.ProtocolBufferDecodeError
+ d.skipData(tt)
+
+
+ def __str__(self, prefix="", printElemNumber=0):
+ res=""
+ if self.has_type_: res+=prefix+("type: %s\n" % self.DebugFormatString(self.type_))
+ if self.has_id_: res+=prefix+("id: %s\n" % self.DebugFormatInt64(self.id_))
+ if self.has_name_: res+=prefix+("name: %s\n" % self.DebugFormatString(self.name_))
+ return res
+
+class PropertyValue_PointValue(ProtocolBuffer.ProtocolMessage):
+ has_x_ = 0
+ x_ = 0.0
+ has_y_ = 0
+ y_ = 0.0
+
+ def __init__(self, contents=None):
+ if contents is not None: self.MergeFromString(contents)
+
+ def x(self): return self.x_
+
+ def set_x(self, x):
+ self.has_x_ = 1
+ self.x_ = x
+
+ def clear_x(self):
+ if self.has_x_:
+ self.has_x_ = 0
+ self.x_ = 0.0
+
+ def has_x(self): return self.has_x_
+
+ def y(self): return self.y_
+
+ def set_y(self, x):
+ self.has_y_ = 1
+ self.y_ = x
+
+ def clear_y(self):
+ if self.has_y_:
+ self.has_y_ = 0
+ self.y_ = 0.0
+
+ def has_y(self): return self.has_y_
+
+
+ def MergeFrom(self, x):
+ assert x is not self
+ if (x.has_x()): self.set_x(x.x())
+ if (x.has_y()): self.set_y(x.y())
+
+ def Equals(self, x):
+ if x is self: return 1
+ if self.has_x_ != x.has_x_: return 0
+ if self.has_x_ and self.x_ != x.x_: return 0
+ if self.has_y_ != x.has_y_: return 0
+ if self.has_y_ and self.y_ != x.y_: return 0
+ return 1
+
+ def IsInitialized(self, debug_strs=None):
+ initialized = 1
+ if (not self.has_x_):
+ initialized = 0
+ if debug_strs is not None:
+ debug_strs.append('Required field: x not set.')
+ if (not self.has_y_):
+ initialized = 0
+ if debug_strs is not None:
+ debug_strs.append('Required field: y not set.')
+ return initialized
+
+ def ByteSize(self):
+ n = 0
+ return n + 18
+
+ def Clear(self):
+ self.clear_x()
+ self.clear_y()
+
+ def OutputUnchecked(self, out):
+ out.putVarInt32(49)
+ out.putDouble(self.x_)
+ out.putVarInt32(57)
+ out.putDouble(self.y_)
+
+ def TryMerge(self, d):
+ while 1:
+ tt = d.getVarInt32()
+ if tt == 44: break
+ if tt == 49:
+ self.set_x(d.getDouble())
+ continue
+ if tt == 57:
+ self.set_y(d.getDouble())
+ continue
+ if (tt == 0): raise ProtocolBuffer.ProtocolBufferDecodeError
+ d.skipData(tt)
+
+
+ def __str__(self, prefix="", printElemNumber=0):
+ res=""
+ if self.has_x_: res+=prefix+("x: %s\n" % self.DebugFormat(self.x_))
+ if self.has_y_: res+=prefix+("y: %s\n" % self.DebugFormat(self.y_))
+ return res
+
+class PropertyValue_UserValue(ProtocolBuffer.ProtocolMessage):
+ has_email_ = 0
+ email_ = ""
+ has_auth_domain_ = 0
+ auth_domain_ = ""
+ has_nickname_ = 0
+ nickname_ = ""
+ has_gaiaid_ = 0
+ gaiaid_ = 0
+ has_obfuscated_gaiaid_ = 0
+ obfuscated_gaiaid_ = ""
+
+ def __init__(self, contents=None):
+ if contents is not None: self.MergeFromString(contents)
+
+ def email(self): return self.email_
+
+ def set_email(self, x):
+ self.has_email_ = 1
+ self.email_ = x
+
+ def clear_email(self):
+ if self.has_email_:
+ self.has_email_ = 0
+ self.email_ = ""
+
+ def has_email(self): return self.has_email_
+
+ def auth_domain(self): return self.auth_domain_
+
+ def set_auth_domain(self, x):
+ self.has_auth_domain_ = 1
+ self.auth_domain_ = x
+
+ def clear_auth_domain(self):
+ if self.has_auth_domain_:
+ self.has_auth_domain_ = 0
+ self.auth_domain_ = ""
+
+ def has_auth_domain(self): return self.has_auth_domain_
+
+ def nickname(self): return self.nickname_
+
+ def set_nickname(self, x):
+ self.has_nickname_ = 1
+ self.nickname_ = x
+
+ def clear_nickname(self):
+ if self.has_nickname_:
+ self.has_nickname_ = 0
+ self.nickname_ = ""
+
+ def has_nickname(self): return self.has_nickname_
+
+ def gaiaid(self): return self.gaiaid_
+
+ def set_gaiaid(self, x):
+ self.has_gaiaid_ = 1
+ self.gaiaid_ = x
+
+ def clear_gaiaid(self):
+ if self.has_gaiaid_:
+ self.has_gaiaid_ = 0
+ self.gaiaid_ = 0
+
+ def has_gaiaid(self): return self.has_gaiaid_
+
+ def obfuscated_gaiaid(self): return self.obfuscated_gaiaid_
+
+ def set_obfuscated_gaiaid(self, x):
+ self.has_obfuscated_gaiaid_ = 1
+ self.obfuscated_gaiaid_ = x
+
+ def clear_obfuscated_gaiaid(self):
+ if self.has_obfuscated_gaiaid_:
+ self.has_obfuscated_gaiaid_ = 0
+ self.obfuscated_gaiaid_ = ""
+
+ def has_obfuscated_gaiaid(self): return self.has_obfuscated_gaiaid_
+
+
+ def MergeFrom(self, x):
+ assert x is not self
+ if (x.has_email()): self.set_email(x.email())
+ if (x.has_auth_domain()): self.set_auth_domain(x.auth_domain())
+ if (x.has_nickname()): self.set_nickname(x.nickname())
+ if (x.has_gaiaid()): self.set_gaiaid(x.gaiaid())
+ if (x.has_obfuscated_gaiaid()): self.set_obfuscated_gaiaid(x.obfuscated_gaiaid())
+
+ def Equals(self, x):
+ if x is self: return 1
+ if self.has_email_ != x.has_email_: return 0
+ if self.has_email_ and self.email_ != x.email_: return 0
+ if self.has_auth_domain_ != x.has_auth_domain_: return 0
+ if self.has_auth_domain_ and self.auth_domain_ != x.auth_domain_: return 0
+ if self.has_nickname_ != x.has_nickname_: return 0
+ if self.has_nickname_ and self.nickname_ != x.nickname_: return 0
+ if self.has_gaiaid_ != x.has_gaiaid_: return 0
+ if self.has_gaiaid_ and self.gaiaid_ != x.gaiaid_: return 0
+ if self.has_obfuscated_gaiaid_ != x.has_obfuscated_gaiaid_: return 0
+ if self.has_obfuscated_gaiaid_ and self.obfuscated_gaiaid_ != x.obfuscated_gaiaid_: return 0
+ return 1
+
+ def IsInitialized(self, debug_strs=None):
+ initialized = 1
+ if (not self.has_email_):
+ initialized = 0
+ if debug_strs is not None:
+ debug_strs.append('Required field: email not set.')
+ if (not self.has_auth_domain_):
+ initialized = 0
+ if debug_strs is not None:
+ debug_strs.append('Required field: auth_domain not set.')
+ if (not self.has_gaiaid_):
+ initialized = 0
+ if debug_strs is not None:
+ debug_strs.append('Required field: gaiaid not set.')
+ return initialized
+
+ def ByteSize(self):
+ n = 0
+ n += self.lengthString(len(self.email_))
+ n += self.lengthString(len(self.auth_domain_))
+ if (self.has_nickname_): n += 1 + self.lengthString(len(self.nickname_))
+ n += self.lengthVarInt64(self.gaiaid_)
+ if (self.has_obfuscated_gaiaid_): n += 2 + self.lengthString(len(self.obfuscated_gaiaid_))
+ return n + 4
+
+ def Clear(self):
+ self.clear_email()
+ self.clear_auth_domain()
+ self.clear_nickname()
+ self.clear_gaiaid()
+ self.clear_obfuscated_gaiaid()
+
+ def OutputUnchecked(self, out):
+ out.putVarInt32(74)
+ out.putPrefixedString(self.email_)
+ out.putVarInt32(82)
+ out.putPrefixedString(self.auth_domain_)
+ if (self.has_nickname_):
+ out.putVarInt32(90)
+ out.putPrefixedString(self.nickname_)
+ out.putVarInt32(144)
+ out.putVarInt64(self.gaiaid_)
+ if (self.has_obfuscated_gaiaid_):
+ out.putVarInt32(154)
+ out.putPrefixedString(self.obfuscated_gaiaid_)
+
+ def TryMerge(self, d):
+ while 1:
+ tt = d.getVarInt32()
+ if tt == 68: break
+ if tt == 74:
+ self.set_email(d.getPrefixedString())
+ continue
+ if tt == 82:
+ self.set_auth_domain(d.getPrefixedString())
+ continue
+ if tt == 90:
+ self.set_nickname(d.getPrefixedString())
+ continue
+ if tt == 144:
+ self.set_gaiaid(d.getVarInt64())
+ continue
+ if tt == 154:
+ self.set_obfuscated_gaiaid(d.getPrefixedString())
+ continue
+ if (tt == 0): raise ProtocolBuffer.ProtocolBufferDecodeError
+ d.skipData(tt)
+
+
+ def __str__(self, prefix="", printElemNumber=0):
+ res=""
+ if self.has_email_: res+=prefix+("email: %s\n" % self.DebugFormatString(self.email_))
+ if self.has_auth_domain_: res+=prefix+("auth_domain: %s\n" % self.DebugFormatString(self.auth_domain_))
+ if self.has_nickname_: res+=prefix+("nickname: %s\n" % self.DebugFormatString(self.nickname_))
+ if self.has_gaiaid_: res+=prefix+("gaiaid: %s\n" % self.DebugFormatInt64(self.gaiaid_))
+ if self.has_obfuscated_gaiaid_: res+=prefix+("obfuscated_gaiaid: %s\n" % self.DebugFormatString(self.obfuscated_gaiaid_))
+ return res
+
+class PropertyValue_ReferenceValue(ProtocolBuffer.ProtocolMessage):
+ has_app_ = 0
+ app_ = ""
+
+ def __init__(self, contents=None):
+ self.pathelement_ = []
+ if contents is not None: self.MergeFromString(contents)
+
+ def app(self): return self.app_
+
+ def set_app(self, x):
+ self.has_app_ = 1
+ self.app_ = x
+
+ def clear_app(self):
+ if self.has_app_:
+ self.has_app_ = 0
+ self.app_ = ""
+
+ def has_app(self): return self.has_app_
+
+ def pathelement_size(self): return len(self.pathelement_)
+ def pathelement_list(self): return self.pathelement_
+
+ def pathelement(self, i):
+ return self.pathelement_[i]
+
+ def mutable_pathelement(self, i):
+ return self.pathelement_[i]
+
+ def add_pathelement(self):
+ x = PropertyValue_ReferenceValuePathElement()
+ self.pathelement_.append(x)
+ return x
+
+ def clear_pathelement(self):
+ self.pathelement_ = []
+
+ def MergeFrom(self, x):
+ assert x is not self
+ if (x.has_app()): self.set_app(x.app())
+ for i in xrange(x.pathelement_size()): self.add_pathelement().CopyFrom(x.pathelement(i))
+
+ def Equals(self, x):
+ if x is self: return 1
+ if self.has_app_ != x.has_app_: return 0
+ if self.has_app_ and self.app_ != x.app_: return 0
+ if len(self.pathelement_) != len(x.pathelement_): return 0
+ for e1, e2 in zip(self.pathelement_, x.pathelement_):
+ if e1 != e2: return 0
+ return 1
+
+ def IsInitialized(self, debug_strs=None):
+ initialized = 1
+ if (not self.has_app_):
+ initialized = 0
+ if debug_strs is not None:
+ debug_strs.append('Required field: app not set.')
+ for p in self.pathelement_:
+ if not p.IsInitialized(debug_strs): initialized=0
+ return initialized
+
+ def ByteSize(self):
+ n = 0
+ n += self.lengthString(len(self.app_))
+ n += 2 * len(self.pathelement_)
+ for i in xrange(len(self.pathelement_)): n += self.pathelement_[i].ByteSize()
+ return n + 1
+
+ def Clear(self):
+ self.clear_app()
+ self.clear_pathelement()
+
+ def OutputUnchecked(self, out):
+ out.putVarInt32(106)
+ out.putPrefixedString(self.app_)
+ for i in xrange(len(self.pathelement_)):
+ out.putVarInt32(115)
+ self.pathelement_[i].OutputUnchecked(out)
+ out.putVarInt32(116)
+
+ def TryMerge(self, d):
+ while 1:
+ tt = d.getVarInt32()
+ if tt == 100: break
+ if tt == 106:
+ self.set_app(d.getPrefixedString())
+ continue
+ if tt == 115:
+ self.add_pathelement().TryMerge(d)
+ continue
+ if (tt == 0): raise ProtocolBuffer.ProtocolBufferDecodeError
+ d.skipData(tt)
+
+
+ def __str__(self, prefix="", printElemNumber=0):
+ res=""
+ if self.has_app_: res+=prefix+("app: %s\n" % self.DebugFormatString(self.app_))
+ cnt=0
+ for e in self.pathelement_:
+ elm=""
+ if printElemNumber: elm="(%d)" % cnt
+ res+=prefix+("PathElement%s {\n" % elm)
+ res+=e.__str__(prefix + " ", printElemNumber)
+ res+=prefix+"}\n"
+ cnt+=1
+ return res
+
+class PropertyValue(ProtocolBuffer.ProtocolMessage):
+ has_int64value_ = 0
+ int64value_ = 0
+ has_booleanvalue_ = 0
+ booleanvalue_ = 0
+ has_stringvalue_ = 0
+ stringvalue_ = ""
+ has_doublevalue_ = 0
+ doublevalue_ = 0.0
+ has_pointvalue_ = 0
+ pointvalue_ = None
+ has_uservalue_ = 0
+ uservalue_ = None
+ has_referencevalue_ = 0
+ referencevalue_ = None
+
+ def __init__(self, contents=None):
+ self.lazy_init_lock_ = thread.allocate_lock()
+ if contents is not None: self.MergeFromString(contents)
+
+ def int64value(self): return self.int64value_
+
+ def set_int64value(self, x):
+ self.has_int64value_ = 1
+ self.int64value_ = x
+
+ def clear_int64value(self):
+ if self.has_int64value_:
+ self.has_int64value_ = 0
+ self.int64value_ = 0
+
+ def has_int64value(self): return self.has_int64value_
+
+ def booleanvalue(self): return self.booleanvalue_
+
+ def set_booleanvalue(self, x):
+ self.has_booleanvalue_ = 1
+ self.booleanvalue_ = x
+
+ def clear_booleanvalue(self):
+ if self.has_booleanvalue_:
+ self.has_booleanvalue_ = 0
+ self.booleanvalue_ = 0
+
+ def has_booleanvalue(self): return self.has_booleanvalue_
+
+ def stringvalue(self): return self.stringvalue_
+
+ def set_stringvalue(self, x):
+ self.has_stringvalue_ = 1
+ self.stringvalue_ = x
+
+ def clear_stringvalue(self):
+ if self.has_stringvalue_:
+ self.has_stringvalue_ = 0
+ self.stringvalue_ = ""
+
+ def has_stringvalue(self): return self.has_stringvalue_
+
+ def doublevalue(self): return self.doublevalue_
+
+ def set_doublevalue(self, x):
+ self.has_doublevalue_ = 1
+ self.doublevalue_ = x
+
+ def clear_doublevalue(self):
+ if self.has_doublevalue_:
+ self.has_doublevalue_ = 0
+ self.doublevalue_ = 0.0
+
+ def has_doublevalue(self): return self.has_doublevalue_
+
+ def pointvalue(self):
+ if self.pointvalue_ is None:
+ self.lazy_init_lock_.acquire()
+ try:
+ if self.pointvalue_ is None: self.pointvalue_ = PropertyValue_PointValue()
+ finally:
+ self.lazy_init_lock_.release()
+ return self.pointvalue_
+
+ def mutable_pointvalue(self): self.has_pointvalue_ = 1; return self.pointvalue()
+
+ def clear_pointvalue(self):
+ if self.has_pointvalue_:
+ self.has_pointvalue_ = 0
+ if self.pointvalue_ is not None: self.pointvalue_.Clear()
+
+ def has_pointvalue(self): return self.has_pointvalue_
+
+ def uservalue(self):
+ if self.uservalue_ is None:
+ self.lazy_init_lock_.acquire()
+ try:
+ if self.uservalue_ is None: self.uservalue_ = PropertyValue_UserValue()
+ finally:
+ self.lazy_init_lock_.release()
+ return self.uservalue_
+
+ def mutable_uservalue(self): self.has_uservalue_ = 1; return self.uservalue()
+
+ def clear_uservalue(self):
+ if self.has_uservalue_:
+ self.has_uservalue_ = 0
+ if self.uservalue_ is not None: self.uservalue_.Clear()
+
+ def has_uservalue(self): return self.has_uservalue_
+
+ def referencevalue(self):
+ if self.referencevalue_ is None:
+ self.lazy_init_lock_.acquire()
+ try:
+ if self.referencevalue_ is None: self.referencevalue_ = PropertyValue_ReferenceValue()
+ finally:
+ self.lazy_init_lock_.release()
+ return self.referencevalue_
+
+ def mutable_referencevalue(self): self.has_referencevalue_ = 1; return self.referencevalue()
+
+ def clear_referencevalue(self):
+ if self.has_referencevalue_:
+ self.has_referencevalue_ = 0
+ if self.referencevalue_ is not None: self.referencevalue_.Clear()
+
+ def has_referencevalue(self): return self.has_referencevalue_
+
+
+ def MergeFrom(self, x):
+ assert x is not self
+ if (x.has_int64value()): self.set_int64value(x.int64value())
+ if (x.has_booleanvalue()): self.set_booleanvalue(x.booleanvalue())
+ if (x.has_stringvalue()): self.set_stringvalue(x.stringvalue())
+ if (x.has_doublevalue()): self.set_doublevalue(x.doublevalue())
+ if (x.has_pointvalue()): self.mutable_pointvalue().MergeFrom(x.pointvalue())
+ if (x.has_uservalue()): self.mutable_uservalue().MergeFrom(x.uservalue())
+ if (x.has_referencevalue()): self.mutable_referencevalue().MergeFrom(x.referencevalue())
+
+ def Equals(self, x):
+ if x is self: return 1
+ if self.has_int64value_ != x.has_int64value_: return 0
+ if self.has_int64value_ and self.int64value_ != x.int64value_: return 0
+ if self.has_booleanvalue_ != x.has_booleanvalue_: return 0
+ if self.has_booleanvalue_ and self.booleanvalue_ != x.booleanvalue_: return 0
+ if self.has_stringvalue_ != x.has_stringvalue_: return 0
+ if self.has_stringvalue_ and self.stringvalue_ != x.stringvalue_: return 0
+ if self.has_doublevalue_ != x.has_doublevalue_: return 0
+ if self.has_doublevalue_ and self.doublevalue_ != x.doublevalue_: return 0
+ if self.has_pointvalue_ != x.has_pointvalue_: return 0
+ if self.has_pointvalue_ and self.pointvalue_ != x.pointvalue_: return 0
+ if self.has_uservalue_ != x.has_uservalue_: return 0
+ if self.has_uservalue_ and self.uservalue_ != x.uservalue_: return 0
+ if self.has_referencevalue_ != x.has_referencevalue_: return 0
+ if self.has_referencevalue_ and self.referencevalue_ != x.referencevalue_: return 0
+ return 1
+
+ def IsInitialized(self, debug_strs=None):
+ initialized = 1
+ if (self.has_pointvalue_ and not self.pointvalue_.IsInitialized(debug_strs)): initialized = 0
+ if (self.has_uservalue_ and not self.uservalue_.IsInitialized(debug_strs)): initialized = 0
+ if (self.has_referencevalue_ and not self.referencevalue_.IsInitialized(debug_strs)): initialized = 0
+ return initialized
+
+ def ByteSize(self):
+ n = 0
+ if (self.has_int64value_): n += 1 + self.lengthVarInt64(self.int64value_)
+ if (self.has_booleanvalue_): n += 2
+ if (self.has_stringvalue_): n += 1 + self.lengthString(len(self.stringvalue_))
+ if (self.has_doublevalue_): n += 9
+ if (self.has_pointvalue_): n += 2 + self.pointvalue_.ByteSize()
+ if (self.has_uservalue_): n += 2 + self.uservalue_.ByteSize()
+ if (self.has_referencevalue_): n += 2 + self.referencevalue_.ByteSize()
+ return n + 0
+
+ def Clear(self):
+ self.clear_int64value()
+ self.clear_booleanvalue()
+ self.clear_stringvalue()
+ self.clear_doublevalue()
+ self.clear_pointvalue()
+ self.clear_uservalue()
+ self.clear_referencevalue()
+
+ def OutputUnchecked(self, out):
+ if (self.has_int64value_):
+ out.putVarInt32(8)
+ out.putVarInt64(self.int64value_)
+ if (self.has_booleanvalue_):
+ out.putVarInt32(16)
+ out.putBoolean(self.booleanvalue_)
+ if (self.has_stringvalue_):
+ out.putVarInt32(26)
+ out.putPrefixedString(self.stringvalue_)
+ if (self.has_doublevalue_):
+ out.putVarInt32(33)
+ out.putDouble(self.doublevalue_)
+ if (self.has_pointvalue_):
+ out.putVarInt32(43)
+ self.pointvalue_.OutputUnchecked(out)
+ out.putVarInt32(44)
+ if (self.has_uservalue_):
+ out.putVarInt32(67)
+ self.uservalue_.OutputUnchecked(out)
+ out.putVarInt32(68)
+ if (self.has_referencevalue_):
+ out.putVarInt32(99)
+ self.referencevalue_.OutputUnchecked(out)
+ out.putVarInt32(100)
+
+ def TryMerge(self, d):
+ while d.avail() > 0:
+ tt = d.getVarInt32()
+ if tt == 8:
+ self.set_int64value(d.getVarInt64())
+ continue
+ if tt == 16:
+ self.set_booleanvalue(d.getBoolean())
+ continue
+ if tt == 26:
+ self.set_stringvalue(d.getPrefixedString())
+ continue
+ if tt == 33:
+ self.set_doublevalue(d.getDouble())
+ continue
+ if tt == 43:
+ self.mutable_pointvalue().TryMerge(d)
+ continue
+ if tt == 67:
+ self.mutable_uservalue().TryMerge(d)
+ continue
+ if tt == 99:
+ self.mutable_referencevalue().TryMerge(d)
+ continue
+ if (tt == 0): raise ProtocolBuffer.ProtocolBufferDecodeError
+ d.skipData(tt)
+
+
+ def __str__(self, prefix="", printElemNumber=0):
+ res=""
+ if self.has_int64value_: res+=prefix+("int64Value: %s\n" % self.DebugFormatInt64(self.int64value_))
+ if self.has_booleanvalue_: res+=prefix+("booleanValue: %s\n" % self.DebugFormatBool(self.booleanvalue_))
+ if self.has_stringvalue_: res+=prefix+("stringValue: %s\n" % self.DebugFormatString(self.stringvalue_))
+ if self.has_doublevalue_: res+=prefix+("doubleValue: %s\n" % self.DebugFormat(self.doublevalue_))
+ if self.has_pointvalue_:
+ res+=prefix+"PointValue {\n"
+ res+=self.pointvalue_.__str__(prefix + " ", printElemNumber)
+ res+=prefix+"}\n"
+ if self.has_uservalue_:
+ res+=prefix+"UserValue {\n"
+ res+=self.uservalue_.__str__(prefix + " ", printElemNumber)
+ res+=prefix+"}\n"
+ if self.has_referencevalue_:
+ res+=prefix+"ReferenceValue {\n"
+ res+=self.referencevalue_.__str__(prefix + " ", printElemNumber)
+ res+=prefix+"}\n"
+ return res
+
+
+ def _BuildTagLookupTable(sparse, maxtag, default=None):
+ return tuple([sparse.get(i, default) for i in xrange(0, 1+maxtag)])
+
+ kint64Value = 1
+ kbooleanValue = 2
+ kstringValue = 3
+ kdoubleValue = 4
+ kPointValueGroup = 5
+ kPointValuex = 6
+ kPointValuey = 7
+ kUserValueGroup = 8
+ kUserValueemail = 9
+ kUserValueauth_domain = 10
+ kUserValuenickname = 11
+ kUserValuegaiaid = 18
+ kUserValueobfuscated_gaiaid = 19
+ kReferenceValueGroup = 12
+ kReferenceValueapp = 13
+ kReferenceValuePathElementGroup = 14
+ kReferenceValuePathElementtype = 15
+ kReferenceValuePathElementid = 16
+ kReferenceValuePathElementname = 17
+
+ _TEXT = _BuildTagLookupTable({
+ 0: "ErrorCode",
+ 1: "int64Value",
+ 2: "booleanValue",
+ 3: "stringValue",
+ 4: "doubleValue",
+ 5: "PointValue",
+ 6: "x",
+ 7: "y",
+ 8: "UserValue",
+ 9: "email",
+ 10: "auth_domain",
+ 11: "nickname",
+ 12: "ReferenceValue",
+ 13: "app",
+ 14: "PathElement",
+ 15: "type",
+ 16: "id",
+ 17: "name",
+ 18: "gaiaid",
+ 19: "obfuscated_gaiaid",
+ }, 19)
+
+ _TYPES = _BuildTagLookupTable({
+ 0: ProtocolBuffer.Encoder.NUMERIC,
+ 1: ProtocolBuffer.Encoder.NUMERIC,
+ 2: ProtocolBuffer.Encoder.NUMERIC,
+ 3: ProtocolBuffer.Encoder.STRING,
+ 4: ProtocolBuffer.Encoder.DOUBLE,
+ 5: ProtocolBuffer.Encoder.STARTGROUP,
+ 6: ProtocolBuffer.Encoder.DOUBLE,
+ 7: ProtocolBuffer.Encoder.DOUBLE,
+ 8: ProtocolBuffer.Encoder.STARTGROUP,
+ 9: ProtocolBuffer.Encoder.STRING,
+ 10: ProtocolBuffer.Encoder.STRING,
+ 11: ProtocolBuffer.Encoder.STRING,
+ 12: ProtocolBuffer.Encoder.STARTGROUP,
+ 13: ProtocolBuffer.Encoder.STRING,
+ 14: ProtocolBuffer.Encoder.STARTGROUP,
+ 15: ProtocolBuffer.Encoder.STRING,
+ 16: ProtocolBuffer.Encoder.NUMERIC,
+ 17: ProtocolBuffer.Encoder.STRING,
+ 18: ProtocolBuffer.Encoder.NUMERIC,
+ 19: ProtocolBuffer.Encoder.STRING,
+ }, 19, ProtocolBuffer.Encoder.MAX_TYPE)
+
+ _STYLE = """"""
+ _STYLE_CONTENT_TYPE = """"""
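+# Illustrative usage -- a sketch, not generated code: PropertyValue acts as a
+# union, and its group-typed members (point, user, reference) are framed by
+# STARTGROUP/ENDGROUP tags on the wire (e.g. 43/44 around PointValue) rather
+# than by a length prefix:
+#
+#   v = PropertyValue()
+#   p = v.mutable_pointvalue()   # group member: written as tag 43 ... tag 44
+#   p.set_x(1.0)                 # x and y are both required inside the group
+#   p.set_y(2.0)
+#   data = v.Encode()
+#   assert PropertyValue(data).pointvalue().y() == 2.0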
+class Property(ProtocolBuffer.ProtocolMessage):
+
+ BLOB = 14
+ TEXT = 15
+ BYTESTRING = 16
+ ATOM_CATEGORY = 1
+ ATOM_LINK = 2
+ ATOM_TITLE = 3
+ ATOM_CONTENT = 4
+ ATOM_SUMMARY = 5
+ ATOM_AUTHOR = 6
+ GD_WHEN = 7
+ GD_EMAIL = 8
+ GEORSS_POINT = 9
+ GD_IM = 10
+ GD_PHONENUMBER = 11
+ GD_POSTALADDRESS = 12
+ GD_RATING = 13
+ BLOBKEY = 17
+
+ _Meaning_NAMES = {
+ 14: "BLOB",
+ 15: "TEXT",
+ 16: "BYTESTRING",
+ 1: "ATOM_CATEGORY",
+ 2: "ATOM_LINK",
+ 3: "ATOM_TITLE",
+ 4: "ATOM_CONTENT",
+ 5: "ATOM_SUMMARY",
+ 6: "ATOM_AUTHOR",
+ 7: "GD_WHEN",
+ 8: "GD_EMAIL",
+ 9: "GEORSS_POINT",
+ 10: "GD_IM",
+ 11: "GD_PHONENUMBER",
+ 12: "GD_POSTALADDRESS",
+ 13: "GD_RATING",
+ 17: "BLOBKEY",
+ }
+
+ def Meaning_Name(cls, x): return cls._Meaning_NAMES.get(x, "")
+ Meaning_Name = classmethod(Meaning_Name)
+
+ has_meaning_ = 0
+ meaning_ = 0
+ has_meaning_uri_ = 0
+ meaning_uri_ = ""
+ has_name_ = 0
+ name_ = ""
+ has_value_ = 0
+ has_multiple_ = 0
+ multiple_ = 0
+
+ def __init__(self, contents=None):
+ self.value_ = PropertyValue()
+ if contents is not None: self.MergeFromString(contents)
+
+ def meaning(self): return self.meaning_
+
+ def set_meaning(self, x):
+ self.has_meaning_ = 1
+ self.meaning_ = x
+
+ def clear_meaning(self):
+ if self.has_meaning_:
+ self.has_meaning_ = 0
+ self.meaning_ = 0
+
+ def has_meaning(self): return self.has_meaning_
+
+ def meaning_uri(self): return self.meaning_uri_
+
+ def set_meaning_uri(self, x):
+ self.has_meaning_uri_ = 1
+ self.meaning_uri_ = x
+
+ def clear_meaning_uri(self):
+ if self.has_meaning_uri_:
+ self.has_meaning_uri_ = 0
+ self.meaning_uri_ = ""
+
+ def has_meaning_uri(self): return self.has_meaning_uri_
+
+ def name(self): return self.name_
+
+ def set_name(self, x):
+ self.has_name_ = 1
+ self.name_ = x
+
+ def clear_name(self):
+ if self.has_name_:
+ self.has_name_ = 0
+ self.name_ = ""
+
+ def has_name(self): return self.has_name_
+
+ def value(self): return self.value_
+
+ def mutable_value(self): self.has_value_ = 1; return self.value_
+
+ def clear_value(self):self.has_value_ = 0; self.value_.Clear()
+
+ def has_value(self): return self.has_value_
+
+ def multiple(self): return self.multiple_
+
+ def set_multiple(self, x):
+ self.has_multiple_ = 1
+ self.multiple_ = x
+
+ def clear_multiple(self):
+ if self.has_multiple_:
+ self.has_multiple_ = 0
+ self.multiple_ = 0
+
+ def has_multiple(self): return self.has_multiple_
+
+
+ def MergeFrom(self, x):
+ assert x is not self
+ if (x.has_meaning()): self.set_meaning(x.meaning())
+ if (x.has_meaning_uri()): self.set_meaning_uri(x.meaning_uri())
+ if (x.has_name()): self.set_name(x.name())
+ if (x.has_value()): self.mutable_value().MergeFrom(x.value())
+ if (x.has_multiple()): self.set_multiple(x.multiple())
+
+ def Equals(self, x):
+ if x is self: return 1
+ if self.has_meaning_ != x.has_meaning_: return 0
+ if self.has_meaning_ and self.meaning_ != x.meaning_: return 0
+ if self.has_meaning_uri_ != x.has_meaning_uri_: return 0
+ if self.has_meaning_uri_ and self.meaning_uri_ != x.meaning_uri_: return 0
+ if self.has_name_ != x.has_name_: return 0
+ if self.has_name_ and self.name_ != x.name_: return 0
+ if self.has_value_ != x.has_value_: return 0
+ if self.has_value_ and self.value_ != x.value_: return 0
+ if self.has_multiple_ != x.has_multiple_: return 0
+ if self.has_multiple_ and self.multiple_ != x.multiple_: return 0
+ return 1
+
+ def IsInitialized(self, debug_strs=None):
+ initialized = 1
+ if (not self.has_name_):
+ initialized = 0
+ if debug_strs is not None:
+ debug_strs.append('Required field: name not set.')
+ if (not self.has_value_):
+ initialized = 0
+ if debug_strs is not None:
+ debug_strs.append('Required field: value not set.')
+ elif not self.value_.IsInitialized(debug_strs): initialized = 0
+ if (not self.has_multiple_):
+ initialized = 0
+ if debug_strs is not None:
+ debug_strs.append('Required field: multiple not set.')
+ return initialized
+
+ def ByteSize(self):
+ n = 0
+ if (self.has_meaning_): n += 1 + self.lengthVarInt64(self.meaning_)
+ if (self.has_meaning_uri_): n += 1 + self.lengthString(len(self.meaning_uri_))
+ n += self.lengthString(len(self.name_))
+ n += self.lengthString(self.value_.ByteSize())
+ return n + 4
+
+ def Clear(self):
+ self.clear_meaning()
+ self.clear_meaning_uri()
+ self.clear_name()
+ self.clear_value()
+ self.clear_multiple()
+
+ def OutputUnchecked(self, out):
+ if (self.has_meaning_):
+ out.putVarInt32(8)
+ out.putVarInt32(self.meaning_)
+ if (self.has_meaning_uri_):
+ out.putVarInt32(18)
+ out.putPrefixedString(self.meaning_uri_)
+ out.putVarInt32(26)
+ out.putPrefixedString(self.name_)
+ out.putVarInt32(32)
+ out.putBoolean(self.multiple_)
+ out.putVarInt32(42)
+ out.putVarInt32(self.value_.ByteSize())
+ self.value_.OutputUnchecked(out)
+
+ def TryMerge(self, d):
+ while d.avail() > 0:
+ tt = d.getVarInt32()
+ if tt == 8:
+ self.set_meaning(d.getVarInt32())
+ continue
+ if tt == 18:
+ self.set_meaning_uri(d.getPrefixedString())
+ continue
+ if tt == 26:
+ self.set_name(d.getPrefixedString())
+ continue
+ if tt == 32:
+ self.set_multiple(d.getBoolean())
+ continue
+ if tt == 42:
+ length = d.getVarInt32()
+ tmp = ProtocolBuffer.Decoder(d.buffer(), d.pos(), d.pos() + length)
+ d.skip(length)
+ self.mutable_value().TryMerge(tmp)
+ continue
+ if (tt == 0): raise ProtocolBuffer.ProtocolBufferDecodeError
+ d.skipData(tt)
+
+
+ def __str__(self, prefix="", printElemNumber=0):
+ res=""
+ if self.has_meaning_: res+=prefix+("meaning: %s\n" % self.DebugFormatInt32(self.meaning_))
+ if self.has_meaning_uri_: res+=prefix+("meaning_uri: %s\n" % self.DebugFormatString(self.meaning_uri_))
+ if self.has_name_: res+=prefix+("name: %s\n" % self.DebugFormatString(self.name_))
+ if self.has_value_:
+ res+=prefix+"value <\n"
+ res+=self.value_.__str__(prefix + " ", printElemNumber)
+ res+=prefix+">\n"
+ if self.has_multiple_: res+=prefix+("multiple: %s\n" % self.DebugFormatBool(self.multiple_))
+ return res
+
+
+ def _BuildTagLookupTable(sparse, maxtag, default=None):
+ return tuple([sparse.get(i, default) for i in xrange(0, 1+maxtag)])
+
+ kmeaning = 1
+ kmeaning_uri = 2
+ kname = 3
+ kvalue = 5
+ kmultiple = 4
+
+ _TEXT = _BuildTagLookupTable({
+ 0: "ErrorCode",
+ 1: "meaning",
+ 2: "meaning_uri",
+ 3: "name",
+ 4: "multiple",
+ 5: "value",
+ }, 5)
+
+ _TYPES = _BuildTagLookupTable({
+ 0: ProtocolBuffer.Encoder.NUMERIC,
+ 1: ProtocolBuffer.Encoder.NUMERIC,
+ 2: ProtocolBuffer.Encoder.STRING,
+ 3: ProtocolBuffer.Encoder.STRING,
+ 4: ProtocolBuffer.Encoder.NUMERIC,
+ 5: ProtocolBuffer.Encoder.STRING,
+ }, 5, ProtocolBuffer.Encoder.MAX_TYPE)
+
+ _STYLE = """"""
+ _STYLE_CONTENT_TYPE = """"""
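+# Illustrative usage -- a sketch, not generated code; the property name and
+# value below are hypothetical. name, value and multiple are all required,
+# and value is a nested PropertyValue length-prefixed under tag 42:
+#
+#   prop = Property()
+#   prop.set_name('tags')
+#   prop.set_multiple(1)                          # one value of a repeated property
+#   prop.mutable_value().set_stringvalue('gae')   # the nested PropertyValue
+#   prop.set_meaning(Property.ATOM_CATEGORY)      # optional semantic hint
+#   data = prop.Encode()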
+class Path_Element(ProtocolBuffer.ProtocolMessage):
+ has_type_ = 0
+ type_ = ""
+ has_id_ = 0
+ id_ = 0
+ has_name_ = 0
+ name_ = ""
+
+ def __init__(self, contents=None):
+ if contents is not None: self.MergeFromString(contents)
+
+ def type(self): return self.type_
+
+ def set_type(self, x):
+ self.has_type_ = 1
+ self.type_ = x
+
+ def clear_type(self):
+ if self.has_type_:
+ self.has_type_ = 0
+ self.type_ = ""
+
+ def has_type(self): return self.has_type_
+
+ def id(self): return self.id_
+
+ def set_id(self, x):
+ self.has_id_ = 1
+ self.id_ = x
+
+ def clear_id(self):
+ if self.has_id_:
+ self.has_id_ = 0
+ self.id_ = 0
+
+ def has_id(self): return self.has_id_
+
+ def name(self): return self.name_
+
+ def set_name(self, x):
+ self.has_name_ = 1
+ self.name_ = x
+
+ def clear_name(self):
+ if self.has_name_:
+ self.has_name_ = 0
+ self.name_ = ""
+
+ def has_name(self): return self.has_name_
+
+
+ def MergeFrom(self, x):
+ assert x is not self
+ if (x.has_type()): self.set_type(x.type())
+ if (x.has_id()): self.set_id(x.id())
+ if (x.has_name()): self.set_name(x.name())
+
+ def Equals(self, x):
+ if x is self: return 1
+ if self.has_type_ != x.has_type_: return 0
+ if self.has_type_ and self.type_ != x.type_: return 0
+ if self.has_id_ != x.has_id_: return 0
+ if self.has_id_ and self.id_ != x.id_: return 0
+ if self.has_name_ != x.has_name_: return 0
+ if self.has_name_ and self.name_ != x.name_: return 0
+ return 1
+
+ def IsInitialized(self, debug_strs=None):
+ initialized = 1
+ if (not self.has_type_):
+ initialized = 0
+ if debug_strs is not None:
+ debug_strs.append('Required field: type not set.')
+ return initialized
+
+ def ByteSize(self):
+ n = 0
+ n += self.lengthString(len(self.type_))
+ if (self.has_id_): n += 1 + self.lengthVarInt64(self.id_)
+ if (self.has_name_): n += 1 + self.lengthString(len(self.name_))
+ return n + 1
+
+ def Clear(self):
+ self.clear_type()
+ self.clear_id()
+ self.clear_name()
+
+ def OutputUnchecked(self, out):
+ out.putVarInt32(18)
+ out.putPrefixedString(self.type_)
+ if (self.has_id_):
+ out.putVarInt32(24)
+ out.putVarInt64(self.id_)
+ if (self.has_name_):
+ out.putVarInt32(34)
+ out.putPrefixedString(self.name_)
+
+ def TryMerge(self, d):
+ while 1:
+ tt = d.getVarInt32()
+ if tt == 12: break
+ if tt == 18:
+ self.set_type(d.getPrefixedString())
+ continue
+ if tt == 24:
+ self.set_id(d.getVarInt64())
+ continue
+ if tt == 34:
+ self.set_name(d.getPrefixedString())
+ continue
+ if (tt == 0): raise ProtocolBuffer.ProtocolBufferDecodeError
+ d.skipData(tt)
+
+
+ def __str__(self, prefix="", printElemNumber=0):
+ res=""
+ if self.has_type_: res+=prefix+("type: %s\n" % self.DebugFormatString(self.type_))
+ if self.has_id_: res+=prefix+("id: %s\n" % self.DebugFormatInt64(self.id_))
+ if self.has_name_: res+=prefix+("name: %s\n" % self.DebugFormatString(self.name_))
+ return res
+
+class Path(ProtocolBuffer.ProtocolMessage):
+
+ def __init__(self, contents=None):
+ self.element_ = []
+ if contents is not None: self.MergeFromString(contents)
+
+ def element_size(self): return len(self.element_)
+ def element_list(self): return self.element_
+
+ def element(self, i):
+ return self.element_[i]
+
+ def mutable_element(self, i):
+ return self.element_[i]
+
+ def add_element(self):
+ x = Path_Element()
+ self.element_.append(x)
+ return x
+
+ def clear_element(self):
+ self.element_ = []
+
+ def MergeFrom(self, x):
+ assert x is not self
+ for i in xrange(x.element_size()): self.add_element().CopyFrom(x.element(i))
+
+ def Equals(self, x):
+ if x is self: return 1
+ if len(self.element_) != len(x.element_): return 0
+ for e1, e2 in zip(self.element_, x.element_):
+ if e1 != e2: return 0
+ return 1
+
+ def IsInitialized(self, debug_strs=None):
+ initialized = 1
+ for p in self.element_:
+ if not p.IsInitialized(debug_strs): initialized=0
+ return initialized
+
+ def ByteSize(self):
+ n = 0
+ n += 2 * len(self.element_)
+ for i in xrange(len(self.element_)): n += self.element_[i].ByteSize()
+ return n + 0
+
+ def Clear(self):
+ self.clear_element()
+
+ def OutputUnchecked(self, out):
+ for i in xrange(len(self.element_)):
+ out.putVarInt32(11)
+ self.element_[i].OutputUnchecked(out)
+ out.putVarInt32(12)
+
+ def TryMerge(self, d):
+ while d.avail() > 0:
+ tt = d.getVarInt32()
+ if tt == 11:
+ self.add_element().TryMerge(d)
+ continue
+ if (tt == 0): raise ProtocolBuffer.ProtocolBufferDecodeError
+ d.skipData(tt)
+
+
+ def __str__(self, prefix="", printElemNumber=0):
+ res=""
+ cnt=0
+ for e in self.element_:
+ elm=""
+ if printElemNumber: elm="(%d)" % cnt
+ res+=prefix+("Element%s {\n" % elm)
+ res+=e.__str__(prefix + " ", printElemNumber)
+ res+=prefix+"}\n"
+ cnt+=1
+ return res
+
+
+ def _BuildTagLookupTable(sparse, maxtag, default=None):
+ return tuple([sparse.get(i, default) for i in xrange(0, 1+maxtag)])
+
+ kElementGroup = 1
+ kElementtype = 2
+ kElementid = 3
+ kElementname = 4
+
+ _TEXT = _BuildTagLookupTable({
+ 0: "ErrorCode",
+ 1: "Element",
+ 2: "type",
+ 3: "id",
+ 4: "name",
+ }, 4)
+
+ _TYPES = _BuildTagLookupTable({
+ 0: ProtocolBuffer.Encoder.NUMERIC,
+ 1: ProtocolBuffer.Encoder.STARTGROUP,
+ 2: ProtocolBuffer.Encoder.STRING,
+ 3: ProtocolBuffer.Encoder.NUMERIC,
+ 4: ProtocolBuffer.Encoder.STRING,
+ }, 4, ProtocolBuffer.Encoder.MAX_TYPE)
+
+ _STYLE = """"""
+ _STYLE_CONTENT_TYPE = """"""
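The integer literals in the OutputUnchecked and TryMerge methods above are precomputed protobuf wire tags, where tag = (field_number << 3) | wire_type. A small self-contained Python sketch reproducing the constants used by Path and Path_Element (the wire-type numbers are the standard protobuf ones):

# Standard protobuf wire types.
VARINT, STRING, STARTGROUP, ENDGROUP = 0, 2, 3, 4

def make_tag(field_number, wire_type):
  # tag = (field_number << 3) | wire_type
  return (field_number << 3) | wire_type

assert make_tag(1, STARTGROUP) == 11  # Path.element: group begins
assert make_tag(1, ENDGROUP) == 12    # Path.element: group ends
assert make_tag(2, STRING) == 18      # Element.type, length-prefixed
assert make_tag(3, VARINT) == 24      # Element.id, varint
assert make_tag(4, STRING) == 34      # Element.name, length-prefixed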
+class Reference(ProtocolBuffer.ProtocolMessage):
+ has_app_ = 0
+ app_ = ""
+ has_path_ = 0
+
+ def __init__(self, contents=None):
+ self.path_ = Path()
+ if contents is not None: self.MergeFromString(contents)
+
+ def app(self): return self.app_
+
+ def set_app(self, x):
+ self.has_app_ = 1
+ self.app_ = x
+
+ def clear_app(self):
+ if self.has_app_:
+ self.has_app_ = 0
+ self.app_ = ""
+
+ def has_app(self): return self.has_app_
+
+ def path(self): return self.path_
+
+ def mutable_path(self): self.has_path_ = 1; return self.path_
+
+ def clear_path(self):self.has_path_ = 0; self.path_.Clear()
+
+ def has_path(self): return self.has_path_
+
+
+ def MergeFrom(self, x):
+ assert x is not self
+ if (x.has_app()): self.set_app(x.app())
+ if (x.has_path()): self.mutable_path().MergeFrom(x.path())
+
+ def Equals(self, x):
+ if x is self: return 1
+ if self.has_app_ != x.has_app_: return 0
+ if self.has_app_ and self.app_ != x.app_: return 0
+ if self.has_path_ != x.has_path_: return 0
+ if self.has_path_ and self.path_ != x.path_: return 0
+ return 1
+
+ def IsInitialized(self, debug_strs=None):
+ initialized = 1
+ if (not self.has_app_):
+ initialized = 0
+ if debug_strs is not None:
+ debug_strs.append('Required field: app not set.')
+ if (not self.has_path_):
+ initialized = 0
+ if debug_strs is not None:
+ debug_strs.append('Required field: path not set.')
+ elif not self.path_.IsInitialized(debug_strs): initialized = 0
+ return initialized
+
+ def ByteSize(self):
+ n = 0
+ n += self.lengthString(len(self.app_))
+ n += self.lengthString(self.path_.ByteSize())
+ return n + 2
+
+ def Clear(self):
+ self.clear_app()
+ self.clear_path()
+
+ def OutputUnchecked(self, out):
+ out.putVarInt32(106)
+ out.putPrefixedString(self.app_)
+ out.putVarInt32(114)
+ out.putVarInt32(self.path_.ByteSize())
+ self.path_.OutputUnchecked(out)
+
+ def TryMerge(self, d):
+ while d.avail() > 0:
+ tt = d.getVarInt32()
+ if tt == 106:
+ self.set_app(d.getPrefixedString())
+ continue
+ if tt == 114:
+ length = d.getVarInt32()
+ tmp = ProtocolBuffer.Decoder(d.buffer(), d.pos(), d.pos() + length)
+ d.skip(length)
+ self.mutable_path().TryMerge(tmp)
+ continue
+ if (tt == 0): raise ProtocolBuffer.ProtocolBufferDecodeError
+ d.skipData(tt)
+
+
+ def __str__(self, prefix="", printElemNumber=0):
+ res=""
+ if self.has_app_: res+=prefix+("app: %s\n" % self.DebugFormatString(self.app_))
+ if self.has_path_:
+ res+=prefix+"path <\n"
+ res+=self.path_.__str__(prefix + " ", printElemNumber)
+ res+=prefix+">\n"
+ return res
+
+
+ def _BuildTagLookupTable(sparse, maxtag, default=None):
+ return tuple([sparse.get(i, default) for i in xrange(0, 1+maxtag)])
+
+ kapp = 13
+ kpath = 14
+
+ _TEXT = _BuildTagLookupTable({
+ 0: "ErrorCode",
+ 13: "app",
+ 14: "path",
+ }, 14)
+
+ _TYPES = _BuildTagLookupTable({
+ 0: ProtocolBuffer.Encoder.NUMERIC,
+ 13: ProtocolBuffer.Encoder.STRING,
+ 14: ProtocolBuffer.Encoder.STRING,
+ }, 14, ProtocolBuffer.Encoder.MAX_TYPE)
+
+ _STYLE = """"""
+ _STYLE_CONTENT_TYPE = """"""
+class User(ProtocolBuffer.ProtocolMessage):
+ has_email_ = 0
+ email_ = ""
+ has_auth_domain_ = 0
+ auth_domain_ = ""
+ has_nickname_ = 0
+ nickname_ = ""
+ has_gaiaid_ = 0
+ gaiaid_ = 0
+ has_obfuscated_gaiaid_ = 0
+ obfuscated_gaiaid_ = ""
+
+ def __init__(self, contents=None):
+ if contents is not None: self.MergeFromString(contents)
+
+ def email(self): return self.email_
+
+ def set_email(self, x):
+ self.has_email_ = 1
+ self.email_ = x
+
+ def clear_email(self):
+ if self.has_email_:
+ self.has_email_ = 0
+ self.email_ = ""
+
+ def has_email(self): return self.has_email_
+
+ def auth_domain(self): return self.auth_domain_
+
+ def set_auth_domain(self, x):
+ self.has_auth_domain_ = 1
+ self.auth_domain_ = x
+
+ def clear_auth_domain(self):
+ if self.has_auth_domain_:
+ self.has_auth_domain_ = 0
+ self.auth_domain_ = ""
+
+ def has_auth_domain(self): return self.has_auth_domain_
+
+ def nickname(self): return self.nickname_
+
+ def set_nickname(self, x):
+ self.has_nickname_ = 1
+ self.nickname_ = x
+
+ def clear_nickname(self):
+ if self.has_nickname_:
+ self.has_nickname_ = 0
+ self.nickname_ = ""
+
+ def has_nickname(self): return self.has_nickname_
+
+ def gaiaid(self): return self.gaiaid_
+
+ def set_gaiaid(self, x):
+ self.has_gaiaid_ = 1
+ self.gaiaid_ = x
+
+ def clear_gaiaid(self):
+ if self.has_gaiaid_:
+ self.has_gaiaid_ = 0
+ self.gaiaid_ = 0
+
+ def has_gaiaid(self): return self.has_gaiaid_
+
+ def obfuscated_gaiaid(self): return self.obfuscated_gaiaid_
+
+ def set_obfuscated_gaiaid(self, x):
+ self.has_obfuscated_gaiaid_ = 1
+ self.obfuscated_gaiaid_ = x
+
+ def clear_obfuscated_gaiaid(self):
+ if self.has_obfuscated_gaiaid_:
+ self.has_obfuscated_gaiaid_ = 0
+ self.obfuscated_gaiaid_ = ""
+
+ def has_obfuscated_gaiaid(self): return self.has_obfuscated_gaiaid_
+
+
+ def MergeFrom(self, x):
+ assert x is not self
+ if (x.has_email()): self.set_email(x.email())
+ if (x.has_auth_domain()): self.set_auth_domain(x.auth_domain())
+ if (x.has_nickname()): self.set_nickname(x.nickname())
+ if (x.has_gaiaid()): self.set_gaiaid(x.gaiaid())
+ if (x.has_obfuscated_gaiaid()): self.set_obfuscated_gaiaid(x.obfuscated_gaiaid())
+
+ def Equals(self, x):
+ if x is self: return 1
+ if self.has_email_ != x.has_email_: return 0
+ if self.has_email_ and self.email_ != x.email_: return 0
+ if self.has_auth_domain_ != x.has_auth_domain_: return 0
+ if self.has_auth_domain_ and self.auth_domain_ != x.auth_domain_: return 0
+ if self.has_nickname_ != x.has_nickname_: return 0
+ if self.has_nickname_ and self.nickname_ != x.nickname_: return 0
+ if self.has_gaiaid_ != x.has_gaiaid_: return 0
+ if self.has_gaiaid_ and self.gaiaid_ != x.gaiaid_: return 0
+ if self.has_obfuscated_gaiaid_ != x.has_obfuscated_gaiaid_: return 0
+ if self.has_obfuscated_gaiaid_ and self.obfuscated_gaiaid_ != x.obfuscated_gaiaid_: return 0
+ return 1
+
+ def IsInitialized(self, debug_strs=None):
+ initialized = 1
+ if (not self.has_email_):
+ initialized = 0
+ if debug_strs is not None:
+ debug_strs.append('Required field: email not set.')
+ if (not self.has_auth_domain_):
+ initialized = 0
+ if debug_strs is not None:
+ debug_strs.append('Required field: auth_domain not set.')
+ if (not self.has_gaiaid_):
+ initialized = 0
+ if debug_strs is not None:
+ debug_strs.append('Required field: gaiaid not set.')
+ return initialized
+
+ def ByteSize(self):
+ n = 0
+ n += self.lengthString(len(self.email_))
+ n += self.lengthString(len(self.auth_domain_))
+ if (self.has_nickname_): n += 1 + self.lengthString(len(self.nickname_))
+ n += self.lengthVarInt64(self.gaiaid_)
+ if (self.has_obfuscated_gaiaid_): n += 1 + self.lengthString(len(self.obfuscated_gaiaid_))
+ return n + 3
+
+ def Clear(self):
+ self.clear_email()
+ self.clear_auth_domain()
+ self.clear_nickname()
+ self.clear_gaiaid()
+ self.clear_obfuscated_gaiaid()
+
+ def OutputUnchecked(self, out):
+ out.putVarInt32(10)
+ out.putPrefixedString(self.email_)
+ out.putVarInt32(18)
+ out.putPrefixedString(self.auth_domain_)
+ if (self.has_nickname_):
+ out.putVarInt32(26)
+ out.putPrefixedString(self.nickname_)
+ out.putVarInt32(32)
+ out.putVarInt64(self.gaiaid_)
+ if (self.has_obfuscated_gaiaid_):
+ out.putVarInt32(42)
+ out.putPrefixedString(self.obfuscated_gaiaid_)
+
+ def TryMerge(self, d):
+ while d.avail() > 0:
+ tt = d.getVarInt32()
+ if tt == 10:
+ self.set_email(d.getPrefixedString())
+ continue
+ if tt == 18:
+ self.set_auth_domain(d.getPrefixedString())
+ continue
+ if tt == 26:
+ self.set_nickname(d.getPrefixedString())
+ continue
+ if tt == 32:
+ self.set_gaiaid(d.getVarInt64())
+ continue
+ if tt == 42:
+ self.set_obfuscated_gaiaid(d.getPrefixedString())
+ continue
+ if (tt == 0): raise ProtocolBuffer.ProtocolBufferDecodeError
+ d.skipData(tt)
+
+
+ def __str__(self, prefix="", printElemNumber=0):
+ res=""
+ if self.has_email_: res+=prefix+("email: %s\n" % self.DebugFormatString(self.email_))
+ if self.has_auth_domain_: res+=prefix+("auth_domain: %s\n" % self.DebugFormatString(self.auth_domain_))
+ if self.has_nickname_: res+=prefix+("nickname: %s\n" % self.DebugFormatString(self.nickname_))
+ if self.has_gaiaid_: res+=prefix+("gaiaid: %s\n" % self.DebugFormatInt64(self.gaiaid_))
+ if self.has_obfuscated_gaiaid_: res+=prefix+("obfuscated_gaiaid: %s\n" % self.DebugFormatString(self.obfuscated_gaiaid_))
+ return res
+
+
+ def _BuildTagLookupTable(sparse, maxtag, default=None):
+ return tuple([sparse.get(i, default) for i in xrange(0, 1+maxtag)])
+
+ kemail = 1
+ kauth_domain = 2
+ knickname = 3
+ kgaiaid = 4
+ kobfuscated_gaiaid = 5
+
+ _TEXT = _BuildTagLookupTable({
+ 0: "ErrorCode",
+ 1: "email",
+ 2: "auth_domain",
+ 3: "nickname",
+ 4: "gaiaid",
+ 5: "obfuscated_gaiaid",
+ }, 5)
+
+ _TYPES = _BuildTagLookupTable({
+ 0: ProtocolBuffer.Encoder.NUMERIC,
+ 1: ProtocolBuffer.Encoder.STRING,
+ 2: ProtocolBuffer.Encoder.STRING,
+ 3: ProtocolBuffer.Encoder.STRING,
+ 4: ProtocolBuffer.Encoder.NUMERIC,
+ 5: ProtocolBuffer.Encoder.STRING,
+ }, 5, ProtocolBuffer.Encoder.MAX_TYPE)
+
+ _STYLE = """"""
+ _STYLE_CONTENT_TYPE = """"""
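User declares email, auth_domain and gaiaid as required, which is why ByteSize and OutputUnchecked handle them unconditionally. A short sketch of the IsInitialized/debug_strs pattern; Encode() is assumed to come from the ProtocolMessage base class in google.net.proto, as elsewhere in the SDK:

u = User()
u.set_email('someone@example.com')

errors = []
assert not u.IsInitialized(errors)
# errors == ['Required field: auth_domain not set.',
#            'Required field: gaiaid not set.']

u.set_auth_domain('gmail.com')
u.set_gaiaid(0)
assert u.IsInitialized()
data = u.Encode()  # serialize only once all required fields are present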
+class EntityProto(ProtocolBuffer.ProtocolMessage):
+
+ GD_CONTACT = 1
+ GD_EVENT = 2
+ GD_MESSAGE = 3
+
+ _Kind_NAMES = {
+ 1: "GD_CONTACT",
+ 2: "GD_EVENT",
+ 3: "GD_MESSAGE",
+ }
+
+ def Kind_Name(cls, x): return cls._Kind_NAMES.get(x, "")
+ Kind_Name = classmethod(Kind_Name)
+
+ has_key_ = 0
+ has_entity_group_ = 0
+ has_owner_ = 0
+ owner_ = None
+ has_kind_ = 0
+ kind_ = 0
+ has_kind_uri_ = 0
+ kind_uri_ = ""
+
+ def __init__(self, contents=None):
+ self.key_ = Reference()
+ self.entity_group_ = Path()
+ self.property_ = []
+ self.raw_property_ = []
+ self.lazy_init_lock_ = thread.allocate_lock()
+ if contents is not None: self.MergeFromString(contents)
+
+ def key(self): return self.key_
+
+ def mutable_key(self): self.has_key_ = 1; return self.key_
+
+ def clear_key(self):self.has_key_ = 0; self.key_.Clear()
+
+ def has_key(self): return self.has_key_
+
+ def entity_group(self): return self.entity_group_
+
+ def mutable_entity_group(self): self.has_entity_group_ = 1; return self.entity_group_
+
+ def clear_entity_group(self):self.has_entity_group_ = 0; self.entity_group_.Clear()
+
+ def has_entity_group(self): return self.has_entity_group_
+
+ def owner(self):
+ if self.owner_ is None:
+ self.lazy_init_lock_.acquire()
+ try:
+ if self.owner_ is None: self.owner_ = User()
+ finally:
+ self.lazy_init_lock_.release()
+ return self.owner_
+
+ def mutable_owner(self): self.has_owner_ = 1; return self.owner()
+
+ def clear_owner(self):
+ if self.has_owner_:
+ self.has_owner_ = 0;
+ if self.owner_ is not None: self.owner_.Clear()
+
+ def has_owner(self): return self.has_owner_
+
+ def kind(self): return self.kind_
+
+ def set_kind(self, x):
+ self.has_kind_ = 1
+ self.kind_ = x
+
+ def clear_kind(self):
+ if self.has_kind_:
+ self.has_kind_ = 0
+ self.kind_ = 0
+
+ def has_kind(self): return self.has_kind_
+
+ def kind_uri(self): return self.kind_uri_
+
+ def set_kind_uri(self, x):
+ self.has_kind_uri_ = 1
+ self.kind_uri_ = x
+
+ def clear_kind_uri(self):
+ if self.has_kind_uri_:
+ self.has_kind_uri_ = 0
+ self.kind_uri_ = ""
+
+ def has_kind_uri(self): return self.has_kind_uri_
+
+ def property_size(self): return len(self.property_)
+ def property_list(self): return self.property_
+
+ def property(self, i):
+ return self.property_[i]
+
+ def mutable_property(self, i):
+ return self.property_[i]
+
+ def add_property(self):
+ x = Property()
+ self.property_.append(x)
+ return x
+
+ def clear_property(self):
+ self.property_ = []
+ def raw_property_size(self): return len(self.raw_property_)
+ def raw_property_list(self): return self.raw_property_
+
+ def raw_property(self, i):
+ return self.raw_property_[i]
+
+ def mutable_raw_property(self, i):
+ return self.raw_property_[i]
+
+ def add_raw_property(self):
+ x = Property()
+ self.raw_property_.append(x)
+ return x
+
+ def clear_raw_property(self):
+ self.raw_property_ = []
+
+ def MergeFrom(self, x):
+ assert x is not self
+ if (x.has_key()): self.mutable_key().MergeFrom(x.key())
+ if (x.has_entity_group()): self.mutable_entity_group().MergeFrom(x.entity_group())
+ if (x.has_owner()): self.mutable_owner().MergeFrom(x.owner())
+ if (x.has_kind()): self.set_kind(x.kind())
+ if (x.has_kind_uri()): self.set_kind_uri(x.kind_uri())
+ for i in xrange(x.property_size()): self.add_property().CopyFrom(x.property(i))
+ for i in xrange(x.raw_property_size()): self.add_raw_property().CopyFrom(x.raw_property(i))
+
+ def Equals(self, x):
+ if x is self: return 1
+ if self.has_key_ != x.has_key_: return 0
+ if self.has_key_ and self.key_ != x.key_: return 0
+ if self.has_entity_group_ != x.has_entity_group_: return 0
+ if self.has_entity_group_ and self.entity_group_ != x.entity_group_: return 0
+ if self.has_owner_ != x.has_owner_: return 0
+ if self.has_owner_ and self.owner_ != x.owner_: return 0
+ if self.has_kind_ != x.has_kind_: return 0
+ if self.has_kind_ and self.kind_ != x.kind_: return 0
+ if self.has_kind_uri_ != x.has_kind_uri_: return 0
+ if self.has_kind_uri_ and self.kind_uri_ != x.kind_uri_: return 0
+ if len(self.property_) != len(x.property_): return 0
+ for e1, e2 in zip(self.property_, x.property_):
+ if e1 != e2: return 0
+ if len(self.raw_property_) != len(x.raw_property_): return 0
+ for e1, e2 in zip(self.raw_property_, x.raw_property_):
+ if e1 != e2: return 0
+ return 1
+
+ def IsInitialized(self, debug_strs=None):
+ initialized = 1
+ if (not self.has_key_):
+ initialized = 0
+ if debug_strs is not None:
+ debug_strs.append('Required field: key not set.')
+ elif not self.key_.IsInitialized(debug_strs): initialized = 0
+ if (not self.has_entity_group_):
+ initialized = 0
+ if debug_strs is not None:
+ debug_strs.append('Required field: entity_group not set.')
+ elif not self.entity_group_.IsInitialized(debug_strs): initialized = 0
+ if (self.has_owner_ and not self.owner_.IsInitialized(debug_strs)): initialized = 0
+ for p in self.property_:
+ if not p.IsInitialized(debug_strs): initialized=0
+ for p in self.raw_property_:
+ if not p.IsInitialized(debug_strs): initialized=0
+ return initialized
+
+ def ByteSize(self):
+ n = 0
+ n += self.lengthString(self.key_.ByteSize())
+ n += self.lengthString(self.entity_group_.ByteSize())
+ if (self.has_owner_): n += 2 + self.lengthString(self.owner_.ByteSize())
+ if (self.has_kind_): n += 1 + self.lengthVarInt64(self.kind_)
+ if (self.has_kind_uri_): n += 1 + self.lengthString(len(self.kind_uri_))
+ n += 1 * len(self.property_)
+ for i in xrange(len(self.property_)): n += self.lengthString(self.property_[i].ByteSize())
+ n += 1 * len(self.raw_property_)
+ for i in xrange(len(self.raw_property_)): n += self.lengthString(self.raw_property_[i].ByteSize())
+ return n + 3
+
+ def Clear(self):
+ self.clear_key()
+ self.clear_entity_group()
+ self.clear_owner()
+ self.clear_kind()
+ self.clear_kind_uri()
+ self.clear_property()
+ self.clear_raw_property()
+
+ def OutputUnchecked(self, out):
+ if (self.has_kind_):
+ out.putVarInt32(32)
+ out.putVarInt32(self.kind_)
+ if (self.has_kind_uri_):
+ out.putVarInt32(42)
+ out.putPrefixedString(self.kind_uri_)
+ out.putVarInt32(106)
+ out.putVarInt32(self.key_.ByteSize())
+ self.key_.OutputUnchecked(out)
+ for i in xrange(len(self.property_)):
+ out.putVarInt32(114)
+ out.putVarInt32(self.property_[i].ByteSize())
+ self.property_[i].OutputUnchecked(out)
+ for i in xrange(len(self.raw_property_)):
+ out.putVarInt32(122)
+ out.putVarInt32(self.raw_property_[i].ByteSize())
+ self.raw_property_[i].OutputUnchecked(out)
+ out.putVarInt32(130)
+ out.putVarInt32(self.entity_group_.ByteSize())
+ self.entity_group_.OutputUnchecked(out)
+ if (self.has_owner_):
+ out.putVarInt32(138)
+ out.putVarInt32(self.owner_.ByteSize())
+ self.owner_.OutputUnchecked(out)
+
+ def TryMerge(self, d):
+ while d.avail() > 0:
+ tt = d.getVarInt32()
+ if tt == 32:
+ self.set_kind(d.getVarInt32())
+ continue
+ if tt == 42:
+ self.set_kind_uri(d.getPrefixedString())
+ continue
+ if tt == 106:
+ length = d.getVarInt32()
+ tmp = ProtocolBuffer.Decoder(d.buffer(), d.pos(), d.pos() + length)
+ d.skip(length)
+ self.mutable_key().TryMerge(tmp)
+ continue
+ if tt == 114:
+ length = d.getVarInt32()
+ tmp = ProtocolBuffer.Decoder(d.buffer(), d.pos(), d.pos() + length)
+ d.skip(length)
+ self.add_property().TryMerge(tmp)
+ continue
+ if tt == 122:
+ length = d.getVarInt32()
+ tmp = ProtocolBuffer.Decoder(d.buffer(), d.pos(), d.pos() + length)
+ d.skip(length)
+ self.add_raw_property().TryMerge(tmp)
+ continue
+ if tt == 130:
+ length = d.getVarInt32()
+ tmp = ProtocolBuffer.Decoder(d.buffer(), d.pos(), d.pos() + length)
+ d.skip(length)
+ self.mutable_entity_group().TryMerge(tmp)
+ continue
+ if tt == 138:
+ length = d.getVarInt32()
+ tmp = ProtocolBuffer.Decoder(d.buffer(), d.pos(), d.pos() + length)
+ d.skip(length)
+ self.mutable_owner().TryMerge(tmp)
+ continue
+ if (tt == 0): raise ProtocolBuffer.ProtocolBufferDecodeError
+ d.skipData(tt)
+
+
+ def __str__(self, prefix="", printElemNumber=0):
+ res=""
+ if self.has_key_:
+ res+=prefix+"key <\n"
+ res+=self.key_.__str__(prefix + " ", printElemNumber)
+ res+=prefix+">\n"
+ if self.has_entity_group_:
+ res+=prefix+"entity_group <\n"
+ res+=self.entity_group_.__str__(prefix + " ", printElemNumber)
+ res+=prefix+">\n"
+ if self.has_owner_:
+ res+=prefix+"owner <\n"
+ res+=self.owner_.__str__(prefix + " ", printElemNumber)
+ res+=prefix+">\n"
+ if self.has_kind_: res+=prefix+("kind: %s\n" % self.DebugFormatInt32(self.kind_))
+ if self.has_kind_uri_: res+=prefix+("kind_uri: %s\n" % self.DebugFormatString(self.kind_uri_))
+ cnt=0
+ for e in self.property_:
+ elm=""
+ if printElemNumber: elm="(%d)" % cnt
+ res+=prefix+("property%s <\n" % elm)
+ res+=e.__str__(prefix + " ", printElemNumber)
+ res+=prefix+">\n"
+ cnt+=1
+ cnt=0
+ for e in self.raw_property_:
+ elm=""
+ if printElemNumber: elm="(%d)" % cnt
+ res+=prefix+("raw_property%s <\n" % elm)
+ res+=e.__str__(prefix + " ", printElemNumber)
+ res+=prefix+">\n"
+ cnt+=1
+ return res
+
+
+ def _BuildTagLookupTable(sparse, maxtag, default=None):
+ return tuple([sparse.get(i, default) for i in xrange(0, 1+maxtag)])
+
+ kkey = 13
+ kentity_group = 16
+ kowner = 17
+ kkind = 4
+ kkind_uri = 5
+ kproperty = 14
+ kraw_property = 15
+
+ _TEXT = _BuildTagLookupTable({
+ 0: "ErrorCode",
+ 4: "kind",
+ 5: "kind_uri",
+ 13: "key",
+ 14: "property",
+ 15: "raw_property",
+ 16: "entity_group",
+ 17: "owner",
+ }, 17)
+
+ _TYPES = _BuildTagLookupTable({
+ 0: ProtocolBuffer.Encoder.NUMERIC,
+ 4: ProtocolBuffer.Encoder.NUMERIC,
+ 5: ProtocolBuffer.Encoder.STRING,
+ 13: ProtocolBuffer.Encoder.STRING,
+ 14: ProtocolBuffer.Encoder.STRING,
+ 15: ProtocolBuffer.Encoder.STRING,
+ 16: ProtocolBuffer.Encoder.STRING,
+ 17: ProtocolBuffer.Encoder.STRING,
+ }, 17, ProtocolBuffer.Encoder.MAX_TYPE)
+
+ _STYLE = """"""
+ _STYLE_CONTENT_TYPE = """"""
+class CompositeProperty(ProtocolBuffer.ProtocolMessage):
+ has_index_id_ = 0
+ index_id_ = 0
+
+ def __init__(self, contents=None):
+ self.value_ = []
+ if contents is not None: self.MergeFromString(contents)
+
+ def index_id(self): return self.index_id_
+
+ def set_index_id(self, x):
+ self.has_index_id_ = 1
+ self.index_id_ = x
+
+ def clear_index_id(self):
+ if self.has_index_id_:
+ self.has_index_id_ = 0
+ self.index_id_ = 0
+
+ def has_index_id(self): return self.has_index_id_
+
+ def value_size(self): return len(self.value_)
+ def value_list(self): return self.value_
+
+ def value(self, i):
+ return self.value_[i]
+
+ def set_value(self, i, x):
+ self.value_[i] = x
+
+ def add_value(self, x):
+ self.value_.append(x)
+
+ def clear_value(self):
+ self.value_ = []
+
+
+ def MergeFrom(self, x):
+ assert x is not self
+ if (x.has_index_id()): self.set_index_id(x.index_id())
+ for i in xrange(x.value_size()): self.add_value(x.value(i))
+
+ def Equals(self, x):
+ if x is self: return 1
+ if self.has_index_id_ != x.has_index_id_: return 0
+ if self.has_index_id_ and self.index_id_ != x.index_id_: return 0
+ if len(self.value_) != len(x.value_): return 0
+ for e1, e2 in zip(self.value_, x.value_):
+ if e1 != e2: return 0
+ return 1
+
+ def IsInitialized(self, debug_strs=None):
+ initialized = 1
+ if (not self.has_index_id_):
+ initialized = 0
+ if debug_strs is not None:
+ debug_strs.append('Required field: index_id not set.')
+ return initialized
+
+ def ByteSize(self):
+ n = 0
+ n += self.lengthVarInt64(self.index_id_)
+ n += 1 * len(self.value_)
+ for i in xrange(len(self.value_)): n += self.lengthString(len(self.value_[i]))
+ return n + 1
+
+ def Clear(self):
+ self.clear_index_id()
+ self.clear_value()
+
+ def OutputUnchecked(self, out):
+ out.putVarInt32(8)
+ out.putVarInt64(self.index_id_)
+ for i in xrange(len(self.value_)):
+ out.putVarInt32(18)
+ out.putPrefixedString(self.value_[i])
+
+ def TryMerge(self, d):
+ while d.avail() > 0:
+ tt = d.getVarInt32()
+ if tt == 8:
+ self.set_index_id(d.getVarInt64())
+ continue
+ if tt == 18:
+ self.add_value(d.getPrefixedString())
+ continue
+ if (tt == 0): raise ProtocolBuffer.ProtocolBufferDecodeError
+ d.skipData(tt)
+
+
+ def __str__(self, prefix="", printElemNumber=0):
+ res=""
+ if self.has_index_id_: res+=prefix+("index_id: %s\n" % self.DebugFormatInt64(self.index_id_))
+ cnt=0
+ for e in self.value_:
+ elm=""
+ if printElemNumber: elm="(%d)" % cnt
+ res+=prefix+("value%s: %s\n" % (elm, self.DebugFormatString(e)))
+ cnt+=1
+ return res
+
+
+ def _BuildTagLookupTable(sparse, maxtag, default=None):
+ return tuple([sparse.get(i, default) for i in xrange(0, 1+maxtag)])
+
+ kindex_id = 1
+ kvalue = 2
+
+ _TEXT = _BuildTagLookupTable({
+ 0: "ErrorCode",
+ 1: "index_id",
+ 2: "value",
+ }, 2)
+
+ _TYPES = _BuildTagLookupTable({
+ 0: ProtocolBuffer.Encoder.NUMERIC,
+ 1: ProtocolBuffer.Encoder.NUMERIC,
+ 2: ProtocolBuffer.Encoder.STRING,
+ }, 2, ProtocolBuffer.Encoder.MAX_TYPE)
+
+ _STYLE = """"""
+ _STYLE_CONTENT_TYPE = """"""
+class Index_Property(ProtocolBuffer.ProtocolMessage):
+
+ ASCENDING = 1
+ DESCENDING = 2
+
+ _Direction_NAMES = {
+ 1: "ASCENDING",
+ 2: "DESCENDING",
+ }
+
+ def Direction_Name(cls, x): return cls._Direction_NAMES.get(x, "")
+ Direction_Name = classmethod(Direction_Name)
+
+ has_name_ = 0
+ name_ = ""
+ has_direction_ = 0
+ direction_ = 1
+
+ def __init__(self, contents=None):
+ if contents is not None: self.MergeFromString(contents)
+
+ def name(self): return self.name_
+
+ def set_name(self, x):
+ self.has_name_ = 1
+ self.name_ = x
+
+ def clear_name(self):
+ if self.has_name_:
+ self.has_name_ = 0
+ self.name_ = ""
+
+ def has_name(self): return self.has_name_
+
+ def direction(self): return self.direction_
+
+ def set_direction(self, x):
+ self.has_direction_ = 1
+ self.direction_ = x
+
+ def clear_direction(self):
+ if self.has_direction_:
+ self.has_direction_ = 0
+ self.direction_ = 1
+
+ def has_direction(self): return self.has_direction_
+
+
+ def MergeFrom(self, x):
+ assert x is not self
+ if (x.has_name()): self.set_name(x.name())
+ if (x.has_direction()): self.set_direction(x.direction())
+
+ def Equals(self, x):
+ if x is self: return 1
+ if self.has_name_ != x.has_name_: return 0
+ if self.has_name_ and self.name_ != x.name_: return 0
+ if self.has_direction_ != x.has_direction_: return 0
+ if self.has_direction_ and self.direction_ != x.direction_: return 0
+ return 1
+
+ def IsInitialized(self, debug_strs=None):
+ initialized = 1
+ if (not self.has_name_):
+ initialized = 0
+ if debug_strs is not None:
+ debug_strs.append('Required field: name not set.')
+ return initialized
+
+ def ByteSize(self):
+ n = 0
+ n += self.lengthString(len(self.name_))
+ if (self.has_direction_): n += 1 + self.lengthVarInt64(self.direction_)
+ return n + 1
+
+ def Clear(self):
+ self.clear_name()
+ self.clear_direction()
+
+ def OutputUnchecked(self, out):
+ out.putVarInt32(26)
+ out.putPrefixedString(self.name_)
+ if (self.has_direction_):
+ out.putVarInt32(32)
+ out.putVarInt32(self.direction_)
+
+ def TryMerge(self, d):
+ while 1:
+ tt = d.getVarInt32()
+ if tt == 20: break
+ if tt == 26:
+ self.set_name(d.getPrefixedString())
+ continue
+ if tt == 32:
+ self.set_direction(d.getVarInt32())
+ continue
+ if (tt == 0): raise ProtocolBuffer.ProtocolBufferDecodeError
+ d.skipData(tt)
+
+
+ def __str__(self, prefix="", printElemNumber=0):
+ res=""
+ if self.has_name_: res+=prefix+("name: %s\n" % self.DebugFormatString(self.name_))
+ if self.has_direction_: res+=prefix+("direction: %s\n" % self.DebugFormatInt32(self.direction_))
+ return res
+
+class Index(ProtocolBuffer.ProtocolMessage):
+ has_entity_type_ = 0
+ entity_type_ = ""
+ has_ancestor_ = 0
+ ancestor_ = 0
+
+ def __init__(self, contents=None):
+ self.property_ = []
+ if contents is not None: self.MergeFromString(contents)
+
+ def entity_type(self): return self.entity_type_
+
+ def set_entity_type(self, x):
+ self.has_entity_type_ = 1
+ self.entity_type_ = x
+
+ def clear_entity_type(self):
+ if self.has_entity_type_:
+ self.has_entity_type_ = 0
+ self.entity_type_ = ""
+
+ def has_entity_type(self): return self.has_entity_type_
+
+ def ancestor(self): return self.ancestor_
+
+ def set_ancestor(self, x):
+ self.has_ancestor_ = 1
+ self.ancestor_ = x
+
+ def clear_ancestor(self):
+ if self.has_ancestor_:
+ self.has_ancestor_ = 0
+ self.ancestor_ = 0
+
+ def has_ancestor(self): return self.has_ancestor_
+
+ def property_size(self): return len(self.property_)
+ def property_list(self): return self.property_
+
+ def property(self, i):
+ return self.property_[i]
+
+ def mutable_property(self, i):
+ return self.property_[i]
+
+ def add_property(self):
+ x = Index_Property()
+ self.property_.append(x)
+ return x
+
+ def clear_property(self):
+ self.property_ = []
+
+ def MergeFrom(self, x):
+ assert x is not self
+ if (x.has_entity_type()): self.set_entity_type(x.entity_type())
+ if (x.has_ancestor()): self.set_ancestor(x.ancestor())
+ for i in xrange(x.property_size()): self.add_property().CopyFrom(x.property(i))
+
+ def Equals(self, x):
+ if x is self: return 1
+ if self.has_entity_type_ != x.has_entity_type_: return 0
+ if self.has_entity_type_ and self.entity_type_ != x.entity_type_: return 0
+ if self.has_ancestor_ != x.has_ancestor_: return 0
+ if self.has_ancestor_ and self.ancestor_ != x.ancestor_: return 0
+ if len(self.property_) != len(x.property_): return 0
+ for e1, e2 in zip(self.property_, x.property_):
+ if e1 != e2: return 0
+ return 1
+
+ def IsInitialized(self, debug_strs=None):
+ initialized = 1
+ if (not self.has_entity_type_):
+ initialized = 0
+ if debug_strs is not None:
+ debug_strs.append('Required field: entity_type not set.')
+ if (not self.has_ancestor_):
+ initialized = 0
+ if debug_strs is not None:
+ debug_strs.append('Required field: ancestor not set.')
+ for p in self.property_:
+ if not p.IsInitialized(debug_strs): initialized=0
+ return initialized
+
+ def ByteSize(self):
+ n = 0
+ n += self.lengthString(len(self.entity_type_))
+ n += 2 * len(self.property_)
+ for i in xrange(len(self.property_)): n += self.property_[i].ByteSize()
+ return n + 3
+
+ def Clear(self):
+ self.clear_entity_type()
+ self.clear_ancestor()
+ self.clear_property()
+
+ def OutputUnchecked(self, out):
+ out.putVarInt32(10)
+ out.putPrefixedString(self.entity_type_)
+ for i in xrange(len(self.property_)):
+ out.putVarInt32(19)
+ self.property_[i].OutputUnchecked(out)
+ out.putVarInt32(20)
+ out.putVarInt32(40)
+ out.putBoolean(self.ancestor_)
+
+ def TryMerge(self, d):
+ while d.avail() > 0:
+ tt = d.getVarInt32()
+ if tt == 10:
+ self.set_entity_type(d.getPrefixedString())
+ continue
+ if tt == 19:
+ self.add_property().TryMerge(d)
+ continue
+ if tt == 40:
+ self.set_ancestor(d.getBoolean())
+ continue
+ if (tt == 0): raise ProtocolBuffer.ProtocolBufferDecodeError
+ d.skipData(tt)
+
+
+ def __str__(self, prefix="", printElemNumber=0):
+ res=""
+ if self.has_entity_type_: res+=prefix+("entity_type: %s\n" % self.DebugFormatString(self.entity_type_))
+ if self.has_ancestor_: res+=prefix+("ancestor: %s\n" % self.DebugFormatBool(self.ancestor_))
+ cnt=0
+ for e in self.property_:
+ elm=""
+ if printElemNumber: elm="(%d)" % cnt
+ res+=prefix+("Property%s {\n" % elm)
+ res+=e.__str__(prefix + " ", printElemNumber)
+ res+=prefix+"}\n"
+ cnt+=1
+ return res
+
+
+ def _BuildTagLookupTable(sparse, maxtag, default=None):
+ return tuple([sparse.get(i, default) for i in xrange(0, 1+maxtag)])
+
+ kentity_type = 1
+ kancestor = 5
+ kPropertyGroup = 2
+ kPropertyname = 3
+ kPropertydirection = 4
+
+ _TEXT = _BuildTagLookupTable({
+ 0: "ErrorCode",
+ 1: "entity_type",
+ 2: "Property",
+ 3: "name",
+ 4: "direction",
+ 5: "ancestor",
+ }, 5)
+
+ _TYPES = _BuildTagLookupTable({
+ 0: ProtocolBuffer.Encoder.NUMERIC,
+ 1: ProtocolBuffer.Encoder.STRING,
+ 2: ProtocolBuffer.Encoder.STARTGROUP,
+ 3: ProtocolBuffer.Encoder.STRING,
+ 4: ProtocolBuffer.Encoder.NUMERIC,
+ 5: ProtocolBuffer.Encoder.NUMERIC,
+ }, 5, ProtocolBuffer.Encoder.MAX_TYPE)
+
+ _STYLE = """"""
+ _STYLE_CONTENT_TYPE = """"""
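A brief sketch of building an Index definition through the generated accessors; the kind 'Greeting' and property name 'date' are illustrative only. Note that Property is a group (STARTGROUP tag 19, ENDGROUP tag 20), so elements are written inline rather than length-prefixed:

idx = Index()
idx.set_entity_type('Greeting')   # required
idx.set_ancestor(0)               # required; 0 means no ancestor constraint
prop = idx.add_property()
prop.set_name('date')
prop.set_direction(Index_Property.DESCENDING)
assert idx.IsInitialized()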
+class CompositeIndex(ProtocolBuffer.ProtocolMessage):
+
+ WRITE_ONLY = 1
+ READ_WRITE = 2
+ DELETED = 3
+ ERROR = 4
+
+ _State_NAMES = {
+ 1: "WRITE_ONLY",
+ 2: "READ_WRITE",
+ 3: "DELETED",
+ 4: "ERROR",
+ }
+
+ def State_Name(cls, x): return cls._State_NAMES.get(x, "")
+ State_Name = classmethod(State_Name)
+
+ has_app_id_ = 0
+ app_id_ = ""
+ has_id_ = 0
+ id_ = 0
+ has_definition_ = 0
+ has_state_ = 0
+ state_ = 0
+
+ def __init__(self, contents=None):
+ self.definition_ = Index()
+ if contents is not None: self.MergeFromString(contents)
+
+ def app_id(self): return self.app_id_
+
+ def set_app_id(self, x):
+ self.has_app_id_ = 1
+ self.app_id_ = x
+
+ def clear_app_id(self):
+ if self.has_app_id_:
+ self.has_app_id_ = 0
+ self.app_id_ = ""
+
+ def has_app_id(self): return self.has_app_id_
+
+ def id(self): return self.id_
+
+ def set_id(self, x):
+ self.has_id_ = 1
+ self.id_ = x
+
+ def clear_id(self):
+ if self.has_id_:
+ self.has_id_ = 0
+ self.id_ = 0
+
+ def has_id(self): return self.has_id_
+
+ def definition(self): return self.definition_
+
+ def mutable_definition(self): self.has_definition_ = 1; return self.definition_
+
+ def clear_definition(self):self.has_definition_ = 0; self.definition_.Clear()
+
+ def has_definition(self): return self.has_definition_
+
+ def state(self): return self.state_
+
+ def set_state(self, x):
+ self.has_state_ = 1
+ self.state_ = x
+
+ def clear_state(self):
+ if self.has_state_:
+ self.has_state_ = 0
+ self.state_ = 0
+
+ def has_state(self): return self.has_state_
+
+
+ def MergeFrom(self, x):
+ assert x is not self
+ if (x.has_app_id()): self.set_app_id(x.app_id())
+ if (x.has_id()): self.set_id(x.id())
+ if (x.has_definition()): self.mutable_definition().MergeFrom(x.definition())
+ if (x.has_state()): self.set_state(x.state())
+
+ def Equals(self, x):
+ if x is self: return 1
+ if self.has_app_id_ != x.has_app_id_: return 0
+ if self.has_app_id_ and self.app_id_ != x.app_id_: return 0
+ if self.has_id_ != x.has_id_: return 0
+ if self.has_id_ and self.id_ != x.id_: return 0
+ if self.has_definition_ != x.has_definition_: return 0
+ if self.has_definition_ and self.definition_ != x.definition_: return 0
+ if self.has_state_ != x.has_state_: return 0
+ if self.has_state_ and self.state_ != x.state_: return 0
+ return 1
+
+ def IsInitialized(self, debug_strs=None):
+ initialized = 1
+ if (not self.has_app_id_):
+ initialized = 0
+ if debug_strs is not None:
+ debug_strs.append('Required field: app_id not set.')
+ if (not self.has_id_):
+ initialized = 0
+ if debug_strs is not None:
+ debug_strs.append('Required field: id not set.')
+ if (not self.has_definition_):
+ initialized = 0
+ if debug_strs is not None:
+ debug_strs.append('Required field: definition not set.')
+ elif not self.definition_.IsInitialized(debug_strs): initialized = 0
+ if (not self.has_state_):
+ initialized = 0
+ if debug_strs is not None:
+ debug_strs.append('Required field: state not set.')
+ return initialized
+
+ def ByteSize(self):
+ n = 0
+ n += self.lengthString(len(self.app_id_))
+ n += self.lengthVarInt64(self.id_)
+ n += self.lengthString(self.definition_.ByteSize())
+ n += self.lengthVarInt64(self.state_)
+ return n + 4
+
+ def Clear(self):
+ self.clear_app_id()
+ self.clear_id()
+ self.clear_definition()
+ self.clear_state()
+
+ def OutputUnchecked(self, out):
+ out.putVarInt32(10)
+ out.putPrefixedString(self.app_id_)
+ out.putVarInt32(16)
+ out.putVarInt64(self.id_)
+ out.putVarInt32(26)
+ out.putVarInt32(self.definition_.ByteSize())
+ self.definition_.OutputUnchecked(out)
+ out.putVarInt32(32)
+ out.putVarInt32(self.state_)
+
+ def TryMerge(self, d):
+ while d.avail() > 0:
+ tt = d.getVarInt32()
+ if tt == 10:
+ self.set_app_id(d.getPrefixedString())
+ continue
+ if tt == 16:
+ self.set_id(d.getVarInt64())
+ continue
+ if tt == 26:
+ length = d.getVarInt32()
+ tmp = ProtocolBuffer.Decoder(d.buffer(), d.pos(), d.pos() + length)
+ d.skip(length)
+ self.mutable_definition().TryMerge(tmp)
+ continue
+ if tt == 32:
+ self.set_state(d.getVarInt32())
+ continue
+ if (tt == 0): raise ProtocolBuffer.ProtocolBufferDecodeError
+ d.skipData(tt)
+
+
+ def __str__(self, prefix="", printElemNumber=0):
+ res=""
+ if self.has_app_id_: res+=prefix+("app_id: %s\n" % self.DebugFormatString(self.app_id_))
+ if self.has_id_: res+=prefix+("id: %s\n" % self.DebugFormatInt64(self.id_))
+ if self.has_definition_:
+ res+=prefix+"definition <\n"
+ res+=self.definition_.__str__(prefix + " ", printElemNumber)
+ res+=prefix+">\n"
+ if self.has_state_: res+=prefix+("state: %s\n" % self.DebugFormatInt32(self.state_))
+ return res
+
+
+ def _BuildTagLookupTable(sparse, maxtag, default=None):
+ return tuple([sparse.get(i, default) for i in xrange(0, 1+maxtag)])
+
+ kapp_id = 1
+ kid = 2
+ kdefinition = 3
+ kstate = 4
+
+ _TEXT = _BuildTagLookupTable({
+ 0: "ErrorCode",
+ 1: "app_id",
+ 2: "id",
+ 3: "definition",
+ 4: "state",
+ }, 4)
+
+ _TYPES = _BuildTagLookupTable({
+ 0: ProtocolBuffer.Encoder.NUMERIC,
+ 1: ProtocolBuffer.Encoder.STRING,
+ 2: ProtocolBuffer.Encoder.NUMERIC,
+ 3: ProtocolBuffer.Encoder.STRING,
+ 4: ProtocolBuffer.Encoder.NUMERIC,
+ }, 4, ProtocolBuffer.Encoder.MAX_TYPE)
+
+ _STYLE = """"""
+ _STYLE_CONTENT_TYPE = """"""
+
+__all__ = ['PropertyValue','PropertyValue_ReferenceValuePathElement','PropertyValue_PointValue','PropertyValue_UserValue','PropertyValue_ReferenceValue','Property','Path','Path_Element','Reference','User','EntityProto','CompositeProperty','Index','Index_Property','CompositeIndex']
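Taken together, the classes above round-trip a datastore entity through the wire format. A minimal sketch (the app id and kind are illustrative; Encode() and CopyFrom() are assumed from the ProtocolMessage base class):

from google.appengine.datastore import entity_pb

ref = entity_pb.Reference()
ref.set_app('example-app')
elem = ref.mutable_path().add_element()
elem.set_type('Greeting')
elem.set_id(42)

entity = entity_pb.EntityProto()
entity.mutable_key().CopyFrom(ref)
entity.mutable_entity_group().CopyFrom(ref.path())
assert entity.IsInitialized()

data = entity.Encode()               # serialized bytes
clone = entity_pb.EntityProto(data)  # the constructor parses via MergeFromString
assert clone.Equals(entity)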
diff --git a/google_appengine/google/appengine/datastore/entity_pb.pyc b/google_appengine/google/appengine/datastore/entity_pb.pyc
new file mode 100644
index 0000000..1e463c9
--- /dev/null
+++ b/google_appengine/google/appengine/datastore/entity_pb.pyc
Binary files differ
diff --git a/google_appengine/google/appengine/dist/__init__.py b/google_appengine/google/appengine/dist/__init__.py
new file mode 100755
index 0000000..237b487
--- /dev/null
+++ b/google_appengine/google/appengine/dist/__init__.py
@@ -0,0 +1,36 @@
+#!/usr/bin/env python
+#
+# Copyright 2007 Google Inc.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
+
+"""Specify the modules for which a stub exists."""
+
+__all__ = [
+
+ 'ftplib',
+ 'httplib',
+ 'neo_cgi',
+ 'py_imp',
+ 'select',
+ 'socket',
+ 'subprocess',
+ 'tempfile',
+
+ 'use_library',
+ ]
+
+from google.appengine.dist import _library
+
+use_library = _library.use_library
diff --git a/google_appengine/google/appengine/dist/__init__.pyc b/google_appengine/google/appengine/dist/__init__.pyc
new file mode 100644
index 0000000..aa8ac0d
--- /dev/null
+++ b/google_appengine/google/appengine/dist/__init__.pyc
Binary files differ
diff --git a/google_appengine/google/appengine/dist/_library.py b/google_appengine/google/appengine/dist/_library.py
new file mode 100755
index 0000000..a0148e5
--- /dev/null
+++ b/google_appengine/google/appengine/dist/_library.py
@@ -0,0 +1,284 @@
+#!/usr/bin/env python
+#
+# Copyright 2007 Google Inc.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
+
+"""Code to exist off of google.appengine.dist.
+
+Kept in a separate file from the __init__ module for testing purposes.
+"""
+
+
+__all__ = ['use_library']
+
+
+import distutils.version
+import os
+import sys
+
+server_software = os.getenv('SERVER_SOFTWARE')
+USING_SDK = not server_software or server_software.startswith('Dev')
+del server_software
+
+if not USING_SDK:
+ import google
+ this_version = os.path.dirname(os.path.dirname(google.__file__))
+ versions = os.path.dirname(this_version)
+ PYTHON_LIB = os.path.dirname(versions)
+ del google, this_version, versions
+else:
+ PYTHON_LIB = '/base/python_lib'
+
+installed = {}
+
+
+def SetAllowedModule(_):
+ pass
+
+
+class UnacceptableVersionError(Exception):
+ """Raised when a version of a package that is unacceptable is requested."""
+ pass
+
+
+def DjangoVersion():
+ """Discover the version of Django installed.
+
+ Returns:
+ A distutils.version.LooseVersion.
+ """
+ import django
+ return distutils.version.LooseVersion('.'.join(map(str, django.VERSION)))
+
+
+def PylonsVersion():
+ """Discover the version of Pylons installed.
+
+ Returns:
+ A distutils.version.LooseVersion.
+ """
+ import pylons
+ return distutils.version.LooseVersion(pylons.__version__)
+
+
+PACKAGES = {
+ 'django': (DjangoVersion,
+ {'0.96': None,
+ '1.0': None,
+ '1.1': None,
+ }),
+
+ '_test': (lambda: distutils.version.LooseVersion('1.0'), {'1.0': None}),
+ '_testpkg': (lambda: distutils.version.LooseVersion('1.0'),
+ {'1.0': set([('_test', '1.0')])}),
+ }
+
+
+def EqualVersions(version, baseline):
+ """Test that a version is acceptable as compared to the baseline.
+
+ Meant to be used to compare version numbers as returned by a package itself
+ and not user input.
+
+ Args:
+ version: distutils.version.LooseVersion.
+ The version that is being checked.
+ baseline: distutils.version.LooseVersion.
+ The version that one hopes version compares equal to.
+
+ Returns:
+ A bool indicating whether the versions are considered equal.
+ """
+ baseline_tuple = baseline.version
+ truncated_tuple = version.version[:len(baseline_tuple)]
+  return truncated_tuple == baseline_tuple
+
+
+def AllowInstalledLibrary(name, desired):
+ """Allow the use of a package without performing a version check.
+
+ Needed to clear a package's dependencies in case the dependencies need to be
+ imported in order to perform a version check. The version check is skipped on
+ the dependencies because the assumption is that the package that triggered
+ the call would not be installed without the proper dependencies (which might
+ be a different version than what the package explicitly requires).
+
+ Args:
+ name: Name of package.
+ desired: Desired version.
+
+ Raises:
+    UnacceptableVersionError if the installed version of a package is
+    unacceptable.
+ """
+  CallSetAllowedModule(name, desired)
+ dependencies = PACKAGES[name][1][desired]
+ if dependencies:
+ for dep_name, dep_version in dependencies:
+ AllowInstalledLibrary(dep_name, dep_version)
+ installed[name] = desired, False
+
+
+def CheckInstalledLibrary(name, desired):
+ """Check that the library and its dependencies are installed.
+
+ Args:
+ name: Name of the library that should be installed.
+ desired: The desired version.
+
+ Raises:
+ UnacceptableVersionError if the installed version of a package is
+ unacceptable.
+ """
+ dependencies = PACKAGES[name][1][desired]
+ if dependencies:
+ for dep_name, dep_version in dependencies:
+ AllowInstalledLibrary(dep_name, dep_version)
+ CheckInstalledVersion(name, desired, explicit=True)
+
+
+def CheckInstalledVersion(name, desired, explicit):
+ """Check that the installed version of a package is acceptable.
+
+ Args:
+ name: Name of package.
+ desired: Desired version string.
+ explicit: Explicitly requested by the user or implicitly because of a
+ dependency.
+
+ Raises:
+ UnacceptableVersionError if the installed version of a package is
+ unacceptable.
+ """
+ CallSetAllowedModule(name, desired)
+ find_version = PACKAGES[name][0]
+ installed_version = find_version()
+ desired_version = distutils.version.LooseVersion(desired)
+ if not EqualVersions(installed_version, desired_version):
+ raise UnacceptableVersionError(
+ '%s %s was requested, but %s is already in use' %
+ (name, desired_version, installed_version))
+ installed[name] = desired, explicit
+
+
+def CallSetAllowedModule(name, desired):
+ """Helper to call SetAllowedModule(name), after special-casing Django."""
+ if name == 'django' and desired != '0.96':
+ tail = os.path.join('lib', 'django')
+ sys.path[:] = [dirname
+ for dirname in sys.path
+ if not dirname.endswith(tail)]
+ SetAllowedModule(name)
+
+
+def CreatePath(name, version):
+ """Create the path to a package."""
+ package_dir = '%s-%s' % (name, version)
+ return os.path.join(PYTHON_LIB, 'versions', 'third_party', package_dir)
+
+
+def RemoveLibrary(name):
+ """Remove a library that has been installed."""
+ installed_version, _ = installed[name]
+ path = CreatePath(name, installed_version)
+ try:
+ sys.path.remove(path)
+ except ValueError:
+ pass
+ del installed[name]
+
+
+def AddLibrary(name, version, explicit):
+ """Add a library to sys.path and 'installed'."""
+ sys.path.insert(1, CreatePath(name, version))
+ installed[name] = version, explicit
+
+
+def InstallLibrary(name, version, explicit=True):
+ """Install a package.
+
+ If the installation is explicit then the user made the installation request,
+ not a package as a dependency. Explicit installation leads to stricter
+ version checking.
+
+ Args:
+ name: Name of the requested package (already validated as available).
+ version: The desired version (already validated as available).
+ explicit: Explicitly requested by the user or implicitly because of a
+ dependency.
+ """
+ installed_version, explicitly_installed = installed.get(name, [None] * 2)
+ if name in sys.modules:
+ if explicit:
+ CheckInstalledVersion(name, version, explicit=True)
+ return
+ elif installed_version:
+ if version == installed_version:
+ return
+ if explicit:
+ if explicitly_installed:
+ raise ValueError('%s %s requested, but %s already in use' %
+ (name, version, installed_version))
+ RemoveLibrary(name)
+ else:
+ version_ob = distutils.version.LooseVersion(version)
+ installed_ob = distutils.version.LooseVersion(installed_version)
+ if version_ob <= installed_ob:
+ return
+ else:
+ RemoveLibrary(name)
+ AddLibrary(name, version, explicit)
+ dep_details = PACKAGES[name][1][version]
+ if not dep_details:
+ return
+ for dep_name, dep_version in dep_details:
+ InstallLibrary(dep_name, dep_version, explicit=False)
+
+
+def use_library(name, version):
+ """Specify a third-party package to use.
+
+ Args:
+ name: Name of package to use.
+ version: Version of the package to use (string).
+ """
+ if name not in PACKAGES:
+ raise ValueError('%s is not a supported package' % name)
+ versions = PACKAGES[name][1].keys()
+ if version not in versions:
+ raise ValueError('%s is not a supported version for %s; '
+ 'supported versions are %s' % (version, name, versions))
+ if USING_SDK:
+ CheckInstalledLibrary(name, version)
+ else:
+ InstallLibrary(name, version, explicit=True)
+
+
+if not USING_SDK:
+ InstallLibrary('django', '0.96', explicit=False)
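use_library is the public entry point here: it validates the package name and version against PACKAGES, then either checks the already-importable copy (dev server) or splices the versioned directory into sys.path (production). The usual calling pattern, before the framework is first imported:

from google.appengine.dist import use_library
use_library('django', '1.1')

import django  # now resolves to Django 1.1 instead of the default 0.96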
diff --git a/google_appengine/google/appengine/dist/_library.pyc b/google_appengine/google/appengine/dist/_library.pyc
new file mode 100644
index 0000000..457aed8
--- /dev/null
+++ b/google_appengine/google/appengine/dist/_library.pyc
Binary files differ
diff --git a/google_appengine/google/appengine/dist/ftplib.py b/google_appengine/google/appengine/dist/ftplib.py
new file mode 100755
index 0000000..c33ae80
--- /dev/null
+++ b/google_appengine/google/appengine/dist/ftplib.py
@@ -0,0 +1,16 @@
+#!/usr/bin/env python
+#
+# Copyright 2007 Google Inc.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
diff --git a/google_appengine/google/appengine/dist/httplib.py b/google_appengine/google/appengine/dist/httplib.py
new file mode 100755
index 0000000..c1bee3a
--- /dev/null
+++ b/google_appengine/google/appengine/dist/httplib.py
@@ -0,0 +1,388 @@
+#!/usr/bin/env python
+#
+# Copyright 2007 Google Inc.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
+"""Copyright 2008 Python Software Foundation, Ian Bicking, and Google."""
+
+import mimetools
+import StringIO
+import sys
+
+
+CONTINUE = 100
+SWITCHING_PROTOCOLS = 101
+PROCESSING = 102
+OK = 200
+CREATED = 201
+ACCEPTED = 202
+NON_AUTHORITATIVE_INFORMATION = 203
+NO_CONTENT = 204
+RESET_CONTENT = 205
+PARTIAL_CONTENT = 206
+MULTI_STATUS = 207
+IM_USED = 226
+MULTIPLE_CHOICES = 300
+MOVED_PERMANENTLY = 301
+FOUND = 302
+SEE_OTHER = 303
+NOT_MODIFIED = 304
+USE_PROXY = 305
+TEMPORARY_REDIRECT = 307
+BAD_REQUEST = 400
+UNAUTHORIZED = 401
+PAYMENT_REQUIRED = 402
+FORBIDDEN = 403
+NOT_FOUND = 404
+METHOD_NOT_ALLOWED = 405
+NOT_ACCEPTABLE = 406
+PROXY_AUTHENTICATION_REQUIRED = 407
+REQUEST_TIMEOUT = 408
+CONFLICT = 409
+GONE = 410
+LENGTH_REQUIRED = 411
+PRECONDITION_FAILED = 412
+REQUEST_ENTITY_TOO_LARGE = 413
+REQUEST_URI_TOO_LONG = 414
+UNSUPPORTED_MEDIA_TYPE = 415
+REQUESTED_RANGE_NOT_SATISFIABLE = 416
+EXPECTATION_FAILED = 417
+UNPROCESSABLE_ENTITY = 422
+LOCKED = 423
+FAILED_DEPENDENCY = 424
+UPGRADE_REQUIRED = 426
+INTERNAL_SERVER_ERROR = 500
+NOT_IMPLEMENTED = 501
+BAD_GATEWAY = 502
+SERVICE_UNAVAILABLE = 503
+GATEWAY_TIMEOUT = 504
+HTTP_VERSION_NOT_SUPPORTED = 505
+INSUFFICIENT_STORAGE = 507
+NOT_EXTENDED = 510
+
+responses = {
+ 100: 'Continue',
+ 101: 'Switching Protocols',
+
+ 200: 'OK',
+ 201: 'Created',
+ 202: 'Accepted',
+ 203: 'Non-Authoritative Information',
+ 204: 'No Content',
+ 205: 'Reset Content',
+ 206: 'Partial Content',
+
+ 300: 'Multiple Choices',
+ 301: 'Moved Permanently',
+ 302: 'Found',
+ 303: 'See Other',
+ 304: 'Not Modified',
+ 305: 'Use Proxy',
+ 306: '(Unused)',
+ 307: 'Temporary Redirect',
+
+ 400: 'Bad Request',
+ 401: 'Unauthorized',
+ 402: 'Payment Required',
+ 403: 'Forbidden',
+ 404: 'Not Found',
+ 405: 'Method Not Allowed',
+ 406: 'Not Acceptable',
+ 407: 'Proxy Authentication Required',
+ 408: 'Request Timeout',
+ 409: 'Conflict',
+ 410: 'Gone',
+ 411: 'Length Required',
+ 412: 'Precondition Failed',
+ 413: 'Request Entity Too Large',
+ 414: 'Request-URI Too Long',
+ 415: 'Unsupported Media Type',
+ 416: 'Requested Range Not Satisfiable',
+ 417: 'Expectation Failed',
+
+ 500: 'Internal Server Error',
+ 501: 'Not Implemented',
+ 502: 'Bad Gateway',
+ 503: 'Service Unavailable',
+ 504: 'Gateway Timeout',
+ 505: 'HTTP Version Not Supported',
+}
+
+HTTP_PORT = 80
+HTTPS_PORT = 443
+
+
+class HTTPConnection:
+
+ protocol = 'http'
+ default_port = HTTP_PORT
+ _allow_truncated = True
+ _follow_redirects = False
+
+ def __init__(self, host, port=None, strict=False, timeout=None):
+ from google.appengine.api import urlfetch
+ self._fetch = urlfetch.fetch
+ self._method_map = {
+ 'GET': urlfetch.GET,
+ 'POST': urlfetch.POST,
+ 'HEAD': urlfetch.HEAD,
+ 'PUT': urlfetch.PUT,
+ 'DELETE': urlfetch.DELETE,
+ }
+ self.host = host
+ self.port = port
+ self._method = self._url = None
+ self._body = ''
+ self.headers = []
+
+ def connect(self):
+ pass
+
+ def request(self, method, url, body=None, headers=None):
+ self._method = method
+ self._url = url
+ try:
+ self._body = body.read()
+ except AttributeError:
+ self._body = body
+ if headers is None:
+ headers = []
+ elif hasattr(headers, 'items'):
+ headers = headers.items()
+ self.headers = headers
+
+ def putrequest(self, request, selector, skip_host=False, skip_accept_encoding=False):
+ self._method = request
+ self._url = selector
+
+ def putheader(self, header, *lines):
+ line = '\r\n\t'.join([str(line) for line in lines])
+ self.headers.append((header, line))
+
+ def endheaders(self):
+ pass
+
+ def set_debuglevel(self, level=None):
+ pass
+
+ def send(self, data):
+ self._body += data
+
+ def getresponse(self):
+ if self.port and self.port != self.default_port:
+ host = '%s:%s' % (self.host, self.port)
+ else:
+ host = self.host
+ if not self._url.startswith(self.protocol):
+ url = '%s://%s%s' % (self.protocol, host, self._url)
+ else:
+ url = self._url
+ headers = dict(self.headers)
+
+ try:
+ method = self._method_map[self._method.upper()]
+ except KeyError:
+ raise ValueError("%r is an unrecognized HTTP method" % self._method)
+
+ response = self._fetch(url, self._body, method, headers,
+ self._allow_truncated, self._follow_redirects)
+ return HTTPResponse(response)
+
+ def close(self):
+ pass
+
+
+class HTTPSConnection(HTTPConnection):
+
+ protocol = 'https'
+ default_port = HTTPS_PORT
+
+ def __init__(self, host, port=None, key_file=None, cert_file=None,
+ strict=False, timeout=None):
+ if key_file is not None or cert_file is not None:
+ raise NotImplementedError(
+ "key_file and cert_file arguments are not implemented")
+ HTTPConnection.__init__(self, host, port=port, strict=strict,
+ timeout=timeout)
+
+
+class HTTPResponse(object):
+
+ def __init__(self, fetch_response):
+ self._fetch_response = fetch_response
+ self.fp = StringIO.StringIO(fetch_response.content)
+
+ def __getattr__(self, attr):
+ return getattr(self.fp, attr)
+
+ def getheader(self, name, default=None):
+ return self._fetch_response.headers.get(name, default)
+
+ def getheaders(self):
+ return self._fetch_response.headers.items()
+
+ @property
+ def msg(self):
+ msg = mimetools.Message(StringIO.StringIO(''))
+ for name, value in self._fetch_response.headers.items():
+ msg[name] = str(value)
+ return msg
+
+ version = 11
+
+ @property
+ def status(self):
+ return self._fetch_response.status_code
+
+ @property
+ def reason(self):
+ return responses.get(self._fetch_response.status_code, 'Unknown')
+
+
+class HTTP:
+ "Compatibility class with httplib.py from 1.5."
+
+ _http_vsn = 11
+ _http_vsn_str = 'HTTP/1.1'
+
+ debuglevel = 0
+
+ _connection_class = HTTPConnection
+
+ def __init__(self, host='', port=None, strict=None):
+ "Provide a default host, since the superclass requires one."
+
+ if port == 0:
+ port = None
+
+ self._setup(self._connection_class(host, port, strict))
+
+ def _setup(self, conn):
+ self._conn = conn
+
+ self.send = conn.send
+ self.putrequest = conn.putrequest
+ self.endheaders = conn.endheaders
+ self.set_debuglevel = conn.set_debuglevel
+
+ conn._http_vsn = self._http_vsn
+ conn._http_vsn_str = self._http_vsn_str
+
+ self.file = None
+
+ def connect(self, host=None, port=None):
+ "Accept arguments to set the host/port, since the superclass doesn't."
+ self.__init__(host, port)
+
+ def getfile(self):
+ "Provide a getfile, since the superclass' does not use this concept."
+ return self.file
+
+ def putheader(self, header, *values):
+ "The superclass allows only one value argument."
+ self._conn.putheader(header, '\r\n\t'.join([str(v) for v in values]))
+
+ def getreply(self):
+ """Compat definition since superclass does not define it.
+
+ Returns a tuple consisting of:
+ - server status code (e.g. '200' if all goes well)
+ - server "reason" corresponding to status code
+ - any RFC822 headers in the response from the server
+ """
+ response = self._conn.getresponse()
+
+ self.headers = response.msg
+ self.file = response.fp
+ return response.status, response.reason, response.msg
+
+ def close(self):
+ self._conn.close()
+
+ self.file = None
+
+
+class HTTPS(HTTP):
+ """Compatibility with 1.5 httplib interface
+
+ Python 1.5.2 did not have an HTTPS class, but it defined an
+ interface for sending http requests that is also useful for
+ https.
+ """
+
+ _connection_class = HTTPSConnection
+
+ def __init__(self, host='', port=None, key_file=None, cert_file=None,
+ strict=None):
+ if key_file is not None or cert_file is not None:
+ raise NotImplementedError(
+ "key_file and cert_file arguments are not implemented")
+
+ if port == 0:
+ port = None
+ self._setup(self._connection_class(host, port, key_file,
+ cert_file, strict))
+
+ self.key_file = key_file
+ self.cert_file = cert_file
+
+
+class HTTPException(Exception):
+ pass
+
+class NotConnected(HTTPException):
+ pass
+
+class InvalidURL(HTTPException):
+ pass
+
+class UnknownProtocol(HTTPException):
+ def __init__(self, version):
+ self.version = version
+ HTTPException.__init__(self, version)
+
+class UnknownTransferEncoding(HTTPException):
+ pass
+
+class UnimplementedFileMode(HTTPException):
+ pass
+
+class IncompleteRead(HTTPException):
+ def __init__(self, partial):
+ self.partial = partial
+ HTTPException.__init__(self, partial)
+
+class ImproperConnectionState(HTTPException):
+ pass
+
+class CannotSendRequest(ImproperConnectionState):
+ pass
+
+class CannotSendHeader(ImproperConnectionState):
+ pass
+
+class ResponseNotReady(ImproperConnectionState):
+ pass
+
+class BadStatusLine(HTTPException):
+ def __init__(self, line):
+ self.line = line
+ HTTPException.__init__(self, line)
+
+error = HTTPException
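Because this module shadows the standard httplib inside the sandbox, existing client code keeps working while each request becomes a single urlfetch.fetch() call issued from getresponse(). A minimal sketch (host name illustrative):

import httplib  # resolves to this stub inside the sandbox

conn = httplib.HTTPConnection('www.example.com')
conn.request('GET', '/', headers={'Accept': 'text/html'})
resp = conn.getresponse()   # the urlfetch call happens here
print resp.status, resp.reason
body = resp.read()          # HTTPResponse proxies a StringIO of the content
conn.close()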
diff --git a/google_appengine/google/appengine/dist/httplib.pyc b/google_appengine/google/appengine/dist/httplib.pyc
new file mode 100644
index 0000000..5e42f94
--- /dev/null
+++ b/google_appengine/google/appengine/dist/httplib.pyc
Binary files differ
diff --git a/google_appengine/google/appengine/dist/neo_cgi.py b/google_appengine/google/appengine/dist/neo_cgi.py
new file mode 100755
index 0000000..c33ae80
--- /dev/null
+++ b/google_appengine/google/appengine/dist/neo_cgi.py
@@ -0,0 +1,16 @@
+#!/usr/bin/env python
+#
+# Copyright 2007 Google Inc.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
diff --git a/google_appengine/google/appengine/dist/py_imp.py b/google_appengine/google/appengine/dist/py_imp.py
new file mode 100755
index 0000000..a6a0f38
--- /dev/null
+++ b/google_appengine/google/appengine/dist/py_imp.py
@@ -0,0 +1,142 @@
+#!/usr/bin/env python
+#
+# Copyright 2007 Google Inc.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
+
+"""Stub replacement for Python's imp module."""
+
+
+import os
+import sys
+
+
+PY_SOURCE, PY_COMPILED, C_EXTENSION = 1, 2, 3
+PKG_DIRECTORY, C_BUILTIN, PY_FROZEN = 5, 6, 7
+
+
+def get_magic():
+ """Return the magic string used to recognize byte-compiled code files."""
+ return '\0\0\0\0'
+
+
+_PY_SOURCE_SUFFIX = ('.py', 'U', PY_SOURCE)
+_PKG_DIRECTORY_SUFFIX = ('', '', PKG_DIRECTORY)
+
+
+def get_suffixes():
+ """Return a list that describes the files that find_module() looks for."""
+ return [_PY_SOURCE_SUFFIX]
+
+
+def find_module(name, path=None):
+ """Try to find the named module on the given search path or sys.path."""
+ if path is None:
+ path = sys.path
+
+ for directory in path:
+ filename = os.path.join(directory, '%s.py' % name)
+ if os.path.exists(filename):
+ return open(filename, 'U'), filename, _PY_SOURCE_SUFFIX
+
+ dirname = os.path.join(directory, name)
+ filename = os.path.join(dirname, '__init__.py')
+ if os.path.exists(filename):
+ return None, dirname, _PKG_DIRECTORY_SUFFIX
+
+ raise ImportError('No module named %s' % name)
+
+
+def load_module(name, file_, pathname, description):
+ """Load or reload the specified module.
+
+ Please note that this function has only rudimentary support on App Engine:
+ only loading packages is supported.
+ """
+ suffix, mode, type_ = description
+ if type_ == PKG_DIRECTORY:
+ if name in sys.modules:
+ mod = sys.modules[name]
+ else:
+ mod = new_module(name)
+ sys.modules[name] = mod
+ filename = os.path.join(pathname, '__init__.py')
+ mod.__file__ = filename
+ execfile(filename, mod.__dict__, mod.__dict__)
+ return mod
+ else:
+ raise NotImplementedError('Only importing packages is supported on '
+ 'App Engine')
+
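+# A sketch of the only supported flow (the package name 'mypackage' is
+# hypothetical):
+#
+#   file_, pathname, description = find_module('mypackage')
+#   mod = load_module('mypackage', file_, pathname, description)
+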
+
+def new_module(name):
+ """Return a new empty module object."""
+ return type(sys)(name)
+
+
+def lock_held():
+ """Return False since threading is not supported."""
+ return False
+
+
+def acquire_lock():
+ """Acquiring the lock is a no-op since no threading is supported."""
+ pass
+
+
+def release_lock():
+ """There is no lock to release since acquiring is a no-op when there is no
+ threading."""
+ pass
+
+
+def init_builtin(name):
+ raise NotImplementedError('This function is not supported on App Engine.')
+
+
+def init_frozen(name):
+ raise NotImplementedError('This function is not supported on App Engine.')
+
+
+def is_builtin(name):
+ return name in sys.builtin_module_names
+
+
+def is_frozen(name):
+ return False
+
+
+def load_compiled(name, pathname, file_=None):
+ raise NotImplementedError('This function is not supported on App Engine.')
+
+
+def load_dynamic(name, pathname, file_=None):
+ raise NotImplementedError('This function is not supported on App Engine.')
+
+
+def load_source(name, pathname, file_=None):
+ raise NotImplementedError('This function is not supported on App Engine.')
+
+
+class NullImporter(object):
+ """Null importer object"""
+
+ def __init__(self, path_string):
+ if not path_string:
+ raise ImportError("empty pathname")
+ elif os.path.isdir(path_string):
+ raise ImportError("existing directory")
+
+ def find_module(self, fullname):
+ return None
diff --git a/google_appengine/google/appengine/dist/py_imp.pyc b/google_appengine/google/appengine/dist/py_imp.pyc
new file mode 100644
index 0000000..38fafcb
--- /dev/null
+++ b/google_appengine/google/appengine/dist/py_imp.pyc
Binary files differ
diff --git a/google_appengine/google/appengine/dist/py_zipimport.py b/google_appengine/google/appengine/dist/py_zipimport.py
new file mode 100755
index 0000000..1ec76b5
--- /dev/null
+++ b/google_appengine/google/appengine/dist/py_zipimport.py
@@ -0,0 +1,291 @@
+#!/usr/bin/env python
+#
+# Copyright 2007 Google Inc.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
+
+"""Pure Python zipfile importer.
+
+This approximates the standard zipimport module, which isn't supported
+by Google App Engine. See PEP 302 for more information about the API
+for import hooks.
+
+Usage:
+ import py_zipimport
+
+As a side effect of importing, the module overrides sys.path_hooks,
+and also creates an alias 'zipimport' for itself. When your app is
+running in Google App Engine production, you don't even need to import
+it, since this is already done for you. In the Google App Engine SDK
+this module is not used; instead, the standard zipimport module is
+used.
+"""
+
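+# Direct use of the importer, with a hypothetical archive path (normally the
+# import machinery invokes this through sys.path_hooks):
+#
+#   importer = zipimporter('/x/django.zip')
+#   mod = importer.load_module('django')
+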
+
+__all__ = ['ZipImportError', 'zipimporter']
+
+
+import os
+import sys
+import types
+import UserDict
+import zipfile
+
+
+# Search order within an archive: first a plain module file ('name.py'),
+# then a package directory ('name/__init__.py').
+_SEARCH_ORDER = [
+ ('.py', False),
+ ('/__init__.py', True),
+]
+
+
+_zipfile_cache = {}
+
+
+class ZipImportError(ImportError):
+ """Exception raised by zipimporter objects."""
+
+
+class zipimporter:
+ """A PEP-302-style importer that can import from a zipfile.
+
+ Just insert or append this class (not an instance) to sys.path_hooks
+ and you're in business. Instances satisfy both the 'importer' and
+ 'loader' APIs specified in PEP 302.
+ """
+
+ def __init__(self, path_entry):
+ """Constructor.
+
+ Args:
+ path_entry: The entry in sys.path. This should be the name of an
+ existing zipfile possibly with a path separator and a prefix
+ path within the archive appended, e.g. /x/django.zip or
+ /x/django.zip/foo/bar.
+
+ Raises:
+ ZipImportError if the path_entry does not represent a valid
+ zipfile with optional prefix.
+ """
+ archive = path_entry
+ prefix = ''
+ while not os.path.lexists(archive):
+ head, tail = os.path.split(archive)
+ if head == archive:
+ msg = 'Nothing found for %r' % path_entry
+ raise ZipImportError(msg)
+ archive = head
+ prefix = os.path.join(tail, prefix)
+ if not os.path.isfile(archive):
+ msg = 'Non-file %r found for %r' % (archive, path_entry)
+ raise ZipImportError(msg)
+ self.archive = archive
+ self.prefix = os.path.join(prefix, '')
+ self.zipfile = _zipfile_cache.get(archive)
+ if self.zipfile is None:
+ try:
+ self.zipfile = zipfile.ZipFile(self.archive)
+ except (EnvironmentError, zipfile.BadZipfile), err:
+ msg = 'Can\'t open zipfile %s: %s: %s' % (self.archive,
+ err.__class__.__name__, err)
+ import logging
+ logging.warn(msg)
+ raise ZipImportError(msg)
+ else:
+ _zipfile_cache[archive] = self.zipfile
+ import logging
+ logging.info('zipimporter(%r, %r)', archive, prefix)
+
+ def __repr__(self):
+ """Return a string representation matching zipimport.c."""
+ name = self.archive
+ if self.prefix:
+ name = os.path.join(name, self.prefix)
+ return '<zipimporter object "%s">' % name
+
+ def _get_info(self, fullmodname):
+ """Internal helper for find_module() and load_module().
+
+ Args:
+ fullmodname: The dot-separated full module name, e.g. 'django.core.mail'.
+
+ Returns:
+ A tuple (submodname, is_package, relpath) where:
+ submodname: The final component of the module name, e.g. 'mail'.
+ is_package: A bool indicating whether this is a package.
+ relpath: The path to the module's source code within the zipfile.
+
+ Raises:
+ ImportError if the module is not found in the archive.
+ """
+ parts = fullmodname.split('.')
+ submodname = parts[-1]
+ for suffix, is_package in _SEARCH_ORDER:
+ relpath = os.path.join(self.prefix,
+ submodname + suffix.replace('/', os.sep))
+ try:
+ self.zipfile.getinfo(relpath.replace(os.sep, '/'))
+ except KeyError:
+ pass
+ else:
+ return submodname, is_package, relpath
+ msg = ('Can\'t find module %s in zipfile %s with prefix %r' %
+ (fullmodname, self.archive, self.prefix))
+ raise ZipImportError(msg)
+
+ def _get_source(self, fullmodname):
+ """Internal helper for load_module().
+
+ Args:
+ fullmodname: The dot-separated full module name, e.g. 'django.core.mail'.
+
+ Returns:
+ A tuple (submodname, is_package, fullpath, source) where:
+ submodname: The final component of the module name, e.g. 'mail'.
+ is_package: A bool indicating whether this is a package.
+ fullpath: The path to the module's source code including the
+ zipfile's filename.
+ source: The module's source code.
+
+ Raises:
+ ImportError if the module is not found in the archive.
+ """
+ submodname, is_package, relpath = self._get_info(fullmodname)
+ fullpath = '%s%s%s' % (self.archive, os.sep, relpath)
+ source = self.zipfile.read(relpath.replace(os.sep, '/'))
+ source = source.replace('\r\n', '\n')
+ source = source.replace('\r', '\n')
+ return submodname, is_package, fullpath, source
+
+ def find_module(self, fullmodname, path=None):
+ """PEP-302-compliant find_module() method.
+
+ Args:
+ fullmodname: The dot-separated full module name, e.g. 'django.core.mail'.
+ path: Optional and ignored; present for API compatibility only.
+
+ Returns:
+ None if the module isn't found in the archive; self if it is found.
+ """
+ try:
+ submodname, is_package, relpath = self._get_info(fullmodname)
+ except ImportError:
+ return None
+ else:
+ return self
+
+ def load_module(self, fullmodname):
+ """PEP-302-compliant load_module() method.
+
+ Args:
+ fullmodname: The dot-separated full module name, e.g. 'django.core.mail'.
+
+ Returns:
+ The module object constructed from the source code.
+
+ Raises:
+ SyntaxError if the module's source code is syntactically incorrect.
+ ImportError if there was a problem accessing the source code.
+ Whatever else can be raised by executing the module's source code.
+ """
+ submodname, is_package, fullpath, source = self._get_source(fullmodname)
+ code = compile(source, fullpath, 'exec')
+ mod = sys.modules.get(fullmodname)
+ try:
+ if mod is None:
+ mod = sys.modules[fullmodname] = types.ModuleType(fullmodname)
+ mod.__loader__ = self
+ mod.__file__ = fullpath
+ mod.__name__ = fullmodname
+ if is_package:
+ mod.__path__ = [os.path.dirname(mod.__file__)]
+ exec code in mod.__dict__
+ except:
+ if fullmodname in sys.modules:
+ del sys.modules[fullmodname]
+ raise
+ return mod
+
+
+ def get_data(self, fullpath):
+ """Return (binary) content of a data file in the zipfile."""
+ prefix = os.path.join(self.archive, '')
+ if fullpath.startswith(prefix):
+ relpath = fullpath[len(prefix):]
+ elif os.path.isabs(fullpath):
+ raise IOError('Absolute path %r doesn\'t start with zipfile name %r' %
+ (fullpath, prefix))
+ else:
+ relpath = fullpath
+ try:
+ return self.zipfile.read(relpath)
+ except KeyError:
+ raise IOError('Path %r not found in zipfile %r' %
+ (relpath, self.archive))
+
+ def is_package(self, fullmodname):
+ """Return whether a module is a package."""
+ submodname, is_package, relpath = self._get_info(fullmodname)
+ return is_package
+
+ def get_code(self, fullmodname):
+ """Return bytecode for a module."""
+ submodname, is_package, fullpath, source = self._get_source(fullmodname)
+ return compile(source, fullpath, 'exec')
+
+ def get_source(self, fullmodname):
+ """Return source code for a module."""
+ submodname, is_package, fullpath, source = self._get_source(fullmodname)
+ return source
+
+
+class ZipFileCache(UserDict.DictMixin):
+ """Helper class to export archive data in _zip_directory_cache.
+
+ Just take the info from _zipfile_cache and convert it as required.
+ """
+
+ def __init__(self, archive):
+ # Raises KeyError if the archive has not been loaded into the cache.
+ _zipfile_cache[archive]
+
+ self._archive = archive
+
+ def keys(self):
+ return _zipfile_cache[self._archive].namelist()
+
+ def __getitem__(self, filename):
+ info = _zipfile_cache[self._archive].getinfo(filename)
+ dt = info.date_time
+ # Pack the timestamp into the 16-bit DOS time and date fields that
+ # zipimport's directory cache uses: hour<<11 | minute<<5 | second/2,
+ # and (year-1980)<<9 | month<<5 | day.
+ dostime = dt[3] << 11 | dt[4] << 5 | (dt[5] // 2)
+ dosdate = (dt[0] - 1980) << 9 | dt[1] << 5 | dt[2]
+ return (os.path.join(self._archive, info.filename), info.compress_type,
+ info.compress_size, info.file_size, info.header_offset, dostime,
+ dosdate, info.CRC)
+
+
+class ZipDirectoryCache(UserDict.DictMixin):
+ """Helper class to export _zip_directory_cache."""
+
+ def keys(self):
+ return _zipfile_cache.keys()
+
+ def __getitem__(self, archive):
+ return ZipFileCache(archive)
+
+
+_zip_directory_cache = ZipDirectoryCache()
+
+
+# Install: alias this module as 'zipimport' and make zipimporter the only
+# path hook, so zipfile entries on sys.path are handled by this importer.
+sys.modules['zipimport'] = sys.modules[__name__]
+sys.path_hooks[:] = [zipimporter]
diff --git a/google_appengine/google/appengine/dist/py_zipimport.pyc b/google_appengine/google/appengine/dist/py_zipimport.pyc
new file mode 100644
index 0000000..b8cbca8
--- /dev/null
+++ b/google_appengine/google/appengine/dist/py_zipimport.pyc
Binary files differ
diff --git a/google_appengine/google/appengine/dist/select.py b/google_appengine/google/appengine/dist/select.py
new file mode 100755
index 0000000..c33ae80
--- /dev/null
+++ b/google_appengine/google/appengine/dist/select.py
@@ -0,0 +1,16 @@
+#!/usr/bin/env python
+#
+# Copyright 2007 Google Inc.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
diff --git a/google_appengine/google/appengine/dist/socket.py b/google_appengine/google/appengine/dist/socket.py
new file mode 100755
index 0000000..d9f731b
--- /dev/null
+++ b/google_appengine/google/appengine/dist/socket.py
@@ -0,0 +1,45 @@
+#!/usr/bin/env python
+#
+# Copyright 2007 Google Inc.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
+
+
+AF_INET = None
+SOCK_STREAM = None
+SOCK_DGRAM = None
+
+_GLOBAL_DEFAULT_TIMEOUT = object()
+
+
+class error(OSError):
+ pass
+
+class herror(error):
+ pass
+
+class gaierror(error):
+ pass
+
+class timeout(error):
+ pass
+
+
+def _fileobject(fp, mode='rb', bufsize=-1, close=False):
+ """Assuming that the argument is a StringIO or file instance."""
+ if not hasattr(fp, 'fileno'):
+ fp.fileno = lambda: None
+ return fp
+
+ssl = None
diff --git a/google_appengine/google/appengine/dist/socket.pyc b/google_appengine/google/appengine/dist/socket.pyc
new file mode 100644
index 0000000..4be2c06
--- /dev/null
+++ b/google_appengine/google/appengine/dist/socket.pyc
Binary files differ
diff --git a/google_appengine/google/appengine/dist/subprocess.py b/google_appengine/google/appengine/dist/subprocess.py
new file mode 100755
index 0000000..c33ae80
--- /dev/null
+++ b/google_appengine/google/appengine/dist/subprocess.py
@@ -0,0 +1,16 @@
+#!/usr/bin/env python
+#
+# Copyright 2007 Google Inc.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
diff --git a/google_appengine/google/appengine/dist/tempfile.py b/google_appengine/google/appengine/dist/tempfile.py
new file mode 100755
index 0000000..5c3999a
--- /dev/null
+++ b/google_appengine/google/appengine/dist/tempfile.py
@@ -0,0 +1,65 @@
+#!/usr/bin/env python
+#
+# Copyright 2007 Google Inc.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
+
+"""Temporary files.
+
+This module is a replacement for the stock tempfile module in Python,
+and provides only in-memory temporary files as implemented by
+cStringIO. The only functionality provided is the TemporaryFile
+function.
+"""
+
+try:
+ from cStringIO import StringIO
+except ImportError:
+ from StringIO import StringIO
+
+__all__ = [
+ "TemporaryFile",
+
+ "NamedTemporaryFile", "mkstemp", "mkdtemp", "mktemp",
+ "TMP_MAX", "gettempprefix", "tempdir", "gettempdir",
+]
+
+TMP_MAX = 10000
+
+template = "tmp"
+
+tempdir = None
+
+def TemporaryFile(mode='w+b', bufsize=-1, suffix="",
+ prefix=template, dir=None):
+ """Create and return a temporary file.
+ Arguments:
+ 'prefix', 'suffix', 'dir', 'mode', 'bufsize' are all ignored.
+
+ Returns an object with a file-like interface. The file is in memory
+ only, and does not exist on disk.
+ """
+
+ return StringIO()
+
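+# A quick sketch of the in-memory behavior (nothing touches disk):
+#
+#   f = TemporaryFile()
+#   f.write('scratch data')
+#   f.seek(0)
+#   data = f.read()
+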
+def PlaceHolder(*args, **kwargs):
+ raise NotImplementedError("Only tempfile.TemporaryFile is available for use")
+
+NamedTemporaryFile = PlaceHolder
+mkstemp = PlaceHolder
+mkdtemp = PlaceHolder
+mktemp = PlaceHolder
+gettempprefix = PlaceHolder
+tempdir = PlaceHolder
+gettempdir = PlaceHolder
diff --git a/google_appengine/google/appengine/dist/tempfile.pyc b/google_appengine/google/appengine/dist/tempfile.pyc
new file mode 100644
index 0000000..a32e559
--- /dev/null
+++ b/google_appengine/google/appengine/dist/tempfile.pyc
Binary files differ
diff --git a/google_appengine/google/appengine/ext/__init__.py b/google_appengine/google/appengine/ext/__init__.py
new file mode 100755
index 0000000..c33ae80
--- /dev/null
+++ b/google_appengine/google/appengine/ext/__init__.py
@@ -0,0 +1,16 @@
+#!/usr/bin/env python
+#
+# Copyright 2007 Google Inc.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
diff --git a/google_appengine/google/appengine/ext/__init__.pyc b/google_appengine/google/appengine/ext/__init__.pyc
new file mode 100644
index 0000000..de39486
--- /dev/null
+++ b/google_appengine/google/appengine/ext/__init__.pyc
Binary files differ
diff --git a/google_appengine/google/appengine/ext/admin/__init__.py b/google_appengine/google/appengine/ext/admin/__init__.py
new file mode 100755
index 0000000..8062c9a
--- /dev/null
+++ b/google_appengine/google/appengine/ext/admin/__init__.py
@@ -0,0 +1,1297 @@
+#!/usr/bin/env python
+#
+# Copyright 2007 Google Inc.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
+
+"""Simple datastore view and interactive console, for use in dev_appserver."""
+
+
+import cgi
+import csv
+import cStringIO
+import datetime
+import logging
+import math
+import mimetypes
+import os
+import os.path
+import pickle
+import pprint
+import random
+import sys
+import time
+import traceback
+import types
+import urllib
+import urlparse
+import wsgiref.handlers
+
+try:
+ from google.appengine.cron import groctimespecification
+ from google.appengine.api import croninfo
+except ImportError:
+ HAVE_CRON = False
+else:
+ HAVE_CRON = True
+
+from google.appengine.api import apiproxy_stub_map
+from google.appengine.api import datastore
+from google.appengine.api import datastore_admin
+from google.appengine.api import datastore_types
+from google.appengine.api import datastore_errors
+from google.appengine.api import memcache
+from google.appengine.api.labs import taskqueue
+from google.appengine.api import users
+from google.appengine.ext import db
+from google.appengine.ext import webapp
+from google.appengine.ext.webapp import template
+
+_DEBUG = True
+
+
+class ImageHandler(webapp.RequestHandler):
+ """Serves a static image.
+
+ This exists because we don't want to burden the user with specifying
+ a static file handler for the image resources used by the admin tool.
+ """
+
+ PATH = '/images/.*'
+
+ def get(self):
+ image_name = os.path.basename(self.request.path)
+ content_type, encoding = mimetypes.guess_type(image_name)
+ if not content_type or not content_type.startswith('image/'):
+ logging.debug('image_name=%r, content_type=%r, encoding=%r',
+ image_name, content_type, encoding)
+ self.error(404)
+ return
+ directory = os.path.dirname(__file__)
+ path = os.path.join(directory, 'templates', 'images', image_name)
+ try:
+ image_stream = open(path, 'rb')
+ except IOError, e:
+ logging.error('Cannot open image %s: %s', image_name, e)
+ self.error(404)
+ return
+ try:
+ image_data = image_stream.read()
+ finally:
+ image_stream.close()
+ self.response.headers['Content-Type'] = content_type
+ self.response.out.write(image_data)
+
+
+class BaseRequestHandler(webapp.RequestHandler):
+ """Supplies a common template generation function.
+
+ When you call generate(), we augment the template variables supplied with
+ the current user in the 'user' variable and the current webapp request
+ in the 'request' variable.
+ """
+
+ def generate(self, template_name, template_values={}):
+ base_path = self.base_path()
+ values = {
+ 'application_name': self.request.environ['APPLICATION_ID'],
+ 'user': users.get_current_user(),
+ 'request': self.request,
+ 'home_path': base_path + DefaultPageHandler.PATH,
+ 'datastore_path': base_path + DatastoreQueryHandler.PATH,
+ 'datastore_edit_path': base_path + DatastoreEditHandler.PATH,
+ 'datastore_batch_edit_path': base_path + DatastoreBatchEditHandler.PATH,
+ 'interactive_path': base_path + InteractivePageHandler.PATH,
+ 'interactive_execute_path': base_path + InteractiveExecuteHandler.PATH,
+ 'memcache_path': base_path + MemcachePageHandler.PATH,
+ 'queues_path': base_path + QueuesPageHandler.PATH,
+ 'xmpp_path': base_path + XMPPPageHandler.PATH,
+ 'inboundmail_path': base_path + InboundMailPageHandler.PATH,
+ }
+ if HAVE_CRON:
+ values['cron_path'] = base_path + CronPageHandler.PATH
+
+ values.update(template_values)
+ directory = os.path.dirname(__file__)
+ path = os.path.join(directory, os.path.join('templates', template_name))
+ self.response.out.write(template.render(path, values, debug=_DEBUG))
+
+ def base_path(self):
+ """Returns the base path of this admin app, which is chosen by the user.
+
+ The user specifies which paths map to this application in their app.yaml.
+ You can get that base path with this method. Combine with the constant
+ paths specified by the classes to construct URLs.
+ """
+ path = self.__class__.PATH
+ return self.request.path[:-len(path)]
+
+ def filter_url(self, args):
+ """Filters the current URL to only have the given list of arguments.
+
+ For example, if your URL is /search?q=foo&num=100&start=10, then
+
+ self.filter_url(['start', 'num']) => /search?num=100&start=10
+ self.filter_url(['q']) => /search?q=foo
+ self.filter_url(['random']) => /search?
+
+ """
+ queries = []
+ for arg in args:
+ value = self.request.get(arg)
+ if value:
+ queries.append(arg + '=' + urllib.quote_plus(self.request.get(arg)))
+ return self.request.path + '?' + '&'.join(queries)
+
+ def in_production(self):
+ """Detects if app is running in production.
+
+ Returns a boolean.
+ """
+ server_software = os.environ['SERVER_SOFTWARE']
+ return not server_software.startswith('Development')
+
+
+class DefaultPageHandler(BaseRequestHandler):
+ """Redirects to the Datastore application by default."""
+
+ PATH = '/'
+
+ def get(self):
+ if self.request.path.endswith('/'):
+ base = self.request.path[:-1]
+ else:
+ base = self.request.path
+ self.redirect(base + DatastoreQueryHandler.PATH)
+
+
+class InteractivePageHandler(BaseRequestHandler):
+ """Shows our interactive console HTML."""
+ PATH = '/interactive'
+
+ def get(self):
+ self.generate('interactive.html')
+
+
+class InteractiveExecuteHandler(BaseRequestHandler):
+ """Executes the Python code submitted in a POST within this context.
+
+ For obvious reasons, this should only be available to administrators
+ of the application.
+ """
+
+ PATH = InteractivePageHandler.PATH + '/execute'
+
+ def post(self):
+ save_stdout = sys.stdout
+ results_io = cStringIO.StringIO()
+ try:
+ sys.stdout = results_io
+
+ code = self.request.get('code')
+ code = code.replace("\r\n", "\n")
+
+ try:
+ compiled_code = compile(code, '<string>', 'exec')
+ exec(compiled_code, globals())
+ except Exception, e:
+ traceback.print_exc(file=results_io)
+ finally:
+ sys.stdout = save_stdout
+
+ results = results_io.getvalue()
+ self.generate('interactive-output.html', {'output': results})
+
+
+class CronPageHandler(BaseRequestHandler):
+ """Shows information about configured cron jobs in this application."""
+ PATH = '/cron'
+
+ def get(self, now=None):
+ """Shows template displaying the configured cron jobs."""
+ if not now:
+ now = datetime.datetime.now()
+ values = {'request': self.request}
+ cron_info = _ParseCronYaml()
+ values['cronjobs'] = []
+ values['now'] = str(now)
+ if cron_info and cron_info.cron:
+ for entry in cron_info.cron:
+ job = {}
+ values['cronjobs'].append(job)
+ if entry.description:
+ job['description'] = entry.description
+ else:
+ job['description'] = '(no description)'
+ if entry.timezone:
+ job['timezone'] = entry.timezone
+ job['url'] = entry.url
+ job['schedule'] = entry.schedule
+ schedule = groctimespecification.GrocTimeSpecification(entry.schedule)
+ matches = schedule.GetMatches(now, 3)
+ job['times'] = []
+ for match in matches:
+ job['times'].append({'runtime': match.strftime("%Y-%m-%d %H:%M:%SZ"),
+ 'difference': str(match - now)})
+ self.generate('cron.html', values)
+
+
+class XMPPPageHandler(BaseRequestHandler):
+ """Tests XMPP requests."""
+ PATH = '/xmpp'
+
+ def get(self):
+ """Shows template displaying the XMPP."""
+ xmpp_configured = True
+ values = {
+ 'xmpp_configured': xmpp_configured,
+ 'request': self.request
+ }
+ self.generate('xmpp.html', values)
+
+
+class InboundMailPageHandler(BaseRequestHandler):
+ """Tests Mail requests."""
+ PATH = '/inboundmail'
+
+ def get(self):
+ """Shows template displaying the Inbound Mail form."""
+ inboundmail_configured = True
+ values = {
+ 'inboundmail_configured': inboundmail_configured,
+ 'request': self.request
+ }
+ self.generate('inboundmail.html', values)
+
+
+class QueuesPageHandler(BaseRequestHandler):
+ """Shows information about configured (and default) task queues."""
+ PATH = '/queues'
+
+ def __init__(self):
+ self.stub = apiproxy_stub_map.apiproxy.GetStub('taskqueue')
+
+ def get(self):
+ """Shows template displaying the configured task queues."""
+ values = {
+ 'request': self.request,
+ 'queues': self.stub.GetQueues(),
+ }
+ self.generate('queues.html', values)
+
+ def post(self):
+ """Handle modifying actions and/or redirect to GET page."""
+
+ if self.request.get('action:flushqueue'):
+ self.stub.FlushQueue(self.request.get('queue'))
+ self.redirect(self.request.path_url)
+
+
+class TasksPageHandler(BaseRequestHandler):
+ """Shows information about a queue's tasks."""
+
+ PATH = '/tasks'
+
+ PAGE_SIZE = 20
+
+ def __init__(self):
+ self.stub = apiproxy_stub_map.apiproxy.GetStub('taskqueue')
+
+ def get(self):
+ """Shows template displaying the queue's tasks."""
+ queue = self.request.get('queue')
+ start = int(self.request.get('start', 0))
+ all_tasks = self.stub.GetTasks(queue)
+
+ next_start = start + self.PAGE_SIZE
+ tasks = all_tasks[start:next_start]
+ current_page = int(start / self.PAGE_SIZE) + 1
+ pages = []
+ for number in xrange(int(math.ceil(len(all_tasks) /
+ float(self.PAGE_SIZE)))):
+ pages.append({
+ 'number': number + 1,
+ 'start': number * self.PAGE_SIZE
+ })
+ if not all_tasks[next_start:]:
+ next_start = -1
+ prev_start = start - self.PAGE_SIZE
+ if prev_start < 0:
+ prev_start = -1
+
+ values = {
+ 'request': self.request,
+ 'queue_name': queue,
+ 'tasks': tasks,
+ 'start_base_url': self.filter_url(['queue']),
+ 'prev_start': prev_start,
+ 'next_start': next_start,
+ 'pages': pages,
+ 'current_page': current_page,
+ }
+ self.generate('tasks.html', values)
+
+ def post(self):
+ if self.request.get('action:deletetask'):
+ self.stub.DeleteTask(self.request.get('queue'), self.request.get('task'))
+ self.redirect(self.request.path_url + '?queue=' + self.request.get('queue'))
+ return
+
+
+class MemcachePageHandler(BaseRequestHandler):
+ """Shows stats about memcache and query form to get values."""
+ PATH = '/memcache'
+
+ TYPES = ((str, str, 'String'),
+ (unicode, unicode, 'Unicode String'),
+ (bool, lambda value: MemcachePageHandler._ToBool(value), 'Boolean'),
+ (int, int, 'Integer'),
+ (long, long, 'Long Integer'),
+ (float, float, 'Float'))
+ DEFAULT_TYPESTR_FOR_NEW = 'String'
+
+ @staticmethod
+ def _ToBool(string_value):
+ """Convert string to boolean value.
+
+ Args:
+ string_value: A string.
+
+ Returns:
+ Boolean. True if string_value is "true", False if string_value is
+ "false". This is case-insensitive.
+
+ Raises:
+ ValueError: string_value not "true" or "false".
+ """
+ string_value_low = string_value.lower()
+ if string_value_low not in ('false', 'true'):
+ raise ValueError('invalid literal for boolean: %s' % string_value)
+ return string_value_low == 'true'
+
+ def _GetValueAndType(self, key):
+ """Fetch value from memcache and detect its type.
+
+ Args:
+ key: String
+
+ Returns:
+ (value, type), value is a Python object or None if the key was not set in
+ the cache, type is a string describing the type of the value.
+ """
+ try:
+ value = memcache.get(key)
+ except (pickle.UnpicklingError, AttributeError, EOFError, ImportError,
+ IndexError), e:
+ msg = 'Failed to retrieve value from cache: %s' % e
+ return msg, 'error'
+
+ if value is None:
+ return None, self.DEFAULT_TYPESTR_FOR_NEW
+
+ for typeobj, _, typestr in self.TYPES:
+ if isinstance(value, typeobj):
+ break
+ else:
+ typestr = 'pickled'
+ value = pprint.pformat(value, indent=2)
+
+ return value, typestr
+
+ def _SetValue(self, key, type_, value):
+ """Convert a string value and store the result in memcache.
+
+ Args:
+ key: String
+ type_: String, describing what type the value should have in the cache.
+ value: String, will be converted according to type_.
+
+ Returns:
+ Result of memcache.set(key, converted_value). True if the value was set.
+
+ Raises:
+ ValueError: Value can't be converted according to type_.
+ """
+ for _, converter, typestr in self.TYPES:
+ if typestr == type_:
+ value = converter(value)
+ break
+ else:
+ raise ValueError('Type %s not supported.' % type_)
+ return memcache.set(key, value)
+
+ def get(self):
+ """Show template and prepare stats and/or key+value to display/edit."""
+ values = {'request': self.request,
+ 'message': self.request.get('message')}
+
+ edit = self.request.get('edit')
+ key = self.request.get('key')
+ if edit:
+ key = edit
+ values['show_stats'] = False
+ values['show_value'] = False
+ values['show_valueform'] = True
+ values['types'] = [typestr for _, _, typestr in self.TYPES]
+ elif key:
+ values['show_stats'] = True
+ values['show_value'] = True
+ values['show_valueform'] = False
+ else:
+ values['show_stats'] = True
+ values['show_valueform'] = False
+ values['show_value'] = False
+
+ if key:
+ values['key'] = key
+ values['value'], values['type'] = self._GetValueAndType(key)
+ values['key_exists'] = values['value'] is not None
+
+ if values['type'] in ('pickled', 'error'):
+ values['writable'] = False
+ else:
+ values['writable'] = True
+
+ if values['show_stats']:
+ memcache_stats = memcache.get_stats()
+ if not memcache_stats:
+ memcache_stats = {'hits': 0, 'misses': 0, 'byte_hits': 0, 'items': 0,
+ 'bytes': 0, 'oldest_item_age': 0}
+ values['stats'] = memcache_stats
+ try:
+ hitratio = memcache_stats['hits'] * 100 / (memcache_stats['hits']
+ + memcache_stats['misses'])
+ except ZeroDivisionError:
+ hitratio = 0
+ values['hitratio'] = hitratio
+ delta_t = datetime.timedelta(seconds=memcache_stats['oldest_item_age'])
+ values['oldest_item_age'] = datetime.datetime.now() - delta_t
+
+ self.generate('memcache.html', values)
+
+ def _urlencode(self, query):
+ """Encode a dictionary into a URL query string.
+
+ In contrast to urllib this encodes unicode characters as UTF8.
+
+ Args:
+ query: Dictionary of key/value pairs.
+
+ Returns:
+ String, e.g. {u'a': u'b c'} becomes 'a=b+c'.
+ """
+ return '&'.join('%s=%s' % (urllib.quote_plus(k.encode('utf8')),
+ urllib.quote_plus(v.encode('utf8')))
+ for k, v in query.iteritems())
+
+ def post(self):
+ """Handle modifying actions and/or redirect to GET page."""
+ next_param = {}
+
+ if self.request.get('action:flush'):
+ if memcache.flush_all():
+ next_param['message'] = 'Cache flushed, all keys dropped.'
+ else:
+ next_param['message'] = 'Flushing the cache failed. Please try again.'
+
+ elif self.request.get('action:display'):
+ next_param['key'] = self.request.get('key')
+
+ elif self.request.get('action:edit'):
+ next_param['edit'] = self.request.get('key')
+
+ elif self.request.get('action:delete'):
+ key = self.request.get('key')
+ result = memcache.delete(key)
+ if result == memcache.DELETE_NETWORK_FAILURE:
+ next_param['message'] = ('ERROR: Network failure, key "%s" not deleted.'
+ % key)
+ elif result == memcache.DELETE_ITEM_MISSING:
+ next_param['message'] = 'Key "%s" not in cache.' % key
+ elif result == memcache.DELETE_SUCCESSFUL:
+ next_param['message'] = 'Key "%s" deleted.' % key
+ else:
+ next_param['message'] = ('Unknown return value. Key "%s" might still '
+ 'exist.' % key)
+
+ elif self.request.get('action:save'):
+ key = self.request.get('key')
+ value = self.request.get('value')
+ type_ = self.request.get('type')
+ next_param['key'] = key
+ try:
+ if self._SetValue(key, type_, value):
+ next_param['message'] = 'Key "%s" saved.' % key
+ else:
+ next_param['message'] = 'ERROR: Failed to save key "%s".' % key
+ except ValueError, e:
+ next_param['message'] = 'ERROR: Unable to encode value: %s' % e
+
+ elif self.request.get('action:cancel'):
+ next_param['key'] = self.request.get('key')
+
+ else:
+ next_param['message'] = 'Unknown action.'
+
+ next = self.request.path_url
+ if next_param:
+ next = '%s?%s' % (next, self._urlencode(next_param))
+ self.redirect(next)
+
+
+class DatastoreRequestHandler(BaseRequestHandler):
+ """The base request handler for our datastore admin pages.
+
+ We provide utility functions for querying the datastore and inferring the
+ types of entity properties.
+ """
+
+ def start(self):
+ """Returns the santized "start" argument from the URL."""
+ return self.request.get_range('start', min_value=0, default=0)
+
+ def num(self):
+ """Returns the sanitized "num" argument from the URL."""
+ return self.request.get_range('num', min_value=1, max_value=100,
+ default=10)
+
+ def execute_query(self, start=0, num=0, no_order=False):
+ """Parses the URL arguments and executes the query.
+
+ We return a tuple (list of entities, total entity count).
+
+ If the appropriate URL arguments are not given, we return an empty
+ set of results and 0 for the entity count.
+ """
+ kind = self.request.get('kind')
+ if not kind:
+ return ([], 0)
+ query = datastore.Query(kind)
+
+ order = self.request.get('order')
+ order_type = self.request.get('order_type')
+ if order and order_type:
+ order_type = DataType.get_by_name(order_type).python_type()
+ if order.startswith('-'):
+ direction = datastore.Query.DESCENDING
+ order = order[1:]
+ else:
+ direction = datastore.Query.ASCENDING
+ try:
+ query.Order((order, order_type, direction))
+ except datastore_errors.BadArgumentError:
+ pass
+
+ if not start:
+ start = self.start()
+ if not num:
+ num = self.num()
+ total = query.Count()
+ entities = query.Get(start + num)[start:]
+ return (entities, total)
+
+ def get_key_values(self, entities):
+ """Returns the union of key names used by the given list of entities.
+
+ We return the union as a dictionary mapping each key name to the list of
+ values observed for that key across the given entities.
+ """
+ key_dict = {}
+ for entity in entities:
+ for key, value in entity.iteritems():
+ if key_dict.has_key(key):
+ key_dict[key].append(value)
+ else:
+ key_dict[key] = [value]
+ return key_dict
+
+
+class DatastoreQueryHandler(DatastoreRequestHandler):
+ """Our main request handler that executes queries and lists entities.
+
+ We use execute_query() in our base request handler to parse URL arguments
+ and execute the datastore query.
+ """
+
+ PATH = '/datastore'
+
+ def get_kinds(self):
+ """Get sorted list of kind names the datastore knows about.
+
+ This should only be called in the development environment as GetSchema is
+ expensive and no caching is done.
+ """
+ schema = datastore_admin.GetSchema()
+ kinds = []
+ for entity_proto in schema:
+ kinds.append(entity_proto.key().path().element_list()[-1].type())
+ kinds.sort()
+ return kinds
+
+ def get(self):
+ """Formats the results from execute_query() for datastore.html.
+
+ The only complex part of that process is calculating the pager variables
+ to generate the Gooooogle pager at the bottom of the page.
+ """
+ result_set, total = self.execute_query()
+ key_values = self.get_key_values(result_set)
+ keys = key_values.keys()
+ keys.sort()
+
+ headers = []
+ for key in keys:
+ sample_value = key_values[key][0]
+ headers.append({
+ 'name': key,
+ 'type': DataType.get(sample_value).name(),
+ })
+
+ entities = []
+ edit_path = self.base_path() + DatastoreEditHandler.PATH
+ for entity in result_set:
+ attributes = []
+ for key in keys:
+ if entity.has_key(key):
+ raw_value = entity[key]
+ value = DataType.get(raw_value).format(raw_value)
+ short_value = DataType.get(raw_value).short_format(raw_value)
+ else:
+ value = ''
+ short_value = ''
+ attributes.append({
+ 'name': key,
+ 'value': value,
+ 'short_value': short_value,
+ })
+ entities.append({
+ 'key': str(entity.key()),
+ 'key_name': entity.key().name(),
+ 'key_id': entity.key().id(),
+ 'shortened_key': str(entity.key())[:8] + '...',
+ 'attributes': attributes,
+ 'edit_uri': edit_path + '?key=' + str(entity.key()) + '&kind=' + urllib.quote(self.request.get('kind')) + '&next=' + urllib.quote(self.request.uri),
+ })
+
+ start = self.start()
+ num = self.num()
+ max_pager_links = 8
+ current_page = start / num
+ num_pages = int(math.ceil(total * 1.0 / num))
+ page_start = max(math.floor(current_page - max_pager_links / 2), 0)
+ page_end = min(page_start + max_pager_links, num_pages)
+
+ pages = []
+ for page in range(page_start + 1, page_end + 1):
+ pages.append({
+ 'number': page,
+ 'start': (page - 1) * num,
+ })
+ current_page += 1
+
+ in_production = self.in_production()
+ if in_production:
+ kinds = None
+ else:
+ kinds = self.get_kinds()
+
+ values = {
+ 'request': self.request,
+ 'in_production': in_production,
+ 'kinds': kinds,
+ 'kind': self.request.get('kind'),
+ 'order': self.request.get('order'),
+ 'headers': headers,
+ 'entities': entities,
+ 'message': self.request.get('msg'),
+ 'pages': pages,
+ 'current_page': current_page,
+ 'num': num,
+ 'next_start': -1,
+ 'prev_start': -1,
+ 'start': start,
+ 'total': total,
+ 'start_base_url': self.filter_url(['kind', 'order', 'order_type',
+ 'num']),
+ 'order_base_url': self.filter_url(['kind', 'num']),
+ }
+ if current_page > 1:
+ values['prev_start'] = int((current_page - 2) * num)
+ if current_page < num_pages:
+ values['next_start'] = int(current_page * num)
+
+ self.generate('datastore.html', values)
+
+
+class DatastoreBatchEditHandler(DatastoreRequestHandler):
+ """Request handler for a batch operation on entities.
+
+ Supports deleting multiple entities by key, then redirecting to another url.
+ """
+
+ PATH = DatastoreQueryHandler.PATH + '/batchedit'
+
+ def post(self):
+ kind = self.request.get('kind')
+
+ keys = []
+ num_keys = int(self.request.get('numkeys'))
+ for i in xrange(1, num_keys+1):
+ key = self.request.get('key%d' % i)
+ if key:
+ keys.append(key)
+
+ if self.request.get('action') == 'Delete':
+ num_deleted = 0
+ for key in keys:
+ datastore.Delete(datastore.Key(key))
+ num_deleted = num_deleted + 1
+ message = '%d entit%s deleted.' % (
+ num_deleted, ('ies', 'y')[num_deleted == 1])
+ self.redirect(
+ '%s&msg=%s' % (self.request.get('next'), urllib.quote_plus(message)))
+ return
+
+ self.error(404)
+
+
+class DatastoreEditHandler(DatastoreRequestHandler):
+ """Request handler for the entity create/edit form.
+
+ We determine how to generate a form to edit an entity by doing a query
+ on the entity kind and looking at the set of keys and their types in
+ the result set. We use the DataType subclasses for those introspected types
+ to generate the form and parse the form results.
+ """
+
+ PATH = DatastoreQueryHandler.PATH + '/edit'
+
+ def get(self):
+ kind = self.request.get('kind')
+ sample_entities = self.execute_query()[0]
+ if len(sample_entities) < 1:
+ next_uri = self.request.get('next')
+ kind_param = 'kind=%s' % kind
+ if not kind_param in next_uri:
+ if '?' in next_uri:
+ next_uri += '&' + kind_param
+ else:
+ next_uri += '?' + kind_param
+ self.redirect(next_uri)
+ return
+
+ entity_key = self.request.get('key')
+ if entity_key:
+ key_instance = datastore.Key(entity_key)
+ entity_key_name = key_instance.name()
+ entity_key_id = key_instance.id()
+ parent_key = key_instance.parent()
+ entity = datastore.Get(key_instance)
+ else:
+ key_instance = None
+ entity_key_name = None
+ entity_key_id = None
+ parent_key = None
+ entity = None
+
+ if parent_key:
+ parent_kind = parent_key.kind()
+ else:
+ parent_kind = None
+
+ fields = []
+ key_values = self.get_key_values(sample_entities)
+ for key, sample_values in key_values.iteritems():
+ if entity and entity.has_key(key):
+ data_type = DataType.get(entity[key])
+ else:
+ data_type = DataType.get(sample_values[0])
+ name = data_type.name() + "|" + key
+ if entity and entity.has_key(key):
+ value = entity[key]
+ else:
+ value = None
+ field = data_type.input_field(name, value, sample_values)
+ fields.append((key, data_type.name(), field))
+
+ self.generate('datastore_edit.html', {
+ 'kind': kind,
+ 'key': entity_key,
+ 'key_name': entity_key_name,
+ 'key_id': entity_key_id,
+ 'fields': fields,
+ 'focus': self.request.get('focus'),
+ 'next': self.request.get('next'),
+ 'parent_key': parent_key,
+ 'parent_kind': parent_kind,
+ })
+
+ def post(self):
+ kind = self.request.get('kind')
+ entity_key = self.request.get('key')
+ if entity_key:
+ if self.request.get('action') == 'Delete':
+ datastore.Delete(datastore.Key(entity_key))
+ self.redirect(self.request.get('next'))
+ return
+ entity = datastore.Get(datastore.Key(entity_key))
+ else:
+ entity = datastore.Entity(kind)
+
+ args = self.request.arguments()
+ for arg in args:
+ # Form field names are encoded as 'type_name|field_name' (see get()).
+ bar = arg.find('|')
+ if bar > 0:
+ data_type_name = arg[:bar]
+ field_name = arg[bar + 1:]
+ form_value = self.request.get(arg)
+ data_type = DataType.get_by_name(data_type_name)
+ if entity and entity.has_key(field_name):
+ old_formatted_value = data_type.format(entity[field_name])
+ if old_formatted_value == form_value:
+ continue
+
+ if len(form_value) > 0:
+ value = data_type.parse(form_value)
+ entity[field_name] = value
+ elif entity.has_key(field_name):
+ del entity[field_name]
+
+ datastore.Put(entity)
+
+ self.redirect(self.request.get('next'))
+
+
+class DataType(object):
+ """A DataType represents a data type in the datastore.
+
+ Each DataType subtype defines five methods:
+
+ format: returns a formatted string for a datastore value
+ input_field: returns a string HTML <input> element for this DataType
+ name: the friendly string name of this DataType
+ parse: parses the formatted string representation of this DataType
+ python_type: the canonical Python type for this datastore type
+
+ We use DataType instances to display formatted values in our result lists,
+ and we use input_field/format/parse to generate forms and parse the results
+ from those forms to allow editing of entities.
+ """
+ @staticmethod
+ def get(value):
+ return _DATA_TYPES[value.__class__]
+
+ @staticmethod
+ def get_by_name(name):
+ return _NAMED_DATA_TYPES[name]
+
+ def format(self, value):
+ return str(value)
+
+ def short_format(self, value):
+ return self.format(value)
+
+ def input_field(self, name, value, sample_values):
+ if value is not None:
+ string_value = self.format(value)
+ else:
+ string_value = ''
+ return '<input class="%s" name="%s" type="text" size="%d" value="%s"/>' % (cgi.escape(self.name()), cgi.escape(name), self.input_field_size(),
+ cgi.escape(string_value, True))
+
+ def input_field_size(self):
+ return 30
+
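+# Example round trip through a DataType (illustrative):
+#
+#   data_type = DataType.get(42)    # an IntType instance
+#   text = data_type.format(42)    # '42'
+#   value = data_type.parse(text)    # 42
+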
+
+class StringType(DataType):
+ def format(self, value):
+ return value
+
+ def input_field(self, name, value, sample_values):
+ multiline = False
+ if value:
+ multiline = len(value) > 255 or value.find('\n') >= 0
+ if not multiline:
+ for sample_value in sample_values:
+ if sample_value and (len(sample_value) > 255 or
+ sample_value.find('\n') >= 0):
+ multiline = True
+ break
+ if multiline:
+ if not value:
+ value = ''
+ return '<textarea name="%s" rows="5" cols="50">%s</textarea>' % (cgi.escape(name), cgi.escape(value))
+ else:
+ return DataType.input_field(self, name, value, sample_values)
+
+ def name(self):
+ return 'string'
+
+ def parse(self, value):
+ return value
+
+ def python_type(self):
+ return str
+
+ def input_field_size(self):
+ return 50
+
+
+class TextType(StringType):
+ def name(self):
+ return 'Text'
+
+ def input_field(self, name, value, sample_values):
+ return '<textarea name="%s" rows="5" cols="50">%s</textarea>' % (cgi.escape(name), cgi.escape(str(value)))
+
+ def parse(self, value):
+ return datastore_types.Text(value)
+
+ def python_type(self):
+ return datastore_types.Text
+
+
+class BlobType(StringType):
+ def name(self):
+ return 'Blob'
+
+ def input_field(self, name, value, sample_values):
+ return '&lt;binary&gt;'
+
+ def format(self, value):
+ return '<binary>'
+
+ def python_type(self):
+ return datastore_types.Blob
+
+
+class TimeType(DataType):
+ _FORMAT = '%Y-%m-%d %H:%M:%S'
+
+ def format(self, value):
+ return value.strftime(TimeType._FORMAT)
+
+ def name(self):
+ return 'datetime'
+
+ def parse(self, value):
+ return datetime.datetime(*(time.strptime(value, TimeType._FORMAT)[0:6]))
+
+ def python_type(self):
+ return datetime.datetime
+
+
+class ListType(DataType):
+ def format(self, value):
+ value_file = cStringIO.StringIO()
+ try:
+ writer = csv.writer(value_file)
+ writer.writerow(value)
+ return value_file.getvalue()
+ finally:
+ value_file.close()
+
+ def name(self):
+ return 'list'
+
+ def parse(self, value):
+ value_file = cStringIO.StringIO(value)
+ try:
+ reader = csv.reader(value_file)
+ return reader.next()
+ finally:
+ value_file.close()
+
+ def python_type(self):
+ return list
+
+
+class BoolType(DataType):
+ def name(self):
+ return 'bool'
+
+ def input_field(self, name, value, sample_values):
+ selected = {None: '', False: '', True: ''}
+ selected[value] = "selected"
+ return """<select class="%s" name="%s">
+ <option %s value=''></option>
+ <option %s value='0'>False</option>
+ <option %s value='1'>True</option></select>""" % (cgi.escape(self.name()), cgi.escape(name), selected[None],
+ selected[False], selected[True])
+
+ def parse(self, value):
+ # Compare with ==, not 'is': identity checks on strings are unreliable.
+ if value.lower() == 'true':
+ return True
+ if value.lower() == 'false':
+ return False
+ return bool(int(value))
+
+ def python_type(self):
+ return bool
+
+
+class NumberType(DataType):
+ def input_field_size(self):
+ return 10
+
+
+class IntType(NumberType):
+ def name(self):
+ return 'int'
+
+ def parse(self, value):
+ return int(value)
+
+ def python_type(self):
+ return int
+
+
+class LongType(NumberType):
+ def name(self):
+ return 'long'
+
+ def parse(self, value):
+ return long(value)
+
+ def python_type(self):
+ return long
+
+
+class FloatType(NumberType):
+ def name(self):
+ return 'float'
+
+ def parse(self, value):
+ return float(value)
+
+ def python_type(self):
+ return float
+
+
+class UserType(DataType):
+ def name(self):
+ return 'User'
+
+ def parse(self, value):
+ return users.User(value)
+
+ def python_type(self):
+ return users.User
+
+ def input_field_size(self):
+ return 15
+
+class ReferenceType(DataType):
+ def name(self):
+ return 'Key'
+
+ def short_format(self, value):
+ return str(value)[:8] + '...'
+
+ def parse(self, value):
+ return datastore_types.Key(value)
+
+ def python_type(self):
+ return datastore_types.Key
+
+ def input_field_size(self):
+ return 85
+
+
+class EmailType(StringType):
+ def name(self):
+ return 'Email'
+
+ def parse(self, value):
+ return datastore_types.Email(value)
+
+ def python_type(self):
+ return datastore_types.Email
+
+
+class CategoryType(StringType):
+ def name(self):
+ return 'Category'
+
+ def parse(self, value):
+ return datastore_types.Category(value)
+
+ def python_type(self):
+ return datastore_types.Category
+
+
+class LinkType(StringType):
+ def name(self):
+ return 'Link'
+
+ def parse(self, value):
+ return datastore_types.Link(value)
+
+ def python_type(self):
+ return datastore_types.Link
+
+
+class GeoPtType(DataType):
+ def name(self):
+ return 'GeoPt'
+
+ def parse(self, value):
+ return datastore_types.GeoPt(value)
+
+ def python_type(self):
+ return datastore_types.GeoPt
+
+
+class ImType(DataType):
+ def name(self):
+ return 'IM'
+
+ def parse(self, value):
+ return datastore_types.IM(value)
+
+ def python_type(self):
+ return datastore_types.IM
+
+
+class PhoneNumberType(StringType):
+ def name(self):
+ return 'PhoneNumber'
+
+ def parse(self, value):
+ return datastore_types.PhoneNumber(value)
+
+ def python_type(self):
+ return datastore_types.PhoneNumber
+
+
+class PostalAddressType(StringType):
+ def name(self):
+ return 'PostalAddress'
+
+ def parse(self, value):
+ return datastore_types.PostalAddress(value)
+
+ def python_type(self):
+ return datastore_types.PostalAddress
+
+
+class RatingType(NumberType):
+ def name(self):
+ return 'Rating'
+
+ def parse(self, value):
+ return datastore_types.Rating(value)
+
+ def python_type(self):
+ return datastore_types.Rating
+
+
+class NoneType(DataType):
+ def name(self):
+ return 'None'
+
+ def parse(self, value):
+ return None
+
+ def python_type(self):
+ return None
+
+ def format(self, value):
+ return 'None'
+
+_DATA_TYPES = {
+ types.NoneType: NoneType(),
+ types.StringType: StringType(),
+ types.UnicodeType: StringType(),
+ datastore_types.Text: TextType(),
+ datastore_types.Blob: BlobType(),
+ types.BooleanType: BoolType(),
+ types.IntType: IntType(),
+ types.LongType: LongType(),
+ types.FloatType: FloatType(),
+ datetime.datetime: TimeType(),
+ users.User: UserType(),
+ datastore_types.Key: ReferenceType(),
+ types.ListType: ListType(),
+ datastore_types.Email: EmailType(),
+ datastore_types.Category: CategoryType(),
+ datastore_types.Link: LinkType(),
+ datastore_types.GeoPt: GeoPtType(),
+ datastore_types.IM: ImType(),
+ datastore_types.PhoneNumber: PhoneNumberType(),
+ datastore_types.PostalAddress: PostalAddressType(),
+ datastore_types.Rating: RatingType(),
+}
+
+_NAMED_DATA_TYPES = {}
+for data_type in _DATA_TYPES.values():
+ _NAMED_DATA_TYPES[data_type.name()] = data_type
+
+
+def _ParseCronYaml():
+ """Loads the cron.yaml file and parses it.
+
+ The CWD of the dev_appserver is the root of the application here.
+
+ Returns the parsed cron info, or None if no cron.yaml/cron.yml file exists.
+ """
+ cronyaml_files = 'cron.yaml', 'cron.yml'
+ for cronyaml in cronyaml_files:
+ try:
+ fh = open(cronyaml, "r")
+ except IOError:
+ continue
+ try:
+ cron_info = croninfo.LoadSingleCron(fh)
+ return cron_info
+ finally:
+ fh.close()
+ return None
+
+
+def main():
+ handlers = [
+ ('.*' + DatastoreQueryHandler.PATH, DatastoreQueryHandler),
+ ('.*' + DatastoreEditHandler.PATH, DatastoreEditHandler),
+ ('.*' + DatastoreBatchEditHandler.PATH, DatastoreBatchEditHandler),
+ ('.*' + InteractivePageHandler.PATH, InteractivePageHandler),
+ ('.*' + InteractiveExecuteHandler.PATH, InteractiveExecuteHandler),
+ ('.*' + MemcachePageHandler.PATH, MemcachePageHandler),
+ ('.*' + ImageHandler.PATH, ImageHandler),
+ ('.*' + QueuesPageHandler.PATH, QueuesPageHandler),
+ ('.*' + TasksPageHandler.PATH, TasksPageHandler),
+ ('.*' + XMPPPageHandler.PATH, XMPPPageHandler),
+ ('.*' + InboundMailPageHandler.PATH, InboundMailPageHandler),
+ ('.*', DefaultPageHandler),
+ ]
+ if HAVE_CRON:
+ handlers.insert(0, ('.*' + CronPageHandler.PATH, CronPageHandler))
+ application = webapp.WSGIApplication(handlers, debug=_DEBUG)
+ wsgiref.handlers.CGIHandler().run(application)
+
+
+import django
+if django.VERSION[:2] < (0, 97):
+ from django.template import defaultfilters
+ def safe(text, dummy=None):
+ return text
+ defaultfilters.register.filter("safe", safe)
+
+
+if __name__ == '__main__':
+ main()
diff --git a/google_appengine/google/appengine/ext/admin/templates/base.html b/google_appengine/google/appengine/ext/admin/templates/base.html
new file mode 100644
index 0000000..beef28e
--- /dev/null
+++ b/google_appengine/google/appengine/ext/admin/templates/base.html
@@ -0,0 +1,96 @@
+<!DOCTYPE html PUBLIC "-//W3C//DTD XHTML 1.0 Strict//EN" "http://www.w3.org/TR/xhtml1/DTD/xhtml1-strict.dtd">
+<html xmlns="http://www.w3.org/1999/xhtml">
+ <head>
+ <meta http-equiv="content-type" content="text/html; charset=utf-8"/>
+ <title>{% block title %}{% endblock %}</title>
+ <style type="text/css">{% include "css/base.css" %}</style>
+ <style type="text/css">{% include "css/ae.css" %}</style>
+ <style type="text/css">{% include "css/nav.css" %}</style>
+ {% block head %}{% endblock %}
+ </head>
+ <body {% block bodyattributes %}{% endblock %}>
+ <div class="g-doc">
+
+ <div id="hd" class="g-section">
+
+ <div class="g-section">
+ <img id="ae-logo" src="./images/google.gif" width="153" height="47"
+ alt="Google App Engine"/>
+ </div>
+
+ <div id="ae-appbar-lrg" class="g-section">
+ <h1>{{ application_name }} Development Console</h1>
+ </div>
+
+ </div>
+
+
+ <div id="bd" class="g-section">
+
+ <div class="g-section g-tpl-160">
+
+ <div id="ae-lhs-nav" class="g-unit g-first">
+
+ <div id="ae-nav" class="g-c">
+
+ <ul id="menu">
+ <li><a href="{{ datastore_path }}">Datastore Viewer</a></li>
+ <li><a href="{{ interactive_path }}">Interactive Console</a></li>
+ <li><a href="{{ memcache_path }}">Memcache Viewer</a></li>
+ <li><a href="{{ queues_path }}">Task Queues</a></li>
+ {% if cron_path %}
+ <li><a href="{{ cron_path }}">Cron Jobs</a></li>
+ {% endif %}
+ <li><a href="{{ xmpp_path }}">XMPP</a></li>
+ {% comment %}
+ <li><a href="{{ inboundmail_path }}">Inbound Mail</a></li>
+ {% endcomment %}
+ </ul>
+
+ </div>
+
+ </div>
+
+ <div id="ae-content" class="g-unit">
+ {% block body %}{% endblock %}
+ </div>
+
+ </div>
+
+ <div id="ft">
+ <p>
+ &copy;2009 Google
+ </p>
+ </div>
+ {% block final %}{% endblock %}
+ </div>
+ <script type="text/javascript">
+ //<![CDATA[
+
+ function walk(element, condition, operation) {
+ if (!element) return;
+ if (condition(element)) {
+ operation(element);
+ return;
+ }
+ for (var e = element.firstChild; e != null; e = e.nextSibling) {
+ walk(e, condition, operation);
+ }
+ }
+
+ function isCurrentLink(e) {
+ if (e.tagName != "A") return false;
+        var re = new RegExp("^" + e.href + ".*(\\?.*)?$");
+ return re.test(window.location.href);
+ }
+
+ function makeSelected(e) {
+ e.className = "ae-nav-selected";
+ }
+
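+      // Highlight the current page in the left-hand menu: walk the menu
+      // DOM, stop at the first anchor whose href prefixes the current
+      // location, and tag it with the "ae-nav-selected" style from nav.css.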
+ walk(document.getElementById("menu"), isCurrentLink, makeSelected);
+
+ //]]>
+ </script>
+ </body>
+</html>
diff --git a/google_appengine/google/appengine/ext/admin/templates/cron.html b/google_appengine/google/appengine/ext/admin/templates/cron.html
new file mode 100644
index 0000000..c692ae8
--- /dev/null
+++ b/google_appengine/google/appengine/ext/admin/templates/cron.html
@@ -0,0 +1,85 @@
+{% extends "base.html" %}
+
+{% block title %}
+{{ application_name }} Development Console - Cron Viewer{% endblock %}
+
+{% block head %}
+ <style type="text/css">{% include "css/cron.css" %}</style>
+{% endblock %}
+
+{% block breadcrumbs %}
+ <span class="item"><a href="">Cron Viewer</a></span>
+{% endblock %}
+
+{% block body %}
+<h3>Cron Jobs</h3>
+
+{% if message %}
+<div class="ah-cron-message">
+{{ message|escape }}
+</div>
+{% endif %}
+
+{% if cronjobs %}
+ <table id="ah-cron-jobs" class="ae-table ae-table-striped">
+ <colgroup>
+ <col style="width:60%">
+ <col>
+ </colgroup>
+ <thead>
+ <tr>
+ <th>Cron Job</th>
+ <th>Schedule</th>
+ </tr>
+ </thead>
+ <tbody>
+ {% for job in cronjobs %}
+ <tr class="{% cycle ae-odd,ae-even %}">
+ <td valign="top">
+ <h3>{{ job.url|escape }}</h3>
+ <p>
+ {{ job.description|escape }}
+ </p>
+ </td>
+ <td valign="top">
+ <table class="ae-table">
+ <tr>
+ <td>
+ <strong>{{ job.schedule|escape }}</strong>
+ </td>
+ <td class="ah-cron-test">
+ <a href="{{ job.url }}">Test this job</a>
+ </td>
+ </tr>
+ </table>
+
+ {% if job.timezone %}
+ <strong>Timezone: {{ job.timezone }}</strong>
+ <div class="ah-cron-message">
+      Schedules with timezones won't be calculated correctly here. After
+      installing the pytz package, use the appcfg.py cron_info command to
+      view the next run times for this schedule.
+ </div>
+ {% endif %}
+ <div class="ah-cron-times">
+ In production, this would run at these times:
+ <ol>
+ {% for run in job.times %}
+ <li>
+ {{ run.runtime }} <span class="ae-unimportant">{{ run.difference }} from now</span>
+ </li>
+ {% endfor %}
+ </ol>
+ </div>
+ </td>
+ </tr>
+ {% endfor %}
+ </tbody>
+ </table>
+{% else %}
+  This application doesn't define any cron jobs. See the documentation for more information.
+{% endif %}
+
+
+{% endblock %}
+
diff --git a/google_appengine/google/appengine/ext/admin/templates/css/ae.css b/google_appengine/google/appengine/ext/admin/templates/css/ae.css
new file mode 100755
index 0000000..0e34b50
--- /dev/null
+++ b/google_appengine/google/appengine/ext/admin/templates/css/ae.css
@@ -0,0 +1,170 @@
+/* Goog.css Overrides */
+h1 {
+ font-size: 1.5em;
+}
+
+.g-doc {
+ width: auto;
+ margin: 0 10px;
+}
+
+/* Header Selectors */
+#ae-logo {
+ margin-bottom: 0;
+}
+#ae-appbar-lrg {
+ margin: 0 0 1.25em 0;
+ padding: .2em .6em;
+ background-color: #e5ecf9;
+ border-top: 1px solid #36c;
+}
+#ae-appbar-lrg h1 {
+ margin: 0;
+ padding: 0;
+}
+
+/* Footer Selectors */
+#ft p {
+ text-align: center;
+ margin-top: 2.5em;
+ padding-top: .5em;
+ border-top: 2px solid #c3d9ff;
+}
+
+/* bd selectors */
+#bd h3 {
+ font-weight: bold;
+ font-size: 1.4em;
+}
+#bd p {
+ padding: 0 0 1em 0;
+}
+#ae-content {
+ padding-left: 1em;
+ border-left: 3px solid #e5ecf9;
+ min-height: 200px;
+}
+
+/* Tables */
+.ae-table-plain {
+ border-collapse: collapse;
+ width: 100%;
+}
+.ae-table {
+ border: 1px solid #c5d7ef;
+ border-collapse: collapse;
+ width: 100%;
+}
+
+#bd h2.ae-table-title {
+ background: #e5ecf9;
+ margin: 0;
+ color: #000;
+ font-size: 1em;
+ padding: 3px 0 3px 5px;
+ border-left: 1px solid #c5d7ef;
+ border-right: 1px solid #c5d7ef;
+ border-top: 1px solid #c5d7ef;
+}
+.ae-table-caption,
+.ae-table caption {
+ border: 1px solid #c5d7ef;
+ background: #e5ecf9;
+ /**
+ * Fixes the caption margin ff display bug.
+ * see www.aurora-il.org/table_test.htm
+ * this is a slight variation to specifically target FF since Safari
+ * was shifting the caption over in an ugly fashion with margin-left: -1px
+ */
+ -moz-margin-start: -1px;
+}
+.ae-table caption {
+ padding: 3px 5px;
+ text-align: left;
+}
+.ae-table th,
+.ae-table td {
+ background-color: #fff;
+ padding: .35em 1em .25em .35em;
+ margin: 0;
+}
+.ae-table thead th {
+ font-weight: bold;
+ text-align: left;
+ background: #c5d7ef;
+ vertical-align: bottom;
+}
+.ae-table tfoot tr td {
+ border-top: 1px solid #c5d7ef;
+ background-color: #e5ecf9;
+}
+.ae-table td {
+ border-top: 1px solid #c5d7ef;
+ border-bottom: 1px solid #c5d7ef;
+}
+.ae-even td,
+.ae-even th,
+.ae-even-top td,
+.ae-even-tween td,
+.ae-even-bottom td,
+ol.ae-even {
+ background-color: #e9e9e9;
+ border-top: 1px solid #c5d7ef;
+ border-bottom: 1px solid #c5d7ef;
+}
+.ae-even-top td {
+ border-bottom: 0;
+}
+.ae-even-bottom td {
+ border-top: 0;
+}
+.ae-even-tween td {
+ border: 0;
+}
+.ae-table .ae-tween td {
+ border: 0;
+}
+.ae-table .ae-tween-top td {
+ border-bottom: 0;
+}
+.ae-table .ae-tween-bottom td {
+ border-top: 0;
+}
+.ae-table #ae-live td {
+ background-color: #ffeac0;
+}
+.ae-table-fixed {
+ table-layout: fixed;
+}
+.ae-table-fixed td,
+.ae-table-nowrap {
+ overflow: hidden;
+ white-space: nowrap;
+}
+.ae-new-usr td {
+ border-top: 1px solid #ccccce;
+ background-color: #ffe;
+}
+.ae-error-td td {
+ border: 2px solid #f00;
+ background-color: #fee;
+}
+.ae-table .ae-pager {
+ background-color: #c5d7ef;
+}
+
+.ae-errorbox {
+ border: 1px solid #f00;
+ background-color: #fee;
+ margin-bottom: 1em;
+ padding: 1em;
+ display: inline-block;
+}
+
+.ae-message {
+ border: 1px solid #e5ecf9;
+ background-color: #f6f9ff;
+ margin-bottom: 1em;
+ padding: 1em;
+ display: inline-block;
+}
diff --git a/google_appengine/google/appengine/ext/admin/templates/css/base.css b/google_appengine/google/appengine/ext/admin/templates/css/base.css
new file mode 100755
index 0000000..e326283
--- /dev/null
+++ b/google_appengine/google/appengine/ext/admin/templates/css/base.css
@@ -0,0 +1,2 @@
+/* Copyright 2008 Google, Inc. All Rights Reserved */
+html,body,div,h1,h2,h3,h4,h5,h6,p,img,dl,dt,dd,ol,ul,li,table,caption,tbody,tfoot,thead,tr,th,td,form,fieldset,embed,object,applet{margin:0;padding:0;border:0}body{font-size:62.5%;font-family:Arial,sans-serif;color:#000;background:#fff}a{color:#00c}a:active{color:#f00}a:visited{color:#551a8b}table{border-collapse:collapse;border-width:0;empty-cells:show}ul{padding:0 0 1em 1em}ol{padding:0 0 1em 1.3em}li{line-height:1.5em;padding:0 0 .5em 0}p{padding:0 0 1em 0}h1,h2,h3,h4,h5{padding:0 0 1em 0}h1,h2{font-size:1.3em}h3{font-size:1.1em}h4,h5,table{font-size:1em}sup,sub{font-size:.7em}input,select,textarea,option{font-family:inherit;font-size:inherit}.g-doc,.g-doc-1024,.g-doc-800{font-size:130%}.g-doc{width:100%;text-align:left}.g-section:after{content:".";display:block;height:0;clear:both;visibility:hidden}.g-unit .g-section:after{clear:none}.g-unit .g-section{width:100%;overflow:hidden}.g-section,.g-unit{zoom:1}.g-split .g-unit{text-align:right}.g-split .g-first{text-align:left}.g-tpl-25-75 .g-unit,.g-unit .g-tpl-25-75 .g-unit,.g-unit .g-unit .g-tpl-25-75 .g-unit,.g-unit .g-unit .g-unit .g-tpl-25-75 .g-unit{width:74.999%;float:right;display:inline;margin:0}.g-unit .g-unit .g-unit .g-tpl-25-75 .g-first,.g-unit .g-unit .g-tpl-25-75 .g-first,.g-unit .g-tpl-25-75 .g-first,.g-tpl-25-75 .g-first{width:24.999%;float:left;display:inline;margin:0}.g-tpl-25-75-alt .g-unit,.g-unit .g-tpl-25-75-alt .g-unit,.g-unit .g-unit .g-tpl-25-75-alt .g-unit,.g-unit .g-unit .g-unit .g-tpl-25-75-alt .g-unit{width:24.999%;float:left;display:inline;margin:0}.g-unit .g-unit .g-unit .g-tpl-25-75-alt .g-first,.g-unit .g-unit .g-tpl-25-75-alt .g-first,.g-unit .g-tpl-25-75-alt .g-first,.g-tpl-25-75-alt .g-first{width:74.999%;float:right;display:inline;margin:0}.g-tpl-75-25 .g-unit,.g-unit .g-tpl-75-25 .g-unit,.g-unit .g-unit .g-tpl-75-25 .g-unit,.g-unit .g-unit .g-unit .g-tpl-75-25 .g-unit{width:24.999%;float:right;display:inline;margin:0}.g-unit .g-unit .g-unit .g-tpl-75-25 .g-first,.g-unit .g-unit .g-tpl-75-25 .g-first,.g-unit .g-tpl-75-25 .g-first,.g-tpl-75-25 .g-first{width:74.999%;float:left;display:inline;margin:0}.g-tpl-75-25-alt .g-unit,.g-unit .g-tpl-75-25-alt .g-unit,.g-unit .g-unit .g-tpl-75-25-alt .g-unit,.g-unit .g-unit .g-unit .g-tpl-75-25-alt .g-unit{width:74.999%;float:left;display:inline;margin:0}.g-unit .g-unit .g-unit .g-tpl-75-25-alt .g-first,.g-unit .g-unit .g-tpl-75-25-alt .g-first,.g-unit .g-tpl-75-25-alt .g-first,.g-tpl-75-25-alt .g-first{width:24.999%;float:right;display:inline;margin:0}.g-tpl-33-67 .g-unit,.g-unit .g-tpl-33-67 .g-unit,.g-unit .g-unit .g-tpl-33-67 .g-unit,.g-unit .g-unit .g-unit .g-tpl-33-67 .g-unit{width:66.999%;float:right;display:inline;margin:0}.g-unit .g-unit .g-unit .g-tpl-33-67 .g-first,.g-unit .g-unit .g-tpl-33-67 .g-first,.g-unit .g-tpl-33-67 .g-first,.g-tpl-33-67 .g-first{width:32.999%;float:left;display:inline;margin:0}.g-tpl-33-67-alt .g-unit,.g-unit .g-tpl-33-67-alt .g-unit,.g-unit .g-unit .g-tpl-33-67-alt .g-unit,.g-unit .g-unit .g-unit .g-tpl-33-67-alt .g-unit{width:32.999%;float:left;display:inline;margin:0}.g-unit .g-unit .g-unit .g-tpl-33-67-alt .g-first,.g-unit .g-unit .g-tpl-33-67-alt .g-first,.g-unit .g-tpl-33-67-alt .g-first,.g-tpl-33-67-alt .g-first{width:66.999%;float:right;display:inline;margin:0}.g-tpl-67-33 .g-unit,.g-unit .g-tpl-67-33 .g-unit,.g-unit .g-unit .g-tpl-67-33 .g-unit,.g-unit .g-unit .g-unit .g-tpl-67-33 .g-unit{width:32.999%;float:right;display:inline;margin:0}.g-unit .g-unit .g-unit .g-tpl-67-33 .g-first,.g-unit .g-unit .g-tpl-67-33 
.g-first,.g-unit .g-tpl-67-33 .g-first,.g-tpl-67-33 .g-first{width:66.999%;float:left;display:inline;margin:0}.g-tpl-67-33-alt .g-unit,.g-unit .g-tpl-67-33-alt .g-unit,.g-unit .g-unit .g-tpl-67-33-alt .g-unit,.g-unit .g-unit .g-unit .g-tpl-67-33-alt .g-unit{width:66.999%;float:left;display:inline;margin:0}.g-unit .g-unit .g-unit .g-tpl-67-33-alt .g-first,.g-unit .g-unit .g-tpl-67-33-alt .g-first,.g-unit .g-tpl-67-33-alt .g-first,.g-tpl-67-33-alt .g-first{width:32.999%;float:right;display:inline;margin:0}.g-tpl-50-50 .g-unit,.g-unit .g-tpl-50-50 .g-unit,.g-unit .g-unit .g-tpl-50-50 .g-unit,.g-unit .g-unit .g-unit .g-tpl-50-50 .g-unit{width:49.999%;float:right;display:inline;margin:0}.g-unit .g-unit .g-unit .g-tpl-50-50 .g-first,.g-unit .g-unit .g-tpl-50-50 .g-first,.g-unit .g-tpl-50-50 .g-first,.g-tpl-50-50 .g-first{width:49.999%;float:left;display:inline;margin:0}.g-tpl-50-50-alt .g-unit,.g-unit .g-tpl-50-50-alt .g-unit,.g-unit .g-unit .g-tpl-50-50-alt .g-unit,.g-unit .g-unit .g-unit .g-tpl-50-50-alt .g-unit{width:49.999%;float:left;display:inline;margin:0}.g-unit .g-unit .g-unit .g-tpl-50-50-alt .g-first,.g-unit .g-unit .g-tpl-50-50-alt .g-first,.g-unit .g-tpl-50-50-alt .g-first,.g-tpl-50-50-alt .g-first{width:49.999%;float:right;display:inline;margin:0}.g-tpl-nest .g-unit,.g-unit .g-tpl-nest .g-unit,.g-unit .g-unit .g-tpl-nest .g-unit,.g-unit .g-unit .g-unit .g-tpl-nest .g-unit{float:left;width:auto;display:inline;margin:0}.g-tpl-nest-alt .g-unit,.g-unit .g-tpl-nest-alt .g-unit,.g-unit .g-unit .g-tpl-nest-alt .g-unit,.g-unit .g-unit .g-unit .g-tpl-nest-alt .g-unit{float:right;width:auto;display:inline;margin:0}.g-doc-1024{width:73.074em;*width:71.313em;min-width:950px;margin:0 auto;text-align:left}.g-doc-800{width:57.69em;*width:56.3em;min-width:750px;margin:0 auto;text-align:left}.g-tpl-160 .g-unit,.g-unit .g-tpl-160 .g-unit,.g-unit .g-unit .g-tpl-160 .g-unit,.g-unit .g-unit .g-unit .g-tpl-160 .g-unit{display:block;margin:0 0 0 161px;width:auto;float:none}.g-unit .g-unit .g-unit .g-tpl-160 .g-first,.g-unit .g-unit .g-tpl-160 .g-first,.g-unit .g-tpl-160 .g-first,.g-tpl-160 .g-first{display:block;margin:0;width:161px;float:left}.g-tpl-160-alt .g-unit,.g-unit .g-tpl-160-alt .g-unit,.g-unit .g-unit .g-tpl-160-alt .g-unit,.g-unit .g-unit .g-unit .g-tpl-160-alt .g-unit{display:block;margin:0 161px 0 0;width:auto;float:none}.g-unit .g-unit .g-unit .g-tpl-160-alt .g-first,.g-unit .g-unit .g-tpl-160-alt .g-first,.g-unit .g-tpl-160-alt .g-first,.g-tpl-160-alt .g-first{display:block;margin:0;width:161px;float:right}.g-tpl-180 .g-unit,.g-unit .g-tpl-180 .g-unit,.g-unit .g-unit .g-tpl-180 .g-unit,.g-unit .g-unit .g-unit .g-tpl-180 .g-unit{display:block;margin:0 0 0 181px;width:auto;float:none}.g-unit .g-unit .g-unit .g-tpl-180 .g-first,.g-unit .g-unit .g-tpl-180 .g-first,.g-unit .g-tpl-180 .g-first,.g-tpl-180 .g-first{display:block;margin:0;width:181px;float:left}.g-tpl-180-alt .g-unit,.g-unit .g-tpl-180-alt .g-unit,.g-unit .g-unit .g-tpl-180-alt .g-unit,.g-unit .g-unit .g-unit .g-tpl-180-alt .g-unit{display:block;margin:0 181px 0 0;width:auto;float:none}.g-unit .g-unit .g-unit .g-tpl-180-alt .g-first,.g-unit .g-unit .g-tpl-180-alt .g-first,.g-unit .g-tpl-180-alt .g-first,.g-tpl-180-alt .g-first{display:block;margin:0;width:181px;float:right}.g-tpl-300 .g-unit,.g-unit .g-tpl-300 .g-unit,.g-unit .g-unit .g-tpl-300 .g-unit,.g-unit .g-unit .g-unit .g-tpl-300 .g-unit{display:block;margin:0 0 0 301px;width:auto;float:none}.g-unit .g-unit .g-unit .g-tpl-300 .g-first,.g-unit .g-unit .g-tpl-300 
.g-first,.g-unit .g-tpl-300 .g-first,.g-tpl-300 .g-first{display:block;margin:0;width:301px;float:left}.g-tpl-300-alt .g-unit,.g-unit .g-tpl-300-alt .g-unit,.g-unit .g-unit .g-tpl-300-alt .g-unit,.g-unit .g-unit .g-unit .g-tpl-300-alt .g-unit{display:block;margin:0 301px 0 0;width:auto;float:none}.g-unit .g-unit .g-unit .g-tpl-300-alt .g-first,.g-unit .g-unit .g-tpl-300-alt .g-first,.g-unit .g-tpl-300-alt .g-first,.g-tpl-300-alt .g-first{display:block;margin:0;width:301px;float:right} \ No newline at end of file
diff --git a/google_appengine/google/appengine/ext/admin/templates/css/cron.css b/google_appengine/google/appengine/ext/admin/templates/css/cron.css
new file mode 100644
index 0000000..679d358
--- /dev/null
+++ b/google_appengine/google/appengine/ext/admin/templates/css/cron.css
@@ -0,0 +1,26 @@
+.ah-cron-message {
+ color: red;
+ margin-bottom: 1em;
+}
+
+#ah-cron-jobs .ah-cron-message {
+ margin: 1em;
+}
+
+.ah-cron-times {
+ margin-top: 1em;
+}
+#ah-cron-jobs .ae-table,
+#ah-cron-jobs .ae-table td {
+ border: 0;
+ padding: 0;
+}
+#ah-cron-jobs ol {
+ list-style: none;
+}
+#ah-cron-jobs li {
+ padding: .2em 0;
+}
+.ah-cron-test {
+ text-align: right;
+}
diff --git a/google_appengine/google/appengine/ext/admin/templates/css/datastore.css b/google_appengine/google/appengine/ext/admin/templates/css/datastore.css
new file mode 100644
index 0000000..f2f9f1d
--- /dev/null
+++ b/google_appengine/google/appengine/ext/admin/templates/css/datastore.css
@@ -0,0 +1,71 @@
+#datastore_search {
+ margin-bottom: 1em;
+}
+
+#hint {
+ background-color: #F6F9FF;
+ border: 1px solid #E5ECF9;
+ margin-bottom: 1em;
+ padding: 0.5em 1em;
+}
+
+#message {
+ color: red;
+ position: relative;
+ bottom: 6px;
+}
+
+#pagetotal {
+ float: right;
+}
+
+#pagetotal .count {
+ font-weight: bold;
+}
+
+table.entities {
+ border: 1px solid #c5d7ef;
+ border-collapse: collapse;
+ width: 100%;
+ margin-bottom: 0;
+}
+
+table.entities th, table.entities td {
+ padding: .25em 1.5em .5em .5em;
+}
+
+table.entities th {
+ font-weight: bold;
+ text-align: left;
+ background: #e5ecf9;
+ white-space: nowrap;
+}
+
+table.entities th a, table.entities th a:visited {
+ color: black;
+ text-decoration: none;
+}
+
+table.entities td {
+ background-color: #fff;
+ text-align: left;
+ vertical-align: top;
+ cursor: pointer;
+}
+
+table.entities tr.even td {
+ background-color: #f9f9f9;
+}
+
+div.entities {
+ background-color: #c5d7ef;
+ margin-top: 0;
+}
+
+#entities-pager, #entities-control {
+ padding: .3em 1em .4em 1em;
+}
+
+#entities-pager {
+ text-align: right;
+}
diff --git a/google_appengine/google/appengine/ext/admin/templates/css/form.css b/google_appengine/google/appengine/ext/admin/templates/css/form.css
new file mode 100644
index 0000000..0f8e2e0
--- /dev/null
+++ b/google_appengine/google/appengine/ext/admin/templates/css/form.css
@@ -0,0 +1,20 @@
+table.form {
+ border-collapse: collapse;
+}
+
+table.form td.name, table.form td.value, table.form td.buttons {
+ border: 0;
+ padding: 7px;
+ padding-left: 0;
+ vertical-align: top;
+}
+
+table.form td.name {
+ font-weight: bold;
+ padding-top: 9px;
+ padding-right: 14px;
+}
+
+table.form td.buttons {
+ padding-top: 12px;
+}
diff --git a/google_appengine/google/appengine/ext/admin/templates/css/inboundmail.css b/google_appengine/google/appengine/ext/admin/templates/css/inboundmail.css
new file mode 100644
index 0000000..7318a4e
--- /dev/null
+++ b/google_appengine/google/appengine/ext/admin/templates/css/inboundmail.css
@@ -0,0 +1,19 @@
+#inboundmail label {
+ display: block;
+ font-weight: bold;
+}
+#inboundmail legend {
+ font-weight: bold;
+}
+#inboundmail .radio label {
+ display: inline;
+ font-weight: normal;
+}
+
+#inboundmail fieldset,
+#inboundmail .fieldset {
+ margin-bottom: 8px;
+}
+#inboundmail-submit {
+ margin-top: 2em;
+}
diff --git a/google_appengine/google/appengine/ext/admin/templates/css/memcache.css b/google_appengine/google/appengine/ext/admin/templates/css/memcache.css
new file mode 100644
index 0000000..729c871
--- /dev/null
+++ b/google_appengine/google/appengine/ext/admin/templates/css/memcache.css
@@ -0,0 +1,54 @@
+.message {
+ color: red;
+ margin-bottom: 1em;
+}
+
+#flush_form {
+ display: inline;
+ margin-left: 2em;
+}
+
+#memcache_search {
+ margin-bottom: 2em;
+}
+
+#value_display {
+ border: 1px solid #c5d7ef;
+}
+
+#value_display_key {
+ text-align: left;
+ padding: 1ex;
+ background: #e5ecf9;
+}
+
+#value_display_value {
+ height: 20em;
+ margin: 0;
+ padding: 1ex;
+ background: #f9f9f9;
+ font-family: monospace;
+ overflow: auto;
+ white-space: -moz-pre-wrap;
+ white-space: -pre-wrap;
+ white-space: -o-pre-wrap;
+ white-space: pre-wrap;
+ word-wrap: break-word;
+}
+
+#memcache_edit th {
+ font-weight: bold;
+ padding: 2ex 3ex 0 0;
+}
+
+#memcache_edit td {
+ padding: 2ex 0 0 0;
+}
+
+#memcache_edit th#value_key {
+ vertical-align: top;
+}
+
+#memcache_edit div#value_key_text {
+ padding-top: 3px;
+}
diff --git a/google_appengine/google/appengine/ext/admin/templates/css/nav.css b/google_appengine/google/appengine/ext/admin/templates/css/nav.css
new file mode 100755
index 0000000..6a3cb39
--- /dev/null
+++ b/google_appengine/google/appengine/ext/admin/templates/css/nav.css
@@ -0,0 +1,88 @@
+#ae-nav ul {
+ list-style-type: none;
+ margin: 0;
+ padding: 1em 0;
+}
+#ae-nav ul li {
+ padding-left: .5em;
+}
+
+#ae-nav .ae-nav-selected {
+ color: #000;
+ display: block;
+ font-weight: bold;
+ background-color: #e5ecf9;
+ border-top-left-radius: 4px;
+ -moz-border-radius-topleft: 4px;
+ -webkit-border-top-left-radius: 4px;
+ border-bottom-left-radius: 4px;
+ -moz-border-radius-bottomleft: 4px;
+ -webkit-border-bottom-left-radius: 4px;
+}
+
+a.ae-nav-selected {
+ color: #000;
+ text-decoration:none;
+}
+
+/* aka disabled items */
+#ae-nav ul li span.ae-nav-disabled {
+ color: #666;
+}
+
+/* Sub-navigation rules */
+#ae-nav ul ul {
+ margin: 0;
+ padding: 0 0 0 .5em;
+}
+#ae-nav ul ul li {
+ padding-left: .5em;
+}
+#ae-nav ul li a,
+#ae-nav ul li span,
+#ae-nav ul ul li a {
+ padding-left: .5em;
+}
+
+/* ae-nav Link Selectors */
+#ae-nav li a:link,
+#ae-nav li a:visited {
+ color: #00c;
+}
+#ae-nav li a:link.ae-nav-selected,
+#ae-nav li a:visited.ae-nav-selected {
+ color: #000;
+ text-decoration: none;
+}
+
+/* Group of boxed help links */
+.ae-nav-group {
+ padding: .5em;
+ margin: 0 .75em 0 0;
+ background-color: #fffbe8;
+ border: 1px solid #fff1a9;
+}
+.ae-nav-group h4 {
+ font-weight: bold;
+  padding: 0 0 0 .4em;
+  margin-bottom: .5em;
+}
+.ae-nav-group ul {
+ margin: 0 0 .5em 0;
+ padding: 0 0 0 1.3em;
+ list-style-type: none;
+}
+.ae-nav-group ul li {
+ padding-bottom: .5em;
+}
+
+/* ae-nav-group link Selectors */
+.ae-nav-group li a:link,
+.ae-nav-group li a:visited {
+ color: #00c;
+}
+.ae-nav-group li a:hover {
+ color: #00c;
+} \ No newline at end of file
diff --git a/google_appengine/google/appengine/ext/admin/templates/css/pager.css b/google_appengine/google/appengine/ext/admin/templates/css/pager.css
new file mode 100644
index 0000000..393ce46
--- /dev/null
+++ b/google_appengine/google/appengine/ext/admin/templates/css/pager.css
@@ -0,0 +1,7 @@
+.ae-page-number {
+ margin: 0 0.5em;
+}
+
+.ae-page-selected {
+ font-weight: bold;
+}
diff --git a/google_appengine/google/appengine/ext/admin/templates/css/queues.css b/google_appengine/google/appengine/ext/admin/templates/css/queues.css
new file mode 100644
index 0000000..35bf035
--- /dev/null
+++ b/google_appengine/google/appengine/ext/admin/templates/css/queues.css
@@ -0,0 +1,26 @@
+.ah-queues-message {
+ color: red;
+ margin-bottom: 1em;
+}
+
+#ah-queues .ah-queues-message {
+ margin: 1em;
+}
+
+.ah-queues-times {
+ margin-top: 1em;
+}
+#ah-queues .ae-table,
+#ah-queues .ae-table td {
+ border: 0;
+ padding: 0;
+}
+#ah-queues ol {
+ list-style: none;
+}
+#ah-queues li {
+ padding: .2em 0;
+}
+.ah-queues-test {
+ text-align: right;
+}
diff --git a/google_appengine/google/appengine/ext/admin/templates/css/tasks.css b/google_appengine/google/appengine/ext/admin/templates/css/tasks.css
new file mode 100644
index 0000000..811f728
--- /dev/null
+++ b/google_appengine/google/appengine/ext/admin/templates/css/tasks.css
@@ -0,0 +1,26 @@
+.ah-tasks-message {
+ color: red;
+ margin-bottom: 1em;
+}
+
+#ah-tasks .ah-tasks-message {
+ margin: 1em;
+}
+
+.ah-task-times {
+ margin-top: 1em;
+}
+#ah-tasks .ae-table,
+#ah-tasks .ae-table td {
+ border: 0;
+ padding: 0;
+}
+#ah-tasks ol {
+ list-style: none;
+}
+#ah-tasks li {
+ padding: .2em 0;
+}
+.ah-task-test {
+ text-align: right;
+}
diff --git a/google_appengine/google/appengine/ext/admin/templates/css/xmpp.css b/google_appengine/google/appengine/ext/admin/templates/css/xmpp.css
new file mode 100644
index 0000000..94f6647
--- /dev/null
+++ b/google_appengine/google/appengine/ext/admin/templates/css/xmpp.css
@@ -0,0 +1,19 @@
+#xmpp label {
+ display: block;
+ font-weight: bold;
+}
+#xmpp legend {
+ font-weight: bold;
+}
+#xmpp .radio label {
+ display: inline;
+ font-weight: normal;
+}
+
+#xmpp fieldset,
+#xmpp .fieldset {
+ margin-bottom: 8px;
+}
+#xmpp-submit {
+ margin-top: 2em;
+}
diff --git a/google_appengine/google/appengine/ext/admin/templates/datastore.html b/google_appengine/google/appengine/ext/admin/templates/datastore.html
new file mode 100644
index 0000000..4ef5ef2
--- /dev/null
+++ b/google_appengine/google/appengine/ext/admin/templates/datastore.html
@@ -0,0 +1,183 @@
+{% extends "base.html" %}
+
+{% block title %}{{ application_name }} Development Console - Datastore Viewer{% endblock %}
+
+{% block head %}
+ <style type="text/css">{% include "css/datastore.css" %}</style>
+ <style type="text/css">{% include "css/pager.css" %}</style>
+ <script type="text/javascript">
+ //<![CDATA[
+
+ {% if in_production %}
+ function manageCreateButton() {
+ var input = document.getElementById("kind_input");
+ var button = document.getElementById("create_button");
+ if (input && button) {
+ if (input.value.length == 0) {
+ button.disabled = true;
+ } else {
+ button.disabled = false;
+ }
+ }
+ }
+ {% endif %}
+
+ {% if entities %}
+ function checkAllEntities() {
+ var allCheckBox = document.getElementById("allkeys");
+ var check = allCheckBox.checked;
+ for (var i = 1; i <= {{ entities|length }}; i++) {
+ var box = document.getElementById("key" + i);
+ if (box)
+ box.checked = check;
+ }
+ updateDeleteButtonAndCheckbox();
+ }
+
+ function updateDeleteButtonAndCheckbox() {
+ var button = document.getElementById("delete_button");
+ var uncheck = false;
+ var disable = true;
+ for (var i = 1; i <= {{ entities|length }}; i++) {
+ var box = document.getElementById("key" + i);
+ if (box) {
+ if (box.checked) {
+ disable = false;
+ } else {
+ uncheck = true;
+ }
+ }
+ }
+ button.disabled = disable;
+ if (uncheck)
+ document.getElementById("allkeys").checked = false;
+ }
+ {% endif %}
+
+ //]]>
+ </script>
+{% endblock %}
+
+{% block body %}
+ <h3>Datastore Viewer</h3>
+
+ {% if in_production %}
+ <div id="hint">
+ The <a href="http://appengine.google.com/datastore/explorer?&app_id={{ application_name }}">Admin Console Data Viewer</a>
+ allows you to run GQL queries and much more!
+ </div>
+ {% endif %}
+
+ {% if message %}
+ <div id="message">
+ {{ message }}
+ </div>
+ {% endif %}
+
+ {% if entities %}
+ <div id="pagetotal">
+ Results <span class="count">{{ start|add:1 }}</span> - <span class="count">{{ entities|length|add:start }}</span> of <span class="count">{{ total }}</span>
+ </div>
+ {% endif %}
+
+ {% if kinds or in_production %}
+ <form action="{{ request.path }}" method="get">
+ <div id="datastore_search">
+ <span class="field">
+ <span class="name">Entity Kind:</span>
+ <span class="value">
+ {% if in_production %}
+ <input id="kind_input" name="kind" type="text" size="8" value="{{ kind|escape }}" onkeyup="manageCreateButton()" onkeydown="manageCreateButton()"/>
+ {% else %}
+ <select name="kind" id="kind_input">
+ {% for a_kind in kinds %}
+ <option value="{{ a_kind|escape }}"{% ifequal a_kind kind %} selected="selected"{% endifequal %}>{{ a_kind|escape }}</option>
+ {% endfor %}
+ </select>
+ {% endif %}
+ </span>
+ </span>
+ <span class="buttons">
+ <input type="submit" value="List Entities"/>
+ <input type="button" id="create_button" onclick="location.href='{{ datastore_edit_path }}?kind=' + encodeURIComponent(document.getElementById('kind_input').value) + '&amp;next={{ request.uri|urlencode }}'" value="Create New Entity"/>
+ </span>
+ </div>
+ </form>
+ {% else %}
+ <div id="datastore_empty">
+    The datastore is empty. You need to add data programmatically before you can use this tool to view and edit it.
+ </div>
+ {% endif %}
+
+ {% if entities %}
+ <form action="{{ datastore_batch_edit_path }}" method="post">
+ <input type="hidden" name="kind" value="{{ kind|escape }}"/>
+ <input type="hidden" name="numkeys" value="{{ entities|length }}"/>
+ <input type="hidden" name="next" value="{{ start_base_url }}"/>
+ <input type="hidden" name="action" value="Delete"/>
+ <table class="entities">
+ <thead>
+ <tr>
+ <th><input id="allkeys" type="checkbox" onclick="checkAllEntities();"/></th>
+ <th>Key</th>
+ <th>ID</th>
+ <th>Key Name</th>
+ {% for header in headers %}
+ <th style="cursor: pointer" onclick="document.location.href='{{ order_base_url }}&amp;order={% ifequal order header.name %}-{% endifequal %}{{ header.name|urlencode }}&amp;order_type={{ header.type|urlencode }}'"><a href="{{ order_base_url }}&amp;order={% ifequal order header.name %}-{% endifequal %}{{ header.name|urlencode }}&amp;order_type={{ header.type|urlencode }}" onclick="return false">{{ header.name }}</a></th>
+ {% endfor %}
+        </tr>
+      </thead>
+      <tbody>
+      {% for entity in entities %}
+ <tr class="{% if forloop.counter|divisibleby:2 %}even{% else %}odd{% endif %}">
+ <td><input id="key{{ forloop.counter }}" type="checkbox" name="key{{ forloop.counter }}" value="{{ entity.key|escape }}" onclick="updateDeleteButtonAndCheckbox();"/></td>
+ <td onclick="location.href='{{ entity.edit_uri|escape }}'"><a href="{{ entity.edit_uri|escape }}" title="Edit entity #{{ entity.key|escape }}" onclick="return false">{{ entity.shortened_key|escape }}</a></td>
+ <td>
+ {% if entity.key_id %}
+ {{entity.key_id}}
+ {% endif %}
+ </td>
+ <td>
+ {% if entity.key_name %}
+ {{entity.key_name}}
+ {% endif %}
+ </td>
+ {% for attribute in entity.attributes %}
+ <td onclick="location.href='{{ entity.edit_uri|escape }}&amp;focus={{ attribute.name|urlencode }}'">{{ attribute.short_value|truncatewords:20|escape }}</td>
+ {% endfor %}
+ </tr>
+      {% endfor %}
+      </tbody>
+    </table>
+ <div class="entities g-section g-tpl-50-50">
+ <div class="g-unit g-first">
+ <div id="entities-control"><input id="delete_button" type="submit" value="Delete" onclick="return confirm('Are you sure you wish to delete these entities?')" /></div>
+ </div>
+ <div class="g-unit">
+ <div id="entities-pager">
+ {% if pages %}
+ {% include "pager.html" %}
+ {% endif %}
+ </div>
+ </div>
+ </div>
+ </form>
+ {% else %}
+ {% if kind %}
+ <p style="font-size: medium">Sorry, there are no entities of kind &quot;{{ kind|escape }}&quot; in your datastore.</p>
+ {% endif %}
+ {% endif %}
+{% endblock %}
+
+{% block final %}
+ <script type="text/javascript">
+ //<![CDATA[
+
+ {% if in_production %}
+ manageCreateButton();
+ {% endif %}
+  {% if entities %}
+  updateDeleteButtonAndCheckbox();
+  {% endif %}
+  var kindInput = document.getElementById("kind_input");
+  if (kindInput) kindInput.focus();
+
+ //]]>
+ </script>
+{% endblock %}
+
+
diff --git a/google_appengine/google/appengine/ext/admin/templates/datastore_edit.html b/google_appengine/google/appengine/ext/admin/templates/datastore_edit.html
new file mode 100644
index 0000000..a441218
--- /dev/null
+++ b/google_appengine/google/appengine/ext/admin/templates/datastore_edit.html
@@ -0,0 +1,162 @@
+{% extends "base.html" %}
+
+{% block title %}{{ application_name }} Development Console - Datastore Viewer - {% if key %}Edit Entity{% else %}New Entity{% endif %}{% endblock %}
+
+{% block head %}
+ <style type="text/css">{% include "css/form.css" %}</style>
+ <style type="text/css">
+
+ .field_type {
+ color: gray;
+ font-weight: normal;
+ }
+
+ </style>
+ <script type="text/javascript">
+
+ function load() {
+ var elements = document.getElementsByTagName("input");
+ for (var i = 0; i < elements.length; i++) {
+ var element = elements[i];
+ var hint = null;
+ if (element.className == "time") {
+ hint = "e.g., 2006-30-05 23:56:04";
+ }
+ if (hint) registerHint(element, hint);
+ }
+ }
+
+ function registerHint(element, hint) {
+ function showDefault() {
+ if (element.value.length == 0 || element.value == hint) {
+ element.style.color = "gray";
+ element.value = hint;
+ }
+ }
+ function clearDefault() {
+ if (element.style.color == "gray" || element.value == hint) {
+ element.value = "";
+ element.style.color = "black";
+ }
+ }
+ element.onblur = showDefault;
+ element.onfocus = clearDefault;
+ showDefault();
+ }
+
+ function clearHints(form) {
+ var elements = form.getElementsByTagName("input");
+ for (var i = 0; i < elements.length; i++) {
+ var element = elements[i];
+ if (element.type == "text" && element.style.color == "gray") {
+ element.onblur = null;
+ element.onfocus = null;
+ element.value = "";
+ }
+ }
+ return true;
+ }
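+
+  // Hint lifecycle sketch: load() tags empty "time" inputs with a gray
+  // example value, focusing an input clears it, blurring restores it, and
+  // clearHints() strips any untouched gray hints on submit so placeholder
+  // text is never posted as a real value.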
+
+ </script>
+{% endblock %}
+
+{% block bodyattributes %}onload="load()"{% endblock %}
+
+{% block body %}
+ <h3>{% if key %}Edit Entity{% else %}New Entity{% endif %}</h3>
+
+ <form action="{{ request.path }}" method="post" onsubmit="return clearHints(this)">
+ <div><input type="hidden" name="next" value="{{ next }}"/></div>
+ <table class="form">
+ <tr>
+ <td class="name">Entity Kind</td>
+ <td class="value text">
+ {{ kind }}
+ <input type="hidden" name="kind" value="{{ kind }}"/>
+ </td>
+ </tr>
+ {% if key %}
+ <tr>
+ <td class="name">Entity Key</td>
+ <td class="value text">
+ {{ key }}
+ <input type="hidden" name="key" value="{{ key }}"/>
+ </td>
+ </tr>
+ {% endif %}
+ {% if key_name %}
+ <tr>
+ <td class="name">Key Name</td>
+ <td class="value text">
+ {{ key_name }}
+ </td>
+ </tr>
+ {% endif %}
+ {% if key_id %}
+ <tr>
+ <td class="name">ID</td>
+ <td class="value text">
+ {{ key_id }}
+ </td>
+ </tr>
+ {% endif %}
+ {% if parent_key %}
+ <tr>
+ <td class="name">Parent</td>
+ <td class="value text">
+ <a href="?key={{parent_key}}&kind={{parent_kind}}">{{ parent_key }}</a>
+ </td>
+ </tr>
+ {% endif %}
+ {% for field in fields %}
+ <tr>
+ <td class="name">
+ <span class="field_name">{{ field.0|escape }}</span>
+ <span class="field_type">({{ field.1|escape }})</span>
+ </td>
+ <td class="value"><div style="position: relative">{{ field.2|safe }}</div></td>
+ </tr>
+ {% endfor %}
+ <tr>
+ <td></td>
+ <td class="buttons">
+ <input type="submit" value="Save Changes"/>
+ {% if key %}
+ <input type="submit" name="action" value="Delete" onclick="return confirm('Are you sure you want to permanently delete this entity?');"/>
+ {% endif %}
+ </td>
+ </tr>
+ </table>
+ </form>
+
+ <div id="datepicker"></div>
+{% endblock %}
+
+{% block final %}
+<script type="text/javascript">
+//<![CDATA[
+
+// Sets the focus on the field with the given name in the given array (if any)
+function setFocus(fields, fieldName) {
+  for (var i = 0; i < fields.length; i++) {
+    var field = fields[i];
+    var name = field.name;
+    if (field.focus && name.length > fieldName.length &&
+        name.substring(name.length - fieldName.length - 1) == '|' + fieldName) {
+      field.focus();
+      break;
+    }
+  }
+}
+
+// Focus on the appropriate field in the form based on the "focus" argument
+// in the URL
+var focus = "{{ focus|addslashes }}";
+if (focus) {
+ setFocus(document.getElementsByTagName("input"), focus);
+ setFocus(document.getElementsByTagName("textarea"), focus);
+}
+
+//]]>
+</script>
+{% endblock %}
diff --git a/google_appengine/google/appengine/ext/admin/templates/images/google.gif b/google_appengine/google/appengine/ext/admin/templates/images/google.gif
new file mode 100755
index 0000000..5e9d2f3
--- /dev/null
+++ b/google_appengine/google/appengine/ext/admin/templates/images/google.gif
Binary files differ
diff --git a/google_appengine/google/appengine/ext/admin/templates/inboundmail.html b/google_appengine/google/appengine/ext/admin/templates/inboundmail.html
new file mode 100644
index 0000000..44cb389
--- /dev/null
+++ b/google_appengine/google/appengine/ext/admin/templates/inboundmail.html
@@ -0,0 +1,158 @@
+{% extends "base.html" %}
+
+{% block title %}{{ application_name }} Development Console - Inbound Mail{% endblock %}
+
+{% block breadcrumbs %}
+ <span class="item"><a href="">Email</a></span>
+{% endblock %}
+
+{% block head %}
+
+ <style type="text/css">{% include "css/inboundmail.css" %}</style>
+ <script type="text/javascript">
+ {% include "js/webhook.js" %}
+ {% include "js/multipart_form_data.js" %}
+ {% include "js/rfc822_date.js" %}
+
+ var feedbackEl;
+ var formEl;
+ var payloadEl;
+ var fromEl;
+ var toEl;
+ var ccEl;
+ var subjectEl;
+ var bodyEl;
+ var contentLengthEl;
+ //var contentTypeEl;
+
+ var sendInboundMailWebhook = function() {
+
+ if (!feedbackEl) {
+ feedbackEl = document.getElementById('feedback');
+ formEl = document.getElementById('inboundmail-form');
+ fromEl = document.getElementById('from');
+ toEl = document.getElementById('to');
+ ccEl = document.getElementById('cc');
+ subjectEl = document.getElementById('subject');
+ bodyEl = document.getElementById('body');
+ payloadEl = document.getElementById('payload');
+ contentLengthEl = document.getElementById('content-length');
+ }
+
+ var from = fromEl.value;
+ var to = toEl.value;
+ var cc = ccEl.value;
+ var subject = subjectEl.value;
+ var body = bodyEl.value;
+
+ if (!to || !from || !body) {
+ feedbackEl.className = 'ae-errorbox';
+      feedbackEl.innerHTML = 'From, To, and Message body are required.';
+ return;
+ }
+
+ feedbackEl.className = 'ae-message';
+ feedbackEl.innerHTML = 'Sending mail message...';
+
+ var mpfd = new MultipartFormData();
+ mpfd.addHeader('MIME-Version', '1.0');
+ mpfd.addHeader('Date', RFC822Date.format(new Date()));
+ mpfd.addHeader('From', from);
+ mpfd.addHeader('To', to);
+ if (cc) {
+ mpfd.addHeader('Cc', cc);
+ }
+ mpfd.addHeader('Subject', subject);
+ mpfd.addHeader('Content-Type', 'multipart/alternative; ' +
+ 'boundary=' + mpfd.boundary);
+ mpfd.addPart(null, body, 'text/plain; charset=UTF-8');
+ mpfd.addPart(null, body, 'text/html; charset=UTF-8');
+
+ payloadEl.value = mpfd.toString();
+
+    contentLengthEl.value = payloadEl.value.length;
+
+ formEl.action = '/_ah/mail/' + escape(to);
+
+ (new Webhook('inboundmail-form')).run(handleInboundMailResult);
+
+ // Prevents actual form posts.
+ return false;
+ };
+
+ var handleInboundMailResult = function(hook, req, error) {
+ if (error != null || req == null || req.status != 200) {
+ feedbackEl.className = 'ae-errorbox';
+      feedbackEl.innerHTML = 'Message send failure<br>' +
+          (req ? req.responseText : error);
+ } else {
+ var timestamp;
+ var dateString = new Date().toString();
+ var match = dateString.match(/(\d\d:\d\d:\d\d).+\((.+)\)/);
+ if (!match || !match[0] || !match[2]) {
+ timestamp = dateString;
+ } else {
+ timestamp = match[1] + ' ' + match[2];
+ }
+
+ feedbackEl.className = 'ae-message';
+ feedbackEl.innerHTML = 'Message has been sent at ' + timestamp;
+ }
+ };
+
+ </script>
+{% endblock %}
+
+{% block body %}
+<div id="inboundmail">
+ <h3>Email</h3>
+ {% if inboundmail_configured %}{% else %}
+ <div class="ae-errorbox">
+    Inbound mail is not yet configured in the inbound_services section of your app.yaml.
+ </div>
+ {% endif %}
+ <div id="feedback"></div>
+ <form id="inboundmail-form"
+ action="/_ah/mail/" method="post"
+ onsubmit="sendInboundMailWebhook(); return false">
+
+ <input type="hidden" name="payload" id="payload">
+ <input type="hidden" id="content-type" name="header:Content-Type" value="message/rfc822">
+ <input type="hidden" id="content-length" name="header:Content-Length">
+
+ <div class="fieldset">
+ <label for="from">From:</label>
+ <input type="text" id="from" name="from" size="40">
+ </div>
+
+ <div class="fieldset">
+ <label for="to">To:</label>
+ <input type="text" id="to" name="to" size="40">
+ </div>
+
+ <div class="fieldset">
+ <label for="cc">Cc:</label>
+ <input type="text" id="cc" name="cc" size="40">
+ </div>
+
+ <div class="fieldset">
+ <label for="subject">Subject:</label>
+ <input type="text" id="subject" name="subject" size="40">
+ </div>
+
+ <div id="body-c" class="fieldset">
+ <label for="body">Message body (plain text):</label>
+ <textarea id="body" name="body" rows="10" cols="50"></textarea>
+ </div>
+
+ <div id="inboundmail-submit">
+ <input type="submit" value="Send Email">
+ </div>
+
+ </form>
+</div>
+
+{% endblock %}
+
+{% block final %}
+{% endblock %}
diff --git a/google_appengine/google/appengine/ext/admin/templates/interactive-output.html b/google_appengine/google/appengine/ext/admin/templates/interactive-output.html
new file mode 100644
index 0000000..8ecdc7b
--- /dev/null
+++ b/google_appengine/google/appengine/ext/admin/templates/interactive-output.html
@@ -0,0 +1,36 @@
+<!DOCTYPE html PUBLIC "-//W3C//DTD XHTML 1.0 Strict//EN" "http://www.w3.org/TR/xhtml1/DTD/xhtml1-strict.dtd">
+<html xmlns="http://www.w3.org/1999/xhtml">
+<head>
+<meta http-equiv="content-type" content="text/html; charset=utf-8"/>
+<title>Results</title>
+<style type="text/css">
+body {
+ margin: 0;
+ padding: 0;
+ position: absolute;
+ top: 0;
+ bottom: 0;
+ left: 0;
+ right: 0;
+ background-color: #f5f5f5;
+}
+#output {
+ font-family: monospace;
+ font-size: 10pt;
+ margin: 0;
+ padding: 0;
+ height: 100%;
+ width: 100%;
+ overflow: auto;
+ white-space: -moz-pre-wrap;
+ white-space: -pre-wrap;
+ white-space: -o-pre-wrap;
+ white-space: pre-wrap;
+ word-wrap: break-word;
+}
+</style>
+</head>
+<body>
+<pre id="output">{{ output|escape }}</pre>
+</body>
+</html>
diff --git a/google_appengine/google/appengine/ext/admin/templates/interactive.html b/google_appengine/google/appengine/ext/admin/templates/interactive.html
new file mode 100644
index 0000000..78667e7
--- /dev/null
+++ b/google_appengine/google/appengine/ext/admin/templates/interactive.html
@@ -0,0 +1,104 @@
+{% extends "base.html" %}
+
+{% block title %}{{ application_name }} Development Console - Interactive Console{% endblock %}
+
+{% block breadcrumbs %}
+ <span class="item"><a href="">Interactive Console</a></span>
+{% endblock %}
+
+{% block head %}
+ <style type="text/css">
+
+ #console {
+ width: 100%;
+ border-collapse: collapse;
+ }
+
+ #console td {
+ width: 50%;
+ padding: 0;
+ border: 0;
+ vertical-align: top;
+ padding-right: 25px;
+ }
+
+ #code {
+ overflow: auto;
+ white-space: pre-wrap;
+ word-wrap: break-word;
+ }
+
+ #output {
+ border: 1px solid silver;
+ background-color: #f5f5f5;
+ overflow: auto;
+ }
+
+ #code, #output {
+ font-family: monospace;
+ font-size: 10pt;
+ height: 25em;
+ width: 100%;
+ padding: 0;
+ margin: 0;
+ }
+
+ #submitbutton {
+ text-align: center;
+ margin-top: 1em;
+ }
+ </style>
+{% endblock %}
+
+{% block body %}
+<h3>Interactive Console</h3>
+<form action="{{ interactive_execute_path }}" target="output" method="post">
+ <table id="console">
+ <tr>
+ <td>
+ <textarea id="code" name="code" wrap="off" rows="20" cols="80">from google.appengine.api import users
+
+# Say hello to the current user
+user = users.get_current_user()
+if user:
+ nickname = user.nickname()
+else:
+ nickname = "guest"
+print "Hello, " + nickname
+
+</textarea>
+ </td>
+ <td>
+ <iframe name="output" id="output"></iframe>
+ </td>
+ </tr>
+ <tr>
+ <td>
+ <div id="submitbutton"><input type="submit" value="Run Program"/></div>
+ </td>
+ </tr>
+ </table>
+</form>
+{% endblock %}
+
+{% block final %}
+<script type="text/javascript">
+//<![CDATA[
+var iframe = document.getElementById('output');
+var idoc = null;
+if (iframe.contentDocument) {
+ // DOM
+ idoc = iframe.contentDocument;
+} else if (iframe.contentWindow) {
+ // IE
+ idoc = iframe.contentWindow.document;
+}
+if (idoc) {
+ idoc.open();
+ idoc.write('<html><body style="background-color:#f5f5f5;margin:0;padding:0"><pre style="margin:0;padding:0;color:#888">Press "Run Program" to see the<br/>output of your code in this frame!</pre></body></html>');
+ idoc.close();
+}
+document.getElementById('code').focus();
+//]]>
+</script>
+{% endblock %}
diff --git a/google_appengine/google/appengine/ext/admin/templates/js/multipart_form_data.js b/google_appengine/google/appengine/ext/admin/templates/js/multipart_form_data.js
new file mode 100644
index 0000000..8b9706e
--- /dev/null
+++ b/google_appengine/google/appengine/ext/admin/templates/js/multipart_form_data.js
@@ -0,0 +1,125 @@
+// Copyright 2009 Google Inc. All Rights Reserved.
+
+/**
+ * A multipart form data construction class for XHR.
+ * @see http://www.w3.org/Protocols/rfc1341/7_2_Multipart.html
+ * @constructor
+ */
+var MultipartFormData = function() {
+ /**
+ * @type {Array}
+ */
+ this.headers = [];
+
+ /**
+ * @type {Array}
+ */
+ this.parts = [];
+
+ /**
+ * A random string for the boundary.
+ * @type {string}
+ */
+ this.boundary = MultipartFormData.getRandomBoundary();
+};
+
+
+/**
+ * @type {string}
+ */
+MultipartFormData.CRLF = '\r\n';
+
+
+/**
+ * @type {string}
+ * @private
+ */
+MultipartFormData.TEN_CHARS_ = 'DiStRIcT10';
+
+
+/**
+ * Generates a random number and some random characters from it.
+ */
+MultipartFormData.getRandomBoundary = function() {
+  var randomNumber = Math.floor(Math.random() * 10000000);
+  var nums = randomNumber.toString().split('');
+  var randomChars = '';
+  for (var i = 0, num; num = nums[i]; i++) {
+    randomChars += MultipartFormData.TEN_CHARS_[num];
+  }
+  return randomChars + '-' + randomNumber;
+};
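+
+
+// For example (hypothetical draw): a randomNumber of 1234567 maps each
+// digit to a character of TEN_CHARS_ ('DiStRIcT10'), yielding the
+// boundary 'iStRIcT-1234567'.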
+
+
+/**
+ * @param {string} name The name for this header.
+ * @param {string} value The value for this header.
+ */
+MultipartFormData.prototype.addHeader = function(name, value) {
+ this.headers.push({
+ 'name': name,
+ 'value': value
+ });
+};
+
+
+/**
+ * @param {?string} name The name for this part.
+ * @param {string} value The value for this part.
+ * @param {string} opt_contentType Content-type for this part.
+ * @param {string} opt_contentDisposition Content disposition for this part.
+ * @param {string} opt_filename The filename for this part.
+ */
+MultipartFormData.prototype.addPart = function(name, value, opt_contentType,
+ opt_contentDisposition, opt_filename) {
+ var contentType = opt_contentType || null;
+ var contentDisposition = opt_contentDisposition || null;
+ var filename = opt_filename || null;
+ this.parts.push({
+ 'name': name,
+ 'value': value,
+ 'contentType': contentType,
+ 'contentDisposition': contentDisposition,
+ 'filename': filename
+ });
+};
+
+/**
+ * @return {string} The string to set as a payload.
+ */
+MultipartFormData.prototype.toString = function() {
+ var lines = [];
+
+ for (var i = 0, header; header = this.headers[i]; i++) {
+ lines.push(header['name'] + ': ' + header['value']);
+ }
+ if (this.headers.length > 0) {
+ lines.push('');
+ }
+
+ for (var i = 0, part; part = this.parts[i]; i++) {
+ lines.push('--' + this.boundary);
+
+ if (part['contentDisposition']) {
+ var contentDisposition = 'Content-Disposition: form-data; ';
+ contentDisposition += 'name="' + part['name'] + '"';
+ if (part['filename']) {
+ contentDisposition += '; filename="' + part['filename'] + '"';
+ }
+ lines.push(contentDisposition);
+ }
+
+ if (part['contentType']) {
+ lines.push('Content-Type: ' + part['contentType']);
+ }
+
+ lines.push('');
+ lines.push(part['value']);
+ }
+
+ lines.push('--' + this.boundary + '--');
+
+ return lines.join(MultipartFormData.CRLF) + MultipartFormData.CRLF;
+};
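+
+// A usage sketch (hypothetical values) of the payload this class builds:
+//
+//   var mpfd = new MultipartFormData();
+//   mpfd.addHeader('Subject', 'Hi');
+//   mpfd.addPart(null, 'Hello', 'text/plain; charset=UTF-8');
+//   mpfd.toString();
+//
+// yields, with CRLF line endings:
+//
+//   Subject: Hi
+//
+//   --<boundary>
+//   Content-Type: text/plain; charset=UTF-8
+//
+//   Hello
+//   --<boundary>--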
+
diff --git a/google_appengine/google/appengine/ext/admin/templates/js/rfc822_date.js b/google_appengine/google/appengine/ext/admin/templates/js/rfc822_date.js
new file mode 100644
index 0000000..9037075
--- /dev/null
+++ b/google_appengine/google/appengine/ext/admin/templates/js/rfc822_date.js
@@ -0,0 +1,70 @@
+// Copyright 2009 Google Inc. All Rights Reserved.
+
+var RFC822Date = {};
+
+/**
+ * Returns a date/time string in RFC822 format.
+ * @see http://www.w3.org/Protocols/rfc822/#z28
+ * @param {Date} date A Date object.
+ * @param {string} opt_tzo The timezone offset string, e.g. '-0400'.
+ * @return {string} The formatted date string.
+ */
+RFC822Date.format = function(date, opt_tzo) {
+ var tzo = opt_tzo || RFC822Date.getTZO(date.getTimezoneOffset());
+ var rfc822Date = RFC822Date.DAYS[date.getDay()] + ', ';
+ rfc822Date += RFC822Date.padZero(date.getDate()) + ' ';
+ rfc822Date += RFC822Date.MONTHS[date.getMonth()] + ' ';
+ rfc822Date += date.getFullYear() + ' ';
+ rfc822Date += RFC822Date.padZero(date.getHours()) + ':';
+ rfc822Date += RFC822Date.padZero(date.getMinutes()) + ':';
+ rfc822Date += RFC822Date.padZero(date.getSeconds()) + ' ' ;
+ rfc822Date += tzo;
+ return rfc822Date;
+};
+
+
+/**
+ * @type {Array}
+ */
+RFC822Date.MONTHS = ['Jan', 'Feb', 'Mar', 'Apr', 'May', 'Jun',
+ 'Jul', 'Aug', 'Sep', 'Oct', 'Nov', 'Dec'];
+
+
+/**
+ * @type {Array}
+ */
+RFC822Date.DAYS = ['Sun', 'Mon', 'Tue', 'Wed', 'Thu', 'Fri', 'Sat'];
+
+
+/**
+ * Pads a value with a leading 0 if it is less than 10.
+ * @param {number|string} val The value to pad.
+ * @return {string} The zero-padded value.
+ */
+RFC822Date.padZero = function(val) {
+ val = val + ''; // cast into string
+ if (val.length < 2) {
+ val = '0' + val;
+ }
+ return val;
+};
+
+
+/**
+ * Returns a timezone offset in the format +|-dddd.
+ * @param {number} tzo A time zone offset from GMT in minutes.
+ * @return {string} The time zone offset as a string.
+ */
+RFC822Date.getTZO = function(tzo) {
+  // Truncate toward zero so half-hour zones east of GMT keep the right hour.
+  var hours = tzo > 0 ? Math.floor(tzo / 60) : Math.ceil(tzo / 60);
+ var tzoFormatted = hours > 0 ? '-' : '+';
+
+ var absoluteHours = Math.abs(hours);
+ tzoFormatted += absoluteHours < 10 ? '0' : '';
+ tzoFormatted += absoluteHours;
+
+ var moduloMinutes = Math.abs(tzo % 60);
+  tzoFormatted += moduloMinutes < 10 ? '0' + moduloMinutes : moduloMinutes;
+
+ return tzoFormatted;
+};
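+
+// For example, RFC822Date.format(new Date(2009, 9, 19, 20, 20, 9), '-0400')
+// returns 'Mon, 19 Oct 2009 20:20:09 -0400'; passing an explicit opt_tzo
+// keeps the result independent of the local zone.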
+
diff --git a/google_appengine/google/appengine/ext/admin/templates/js/webhook.js b/google_appengine/google/appengine/ext/admin/templates/js/webhook.js
new file mode 100644
index 0000000..7eb348e
--- /dev/null
+++ b/google_appengine/google/appengine/ext/admin/templates/js/webhook.js
@@ -0,0 +1,87 @@
+// Copyright 2009 Google Inc. All Rights Reserved.
+
+function Webhook(formId) {
+ this.formId = formId;
+ this.action = null;
+ this.headers = {};
+ this.method = null;
+ this.payload = null;
+};
+
+Webhook.prototype.HEADER_KEY = 'header:';
+
+Webhook.prototype.parse = function() {
+ var form = document.getElementById(this.formId);
+ if (form == null) {
+ return 'could not find form with id "' + this.formId + '"';
+ };
+ this.action = form.action;
+ this.method = form.method;
+ for (var i = 0, n = form.elements.length; i < n; i++) {
+ var currentElement = form.elements[i];
+ if (currentElement.tagName != 'INPUT' ||
+ currentElement.type.toUpperCase() != 'HIDDEN') {
+ continue;
+ }
+ var key = currentElement.name;
+ var value = currentElement.value;
+ var headerIndex = key.indexOf(this.HEADER_KEY);
+ if (headerIndex == 0) {
+ var header = key.substr(this.HEADER_KEY.length);
+ this.headers[header] = value;
+ } else if (key == 'payload') {
+ this.payload = value;
+ }
+ }
+
+ if (this.action == '') {
+ return 'action not found';
+ }
+ if (this.method == '') {
+ return 'method not found';
+ }
+ return '';
+};
+
+Webhook.prototype.send = function(callback) {
+ var req = null;
+ if (window.XMLHttpRequest) {
+ req = new XMLHttpRequest();
+ } else if (window.ActiveXObject) {
+ req = new ActiveXObject('MSXML2.XMLHTTP.3.0');
+ }
+
+ try {
+ req.open(this.method, this.action, false);
+ for (var key in this.headers) {
+ req.setRequestHeader(key, this.headers[key]);
+ };
+ req.send(this.payload);
+ } catch (e) {
+ callback(this, req, e);
+ return;
+ }
+
+  // If the responseText contains our /_ah/login form, the current user is
+  // not logged in as an Administrator, so fake a 403 response instead of
+  // surfacing the login page.
+ if (req.responseText.match(/<form[^>]+_ah\/login/)) {
+ var fakeReq = {
+ 'status': 403,
+ 'responseText': 'Current logged in user is not authorized ' +
+ 'to view this page'
+    };
+ fakeReq.getAllResponseHeaders = function(){};
+ callback(this, fakeReq, null);
+ } else {
+ callback(this, req, null);
+ }
+};
+
+Webhook.prototype.run = function(callback) {
+ var error = this.parse();
+ if (error != '') {
+ callback(this, null, error);
+ } else {
+ this.send(callback);
+ }
+};
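+
+// Typical usage, mirroring the admin templates above: a hidden form carries
+// a "payload" input plus "header:<Name>" inputs, and the callback receives
+// the finished request (or a faked 403 when not logged in as admin):
+//
+//   (new Webhook('runform.mytask')).run(function(hook, req, error) {
+//     if (!error && req && req.status == 200) { /* handle success */ }
+//   });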
diff --git a/google_appengine/google/appengine/ext/admin/templates/memcache.html b/google_appengine/google/appengine/ext/admin/templates/memcache.html
new file mode 100644
index 0000000..55f869a
--- /dev/null
+++ b/google_appengine/google/appengine/ext/admin/templates/memcache.html
@@ -0,0 +1,119 @@
+{% extends "base.html" %}
+
+{% block title %}{{ application_name }} Development Console - Memcache Viewer{% endblock %}
+
+{% block head %}
+ <style type="text/css">{% include "css/memcache.css" %}</style>
+{% endblock %}
+
+{% block breadcrumbs %}
+ <span class="item"><a href="">Memcache Viewer</a></span>
+{% endblock %}
+
+{% block body %}
+<h3>Memcache Viewer</h3>
+
+{% if message %}
+<div class="message">
+{{ message|escape }}
+</div>
+{% endif %}
+
+{% if show_stats %}
+<div id="stats">
+ <ul>
+ <li>Hit ratio: {{ hitratio }}% ({{ stats.hits }} hit{{ stats.hits|pluralize }} and {{ stats.misses }} miss{{ stats.misses|pluralize:"es" }})</li>
+ <li>Size of cache: {{ stats.items }} item{{ stats.items|pluralize }}, {{ stats.bytes|filesizeformat }}
+ <form id="flush_form" action="{{ request.path }}" method="post">
+ <input type="submit" name="action:flush" value="Flush Cache" onclick="return confirm('Are you sure you want to flush all keys from the cache?');"/>
+ </form>
+ </li>
+ <li>Cache contains items up to {{ oldest_item_age|timesince }} old.</li>
+ </ul>
+</div>
+
+<div id="memcache_search">
+ <form action="{{ request.path }}" method="post">
+ <span class="field">
+ <span class="name">Key:</span>
+ <span class="value"><input id="key_input" name="key" type="text" size="40" value="{{ key|escape }}"/></span>
+ </span>
+ <span class="buttons">
+ <input type="submit" name="action:display" value="Display"/>
+ <input type="submit" name="action:edit" value="Edit/Create"/>
+ <input type="submit" name="action:delete" value="Delete" onclick="return confirm('Are you sure you want to permanently delete this key?');"/>
+ </span>
+ </form>
+</div>
+{% endif %}
+
+{% if show_value %}
+{% if key_exists %}
+{% ifequal type "error" %}
+<div class="message">Error fetching {{ key|escape }}: {{ value|escape }}</div>
+{% else %}
+<div id="value_display">
+ <div id="value_display_key">"<b>{{ key|escape }}</b>" is a <b>{{ type|escape }}</b>:</div>
+ <pre id="value_display_value">{{ value|escape }}</pre>
+</div>
+{% endifequal %}
+{% else %}
+<div class="message">No such key: {{ key|escape }}</div>
+{% endif %}
+{% endif %}
+
+{% if show_valueform %}
+<div id="memcache_edit">
+ <form action="{{ request.path }}" method="post">
+ <table>
+ <tr>
+ <th>Key</th>
+ <td>
+ <input name="key" type="hidden" value="{{ key|escape }}"/>
+ {{ key|escape }}
+ </td>
+ </tr>
+ <tr>
+ <th>Type</th>
+ <td>
+ {% if key_exists %}
+ <input name="type" type="hidden" value="{{ type|escape }}"/>
+ {{ type|escape }}
+ {% else %}
+ <select name="type" size="1">
+ {% for typeopt in types %}
+ <option>{{ typeopt }}</option>
+ {% endfor %}
+ </select>
+ {% endif %}
+ </td>
+ </tr>
+ <tr>
+ <th id="value_key"><div id="value_key_text">Value</div></th>
+ <td>
+ <textarea id="value_input" name="value" cols="80" rows="20"{% if not writable %} readonly{% endif %}>{{ value|default_if_none:""|escape }}</textarea>
+ </td>
+ </tr>
+ <tr>
+ <th>&nbsp;</th>
+ <td>
+ {% if writable %}
+ <input type="submit" name="action:save" value="Save"/>
+ {% endif %}
+ <input type="submit" name="action:cancel" value="Cancel"/>
+ </td>
+ </tr>
+ </table>
+ </form>
+</div>
+{% endif %}
+
+{% endblock %}
+
+{% block final %}
+<script type="text/javascript">
+//<![CDATA[
+var keyInput = document.getElementById('key_input');
+if (keyInput) keyInput.focus();
+//]]>
+</script>
+{% endblock %}
diff --git a/google_appengine/google/appengine/ext/admin/templates/pager.html b/google_appengine/google/appengine/ext/admin/templates/pager.html
new file mode 100644
index 0000000..6c3ffa2
--- /dev/null
+++ b/google_appengine/google/appengine/ext/admin/templates/pager.html
@@ -0,0 +1,9 @@
+{% ifnotequal prev_start -1 %}<a href="{{ start_base_url }}&amp;start={{ prev_start }}">&lsaquo; Previous</a>{% endifnotequal %}
+&nbsp;
+{% for page in pages %}
+ <a {% ifequal page.number current_page %} class="ae-page-selected ae-page-number" {% endifequal %}
+ href="{{ start_base_url }}&amp;start={{ page.start }}">{{ page.number }}</a>
+{% endfor %}
+&nbsp;
+{% ifnotequal next_start -1 %}<a href="{{ start_base_url }}&amp;start={{ next_start }}">Next &rsaquo;</a>{% endifnotequal %}
+
diff --git a/google_appengine/google/appengine/ext/admin/templates/queues.html b/google_appengine/google/appengine/ext/admin/templates/queues.html
new file mode 100644
index 0000000..a3d46f5
--- /dev/null
+++ b/google_appengine/google/appengine/ext/admin/templates/queues.html
@@ -0,0 +1,75 @@
+{% extends "base.html" %}
+
+{% block title %}
+{{ application_name }} Development Console - Task Queue Viewer{% endblock %}
+
+{% block head %}
+ <style type="text/css">{% include "css/queues.css" %}</style>
+{% endblock %}
+
+{% block breadcrumbs %}
+ <span class="item"><a href="">Queue Viewer</a></span>
+{% endblock %}
+
+{% block body %}
+<h3>Task Queues</h3>
+
+{% if queues %}
+ <p>
+ Tasks will not run automatically. Select a queue to run tasks manually.
+ </p>
+
+ <table id="ah-queues" class="ae-table ae-table-striped">
+ <thead>
+ <tr>
+ <th>Queue Name</th>
+ <th>Maximum Rate</th>
+ <th>Bucket Size</th>
+ <th>Oldest Task (UTC)</th>
+ <th>Tasks in Queue</th>
+ <th></th>
+ </tr>
+ </thead>
+ <tbody>
+ {% for queue in queues %}
+ <tr class="{% cycle ae-odd,ae-even %}">
+ <td valign="top">
+ <a href="/_ah/admin/tasks?queue={{ queue.name|escape }}">
+ {{ queue.name|escape }}</a>
+ </td>
+ <td valign="top">
+ {{ queue.max_rate|escape }}
+ </td>
+ <td valign="top">
+ {{ queue.bucket_size|escape }}
+ </td>
+ <td valign="top">
+ {% if queue.oldest_task %}
+ {{ queue.oldest_task|escape }}<br/>
+ ({{ queue.eta_delta|escape }})
+ {% else %}
+ None
+ {% endif %}
+ </td>
+ <td valign="top">
+ {{ queue.tasks_in_queue|escape }}
+ </td>
+ <td valign="top">
+ <form id="flushform" action="/_ah/admin/queues" method="post">
+ <input type="hidden" name="queue" value="{{ queue.name|escape }}"/>
+ <input type="submit" name="action:flushqueue" value="Flush Queue"
+ onclick="return confirm('Are you sure you want to flush all ' +
+ 'tasks from {{ queue.name|escape }}?');"/>
+ </form>
+ </td>
+ </tr>
+ {% endfor %}
+ </tbody>
+ </table>
+{% else %}
+  This application doesn't define any task queues. See the documentation for more information.
+{% endif %}
+
+
+{% endblock %}
+
diff --git a/google_appengine/google/appengine/ext/admin/templates/tasks.html b/google_appengine/google/appengine/ext/admin/templates/tasks.html
new file mode 100644
index 0000000..c4749a0
--- /dev/null
+++ b/google_appengine/google/appengine/ext/admin/templates/tasks.html
@@ -0,0 +1,103 @@
+{% extends "base.html" %}
+
+{% block title %}
+{{ application_name }} Development Console - Tasks Viewer{% endblock %}
+
+{% block head %}
+ <style type="text/css">{% include "css/pager.css" %}</style>
+ <style type="text/css">{% include "css/tasks.css" %}</style>
+ <script type="text/javascript">
+ {% include "js/webhook.js" %}
+
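+    // Result callback for the task's Webhook run (see js/webhook.js): on a
+    // 200 response, submit the matching delete form so the executed task is
+    // removed from the queue listing.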
+    var handleTaskResult = function(hook, req, error) {
+      if (error != null) {
+        return;
+      }
+      if (req == null) {
+        return;
+      }
+      if (req.status != 200) {
+        return;
+      }
+      var parts = hook.formId.split('.');
+      var deleteForm = document.getElementById('deleteform.' + parts[1]);
+      if (deleteForm != null) {
+        deleteForm.submit();
+      }
+    };
+ </script>
+{% endblock %}
+
+{% block breadcrumbs %}
+ <span class="item"><a href="">Tasks Viewer</a></span>
+{% endblock %}
+
+{% block body %}
+<h3>Tasks for Queue: {{ queue_name|escape }}</h3>
+
+{% if tasks %}
+ <p>
+ Tasks will not run automatically. Push the 'Run' button to execute each task.
+ </p>
+
+ <table id="ah-tasks" class="ae-table ae-table-striped">
+ <thead>
+ <tr>
+ <th>Task Name</th>
+ <th>ETA (UTC)</th>
+ <th>Method</th>
+ <th>URL</th>
+ <th></th>
+ <th></th>
+ </tr>
+ </thead>
+ <tbody>
+ {% for task in tasks %}
+ <tr class="{% cycle ae-odd,ae-even %}">
+ <td valign="top">
+ {{ task.name|escape }}
+ </td>
+ <td valign="top">
+ {{ task.eta|escape }} ({{ task.eta_delta|escape }})
+ </td>
+ <td valign="top">
+ {{ task.method|escape }}
+ </td>
+ <td valign="top">
+ {{ task.url|escape }}
+ </td>
+ <td valign="top">
+ <form id="runform.{{ task.name|escape }}" action="{{ task.url|escape }}" method="{{ task.method|escape }}" onsubmit="(new Webhook('runform.{{ task.name|escape }}')).run(handleTaskResult); return false">
+ <input type="hidden" name="payload" value="{{ task.body|escape }}">
+ {% for header in task.headers.items %}
+ <input type="hidden" name="header:{{ header.0|escape }}"
+ value="{{ header.1|escape }}"/>
+ {% endfor %}
+ <input type="submit" value="Run"/>
+ </form>
+ </td>
+ <td valign="top">
+ <form id="deleteform.{{ task.name|escape }}" action="/_ah/admin/tasks" method="post">
+ <input type="hidden" name="queue" value="{{ queue_name|escape }}"/>
+ <input type="hidden" name="task" value="{{ task.name|escape }}"/>
+ <input type="hidden" name="action:deletetask" value="true"/>
+ <input type="submit" value="Delete"/>
+ </form>
+ </td>
+ </tr>
+ {% endfor %}
+ <tr>
+ <td colspan="6" class="ae-pager" align="right">
+ {% include "pager.html" %}
+ </td>
+ </tr>
+ </tbody>
+ </table>
+
+{% else %}
+ This queue doesn't contain any tasks.
+{% endif %}
+
+
+{% endblock %}
+
diff --git a/google_appengine/google/appengine/ext/admin/templates/xmpp.html b/google_appengine/google/appengine/ext/admin/templates/xmpp.html
new file mode 100644
index 0000000..629a473
--- /dev/null
+++ b/google_appengine/google/appengine/ext/admin/templates/xmpp.html
@@ -0,0 +1,234 @@
+{% extends "base.html" %}
+
+{% block title %}{{ application_name }} Development Console - XMPP{% endblock %}
+
+{% block breadcrumbs %}
+ <span class="item"><a href="">XMPP</a></span>
+{% endblock %}
+
+{% block head %}
+ <style type="text/css">{% include "css/xmpp.css" %}</style>
+ <script type="text/javascript">
+ {% include "js/webhook.js" %}
+ {% include "js/multipart_form_data.js" %}
+
+ var xmppFeedbackEl;
+ var xmppForm;
+ var payloadEl;
+ var fromEl;
+ var toEl;
+ var chatEl;
+ var contentTypeEl;
+
+ var sendXmppWebhook = function() {
+
+ if (!xmppFeedbackEl) {
+ xmppFeedbackEl = document.getElementById('xmpp-feedback');
+ xmppForm = document.getElementById('xmpp-form');
+ fromEl = document.getElementById('from');
+ toEl = document.getElementById('to');
+ chatEl = document.getElementById('chat');
+ payloadEl = document.getElementById('payload');
+ contentTypeEl = document.getElementById('content-type');
+ }
+
+ var to = toEl.value;
+ var from = fromEl.value;
+ var body = chatEl.value;
+
+ if (!to || !from) {
+ xmppFeedbackEl.className = 'ae-errorbox';
+ xmppFeedbackEl.innerHTML = 'From and To are required.';
+ return;
+ }
+
+ xmppFeedbackEl.className = 'ae-message';
+ xmppFeedbackEl.innerHTML = 'Sending XMPP message...';
+
+ var formData = new MultipartFormData();
+ formData.addPart('to', to, null, 'form-data');
+ formData.addPart('from', from, null, 'form-data');
+ formData.addPart('body', body, null, 'form-data');
+ formData.addPart('stanza', buildXmlStanza(from, to, body), 'text/xml', 'form-data');
+
+ payloadEl.value = formData.toString();
+ contentTypeEl.value = 'multipart/form-data; boundary=' +
+ formData.boundary;
+
+ (new Webhook('xmpp-form')).run(handleXmppResult);
+
+ // Prevents actual form posts.
+ return false;
+ };
+
+ var handleXmppResult = function(hook, req, error) {
+      if (error != null || req == null || req.status != 200) {
+        xmppFeedbackEl.className = 'ae-errorbox';
+        xmppFeedbackEl.innerHTML = 'Failed to send message.<br>' +
+            (req != null ? req.responseText : '');
+ } else {
+ var timestamp;
+ var dateString = new Date().toString();
+ var match = dateString.match(/(\d\d:\d\d:\d\d).+\((.+)\)/);
+        if (!match || !match[1] || !match[2]) {
+ timestamp = dateString;
+ } else {
+ timestamp = match[1] + ' ' + match[2];
+ }
+
+ xmppFeedbackEl.className = 'ae-message';
+ xmppFeedbackEl.innerHTML = 'Message has been sent at ' + timestamp;
+ }
+ };
+
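+    // Builds a minimal XMPP message stanza. Note: from/to/body are
+    // interpolated verbatim; no XML escaping is performed here.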
+ var buildXmlStanza = function(from, to, body) {
+ var xml = '<message from="' + from + '" '+
+ 'to="' + to + '">' +
+ '<body>' + body + '</body>' +
+ '</message>';
+ return xml;
+ };
+ </script>
+{% endblock %}
+
+{% block body %}
+<div id="xmpp">
+ <h3>XMPP</h3>
+  {% if not xmpp_configured %}
+ <div class="ae-errorbox">
+      XMPP is not yet configured in the services section of your app.yaml.
+ </div>
+ {% endif %}
+ <div id="xmpp-feedback"></div>
+ <form id="xmpp-form"
+ action="/_ah/xmpp/message/chat/" method="post"
+ onsubmit="sendXmppWebhook(); return false">
+
+ <input type="hidden" name="payload" id="payload">
+ <input type="hidden" id="content-type" name="header:Content-Type">
+
+ <fieldset>
+ <input type="hidden" name="message_type" id="message-type-chat" value="chat">
+ <!--
+ <legend>Message Type:</legend>
+ <div class="radio">
+ <input type="radio" name="message_type" id="message-type-chat" value="chat">
+ <label for="message-type-chat">Chat message</label>
+ </div>
+
+ <div class="radio">
+ <input type="radio" name="message_type" id="message-type-xml" value="xml">
+ <label for="message-type-xml">XML stanza</label>
+ </div>
+
+ <div class="radio">
+ <input type="radio" name="message_type" id="message-type-presence" value="presence">
+ <label for="message-type-presence">Presence</label>
+ </div>
+ -->
+ </fieldset>
+
+ <div class="fieldset">
+ <label for="from">From:</label>
+ <input type="text" id="from" name="from" size="40">
+ </div>
+
+
+ <div class="fieldset">
+ <label for="to">To:</label>
+ <input type="text" id="to" name="to" size="40">
+ </div>
+
+
+ <div id="chat-c" class="fieldset">
+ <label for="chat">Chat (plain text):</label>
+ <textarea id="chat" name="chat" rows="10" cols="50"></textarea>
+ </div>
+
+ <!--
+ <div id="xml-c" class="fieldset">
+ <label for="xml">XML Stanza:</label>
+ <textarea id="xml" name="xml" rows="10" cols="50"></textarea>
+ </div>
+
+
+ <fieldset id="presence-c">
+ <legend>Presence:</legend>
+
+ <div class="radio">
+ <input type="radio" id="presence-online" name="presence" value="online">
+ <label for="presence-online">Online</label>
+ </div>
+
+ <div class="radio">
+ <input type="radio" id="presence-offline" name="presence" value="offline">
+ <label for="presence-offline">Offline</label>
+ </div>
+  </fieldset>
+ -->
+
+ <div id="xmpp-submit">
+ <input type="submit" value="Send Message">
+ </div>
+
+ </form>
+</div>
+<!--
+<script type="text/javascript">
+ var messageTypes = ['chat', 'xml', 'presence'];
+
+ var messageTypeEls = [];
+ for (var i = 0, messageType; messageType = messageTypes[i]; i++) {
+ var messageTypeEl = document.getElementById('message-type-' +
+ messageType);
+ messageTypeEls.push(messageTypeEl);
+ }
+
+ // Initializes the chosen type to be the first radio.
+ var chosenMessageTypeId = messageTypeEls[0].id;
+
+ var messageTypeDict = {};
+ for (var i = 0, messageTypeEl; messageTypeEl = messageTypeEls[i]; i++) {
+ var type = messageTypeEl.id.replace('message-type-', '');
+ var formEl = document.getElementById(type + '-c');
+ messageTypeDict[messageTypeEl.id] = formEl;
+ // Initially hides all of the conditional form elements.
+ formEl.style.display = 'none';
+ }
+
+ var setChosenMessageType = function(messageTypeId) {
+ document.getElementById(messageTypeId).checked = true;
+
+ // Hides previously chosen message type
+ messageTypeDict[chosenMessageTypeId].style.display = 'none';
+
+ // Sets the new chosen type and shows its field.
+ chosenMessageTypeId = messageTypeId;
+ messageTypeDict[chosenMessageTypeId].style.display = '';
+ }
+
+ var messageTypeClickHandler = function(e) {
+ for (var i = 0, messageTypeEl; messageTypeEl = messageTypeEls[i]; i++) {
+ if (messageTypeEl.checked) {
+ setChosenMessageType(messageTypeEl.id);
+ break;
+ }
+ }
+ };
+
+ // set up event listeners
+ for (var i = 0, messageTypeEl; messageTypeEl = messageTypeEls[i]; i++) {
+ messageTypeEl.onclick = messageTypeClickHandler;
+ }
+
+ // Init
+ setChosenMessageType(chosenMessageTypeId);
+
+</script>
+-->
+
+{% endblock %}
+
+{% block final %}
+{% endblock %}
diff --git a/google_appengine/google/appengine/ext/bulkload/__init__.py b/google_appengine/google/appengine/ext/bulkload/__init__.py
new file mode 100755
index 0000000..75a899a
--- /dev/null
+++ b/google_appengine/google/appengine/ext/bulkload/__init__.py
@@ -0,0 +1,435 @@
+#!/usr/bin/env python
+#
+# Copyright 2007 Google Inc.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
+
+"""A mix-in handler for bulk loading data into an application.
+
+For complete documentation, see the Tools and Libraries section of the
+documentation.
+
+To use this in your app, first write a script, e.g. bulkload.py, that
+instantiates a Loader for each entity kind you want to import and calls
+bulkload.main(instance). For example:
+
+person = bulkload.Loader(
+ 'Person',
+ [('name', str),
+ ('email', datastore_types.Email),
+ ('cool', bool), # ('0', 'False', 'No', '')=False, otherwise bool(value)
+ ('birthdate', lambda x: datetime.datetime.fromtimestamp(float(x))),
+ ])
+
+if __name__ == '__main__':
+ bulkload.main(person)
+
+See the Loader class for more information. Then, add a handler for it in your
+app.yaml, e.g.:
+
+ handlers:
+ - url: /load
+ script: bulkload.py
+ login: admin
+
+Finally, deploy your app and run bulkloader.py. For example, to load the
+file people.csv into a dev_appserver running on your local machine:
+
+./bulkloader.py --filename people.csv --kind Person --cookie ... \
+ --url http://localhost:8080/load
+
+The kind parameter is used to look up the Loader instance that will be used.
+The bulkload handler should usually be admin_only, so that non-admins can't use
+the shell to modify your app's data. The bulkload client uses the cookie
+parameter to piggyback its HTTP requests on your login session. A GET request
+to the URL specified for your bulkload script will give you a cookie parameter
+you can use (/load in the example above). If your bulkload handler is not
+admin_only, you may omit the cookie parameter.
+
+If you want to do extra processing before the entities are stored, you can
+subclass Loader and override HandleEntity. HandleEntity is called once with
+each entity that is imported from the CSV data. You can return one or more
+entities from HandleEntity to be stored in its place, or None if nothing
+should be stored.
+
+For example, this loads calendar events and stores them as
+datastore_entities.Event entities. It also populates their author field with a
+reference to the corresponding datastore_entities.Contact entity. If no Contact
+entity exists yet for the given author, it creates one and stores it first.
+
+class EventLoader(bulkload.Loader):
+ def __init__(self):
+    bulkload.Loader.__init__(self, 'Event',
+ [('title', str),
+ ('creator', str),
+ ('where', str),
+ ('startTime', lambda x:
+ datetime.datetime.fromtimestamp(float(x))),
+ ])
+
+ def HandleEntity(self, entity):
+ event = datastore_entities.Event(entity.title)
+ event.update(entity)
+
+ creator = event['creator']
+ if creator:
+ contact = datastore.Query('Contact', {'title': creator}).Get(1)
+ if not contact:
+ contact = [datastore_entities.Contact(creator)]
+ datastore.Put(contact[0])
+ event['author'] = contact[0].key()
+
+ return event
+
+if __name__ == '__main__':
+ bulkload.main(EventLoader())
+"""
+
+
+
+
+
+import Cookie
+import StringIO
+import csv
+import httplib
+import os
+import traceback
+
+import google
+import wsgiref.handlers
+
+from google.appengine.api import datastore
+from google.appengine.ext import webapp
+from google.appengine.ext.bulkload import constants
+
+
+def Validate(value, type):
+ """ Checks that value is non-empty and of the right type.
+
+ Raises ValueError if value is None or empty, TypeError if it's not the given
+ type.
+
+ Args:
+ value: any value
+ type: a type or tuple of types
+ """
+ if not value:
+ raise ValueError('Value should not be empty; received %s.' % value)
+ elif not isinstance(value, type):
+ raise TypeError('Expected a %s, but received %s (a %s).' %
+ (type, value, value.__class__))
+
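+# Minimal Validate sketch (hypothetical values, not part of the public API):
+#   Validate('Person', basestring)   # passes: non-empty and the right type
+#   Validate('', basestring)         # raises ValueError: empty
+#   Validate(42, basestring)         # raises TypeError: wrong type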
+
+class Loader(object):
+ """A base class for creating datastore entities from input data.
+
+ To add a handler for bulk loading a new entity kind into your datastore,
+ write a subclass of this class that calls Loader.__init__ from your
+ class's __init__.
+
+ If you need to run extra code to convert entities from the input
+ data, create new properties, or otherwise modify the entities before
+ they're inserted, override HandleEntity.
+
+ See the CreateEntity method for the creation of entities from the
+ (parsed) input data.
+ """
+
+ __loaders = {}
+ __kind = None
+ __properties = None
+
+ def __init__(self, kind, properties):
+ """ Constructor.
+
+ Populates this Loader's kind and properties map. Also registers it with
+ the bulk loader, so that all you need to do is instantiate your Loader,
+ and the bulkload handler will automatically use it.
+
+ Args:
+ kind: a string containing the entity kind that this loader handles
+
+ properties: list of (name, converter) tuples.
+
+ This is used to automatically convert the CSV columns into properties.
+ The converter should be a function that takes one argument, a string
+ value from the CSV file, and returns a correctly typed property value
+ that should be inserted. The tuples in this list should match the
+ columns in your CSV file, in order.
+
+ For example:
+ [('name', str),
+ ('id_number', int),
+ ('email', datastore_types.Email),
+ ('user', users.User),
+ ('birthdate', lambda x: datetime.datetime.fromtimestamp(float(x))),
+ ('description', datastore_types.Text),
+ ]
+ """
+ Validate(kind, basestring)
+ self.__kind = kind
+
+ Validate(properties, list)
+ for name, fn in properties:
+ Validate(name, basestring)
+ assert callable(fn), (
+ 'Conversion function %s for property %s is not callable.' % (fn, name))
+
+ self.__properties = properties
+
+ Loader.__loaders[kind] = self
+
+
+ def kind(self):
+ """ Return the entity kind that this Loader handes.
+ """
+ return self.__kind
+
+ def CreateEntity(self, values, key_name=None):
+ """ Creates an entity from a list of property values.
+
+ Args:
+ values: list/tuple of str
+ key_name: if provided, the name for the (single) resulting Entity
+
+ Returns:
+ list of datastore.Entity
+
+ The returned entities are populated with the property values from the
+ argument, converted to native types using the properties map given in
+ the constructor, and passed through HandleEntity. They're ready to be
+ inserted.
+
+ Raises:
+ AssertionError if the number of values doesn't match the number
+ of properties in the properties map.
+ """
+ Validate(values, (list, tuple))
+ assert len(values) == len(self.__properties), (
+ 'Expected %d CSV columns, found %d.' %
+ (len(self.__properties), len(values)))
+
+ entity = datastore.Entity(self.__kind, name=key_name)
+ for (name, converter), val in zip(self.__properties, values):
+ if converter is bool and val.lower() in ('0', 'false', 'no'):
+ val = False
+ entity[name] = converter(val)
+
+ entities = self.HandleEntity(entity)
+
+ if entities is not None:
+ if not isinstance(entities, (list, tuple)):
+ entities = [entities]
+
+ for entity in entities:
+ if not isinstance(entity, datastore.Entity):
+ raise TypeError('Expected a datastore.Entity, received %s (a %s).' %
+ (entity, entity.__class__))
+
+ return entities
+
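+  # CreateEntity sketch (hedged) for a Loader('Person', [('name', str),
+  # ('cool', bool)]):
+  #   loader.CreateEntity(['Bret', 'No'])   # -> [Entity], with cool == False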
+
+ def HandleEntity(self, entity):
+ """ Subclasses can override this to add custom entity conversion code.
+
+    This is called for each entity, after its properties are populated from
+    CSV but before it is stored.
+
+ The entity to be inserted should be returned. If multiple entities should
+ be inserted, return a list of entities. If no entities should be inserted,
+ return None or [].
+
+ Args:
+ entity: datastore.Entity
+
+ Returns:
+ datastore.Entity or list of datastore.Entity
+ """
+ return entity
+
+
+ @staticmethod
+ def RegisteredLoaders():
+ """ Returns a list of the Loader instances that have been created.
+ """
+ return dict(Loader.__loaders)
+
+
+class BulkLoad(webapp.RequestHandler):
+ """A handler for bulk load requests.
+
+  This class contains handlers for the bulkloading process: one for
+  GET requests, which provides cookie information for the upload
+  script, and one for POST requests, which uploads the entities.
+
+  In the POST request, the body contains the data representing the
+  entities' property values. The original format was a sequence of
+  lines of comma-separated values (and is handled by the Load
+  method). The current (version 1) format is a binary format described
+  in the Tools and Libraries section of the documentation, and is
+  handled by the LoadV1 method.
+ """
+
+ def get(self):
+ """ Handle a GET. Just show an info page.
+ """
+ page = self.InfoPage(self.request.uri)
+ self.response.out.write(page)
+
+
+ def post(self):
+ """ Handle a POST. Reads CSV data, converts to entities, and stores them.
+ """
+ self.response.headers['Content-Type'] = 'text/plain'
+ response, output = self.Load(self.request.get(constants.KIND_PARAM),
+ self.request.get(constants.CSV_PARAM))
+ self.response.set_status(response)
+ self.response.out.write(output)
+
+
+ def InfoPage(self, uri):
+ """ Renders an information page with the POST endpoint and cookie flag.
+
+ Args:
+ uri: a string containing the request URI
+ Returns:
+ A string with the contents of the info page to be displayed
+ """
+ page = """
+<!DOCTYPE html PUBLIC "-//W3C//DTD XHTML 1.0 Strict//EN"
+ "http://www.w3.org/TR/xhtml1/DTD/xhtml1-strict.dtd">
+<html><head>
+<title>Bulk Loader</title>
+</head><body>"""
+
+ page += ('The bulk load endpoint is: <a href="%s">%s</a><br />\n' %
+ (uri, uri))
+
+ cookies = os.environ.get('HTTP_COOKIE', None)
+ if cookies:
+ cookie = Cookie.BaseCookie(cookies)
+ for param in ['ACSID', 'dev_appserver_login']:
+ value = cookie.get(param)
+ if value:
+ page += ("Pass this flag to the client: --cookie='%s=%s'\n" %
+ (param, value.value))
+ break
+
+ else:
+ page += 'No cookie found!\n'
+
+ page += '</body></html>'
+ return page
+
+ def IterRows(self, reader):
+ """ Yields a tuple of a line number and row for each row of the CSV data.
+
+ Args:
+ reader: a csv reader for the input data.
+ """
+ line_num = 1
+ for columns in reader:
+ yield (line_num, columns)
+ line_num += 1
+
+ def LoadEntities(self, iter, loader, key_format=None):
+ """Generates entities and loads them into the datastore. Returns
+ a tuple of HTTP code and string reply.
+
+ Args:
+ iter: an iterator yielding pairs of a line number and row contents.
+ key_format: a format string to convert a line number into an
+ entity id. If None, then entity ID's are automatically generated.
+ """
+ entities = []
+ output = []
+ for line_num, columns in iter:
+ key_name = None
+ if key_format is not None:
+ key_name = key_format % line_num
+ if columns:
+ try:
+ output.append('\nLoading from line %d...' % line_num)
+ new_entities = loader.CreateEntity(columns, key_name=key_name)
+ if new_entities:
+ entities.extend(new_entities)
+ output.append('done.')
+ except:
+ stacktrace = traceback.format_exc()
+ output.append('error:\n%s' % stacktrace)
+ return (httplib.BAD_REQUEST, ''.join(output))
+
+ datastore.Put(entities)
+
+ return (httplib.OK, ''.join(output))
+
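+  # key_format sketch: key_format='line%d' names the entity built from line 7
+  # 'line7'; with key_format=None the datastore assigns ids automatically.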
+ def Load(self, kind, data):
+ """Parses CSV data, uses a Loader to convert to entities, and stores them.
+
+ On error, fails fast. Returns a "bad request" HTTP response code and
+ includes the traceback in the output.
+
+ Args:
+ kind: a string containing the entity kind that this loader handles
+ data: a string containing the CSV data to load
+
+ Returns:
+ tuple (response code, output) where:
+ response code: integer HTTP response code to return
+ output: string containing the HTTP response body
+ """
+ data = data.encode('utf-8')
+ Validate(kind, basestring)
+ Validate(data, basestring)
+ output = []
+
+ try:
+ loader = Loader.RegisteredLoaders()[kind]
+ except KeyError:
+ output.append('Error: no Loader defined for kind %s.' % kind)
+ return (httplib.BAD_REQUEST, ''.join(output))
+
+ buffer = StringIO.StringIO(data)
+ reader = csv.reader(buffer, skipinitialspace=True)
+
+ try:
+ csv.field_size_limit(800000)
+ except AttributeError:
+ pass
+
+ return self.LoadEntities(self.IterRows(reader), loader)
+
+
+def main(*loaders):
+ """Starts bulk upload.
+
+  Raises TypeError unless at least one Loader instance is given.
+
+  Args:
+    loaders: One or more Loader instances.
+ """
+ if not loaders:
+ raise TypeError('Expected at least one argument.')
+
+ for loader in loaders:
+ if not isinstance(loader, Loader):
+ raise TypeError('Expected a Loader instance; received %r' % loader)
+
+ application = webapp.WSGIApplication([('.*', BulkLoad)])
+ wsgiref.handlers.CGIHandler().run(application)
+
+if __name__ == '__main__':
+ main()
diff --git a/google_appengine/google/appengine/ext/bulkload/constants.py b/google_appengine/google/appengine/ext/bulkload/constants.py
new file mode 100755
index 0000000..af3c857
--- /dev/null
+++ b/google_appengine/google/appengine/ext/bulkload/constants.py
@@ -0,0 +1,24 @@
+#!/usr/bin/env python
+#
+# Copyright 2007 Google Inc.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
+
+""" Constants used by both the bulkload server-side mixin handler and the
+command-line client.
+"""
+
+
+KIND_PARAM = 'kind'
+CSV_PARAM = 'csv'
diff --git a/google_appengine/google/appengine/ext/db/__init__.py b/google_appengine/google/appengine/ext/db/__init__.py
new file mode 100755
index 0000000..365c4fd
--- /dev/null
+++ b/google_appengine/google/appengine/ext/db/__init__.py
@@ -0,0 +1,2959 @@
+#!/usr/bin/env python
+#
+# Copyright 2007 Google Inc.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
+
+"""Simple, schema-based database abstraction layer for the datastore.
+
+Modeled after Django's abstraction layer on top of SQL databases,
+http://www.djangoproject.com/documentation/model_api/. Ours is a little simpler
+and a lot less code because the datastore is so much simpler than SQL
+databases.
+
+The programming model is to declare Python subclasses of the Model class,
+declaring datastore properties as class members of that class. So if you want to
+publish a story with title, body, and created date, you would do it like this:
+
+ class Story(db.Model):
+ title = db.StringProperty()
+ body = db.TextProperty()
+ created = db.DateTimeProperty(auto_now_add=True)
+
+You can create a new Story in the datastore with this usage pattern:
+
+ story = Story(title='My title')
+ story.body = 'My body'
+ story.put()
+
+You query for Story entities using built in query interfaces that map directly
+to the syntax and semantics of the datastore:
+
+  stories = Story.all().filter('created >=', yesterday).order('-created')
+ for story in stories:
+ print story.title
+
+The Property declarations enforce types by performing validation on assignment.
+For example, the DateTimeProperty enforces that you assign valid datetime
+objects, and if you supply the "required" option for a property, you will not
+be able to assign None to that property.
+
+We also support references between models, so if a story has comments, you
+would represent it like this:
+
+ class Comment(db.Model):
+ story = db.ReferenceProperty(Story)
+ body = db.TextProperty()
+
+When you get a story out of the datastore, the story reference is resolved
+automatically the first time it is referenced, which makes it easy to use
+model instances without performing additional queries by hand:
+
+ comment = Comment.get(key)
+ print comment.story.title
+
+Likewise, you can access the set of comments that refer to each story through
+a reverse reference called comment_set, which is a Query preconfigured to
+return all matching comments:
+
+ story = Story.get(key)
+ for comment in story.comment_set:
+ print comment.body
+
+"""
+
+
+
+
+
+
+import copy
+import datetime
+import logging
+import re
+import time
+import urlparse
+import warnings
+
+from google.appengine.api import datastore
+from google.appengine.api import datastore_errors
+from google.appengine.api import datastore_types
+from google.appengine.api import users
+
+Error = datastore_errors.Error
+BadValueError = datastore_errors.BadValueError
+BadPropertyError = datastore_errors.BadPropertyError
+BadRequestError = datastore_errors.BadRequestError
+EntityNotFoundError = datastore_errors.EntityNotFoundError
+BadArgumentError = datastore_errors.BadArgumentError
+QueryNotFoundError = datastore_errors.QueryNotFoundError
+TransactionNotFoundError = datastore_errors.TransactionNotFoundError
+Rollback = datastore_errors.Rollback
+TransactionFailedError = datastore_errors.TransactionFailedError
+BadFilterError = datastore_errors.BadFilterError
+BadQueryError = datastore_errors.BadQueryError
+BadKeyError = datastore_errors.BadKeyError
+InternalError = datastore_errors.InternalError
+NeedIndexError = datastore_errors.NeedIndexError
+Timeout = datastore_errors.Timeout
+
+ValidationError = BadValueError
+
+Key = datastore_types.Key
+Category = datastore_types.Category
+Link = datastore_types.Link
+Email = datastore_types.Email
+GeoPt = datastore_types.GeoPt
+IM = datastore_types.IM
+PhoneNumber = datastore_types.PhoneNumber
+PostalAddress = datastore_types.PostalAddress
+Rating = datastore_types.Rating
+Text = datastore_types.Text
+Blob = datastore_types.Blob
+ByteString = datastore_types.ByteString
+BlobKey = datastore_types.BlobKey
+
+_kind_map = {}
+
+
+_SELF_REFERENCE = object()
+
+
+_RESERVED_WORDS = set(['key_name'])
+
+
+
+
+class NotSavedError(Error):
+ """Raised when a saved-object action is performed on a non-saved object."""
+
+
+class KindError(BadValueError):
+ """Raised when an entity is used with incorrect Model."""
+
+
+class PropertyError(Error):
+ """Raised when non-existent property is referenced."""
+
+
+class DuplicatePropertyError(Error):
+ """Raised when a property is duplicated in a model definition."""
+
+
+class ConfigurationError(Error):
+ """Raised when a property or model is improperly configured."""
+
+
+class ReservedWordError(Error):
+ """Raised when a property is defined for a reserved word."""
+
+
+class DerivedPropertyError(Error):
+ """Raised when attempting to assign a value to a derived property."""
+
+
+_ALLOWED_PROPERTY_TYPES = set([
+ basestring,
+ str,
+ unicode,
+ bool,
+ int,
+ long,
+ float,
+ Key,
+ datetime.datetime,
+ datetime.date,
+ datetime.time,
+ Blob,
+ ByteString,
+ Text,
+ users.User,
+ Category,
+ Link,
+ Email,
+ GeoPt,
+ IM,
+ PhoneNumber,
+ PostalAddress,
+ Rating,
+ BlobKey,
+ ])
+
+_ALLOWED_EXPANDO_PROPERTY_TYPES = set(_ALLOWED_PROPERTY_TYPES)
+_ALLOWED_EXPANDO_PROPERTY_TYPES.update((list, tuple, type(None)))
+
+_OPERATORS = ['<', '<=', '>', '>=', '=', '==', '!=', 'in']
+_FILTER_REGEX = re.compile(
+    r'^\s*([^\s]+)(\s+(%s)\s*)?$' % '|'.join(_OPERATORS),
+    re.IGNORECASE | re.UNICODE)
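+# Illustration (an assumption about the intended grammar, derived from the
+# pattern above): a filter string is a property name with an optional
+# trailing operator, e.g.
+#   _FILTER_REGEX.match('age >=').groups()  ->  ('age', ' >=', '>=')
+#   _FILTER_REGEX.match('age').groups()     ->  ('age', None, None)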
+
+
+def class_for_kind(kind):
+ """Return base-class responsible for implementing kind.
+
+ Necessary to recover the class responsible for implementing provided
+ kind.
+
+ Args:
+ kind: Entity kind string.
+
+ Returns:
+ Class implementation for kind.
+
+ Raises:
+ KindError when there is no implementation for kind.
+ """
+ try:
+ return _kind_map[kind]
+ except KeyError:
+ raise KindError('No implementation for kind \'%s\'' % kind)
+
+
+def check_reserved_word(attr_name):
+ """Raise an exception if attribute name is a reserved word.
+
+ Args:
+ attr_name: Name to check to see if it is a reserved word.
+
+ Raises:
+ ReservedWordError when attr_name is determined to be a reserved word.
+ """
+ if datastore_types.RESERVED_PROPERTY_NAME.match(attr_name):
+ raise ReservedWordError(
+ "Cannot define property. All names both beginning and "
+ "ending with '__' are reserved.")
+
+ if attr_name in _RESERVED_WORDS or attr_name in dir(Model):
+ raise ReservedWordError(
+ "Cannot define property using reserved word '%(attr_name)s'. "
+ "If you would like to use this name in the datastore consider "
+ "using a different name like %(attr_name)s_ and adding "
+ "name='%(attr_name)s' to the parameter list of the property "
+ "definition." % locals())
+
+
+def query_descendants(model_instance):
+ """Returns a query for all the descendants of a model instance.
+
+ Args:
+ model_instance: Model instance to find the descendants of.
+
+ Returns:
+ Query that will retrieve all entities that have the given model instance
+ as an ancestor. Unlike normal ancestor queries, this does not include the
+ ancestor itself.
+ """
+
+  result = Query().ancestor(model_instance)
+  result.filter(datastore_types._KEY_SPECIAL_PROPERTY + ' >',
+                model_instance.key())
+  return result
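+
+# Usage sketch (hedged; assumes `folder` is a saved Model instance):
+#   for child in query_descendants(folder):
+#     ...  # each entity with `folder` as an ancestor, excluding `folder` itself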
+
+
+def model_to_protobuf(model_instance, _entity_class=datastore.Entity):
+ """Encodes a model instance as a protocol buffer.
+
+ Args:
+ model_instance: Model instance to encode.
+ Returns:
+ entity_pb.EntityProto representation of the model instance
+ """
+ return model_instance._populate_entity(_entity_class).ToPb()
+
+
+def model_from_protobuf(pb, _entity_class=datastore.Entity):
+ """Decodes a model instance from a protocol buffer.
+
+ Args:
+    pb: The protocol buffer representation of the model instance. Can be an
+        entity_pb.EntityProto or str encoding of an entity_pb.EntityProto
+
+ Returns:
+ Model instance resulting from decoding the protocol buffer
+ """
+ entity = _entity_class.FromPb(pb)
+ return class_for_kind(entity.kind()).from_entity(entity)
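+
+# Round-trip sketch (hedged; assumes `story` is a saved Story instance):
+#   pb = model_to_protobuf(story)
+#   same_story = model_from_protobuf(pb)   # a new instance with equal state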
+
+
+def _initialize_properties(model_class, name, bases, dct):
+ """Initialize Property attributes for Model-class.
+
+  Args:
+    model_class: Model class to initialize properties for.
+    name: Name of the model class.
+    bases: Base classes of the model class.
+    dct: Dictionary of attribute definitions for the class.
+ """
+ model_class._properties = {}
+ property_source = {}
+
+ def get_attr_source(name, cls):
+ for src_cls in cls.mro():
+ if name in src_cls.__dict__:
+ return src_cls
+
+ defined = set()
+ for base in bases:
+ if hasattr(base, '_properties'):
+ property_keys = set(base._properties.keys())
+ duplicate_property_keys = defined & property_keys
+ for dupe_prop_name in duplicate_property_keys:
+ old_source = property_source[dupe_prop_name] = get_attr_source(
+ dupe_prop_name, property_source[dupe_prop_name])
+ new_source = get_attr_source(dupe_prop_name, base)
+ if old_source != new_source:
+ raise DuplicatePropertyError(
+ 'Duplicate property, %s, is inherited from both %s and %s.' %
+ (dupe_prop_name, old_source.__name__, new_source.__name__))
+ property_keys -= duplicate_property_keys
+ if property_keys:
+ defined |= property_keys
+ property_source.update(dict.fromkeys(property_keys, base))
+ model_class._properties.update(base._properties)
+
+ for attr_name in dct.keys():
+ attr = dct[attr_name]
+ if isinstance(attr, Property):
+ check_reserved_word(attr_name)
+ if attr_name in defined:
+ raise DuplicatePropertyError('Duplicate property: %s' % attr_name)
+ defined.add(attr_name)
+ model_class._properties[attr_name] = attr
+ attr.__property_config__(model_class, attr_name)
+
+ model_class._unindexed_properties = frozenset(
+ name for name, prop in model_class._properties.items() if not prop.indexed)
+
+
+class PropertiedClass(type):
+ """Meta-class for initializing Model classes properties.
+
+ Used for initializing Properties defined in the context of a model.
+ By using a meta-class much of the configuration of a Property
+ descriptor becomes implicit. By using this meta-class, descriptors
+ that are of class Model are notified about which class they
+ belong to and what attribute they are associated with and can
+ do appropriate initialization via __property_config__.
+
+ Duplicate properties are not permitted.
+ """
+
+ def __init__(cls, name, bases, dct, map_kind=True):
+ """Initializes a class that might have property definitions.
+
+ This method is called when a class is created with the PropertiedClass
+ meta-class.
+
+ Loads all properties for this model and its base classes in to a dictionary
+ for easy reflection via the 'properties' method.
+
+ Configures each property defined in the new class.
+
+ Duplicate properties, either defined in the new class or defined separately
+ in two base classes are not permitted.
+
+    Properties may not be assigned to names which are in the list of
+ _RESERVED_WORDS. It is still possible to store a property using a reserved
+ word in the datastore by using the 'name' keyword argument to the Property
+ constructor.
+
+ Args:
+ cls: Class being initialized.
+ name: Name of new class.
+ bases: Base classes of new class.
+ dct: Dictionary of new definitions for class.
+
+ Raises:
+ DuplicatePropertyError when a property is duplicated either in the new
+ class or separately in two base classes.
+ ReservedWordError when a property is given a name that is in the list of
+ reserved words, attributes of Model and names of the form '__.*__'.
+ """
+ super(PropertiedClass, cls).__init__(name, bases, dct)
+
+ _initialize_properties(cls, name, bases, dct)
+
+ if map_kind:
+ _kind_map[cls.kind()] = cls
+
+
+class Property(object):
+ """A Property is an attribute of a Model.
+
+ It defines the type of the attribute, which determines how it is stored
+ in the datastore and how the property values are validated. Different property
+ types support different options, which change validation rules, default
+ values, etc. The simplest example of a property is a StringProperty:
+
+ class Story(db.Model):
+ title = db.StringProperty()
+ """
+
+ creation_counter = 0
+
+ def __init__(self,
+ verbose_name=None,
+ name=None,
+ default=None,
+ required=False,
+ validator=None,
+ choices=None,
+ indexed=True):
+ """Initializes this Property with the given options.
+
+ Args:
+ verbose_name: User friendly name of property.
+ name: Storage name for property. By default, uses attribute name
+ as it is assigned in the Model sub-class.
+ default: Default value for property if none is assigned.
+ required: Whether property is required.
+ validator: User provided method used for validation.
+ choices: User provided set of valid property values.
+ indexed: Whether property is indexed.
+ """
+ self.verbose_name = verbose_name
+ self.name = name
+ self.default = default
+ self.required = required
+ self.validator = validator
+ self.choices = choices
+ self.indexed = indexed
+ self.creation_counter = Property.creation_counter
+ Property.creation_counter += 1
+
+ def __property_config__(self, model_class, property_name):
+ """Configure property, connecting it to its model.
+
+ Configure the property so that it knows its property name and what class
+ it belongs to.
+
+ Args:
+ model_class: Model class which Property will belong to.
+ property_name: Name of property within Model instance to store property
+ values in. By default this will be the property name preceded by
+ an underscore, but may change for different subclasses.
+ """
+ self.model_class = model_class
+ if self.name is None:
+ self.name = property_name
+
+ def __get__(self, model_instance, model_class):
+ """Returns the value for this property on the given model instance.
+
+ See http://docs.python.org/ref/descriptors.html for a description of
+ the arguments to this class and what they mean."""
+ if model_instance is None:
+ return self
+
+ try:
+ return getattr(model_instance, self._attr_name())
+ except AttributeError:
+ return None
+
+ def __set__(self, model_instance, value):
+ """Sets the value for this property on the given model instance.
+
+ See http://docs.python.org/ref/descriptors.html for a description of
+ the arguments to this class and what they mean.
+ """
+ value = self.validate(value)
+ setattr(model_instance, self._attr_name(), value)
+
+ def default_value(self):
+ """Default value for unassigned values.
+
+ Returns:
+ Default value as provided by __init__(default).
+ """
+ return self.default
+
+ def validate(self, value):
+ """Assert that provided value is compatible with this property.
+
+ Args:
+ value: Value to validate against this Property.
+
+ Returns:
+ A valid value, either the input unchanged or adapted to the
+ required type.
+
+ Raises:
+ BadValueError if the value is not appropriate for this
+ property in any way.
+ """
+ if self.empty(value):
+ if self.required:
+ raise BadValueError('Property %s is required' % self.name)
+ else:
+ if self.choices:
+ match = False
+ for choice in self.choices:
+ if choice == value:
+ match = True
+ if not match:
+ raise BadValueError('Property %s is %r; must be one of %r' %
+ (self.name, value, self.choices))
+ if self.validator is not None:
+ self.validator(value)
+ return value
+
+ def empty(self, value):
+ """Determine if value is empty in the context of this property.
+
+ For most kinds, this is equivalent to "not value", but for kinds like
+ bool, the test is more subtle, so subclasses can override this method
+ if necessary.
+
+ Args:
+ value: Value to validate against this Property.
+
+ Returns:
+ True if this value is considered empty in the context of this Property
+ type, otherwise False.
+ """
+ return not value
+
+ def get_value_for_datastore(self, model_instance):
+ """Datastore representation of this property.
+
+ Looks for this property in the given model instance, and returns the proper
+ datastore representation of the value that can be stored in a datastore
+ entity. Most critically, it will fetch the datastore key value for
+ reference properties.
+
+ Args:
+ model_instance: Instance to fetch datastore value from.
+
+ Returns:
+ Datastore representation of the model value in a form that is
+ appropriate for storing in the datastore.
+ """
+ return self.__get__(model_instance, model_instance.__class__)
+
+ def make_value_from_datastore(self, value):
+ """Native representation of this property.
+
+ Given a value retrieved from a datastore entity, return a value,
+ possibly converted, to be stored on the model instance. Usually
+ this returns the value unchanged, but a property class may
+ override this when it uses a different datatype on the model
+ instance than on the entity.
+
+ This API is not quite symmetric with get_value_for_datastore(),
+ because the model instance on which to store the converted value
+ may not exist yet -- we may be collecting values to be passed to a
+ model constructor.
+
+ Args:
+ value: value retrieved from the datastore entity.
+
+ Returns:
+ The value converted for use as a model instance attribute.
+ """
+ return value
+
+ def _require_parameter(self, kwds, parameter, value):
+ """Sets kwds[parameter] to value.
+
+ If kwds[parameter] exists and is not value, raises ConfigurationError.
+
+ Args:
+ kwds: The parameter dict, which maps parameter names (strings) to values.
+ parameter: The name of the parameter to set.
+ value: The value to set it to.
+ """
+ if parameter in kwds and kwds[parameter] != value:
+ raise ConfigurationError('%s must be %s.' % (parameter, value))
+
+ kwds[parameter] = value
+
+ def _attr_name(self):
+ """Attribute name we use for this property in model instances.
+
+ DO NOT USE THIS METHOD.
+ """
+ return '_' + self.name
+
+ data_type = str
+
+ def datastore_type(self):
+ """Deprecated backwards-compatible accessor method for self.data_type."""
+ return self.data_type
+
+
+class Model(object):
+ """Model is the superclass of all object entities in the datastore.
+
+ The programming model is to declare Python subclasses of the Model class,
+ declaring datastore properties as class members of that class. So if you want
+ to publish a story with title, body, and created date, you would do it like
+ this:
+
+ class Story(db.Model):
+ title = db.StringProperty()
+ body = db.TextProperty()
+ created = db.DateTimeProperty(auto_now_add=True)
+
+ A model instance can have a single parent. Model instances without any
+ parent are root entities. It is possible to efficiently query for
+  instances by their shared parent. All descendants of a single root
+  instance also behave as a transaction group. This means that when you
+  work with one member of the group within a transaction, all descendants of
+  that root join the transaction. All operations within a transaction on this
+ group are ACID.
+ """
+
+ __metaclass__ = PropertiedClass
+
+ def __init__(self,
+ parent=None,
+ key_name=None,
+ key=None,
+ _app=None,
+ _from_entity=False,
+ **kwds):
+ """Creates a new instance of this model.
+
+ To create a new entity, you instantiate a model and then call put(),
+ which saves the entity to the datastore:
+
+ person = Person()
+ person.name = 'Bret'
+ person.put()
+
+ You can initialize properties in the model in the constructor with keyword
+ arguments:
+
+ person = Person(name='Bret')
+
+ We initialize all other properties to the default value (as defined by the
+ properties in the model definition) if they are not provided in the
+ constructor.
+
+ Args:
+ parent: Parent instance for this instance or None, indicating a top-
+ level instance.
+ key_name: Name for new model instance.
+      key: Key instance for this instance, overrides parent and key_name.
+      _from_entity: Intentionally undocumented.
+      **kwds: Keyword arguments mapping to properties of model.
+ """
+ if key is not None:
+ if isinstance(key, (tuple, list)):
+ key = Key.from_path(*key)
+ if isinstance(key, basestring):
+ key = Key(encoded=key)
+ if not isinstance(key, Key):
+ raise TypeError('Expected Key type; received %s (is %s)' %
+ (key, key.__class__.__name__))
+ if not key.has_id_or_name():
+ raise BadKeyError('Key must have an id or name')
+ if key.kind() != self.kind():
+ raise BadKeyError('Expected Key kind to be %s; received %s' %
+ (self.kind(), key.kind()))
+ if _app is not None and key.app() != _app:
+ raise BadKeyError('Expected Key app to be %s; received %s' %
+ (_app, key.app()))
+ if key_name is not None:
+ raise BadArgumentError('Cannot use key and key_name at the same time')
+ if parent is not None:
+ raise BadArgumentError('Cannot use key and parent at the same time')
+ self._key = key
+ self._key_name = None
+ self._parent = None
+ self._parent_key = None
+ else:
+ if key_name == '':
+ raise BadKeyError('Name cannot be empty.')
+ elif key_name is not None and not isinstance(key_name, basestring):
+ raise BadKeyError('Name must be string type, not %s' %
+ key_name.__class__.__name__)
+
+ if parent is not None:
+ if not isinstance(parent, (Model, Key)):
+          raise TypeError('Expected Model or Key type; received %s (is %s)' %
+ (parent, parent.__class__.__name__))
+ if isinstance(parent, Model) and not parent.has_key():
+ raise BadValueError(
+ "%s instance must have a complete key before it can be used as a "
+ "parent." % parent.kind())
+ if isinstance(parent, Key):
+ self._parent_key = parent
+ self._parent = None
+ else:
+ self._parent_key = parent.key()
+ self._parent = parent
+ else:
+ self._parent_key = None
+ self._parent = None
+ self._key_name = key_name
+ self._key = None
+
+ self._entity = None
+ self._app = _app
+
+ for prop in self.properties().values():
+ if prop.name in kwds:
+ value = kwds[prop.name]
+ else:
+ value = prop.default_value()
+ try:
+ prop.__set__(self, value)
+ except DerivedPropertyError, e:
+ if prop.name in kwds and not _from_entity:
+ raise
+
+ def key(self):
+ """Unique key for this entity.
+
+ This property is only available if this entity is already stored in the
+    datastore or if it has a full key, so it is available if this entity was
+    returned from a query, or after put() is called the first time
+    for new entities, or if a complete key was given when constructed.
+
+ Returns:
+ Datastore key of persisted entity.
+
+ Raises:
+ NotSavedError when entity is not persistent.
+ """
+ if self.is_saved():
+ return self._entity.key()
+ elif self._key:
+ return self._key
+ elif self._key_name:
+ parent = self._parent_key or (self._parent and self._parent.key())
+ self._key = Key.from_path(self.kind(), self._key_name, parent=parent)
+ return self._key
+ else:
+ raise NotSavedError()
+
+ def _to_entity(self, entity):
+ """Copies information from this model to provided entity.
+
+ Args:
+ entity: Entity to save information on.
+ """
+ for prop in self.properties().values():
+ datastore_value = prop.get_value_for_datastore(self)
+ if datastore_value == []:
+ try:
+ del entity[prop.name]
+ except KeyError:
+ pass
+ else:
+ entity[prop.name] = datastore_value
+
+ def _populate_internal_entity(self, _entity_class=datastore.Entity):
+ """Populates self._entity, saving its state to the datastore.
+
+ After this method is called, calling is_saved() will return True.
+
+ Returns:
+ Populated self._entity
+ """
+ self._entity = self._populate_entity(_entity_class=_entity_class)
+ for attr in ('_key_name', '_key'):
+ try:
+ delattr(self, attr)
+ except AttributeError:
+ pass
+ return self._entity
+
+ def put(self):
+ """Writes this model instance to the datastore.
+
+ If this instance is new, we add an entity to the datastore.
+ Otherwise, we update this instance, and the key will remain the
+ same.
+
+ Returns:
+ The key of the instance (either the existing key or a new key).
+
+ Raises:
+ TransactionFailedError if the data could not be committed.
+ """
+ self._populate_internal_entity()
+ return datastore.Put(self._entity)
+
+ save = put
+
+ def _populate_entity(self, _entity_class=datastore.Entity):
+ """Internal helper -- Populate self._entity or create a new one
+ if that one does not exist. Does not change any state of the instance
+ other than the internal state of the entity.
+
+ This method is separate from _populate_internal_entity so that it is
+ possible to call to_xml without changing the state of an unsaved entity
+ to saved.
+
+ Returns:
+ self._entity or a new Entity which is not stored on the instance.
+ """
+ if self.is_saved():
+ entity = self._entity
+ else:
+ kwds = {'_app': self._app,
+ 'unindexed_properties': self._unindexed_properties}
+ if self._key is not None:
+ if self._key.id():
+ kwds['id'] = self._key.id()
+ else:
+ kwds['name'] = self._key.name()
+ if self._key.parent():
+ kwds['parent'] = self._key.parent()
+ else:
+ if self._key_name is not None:
+ kwds['name'] = self._key_name
+ if self._parent_key is not None:
+ kwds['parent'] = self._parent_key
+ elif self._parent is not None:
+ kwds['parent'] = self._parent._entity
+ entity = _entity_class(self.kind(), **kwds)
+
+ self._to_entity(entity)
+ return entity
+
+ def delete(self):
+ """Deletes this entity from the datastore.
+
+ Raises:
+ TransactionFailedError if the data could not be committed.
+ """
+ datastore.Delete(self.key())
+ self._entity = None
+
+
+ def is_saved(self):
+ """Determine if entity is persisted in the datastore.
+
+    New instances of Model do not start out saved in the datastore. Objects which
+ are saved to or loaded from the Datastore will have a True saved state.
+
+ Returns:
+ True if object has been persisted to the datastore, otherwise False.
+ """
+ return self._entity is not None
+
+ def has_key(self):
+ """Determine if this model instance has a complete key.
+
+ When not using a fully self-assigned Key, ids are not assigned until the
+ data is saved to the Datastore, but instances with a key name always have
+ a full key.
+
+ Returns:
+ True if the object has been persisted to the datastore or has a key
+ or has a key_name, otherwise False.
+ """
+ return self.is_saved() or self._key or self._key_name
+
+ def dynamic_properties(self):
+ """Returns a list of all dynamic properties defined for instance."""
+ return []
+
+ def instance_properties(self):
+ """Alias for dyanmic_properties."""
+ return self.dynamic_properties()
+
+ def parent(self):
+ """Get the parent of the model instance.
+
+ Returns:
+ Parent of contained entity or parent provided in constructor, None if
+ instance has no parent.
+ """
+ if self._parent is None:
+ parent_key = self.parent_key()
+ if parent_key is not None:
+ self._parent = get(parent_key)
+ return self._parent
+
+ def parent_key(self):
+ """Get the parent's key.
+
+ This method is useful for avoiding a potential fetch from the datastore
+    while still getting information about the instance's parent.
+
+ Returns:
+ Parent key of entity, None if there is no parent.
+ """
+ if self._parent_key is not None:
+ return self._parent_key
+ elif self._parent is not None:
+ return self._parent.key()
+ elif self._entity is not None:
+ return self._entity.parent()
+ elif self._key is not None:
+ return self._key.parent()
+ else:
+ return None
+
+ def to_xml(self, _entity_class=datastore.Entity):
+ """Generate an XML representation of this model instance.
+
+ atom and gd:namespace properties are converted to XML according to their
+ respective schemas. For more information, see:
+
+ http://www.atomenabled.org/developers/syndication/
+ http://code.google.com/apis/gdata/common-elements.html
+ """
+ entity = self._populate_entity(_entity_class)
+ return entity.ToXml()
+
+ @classmethod
+ def get(cls, keys):
+ """Fetch instance from the datastore of a specific Model type using key.
+
+ We support Key objects and string keys (we convert them to Key objects
+ automatically).
+
+ Useful for ensuring that specific instance types are retrieved from the
+ datastore. It also helps that the source code clearly indicates what
+    kind of object is being retrieved. Example:
+
+ story = Story.get(story_key)
+
+ Args:
+ keys: Key within datastore entity collection to find; or string key;
+ or list of Keys or string keys.
+
+ Returns:
+ If a single key was given: a Model instance associated with key
+ for provided class if it exists in the datastore, otherwise
+ None; if a list of keys was given: a list whose items are either
+ a Model instance or None.
+
+ Raises:
+      KindError if any of the retrieved objects are not instances of the
+ type associated with call to 'get'.
+ """
+ results = get(keys)
+ if results is None:
+ return None
+
+ if isinstance(results, Model):
+ instances = [results]
+ else:
+ instances = results
+
+ for instance in instances:
+ if not(instance is None or isinstance(instance, cls)):
+ raise KindError('Kind %r is not a subclass of kind %r' %
+ (instance.kind(), cls.kind()))
+
+ return results
+
+ @classmethod
+ def get_by_key_name(cls, key_names, parent=None):
+ """Get instance of Model class by its key's name.
+
+ Args:
+ key_names: A single key-name or a list of key-names.
+ parent: Parent of instances to get. Can be a model or key.
+ """
+ if isinstance(parent, Model):
+ parent = parent.key()
+ key_names, multiple = datastore.NormalizeAndTypeCheck(key_names, basestring)
+ keys = [datastore.Key.from_path(cls.kind(), name, parent=parent)
+ for name in key_names]
+ if multiple:
+ return get(keys)
+ else:
+ return get(*keys)
+
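+  # Usage sketch (hypothetical names):
+  #   story = Story.get_by_key_name('my-title')          # single name
+  #   stories = Story.get_by_key_name(['one', 'two'])    # list -> list
+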
+ @classmethod
+ def get_by_id(cls, ids, parent=None):
+ """Get instance of Model class by id.
+
+ Args:
+      ids: A single id or a list of ids.
+ parent: Parent of instances to get. Can be a model or key.
+ """
+ if isinstance(parent, Model):
+ parent = parent.key()
+ ids, multiple = datastore.NormalizeAndTypeCheck(ids, (int, long))
+ keys = [datastore.Key.from_path(cls.kind(), id, parent=parent)
+ for id in ids]
+ if multiple:
+ return get(keys)
+ else:
+ return get(*keys)
+
+ @classmethod
+ def get_or_insert(cls, key_name, **kwds):
+ """Transactionally retrieve or create an instance of Model class.
+
+ This acts much like the Python dictionary setdefault() method, where we
+ first try to retrieve a Model instance with the given key name and parent.
+    If it's not present, then we create a new instance (using the **kwds
+ supplied) and insert that with the supplied key name.
+
+ Subsequent calls to this method with the same key_name and parent will
+ always yield the same entity (though not the same actual object instance),
+    regardless of the **kwds supplied. If the specified entity has somehow
+ been deleted separately, then the next call will create a new entity and
+ return it.
+
+ If the 'parent' keyword argument is supplied, it must be a Model instance.
+ It will be used as the parent of the new instance of this Model class if
+ one is created.
+
+ This method is especially useful for having just one unique entity for
+ a specific identifier. Insertion/retrieval is done transactionally, which
+ guarantees uniqueness.
+
+ Example usage:
+
+ class WikiTopic(db.Model):
+      creation_date = db.DateTimeProperty(auto_now_add=True)
+ body = db.TextProperty(required=True)
+
+ # The first time through we'll create the new topic.
+ wiki_word = 'CommonIdioms'
+ topic = WikiTopic.get_or_insert(wiki_word,
+ body='This topic is totally new!')
+ assert topic.key().name() == 'CommonIdioms'
+ assert topic.body == 'This topic is totally new!'
+
+ # The second time through will just retrieve the entity.
+ overwrite_topic = WikiTopic.get_or_insert(wiki_word,
+ body='A totally different message!')
+    assert overwrite_topic.key().name() == 'CommonIdioms'
+    assert overwrite_topic.body == 'This topic is totally new!'
+
+ Args:
+ key_name: Key name to retrieve or create.
+ **kwds: Keyword arguments to pass to the constructor of the model class
+ if an instance for the specified key name does not already exist. If
+ an instance with the supplied key_name and parent already exists, the
+ rest of these arguments will be discarded.
+
+ Returns:
+ Existing instance of Model class with the specified key_name and parent
+ or a new one that has just been created.
+
+ Raises:
+ TransactionFailedError if the specified Model instance could not be
+ retrieved or created transactionally (due to high contention, etc).
+ """
+ def txn():
+ entity = cls.get_by_key_name(key_name, parent=kwds.get('parent'))
+ if entity is None:
+ entity = cls(key_name=key_name, **kwds)
+ entity.put()
+ return entity
+ return run_in_transaction(txn)
+
+ @classmethod
+ def all(cls, **kwds):
+ """Returns a query over all instances of this model from the datastore.
+
+ Returns:
+ Query that will retrieve all instances from entity collection.
+ """
+ return Query(cls, **kwds)
+
+ @classmethod
+ def gql(cls, query_string, *args, **kwds):
+ """Returns a query using GQL query string.
+
+ See appengine/ext/gql for more information about GQL.
+
+ Args:
+ query_string: properly formatted GQL query string with the
+ 'SELECT * FROM <entity>' part omitted
+ *args: rest of the positional arguments used to bind numeric references
+ in the query.
+ **kwds: dictionary-based arguments (for named parameters).
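+
+ Example (an illustrative sketch; assumes a Story model with a 'title'
+ property):
+
+ query = Story.gql("WHERE title = :1", 'Foo') # numeric binding
+ query = Story.gql("WHERE title = :title", title='Foo') # named binding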
+ """
+ return GqlQuery('SELECT * FROM %s %s' % (cls.kind(), query_string),
+ *args, **kwds)
+
+ @classmethod
+ def _load_entity_values(cls, entity):
+ """Load dynamic properties from entity.
+
+ Loads attributes which are not defined as part of the entity into
+ the model instance.
+
+ Args:
+ entity: Entity which contains values to search dynamic properties for.
+ """
+ entity_values = {}
+ for prop in cls.properties().values():
+ if prop.name in entity:
+ try:
+ value = prop.make_value_from_datastore(entity[prop.name])
+ entity_values[prop.name] = value
+ except KeyError:
+ entity_values[prop.name] = []
+
+ return entity_values
+
+ @classmethod
+ def from_entity(cls, entity):
+ """Converts the entity representation of this model to an instance.
+
+ Converts datastore.Entity instance to an instance of cls.
+
+ Args:
+ entity: Entity loaded directly from datastore.
+
+ Raises:
+ KindError when cls is incorrect model for entity.
+ """
+ if cls.kind() != entity.kind():
+ raise KindError('Class %s cannot handle kind \'%s\'' %
+ (repr(cls), entity.kind()))
+
+ entity_values = cls._load_entity_values(entity)
+ instance = cls(None, _from_entity=True, **entity_values)
+ if entity.is_saved():
+ instance._entity = entity
+ del instance._key_name
+ del instance._key
+ elif entity.key().has_id_or_name():
+ instance._key = entity.key()
+ return instance
+
+ @classmethod
+ def kind(cls):
+ """Returns the datastore kind we use for this model.
+
+ We just use the name of the model for now, ignoring potential collisions.
+ """
+ return cls.__name__
+
+ @classmethod
+ def entity_type(cls):
+ """Soon to be removed alias for kind."""
+ return cls.kind()
+
+ @classmethod
+ def properties(cls):
+ """Returns a dictionary of all the properties defined for this model."""
+ return dict(cls._properties)
+
+ @classmethod
+ def fields(cls):
+ """Soon to be removed alias for properties."""
+ return cls.properties()
+
+
+def get(keys):
+ """Fetch the specific Model instance with the given key from the datastore.
+
+ We support Key objects and string keys (we convert them to Key objects
+ automatically).
+
+ Args:
+ keys: Key within datastore entity collection to find; or string key;
+ or list of Keys or string keys.
+
+ Returns:
+ If a single key was given: a Model instance associated with key
+ if it exists in the datastore, otherwise None; if a list of
+ keys was given: a list whose items are either a Model instance or
+ None.
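+
+ Example (an illustrative sketch; story_key and other_key are assumed
+ Keys of previously stored entities):
+
+ story = get(story_key) # single Key -> instance or None
+ stories = get([story_key, other_key]) # list of Keys -> list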
+ """
+ keys, multiple = datastore.NormalizeAndTypeCheckKeys(keys)
+ try:
+ entities = datastore.Get(keys)
+ except datastore_errors.EntityNotFoundError:
+ assert not multiple
+ return None
+ models = []
+ for entity in entities:
+ if entity is None:
+ model = None
+ else:
+ cls1 = class_for_kind(entity.kind())
+ model = cls1.from_entity(entity)
+ models.append(model)
+ if multiple:
+ return models
+ assert len(models) == 1
+ return models[0]
+
+
+def put(models):
+ """Store one or more Model instances.
+
+ Args:
+ models: Model instance or list of Model instances.
+
+ Returns:
+ A Key or a list of Keys (corresponding to the argument's plurality).
+
+ Raises:
+ TransactionFailedError if the data could not be committed.
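+
+ Example (an illustrative sketch; assumes a Story model with a 'title'
+ property):
+
+ key = put(Story(title='Foo')) # single instance -> single Key
+ keys = put([Story(title='A'), Story(title='B')]) # list -> list of Keys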
+ """
+ models, multiple = datastore.NormalizeAndTypeCheck(models, Model)
+ entities = [model._populate_internal_entity() for model in models]
+ keys = datastore.Put(entities)
+ if multiple:
+ return keys
+ assert len(keys) == 1
+ return keys[0]
+
+save = put
+
+
+def delete(models):
+ """Delete one or more Model instances.
+
+ Args:
+ models: Model instance, Key, or string-encoded Key; or a list of any
+ of these.
+
+ Raises:
+ TransactionFailedError if the data could not be committed.
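+
+ Example (an illustrative sketch; story is an assumed saved instance,
+ key_one and key_two are assumed Keys):
+
+ delete(story) # a single Model instance
+ delete([key_one, str(key_two)]) # Keys and string-encoded keys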
+ """
+ models_or_keys, multiple = datastore.NormalizeAndTypeCheck(
+ models, (Model, Key, basestring))
+ keys = []
+ for model_or_key in models_or_keys:
+ if isinstance(model_or_key, Model):
+ key = model_or_key = model_or_key.key()
+ elif isinstance(model_or_key, basestring):
+ key = model_or_key = Key(model_or_key)
+ else:
+ key = model_or_key
+ keys.append(key)
+ datastore.Delete(keys)
+
+
+def allocate_ids(model, size):
+ """Allocates a range of IDs of size for the model_key defined by model
+
+ Allocates a range of IDs in the datastore such that those IDs will not
+ be automatically assigned to new entities. You can only allocate IDs
+ for model keys from your app. If there is an error, raises a subclass of
+ datastore_errors.Error.
+
+ Args:
+ model: Model, Key or string to serve as a model specifying the ID sequence
+ in which to allocate IDs.
+ size: Number of IDs to allocate.
+
+ Returns:
+ (start, end) of the allocated range, inclusive.
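+
+ Example (an illustrative sketch; reserves 100 ids from the Story id
+ sequence so the datastore will never auto-assign them):
+
+ first, last = allocate_ids(Key.from_path('Story', 1), 100)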
+ """
+ models_or_keys, multiple = datastore.NormalizeAndTypeCheck(
+ model, (Model, Key, basestring))
+ keys = []
+ for model_or_key in models_or_keys:
+ if isinstance(model_or_key, Model):
+ key = model_or_key = model_or_key.key()
+ elif isinstance(model_or_key, basestring):
+ key = model_or_key = Key(model_or_key)
+ else:
+ key = model_or_key
+ keys.append(key)
+ return datastore.AllocateIds(keys, size)
+
+
+class Expando(Model):
+ """Dynamically expandable model.
+
+ An Expando does not require (but can still benefit from) the definition
+ of any properties before it can be used to store information in the
+ datastore. Properties can be added to an expando object by simply
+ performing an assignment. The assignment of properties is done on
+ an instance by instance basis, so it is possible for one object of an
+ expando type to have different properties from another or even the same
+ properties with different types. It is still possible to define
+ properties on an expando, allowing those properties to behave the same
+ as on any other model.
+
+ Example:
+ import datetime
+
+ class Song(db.Expando):
+ title = db.StringProperty()
+
+ crazy = Song(title='Crazy like a diamond',
+ author='Lucy Sky',
+ publish_date='yesterday',
+ rating=5.0)
+
+ hoboken = Song(title='The man from Hoboken',
+ author=['Anthony', 'Lou'],
+ publish_date=datetime.datetime(1977, 5, 3))
+
+ crazy.last_minute_note=db.Text('Get a train to the station.')
+
+ Possible Uses:
+
+ One use of an expando is to create an object without any specific
+ structure and later, when your application matures and is in the right
+ state, change it to a normal model object and define explicit properties.
+
+ Additional exceptions for expando:
+
+ Protected attributes (ones whose names begin with '_') cannot be used
+ as dynamic properties. These are names that are reserved for protected
+ transient (non-persisted) attributes.
+
+ Order of lookup:
+
+ When trying to set or access an attribute value, any other defined
+ properties, such as methods and other values in __dict__, take precedence
+ over values in the datastore.
+
+ 1 - Because it is not possible for the datastore to know what kind of
+ property to store on an undefined expando value, setting a property to
+ None is the same as deleting it from the expando.
+
+ 2 - Persistent variables on Expando must not begin with '_'. These
+ variables are considered to be 'protected' in Python, and are used
+ internally.
+
+ 3 - Expando's dynamic properties are not able to store empty lists.
+ Attempting to assign an empty list to a dynamic property will raise
+ ValueError. Static properties on Expando can still support empty
+ lists but, like normal Model properties, are restricted from using
+ None.
+ """
+
+ _dynamic_properties = None
+
+ def __init__(self, parent=None, key_name=None, _app=None, **kwds):
+ """Creates a new instance of this expando model.
+
+ Args:
+ parent: Parent instance for this instance or None, indicating a top-
+ level instance.
+ key_name: Name for new model instance.
+ _app: Intentionally undocumented.
+ **kwds: Keyword arguments mapping to properties of model.
+ """
+ super(Expando, self).__init__(parent, key_name, _app, **kwds)
+ self._dynamic_properties = {}
+ for prop, value in kwds.iteritems():
+ if prop not in self.properties() and value is not None:
+ setattr(self, prop, value)
+
+ def __setattr__(self, key, value):
+ """Dynamically set field values that are not defined.
+
+ Tries to set the value on the object normally, but failing that
+ sets the value on the contained entity.
+
+ Args:
+ key: Name of attribute.
+ value: Value to set for attribute. Must be compatible with
+ datastore.
+
+ Raises:
+ ValueError on attempt to assign empty list.
+ """
+ check_reserved_word(key)
+ if key[:1] != '_' and key not in self.properties():
+ if value == []:
+ raise ValueError('Cannot store empty list to dynamic property %s' %
+ key)
+ if type(value) not in _ALLOWED_EXPANDO_PROPERTY_TYPES:
+ raise TypeError("Expando cannot accept values of type '%s'." %
+ type(value).__name__)
+ if self._dynamic_properties is None:
+ self._dynamic_properties = {}
+ self._dynamic_properties[key] = value
+ else:
+ super(Expando, self).__setattr__(key, value)
+
+ def __getattr__(self, key):
+ """If no explicit attribute defined, retrieve value from entity.
+
+ Tries to get the value on the object normally, but failing that
+ retrieves value from contained entity.
+
+ Args:
+ key: Name of attribute.
+
+ Raises:
+ AttributeError when there is no attribute for key on object or
+ contained entity.
+ """
+ if self._dynamic_properties and key in self._dynamic_properties:
+ return self._dynamic_properties[key]
+ else:
+ return getattr(super(Expando, self), key)
+
+ def __delattr__(self, key):
+ """Remove attribute from expando.
+
+ Expando is not like normal entities in that undefined fields
+ can be removed.
+
+ Args:
+ key: Dynamic property to be deleted.
+ """
+ if self._dynamic_properties and key in self._dynamic_properties:
+ del self._dynamic_properties[key]
+ else:
+ object.__delattr__(self, key)
+
+ def dynamic_properties(self):
+ """Determine which properties are particular to instance of entity.
+
+ Returns:
+ Set of names which correspond only to the dynamic properties.
+ """
+ if self._dynamic_properties is None:
+ return []
+ return self._dynamic_properties.keys()
+
+ def _to_entity(self, entity):
+ """Store to entity, deleting dynamic properties that no longer exist.
+
+ When the expando is saved, it is possible that a given property no longer
+ exists. In this case, the property will be removed from the saved instance.
+
+ Args:
+ entity: Entity which will receive dynamic properties.
+ """
+ super(Expando, self)._to_entity(entity)
+
+ if self._dynamic_properties is None:
+ self._dynamic_properties = {}
+
+ for key, value in self._dynamic_properties.iteritems():
+ entity[key] = value
+
+ all_properties = set(self._dynamic_properties.iterkeys())
+ all_properties.update(self.properties().iterkeys())
+ for key in entity.keys():
+ if key not in all_properties:
+ del entity[key]
+
+ @classmethod
+ def _load_entity_values(cls, entity):
+ """Load dynamic properties from entity.
+
+ Expando needs to do a second pass to add the entity values which were
+ ignored by Model because they didn't have a corresponding predefined
+ property on the model.
+
+ Args:
+ entity: Entity which contains values to search dynamic properties for.
+ """
+ entity_values = super(Expando, cls)._load_entity_values(entity)
+ for key, value in entity.iteritems():
+ if key not in entity_values:
+ entity_values[str(key)] = value
+ return entity_values
+
+
+class _BaseQuery(object):
+ """Base class for both Query and GqlQuery."""
+
+ def __init__(self, model_class=None, keys_only=False):
+ """Constructor.
+
+ Args:
+ model_class: Model class from which entities are constructed.
+ keys_only: Whether the query should return full entities or only keys.
+ """
+ self._model_class = model_class
+ self._keys_only = keys_only
+
+ def is_keys_only(self):
+ """Returns whether this query is keys only.
+
+ Returns:
+ True if this query returns keys, False if it returns entities.
+ """
+ return self._keys_only
+
+ def _get_query(self):
+ """Subclass must override (and not call their super method).
+
+ Returns:
+ A datastore.Query instance representing the query.
+ """
+ raise NotImplementedError
+
+ def run(self):
+ """Iterator for this query.
+
+ If you know the number of results you need, consider fetch() instead,
+ or use a GQL query with a LIMIT clause. It's more efficient.
+
+ Returns:
+ Iterator for this query.
+ """
+ iterator = self._get_query().Run()
+ if self._keys_only:
+ return iterator
+ else:
+ return _QueryIterator(self._model_class, iter(iterator))
+
+ def __iter__(self):
+ """Iterator for this query.
+
+ If you know the number of results you need, consider fetch() instead,
+ or use a GQL query with a LIMIT clause. It's more efficient.
+ """
+ return self.run()
+
+ def get(self):
+ """Get first result from this.
+
+ Beware: get() ignores the LIMIT clause on GQL queries.
+
+ Returns:
+ First result from running the query if there are any, else None.
+ """
+ results = self.fetch(1)
+ try:
+ return results[0]
+ except IndexError:
+ return None
+
+ def count(self, limit=None):
+ """Number of entities this query fetches.
+
+ Beware: count() ignores the LIMIT clause on GQL queries.
+
+ Args:
+ limit: A number. If there are more results than this, stop short and
+ just return this number. Providing this argument makes the count
+ operation more efficient.
+
+ Returns:
+ Number of entities this query fetches.
+ """
+ return self._get_query().Count(limit=limit)
+
+ def fetch(self, limit, offset=0):
+ """Return a list of items selected using SQL-like limit and offset.
+
+ Whenever possible, use fetch() instead of iterating over the query
+ results with run() or __iter__(). fetch() is more efficient.
+
+ Beware: fetch() ignores the LIMIT clause on GQL queries.
+
+ Args:
+ limit: Maximum number of results to return.
+ offset: Optional number of results to skip first; default zero.
+
+ Returns:
+ A list of db.Model instances. There may be fewer than 'limit'
+ results if there aren't enough results to satisfy the request.
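+
+ Example (an illustrative sketch; returns at most 10 Story instances
+ after skipping the first 20 results):
+
+ stories = Story.all().fetch(10, offset=20)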
+ """
+ accepted = (int, long)
+ if not (isinstance(limit, accepted) and isinstance(offset, accepted)):
+ raise TypeError('Arguments to fetch() must be integers')
+ if limit < 0 or offset < 0:
+ raise ValueError('Arguments to fetch() must be >= 0')
+ if limit == 0:
+ return []
+ raw = self._get_query().Get(limit, offset)
+
+ if self._keys_only:
+ return raw
+ else:
+ if self._model_class is not None:
+ return [self._model_class.from_entity(e) for e in raw]
+ else:
+ return [class_for_kind(e.kind()).from_entity(e) for e in raw]
+
+ def __getitem__(self, arg):
+ """Support for query[index] and query[start:stop].
+
+ Beware: this ignores the LIMIT clause on GQL queries.
+
+ Args:
+ arg: Either a single integer, corresponding to the query[index]
+ syntax, or a Python slice object, corresponding to the
+ query[start:stop] or query[start:stop:step] syntax.
+
+ Returns:
+ A single Model instance when the argument is a single integer.
+ A list of Model instances when the argument is a slice.
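+
+ Example (an illustrative sketch; each access issues a fetch() under
+ the hood):
+
+ query = Story.all().order('-date')
+ newest = query[0] # fetch(1, 0)
+ page = query[10:20] # fetch(10, 10)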
+ """
+ if isinstance(arg, slice):
+ start, stop, step = arg.start, arg.stop, arg.step
+ if start is None:
+ start = 0
+ if stop is None:
+ raise ValueError('Open-ended slices are not supported')
+ if step is None:
+ step = 1
+ if start < 0 or stop < 0 or step != 1:
+ raise ValueError(
+ 'Only slices with start>=0, stop>=0, step==1 are supported')
+ limit = stop - start
+ if limit < 0:
+ return []
+ return self.fetch(limit, start)
+ elif isinstance(arg, (int, long)):
+ if arg < 0:
+ raise ValueError('Only indices >= 0 are supported')
+ results = self.fetch(1, arg)
+ if results:
+ return results[0]
+ else:
+ raise IndexError('The query returned fewer than %d results' % (arg+1))
+ else:
+ raise TypeError('Only integer indices and slices are supported')
+
+
+class _QueryIterator(object):
+ """Wraps the datastore iterator to return Model instances.
+
+ The datastore returns entities. We wrap the datastore iterator to
+ return Model instances instead.
+ """
+
+ def __init__(self, model_class, datastore_iterator):
+ """Iterator constructor
+
+ Args:
+ model_class: Model class from which entities are constructed.
+ datastore_iterator: Underlying datastore iterator.
+ """
+ self.__model_class = model_class
+ self.__iterator = datastore_iterator
+
+ def __iter__(self):
+ """Iterator on self.
+
+ Returns:
+ Self.
+ """
+ return self
+
+ def next(self):
+ """Get next Model instance in query results.
+
+ Returns:
+ Next model instance.
+
+ Raises:
+ StopIteration when there are no more results in query.
+ """
+ if self.__model_class is not None:
+ return self.__model_class.from_entity(self.__iterator.next())
+ else:
+ entity = self.__iterator.next()
+ return class_for_kind(entity.kind()).from_entity(entity)
+
+
+def _normalize_query_parameter(value):
+ """Make any necessary type conversions to a query parameter.
+
+ The following conversions are made:
+ - Model instances are converted to Key instances. This is necessary so
+ that querying reference properties will work.
+ - datetime.date objects are converted to datetime.datetime objects (see
+ _date_to_datetime for details on this conversion). This is necessary so
+ that querying date properties with date objects will work.
+ - datetime.time objects are converted to datetime.datetime objects (see
+ _time_to_datetime for details on this conversion). This is necessary so
+ that querying time properties with time objects will work.
+
+ Args:
+ value: The query parameter value.
+
+ Returns:
+ The input value, or a converted value if value matches one of the
+ conversions specified above.
+ """
+ if isinstance(value, Model):
+ value = value.key()
+ if (isinstance(value, datetime.date) and
+ not isinstance(value, datetime.datetime)):
+ value = _date_to_datetime(value)
+ elif isinstance(value, datetime.time):
+ value = _time_to_datetime(value)
+ return value
+
+
+class Query(_BaseQuery):
+ """A Query instance queries over instances of Models.
+
+ You construct a query with a model class, like this:
+
+ class Story(db.Model):
+ title = db.StringProperty()
+ date = db.DateTimeProperty()
+
+ query = Query(Story)
+
+ You modify a query with filters and orders like this:
+
+ query.filter('title =', 'Foo')
+ query.order('-date')
+ query.ancestor(key_or_model_instance)
+
+ Every query can return an iterator, so you access the results of a query
+ by iterating over it:
+
+ for story in query:
+ print story.title
+
+ For convenience, all of the filtering and ordering methods return "self",
+ so the easiest way to use the query interface is to cascade all filters and
+ orders in the iterator line like this:
+
+ for story in Query(Story).filter('title =', 'Foo').order('-date'):
+ print story.title
+ """
+
+ def __init__(self, model_class=None, keys_only=False):
+ """Constructs a query over instances of the given Model.
+
+ Args:
+ model_class: Model class to build query for.
+ keys_only: Whether the query should return full entities or only keys.
+ """
+ super(Query, self).__init__(model_class, keys_only)
+ self.__query_sets = [{}]
+ self.__orderings = []
+ self.__ancestor = None
+
+ def _get_query(self,
+ _query_class=datastore.Query,
+ _multi_query_class=datastore.MultiQuery):
+ queries = []
+ for query_set in self.__query_sets:
+ if self._model_class is not None:
+ kind = self._model_class.kind()
+ else:
+ kind = None
+ query = _query_class(kind,
+ query_set,
+ keys_only=self._keys_only)
+ query.Order(*self.__orderings)
+ if self.__ancestor is not None:
+ query.Ancestor(self.__ancestor)
+ queries.append(query)
+
+ if (_query_class != datastore.Query and
+ _multi_query_class == datastore.MultiQuery):
+ warnings.warn(
+ 'Custom _query_class specified without corresponding custom'
+ ' _multi_query_class. Things will break if you use queries with'
+ ' the "IN" or "!=" operators.', RuntimeWarning)
+ if len(queries) > 1:
+ raise datastore_errors.BadArgumentError(
+ 'Query requires multiple subqueries to satisfy. If _query_class'
+ ' is overridden, _multi_query_class must also be overridden.')
+ elif (_query_class == datastore.Query and
+ _multi_query_class != datastore.MultiQuery):
+ raise BadArgumentError('_query_class must also be overridden if'
+ ' _multi_query_class is overridden.')
+
+ if len(queries) == 1:
+ return queries[0]
+ else:
+ return _multi_query_class(queries, self.__orderings)
+
+ def __filter_disjunction(self, operations, values):
+ """Add a disjunction of several filters and several values to the query.
+
+ This is implemented by duplicating queries and combining the
+ results later.
+
+ Args:
+ operations: a string or list of strings. Each string contains a
+ property name and an operator to filter by. The operators
+ themselves must not require multiple queries to evaluate
+ (currently, this means that 'in' and '!=' are invalid).
+
+ values: a value or list of filter values, normalized by
+ _normalize_query_parameter.
+ """
+ if not isinstance(operations, (list, tuple)):
+ operations = [operations]
+ if not isinstance(values, (list, tuple)):
+ values = [values]
+
+ new_query_sets = []
+ for operation in operations:
+ if operation.lower().endswith('in') or operation.endswith('!='):
+ raise BadQueryError('Cannot use "in" or "!=" in a disjunction.')
+ for query_set in self.__query_sets:
+ for value in values:
+ new_query_set = copy.copy(query_set)
+ datastore._AddOrAppend(new_query_set, operation, value)
+ new_query_sets.append(new_query_set)
+ self.__query_sets = new_query_sets
+
+ def filter(self, property_operator, value):
+ """Add filter to query.
+
+ Args:
+ property_operator: string with the property and operator to filter by.
+ value: the filter value.
+
+ Returns:
+ Self to support method chaining.
+
+ Raises:
+ PropertyError if invalid property is provided.
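+
+ Example (an illustrative sketch; cutoff is an assumed datetime value,
+ and an 'IN' filter expands into one subquery per listed value):
+
+ query = Query(Story).filter('title =', 'Foo').filter('date >', cutoff)
+ any_title = Query(Story).filter('title IN', ['Foo', 'Bar'])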
+ """
+ match = _FILTER_REGEX.match(property_operator)
+ prop = match.group(1)
+ if match.group(3) is not None:
+ operator = match.group(3)
+ else:
+ operator = '=='
+
+ if self._model_class is None:
+ if prop != datastore_types._KEY_SPECIAL_PROPERTY:
+ raise BadQueryError(
+ 'Only %s filters are allowed on kindless queries.' %
+ datastore_types._KEY_SPECIAL_PROPERTY)
+ elif prop in self._model_class._unindexed_properties:
+ raise PropertyError('Property \'%s\' is not indexed' % prop)
+
+ if operator.lower() == 'in':
+ if self._keys_only:
+ raise BadQueryError('Keys only queries do not support IN filters.')
+ elif not isinstance(value, (list, tuple)):
+ raise BadValueError('Argument to the "in" operator must be a list')
+ values = [_normalize_query_parameter(v) for v in value]
+ self.__filter_disjunction(prop + ' =', values)
+ else:
+ if isinstance(value, (list, tuple)):
+ raise BadValueError('Filtering on lists is not supported')
+ if operator == '!=':
+ if self._keys_only:
+ raise BadQueryError('Keys only queries do not support != filters.')
+ self.__filter_disjunction([prop + ' <', prop + ' >'],
+ _normalize_query_parameter(value))
+ else:
+ value = _normalize_query_parameter(value)
+ for query_set in self.__query_sets:
+ datastore._AddOrAppend(query_set, property_operator, value)
+
+ return self
+
+ def order(self, property):
+ """Set order of query result.
+
+ To use descending order, prepend '-' (minus) to the property
+ name, e.g., '-date' rather than 'date'.
+
+ Args:
+ property: Property to sort on.
+
+ Returns:
+ Self to support method chaining.
+
+ Raises:
+ PropertyError if invalid property is provided.
+ """
+ if property.startswith('-'):
+ property = property[1:]
+ order = datastore.Query.DESCENDING
+ else:
+ order = datastore.Query.ASCENDING
+
+ if self._model_class is None:
+ if (property != datastore_types._KEY_SPECIAL_PROPERTY or
+ order != datastore.Query.ASCENDING):
+ raise BadQueryError(
+ 'Only %s ascending orders are supported on kindless queries' %
+ datastore_types._KEY_SPECIAL_PROPERTY)
+ else:
+ if not issubclass(self._model_class, Expando):
+ if (property not in self._model_class.properties() and
+ property not in datastore_types._SPECIAL_PROPERTIES):
+ raise PropertyError('Invalid property name \'%s\'' % property)
+
+ if property in self._model_class._unindexed_properties:
+ raise PropertyError('Property \'%s\' is not indexed' % property)
+
+ self.__orderings.append((property, order))
+ return self
+
+ def ancestor(self, ancestor):
+ """Sets an ancestor for this query.
+
+ This restricts the query to only return results that descend from
+ a given model instance. In other words, all of the results will
+ have the ancestor as their parent, or parent's parent, etc. The
+ ancestor itself is also a possible result!
+
+ Args:
+ ancestor: Model or Key (that has already been saved)
+
+ Returns:
+ Self to support method chaining.
+
+ Raises:
+ TypeError if the argument isn't a Key or Model; NotSavedError
+ if it is, but isn't saved yet.
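+
+ Example (an illustrative sketch; parent_story is an assumed saved
+ Story instance):
+
+ query = Query(Story).ancestor(parent_story)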
+ """
+ if isinstance(ancestor, datastore.Key):
+ if ancestor.has_id_or_name():
+ self.__ancestor = ancestor
+ else:
+ raise NotSavedError()
+ elif isinstance(ancestor, Model):
+ if ancestor.has_key():
+ self.__ancestor = ancestor.key()
+ else:
+ raise NotSavedError()
+ else:
+ raise TypeError('ancestor should be Key or Model')
+ return self
+
+
+class GqlQuery(_BaseQuery):
+ """A Query class that uses GQL query syntax instead of .filter() etc."""
+
+ def __init__(self, query_string, *args, **kwds):
+ """Constructor.
+
+ Args:
+ query_string: Properly formatted GQL query string.
+ *args: Positional arguments used to bind numeric references in the query.
+ **kwds: Dictionary-based arguments for named references.
+
+ Raises:
+ PropertyError if the query filters or sorts on a property that's not
+ indexed.
+ """
+ from google.appengine.ext import gql
+ app = kwds.pop('_app', None)
+
+ self._proto_query = gql.GQL(query_string, _app=app)
+ if self._proto_query._entity is not None:
+ model_class = class_for_kind(self._proto_query._entity)
+ else:
+ model_class = None
+ super(GqlQuery, self).__init__(model_class,
+ keys_only=self._proto_query._keys_only)
+
+ if model_class is not None:
+ for property, unused in (self._proto_query.filters().keys() +
+ self._proto_query.orderings()):
+ if property in model_class._unindexed_properties:
+ raise PropertyError('Property \'%s\' is not indexed' % property)
+
+ self.bind(*args, **kwds)
+
+ def bind(self, *args, **kwds):
+ """Bind arguments (positional or keyword) to the query.
+
+ Note that you can also pass arguments directly to the query
+ constructor. Each time you call bind() the previous set of
+ arguments is replaced with the new set. This is useful because
+ the hard work is in parsing the query, so if you expect to be
+ using the same query with different sets of arguments, you should
+ hold on to the GqlQuery() object and call bind() on it each time.
+
+ Args:
+ *args: Positional arguments used to bind numeric references in the query.
+ **kwds: Dictionary-based arguments for named references.
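+
+ Example (an illustrative sketch; the query is parsed once and re-bound
+ with a new value on each iteration):
+
+ query = GqlQuery("SELECT * FROM Story WHERE title = :1", 'Foo')
+ for title in ('Foo', 'Bar'):
+ query.bind(title)
+ story = query.get()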
+ """
+ self._args = []
+ for arg in args:
+ self._args.append(_normalize_query_parameter(arg))
+ self._kwds = {}
+ for name, arg in kwds.iteritems():
+ self._kwds[name] = _normalize_query_parameter(arg)
+
+ def run(self):
+ """Override _BaseQuery.run() so the LIMIT clause is handled properly."""
+ query_run = self._proto_query.Run(*self._args, **self._kwds)
+ if self._keys_only:
+ return query_run
+ else:
+ return _QueryIterator(self._model_class, iter(query_run))
+
+ def _get_query(self):
+ return self._proto_query.Bind(self._args, self._kwds)
+
+
+class UnindexedProperty(Property):
+ """A property that isn't indexed by either built-in or composite indices.
+
+ TextProperty and BlobProperty derive from this class.
+ """
+ def __init__(self, *args, **kwds):
+ """Construct property. See the Property class for details.
+
+ Raises:
+ ConfigurationError if indexed=True.
+ """
+ self._require_parameter(kwds, 'indexed', False)
+ kwds['indexed'] = True
+ super(UnindexedProperty, self).__init__(*args, **kwds)
+
+ def validate(self, value):
+ """Validate property.
+
+ Returns:
+ A valid value.
+
+ Raises:
+ BadValueError if property is not an instance of data_type.
+ """
+ if value is not None and not isinstance(value, self.data_type):
+ try:
+ value = self.data_type(value)
+ except TypeError, err:
+ raise BadValueError('Property %s must be convertible '
+ 'to a %s instance (%s)' %
+ (self.name, self.data_type.__name__, err))
+ value = super(UnindexedProperty, self).validate(value)
+ if value is not None and not isinstance(value, self.data_type):
+ raise BadValueError('Property %s must be a %s instance' %
+ (self.name, self.data_type.__name__))
+ return value
+
+
+class TextProperty(UnindexedProperty):
+ """A string that can be longer than 500 bytes."""
+
+ data_type = Text
+
+
+class StringProperty(Property):
+ """A textual property, which can be multi- or single-line."""
+
+ def __init__(self, verbose_name=None, multiline=False, **kwds):
+ """Construct string property.
+
+ Args:
+ verbose_name: Verbose name is always first parameter.
+ multiline: Whether carriage returns are permitted in the property.
+ """
+ super(StringProperty, self).__init__(verbose_name, **kwds)
+ self.multiline = multiline
+
+ def validate(self, value):
+ """Validate string property.
+
+ Returns:
+ A valid value.
+
+ Raises:
+ BadValueError if property is not multi-line but value is.
+ """
+ value = super(StringProperty, self).validate(value)
+ if value is not None and not isinstance(value, basestring):
+ raise BadValueError(
+ 'Property %s must be a str or unicode instance, not a %s'
+ % (self.name, type(value).__name__))
+ if not self.multiline and value and value.find('\n') != -1:
+ raise BadValueError('Property %s is not multi-line' % self.name)
+ return value
+
+ data_type = basestring
+
+
+class _CoercingProperty(Property):
+ """A Property subclass that extends validate() to coerce to self.data_type."""
+
+ def validate(self, value):
+ """Coerce values (except None) to self.data_type.
+
+ Args:
+ value: The value to be validated and coerced.
+
+ Returns:
+ The coerced and validated value. It is guaranteed that this is
+ either None or an instance of self.data_type; otherwise an exception
+ is raised.
+
+ Raises:
+ BadValueError if the value could not be validated or coerced.
+ """
+ value = super(_CoercingProperty, self).validate(value)
+ if value is not None and not isinstance(value, self.data_type):
+ value = self.data_type(value)
+ return value
+
+
+class CategoryProperty(_CoercingProperty):
+ """A property whose values are Category instances."""
+
+ data_type = Category
+
+
+class LinkProperty(_CoercingProperty):
+ """A property whose values are Link instances."""
+
+ def validate(self, value):
+ value = super(LinkProperty, self).validate(value)
+ if value is not None:
+ scheme, netloc, path, query, fragment = urlparse.urlsplit(value)
+ if not scheme or not netloc:
+ raise BadValueError('Property %s must be a full URL (\'%s\')' %
+ (self.name, value))
+ return value
+
+ data_type = Link
+
+URLProperty = LinkProperty
+
+
+class EmailProperty(_CoercingProperty):
+ """A property whose values are Email instances."""
+
+ data_type = Email
+
+
+class GeoPtProperty(_CoercingProperty):
+ """A property whose values are GeoPt instances."""
+
+ data_type = GeoPt
+
+
+class IMProperty(_CoercingProperty):
+ """A property whose values are IM instances."""
+
+ data_type = IM
+
+
+class PhoneNumberProperty(_CoercingProperty):
+ """A property whose values are PhoneNumber instances."""
+
+ data_type = PhoneNumber
+
+
+class PostalAddressProperty(_CoercingProperty):
+ """A property whose values are PostalAddress instances."""
+
+ data_type = PostalAddress
+
+
+class BlobProperty(UnindexedProperty):
+ """A byte string that can be longer than 500 bytes."""
+
+ data_type = Blob
+
+
+class ByteStringProperty(Property):
+ """A short (<=500 bytes) byte string.
+
+ This type should be used for short binary values that need to be indexed. If
+ you do not require indexing (regardless of length), use BlobProperty instead.
+ """
+
+ def validate(self, value):
+ """Validate ByteString property.
+
+ Returns:
+ A valid value.
+
+ Raises:
+ BadValueError if property is not instance of 'ByteString'.
+ """
+ if value is not None and not isinstance(value, ByteString):
+ try:
+ value = ByteString(value)
+ except TypeError, err:
+ raise BadValueError('Property %s must be convertible '
+ 'to a ByteString instance (%s)' % (self.name, err))
+ value = super(ByteStringProperty, self).validate(value)
+ if value is not None and not isinstance(value, ByteString):
+ raise BadValueError('Property %s must be a ByteString instance'
+ % self.name)
+ return value
+
+ data_type = ByteString
+
+
+class DateTimeProperty(Property):
+ """The base class of all of our date/time properties.
+
+ We handle common operations, like converting between time tuples and
+ datetime instances.
+ """
+
+ def __init__(self, verbose_name=None, auto_now=False, auto_now_add=False,
+ **kwds):
+ """Construct a DateTimeProperty
+
+ Args:
+ verbose_name: Verbose name is always first parameter.
+ auto_now: Date/time property is updated with the current time every time
+ it is saved to the datastore. Useful for properties that want to track
+ the modification time of an instance.
+ auto_now_add: Date/time property is set to the current time when the
+ instance is first created.
+ Useful for properties that record the creation time of an entity.
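+
+ Example (an illustrative sketch):
+
+ class Story(db.Model):
+ created = db.DateTimeProperty(auto_now_add=True) # set once, at creation
+ updated = db.DateTimeProperty(auto_now=True) # refreshed on every put()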
+ """
+ super(DateTimeProperty, self).__init__(verbose_name, **kwds)
+ self.auto_now = auto_now
+ self.auto_now_add = auto_now_add
+
+ def validate(self, value):
+ """Validate datetime.
+
+ Returns:
+ A valid value.
+
+ Raises:
+ BadValueError if property is not instance of 'datetime'.
+ """
+ value = super(DateTimeProperty, self).validate(value)
+ if value and not isinstance(value, self.data_type):
+ raise BadValueError('Property %s must be a %s' %
+ (self.name, self.data_type.__name__))
+ return value
+
+ def default_value(self):
+ """Default value for datetime.
+
+ Returns:
+ Value of now() as appropriate to this date-time property if auto_now
+ or auto_now_add is set; otherwise the user-configured default value.
+ """
+ if self.auto_now or self.auto_now_add:
+ return self.now()
+ return Property.default_value(self)
+
+ def get_value_for_datastore(self, model_instance):
+ """Get value from property to send to datastore.
+
+ Returns:
+ now() as appropriate to the date-time instance in the odd case where
+ auto_now is set to True, else the default implementation.
+ """
+ if self.auto_now:
+ return self.now()
+ else:
+ return super(DateTimeProperty,
+ self).get_value_for_datastore(model_instance)
+
+ data_type = datetime.datetime
+
+ @staticmethod
+ def now():
+ """Get now as a full datetime value.
+
+ Returns:
+ 'now' as a whole timestamp, including both time and date.
+ """
+ return datetime.datetime.now()
+
+
+def _date_to_datetime(value):
+ """Convert a date to a datetime for datastore storage.
+
+ Args:
+ value: A datetime.date object.
+
+ Returns:
+ A datetime object with time set to 0:00.
+ """
+ assert isinstance(value, datetime.date)
+ return datetime.datetime(value.year, value.month, value.day)
+
+
+def _time_to_datetime(value):
+ """Convert a time to a datetime for datastore storage.
+
+ Args:
+ value: A datetime.time object.
+
+ Returns:
+ A datetime object with date set to 1970-01-01.
+ """
+ assert isinstance(value, datetime.time)
+ return datetime.datetime(1970, 1, 1,
+ value.hour, value.minute, value.second,
+ value.microsecond)
+
+
+class DateProperty(DateTimeProperty):
+ """A date property, which stores a date without a time."""
+
+
+ @staticmethod
+ def now():
+ """Get now as a date datetime value.
+
+ Returns:
+ 'date' part of 'now' only.
+ """
+ return datetime.datetime.now().date()
+
+ def validate(self, value):
+ """Validate date.
+
+ Returns:
+ A valid value.
+
+ Raises:
+ BadValueError if property is not instance of 'date',
+ or if it is an instance of 'datetime' (which is a subclass
+ of 'date', but for all practical purposes a different type).
+ """
+ value = super(DateProperty, self).validate(value)
+ if isinstance(value, datetime.datetime):
+ raise BadValueError('Property %s must be a %s, not a datetime' %
+ (self.name, self.data_type.__name__))
+ return value
+
+ def get_value_for_datastore(self, model_instance):
+ """Get value from property to send to datastore.
+
+ We retrieve a datetime.date from the model instance and return a
+ datetime.datetime instance with the time set to zero.
+
+ See base class method documentation for details.
+ """
+ value = super(DateProperty, self).get_value_for_datastore(model_instance)
+ if value is not None:
+ assert isinstance(value, datetime.date)
+ value = _date_to_datetime(value)
+ return value
+
+ def make_value_from_datastore(self, value):
+ """Native representation of this property.
+
+ We receive a datetime.datetime retrieved from the entity and return
+ a datetime.date instance representing its date portion.
+
+ See base class method documentation for details.
+ """
+ if value is not None:
+ assert isinstance(value, datetime.datetime)
+ value = value.date()
+ return value
+
+ data_type = datetime.date
+
+
+class TimeProperty(DateTimeProperty):
+ """A time property, which stores a time without a date."""
+
+
+ @staticmethod
+ def now():
+ """Get now as a time datetime value.
+
+ Returns:
+ 'time' part of 'now' only.
+ """
+ return datetime.datetime.now().time()
+
+ def empty(self, value):
+ """Is time property empty.
+
+ "0:0" (midnight) is not an empty value.
+
+ Returns:
+ True if value is None, else False.
+ """
+ return value is None
+
+ def get_value_for_datastore(self, model_instance):
+ """Get value from property to send to datastore.
+
+ We retrieve a datetime.time from the model instance and return a
+ datetime.datetime instance with the date set to 1/1/1970.
+
+ See base class method documentation for details.
+ """
+ value = super(TimeProperty, self).get_value_for_datastore(model_instance)
+ if value is not None:
+ assert isinstance(value, datetime.time), repr(value)
+ value = _time_to_datetime(value)
+ return value
+
+ def make_value_from_datastore(self, value):
+ """Native representation of this property.
+
+ We receive a datetime.datetime retrieved from the entity and return
+ a datetime.time instance representing its time portion.
+
+ See base class method documentation for details.
+ """
+ if value is not None:
+ assert isinstance(value, datetime.datetime)
+ value = value.time()
+ return value
+
+ data_type = datetime.time
+
+
+class IntegerProperty(Property):
+ """An integer property."""
+
+ def validate(self, value):
+ """Validate integer property.
+
+ Returns:
+ A valid value.
+
+ Raises:
+ BadValueError if value is not an integer or long instance.
+ """
+ value = super(IntegerProperty, self).validate(value)
+ if value is None:
+ return value
+ if not isinstance(value, (int, long)) or isinstance(value, bool):
+ raise BadValueError('Property %s must be an int or long, not a %s'
+ % (self.name, type(value).__name__))
+ if value < -0x8000000000000000 or value > 0x7fffffffffffffff:
+ raise BadValueError('Property %s must fit in 64 bits' % self.name)
+ return value
+
+ data_type = int
+
+ def empty(self, value):
+ """Is integer property empty.
+
+ 0 is not an empty value.
+
+ Returns:
+ True if value is None, else False.
+ """
+ return value is None
+
+
+class RatingProperty(_CoercingProperty, IntegerProperty):
+ """A property whose values are Rating instances."""
+
+ data_type = Rating
+
+
+class FloatProperty(Property):
+ """A float property."""
+
+ def validate(self, value):
+ """Validate float.
+
+ Returns:
+ A valid value.
+
+ Raises:
+ BadValueError if property is not instance of 'float'.
+ """
+ value = super(FloatProperty, self).validate(value)
+ if value is not None and not isinstance(value, float):
+ raise BadValueError('Property %s must be a float' % self.name)
+ return value
+
+ data_type = float
+
+ def empty(self, value):
+ """Is float property empty.
+
+ 0.0 is not an empty value.
+
+ Returns:
+ True if value is None, else False.
+ """
+ return value is None
+
+
+class BooleanProperty(Property):
+ """A boolean property."""
+
+ def validate(self, value):
+ """Validate boolean.
+
+ Returns:
+ A valid value.
+
+ Raises:
+ BadValueError if property is not instance of 'bool'.
+ """
+ value = super(BooleanProperty, self).validate(value)
+ if value is not None and not isinstance(value, bool):
+ raise BadValueError('Property %s must be a bool' % self.name)
+ return value
+
+ data_type = bool
+
+ def empty(self, value):
+ """Is boolean property empty.
+
+ False is not an empty value.
+
+ Returns:
+ True if value is None, else False.
+ """
+ return value is None
+
+
+class UserProperty(Property):
+ """A user property."""
+
+ def __init__(self,
+ verbose_name=None,
+ name=None,
+ required=False,
+ validator=None,
+ choices=None,
+ auto_current_user=False,
+ auto_current_user_add=False,
+ indexed=True):
+ """Initializes this Property with the given options.
+
+ Note: this does *not* support the 'default' keyword argument.
+ Use auto_current_user_add=True instead.
+
+ Args:
+ verbose_name: User friendly name of property.
+ name: Storage name for property. By default, uses attribute name
+ as it is assigned in the Model sub-class.
+ required: Whether property is required.
+ validator: User provided method used for validation.
+ choices: User provided set of valid property values.
+ auto_current_user: If true, the value is set to the current user
+ each time the entity is written to the datastore.
+ auto_current_user_add: If true, the value is set to the current user
+ the first time the entity is written to the datastore.
+ indexed: Whether property is indexed.
+ """
+ super(UserProperty, self).__init__(verbose_name, name,
+ required=required,
+ validator=validator,
+ choices=choices,
+ indexed=indexed)
+ self.auto_current_user = auto_current_user
+ self.auto_current_user_add = auto_current_user_add
+
+ def validate(self, value):
+ """Validate user.
+
+ Returns:
+ A valid value.
+
+ Raises:
+ BadValueError if property is not instance of 'User'.
+ """
+ value = super(UserProperty, self).validate(value)
+ if value is not None and not isinstance(value, users.User):
+ raise BadValueError('Property %s must be a User' % self.name)
+ return value
+
+ def default_value(self):
+ """Default value for user.
+
+ Returns:
+ Value of users.get_current_user() if auto_current_user or
+ auto_current_user_add is set; else None. (But *not* the default
+ implementation, since we don't support the 'default' keyword
+ argument.)
+ """
+ if self.auto_current_user or self.auto_current_user_add:
+ return users.get_current_user()
+ return None
+
+ def get_value_for_datastore(self, model_instance):
+ """Get value from property to send to datastore.
+
+ Returns:
+ Value of users.get_current_user() if auto_current_user is set;
+ else the default implementation.
+ """
+ if self.auto_current_user:
+ return users.get_current_user()
+ return super(UserProperty, self).get_value_for_datastore(model_instance)
+
+ data_type = users.User
+
+
+class ListProperty(Property):
+ """A property that stores a list of things.
+
+ This is a parameterized property; the parameter must be a valid
+ non-list data type, and all items must conform to this type.
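+
+ Example (an illustrative sketch; note that a str item_type is widened
+ to basestring by the constructor):
+
+ class Story(db.Model):
+ tags = db.ListProperty(basestring)
+ ratings = db.ListProperty(int, default=[3])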
+ """
+
+ def __init__(self, item_type, verbose_name=None, default=None, **kwds):
+ """Construct ListProperty.
+
+ Args:
+ item_type: Type for the list items; must be one of the allowed property
+ types.
+ verbose_name: Optional verbose name.
+ default: Optional default value; if omitted, an empty list is used.
+ **kwds: Optional additional keyword arguments, passed to base class.
+
+ Note that the only permissible value for 'required' is True.
+ """
+ if item_type is str:
+ item_type = basestring
+ if not isinstance(item_type, type):
+ raise TypeError('Item type should be a type object')
+ if item_type not in _ALLOWED_PROPERTY_TYPES:
+ raise ValueError('Item type %s is not acceptable' % item_type.__name__)
+ if issubclass(item_type, (Blob, Text)):
+ self._require_parameter(kwds, 'indexed', False)
+ kwds['indexed'] = True
+ self._require_parameter(kwds, 'required', True)
+ if default is None:
+ default = []
+ self.item_type = item_type
+ super(ListProperty, self).__init__(verbose_name,
+ default=default,
+ **kwds)
+
+ def validate(self, value):
+ """Validate list.
+
+ Returns:
+ A valid value.
+
+ Raises:
+ BadValueError if property is not a list whose items are instances of
+ the item_type given to the constructor.
+ """
+ value = super(ListProperty, self).validate(value)
+ if value is not None:
+ if not isinstance(value, list):
+ raise BadValueError('Property %s must be a list' % self.name)
+
+ value = self.validate_list_contents(value)
+ return value
+
+ def validate_list_contents(self, value):
+ """Validates that all items in the list are of the correct type.
+
+ Returns:
+ The validated list.
+
+ Raises:
+ BadValueError if the list has items that are not instances of the
+ item_type given to the constructor.
+ """
+ if self.item_type in (int, long):
+ item_type = (int, long)
+ else:
+ item_type = self.item_type
+
+ for item in value:
+ if not isinstance(item, item_type):
+ if item_type == (int, long):
+ raise BadValueError('Items in the %s list must all be integers.' %
+ self.name)
+ else:
+ raise BadValueError(
+ 'Items in the %s list must all be %s instances' %
+ (self.name, self.item_type.__name__))
+ return value
+
+ def empty(self, value):
+ """Is list property empty.
+
+ [] is not an empty value.
+
+ Returns:
+ True if value is None, else False.
+ """
+ return value is None
+
+ data_type = list
+
+ def default_value(self):
+ """Default value for list.
+
+ Because the value supplied to 'default' is a static object,
+ that value must be shallow copied to prevent all fields with
+ default values from sharing the same instance.
+
+ Returns:
+ Copy of the default value.
+ """
+ return list(super(ListProperty, self).default_value())
+
+ def get_value_for_datastore(self, model_instance):
+ """Get value from property to send to datastore.
+
+ Returns:
+ validated list appropriate to save in the datastore.
+ """
+ value = self.validate_list_contents(
+ super(ListProperty, self).get_value_for_datastore(model_instance))
+ if self.validator:
+ self.validator(value)
+ return value
+
+
+class StringListProperty(ListProperty):
+ """A property that stores a list of strings.
+
+ A shorthand for the most common type of ListProperty.
+ """
+
+ def __init__(self, verbose_name=None, default=None, **kwds):
+ """Construct StringListProperty.
+
+ Args:
+ verbose_name: Optional verbose name.
+ default: Optional default value; if omitted, an empty list is used.
+ **kwds: Optional additional keyword arguments, passed to ListProperty().
+ """
+ super(StringListProperty, self).__init__(basestring,
+ verbose_name=verbose_name,
+ default=default,
+ **kwds)
+
+
+class ReferenceProperty(Property):
+ """A property that represents a many-to-one reference to another model.
+
+ For example, a reference property in model A that refers to model B forms
+ a many-to-one relationship from A to B: every instance of A refers to a
+ single B instance, and every B instance can have many A instances refer
+ to it.
+ """
+
+ def __init__(self,
+ reference_class=None,
+ verbose_name=None,
+ collection_name=None,
+ **attrs):
+ """Construct ReferenceProperty.
+
+ Args:
+ reference_class: Which model class this property references.
+ verbose_name: User friendly name of property.
+ collection_name: If provided, alternate name of collection on
+ reference_class to store back references. Use this to allow
+ a Model to have multiple fields which refer to the same class.
+ """
+ super(ReferenceProperty, self).__init__(verbose_name, **attrs)
+
+ self.collection_name = collection_name
+
+ if reference_class is None:
+ reference_class = Model
+ if not ((isinstance(reference_class, type) and
+ issubclass(reference_class, Model)) or
+ reference_class is _SELF_REFERENCE):
+ raise KindError('reference_class must be Model or _SELF_REFERENCE')
+ self.reference_class = self.data_type = reference_class
+
+ def __property_config__(self, model_class, property_name):
+ """Loads all of the references that point to this model.
+
+ We need to do this to create the ReverseReferenceProperty properties for
+ this model and create the <reference>_set attributes on the referenced
+ model, e.g.:
+
+ class Story(db.Model):
+ title = db.StringProperty()
+ class Comment(db.Model):
+ story = db.ReferenceProperty(Story)
+ story = Story.get(id)
+ print [c for c in story.comment_set]
+
+ In this example, the comment_set property was created based on the reference
+ from Comment to Story (which is inherently one to many).
+
+ Args:
+ model_class: Model class which will have its reference properties
+ initialized.
+ property_name: Name of property being configured.
+
+ Raises:
+ DuplicatePropertyError if referenced class already has the provided
+ collection name as a property.
+ """
+ super(ReferenceProperty, self).__property_config__(model_class,
+ property_name)
+
+ if self.reference_class is _SELF_REFERENCE:
+ self.reference_class = self.data_type = model_class
+
+ if self.collection_name is None:
+ self.collection_name = '%s_set' % (model_class.__name__.lower())
+ existing_prop = getattr(self.reference_class, self.collection_name, None)
+ if existing_prop is not None:
+ if not (isinstance(existing_prop, _ReverseReferenceProperty) and
+ existing_prop._prop_name == property_name and
+ existing_prop._model.__name__ == model_class.__name__ and
+ existing_prop._model.__module__ == model_class.__module__):
+ raise DuplicatePropertyError('Class %s already has property %s '
+ % (self.reference_class.__name__,
+ self.collection_name))
+ setattr(self.reference_class,
+ self.collection_name,
+ _ReverseReferenceProperty(model_class, property_name))
+
+ def __get__(self, model_instance, model_class):
+ """Get reference object.
+
+ This method will fetch unresolved entities from the datastore if
+ they are not already loaded.
+
+ Returns:
+ ReferenceProperty to Model object if property is set, else None.
+ """
+ if model_instance is None:
+ return self
+ if hasattr(model_instance, self.__id_attr_name()):
+ reference_id = getattr(model_instance, self.__id_attr_name())
+ else:
+ reference_id = None
+ if reference_id is not None:
+ resolved = getattr(model_instance, self.__resolved_attr_name())
+ if resolved is not None:
+ return resolved
+ else:
+ instance = get(reference_id)
+ if instance is None:
+ raise Error('ReferenceProperty failed to be resolved')
+ setattr(model_instance, self.__resolved_attr_name(), instance)
+ return instance
+ else:
+ return None
+
+ def __set__(self, model_instance, value):
+ """Set reference."""
+ value = self.validate(value)
+ if value is not None:
+ if isinstance(value, datastore.Key):
+ setattr(model_instance, self.__id_attr_name(), value)
+ setattr(model_instance, self.__resolved_attr_name(), None)
+ else:
+ setattr(model_instance, self.__id_attr_name(), value.key())
+ setattr(model_instance, self.__resolved_attr_name(), value)
+ else:
+ setattr(model_instance, self.__id_attr_name(), None)
+ setattr(model_instance, self.__resolved_attr_name(), None)
+
+ def get_value_for_datastore(self, model_instance):
+ """Get key of reference rather than reference itself."""
+ return getattr(model_instance, self.__id_attr_name())
+
+ def validate(self, value):
+ """Validate reference.
+
+ Returns:
+ A valid value.
+
+ Raises:
+ BadValueError for the following reasons:
+ - Value is not saved.
+ - Object not of correct model type for reference.
+ """
+ if isinstance(value, datastore.Key):
+ return value
+
+ if value is not None and not value.has_key():
+ raise BadValueError(
+ '%s instance must have a complete key before it can be stored as a '
+ 'reference' % self.reference_class.kind())
+
+ value = super(ReferenceProperty, self).validate(value)
+
+ if value is not None and not isinstance(value, self.reference_class):
+ raise KindError('Property %s must be an instance of %s' %
+ (self.name, self.reference_class.kind()))
+
+ return value
+
+ def __id_attr_name(self):
+ """Get attribute of referenced id.
+
+ Returns:
+ Attribute where to store id of referenced entity.
+ """
+ return self._attr_name()
+
+ def __resolved_attr_name(self):
+ """Get attribute of resolved attribute.
+
+ The resolved attribute is where the actual loaded reference instance is
+ stored on the referring model instance.
+
+ Returns:
+ Attribute name of where to store resolved reference model instance.
+ """
+ return '_RESOLVED' + self._attr_name()
+
+
+Reference = ReferenceProperty
+
+
+def SelfReferenceProperty(verbose_name=None, collection_name=None, **attrs):
+ """Create a self reference.
+
+ Function for declaring a self referencing property on a model.
+
+ Example:
+ class HtmlNode(db.Model):
+ parent = db.SelfReferenceProperty('Parent', 'children')
+
+ Args:
+ verbose_name: User friendly name of property.
+ collection_name: Name of collection on model.
+
+ Raises:
+ ConfigurationError if reference_class is provided as a parameter.
+ """
+ if 'reference_class' in attrs:
+ raise ConfigurationError(
+ 'Do not provide reference_class to self-reference.')
+ return ReferenceProperty(_SELF_REFERENCE,
+ verbose_name,
+ collection_name,
+ **attrs)
+
+
+SelfReference = SelfReferenceProperty
+
+
+class _ReverseReferenceProperty(Property):
+ """The inverse of the Reference property above.
+
+ We construct reverse references automatically for the model to which
+ the Reference property is pointing to create the one-to-many property for
+ that model. For example, if you put a Reference property in model A that
+ refers to model B, we automatically create a _ReverseReference property in
+ B called a_set that can fetch all of the model A instances that refer to
+ that instance of model B.
+ """
+
+ def __init__(self, model, prop):
+ """Constructor for reverse reference.
+
+ Constructor does not take standard values of other property types.
+
+ Args:
+ model: Model class that this property is a collection of.
+ prop: Name of foreign property on referred model that points back
+ to this property's entity.
+ """
+ self.__model = model
+ self.__property = prop
+
+ @property
+ def _model(self):
+ """Internal helper to access the model class, read-only."""
+ return self.__model
+
+ @property
+ def _prop_name(self):
+ """Internal helper to access the property name, read-only."""
+ return self.__property
+
+ def __get__(self, model_instance, model_class):
+ """Fetches collection of model instances of this collection property."""
+ if model_instance is not None:
+ query = Query(self.__model)
+ return query.filter(self.__property + ' =', model_instance.key())
+ else:
+ return self
+
+ def __set__(self, model_instance, value):
+ """Not possible to set a new collection."""
+ raise BadValueError('Virtual property is read-only')
+
+
+run_in_transaction = datastore.RunInTransaction
+run_in_transaction_custom_retries = datastore.RunInTransactionCustomRetries
+
+RunInTransaction = run_in_transaction
+RunInTransactionCustomRetries = run_in_transaction_custom_retries
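+
+
+# Example (an illustrative sketch): atomically increment an assumed Counter
+# model inside a transaction. counter_key is an assumed Key of a stored
+# Counter entity with an integer 'count' property.
+#
+# def increment(key):
+#   counter = get(key)
+#   counter.count += 1
+#   counter.put()
+#   return counter
+#
+# updated = run_in_transaction(increment, counter_key)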
diff --git a/google_appengine/google/appengine/ext/db/__init__.pyc b/google_appengine/google/appengine/ext/db/__init__.pyc
new file mode 100644
index 0000000..8e9880f
--- /dev/null
+++ b/google_appengine/google/appengine/ext/db/__init__.pyc
Binary files differ
diff --git a/google_appengine/google/appengine/ext/db/djangoforms.py b/google_appengine/google/appengine/ext/db/djangoforms.py
new file mode 100755
index 0000000..98f347f
--- /dev/null
+++ b/google_appengine/google/appengine/ext/db/djangoforms.py
@@ -0,0 +1,886 @@
+#!/usr/bin/env python
+#
+# Copyright 2007 Google Inc.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
+
+"""Support for creating Django (new) forms from Datastore data models.
+
+This is our best shot at supporting as much of Django as possible: you
+won't be able to use Django's db package, but you can use our
+db package instead, and create Django forms from it, either fully
+automatically, or with overrides.
+
+Note, you should not import these classes from this module. Importing
+this module patches the classes in place, and you should continue to
+import them from google.appengine.ext.db.
+
+Some of the code here is strongly inspired by Django's own ModelForm
+class (new in Django 0.97). Our code also supports Django 0.96 (so as
+to be maximally compatible). Note that our API is always similar to
+Django 0.97's API, even when used with Django 0.96 (which uses a
+different API, chiefly form_for_model()).
+
+Terminology notes:
+ - forms: always refers to the Django newforms subpackage
+ - field: always refers to a Django forms.Field instance
+ - property: always refers to a db.Property instance
+
+Mapping between properties and fields:
+
++====================+===================+==============+====================+
+| Property subclass | Field subclass | datatype | widget; notes |
++====================+===================+==============+====================+
+| StringProperty | CharField | unicode | Textarea |
+| | | | if multiline |
++--------------------+-------------------+--------------+--------------------+
+| TextProperty | CharField | unicode | Textarea |
++--------------------+-------------------+--------------+--------------------+
+| BlobProperty | FileField | str | skipped in v0.96 |
++--------------------+-------------------+--------------+--------------------+
+| DateTimeProperty | DateTimeField | datetime | skipped |
+| | | | if auto_now[_add] |
++--------------------+-------------------+--------------+--------------------+
+| DateProperty | DateField | date | ditto |
++--------------------+-------------------+--------------+--------------------+
+| TimeProperty | TimeField | time | ditto |
++--------------------+-------------------+--------------+--------------------+
+| IntegerProperty | IntegerField | int or long | |
++--------------------+-------------------+--------------+--------------------+
+| FloatProperty | FloatField | float | CharField in v0.96 |
++--------------------+-------------------+--------------+--------------------+
+| BooleanProperty | BooleanField | bool | |
++--------------------+-------------------+--------------+--------------------+
+| UserProperty | CharField | users.User | |
++--------------------+-------------------+--------------+--------------------+
+| StringListProperty | CharField | list of str | Textarea |
++--------------------+-------------------+--------------+--------------------+
+| LinkProperty | URLField | str | |
++--------------------+-------------------+--------------+--------------------+
+| ReferenceProperty | ModelChoiceField* | db.Model | |
++--------------------+-------------------+--------------+--------------------+
+| _ReverseReferenceP.| None | <iterable> | always skipped |
++====================+===================+==============+====================+
+
+Notes:
+*: this Field subclass is defined by us, not in Django.
+"""
+
+
+
+import itertools
+import logging
+
+
+import django.core.exceptions
+import django.utils.datastructures
+
+try:
+ from django import newforms as forms
+except ImportError:
+ from django import forms
+
+try:
+ from django.utils.translation import ugettext_lazy as _
+except ImportError:
+ pass
+
+from google.appengine.api import users
+from google.appengine.ext import db
+
+
+
+
+def monkey_patch(name, bases, namespace):
+ """A 'metaclass' for adding new methods to an existing class.
+
+ This shouldn't be used to override existing methods. However,
+ because loading this module (like loading any module) should be
+ idempotent, we don't assert that.
+
+ Usage example:
+
+ class PatchClass(TargetClass):
+ __metaclass__ = monkey_patch
+ def foo(self, ...): ...
+ def bar(self, ...): ...
+
+ This is equivalent to:
+
+ def foo(self, ...): ...
+ def bar(self, ...): ...
+ TargetClass.foo = foo
+ TargetClass.bar = bar
+ PatchClass = TargetClass
+
+ Note that PatchClass becomes an alias for TargetClass; by convention
+ it is recommended to give PatchClass the same name as TargetClass.
+ """
+
+ assert len(bases) == 1, 'Exactly one base class is required'
+ base = bases[0]
+ for name, value in namespace.iteritems():
+ if name not in ('__metaclass__', '__module__'):
+ setattr(base, name, value)
+ return base
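+
+
+# A minimal sketch (ours, not part of the SDK) of monkey_patch on a
+# made-up TargetClass; uncommented, this runs as-is under Python 2:
+#
+#   class TargetClass(object):
+#     pass
+#
+#   class PatchClass(TargetClass):
+#     __metaclass__ = monkey_patch
+#     def greet(self):
+#       return 'hello'
+#
+#   assert PatchClass is TargetClass         # PatchClass aliases TargetClass
+#   assert TargetClass().greet() == 'hello'  # greet() was copied onto it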
+
+
+
+
+class Property(db.Property):
+ __metaclass__ = monkey_patch
+
+ def get_form_field(self, form_class=forms.CharField, **kwargs):
+ """Return a Django form field appropriate for this property.
+
+ Args:
+ form_class: a forms.Field subclass, default forms.CharField
+
+ Additional keyword arguments are passed to the form_class constructor,
+ with certain defaults:
+ required: self.required
+ label: prettified self.verbose_name, if not None
+ widget: a forms.Select instance if self.choices is non-empty
+ initial: self.default, if not None
+
+ Returns:
+ A fully configured instance of form_class, or None if no form
+ field should be generated for this property.
+ """
+ defaults = {'required': self.required}
+ if self.verbose_name:
+ defaults['label'] = self.verbose_name.capitalize().replace('_', ' ')
+ if self.choices:
+ choices = []
+ if not self.required or (self.default is None and
+ 'initial' not in kwargs):
+ choices.append(('', '---------'))
+ for choice in self.choices:
+ choices.append((str(choice), unicode(choice)))
+ defaults['widget'] = forms.Select(choices=choices)
+ if self.default is not None:
+ defaults['initial'] = self.default
+ defaults.update(kwargs)
+ return form_class(**defaults)
+
+ def get_value_for_form(self, instance):
+ """Extract the property value from the instance for use in a form.
+
+ Override this to do a property- or field-specific type conversion.
+
+ Args:
+ instance: a db.Model instance
+
+ Returns:
+ The property's value extracted from the instance, possibly
+ converted to a type suitable for a form field; possibly None.
+
+ By default this returns the instance attribute's value unchanged.
+ """
+ return getattr(instance, self.name)
+
+ def make_value_from_form(self, value):
+ """Convert a form value to a property value.
+
+ Override this to do a property- or field-specific type conversion.
+
+ Args:
+ value: the cleaned value retrieved from the form field
+
+ Returns:
+ A value suitable for assignment to a model instance's property;
+ possibly None.
+
+ By default this converts the value to self.data_type if it
+ isn't already an instance of that type, except if the value is
+ empty, in which case we return None.
+ """
+ if value in (None, ''):
+ return None
+ if not isinstance(value, self.data_type):
+ value = self.data_type(value)
+ return value
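+
+
+# A hedged sketch (MyModel and tagline are invented names) of the
+# get_form_field() defaults described above:
+#
+#   class MyModel(db.Model):
+#     tagline = db.StringProperty(verbose_name='tag_line', default='n/a')
+#
+#   field = MyModel.tagline.get_form_field(required=True)
+#   # field is a forms.CharField whose label is 'Tag line' (prettified
+#   # verbose_name), whose initial value is 'n/a' (the property default),
+#   # and whose required=True comes from the explicit keyword override.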
+
+
+class UserProperty(db.Property):
+ """This class exists solely to log a warning when it is used."""
+
+ def __init__(self, *args, **kwds):
+ logging.warn("Please don't use modelforms.UserProperty; "
+ "use db.UserProperty instead.")
+ super(UserProperty, self).__init__(*args, **kwds)
+
+
+class StringProperty(db.StringProperty):
+ __metaclass__ = monkey_patch
+
+ def get_form_field(self, **kwargs):
+ """Return a Django form field appropriate for a string property.
+
+ This sets the widget default to forms.Textarea if the property's
+ multiline attribute is set.
+ """
+ defaults = {}
+ if self.multiline:
+ defaults['widget'] = forms.Textarea
+ defaults.update(kwargs)
+ return super(StringProperty, self).get_form_field(**defaults)
+
+
+class TextProperty(db.TextProperty):
+ __metaclass__ = monkey_patch
+
+ def get_form_field(self, **kwargs):
+ """Return a Django form field appropriate for a text property.
+
+ This sets the widget default to forms.Textarea.
+ """
+ defaults = {'widget': forms.Textarea}
+ defaults.update(kwargs)
+ return super(TextProperty, self).get_form_field(**defaults)
+
+
+class BlobProperty(db.BlobProperty):
+ __metaclass__ = monkey_patch
+
+ def get_form_field(self, **kwargs):
+ """Return a Django form field appropriate for a blob property.
+
+ This defaults to a forms.FileField instance when using Django 0.97
+ or later. For 0.96 this returns None, as file uploads are not
+ really supported in that version.
+ """
+ if not hasattr(forms, 'FileField'):
+ return None
+ defaults = {'form_class': forms.FileField}
+ defaults.update(kwargs)
+ return super(BlobProperty, self).get_form_field(**defaults)
+
+ def get_value_for_form(self, instance):
+ """Extract the property value from the instance for use in a form.
+
+ There is no way to convert a Blob into an initial value for a file
+ upload, so we always return None.
+ """
+ return None
+
+ def make_value_from_form(self, value):
+ """Convert a form value to a property value.
+
+ This extracts the content from the UploadedFile instance returned
+ by the FileField instance.
+ """
+ if value.__class__.__name__ == 'UploadedFile':
+ return db.Blob(value.content)
+ return super(BlobProperty, self).make_value_from_form(value)
+
+
+class DateTimeProperty(db.DateTimeProperty):
+ __metaclass__ = monkey_patch
+
+ def get_form_field(self, **kwargs):
+ """Return a Django form field appropriate for a date-time property.
+
+ This defaults to a DateTimeField instance, except if auto_now or
+ auto_now_add is set, in which case None is returned, as such
+ 'auto' fields should not be rendered as part of the form.
+ """
+ if self.auto_now or self.auto_now_add:
+ return None
+ defaults = {'form_class': forms.DateTimeField}
+ defaults.update(kwargs)
+ return super(DateTimeProperty, self).get_form_field(**defaults)
+
+
+class DateProperty(db.DateProperty):
+ __metaclass__ = monkey_patch
+
+ def get_form_field(self, **kwargs):
+ """Return a Django form field appropriate for a date property.
+
+ This defaults to a DateField instance, except if auto_now or
+ auto_now_add is set, in which case None is returned, as such
+ 'auto' fields should not be rendered as part of the form.
+ """
+ if self.auto_now or self.auto_now_add:
+ return None
+ defaults = {'form_class': forms.DateField}
+ defaults.update(kwargs)
+ return super(DateProperty, self).get_form_field(**defaults)
+
+
+class TimeProperty(db.TimeProperty):
+ __metaclass__ = monkey_patch
+
+ def get_form_field(self, **kwargs):
+ """Return a Django form field appropriate for a time property.
+
+ This defaults to a TimeField instance, except if auto_now or
+ auto_now_add is set, in which case None is returned, as such
+ 'auto' fields should not be rendered as part of the form.
+ """
+ if self.auto_now or self.auto_now_add:
+ return None
+ defaults = {'form_class': forms.TimeField}
+ defaults.update(kwargs)
+ return super(TimeProperty, self).get_form_field(**defaults)
+
+
+class IntegerProperty(db.IntegerProperty):
+ __metaclass__ = monkey_patch
+
+ def get_form_field(self, **kwargs):
+ """Return a Django form field appropriate for an integer property.
+
+ This defaults to an IntegerField instance.
+ """
+ defaults = {'form_class': forms.IntegerField}
+ defaults.update(kwargs)
+ return super(IntegerProperty, self).get_form_field(**defaults)
+
+
+class FloatProperty(db.FloatProperty):
+ __metaclass__ = monkey_patch
+
+ def get_form_field(self, **kwargs):
+ """Return a Django form field appropriate for an integer property.
+
+ This defaults to a FloatField instance when using Django 0.97 or
+ later. For 0.96 this defaults to the CharField class.
+ """
+ defaults = {}
+ if hasattr(forms, 'FloatField'):
+ defaults['form_class'] = forms.FloatField
+ defaults.update(kwargs)
+ return super(FloatProperty, self).get_form_field(**defaults)
+
+
+class BooleanProperty(db.BooleanProperty):
+ __metaclass__ = monkey_patch
+
+ def get_form_field(self, **kwargs):
+ """Return a Django form field appropriate for a boolean property.
+
+ This defaults to a BooleanField.
+ """
+ defaults = {'form_class': forms.BooleanField}
+ defaults.update(kwargs)
+ return super(BooleanProperty, self).get_form_field(**defaults)
+
+ def make_value_from_form(self, value):
+ """Convert a form value to a property value.
+
+ This is needed to ensure that False is not replaced with None.
+ """
+ if value is None:
+ return None
+ if isinstance(value, basestring) and value.lower() == 'false':
+ return False
+ return bool(value)
+
+
+class StringListProperty(db.StringListProperty):
+ __metaclass__ = monkey_patch
+
+
+ def get_form_field(self, **kwargs):
+ """Return a Django form field appropriate for a StringList property.
+
+ This defaults to a Textarea widget with a blank initial value.
+ """
+ defaults = {'widget': forms.Textarea,
+ 'initial': ''}
+ defaults.update(kwargs)
+ return super(StringListProperty, self).get_form_field(**defaults)
+
+ def get_value_for_form(self, instance):
+ """Extract the property value from the instance for use in a form.
+
+ This joins a list of strings with newlines.
+ """
+ value = super(StringListProperty, self).get_value_for_form(instance)
+ if not value:
+ return None
+ if isinstance(value, list):
+ value = '\n'.join(value)
+ return value
+
+ def make_value_from_form(self, value):
+ """Convert a form value to a property value.
+
+ This breaks the string into lines.
+ """
+ if not value:
+ return []
+ if isinstance(value, basestring):
+ value = value.splitlines()
+ return value
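+
+
+# A quick round-trip sketch (ours) of the conversions above:
+#
+#   prop = StringListProperty()
+#   prop.make_value_from_form(u'red\ngreen')  # -> [u'red', u'green']
+#   # get_value_for_form() performs the reverse '\n'.join() on instance
+#   # values that are lists.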
+
+
+class LinkProperty(db.LinkProperty):
+ __metaclass__ = monkey_patch
+
+ def get_form_field(self, **kwargs):
+ """Return a Django form field appropriate for a URL property.
+
+ This defaults to a URLField instance.
+ """
+ defaults = {'form_class': forms.URLField}
+ defaults.update(kwargs)
+ return super(LinkProperty, self).get_form_field(**defaults)
+
+
+class _WrapIter(object):
+ """Helper class whose iter() calls a given function to get an iterator."""
+
+ def __init__(self, function):
+ self._function = function
+
+ def __iter__(self):
+ return self._function()
+
+
+class ModelChoiceField(forms.Field):
+
+ default_error_messages = {
+ 'invalid_choice': _(u'Please select a valid choice. '
+ u'That choice is not one of the available choices.'),
+ }
+
+ def __init__(self, reference_class, query=None, choices=None,
+ empty_label=u'---------',
+ required=True, widget=forms.Select, label=None, initial=None,
+ help_text=None, *args, **kwargs):
+ """Constructor.
+
+ Args:
+ reference_class: required; the db.Model subclass used in the reference
+ query: optional db.Query; default db.Query(reference_class)
+ choices: optional explicit list of (value, label) pairs representing
+ available choices; defaults to dynamically iterating over the
+ query argument (or its default)
+ empty_label: label to be used for the default selection item in
+ the widget; this is prepended to the choices
+ required, widget, label, initial, help_text, *args, **kwargs:
+ like for forms.Field.__init__(); widget defaults to forms.Select
+ """
+ assert issubclass(reference_class, db.Model)
+ if query is None:
+ query = db.Query(reference_class)
+ assert isinstance(query, db.Query)
+ super(ModelChoiceField, self).__init__(required, widget, label, initial,
+ help_text, *args, **kwargs)
+ self.empty_label = empty_label
+ self.reference_class = reference_class
+ self._query = query
+ self._choices = choices
+ self._update_widget_choices()
+
+ def _update_widget_choices(self):
+ """Helper to copy the choices to the widget."""
+ self.widget.choices = self.choices
+
+
+ def _get_query(self):
+ """Getter for the query attribute."""
+ return self._query
+
+ def _set_query(self, query):
+ """Setter for the query attribute.
+
+ As a side effect, the widget's choices are updated.
+ """
+ self._query = query
+ self._update_widget_choices()
+
+ query = property(_get_query, _set_query)
+
+ def _generate_choices(self):
+ """Generator yielding (key, label) pairs from the query results."""
+ yield ('', self.empty_label)
+ for inst in self._query:
+ yield (inst.key(), unicode(inst))
+
+
+ def _get_choices(self):
+ """Getter for the choices attribute.
+
+ This is required to return an object that can be iterated over
+ multiple times.
+ """
+ if self._choices is not None:
+ return self._choices
+ return _WrapIter(self._generate_choices)
+
+ def _set_choices(self, choices):
+ """Setter for the choices attribute.
+
+ As a side effect, the widget's choices are updated.
+ """
+ self._choices = choices
+ self._update_widget_choices()
+
+ choices = property(_get_choices, _set_choices)
+
+ def clean(self, value):
+ """Override Field.clean() to do reference-specific value cleaning.
+
+ This turns a non-empty value into a model instance.
+ """
+ value = super(ModelChoiceField, self).clean(value)
+ if not value:
+ return None
+ instance = db.get(value)
+ if instance is None:
+ raise db.BadValueError(self.error_messages['invalid_choice'])
+ return instance
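+
+
+# A hedged sketch (Author is an invented model) of ModelChoiceField with
+# an explicit query; assigning .query later also refreshes the widget:
+#
+#   class Author(db.Model):
+#     name = db.StringProperty()
+#
+#   field = ModelChoiceField(Author,
+#                            query=Author.all().order('name'),
+#                            empty_label=u'(no author)')
+#   field.query = Author.all().filter('name >=', u'M')  # choices updated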
+
+
+class ReferenceProperty(db.ReferenceProperty):
+ __metaclass__ = monkey_patch
+
+ def get_form_field(self, **kwargs):
+ """Return a Django form field appropriate for a reference property.
+
+ This defaults to a ModelChoiceField instance.
+ """
+ defaults = {'form_class': ModelChoiceField,
+ 'reference_class': self.reference_class}
+ defaults.update(kwargs)
+ return super(ReferenceProperty, self).get_form_field(**defaults)
+
+ def get_value_for_form(self, instance):
+ """Extract the property value from the instance for use in a form.
+
+    This returns the key object for the referenced object, or None.
+ """
+ value = super(ReferenceProperty, self).get_value_for_form(instance)
+ if value is not None:
+ value = value.key()
+ return value
+
+ def make_value_from_form(self, value):
+ """Convert a form value to a property value.
+
+ This turns a key string or object into a model instance.
+ """
+ if value:
+ if not isinstance(value, db.Model):
+ value = db.get(value)
+ return value
+
+
+class _ReverseReferenceProperty(db._ReverseReferenceProperty):
+ __metaclass__ = monkey_patch
+
+ def get_form_field(self, **kwargs):
+ """Return a Django form field appropriate for a reverse reference.
+
+ This always returns None, since reverse references are always
+ automatic.
+ """
+ return None
+
+
+def property_clean(prop, value):
+ """Apply Property level validation to value.
+
+ Calls .make_value_from_form() and .validate() on the property and catches
+ exceptions generated by either. The exceptions are converted to
+ forms.ValidationError exceptions.
+
+ Args:
+ prop: The property to validate against.
+ value: The value to validate.
+
+ Raises:
+ forms.ValidationError if the value cannot be validated.
+ """
+ if value is not None:
+ try:
+ prop.validate(prop.make_value_from_form(value))
+ except (db.BadValueError, ValueError), e:
+ raise forms.ValidationError(unicode(e))
+
+
+class ModelFormOptions(object):
+ """A simple class to hold internal options for a ModelForm class.
+
+ Instance attributes:
+ model: a db.Model class, or None
+ fields: list of field names to be defined, or None
+ exclude: list of field names to be skipped, or None
+
+ These instance attributes are copied from the 'Meta' class that is
+ usually present in a ModelForm class, and all default to None.
+ """
+
+
+ def __init__(self, options=None):
+ self.model = getattr(options, 'model', None)
+ self.fields = getattr(options, 'fields', None)
+ self.exclude = getattr(options, 'exclude', None)
+
+
+class ModelFormMetaclass(type):
+ """The metaclass for the ModelForm class defined below.
+
+ This is our analog of Django's own ModelFormMetaclass. (We
+ can't conveniently subclass that class because there are quite a few
+ differences.)
+
+ See the docs for ModelForm below for a usage example.
+ """
+
+ def __new__(cls, class_name, bases, attrs):
+ """Constructor for a new ModelForm class instance.
+
+ The signature of this method is determined by Python internals.
+
+ All Django Field instances are removed from attrs and added to
+ the base_fields attribute instead. Additional Field instances
+ are added to this based on the Datastore Model class specified
+ by the Meta attribute.
+ """
+ fields = sorted(((field_name, attrs.pop(field_name))
+ for field_name, obj in attrs.items()
+ if isinstance(obj, forms.Field)),
+ key=lambda obj: obj[1].creation_counter)
+ for base in bases[::-1]:
+ if hasattr(base, 'base_fields'):
+ fields = base.base_fields.items() + fields
+ declared_fields = django.utils.datastructures.SortedDict()
+ for field_name, obj in fields:
+ declared_fields[field_name] = obj
+
+ opts = ModelFormOptions(attrs.get('Meta', None))
+ attrs['_meta'] = opts
+
+ base_models = []
+ for base in bases:
+ base_opts = getattr(base, '_meta', None)
+ base_model = getattr(base_opts, 'model', None)
+ if base_model is not None:
+ base_models.append(base_model)
+ if len(base_models) > 1:
+ raise django.core.exceptions.ImproperlyConfigured(
+ "%s's base classes define more than one model." % class_name)
+
+ if opts.model is not None:
+ if base_models and base_models[0] is not opts.model:
+ raise django.core.exceptions.ImproperlyConfigured(
+ '%s defines a different model than its parent.' % class_name)
+
+ model_fields = django.utils.datastructures.SortedDict()
+ for name, prop in sorted(opts.model.properties().iteritems(),
+ key=lambda prop: prop[1].creation_counter):
+ if opts.fields and name not in opts.fields:
+ continue
+ if opts.exclude and name in opts.exclude:
+ continue
+ form_field = prop.get_form_field()
+ if form_field is not None:
+ model_fields[name] = form_field
+
+ model_fields.update(declared_fields)
+ attrs['base_fields'] = model_fields
+
+ props = opts.model.properties()
+ for name, field in model_fields.iteritems():
+ prop = props.get(name)
+ if prop:
+ def clean_for_property_field(value, prop=prop, old_clean=field.clean):
+ value = old_clean(value)
+ property_clean(prop, value)
+ return value
+ field.clean = clean_for_property_field
+ else:
+ attrs['base_fields'] = declared_fields
+
+ return super(ModelFormMetaclass, cls).__new__(cls,
+ class_name, bases, attrs)
+
+
+class BaseModelForm(forms.BaseForm):
+ """Base class for ModelForm.
+
+ This overrides the forms.BaseForm constructor and adds a save() method.
+
+ This class does not have a special metaclass; the magic metaclass is
+ added by the subclass ModelForm.
+ """
+
+ def __init__(self, data=None, files=None, auto_id=None, prefix=None,
+ initial=None, error_class=None, label_suffix=None,
+ instance=None):
+ """Constructor.
+
+ Args (all optional and defaulting to None):
+      data: dict of data values, typically from a POST request
+ files: dict of file upload values; Django 0.97 or later only
+ auto_id, prefix: see Django documentation
+ initial: dict of initial values
+ error_class, label_suffix: see Django 0.97 or later documentation
+ instance: Model instance to be used for additional initial values
+
+ Except for initial and instance, these arguments are passed on to
+ the forms.BaseForm constructor unchanged, but only if not None.
+ Some arguments (files, error_class, label_suffix) are only
+ supported by Django 0.97 or later. Leave these blank (i.e. None)
+ when using Django 0.96. Their default values will be used with
+ Django 0.97 or later even when they are explicitly set to None.
+ """
+ opts = self._meta
+ self.instance = instance
+ object_data = {}
+ if instance is not None:
+ for name, prop in instance.properties().iteritems():
+ if opts.fields and name not in opts.fields:
+ continue
+ if opts.exclude and name in opts.exclude:
+ continue
+ object_data[name] = prop.get_value_for_form(instance)
+ if initial is not None:
+ object_data.update(initial)
+ kwargs = dict(data=data, files=files, auto_id=auto_id,
+ prefix=prefix, initial=object_data,
+ error_class=error_class, label_suffix=label_suffix)
+ kwargs = dict((name, value)
+ for name, value in kwargs.iteritems()
+ if value is not None)
+ super(BaseModelForm, self).__init__(**kwargs)
+
+ def save(self, commit=True):
+ """Save this form's cleaned data into a model instance.
+
+ Args:
+ commit: optional bool, default True; if true, the model instance
+ is also saved to the datastore.
+
+ Returns:
+ A model instance. If a model instance was already associated
+ with this form instance (either passed to the constructor with
+ instance=... or by a previous save() call), that same instance
+ is updated and returned; if no instance was associated yet, one
+ is created by this call.
+
+ Raises:
+ ValueError if the data couldn't be validated.
+ """
+ if not self.is_bound:
+ raise ValueError('Cannot save an unbound form')
+ opts = self._meta
+ instance = self.instance
+ if instance is None:
+ fail_message = 'created'
+ else:
+ fail_message = 'updated'
+ if self.errors:
+ raise ValueError("The %s could not be %s because the data didn't "
+ 'validate.' % (opts.model.kind(), fail_message))
+ cleaned_data = self._cleaned_data()
+ converted_data = {}
+ propiter = itertools.chain(
+ opts.model.properties().iteritems(),
+ iter([('key_name', StringProperty(name='key_name'))])
+ )
+ for name, prop in propiter:
+ value = cleaned_data.get(name)
+ if value is not None:
+ converted_data[name] = prop.make_value_from_form(value)
+ try:
+ if instance is None:
+ instance = opts.model(**converted_data)
+ self.instance = instance
+ else:
+ for name, value in converted_data.iteritems():
+ if name == 'key_name':
+ continue
+ setattr(instance, name, value)
+ except db.BadValueError, err:
+ raise ValueError('The %s could not be %s (%s)' %
+ (opts.model.kind(), fail_message, err))
+ if commit:
+ instance.put()
+ return instance
+
+ def _cleaned_data(self):
+ """Helper to retrieve the cleaned data attribute.
+
+ In Django 0.96 this attribute was called self.clean_data. In 0.97
+ and later it's been renamed to self.cleaned_data, to avoid a name
+ conflict. This helper abstracts the difference between the
+ versions away from its caller.
+ """
+ try:
+ return self.cleaned_data
+ except AttributeError:
+ return self.clean_data
+
+
+class ModelForm(BaseModelForm):
+ """A Django form tied to a Datastore model.
+
+ Note that this particular class just sets the metaclass; all other
+ functionality is defined in the base class, BaseModelForm, above.
+
+ Usage example:
+
+ from google.appengine.ext import db
+ from google.appengine.ext.db import djangoforms
+
+ # First, define a model class
+ class MyModel(db.Model):
+ foo = db.StringProperty()
+ bar = db.IntegerProperty(required=True, default=42)
+
+ # Now define a form class
+ class MyForm(djangoforms.ModelForm):
+ class Meta:
+ model = MyModel
+
+ You can now instantiate MyForm without arguments to create an
+ unbound form, or with data from a POST request to create a bound
+ form. You can also pass a model instance with the instance=...
+ keyword argument to create an unbound (!) form whose initial values
+ are taken from the instance. For bound forms, use the save() method
+ to return a model instance.
+
+ Like Django's own corresponding ModelForm class, the nested Meta
+ class can have two other attributes:
+
+ fields: if present and non-empty, a list of field names to be
+ included in the form; properties not listed here are
+ excluded from the form
+
+ exclude: if present and non-empty, a list of field names to be
+ excluded from the form
+
+ If exclude and fields are both non-empty, names occurring in both
+  are excluded (i.e. exclude wins). By default all properties in the
+  model have a corresponding form field defined.
+
+ It is also possible to define form fields explicitly. This gives
+ more control over the widget used, constraints, initial value, and
+ so on. Such form fields are not affected by the nested Meta class's
+ fields and exclude attributes.
+
+ If you define a form field named 'key_name' it will be treated
+ specially and will be used as the value for the key_name parameter
+ to the Model constructor. This allows you to create instances with
+ named keys. The 'key_name' field will be ignored when updating an
+ instance (although it will still be shown on the form).
+ """
+
+ __metaclass__ = ModelFormMetaclass
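+
+
+# A hedged usage sketch (ours), reusing MyModel/MyForm from the docstring
+# above in a webapp handler that renders and saves the form:
+#
+#   from google.appengine.ext import webapp
+#
+#   class MyFormHandler(webapp.RequestHandler):
+#     def get(self):
+#       self.response.out.write('<form method="POST">%s'
+#                               '<input type="submit"></form>' % MyForm())
+#
+#     def post(self):
+#       form = MyForm(data=self.request.POST)
+#       if form.is_valid():
+#         form.save()             # creates a MyModel entity and put()s it
+#         self.redirect('/done')
+#       else:
+#         self.response.out.write('<form method="POST">%s'
+#                                 '<input type="submit"></form>' % form)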
diff --git a/google_appengine/google/appengine/ext/db/polymodel.py b/google_appengine/google/appengine/ext/db/polymodel.py
new file mode 100755
index 0000000..805a2c5
--- /dev/null
+++ b/google_appengine/google/appengine/ext/db/polymodel.py
@@ -0,0 +1,355 @@
+#!/usr/bin/env python
+#
+# Copyright 2007 Google Inc.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
+
+"""Support for polymorphic models and queries.
+
+The Model class on its own is only able to support functional polymorphism.
+It is possible to create a subclass of Model and then subclass that one as
+many generations as necessary and those classes will share all the same
+properties and behaviors. The problem is that subclassing Model in this way
+places each subclass in its own kind. This means that it is not possible
+to do polymorphic queries. Building a query on a base class will only return
+instances of that class from the Datastore, while queries on a subclass will
+only return instances of the subclass.
+
+This module allows applications to specify class hierarchies that support
+polymorphic queries.
+"""
+
+
+from google.appengine.ext import db
+
+_class_map = {}
+
+_CLASS_KEY_PROPERTY = 'class'
+
+
+class _ClassKeyProperty(db.ListProperty):
+ """Property representing class-key property of a polymorphic class.
+
+  The class key is a list of strings describing a polymorphic instance's
+  place within its class hierarchy. This property is automatically calculated.
+ For example:
+
+ class Foo(PolyModel): ...
+ class Bar(Foo): ...
+ class Baz(Bar): ...
+
+ Foo.class_key() == ['Foo']
+ Bar.class_key() == ['Foo', 'Bar']
+ Baz.class_key() == ['Foo', 'Bar', 'Baz']
+ """
+
+ def __init__(self, name):
+ super(_ClassKeyProperty, self).__init__(name=name,
+ item_type=str,
+ default=None)
+
+ def __set__(self, *args):
+ raise db.DerivedPropertyError(
+ 'Class-key is a derived property and cannot be set.')
+
+ def __get__(self, model_instance, model_class):
+ if model_instance is None:
+ return self
+ return [cls.__name__ for cls in model_class.__class_hierarchy__]
+
+
+class PolymorphicClass(db.PropertiedClass):
+ """Meta-class for initializing PolymorphicClasses.
+
+ This class extends PropertiedClass to add a few static attributes to
+ new polymorphic classes necessary for their correct functioning.
+
+ """
+
+ def __init__(cls, name, bases, dct):
+ """Initializes a class that belongs to a polymorphic hierarchy.
+
+ This method configures a few built-in attributes of polymorphic
+ models:
+
+ __root_class__: If the new class is a root class, __root_class__ is set to
+      itself so that its subclasses can quickly know what the root of
+ their hierarchy is and what kind they are stored in.
+ __class_hierarchy__: List of classes describing the new model's place
+ in the class hierarchy in reverse MRO order. The first element is
+ always the root class while the last element is always the new class.
+
+ MRO documentation: http://www.python.org/download/releases/2.3/mro/
+
+ For example:
+    class Foo(PolyModel): ...
+
+ class Bar(Foo): ...
+
+ class Baz(Bar): ...
+
+ Foo.__class_hierarchy__ == [Foo]
+ Bar.__class_hierarchy__ == [Foo, Bar]
+ Baz.__class_hierarchy__ == [Foo, Bar, Baz]
+
+    Unless the class is a root class or PolyModel itself, it is not
+    inserted into the kind-map like other models. However, all polymorphic
+    classes are inserted into the class-map, which maps the class-key to the
+    implementation class. This class-map is consulted, using the polymorphic
+    instance's discriminator (the 'class' property of the entity), when
+    loading from the datastore.
+ """
+ if name == 'PolyModel':
+ super(PolymorphicClass, cls).__init__(name, bases, dct, map_kind=False)
+ return
+
+ elif PolyModel in bases:
+ if getattr(cls, '__class_hierarchy__', None):
+ raise db.ConfigurationError(('%s cannot derive from PolyModel as '
+ '__class_hierarchy__ is already defined.') % cls.__name__)
+ cls.__class_hierarchy__ = [cls]
+ cls.__root_class__ = cls
+ super(PolymorphicClass, cls).__init__(name, bases, dct)
+ else:
+ super(PolymorphicClass, cls).__init__(name, bases, dct, map_kind=False)
+
+ cls.__class_hierarchy__ = [c for c in reversed(cls.mro())
+ if issubclass(c, PolyModel) and c != PolyModel]
+
+ if cls.__class_hierarchy__[0] != cls.__root_class__:
+ raise db.ConfigurationError(
+ '%s cannot be derived from both root classes %s and %s' %
+ (cls.__name__,
+ cls.__class_hierarchy__[0].__name__,
+ cls.__root_class__.__name__))
+
+ _class_map[cls.class_key()] = cls
+
+
+class PolyModel(db.Model):
+ """Base-class for models that supports polymorphic queries.
+
+ Use this class to build hierarchies that can be queried based
+ on their types.
+
+ Example:
+
+ consider the following model hierarchy:
+
+ +------+
+ |Animal|
+ +------+
+ |
+ +-----------------+
+ | |
+ +------+ +------+
+ |Canine| |Feline|
+ +------+ +------+
+ | |
+ +-------+ +-------+
+ | | | |
+ +---+ +----+ +---+ +-------+
+ |Dog| |Wolf| |Cat| |Panther|
+ +---+ +----+ +---+ +-------+
+
+ This class hierarchy has three levels. The first is the "root class".
+ All models in a single class hierarchy must inherit from this root. All
+ models in the hierarchy are stored as the same kind as the root class.
+ For example, Panther entities when stored to the datastore are of the kind
+  'Animal'. Querying against the Animal kind will retrieve, for example,
+  any Cats, Dogs and Canines that match your query. Different classes
+  stored in the root class's kind are identified by their class-key. When
+  an entity is loaded from the datastore, its class-key is used to map it
+  to the appropriate implementation class.
+
+ Polymorphic properties:
+
+ Properties that are defined in a given base-class within a hierarchy are
+  stored in the datastore for that class and all of its sub-classes only.
+  So, if the Feline class had a property called 'whiskers', the Cat and
+  Panther entities would also have whiskers, but not Animal, Canine, Dog
+  or Wolf.
+
+ Polymorphic queries:
+
+ When written to the datastore, all polymorphic objects automatically have
+ a property called 'class' that you can query against. Using this property
+ it is possible to easily write a GQL query against any sub-hierarchy. For
+ example, to fetch only Canine objects, including all Dogs and Wolves:
+
+ db.GqlQuery("SELECT * FROM Animal WHERE class='Canine'")
+
+  An alternate method is to use the 'all' or 'gql' methods of the Canine
+ class:
+
+ Canine.all()
+ Canine.gql('')
+
+ The 'class' property is not meant to be used by your code other than
+  for queries. Since it is supposed to represent the real Python class,
+  it is intended to be hidden from view.
+
+ Root class:
+
+  The root class is the class from which all other classes of the hierarchy
+  inherit. Each hierarchy has a single root class. A class is a
+ root class if it is an immediate child of PolyModel. The subclasses of
+ the root class are all the same kind as the root class. In other words:
+
+ Animal.kind() == Feline.kind() == Panther.kind() == 'Animal'
+ """
+
+ __metaclass__ = PolymorphicClass
+
+ _class = _ClassKeyProperty(name=_CLASS_KEY_PROPERTY)
+
+ def __new__(cls, *args, **kwds):
+ """Prevents direct instantiation of PolyModel."""
+ if cls is PolyModel:
+ raise NotImplementedError()
+ return super(PolyModel, cls).__new__(cls, *args, **kwds)
+
+ @classmethod
+ def kind(cls):
+ """Get kind of polymorphic model.
+
+ Overridden so that all subclasses of root classes are the same kind
+ as the root.
+
+ Returns:
+ Kind of entity to write to datastore.
+ """
+ if cls is cls.__root_class__:
+ return super(PolyModel, cls).kind()
+ else:
+ return cls.__root_class__.kind()
+
+ @classmethod
+ def class_key(cls):
+ """Caclulate the class-key for this class.
+
+ Returns:
+      Class key for class. By default this is the list of classes
+ of the hierarchy, starting with the root class and walking its way
+ down to cls.
+ """
+ if not hasattr(cls, '__class_hierarchy__'):
+ raise NotImplementedError(
+ 'Cannot determine class key without class hierarchy')
+ return tuple(cls.class_name() for cls in cls.__class_hierarchy__)
+
+ @classmethod
+ def class_name(cls):
+ """Calculate class name for this class.
+
+    Returns the name to use for each class's element within its class-key.
+    Used to discriminate between different classes within a class
+    hierarchy's Datastore kind.
+
+ The presence of this method allows developers to use a different class
+ name in the datastore from what is used in Python code. This is useful,
+ for example, for renaming classes without having to migrate instances
+ already written to the datastore. For example, to rename a polymorphic
+ class Contact to SimpleContact, you could convert:
+
+ # Class key is ['Information']
+ class Information(PolyModel): ...
+
+ # Class key is ['Information', 'Contact']
+ class Contact(Information): ...
+
+ to:
+
+ # Class key is still ['Information', 'Contact']
+ class SimpleContact(Information):
+ ...
+ @classmethod
+ def class_name(cls):
+ return 'Contact'
+
+ # Class key is ['Information', 'Contact', 'ExtendedContact']
+ class ExtendedContact(SimpleContact): ...
+
+ This would ensure that all objects written previously using the old class
+ name would still be loaded.
+
+ Returns:
+ Name of this class.
+ """
+ return cls.__name__
+
+ @classmethod
+ def from_entity(cls, entity):
+ """Load from entity to class based on discriminator.
+
+ Rather than instantiating a new Model instance based on the kind
+ mapping, this creates an instance of the correct model class based
+    on the entity's class-key.
+
+ Args:
+ entity: Entity loaded directly from datastore.
+
+ Raises:
+ KindError when there is no class mapping based on discriminator.
+ """
+ if (_CLASS_KEY_PROPERTY in entity and
+ tuple(entity[_CLASS_KEY_PROPERTY]) != cls.class_key()):
+ key = tuple(entity[_CLASS_KEY_PROPERTY])
+ try:
+ poly_class = _class_map[key]
+ except KeyError:
+ raise db.KindError('No implementation for class \'%s\'' % key)
+ return poly_class.from_entity(entity)
+ return super(PolyModel, cls).from_entity(entity)
+
+ @classmethod
+ def all(cls, **kwds):
+ """Get all instance of a class hierarchy.
+
+ Args:
+ kwds: Keyword parameters passed on to Model.all.
+
+ Returns:
+ Query with filter set to match this class' discriminator.
+ """
+ query = super(PolyModel, cls).all(**kwds)
+ if cls != cls.__root_class__:
+ query.filter(_CLASS_KEY_PROPERTY + ' =', cls.class_name())
+ return query
+
+ @classmethod
+ def gql(cls, query_string, *args, **kwds):
+ """Returns a polymorphic query using GQL query string.
+
+ This query is polymorphic in that it has its filters configured in a way
+ to retrieve instances of the model or an instance of a subclass of the
+ model.
+
+ Args:
+ query_string: properly formatted GQL query string with the
+ 'SELECT * FROM <entity>' part omitted
+ *args: rest of the positional arguments used to bind numeric references
+ in the query.
+ **kwds: dictionary-based arguments (for named parameters).
+ """
+ if cls == cls.__root_class__:
+ return super(PolyModel, cls).gql(query_string, *args, **kwds)
+ else:
+ from google.appengine.ext import gql
+
+ query = db.GqlQuery('SELECT * FROM %s %s' % (cls.kind(), query_string))
+
+ query_filter = [('nop',
+ [gql.Literal(cls.class_name())])]
+ query._proto_query.filters()[('class', '=')] = query_filter
+ query.bind(*args, **kwds)
+ return query
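+
+
+# A minimal sketch (ours) of the Animal hierarchy from the PolyModel
+# docstring, assuming 'from google.appengine.ext.db import polymodel':
+#
+#   class Animal(polymodel.PolyModel):
+#     name = db.StringProperty()
+#
+#   class Canine(Animal):
+#     pass
+#
+#   class Dog(Canine):
+#     pass
+#
+#   Dog(name='Rex').put()     # stored with kind 'Animal'; the 'class'
+#                             # discriminator is ['Animal', 'Canine', 'Dog']
+#   Canine.all().fetch(10)    # returns Dogs (and any other Canines)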
diff --git a/google_appengine/google/appengine/ext/db/polymodel.pyc b/google_appengine/google/appengine/ext/db/polymodel.pyc
new file mode 100644
index 0000000..3c39e4f
--- /dev/null
+++ b/google_appengine/google/appengine/ext/db/polymodel.pyc
Binary files differ
diff --git a/google_appengine/google/appengine/ext/deferred/__init__.py b/google_appengine/google/appengine/ext/deferred/__init__.py
new file mode 100755
index 0000000..55fc5ee
--- /dev/null
+++ b/google_appengine/google/appengine/ext/deferred/__init__.py
@@ -0,0 +1,22 @@
+#!/usr/bin/env python
+#
+# Copyright 2007 Google Inc.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
+
+
+
+
+
+from deferred import *
diff --git a/google_appengine/google/appengine/ext/deferred/deferred.py b/google_appengine/google/appengine/ext/deferred/deferred.py
new file mode 100755
index 0000000..4851a6d
--- /dev/null
+++ b/google_appengine/google/appengine/ext/deferred/deferred.py
@@ -0,0 +1,267 @@
+#!/usr/bin/env python
+#
+# Copyright 2007 Google Inc.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
+
+"""A module that handles deferred execution of callables via the task queue.
+
+Tasks consist of a callable and arguments to pass to it. The callable and its
+arguments are serialized and put on the task queue, which deserializes and
+executes them. The following callables can be used as tasks:
+
+1) Functions defined in the top level of a module
+2) Classes defined in the top level of a module
+3) Instances of classes in (2) that implement __call__
+4) Instance methods of objects of classes in (2)
+5) Class methods of classes in (2)
+6) Built-in functions
+7) Built-in methods
+
+The following callables can NOT be used as tasks:
+1) Nested functions or closures
+2) Nested classes or objects of them
+3) Lambda functions
+4) Static methods
+
+The arguments to the callable, and the object (in the case of method or object
+calls) must all be pickleable.
+
+If you want your tasks to execute reliably, don't use mutable global variables;
+they are not serialized with the task and may not be the same when your task
+executes as they were when it was enqueued (in fact, they will almost certainly
+be different).
+
+If your app relies on manipulating the import path, make sure that the function
+you are deferring is defined in a module that can be found without import path
+manipulation. Alternately, you can include deferred.TaskHandler in your own
+webapp application instead of using the easy-install method detailed below.
+
+When you create a deferred task using deferred.defer, the task is serialized,
+and an attempt is made to add it directly to the task queue. If the task is too
+big (larger than about 10 kilobytes when serialized), a datastore entry will be
+created for the task, and a new task will be enqueued, which will fetch the
+original task from the datastore and execute it. This is much less efficient
+than the direct execution model, so it's a good idea to minimize the size of
+your tasks when possible.
+
+In order for tasks to be processed, you need to set up the handler. Add the
+following to your app.yaml handlers section:
+
+handlers:
+- url: /_ah/queue/deferred
+ script: $PYTHON_LIB/google/appengine/ext/deferred/__init__.py
+ login: admin
+
+By default, the deferred module uses the URL above, and the default queue.
+
+Example usage:
+
+ def do_something_later(key, amount):
+ entity = MyModel.get(key)
+ entity.total += amount
+ entity.put()
+
+  # Use default URL and queue name, no task name, execute ASAP
+  # (my_key is the key of the MyModel entity to update).
+  deferred.defer(do_something_later, my_key, 20)
+
+ # Providing non-default task queue arguments
+  deferred.defer(do_something_later, my_key, 20, _queue="foo", _countdown=60)
+"""
+
+
+
+
+
+import logging
+import pickle
+import types
+
+from google.appengine.api.labs import taskqueue
+from google.appengine.ext import db
+from google.appengine.ext import webapp
+from google.appengine.ext.webapp.util import run_wsgi_app
+
+
+_TASKQUEUE_HEADERS = {"Content-Type": "application/octet-stream"}
+_DEFAULT_URL = "/_ah/queue/deferred"
+_DEFAULT_QUEUE = "default"
+
+
+class Error(Exception):
+ """Base class for exceptions in this module."""
+
+
+class PermanentTaskFailure(Error):
+ """Indicates that a task failed, and will never succeed."""
+
+
+def run(data):
+ """Unpickles and executes a task.
+
+ Args:
+ data: A pickled tuple of (function, args, kwargs) to execute.
+ Returns:
+ The return value of the function invocation.
+ """
+ try:
+ func, args, kwds = pickle.loads(data)
+ except Exception, e:
+ raise PermanentTaskFailure(e)
+ else:
+ return func(*args, **kwds)
+
+
+class _DeferredTaskEntity(db.Model):
+ """Datastore representation of a deferred task.
+
+ This is used in cases when the deferred task is too big to be included as
+ payload with the task queue entry.
+ """
+ data = db.BlobProperty(required=True)
+
+
+def run_from_datastore(key):
+ """Retrieves a task from the datastore and executes it.
+
+ Args:
+ key: The datastore key of a _DeferredTaskEntity storing the task.
+ Returns:
+ The return value of the function invocation.
+ """
+ entity = _DeferredTaskEntity.get(key)
+ if not entity:
+ raise PermanentTaskFailure()
+  try:
+    ret = run(entity.data)
+    entity.delete()
+    return ret
+  except PermanentTaskFailure:
+    entity.delete()
+    raise
+
+
+def invoke_member(obj, membername, *args, **kwargs):
+ """Retrieves a member of an object, then calls it with the provided arguments.
+
+ Args:
+ obj: The object to operate on.
+    membername: The name of the member to retrieve from obj.
+ args: Positional arguments to pass to the method.
+ kwargs: Keyword arguments to pass to the method.
+ Returns:
+ The return value of the method invocation.
+ """
+ return getattr(obj, membername)(*args, **kwargs)
+
+
+def _curry_callable(obj, *args, **kwargs):
+ """Takes a callable and arguments and returns a task queue tuple.
+
+ The returned tuple consists of (callable, args, kwargs), and can be pickled
+ and unpickled safely.
+
+ Args:
+ obj: The callable to curry. See the module docstring for restrictions.
+ args: Positional arguments to call the callable with.
+ kwargs: Keyword arguments to call the callable with.
+ Returns:
+ A tuple consisting of (callable, args, kwargs) that can be evaluated by
+ run() with equivalent effect of executing the function directly.
+ Raises:
+ ValueError: If the passed in object is not of a valid callable type.
+ """
+ if isinstance(obj, types.MethodType):
+ return (invoke_member, (obj.im_self, obj.im_func.__name__) + args, kwargs)
+ elif isinstance(obj, types.BuiltinMethodType):
+ if not obj.__self__:
+ return (obj, args, kwargs)
+ else:
+ return (invoke_member, (obj.__self__, obj.__name__) + args, kwargs)
+ elif isinstance(obj, types.ObjectType) and hasattr(obj, "__call__"):
+ return (obj, args, kwargs)
+ elif isinstance(obj, (types.FunctionType, types.BuiltinFunctionType,
+ types.ClassType, types.UnboundMethodType)):
+ return (obj, args, kwargs)
+ else:
+ raise ValueError("obj must be callable")
+
+
+def serialize(obj, *args, **kwargs):
+ """Serializes a callable into a format recognized by the deferred executor.
+
+ Args:
+ obj: The callable to serialize. See module docstring for restrictions.
+ args: Positional arguments to call the callable with.
+ kwargs: Keyword arguments to call the callable with.
+ Returns:
+ A serialized representation of the callable.
+ """
+ curried = _curry_callable(obj, *args, **kwargs)
+ return pickle.dumps(curried, protocol=pickle.HIGHEST_PROTOCOL)
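+
+
+# A hedged round-trip sketch (ours): serialize() and run() can be paired
+# directly, without the task queue, e.g. in a test. 'add' is a made-up
+# module-level function (it must be importable to be pickled):
+#
+#   def add(a, b):
+#     return a + b
+#
+#   payload = serialize(add, 2, b=3)  # pickles (add, (2,), {'b': 3})
+#   run(payload)                      # unpickles, returns add(2, b=3) == 5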
+
+
+def defer(obj, *args, **kwargs):
+ """Defers a callable for execution later.
+
+ The default deferred URL of /_ah/queue/deferred will be used unless an
+ alternate URL is explicitly specified. If you want to use the default URL for
+ a queue, specify _url=None. If you specify a different URL, you will need to
+ install the handler on that URL (see the module docstring for details).
+
+ Args:
+ obj: The callable to execute. See module docstring for restrictions.
+ _countdown, _eta, _name, _url, _queue: Passed through to the task queue -
+ see the task queue documentation for details.
+ args: Positional arguments to call the callable with.
+ kwargs: Any other keyword arguments are passed through to the callable.
+ """
+ taskargs = dict((x, kwargs.pop(("_%s" % x), None))
+ for x in ("countdown", "eta", "name"))
+ taskargs["url"] = kwargs.pop("_url", _DEFAULT_URL)
+ taskargs["headers"] = _TASKQUEUE_HEADERS
+ queue = kwargs.pop("_queue", _DEFAULT_QUEUE)
+ pickled = serialize(obj, *args, **kwargs)
+ try:
+ task = taskqueue.Task(payload=pickled, **taskargs)
+ task.add(queue)
+ except taskqueue.TaskTooLargeError:
+ key = _DeferredTaskEntity(data=pickled).put()
+ pickled = serialize(run_from_datastore, str(key))
+ task = taskqueue.Task(payload=pickled, **taskargs)
+ task.add(queue)
+
+
+class TaskHandler(webapp.RequestHandler):
+ """A webapp handler class that processes deferred invocations."""
+
+ def post(self):
+ headers = ["%s:%s" % (k, v) for k, v in self.request.headers.items()
+ if k.lower().startswith("x-appengine-")]
+ logging.info(", ".join(headers))
+
+ try:
+ run(self.request.body)
+ except PermanentTaskFailure, e:
+ logging.exception("Permanent failure attempting to execute task")
+
+
+application = webapp.WSGIApplication([(".*", TaskHandler)])
+
+
+def main():
+ run_wsgi_app(application)
+
+
+if __name__ == "__main__":
+ main()
diff --git a/google_appengine/google/appengine/ext/ereporter/__init__.py b/google_appengine/google/appengine/ext/ereporter/__init__.py
new file mode 100755
index 0000000..3ae417b
--- /dev/null
+++ b/google_appengine/google/appengine/ext/ereporter/__init__.py
@@ -0,0 +1,18 @@
+#!/usr/bin/env python
+#
+# Copyright 2007 Google Inc.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
+
+from ereporter import *
diff --git a/google_appengine/google/appengine/ext/ereporter/ereporter.py b/google_appengine/google/appengine/ext/ereporter/ereporter.py
new file mode 100755
index 0000000..989718c
--- /dev/null
+++ b/google_appengine/google/appengine/ext/ereporter/ereporter.py
@@ -0,0 +1,261 @@
+#!/usr/bin/env python
+#
+# Copyright 2007 Google Inc.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
+
+"""A logging handler that records information about unique exceptions.
+
+'Unique' in this case is defined as a given (exception class, location) tuple.
+Unique exceptions are logged to the datastore with an example stacktrace and an
+approximate count of occurrences, grouped by day and application version.
+
+A cron handler, in google.appengine.ext.ereporter.report_generator, constructs
+and emails a report based on the previous day's exceptions.
+
+Example usage:
+
+In your handler script(s), add:
+
+ import logging
+ from google.appengine.ext import ereporter
+
+ ereporter.register_logger()
+
+In your app.yaml, add:
+
+ handlers:
+ - url: /_ereporter/.*
+ script: $PYTHON_LIB/google/appengine/ext/ereporter/report_generator.py
+ login: admin
+
+In your cron.yaml, add:
+
+ cron:
+ - description: Daily exception report
+ url: /_ereporter?sender=you@yourdomain.com
+ schedule: every day 00:00
+
+This will cause a daily exception report to be generated and emailed to all
+admins, with exception traces grouped by minor version. If you only want to
+get exception information for the most recent minor version, add the
+'versions=latest' argument to the query string. For other valid query string
+arguments, see report_generator.py.
+
+If you anticipate a lot of exception traces (for example, if you're deploying
+many minor versions, each of which may have its own set of exceptions), you
+can ensure that the traces from the newest minor versions get included by adding
+this to your index.yaml:
+
+ indexes:
+ - kind: __google_ExceptionRecord
+ properties:
+ - name: date
+ - name: major_version
+ - name: minor_version
+ direction: desc
+"""
+
+
+
+
+
+import datetime
+import logging
+import os
+import sha
+import traceback
+import urllib
+
+from google.appengine.api import memcache
+from google.appengine.ext import db
+from google.appengine.ext import webapp
+
+
+MAX_SIGNATURE_LENGTH = 256
+
+
+class ExceptionRecord(db.Model):
+ """Datastore model for a record of a unique exception."""
+
+ signature = db.StringProperty(required=True)
+ major_version = db.StringProperty(required=True)
+ minor_version = db.IntegerProperty(required=True)
+ date = db.DateProperty(required=True)
+ count = db.IntegerProperty(required=True, default=0)
+
+ stacktrace = db.TextProperty(required=True)
+ http_method = db.TextProperty(required=True)
+ url = db.TextProperty(required=True)
+ handler = db.TextProperty(required=True)
+
+ @classmethod
+ def get_key_name(cls, signature, version, date=None):
+ """Generates a key name for an exception record.
+
+ Args:
+ signature: A signature representing the exception and its site.
+ version: The major/minor version of the app the exception occurred in.
+ date: The date the exception occurred.
+
+ Returns:
+ The unique key name for this exception record.
+ """
+ if not date:
+ date = datetime.date.today()
+ return '%s@%s:%s' % (signature, date, version)
+
+
+class ExceptionRecordingHandler(logging.Handler):
+ """A handler that records exception data to the App Engine datastore."""
+
+ def __init__(self, log_interval=10):
+ """Constructs a new ExceptionRecordingHandler.
+
+ Args:
+ log_interval: The minimum interval at which we will log an individual
+        exception. This is a per-exception timeout, so it doesn't affect the
+        aggregate rate of exception logging, only the rate at which we record
+        occurrences of a single exception, to prevent datastore contention.
+ """
+ self.log_interval = log_interval
+ logging.Handler.__init__(self)
+
+ @classmethod
+ def __RelativePath(cls, path):
+ """Rewrites a path to be relative to the app's root directory.
+
+ Args:
+ path: The path to rewrite.
+
+ Returns:
+ The path with the prefix removed, if that prefix matches the app's
+ root directory.
+ """
+ cwd = os.getcwd()
+ if path.startswith(cwd):
+ path = path[len(cwd)+1:]
+ return path
+
+ @classmethod
+ def __GetSignature(cls, exc_info):
+ """Returns a unique signature string for an exception.
+
+ Args:
+ exc_info: The exc_info object for an exception.
+
+ Returns:
+ A unique signature string for the exception, consisting of fully
+ qualified exception name and call site.
+ """
+ ex_type, unused_value, trace = exc_info
+ frames = traceback.extract_tb(trace)
+
+ fulltype = '%s.%s' % (ex_type.__module__, ex_type.__name__)
+ path, line_no = frames[-1][:2]
+ path = cls.__RelativePath(path)
+ site = '%s:%d' % (path, line_no)
+ signature = '%s@%s' % (fulltype, site)
+ if len(signature) > MAX_SIGNATURE_LENGTH:
+ signature = 'hash:%s' % sha.new(signature).hexdigest()
+
+ return signature
+
+ @classmethod
+ def __GetURL(cls):
+ """Returns the URL of the page currently being served.
+
+ Returns:
+ The full URL of the page currently being served.
+ """
+ if os.environ['SERVER_PORT'] == '80':
+ scheme = 'http://'
+ else:
+ scheme = 'https://'
+ host = os.environ['SERVER_NAME']
+ script_name = urllib.quote(os.environ['SCRIPT_NAME'])
+ path_info = urllib.quote(os.environ['PATH_INFO'])
+ qs = os.environ.get('QUERY_STRING', '')
+ if qs:
+ qs = '?' + qs
+ return scheme + host + script_name + path_info + qs
+
+ def __GetFormatter(self):
+ """Returns the log formatter for this handler.
+
+ Returns:
+ The log formatter to use.
+ """
+ if self.formatter:
+ return self.formatter
+ else:
+ return logging._defaultFormatter
+
+ def emit(self, record):
+ """Log an error to the datastore, if applicable.
+
+ Args:
+      record: The logging.LogRecord object.
+        See http://docs.python.org/library/logging.html#logging.LogRecord
+ """
+ try:
+ if not record.exc_info:
+ return
+
+ signature = self.__GetSignature(record.exc_info)
+
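+      # memcache.add() is a no-op that returns False when the key already
+      # exists, so each distinct signature is written to the datastore at
+      # most once per log_interval seconds.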
+ if not memcache.add(signature, None, self.log_interval):
+ return
+
+ db.run_in_transaction_custom_retries(1, self.__EmitTx, signature,
+ record.exc_info)
+ except Exception:
+ self.handleError(record)
+
+ def __EmitTx(self, signature, exc_info):
+ """Run in a transaction to insert or update the record for this transaction.
+
+ Args:
+ signature: The signature for this exception.
+ exc_info: The exception info record.
+ """
+ today = datetime.date.today()
+ version = os.environ['CURRENT_VERSION_ID']
+ major_ver, minor_ver = version.rsplit('.', 1)
+ minor_ver = int(minor_ver)
+ key_name = ExceptionRecord.get_key_name(signature, version)
+
+ exrecord = ExceptionRecord.get_by_key_name(key_name)
+ if not exrecord:
+ exrecord = ExceptionRecord(
+ key_name=key_name,
+ signature=signature,
+ major_version=major_ver,
+ minor_version=minor_ver,
+ date=today,
+ stacktrace=self.__GetFormatter().formatException(exc_info),
+ http_method=os.environ['REQUEST_METHOD'],
+ url=self.__GetURL(),
+ handler=self.__RelativePath(os.environ['PATH_TRANSLATED']))
+
+ exrecord.count += 1
+ exrecord.put()
+
+
+def register_logger(logger=None):
+  """Attaches an ExceptionRecordingHandler to a logger and returns it.
+
+  Args:
+    logger: The logger to attach to; defaults to the root logger.
+  """
+  if not logger:
+    logger = logging.getLogger()
+  handler = ExceptionRecordingHandler()
+  logger.addHandler(handler)
+  return handler
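+
+
+# A minimal usage sketch (assumptions: the app imports this module as
+# google.appengine.ext.ereporter and logs exceptions through the standard
+# logging module):
+#
+#   import logging
+#   from google.appengine.ext import ereporter
+#
+#   ereporter.register_logger()   # attach to the root logger once
+#   try:
+#     risky_operation()           # hypothetical application call
+#   except Exception:
+#     logging.exception('oops')   # recorded at most once per log_interval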
diff --git a/google_appengine/google/appengine/ext/ereporter/report_generator.py b/google_appengine/google/appengine/ext/ereporter/report_generator.py
new file mode 100755
index 0000000..f173f47
--- /dev/null
+++ b/google_appengine/google/appengine/ext/ereporter/report_generator.py
@@ -0,0 +1,184 @@
+#!/usr/bin/env python
+#
+# Copyright 2007 Google Inc.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
+
+"""Generates and emails daily exception reports.
+
+See google/appengine/ext/ereporter/__init__.py for usage details.
+
+Valid query string arguments to the report_generator script include:
+delete: Set to 'false' to prevent deletion of exception records from the
+ datastore after sending a report. Defaults to 'true'.
+debug: Set to 'true' to return the report in the response instead of
+ emailing it.
+date: The date to generate the report for, in yyyy-mm-dd format. Defaults to
+ yesterday's date. Useful for debugging.
+max_results: Maximum number of entries to include in a report.
+sender: The email address to use as the sender. Must be an administrator.
+to: If specified, send reports to this address. If not specified, all
+ admins are sent the report.
+versions: 'all' to report on all minor versions, or 'latest' for the latest.
+"""
+
+
+
+
+
+import datetime
+import itertools
+import os
+import re
+from xml.sax import saxutils
+
+from google.appengine.api import mail
+from google.appengine.ext import db
+from google.appengine.ext import ereporter
+from google.appengine.ext import webapp
+from google.appengine.ext.webapp import template
+from google.appengine.ext.webapp.util import run_wsgi_app
+
+
+def isTrue(val):
+ """Determines if a textual value represents 'true'.
+
+ Args:
+ val: A string, which may be 'true', 'yes', 't', '1' to indicate True.
+ Returns:
+ True or False
+ """
+ val = val.lower()
+ return val == 'true' or val == 't' or val == '1' or val == 'yes'
+
+
+class ReportGenerator(webapp.RequestHandler):
+ """Handler class to generate and email an exception report."""
+
+ DEFAULT_MAX_RESULTS = 100
+
+ def __init__(self, send_mail=mail.send_mail,
+ mail_admins=mail.send_mail_to_admins):
+ super(ReportGenerator, self).__init__()
+
+ self.send_mail = send_mail
+ self.send_mail_to_admins = mail_admins
+
+  def GetQuery(self, order=None):
+    """Creates a query object that will retrieve the appropriate exceptions.
+
+    Args:
+      order: Optional property name, with '-' prefix for descending, to
+        order the query by.
+
+    Returns:
+      A query to retrieve the exceptions required.
+    """
+ q = ereporter.ExceptionRecord.all()
+ q.filter('date =', self.yesterday)
+ q.filter('major_version =', self.major_version)
+ if self.version_filter.lower() == 'latest':
+ q.filter('minor_version =', self.minor_version)
+ if order:
+ q.order(order)
+ return q
+
+ def GenerateReport(self, exceptions):
+ """Generates an HTML exception report.
+
+ Args:
+ exceptions: A list of ExceptionRecord objects. This argument will be
+ modified by this function.
+ Returns:
+ An HTML exception report.
+ """
+ exceptions.sort(key=lambda e: (e.minor_version, -e.count))
+ versions = [(minor, list(excs)) for minor, excs
+ in itertools.groupby(exceptions, lambda e: e.minor_version)]
+
+ template_values = {
+ 'version_filter': self.version_filter,
+ 'version_count': len(versions),
+
+ 'exception_count': sum(len(excs) for _, excs in versions),
+
+ 'occurrence_count': sum(y.count for x in versions for y in x[1]),
+ 'app_id': self.app_id,
+ 'major_version': self.major_version,
+ 'date': self.yesterday,
+ 'versions': versions,
+ }
+ path = os.path.join(os.path.dirname(__file__), 'templates', 'report.html')
+ return template.render(path, template_values)
+
+ def SendReport(self, report):
+ """Emails an exception report.
+
+ Args:
+ report: A string containing the report to send.
+ """
+ subject = ('Daily exception report for app "%s", major version "%s"'
+ % (self.app_id, self.major_version))
+ report_text = saxutils.unescape(re.sub('<[^>]+>', '', report))
+ mail_args = {
+ 'sender': self.sender,
+ 'subject': subject,
+ 'body': report_text,
+ 'html': report,
+ }
+ if self.to:
+ mail_args['to'] = self.to
+ self.send_mail(**mail_args)
+ else:
+ self.send_mail_to_admins(**mail_args)
+
+ def get(self):
+ self.version_filter = self.request.GET.get('versions', 'all')
+ self.sender = self.request.GET['sender']
+ self.to = self.request.GET.get('to', None)
+ report_date = self.request.GET.get('date', None)
+ if report_date:
+ self.yesterday = datetime.date(*[int(x) for x in report_date.split('-')])
+ else:
+ self.yesterday = datetime.date.today() - datetime.timedelta(days=1)
+ self.app_id = os.environ['APPLICATION_ID']
+ version = os.environ['CURRENT_VERSION_ID']
+ self.major_version, self.minor_version = version.rsplit('.', 1)
+ self.minor_version = int(self.minor_version)
+ self.max_results = int(self.request.GET.get('max_results',
+ self.DEFAULT_MAX_RESULTS))
+ self.debug = isTrue(self.request.GET.get('debug', 'false'))
+ self.delete = isTrue(self.request.GET.get('delete', 'true'))
+
+ try:
+ exceptions = self.GetQuery(order='-minor_version').fetch(self.max_results)
+ except db.NeedIndexError:
+ exceptions = self.GetQuery().fetch(self.max_results)
+
+ if exceptions:
+ report = self.GenerateReport(exceptions)
+ if self.debug:
+ self.response.out.write(report)
+ else:
+ self.SendReport(report)
+
+ if self.delete:
+ db.delete(exceptions)
+
+
+application = webapp.WSGIApplication([('.*', ReportGenerator)])
+
+
+def main():
+ run_wsgi_app(application)
+
+
+if __name__ == '__main__':
+ main()
diff --git a/google_appengine/google/appengine/ext/ereporter/templates/report.html b/google_appengine/google/appengine/ext/ereporter/templates/report.html
new file mode 100644
index 0000000..cdf71d2
--- /dev/null
+++ b/google_appengine/google/appengine/ext/ereporter/templates/report.html
@@ -0,0 +1,15 @@
+<!-- Unusual layout is to ensure template is useful with tags stripped, too -->
+<html><head><title>Daily exception report for app "{{app_id}}", major version "{{major_version}}".</title></head>
+<body><p>At least {{occurrence_count}} occurrences of {{exception_count}} exceptions across {{version_count}} versions.</p>
+{% for version in versions %}
+<h1>Minor version {{version.0}}</h1>
+{% for exception in version.1 %}
+<h2>{{exception.signature}} (at least {{exception.count}} occurrences)</h2>
+ <table><tr><th>Handler:</th> <td>{{exception.handler}}</td></tr>
+  <tr><th>URL:</th> <td>{{exception.http_method|escape}} {{exception.url|escape}}</td></tr>
+ <tr><th>Stacktrace:</th>
+
+<td><pre>{{exception.stacktrace|escape}}</pre></td></tr></table>
+
+
+{% endfor %}{% endfor %}</body></html>
\ No newline at end of file
diff --git a/google_appengine/google/appengine/ext/gql/__init__.py b/google_appengine/google/appengine/ext/gql/__init__.py
new file mode 100755
index 0000000..8648587
--- /dev/null
+++ b/google_appengine/google/appengine/ext/gql/__init__.py
@@ -0,0 +1,1151 @@
+#!/usr/bin/env python
+#
+# Copyright 2007 Google Inc.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
+
+"""GQL -- the SQL-like interface to the datastore.
+
+Defines the GQL-based query class, a query mechanism for the datastore
+that provides an alternative model for interacting with stored data.
+"""
+
+
+
+
+
+import calendar
+import datetime
+import logging
+import re
+import time
+
+from google.appengine.api import datastore
+from google.appengine.api import datastore_errors
+from google.appengine.api import datastore_types
+from google.appengine.api import users
+
+MultiQuery = datastore.MultiQuery
+
+LOG_LEVEL = logging.DEBUG - 1
+
+_EPOCH = datetime.datetime.utcfromtimestamp(0)
+
+def Execute(query_string, *args, **keyword_args):
+ """Execute command to parse and run the query.
+
+ Calls the query parser code to build a proto-query which is an
+ unbound query. The proto-query is then bound into a real query and
+ executed.
+
+ Args:
+ query_string: properly formatted GQL query string.
+ args: rest of the positional arguments used to bind numeric references in
+ the query.
+ keyword_args: dictionary-based arguments (for named parameters).
+
+ Returns:
+ the result of running the query with *args.
+ """
+ app = keyword_args.pop('_app', None)
+ proto_query = GQL(query_string, _app=app)
+ return proto_query.Bind(args, keyword_args).Run()
+
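+# An illustrative call (a sketch assuming a 'Story' kind exists; with no
+# LIMIT clause the result follows datastore.Query.Run() iterator semantics):
+#
+#   for story in Execute('SELECT * FROM Story WHERE Author = :1', 'James'):
+#     print story['Author']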
+
+class GQL(object):
+ """A GQL interface to the datastore.
+
+ GQL is a SQL-like language which supports more object-like semantics
+  in a language that is familiar to SQL users. The language supported by
+ GQL will change over time, but will start off with fairly simple
+ semantics.
+
+ - reserved words are case insensitive
+ - names are case sensitive
+
+ The syntax for SELECT is fairly straightforward:
+
+ SELECT [* | __key__ ] [FROM <entity>]
+ [WHERE <condition> [AND <condition> ...]]
+ [ORDER BY <property> [ASC | DESC] [, <property> [ASC | DESC] ...]]
+ [LIMIT [<offset>,]<count>]
+ [OFFSET <offset>]
+    [HINT (ORDER_FIRST | FILTER_FIRST | ANCESTOR_FIRST)]
+
+ <condition> := <property> {< | <= | > | >= | = | != | IN} <value>
+ <condition> := <property> {< | <= | > | >= | = | != | IN} CAST(<value>)
+ <condition> := <property> IN (<value>, ...)
+ <condition> := ANCESTOR IS <entity or key>
+
+  Currently the parser is LL(1) because of the simplicity of the grammar
+ (as it is largely predictive with one token lookahead).
+
+ The class is implemented using some basic regular expression tokenization
+ to pull out reserved tokens and then the recursive descent parser will act
+ as a builder for the pre-compiled query. This pre-compiled query is then
+ bound to arguments before executing the query.
+
+ Initially, three parameter passing mechanisms are supported when calling
+ Execute():
+
+ - Positional parameters
+ Execute('SELECT * FROM Story WHERE Author = :1 AND Date > :2')
+ - Named parameters
+ Execute('SELECT * FROM Story WHERE Author = :author AND Date > :date')
+ - Literals (numbers, and strings)
+ Execute('SELECT * FROM Story WHERE Author = \'James\'')
+
+ Users are also given the option of doing type conversions to other datastore
+ types (e.g. db.Email, db.GeoPt). The language provides a conversion function
+ which allows the caller to express conversions of both literals and
+ parameters. The current conversion operators are:
+ - GEOPT(float, float)
+ - USER(str)
+ - KEY(kind, id/name[, kind, id/name...])
+ - DATETIME(year, month, day, hour, minute, second)
+ - DATETIME('YYYY-MM-DD HH:MM:SS')
+ - DATE(year, month, day)
+ - DATE('YYYY-MM-DD')
+ - TIME(hour, minute, second)
+ - TIME('HH:MM:SS')
+
+ We will properly serialize and quote all values.
+
+ It should also be noted that there are some caveats to the queries that can
+  be expressed in the syntax. The parser will attempt to make these as clear
+  as possible, but some of the caveats include:
+ - There is no OR operation. In most cases, you should prefer to use IN to
+ express the idea of wanting data matching one of a set of values.
+ - You cannot express inequality operators on multiple different properties
+ - You can only have one != operator per query (related to the previous
+ rule).
+ - The IN and != operators must be used carefully because they can
+ dramatically raise the amount of work done by the datastore. As such,
+ there is a limit on the number of elements you can use in IN statements.
+ This limit is set fairly low. Currently, a max of 30 datastore queries is
+ allowed in a given GQL query. != translates into 2x the number of
+ datastore queries, and IN multiplies by the number of elements in the
+ clause (so having two IN clauses, one with 5 elements, the other with 6
+ will cause 30 queries to occur).
+ - Literals can take the form of basic types or as type-cast literals. On
+ the other hand, literals within lists can currently only take the form of
+ simple types (strings, integers, floats).
+
+
+ SELECT * will return an iterable set of entities; SELECT __key__ will return
+ an iterable set of Keys.
+ """
+
+ TOKENIZE_REGEX = re.compile(r"""
+ (?:'[^'\n\r]*')+|
+ <=|>=|!=|=|<|>|
+ :\w+|
+ ,|
+ \*|
+ -?\d+(?:\.\d+)?|
+ \w+|
+ \(|\)|
+ \S+
+ """, re.VERBOSE | re.IGNORECASE)
+
+ MAX_ALLOWABLE_QUERIES = datastore.MAX_ALLOWABLE_QUERIES
+
+ __ANCESTOR = -1
+
+ def __init__(self, query_string, _app=None, _auth_domain=None):
+ """Ctor.
+
+ Parses the input query into the class as a pre-compiled query, allowing
+ for a later call to Bind() to bind arguments as defined in the
+ documentation.
+
+ Args:
+ query_string: properly formatted GQL query string.
+
+ Raises:
+ datastore_errors.BadQueryError: if the query is not parsable.
+ """
+ self._entity = ''
+ self.__filters = {}
+ self.__has_ancestor = False
+ self.__orderings = []
+ self.__offset = -1
+ self.__limit = -1
+ self.__hint = ''
+ self.__app = _app
+ self.__auth_domain = _auth_domain
+
+ self.__symbols = self.TOKENIZE_REGEX.findall(query_string)
+ self.__next_symbol = 0
+    if not self.__Select():
+      raise datastore_errors.BadQueryError(
+          'Unable to parse query')
+
+ def Bind(self, args, keyword_args):
+ """Bind the existing query to the argument list.
+
+ Assumes that the input args are first positional, then a dictionary.
+ So, if the query contains references to :1, :2 and :name, it is assumed
+ that arguments are passed as (:1, :2, dict) where dict contains a mapping
+ [name] -> value.
+
+ Args:
+ args: the arguments to bind to the object's unbound references.
+ keyword_args: dictionary-based arguments (for named parameters).
+
+ Raises:
+ datastore_errors.BadArgumentError: when arguments are left unbound
+        (missing from the input arguments) or when arguments do not match the
+ expected type.
+
+ Returns:
+      The bound datastore.Query object. This may take the form of a MultiQuery
+      object if the GQL query requires multiple backend queries to satisfy it.
+ """
+ num_args = len(args)
+ input_args = frozenset(xrange(num_args))
+ used_args = set()
+
+ queries = []
+ enumerated_queries = self.EnumerateQueries(used_args, args, keyword_args)
+ if enumerated_queries:
+ query_count = len(enumerated_queries)
+ else:
+ query_count = 1
+
+ for i in xrange(query_count):
+ queries.append(datastore.Query(self._entity, _app=self.__app,
+ keys_only=self._keys_only))
+
+    logging.log(LOG_LEVEL,
+                'Binding with %i positional args %s and %i keywords %s',
+                len(args), args, len(keyword_args), keyword_args)
+ for ((identifier, condition), value_list) in self.__filters.iteritems():
+ for (operator, params) in value_list:
+ value = self.__Operate(args, keyword_args, used_args, operator, params)
+ if not self.__IsMultiQuery(condition):
+ for query in queries:
+ self.__AddFilterToQuery(identifier, condition, value, query)
+
+ unused_args = input_args - used_args
+ if unused_args:
+ unused_values = [unused_arg + 1 for unused_arg in unused_args]
+ raise datastore_errors.BadArgumentError('Unused positional arguments %s' %
+ unused_values)
+
+ if enumerated_queries:
+ logging.log(LOG_LEVEL,
+ 'Multiple Queries Bound: %s',
+ enumerated_queries)
+
+ for (query, enumerated_query) in zip(queries, enumerated_queries):
+ query.update(enumerated_query)
+
+ if self.__orderings:
+ for query in queries:
+ query.Order(*tuple(self.__orderings))
+
+ if query_count > 1:
+ return MultiQuery(queries, self.__orderings)
+ else:
+ return queries[0]
+
+ def EnumerateQueries(self, used_args, args, keyword_args):
+ """Create a list of all multi-query filter combinations required.
+
+ To satisfy multi-query requests ("IN" and "!=" filters), multiple queries
+ may be required. This code will enumerate the power-set of all multi-query
+ filters.
+
+ Args:
+ used_args: set of used positional parameters (output only variable used in
+ reporting for unused positional args)
+ args: positional arguments referenced by the proto-query in self. This
+ assumes the input is a tuple (and can also be called with a varargs
+ param).
+ keyword_args: dict of keyword arguments referenced by the proto-query in
+ self.
+
+ Returns:
+ A list of maps [(identifier, condition) -> value] of all queries needed
+ to satisfy the GQL query with the given input arguments.
+ """
+ enumerated_queries = []
+
+ for ((identifier, condition), value_list) in self.__filters.iteritems():
+ for (operator, params) in value_list:
+ value = self.__Operate(args, keyword_args, used_args, operator, params)
+ self.__AddMultiQuery(identifier, condition, value, enumerated_queries)
+
+ return enumerated_queries
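+
+  # Illustrative expansion (hypothetical filters): binding
+  # 'WHERE a IN :1 AND b != :2' with :1 = [1, 2] enumerates the
+  # cross-product of the multi-query filters, roughly:
+  #   [{'a =': 1, 'b <': v}, {'a =': 1, 'b >': v},
+  #    {'a =': 2, 'b <': v}, {'a =': 2, 'b >': v}]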
+
+ def __CastError(self, operator, values, error_message):
+ """Query building error for type cast operations.
+
+ Args:
+ operator: the failed cast operation
+ values: value list passed to the cast operator
+ error_message: string to emit as part of the 'Cast Error' string.
+
+ Raises:
+ BadQueryError and passes on an error message from the caller. Will raise
+ BadQueryError on all calls.
+ """
+ raise datastore_errors.BadQueryError(
+ 'Type Cast Error: unable to cast %r with operation %s (%s)' %
+ (values, operator.upper(), error_message))
+
+ def __CastNop(self, values):
+ """Return values[0] if it exists -- default for most where clauses."""
+ if len(values) != 1:
+      self.__CastError('NOP', values, 'requires one and only one value')
+ else:
+ return values[0]
+
+ def __CastList(self, values):
+ """Return the full list of values -- only useful for IN clause."""
+ if values:
+ return values
+ else:
+ return None
+
+ def __CastKey(self, values):
+ """Cast input values to Key() class using encoded string or tuple list."""
+ if not len(values) % 2:
+ return datastore_types.Key.from_path(_app=self.__app, *values)
+ elif len(values) == 1 and isinstance(values[0], basestring):
+ return datastore_types.Key(values[0])
+ else:
+ self.__CastError('KEY', values,
+ 'requires an even number of operands '
+ 'or a single encoded string')
+
+ def __CastGeoPt(self, values):
+ """Cast input to GeoPt() class using 2 input parameters."""
+ if len(values) != 2:
+ self.__CastError('GEOPT', values, 'requires 2 input parameters')
+ return datastore_types.GeoPt(*values)
+
+ def __CastUser(self, values):
+ """Cast to User() class using the email address in values[0]."""
+ if len(values) != 1:
+      self.__CastError('USER', values, 'requires one and only one value')
+ else:
+ return users.User(email=values[0], _auth_domain=self.__auth_domain)
+
+ def __EncodeIfNeeded(self, value):
+ """Simple helper function to create an str from possibly unicode strings.
+ Args:
+ value: input string (should pass as an instance of str or unicode).
+ """
+ if isinstance(value, unicode):
+ return value.encode('utf8')
+ else:
+ return value
+
+ def __CastDate(self, values):
+ """Cast DATE values (year/month/day) from input (to datetime.datetime).
+
+ Casts DATE input values formulated as ISO string or time tuple inputs.
+
+ Args:
+ values: either a single string with ISO time representation or 3
+ integer valued date tuple (year, month, day).
+
+ Returns:
+ datetime.datetime value parsed from the input values.
+ """
+
+ if len(values) == 1:
+ value = self.__EncodeIfNeeded(values[0])
+ if isinstance(value, str):
+ try:
+ time_tuple = time.strptime(value, '%Y-%m-%d')[0:6]
+ except ValueError, err:
+ self.__CastError('DATE', values, err)
+ else:
+ self.__CastError('DATE', values, 'Single input value not a string')
+ elif len(values) == 3:
+ time_tuple = (values[0], values[1], values[2], 0, 0, 0)
+ else:
+ self.__CastError('DATE', values,
+ 'function takes 1 string or 3 integer values')
+
+ try:
+ return datetime.datetime(*time_tuple)
+ except ValueError, err:
+ self.__CastError('DATE', values, err)
+
+ def __CastTime(self, values):
+ """Cast TIME values (hour/min/sec) from input (to datetime.datetime).
+
+ Casts TIME input values formulated as ISO string or time tuple inputs.
+
+ Args:
+ values: either a single string with ISO time representation or 1-4
+ integer valued time tuple (hour), (hour, minute),
+ (hour, minute, second), (hour, minute, second, microsec).
+
+ Returns:
+ datetime.datetime value parsed from the input values.
+ """
+ if len(values) == 1:
+ value = self.__EncodeIfNeeded(values[0])
+ if isinstance(value, str):
+ try:
+ time_tuple = time.strptime(value, '%H:%M:%S')
+ except ValueError, err:
+ self.__CastError('TIME', values, err)
+ time_tuple = (1970, 1, 1) + time_tuple[3:]
+ time_tuple = time_tuple[0:6]
+ elif isinstance(value, int):
+ time_tuple = (1970, 1, 1, value)
+ else:
+ self.__CastError('TIME', values,
+ 'Single input value not a string or integer hour')
+ elif len(values) <= 4:
+ time_tuple = (1970, 1, 1) + tuple(values)
+ else:
+      self.__CastError('TIME', values,
+                       'function takes 1 string or up to 4 integer values')
+
+ try:
+ return datetime.datetime(*time_tuple)
+ except ValueError, err:
+ self.__CastError('TIME', values, err)
+
+ def __CastDatetime(self, values):
+ """Cast DATETIME values (string or tuple) from input (to datetime.datetime).
+
+ Casts DATETIME input values formulated as ISO string or datetime tuple
+ inputs.
+
+ Args:
+ values: either a single string with ISO representation or 3-7
+ integer valued time tuple (year, month, day, ...).
+
+ Returns:
+ datetime.datetime value parsed from the input values.
+ """
+ if len(values) == 1:
+ value = self.__EncodeIfNeeded(values[0])
+ if isinstance(value, str):
+ try:
+ time_tuple = time.strptime(str(value), '%Y-%m-%d %H:%M:%S')[0:6]
+ except ValueError, err:
+ self.__CastError('DATETIME', values, err)
+ else:
+ self.__CastError('DATETIME', values, 'Single input value not a string')
+ else:
+ time_tuple = values
+
+ try:
+ return datetime.datetime(*time_tuple)
+ except ValueError, err:
+ self.__CastError('DATETIME', values, err)
+
+ def __Operate(self, args, keyword_args, used_args, operator, params):
+ """Create a single output value from params using the operator string given.
+
+ Args:
+ args,keyword_args: arguments passed in for binding purposes (used in
+ binding positional and keyword based arguments).
+      used_args: set of numeric arguments accessed in this call; values are
+        ints representing used zero-based positional arguments, used as an
+        output parameter with newly used arguments added to the set.
+      operator: string representing the operator to use; 'nop' just returns
+        the first value from params.
+ params: parameter list to operate on (positional references, named
+ references, or literals).
+
+ Returns:
+ A value which can be used as part of a GQL filter description (either a
+ list of datastore types -- for use with IN, or a single datastore type --
+ for use with other filters).
+ """
+ if not params:
+ return None
+
+ param_values = []
+ for param in params:
+ if isinstance(param, Literal):
+ value = param.Get()
+ else:
+ value = self.__GetParam(param, args, keyword_args)
+ if isinstance(param, int):
+ used_args.add(param - 1)
+ logging.log(LOG_LEVEL, 'found param for bind: %s value: %s',
+ param, value)
+ param_values.append(value)
+
+ logging.log(LOG_LEVEL, '%s Operating on values: %s',
+ operator, repr(param_values))
+
+ if operator in self.__cast_operators:
+ result = self.__cast_operators[operator](self, param_values)
+ else:
+ self.__Error('Operation %s is invalid' % operator)
+
+ return result
+
+ def __IsMultiQuery(self, condition):
+ """Return whether or not this condition could require multiple queries."""
+ return condition.lower() in ('in', '!=')
+
+ def __GetParam(self, reference, args, keyword_args):
+ """Get the specified parameter from the input arguments.
+
+ Args:
+ reference: id for a filter reference in the filter list (string or
+ number)
+ args: positional args passed in by the user (tuple of arguments, indexed
+ numerically by "reference")
+ keyword_args: dict of keyword based arguments (strings in "reference")
+
+ Returns:
+ The specified param from the input list.
+
+ Raises:
+ BadArgumentError if the referenced argument doesn't exist.
+ """
+ num_args = len(args)
+ if isinstance(reference, int):
+ if reference <= num_args:
+ return args[reference - 1]
+ else:
+ raise datastore_errors.BadArgumentError(
+ 'Missing argument for bind, requires argument #%i, '
+ 'but only has %i args.' % (reference, num_args))
+ elif isinstance(reference, basestring):
+ if reference in keyword_args:
+ return keyword_args[reference]
+ else:
+ raise datastore_errors.BadArgumentError(
+ 'Missing named arguments for bind, requires argument %s' %
+ reference)
+ else:
+ assert False, 'Unknown reference %s' % reference
+
+ def __AddMultiQuery(self, identifier, condition, value, enumerated_queries):
+ """Helper function to add a muti-query to previously enumerated queries.
+
+ Args:
+ identifier: property being filtered by this condition
+ condition: filter condition (e.g. !=,in)
+ value: value being bound
+ enumerated_queries: in/out list of already bound queries -> expanded list
+ with the full enumeration required to satisfy the condition query
+ Raises:
+ BadArgumentError if the filter is invalid (namely non-list with IN)
+ """
+ if condition.lower() in ('!=', 'in') and self._keys_only:
+ raise datastore_errors.BadQueryError(
+ 'Keys only queries do not support IN or != filters.')
+
+ def CloneQueries(queries, n):
+ """Do a full copy of the queries and append to the end of the queries.
+
+ Does an in-place replication of the input list and sorts the result to
+      put copies next to one another.
+
+ Args:
+ queries: list of all filters to clone
+ n: number of copies to make
+
+ Returns:
+ Number of iterations needed to fill the structure
+ """
+ if not enumerated_queries:
+ for i in xrange(n):
+ queries.append({})
+ return 1
+ else:
+ old_size = len(queries)
+ tmp_queries = []
+ for i in xrange(n - 1):
+          tmp_queries.extend(filter_map.copy() for filter_map in queries)
+ queries.extend(tmp_queries)
+ queries.sort()
+ return old_size
+
+ if condition == '!=':
+ if len(enumerated_queries) * 2 > self.MAX_ALLOWABLE_QUERIES:
+ raise datastore_errors.BadArgumentError(
+ 'Cannot satisfy query -- too many IN/!= values.')
+
+ num_iterations = CloneQueries(enumerated_queries, 2)
+ for i in xrange(num_iterations):
+ enumerated_queries[2 * i]['%s <' % identifier] = value
+ enumerated_queries[2 * i + 1]['%s >' % identifier] = value
+ elif condition.lower() == 'in':
+ if not isinstance(value, list):
+ raise datastore_errors.BadArgumentError('List expected for "IN" filter')
+
+ in_list_size = len(value)
+ if len(enumerated_queries) * in_list_size > self.MAX_ALLOWABLE_QUERIES:
+ raise datastore_errors.BadArgumentError(
+ 'Cannot satisfy query -- too many IN/!= values.')
+
+ num_iterations = CloneQueries(enumerated_queries, in_list_size)
+ for clone_num in xrange(num_iterations):
+ for value_num in xrange(len(value)):
+ list_val = value[value_num]
+ query_num = in_list_size * clone_num + value_num
+ filt = '%s =' % identifier
+ enumerated_queries[query_num][filt] = list_val
+
+ def __AddFilterToQuery(self, identifier, condition, value, query):
+ """Add a filter condition to a query based on the inputs.
+
+ Args:
+ identifier: name of the property (or self.__ANCESTOR for ancestors)
+ condition: test condition
+ value: test value passed from the caller
+ query: query to add the filter to
+ """
+ if identifier != self.__ANCESTOR:
+ filter_condition = '%s %s' % (identifier, condition)
+ logging.log(LOG_LEVEL, 'Setting filter on "%s" with value "%s"',
+ filter_condition, value.__class__)
+ datastore._AddOrAppend(query, filter_condition, value)
+
+ else:
+ logging.log(LOG_LEVEL, 'Setting ancestor query for ancestor %s', value)
+ query.Ancestor(value)
+
+ def Run(self, *args, **keyword_args):
+ """Runs this query.
+
+ Similar to datastore.Query.Run.
+ Assumes that limit == -1 or > 0
+
+ Args:
+ args: arguments used to bind to references in the compiled query object.
+ keyword_args: dictionary-based arguments (for named parameters).
+
+ Returns:
+ A list of results if a query count limit was passed.
+ A result iterator if no limit was given.
+ """
+ bind_results = self.Bind(args, keyword_args)
+
+ offset = 0
+ if self.__offset != -1:
+ offset = self.__offset
+
+ if self.__limit == -1:
+ it = bind_results.Run()
+ try:
+ for i in xrange(offset):
+ it.next()
+ except StopIteration:
+ pass
+
+ return it
+ else:
+ res = bind_results.Get(self.__limit, offset)
+ return res
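+
+  # Illustrative behaviour (mirroring the docstring above): for
+  # 'SELECT * FROM Story LIMIT 5, 10' the offset is 5 and the limit is 10,
+  # so Get(10, 5) skips five results and returns a list of at most ten.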
+
+ def filters(self):
+ """Return the compiled list of filters."""
+ return self.__filters
+
+ def hint(self):
+ """Return the datastore hint."""
+ return self.__hint
+
+ def limit(self):
+ """Return numerical result count limit."""
+ return self.__limit
+
+ def orderings(self):
+ """Return the result ordering list."""
+ return self.__orderings
+
+ def is_keys_only(self):
+ """Returns True if this query returns Keys, False if it returns Entities."""
+ return self._keys_only
+
+ __iter__ = Run
+
+ __result_type_regex = re.compile(r'(\*|__key__)')
+ __quoted_string_regex = re.compile(r'((?:\'[^\'\n\r]*\')+)')
+ __ordinal_regex = re.compile(r':(\d+)$')
+ __named_regex = re.compile(r':(\w+)$')
+ __identifier_regex = re.compile(r'(\w+)$')
+ __conditions_regex = re.compile(r'(<=|>=|!=|=|<|>|is|in)$', re.IGNORECASE)
+ __number_regex = re.compile(r'(\d+)$')
+ __cast_regex = re.compile(
+ r'(geopt|user|key|date|time|datetime)$', re.IGNORECASE)
+ __cast_operators = {
+ 'geopt': __CastGeoPt,
+ 'user': __CastUser,
+ 'key': __CastKey,
+ 'datetime': __CastDatetime,
+ 'date': __CastDate,
+ 'time': __CastTime,
+ 'list': __CastList,
+ 'nop': __CastNop,
+ }
+
+ def __Error(self, error_message):
+ """Generic query error.
+
+ Args:
+ error_message: string to emit as part of the 'Parse Error' string.
+
+ Raises:
+ BadQueryError and passes on an error message from the caller. Will raise
+ BadQueryError on all calls to __Error()
+ """
+ if self.__next_symbol >= len(self.__symbols):
+ raise datastore_errors.BadQueryError(
+ 'Parse Error: %s at end of string' % error_message)
+ else:
+ raise datastore_errors.BadQueryError(
+ 'Parse Error: %s at symbol %s' %
+ (error_message, self.__symbols[self.__next_symbol]))
+
+ def __Accept(self, symbol_string):
+ """Advance the symbol and return true iff the next symbol matches input."""
+ if self.__next_symbol < len(self.__symbols):
+ logging.log(LOG_LEVEL, '\t%s', self.__symbols)
+ logging.log(LOG_LEVEL, '\tExpect: %s Got: %s',
+ symbol_string, self.__symbols[self.__next_symbol].upper())
+ if self.__symbols[self.__next_symbol].upper() == symbol_string:
+ self.__next_symbol += 1
+ return True
+ return False
+
+ def __Expect(self, symbol_string):
+ """Require that the next symbol matches symbol_string, or emit an error.
+
+ Args:
+ symbol_string: next symbol expected by the caller
+
+ Raises:
+ BadQueryError if the next symbol doesn't match the parameter passed in.
+ """
+ if not self.__Accept(symbol_string):
+ self.__Error('Unexpected Symbol: %s' % symbol_string)
+
+ def __AcceptRegex(self, regex):
+ """Advance and return the symbol if the next symbol matches the regex.
+
+ Args:
+ regex: the compiled regular expression to attempt acceptance on.
+
+ Returns:
+ The first group in the expression to allow for convenient access
+ to simple matches. Requires () around some objects in the regex.
+ None if no match is found.
+ """
+ if self.__next_symbol < len(self.__symbols):
+ match_symbol = self.__symbols[self.__next_symbol]
+ logging.log(LOG_LEVEL, '\taccept %s on symbol %s', regex, match_symbol)
+ match = regex.match(match_symbol)
+ if match:
+ self.__next_symbol += 1
+ if match.groups():
+ matched_string = match.group(1)
+
+ logging.log(LOG_LEVEL, '\taccepted %s', matched_string)
+ return matched_string
+
+ return None
+
+ def __AcceptTerminal(self):
+ """Only accept an empty string.
+
+ Returns:
+ True
+
+ Raises:
+ BadQueryError if there are unconsumed symbols in the query.
+ """
+ if self.__next_symbol < len(self.__symbols):
+ self.__Error('Expected no additional symbols')
+ return True
+
+ def __Select(self):
+ """Consume the SELECT clause and everything that follows it.
+
+ Assumes SELECT * to start.
+ Transitions to a FROM clause.
+
+ Returns:
+ True if parsing completed okay.
+ """
+ self.__Expect('SELECT')
+ result_type = self.__AcceptRegex(self.__result_type_regex)
+ self._keys_only = (result_type == '__key__')
+ return self.__From()
+
+ def __From(self):
+ """Consume the FROM clause.
+
+ Assumes a single well formed entity in the clause.
+ Assumes FROM <Entity Name>
+ Transitions to a WHERE clause.
+
+ Returns:
+ True if parsing completed okay.
+ """
+ if self.__Accept('FROM'):
+ kind = self.__AcceptRegex(self.__identifier_regex)
+ if kind:
+ self._entity = kind
+ else:
+ self.__Error('Identifier Expected')
+ return False
+ else:
+ self._entity = None
+ return self.__Where()
+
+ def __Where(self):
+ """Consume the WHERE cluase.
+
+ These can have some recursion because of the AND symbol.
+
+ Returns:
+ True if parsing the WHERE clause completed correctly, as well as all
+ subsequent clauses
+ """
+ if self.__Accept('WHERE'):
+ return self.__FilterList()
+ return self.__OrderBy()
+
+ def __FilterList(self):
+ """Consume the filter list (remainder of the WHERE clause)."""
+ identifier = self.__AcceptRegex(self.__identifier_regex)
+ if not identifier:
+ self.__Error('Invalid WHERE Identifier')
+ return False
+
+ condition = self.__AcceptRegex(self.__conditions_regex)
+ if not condition:
+ self.__Error('Invalid WHERE Condition')
+ return False
+ self.__CheckFilterSyntax(identifier, condition)
+
+ if not self.__AddSimpleFilter(identifier, condition, self.__Reference()):
+ if not self.__AddSimpleFilter(identifier, condition, self.__Literal()):
+ type_cast = self.__TypeCast()
+ if (not type_cast or
+ not self.__AddProcessedParameterFilter(identifier, condition,
+ *type_cast)):
+ self.__Error('Invalid WHERE condition')
+
+ if self.__Accept('AND'):
+ return self.__FilterList()
+
+ return self.__OrderBy()
+
+ def __GetValueList(self):
+ """Read in a list of parameters from the tokens and return the list.
+
+    Reads in a set of tokens, but currently only accepts literals, positional
+    parameters, and named parameters.
+
+ Returns:
+ A list of values parsed from the input, with values taking the form of
+ strings (unbound, named reference), integers (unbound, positional
+ reference), or Literal() (bound value usable directly as part of a filter
+ with no additional information).
+ """
+ params = []
+
+ while True:
+ reference = self.__Reference()
+ if reference:
+ params.append(reference)
+ else:
+ literal = self.__Literal()
+ if literal:
+ params.append(literal)
+ else:
+ self.__Error('Parameter list requires literal or reference parameter')
+
+ if not self.__Accept(','):
+ break
+
+ return params
+
+ def __CheckFilterSyntax(self, identifier, condition):
+ """Check that filter conditions are valid and throw errors if not.
+
+ Args:
+ identifier: identifier being used in comparison
+ condition: string form of the comparison operator used in the filter
+ """
+ if identifier.lower() == 'ancestor':
+ if condition.lower() == 'is':
+ if self.__has_ancestor:
+          self.__Error('Only one "ANCESTOR IS" clause allowed')
+ else:
+ self.__Error('"IS" expected to follow "ANCESTOR"')
+ elif condition.lower() == 'is':
+ self.__Error('"IS" can only be used when comparing against "ANCESTOR"')
+
+ def __AddProcessedParameterFilter(self, identifier, condition,
+ operator, parameters):
+ """Add a filter with post-processing required.
+
+ Args:
+ identifier: property being compared.
+ condition: comparison operation being used with the property (e.g. !=).
+ operator: operation to perform on the parameters before adding the filter.
+      parameters: list of bound parameters passed to 'operator' before creating
+        the filter (when using the parameters as a pass-through, pass 'nop'
+        as the operator and the first value will be used unprocessed).
+
+ Returns:
+ True if the filter was okay to add.
+ """
+ if parameters is None:
+ return False
+ if parameters[0] is None:
+ return False
+
+ logging.log(LOG_LEVEL, 'Adding Filter %s %s %s',
+ identifier, condition, repr(parameters))
+ filter_rule = (identifier, condition)
+ if identifier.lower() == 'ancestor':
+ self.__has_ancestor = True
+ filter_rule = (self.__ANCESTOR, 'is')
+ assert condition.lower() == 'is'
+
+ if condition.lower() != 'in' and operator == 'list':
+ self.__Error('Only IN can process a list of values')
+
+ self.__filters.setdefault(filter_rule, []).append((operator, parameters))
+ return True
+
+ def __AddSimpleFilter(self, identifier, condition, parameter):
+ """Add a filter to the query being built (no post-processing on parameter).
+
+ Args:
+ identifier: identifier being used in comparison
+ condition: string form of the comparison operator used in the filter
+ parameter: ID of the reference being made or a value of type Literal
+
+ Returns:
+ True if the filter could be added.
+ False otherwise.
+ """
+ return self.__AddProcessedParameterFilter(identifier, condition,
+ 'nop', [parameter])
+
+ def __Reference(self):
+ """Consume a parameter reference and return it.
+
+ Consumes a reference to a positional parameter (:1) or a named parameter
+ (:email). Only consumes a single reference (not lists).
+
+ Returns:
+ The name of the reference (integer for positional parameters or string
+ for named parameters) to a bind-time parameter.
+ """
+ logging.log(LOG_LEVEL, 'Try Reference')
+ reference = self.__AcceptRegex(self.__ordinal_regex)
+ if reference:
+ return int(reference)
+ else:
+ reference = self.__AcceptRegex(self.__named_regex)
+ if reference:
+ return reference
+
+ return None
+
+ def __Literal(self):
+ """Parse literals from our token list.
+
+ Returns:
+ The parsed literal from the input string (currently either a string,
+ integer, or floating point value).
+ """
+ logging.log(LOG_LEVEL, 'Try Literal')
+ literal = None
+ try:
+ literal = int(self.__symbols[self.__next_symbol])
+ except ValueError:
+ pass
+ else:
+ self.__next_symbol += 1
+
+ if literal is None:
+ try:
+ literal = float(self.__symbols[self.__next_symbol])
+ except ValueError:
+ pass
+ else:
+ self.__next_symbol += 1
+
+ if literal is None:
+ literal = self.__AcceptRegex(self.__quoted_string_regex)
+ if literal:
+ literal = literal[1:-1].replace("''", "'")
+
+ if literal is None:
+ if self.__Accept('TRUE'):
+ literal = True
+ elif self.__Accept('FALSE'):
+ literal = False
+
+ if literal is not None:
+ return Literal(literal)
+ else:
+ return None
+
+ def __TypeCast(self):
+ """Check if the next operation is a type-cast and return the cast if so.
+
+ Casting operators look like simple function calls on their parameters. This
+ code returns the cast operator found and the list of parameters provided by
+ the user to complete the cast operation.
+
+ Returns:
+ A tuple (cast operator, params) which represents the cast operation
+ requested and the parameters parsed from the cast clause.
+
+ None - if there is no TypeCast function.
+ """
+ logging.log(LOG_LEVEL, 'Try Type Cast')
+ cast_op = self.__AcceptRegex(self.__cast_regex)
+ if not cast_op:
+ if self.__Accept('('):
+ cast_op = 'list'
+ else:
+ return None
+ else:
+ cast_op = cast_op.lower()
+ self.__Expect('(')
+
+ params = self.__GetValueList()
+ self.__Expect(')')
+
+ logging.log(LOG_LEVEL, 'Got casting operator %s with params %s',
+ cast_op, repr(params))
+ return (cast_op, params)
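+
+  # Illustrative parse (using the DATE cast listed in the class docstring):
+  # for the clause "birthday < DATE('1984-01-24')" this method returns
+  # ('date', [Literal('1984-01-24')]).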
+
+ def __OrderBy(self):
+ """Consume the ORDER BY clause."""
+ if self.__Accept('ORDER'):
+ self.__Expect('BY')
+ return self.__OrderList()
+ return self.__Limit()
+
+ def __OrderList(self):
+ """Consume variables and sort order for ORDER BY clause."""
+ identifier = self.__AcceptRegex(self.__identifier_regex)
+ if identifier:
+      if self.__Accept('DESC'):
+        self.__orderings.append((identifier, datastore.Query.DESCENDING))
+      else:
+        self.__Accept('ASC')
+        self.__orderings.append((identifier, datastore.Query.ASCENDING))
+ else:
+ self.__Error('Invalid ORDER BY Property')
+
+ logging.log(LOG_LEVEL, self.__orderings)
+ if self.__Accept(','):
+ return self.__OrderList()
+ return self.__Limit()
+
+ def __Limit(self):
+ """Consume the LIMIT clause."""
+ if self.__Accept('LIMIT'):
+ maybe_limit = self.__AcceptRegex(self.__number_regex)
+
+ if maybe_limit:
+ if self.__Accept(','):
+ self.__offset = int(maybe_limit)
+ if self.__offset < 0:
+ self.__Error('Bad offset in LIMIT Value')
+ else:
+ logging.log(LOG_LEVEL, 'Set offset to %i', self.__offset)
+ maybe_limit = self.__AcceptRegex(self.__number_regex)
+
+ self.__limit = int(maybe_limit)
+ if self.__limit < 1:
+ self.__Error('Bad Limit in LIMIT Value')
+ else:
+ logging.log(LOG_LEVEL, 'Set limit to %i', self.__limit)
+ else:
+ self.__Error('Non-number limit in LIMIT clause')
+
+ return self.__Offset()
+
+ def __Offset(self):
+ """Consume the OFFSET clause."""
+ if self.__Accept('OFFSET'):
+ if self.__offset != -1:
+ self.__Error('Offset already defined in LIMIT clause')
+
+ offset = self.__AcceptRegex(self.__number_regex)
+
+ if offset:
+ self.__offset = int(offset)
+ if self.__offset < 0:
+ self.__Error('Bad offset in OFFSET clause')
+ else:
+ logging.log(LOG_LEVEL, 'Set offset to %i', self.__offset)
+ else:
+ self.__Error('Non-number offset in OFFSET clause')
+
+ return self.__Hint()
+
+ def __Hint(self):
+ """Consume the HINT clause.
+
+ Requires one of three options (mirroring the rest of the datastore):
+ HINT ORDER_FIRST
+ HINT ANCESTOR_FIRST
+ HINT FILTER_FIRST
+
+ Returns:
+ True if the hint clause and later clauses all parsed okay
+ """
+ if self.__Accept('HINT'):
+ if self.__Accept('ORDER_FIRST'):
+ self.__hint = 'ORDER_FIRST'
+ elif self.__Accept('FILTER_FIRST'):
+ self.__hint = 'FILTER_FIRST'
+ elif self.__Accept('ANCESTOR_FIRST'):
+ self.__hint = 'ANCESTOR_FIRST'
+ else:
+ self.__Error('Unknown HINT')
+ return False
+ return self.__AcceptTerminal()
+
+
+class Literal(object):
+ """Class for representing literal values in a way unique from unbound params.
+
+ This is a simple wrapper class around basic types and datastore types.
+ """
+
+ def __init__(self, value):
+ self.__value = value
+
+ def Get(self):
+ """Return the value of the literal."""
+ return self.__value
+
+ def __repr__(self):
+ return 'Literal(%s)' % repr(self.__value)
diff --git a/google_appengine/google/appengine/ext/gql/__init__.pyc b/google_appengine/google/appengine/ext/gql/__init__.pyc
new file mode 100644
index 0000000..f73e391
--- /dev/null
+++ b/google_appengine/google/appengine/ext/gql/__init__.pyc
Binary files differ
diff --git a/google_appengine/google/appengine/ext/key_range/__init__.py b/google_appengine/google/appengine/ext/key_range/__init__.py
new file mode 100755
index 0000000..4c6b632
--- /dev/null
+++ b/google_appengine/google/appengine/ext/key_range/__init__.py
@@ -0,0 +1,570 @@
+#!/usr/bin/env python
+#
+# Copyright 2007 Google Inc.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
+
+"""Key range representation and splitting."""
+
+
+import os
+
+try:
+ import simplejson
+except ImportError:
+ simplejson = None
+
+from google.appengine.api import datastore
+from google.appengine.datastore import datastore_pb
+from google.appengine.ext import db
+
+
+class Error(Exception):
+ """Base class for exceptions in this module."""
+
+
+class KeyRangeError(Error):
+ """Error while trying to generate a KeyRange."""
+
+
+class SimplejsonUnavailableError(Error):
+ """Error while using json functionality whith unavailable simplejson."""
+
+class EmptyDbQuery(db.Query):
+ """A query that returns no results."""
+
+ def get(self):
+ return None
+
+ def fetch(self, limit=1000, offset=0):
+ return []
+
+ def count(self, limit=1000):
+ return 0
+
+
+class EmptyDatastoreQuery(datastore.Query):
+ """A query that returns no results."""
+
+ def __init__(self, kind):
+ datastore.Query.__init__(self, kind)
+
+ def _Run(self, *unused_args, **unused_kwargs):
+ empty_result_pb = datastore_pb.QueryResult()
+ empty_result_pb.set_cursor(0)
+ empty_result_pb.set_more_results(False)
+ return datastore.Iterator(empty_result_pb)
+
+ def Count(self, *unused_args, **unused_kwargs):
+ return 0
+
+ def Get(self, *unused_args, **unused_kwargs):
+ return []
+
+ def Next(self, *unused_args, **unused_kwargs):
+ return []
+
+
+class KeyRange(object):
+ """Represents a range of keys in the datastore.
+
+ A KeyRange object represents a key range
+ (key_start, include_start, key_end, include_end)
+ and a scan direction (KeyRange.DESC or KeyRange.ASC).
+ """
+
+ DESC = 'DESC'
+ ASC = 'ASC'
+
+ def __init__(self,
+ key_start=None,
+ key_end=None,
+ direction=None,
+ include_start=True,
+ include_end=True):
+ """Initialize a KeyRange object.
+
+ Args:
+ key_start: The starting key for this range.
+ key_end: The ending key for this range.
+ direction: The direction of the query for this range.
+ include_start: Whether the start key should be included in the range.
+ include_end: Whether the end key should be included in the range.
+ """
+ if direction is None:
+ direction = KeyRange.ASC
+ assert direction in (KeyRange.ASC, KeyRange.DESC)
+ self.direction = direction
+ self.key_start = key_start
+ self.key_end = key_end
+ self.include_start = include_start
+ self.include_end = include_end
+
+ def __str__(self):
+ if self.include_start:
+ left_side = '['
+ else:
+ left_side = '('
+ if self.include_end:
+ right_side = ']'
+ else:
+      right_side = ')'
+ return '%s%s%s-%s%s' % (self.direction, left_side, repr(self.key_start),
+ repr(self.key_end), right_side)
+
+ def __repr__(self):
+ return ('key_range.KeyRange(key_start=%s,key_end=%s,direction=%s,'
+ 'include_start=%s,include_end=%s)') % (repr(self.key_start),
+ repr(self.key_end),
+ repr(self.direction),
+ repr(self.include_start),
+ repr(self.include_end))
+
+ def filter_query(self, query):
+ """Add query filter to restrict to this key range.
+
+ Args:
+ query: A db.Query instance.
+
+ Returns:
+ The input query restricted to this key range or an empty query if
+ this key range is empty.
+ """
+ assert isinstance(query, db.Query)
+ if self.key_start == self.key_end and not (
+ self.include_start or self.include_end):
+ return EmptyDbQuery()
+ if self.include_start:
+ start_comparator = '>='
+ else:
+ start_comparator = '>'
+ if self.include_end:
+ end_comparator = '<='
+ else:
+ end_comparator = '<'
+ if self.key_start:
+ query.filter('__key__ %s' % start_comparator, self.key_start)
+ if self.key_end:
+ query.filter('__key__ %s' % end_comparator, self.key_end)
+ return query
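+
+  # A minimal sketch (hypothetical db.Model subclass 'Entry'):
+  #   kr = KeyRange(key_start=k1, key_end=k2, include_end=False)
+  #   q = kr.filter_query(Entry.all())
+  # adds the filters '__key__ >= k1' and '__key__ < k2' to the query.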
+
+ def filter_datastore_query(self, query):
+ """Add query filter to restrict to this key range.
+
+ Args:
+ query: A datastore.Query instance.
+
+ Returns:
+ The input query restricted to this key range or an empty query if
+ this key range is empty.
+ """
+ assert isinstance(query, datastore.Query)
+ if self.key_start == self.key_end and not (
+ self.include_start or self.include_end):
+ return EmptyDatastoreQuery(query.kind)
+ if self.include_start:
+ start_comparator = '>='
+ else:
+ start_comparator = '>'
+ if self.include_end:
+ end_comparator = '<='
+ else:
+ end_comparator = '<'
+ if self.key_start:
+ query.update({'__key__ %s' % start_comparator: self.key_start})
+ if self.key_end:
+ query.update({'__key__ %s' % end_comparator: self.key_end})
+ return query
+
+ def __get_direction(self, asc, desc):
+ """Check that self.direction is in (KeyRange.ASC, KeyRange.DESC).
+
+ Args:
+ asc: Argument to return if self.direction is KeyRange.ASC
+ desc: Argument to return if self.direction is KeyRange.DESC
+
+ Returns:
+ asc or desc appropriately
+
+ Raises:
+ KeyRangeError: if self.direction is not in (KeyRange.ASC, KeyRange.DESC).
+ """
+ if self.direction == KeyRange.ASC:
+ return asc
+ elif self.direction == KeyRange.DESC:
+ return desc
+ else:
+      raise KeyRangeError('KeyRange direction unexpected: %s' %
+                          self.direction)
+
+ def make_directed_query(self, kind_class):
+ """Construct a query for this key range, including the scan direction.
+
+ Args:
+ kind_class: A kind implementation class.
+
+ Returns:
+ A db.Query instance.
+
+ Raises:
+ KeyRangeError: if self.direction is not in (KeyRange.ASC, KeyRange.DESC).
+ """
+ direction = self.__get_direction('', '-')
+ query = db.Query(kind_class)
+ query.order('%s__key__' % direction)
+
+ query = self.filter_query(query)
+ return query
+
+ def make_directed_datastore_query(self, kind):
+ """Construct a query for this key range, including the scan direction.
+
+ Args:
+ kind: A string.
+
+ Returns:
+ A datastore.Query instance.
+
+ Raises:
+ KeyRangeError: if self.direction is not in (KeyRange.ASC, KeyRange.DESC).
+ """
+ direction = self.__get_direction(datastore.Query.ASCENDING,
+ datastore.Query.DESCENDING)
+ query = datastore.Query(kind)
+ query.Order(('__key__', direction))
+
+ query = self.filter_datastore_query(query)
+ return query
+
+ def make_ascending_query(self, kind_class):
+ """Construct a query for this key range without setting the scan direction.
+
+ Args:
+ kind_class: A kind implementation class.
+
+ Returns:
+ A db.Query instance.
+ """
+ query = db.Query(kind_class)
+ query.order('__key__')
+
+ query = self.filter_query(query)
+ return query
+
+ def make_ascending_datastore_query(self, kind):
+ """Construct a query for this key range without setting the scan direction.
+
+ Args:
+ kind: A string.
+
+ Returns:
+ A datastore.Query instance.
+ """
+ query = datastore.Query(kind)
+ query.Order(('__key__', datastore.Query.ASCENDING))
+
+ query = self.filter_datastore_query(query)
+ return query
+
+ def split_range(self, batch_size=0):
+ """Split this key range into a list of at most two ranges.
+
+ This method attempts to split the key range approximately in half.
+ Numeric ranges are split in the middle into two equal ranges and
+ string ranges are split lexicographically in the middle. If the
+ key range is smaller than batch_size it is left unsplit.
+
+ Note that splitting is done without knowledge of the distribution
+ of actual entities in the key range, so there is no guarantee (nor
+ any particular reason to believe) that the entities of the range
+ are evenly split.
+
+ Args:
+ batch_size: The maximum size of a key range that should not be split.
+
+ Returns:
+ A list of one or two key ranges covering the same space as this range.
+ """
+ key_start = self.key_start
+ key_end = self.key_end
+ include_start = self.include_start
+ include_end = self.include_end
+
+ key_pairs = []
+ if not key_start:
+ key_pairs.append((key_start, include_start, key_end, include_end,
+ KeyRange.ASC))
+ elif not key_end:
+ key_pairs.append((key_start, include_start, key_end, include_end,
+ KeyRange.DESC))
+ else:
+ key_split = KeyRange.split_keys(key_start, key_end, batch_size)
+ first_include_end = True
+ if key_split == key_start:
+ first_include_end = first_include_end and include_start
+
+ key_pairs.append((key_start, include_start,
+ key_split, first_include_end,
+ KeyRange.DESC))
+
+ second_include_end = include_end
+ if key_split == key_end:
+ second_include_end = False
+ key_pairs.append((key_split, False,
+ key_end, second_include_end,
+ KeyRange.ASC))
+
+ ranges = [KeyRange(key_start=start,
+ include_start=include_start,
+ key_end=end,
+ include_end=include_end,
+ direction=direction)
+ for (start, include_start, end, include_end, direction)
+ in key_pairs]
+
+ return ranges
+
+ def __cmp__(self, other):
+ """Compare two key ranges.
+
+ Key ranges with a value of None for key_start or key_end, are always
+ considered to have include_start=False or include_end=False, respectively,
+ when comparing. Since None indicates an unbounded side of the range,
+ the include specifier is meaningless. The ordering generated is total
+ but somewhat arbitrary.
+
+ Args:
+ other: An object to compare to this one.
+
+ Returns:
+ -1: if this key range is less than other.
+ 0: if this key range is equal to other.
+ 1: if this key range is greater than other.
+ """
+ if not isinstance(other, KeyRange):
+ return 1
+
+ self_list = [self.key_start, self.key_end, self.direction,
+ self.include_start, self.include_end]
+ if not self.key_start:
+ self_list[3] = False
+ if not self.key_end:
+ self_list[4] = False
+
+ other_list = [other.key_start,
+ other.key_end,
+ other.direction,
+ other.include_start,
+ other.include_end]
+ if not other.key_start:
+ other_list[3] = False
+ if not other.key_end:
+ other_list[4] = False
+
+ return cmp(self_list, other_list)
+
+ @staticmethod
+ def bisect_string_range(start, end):
+ """Returns a string that is approximately in the middle of the range.
+
+ (start, end) is treated as a string range, and it is assumed
+ start <= end in the usual lexicographic string ordering. The output key
+ mid is guaranteed to satisfy start <= mid <= end.
+
+ The method proceeds by comparing initial characters of start and
+ end. When the characters are equal, they are appended to the mid
+ string. In the first place that the characters differ, the
+ difference characters are averaged and this average is appended to
+ the mid string. If averaging resulted in rounding down, and
+ additional character is added to the mid string to make up for the
+ rounding down. This extra step is necessary for correctness in
+ the case that the average of the two characters is equal to the
+ character in the start string.
+
+    This method makes the assumption that most keys are ASCII and it
+    attempts to perform splitting within the ASCII range when that
+    results in a valid split.
+
+ Args:
+ start: A string.
+ end: A string such that start <= end.
+
+ Returns:
+ A string mid such that start <= mid <= end.
+ """
+ if start == end:
+ return start
+ start += '\0'
+ end += '\0'
+ midpoint = []
+ expected_max = 127
+ for i in xrange(min(len(start), len(end))):
+ if start[i] == end[i]:
+ midpoint.append(start[i])
+ else:
+ ord_sum = ord(start[i]) + ord(end[i])
+ midpoint.append(unichr(ord_sum / 2))
+ if ord_sum % 2:
+ if len(start) > i + 1:
+ ord_start = ord(start[i+1])
+ else:
+ ord_start = 0
+ if ord_start < expected_max:
+ ord_split = (expected_max + ord_start) / 2
+ else:
+ ord_split = (0xFFFF + ord_start) / 2
+ midpoint.append(unichr(ord_split))
+ break
+ return ''.join(midpoint)
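+
+  # Worked example (assuming plain-ASCII keys): bisect_string_range('app',
+  # 'apz') keeps the shared prefix 'ap' and averages ord('p') and ord('z'),
+  # an even sum, giving 'apu' with no extra rounding character.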
+
+ @staticmethod
+ def split_keys(key_start, key_end, batch_size):
+ """Return a key that is between key_start and key_end inclusive.
+
+ This method compares components of the ancestor paths of key_start
+ and key_end. The first place in the path that differs is
+ approximately split in half. If the kind components differ, a new
+ non-existent kind halfway between the two is used to split the
+ space. If the id_or_name components differ, then a new id_or_name
+ that is halfway between the two is selected. If the lower
+ id_or_name is numeric and the upper id_or_name is a string, then
+    the minimum string key u'\0' is used as the split id_or_name. The
+ key that is returned is the shared portion of the ancestor path
+ followed by the generated split component.
+
+ Args:
+ key_start: A db.Key instance for the lower end of a range.
+ key_end: A db.Key instance for the upper end of a range.
+ batch_size: The maximum size of a range that should not be split.
+
+ Returns:
+ A db.Key instance, k, such that key_start <= k <= key_end.
+ """
+ assert key_start.app() == key_end.app()
+ path1 = key_start.to_path()
+ path2 = key_end.to_path()
+ len1 = len(path1)
+ len2 = len(path2)
+ assert len1 % 2 == 0
+ assert len2 % 2 == 0
+ out_path = []
+ min_path_len = min(len1, len2) / 2
+ for i in xrange(min_path_len):
+ kind1 = path1[2*i]
+ kind2 = path2[2*i]
+
+ if kind1 != kind2:
+ split_kind = KeyRange.bisect_string_range(kind1, kind2)
+ out_path.append(split_kind)
+ out_path.append(unichr(0))
+ break
+
+ last = (len1 == len2 == 2*(i + 1))
+
+ id_or_name1 = path1[2*i + 1]
+ id_or_name2 = path2[2*i + 1]
+ id_or_name_split = KeyRange._split_id_or_name(
+ id_or_name1, id_or_name2, batch_size, last)
+ if id_or_name1 == id_or_name_split:
+ out_path.append(kind1)
+ out_path.append(id_or_name1)
+ else:
+ out_path.append(kind1)
+ out_path.append(id_or_name_split)
+ break
+
+ return db.Key.from_path(*out_path)
+
+ @staticmethod
+ def _split_id_or_name(id_or_name1, id_or_name2, batch_size, maintain_batches):
+    """Return an id_or_name that is between id_or_name1 and id_or_name2.
+
+ Attempts to split the range [id_or_name1, id_or_name2] in half,
+ unless maintain_batches is true and the size of the range
+ [id_or_name1, id_or_name2] is less than or equal to batch_size.
+
+ Args:
+      id_or_name1: A number or string, or the id_or_name component of a key.
+      id_or_name2: A number or string, or the id_or_name component of a key.
+ batch_size: The range size that will not be split if maintain_batches
+ is true.
+ maintain_batches: A boolean for whether to keep small ranges intact.
+
+ Returns:
+ An id_or_name such that id_or_name1 <= id_or_name <= id_or_name2.
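+
+    Example (an illustration traced by hand from the implementation
+    below; not part of the original contract):
+
+    >>> KeyRange._split_id_or_name(1, 100, 10, False)
+    50
+    >>> KeyRange._split_id_or_name(1, 5, 10, True)
+    1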
+ """
+ if (isinstance(id_or_name1, (int, long)) and
+ isinstance(id_or_name2, (int, long))):
+ if not maintain_batches or id_or_name2 - id_or_name1 > batch_size:
+ return (id_or_name1 + id_or_name2) / 2
+ else:
+ return id_or_name1
+ elif (isinstance(id_or_name1, basestring) and
+ isinstance(id_or_name2, basestring)):
+ return KeyRange.bisect_string_range(id_or_name1, id_or_name2)
+ else:
+ assert (isinstance(id_or_name1, (int, long)) and
+ isinstance(id_or_name2, basestring))
+ return unichr(0)
+
+ def to_json(self):
+ """Serialize KeyRange to json.
+
+ Returns:
+ string with KeyRange json representation.
+ """
+ if simplejson is None:
+ raise SimplejsonUnavailableError(
+ "JSON functionality requires simplejson to be available")
+
+ def key_to_str(key):
+ if key:
+ return str(key)
+ else:
+ return None
+
+ return simplejson.dumps({
+ "direction": self.direction,
+ "key_start": key_to_str(self.key_start),
+ "key_end": key_to_str(self.key_end),
+ "include_start": self.include_start,
+ "include_end": self.include_end,
+ }, sort_keys=True)
+
+
+ @staticmethod
+ def from_json(json_str):
+ """Deserialize KeyRange from its json representation.
+
+ Args:
+      json_str: string with the json representation created by to_json().
+
+ Returns:
+ deserialized KeyRange instance.
+ """
+ if simplejson is None:
+ raise SimplejsonUnavailableError(
+ "JSON functionality requires simplejson to be available")
+
+ def key_from_str(key_str):
+ if key_str:
+ return db.Key(key_str)
+ else:
+ return None
+
+ json = simplejson.loads(json_str)
+ return KeyRange(key_from_str(json["key_start"]),
+ key_from_str(json["key_end"]),
+ json["direction"],
+ json["include_start"],
+ json["include_end"])
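+
+  # Round-trip sketch (explanatory comment, not in the original; the
+  # positional constructor arguments are inferred from the call above):
+  #   kr = KeyRange(None, None, KeyRange.ASC, False, False)
+  #   assert KeyRange.from_json(kr.to_json()) == kr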
diff --git a/google_appengine/google/appengine/ext/key_range/__init__.pyc b/google_appengine/google/appengine/ext/key_range/__init__.pyc
new file mode 100644
index 0000000..dd44920
--- /dev/null
+++ b/google_appengine/google/appengine/ext/key_range/__init__.pyc
Binary files differ
diff --git a/google_appengine/google/appengine/ext/preload/__init__.py b/google_appengine/google/appengine/ext/preload/__init__.py
new file mode 100755
index 0000000..ba1cee9
--- /dev/null
+++ b/google_appengine/google/appengine/ext/preload/__init__.py
@@ -0,0 +1,213 @@
+#!/usr/bin/env python
+#
+# Copyright 2007 Google Inc.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
+
+"""Preloads many modules to reduce loading time of third-party code."""
+
+
+
+
+import os
+_original_os_urandom = os.urandom
+def os_urandom_replacement(n):
+ raise NotImplementedError
+os.urandom = os_urandom_replacement
+import random
+os.urandom = _original_os_urandom
+random._urandom = _original_os_urandom
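+
+# Explanatory note (comment added for clarity, not in the original): the
+# swap above appears intended to keep the import of random, which seeds a
+# default generator at import time, from invoking os.urandom in an
+# environment where it may be unavailable; both hooks are then restored
+# to the real implementation.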
+
+import BaseHTTPServer
+import Bastion
+import CGIHTTPServer
+import ConfigParser
+import Cookie
+import DocXMLRPCServer
+import HTMLParser
+import MimeWriter
+import Queue
+import SimpleHTTPServer
+import SimpleXMLRPCServer
+import SocketServer
+import StringIO
+import UserDict
+import UserList
+import UserString
+import aifc
+import anydbm
+import atexit
+import audiodev
+import base64
+import bdb
+import binhex
+import bisect
+import bz2
+import calendar
+import cgi
+import cgitb
+import chunk
+import cmd
+import code
+import codecs
+import codeop
+import colorsys
+import commands
+import cookielib
+import copy
+import copy_reg
+import csv
+import datetime
+import difflib
+import dircache
+import dis
+import doctest
+import dumbdbm
+import filecmp
+import fileinput
+import fnmatch
+import formatter
+import fpformat
+import ftplib
+import getopt
+import getpass
+import gettext
+import glob
+import gzip
+import heapq
+import hmac
+import htmlentitydefs
+import htmllib
+import httplib
+import imaplib
+import imghdr
+import imputil
+import inspect
+import keyword
+import linecache
+import locale
+import logging
+import macpath
+import macurl2path
+import mailbox
+import mailcap
+import markupbase
+import math
+import md5
+import mhlib
+import mimetools
+import mimetypes
+import modulefinder
+import multifile
+import mutex
+import netrc
+import new
+import nntplib
+import ntpath
+import nturl2path
+import opcode
+import optparse
+import os2emxpath
+import pdb
+import pickle
+import pickletools
+import pipes
+import pkgutil
+import popen2
+import poplib
+import posixpath
+import pprint
+import profile
+import pstats
+import pyclbr
+import pydoc
+import quopri
+import re
+import repr
+import rfc822
+import robotparser
+import sched
+import sets
+import sgmllib
+import sha
+import shelve
+import shlex
+import shutil
+import site
+import smtplib
+import sndhdr
+import socket
+import stat
+import statvfs
+import string
+import stringold
+import stringprep
+import struct
+import sunau
+import sunaudio
+import symbol
+import sys
+import tabnanny
+import tarfile
+import telnetlib
+import tempfile
+import textwrap
+import time
+import timeit
+import toaiff
+import token
+import tokenize
+import trace
+import traceback
+import types
+import unittest
+import urllib
+import urllib2
+import urlparse
+import uu
+import uuid
+import warnings
+import wave
+import weakref
+import whichdb
+import xdrlib
+import xml.parsers.expat
+import xml.dom
+import xml.sax
+import xmlrpclib
+import zipfile
+import zlib
+
+import neo_cs
+import neo_util
+import webob
+import wsgiref.handlers
+
+from google.appengine.api import datastore
+from google.appengine.api import images
+from google.appengine.api import mail
+from google.appengine.api import memcache
+from google.appengine.api import urlfetch
+from google.appengine.api import users
+
+from google.appengine.ext import bulkload
+from google.appengine.ext import db
+from google.appengine.ext import gql
+from google.appengine.ext import search
+from google.appengine.ext import webapp
+
+from google.appengine.runtime import apiproxy
+
+if __name__ == '__main__':
+ pass
diff --git a/google_appengine/google/appengine/ext/remote_api/__init__.py b/google_appengine/google/appengine/ext/remote_api/__init__.py
new file mode 100644
index 0000000..c33ae80
--- /dev/null
+++ b/google_appengine/google/appengine/ext/remote_api/__init__.py
@@ -0,0 +1,16 @@
+#!/usr/bin/env python
+#
+# Copyright 2007 Google Inc.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
diff --git a/google_appengine/google/appengine/ext/remote_api/__init__.pyc b/google_appengine/google/appengine/ext/remote_api/__init__.pyc
new file mode 100644
index 0000000..ab8361b
--- /dev/null
+++ b/google_appengine/google/appengine/ext/remote_api/__init__.pyc
Binary files differ
diff --git a/google_appengine/google/appengine/ext/remote_api/handler.py b/google_appengine/google/appengine/ext/remote_api/handler.py
new file mode 100755
index 0000000..a5cceb4
--- /dev/null
+++ b/google_appengine/google/appengine/ext/remote_api/handler.py
@@ -0,0 +1,319 @@
+#!/usr/bin/env python
+#
+# Copyright 2007 Google Inc.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
+
+"""A handler that exports various App Engine services over HTTP.
+
+You can export this handler in your app by adding it directly to app.yaml's
+list of handlers:
+
+ handlers:
+ - url: /remote_api
+ script: $PYTHON_LIB/google/appengine/ext/remote_api/handler.py
+ login: admin
+
+Then, you can use remote_api_stub to remotely access services exported by this
+handler. See the documentation in remote_api_stub.py for details on how to do
+this.
+
+Using this handler without specifying "login: admin" would be extremely unwise.
+So unwise that the default handler insists on checking for itself.
+"""
+
+
+
+
+
+import google
+import logging
+import os
+import pickle
+import sha
+import wsgiref.handlers
+import yaml
+
+from google.appengine.api import api_base_pb
+from google.appengine.api import apiproxy_stub
+from google.appengine.api import apiproxy_stub_map
+from google.appengine.api import datastore_errors
+from google.appengine.api import mail_service_pb
+from google.appengine.api import urlfetch_service_pb
+from google.appengine.api import users
+from google.appengine.api.capabilities import capability_service_pb
+from google.appengine.api.images import images_service_pb
+from google.appengine.api.memcache import memcache_service_pb
+from google.appengine.datastore import datastore_pb
+from google.appengine.ext import webapp
+from google.appengine.ext.remote_api import remote_api_pb
+from google.appengine.runtime import apiproxy_errors
+
+
+class RemoteDatastoreStub(apiproxy_stub.APIProxyStub):
+ """Provides a stub that permits execution of stateful datastore queries.
+
+ Some operations aren't possible using the standard interface. Notably,
+ datastore RunQuery operations internally store a cursor that is referenced in
+ later Next calls, and cleaned up at the end of each request. Because every
+ call to ApiCallHandler takes place in its own request, this isn't possible.
+
+ To work around this, RemoteDatastoreStub provides its own implementation of
+ RunQuery that immediately returns the query results.
+ """
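+
+  # Explanatory sketch (comment added for clarity, not in the original):
+  # the override below answers one 'remote_datastore'/'RunQuery' call by
+  # issuing the real RunQuery and, when no results come back inline, a
+  # single Next in the same request, so no cursor needs to outlive the
+  # HTTP request.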
+
+ def _Dynamic_RunQuery(self, request, response):
+ """Handle a RunQuery request.
+
+ We handle RunQuery by executing a Query and a Next and returning the result
+ of the Next request.
+ """
+ runquery_response = datastore_pb.QueryResult()
+ apiproxy_stub_map.MakeSyncCall('datastore_v3', 'RunQuery',
+ request, runquery_response)
+ if runquery_response.result_size() > 0:
+ response.CopyFrom(runquery_response)
+ return
+
+ next_request = datastore_pb.NextRequest()
+ next_request.mutable_cursor().CopyFrom(runquery_response.cursor())
+ next_request.set_count(request.limit())
+ apiproxy_stub_map.MakeSyncCall('datastore_v3', 'Next',
+ next_request, response)
+
+ def _Dynamic_Transaction(self, request, response):
+ """Handle a Transaction request.
+
+ We handle transactions by accumulating Put requests on the client end, as
+ well as recording the key and hash of Get requests. When Commit is called,
+ Transaction is invoked, which verifies that all the entities in the
+ precondition list still exist and their hashes match, then performs a
+ transaction of its own to make the updates.
+ """
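+    # Explanatory note (comment added for clarity, not in the original):
+    # each precondition pairs an entity key with the sha hash of the entity
+    # bytes observed when it was read; a mismatch below means another writer
+    # committed in the meantime, so we fail with CONCURRENT_TRANSACTION.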
+ tx = datastore_pb.Transaction()
+ apiproxy_stub_map.MakeSyncCall('datastore_v3', 'BeginTransaction',
+ api_base_pb.VoidProto(), tx)
+
+ preconditions = request.precondition_list()
+ if preconditions:
+ get_request = datastore_pb.GetRequest()
+ get_request.mutable_transaction().CopyFrom(tx)
+ for precondition in preconditions:
+ key = get_request.add_key()
+ key.CopyFrom(precondition.key())
+ get_response = datastore_pb.GetResponse()
+ apiproxy_stub_map.MakeSyncCall('datastore_v3', 'Get', get_request,
+ get_response)
+ entities = get_response.entity_list()
+ assert len(entities) == request.precondition_size()
+ for precondition, entity in zip(preconditions, entities):
+ if precondition.has_hash() != entity.has_entity():
+ raise apiproxy_errors.ApplicationError(
+ datastore_pb.Error.CONCURRENT_TRANSACTION,
+ "Transaction precondition failed.")
+ elif entity.has_entity():
+ entity_hash = sha.new(entity.entity().Encode()).digest()
+ if precondition.hash() != entity_hash:
+ raise apiproxy_errors.ApplicationError(
+ datastore_pb.Error.CONCURRENT_TRANSACTION,
+ "Transaction precondition failed.")
+
+ if request.has_puts():
+ put_request = request.puts()
+ put_request.mutable_transaction().CopyFrom(tx)
+ apiproxy_stub_map.MakeSyncCall('datastore_v3', 'Put',
+ put_request, response)
+
+ if request.has_deletes():
+ delete_request = request.deletes()
+ delete_request.mutable_transaction().CopyFrom(tx)
+ apiproxy_stub_map.MakeSyncCall('datastore_v3', 'Delete',
+ delete_request, api_base_pb.VoidProto())
+
+ apiproxy_stub_map.MakeSyncCall('datastore_v3', 'Commit', tx,
+ api_base_pb.VoidProto())
+
+ def _Dynamic_GetIDs(self, request, response):
+ """Fetch unique IDs for a set of paths."""
+ for entity in request.entity_list():
+ assert entity.property_size() == 0
+ assert entity.raw_property_size() == 0
+ assert entity.entity_group().element_size() == 0
+ lastpart = entity.key().path().element_list()[-1]
+ assert lastpart.id() == 0 and not lastpart.has_name()
+
+ tx = datastore_pb.Transaction()
+ apiproxy_stub_map.MakeSyncCall('datastore_v3', 'BeginTransaction',
+ api_base_pb.VoidProto(), tx)
+
+ apiproxy_stub_map.MakeSyncCall('datastore_v3', 'Put', request, response)
+
+ apiproxy_stub_map.MakeSyncCall('datastore_v3', 'Rollback', tx,
+ api_base_pb.VoidProto())
+
+
+SERVICE_PB_MAP = {
+ 'capability_service': {
+ 'IsEnabled': (capability_service_pb.IsEnabledRequest,
+ capability_service_pb.IsEnabledResponse),
+ },
+ 'datastore_v3': {
+ 'Get': (datastore_pb.GetRequest, datastore_pb.GetResponse),
+ 'Put': (datastore_pb.PutRequest, datastore_pb.PutResponse),
+ 'Delete': (datastore_pb.DeleteRequest, datastore_pb.DeleteResponse),
+ 'Count': (datastore_pb.Query, api_base_pb.Integer64Proto),
+ 'GetIndices': (api_base_pb.StringProto, datastore_pb.CompositeIndices),
+ },
+ 'images': {
+ 'Transform': (images_service_pb.ImagesTransformRequest,
+ images_service_pb.ImagesTransformResponse),
+ 'Composite': (images_service_pb.ImagesCompositeRequest,
+ images_service_pb.ImagesCompositeResponse),
+ 'Histogram': (images_service_pb.ImagesHistogramRequest,
+ images_service_pb.ImagesHistogramResponse),
+ },
+ 'mail': {
+ 'Send': (mail_service_pb.MailMessage, api_base_pb.VoidProto),
+ 'SendToAdmins': (mail_service_pb.MailMessage, api_base_pb.VoidProto),
+ },
+ 'memcache': {
+ 'Get': (memcache_service_pb.MemcacheGetRequest,
+ memcache_service_pb.MemcacheGetResponse),
+ 'Set': (memcache_service_pb.MemcacheSetRequest,
+ memcache_service_pb.MemcacheSetResponse),
+ 'Delete': (memcache_service_pb.MemcacheDeleteRequest,
+ memcache_service_pb.MemcacheDeleteResponse),
+ 'Increment': (memcache_service_pb.MemcacheIncrementRequest,
+ memcache_service_pb.MemcacheIncrementResponse),
+ 'FlushAll': (memcache_service_pb.MemcacheFlushRequest,
+ memcache_service_pb.MemcacheFlushResponse),
+ 'Stats': (memcache_service_pb.MemcacheStatsRequest,
+ memcache_service_pb.MemcacheStatsResponse),
+ },
+ 'remote_datastore': {
+ 'RunQuery': (datastore_pb.Query, datastore_pb.QueryResult),
+ 'Transaction': (remote_api_pb.TransactionRequest,
+ datastore_pb.PutResponse),
+ 'GetIDs': (remote_api_pb.PutRequest, datastore_pb.PutResponse),
+ },
+ 'urlfetch': {
+ 'Fetch': (urlfetch_service_pb.URLFetchRequest,
+ urlfetch_service_pb.URLFetchResponse),
+ },
+}
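+
+
+# Dispatch note (explanatory comment, not in the original): ExecuteRequest in
+# ApiCallHandler below looks up (request_class, response_class) in this map,
+# so a call naming service 'datastore_v3' and method 'Get' is decoded as a
+# datastore_pb.GetRequest and answered with a datastore_pb.GetResponse.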
+
+
+class ApiCallHandler(webapp.RequestHandler):
+ """A webapp handler that accepts API calls over HTTP and executes them."""
+
+ LOCAL_STUBS = {
+ 'remote_datastore': RemoteDatastoreStub('remote_datastore'),
+ }
+
+ def CheckIsAdmin(self):
+ if not users.is_current_user_admin():
+ self.response.set_status(401)
+ self.response.out.write(
+ "You must be logged in as an administrator to access this.")
+ self.response.headers['Content-Type'] = 'text/plain'
+ return False
+ elif 'X-appcfg-api-version' not in self.request.headers:
+ self.response.set_status(403)
+      self.response.out.write("This request did not contain a necessary header.")
+ self.response.headers['Content-Type'] = 'text/plain'
+ return False
+ return True
+
+
+ def get(self):
+ """Handle a GET. Just show an info page."""
+ if not self.CheckIsAdmin():
+ return
+
+ rtok = self.request.get('rtok', '0')
+ app_info = {
+ 'app_id': os.environ['APPLICATION_ID'],
+ 'rtok': rtok
+ }
+
+ self.response.headers['Content-Type'] = 'text/plain'
+ self.response.out.write(yaml.dump(app_info))
+
+ def post(self):
+ """Handle POST requests by executing the API call."""
+ if not self.CheckIsAdmin():
+ return
+
+ self.response.headers['Content-Type'] = 'application/octet-stream'
+ response = remote_api_pb.Response()
+ try:
+ request = remote_api_pb.Request()
+ request.ParseFromString(self.request.body)
+ response_data = self.ExecuteRequest(request)
+ response.mutable_response().set_contents(response_data.Encode())
+ self.response.set_status(200)
+ except Exception, e:
+ logging.exception('Exception while handling %s', request)
+ self.response.set_status(200)
+ response.mutable_exception().set_contents(pickle.dumps(e))
+ if isinstance(e, datastore_errors.Error):
+ application_error = response.mutable_application_error()
+        application_error.set_code(e.application_error)
+        application_error.set_detail(e.error_detail)
+ self.response.out.write(response.Encode())
+
+ def ExecuteRequest(self, request):
+ """Executes an API invocation and returns the response object."""
+ service = request.service_name()
+ method = request.method()
+ service_methods = SERVICE_PB_MAP.get(service, {})
+ request_class, response_class = service_methods.get(method, (None, None))
+ if not request_class:
+ raise apiproxy_errors.CallNotFoundError()
+
+ request_data = request_class()
+ request_data.ParseFromString(request.request().contents())
+ response_data = response_class()
+
+ if service in self.LOCAL_STUBS:
+ self.LOCAL_STUBS[service].MakeSyncCall(service, method, request_data,
+ response_data)
+ else:
+ apiproxy_stub_map.MakeSyncCall(service, method, request_data,
+ response_data)
+
+ return response_data
+
+ def InfoPage(self):
+ """Renders an information page."""
+ return """
+<!DOCTYPE html PUBLIC "-//W3C//DTD XHTML 1.0 Strict//EN"
+ "http://www.w3.org/TR/xhtml1/DTD/xhtml1-strict.dtd">
+<html><head>
+<title>App Engine API endpoint.</title>
+</head><body>
+<h1>App Engine API endpoint.</h1>
+<p>This is an endpoint for the App Engine remote API interface.
+Point your stubs (google.appengine.ext.remote_api.remote_api_stub) here.</p>
+</body>
+</html>"""
+
+
+def main():
+ application = webapp.WSGIApplication([('.*', ApiCallHandler)])
+ wsgiref.handlers.CGIHandler().run(application)
+
+
+if __name__ == '__main__':
+ main()
diff --git a/google_appengine/google/appengine/ext/remote_api/remote_api_pb.py b/google_appengine/google/appengine/ext/remote_api/remote_api_pb.py
new file mode 100644
index 0000000..bd6a777
--- /dev/null
+++ b/google_appengine/google/appengine/ext/remote_api/remote_api_pb.py
@@ -0,0 +1,809 @@
+#!/usr/bin/env python
+#
+# Copyright 2007 Google Inc.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
+
+from google.net.proto import ProtocolBuffer
+import array
+import dummy_thread as thread
+
+__pychecker__ = """maxreturns=0 maxbranches=0 no-callinit
+ unusednames=printElemNumber,debug_strs no-special"""
+
+from google.net.proto.RawMessage import RawMessage
+from google.appengine.datastore.datastore_pb import PutRequest
+from google.appengine.datastore.datastore_pb import DeleteRequest
+from google.appengine.datastore.entity_pb import Reference
+class Request(ProtocolBuffer.ProtocolMessage):
+ has_service_name_ = 0
+ service_name_ = ""
+ has_method_ = 0
+ method_ = ""
+ has_request_ = 0
+
+ def __init__(self, contents=None):
+ self.request_ = RawMessage()
+ if contents is not None: self.MergeFromString(contents)
+
+ def service_name(self): return self.service_name_
+
+ def set_service_name(self, x):
+ self.has_service_name_ = 1
+ self.service_name_ = x
+
+ def clear_service_name(self):
+ if self.has_service_name_:
+ self.has_service_name_ = 0
+ self.service_name_ = ""
+
+ def has_service_name(self): return self.has_service_name_
+
+ def method(self): return self.method_
+
+ def set_method(self, x):
+ self.has_method_ = 1
+ self.method_ = x
+
+ def clear_method(self):
+ if self.has_method_:
+ self.has_method_ = 0
+ self.method_ = ""
+
+ def has_method(self): return self.has_method_
+
+ def request(self): return self.request_
+
+ def mutable_request(self): self.has_request_ = 1; return self.request_
+
+ def clear_request(self):self.has_request_ = 0; self.request_.Clear()
+
+ def has_request(self): return self.has_request_
+
+
+ def MergeFrom(self, x):
+ assert x is not self
+ if (x.has_service_name()): self.set_service_name(x.service_name())
+ if (x.has_method()): self.set_method(x.method())
+ if (x.has_request()): self.mutable_request().MergeFrom(x.request())
+
+ def Equals(self, x):
+ if x is self: return 1
+ if self.has_service_name_ != x.has_service_name_: return 0
+ if self.has_service_name_ and self.service_name_ != x.service_name_: return 0
+ if self.has_method_ != x.has_method_: return 0
+ if self.has_method_ and self.method_ != x.method_: return 0
+ if self.has_request_ != x.has_request_: return 0
+ if self.has_request_ and self.request_ != x.request_: return 0
+ return 1
+
+ def IsInitialized(self, debug_strs=None):
+ initialized = 1
+ if (not self.has_service_name_):
+ initialized = 0
+ if debug_strs is not None:
+ debug_strs.append('Required field: service_name not set.')
+ if (not self.has_method_):
+ initialized = 0
+ if debug_strs is not None:
+ debug_strs.append('Required field: method not set.')
+ if (not self.has_request_):
+ initialized = 0
+ if debug_strs is not None:
+ debug_strs.append('Required field: request not set.')
+ elif not self.request_.IsInitialized(debug_strs): initialized = 0
+ return initialized
+
+ def ByteSize(self):
+ n = 0
+ n += self.lengthString(len(self.service_name_))
+ n += self.lengthString(len(self.method_))
+ n += self.lengthString(self.request_.ByteSize())
+ return n + 3
+
+ def Clear(self):
+ self.clear_service_name()
+ self.clear_method()
+ self.clear_request()
+
+ def OutputUnchecked(self, out):
+ out.putVarInt32(18)
+ out.putPrefixedString(self.service_name_)
+ out.putVarInt32(26)
+ out.putPrefixedString(self.method_)
+ out.putVarInt32(34)
+ out.putVarInt32(self.request_.ByteSize())
+ self.request_.OutputUnchecked(out)
+
+ def TryMerge(self, d):
+ while d.avail() > 0:
+ tt = d.getVarInt32()
+ if tt == 18:
+ self.set_service_name(d.getPrefixedString())
+ continue
+ if tt == 26:
+ self.set_method(d.getPrefixedString())
+ continue
+ if tt == 34:
+ length = d.getVarInt32()
+ tmp = ProtocolBuffer.Decoder(d.buffer(), d.pos(), d.pos() + length)
+ d.skip(length)
+ self.mutable_request().TryMerge(tmp)
+ continue
+ if (tt == 0): raise ProtocolBuffer.ProtocolBufferDecodeError
+ d.skipData(tt)
+
+
+ def __str__(self, prefix="", printElemNumber=0):
+ res=""
+ if self.has_service_name_: res+=prefix+("service_name: %s\n" % self.DebugFormatString(self.service_name_))
+ if self.has_method_: res+=prefix+("method: %s\n" % self.DebugFormatString(self.method_))
+ if self.has_request_:
+ res+=prefix+"request <\n"
+ res+=self.request_.__str__(prefix + " ", printElemNumber)
+ res+=prefix+">\n"
+ return res
+
+
+ def _BuildTagLookupTable(sparse, maxtag, default=None):
+ return tuple([sparse.get(i, default) for i in xrange(0, 1+maxtag)])
+
+ kservice_name = 2
+ kmethod = 3
+ krequest = 4
+
+ _TEXT = _BuildTagLookupTable({
+ 0: "ErrorCode",
+ 2: "service_name",
+ 3: "method",
+ 4: "request",
+ }, 4)
+
+ _TYPES = _BuildTagLookupTable({
+ 0: ProtocolBuffer.Encoder.NUMERIC,
+ 2: ProtocolBuffer.Encoder.STRING,
+ 3: ProtocolBuffer.Encoder.STRING,
+ 4: ProtocolBuffer.Encoder.STRING,
+ }, 4, ProtocolBuffer.Encoder.MAX_TYPE)
+
+ _STYLE = """"""
+ _STYLE_CONTENT_TYPE = """"""
+class ApplicationError(ProtocolBuffer.ProtocolMessage):
+ has_code_ = 0
+ code_ = 0
+ has_detail_ = 0
+ detail_ = ""
+
+ def __init__(self, contents=None):
+ if contents is not None: self.MergeFromString(contents)
+
+ def code(self): return self.code_
+
+ def set_code(self, x):
+ self.has_code_ = 1
+ self.code_ = x
+
+ def clear_code(self):
+ if self.has_code_:
+ self.has_code_ = 0
+ self.code_ = 0
+
+ def has_code(self): return self.has_code_
+
+ def detail(self): return self.detail_
+
+ def set_detail(self, x):
+ self.has_detail_ = 1
+ self.detail_ = x
+
+ def clear_detail(self):
+ if self.has_detail_:
+ self.has_detail_ = 0
+ self.detail_ = ""
+
+ def has_detail(self): return self.has_detail_
+
+
+ def MergeFrom(self, x):
+ assert x is not self
+ if (x.has_code()): self.set_code(x.code())
+ if (x.has_detail()): self.set_detail(x.detail())
+
+ def Equals(self, x):
+ if x is self: return 1
+ if self.has_code_ != x.has_code_: return 0
+ if self.has_code_ and self.code_ != x.code_: return 0
+ if self.has_detail_ != x.has_detail_: return 0
+ if self.has_detail_ and self.detail_ != x.detail_: return 0
+ return 1
+
+ def IsInitialized(self, debug_strs=None):
+ initialized = 1
+ if (not self.has_code_):
+ initialized = 0
+ if debug_strs is not None:
+ debug_strs.append('Required field: code not set.')
+ if (not self.has_detail_):
+ initialized = 0
+ if debug_strs is not None:
+ debug_strs.append('Required field: detail not set.')
+ return initialized
+
+ def ByteSize(self):
+ n = 0
+ n += self.lengthVarInt64(self.code_)
+ n += self.lengthString(len(self.detail_))
+ return n + 2
+
+ def Clear(self):
+ self.clear_code()
+ self.clear_detail()
+
+ def OutputUnchecked(self, out):
+ out.putVarInt32(8)
+ out.putVarInt32(self.code_)
+ out.putVarInt32(18)
+ out.putPrefixedString(self.detail_)
+
+ def TryMerge(self, d):
+ while d.avail() > 0:
+ tt = d.getVarInt32()
+ if tt == 8:
+ self.set_code(d.getVarInt32())
+ continue
+ if tt == 18:
+ self.set_detail(d.getPrefixedString())
+ continue
+ if (tt == 0): raise ProtocolBuffer.ProtocolBufferDecodeError
+ d.skipData(tt)
+
+
+ def __str__(self, prefix="", printElemNumber=0):
+ res=""
+ if self.has_code_: res+=prefix+("code: %s\n" % self.DebugFormatInt32(self.code_))
+ if self.has_detail_: res+=prefix+("detail: %s\n" % self.DebugFormatString(self.detail_))
+ return res
+
+
+ def _BuildTagLookupTable(sparse, maxtag, default=None):
+ return tuple([sparse.get(i, default) for i in xrange(0, 1+maxtag)])
+
+ kcode = 1
+ kdetail = 2
+
+ _TEXT = _BuildTagLookupTable({
+ 0: "ErrorCode",
+ 1: "code",
+ 2: "detail",
+ }, 2)
+
+ _TYPES = _BuildTagLookupTable({
+ 0: ProtocolBuffer.Encoder.NUMERIC,
+ 1: ProtocolBuffer.Encoder.NUMERIC,
+ 2: ProtocolBuffer.Encoder.STRING,
+ }, 2, ProtocolBuffer.Encoder.MAX_TYPE)
+
+ _STYLE = """"""
+ _STYLE_CONTENT_TYPE = """"""
+class Response(ProtocolBuffer.ProtocolMessage):
+ has_response_ = 0
+ response_ = None
+ has_exception_ = 0
+ exception_ = None
+ has_application_error_ = 0
+ application_error_ = None
+ has_java_exception_ = 0
+ java_exception_ = None
+
+ def __init__(self, contents=None):
+ self.lazy_init_lock_ = thread.allocate_lock()
+ if contents is not None: self.MergeFromString(contents)
+
+ def response(self):
+ if self.response_ is None:
+ self.lazy_init_lock_.acquire()
+ try:
+ if self.response_ is None: self.response_ = RawMessage()
+ finally:
+ self.lazy_init_lock_.release()
+ return self.response_
+
+ def mutable_response(self): self.has_response_ = 1; return self.response()
+
+ def clear_response(self):
+ if self.has_response_:
+ self.has_response_ = 0;
+ if self.response_ is not None: self.response_.Clear()
+
+ def has_response(self): return self.has_response_
+
+ def exception(self):
+ if self.exception_ is None:
+ self.lazy_init_lock_.acquire()
+ try:
+ if self.exception_ is None: self.exception_ = RawMessage()
+ finally:
+ self.lazy_init_lock_.release()
+ return self.exception_
+
+ def mutable_exception(self): self.has_exception_ = 1; return self.exception()
+
+ def clear_exception(self):
+ if self.has_exception_:
+ self.has_exception_ = 0;
+ if self.exception_ is not None: self.exception_.Clear()
+
+ def has_exception(self): return self.has_exception_
+
+ def application_error(self):
+ if self.application_error_ is None:
+ self.lazy_init_lock_.acquire()
+ try:
+ if self.application_error_ is None: self.application_error_ = ApplicationError()
+ finally:
+ self.lazy_init_lock_.release()
+ return self.application_error_
+
+ def mutable_application_error(self): self.has_application_error_ = 1; return self.application_error()
+
+ def clear_application_error(self):
+ if self.has_application_error_:
+ self.has_application_error_ = 0;
+ if self.application_error_ is not None: self.application_error_.Clear()
+
+ def has_application_error(self): return self.has_application_error_
+
+ def java_exception(self):
+ if self.java_exception_ is None:
+ self.lazy_init_lock_.acquire()
+ try:
+ if self.java_exception_ is None: self.java_exception_ = RawMessage()
+ finally:
+ self.lazy_init_lock_.release()
+ return self.java_exception_
+
+ def mutable_java_exception(self): self.has_java_exception_ = 1; return self.java_exception()
+
+ def clear_java_exception(self):
+ if self.has_java_exception_:
+ self.has_java_exception_ = 0;
+ if self.java_exception_ is not None: self.java_exception_.Clear()
+
+ def has_java_exception(self): return self.has_java_exception_
+
+
+ def MergeFrom(self, x):
+ assert x is not self
+ if (x.has_response()): self.mutable_response().MergeFrom(x.response())
+ if (x.has_exception()): self.mutable_exception().MergeFrom(x.exception())
+ if (x.has_application_error()): self.mutable_application_error().MergeFrom(x.application_error())
+ if (x.has_java_exception()): self.mutable_java_exception().MergeFrom(x.java_exception())
+
+ def Equals(self, x):
+ if x is self: return 1
+ if self.has_response_ != x.has_response_: return 0
+ if self.has_response_ and self.response_ != x.response_: return 0
+ if self.has_exception_ != x.has_exception_: return 0
+ if self.has_exception_ and self.exception_ != x.exception_: return 0
+ if self.has_application_error_ != x.has_application_error_: return 0
+ if self.has_application_error_ and self.application_error_ != x.application_error_: return 0
+ if self.has_java_exception_ != x.has_java_exception_: return 0
+ if self.has_java_exception_ and self.java_exception_ != x.java_exception_: return 0
+ return 1
+
+ def IsInitialized(self, debug_strs=None):
+ initialized = 1
+ if (self.has_response_ and not self.response_.IsInitialized(debug_strs)): initialized = 0
+ if (self.has_exception_ and not self.exception_.IsInitialized(debug_strs)): initialized = 0
+ if (self.has_application_error_ and not self.application_error_.IsInitialized(debug_strs)): initialized = 0
+ if (self.has_java_exception_ and not self.java_exception_.IsInitialized(debug_strs)): initialized = 0
+ return initialized
+
+ def ByteSize(self):
+ n = 0
+ if (self.has_response_): n += 1 + self.lengthString(self.response_.ByteSize())
+ if (self.has_exception_): n += 1 + self.lengthString(self.exception_.ByteSize())
+ if (self.has_application_error_): n += 1 + self.lengthString(self.application_error_.ByteSize())
+ if (self.has_java_exception_): n += 1 + self.lengthString(self.java_exception_.ByteSize())
+ return n + 0
+
+ def Clear(self):
+ self.clear_response()
+ self.clear_exception()
+ self.clear_application_error()
+ self.clear_java_exception()
+
+ def OutputUnchecked(self, out):
+ if (self.has_response_):
+ out.putVarInt32(10)
+ out.putVarInt32(self.response_.ByteSize())
+ self.response_.OutputUnchecked(out)
+ if (self.has_exception_):
+ out.putVarInt32(18)
+ out.putVarInt32(self.exception_.ByteSize())
+ self.exception_.OutputUnchecked(out)
+ if (self.has_application_error_):
+ out.putVarInt32(26)
+ out.putVarInt32(self.application_error_.ByteSize())
+ self.application_error_.OutputUnchecked(out)
+ if (self.has_java_exception_):
+ out.putVarInt32(34)
+ out.putVarInt32(self.java_exception_.ByteSize())
+ self.java_exception_.OutputUnchecked(out)
+
+ def TryMerge(self, d):
+ while d.avail() > 0:
+ tt = d.getVarInt32()
+ if tt == 10:
+ length = d.getVarInt32()
+ tmp = ProtocolBuffer.Decoder(d.buffer(), d.pos(), d.pos() + length)
+ d.skip(length)
+ self.mutable_response().TryMerge(tmp)
+ continue
+ if tt == 18:
+ length = d.getVarInt32()
+ tmp = ProtocolBuffer.Decoder(d.buffer(), d.pos(), d.pos() + length)
+ d.skip(length)
+ self.mutable_exception().TryMerge(tmp)
+ continue
+ if tt == 26:
+ length = d.getVarInt32()
+ tmp = ProtocolBuffer.Decoder(d.buffer(), d.pos(), d.pos() + length)
+ d.skip(length)
+ self.mutable_application_error().TryMerge(tmp)
+ continue
+ if tt == 34:
+ length = d.getVarInt32()
+ tmp = ProtocolBuffer.Decoder(d.buffer(), d.pos(), d.pos() + length)
+ d.skip(length)
+ self.mutable_java_exception().TryMerge(tmp)
+ continue
+ if (tt == 0): raise ProtocolBuffer.ProtocolBufferDecodeError
+ d.skipData(tt)
+
+
+ def __str__(self, prefix="", printElemNumber=0):
+ res=""
+ if self.has_response_:
+ res+=prefix+"response <\n"
+ res+=self.response_.__str__(prefix + " ", printElemNumber)
+ res+=prefix+">\n"
+ if self.has_exception_:
+ res+=prefix+"exception <\n"
+ res+=self.exception_.__str__(prefix + " ", printElemNumber)
+ res+=prefix+">\n"
+ if self.has_application_error_:
+ res+=prefix+"application_error <\n"
+ res+=self.application_error_.__str__(prefix + " ", printElemNumber)
+ res+=prefix+">\n"
+ if self.has_java_exception_:
+ res+=prefix+"java_exception <\n"
+ res+=self.java_exception_.__str__(prefix + " ", printElemNumber)
+ res+=prefix+">\n"
+ return res
+
+
+ def _BuildTagLookupTable(sparse, maxtag, default=None):
+ return tuple([sparse.get(i, default) for i in xrange(0, 1+maxtag)])
+
+ kresponse = 1
+ kexception = 2
+ kapplication_error = 3
+ kjava_exception = 4
+
+ _TEXT = _BuildTagLookupTable({
+ 0: "ErrorCode",
+ 1: "response",
+ 2: "exception",
+ 3: "application_error",
+ 4: "java_exception",
+ }, 4)
+
+ _TYPES = _BuildTagLookupTable({
+ 0: ProtocolBuffer.Encoder.NUMERIC,
+ 1: ProtocolBuffer.Encoder.STRING,
+ 2: ProtocolBuffer.Encoder.STRING,
+ 3: ProtocolBuffer.Encoder.STRING,
+ 4: ProtocolBuffer.Encoder.STRING,
+ }, 4, ProtocolBuffer.Encoder.MAX_TYPE)
+
+ _STYLE = """"""
+ _STYLE_CONTENT_TYPE = """"""
+class TransactionRequest_Precondition(ProtocolBuffer.ProtocolMessage):
+ has_key_ = 0
+ has_hash_ = 0
+ hash_ = ""
+
+ def __init__(self, contents=None):
+ self.key_ = Reference()
+ if contents is not None: self.MergeFromString(contents)
+
+ def key(self): return self.key_
+
+ def mutable_key(self): self.has_key_ = 1; return self.key_
+
+ def clear_key(self):self.has_key_ = 0; self.key_.Clear()
+
+ def has_key(self): return self.has_key_
+
+ def hash(self): return self.hash_
+
+ def set_hash(self, x):
+ self.has_hash_ = 1
+ self.hash_ = x
+
+ def clear_hash(self):
+ if self.has_hash_:
+ self.has_hash_ = 0
+ self.hash_ = ""
+
+ def has_hash(self): return self.has_hash_
+
+
+ def MergeFrom(self, x):
+ assert x is not self
+ if (x.has_key()): self.mutable_key().MergeFrom(x.key())
+ if (x.has_hash()): self.set_hash(x.hash())
+
+ def Equals(self, x):
+ if x is self: return 1
+ if self.has_key_ != x.has_key_: return 0
+ if self.has_key_ and self.key_ != x.key_: return 0
+ if self.has_hash_ != x.has_hash_: return 0
+ if self.has_hash_ and self.hash_ != x.hash_: return 0
+ return 1
+
+ def IsInitialized(self, debug_strs=None):
+ initialized = 1
+ if (not self.has_key_):
+ initialized = 0
+ if debug_strs is not None:
+ debug_strs.append('Required field: key not set.')
+ elif not self.key_.IsInitialized(debug_strs): initialized = 0
+ return initialized
+
+ def ByteSize(self):
+ n = 0
+ n += self.lengthString(self.key_.ByteSize())
+ if (self.has_hash_): n += 1 + self.lengthString(len(self.hash_))
+ return n + 1
+
+ def Clear(self):
+ self.clear_key()
+ self.clear_hash()
+
+ def OutputUnchecked(self, out):
+ out.putVarInt32(18)
+ out.putVarInt32(self.key_.ByteSize())
+ self.key_.OutputUnchecked(out)
+ if (self.has_hash_):
+ out.putVarInt32(26)
+ out.putPrefixedString(self.hash_)
+
+ def TryMerge(self, d):
+ while 1:
+ tt = d.getVarInt32()
+ if tt == 12: break
+ if tt == 18:
+ length = d.getVarInt32()
+ tmp = ProtocolBuffer.Decoder(d.buffer(), d.pos(), d.pos() + length)
+ d.skip(length)
+ self.mutable_key().TryMerge(tmp)
+ continue
+ if tt == 26:
+ self.set_hash(d.getPrefixedString())
+ continue
+ if (tt == 0): raise ProtocolBuffer.ProtocolBufferDecodeError
+ d.skipData(tt)
+
+
+ def __str__(self, prefix="", printElemNumber=0):
+ res=""
+ if self.has_key_:
+ res+=prefix+"key <\n"
+ res+=self.key_.__str__(prefix + " ", printElemNumber)
+ res+=prefix+">\n"
+ if self.has_hash_: res+=prefix+("hash: %s\n" % self.DebugFormatString(self.hash_))
+ return res
+
+class TransactionRequest(ProtocolBuffer.ProtocolMessage):
+ has_puts_ = 0
+ puts_ = None
+ has_deletes_ = 0
+ deletes_ = None
+
+ def __init__(self, contents=None):
+ self.precondition_ = []
+ self.lazy_init_lock_ = thread.allocate_lock()
+ if contents is not None: self.MergeFromString(contents)
+
+ def precondition_size(self): return len(self.precondition_)
+ def precondition_list(self): return self.precondition_
+
+ def precondition(self, i):
+ return self.precondition_[i]
+
+ def mutable_precondition(self, i):
+ return self.precondition_[i]
+
+ def add_precondition(self):
+ x = TransactionRequest_Precondition()
+ self.precondition_.append(x)
+ return x
+
+ def clear_precondition(self):
+ self.precondition_ = []
+ def puts(self):
+ if self.puts_ is None:
+ self.lazy_init_lock_.acquire()
+ try:
+ if self.puts_ is None: self.puts_ = PutRequest()
+ finally:
+ self.lazy_init_lock_.release()
+ return self.puts_
+
+ def mutable_puts(self): self.has_puts_ = 1; return self.puts()
+
+ def clear_puts(self):
+ if self.has_puts_:
+ self.has_puts_ = 0;
+ if self.puts_ is not None: self.puts_.Clear()
+
+ def has_puts(self): return self.has_puts_
+
+ def deletes(self):
+ if self.deletes_ is None:
+ self.lazy_init_lock_.acquire()
+ try:
+ if self.deletes_ is None: self.deletes_ = DeleteRequest()
+ finally:
+ self.lazy_init_lock_.release()
+ return self.deletes_
+
+ def mutable_deletes(self): self.has_deletes_ = 1; return self.deletes()
+
+ def clear_deletes(self):
+ if self.has_deletes_:
+ self.has_deletes_ = 0;
+ if self.deletes_ is not None: self.deletes_.Clear()
+
+ def has_deletes(self): return self.has_deletes_
+
+
+ def MergeFrom(self, x):
+ assert x is not self
+ for i in xrange(x.precondition_size()): self.add_precondition().CopyFrom(x.precondition(i))
+ if (x.has_puts()): self.mutable_puts().MergeFrom(x.puts())
+ if (x.has_deletes()): self.mutable_deletes().MergeFrom(x.deletes())
+
+ def Equals(self, x):
+ if x is self: return 1
+ if len(self.precondition_) != len(x.precondition_): return 0
+ for e1, e2 in zip(self.precondition_, x.precondition_):
+ if e1 != e2: return 0
+ if self.has_puts_ != x.has_puts_: return 0
+ if self.has_puts_ and self.puts_ != x.puts_: return 0
+ if self.has_deletes_ != x.has_deletes_: return 0
+ if self.has_deletes_ and self.deletes_ != x.deletes_: return 0
+ return 1
+
+ def IsInitialized(self, debug_strs=None):
+ initialized = 1
+ for p in self.precondition_:
+ if not p.IsInitialized(debug_strs): initialized=0
+ if (self.has_puts_ and not self.puts_.IsInitialized(debug_strs)): initialized = 0
+ if (self.has_deletes_ and not self.deletes_.IsInitialized(debug_strs)): initialized = 0
+ return initialized
+
+ def ByteSize(self):
+ n = 0
+ n += 2 * len(self.precondition_)
+ for i in xrange(len(self.precondition_)): n += self.precondition_[i].ByteSize()
+ if (self.has_puts_): n += 1 + self.lengthString(self.puts_.ByteSize())
+ if (self.has_deletes_): n += 1 + self.lengthString(self.deletes_.ByteSize())
+ return n + 0
+
+ def Clear(self):
+ self.clear_precondition()
+ self.clear_puts()
+ self.clear_deletes()
+
+ def OutputUnchecked(self, out):
+ for i in xrange(len(self.precondition_)):
+ out.putVarInt32(11)
+ self.precondition_[i].OutputUnchecked(out)
+ out.putVarInt32(12)
+ if (self.has_puts_):
+ out.putVarInt32(34)
+ out.putVarInt32(self.puts_.ByteSize())
+ self.puts_.OutputUnchecked(out)
+ if (self.has_deletes_):
+ out.putVarInt32(42)
+ out.putVarInt32(self.deletes_.ByteSize())
+ self.deletes_.OutputUnchecked(out)
+
+ def TryMerge(self, d):
+ while d.avail() > 0:
+ tt = d.getVarInt32()
+ if tt == 11:
+ self.add_precondition().TryMerge(d)
+ continue
+ if tt == 34:
+ length = d.getVarInt32()
+ tmp = ProtocolBuffer.Decoder(d.buffer(), d.pos(), d.pos() + length)
+ d.skip(length)
+ self.mutable_puts().TryMerge(tmp)
+ continue
+ if tt == 42:
+ length = d.getVarInt32()
+ tmp = ProtocolBuffer.Decoder(d.buffer(), d.pos(), d.pos() + length)
+ d.skip(length)
+ self.mutable_deletes().TryMerge(tmp)
+ continue
+ if (tt == 0): raise ProtocolBuffer.ProtocolBufferDecodeError
+ d.skipData(tt)
+
+
+ def __str__(self, prefix="", printElemNumber=0):
+ res=""
+ cnt=0
+ for e in self.precondition_:
+ elm=""
+ if printElemNumber: elm="(%d)" % cnt
+ res+=prefix+("Precondition%s {\n" % elm)
+ res+=e.__str__(prefix + " ", printElemNumber)
+ res+=prefix+"}\n"
+ cnt+=1
+ if self.has_puts_:
+ res+=prefix+"puts <\n"
+ res+=self.puts_.__str__(prefix + " ", printElemNumber)
+ res+=prefix+">\n"
+ if self.has_deletes_:
+ res+=prefix+"deletes <\n"
+ res+=self.deletes_.__str__(prefix + " ", printElemNumber)
+ res+=prefix+">\n"
+ return res
+
+
+ def _BuildTagLookupTable(sparse, maxtag, default=None):
+ return tuple([sparse.get(i, default) for i in xrange(0, 1+maxtag)])
+
+ kPreconditionGroup = 1
+ kPreconditionkey = 2
+ kPreconditionhash = 3
+ kputs = 4
+ kdeletes = 5
+
+ _TEXT = _BuildTagLookupTable({
+ 0: "ErrorCode",
+ 1: "Precondition",
+ 2: "key",
+ 3: "hash",
+ 4: "puts",
+ 5: "deletes",
+ }, 5)
+
+ _TYPES = _BuildTagLookupTable({
+ 0: ProtocolBuffer.Encoder.NUMERIC,
+ 1: ProtocolBuffer.Encoder.STARTGROUP,
+ 2: ProtocolBuffer.Encoder.STRING,
+ 3: ProtocolBuffer.Encoder.STRING,
+ 4: ProtocolBuffer.Encoder.STRING,
+ 5: ProtocolBuffer.Encoder.STRING,
+ }, 5, ProtocolBuffer.Encoder.MAX_TYPE)
+
+ _STYLE = """"""
+ _STYLE_CONTENT_TYPE = """"""
+
+__all__ = ['Request','ApplicationError','Response','TransactionRequest','TransactionRequest_Precondition']
diff --git a/google_appengine/google/appengine/ext/remote_api/remote_api_pb.pyc b/google_appengine/google/appengine/ext/remote_api/remote_api_pb.pyc
new file mode 100644
index 0000000..57b1989
--- /dev/null
+++ b/google_appengine/google/appengine/ext/remote_api/remote_api_pb.pyc
Binary files differ
diff --git a/google_appengine/google/appengine/ext/remote_api/remote_api_stub.py b/google_appengine/google/appengine/ext/remote_api/remote_api_stub.py
new file mode 100755
index 0000000..eeeea66
--- /dev/null
+++ b/google_appengine/google/appengine/ext/remote_api/remote_api_stub.py
@@ -0,0 +1,499 @@
+#!/usr/bin/env python
+#
+# Copyright 2007 Google Inc.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
+
+"""An apiproxy stub that calls a remote handler via HTTP.
+
+This allows easy remote access to the App Engine datastore, and potentially any
+of the other App Engine APIs, using the same interface you use when accessing
+the service locally.
+
+An example Python script:
+---
+from google.appengine.ext import db
+from google.appengine.ext.remote_api import remote_api_stub
+from myapp import models
+import getpass
+
+def auth_func():
+ return (raw_input('Username:'), getpass.getpass('Password:'))
+
+remote_api_stub.ConfigureRemoteDatastore('my-app', '/remote_api', auth_func)
+
+# Now you can access the remote datastore just as if your code was running on
+# App Engine!
+
+houses = models.House.all().fetch(100)
+for a_house in houses:
+ a_house.doors += 1
+db.put(houses)
+---
+
+A few caveats:
+- Where possible, avoid iterating over queries directly. Fetching as many
+ results as you will need is faster and more efficient.
+- If you need to iterate, consider instead fetching items in batches with a sort
+ order and constructing a new query starting from where the previous one left
+ off. The __key__ pseudo-property can be used as a sort key for this purpose,
+ and does not even require a custom index if you are iterating over all
+  entities of a given type (see the sketch below).
+- Likewise, it's a good idea to put entities in batches. Instead of calling put
+ for each individual entity, accumulate them and put them in batches using
+ db.put(), if you can.
+- Requests and responses are still limited to 1MB each, so if you have large
+  entities or try to fetch or put many of them at once, your requests may fail.
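+
+A sketch of the batched key-order iteration mentioned above (illustrative
+only; the model name is the one assumed in the example at the top):
+---
+batch = models.House.all().order('__key__').fetch(100)
+while batch:
+  for a_house in batch:
+    a_house.doors += 1
+  db.put(batch)
+  batch = (models.House.all().order('__key__')
+           .filter('__key__ >', batch[-1].key()).fetch(100))
+---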
+"""
+
+
+
+
+
+import google
+import os
+import pickle
+import random
+import sha
+import sys
+import thread
+import threading
+import yaml
+
+from google.appengine.api import datastore
+from google.appengine.api import apiproxy_rpc
+from google.appengine.api import apiproxy_stub_map
+from google.appengine.datastore import datastore_pb
+from google.appengine.ext.remote_api import remote_api_pb
+from google.appengine.runtime import apiproxy_errors
+from google.appengine.tools import appengine_rpc
+
+
+class Error(Exception):
+ """Base class for exceptions in this module."""
+
+
+class ConfigurationError(Error):
+ """Exception for configuration errors."""
+
+
+class UnknownJavaServerError(Error):
+ """Exception for exceptions returned from a Java remote_api handler."""
+
+
+def GetUserAgent():
+ """Determines the value of the 'User-agent' header to use for HTTP requests.
+
+ Returns:
+ String containing the 'user-agent' header value, which includes the SDK
+ version, the platform information, and the version of Python;
+ e.g., "remote_api/1.0.1 Darwin/9.2.0 Python/2.5.2".
+ """
+ product_tokens = []
+
+ product_tokens.append("Google-remote_api/1.0")
+
+ product_tokens.append(appengine_rpc.GetPlatformToken())
+
+ python_version = ".".join(str(i) for i in sys.version_info)
+ product_tokens.append("Python/%s" % python_version)
+
+ return " ".join(product_tokens)
+
+
+def GetSourceName():
+ return "Google-remote_api-1.0"
+
+
+class TransactionData(object):
+ """Encapsulates data about an individual transaction."""
+
+ def __init__(self, thread_id):
+ self.thread_id = thread_id
+ self.preconditions = {}
+ self.entities = {}
+
+
+class RemoteStub(object):
+ """A stub for calling services on a remote server over HTTP.
+
+ You can use this to stub out any service that the remote server supports.
+ """
+
+ def __init__(self, server, path):
+ """Constructs a new RemoteStub that communicates with the specified server.
+
+ Args:
+ server: An instance of a subclass of
+ google.appengine.tools.appengine_rpc.AbstractRpcServer.
+ path: The path to the handler this stub should send requests to.
+ """
+ self._server = server
+ self._path = path
+
+ def _PreHookHandler(self, service, call, request, response):
+ pass
+
+ def _PostHookHandler(self, service, call, request, response):
+ pass
+
+ def MakeSyncCall(self, service, call, request, response):
+ self._PreHookHandler(service, call, request, response)
+ request_pb = remote_api_pb.Request()
+ request_pb.set_service_name(service)
+ request_pb.set_method(call)
+ request_pb.mutable_request().set_contents(request.Encode())
+
+ response_pb = remote_api_pb.Response()
+ encoded_request = request_pb.Encode()
+ encoded_response = self._server.Send(self._path, encoded_request)
+ response_pb.ParseFromString(encoded_response)
+
+ try:
+ if response_pb.has_application_error():
+ error_pb = response_pb.application_error()
+ raise datastore._ToDatastoreError(
+ apiproxy_errors.ApplicationError(error_pb.code(), error_pb.detail()))
+ elif response_pb.has_exception():
+ raise pickle.loads(response_pb.exception().contents())
+ elif response_pb.has_java_exception():
+        raise UnknownJavaServerError("An unknown error has occurred in the "
+ "Java remote_api handler for this call.")
+ else:
+ response.ParseFromString(response_pb.response().contents())
+ finally:
+ self._PostHookHandler(service, call, request, response)
+
+ def CreateRPC(self):
+ return apiproxy_rpc.RPC(stub=self)
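+
+  # Usage sketch (explanatory comment, not in the original; arguments to the
+  # rpc server are elided):
+  #   server = appengine_rpc.HttpRpcServer(...)
+  #   apiproxy_stub_map.apiproxy.RegisterStub(
+  #       'urlfetch', RemoteStub(server, '/remote_api'))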
+
+
+class RemoteDatastoreStub(RemoteStub):
+ """A specialised stub for accessing the App Engine datastore remotely.
+
+ A specialised stub is required because there are some datastore operations
+  that preserve state between calls. This stub makes queries possible, and
+  layers a limited, single-threaded form of transactions on top of the
+  remote handler by buffering writes locally and committing them with
+  precondition checks.
+ """
+
+ def __init__(self, server, path):
+ super(RemoteDatastoreStub, self).__init__(server, path)
+ self.__queries = {}
+ self.__transactions = {}
+
+ self.__next_local_cursor = 1
+ self.__local_cursor_lock = threading.Lock()
+ self.__next_local_tx = 1
+ self.__local_tx_lock = threading.Lock()
+
+ def MakeSyncCall(self, service, call, request, response):
+ assert service == 'datastore_v3'
+
+ explanation = []
+ assert request.IsInitialized(explanation), explanation
+
+ handler = getattr(self, '_Dynamic_' + call, None)
+ if handler:
+ handler(request, response)
+ else:
+ super(RemoteDatastoreStub, self).MakeSyncCall(service, call, request,
+ response)
+
+ assert response.IsInitialized(explanation), explanation
+
+ def _Dynamic_RunQuery(self, query, query_result):
+ self.__local_cursor_lock.acquire()
+ try:
+ cursor_id = self.__next_local_cursor
+ self.__next_local_cursor += 1
+ finally:
+ self.__local_cursor_lock.release()
+ query.clear_count()
+ self.__queries[cursor_id] = query
+
+ query_result.mutable_cursor().set_cursor(cursor_id)
+ query_result.set_more_results(True)
+ query_result.set_keys_only(query.keys_only())
+
+ def _Dynamic_Next(self, next_request, query_result):
+ cursor = next_request.cursor().cursor()
+ if cursor not in self.__queries:
+ raise apiproxy_errors.ApplicationError(datastore_pb.Error.BAD_REQUEST,
+ 'Cursor %d not found' % cursor)
+ query = self.__queries[cursor]
+
+ if query is None:
+ query_result.set_more_results(False)
+ return
+
+ request = datastore_pb.Query()
+ request.CopyFrom(query)
+ if request.has_limit():
+ request.set_limit(min(request.limit(), next_request.count()))
+ else:
+ request.set_limit(next_request.count())
+ request.set_count(request.limit())
+
+ super(RemoteDatastoreStub, self).MakeSyncCall(
+ 'remote_datastore', 'RunQuery', request, query_result)
+
+ query.set_offset(query.offset() + query_result.result_size())
+ if query.has_limit():
+ query.set_limit(query.limit() - query_result.result_size())
+ if not query_result.more_results():
+ self.__queries[cursor] = None
+
+ def _Dynamic_Get(self, get_request, get_response):
+ txid = None
+ if get_request.has_transaction():
+ txid = get_request.transaction().handle()
+ txdata = self.__transactions[txid]
+ assert (txdata.thread_id ==
+ thread.get_ident()), "Transactions are single-threaded."
+
+ keys = [(k, k.Encode()) for k in get_request.key_list()]
+
+ new_request = datastore_pb.GetRequest()
+ for key, enckey in keys:
+ if enckey not in txdata.entities:
+ new_request.add_key().CopyFrom(key)
+ else:
+ new_request = get_request
+
+ if new_request.key_size() > 0:
+ super(RemoteDatastoreStub, self).MakeSyncCall(
+ 'datastore_v3', 'Get', new_request, get_response)
+
+ if txid is not None:
+ newkeys = new_request.key_list()
+ entities = get_response.entity_list()
+ for key, entity in zip(newkeys, entities):
+ entity_hash = None
+ if entity.has_entity():
+ entity_hash = sha.new(entity.entity().Encode()).digest()
+ txdata.preconditions[key.Encode()] = (key, entity_hash)
+
+ new_response = datastore_pb.GetResponse()
+ it = iter(get_response.entity_list())
+ for key, enckey in keys:
+ if enckey in txdata.entities:
+ cached_entity = txdata.entities[enckey][1]
+ if cached_entity:
+ new_response.add_entity().mutable_entity().CopyFrom(cached_entity)
+ else:
+ new_response.add_entity()
+ else:
+ new_entity = it.next()
+ if new_entity.has_entity():
+ assert new_entity.entity().key() == key
+ new_response.add_entity().CopyFrom(new_entity)
+ else:
+ new_response.add_entity()
+ get_response.CopyFrom(new_response)
+
+ def _Dynamic_Put(self, put_request, put_response):
+ if put_request.has_transaction():
+ entities = put_request.entity_list()
+
+ requires_id = lambda x: x.id() == 0 and not x.has_name()
+ new_ents = [e for e in entities
+ if requires_id(e.key().path().element_list()[-1])]
+ id_request = remote_api_pb.PutRequest()
+ if new_ents:
+ for ent in new_ents:
+ e = id_request.add_entity()
+ e.mutable_key().CopyFrom(ent.key())
+ e.mutable_entity_group()
+ id_response = datastore_pb.PutResponse()
+ super(RemoteDatastoreStub, self).MakeSyncCall(
+ 'remote_datastore', 'GetIDs', id_request, id_response)
+ assert id_request.entity_size() == id_response.key_size()
+ for key, ent in zip(id_response.key_list(), new_ents):
+ ent.mutable_key().CopyFrom(key)
+ ent.mutable_entity_group().add_element().CopyFrom(
+ key.path().element(0))
+
+ txid = put_request.transaction().handle()
+ txdata = self.__transactions[txid]
+ assert (txdata.thread_id ==
+ thread.get_ident()), "Transactions are single-threaded."
+ for entity in entities:
+ txdata.entities[entity.key().Encode()] = (entity.key(), entity)
+ put_response.add_key().CopyFrom(entity.key())
+ else:
+ super(RemoteDatastoreStub, self).MakeSyncCall(
+ 'datastore_v3', 'Put', put_request, put_response)
+
+ def _Dynamic_Delete(self, delete_request, response):
+ if delete_request.has_transaction():
+ txid = delete_request.transaction().handle()
+ txdata = self.__transactions[txid]
+ assert (txdata.thread_id ==
+ thread.get_ident()), "Transactions are single-threaded."
+ for key in delete_request.key_list():
+ txdata.entities[key.Encode()] = (key, None)
+ else:
+ super(RemoteDatastoreStub, self).MakeSyncCall(
+ 'datastore_v3', 'Delete', delete_request, response)
+
+ def _Dynamic_BeginTransaction(self, request, transaction):
+ self.__local_tx_lock.acquire()
+ try:
+ txid = self.__next_local_tx
+ self.__transactions[txid] = TransactionData(thread.get_ident())
+ self.__next_local_tx += 1
+ finally:
+ self.__local_tx_lock.release()
+ transaction.set_handle(txid)
+
+ def _Dynamic_Commit(self, transaction, transaction_response):
+ txid = transaction.handle()
+ if txid not in self.__transactions:
+ raise apiproxy_errors.ApplicationError(
+ datastore_pb.Error.BAD_REQUEST,
+ 'Transaction %d not found.' % (txid,))
+
+ txdata = self.__transactions[txid]
+ assert (txdata.thread_id ==
+ thread.get_ident()), "Transactions are single-threaded."
+ del self.__transactions[txid]
+
+ tx = remote_api_pb.TransactionRequest()
+ for key, hash in txdata.preconditions.values():
+ precond = tx.add_precondition()
+ precond.mutable_key().CopyFrom(key)
+ if hash:
+ precond.set_hash(hash)
+
+ puts = tx.mutable_puts()
+ deletes = tx.mutable_deletes()
+ for key, entity in txdata.entities.values():
+ if entity:
+ puts.add_entity().CopyFrom(entity)
+ else:
+ deletes.add_key().CopyFrom(key)
+
+ super(RemoteDatastoreStub, self).MakeSyncCall(
+ 'remote_datastore', 'Transaction',
+ tx, datastore_pb.PutResponse())
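+  # Illustrative summary of the emulated transaction flow implemented by the
+  # methods above (entity names are examples): BeginTransaction hands out a
+  # purely local txid; Get(k) records (k, sha(entity)) as a precondition;
+  # Put/Delete only buffer changes in txdata.entities; Commit then sends a
+  # single remote 'Transaction' call whose preconditions let the server
+  # reject the commit if any previously-read entity has since changed.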
+
+ def _Dynamic_Rollback(self, transaction, transaction_response):
+ txid = transaction.handle()
+ self.__local_tx_lock.acquire()
+ try:
+ if txid not in self.__transactions:
+ raise apiproxy_errors.ApplicationError(
+ datastore_pb.Error.BAD_REQUEST,
+ 'Transaction %d not found.' % (txid,))
+
+      txdata = self.__transactions[txid]
+      assert (txdata.thread_id ==
+              thread.get_ident()), "Transactions are single-threaded."
+ del self.__transactions[txid]
+ finally:
+ self.__local_tx_lock.release()
+
+ def _Dynamic_CreateIndex(self, index, id_response):
+ raise apiproxy_errors.CapabilityDisabledError(
+ 'The remote datastore does not support index manipulation.')
+
+ def _Dynamic_UpdateIndex(self, index, void):
+ raise apiproxy_errors.CapabilityDisabledError(
+ 'The remote datastore does not support index manipulation.')
+
+ def _Dynamic_DeleteIndex(self, index, void):
+ raise apiproxy_errors.CapabilityDisabledError(
+ 'The remote datastore does not support index manipulation.')
+
+
+def ConfigureRemoteApi(app_id,
+ path,
+ auth_func,
+ servername=None,
+ rpc_server_factory=appengine_rpc.HttpRpcServer,
+ rtok=None,
+ secure=False):
+ """Does necessary setup to allow easy remote access to App Engine APIs.
+
+ Either servername must be provided or app_id must not be None. If app_id
+ is None and a servername is provided, this function will send a request
+ to the server to retrieve the app_id.
+
+ Args:
+ app_id: The app_id of your app, as declared in app.yaml.
+ path: The path to the remote_api handler for your app
+ (for example, '/remote_api').
+ auth_func: A function that takes no arguments and returns a
+ (username, password) tuple. This will be called if your application
+ requires authentication to access the remote_api handler (it should!)
+ and you do not already have a valid auth cookie.
+ servername: The hostname your app is deployed on. Defaults to
+ <app_id>.appspot.com.
+ rpc_server_factory: A factory to construct the rpc server for the datastore.
+    rtok: The validation token to send with app_id lookups. If None, a random
+ token is used.
+ secure: Use SSL when communicating with the server.
+
+ Raises:
+ urllib2.HTTPError: if app_id is not provided and there is an error while
+ retrieving it.
+    ConfigurationError: if there is an error configuring the DatastoreFileStub.
+ """
+ if not servername and not app_id:
+ raise ConfigurationError('app_id or servername required')
+ if not servername:
+ servername = '%s.appspot.com' % (app_id,)
+ server = rpc_server_factory(servername, auth_func, GetUserAgent(),
+ GetSourceName(), debug_data=False, secure=secure)
+ if not app_id:
+ if not rtok:
+ random.seed()
+ rtok = str(random.random())[2:]
+ urlargs = {'rtok': rtok}
+ response = server.Send(path, payload=None, **urlargs)
+ if not response.startswith('{'):
+ raise ConfigurationError(
+        'Invalid response received from server: %s' % response)
+ app_info = yaml.load(response)
+ if not app_info or 'rtok' not in app_info or 'app_id' not in app_info:
+ raise ConfigurationError('Error parsing app_id lookup response')
+ if app_info['rtok'] != rtok:
+ raise ConfigurationError('Token validation failed during app_id lookup. '
+ '(sent %s, got %s)' % (repr(rtok),
+ repr(app_info['rtok'])))
+ app_id = app_info['app_id']
+
+ os.environ['APPLICATION_ID'] = app_id
+ apiproxy_stub_map.apiproxy = apiproxy_stub_map.APIProxyStubMap()
+ datastore_stub = RemoteDatastoreStub(server, path)
+ apiproxy_stub_map.apiproxy.RegisterStub('datastore_v3', datastore_stub)
+ stub = RemoteStub(server, path)
+ for service in ['capability_service', 'images', 'mail', 'memcache',
+ 'urlfetch']:
+ apiproxy_stub_map.apiproxy.RegisterStub(service, stub)
+
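+# A minimal usage sketch (illustrative, not part of the SDK): the app id,
+# handler path and credentials below are placeholders for your deployment.
+#
+#   import getpass
+#   from google.appengine.ext.remote_api import remote_api_stub
+#
+#   def auth_func():
+#     return (raw_input('Email: '), getpass.getpass('Password: '))
+#
+#   remote_api_stub.ConfigureRemoteApi('my-app', '/remote_api', auth_func)
+#   remote_api_stub.MaybeInvokeAuthentication()
+#
+# After this call, datastore, memcache, urlfetch, images and mail API calls
+# made in this process are proxied to the remote application.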
+
+def MaybeInvokeAuthentication():
+ """Sends an empty request through to the configured end-point.
+
+ If authentication is necessary, this will cause the rpc_server to invoke
+ interactive authentication.
+ """
+ datastore_stub = apiproxy_stub_map.apiproxy.GetStub('datastore_v3')
+ if isinstance(datastore_stub, RemoteStub):
+ datastore_stub._server.Send(datastore_stub._path, payload=None)
+ else:
+ raise ConfigurationError('remote_api is not configured.')
+
+
+ConfigureRemoteDatastore = ConfigureRemoteApi
diff --git a/google_appengine/google/appengine/ext/remote_api/remote_api_stub.pyc b/google_appengine/google/appengine/ext/remote_api/remote_api_stub.pyc
new file mode 100644
index 0000000..0f773c2
--- /dev/null
+++ b/google_appengine/google/appengine/ext/remote_api/remote_api_stub.pyc
Binary files differ
diff --git a/google_appengine/google/appengine/ext/remote_api/throttle.py b/google_appengine/google/appengine/ext/remote_api/throttle.py
new file mode 100755
index 0000000..f9454bf
--- /dev/null
+++ b/google_appengine/google/appengine/ext/remote_api/throttle.py
@@ -0,0 +1,637 @@
+#!/usr/bin/env python
+#
+# Copyright 2007 Google Inc.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
+
+"""Client-side transfer throttling for use with remote_api_stub.
+
+This module is used to configure rate limiting for programs accessing
+AppEngine services through remote_api.
+
+See the Throttle class for more information.
+
+An example with throttling:
+---
+from google.appengine.ext import db
+from google.appengine.ext.remote_api import remote_api_stub
+from google.appengine.ext.remote_api import throttle
+from myapp import models
+import getpass
+import threading
+
+def auth_func():
+ return (raw_input('Username:'), getpass.getpass('Password:'))
+
+remote_api_stub.ConfigureRemoteDatastore('my-app', '/remote_api', auth_func)
+full_throttle = throttle.DefaultThrottle(multiplier=1.0)
+throttle.ThrottleRemoteDatastore(full_throttle)
+
+# Register any threads that will be using the datastore with the throttler
+full_throttle.Register(threading.currentThread())
+
+# Now you can access the remote datastore just as if your code was running on
+# App Engine, and you don't need to worry about exceeding quota limits!
+
+houses = models.House.all().fetch(100)
+for a_house in houses:
+ a_house.doors += 1
+db.put(houses)
+---
+
+This example limits usage to the default free quota levels. The multiplier
+kwarg to throttle.DefaultThrottle can be used to scale the throttle levels
+higher or lower.
+
+Throttles can also be constructed directly for more control over the limits
+for different operations. See the Throttle class and the constants following
+it for details.
+"""
+
+
+import logging
+import threading
+import time
+import urllib2
+import urlparse
+
+from google.appengine.api import apiproxy_stub_map
+from google.appengine.ext.remote_api import remote_api_stub
+from google.appengine.tools import appengine_rpc
+
+logger = logging.getLogger('google.appengine.ext.remote_api.throttle')
+
+MINIMUM_THROTTLE_SLEEP_DURATION = 0.001
+
+
+class Error(Exception):
+ """Base class for errors in this module."""
+
+
+class ThreadNotRegisteredError(Error):
+ """An unregistered thread has accessed the throttled datastore stub."""
+
+
+class UnknownThrottleNameError(Error):
+ """A transfer was added for an unknown throttle name."""
+
+
+def InterruptibleSleep(sleep_time):
+  """Puts the thread to sleep, checking its exit_flag four times a second.
+
+ Args:
+ sleep_time: Time to sleep.
+ """
+ slept = 0.0
+ epsilon = .0001
+ thread = threading.currentThread()
+ while slept < sleep_time - epsilon:
+ remaining = sleep_time - slept
+ this_sleep_time = min(remaining, 0.25)
+ time.sleep(this_sleep_time)
+ slept += this_sleep_time
+ if hasattr(thread, 'exit_flag') and thread.exit_flag:
+ return
+
+
+class Throttle(object):
+ """A base class for upload rate throttling.
+
+  Transferring a large number of entities too quickly can trigger
+ quota limits and cause the transfer process to halt. In order to
+ stay within the application's quota, we throttle the data transfer
+ to a specified limit (across all transfer threads).
+
+ This class tracks a moving average of some aspect of the transfer
+ rate (bandwidth, records per second, http connections per
+ second). It keeps two windows of counts of bytes transferred, on a
+ per-thread basis. One block is the "current" block, and the other is
+ the "prior" block. It will rotate the counts from current to prior
+ when ROTATE_PERIOD has passed. Thus, the current block will
+ represent from 0 seconds to ROTATE_PERIOD seconds of activity
+ (determined by: time.time() - self.last_rotate). The prior block
+ will always represent a full ROTATE_PERIOD.
+
+  Sleeping is performed just before a transfer of another block, and is
+  based on the counts transferred *before* the next transfer. It does not
+  matter how much will be transferred next, only that, for all the data
+  transferred SO FAR, we have interspersed enough pauses to keep the
+  aggregate transfer rate within the specified limit.
+
+ These counts are maintained on a per-thread basis, so we do not require
+ any interlocks around incrementing the counts. There IS an interlock on
+ the rotation of the counts because we do not want multiple threads to
+ multiply-rotate the counts.
+
+ There are various race conditions in the computation and collection
+ of these counts. We do not require precise values, but simply to
+ keep the overall transfer within the bandwidth limits. If a given
+ pause is a little short, or a little long, then the aggregate delays
+ will be correct.
+ """
+
+ ROTATE_PERIOD = 600
+
+ def __init__(self,
+ get_time=time.time,
+ thread_sleep=InterruptibleSleep,
+ layout=None):
+ self.get_time = get_time
+ self.thread_sleep = thread_sleep
+
+ self.start_time = get_time()
+ self.transferred = {}
+ self.prior_block = {}
+ self.totals = {}
+ self.throttles = {}
+
+ self.last_rotate = {}
+ self.rotate_mutex = {}
+ if layout:
+ self.AddThrottles(layout)
+
+ def AddThrottle(self, name, limit):
+ self.throttles[name] = limit
+ self.transferred[name] = {}
+ self.prior_block[name] = {}
+ self.totals[name] = {}
+ self.last_rotate[name] = self.get_time()
+ self.rotate_mutex[name] = threading.Lock()
+
+ def AddThrottles(self, layout):
+ for key, value in layout.iteritems():
+ self.AddThrottle(key, value)
+
+ def Register(self, thread):
+ """Register this thread with the throttler."""
+ thread_id = id(thread)
+ for throttle_name in self.throttles.iterkeys():
+ self.transferred[throttle_name][thread_id] = 0
+ self.prior_block[throttle_name][thread_id] = 0
+ self.totals[throttle_name][thread_id] = 0
+
+ def VerifyThrottleName(self, throttle_name):
+ if throttle_name not in self.throttles:
+ raise UnknownThrottleNameError('%s is not a registered throttle' %
+ throttle_name)
+
+ def AddTransfer(self, throttle_name, token_count):
+ """Add a count to the amount this thread has transferred.
+
+ Each time a thread transfers some data, it should call this method to
+ note the amount sent. The counts may be rotated if sufficient time
+ has passed since the last rotation.
+
+ Args:
+ throttle_name: The name of the throttle to add to.
+ token_count: The number to add to the throttle counter.
+ """
+ self.VerifyThrottleName(throttle_name)
+ transferred = self.transferred[throttle_name]
+ try:
+ transferred[id(threading.currentThread())] += token_count
+ except KeyError:
+ thread = threading.currentThread()
+ raise ThreadNotRegisteredError(
+ 'Unregistered thread accessing throttled datastore stub: id = %s\n'
+ 'name = %s' % (id(thread), thread.getName()))
+
+ if self.last_rotate[throttle_name] + self.ROTATE_PERIOD < self.get_time():
+ self._RotateCounts(throttle_name)
+
+ def Sleep(self, throttle_name=None):
+ """Possibly sleep in order to limit the transfer rate.
+
+ Note that we sleep based on *prior* transfers rather than what we
+ may be about to transfer. The next transfer could put us under/over
+ and that will be rectified *after* that transfer. Net result is that
+ the average transfer rate will remain within bounds. Spiky behavior
+ or uneven rates among the threads could possibly bring the transfer
+ rate above the requested limit for short durations.
+
+ Args:
+ throttle_name: The name of the throttle to sleep on. If None or
+ omitted, then sleep on all throttles.
+ """
+ if throttle_name is None:
+ for throttle_name in self.throttles:
+ self.Sleep(throttle_name=throttle_name)
+ return
+
+ self.VerifyThrottleName(throttle_name)
+
+ thread = threading.currentThread()
+
+ while True:
+ duration = self.get_time() - self.last_rotate[throttle_name]
+
+ total = 0
+ for count in self.prior_block[throttle_name].values():
+ total += count
+
+ if total:
+ duration += self.ROTATE_PERIOD
+
+ for count in self.transferred[throttle_name].values():
+ total += count
+
+ sleep_time = self._SleepTime(total, self.throttles[throttle_name],
+ duration)
+
+ if sleep_time < MINIMUM_THROTTLE_SLEEP_DURATION:
+ break
+
+ logger.debug('[%s] Throttling on %s. Sleeping for %.1f ms '
+ '(duration=%.1f ms, total=%d)',
+ thread.getName(), throttle_name,
+ sleep_time * 1000, duration * 1000, total)
+ self.thread_sleep(sleep_time)
+      if getattr(thread, 'exit_flag', False):
+ break
+ self._RotateCounts(throttle_name)
+
+ def _SleepTime(self, total, limit, duration):
+ """Calculate the time to sleep on a throttle.
+
+ Args:
+ total: The total amount transferred.
+ limit: The amount per second that is allowed to be sent.
+ duration: The amount of time taken to send the total.
+
+ Returns:
+ A float for the amount of time to sleep.
+ """
+ if not limit:
+ return 0.0
+ return max(0.0, (total / limit) - duration)
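+    # Worked example (illustrative): with limit=100000 bytes/sec, sending
+    # total=200000 bytes over duration=1.5 sec needs at least 2.0 sec to
+    # stay under the limit, so we sleep max(0.0, 2.0 - 1.5) = 0.5 sec.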
+
+ def _RotateCounts(self, throttle_name):
+ """Rotate the transfer counters.
+
+ If sufficient time has passed, then rotate the counters from active to
+ the prior-block of counts.
+
+ This rotation is interlocked to ensure that multiple threads do not
+ over-rotate the counts.
+
+ Args:
+ throttle_name: The name of the throttle to rotate.
+ """
+ self.VerifyThrottleName(throttle_name)
+ self.rotate_mutex[throttle_name].acquire()
+ try:
+ next_rotate_time = self.last_rotate[throttle_name] + self.ROTATE_PERIOD
+ if next_rotate_time >= self.get_time():
+ return
+
+      for name, count in self.transferred[throttle_name].items():
+        self.prior_block[throttle_name][name] = count
+        self.transferred[throttle_name][name] = 0
+        self.totals[throttle_name][name] += count
+
+ self.last_rotate[throttle_name] = self.get_time()
+
+ finally:
+ self.rotate_mutex[throttle_name].release()
+
+ def TotalTransferred(self, throttle_name):
+ """Return the total transferred, and over what period.
+
+ Args:
+ throttle_name: The name of the throttle to total.
+
+ Returns:
+ A tuple of the total count and running time for the given throttle name.
+ """
+ total = 0
+ for count in self.totals[throttle_name].values():
+ total += count
+ for count in self.transferred[throttle_name].values():
+ total += count
+ return total, self.get_time() - self.start_time
+
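+# Illustrative sketch (not part of the SDK): a Throttle can also be built
+# with a custom layout; the throttle name and limit here are arbitrary.
+#
+#   import threading
+#   custom = Throttle(layout={'widgets-per-second': 10})
+#   custom.Register(threading.currentThread())
+#   custom.Sleep('widgets-per-second')          # block if over the limit
+#   custom.AddTransfer('widgets-per-second', 1) # then record the transfer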
+
+BANDWIDTH_UP = 'http-bandwidth-up'
+BANDWIDTH_DOWN = 'http-bandwidth-down'
+REQUESTS = 'http-requests'
+HTTPS_BANDWIDTH_UP = 'https-bandwidth-up'
+HTTPS_BANDWIDTH_DOWN = 'https-bandwidth-down'
+HTTPS_REQUESTS = 'https-requests'
+DATASTORE_CALL_COUNT = 'datastore-call-count'
+ENTITIES_FETCHED = 'entities-fetched'
+ENTITIES_MODIFIED = 'entities-modified'
+INDEX_MODIFICATIONS = 'index-modifications'
+
+
+DEFAULT_LIMITS = {
+ BANDWIDTH_UP: 100000,
+ BANDWIDTH_DOWN: 100000,
+ REQUESTS: 15,
+ HTTPS_BANDWIDTH_UP: 100000,
+ HTTPS_BANDWIDTH_DOWN: 100000,
+ HTTPS_REQUESTS: 15,
+ DATASTORE_CALL_COUNT: 120,
+ ENTITIES_FETCHED: 400,
+ ENTITIES_MODIFIED: 400,
+ INDEX_MODIFICATIONS: 1600,
+}
+
+NO_LIMITS = {
+ BANDWIDTH_UP: None,
+ BANDWIDTH_DOWN: None,
+ REQUESTS: None,
+ HTTPS_BANDWIDTH_UP: None,
+ HTTPS_BANDWIDTH_DOWN: None,
+ HTTPS_REQUESTS: None,
+ DATASTORE_CALL_COUNT: None,
+ ENTITIES_FETCHED: None,
+ ENTITIES_MODIFIED: None,
+ INDEX_MODIFICATIONS: None,
+}
+
+
+def DefaultThrottle(multiplier=1.0):
+ """Return a Throttle instance with multiplier * the quota limits."""
+ layout = dict([(name, multiplier * limit)
+ for (name, limit) in DEFAULT_LIMITS.iteritems()])
+ return Throttle(layout=layout)
+
+
+class ThrottleHandler(urllib2.BaseHandler):
+ """A urllib2 handler for http and https requests that adds to a throttle."""
+
+ def __init__(self, throttle):
+ """Initialize a ThrottleHandler.
+
+ Args:
+ throttle: A Throttle instance to call for bandwidth and http/https request
+ throttling.
+ """
+ self.throttle = throttle
+
+ def AddRequest(self, throttle_name, req):
+ """Add to bandwidth throttle for given request.
+
+ Args:
+ throttle_name: The name of the bandwidth throttle to add to.
+ req: The request whose size will be added to the throttle.
+ """
+ size = 0
+ for key, value in req.headers.iteritems():
+ size += len('%s: %s\n' % (key, value))
+ for key, value in req.unredirected_hdrs.iteritems():
+ size += len('%s: %s\n' % (key, value))
+ (unused_scheme,
+ unused_host_port, url_path,
+ unused_query, unused_fragment) = urlparse.urlsplit(req.get_full_url())
+ size += len('%s %s HTTP/1.1\n' % (req.get_method(), url_path))
+ data = req.get_data()
+ if data:
+ size += len(data)
+ self.throttle.AddTransfer(throttle_name, size)
+
+ def AddResponse(self, throttle_name, res):
+ """Add to bandwidth throttle for given response.
+
+ Args:
+ throttle_name: The name of the bandwidth throttle to add to.
+ res: The response whose size will be added to the throttle.
+ """
+ content = res.read()
+
+ def ReturnContent():
+ return content
+
+ res.read = ReturnContent
+ size = len(content)
+ headers = res.info()
+ for key, value in headers.items():
+ size += len('%s: %s\n' % (key, value))
+ self.throttle.AddTransfer(throttle_name, size)
+
+ def http_request(self, req):
+ """Process an HTTP request.
+
+ If the throttle is over quota, sleep first. Then add request size to
+ throttle before returning it to be sent.
+
+ Args:
+ req: A urllib2.Request object.
+
+ Returns:
+ The request passed in.
+ """
+ self.throttle.Sleep(BANDWIDTH_UP)
+ self.throttle.Sleep(BANDWIDTH_DOWN)
+ self.AddRequest(BANDWIDTH_UP, req)
+ return req
+
+ def https_request(self, req):
+ """Process an HTTPS request.
+
+ If the throttle is over quota, sleep first. Then add request size to
+ throttle before returning it to be sent.
+
+ Args:
+ req: A urllib2.Request object.
+
+ Returns:
+ The request passed in.
+ """
+ self.throttle.Sleep(HTTPS_BANDWIDTH_UP)
+ self.throttle.Sleep(HTTPS_BANDWIDTH_DOWN)
+ self.AddRequest(HTTPS_BANDWIDTH_UP, req)
+ return req
+
+ def http_response(self, unused_req, res):
+ """Process an HTTP response.
+
+ The size of the response is added to the bandwidth throttle and the request
+ throttle is incremented by one.
+
+ Args:
+ unused_req: The urllib2 request for this response.
+ res: A urllib2 response object.
+
+ Returns:
+ The response passed in.
+ """
+ self.AddResponse(BANDWIDTH_DOWN, res)
+ self.throttle.AddTransfer(REQUESTS, 1)
+ return res
+
+ def https_response(self, unused_req, res):
+ """Process an HTTPS response.
+
+ The size of the response is added to the bandwidth throttle and the request
+ throttle is incremented by one.
+
+ Args:
+ unused_req: The urllib2 request for this response.
+ res: A urllib2 response object.
+
+ Returns:
+ The response passed in.
+ """
+ self.AddResponse(HTTPS_BANDWIDTH_DOWN, res)
+ self.throttle.AddTransfer(HTTPS_REQUESTS, 1)
+ return res
+
+
+class ThrottledHttpRpcServer(appengine_rpc.HttpRpcServer):
+ """Provides a simplified RPC-style interface for HTTP requests.
+
+ This RPC server uses a Throttle to prevent exceeding quotas.
+ """
+
+ def __init__(self, throttle, *args, **kwargs):
+ """Initialize a ThrottledHttpRpcServer.
+
+ Args:
+      throttle: A Throttle instance.
+ args: Positional arguments to pass through to
+ appengine_rpc.HttpRpcServer.__init__
+ kwargs: Keyword arguments to pass through to
+ appengine_rpc.HttpRpcServer.__init__
+ """
+ self.throttle = throttle
+ appengine_rpc.HttpRpcServer.__init__(self, *args, **kwargs)
+
+ def _GetOpener(self):
+ """Returns an OpenerDirector that supports cookies and ignores redirects.
+
+ Returns:
+ A urllib2.OpenerDirector object.
+ """
+ opener = appengine_rpc.HttpRpcServer._GetOpener(self)
+ opener.add_handler(ThrottleHandler(self.throttle))
+
+ return opener
+
+
+def ThrottledHttpRpcServerFactory(throttle):
+ """Create a factory to produce ThrottledHttpRpcServer for a given throttle.
+
+ Args:
+ throttle: A Throttle instance to use for the ThrottledHttpRpcServer.
+
+ Returns:
+ A factory to produce a ThrottledHttpRpcServer.
+ """
+
+ def MakeRpcServer(*args, **kwargs):
+ """Factory to produce a ThrottledHttpRpcServer.
+
+ Args:
+ args: Positional args to pass to ThrottledHttpRpcServer.
+ kwargs: Keyword args to pass to ThrottledHttpRpcServer.
+
+ Returns:
+ A ThrottledHttpRpcServer instance.
+ """
+ kwargs['account_type'] = 'HOSTED_OR_GOOGLE'
+ kwargs['save_cookies'] = True
+ rpc_server = ThrottledHttpRpcServer(throttle, *args, **kwargs)
+ return rpc_server
+ return MakeRpcServer
+
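+# Hedged wiring example (illustrative; 'my-app', '/remote_api' and auth_func
+# are placeholders): pass the factory to ConfigureRemoteApi via its
+# rpc_server_factory argument so every RPC is throttled, then install the
+# same throttle on the datastore stub.
+#
+#   throttle = DefaultThrottle(multiplier=1.0)
+#   remote_api_stub.ConfigureRemoteApi(
+#       'my-app', '/remote_api', auth_func,
+#       rpc_server_factory=ThrottledHttpRpcServerFactory(throttle))
+#   ThrottleRemoteDatastore(throttle)
+#   throttle.Register(threading.currentThread())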
+
+class Throttler(object):
+ def PrehookHandler(self, service, call, request, response):
+ handler = getattr(self, '_Prehook_' + call, None)
+ if handler:
+ handler(request, response)
+
+ def PosthookHandler(self, service, call, request, response):
+ handler = getattr(self, '_Posthook_' + call, None)
+ if handler:
+ handler(request, response)
+
+
+def SleepHandler(*throttle_names):
+ def SleepOnThrottles(self, request, response):
+ for throttle_name in throttle_names:
+ self._DatastoreThrottler__throttle.Sleep(throttle_name)
+ return SleepOnThrottles
+
+
+class DatastoreThrottler(Throttler):
+ def __init__(self, throttle):
+ Throttler.__init__(self)
+ self.__throttle = throttle
+
+ def AddCost(self, cost_proto):
+ """Add costs from the Cost protobuf."""
+ self.__throttle.AddTransfer(INDEX_MODIFICATIONS, cost_proto.index_writes())
+ self.__throttle.AddTransfer(ENTITIES_MODIFIED, cost_proto.entity_writes())
+
+
+ _Prehook_Put = SleepHandler(ENTITIES_MODIFIED, INDEX_MODIFICATIONS)
+
+ def _Posthook_Put(self, request, response):
+ self.AddCost(response.cost())
+
+
+ _Prehook_Get = SleepHandler(ENTITIES_FETCHED)
+
+ def _Posthook_Get(self, request, response):
+ self.__throttle.AddTransfer(ENTITIES_FETCHED, response.entity_size())
+
+
+ _Prehook_RunQuery = SleepHandler(ENTITIES_FETCHED)
+
+ def _Posthook_RunQuery(self, request, response):
+ if not response.keys_only():
+ self.__throttle.AddTransfer(ENTITIES_FETCHED, response.result_size())
+
+
+ _Prehook_Next = SleepHandler(ENTITIES_FETCHED)
+
+ def _Posthook_Next(self, request, response):
+ if not response.keys_only():
+ self.__throttle.AddTransfer(ENTITIES_FETCHED, response.result_size())
+
+
+ _Prehook_Delete = SleepHandler(ENTITIES_MODIFIED, INDEX_MODIFICATIONS)
+
+ def _Posthook_Delete(self, request, response):
+ self.AddCost(response.cost())
+
+
+ _Prehook_Commit = SleepHandler()
+
+ def _Posthook_Commit(self, request, response):
+ self.AddCost(response.cost())
+
+
+def ThrottleRemoteDatastore(throttle, remote_datastore_stub=None):
+ """Install the given throttle for the remote datastore stub.
+
+ Args:
+ throttle: A Throttle instance to limit datastore access rates
+    remote_datastore_stub: The datastore stub instance to throttle, for
+ testing purposes.
+ """
+ if not remote_datastore_stub:
+ remote_datastore_stub = apiproxy_stub_map.apiproxy.GetStub('datastore_v3')
+ if not isinstance(remote_datastore_stub, remote_api_stub.RemoteDatastoreStub):
+ raise remote_api_stub.ConfigurationError('remote_api is not configured.')
+ throttler = DatastoreThrottler(throttle)
+ remote_datastore_stub._PreHookHandler = throttler.PrehookHandler
+ remote_datastore_stub._PostHookHandler = throttler.PosthookHandler
diff --git a/google_appengine/google/appengine/ext/remote_api/throttle.pyc b/google_appengine/google/appengine/ext/remote_api/throttle.pyc
new file mode 100644
index 0000000..648c646
--- /dev/null
+++ b/google_appengine/google/appengine/ext/remote_api/throttle.pyc
Binary files differ
diff --git a/google_appengine/google/appengine/ext/search/__init__.py b/google_appengine/google/appengine/ext/search/__init__.py
new file mode 100755
index 0000000..a4bd998
--- /dev/null
+++ b/google_appengine/google/appengine/ext/search/__init__.py
@@ -0,0 +1,420 @@
+#!/usr/bin/env python
+#
+# Copyright 2007 Google Inc.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
+
+"""Full text indexing and search, implemented in pure python.
+
+Defines a SearchableModel subclass of db.Model that supports full text
+indexing and search, based on the datastore's existing indexes.
+
+Don't expect too much. First, there's no ranking, which is a killer drawback.
+There's also no exact phrase match, substring match, boolean operators,
+stemming, or other common full text search features. Finally, support for stop
+words (common words that are not indexed) is currently limited to English.
+
+To be indexed, entities must be created and saved as SearchableModel
+instances, e.g.:
+
+ class Article(search.SearchableModel):
+ text = db.TextProperty()
+ ...
+
+ article = Article(text=...)
+ article.save()
+
+To search the full text index, use the SearchableModel.all() method to get an
+instance of SearchableModel.Query, which subclasses db.Query. Use its search()
+method to provide a search query, in addition to any other filters or sort
+orders, e.g.:
+
+ query = article.all().search('a search query').filter(...).order(...)
+ for result in query:
+ ...
+
+The full text index is stored in a property named __searchable_text_index.
+
+Specifying multiple indexes and properties to index
+---------------------------------------------------
+
+By default, one index is created with all string properties. You can define
+multiple indexes and specify which properties should be indexed for each by
+overriding the SearchableProperties() method of SearchableModel, for example:
+
+ class Article(search.SearchableModel):
+ @classmethod
+ def SearchableProperties(cls):
+ return [['book', 'author'], ['book']]
+
+In this example, two indexes will be maintained: one that includes the 'book'
+and 'author' properties, and another for the 'book' property only. They will be
+stored in properties named __searchable_text_index_book_author and
+__searchable_text_index_book respectively. Note that the index that includes
+all properties will not be created unless added explicitly like this:
+
+ @classmethod
+ def SearchableProperties(cls):
+ return [['book', 'author'], ['book'], search.ALL_PROPERTIES]
+
+The default return value of SearchableProperties() is [search.ALL_PROPERTIES]
+(one index, all properties).
+
+To search using a custom-defined index, pass its definition
+in 'properties' parameter of 'search':
+
+ Article.all().search('Lem', properties=['book', 'author'])
+
+Note that the order of properties in the list matters.
+
+Adding indexes to index.yaml
+-----------------------------
+
+In general, if you just want to provide full text search, you *don't* need to
+add any extra indexes to your index.yaml. However, if you want to use search()
+in a query *in addition to* an ancestor, filter, or sort order, you'll need to
+create an index in index.yaml with the __searchable_text_index property. For
+example:
+
+ - kind: Article
+ properties:
+ - name: __searchable_text_index
+ - name: date
+ direction: desc
+ ...
+
+Similarly, if you created a custom index (see above), use the name of the
+property it's stored in, e.g. __searchable_text_index_book_author.
+
+Note that using SearchableModel will noticeably increase the latency of save()
+operations, since it writes an index row for each indexable word. This also
+means that the latency of save() will increase roughly with the size of the
+properties in a given entity. Caveat hacker!
+"""
+
+
+import re
+import string
+import sys
+
+from google.appengine.api import datastore
+from google.appengine.api import datastore_errors
+from google.appengine.api import datastore_types
+from google.appengine.ext import db
+from google.appengine.datastore import datastore_pb
+
+ALL_PROPERTIES = []
+
+class SearchableEntity(datastore.Entity):
+ """A subclass of datastore.Entity that supports full text indexing.
+
+ Automatically indexes all string and Text properties, using the datastore's
+ built-in per-property indices. To search, use the SearchableQuery class and
+ its Search() method.
+ """
+ _FULL_TEXT_INDEX_PROPERTY = '__searchable_text_index'
+
+ _FULL_TEXT_MIN_LENGTH = 3
+
+ _FULL_TEXT_STOP_WORDS = frozenset([
+ 'a', 'about', 'according', 'accordingly', 'affected', 'affecting', 'after',
+ 'again', 'against', 'all', 'almost', 'already', 'also', 'although',
+ 'always', 'am', 'among', 'an', 'and', 'any', 'anyone', 'apparently', 'are',
+ 'arise', 'as', 'aside', 'at', 'away', 'be', 'became', 'because', 'become',
+ 'becomes', 'been', 'before', 'being', 'between', 'both', 'briefly', 'but',
+ 'by', 'came', 'can', 'cannot', 'certain', 'certainly', 'could', 'did', 'do',
+ 'does', 'done', 'during', 'each', 'either', 'else', 'etc', 'ever', 'every',
+ 'following', 'for', 'found', 'from', 'further', 'gave', 'gets', 'give',
+ 'given', 'giving', 'gone', 'got', 'had', 'hardly', 'has', 'have', 'having',
+ 'here', 'how', 'however', 'i', 'if', 'in', 'into', 'is', 'it', 'itself',
+ 'just', 'keep', 'kept', 'knowledge', 'largely', 'like', 'made', 'mainly',
+ 'make', 'many', 'might', 'more', 'most', 'mostly', 'much', 'must', 'nearly',
+ 'necessarily', 'neither', 'next', 'no', 'none', 'nor', 'normally', 'not',
+ 'noted', 'now', 'obtain', 'obtained', 'of', 'often', 'on', 'only', 'or',
+ 'other', 'our', 'out', 'owing', 'particularly', 'past', 'perhaps', 'please',
+ 'poorly', 'possible', 'possibly', 'potentially', 'predominantly', 'present',
+ 'previously', 'primarily', 'probably', 'prompt', 'promptly', 'put',
+ 'quickly', 'quite', 'rather', 'readily', 'really', 'recently', 'regarding',
+ 'regardless', 'relatively', 'respectively', 'resulted', 'resulting',
+ 'results', 'said', 'same', 'seem', 'seen', 'several', 'shall', 'should',
+ 'show', 'showed', 'shown', 'shows', 'significantly', 'similar', 'similarly',
+ 'since', 'slightly', 'so', 'some', 'sometime', 'somewhat', 'soon',
+ 'specifically', 'state', 'states', 'strongly', 'substantially',
+ 'successfully', 'such', 'sufficiently', 'than', 'that', 'the', 'their',
+ 'theirs', 'them', 'then', 'there', 'therefore', 'these', 'they', 'this',
+ 'those', 'though', 'through', 'throughout', 'to', 'too', 'toward', 'under',
+ 'unless', 'until', 'up', 'upon', 'use', 'used', 'usefully', 'usefulness',
+ 'using', 'usually', 'various', 'very', 'was', 'we', 'were', 'what', 'when',
+ 'where', 'whether', 'which', 'while', 'who', 'whose', 'why', 'widely',
+ 'will', 'with', 'within', 'without', 'would', 'yet', 'you'])
+
+ _word_delimiter_regex = re.compile('[' + re.escape(string.punctuation) + ']')
+
+ _searchable_properties = [ALL_PROPERTIES]
+
+ def __init__(self, kind_or_entity, word_delimiter_regex=None, *args,
+ **kwargs):
+ """Constructor. May be called as a copy constructor.
+
+ If kind_or_entity is a datastore.Entity, copies it into this Entity.
+    datastore.Get() and Query() return instances of datastore.Entity, so this
+ is useful for converting them back to SearchableEntity so that they'll be
+ indexed when they're stored back in the datastore.
+
+ Otherwise, passes through the positional and keyword args to the
+ datastore.Entity constructor.
+
+ Args:
+ kind_or_entity: string or datastore.Entity
+ word_delimiter_regex: a regex matching characters that delimit words
+ """
+ self._word_delimiter_regex = word_delimiter_regex
+ if isinstance(kind_or_entity, datastore.Entity):
+ self._Entity__key = kind_or_entity._Entity__key
+ self._Entity__unindexed_properties = frozenset(kind_or_entity.unindexed_properties())
+ if isinstance(kind_or_entity, SearchableEntity):
+ if getattr(kind_or_entity, '_searchable_properties', None) is not None:
+ self._searchable_properties = kind_or_entity._searchable_properties
+ self.update(kind_or_entity)
+ else:
+ super(SearchableEntity, self).__init__(kind_or_entity, *args, **kwargs)
+
+ def _ToPb(self):
+ """Rebuilds the full text index, then delegates to the superclass.
+
+ Returns:
+ entity_pb.Entity
+ """
+ for properties_to_index in self._searchable_properties:
+ index_property_name = SearchableEntity.IndexPropertyName(properties_to_index)
+ if index_property_name in self:
+ del self[index_property_name]
+
+ if not properties_to_index:
+ properties_to_index = self.keys()
+
+ index = set()
+ for name in properties_to_index:
+ if not self.has_key(name):
+ continue
+
+ values = self[name]
+ if not isinstance(values, list):
+ values = [values]
+
+ if (isinstance(values[0], basestring) and
+ not isinstance(values[0], datastore_types.Blob)):
+ for value in values:
+ index.update(SearchableEntity._FullTextIndex(
+ value, self._word_delimiter_regex))
+
+ index_list = list(index)
+ if index_list:
+ self[index_property_name] = index_list
+
+ return super(SearchableEntity, self)._ToPb()
+
+ @classmethod
+ def _FullTextIndex(cls, text, word_delimiter_regex=None):
+ """Returns a set of keywords appropriate for full text indexing.
+
+ See SearchableQuery.Search() for details.
+
+ Args:
+ text: string
+
+ Returns:
+ set of strings
+ """
+
+ if word_delimiter_regex is None:
+ word_delimiter_regex = cls._word_delimiter_regex
+
+ if text:
+ datastore_types.ValidateString(text, 'text', max_len=sys.maxint)
+ text = word_delimiter_regex.sub(' ', text)
+ words = text.lower().split()
+
+ words = set(unicode(w) for w in words)
+
+ words -= cls._FULL_TEXT_STOP_WORDS
+ for word in list(words):
+ if len(word) < cls._FULL_TEXT_MIN_LENGTH:
+ words.remove(word)
+
+ else:
+ words = set()
+
+ return words
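+    # Illustrative walk-through: _FullTextIndex('The Quick? Brown fox-like
+    # foxes!') replaces punctuation with spaces, lowercases and splits,
+    # yielding ['the', 'quick', 'brown', 'fox', 'like', 'foxes']; 'the' and
+    # 'like' are stop words, so the result is
+    # set(['quick', 'brown', 'fox', 'foxes']). IndexPropertyName (below)
+    # maps ['book', 'author'] to '__searchable_text_index_book_author'.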
+
+ @classmethod
+ def IndexPropertyName(cls, properties):
+ """Given index definition, returns the name of the property to put it in."""
+ name = SearchableEntity._FULL_TEXT_INDEX_PROPERTY
+
+ if properties:
+ name += '_' + '_'.join(properties)
+
+ return name
+
+
+class SearchableQuery(datastore.Query):
+ """A subclass of datastore.Query that supports full text search.
+
+ Only searches over entities that were created and stored using the
+ SearchableEntity or SearchableModel classes.
+ """
+
+ def Search(self, search_query, word_delimiter_regex=None,
+ properties=ALL_PROPERTIES):
+ """Add a search query. This may be combined with filters.
+
+ Note that keywords in the search query will be silently dropped if they
+    are stop words or too short, i.e. if they would not be indexed.
+
+ Args:
+ search_query: string
+
+ Returns:
+      SearchableQuery: this query, to allow call chaining.
+ """
+ datastore_types.ValidateString(search_query, 'search query')
+ self._search_query = search_query
+ self._word_delimiter_regex = word_delimiter_regex
+ self._properties = properties
+ return self
+
+ def _ToPb(self, *args, **kwds):
+ """Adds filters for the search query, then delegates to the superclass.
+
+ Mimics Query._ToPb()'s signature. Raises BadFilterError if a filter on the
+ index property already exists.
+
+ Returns:
+ datastore_pb.Query
+ """
+
+ properties = getattr(self, "_properties", ALL_PROPERTIES)
+
+ index_property_name = SearchableEntity.IndexPropertyName(properties)
+ if index_property_name in self:
+ raise datastore_errors.BadFilterError(
+ '%s is a reserved name.' % index_property_name)
+
+ pb = super(SearchableQuery, self)._ToPb(*args, **kwds)
+
+ if hasattr(self, '_search_query'):
+ keywords = SearchableEntity._FullTextIndex(
+ self._search_query, self._word_delimiter_regex)
+ for keyword in keywords:
+ filter = pb.add_filter()
+ filter.set_op(datastore_pb.Query_Filter.EQUAL)
+ prop = filter.add_property()
+ prop.set_name(index_property_name)
+ prop.set_multiple(len(keywords) > 1)
+ prop.mutable_value().set_stringvalue(unicode(keyword).encode('utf-8'))
+
+ return pb
+
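+# Illustrative consequence of the _ToPb() translation above (names are
+# examples): a query such as
+#
+#   SearchableQuery('Article').Search('quick brown').Get(10)
+#
+# runs as an ordinary datastore query with one EQUAL filter per keyword on
+# the hidden index property, so it matches only entities whose index
+# contains both 'quick' and 'brown'.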
+
+class SearchableMultiQuery(datastore.MultiQuery):
+ """A multiquery that supports Search() by searching subqueries."""
+
+ def Search(self, *args, **kwargs):
+ """Add a search query, by trying to add it to all subqueries.
+
+ Args:
+ args: Passed to Search on each subquery.
+ kwargs: Passed to Search on each subquery.
+
+ Returns:
+ self for consistency with SearchableQuery.
+ """
+ for q in self:
+ q.Search(*args, **kwargs)
+ return self
+
+
+class SearchableModel(db.Model):
+ """A subclass of db.Model that supports full text search and indexing.
+
+ Automatically indexes all string-based properties. To search, use the all()
+ method to get a SearchableModel.Query, then use its search() method.
+
+ Override SearchableProperties() to define properties to index and/or multiple
+ indexes (see the file's comment).
+ """
+
+ @classmethod
+ def SearchableProperties(cls):
+ return [ALL_PROPERTIES]
+
+ class Query(db.Query):
+ """A subclass of db.Query that supports full text search."""
+ _search_query = None
+ _properties = None
+
+ def search(self, search_query, properties=ALL_PROPERTIES):
+ """Adds a full text search to this query.
+
+      Args:
+        search_query: a string containing the full text search query.
+        properties: the index definition to search, as listed in
+          SearchableProperties(). Defaults to ALL_PROPERTIES.
+
+ Returns:
+ self
+ """
+ self._search_query = search_query
+ self._properties = properties
+
+      if self._properties not in getattr(self, '_searchable_properties',
+                                         [ALL_PROPERTIES]):
+        raise datastore_errors.BadFilterError(
+            '%s does not have a corresponding index. Please add it to '
+            'the list returned by SearchableProperties().' % self._properties)
+
+ return self
+
+ def _get_query(self):
+ """Wraps db.Query._get_query() and injects SearchableQuery."""
+ query = db.Query._get_query(self,
+ _query_class=SearchableQuery,
+ _multi_query_class=SearchableMultiQuery)
+ if self._search_query:
+ query.Search(self._search_query, properties=self._properties)
+ return query
+
+ def _populate_internal_entity(self):
+ """Wraps db.Model._populate_internal_entity() and injects
+ SearchableEntity."""
+ entity = db.Model._populate_internal_entity(self,
+ _entity_class=SearchableEntity)
+ entity._searchable_properties = self.SearchableProperties()
+ return entity
+
+ @classmethod
+ def from_entity(cls, entity):
+ """Wraps db.Model.from_entity() and injects SearchableEntity."""
+ if not isinstance(entity, SearchableEntity):
+ entity = SearchableEntity(entity)
+ return super(SearchableModel, cls).from_entity(entity)
+
+ @classmethod
+ def all(cls):
+ """Returns a SearchableModel.Query for this kind."""
+ query = SearchableModel.Query(cls)
+ query._searchable_properties = cls.SearchableProperties()
+ return query
diff --git a/google_appengine/google/appengine/ext/webapp/__init__.py b/google_appengine/google/appengine/ext/webapp/__init__.py
new file mode 100755
index 0000000..446475a
--- /dev/null
+++ b/google_appengine/google/appengine/ext/webapp/__init__.py
@@ -0,0 +1,580 @@
+#!/usr/bin/env python
+#
+# Copyright 2007 Google Inc.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
+
+"""An extremely simple WSGI web application framework.
+
+This module exports three primary classes: Request, Response, and
+RequestHandler. You implement a web application by subclassing RequestHandler.
+As WSGI requests come in, they are passed to instances of your RequestHandlers.
+The RequestHandler class provides access to the easy-to-use Request and
+Response objects so you can interpret the request and write the response with
+no knowledge of the esoteric WSGI semantics. Here is a simple example:
+
+ from google.appengine.ext import webapp
+ import wsgiref.simple_server
+
+ class MainPage(webapp.RequestHandler):
+ def get(self):
+ self.response.out.write(
+ '<html><body><form action="/hello" method="post">'
+ 'Name: <input name="name" type="text" size="20"> '
+ '<input type="submit" value="Say Hello"></form></body></html>')
+
+ class HelloPage(webapp.RequestHandler):
+ def post(self):
+ self.response.headers['Content-Type'] = 'text/plain'
+ self.response.out.write('Hello, %s' % self.request.get('name'))
+
+ application = webapp.WSGIApplication([
+ ('/', MainPage),
+ ('/hello', HelloPage)
+ ], debug=True)
+
+ server = wsgiref.simple_server.make_server('', 8080, application)
+ print 'Serving on port 8080...'
+ server.serve_forever()
+
+The WSGIApplication class maps URI regular expressions to your RequestHandler
+classes. It is a WSGI-compatible application object, so you can use it in
+conjunction with wsgiref to make your web application into, e.g., a CGI
+script or a simple HTTP server, as in the example above.
+
+The framework does not support streaming output. All output from a response
+is stored in memory before it is written.
+"""
+
+
+import cgi
+import StringIO
+import logging
+import re
+import sys
+import traceback
+import urlparse
+import webob
+import wsgiref.headers
+import wsgiref.util
+
+RE_FIND_GROUPS = re.compile(r'\(.*?\)')
+_CHARSET_RE = re.compile(r';\s*charset=([^;\s]*)', re.I)
+
+class Error(Exception):
+ """Base of all exceptions in the webapp module."""
+ pass
+
+
+class NoUrlFoundError(Error):
+ """Thrown when RequestHandler.get_url() fails."""
+ pass
+
+
+class Request(webob.Request):
+ """Abstraction for an HTTP request.
+
+ Properties:
+ uri: the complete URI requested by the user
+ scheme: 'http' or 'https'
+ host: the host, including the port
+ path: the path up to the ';' or '?' in the URL
+ parameters: the part of the URL between the ';' and the '?', if any
+ query: the part of the URL after the '?'
+
+ You can access parsed query and POST values with the get() method; do not
+ parse the query string yourself.
+ """
+
+ request_body_tempfile_limit = 0
+
+ uri = property(lambda self: self.url)
+ query = property(lambda self: self.query_string)
+
+ def __init__(self, environ):
+ """Constructs a Request object from a WSGI environment.
+
+ If the charset isn't specified in the Content-Type header, defaults
+ to UTF-8.
+
+ Args:
+ environ: A WSGI-compliant environment dictionary.
+ """
+ match = _CHARSET_RE.search(environ.get('CONTENT_TYPE', ''))
+ if match:
+ charset = match.group(1).lower()
+ else:
+ charset = 'utf-8'
+
+    webob.Request.__init__(self, environ, charset=charset,
+                           unicode_errors='ignore', decode_param_names=True)
+
+ def get(self, argument_name, default_value='', allow_multiple=False):
+ """Returns the query or POST argument with the given name.
+
+ We parse the query string and POST payload lazily, so this will be a
+ slower operation on the first call.
+
+ Args:
+ argument_name: the name of the query or POST argument
+ default_value: the value to return if the given argument is not present
+ allow_multiple: return a list of values with the given name (deprecated)
+
+ Returns:
+      If allow_multiple is False (the default), we return the first value
+      with the given name in the request. If it is True, we always
+      return a list.
+ """
+ param_value = self.get_all(argument_name)
+ if allow_multiple:
+ return param_value
+ else:
+ if len(param_value) > 0:
+ return param_value[0]
+ else:
+ return default_value
+
+ def get_all(self, argument_name):
+ """Returns a list of query or POST arguments with the given name.
+
+ We parse the query string and POST payload lazily, so this will be a
+ slower operation on the first call.
+
+ Args:
+ argument_name: the name of the query or POST argument
+
+ Returns:
+ A (possibly empty) list of values.
+ """
+ if self.charset:
+ argument_name = argument_name.encode(self.charset)
+
+ param_value = self.params.getall(argument_name)
+
+ for i in xrange(len(param_value)):
+ if isinstance(param_value[i], cgi.FieldStorage):
+ param_value[i] = param_value[i].value
+
+ return param_value
+
+ def arguments(self):
+ """Returns a list of the arguments provided in the query and/or POST.
+
+ The return value is a list of strings.
+ """
+ return list(set(self.params.keys()))
+
+ def get_range(self, name, min_value=None, max_value=None, default=0):
+ """Parses the given int argument, limiting it to the given range.
+
+ Args:
+ name: the name of the argument
+ min_value: the minimum int value of the argument (if any)
+ max_value: the maximum int value of the argument (if any)
+ default: the default value of the argument if it is not given
+
+ Returns:
+ An int within the given range for the argument
+ """
+ try:
+ value = int(self.get(name, default))
+ except ValueError:
+ value = default
+    if max_value is not None:
+      value = min(value, max_value)
+    if min_value is not None:
+      value = max(value, min_value)
+ return value
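+    # Worked example: for /page?count=500,
+    # self.request.get_range('count', min_value=1, max_value=100, default=20)
+    # parses 500 and clamps it to min(500, 100) == 100; a missing or
+    # non-numeric 'count' falls back to the default of 20.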
+
+
+class Response(object):
+ """Abstraction for an HTTP response.
+
+ Properties:
+ out: file pointer for the output stream
+ headers: wsgiref.headers.Headers instance representing the output headers
+ """
+ def __init__(self):
+ """Constructs a response with the default settings."""
+ self.out = StringIO.StringIO()
+ self.__wsgi_headers = []
+ self.headers = wsgiref.headers.Headers(self.__wsgi_headers)
+ self.headers['Content-Type'] = 'text/html; charset=utf-8'
+ self.headers['Cache-Control'] = 'no-cache'
+ self.set_status(200)
+
+ def set_status(self, code, message=None):
+ """Sets the HTTP status code of this response.
+
+    Args:
+      code: the HTTP status code to set
+      message: the HTTP status string to use
+
+ If no status string is given, we use the default from the HTTP/1.1
+ specification.
+ """
+ if not message:
+ message = Response.http_status_message(code)
+ self.__status = (code, message)
+
+ def clear(self):
+ """Clears all data written to the output stream so that it is empty."""
+ self.out.seek(0)
+ self.out.truncate(0)
+
+ def wsgi_write(self, start_response):
+ """Writes this response using WSGI semantics with the given WSGI function.
+
+ Args:
+ start_response: the WSGI-compatible start_response function
+ """
+ body = self.out.getvalue()
+ if isinstance(body, unicode):
+ body = body.encode('utf-8')
+ elif self.headers.get('Content-Type', '').endswith('; charset=utf-8'):
+ try:
+ body.decode('utf-8')
+ except UnicodeError, e:
+ logging.warning('Response written is not UTF-8: %s', e)
+
+ if (self.headers.get('Cache-Control') == 'no-cache' and
+ not self.headers.get('Expires')):
+ self.headers['Expires'] = 'Fri, 01 Jan 1990 00:00:00 GMT'
+ self.headers['Content-Length'] = str(len(body))
+ write = start_response('%d %s' % self.__status, self.__wsgi_headers)
+ write(body)
+ self.out.close()
+
+ def http_status_message(code):
+ """Returns the default HTTP status message for the given code.
+
+ Args:
+ code: the HTTP code for which we want a message
+ """
+ if not Response.__HTTP_STATUS_MESSAGES.has_key(code):
+ raise Error('Invalid HTTP status code: %d' % code)
+ return Response.__HTTP_STATUS_MESSAGES[code]
+ http_status_message = staticmethod(http_status_message)
+
+ __HTTP_STATUS_MESSAGES = {
+ 100: 'Continue',
+ 101: 'Switching Protocols',
+ 200: 'OK',
+ 201: 'Created',
+ 202: 'Accepted',
+ 203: 'Non-Authoritative Information',
+ 204: 'No Content',
+ 205: 'Reset Content',
+ 206: 'Partial Content',
+ 300: 'Multiple Choices',
+ 301: 'Moved Permanently',
+ 302: 'Moved Temporarily',
+ 303: 'See Other',
+ 304: 'Not Modified',
+ 305: 'Use Proxy',
+ 306: 'Unused',
+ 307: 'Temporary Redirect',
+ 400: 'Bad Request',
+ 401: 'Unauthorized',
+ 402: 'Payment Required',
+ 403: 'Forbidden',
+ 404: 'Not Found',
+ 405: 'Method Not Allowed',
+ 406: 'Not Acceptable',
+ 407: 'Proxy Authentication Required',
+ 408: 'Request Time-out',
+ 409: 'Conflict',
+ 410: 'Gone',
+ 411: 'Length Required',
+ 412: 'Precondition Failed',
+ 413: 'Request Entity Too Large',
+ 414: 'Request-URI Too Large',
+ 415: 'Unsupported Media Type',
+ 416: 'Requested Range Not Satisfiable',
+ 417: 'Expectation Failed',
+ 500: 'Internal Server Error',
+ 501: 'Not Implemented',
+ 502: 'Bad Gateway',
+ 503: 'Service Unavailable',
+ 504: 'Gateway Time-out',
+ 505: 'HTTP Version not supported'
+ }
+
+
+class RequestHandler(object):
+ """Our base HTTP request handler. Clients should subclass this class.
+
+ Subclasses should override get(), post(), head(), options(), etc to handle
+ different HTTP methods.
+ """
+ def initialize(self, request, response):
+ """Initializes this request handler with the given Request and Response."""
+ self.request = request
+ self.response = response
+
+ def get(self, *args):
+ """Handler method for GET requests."""
+ self.error(405)
+
+ def post(self, *args):
+ """Handler method for POST requests."""
+ self.error(405)
+
+ def head(self, *args):
+ """Handler method for HEAD requests."""
+ self.error(405)
+
+ def options(self, *args):
+ """Handler method for OPTIONS requests."""
+ self.error(405)
+
+ def put(self, *args):
+ """Handler method for PUT requests."""
+ self.error(405)
+
+ def delete(self, *args):
+ """Handler method for DELETE requests."""
+ self.error(405)
+
+ def trace(self, *args):
+ """Handler method for TRACE requests."""
+ self.error(405)
+
+ def error(self, code):
+ """Clears the response output stream and sets the given HTTP error code.
+
+ Args:
+ code: the HTTP status error code (e.g., 501)
+ """
+ self.response.set_status(code)
+ self.response.clear()
+
+ def redirect(self, uri, permanent=False):
+ """Issues an HTTP redirect to the given relative URL.
+
+ Args:
+ uri: a relative or absolute URI (e.g., '../flowers.html')
+ permanent: if true, we use a 301 redirect instead of a 302 redirect
+ """
+ if permanent:
+ self.response.set_status(301)
+ else:
+ self.response.set_status(302)
+ absolute_url = urlparse.urljoin(self.request.uri, uri)
+ self.response.headers['Location'] = str(absolute_url)
+ self.response.clear()
+
+ def handle_exception(self, exception, debug_mode):
+ """Called if this handler throws an exception during execution.
+
+ The default behavior is to call self.error(500) and print a stack trace
+ if debug_mode is True.
+
+ Args:
+ exception: the exception that was thrown
+ debug_mode: True if the web application is running in debug mode
+ """
+ self.error(500)
+ logging.exception(exception)
+ if debug_mode:
+ lines = ''.join(traceback.format_exception(*sys.exc_info()))
+ self.response.clear()
+ self.response.out.write('<pre>%s</pre>' % (cgi.escape(lines, quote=True)))
+
+ @classmethod
+ def get_url(cls, *args, **kargs):
+ """Returns the url for the given handler.
+
+ The default implementation uses the patterns passed to the active
+ WSGIApplication and the django urlresolvers module to create a url.
+ However, it is different from urlresolvers.reverse() in the following ways:
+ - It does not try to resolve handlers via module loading
+ - It does not support named arguments
+    - It performs some post-processing on the url to remove some regex
+ operators that urlresolvers.reverse_helper() seems to miss.
+ - It will try to fill in the left-most missing arguments with the args
+ used in the active request.
+
+ Args:
+ args: Parameters for the url pattern's groups.
+      kargs: Optionally contains 'implicit_args' that can either be a boolean
+        or a tuple. When it is True, it will use the arguments to the
+        active request as implicit arguments. When it is False (default),
+        it will not use any implicit arguments. When it is a tuple, it
+        will use the tuple as the implicit arguments, filling in the
+        left-most args if some are missing from args.
+
+ Returns:
+ The url for this handler/args combination.
+
+ Raises:
+ NoUrlFoundError: No url pattern for this handler has the same
+ number of args that were passed in.
+ """
+
+ app = WSGIApplication.active_instance
+ pattern_map = app._pattern_map
+
+ implicit_args = kargs.get('implicit_args', ())
+ if implicit_args == True:
+ implicit_args = app.current_request_args
+
+ min_params = len(args)
+
+ urlresolvers = None
+
+ for pattern_tuple in pattern_map.get(cls, ()):
+ num_params_in_pattern = pattern_tuple[1]
+ if num_params_in_pattern < min_params:
+ continue
+
+ if urlresolvers is None:
+ from django.core import urlresolvers
+
+ try:
+ num_implicit_args = max(0, num_params_in_pattern - len(args))
+ merged_args = implicit_args[:num_implicit_args] + args
+ url = urlresolvers.reverse_helper(pattern_tuple[0], *merged_args)
+ url = url.replace('\\', '')
+ url = url.replace('?', '')
+ return url
+ except urlresolvers.NoReverseMatch:
+ continue
+
+ logging.warning('get_url failed for Handler name: %r, Args: %r',
+ cls.__name__, args)
+ raise NoUrlFoundError
+
+
+class WSGIApplication(object):
+ """Wraps a set of webapp RequestHandlers in a WSGI-compatible application.
+
+ To use this class, pass a list of (URI regular expression, RequestHandler)
+ pairs to the constructor, and pass the class instance to a WSGI handler.
+ See the example in the module comments for details.
+
+ The URL mapping is first-match based on the list ordering.
+ """
+
+ REQUEST_CLASS = Request
+ RESPONSE_CLASS = Response
+
+ def __init__(self, url_mapping, debug=False):
+ """Initializes this application with the given URL mapping.
+
+ Args:
+ url_mapping: list of (URI, RequestHandler) pairs (e.g., [('/', ReqHan)])
+ debug: if true, we send Python stack traces to the browser on errors
+ """
+ self._init_url_mappings(url_mapping)
+ self.__debug = debug
+ WSGIApplication.active_instance = self
+ self.current_request_args = ()
+
+ def __call__(self, environ, start_response):
+ """Called by WSGI when a request comes in."""
+ request = self.REQUEST_CLASS(environ)
+ response = self.RESPONSE_CLASS()
+
+ WSGIApplication.active_instance = self
+
+ handler = None
+ groups = ()
+ for regexp, handler_class in self._url_mapping:
+ match = regexp.match(request.path)
+ if match:
+ handler = handler_class()
+ handler.initialize(request, response)
+ groups = match.groups()
+ break
+
+ self.current_request_args = groups
+
+ if handler:
+ try:
+ method = environ['REQUEST_METHOD']
+ if method == 'GET':
+ handler.get(*groups)
+ elif method == 'POST':
+ handler.post(*groups)
+ elif method == 'HEAD':
+ handler.head(*groups)
+ elif method == 'OPTIONS':
+ handler.options(*groups)
+ elif method == 'PUT':
+ handler.put(*groups)
+ elif method == 'DELETE':
+ handler.delete(*groups)
+ elif method == 'TRACE':
+ handler.trace(*groups)
+ else:
+ handler.error(501)
+ except Exception, e:
+ handler.handle_exception(e, self.__debug)
+ else:
+ response.set_status(404)
+
+ response.wsgi_write(start_response)
+ return ['']
+
+ def _init_url_mappings(self, handler_tuples):
+ """Initializes the maps needed for mapping urls to handlers and handlers
+ to urls.
+
+ Args:
+ handler_tuples: list of (URI, RequestHandler) pairs.
+ """
+
+ handler_map = {}
+ pattern_map = {}
+ url_mapping = []
+
+ for regexp, handler in handler_tuples:
+
+ handler_map[handler.__name__] = handler
+
+ if not regexp.startswith('^'):
+ regexp = '^' + regexp
+ if not regexp.endswith('$'):
+ regexp += '$'
+
+ compiled = re.compile(regexp)
+ url_mapping.append((compiled, handler))
+
+ num_groups = len(RE_FIND_GROUPS.findall(regexp))
+ handler_patterns = pattern_map.setdefault(handler, [])
+ handler_patterns.append((compiled, num_groups))
+
+ self._handler_map = handler_map
+ self._pattern_map = pattern_map
+ self._url_mapping = url_mapping
+
+ def get_registered_handler_by_name(self, handler_name):
+ """Returns the handler given the handler's name.
+
+ This uses the application's url mapping.
+
+ Args:
+ handler_name: The __name__ of a handler to return.
+
+ Returns:
+ The handler with the given name.
+
+ Raises:
+ KeyError: If the handler name is not found in the parent application.
+ """
+ try:
+ return self._handler_map[handler_name]
+    except KeyError:
+ logging.error('Handler does not map to any urls: %s', handler_name)
+ raise
diff --git a/google_appengine/google/appengine/ext/webapp/__init__.pyc b/google_appengine/google/appengine/ext/webapp/__init__.pyc
new file mode 100644
index 0000000..e3033aa
--- /dev/null
+++ b/google_appengine/google/appengine/ext/webapp/__init__.pyc
Binary files differ
diff --git a/google_appengine/google/appengine/ext/webapp/mail_handlers.py b/google_appengine/google/appengine/ext/webapp/mail_handlers.py
new file mode 100755
index 0000000..51077bf
--- /dev/null
+++ b/google_appengine/google/appengine/ext/webapp/mail_handlers.py
@@ -0,0 +1,78 @@
+#!/usr/bin/env python
+#
+# Copyright 2007 Google Inc.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
+
+"""Handler library for inbound Mail API.
+
+Contains handlers to help with receiving mail.
+
+ InboundMailHandler: Has helper method for easily setting up
+    email receivers.
+"""
+
+
+
+
+
+from google.appengine.api import mail
+from google.appengine.ext import webapp
+
+
+MAIL_HANDLER_URL_PATTERN = '/_ah/mail/.+'
+
+
+class InboundMailHandler(webapp.RequestHandler):
+ """Base class for inbound mail handlers.
+
+ Example:
+
+ # Sub-class overrides receive method.
+ class HelloReceiver(InboundMailHandler):
+
+ def receive(self, mail_message):
+ logging.info('Received greeting from %s: %s' % (mail_message.sender,
+ mail_message.body))
+
+
+    # Map mail handler to application.
+ application = webapp.WSGIApplication([
+ HelloReceiver.mapping(),
+ ])
+ """
+
+ def post(self):
+ """Transforms body to email request."""
+ self.receive(mail.InboundEmailMessage(self.request.body))
+
+ def receive(self, mail_message):
+ """Receive an email message.
+
+ Override this method to implement an email receiver.
+
+ Args:
+ mail_message: InboundEmailMessage instance representing received
+ email.
+ """
+ pass
+
+ @classmethod
+ def mapping(cls):
+ """Convenience method to map handler class to application.
+
+ Returns:
+ Mapping from email URL to inbound mail handler class.
+ """
+ return MAIL_HANDLER_URL_PATTERN, cls
diff --git a/google_appengine/google/appengine/ext/webapp/template.py b/google_appengine/google/appengine/ext/webapp/template.py
new file mode 100755
index 0000000..2cd8fd3
--- /dev/null
+++ b/google_appengine/google/appengine/ext/webapp/template.py
@@ -0,0 +1,219 @@
+#!/usr/bin/env python
+#
+# Copyright 2007 Google Inc.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
+
+"""A simple wrapper for Django templates.
+
+The main purpose of this module is to hide all of the package import pain
+you normally have to go through to get Django to work. We expose the Django
+Template and Context classes from this module, handling the import nonsense
+on behalf of clients.
+
+Typical usage:
+
+ from google.appengine.ext.webapp import template
+ print template.render('templates/index.html', {'foo': 'bar'})
+
+Django uses a global setting for the directory in which it looks for templates.
+This is not natural in the context of the webapp module, so our load method
+takes in a complete template path, and we set these settings on the fly
+automatically. Because we have to set and use a global setting on every
+method call, this module is not thread safe, though that is not an issue
+for App Engine applications, which handle each request in a single thread.
+
+Django template documentation is available at:
+http://www.djangoproject.com/documentation/templates/
+"""
+
+
+
+
+
+import md5
+import os
+
+try:
+ from django import v0_96
+except ImportError:
+ pass
+import django
+
+import django.conf
+try:
+ django.conf.settings.configure(
+ DEBUG=False,
+ TEMPLATE_DEBUG=False,
+ TEMPLATE_LOADERS=(
+ 'django.template.loaders.filesystem.load_template_source',
+ ),
+ )
+except (EnvironmentError, RuntimeError):
+ pass
+import django.template
+import django.template.loader
+
+from google.appengine.ext import webapp
+
+def render(template_path, template_dict, debug=False):
+ """Renders the template at the given path with the given dict of values.
+
+ Example usage:
+ render("templates/index.html", {"name": "Bret", "values": [1, 2, 3]})
+
+ Args:
+ template_path: path to a Django template
+    template_dict: dictionary of values to apply to the template
+    debug: if true, reload the template from disk on every call instead of
+      using the template cache
+ """
+ t = load(template_path, debug)
+ return t.render(Context(template_dict))
+
+
+template_cache = {}
+def load(path, debug=False):
+ """Loads the Django template from the given path.
+
+ It is better to use this function than to construct a Template using the
+ class below because Django requires you to load the template with a method
+ if you want imports and extends to work in the template.
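+
+  A brief illustrative sketch:
+
+    t = load('templates/index.html')
+    html = t.render(Context({'foo': 'bar'}))
+
+  Args:
+    path: Path to a Django template file.
+    debug: If true, skip the template cache and reload from disk.
+
+  Returns:
+    The loaded Template object, with its render method wrapped to support
+    the {% url %} replacement below.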
+ """
+ abspath = os.path.abspath(path)
+
+ if not debug:
+ template = template_cache.get(abspath, None)
+ else:
+ template = None
+
+ if not template:
+ directory, file_name = os.path.split(abspath)
+ new_settings = {
+ 'TEMPLATE_DIRS': (directory,),
+ 'TEMPLATE_DEBUG': debug,
+ 'DEBUG': debug,
+ }
+ old_settings = _swap_settings(new_settings)
+ try:
+ template = django.template.loader.get_template(file_name)
+ finally:
+ _swap_settings(old_settings)
+
+ if not debug:
+ template_cache[abspath] = template
+
+ def wrap_render(context, orig_render=template.render):
+ URLNode = django.template.defaulttags.URLNode
+ save_urlnode_render = URLNode.render
+ old_settings = _swap_settings(new_settings)
+ try:
+ URLNode.render = _urlnode_render_replacement
+ return orig_render(context)
+ finally:
+ _swap_settings(old_settings)
+ URLNode.render = save_urlnode_render
+
+ template.render = wrap_render
+
+ return template
+
+
+def _swap_settings(new):
+ """Swap in selected Django settings, returning old settings.
+
+ Example:
+ save = _swap_settings({'X': 1, 'Y': 2})
+ try:
+ ...new settings for X and Y are in effect here...
+ finally:
+ _swap_settings(save)
+
+ Args:
+ new: A dict containing settings to change; the keys should
+      be setting names and the values setting values.
+
+ Returns:
+    Another dict structured the same way as the argument containing
+ the original settings. Original settings that were not set at all
+ are returned as None, and will be restored as None by the
+ 'finally' clause in the example above. This shouldn't matter; we
+ can't delete settings that are given as None, since None is also a
+ legitimate value for some settings. Creating a separate flag value
+ for 'unset' settings seems overkill as there is no known use case.
+ """
+ settings = django.conf.settings
+ old = {}
+ for key, value in new.iteritems():
+ old[key] = getattr(settings, key, None)
+ setattr(settings, key, value)
+ return old
+
+
+def create_template_register():
+ """Used to extend the Django template library with custom filters and tags.
+
+ To extend the template library with a custom filter module, create a Python
+ module, and create a module-level variable named "register", and register
+ all custom filters to it as described at
+ http://www.djangoproject.com/documentation/templates_python/
+ #extending-the-template-system:
+
+ templatefilters.py
+ ==================
+ register = webapp.template.create_template_register()
+
+ def cut(value, arg):
+ return value.replace(arg, '')
+ register.filter(cut)
+
+ Then, register the custom template module with the register_template_library
+ function below in your application module:
+
+ myapp.py
+ ========
+ webapp.template.register_template_library('templatefilters')
+ """
+ return django.template.Library()
+
+
+def register_template_library(package_name):
+ """Registers a template extension module to make it usable in templates.
+
+ See the documentation for create_template_register for more information."""
+ if not django.template.libraries.get(package_name, None):
+ django.template.add_to_builtins(package_name)
+
+
+Template = django.template.Template
+Context = django.template.Context
+
+
+def _urlnode_render_replacement(self, context):
+ """Replacement for django's {% url %} block.
+
+ This version uses WSGIApplication's url mapping to create urls.
+
+ Examples:
+
+ <a href="{% url MyPageHandler "overview" %}">
+ {% url MyPageHandler implicit_args=False %}
+ {% url MyPageHandler "calendar" %}
+ {% url MyPageHandler "jsmith","calendar" %}
+ """
+ args = [arg.resolve(context) for arg in self.args]
+ try:
+ app = webapp.WSGIApplication.active_instance
+ handler = app.get_registered_handler_by_name(self.view_name)
+ return handler.get_url(implicit_args=True, *args)
+ except webapp.NoUrlFoundError:
+ return ''
diff --git a/google_appengine/google/appengine/ext/webapp/template.pyc b/google_appengine/google/appengine/ext/webapp/template.pyc
new file mode 100644
index 0000000..22fd997
--- /dev/null
+++ b/google_appengine/google/appengine/ext/webapp/template.pyc
Binary files differ
diff --git a/google_appengine/google/appengine/ext/webapp/util.py b/google_appengine/google/appengine/ext/webapp/util.py
new file mode 100755
index 0000000..80adf75
--- /dev/null
+++ b/google_appengine/google/appengine/ext/webapp/util.py
@@ -0,0 +1,90 @@
+#!/usr/bin/env python
+#
+# Copyright 2007 Google Inc.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
+
+"""Convenience functions for the Webapp framework."""
+
+
+
+
+
+__all__ = ["login_required", "run_wsgi_app"]
+
+import os
+import sys
+import wsgiref.util
+
+from google.appengine.api import users
+from google.appengine.ext import webapp
+
+
+def login_required(handler_method):
+ """A decorator to require that a user be logged in to access a handler.
+
+ To use it, decorate your get() method like this:
+
+ @login_required
+ def get(self):
+      user = users.get_current_user()
+ self.response.out.write('Hello, ' + user.nickname())
+
+ We will redirect to a login page if the user is not logged in. We always
+ redirect to the request URI, and Google Accounts only redirects back as a GET
+ request, so this should not be used for POSTs.
+ """
+ def check_login(self, *args):
+ if self.request.method != 'GET':
+ raise webapp.Error('The check_login decorator can only be used for GET '
+ 'requests')
+ user = users.get_current_user()
+ if not user:
+ self.redirect(users.create_login_url(self.request.uri))
+ return
+ else:
+ handler_method(self, *args)
+ return check_login
+
+
+def run_wsgi_app(application):
+ """Runs your WSGI-compliant application object in a CGI environment.
+
+ Compared to wsgiref.handlers.CGIHandler().run(application), this
+ function takes some shortcuts. Those are possible because the
+ app server makes stronger promises than the CGI standard.
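+
+  A typical usage sketch (MainPage is a hypothetical RequestHandler):
+
+    def main():
+      run_wsgi_app(webapp.WSGIApplication([('/', MainPage)]))
+
+    if __name__ == '__main__':
+      main()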
+ """
+ env = dict(os.environ)
+ env["wsgi.input"] = sys.stdin
+ env["wsgi.errors"] = sys.stderr
+ env["wsgi.version"] = (1, 0)
+ env["wsgi.run_once"] = True
+ env["wsgi.url_scheme"] = wsgiref.util.guess_scheme(env)
+ env["wsgi.multithread"] = False
+ env["wsgi.multiprocess"] = False
+ result = application(env, _start_response)
+ if result is not None:
+ for data in result:
+ sys.stdout.write(data)
+
+
+def _start_response(status, headers, exc_info=None):
+ """A start_response() callable as specified by PEP 333"""
+ if exc_info is not None:
+ raise exc_info[0], exc_info[1], exc_info[2]
+ print "Status: %s" % status
+ for name, val in headers:
+ print "%s: %s" % (name, val)
+ print
+ return sys.stdout.write
diff --git a/google_appengine/google/appengine/ext/webapp/util.pyc b/google_appengine/google/appengine/ext/webapp/util.pyc
new file mode 100644
index 0000000..8e617dc
--- /dev/null
+++ b/google_appengine/google/appengine/ext/webapp/util.pyc
Binary files differ
diff --git a/google_appengine/google/appengine/ext/webapp/xmpp_handlers.py b/google_appengine/google/appengine/ext/webapp/xmpp_handlers.py
new file mode 100755
index 0000000..99840bf
--- /dev/null
+++ b/google_appengine/google/appengine/ext/webapp/xmpp_handlers.py
@@ -0,0 +1,119 @@
+#!/usr/bin/env python
+#
+# Copyright 2007 Google Inc.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
+
+"""XMPP webapp handler classes.
+
+This module provides handler classes for XMPP bots, including both basic
+messaging functionality and a command handler for commands such as "/foo bar"
+messaging functionality and a command handler for commands such as "/foo bar".
+
+
+
+import logging
+from google.appengine.api import xmpp
+from google.appengine.ext import webapp
+
+
+class BaseHandler(webapp.RequestHandler):
+ """A webapp baseclass for XMPP handlers.
+
+ Implements a straightforward message delivery pattern. When a message is
+ received, message_received is called with a Message object that encapsulates
+ the relevant details. Users can reply using the standard XMPP API, or the
+ convenient .reply() method on the Message object.
+ """
+
+ def message_received(self, message):
+ """Called when a message is sent to the XMPP bot.
+
+ Args:
+ message: Message: The message that was sent by the user.
+ """
+ raise NotImplementedError()
+
+ def handle_exception(self, exception, debug_mode):
+ """Called if this handler throws an exception during execution.
+
+ Args:
+ exception: the exception that was thrown
+ debug_mode: True if the web application is running in debug mode
+ """
+ super(BaseHandler, self).handle_exception(exception, debug_mode)
+    # xmpp_message may be unset if an exception was raised before post()
+    # assigned it.
+    if getattr(self, 'xmpp_message', None):
+ self.xmpp_message.reply('Oops. Something went wrong.')
+
+ def post(self):
+ try:
+ self.xmpp_message = xmpp.Message(self.request.POST)
+ except xmpp.InvalidMessageError, e:
+ logging.error("Invalid XMPP request: Missing required field %s", e[0])
+ return
+ self.message_received(self.xmpp_message)
+
+
+class CommandHandlerMixin(object):
+ """A command handler for XMPP bots.
+
+ Implements a command handler pattern. XMPP messages are processed by calling
+ message_received. Message objects handled by this class are annotated with
+ 'command' and 'arg' fields. On receipt of a message starting with a forward
+  or backward slash, the handler calls a method named after the command; e.g.,
+  if the user sends "/foo bar", the handler will call foo_command(message).
+  If no handler method matches, unhandled_command is called. The default
+  behavior of unhandled_command is to send the message "Unknown command" back
+  to the sender.
+
+ If the user sends a message not prefixed with a slash,
+ text_message(message) is called.
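+
+  An illustrative subclass (hypothetical, not part of this module):
+
+    class ShoutHandler(CommandHandler):
+
+      def shout_command(self, message):
+        # Invoked for messages starting with '/shout'.
+        message.reply((message.arg or '').upper())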
+ """
+
+ def unhandled_command(self, message):
+ """Called when an unknown command is sent to the XMPP bot.
+
+ Args:
+ message: Message: The message that was sent by the user.
+ """
+ message.reply('Unknown command')
+
+ def text_message(self, message):
+ """Called when a message not prefixed by a /command is sent to the XMPP bot.
+
+ Args:
+ message: Message: The message that was sent by the user.
+ """
+ pass
+
+ def message_received(self, message):
+ """Called when a message is sent to the XMPP bot.
+
+ Args:
+ message: Message: The message that was sent by the user.
+ """
+ if message.command:
+ handler_name = '%s_command' % (message.command,)
+ handler = getattr(self, handler_name, None)
+ if handler:
+ handler(message)
+ else:
+ self.unhandled_command(message)
+ else:
+ self.text_message(message)
+
+
+class CommandHandler(CommandHandlerMixin, BaseHandler):
+ """A webapp implementation of CommandHandlerMixin."""
+ pass
diff --git a/google_appengine/google/appengine/ext/zipserve/__init__.py b/google_appengine/google/appengine/ext/zipserve/__init__.py
new file mode 100755
index 0000000..2da833e
--- /dev/null
+++ b/google_appengine/google/appengine/ext/zipserve/__init__.py
@@ -0,0 +1,173 @@
+#!/usr/bin/env python
+#
+# Copyright 2007 Google Inc.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
+
+"""Serve static files from a zipfile.
+
+This is a solution for apps that want to serve 1000s of small static
+files while staying within the 1000 file limit.
+
+The simplest use case is driven purely from the handlers section in
+app.yaml, e.g.:
+
+ - url: /images/.*
+ script: $PYTHON_LIB/google/appengine/ext/zipserve
+
+This would invoke a main() within zipserve/__init__.py. This code
+would then take the URL path, and look for a .zip file under the first
+component of the path, in this case "images.zip" in the app's working
+directory. If found, it will then serve any matching paths below that
+from the zip file. In other words, /images/foo/icon.gif would map to
+foo/icon.gif in the zip file images.zip.
+
+You can also customize the behavior by adding a custom line to your
+WSGIApplication() invocation:
+
+ def main():
+ app = webapp.WSGIApplication(
+ [('/', MainPage),
+ ('/static/(.*)', zipserve.make_zip_handler('staticfiles.zip')),
+ ])
+
+You can pass max_age=N to the make_zip_handler() call to override the
+expiration time in seconds, which defaults to 600.
+
+To customize the behavior even more, you can subclass ZipHandler and
+override the get() method, or override it and call ServeFromZipFile()
+directly.
+
+Note that by default, a Cache-Control header is added that makes these
+pages cacheable even if they require authentication. If this is not what
+you want, override ZipHandler.SetCachingHeaders().
+"""
+
+
+import email.Utils
+import logging
+import mimetypes
+import time
+import zipfile
+
+from google.appengine.ext import webapp
+from google.appengine.ext.webapp import util
+
+
+def make_zip_handler(zipfilename, max_age=None, public=None):
+ """Factory function to construct a custom ZipHandler instance.
+
+ Args:
+ zipfilename: The filename of a zipfile.
+ max_age: Optional expiration time; defaults to ZipHandler.MAX_AGE.
+ public: Optional public flag; defaults to ZipHandler.PUBLIC.
+
+ Returns:
+ A ZipHandler subclass.
+ """
+ class CustomZipHandler(ZipHandler):
+ def get(self, name):
+ self.ServeFromZipFile(self.ZIPFILENAME, name)
+ ZIPFILENAME = zipfilename
+ if max_age is not None:
+ MAX_AGE = max_age
+ if public is not None:
+ PUBLIC = public
+
+ return CustomZipHandler
+
+
+class ZipHandler(webapp.RequestHandler):
+ """Request handler serving static files from zipfiles."""
+
+ zipfile_cache = {}
+
+ def get(self, prefix, name):
+ """GET request handler.
+
+ Typically the arguments are passed from the matching groups in the
+ URL pattern passed to WSGIApplication().
+
+ Args:
+ prefix: The zipfilename without the .zip suffix.
+ name: The name within the zipfile.
+ """
+ self.ServeFromZipFile(prefix + '.zip', name)
+
+ def ServeFromZipFile(self, zipfilename, name):
+ """Helper for the GET request handler.
+
+ This serves the contents of file 'name' from zipfile
+ 'zipfilename', logging a message and returning a 404 response if
+ either the zipfile cannot be opened or the named file cannot be
+ read from it.
+
+ Args:
+ zipfilename: The name of the zipfile.
+ name: The name within the zipfile.
+ """
+ zipfile_object = self.zipfile_cache.get(zipfilename)
+ if zipfile_object is None:
+ try:
+ zipfile_object = zipfile.ZipFile(zipfilename)
+ except (IOError, RuntimeError), err:
+ logging.error('Can\'t open zipfile %s: %s', zipfilename, err)
+ zipfile_object = ''
+ self.zipfile_cache[zipfilename] = zipfile_object
+ if zipfile_object == '':
+ self.error(404)
+ self.response.out.write('Not found')
+ return
+ try:
+ data = zipfile_object.read(name)
+ except (KeyError, RuntimeError), err:
+ self.error(404)
+ self.response.out.write('Not found')
+ return
+ content_type, encoding = mimetypes.guess_type(name)
+ if content_type:
+ self.response.headers['Content-Type'] = content_type
+ self.SetCachingHeaders()
+ self.response.out.write(data)
+
+ MAX_AGE = 600
+
+ PUBLIC = True
+
+ def SetCachingHeaders(self):
+ """Helper to set the caching headers.
+
+ Override this to customize the headers beyond setting MAX_AGE.
+ """
+ max_age = self.MAX_AGE
+ self.response.headers['Expires'] = email.Utils.formatdate(
+ time.time() + max_age, usegmt=True)
+ cache_control = []
+ if self.PUBLIC:
+ cache_control.append('public')
+ cache_control.append('max-age=%d' % max_age)
+ self.response.headers['Cache-Control'] = ', '.join(cache_control)
+
+
+def main():
+ """Main program.
+
+ This is invoked when this package is referenced from app.yaml.
+ """
+ application = webapp.WSGIApplication([('/([^/]+)/(.*)', ZipHandler)])
+ util.run_wsgi_app(application)
+
+
+if __name__ == '__main__':
+ main()
diff --git a/google_appengine/google/appengine/runtime/__init__.py b/google_appengine/google/appengine/runtime/__init__.py
new file mode 100755
index 0000000..939a073
--- /dev/null
+++ b/google_appengine/google/appengine/runtime/__init__.py
@@ -0,0 +1,33 @@
+#!/usr/bin/env python
+#
+# Copyright 2007 Google Inc.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
+
+"""Define the DeadlineExceededError exception."""
+
+
+
+try:
+ BaseException
+except NameError:
+ BaseException = Exception
+
+
+class DeadlineExceededError(BaseException):
+ """Exception raised when the request reaches its overall time limit.
+
+ Not to be confused with runtime.apiproxy_errors.DeadlineExceededError.
+ That one is raised when individual API calls take too long.
+ """
diff --git a/google_appengine/google/appengine/runtime/__init__.pyc b/google_appengine/google/appengine/runtime/__init__.pyc
new file mode 100644
index 0000000..21281a6
--- /dev/null
+++ b/google_appengine/google/appengine/runtime/__init__.pyc
Binary files differ
diff --git a/google_appengine/google/appengine/runtime/apiproxy.py b/google_appengine/google/appengine/runtime/apiproxy.py
new file mode 100755
index 0000000..385318b
--- /dev/null
+++ b/google_appengine/google/appengine/runtime/apiproxy.py
@@ -0,0 +1,184 @@
+#!/usr/bin/env python
+#
+# Copyright 2007 Google Inc.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
+
+"""Makes API calls to various Google-provided services.
+
+Provides methods for making calls into Google Apphosting services and APIs
+from your application code. This code will only work properly from within
+the Google Apphosting environment.
+"""
+
+
+import sys
+from google.net.proto import ProtocolBuffer
+from google.appengine import runtime
+from google.appengine.api import apiproxy_rpc
+from google3.apphosting.runtime import _apphosting_runtime___python__apiproxy
+from google.appengine.runtime import apiproxy_errors
+
+OK = 0
+RPC_FAILED = 1
+CALL_NOT_FOUND = 2
+ARGUMENT_ERROR = 3
+DEADLINE_EXCEEDED = 4
+CANCELLED = 5
+APPLICATION_ERROR = 6
+OTHER_ERROR = 7
+OVER_QUOTA = 8
+REQUEST_TOO_LARGE = 9
+CAPABILITY_DISABLED = 10
+
+_ExceptionsMap = {
+ RPC_FAILED:
+ (apiproxy_errors.RPCFailedError,
+ "The remote RPC to the application server failed for the call %s.%s()."),
+ CALL_NOT_FOUND:
+ (apiproxy_errors.CallNotFoundError,
+ "The API package '%s' or call '%s()' was not found."),
+ ARGUMENT_ERROR:
+ (apiproxy_errors.ArgumentError,
+ "An error occurred parsing (locally or remotely) the arguments to %s.%s()."),
+ DEADLINE_EXCEEDED:
+ (apiproxy_errors.DeadlineExceededError,
+ "The API call %s.%s() took too long to respond and was cancelled."),
+ CANCELLED:
+ (apiproxy_errors.CancelledError,
+ "The API call %s.%s() was explicitly cancelled."),
+ OTHER_ERROR:
+ (apiproxy_errors.Error,
+ "An error occurred for the API request %s.%s()."),
+ OVER_QUOTA:
+ (apiproxy_errors.OverQuotaError,
+ "The API call %s.%s() required more quota than is available."),
+ REQUEST_TOO_LARGE:
+ (apiproxy_errors.RequestTooLargeError,
+ "The request to API call %s.%s() was too large."),
+
+
+
+
+
+
+}
+
+class RPC(apiproxy_rpc.RPC):
+  """An RPC object, suitable for talking to remote services.
+
+ Each instance of this object can be used only once, and should not be reused.
+
+ Stores the data members and methods for making RPC calls via the APIProxy.
+ """
+
+ def __init__(self, *args, **kargs):
+ """Constructor for the RPC object. All arguments are optional, and
+ simply set members on the class. These data members will be
+    overridden by values passed to MakeCall.
+ """
+ super(RPC, self).__init__(*args, **kargs)
+ self.__result_dict = {}
+
+ def _WaitImpl(self):
+ """Waits on the API call associated with this RPC. The callback,
+ if provided, will be executed before Wait() returns. If this RPC
+ is already complete, or if the RPC was never started, this
+ function will return immediately.
+
+ Raises:
+ InterruptedError if a callback throws an uncaught exception.
+ """
+ try:
+ rpc_completed = _apphosting_runtime___python__apiproxy.Wait(self)
+ except (runtime.DeadlineExceededError, apiproxy_errors.InterruptedError):
+ raise
+ except:
+ exc_class, exc, tb = sys.exc_info()
+ if (isinstance(exc, SystemError) and
+ exc.args[0] == 'uncaught RPC exception'):
+ raise
+ rpc = None
+ if hasattr(exc, "_appengine_apiproxy_rpc"):
+ rpc = exc._appengine_apiproxy_rpc
+ new_exc = apiproxy_errors.InterruptedError(exc, rpc)
+ raise new_exc.__class__, new_exc, tb
+ return True
+
+ def _MakeCallImpl(self):
+ assert isinstance(self.request, ProtocolBuffer.ProtocolMessage)
+ assert isinstance(self.response, ProtocolBuffer.ProtocolMessage)
+
+ e = ProtocolBuffer.Encoder()
+ self.request.Output(e)
+
+ self.__state = RPC.RUNNING
+
+ _apphosting_runtime___python__apiproxy.MakeCall(
+ self.package, self.call, e.buffer(), self.__result_dict,
+ self.__MakeCallDone, self, deadline=(self.deadline or -1))
+
+ def __MakeCallDone(self):
+ self.__state = RPC.FINISHING
+ self.cpu_usage_mcycles = self.__result_dict['cpu_usage_mcycles']
+ if self.__result_dict['error'] == APPLICATION_ERROR:
+ self.__exception = apiproxy_errors.ApplicationError(
+ self.__result_dict['application_error'],
+ self.__result_dict['error_detail'])
+ elif self.__result_dict['error'] == CAPABILITY_DISABLED:
+ if self.__result_dict['error_detail']:
+ self.__exception = apiproxy_errors.CapabilityDisabledError(
+ self.__result_dict['error_detail'])
+ else:
+ self.__exception = apiproxy_errors.CapabilityDisabledError(
+ "The API call %s.%s() is temporarily unavailable." % (
+ self.package, self.call))
+ elif self.__result_dict['error'] in _ExceptionsMap:
+ exception_entry = _ExceptionsMap[self.__result_dict['error']]
+ self.__exception = exception_entry[0](
+ exception_entry[1] % (self.package, self.call))
+ else:
+ try:
+ self.response.ParseFromString(self.__result_dict['result_string'])
+ except Exception, e:
+ self.__exception = e
+ self.__Callback()
+
+def CreateRPC():
+  """Create an RPC instance suitable for talking to remote services.
+
+ Each RPC instance can be used only once, and should not be reused.
+
+ Returns:
+    An RPC instance.
+ """
+ return RPC()
+
+
+def MakeSyncCall(package, call, request, response):
+ """Makes a synchronous (i.e. blocking) API call within the specified
+ package for the specified call method. request and response must be the
+ appropriately typed ProtocolBuffers for the API call. An exception is
+ thrown if an error occurs when communicating with the system.
+
+ Args:
+ See MakeCall above.
+
+ Raises:
+ See CheckSuccess() above.
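+
+  An illustrative sketch (the package, call, and PB types here are
+  hypothetical; use the PB classes the target API actually expects):
+
+    request = SomeServiceRequestProto()
+    response = SomeServiceResponseProto()
+    MakeSyncCall('some_service', 'SomeMethod', request, response)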
+ """
+ rpc = CreateRPC()
+ rpc.MakeCall(package, call, request, response)
+ rpc.Wait()
+ rpc.CheckSuccess()
diff --git a/google_appengine/google/appengine/runtime/apiproxy.pyc b/google_appengine/google/appengine/runtime/apiproxy.pyc
new file mode 100644
index 0000000..c408167
--- /dev/null
+++ b/google_appengine/google/appengine/runtime/apiproxy.pyc
Binary files differ
diff --git a/google_appengine/google/appengine/runtime/apiproxy_errors.py b/google_appengine/google/appengine/runtime/apiproxy_errors.py
new file mode 100755
index 0000000..bad4fc1
--- /dev/null
+++ b/google_appengine/google/appengine/runtime/apiproxy_errors.py
@@ -0,0 +1,84 @@
+#!/usr/bin/env python
+#
+# Copyright 2007 Google Inc.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
+
+"""Errors thrown by apiproxy.MakeSyncCall.
+"""
+
+
+class Error(Exception):
+ """Base APIProxy error type."""
+
+
+class RPCFailedError(Error):
+ """Raised by APIProxy calls when the RPC to the application server fails."""
+
+
+class CallNotFoundError(Error):
+ """Raised by APIProxy calls when the requested method cannot be found."""
+
+
+class ArgumentError(Error):
+ """Raised by APIProxy calls if there is an error parsing the arguments."""
+
+
+class DeadlineExceededError(Error):
+ """Raised by APIProxy calls if the call took too long to respond."""
+
+
+class CancelledError(Error):
+ """Raised by APIProxy calls if the call was cancelled, such as when
+ the user's request is exiting."""
+
+
+class ApplicationError(Error):
+ """Raised by APIProxy in the event of an application-level error."""
+ def __init__(self, application_error, error_detail=''):
+ self.application_error = application_error
+ self.error_detail = error_detail
+ Error.__init__(self, application_error)
+
+ def __str__(self):
+ return 'ApplicationError: %d %s' % (self.application_error,
+ self.error_detail)
+
+class OverQuotaError(Error):
+ """Raised by APIProxy calls when they have been blocked due to a lack of
+ available quota."""
+
+class RequestTooLargeError(Error):
+ """Raised by APIProxy calls if the request was too large."""
+
+class CapabilityDisabledError(Error):
+ """Raised by APIProxy when API calls are temporarily disabled."""
+
+class InterruptedError(Error):
+ """Raised by APIProxy.Wait() when the wait is interrupted by an uncaught
+ exception from some callback, not necessarily associated with the RPC in
+ question."""
+ def __init__(self, exception, rpc):
+ self.args = ("The Wait() request was interrupted by an exception from "
+ "another callback:", exception)
+ self.__rpc = rpc
+ self.__exception = exception
+
+ @property
+ def rpc(self):
+ return self.__rpc
+
+ @property
+ def exception(self):
+ return self.__exception
diff --git a/google_appengine/google/appengine/runtime/apiproxy_errors.pyc b/google_appengine/google/appengine/runtime/apiproxy_errors.pyc
new file mode 100644
index 0000000..48a4e27
--- /dev/null
+++ b/google_appengine/google/appengine/runtime/apiproxy_errors.pyc
Binary files differ
diff --git a/google_appengine/google/appengine/tools/__init__.py b/google_appengine/google/appengine/tools/__init__.py
new file mode 100755
index 0000000..c33ae80
--- /dev/null
+++ b/google_appengine/google/appengine/tools/__init__.py
@@ -0,0 +1,16 @@
+#!/usr/bin/env python
+#
+# Copyright 2007 Google Inc.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
diff --git a/google_appengine/google/appengine/tools/__init__.pyc b/google_appengine/google/appengine/tools/__init__.pyc
new file mode 100644
index 0000000..2b87356
--- /dev/null
+++ b/google_appengine/google/appengine/tools/__init__.pyc
Binary files differ
diff --git a/google_appengine/google/appengine/tools/adaptive_thread_pool.py b/google_appengine/google/appengine/tools/adaptive_thread_pool.py
new file mode 100755
index 0000000..8458289
--- /dev/null
+++ b/google_appengine/google/appengine/tools/adaptive_thread_pool.py
@@ -0,0 +1,460 @@
+#!/usr/bin/env python
+#
+# Copyright 2007 Google Inc.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
+
+"""Provides thread-pool-like functionality for workers accessing App Engine.
+
+The pool adapts to slow or timing-out requests by reducing the number of
+active workers, or increasing the number when request latency decreases.
+"""
+
+
+
+import logging
+import Queue
+import sys
+import threading
+import time
+import traceback
+
+from google.appengine.tools.requeue import ReQueue
+
+logger = logging.getLogger('google.appengine.tools.adaptive_thread_pool')
+
+_THREAD_SHOULD_EXIT = '_THREAD_SHOULD_EXIT'
+
+INITIAL_BACKOFF = 1.0
+
+BACKOFF_FACTOR = 2.0
+
+
+class Error(Exception):
+ """Base-class for exceptions in this module."""
+
+
+class WorkItemError(Error):
+ """Error while processing a WorkItem."""
+
+
+class RetryException(Error):
+ """A non-fatal exception that indicates that a work item should be retried."""
+
+
+def InterruptibleSleep(sleep_time):
+  """Puts thread to sleep, checking this thread's exit_flag four times a second.
+
+ Args:
+ sleep_time: Time to sleep.
+ """
+ slept = 0.0
+ epsilon = .0001
+ thread = threading.currentThread()
+ while slept < sleep_time - epsilon:
+ remaining = sleep_time - slept
+ this_sleep_time = min(remaining, 0.25)
+ time.sleep(this_sleep_time)
+ slept += this_sleep_time
+ if thread.exit_flag:
+ return
+
+
+class WorkerThread(threading.Thread):
+ """A WorkerThread to execute WorkItems.
+
+ Attributes:
+ exit_flag: A boolean indicating whether this thread should stop
+ its work and exit.
+ """
+
+ def __init__(self, thread_pool, thread_gate, name=None):
+ """Initialize a WorkerThread instance.
+
+ Args:
+ thread_pool: An AdaptiveThreadPool instance.
+ thread_gate: A ThreadGate instance.
+ name: A name for this WorkerThread.
+ """
+ threading.Thread.__init__(self)
+
+ self.setDaemon(True)
+
+ self.exit_flag = False
+ self.__error = None
+ self.__traceback = None
+ self.__thread_pool = thread_pool
+ self.__work_queue = thread_pool.requeue
+ self.__thread_gate = thread_gate
+ if not name:
+ self.__name = 'Anonymous_' + self.__class__.__name__
+ else:
+ self.__name = name
+
+ def run(self):
+ """Perform the work of the thread."""
+ logger.debug('[%s] %s: started', self.getName(), self.__class__.__name__)
+
+ try:
+ self.WorkOnItems()
+ except:
+ self.SetError()
+
+ logger.debug('[%s] %s: exiting', self.getName(), self.__class__.__name__)
+
+ def SetError(self):
+ """Sets the error and traceback information for this thread.
+
+ This must be called from an exception handler.
+ """
+ if not self.__error:
+ exc_info = sys.exc_info()
+ self.__error = exc_info[1]
+ self.__traceback = exc_info[2]
+ logger.exception('[%s] %s:', self.getName(), self.__class__.__name__)
+
+ def WorkOnItems(self):
+ """Perform the work of a WorkerThread."""
+ while not self.exit_flag:
+ item = None
+ self.__thread_gate.StartWork()
+ try:
+ status, instruction = WorkItem.FAILURE, ThreadGate.DECREASE
+ try:
+ if self.exit_flag:
+ instruction = ThreadGate.HOLD
+ break
+
+ try:
+ item = self.__work_queue.get(block=True, timeout=1.0)
+ except Queue.Empty:
+ instruction = ThreadGate.HOLD
+ continue
+ if item == _THREAD_SHOULD_EXIT or self.exit_flag:
+ status, instruction = WorkItem.SUCCESS, ThreadGate.HOLD
+ break
+
+ logger.debug('[%s] Got work item %s', self.getName(), item)
+
+ status, instruction = item.PerformWork(self.__thread_pool)
+ except RetryException:
+ status, instruction = WorkItem.RETRY, ThreadGate.HOLD
+ except:
+ self.SetError()
+ raise
+
+ finally:
+ try:
+ if item:
+ if status == WorkItem.SUCCESS:
+ self.__work_queue.task_done()
+ elif status == WorkItem.RETRY:
+ try:
+ self.__work_queue.reput(item, block=False)
+ except Queue.Full:
+ logger.error('[%s] Failed to reput work item.', self.getName())
+ raise Error('Failed to reput work item')
+ else:
+ if not self.__error:
+ if item.error:
+ self.__error = item.error
+ self.__traceback = item.traceback
+ else:
+ self.__error = WorkItemError(
+ 'Fatal error while processing %s' % item)
+ raise self.__error
+
+ finally:
+ self.__thread_gate.FinishWork(instruction=instruction)
+
+ def CheckError(self):
+ """If an error is present, then log it."""
+ if self.__error:
+ logger.error('Error in %s: %s', self.getName(), self.__error)
+ if self.__traceback:
+ logger.debug('%s', ''.join(traceback.format_exception(
+ self.__error.__class__,
+ self.__error,
+ self.__traceback)))
+
+ def __str__(self):
+ return self.__name
+
+
+class AdaptiveThreadPool(object):
+ """A thread pool which processes WorkItems from a queue.
+
+ Attributes:
+ requeue: The requeue instance which holds work items for this
+ thread pool.
+ """
+
+ def __init__(self,
+ num_threads,
+ queue_size=None,
+ base_thread_name=None,
+ worker_thread_factory=WorkerThread,
+ queue_factory=Queue.Queue):
+ """Initialize an AdaptiveThreadPool.
+
+ An adaptive thread pool executes WorkItems using a number of
+ WorkerThreads. WorkItems represent items of work that may
+ succeed, soft fail, or hard fail. In addition, a completed work
+ item can signal this AdaptiveThreadPool to enable more or fewer
+ threads. Initially one thread is active. Soft failures are
+    requeued to be retried. Hard failures cause this
+ AdaptiveThreadPool to shut down entirely. See the WorkItem class
+ for more details.
+
+ Args:
+ num_threads: The number of threads to use.
+ queue_size: The size of the work item queue to use.
+ base_thread_name: A string from which worker thread names are derived.
+      worker_thread_factory: A factory which produces WorkerThreads.
+ queue_factory: Used for dependency injection.
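+
+    A minimal usage sketch (MyWorkItem is a hypothetical WorkItem subclass):
+
+      pool = AdaptiveThreadPool(num_threads=10)
+      pool.SubmitItem(MyWorkItem('item-1'))
+      pool.Wait()
+      pool.Shutdown()
+      pool.JoinThreads()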
+ """
+ if queue_size is None:
+ queue_size = num_threads
+ self.requeue = ReQueue(queue_size, queue_factory=queue_factory)
+ self.__thread_gate = ThreadGate(num_threads)
+ self.__num_threads = num_threads
+ self.__threads = []
+ for i in xrange(num_threads):
+ thread = worker_thread_factory(self, self.__thread_gate)
+ if base_thread_name:
+ base = base_thread_name
+ else:
+ base = thread.__class__.__name__
+ thread.name = '%s-%d' % (base, i)
+ self.__threads.append(thread)
+ thread.start()
+
+ def num_threads(self):
+ """Return the number of threads in this thread pool."""
+ return self.__num_threads
+
+ def Threads(self):
+ """Yields the registered threads."""
+ for thread in self.__threads:
+ yield thread
+
+ def SubmitItem(self, item, block=True, timeout=0.0):
+ """Submit a WorkItem to the AdaptiveThreadPool.
+
+ Args:
+ item: A WorkItem instance.
+ block: Whether to block on submitting if the submit queue is full.
+      timeout: Time to wait for room in the queue if block is True; 0.0 to
+        block indefinitely.
+
+ Raises:
+ Queue.Full if the submit queue is full.
+ """
+ self.requeue.put(item, block=block, timeout=timeout)
+
+ def QueuedItemCount(self):
+ """Returns the number of items currently in the queue."""
+ return self.requeue.qsize()
+
+ def Shutdown(self):
+ """Shutdown the thread pool.
+
+ Tasks may remain unexecuted in the submit queue.
+ """
+ while not self.requeue.empty():
+ try:
+ unused_item = self.requeue.get_nowait()
+ self.requeue.task_done()
+ except Queue.Empty:
+ pass
+ for thread in self.__threads:
+ thread.exit_flag = True
+ self.requeue.put(_THREAD_SHOULD_EXIT)
+ self.__thread_gate.EnableAllThreads()
+
+ def Wait(self):
+ """Wait until all work items have been completed."""
+ self.requeue.join()
+
+ def JoinThreads(self):
+ """Wait for all threads to exit."""
+ for thread in self.__threads:
+ logger.debug('Waiting for %s to exit' % str(thread))
+ thread.join()
+
+ def CheckErrors(self):
+ """Output logs for any errors that occurred in the worker threads."""
+ for thread in self.__threads:
+ thread.CheckError()
+
+
+class ThreadGate(object):
+ """Manage the number of active worker threads.
+
+ The ThreadGate limits the number of threads that are simultaneously
+ active in order to implement adaptive rate control.
+
+ Initially the ThreadGate allows only one thread to be active. For
+ each successful work item, another thread is activated and for each
+ failed item, the number of active threads is reduced by one. When only
+ one thread is active, failures will cause exponential backoff.
+
+  For example, a ThreadGate instance, thread_gate, can be used in a number
+  of threads like so:
+
+ # Block until this thread is enabled for work.
+ thread_gate.StartWork()
+ try:
+ status = DoSomeWorkInvolvingLimitedSharedResources()
+      succeeded = IsStatusGood(status)
+      badly_failed = IsStatusVeryBad(status)
+    finally:
+      if succeeded:
+        # Succeeded, add more simultaneously enabled threads to the task.
+ thread_gate.FinishWork(instruction=ThreadGate.INCREASE)
+ elif badly_failed:
+ # Failed, or succeeded but with high resource load, reduce number of
+ # workers.
+ thread_gate.FinishWork(instruction=ThreadGate.DECREASE)
+ else:
+ # We succeeded, but don't want to add more workers to the task.
+ thread_gate.FinishWork(instruction=ThreadGate.HOLD)
+
+  The thread_gate will enable and disable/backoff threads in response to
+  resource load conditions.
+
+ StartWork can block indefinitely. FinishWork, while not
+ lock-free, should never block absent a demonic scheduler.
+ """
+
+ INCREASE = 'increase'
+ HOLD = 'hold'
+ DECREASE = 'decrease'
+
+ def __init__(self,
+ num_threads,
+ sleep=InterruptibleSleep):
+ """Constructor for ThreadGate instances.
+
+ Args:
+ num_threads: The total number of threads using this gate.
+ sleep: Used for dependency injection.
+ """
+ self.__enabled_count = 1
+ self.__lock = threading.Lock()
+ self.__thread_semaphore = threading.Semaphore(self.__enabled_count)
+ self.__num_threads = num_threads
+ self.__backoff_time = 0
+ self.__sleep = sleep
+
+ def num_threads(self):
+ return self.__num_threads
+
+ def EnableThread(self):
+ """Enable one more worker thread."""
+ self.__lock.acquire()
+ try:
+ self.__enabled_count += 1
+ finally:
+ self.__lock.release()
+ self.__thread_semaphore.release()
+
+ def EnableAllThreads(self):
+ """Enable all worker threads."""
+ for unused_idx in xrange(self.__num_threads - self.__enabled_count):
+ self.EnableThread()
+
+ def StartWork(self):
+ """Starts a critical section in which the number of workers is limited.
+
+ Starts a critical section which allows self.__enabled_count
+ simultaneously operating threads. The critical section is ended by
+ calling self.FinishWork().
+ """
+ self.__thread_semaphore.acquire()
+ if self.__backoff_time > 0.0:
+ if not threading.currentThread().exit_flag:
+ logger.info('Backing off due to errors: %.1f seconds',
+ self.__backoff_time)
+ self.__sleep(self.__backoff_time)
+
+ def FinishWork(self, instruction=None):
+ """Ends a critical section started with self.StartWork()."""
+ if not instruction or instruction == ThreadGate.HOLD:
+ self.__thread_semaphore.release()
+
+ elif instruction == ThreadGate.INCREASE:
+ if self.__backoff_time > 0.0:
+ logger.info('Resetting backoff to 0.0')
+ self.__backoff_time = 0.0
+ do_enable = False
+ self.__lock.acquire()
+ try:
+ if self.__num_threads > self.__enabled_count:
+ do_enable = True
+ self.__enabled_count += 1
+ finally:
+ self.__lock.release()
+ if do_enable:
+ logger.debug('Increasing active thread count to %d',
+ self.__enabled_count)
+ self.__thread_semaphore.release()
+ self.__thread_semaphore.release()
+
+ elif instruction == ThreadGate.DECREASE:
+ do_disable = False
+ self.__lock.acquire()
+ try:
+ if self.__enabled_count > 1:
+ do_disable = True
+ self.__enabled_count -= 1
+ else:
+ if self.__backoff_time == 0.0:
+ self.__backoff_time = INITIAL_BACKOFF
+ else:
+ self.__backoff_time *= BACKOFF_FACTOR
+ finally:
+ self.__lock.release()
+ if do_disable:
+ logger.debug('Decreasing the number of active threads to %d',
+ self.__enabled_count)
+ else:
+ self.__thread_semaphore.release()
+
+
+class WorkItem(object):
+ """Holds a unit of work."""
+
+ SUCCESS = 'success'
+ RETRY = 'retry'
+ FAILURE = 'failure'
+
+ def __init__(self, name):
+ self.__name = name
+
+ def PerformWork(self, thread_pool):
+ """Perform the work of this work item and report the results.
+
+ Args:
+ thread_pool: The AdaptiveThreadPool instance associated with this
+ thread.
+
+ Returns:
+ A tuple (status, instruction) of the work status and an instruction
+ for the ThreadGate.
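+
+    An illustrative override (hypothetical):
+
+      class PrintingItem(WorkItem):
+
+        def PerformWork(self, thread_pool):
+          print 'working on', str(self)
+          return WorkItem.SUCCESS, ThreadGate.INCREASE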
+ """
+ raise NotImplementedError
+
+ def __str__(self):
+ return self.__name
diff --git a/google_appengine/google/appengine/tools/adaptive_thread_pool.pyc b/google_appengine/google/appengine/tools/adaptive_thread_pool.pyc
new file mode 100644
index 0000000..25b62c4
--- /dev/null
+++ b/google_appengine/google/appengine/tools/adaptive_thread_pool.pyc
Binary files differ
diff --git a/google_appengine/google/appengine/tools/appcfg.py b/google_appengine/google/appengine/tools/appcfg.py
new file mode 100755
index 0000000..f9abe25
--- /dev/null
+++ b/google_appengine/google/appengine/tools/appcfg.py
@@ -0,0 +1,2525 @@
+#!/usr/bin/env python
+#
+# Copyright 2007 Google Inc.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
+
+"""Tool for deploying apps to an app server.
+
+Currently, the application only uploads new appversions. To do this, it first
+walks the directory tree rooted at the path the user specifies, adding all the
+files it finds to a list. It then uploads the application configuration
+(app.yaml) to the server using HTTP, followed by uploading each of the files.
+It then commits the transaction with another request.
+
+The bulk of this work is handled by the AppVersionUpload class, which exposes
+methods to add to the list of files, fetch a list of modified files, upload
+files, and commit or rollback the transaction.
+"""
+
+
+import calendar
+import datetime
+import getpass
+import logging
+import mimetypes
+import optparse
+import os
+import random
+import re
+import sha
+import sys
+import tempfile
+import time
+import urllib
+import urllib2
+
+import google
+import yaml
+from google.appengine.cron import groctimespecification
+from google.appengine.api import appinfo
+from google.appengine.api import croninfo
+from google.appengine.api import queueinfo
+from google.appengine.api import validation
+from google.appengine.api import yaml_errors
+from google.appengine.api import yaml_object
+from google.appengine.datastore import datastore_index
+from google.appengine.tools import appengine_rpc
+from google.appengine.tools import bulkloader
+
+
+MAX_FILES_TO_CLONE = 100
+LIST_DELIMITER = '\n'
+TUPLE_DELIMITER = '|'
+
+VERSION_FILE = '../VERSION'
+
+UPDATE_CHECK_TIMEOUT = 3
+
+NAG_FILE = '.appcfg_nag'
+
+MAX_LOG_LEVEL = 4
+
+MAX_BATCH_SIZE = 1000000
+MAX_BATCH_COUNT = 100
+MAX_BATCH_FILE_SIZE = 200000
+BATCH_OVERHEAD = 500
+
+verbosity = 1
+
+
+appinfo.AppInfoExternal.ATTRIBUTES[appinfo.RUNTIME] = 'python'
+_api_versions = os.environ.get('GOOGLE_TEST_API_VERSIONS', '1')
+_options = validation.Options(*_api_versions.split(','))
+appinfo.AppInfoExternal.ATTRIBUTES[appinfo.API_VERSION] = _options
+del _api_versions, _options
+
+
+def StatusUpdate(msg):
+ """Print a status message to stderr.
+
+ If 'verbosity' is greater than 0, print the message.
+
+ Args:
+ msg: The string to print.
+ """
+ if verbosity > 0:
+ print >>sys.stderr, msg
+
+
+def GetMimeTypeIfStaticFile(config, filename):
+ """Looks up the mime type for 'filename'.
+
+ Uses the handlers in 'config' to determine if the file should
+ be treated as a static file.
+
+ Args:
+ config: The app.yaml object to check the filename against.
+ filename: The name of the file.
+
+ Returns:
+ The mime type string. For example, 'text/plain' or 'image/gif'.
+ None if this is not a static file.
+ """
+ for handler in config.handlers:
+ handler_type = handler.GetHandlerType()
+ if handler_type in ('static_dir', 'static_files'):
+ if handler_type == 'static_dir':
+ regex = os.path.join(re.escape(handler.GetHandler()), '.*')
+ else:
+ regex = handler.upload
+ if re.match(regex, filename):
+ if handler.mime_type is not None:
+ return handler.mime_type
+ else:
+ guess = mimetypes.guess_type(filename)[0]
+ if guess is None:
+ default = 'application/octet-stream'
+ print >>sys.stderr, ('Could not guess mimetype for %s. Using %s.'
+ % (filename, default))
+ return default
+ return guess
+ return None
+
+
+def BuildClonePostBody(file_tuples):
+ """Build the post body for the /api/clone{files,blobs} urls.
+
+ Args:
+ file_tuples: A list of tuples. Each tuple should contain the entries
+ appropriate for the endpoint in question.
+
+ Returns:
+ A string containing the properly delimited tuples.
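+
+  An illustrative call (paths and hashes are made up):
+
+    BuildClonePostBody([('a/b.py', 'hash1'), ('c.py', 'hash2')])
+    => 'a/b.py|hash1\nc.py|hash2'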
+ """
+ file_list = []
+ for tup in file_tuples:
+ path = tup[0]
+ tup = tup[1:]
+ file_list.append(TUPLE_DELIMITER.join([path] + list(tup)))
+ return LIST_DELIMITER.join(file_list)
+
+
+class NagFile(validation.Validated):
+ """A validated YAML class to represent the user's nag preferences.
+
+ Attributes:
+ timestamp: The timestamp of the last nag.
+ opt_in: True if the user wants to check for updates on dev_appserver
+ start. False if not. May be None if we have not asked the user yet.
+ """
+
+ ATTRIBUTES = {
+ 'timestamp': validation.TYPE_FLOAT,
+ 'opt_in': validation.Optional(validation.TYPE_BOOL),
+ }
+
+ @staticmethod
+ def Load(nag_file):
+ """Load a single NagFile object where one and only one is expected.
+
+ Args:
+ nag_file: A file-like object or string containing the yaml data to parse.
+
+ Returns:
+ A NagFile instance.
+ """
+ return yaml_object.BuildSingleObject(NagFile, nag_file)
+
+
+def GetVersionObject(isfile=os.path.isfile, open_fn=open):
+ """Gets the version of the SDK by parsing the VERSION file.
+
+ Args:
+    isfile: Used for testing.
+ open_fn: Used for testing.
+
+ Returns:
+ A Yaml object or None if the VERSION file does not exist.
+ """
+ version_filename = os.path.join(os.path.dirname(google.__file__),
+ VERSION_FILE)
+ if not isfile(version_filename):
+ logging.error('Could not find version file at %s', version_filename)
+ return None
+
+ version_fh = open_fn(version_filename, 'r')
+ try:
+ version = yaml.safe_load(version_fh)
+ finally:
+ version_fh.close()
+
+ return version
+
+
+def RetryWithBackoff(initial_delay, backoff_factor, max_tries, callable_func):
+ """Calls a function multiple times, backing off more and more each time.
+
+ Args:
+ initial_delay: Initial delay after first try, in seconds.
+ backoff_factor: Delay will be multiplied by this factor after each try.
+ max_tries: Maximum number of tries.
+ callable_func: The method to call, will pass no arguments.
+
+ Returns:
+    True if the function succeeded in one of its tries.
+
+ Raises:
+ Whatever the function raises--an exception will immediately stop retries.
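+
+  An illustrative call (ServerIsReady is a hypothetical callable that
+  returns True on success), retrying with delays of 1, 2, 4, ... seconds:
+
+    succeeded = RetryWithBackoff(1.0, 2.0, 8, ServerIsReady)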
+ """
+ delay = initial_delay
+ while not callable_func() and max_tries > 0:
+ StatusUpdate('Will check again in %s seconds.' % delay)
+ time.sleep(delay)
+ delay *= backoff_factor
+ max_tries -= 1
+ return max_tries > 0
+
+
+def _VersionList(release):
+ """Parse a version string into a list of ints.
+
+ Args:
+ release: The 'release' version, e.g. '1.2.4'.
+ (Due to YAML parsing this may also be an int or float.)
+
+ Returns:
+ A list of ints corresponding to the parts of the version string
+ between periods. Example:
+ '1.2.4' -> [1, 2, 4]
+ '1.2.3.4' -> [1, 2, 3, 4]
+
+ Raises:
+ ValueError if not all the parts are valid integers.
+ """
+ return [int(part) for part in str(release).split('.')]
+
+
+class UpdateCheck(object):
+ """Determines if the local SDK is the latest version.
+
+ Nags the user when there are updates to the SDK. As the SDK becomes
+ more out of date, the language in the nagging gets stronger. We
+ store a little yaml file in the user's home directory so that we nag
+ the user only once a week.
+
+ The yaml file has the following field:
+ 'timestamp': Last time we nagged the user in seconds since the epoch.
+
+ Attributes:
+ server: An AbstractRpcServer instance used to check for the latest SDK.
+ config: The app's AppInfoExternal. Needed to determine which api_version
+ the app is using.
+ """
+
+ def __init__(self,
+ server,
+ config,
+ isdir=os.path.isdir,
+ isfile=os.path.isfile,
+ open_fn=open):
+ """Create a new UpdateCheck.
+
+ Args:
+ server: The AbstractRpcServer to use.
+ config: The yaml object that specifies the configuration of this
+ application.
+ isdir: Replacement for os.path.isdir (for testing).
+ isfile: Replacement for os.path.isfile (for testing).
+ open_fn: Replacement for the open builtin (for testing).
+ """
+ self.server = server
+ self.config = config
+ self.isdir = isdir
+ self.isfile = isfile
+ self.open = open_fn
+
+ @staticmethod
+ def MakeNagFilename():
+ """Returns the filename for the nag file for this user."""
+ user_homedir = os.path.expanduser('~/')
+ if not os.path.isdir(user_homedir):
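+ # On Windows, expanduser('~/') falls back to HOMEDRIVE + HOMEPATH;
+ # if the resolved directory is missing, borrow the drive letter of
+ # the Python installation so the expanduser call below yields a
+ # usable location.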
+ drive, unused_tail = os.path.splitdrive(os.__file__)
+ if drive:
+ os.environ['HOMEDRIVE'] = drive
+
+ return os.path.expanduser('~/' + NAG_FILE)
+
+ def _ParseVersionFile(self):
+ """Parse the local VERSION file.
+
+ Returns:
+ A Yaml object or None if the file does not exist.
+ """
+ return GetVersionObject(isfile=self.isfile, open_fn=self.open)
+
+ def CheckSupportedVersion(self):
+ """Determines if the app's api_version is supported by the SDK.
+
+ Uses the api_version field from the AppInfoExternal to determine if
+ the SDK supports that api_version.
+
+ Raises:
+ SystemExit if the api_version is not supported.
+ """
+ version = self._ParseVersionFile()
+ if version is None:
+ logging.error('Could not determine if the SDK supports the api_version '
+ 'requested in app.yaml.')
+ return
+ if self.config.api_version not in version['api_versions']:
+ logging.critical('The api_version specified in app.yaml (%s) is not '
+ 'supported by this release of the SDK. The supported '
+ 'api_versions are %s.',
+ self.config.api_version, version['api_versions'])
+ sys.exit(1)
+
+ def CheckForUpdates(self):
+ """Queries the server for updates and nags the user if appropriate.
+
+ Queries the server for the latest SDK version at the same time reporting
+ the local SDK version. The server will respond with a yaml document
+ containing the fields:
+ 'release': The name of the release (e.g. 1.2).
+ 'timestamp': The time the release was created (YYYY-MM-DD HH:MM AM/PM TZ).
+ 'api_versions': A list of api_version strings (e.g. ['1', 'beta']).
+
+ We will nag the user with increasing severity if:
+ - There is a new release.
+ - There is a new release with a new api_version.
+ - There is a new release that does not support the api_version named in
+ self.config.
+ """
+ version = self._ParseVersionFile()
+ if version is None:
+ logging.info('Skipping update check')
+ return
+ logging.info('Checking for updates to the SDK.')
+
+ try:
+ response = self.server.Send('/api/updatecheck',
+ timeout=UPDATE_CHECK_TIMEOUT,
+ release=version['release'],
+ timestamp=version['timestamp'],
+ api_versions=version['api_versions'])
+ except urllib2.URLError, e:
+ logging.info('Update check failed: %s', e)
+ return
+
+ latest = yaml.safe_load(response)
+ if version['release'] == latest['release']:
+ logging.info('The SDK is up to date.')
+ return
+
+ try:
+ this_release = _VersionList(version['release'])
+ except ValueError:
+ logging.warn('Could not parse this release version (%r)',
+ version['release'])
+ else:
+ try:
+ advertised_release = _VersionList(latest['release'])
+ except ValueError:
+ logging.warn('Could not parse advertised release version (%r)',
+ latest['release'])
+ else:
+ if this_release > advertised_release:
+ logging.info('This SDK release is newer than the advertised release.')
+ return
+
+ api_versions = latest['api_versions']
+ if self.config.api_version not in api_versions:
+ self._Nag(
+ 'The api version you are using (%s) is obsolete! You should\n'
+ 'upgrade your SDK and test that your code works with the new\n'
+ 'api version.' % self.config.api_version,
+ latest, version, force=True)
+ return
+
+ if self.config.api_version != api_versions[-1]:
+ self._Nag(
+ 'The api version you are using (%s) is deprecated. You should\n'
+ 'upgrade your SDK to try the new functionality.' %
+ self.config.api_version, latest, version)
+ return
+
+ self._Nag('There is a new release of the SDK available.',
+ latest, version)
+
+ def _ParseNagFile(self):
+ """Parses the nag file.
+
+ Returns:
+ A NagFile if the file was present else None.
+ """
+ nag_filename = UpdateCheck.MakeNagFilename()
+ if self.isfile(nag_filename):
+ fh = self.open(nag_filename, 'r')
+ try:
+ nag = NagFile.Load(fh)
+ finally:
+ fh.close()
+ return nag
+ return None
+
+ def _WriteNagFile(self, nag):
+ """Writes the NagFile to the user's nag file.
+
+ If the nag file cannot be written (for example, because the destination
+ path does not exist), this method logs an error and returns without
+ raising.
+
+ Args:
+ nag: The NagFile to write.
+ """
+ nagfilename = UpdateCheck.MakeNagFilename()
+ try:
+ fh = self.open(nagfilename, 'w')
+ try:
+ fh.write(nag.ToYAML())
+ finally:
+ fh.close()
+ except (OSError, IOError), e:
+ logging.error('Could not write nag file to %s. Error: %s', nagfilename, e)
+
+ def _Nag(self, msg, latest, version, force=False):
+ """Prints a nag message and updates the nag file's timestamp.
+
+ Because we don't want to nag the user every time, we store a simple
+ yaml document in the user's home directory. If the timestamp in this
+ doc is over a week old, we nag the user again and update the
+ timestamp in this doc.
+
+ Args:
+ msg: The formatted message to print to the user.
+ latest: The yaml document received from the server.
+ version: The local yaml version document.
+ force: If True, always nag the user, ignoring the nag file.
+ """
+ nag = self._ParseNagFile()
+ if nag and not force:
+ last_nag = datetime.datetime.fromtimestamp(nag.timestamp)
+ if datetime.datetime.now() - last_nag < datetime.timedelta(weeks=1):
+ logging.debug('Skipping nag message')
+ return
+
+ if nag is None:
+ nag = NagFile()
+ nag.timestamp = time.time()
+ self._WriteNagFile(nag)
+
+ print '****************************************************************'
+ print msg
+ print '-----------'
+ print 'Latest SDK:'
+ print yaml.dump(latest)
+ print '-----------'
+ print 'Your SDK:'
+ print yaml.dump(version)
+ print '-----------'
+ print 'Please visit http://code.google.com/appengine for the latest SDK'
+ print '****************************************************************'
+
+ def AllowedToCheckForUpdates(self, input_fn=raw_input):
+ """Determines if the user wants to check for updates.
+
+ On startup, the dev_appserver wants to check for updates to the SDK.
+ Because this action reports usage to Google when the user is not
+ otherwise communicating with Google (e.g. pushing a new app version),
+ the user must opt in.
+
+ If the user does not have a nag file, we will query the user and
+ save the response in the nag file. Subsequent calls to this function
+ will re-use that response.
+
+ Args:
+ input_fn: used to collect user input. This is for testing only.
+
+ Returns:
+ True if the user wants to check for updates. False otherwise.
+ """
+ nag = self._ParseNagFile()
+ if nag is None:
+ nag = NagFile()
+ nag.timestamp = time.time()
+
+ if nag.opt_in is None:
+ answer = input_fn('Allow dev_appserver to check for updates on startup? '
+ '(Y/n): ')
+ answer = answer.strip().lower()
+ if answer == 'n' or answer == 'no':
+ print ('dev_appserver will not check for updates on startup. To '
+ 'change this setting, edit %s' % UpdateCheck.MakeNagFilename())
+ nag.opt_in = False
+ else:
+ print ('dev_appserver will check for updates on startup. To change '
+ 'this setting, edit %s' % UpdateCheck.MakeNagFilename())
+ nag.opt_in = True
+ self._WriteNagFile(nag)
+ return nag.opt_in
+
+
+class IndexDefinitionUpload(object):
+ """Provides facilities to upload index definitions to the hosting service."""
+
+ def __init__(self, server, config, definitions):
+ """Creates a new DatastoreIndexUpload.
+
+ Args:
+ server: The RPC server to use. Should be an instance of HttpRpcServer
+ or TestRpcServer.
+ config: The AppInfoExternal object derived from the app.yaml file.
+ definitions: An IndexDefinitions object.
+ """
+ self.server = server
+ self.config = config
+ self.definitions = definitions
+
+ def DoUpload(self):
+ """Uploads the index definitions."""
+ StatusUpdate('Uploading index definitions.')
+ self.server.Send('/api/datastore/index/add',
+ app_id=self.config.application,
+ version=self.config.version,
+ payload=self.definitions.ToYAML())
+
+
+class CronEntryUpload(object):
+ """Provides facilities to upload cron entries to the hosting service."""
+
+ def __init__(self, server, config, cron):
+ """Creates a new CronEntryUpload.
+
+ Args:
+ server: The RPC server to use. Should be an instance of a subclass of
+ AbstractRpcServer
+ config: The AppInfoExternal object derived from the app.yaml file.
+ cron: The CronInfoExternal object loaded from the cron.yaml file.
+ """
+ self.server = server
+ self.config = config
+ self.cron = cron
+
+ def DoUpload(self):
+ """Uploads the cron entries."""
+ StatusUpdate('Uploading cron entries.')
+ self.server.Send('/api/datastore/cron/update',
+ app_id=self.config.application,
+ version=self.config.version,
+ payload=self.cron.ToYAML())
+
+
+class QueueEntryUpload(object):
+ """Provides facilities to upload task queue entries to the hosting service."""
+
+ def __init__(self, server, config, queue):
+ """Creates a new QueueEntryUpload.
+
+ Args:
+ server: The RPC server to use. Should be an instance of a subclass of
+ AbstractRpcServer
+ config: The AppInfoExternal object derived from the app.yaml file.
+ queue: The QueueInfoExternal object loaded from the queue.yaml file.
+ """
+ self.server = server
+ self.config = config
+ self.queue = queue
+
+ def DoUpload(self):
+ """Uploads the task queue entries."""
+ StatusUpdate('Uploading task queue entries.')
+ self.server.Send('/api/queue/update',
+ app_id=self.config.application,
+ version=self.config.version,
+ payload=self.queue.ToYAML())
+
+
+class IndexOperation(object):
+ """Provide facilities for writing Index operation commands."""
+
+ def __init__(self, server, config):
+ """Creates a new IndexOperation.
+
+ Args:
+ server: The RPC server to use. Should be an instance of HttpRpcServer
+ or TestRpcServer.
+ config: appinfo.AppInfoExternal configuration object.
+ """
+ self.server = server
+ self.config = config
+
+ def DoDiff(self, definitions):
+ """Retrieve diff file from the server.
+
+ Args:
+ definitions: datastore_index.IndexDefinitions as loaded from the user's
+ index.yaml file.
+
+ Returns:
+ A pair of datastore_index.IndexDefinitions objects. The first record
+ is the set of indexes that are present in the index.yaml file but missing
+ from the server. The second record is the set of indexes that are
+ present on the server but missing from the index.yaml file (indicating
+ that these indexes should probably be vacuumed).
+ """
+ StatusUpdate('Fetching index definitions diff.')
+ response = self.server.Send('/api/datastore/index/diff',
+ app_id=self.config.application,
+ payload=definitions.ToYAML())
+ return datastore_index.ParseMultipleIndexDefinitions(response)
+
+ def DoDelete(self, definitions):
+ """Delete indexes from the server.
+
+ Args:
+ definitions: Index definitions to delete from datastore.
+
+ Returns:
+ A single datastore_index.IndexDefinitions containing indexes that were
+ not deleted, probably because they were already removed. This may
+ be normal behavior, as there is a potential race condition between
+ fetching the index-diff and sending the deletion request.
+ """
+ StatusUpdate('Deleting selected index definitions.')
+ response = self.server.Send('/api/datastore/index/delete',
+ app_id=self.config.application,
+ payload=definitions.ToYAML())
+ return datastore_index.ParseIndexDefinitions(response)
+
+
+class VacuumIndexesOperation(IndexOperation):
+ """Provide facilities to request the deletion of datastore indexes."""
+
+ def __init__(self, server, config, force,
+ confirmation_fn=raw_input):
+ """Creates a new VacuumIndexesOperation.
+
+ Args:
+ server: The RPC server to use. Should be an instance of HttpRpcServer
+ or TestRpcServer.
+ config: appinfo.AppInfoExternal configuration object.
+ force: True to force deletion of indexes, else False.
+ confirmation_fn: Function used for getting input from the user.
+ """
+ super(VacuumIndexesOperation, self).__init__(server, config)
+ self.force = force
+ self.confirmation_fn = confirmation_fn
+
+ def GetConfirmation(self, index):
+ """Get confirmation from user to delete an index.
+
+ This method will enter an input loop until the user provides a
+ response it is expecting. Valid input is one of three responses:
+
+ y: Confirm deletion of index.
+ n: Do not delete index.
+ a: Delete all indexes without asking for further confirmation.
+
+ If the user enters nothing at all, the default action is to skip
+ that index (it is not deleted).
+
+ If the user selects 'a', as a side effect, the 'force' flag is set.
+
+ Args:
+ index: Index to confirm.
+
+ Returns:
+ True if the user enters 'y' or 'a'. False if the user enters 'n'.
+ """
+ while True:
+ print 'This index is no longer defined in your index.yaml file.'
+ print
+ print index.ToYAML()
+ print
+
+ confirmation = self.confirmation_fn(
+ 'Are you sure you want to delete this index? (N/y/a): ')
+ confirmation = confirmation.strip().lower()
+
+ if confirmation == 'y':
+ return True
+ elif confirmation == 'n' or not confirmation:
+ return False
+ elif confirmation == 'a':
+ self.force = True
+ return True
+ else:
+ print 'Did not understand your response.'
+
+ def DoVacuum(self, definitions):
+ """Vacuum indexes in datastore.
+
+ This method will query the server to determine which indexes are not
+ being used according to the user's local index.yaml file. Once it has
+ made this determination, it confirms with the user which unused indexes
+ should be deleted. Once confirmation for each index is received, it
+ deletes those indexes.
+
+ Because another user may in theory delete the same indexes at the same
+ time as the user, there is a potential race condition. In these rare cases,
+ some of the indexes previously confirmed for deletion will not be found.
+ The user is notified which indexes these were.
+
+ Args:
+ definitions: datastore_index.IndexDefinitions as loaded from the user's
+ index.yaml file.
+ """
+ unused_new_indexes, notused_indexes = self.DoDiff(definitions)
+
+ deletions = datastore_index.IndexDefinitions(indexes=[])
+ if notused_indexes.indexes is not None:
+ for index in notused_indexes.indexes:
+ if self.force or self.GetConfirmation(index):
+ deletions.indexes.append(index)
+
+ if deletions.indexes:
+ not_deleted = self.DoDelete(deletions)
+
+ if not_deleted.indexes:
+ not_deleted_count = len(not_deleted.indexes)
+ if not_deleted_count == 1:
+ warning_message = ('An index was not deleted. Most likely this is '
+ 'because it no longer exists.\n\n')
+ else:
+ warning_message = ('%d indexes were not deleted. Most likely this '
+ 'is because they no longer exist.\n\n'
+ % not_deleted_count)
+ for index in not_deleted.indexes:
+ warning_message += index.ToYAML()
+ logging.warning(warning_message)
+
+
+class LogsRequester(object):
+ """Provide facilities to export request logs."""
+
+ def __init__(self, server, config, output_file,
+ num_days, append, severity, now, vhost, include_vhost):
+ """Constructor.
+
+ Args:
+ server: The RPC server to use. Should be an instance of HttpRpcServer
+ or TestRpcServer.
+ config: appinfo.AppInfoExternal configuration object.
+ output_file: Output file name.
+ num_days: Number of days worth of logs to export; 0 for all available.
+ append: True if appending to an existing file.
+ severity: App log severity to request (0-4); None for no app logs.
+ now: POSIX timestamp used for calculating valid dates for num_days.
+ vhost: The virtual host of log messages to get. None for all hosts.
+ include_vhost: If true, the virtual host is included in log messages.
+ """
+ self.server = server
+ self.config = config
+ self.output_file = output_file
+ self.append = append
+ self.num_days = num_days
+ self.severity = severity
+ self.vhost = vhost
+ self.include_vhost = include_vhost
+ self.version_id = self.config.version + '.1'
+ self.sentinel = None
+ self.write_mode = 'w'
+ if self.append:
+ self.sentinel = FindSentinel(self.output_file)
+ self.write_mode = 'a'
+ self.valid_dates = None
+ if self.num_days:
+ patterns = []
+ now = PacificTime(now)
+ for i in xrange(self.num_days):
+ then = time.gmtime(now - 24*3600 * i)
+ patterns.append(re.escape(time.strftime('%d/%m/%Y', then)))
+ patterns.append(re.escape(time.strftime('%d/%b/%Y', then)))
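+ # The compiled pattern matches request-log lines whose bracketed
+ # timestamp begins with one of the dates collected above, in either
+ # numeric or abbreviated-month form, e.g. '[19/Oct/2009:' in the
+ # (illustrative) line '1.2.3.4 - - [19/Oct/2009:...'.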
+ self.valid_dates = re.compile(r'[^[]+\[(' + '|'.join(patterns) + r'):')
+
+ def DownloadLogs(self):
+ """Download the requested logs.
+
+ This will write the logs to the file designated by
+ self.output_file, or to stdout if the filename is '-'.
+ Multiple roundtrips to the server may be made.
+ """
+ StatusUpdate('Downloading request logs for %s %s.' %
+ (self.config.application, self.version_id))
+ tf = tempfile.TemporaryFile()
+ offset = None
+ try:
+ while True:
+ try:
+ offset = self.RequestLogLines(tf, offset)
+ if not offset:
+ break
+ except KeyboardInterrupt:
+ StatusUpdate('Keyboard interrupt; saving data downloaded so far.')
+ break
+ StatusUpdate('Copying request logs to %r.' % self.output_file)
+ if self.output_file == '-':
+ of = sys.stdout
+ else:
+ try:
+ of = open(self.output_file, self.write_mode)
+ except IOError, err:
+ StatusUpdate('Can\'t write %r: %s.' % (self.output_file, err))
+ sys.exit(1)
+ try:
+ line_count = CopyReversedLines(tf, of)
+ finally:
+ of.flush()
+ if of is not sys.stdout:
+ of.close()
+ finally:
+ tf.close()
+ StatusUpdate('Copied %d records.' % line_count)
+
+ def RequestLogLines(self, tf, offset):
+ """Make a single roundtrip to the server.
+
+ Args:
+ tf: Writable binary stream to which the log lines returned by
+ the server are written, stripped of headers, and excluding
+ lines skipped due to self.sentinel or self.valid_dates filtering.
+ offset: Offset string for a continued request; None for the first.
+
+ Returns:
+ The offset string to be used for the next request, if another
+ request should be issued; or None, if not.
+ """
+ logging.info('Request with offset %r.', offset)
+ kwds = {'app_id': self.config.application,
+ 'version': self.version_id,
+ 'limit': 100,
+ }
+ if offset:
+ kwds['offset'] = offset
+ if self.severity is not None:
+ kwds['severity'] = str(self.severity)
+ if self.vhost is not None:
+ kwds['vhost'] = str(self.vhost)
+ if self.include_vhost is not None:
+ kwds['include_vhost'] = str(self.include_vhost)
+ response = self.server.Send('/api/request_logs', payload=None, **kwds)
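+ # Carriage returns separate a request line from its app-log
+ # sub-lines; turn them into '\0' so that splitlines() keeps each
+ # record on one line (CopyReversedLines later renders '\0' as a
+ # newline plus tab).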
+ response = response.replace('\r', '\0')
+ lines = response.splitlines()
+ logging.info('Received %d bytes, %d records.', len(response), len(lines))
+ offset = None
+ if lines and lines[0].startswith('#'):
+ match = re.match(r'^#\s*next_offset=(\S+)\s*$', lines[0])
+ del lines[0]
+ if match:
+ offset = match.group(1)
+ if lines and lines[-1].startswith('#'):
+ del lines[-1]
+ valid_dates = self.valid_dates
+ sentinel = self.sentinel
+ len_sentinel = None
+ if sentinel:
+ len_sentinel = len(sentinel)
+ for line in lines:
+ if ((sentinel and
+ line.startswith(sentinel) and
+ line[len_sentinel : len_sentinel+1] in ('', '\0')) or
+ (valid_dates and not valid_dates.match(line))):
+ return None
+ tf.write(line + '\n')
+ if not lines:
+ return None
+ return offset
+
+
+def PacificTime(now):
+ """Helper to return the number of seconds between UTC and Pacific time.
+
+ This is needed to compute today's date in Pacific time (more
+ specifically: Mountain View local time), which is how request logs
+ are reported. (Google servers always report times in Mountain View
+ local time, regardless of where they are physically located.)
+
+ This takes (post-2006) US DST into account. Pacific time is either
+ 8 hours or 7 hours west of UTC, depending on whether DST is in
+ effect. Since 2007, US DST starts on the second Sunday in March
+ and ends on the first Sunday in November. (Reference:
+ http://aa.usno.navy.mil/faq/docs/daylight_time.php.)
+
+ Note that the server doesn't report its local time (the HTTP Date
+ header uses UTC), and the client's local time is irrelevant.
+
+ Args:
+ now: A posix timestamp giving current UTC time.
+
+ Returns:
+ A pseudo-posix timestamp giving current Pacific time. Passing
+ this through time.gmtime() will produce a tuple in Pacific local
+ time.
+ """
+ now -= 8*3600
+ if IsPacificDST(now):
+ now += 3600
+ return now
+
+
+def IsPacificDST(now):
+ """Helper for PacificTime to decide whether now is Pacific DST (PDT).
+
+ Args:
+ now: A pseudo-posix timestamp giving current time in PST.
+
+ Returns:
+ True if now falls within the range of DST, False otherwise.
+ """
+ DAY = 24*3600
+ SUNDAY = 6
+ pst = time.gmtime(now)
+ year = pst[0]
+ assert year >= 2007
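+ # DST begins at 2:00 on the second Sunday in March; March 8 is the
+ # earliest date that Sunday can fall on, so scan forward from there.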
+ begin = calendar.timegm((year, 3, 8, 2, 0, 0, 0, 0, 0))
+ while time.gmtime(begin).tm_wday != SUNDAY:
+ begin += DAY
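+ # DST ends at 2:00 on the first Sunday in November (November 1 at
+ # the earliest).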
+ end = calendar.timegm((year, 11, 1, 2, 0, 0, 0, 0, 0))
+ while time.gmtime(end).tm_wday != SUNDAY:
+ end += DAY
+ return begin <= now < end
+
+
+def CopyReversedLines(instream, outstream, blocksize=2**16):
+ r"""Copy lines from input stream to output stream in reverse order.
+
+ As a special feature, null bytes in the input are turned into
+ newlines followed by tabs in the output, but these 'sub-lines'
+ separated by null bytes are not reversed. E.g. If the input is
+ 'A\0B\nC\0D\n', the output is 'C\n\tD\nA\n\tB\n'.
+
+ Args:
+ instream: A seekable stream open for reading in binary mode.
+ outstream: A stream open for writing; doesn't have to be seekable or binary.
+ blocksize: Optional block size for buffering, for unit testing.
+
+ Returns:
+ The number of lines copied.
+ """
+ line_count = 0
+ instream.seek(0, 2)
+ last_block = instream.tell() // blocksize
+ spillover = ''
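+ # 'spillover' carries the partial line found at the head of each
+ # block into the next (earlier) iteration, where the start of that
+ # line lives.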
+ for iblock in xrange(last_block + 1, -1, -1):
+ instream.seek(iblock * blocksize)
+ data = instream.read(blocksize)
+ lines = data.splitlines(True)
+ lines[-1:] = ''.join(lines[-1:] + [spillover]).splitlines(True)
+ if lines and not lines[-1].endswith('\n'):
+ lines[-1] += '\n'
+ lines.reverse()
+ if lines and iblock > 0:
+ spillover = lines.pop()
+ if lines:
+ line_count += len(lines)
+ data = ''.join(lines).replace('\0', '\n\t')
+ outstream.write(data)
+ return line_count
+
+
+def FindSentinel(filename, blocksize=2**16):
+ """Return the sentinel line from the output file.
+
+ Args:
+ filename: The filename of the output file. (We'll read this file.)
+ blocksize: Optional block size for buffering, for unit testing.
+
+ Returns:
+ The contents of the last line in the file that doesn't start with
+ a tab, with its trailing newline stripped; or None if the file
+ couldn't be opened or no such line could be found by inspecting
+ the last 'blocksize' bytes of the file.
+ """
+ if filename == '-':
+ StatusUpdate('Can\'t combine --append with output to stdout.')
+ sys.exit(2)
+ try:
+ fp = open(filename, 'rb')
+ except IOError, err:
+ StatusUpdate('Append mode disabled: can\'t read %r: %s.' % (filename, err))
+ return None
+ try:
+ fp.seek(0, 2)
+ fp.seek(max(0, fp.tell() - blocksize))
+ lines = fp.readlines()
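+ # The seek above rarely lands on a line boundary, so the first line
+ # read is usually a fragment; discard it.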
+ del lines[:1]
+ sentinel = None
+ for line in lines:
+ if not line.startswith('\t'):
+ sentinel = line
+ if not sentinel:
+ StatusUpdate('Append mode disabled: can\'t find sentinel in %r.' %
+ filename)
+ return None
+ return sentinel.rstrip('\n')
+ finally:
+ fp.close()
+
+
+class UploadBatcher(object):
+ """Helper to batch file uploads."""
+
+ def __init__(self, what, app_id, version, server):
+ """Constructor.
+
+ Args:
+ what: Either 'file' or 'blob' indicating what kind of objects
+ this batcher uploads. Used in messages and URLs.
+ app_id: The application ID.
+ version: The application version string.
+ server: The RPC server.
+ """
+ assert what in ('file', 'blob'), repr(what)
+ self.what = what
+ self.app_id = app_id
+ self.version = version
+ self.server = server
+ self.single_url = '/api/appversion/add' + what
+ self.batch_url = self.single_url + 's'
+ self.batching = True
+ self.batch = []
+ self.batch_size = 0
+
+ def SendBatch(self):
+ """Send the current batch on its way.
+
+ If successful, resets self.batch and self.batch_size.
+
+ Raises:
+ HTTPError with code=404 if the server doesn't support batching.
+ """
+ boundary = 'boundary'
+ parts = []
+ for path, payload, mime_type in self.batch:
+ while boundary in payload:
+ boundary += '%04x' % random.randint(0, 0xffff)
+ assert len(boundary) < 80, 'Unexpected error, please try again.'
+ part = '\n'.join(['',
+ 'X-Appcfg-File: %s' % urllib.quote(path),
+ 'X-Appcfg-Hash: %s' % _Hash(payload),
+ 'Content-Type: %s' % mime_type,
+ 'Content-Length: %d' % len(payload),
+ 'Content-Transfer-Encoding: 8bit',
+ '',
+ payload,
+ ])
+ parts.append(part)
+ parts.insert(0,
+ 'MIME-Version: 1.0\n'
+ 'Content-Type: multipart/mixed; boundary="%s"\n'
+ '\n'
+ 'This is a message with multiple parts in MIME format.' %
+ boundary)
+ parts.append('--\n')
+ delimiter = '\n--%s' % boundary
+ payload = delimiter.join(parts)
+ logging.info('Uploading batch of %d %ss to %s with boundary="%s".',
+ len(self.batch), self.what, self.batch_url, boundary)
+ self.server.Send(self.batch_url,
+ payload=payload,
+ content_type='message/rfc822',
+ app_id=self.app_id,
+ version=self.version)
+ self.batch = []
+ self.batch_size = 0
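+
+ # The assembled payload is one RFC 822 message; schematically
+ # (headers abridged):
+ #   MIME-Version: 1.0
+ #   Content-Type: multipart/mixed; boundary="boundary"
+ #
+ #   This is a message with multiple parts in MIME format.
+ #   --boundary
+ #   X-Appcfg-File: <quoted path>
+ #   X-Appcfg-Hash: <content hash>
+ #   Content-Type: <mime type>
+ #   ...
+ #   <payload>
+ #   --boundary
+ #   ...
+ #   --boundary--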
+
+ def SendSingleFile(self, path, payload, mime_type):
+ """Send a single file on its way."""
+ logging.info('Uploading %s %s (%s bytes, type=%s) to %s.',
+ self.what, path, len(payload), mime_type, self.single_url)
+ self.server.Send(self.single_url,
+ payload=payload,
+ content_type=mime_type,
+ path=path,
+ app_id=self.app_id,
+ version=self.version)
+
+ def Flush(self):
+ """Flush the current batch.
+
+ This first attempts to send the batch as a single request; if that
+ fails because the server doesn't support batching, the files are
+ sent one by one, and self.batching is reset to False.
+
+ At the end, self.batch and self.batch_size are reset.
+ """
+ if not self.batch:
+ return
+ try:
+ self.SendBatch()
+ except urllib2.HTTPError, err:
+ if err.code != 404:
+ raise
+
+ logging.info('Old server detected; turning off %s batching.', self.what)
+ self.batching = False
+
+ for path, payload, mime_type in self.batch:
+ self.SendSingleFile(path, payload, mime_type)
+
+ self.batch = []
+ self.batch_size = 0
+
+ def AddToBatch(self, path, payload, mime_type):
+ """Batch a file, possibly flushing first, or perhaps upload it directly.
+
+ Args:
+ path: The name of the file.
+ payload: The contents of the file.
+ mime_type: The MIME Content-type of the file, or None.
+
+ If mime_type is None, application/octet-stream is substituted.
+ """
+ if not mime_type:
+ mime_type = 'application/octet-stream'
+ size = len(payload)
+ if size <= MAX_BATCH_FILE_SIZE:
+ if (len(self.batch) >= MAX_BATCH_COUNT or
+ self.batch_size + size > MAX_BATCH_SIZE):
+ self.Flush()
+ if self.batching:
+ logging.info('Adding %s %s (%s bytes, type=%s) to batch.',
+ self.what, path, size, mime_type)
+ self.batch.append((path, payload, mime_type))
+ self.batch_size += size + BATCH_OVERHEAD
+ return
+ self.SendSingleFile(path, payload, mime_type)
+
+
+def _Hash(content):
+ """Compute the hash of the content.
+
+ Args:
+ content: The data to hash as a string.
+
+ Returns:
+ The string representation of the hash.
+ """
+ h = sha.new(content).hexdigest()
+ return '%s_%s_%s_%s_%s' % (h[0:8], h[8:16], h[16:24], h[24:32], h[32:40])
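+
+ # Example: _Hash('') formats the 40-hex-digit SHA-1 digest of the
+ # empty string as
+ #   'da39a3ee_5e6b4b0d_3255bfef_95601890_afd80709'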
+
+
+class AppVersionUpload(object):
+ """Provides facilities to upload a new appversion to the hosting service.
+
+ Attributes:
+ server: The AbstractRpcServer to use for the upload.
+ config: The AppInfoExternal object derived from the app.yaml file.
+ app_id: The application string from 'config'.
+ version: The version string from 'config'.
+ files: A dictionary of files to upload to the server, mapping path to
+ hash of the file contents.
+ in_transaction: True iff a transaction with the server has started.
+ An AppVersionUpload can do only one transaction at a time.
+ deployed: True iff the Deploy method has been called.
+ """
+
+ def __init__(self, server, config):
+ """Creates a new AppVersionUpload.
+
+ Args:
+ server: The RPC server to use. Should be an instance of HttpRpcServer or
+ TestRpcServer.
+ config: An AppInfoExternal object that specifies the configuration for
+ this application.
+ """
+ self.server = server
+ self.config = config
+ self.app_id = self.config.application
+ self.version = self.config.version
+ self.files = {}
+ self.in_transaction = False
+ self.deployed = False
+ self.batching = True
+ self.file_batcher = UploadBatcher('file', self.app_id, self.version,
+ self.server)
+ self.blob_batcher = UploadBatcher('blob', self.app_id, self.version,
+ self.server)
+
+ def AddFile(self, path, file_handle):
+ """Adds the provided file to the list to be pushed to the server.
+
+ Args:
+ path: The path the file should be uploaded as.
+ file_handle: A stream containing data to upload.
+ """
+ assert not self.in_transaction, 'Already in a transaction.'
+ assert file_handle is not None
+
+ reason = appinfo.ValidFilename(path)
+ if reason:
+ logging.error(reason)
+ return
+
+ pos = file_handle.tell()
+ content_hash = _Hash(file_handle.read())
+ file_handle.seek(pos, 0)
+
+ self.files[path] = content_hash
+
+ def Begin(self):
+ """Begins the transaction, returning a list of files that need uploading.
+
+ All calls to AddFile must be made before calling Begin().
+
+ Returns:
+ A list of pathnames for files that should be uploaded using UploadFile()
+ before Commit() can be called.
+ """
+ assert not self.in_transaction, 'Already in a transaction.'
+
+ StatusUpdate('Initiating update.')
+ self.server.Send('/api/appversion/create', app_id=self.app_id,
+ version=self.version, payload=self.config.ToYAML())
+ self.in_transaction = True
+
+ files_to_clone = []
+ blobs_to_clone = []
+ for path, content_hash in self.files.iteritems():
+ mime_type = GetMimeTypeIfStaticFile(self.config, path)
+ if mime_type is not None:
+ blobs_to_clone.append((path, content_hash, mime_type))
+ else:
+ files_to_clone.append((path, content_hash))
+
+ files_to_upload = {}
+
+ def CloneFiles(url, files, file_type):
+ """Sends files to the given url.
+
+ Args:
+ url: the server URL to use.
+ files: a list of files
+ file_type: the type of the files
+ """
+ if not files:
+ return
+
+ StatusUpdate('Cloning %d %s file%s.' %
+ (len(files), file_type, len(files) != 1 and 's' or ''))
+ for i in xrange(0, len(files), MAX_FILES_TO_CLONE):
+ if i > 0:
+ StatusUpdate('Cloned %d files.' % i)
+
+ chunk = files[i:i + MAX_FILES_TO_CLONE]
+ result = self.server.Send(url,
+ app_id=self.app_id, version=self.version,
+ payload=BuildClonePostBody(chunk))
+ if result:
+ files_to_upload.update(dict(
+ (f, self.files[f]) for f in result.split(LIST_DELIMITER)))
+
+ CloneFiles('/api/appversion/cloneblobs', blobs_to_clone, 'static')
+ CloneFiles('/api/appversion/clonefiles', files_to_clone, 'application')
+
+ logging.debug('Files to upload: %s', files_to_upload)
+
+ self.files = files_to_upload
+ return sorted(files_to_upload.iterkeys())
+
+ def UploadFile(self, path, file_handle):
+ """Uploads a file to the hosting service.
+
+ Must only be called after Begin().
+ The path provided must be one of those that were returned by Begin().
+
+ Args:
+ path: The path the file is being uploaded as.
+ file_handle: A file-like object containing the data to upload.
+
+ Raises:
+ KeyError: The provided file is not amongst those to be uploaded.
+ """
+ assert self.in_transaction, 'Begin() must be called before UploadFile().'
+ if path not in self.files:
+ raise KeyError('File \'%s\' is not in the list of files to be uploaded.'
+ % path)
+
+ del self.files[path]
+ mime_type = GetMimeTypeIfStaticFile(self.config, path)
+ payload = file_handle.read()
+ if mime_type is None:
+ self.file_batcher.AddToBatch(path, payload, mime_type)
+ else:
+ self.blob_batcher.AddToBatch(path, payload, mime_type)
+
+ def Commit(self):
+ """Commits the transaction, making the new app version available.
+
+ All the files returned by Begin() must have been uploaded with UploadFile()
+ before Commit() can be called.
+
+ This tries the new 'deploy' method; if that fails it uses the old 'commit'.
+
+ Raises:
+ Exception: Some required files were not uploaded.
+ """
+ assert self.in_transaction, 'Begin() must be called before Commit().'
+ if self.files:
+ raise Exception('Not all required files have been uploaded.')
+
+ try:
+ self.Deploy()
+ if not RetryWithBackoff(1, 2, 8, self.IsReady):
+ logging.warning('Version still not ready to serve, aborting.')
+ raise Exception('Version not ready.')
+ self.StartServing()
+ except urllib2.HTTPError, e:
+ if e.code != 404:
+ raise
+ StatusUpdate('Closing update.')
+ self.server.Send('/api/appversion/commit', app_id=self.app_id,
+ version=self.version)
+ self.in_transaction = False
+
+ def Deploy(self):
+ """Deploys the new app version but does not make it default.
+
+ All the files returned by Begin() must have been uploaded with UploadFile()
+ before Deploy() can be called.
+
+ Raises:
+ Exception: Some required files were not uploaded.
+ """
+ assert self.in_transaction, 'Begin() must be called before Deploy().'
+ if self.files:
+ raise Exception('Not all required files have been uploaded.')
+
+ StatusUpdate('Deploying new version.')
+ self.server.Send('/api/appversion/deploy', app_id=self.app_id,
+ version=self.version)
+ self.deployed = True
+
+ def IsReady(self):
+ """Check if the new app version is ready to serve traffic.
+
+ Raises:
+ Exception: Deploy has not yet been called.
+
+ Returns:
+ True if the server returned the app is ready to serve.
+ """
+ assert self.deployed, 'Deploy() must be called before IsReady().'
+
+ StatusUpdate('Checking if new version is ready to serve.')
+ result = self.server.Send('/api/appversion/isready', app_id=self.app_id,
+ version=self.version)
+ return result == '1'
+
+ def StartServing(self):
+ """Start serving with the newly created version.
+
+ Raises:
+ Exception: Deploy has not yet been called.
+ """
+ assert self.deployed, 'Deploy() must be called before StartServing().'
+
+ StatusUpdate('Closing update: new version is ready to start serving.')
+ self.server.Send('/api/appversion/startserving',
+ app_id=self.app_id, version=self.version)
+ self.in_transaction = False
+
+ def Rollback(self):
+ """Rolls back the transaction if one is in progress."""
+ if not self.in_transaction:
+ return
+ StatusUpdate('Rolling back the update.')
+ self.server.Send('/api/appversion/rollback', app_id=self.app_id,
+ version=self.version)
+ self.in_transaction = False
+ self.files = {}
+
+ def DoUpload(self, paths, max_size, openfunc):
+ """Uploads a new appversion with the given config and files to the server.
+
+ Args:
+ paths: An iterator that yields the relative paths of the files to upload.
+ max_size: The maximum size file to upload.
+ openfunc: A function that takes a path and returns a file-like object.
+ """
+ logging.info('Reading app configuration.')
+
+ path = ''
+ try:
+ StatusUpdate('Scanning files on local disk.')
+ num_files = 0
+ for path in paths:
+ file_handle = openfunc(path)
+ try:
+ if self.config.skip_files.match(path):
+ logging.info('Ignoring file \'%s\': File matches ignore regex.',
+ path)
+ else:
+ file_length = GetFileLength(file_handle)
+ if file_length > max_size:
+ logging.error('Ignoring file \'%s\': Too long '
+ '(max %d bytes, file is %d bytes)',
+ path, max_size, file_length)
+ else:
+ logging.info('Processing file \'%s\'', path)
+ self.AddFile(path, file_handle)
+ finally:
+ file_handle.close()
+ num_files += 1
+ if num_files % 500 == 0:
+ StatusUpdate('Scanned %d files.' % num_files)
+ except KeyboardInterrupt:
+ logging.info('User interrupted. Aborting.')
+ raise
+ except EnvironmentError, e:
+ logging.error('An error occurred processing file \'%s\': %s. Aborting.',
+ path, e)
+ raise
+
+ try:
+ missing_files = self.Begin()
+ if missing_files:
+ StatusUpdate('Uploading %d files and blobs.' % len(missing_files))
+ num_files = 0
+ for missing_file in missing_files:
+ file_handle = openfunc(missing_file)
+ try:
+ self.UploadFile(missing_file, file_handle)
+ finally:
+ file_handle.close()
+ num_files += 1
+ if num_files % 500 == 0:
+ StatusUpdate('Processed %d out of %d.' %
+ (num_files, len(missing_files)))
+ self.file_batcher.Flush()
+ self.blob_batcher.Flush()
+ StatusUpdate('Uploaded %d files and blobs.' % num_files)
+
+ self.Commit()
+
+ except KeyboardInterrupt:
+ logging.info('User interrupted. Aborting.')
+ self.Rollback()
+ raise
+ except urllib2.HTTPError, err:
+ logging.info('HTTP Error (%s)', err)
+ self.Rollback()
+ raise
+ except:
+ logging.exception('An unexpected error occurred. Aborting.')
+ self.Rollback()
+ raise
+
+ logging.info('Done!')
+
+
+def FileIterator(base, separator=os.path.sep):
+ """Walks a directory tree, returning all the files. Follows symlinks.
+
+ Args:
+ base: The base path to search for files under.
+ separator: Path separator used by the running system's platform.
+
+ Yields:
+ Paths of files found, relative to base.
+ """
+ dirs = ['']
+ while dirs:
+ current_dir = dirs.pop()
+ for entry in os.listdir(os.path.join(base, current_dir)):
+ name = os.path.join(current_dir, entry)
+ fullname = os.path.join(base, name)
+ if os.path.isfile(fullname):
+ if separator == '\\':
+ name = name.replace('\\', '/')
+ yield name
+ elif os.path.isdir(fullname):
+ dirs.append(name)
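+
+ # Example (illustrative tree): given base/a.py and base/sub/b.py,
+ # FileIterator('base') yields 'a.py' and 'sub/b.py'; on Windows,
+ # backslashes in the yielded names are normalized to forward slashes.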
+
+
+def GetFileLength(fh):
+ """Returns the length of the file represented by fh.
+
+ This function is capable of finding the length of any seekable stream,
+ unlike os.fstat, which only works on file streams.
+
+ Args:
+ fh: The stream to get the length of.
+
+ Returns:
+ The length of the stream.
+ """
+ pos = fh.tell()
+ fh.seek(0, 2)
+ length = fh.tell()
+ fh.seek(pos, 0)
+ return length
+
+
+def GetUserAgent(get_version=GetVersionObject,
+ get_platform=appengine_rpc.GetPlatformToken):
+ """Determines the value of the 'User-agent' header to use for HTTP requests.
+
+ If the 'APPCFG_SDK_NAME' environment variable is present, that will be
+ used as the first product token in the user-agent.
+
+ Args:
+ get_version: Used for testing.
+ get_platform: Used for testing.
+
+ Returns:
+ String containing the 'user-agent' header value, which includes the SDK
+ version, the platform information, and the version of Python;
+ e.g., 'appcfg_py/1.0.1 Darwin/9.2.0 Python/2.5.2'.
+ """
+ product_tokens = []
+
+ sdk_name = os.environ.get('APPCFG_SDK_NAME')
+ if sdk_name:
+ product_tokens.append(sdk_name)
+ else:
+ version = get_version()
+ if version is None:
+ release = 'unknown'
+ else:
+ release = version['release']
+
+ product_tokens.append('appcfg_py/%s' % release)
+
+ product_tokens.append(get_platform())
+
+ python_version = '.'.join(str(i) for i in sys.version_info)
+ product_tokens.append('Python/%s' % python_version)
+
+ return ' '.join(product_tokens)
+
+
+def GetSourceName(get_version=GetVersionObject):
+ """Gets the name of this source version."""
+ version = get_version()
+ if version is None:
+ release = 'unknown'
+ else:
+ release = version['release']
+ return 'Google-appcfg-%s' % (release,)
+
+
+class AppCfgApp(object):
+ """Singleton class to wrap AppCfg tool functionality.
+
+ This class is responsible for parsing the command line and executing
+ the desired action on behalf of the user. Processing files and
+ communicating with the server is handled by other classes.
+
+ Attributes:
+ actions: A dictionary mapping action names to Action objects.
+ action: The Action specified on the command line.
+ parser: An instance of optparse.OptionParser.
+ options: The command line options parsed by 'parser'.
+ argv: The original command line as a list.
+ args: The positional command line args left over after parsing the options.
+ raw_input_fn: Function used for getting raw user input, like email.
+ password_input_fn: Function used for getting user password.
+ error_fh: Unexpected HTTPErrors are printed to this file handle.
+
+ Attributes for testing:
+ parser_class: The class to use for parsing the command line. Because
+ OptionsParser will exit the program when there is a parse failure, it
+ is nice to subclass OptionsParser and catch the error before exiting.
+ """
+
+ def __init__(self, argv, parser_class=optparse.OptionParser,
+ rpc_server_class=appengine_rpc.HttpRpcServer,
+ raw_input_fn=raw_input,
+ password_input_fn=getpass.getpass,
+ error_fh=sys.stderr,
+ update_check_class=UpdateCheck):
+ """Initializer. Parses the cmdline and selects the Action to use.
+
+ Initializes all of the attributes described in the class docstring.
+ Prints help or error messages if there is an error parsing the cmdline.
+
+ Args:
+ argv: The list of arguments passed to this program.
+ parser_class: Options parser to use for this application.
+ rpc_server_class: RPC server class to use for this application.
+ raw_input_fn: Function used for getting user email.
+ password_input_fn: Function used for getting user password.
+ error_fh: Unexpected HTTPErrors are printed to this file handle.
+ update_check_class: UpdateCheck class (can be replaced for testing).
+ """
+ self.parser_class = parser_class
+ self.argv = argv
+ self.rpc_server_class = rpc_server_class
+ self.raw_input_fn = raw_input_fn
+ self.password_input_fn = password_input_fn
+ self.error_fh = error_fh
+ self.update_check_class = update_check_class
+
+ self.parser = self._GetOptionParser()
+ for action in self.actions.itervalues():
+ action.options(self, self.parser)
+
+ self.options, self.args = self.parser.parse_args(argv[1:])
+
+ if len(self.args) < 1:
+ self._PrintHelpAndExit()
+ if self.args[0] not in self.actions:
+ self.parser.error('Unknown action \'%s\'\n%s' %
+ (self.args[0], self.parser.get_description()))
+ action_name = self.args.pop(0)
+ self.action = self.actions[action_name]
+
+ self.parser, self.options = self._MakeSpecificParser(self.action)
+
+ if self.options.help:
+ self._PrintHelpAndExit()
+
+ if self.options.verbose == 2:
+ logging.getLogger().setLevel(logging.INFO)
+ elif self.options.verbose == 3:
+ logging.getLogger().setLevel(logging.DEBUG)
+
+ global verbosity
+ verbosity = self.options.verbose
+
+ def Run(self):
+ """Executes the requested action.
+
+ Catches any HTTPErrors raised by the action and prints them to stderr.
+
+ Returns:
+ 1 on error, 0 if successful.
+ """
+ try:
+ self.action(self)
+ except urllib2.HTTPError, e:
+ body = e.read()
+ print >>self.error_fh, ('Error %d: --- begin server output ---\n'
+ '%s\n--- end server output ---' %
+ (e.code, body.rstrip('\n')))
+ return 1
+ except yaml_errors.EventListenerError, e:
+ print >>self.error_fh, ('Error parsing yaml file:\n%s' % e)
+ return 1
+ return 0
+
+ def _GetActionDescriptions(self):
+ """Returns a formatted string containing the short_descs for all actions."""
+ action_names = self.actions.keys()
+ action_names.sort()
+ desc = ''
+ for action_name in action_names:
+ desc += ' %s: %s\n' % (action_name, self.actions[action_name].short_desc)
+ return desc
+
+ def _GetOptionParser(self):
+ """Creates an OptionParser with generic usage and description strings.
+
+ Returns:
+ An OptionParser instance.
+ """
+
+ class Formatter(optparse.IndentedHelpFormatter):
+ """Custom help formatter that does not reformat the description."""
+
+ def format_description(self, description):
+ """Very simple formatter."""
+ return description + '\n'
+
+ desc = self._GetActionDescriptions()
+ desc = ('Action must be one of:\n%s'
+ 'Use \'help <action>\' for a detailed description.') % desc
+
+ parser = self.parser_class(usage='%prog [options] <action>',
+ description=desc,
+ formatter=Formatter(),
+ conflict_handler='resolve')
+ parser.add_option('-h', '--help', action='store_true',
+ dest='help', help='Show the help message and exit.')
+ parser.add_option('-q', '--quiet', action='store_const', const=0,
+ dest='verbose', help='Print errors only.')
+ parser.add_option('-v', '--verbose', action='store_const', const=2,
+ dest='verbose', default=1,
+ help='Print info level logs.')
+ parser.add_option('--noisy', action='store_const', const=3,
+ dest='verbose', help='Print all logs.')
+ parser.add_option('-s', '--server', action='store', dest='server',
+ default='appengine.google.com',
+ metavar='SERVER', help='The server to connect to.')
+ parser.add_option('--secure', action='store_true', dest='secure',
+ default=False,
+ help='Use SSL when communicating with the server.')
+ parser.add_option('-e', '--email', action='store', dest='email',
+ metavar='EMAIL', default=None,
+ help='The username to use. Will prompt if omitted.')
+ parser.add_option('-H', '--host', action='store', dest='host',
+ metavar='HOST', default=None,
+ help='Overrides the Host header sent with all RPCs.')
+ parser.add_option('--no_cookies', action='store_false',
+ dest='save_cookies', default=True,
+ help='Do not save authentication cookies to local disk.')
+ parser.add_option('--passin', action='store_true',
+ dest='passin', default=False,
+ help='Read the login password from stdin.')
+ return parser
+
+ def _MakeSpecificParser(self, action):
+ """Creates a new parser with documentation specific to 'action'.
+
+ Args:
+ action: An Action instance to be used when initializing the new parser.
+
+ Returns:
+ A tuple containing:
+ parser: An instance of OptionsParser customized to 'action'.
+ options: The command line options after re-parsing.
+ """
+ parser = self._GetOptionParser()
+ parser.set_usage(action.usage)
+ parser.set_description('%s\n%s' % (action.short_desc, action.long_desc))
+ action.options(self, parser)
+ options, unused_args = parser.parse_args(self.argv[1:])
+ return parser, options
+
+ def _PrintHelpAndExit(self, exit_code=2):
+ """Prints the parser's help message and exits the program.
+
+ Args:
+ exit_code: The integer code to pass to sys.exit().
+ """
+ self.parser.print_help()
+ sys.exit(exit_code)
+
+ def _GetRpcServer(self):
+ """Returns an instance of an AbstractRpcServer.
+
+ Returns:
+ A new AbstractRpcServer, on which RPC calls can be made.
+ """
+
+ def GetUserCredentials():
+ """Prompts the user for a username and password."""
+ email = self.options.email
+ if email is None:
+ email = self.raw_input_fn('Email: ')
+
+ password_prompt = 'Password for %s: ' % email
+ if self.options.passin:
+ password = self.raw_input_fn(password_prompt)
+ else:
+ password = self.password_input_fn(password_prompt)
+
+ return (email, password)
+
+ if self.options.host == 'localhost':
+ email = self.options.email
+ if email is None:
+ email = 'test@example.com'
+ logging.info('Using debug user %s. Override with --email.', email)
+ server = self.rpc_server_class(
+ self.options.server,
+ lambda: (email, 'password'),
+ GetUserAgent(),
+ GetSourceName(),
+ host_override=self.options.host,
+ save_cookies=self.options.save_cookies)
+ server.authenticated = True
+ return server
+
+ if self.options.passin:
+ auth_tries = 1
+ else:
+ auth_tries = 3
+
+ return self.rpc_server_class(self.options.server, GetUserCredentials,
+ GetUserAgent(), GetSourceName(),
+ host_override=self.options.host,
+ save_cookies=self.options.save_cookies,
+ auth_tries=auth_tries,
+ account_type='HOSTED_OR_GOOGLE',
+ secure=self.options.secure)
+
+ def _FindYaml(self, basepath, file_name):
+ """Find yaml files in application directory.
+
+ Args:
+ basepath: Base application directory.
+ file_name: Filename without extension to search for.
+
+ Returns:
+ Path to located yaml file if one exists, else None.
+ """
+ if not os.path.isdir(basepath):
+ self.parser.error('Not a directory: %s' % basepath)
+
+ for yaml_file in (file_name + '.yaml', file_name + '.yml'):
+ yaml_path = os.path.join(basepath, yaml_file)
+ if os.path.isfile(yaml_path):
+ return yaml_path
+
+ return None
+
+ def _ParseAppYaml(self, basepath):
+ """Parses the app.yaml file.
+
+ Args:
+ basepath: the directory of the application.
+
+ Returns:
+ An AppInfoExternal object.
+ """
+ appyaml_filename = self._FindYaml(basepath, 'app')
+ if appyaml_filename is None:
+ self.parser.error('Directory does not contain an app.yaml '
+ 'configuration file.')
+
+ fh = open(appyaml_filename, 'r')
+ try:
+ appyaml = appinfo.LoadSingleAppInfo(fh)
+ finally:
+ fh.close()
+ return appyaml
+
+ def _ParseYamlFile(self, basepath, basename, parser):
+ """Parses the a yaml file.
+
+ Args:
+ basepath: the directory of the application.
+ basename: the base name of the file (with the '.yaml' stripped off).
+ parser: the function or method used to parse the file.
+
+ Returns:
+ A single parsed yaml file or None if the file does not exist.
+ """
+ file_name = self._FindYaml(basepath, basename)
+ if file_name is not None:
+ fh = open(file_name, 'r')
+ try:
+ defns = parser(fh)
+ finally:
+ fh.close()
+ return defns
+ return None
+
+ def _ParseIndexYaml(self, basepath):
+ """Parses the index.yaml file.
+
+ Args:
+ basepath: the directory of the application.
+
+ Returns:
+ A single parsed yaml file or None if the file does not exist.
+ """
+ return self._ParseYamlFile(basepath, 'index',
+ datastore_index.ParseIndexDefinitions)
+
+ def _ParseCronYaml(self, basepath):
+ """Parses the cron.yaml file.
+
+ Args:
+ basepath: the directory of the application.
+
+ Returns:
+ A CronInfoExternal object or None if the file does not exist.
+ """
+ return self._ParseYamlFile(basepath, 'cron', croninfo.LoadSingleCron)
+
+ def _ParseQueueYaml(self, basepath):
+ """Parses the queue.yaml file.
+
+ Args:
+ basepath: the directory of the application.
+
+ Returns:
+ A QueueInfoExternal object or None if the file does not exist.
+ """
+ return self._ParseYamlFile(basepath, 'queue', queueinfo.LoadSingleQueue)
+
+ def Help(self):
+ """Prints help for a specific action.
+
+ Expects self.args[0] to contain the name of the action in question.
+ Exits the program after printing the help message.
+ """
+ if len(self.args) != 1 or self.args[0] not in self.actions:
+ self.parser.error('Expected a single action argument. Must be one of:\n' +
+ self._GetActionDescriptions())
+
+ action = self.actions[self.args[0]]
+ self.parser, unused_options = self._MakeSpecificParser(action)
+ self._PrintHelpAndExit(exit_code=0)
+
+ def Update(self):
+ """Updates and deploys a new appversion."""
+ if len(self.args) != 1:
+ self.parser.error('Expected a single <directory> argument.')
+
+ basepath = self.args[0]
+ appyaml = self._ParseAppYaml(basepath)
+ rpc_server = self._GetRpcServer()
+
+ updatecheck = self.update_check_class(rpc_server, appyaml)
+ updatecheck.CheckForUpdates()
+
+ appversion = AppVersionUpload(rpc_server, appyaml)
+ appversion.DoUpload(FileIterator(basepath), self.options.max_size,
+ lambda path: open(os.path.join(basepath, path), 'rb'))
+
+ index_defs = self._ParseIndexYaml(basepath)
+ if index_defs:
+ index_upload = IndexDefinitionUpload(rpc_server, appyaml, index_defs)
+ try:
+ index_upload.DoUpload()
+ except urllib2.HTTPError, e:
+ StatusUpdate('Error %d: --- begin server output ---\n'
+ '%s\n--- end server output ---' %
+ (e.code, e.read().rstrip('\n')))
+ print >> self.error_fh, (
+ 'Your app was updated, but there was an error updating your '
+ 'indexes. Please retry later with appcfg.py update_indexes.')
+
+ cron_entries = self._ParseCronYaml(basepath)
+ if cron_entries:
+ cron_upload = CronEntryUpload(rpc_server, appyaml, cron_entries)
+ cron_upload.DoUpload()
+
+ queue_entries = self._ParseQueueYaml(basepath)
+ if queue_entries:
+ queue_upload = QueueEntryUpload(rpc_server, appyaml, queue_entries)
+ queue_upload.DoUpload()
+
+ def _UpdateOptions(self, parser):
+ """Adds update-specific options to 'parser'.
+
+ Args:
+ parser: An instance of OptionsParser.
+ """
+ parser.add_option('-S', '--max_size', type='int', dest='max_size',
+ default=10485760, metavar='SIZE',
+ help='Maximum size of a file to upload.')
+
+ def VacuumIndexes(self):
+ """Deletes unused indexes."""
+ if len(self.args) != 1:
+ self.parser.error('Expected a single <directory> argument.')
+
+ basepath = self.args[0]
+ config = self._ParseAppYaml(basepath)
+
+ index_defs = self._ParseIndexYaml(basepath)
+ if index_defs is None:
+ index_defs = datastore_index.IndexDefinitions()
+
+ rpc_server = self._GetRpcServer()
+ vacuum = VacuumIndexesOperation(rpc_server,
+ config,
+ self.options.force_delete)
+ vacuum.DoVacuum(index_defs)
+
+ def _VacuumIndexesOptions(self, parser):
+ """Adds vacuum_indexes-specific options to 'parser'.
+
+ Args:
+ parser: An instance of OptionsParser.
+ """
+ parser.add_option('-f', '--force', action='store_true', dest='force_delete',
+ default=False,
+ help='Force deletion without being prompted.')
+
+ def UpdateCron(self):
+ """Updates any new or changed cron definitions."""
+ if len(self.args) != 1:
+ self.parser.error('Expected a single <directory> argument.')
+
+ basepath = self.args[0]
+ appyaml = self._ParseAppYaml(basepath)
+ rpc_server = self._GetRpcServer()
+
+ cron_entries = self._ParseCronYaml(basepath)
+ if cron_entries:
+ cron_upload = CronEntryUpload(rpc_server, appyaml, cron_entries)
+ cron_upload.DoUpload()
+
+ def UpdateIndexes(self):
+ """Updates indexes."""
+ if len(self.args) != 1:
+ self.parser.error('Expected a single <directory> argument.')
+
+ basepath = self.args[0]
+ appyaml = self._ParseAppYaml(basepath)
+ rpc_server = self._GetRpcServer()
+
+ index_defs = self._ParseIndexYaml(basepath)
+ if index_defs:
+ index_upload = IndexDefinitionUpload(rpc_server, appyaml, index_defs)
+ index_upload.DoUpload()
+
+ def UpdateQueues(self):
+ """Updates any new or changed task queue definitions."""
+ if len(self.args) != 1:
+ self.parser.error('Expected a single <directory> argument.')
+
+ basepath = self.args[0]
+ appyaml = self._ParseAppYaml(basepath)
+ rpc_server = self._GetRpcServer()
+
+ queue_entries = self._ParseQueueYaml(basepath)
+ if queue_entries:
+ queue_upload = QueueEntryUpload(rpc_server, appyaml, queue_entries)
+ queue_upload.DoUpload()
+
+ def Rollback(self):
+ """Does a rollback of any existing transaction for this app version."""
+ if len(self.args) != 1:
+ self.parser.error('Expected a single <directory> argument.')
+
+ basepath = self.args[0]
+ appyaml = self._ParseAppYaml(basepath)
+
+ appversion = AppVersionUpload(self._GetRpcServer(), appyaml)
+ appversion.in_transaction = True
+ appversion.Rollback()
+
+ def RequestLogs(self):
+ """Write request logs to a file."""
+ if len(self.args) != 2:
+ self.parser.error(
+ 'Expected a <directory> argument and an <output_file> argument.')
+ if (self.options.severity is not None and
+ not 0 <= self.options.severity <= MAX_LOG_LEVEL):
+ self.parser.error(
+ 'Severity range is 0 (DEBUG) through %s (CRITICAL).' % MAX_LOG_LEVEL)
+
+ if self.options.num_days is None:
+ self.options.num_days = int(not self.options.append)
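+ # Default to one day of logs, or to all available days (0) when
+ # appending, matching the --num_days help text below.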
+
+ try:
+ end_date = self._ParseEndDate(self.options.end_date)
+ except ValueError:
+ self.parser.error('End date must be in the format YYYY-MM-DD.')
+
+ basepath = self.args[0]
+ appyaml = self._ParseAppYaml(basepath)
+ rpc_server = self._GetRpcServer()
+ logs_requester = LogsRequester(rpc_server, appyaml, self.args[1],
+ self.options.num_days,
+ self.options.append,
+ self.options.severity,
+ end_date,
+ self.options.vhost,
+ self.options.include_vhost)
+ logs_requester.DownloadLogs()
+
+ def _ParseEndDate(self, date, time_func=time.time):
+ """Translates a user-readable end date to a POSIX timestamp.
+
+ Args:
+ date: A utc date string as YYYY-MM-DD.
+ time_func: time.time() function for testing.
+
+ Returns:
+ A POSIX timestamp representing the last moment of that day.
+ If no date is given, returns a timestamp representing now.
+ """
+ if not date:
+ return time_func()
+ struct_time = time.strptime('%s' % date, '%Y-%m-%d')
+ return calendar.timegm(struct_time) + 86400
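+
+ # Example: _ParseEndDate('2009-10-19') returns the timestamp for
+ # 2009-10-20 00:00:00 UTC, i.e. 86400 seconds past the start of the
+ # requested day.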
+
+ def _RequestLogsOptions(self, parser):
+ """Adds request_logs-specific options to 'parser'.
+
+ Args:
+ parser: An instance of OptionsParser.
+ """
+ parser.add_option('-n', '--num_days', type='int', dest='num_days',
+ action='store', default=None,
+ help='Number of days worth of log data to get. '
+ 'The cut-off point is midnight UTC. '
+ 'Use 0 to get all available logs. '
+ 'Default is 1, unless --append is also given; '
+ 'then the default is 0.')
+ parser.add_option('-a', '--append', dest='append',
+ action='store_true', default=False,
+ help='Append to existing file.')
+ parser.add_option('--severity', type='int', dest='severity',
+ action='store', default=None,
+ help='Severity of app-level log messages to get. '
+ 'The range is 0 (DEBUG) through 4 (CRITICAL). '
+ 'If omitted, only request logs are returned.')
+ parser.add_option('--vhost', type='string', dest='vhost',
+ action='store', default=None,
+ help='The virtual host of log messages to get. '
+ 'If omitted, all log messages are returned.')
+ parser.add_option('--include_vhost', dest='include_vhost',
+ action='store_true', default=False,
+ help='Include virtual host in log messages.')
+ parser.add_option('--end_date', dest='end_date',
+ action='store', default='',
+ help='End date (as YYYY-MM-DD) of period for log data. '
+ 'Defaults to today.')
+
+ def CronInfo(self, now=None, output=sys.stdout):
+ """Displays information about cron definitions.
+
+ Args:
+      now: Used for testing.
+ output: Used for testing.
+ """
+ if len(self.args) != 1:
+ self.parser.error('Expected a single <directory> argument.')
+ if now is None:
+ now = datetime.datetime.now()
+
+ basepath = self.args[0]
+ cron_entries = self._ParseCronYaml(basepath)
+ if cron_entries and cron_entries.cron:
+ for entry in cron_entries.cron:
+ description = entry.description
+ if not description:
+ description = '<no description>'
+ print >>output, '\n%s:\nURL: %s\nSchedule: %s' % (description,
+ entry.url,
+ entry.schedule)
+ schedule = groctimespecification.GrocTimeSpecification(entry.schedule)
+ matches = schedule.GetMatches(now, self.options.num_runs)
+ for match in matches:
+ print >>output, '%s, %s from now' % (
+ match.strftime('%Y-%m-%d %H:%M:%S'), match - now)
+
+ def _CronInfoOptions(self, parser):
+ """Adds cron_info-specific options to 'parser'.
+
+ Args:
+ parser: An instance of OptionsParser.
+ """
+ parser.add_option('-n', '--num_runs', type='int', dest='num_runs',
+ action='store', default=5,
+                      help='Number of runs of each cron job to display. '
+                      'Default is 5.')
+
+ def _CheckRequiredLoadOptions(self):
+ """Checks that upload/download options are present."""
+ for option in ['filename', 'kind', 'config_file']:
+ if getattr(self.options, option) is None:
+ self.parser.error('Option \'%s\' is required.' % option)
+ if not self.options.url:
+ self.parser.error('You must have google.appengine.ext.remote_api.handler '
+ 'assigned to an endpoint in app.yaml, or provide '
+ 'the url of the handler via the \'url\' option.')
+
+ def InferRemoteApiUrl(self, appyaml):
+ """Uses app.yaml to determine the remote_api endpoint.
+
+ Args:
+ appyaml: A parsed app.yaml file.
+
+ Returns:
+      The URL of the remote_api endpoint as a string, or None if no
+      matching handler is found.
+ """
+ handlers = appyaml.handlers
+ handler_suffix = 'remote_api/handler.py'
+ app_id = appyaml.application
+ for handler in handlers:
+ if hasattr(handler, 'script') and handler.script:
+ if handler.script.endswith(handler_suffix):
+ server = self.options.server
+ if server == 'appengine.google.com':
+ return 'http://%s.appspot.com%s' % (app_id, handler.url)
+ else:
+ return 'http://%s%s' % (server, handler.url)
+ return None
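+
+    # Illustrative example (derived from the logic above): for an app id of
+    # 'myapp' with a handler mapping url '/remote_api' to a script ending in
+    # 'remote_api/handler.py', the default server yields
+    # 'http://myapp.appspot.com/remote_api'.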
+
+ def RunBulkloader(self, arg_dict):
+ """Invokes the bulkloader with the given keyword arguments.
+
+ Args:
+ arg_dict: Dictionary of arguments to pass to bulkloader.Run().
+ """
+ try:
+ import sqlite3
+ except ImportError:
+      logging.error('upload_data action requires SQLite3 and the Python '
+                    'sqlite3 module (included in Python since 2.5).')
+ sys.exit(1)
+
+ sys.exit(bulkloader.Run(arg_dict))
+
+ def _SetupLoad(self):
+ """Performs common verification and set up for upload and download."""
+ if len(self.args) != 1:
+      self.parser.error('Expected a single <directory> argument.')
+
+ basepath = self.args[0]
+ appyaml = self._ParseAppYaml(basepath)
+
+ self.options.app_id = appyaml.application
+
+ if not self.options.url:
+ url = self.InferRemoteApiUrl(appyaml)
+ if url is not None:
+ self.options.url = url
+
+ self._CheckRequiredLoadOptions()
+
+ if self.options.batch_size < 1:
+ self.parser.error('batch_size must be 1 or larger.')
+
+ if verbosity == 1:
+ logging.getLogger().setLevel(logging.INFO)
+ self.options.debug = False
+ else:
+ logging.getLogger().setLevel(logging.DEBUG)
+ self.options.debug = True
+
+ def _MakeLoaderArgs(self):
+ return dict([(arg_name, getattr(self.options, arg_name, None)) for
+ arg_name in (
+ 'app_id',
+ 'url',
+ 'filename',
+ 'batch_size',
+ 'kind',
+ 'num_threads',
+ 'bandwidth_limit',
+ 'rps_limit',
+ 'http_limit',
+ 'db_filename',
+ 'config_file',
+ 'auth_domain',
+ 'has_header',
+ 'loader_opts',
+ 'log_file',
+ 'passin',
+ 'email',
+ 'debug',
+ 'exporter_opts',
+ 'mapper_opts',
+ 'result_db_filename',
+ 'dry_run',
+ 'dump',
+ 'restore',
+ )])
+
+ def PerformDownload(self, run_fn=None):
+ """Performs a datastore download via the bulkloader.
+
+ Args:
+ run_fn: Function to invoke the bulkloader, used for testing.
+ """
+ if run_fn is None:
+ run_fn = self.RunBulkloader
+ self._SetupLoad()
+
+ StatusUpdate('Downloading data records.')
+
+ args = self._MakeLoaderArgs()
+ args['download'] = True
+ args['has_header'] = False
+ args['map'] = False
+ args['dump'] = False
+ args['restore'] = False
+
+ run_fn(args)
+
+ def PerformUpload(self, run_fn=None):
+ """Performs a datastore upload via the bulkloader.
+
+ Args:
+ run_fn: Function to invoke the bulkloader, used for testing.
+ """
+ if run_fn is None:
+ run_fn = self.RunBulkloader
+ self._SetupLoad()
+
+ StatusUpdate('Uploading data records.')
+
+ args = self._MakeLoaderArgs()
+ args['download'] = False
+ args['map'] = False
+ args['dump'] = False
+ args['restore'] = False
+
+ run_fn(args)
+
+ def _PerformLoadOptions(self, parser):
+ """Adds options common to 'upload_data' and 'download_data'.
+
+ Args:
+ parser: An instance of OptionsParser.
+ """
+ parser.add_option('--filename', type='string', dest='filename',
+ action='store',
+ help='The name of the file containing the input data.'
+ ' (Required)')
+ parser.add_option('--config_file', type='string', dest='config_file',
+ action='store',
+ help='Name of the configuration file. (Required)')
+ parser.add_option('--kind', type='string', dest='kind',
+ action='store',
+ help='The kind of the entities to store. (Required)')
+ parser.add_option('--url', type='string', dest='url',
+ action='store',
+ help='The location of the remote_api endpoint.')
+ parser.add_option('--num_threads', type='int', dest='num_threads',
+ action='store', default=10,
+ help='Number of threads to upload records with.')
+ parser.add_option('--batch_size', type='int', dest='batch_size',
+ action='store', default=10,
+ help='Number of records to post in each request.')
+ parser.add_option('--bandwidth_limit', type='int', dest='bandwidth_limit',
+ action='store', default=250000,
+ help='The maximum bytes/second bandwidth for transfers.')
+ parser.add_option('--rps_limit', type='int', dest='rps_limit',
+ action='store', default=20,
+ help='The maximum records/second for transfers.')
+ parser.add_option('--http_limit', type='int', dest='http_limit',
+ action='store', default=8,
+ help='The maximum requests/second for transfers.')
+ parser.add_option('--db_filename', type='string', dest='db_filename',
+ action='store',
+ help='Name of the progress database file.')
+ parser.add_option('--auth_domain', type='string', dest='auth_domain',
+ action='store', default='gmail.com',
+ help='The name of the authorization domain to use.')
+ parser.add_option('--log_file', type='string', dest='log_file',
+ help='File to write bulkloader logs. If not supplied '
+ 'then a new log file will be created, named: '
+ 'bulkloader-log-TIMESTAMP.')
+ parser.add_option('--dry_run', action='store_true',
+ dest='dry_run', default=False,
+ help='Do not execute any remote_api calls')
+
+ def _PerformUploadOptions(self, parser):
+ """Adds 'upload_data' specific options to the 'parser' passed in.
+
+ Args:
+ parser: An instance of OptionsParser.
+ """
+ self._PerformLoadOptions(parser)
+ parser.add_option('--has_header', dest='has_header',
+ action='store_true', default=False,
+ help='Whether the first line of the input file should be'
+                      ' skipped.')
+ parser.add_option('--loader_opts', type='string', dest='loader_opts',
+ help='A string to pass to the Loader.initialize method.')
+
+ def _PerformDownloadOptions(self, parser):
+ """Adds 'download_data' specific options to the 'parser' passed in.
+
+ Args:
+ parser: An instance of OptionsParser.
+ """
+ self._PerformLoadOptions(parser)
+ parser.add_option('--exporter_opts', type='string', dest='exporter_opts',
+ help='A string to pass to the Exporter.initialize method.'
+ )
+ parser.add_option('--result_db_filename', type='string',
+ dest='result_db_filename',
+ action='store',
+ help='Database to write entities to for download.')
+
+ class Action(object):
+ """Contains information about a command line action.
+
+ Attributes:
+ function: The name of a function defined on AppCfg or its subclasses
+ that will perform the appropriate action.
+ usage: A command line usage string.
+ short_desc: A one-line description of the action.
+ long_desc: A detailed description of the action. Whitespace and
+ formatting will be preserved.
+ options: A function that will add extra options to a given OptionParser
+ object.
+ """
+
+ def __init__(self, function, usage, short_desc, long_desc='',
+ options=lambda obj, parser: None):
+ """Initializer for the class attributes."""
+ self.function = function
+ self.usage = usage
+ self.short_desc = short_desc
+ self.long_desc = long_desc
+ self.options = options
+
+ def __call__(self, appcfg):
+ """Invoke this Action on the specified AppCfg.
+
+ This calls the function of the appropriate name on AppCfg, and
+    respects polymorphic overrides.
+
+ Args:
+ appcfg: The appcfg to use.
+ Returns:
+ The result of the function call.
+ """
+ method = getattr(appcfg, self.function)
+ return method()
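+
+      # Illustrative dispatch (derived from the code above): given an
+      # AppCfgApp instance named app, actions['update'](app) resolves to
+      # app.Update() via getattr, which is what lets subclasses override
+      # individual actions.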
+
+ actions = {
+
+ 'help': Action(
+ function='Help',
+ usage='%prog help <action>',
+ short_desc='Print help for a specific action.'),
+
+ 'update': Action(
+ function='Update',
+ usage='%prog [options] update <directory>',
+ options=_UpdateOptions,
+ short_desc='Create or update an app version.',
+ long_desc="""
+Specify a directory that contains all of the files required by
+the app, and appcfg.py will create/update the app version referenced
+in the app.yaml file at the top level of that directory. appcfg.py
+will follow symlinks and recursively upload all files to the server.
+Temporary or source control files (e.g. foo~, .svn/*) will be skipped."""),
+
+ 'update_cron': Action(
+ function='UpdateCron',
+ usage='%prog [options] update_cron <directory>',
+ short_desc='Update application cron definitions.',
+ long_desc="""
+The 'update_cron' command will update any new, removed or changed cron
+definitions from the optional cron.yaml file."""),
+
+ 'update_indexes': Action(
+ function='UpdateIndexes',
+ usage='%prog [options] update_indexes <directory>',
+ short_desc='Update application indexes.',
+ long_desc="""
+The 'update_indexes' command will add additional indexes which are not
+currently in production, and restart building any indexes that were not
+completed."""),
+
+ 'update_queues': Action(
+ function='UpdateQueues',
+ usage='%prog [options] update_queues <directory>',
+ short_desc='Update application task queue definitions.',
+ long_desc="""
+The 'update_queues' command will update any new, removed or changed task queue
+definitions from the optional queue.yaml file."""),
+
+ 'vacuum_indexes': Action(
+ function='VacuumIndexes',
+ usage='%prog [options] vacuum_indexes <directory>',
+ options=_VacuumIndexesOptions,
+ short_desc='Delete unused indexes from application.',
+ long_desc="""
+The 'vacuum_indexes' command will help clean up indexes which are no longer
+in use. It does this by comparing the local index configuration with
+indexes that are actually defined on the server. If any indexes on the
+server do not exist in the index configuration file, the user is given the
+option to delete them."""),
+
+ 'rollback': Action(
+ function='Rollback',
+ usage='%prog [options] rollback <directory>',
+ short_desc='Rollback an in-progress update.',
+ long_desc="""
+The 'update' command requires a server-side transaction. Use 'rollback'
+if you get an error message about another transaction being in progress
+and you are sure that there is no such transaction."""),
+
+ 'request_logs': Action(
+ function='RequestLogs',
+ usage='%prog [options] request_logs <directory> <output_file>',
+ options=_RequestLogsOptions,
+ short_desc='Write request logs in Apache common log format.',
+ long_desc="""
+The 'request_logs' command exports the request logs from your application
+to a file. It will write Apache common log format records ordered
+chronologically. If the output file is '-', the logs are written to stdout."""),
+
+ 'cron_info': Action(
+ function='CronInfo',
+ usage='%prog [options] cron_info <directory>',
+ options=_CronInfoOptions,
+ short_desc='Display information about cron jobs.',
+ long_desc="""
+The 'cron_info' command will display the next 'number' runs (default 5) for
+each cron job defined in the cron.yaml file."""),
+
+ 'upload_data': Action(
+ function='PerformUpload',
+ usage='%prog [options] upload_data <directory>',
+ options=_PerformUploadOptions,
+ short_desc='Upload data records to datastore.',
+ long_desc="""
+The 'upload_data' command translates input records into datastore entities and
+uploads them into your application's datastore."""),
+
+ 'download_data': Action(
+ function='PerformDownload',
+ usage='%prog [options] download_data <directory>',
+ options=_PerformDownloadOptions,
+ short_desc='Download entities from datastore.',
+ long_desc="""
+The 'download_data' command downloads datastore entities and writes them to
+a file in CSV or a developer-defined format."""),
+
+
+
+ }
+
+
+def main(argv):
+ logging.basicConfig(format=('%(asctime)s %(levelname)s %(filename)s:'
+ '%(lineno)s %(message)s '))
+ try:
+ result = AppCfgApp(argv).Run()
+ if result:
+ sys.exit(result)
+ except KeyboardInterrupt:
+ StatusUpdate('Interrupted.')
+ sys.exit(1)
+
+
+if __name__ == '__main__':
+ main(sys.argv)
diff --git a/google_appengine/google/appengine/tools/appcfg.pyc b/google_appengine/google/appengine/tools/appcfg.pyc
new file mode 100644
index 0000000..4ee2247
--- /dev/null
+++ b/google_appengine/google/appengine/tools/appcfg.pyc
Binary files differ
diff --git a/google_appengine/google/appengine/tools/appengine_rpc.py b/google_appengine/google/appengine/tools/appengine_rpc.py
new file mode 100755
index 0000000..2f82e3c
--- /dev/null
+++ b/google_appengine/google/appengine/tools/appengine_rpc.py
@@ -0,0 +1,435 @@
+#!/usr/bin/env python
+#
+# Copyright 2007 Google Inc.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
+
+"""Tool for performing authenticated RPCs against App Engine."""
+
+
+import cookielib
+import logging
+import os
+import re
+import socket
+import sys
+import urllib
+import urllib2
+
+
+https_handler = urllib2.HTTPSHandler
+uses_cert_verification = False
+certpath = os.path.join(os.path.dirname(__file__), "cacerts.txt")
+cert_file_available = os.path.exists(certpath)
+try:
+ import https_wrapper
+ if cert_file_available:
+ https_handler = lambda: https_wrapper.CertValidatingHTTPSHandler(
+ ca_certs=certpath)
+ uses_cert_verification = True
+except ImportError:
+ pass
+
+logger = logging.getLogger('google.appengine.tools.appengine_rpc')
+
+def GetPlatformToken(os_module=os, sys_module=sys, platform=sys.platform):
+ """Returns a 'User-agent' token for the host system platform.
+
+ Args:
+ os_module, sys_module, platform: Used for testing.
+
+ Returns:
+ String containing the platform token for the host system.
+ """
+ if hasattr(sys_module, "getwindowsversion"):
+ windows_version = sys_module.getwindowsversion()
+ version_info = ".".join(str(i) for i in windows_version[:4])
+ return platform + "/" + version_info
+ elif hasattr(os_module, "uname"):
+ uname = os_module.uname()
+ return "%s/%s" % (uname[0], uname[2])
+ else:
+ return "unknown"
+
+def HttpRequestToString(req, include_data=True):
+ """Converts a urllib2.Request to a string.
+
+ Args:
+    req: urllib2.Request.
+    include_data: Whether to include the request payload in the output.
+
+  Returns:
+ Multi-line string representing the request.
+ """
+
+ headers = ""
+ for header in req.header_items():
+ headers += "%s: %s\n" % (header[0], header[1])
+
+ template = ("%(method)s %(selector)s %(type)s/1.1\n"
+ "Host: %(host)s\n"
+ "%(headers)s")
+ if include_data:
+ template = template + "\n%(data)s"
+
+ return template % {
+ 'method' : req.get_method(),
+ 'selector' : req.get_selector(),
+ 'type' : req.get_type().upper(),
+ 'host' : req.get_host(),
+ 'headers': headers,
+ 'data': req.get_data(),
+ }
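+
+  # Illustrative output for a simple GET of http://example.com/api (headers
+  # vary by request):
+  #   GET /api HTTP/1.1
+  #   Host: example.com
+  #   <header lines>
+  # with the request body appended when include_data is True.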
+
+class ClientLoginError(urllib2.HTTPError):
+ """Raised to indicate there was an error authenticating with ClientLogin."""
+
+ def __init__(self, url, code, msg, headers, args):
+ urllib2.HTTPError.__init__(self, url, code, msg, headers, None)
+ self.args = args
+ self.reason = args["Error"]
+
+ def read(self):
+ return '%d %s: %s' % (self.code, self.msg, self.reason)
+
+
+class AbstractRpcServer(object):
+ """Provides a common interface for a simple RPC server."""
+
+ def __init__(self, host, auth_function, user_agent, source,
+ host_override=None, extra_headers=None, save_cookies=False,
+ auth_tries=3, account_type=None, debug_data=True, secure=False):
+ """Creates a new HttpRpcServer.
+
+ Args:
+ host: The host to send requests to.
+ auth_function: A function that takes no arguments and returns an
+ (email, password) tuple when called. Will be called if authentication
+ is required.
+ user_agent: The user-agent string to send to the server. Specify None to
+ omit the user-agent header.
+ source: The source to specify in authentication requests.
+ host_override: The host header to send to the server (defaults to host).
+ extra_headers: A dict of extra headers to append to every request. Values
+ supplied here will override other default headers that are supplied.
+ save_cookies: If True, save the authentication cookies to local disk.
+ If False, use an in-memory cookiejar instead. Subclasses must
+ implement this functionality. Defaults to False.
+ auth_tries: The number of times to attempt auth_function before failing.
+ account_type: One of GOOGLE, HOSTED_OR_GOOGLE, or None for automatic.
+ debug_data: Whether debugging output should include data contents.
+ """
+ if secure:
+ self.scheme = "https"
+ else:
+ self.scheme = "http"
+ self.host = host
+ self.host_override = host_override
+ self.auth_function = auth_function
+ self.source = source
+ self.authenticated = False
+ self.auth_tries = auth_tries
+ self.debug_data = debug_data
+
+ self.account_type = account_type
+
+ self.extra_headers = {}
+ if user_agent:
+ self.extra_headers["User-Agent"] = user_agent
+ if extra_headers:
+ self.extra_headers.update(extra_headers)
+
+ self.save_cookies = save_cookies
+ self.cookie_jar = cookielib.MozillaCookieJar()
+ self.opener = self._GetOpener()
+ if self.host_override:
+ logger.info("Server: %s; Host: %s", self.host, self.host_override)
+ else:
+ logger.info("Server: %s", self.host)
+
+ if ((self.host_override and self.host_override == "localhost") or
+ self.host == "localhost" or self.host.startswith("localhost:")):
+ self._DevAppServerAuthenticate()
+
+ def _GetOpener(self):
+ """Returns an OpenerDirector for making HTTP requests.
+
+ Returns:
+ A urllib2.OpenerDirector object.
+ """
+    raise NotImplementedError()
+
+ def _CreateRequest(self, url, data=None):
+ """Creates a new urllib request."""
+ req = urllib2.Request(url, data=data)
+ if self.host_override:
+ req.add_header("Host", self.host_override)
+ for key, value in self.extra_headers.iteritems():
+ req.add_header(key, value)
+ return req
+
+ def _GetAuthToken(self, email, password):
+ """Uses ClientLogin to authenticate the user, returning an auth token.
+
+ Args:
+ email: The user's email address
+ password: The user's password
+
+ Raises:
+ ClientLoginError: If there was an error authenticating with ClientLogin.
+ HTTPError: If there was some other form of HTTP error.
+
+ Returns:
+ The authentication token returned by ClientLogin.
+ """
+ account_type = self.account_type
+ if not account_type:
+ if (self.host.split(':')[0].endswith(".google.com")
+ or (self.host_override
+ and self.host_override.split(':')[0].endswith(".google.com"))):
+ account_type = "HOSTED_OR_GOOGLE"
+ else:
+ account_type = "GOOGLE"
+ data = {
+ "Email": email,
+ "Passwd": password,
+ "service": "ah",
+ "source": self.source,
+ "accountType": account_type
+ }
+
+ req = self._CreateRequest(
+ url="https://www.google.com/accounts/ClientLogin",
+ data=urllib.urlencode(data))
+ try:
+ response = self.opener.open(req)
+ response_body = response.read()
+ response_dict = dict(x.split("=")
+ for x in response_body.split("\n") if x)
+ return response_dict["Auth"]
+ except urllib2.HTTPError, e:
+ if e.code == 403:
+ body = e.read()
+ response_dict = dict(x.split("=", 1) for x in body.split("\n") if x)
+ raise ClientLoginError(req.get_full_url(), e.code, e.msg,
+ e.headers, response_dict)
+ else:
+ raise
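+
+    # Note (added for clarity): a successful ClientLogin response body is a
+    # series of newline-separated 'key=value' lines such as 'SID=...',
+    # 'LSID=...' and 'Auth=...'; the dict(...) call above extracts the
+    # 'Auth' token from it.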
+
+ def _GetAuthCookie(self, auth_token):
+ """Fetches authentication cookies for an authentication token.
+
+ Args:
+ auth_token: The authentication token returned by ClientLogin.
+
+ Raises:
+ HTTPError: If there was an error fetching the authentication cookies.
+ """
+ continue_location = "http://localhost/"
+ args = {"continue": continue_location, "auth": auth_token}
+ login_path = os.environ.get("APPCFG_LOGIN_PATH", "/_ah")
+ req = self._CreateRequest("%s://%s%s/login?%s" %
+ (self.scheme, self.host, login_path,
+ urllib.urlencode(args)))
+ try:
+ response = self.opener.open(req)
+ except urllib2.HTTPError, e:
+ response = e
+ if (response.code != 302 or
+ response.info()["location"] != continue_location):
+ raise urllib2.HTTPError(req.get_full_url(), response.code, response.msg,
+ response.headers, response.fp)
+ self.authenticated = True
+
+ def _Authenticate(self):
+ """Authenticates the user.
+
+ The authentication process works as follows:
+ 1) We get a username and password from the user
+ 2) We use ClientLogin to obtain an AUTH token for the user
+ (see http://code.google.com/apis/accounts/AuthForInstalledApps.html).
+ 3) We pass the auth token to /_ah/login on the server to obtain an
+ authentication cookie. If login was successful, it tries to redirect
+ us to the URL we provided.
+
+ If we attempt to access the upload API without first obtaining an
+ authentication cookie, it returns a 401 response and directs us to
+ authenticate ourselves with ClientLogin.
+ """
+ for unused_i in range(self.auth_tries):
+ credentials = self.auth_function()
+ try:
+ auth_token = self._GetAuthToken(credentials[0], credentials[1])
+ except ClientLoginError, e:
+ if e.reason == "BadAuthentication":
+ print >>sys.stderr, "Invalid username or password."
+ continue
+ if e.reason == "CaptchaRequired":
+ print >>sys.stderr, (
+ "Please go to\n"
+ "https://www.google.com/accounts/DisplayUnlockCaptcha\n"
+ "and verify you are a human. Then try again.")
+ break
+ if e.reason == "NotVerified":
+ print >>sys.stderr, "Account not verified."
+ break
+ if e.reason == "TermsNotAgreed":
+ print >>sys.stderr, "User has not agreed to TOS."
+ break
+ if e.reason == "AccountDeleted":
+ print >>sys.stderr, "The user account has been deleted."
+ break
+ if e.reason == "AccountDisabled":
+ print >>sys.stderr, "The user account has been disabled."
+ break
+ if e.reason == "ServiceDisabled":
+ print >>sys.stderr, ("The user's access to the service has been "
+ "disabled.")
+ break
+ if e.reason == "ServiceUnavailable":
+ print >>sys.stderr, "The service is not available; try again later."
+ break
+ raise
+ self._GetAuthCookie(auth_token)
+ return
+
+ def _DevAppServerAuthenticate(self):
+ """Authenticates the user on the dev_appserver."""
+ credentials = self.auth_function()
+ self.extra_headers["Cookie"] = ('dev_appserver_login="%s:True"; Path=/;' %
+ (credentials[0],))
+
+ def Send(self, request_path, payload="",
+ content_type="application/octet-stream",
+ timeout=None,
+ **kwargs):
+ """Sends an RPC and returns the response.
+
+ Args:
+ request_path: The path to send the request to, eg /api/appversion/create.
+ payload: The body of the request, or None to send an empty request.
+ content_type: The Content-Type header to use.
+ timeout: timeout in seconds; default None i.e. no timeout.
+ (Note: for large requests on OS X, the timeout doesn't work right.)
+ kwargs: Any keyword arguments are converted into query string parameters.
+
+ Returns:
+ The response body, as a string.
+ """
+ old_timeout = socket.getdefaulttimeout()
+ socket.setdefaulttimeout(timeout)
+ try:
+ tries = 0
+ auth_tried = False
+ while True:
+ tries += 1
+ args = dict(kwargs)
+ url = "%s://%s%s?%s" % (self.scheme, self.host, request_path,
+ urllib.urlencode(args))
+ req = self._CreateRequest(url=url, data=payload)
+ req.add_header("Content-Type", content_type)
+ req.add_header("X-appcfg-api-version", "1")
+ try:
+ logger.debug('Sending HTTP request:\n%s' %
+ HttpRequestToString(req, include_data=self.debug_data))
+ f = self.opener.open(req)
+ response = f.read()
+ f.close()
+ return response
+ except urllib2.HTTPError, e:
+ logger.debug("Got http error, this is try #%s" % tries)
+ if tries > self.auth_tries:
+ raise
+ elif e.code == 401:
+ if auth_tried:
+ raise
+ auth_tried = True
+ self._Authenticate()
+ elif e.code >= 500 and e.code < 600:
+ continue
+ elif e.code == 302:
+ if auth_tried:
+ raise
+ auth_tried = True
+ loc = e.info()["location"]
+ logger.debug("Got 302 redirect. Location: %s" % loc)
+ if loc.startswith("https://www.google.com/accounts/ServiceLogin"):
+ self._Authenticate()
+ elif re.match(r"https://www.google.com/a/[a-z0-9.-]+/ServiceLogin",
+ loc):
+ self.account_type = "HOSTED"
+ self._Authenticate()
+ elif loc.startswith("http://%s/_ah/login" % (self.host,)):
+ self._DevAppServerAuthenticate()
+ else:
+ raise
+ finally:
+ socket.setdefaulttimeout(old_timeout)
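+
+    # Illustrative call (values are placeholders): keyword arguments become
+    # query string parameters, so
+    #   server.Send('/api/appversion/create', payload=data, app_id='myapp')
+    # posts to <scheme>://<host>/api/appversion/create?app_id=myapp.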
+
+
+class HttpRpcServer(AbstractRpcServer):
+ """Provides a simplified RPC-style interface for HTTP requests."""
+
+ DEFAULT_COOKIE_FILE_PATH = "~/.appcfg_cookies"
+
+ def _Authenticate(self):
+ """Save the cookie jar after authentication."""
+ if cert_file_available and not uses_cert_verification:
+ logger.warn("ssl module not found. Without this the identity of the "
+ "remote host cannot be verified, and connections are NOT "
+ "secure. To fix this, please install the ssl module from "
+ "http://pypi.python.org/pypi/ssl")
+ super(HttpRpcServer, self)._Authenticate()
+ if self.cookie_jar.filename is not None and self.save_cookies:
+ logger.info("Saving authentication cookies to %s" %
+ self.cookie_jar.filename)
+ self.cookie_jar.save()
+
+ def _GetOpener(self):
+ """Returns an OpenerDirector that supports cookies and ignores redirects.
+
+ Returns:
+ A urllib2.OpenerDirector object.
+ """
+ opener = urllib2.OpenerDirector()
+ opener.add_handler(urllib2.ProxyHandler())
+ opener.add_handler(urllib2.UnknownHandler())
+ opener.add_handler(urllib2.HTTPHandler())
+ opener.add_handler(urllib2.HTTPDefaultErrorHandler())
+ opener.add_handler(https_handler())
+ opener.add_handler(urllib2.HTTPErrorProcessor())
+
+ if self.save_cookies:
+ self.cookie_jar.filename = os.path.expanduser(
+ HttpRpcServer.DEFAULT_COOKIE_FILE_PATH)
+
+ if os.path.exists(self.cookie_jar.filename):
+ try:
+ self.cookie_jar.load()
+ self.authenticated = True
+ logger.info("Loaded authentication cookies from %s" %
+ self.cookie_jar.filename)
+ except (OSError, IOError, cookielib.LoadError), e:
+ logger.debug("Could not load authentication cookies; %s: %s",
+ e.__class__.__name__, e)
+ self.cookie_jar.filename = None
+ else:
+ try:
+ fd = os.open(self.cookie_jar.filename, os.O_CREAT, 0600)
+ os.close(fd)
+ except (OSError, IOError), e:
+ logger.debug("Could not create authentication cookies file; %s: %s",
+ e.__class__.__name__, e)
+ self.cookie_jar.filename = None
+
+ opener.add_handler(urllib2.HTTPCookieProcessor(self.cookie_jar))
+ return opener
diff --git a/google_appengine/google/appengine/tools/appengine_rpc.pyc b/google_appengine/google/appengine/tools/appengine_rpc.pyc
new file mode 100644
index 0000000..f63df5b
--- /dev/null
+++ b/google_appengine/google/appengine/tools/appengine_rpc.pyc
Binary files differ
diff --git a/google_appengine/google/appengine/tools/bulkload_client.py b/google_appengine/google/appengine/tools/bulkload_client.py
new file mode 100755
index 0000000..bec0fde
--- /dev/null
+++ b/google_appengine/google/appengine/tools/bulkload_client.py
@@ -0,0 +1,297 @@
+#!/usr/bin/env python
+#
+# Copyright 2007 Google Inc.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
+"""Imports CSV data over HTTP.
+
+Usage:
+ %s [flags]
+
+ --debug Show debugging information. (Optional)
+ --cookie=<string> Whole Cookie header to supply to the server, including
+ the parameter name (e.g., "ACSID=..."). (Optional)
+ --url=<string> URL endpoint to post to for importing data. (Required)
+ --batch_size=<int> Number of Entity objects to include in each post to
+ the URL endpoint. The more data per row/Entity, the
+ smaller the batch size should be. (Default 10)
+ --filename=<path> Path to the CSV file to import. (Required)
+ --kind=<string> Name of the Entity object kind to put in the datastore.
+ (Required)
+
+The exit status will be 0 on success, non-zero on import failure.
+
+Works with the bulkload mix-in library for google.appengine.ext.bulkload.
+Please look there for documentation about how to set up the server side.
+"""
+
+
+import StringIO
+import csv
+import getopt
+import httplib
+import logging
+import socket
+import sys
+import urllib
+import urlparse
+
+from google.appengine.ext.bulkload import constants
+
+
+
+class Error(Exception):
+ """Base-class for exceptions in this module."""
+
+
+class PostError(Error):
+ """An error has occured while trying to post data to the server."""
+
+
+class BadServerStatusError(PostError):
+ """The server has returned an error while importing data."""
+
+
+def ContentGenerator(csv_file,
+ batch_size,
+ create_csv_reader=csv.reader,
+ create_csv_writer=csv.writer):
+ """Retrieves CSV data up to a batch size at a time.
+
+ Args:
+ csv_file: A file-like object for reading CSV data.
+ batch_size: Maximum number of CSV rows to yield on each iteration.
+ create_csv_reader, create_csv_writer: Used for dependency injection.
+
+ Yields:
+ Tuple (entity_count, csv_content) where:
+ entity_count: Number of entities contained in the csv_content. Will be
+ less than or equal to the batch_size and greater than 0.
+ csv_content: String containing the CSV content containing the next
+ entity_count entities.
+ """
+ try:
+ csv.field_size_limit(800000)
+ except AttributeError:
+ pass
+
+ reader = create_csv_reader(csv_file, skipinitialspace=True)
+ exhausted = False
+
+ while not exhausted:
+ rows_written = 0
+ content = StringIO.StringIO()
+ writer = create_csv_writer(content)
+ try:
+ for i in xrange(batch_size):
+ row = reader.next()
+ writer.writerow(row)
+ rows_written += 1
+ except StopIteration:
+ exhausted = True
+
+ if rows_written > 0:
+ yield rows_written, content.getvalue()
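+
+  # Usage sketch (illustrative; 'MyKind' and the endpoint values are
+  # placeholders):
+  #   for count, payload in ContentGenerator(open('data.csv'), 10):
+  #     PostEntities('localhost:8080', '/bulkload', '', 'MyKind', payload)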
+
+
+def PostEntities(host_port, uri, cookie, kind, content):
+ """Posts Entity records to a remote endpoint over HTTP.
+
+ Args:
+ host_port: String containing the "host:port" pair; the port is optional.
+ uri: Relative URI to access on the remote host (e.g., '/bulkload').
+ cookie: String containing the Cookie header to use, if any.
+ kind: Kind of the Entity records being posted.
+ content: String containing the CSV data for the entities.
+
+ Raises:
+ BadServerStatusError if the server was contactable but returns an error.
+ PostError If an error occurred while connecting to the server or reading
+ or writing data.
+ """
+ logging.debug('Connecting to %s', host_port)
+ try:
+ body = urllib.urlencode({
+ constants.KIND_PARAM: kind,
+ constants.CSV_PARAM: content,
+ })
+ headers = {
+ 'Content-Type': 'application/x-www-form-urlencoded',
+ 'Content-Length': len(body),
+ 'Cookie': cookie,
+ }
+
+ logging.debug('Posting %d bytes to http://%s%s', len(body), host_port, uri)
+ connection = httplib.HTTPConnection(host_port)
+ try:
+ connection.request('POST', uri, body, headers)
+ response = connection.getresponse()
+
+ status = response.status
+ reason = response.reason
+ content = response.read()
+ logging.debug('Received response code %d: %s', status, reason)
+ if status != httplib.OK:
+ raise BadServerStatusError('Received code %d: %s\n%s' % (
+ status, reason, content))
+ finally:
+ connection.close()
+ except (IOError, httplib.HTTPException, socket.error), e:
+ logging.debug('Encountered exception accessing HTTP server: %s', e)
+ raise PostError(e)
+
+
+def SplitURL(url):
+ """Splits an HTTP URL into pieces.
+
+ Args:
+ url: String containing a full URL string (e.g.,
+ 'http://blah.com:8080/stuff?param=1#foo')
+
+ Returns:
+ Tuple (netloc, uri) where:
+ netloc: String containing the host/port combination from the URL. The
+ port is optional. (e.g., 'blah.com:8080').
+ uri: String containing the relative URI of the URL. (e.g., '/stuff').
+ """
+ scheme, netloc, path, query, fragment = urlparse.urlsplit(url)
+ return netloc, path
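+
+  # Example (from the docstring above):
+  #   SplitURL('http://blah.com:8080/stuff?param=1#foo')
+  #   == ('blah.com:8080', '/stuff'); the query and fragment are discarded.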
+
+
+def ImportCSV(filename,
+ post_url,
+ cookie,
+ batch_size,
+ kind,
+ split_url=SplitURL,
+ openfile=file,
+ create_content_generator=ContentGenerator,
+ post_entities=PostEntities):
+ """Imports CSV data using a series of HTTP posts.
+
+ Args:
+ filename: File on disk containing CSV data.
+ post_url: URL to post the Entity data to.
+ cookie: Full cookie header to use while connecting.
+ batch_size: Maximum number of Entity objects to post with each request.
+ kind: Entity kind of the objects being posted.
+ split_url, openfile, create_content_generator, post_entities: Used for
+ dependency injection.
+
+ Returns:
+ True if all entities were imported successfully; False otherwise.
+ """
+ host_port, uri = split_url(post_url)
+ csv_file = openfile(filename, 'r')
+ try:
+ content_gen = create_content_generator(csv_file, batch_size)
+ logging.info('Starting import; maximum %d entities per post', batch_size)
+ for num_entities, content in content_gen:
+ logging.info('Importing %d entities in %d bytes',
+ num_entities, len(content))
+ try:
+ content = post_entities(host_port, uri, cookie, kind, content)
+ except PostError, e:
+ logging.error('An error occurred while importing: %s', e)
+ return False
+ finally:
+ csv_file.close()
+ return True
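+
+  # Minimal illustrative call (file name, URL and kind are placeholders):
+  #   ok = ImportCSV('data.csv', 'http://localhost:8080/bulkload', '', 10,
+  #                  'MyKind')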
+
+
+def PrintUsageExit(code):
+ """Prints usage information and exits with a status code.
+
+ Args:
+ code: Status code to pass to sys.exit() after displaying usage information.
+ """
+ print sys.modules['__main__'].__doc__ % sys.argv[0]
+ sys.stdout.flush()
+ sys.stderr.flush()
+ sys.exit(code)
+
+
+def ParseArguments(argv):
+ """Parses command-line arguments.
+
+ Prints out a help message if -h or --help is supplied.
+
+ Args:
+ argv: List of command-line arguments.
+
+ Returns:
+ Tuple (url, filename, cookie, batch_size, kind) containing the values from
+ each corresponding command-line flag.
+ """
+ opts, args = getopt.getopt(
+ argv[1:],
+ 'h',
+ ['debug',
+ 'help',
+ 'url=',
+ 'filename=',
+ 'cookie=',
+ 'batch_size=',
+ 'kind='])
+
+ url = None
+ filename = None
+ cookie = ''
+ batch_size = 10
+ kind = None
+ encoding = None
+
+ for option, value in opts:
+ if option == '--debug':
+ logging.getLogger().setLevel(logging.DEBUG)
+ if option in ('-h', '--help'):
+ PrintUsageExit(0)
+ if option == '--url':
+ url = value
+ if option == '--filename':
+ filename = value
+ if option == '--cookie':
+ cookie = value
+ if option == '--batch_size':
+ batch_size = int(value)
+ if batch_size <= 0:
+ print >>sys.stderr, 'batch_size must be 1 or larger'
+ PrintUsageExit(1)
+ if option == '--kind':
+ kind = value
+
+ return (url, filename, cookie, batch_size, kind)
+
+
+def main(argv):
+ """Runs the importer."""
+ logging.basicConfig(
+ level=logging.INFO,
+ format='%(levelname)-8s %(asctime)s %(filename)s] %(message)s')
+
+ args = ParseArguments(argv)
+ if [arg for arg in args if arg is None]:
+ print >>sys.stderr, 'Invalid arguments'
+ PrintUsageExit(1)
+
+ url, filename, cookie, batch_size, kind = args
+ if ImportCSV(filename, url, cookie, batch_size, kind):
+    logging.info('Import successful')
+ return 0
+ logging.error('Import failed')
+ return 1
+
+
+if __name__ == '__main__':
+ sys.exit(main(sys.argv))
diff --git a/google_appengine/google/appengine/tools/bulkloader.py b/google_appengine/google/appengine/tools/bulkloader.py
new file mode 100755
index 0000000..e288b00
--- /dev/null
+++ b/google_appengine/google/appengine/tools/bulkloader.py
@@ -0,0 +1,3827 @@
+#!/usr/bin/env python
+#
+# Copyright 2007 Google Inc.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
+"""Imports data over HTTP.
+
+Usage:
+ %(arg0)s [flags]
+
+ --debug Show debugging information. (Optional)
+ --app_id=<string> Application ID of endpoint (Optional for
+ *.appspot.com)
+ --auth_domain=<domain> The auth domain to use for logging in and for
+ UserProperties. (Default: gmail.com)
+ --bandwidth_limit=<int> The maximum number of bytes per second for the
+ aggregate transfer of data to the server. Bursts
+ may exceed this, but overall transfer rate is
+ restricted to this rate. (Default 250000)
+ --batch_size=<int> Number of Entity objects to include in each post to
+ the URL endpoint. The more data per row/Entity, the
+ smaller the batch size should be. (Default 10)
+ --config_file=<path> File containing Model and Loader definitions.
+ (Required unless --dump or --restore are used)
+ --db_filename=<path> Specific progress database to write to, or to
+ resume from. If not supplied, then a new database
+ will be started, named:
+ bulkloader-progress-TIMESTAMP.
+ The special filename "skip" may be used to simply
+ skip reading/writing any progress information.
+ --download Export entities to a file.
+ --dry_run Do not execute any remote_api calls.
+ --dump Use zero-configuration dump format.
+ --email=<string> The username to use. Will prompt if omitted.
+ --exporter_opts=<string>
+ A string to pass to the Exporter.initialize method.
+ --filename=<path> Path to the file to import. (Required)
+ --has_header Skip the first row of the input.
+  --http_limit=<int>     The maximum number of HTTP requests per second to
+ send to the server. (Default: 8)
+ --kind=<string> Name of the Entity object kind to put in the
+ datastore. (Required)
+ --loader_opts=<string> A string to pass to the Loader.initialize method.
+ --log_file=<path> File to write bulkloader logs. If not supplied
+ then a new log file will be created, named:
+ bulkloader-log-TIMESTAMP.
+ --map Map an action across datastore entities.
+ --mapper_opts=<string> A string to pass to the Mapper.Initialize method.
+ --num_threads=<int> Number of threads to use for uploading entities
+ (Default 10)
+ --passin Read the login password from stdin.
+ --restore Restore from zero-configuration dump format.
+ --result_db_filename=<path>
+ Result database to write to for downloads.
+ --rps_limit=<int> The maximum number of records per second to
+ transfer to the server. (Default: 20)
+ --url=<string> URL endpoint to post to for importing data.
+ (Required)
+
+The exit status will be 0 on success, non-zero on import failure.
+
+Works with the remote_api mix-in library for google.appengine.ext.remote_api.
+Please look there for documentation about how to set up the server side.
+
+Example:
+
+%(arg0)s --url=http://app.appspot.com/remote_api --kind=Model \
+ --filename=data.csv --config_file=loader_config.py
+
+"""
+
+
+
+import csv
+import errno
+import getopt
+import getpass
+import imp
+import logging
+import os
+import Queue
+import re
+import shutil
+import signal
+import StringIO
+import sys
+import threading
+import time
+import traceback
+import urllib2
+import urlparse
+
+from google.appengine.datastore import entity_pb
+
+from google.appengine.api import apiproxy_stub_map
+from google.appengine.api import datastore
+from google.appengine.api import datastore_errors
+from google.appengine.datastore import datastore_pb
+from google.appengine.ext import db
+from google.appengine.ext import key_range as key_range_module
+from google.appengine.ext.db import polymodel
+from google.appengine.ext.remote_api import remote_api_stub
+from google.appengine.ext.remote_api import throttle as remote_api_throttle
+from google.appengine.runtime import apiproxy_errors
+from google.appengine.tools import adaptive_thread_pool
+from google.appengine.tools import appengine_rpc
+from google.appengine.tools.requeue import ReQueue
+
+try:
+ import sqlite3
+except ImportError:
+ pass
+
+logger = logging.getLogger('google.appengine.tools.bulkloader')
+
+KeyRange = key_range_module.KeyRange
+
+DEFAULT_THREAD_COUNT = 10
+
+DEFAULT_BATCH_SIZE = 10
+
+DEFAULT_DOWNLOAD_BATCH_SIZE = 100
+
+DEFAULT_QUEUE_SIZE = DEFAULT_THREAD_COUNT * 10
+
+_THREAD_SHOULD_EXIT = '_THREAD_SHOULD_EXIT'
+
+STATE_READ = 0
+STATE_SENDING = 1
+STATE_SENT = 2
+STATE_NOT_SENT = 3
+
+STATE_GETTING = 1
+STATE_GOT = 2
+STATE_ERROR = 3
+
+DATA_CONSUMED_TO_HERE = 'DATA_CONSUMED_TO_HERE'
+
+INITIAL_BACKOFF = 1.0
+
+BACKOFF_FACTOR = 2.0
+
+
+DEFAULT_BANDWIDTH_LIMIT = 250000
+
+DEFAULT_RPS_LIMIT = 20
+
+DEFAULT_REQUEST_LIMIT = 8
+
+MAXIMUM_INCREASE_DURATION = 5.0
+MAXIMUM_HOLD_DURATION = 12.0
+
+
+def ImportStateMessage(state):
+ """Converts a numeric state identifier to a status message."""
+ return ({
+ STATE_READ: 'Batch read from file.',
+ STATE_SENDING: 'Sending batch to server.',
+ STATE_SENT: 'Batch successfully sent.',
+ STATE_NOT_SENT: 'Error while sending batch.'
+ }[state])
+
+
+def ExportStateMessage(state):
+ """Converts a numeric state identifier to a status message."""
+ return ({
+ STATE_READ: 'Batch read from file.',
+      STATE_GETTING: 'Fetching batch from server.',
+      STATE_GOT: 'Batch successfully fetched.',
+      STATE_ERROR: 'Error while fetching batch.'
+ }[state])
+
+
+def MapStateMessage(state):
+ """Converts a numeric state identifier to a status message."""
+ return ({
+ STATE_READ: 'Batch read from file.',
+      STATE_GETTING: 'Querying for batch from server.',
+ STATE_GOT: 'Batch successfully fetched.',
+ STATE_ERROR: 'Error while fetching or mapping.'
+ }[state])
+
+
+def ExportStateName(state):
+ """Converts a numeric state identifier to a string."""
+ return ({
+ STATE_READ: 'READ',
+ STATE_GETTING: 'GETTING',
+ STATE_GOT: 'GOT',
+ STATE_ERROR: 'NOT_GOT'
+ }[state])
+
+
+def ImportStateName(state):
+ """Converts a numeric state identifier to a string."""
+ return ({
+ STATE_READ: 'READ',
+      STATE_SENDING: 'SENDING',
+      STATE_SENT: 'SENT',
+ STATE_NOT_SENT: 'NOT_SENT'
+ }[state])
+
+
+class Error(Exception):
+ """Base-class for exceptions in this module."""
+
+
+class MissingPropertyError(Error):
+ """An expected field is missing from an entity, and no default was given."""
+
+
+class FatalServerError(Error):
+ """An unrecoverable error occurred while posting data to the server."""
+
+
+class ResumeError(Error):
+ """Error while trying to resume a partial upload."""
+
+
+class ConfigurationError(Error):
+ """Error in configuration options."""
+
+
+class AuthenticationError(Error):
+ """Error while trying to authenticate with the server."""
+
+
+class FileNotFoundError(Error):
+ """A filename passed in by the user refers to a non-existent input file."""
+
+
+class FileNotReadableError(Error):
+ """A filename passed in by the user refers to a non-readable input file."""
+
+
+class FileExistsError(Error):
+ """A filename passed in by the user refers to an existing output file."""
+
+
+class FileNotWritableError(Error):
+ """A filename passed in by the user refers to a non-writable output file."""
+
+
+class BadStateError(Error):
+ """A work item in an unexpected state was encountered."""
+
+
+class KeyRangeError(Error):
+ """An error during construction of a KeyRangeItem."""
+
+
+class FieldSizeLimitError(Error):
+ """The csv module tried to read a field larger than the size limit."""
+
+ def __init__(self, limit):
+ self.message = """
+A field in your CSV input file has exceeded the current limit of %d.
+
+You can raise this limit by adding the following lines to your config file:
+
+import csv
+csv.field_size_limit(new_limit)
+
+where new_limit is a number larger than the size in bytes of the largest
+field in your CSV.
+""" % limit
+ Error.__init__(self, self.message)
+
+
+class NameClashError(Error):
+ """A name clash occurred while trying to alias old method names."""
+
+ def __init__(self, old_name, new_name, klass):
+ Error.__init__(self, old_name, new_name, klass)
+ self.old_name = old_name
+ self.new_name = new_name
+ self.klass = klass
+
+
+def GetCSVGeneratorFactory(kind, csv_filename, batch_size, csv_has_header,
+ openfile=open, create_csv_reader=csv.reader):
+ """Return a factory that creates a CSV-based UploadWorkItem generator.
+
+ Args:
+ kind: The kind of the entities being uploaded.
+ csv_filename: File on disk containing CSV data.
+ batch_size: Maximum number of CSV rows to stash into an UploadWorkItem.
+ csv_has_header: Whether to skip the first row of the CSV.
+ openfile: Used for dependency injection.
+ create_csv_reader: Used for dependency injection.
+
+ Returns:
+ A callable (accepting the Progress Queue and Progress Generators
+ as input) which creates the UploadWorkItem generator.
+ """
+ loader = Loader.RegisteredLoader(kind)
+ loader._Loader__openfile = openfile
+ loader._Loader__create_csv_reader = create_csv_reader
+ record_generator = loader.generate_records(csv_filename)
+
+ def CreateGenerator(request_manager, progress_queue, progress_generator):
+ """Initialize a UploadWorkItem generator.
+
+ Args:
+ request_manager: A RequestManager instance.
+ progress_queue: A ProgressQueue instance to send progress information.
+ progress_generator: A generator of progress information or None.
+
+ Returns:
+ An UploadWorkItemGenerator instance.
+ """
+ return UploadWorkItemGenerator(request_manager,
+ progress_queue,
+ progress_generator,
+ record_generator,
+ csv_has_header,
+ batch_size)
+
+ return CreateGenerator
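+
+  # Sketch of typical wiring (illustrative; assumes a Loader subclass has
+  # been registered for kind 'MyKind' via a config file):
+  #   factory = GetCSVGeneratorFactory('MyKind', 'data.csv', 10, False)
+  #   generator = factory(request_manager, progress_queue, progress_generator)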
+
+
+class UploadWorkItemGenerator(object):
+ """Reads rows from a row generator and generates UploadWorkItems."""
+
+ def __init__(self,
+ request_manager,
+ progress_queue,
+ progress_generator,
+ record_generator,
+ skip_first,
+ batch_size):
+ """Initialize a WorkItemGenerator.
+
+ Args:
+ request_manager: A RequestManager instance with which to associate
+ WorkItems.
+ progress_queue: A progress queue with which to associate WorkItems.
+ progress_generator: A generator of progress information.
+ record_generator: A generator of data records.
+ skip_first: Whether to skip the first data record.
+ batch_size: The number of data records per WorkItem.
+ """
+ self.request_manager = request_manager
+ self.progress_queue = progress_queue
+ self.progress_generator = progress_generator
+ self.reader = record_generator
+ self.skip_first = skip_first
+ self.batch_size = batch_size
+ self.line_number = 1
+ self.column_count = None
+ self.read_rows = []
+ self.row_count = 0
+ self.xfer_count = 0
+
+ def _AdvanceTo(self, line):
+ """Advance the reader to the given line.
+
+ Args:
+ line: A line number to advance to.
+ """
+ while self.line_number < line:
+ self.reader.next()
+ self.line_number += 1
+ self.row_count += 1
+ self.xfer_count += 1
+
+ def _ReadRows(self, key_start, key_end):
+ """Attempts to read and encode rows [key_start, key_end].
+
+ The encoded rows are stored in self.read_rows.
+
+ Args:
+ key_start: The starting line number.
+ key_end: The ending line number.
+
+ Raises:
+      StopIteration: If the reader runs out of rows.
+      ResumeError: If there is an inconsistent number of columns.
+ """
+ assert self.line_number == key_start
+ self.read_rows = []
+ while self.line_number <= key_end:
+ row = self.reader.next()
+ self.row_count += 1
+ if self.column_count is None:
+ self.column_count = len(row)
+ else:
+ if self.column_count != len(row):
+ raise ResumeError('Column count mismatch, %d: %s' %
+ (self.column_count, str(row)))
+ self.read_rows.append((self.line_number, row))
+ self.line_number += 1
+
+ def _MakeItem(self, key_start, key_end, rows, progress_key=None):
+ """Makes a UploadWorkItem containing the given rows, with the given keys.
+
+ Args:
+ key_start: The start key for the UploadWorkItem.
+ key_end: The end key for the UploadWorkItem.
+ rows: A list of the rows for the UploadWorkItem.
+ progress_key: The progress key for the UploadWorkItem
+
+ Returns:
+ An UploadWorkItem instance for the given batch.
+ """
+ assert rows
+
+ item = UploadWorkItem(self.request_manager, self.progress_queue, rows,
+ key_start, key_end, progress_key=progress_key)
+
+ return item
+
+ def Batches(self):
+ """Reads from the record_generator and generates UploadWorkItems.
+
+ Yields:
+ Instances of class UploadWorkItem
+
+ Raises:
+ ResumeError: If the progress database and data file indicate a different
+ number of rows.
+ """
+ if self.skip_first:
+ logger.info('Skipping header line.')
+ try:
+ self.reader.next()
+ except StopIteration:
+ return
+
+ exhausted = False
+
+ self.line_number = 1
+ self.column_count = None
+
+ logger.info('Starting import; maximum %d entities per post',
+ self.batch_size)
+
+ state = None
+ if self.progress_generator:
+ for progress_key, state, key_start, key_end in self.progress_generator:
+ if key_start:
+ try:
+ self._AdvanceTo(key_start)
+ self._ReadRows(key_start, key_end)
+ yield self._MakeItem(key_start,
+ key_end,
+ self.read_rows,
+ progress_key=progress_key)
+ except StopIteration:
+ logger.error('Mismatch between data file and progress database')
+ raise ResumeError(
+ 'Mismatch between data file and progress database')
+ elif state == DATA_CONSUMED_TO_HERE:
+ try:
+ self._AdvanceTo(key_end + 1)
+ except StopIteration:
+ state = None
+
+ if self.progress_generator is None or state == DATA_CONSUMED_TO_HERE:
+ while not exhausted:
+ key_start = self.line_number
+ key_end = self.line_number + self.batch_size - 1
+ try:
+ self._ReadRows(key_start, key_end)
+ except StopIteration:
+ exhausted = True
+ key_end = self.line_number - 1
+ if key_start <= key_end:
+ yield self._MakeItem(key_start, key_end, self.read_rows)
+
+
+class CSVGenerator(object):
+ """Reads a CSV file and generates data records."""
+
+ def __init__(self,
+ csv_filename,
+ openfile=open,
+ create_csv_reader=csv.reader):
+ """Initializes a CSV generator.
+
+ Args:
+ csv_filename: File on disk containing CSV data.
+ openfile: Used for dependency injection of 'open'.
+ create_csv_reader: Used for dependency injection of 'csv.reader'.
+ """
+ self.csv_filename = csv_filename
+ self.openfile = openfile
+ self.create_csv_reader = create_csv_reader
+
+ def Records(self):
+ """Reads the CSV data file and generates row records.
+
+ Yields:
+ Lists of strings
+
+ Raises:
+ ResumeError: If the progress database and data file indicate a different
+ number of rows.
+ """
+ csv_file = self.openfile(self.csv_filename, 'rb')
+ reader = self.create_csv_reader(csv_file, skipinitialspace=True)
+ try:
+ for record in reader:
+ yield record
+ except csv.Error, e:
+ if e.args and e.args[0].startswith('field larger than field limit'):
+ limit = e.args[1]
+ raise FieldSizeLimitError(limit)
+ else:
+ raise
+
+
+class KeyRangeItemGenerator(object):
+ """Generates ranges of keys to download.
+
+ Reads progress information from the progress database and creates
+ KeyRangeItem objects corresponding to incompletely downloaded parts of an
+ export.
+ """
+
+ def __init__(self, request_manager, kind, progress_queue, progress_generator,
+ key_range_item_factory):
+ """Initialize the KeyRangeItemGenerator.
+
+ Args:
+ request_manager: A RequestManager instance.
+ kind: The kind of entities being transferred.
+ progress_queue: A queue used for tracking progress information.
+ progress_generator: A generator of prior progress information, or None
+ if there is no prior status.
+ key_range_item_factory: A factory to produce KeyRangeItems.
+ """
+ self.request_manager = request_manager
+ self.kind = kind
+ self.row_count = 0
+ self.xfer_count = 0
+ self.progress_queue = progress_queue
+ self.progress_generator = progress_generator
+ self.key_range_item_factory = key_range_item_factory
+
+ def Batches(self):
+ """Iterate through saved progress information.
+
+ Yields:
+ KeyRangeItem instances corresponding to undownloaded key ranges.
+ """
+ if self.progress_generator is not None:
+ for progress_key, state, key_start, key_end in self.progress_generator:
+ if state is not None and state != STATE_GOT and key_start is not None:
+ key_start = ParseKey(key_start)
+ key_end = ParseKey(key_end)
+
+ key_range = KeyRange(key_start=key_start,
+ key_end=key_end)
+
+ result = self.key_range_item_factory(self.request_manager,
+ self.progress_queue,
+ self.kind,
+ key_range,
+ progress_key=progress_key,
+ state=STATE_READ)
+ yield result
+ else:
+ key_range = KeyRange()
+
+ yield self.key_range_item_factory(self.request_manager,
+ self.progress_queue,
+ self.kind,
+ key_range)
+
+
+class DownloadResult(object):
+ """Holds the result of an entity download."""
+
+ def __init__(self, continued, direction, keys, entities):
+ self.continued = continued
+ self.direction = direction
+ self.keys = keys
+ self.entities = entities
+ self.count = len(keys)
+ assert self.count == len(entities)
+ assert direction in (key_range_module.KeyRange.ASC,
+ key_range_module.KeyRange.DESC)
+ if self.count > 0:
+ if direction == key_range_module.KeyRange.ASC:
+ self.key_start = keys[0]
+ self.key_end = keys[-1]
+ else:
+ self.key_start = keys[-1]
+ self.key_end = keys[0]
+
+ def Entities(self):
+ """Returns the list of entities for this result in key order."""
+ if self.direction == key_range_module.KeyRange.ASC:
+ return list(self.entities)
+ else:
+ result = list(self.entities)
+ result.reverse()
+ return result
+
+ def __str__(self):
+ return 'continued = %s\n%s' % (
+ str(self.continued), '\n'.join(str(self.entities)))
+
+
+class _WorkItem(adaptive_thread_pool.WorkItem):
+ """Holds a description of a unit of upload or download work."""
+
+ def __init__(self, progress_queue, key_start, key_end, state_namer,
+ state=STATE_READ, progress_key=None):
+ """Initialize the _WorkItem instance.
+
+ Args:
+ progress_queue: A queue used for tracking progress information.
+ key_start: The start key of the work item.
+ key_end: The end key of the work item.
+ state_namer: Function to describe work item states.
+ state: The initial state of the work item.
+ progress_key: If this WorkItem represents state from a prior run,
+ then this will be the key within the progress database.
+ """
+ adaptive_thread_pool.WorkItem.__init__(self,
+ '[%s-%s]' % (key_start, key_end))
+ self.progress_queue = progress_queue
+ self.state_namer = state_namer
+ self.state = state
+ self.progress_key = progress_key
+ self.progress_event = threading.Event()
+ self.key_start = key_start
+ self.key_end = key_end
+ self.error = None
+ self.traceback = None
+
+ def _TransferItem(self, thread_pool):
+ raise NotImplementedError()
+
+ def SetError(self):
+ """Sets the error and traceback information for this thread.
+
+ This must be called from an exception handler.
+ """
+ if not self.error:
+ exc_info = sys.exc_info()
+ self.error = exc_info[1]
+ self.traceback = exc_info[2]
+
+ def PerformWork(self, thread_pool):
+ """Perform the work of this work item and report the results.
+
+ Args:
+ thread_pool: An AdaptiveThreadPool instance.
+
+ Returns:
+ A tuple (status, instruction) of the work status and an instruction
+ for the ThreadGate.
+ """
+ status = adaptive_thread_pool.WorkItem.FAILURE
+ instruction = adaptive_thread_pool.ThreadGate.DECREASE
+
+ try:
+ self.MarkAsTransferring()
+
+ try:
+ transfer_time = self._TransferItem(thread_pool)
+ if transfer_time is None:
+ status = adaptive_thread_pool.WorkItem.RETRY
+ instruction = adaptive_thread_pool.ThreadGate.HOLD
+ else:
+ logger.debug('[%s] %s Transferred %d entities in %0.1f seconds',
+ threading.currentThread().getName(), self, self.count,
+ transfer_time)
+ sys.stdout.write('.')
+ sys.stdout.flush()
+ status = adaptive_thread_pool.WorkItem.SUCCESS
+ if transfer_time <= MAXIMUM_INCREASE_DURATION:
+ instruction = adaptive_thread_pool.ThreadGate.INCREASE
+ elif transfer_time <= MAXIMUM_HOLD_DURATION:
+ instruction = adaptive_thread_pool.ThreadGate.HOLD
+ except (db.InternalError, db.NotSavedError, db.Timeout,
+ db.TransactionFailedError,
+ apiproxy_errors.OverQuotaError,
+ apiproxy_errors.DeadlineExceededError,
+ apiproxy_errors.ApplicationError), e:
+ status = adaptive_thread_pool.WorkItem.RETRY
+ logger.exception('Retrying on non-fatal datastore error: %s', e)
+ except urllib2.HTTPError, e:
+ http_status = e.code
+ if http_status == 403 or (http_status >= 500 and http_status < 600):
+ status = adaptive_thread_pool.WorkItem.RETRY
+ logger.exception('Retrying on non-fatal HTTP error: %d %s',
+ http_status, e.msg)
+ else:
+ self.SetError()
+ status = adaptive_thread_pool.WorkItem.FAILURE
+ except urllib2.URLError, e:
+ if IsURLErrorFatal(e):
+ self.SetError()
+ status = adaptive_thread_pool.WorkItem.FAILURE
+ else:
+ status = adaptive_thread_pool.WorkItem.RETRY
+ logger.exception('Retrying on non-fatal URL error: %s', e.reason)
+
+ finally:
+ if status == adaptive_thread_pool.WorkItem.SUCCESS:
+ self.MarkAsTransferred()
+ else:
+ self.MarkAsError()
+
+ return (status, instruction)
+
+ def _AssertInState(self, *states):
+ """Raises an Error if the state of this range is not in states."""
+    if self.state not in states:
+ raise BadStateError('%s:%s not in %s' %
+ (str(self),
+ self.state_namer(self.state),
+ map(self.state_namer, states)))
+
+ def _AssertProgressKey(self):
+ """Raises an Error if the progress key is None."""
+ if self.progress_key is None:
+ raise BadStateError('%s: Progress key is missing' % str(self))
+
+ def MarkAsRead(self):
+ """Mark this _WorkItem as read, updating the progress database."""
+ self._AssertInState(STATE_READ)
+ self._StateTransition(STATE_READ, blocking=True)
+
+ def MarkAsTransferring(self):
+ """Mark this _WorkItem as transferring, updating the progress database."""
+ self._AssertInState(STATE_READ, STATE_ERROR)
+ self._AssertProgressKey()
+ self._StateTransition(STATE_GETTING, blocking=True)
+
+ def MarkAsTransferred(self):
+ """Mark this _WorkItem as transferred, updating the progress database."""
+ raise NotImplementedError()
+
+ def MarkAsError(self):
+ """Mark this _WorkItem as failed, updating the progress database."""
+ self._AssertInState(STATE_GETTING)
+ self._AssertProgressKey()
+ self._StateTransition(STATE_ERROR, blocking=True)
+
+ def _StateTransition(self, new_state, blocking=False):
+ """Transition the work item to a new state, storing progress information.
+
+ Args:
+ new_state: The state to transition to.
+ blocking: Whether to block for the progress thread to acknowledge the
+ transition.
+ """
+ assert not self.progress_event.isSet()
+
+ self.state = new_state
+
+ self.progress_queue.put(self)
+
+ if blocking:
+ self.progress_event.wait()
+
+ self.progress_event.clear()
+
+
+
+class UploadWorkItem(_WorkItem):
+ """Holds a unit of uploading work.
+
+ A UploadWorkItem represents a number of entities that need to be uploaded to
+ Google App Engine. These entities are encoded in the "content" field of
+ the UploadWorkItem, and will be POST'd as-is to the server.
+
+ The entities are identified by a range of numeric keys, inclusively. In
+ the case of a resumption of an upload, or a replay to correct errors,
+ these keys must be able to identify the same set of entities.
+
+ Note that keys specify a range. The entities do not have to sequentially
+ fill the entire range, they must simply bound a range of valid keys.
+ """
+
+ def __init__(self, request_manager, progress_queue, rows, key_start, key_end,
+ progress_key=None):
+ """Initialize the UploadWorkItem instance.
+
+ Args:
+ request_manager: A RequestManager instance.
+ progress_queue: A queue used for tracking progress information.
+      rows: A list of pairs of a line number and a list of column values.
+ key_start: The (numeric) starting key, inclusive.
+ key_end: The (numeric) ending key, inclusive.
+ progress_key: If this UploadWorkItem represents state from a prior run,
+ then this will be the key within the progress database.
+ """
+ _WorkItem.__init__(self, progress_queue, key_start, key_end,
+ ImportStateName, state=STATE_READ,
+ progress_key=progress_key)
+
+ assert isinstance(key_start, (int, long))
+ assert isinstance(key_end, (int, long))
+ assert key_start <= key_end
+
+ self.request_manager = request_manager
+ self.rows = rows
+ self.content = None
+ self.count = len(rows)
+
+ def __str__(self):
+ return '[%s-%s]' % (self.key_start, self.key_end)
+
+  def _TransferItem(self, thread_pool, get_time=time.time):
+    """Transfers the entities associated with an item.
+
+    Args:
+      thread_pool: An AdaptiveThreadPool instance.
+      get_time: Used for dependency injection.
+
+    Returns:
+      The duration of the transfer in seconds.
+    """
+    t = get_time()
+    if not self.content:
+      self.content = self.request_manager.EncodeContent(self.rows)
+    self.request_manager.PostEntities(self.content)
+    return get_time() - t
+
+ def MarkAsTransferred(self):
+ """Mark this UploadWorkItem as sucessfully-sent to the server."""
+
+ self._AssertInState(STATE_SENDING)
+ self._AssertProgressKey()
+
+ self._StateTransition(STATE_SENT, blocking=False)
+
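+
+# Illustrative sketch (not part of the original module): building an
+# UploadWorkItem for CSV lines 1-2. The rows are (line_number, column_values)
+# pairs, and the numeric key range [1, 2] re-identifies the same rows on a
+# resumed run. The request_manager argument is a stand-in for a configured
+# RequestManager instance.
+def _ExampleUploadWorkItem(request_manager):
+  progress_queue = Queue.Queue()
+  rows = [(1, ['hello', '3']), (2, ['world', '5'])]
+  return UploadWorkItem(request_manager, progress_queue, rows,
+                        key_start=1, key_end=2)
+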
+
+def GetImplementationClass(kind_or_class_key):
+ """Returns the implementation class for a given kind or class key.
+
+ Args:
+ kind_or_class_key: A kind string or a tuple of kind strings.
+
+  Returns:
+ A db.Model subclass for the given kind or class key.
+ """
+ if isinstance(kind_or_class_key, tuple):
+ try:
+ implementation_class = polymodel._class_map[kind_or_class_key]
+ except KeyError:
+ raise db.KindError('No implementation for class \'%s\'' %
+ kind_or_class_key)
+ else:
+ implementation_class = db.class_for_kind(kind_or_class_key)
+ return implementation_class
+
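+
+# Illustrative sketch (not part of the original module): a plain kind string
+# resolves through db.class_for_kind, so defining a model class is enough to
+# make its kind resolvable. 'Greeting' is a hypothetical kind used only here.
+def _ExampleGetImplementationClass():
+  class Greeting(db.Model):
+    content = db.StringProperty()
+  assert GetImplementationClass('Greeting') is Greeting
+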
+
+def KeyLEQ(key1, key2):
+ """Compare two keys for less-than-or-equal-to.
+
+ All keys with numeric ids come before all keys with names. None represents
+ an unbounded end-point so it is both greater and less than any other key.
+
+ Args:
+ key1: An int or datastore.Key instance.
+ key2: An int or datastore.Key instance.
+
+ Returns:
+ True if key1 <= key2
+ """
+ if key1 is None or key2 is None:
+ return True
+ return key1 <= key2
+
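+
+# Illustrative sketch (not part of the original module): None acts as an
+# unbounded endpoint, so KeyLEQ accepts it on either side of the comparison.
+def _ExampleKeyLEQ():
+  assert KeyLEQ(None, 5)                    # unbounded start
+  assert KeyLEQ(5, None)                    # unbounded end
+  assert KeyLEQ(3, 7) and not KeyLEQ(7, 3)  # ordinary comparison
+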
+
+class KeyRangeItem(_WorkItem):
+ """Represents an item of work that scans over a key range.
+
+  A KeyRangeItem object holds a KeyRange and has an associated state:
+  STATE_READ, STATE_GETTING, STATE_GOT, or STATE_ERROR.
+
+  - STATE_READ indicates the range is ready to be downloaded by a worker
+    thread.
+  - STATE_GETTING indicates the range is currently being downloaded.
+  - STATE_GOT indicates that the range was successfully downloaded.
+  - STATE_ERROR indicates that an error occurred during the last download
+    attempt.
+
+ KeyRangeItems not in the STATE_GOT state are stored in the progress database.
+ When a piece of KeyRangeItem work is downloaded, the download may cover only
+ a portion of the range. In this case, the old KeyRangeItem is removed from
+ the progress database and ranges covering the undownloaded range are
+ generated and stored as STATE_READ in the export progress database.
+ """
+
+ def __init__(self,
+ request_manager,
+ progress_queue,
+ kind,
+ key_range,
+ progress_key=None,
+ state=STATE_READ):
+ """Initialize a KeyRangeItem object.
+
+ Args:
+ request_manager: A RequestManager instance.
+ progress_queue: A queue used for tracking progress information.
+ kind: The kind of entities for this range.
+ key_range: A KeyRange instance for this work item.
+ progress_key: The key for this range within the progress database.
+ state: The initial state of this range.
+ """
+ _WorkItem.__init__(self, progress_queue, key_range.key_start,
+ key_range.key_end, ExportStateName, state=state,
+ progress_key=progress_key)
+ self.request_manager = request_manager
+ self.kind = kind
+ self.key_range = key_range
+ self.download_result = None
+ self.count = 0
+ self.key_start = key_range.key_start
+ self.key_end = key_range.key_end
+
+ def __str__(self):
+ return str(self.key_range)
+
+ def __repr__(self):
+ return self.__str__()
+
+ def MarkAsTransferred(self):
+ """Mark this KeyRangeItem as transferred, updating the progress database."""
+ pass
+
+ def Process(self, download_result, thread_pool, batch_size,
+ new_state=STATE_GOT):
+ """Mark this KeyRangeItem as success, updating the progress database.
+
+ Process will split this KeyRangeItem based on the content of
+ download_result and adds the unfinished ranges to the work queue.
+
+ Args:
+ download_result: A DownloadResult instance.
+ thread_pool: An AdaptiveThreadPool instance.
+ batch_size: The number of entities to transfer per request.
+ new_state: The state to transition the completed range to.
+ """
+ self._AssertInState(STATE_GETTING)
+ self._AssertProgressKey()
+
+ self.download_result = download_result
+ self.count = len(download_result.keys)
+ if download_result.continued:
+ self._FinishedRange()._StateTransition(new_state, blocking=True)
+ self._AddUnfinishedRanges(thread_pool, batch_size)
+ else:
+ self._StateTransition(new_state, blocking=True)
+
+ def _FinishedRange(self):
+ """Returns the range completed by the download_result.
+
+ Returns:
+ A KeyRangeItem representing a completed range.
+ """
+ assert self.download_result is not None
+
+ if self.key_range.direction == key_range_module.KeyRange.ASC:
+ key_start = self.key_range.key_start
+ if self.download_result.continued:
+ key_end = self.download_result.key_end
+ else:
+ key_end = self.key_range.key_end
+ else:
+ key_end = self.key_range.key_end
+ if self.download_result.continued:
+ key_start = self.download_result.key_start
+ else:
+ key_start = self.key_range.key_start
+
+ key_range = KeyRange(key_start=key_start,
+ key_end=key_end,
+ direction=self.key_range.direction)
+
+ result = self.__class__(self.request_manager,
+ self.progress_queue,
+ self.kind,
+ key_range,
+ progress_key=self.progress_key,
+ state=self.state)
+
+ result.download_result = self.download_result
+ result.count = self.count
+ return result
+
+ def _SplitAndAddRanges(self, thread_pool, batch_size):
+ """Split the key range [key_start, key_end] into a list of ranges."""
+ if self.download_result.direction == key_range_module.KeyRange.ASC:
+ key_range = KeyRange(
+ key_start=self.download_result.key_end,
+ key_end=self.key_range.key_end,
+ include_start=False)
+ else:
+ key_range = KeyRange(
+ key_start=self.key_range.key_start,
+ key_end=self.download_result.key_start,
+ include_end=False)
+
+ if thread_pool.QueuedItemCount() > 2 * thread_pool.num_threads():
+ ranges = [key_range]
+ else:
+ ranges = key_range.split_range(batch_size=batch_size)
+
+ for key_range in ranges:
+ key_range_item = self.__class__(self.request_manager,
+ self.progress_queue,
+ self.kind,
+ key_range)
+ key_range_item.MarkAsRead()
+ thread_pool.SubmitItem(key_range_item, block=True)
+
+ def _AddUnfinishedRanges(self, thread_pool, batch_size):
+ """Adds incomplete KeyRanges to the thread_pool.
+
+ Args:
+ thread_pool: An AdaptiveThreadPool instance.
+ batch_size: The number of entities to transfer per request.
+
+ Returns:
+ A list of KeyRanges representing incomplete datastore key ranges.
+
+ Raises:
+ KeyRangeError: if this key range has already been completely transferred.
+ """
+ assert self.download_result is not None
+ if self.download_result.continued:
+ self._SplitAndAddRanges(thread_pool, batch_size)
+ else:
+ raise KeyRangeError('No unfinished part of key range.')
+
+
+class DownloadItem(KeyRangeItem):
+ """A KeyRangeItem for downloading key ranges."""
+
+ def _TransferItem(self, thread_pool, get_time=time.time):
+ """Transfers the entities associated with an item."""
+ t = get_time()
+ download_result = self.request_manager.GetEntities(self)
+ transfer_time = get_time() - t
+ self.Process(download_result, thread_pool,
+ self.request_manager.batch_size)
+ return transfer_time
+
+
+class MapperItem(KeyRangeItem):
+ """A KeyRangeItem for mapping over key ranges."""
+
+ def _TransferItem(self, thread_pool, get_time=time.time):
+ t = get_time()
+ download_result = self.request_manager.GetEntities(self)
+ transfer_time = get_time() - t
+ mapper = self.request_manager.GetMapper()
+ try:
+ mapper.batch_apply(download_result.Entities())
+ except MapperRetry:
+ return None
+ self.Process(download_result, thread_pool,
+ self.request_manager.batch_size)
+ return transfer_time
+
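+
+# Illustrative sketch (not part of the original module): seeding an export
+# with a DownloadItem that spans the entire keyspace of a kind. An unbounded
+# KeyRange() is how the first work item starts out; Process() later splits
+# off sub-ranges as batches complete. The arguments are stand-ins.
+def _ExampleSeedDownloadItem(request_manager, kind):
+  progress_queue = Queue.Queue()
+  return DownloadItem(request_manager, progress_queue, kind, KeyRange())
+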
+
+class RequestManager(object):
+ """A class which wraps a connection to the server."""
+
+ def __init__(self,
+ app_id,
+ host_port,
+ url_path,
+ kind,
+ throttle,
+ batch_size,
+ secure,
+ email,
+ passin,
+ dry_run=False):
+ """Initialize a RequestManager object.
+
+ Args:
+ app_id: String containing the application id for requests.
+ host_port: String containing the "host:port" pair; the port is optional.
+ url_path: partial URL (path) to post entity data to.
+ kind: Kind of the Entity records being posted.
+ throttle: A Throttle instance.
+ batch_size: The number of entities to transfer per request.
+      secure: Use SSL when communicating with the server.
+      email: If not None, the username to log in with.
+      passin: If True, the password will be read from standard input.
+      dry_run: If True, skip remote_api setup and do not actually write to
+        the datastore.
+    """
+ self.app_id = app_id
+ self.host_port = host_port
+ self.host = host_port.split(':')[0]
+ if url_path and url_path[0] != '/':
+ url_path = '/' + url_path
+ self.url_path = url_path
+ self.kind = kind
+ self.throttle = throttle
+ self.batch_size = batch_size
+ self.secure = secure
+ self.authenticated = False
+ self.auth_called = False
+ self.parallel_download = True
+ self.email = email
+ self.passin = passin
+ self.mapper = None
+ self.dry_run = dry_run
+
+ if self.dry_run:
+ logger.info('Running in dry run mode, skipping remote_api setup')
+ return
+
+ logger.debug('Configuring remote_api. url_path = %s, '
+ 'servername = %s' % (url_path, host_port))
+
+ def CookieHttpRpcServer(*args, **kwargs):
+ kwargs['save_cookies'] = True
+ kwargs['account_type'] = 'HOSTED_OR_GOOGLE'
+ return appengine_rpc.HttpRpcServer(*args, **kwargs)
+
+ remote_api_stub.ConfigureRemoteDatastore(
+ app_id,
+ url_path,
+ self.AuthFunction,
+ servername=host_port,
+ rpc_server_factory=CookieHttpRpcServer,
+ secure=self.secure)
+ remote_api_throttle.ThrottleRemoteDatastore(self.throttle)
+ logger.debug('Bulkloader using app_id: %s', os.environ['APPLICATION_ID'])
+
+ def Authenticate(self):
+ """Invoke authentication if necessary."""
+ logger.info('Connecting to %s%s', self.host_port, self.url_path)
+ if self.dry_run:
+ self.authenticated = True
+ return
+
+ remote_api_stub.MaybeInvokeAuthentication()
+ self.authenticated = True
+
+ def AuthFunction(self,
+ raw_input_fn=raw_input,
+ password_input_fn=getpass.getpass):
+ """Prompts the user for a username and password.
+
+ Caches the results the first time it is called and returns the
+ same result every subsequent time.
+
+ Args:
+ raw_input_fn: Used for dependency injection.
+ password_input_fn: Used for dependency injection.
+
+ Returns:
+ A pair of the username and password.
+ """
+ if self.email:
+ email = self.email
+ else:
+ print 'Please enter login credentials for %s' % (
+ self.host)
+ email = raw_input_fn('Email: ')
+
+ if email:
+ password_prompt = 'Password for %s: ' % email
+ if self.passin:
+ password = raw_input_fn(password_prompt)
+ else:
+ password = password_input_fn(password_prompt)
+ else:
+ password = None
+
+ self.auth_called = True
+ return (email, password)
+
+ def EncodeContent(self, rows, loader=None):
+ """Encodes row data to the wire format.
+
+ Args:
+ rows: A list of pairs of a line number and a list of column values.
+ loader: Used for dependency injection.
+
+ Returns:
+ A list of datastore.Entity instances.
+
+ Raises:
+ ConfigurationError: if no loader is defined for self.kind
+ """
+ if not loader:
+ try:
+ loader = Loader.RegisteredLoader(self.kind)
+ except KeyError:
+ logger.error('No Loader defined for kind %s.' % self.kind)
+ raise ConfigurationError('No Loader defined for kind %s.' % self.kind)
+    def ToEntity(entity):
+      if isinstance(entity, db.Model):
+        return entity._populate_entity()
+      else:
+        return entity
+
+    entities = []
+    for line_number, values in rows:
+      key = loader.generate_key(line_number, values)
+      if isinstance(key, datastore.Key):
+        parent = key.parent()
+        key = key.name()
+      else:
+        parent = None
+      entity = loader.create_entity(values, key_name=key, parent=parent)
+
+      if isinstance(entity, list):
+        entities.extend(map(ToEntity, entity))
+      elif entity:
+        entities.append(ToEntity(entity))
+
+ return entities
+
+ def PostEntities(self, entities):
+ """Posts Entity records to a remote endpoint over HTTP.
+
+ Args:
+ entities: A list of datastore entities.
+ """
+ if self.dry_run:
+ return
+ datastore.Put(entities)
+
+ def _QueryForPbs(self, query):
+ """Perform the given query and return a list of entity_pb's."""
+ try:
+ query_pb = query._ToPb(limit=self.batch_size)
+ result_pb = datastore_pb.QueryResult()
+ apiproxy_stub_map.MakeSyncCall('datastore_v3', 'RunQuery', query_pb,
+ result_pb)
+ next_pb = datastore_pb.NextRequest()
+ next_pb.set_count(self.batch_size)
+ next_pb.mutable_cursor().CopyFrom(result_pb.cursor())
+ result_pb = datastore_pb.QueryResult()
+ apiproxy_stub_map.MakeSyncCall('datastore_v3', 'Next', next_pb, result_pb)
+ return result_pb.result_list()
+ except apiproxy_errors.ApplicationError, e:
+ raise datastore._ToDatastoreError(e)
+
+ def GetEntities(self, key_range_item, key_factory=datastore.Key):
+ """Gets Entity records from a remote endpoint over HTTP.
+
+ Args:
+ key_range_item: Range of keys to get.
+ key_factory: Used for dependency injection.
+
+ Returns:
+ A DownloadResult instance.
+
+ Raises:
+ ConfigurationError: if no Exporter is defined for self.kind
+ """
+ keys = []
+ entities = []
+
+ if self.parallel_download:
+ query = key_range_item.key_range.make_directed_datastore_query(self.kind)
+ try:
+ results = self._QueryForPbs(query)
+ except datastore_errors.NeedIndexError:
+ logger.info('%s: No descending index on __key__, '
+ 'performing serial download', self.kind)
+ self.parallel_download = False
+
+ if not self.parallel_download:
+ key_range_item.key_range.direction = key_range_module.KeyRange.ASC
+ query = key_range_item.key_range.make_ascending_datastore_query(self.kind)
+ results = self._QueryForPbs(query)
+
+ size = len(results)
+
+ for entity in results:
+ key = key_factory()
+ key._Key__reference = entity.key()
+ entities.append(entity)
+ keys.append(key)
+
+ continued = (size == self.batch_size)
+ key_range_item.count = size
+
+ return DownloadResult(continued, key_range_item.key_range.direction,
+ keys, entities)
+
+ def GetMapper(self):
+ """Returns a mapper for the registered kind.
+
+ Returns:
+ A Mapper instance.
+
+ Raises:
+ ConfigurationError: if no Mapper is defined for self.kind
+ """
+ if not self.mapper:
+ try:
+ self.mapper = Mapper.RegisteredMapper(self.kind)
+ except KeyError:
+ logger.error('No Mapper defined for kind %s.' % self.kind)
+ raise ConfigurationError('No Mapper defined for kind %s.' % self.kind)
+ return self.mapper
+
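+
+# Illustrative sketch (not part of the original module): constructing a
+# RequestManager in dry-run mode, which skips remote_api setup entirely and
+# so needs no credentials. The app id, host, kind, and throttle here are
+# placeholders.
+def _ExampleDryRunRequestManager(throttle):
+  return RequestManager(app_id='example-app',
+                        host_port='localhost:8080',
+                        url_path='remote_api',
+                        kind='Greeting',
+                        throttle=throttle,
+                        batch_size=10,
+                        secure=False,
+                        email=None,
+                        passin=False,
+                        dry_run=True)
+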
+
+def InterruptibleSleep(sleep_time):
+ """Puts thread to sleep, checking this threads exit_flag twice a second.
+
+ Args:
+ sleep_time: Time to sleep.
+ """
+ slept = 0.0
+ epsilon = .0001
+ thread = threading.currentThread()
+ while slept < sleep_time - epsilon:
+ remaining = sleep_time - slept
+ this_sleep_time = min(remaining, 0.5)
+ time.sleep(this_sleep_time)
+ slept += this_sleep_time
+ if thread.exit_flag:
+ return
+
+
+class _ThreadBase(threading.Thread):
+ """Provide some basic features for the threads used in the uploader.
+
+ This abstract base class is used to provide some common features:
+
+ * Flag to ask thread to exit as soon as possible.
+ * Record exit/error status for the primary thread to pick up.
+ * Capture exceptions and record them for pickup.
+ * Some basic logging of thread start/stop.
+ * All threads are "daemon" threads.
+ * Friendly names for presenting to users.
+
+ Concrete sub-classes must implement PerformWork().
+
+ Either self.NAME should be set or GetFriendlyName() be overridden to
+ return a human-friendly name for this thread.
+
+ The run() method starts the thread and prints start/exit messages.
+
+ self.exit_flag is intended to signal that this thread should exit
+ when it gets the chance. PerformWork() should check self.exit_flag
+ whenever it has the opportunity to exit gracefully.
+ """
+
+ def __init__(self):
+ threading.Thread.__init__(self)
+
+ self.setDaemon(True)
+
+ self.exit_flag = False
+ self.error = None
+ self.traceback = None
+
+ def run(self):
+ """Perform the work of the thread."""
+ logger.debug('[%s] %s: started', self.getName(), self.__class__.__name__)
+
+ try:
+ self.PerformWork()
+ except:
+ self.SetError()
+ logger.exception('[%s] %s:', self.getName(), self.__class__.__name__)
+
+ logger.debug('[%s] %s: exiting', self.getName(), self.__class__.__name__)
+
+ def SetError(self):
+ """Sets the error and traceback information for this thread.
+
+ This must be called from an exception handler.
+ """
+ if not self.error:
+ exc_info = sys.exc_info()
+ self.error = exc_info[1]
+ self.traceback = exc_info[2]
+
+ def PerformWork(self):
+ """Perform the thread-specific work."""
+ raise NotImplementedError()
+
+ def CheckError(self):
+ """If an error is present, then log it."""
+ if self.error:
+ logger.error('Error in %s: %s', self.GetFriendlyName(), self.error)
+ if self.traceback:
+ logger.debug(''.join(traceback.format_exception(self.error.__class__,
+ self.error,
+ self.traceback)))
+
+ def GetFriendlyName(self):
+ """Returns a human-friendly description of the thread."""
+ if hasattr(self, 'NAME'):
+ return self.NAME
+ return 'unknown thread'
+
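+
+# Illustrative sketch (not part of the original module): a minimal
+# _ThreadBase worker whose InterruptibleSleep is cut short by setting
+# exit_flag, demonstrating the cooperative-shutdown contract that
+# PerformWork implementations are expected to honor.
+def _ExampleInterruptibleWorker():
+  class Sleeper(_ThreadBase):
+    NAME = 'example sleeper'
+
+    def PerformWork(self):
+      InterruptibleSleep(10.0)  # returns within ~0.5s of exit_flag being set
+  worker = Sleeper()
+  worker.start()
+  worker.exit_flag = True      # request shutdown
+  worker.join()
+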
+
+non_fatal_error_codes = set([errno.EAGAIN,
+ errno.ENETUNREACH,
+ errno.ENETRESET,
+ errno.ECONNRESET,
+ errno.ETIMEDOUT,
+ errno.EHOSTUNREACH])
+
+
+def IsURLErrorFatal(error):
+ """Returns False if the given URLError may be from a transient failure.
+
+ Args:
+ error: A urllib2.URLError instance.
+ """
+ assert isinstance(error, urllib2.URLError)
+ if not hasattr(error, 'reason'):
+ return True
+ if not isinstance(error.reason[0], int):
+ return True
+ return error.reason[0] not in non_fatal_error_codes
+
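+
+# Illustrative sketch (not part of the original module): a connection reset
+# carries a socket errno and is treated as transient, while a URLError whose
+# reason has no errno is treated as fatal.
+def _ExampleIsURLErrorFatal():
+  assert not IsURLErrorFatal(urllib2.URLError((errno.ECONNRESET, 'reset')))
+  assert IsURLErrorFatal(urllib2.URLError('no route to host'))
+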
+
+class DataSourceThread(_ThreadBase):
+ """A thread which reads WorkItems and pushes them into queue.
+
+ This thread will read/consume WorkItems from a generator (produced by
+ the generator factory). These WorkItems will then be pushed into the
+ thread_pool. Note that reading will block if/when the thread_pool becomes
+ full. Information on content consumed from the generator will be pushed
+ into the progress_queue.
+ """
+
+ NAME = 'data source thread'
+
+ def __init__(self,
+ request_manager,
+ thread_pool,
+ progress_queue,
+ workitem_generator_factory,
+ progress_generator_factory):
+ """Initialize the DataSourceThread instance.
+
+ Args:
+ request_manager: A RequestManager instance.
+ thread_pool: An AdaptiveThreadPool instance.
+ progress_queue: A queue used for tracking progress information.
+      workitem_generator_factory: A factory that creates a WorkItem generator.
+ progress_generator_factory: A factory that creates a generator which
+ produces prior progress status, or None if there is no prior status
+ to use.
+ """
+ _ThreadBase.__init__(self)
+
+ self.request_manager = request_manager
+ self.thread_pool = thread_pool
+ self.progress_queue = progress_queue
+ self.workitem_generator_factory = workitem_generator_factory
+ self.progress_generator_factory = progress_generator_factory
+ self.entity_count = 0
+
+ def PerformWork(self):
+ """Performs the work of a DataSourceThread."""
+ if self.progress_generator_factory:
+ progress_gen = self.progress_generator_factory()
+ else:
+ progress_gen = None
+
+ content_gen = self.workitem_generator_factory(self.request_manager,
+ self.progress_queue,
+ progress_gen)
+
+ self.xfer_count = 0
+ self.read_count = 0
+ self.read_all = False
+
+ for item in content_gen.Batches():
+ item.MarkAsRead()
+
+ while not self.exit_flag:
+ try:
+ self.thread_pool.SubmitItem(item, block=True, timeout=1.0)
+ self.entity_count += item.count
+ break
+ except Queue.Full:
+ pass
+
+ if self.exit_flag:
+ break
+
+ if not self.exit_flag:
+ self.read_all = True
+ self.read_count = content_gen.row_count
+ self.xfer_count = content_gen.xfer_count
+
+
+
+def _RunningInThread(thread):
+ """Return True if we are running within the specified thread."""
+ return threading.currentThread().getName() == thread.getName()
+
+
+class _Database(object):
+ """Base class for database connections in this module.
+
+ The table is created by a primary thread (the python main thread)
+ but all future lookups and updates are performed by a secondary
+ thread.
+ """
+
+ SIGNATURE_TABLE_NAME = 'bulkloader_database_signature'
+
+ def __init__(self,
+ db_filename,
+ create_table,
+ signature,
+ index=None,
+ commit_periodicity=100):
+ """Initialize the _Database instance.
+
+ Args:
+ db_filename: The sqlite3 file to use for the database.
+ create_table: A string containing the SQL table creation command.
+ signature: A string identifying the important invocation options,
+ used to make sure we are not using an old database.
+ index: An optional string to create an index for the database.
+ commit_periodicity: Number of operations between database commits.
+ """
+ self.db_filename = db_filename
+
+ logger.info('Opening database: %s', db_filename)
+ self.primary_conn = sqlite3.connect(db_filename, isolation_level=None)
+ self.primary_thread = threading.currentThread()
+
+ self.secondary_conn = None
+ self.secondary_thread = None
+
+ self.operation_count = 0
+ self.commit_periodicity = commit_periodicity
+
+ try:
+ self.primary_conn.execute(create_table)
+ except sqlite3.OperationalError, e:
+ if 'already exists' not in e.message:
+ raise
+
+ if index:
+ try:
+ self.primary_conn.execute(index)
+ except sqlite3.OperationalError, e:
+ if 'already exists' not in e.message:
+ raise
+
+ self.existing_table = False
+ signature_cursor = self.primary_conn.cursor()
+ create_signature = """
+ create table %s (
+ value TEXT not null)
+ """ % _Database.SIGNATURE_TABLE_NAME
+ try:
+ self.primary_conn.execute(create_signature)
+ self.primary_conn.cursor().execute(
+ 'insert into %s (value) values (?)' % _Database.SIGNATURE_TABLE_NAME,
+ (signature,))
+ except sqlite3.OperationalError, e:
+ if 'already exists' not in e.message:
+ logger.exception('Exception creating table:')
+ raise
+ else:
+ self.existing_table = True
+ signature_cursor.execute(
+ 'select * from %s' % _Database.SIGNATURE_TABLE_NAME)
+ (result,) = signature_cursor.fetchone()
+ if result and result != signature:
+ logger.error('Database signature mismatch:\n\n'
+ 'Found:\n'
+ '%s\n\n'
+ 'Expecting:\n'
+ '%s\n',
+ result, signature)
+ raise ResumeError('Database signature mismatch: %s != %s' % (
+ signature, result))
+
+ def ThreadComplete(self):
+ """Finalize any operations the secondary thread has performed.
+
+ The database aggregates lots of operations into a single commit, and
+ this method is used to commit any pending operations as the thread
+ is about to shut down.
+ """
+ if self.secondary_conn:
+ self._MaybeCommit(force_commit=True)
+
+ def _MaybeCommit(self, force_commit=False):
+ """Periodically commit changes into the SQLite database.
+
+ Committing every operation is quite expensive, and slows down the
+ operation of the script. Thus, we only commit after every N operations,
+ as determined by the self.commit_periodicity value. Optionally, the
+ caller can force a commit.
+
+ Args:
+ force_commit: Pass True in order for a commit to occur regardless
+ of the current operation count.
+ """
+ self.operation_count += 1
+ if force_commit or (self.operation_count % self.commit_periodicity) == 0:
+ self.secondary_conn.commit()
+
+ def _OpenSecondaryConnection(self):
+ """Possibly open a database connection for the secondary thread.
+
+ If the connection is not open (for the calling thread, which is assumed
+    to be the unique secondary thread), then open it. We also open a couple
+    of cursors for later use (and reuse).
+ """
+ if self.secondary_conn:
+ return
+
+ assert not _RunningInThread(self.primary_thread)
+
+ self.secondary_thread = threading.currentThread()
+
+ self.secondary_conn = sqlite3.connect(self.db_filename)
+
+ self.insert_cursor = self.secondary_conn.cursor()
+ self.update_cursor = self.secondary_conn.cursor()
+
+
+zero_matcher = re.compile(r'\x00')
+
+zero_one_matcher = re.compile(r'\x00\x01')
+
+
+def KeyStr(key):
+ """Returns a string to represent a key, preserving ordering.
+
+ Unlike datastore.Key.__str__(), we have the property:
+
+ key1 < key2 ==> KeyStr(key1) < KeyStr(key2)
+
+ The key string is constructed from the key path as follows:
+    (1) Strings are prepended with ':' and numeric ids are padded to
+    20 digits.
+    (2) Any null characters (u'\0') present are replaced with u'\0\1'.
+ (3) The sequence u'\0\0' is used to separate each component of the path.
+
+ (1) assures that names and ids compare properly, while (2) and (3) enforce
+ the part-by-part comparison of pieces of the path.
+
+ Args:
+ key: A datastore.Key instance.
+
+ Returns:
+ A string representation of the key, which preserves ordering.
+ """
+ assert isinstance(key, datastore.Key)
+ path = key.to_path()
+
+ out_path = []
+ for part in path:
+ if isinstance(part, (int, long)):
+ part = '%020d' % part
+ else:
+ part = ':%s' % part
+
+ out_path.append(zero_matcher.sub(u'\0\1', part))
+
+ out_str = u'\0\0'.join(out_path)
+
+ return out_str
+
+
+def StrKey(key_str):
+ """The inverse of the KeyStr function.
+
+ Args:
+ key_str: A string in the range of KeyStr.
+
+ Returns:
+ A datastore.Key instance k, such that KeyStr(k) == key_str.
+ """
+ parts = key_str.split(u'\0\0')
+ for i in xrange(len(parts)):
+ if parts[i][0] == ':':
+ part = parts[i][1:]
+ part = zero_one_matcher.sub(u'\0', part)
+ parts[i] = part
+ else:
+ parts[i] = int(parts[i])
+ return datastore.Key.from_path(*parts)
+
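+
+# Illustrative sketch (not part of the original module): a KeyStr/StrKey
+# round trip, plus the ordering property KeyStr guarantees. Assumes
+# APPLICATION_ID is set in the environment (as it is under remote_api or the
+# dev server) so datastore.Key.from_path can resolve an app id; 'Greeting'
+# is a hypothetical kind.
+def _ExampleKeyStrRoundTrip():
+  key = datastore.Key.from_path('Greeting', 42)
+  assert StrKey(KeyStr(key)) == key
+  earlier = datastore.Key.from_path('Greeting', 7)
+  assert KeyStr(earlier) < KeyStr(key)  # string order matches key order
+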
+
+class ResultDatabase(_Database):
+ """Persistently record all the entities downloaded during an export.
+
+ The entities are held in the database by their unique datastore key
+ in order to avoid duplication if an export is restarted.
+ """
+
+ def __init__(self, db_filename, signature, commit_periodicity=1):
+ """Initialize a ResultDatabase object.
+
+ Args:
+ db_filename: The name of the SQLite database to use.
+ signature: A string identifying the important invocation options,
+ used to make sure we are not using an old database.
+ commit_periodicity: How many operations to perform between commits.
+ """
+ self.complete = False
+ create_table = ('create table result (\n'
+ 'id BLOB primary key,\n'
+ 'value BLOB not null)')
+
+ _Database.__init__(self,
+ db_filename,
+ create_table,
+ signature,
+ commit_periodicity=commit_periodicity)
+ if self.existing_table:
+ cursor = self.primary_conn.cursor()
+ cursor.execute('select count(*) from result')
+ self.existing_count = int(cursor.fetchone()[0])
+ else:
+ self.existing_count = 0
+ self.count = self.existing_count
+
+ def _StoreEntity(self, entity_id, entity):
+ """Store an entity in the result database.
+
+ Args:
+ entity_id: A datastore.Key for the entity.
+ entity: The entity to store.
+
+ Returns:
+      True if this entity was not already present in the result database.
+ """
+
+ assert _RunningInThread(self.secondary_thread)
+ assert isinstance(entity_id, datastore.Key), (
+ 'expected a datastore.Key, got a %s' % entity_id.__class__.__name__)
+
+ key_str = buffer(KeyStr(entity_id).encode('utf-8'))
+ self.insert_cursor.execute(
+ 'select count(*) from result where id = ?', (key_str,))
+
+ already_present = self.insert_cursor.fetchone()[0]
+ result = True
+ if already_present:
+ result = False
+ self.insert_cursor.execute('delete from result where id = ?',
+ (key_str,))
+ else:
+ self.count += 1
+ value = entity.Encode()
+ self.insert_cursor.execute(
+ 'insert into result (id, value) values (?, ?)',
+ (key_str, buffer(value)))
+ return result
+
+ def StoreEntities(self, keys, entities):
+ """Store a group of entities in the result database.
+
+ Args:
+ keys: A list of entity keys.
+ entities: A list of entities.
+
+ Returns:
+ The number of new entities stored in the result database.
+ """
+ self._OpenSecondaryConnection()
+ t = time.time()
+ count = 0
+    for entity_id, entity in zip(keys, entities):
+ if self._StoreEntity(entity_id, entity):
+ count += 1
+ logger.debug('%s insert: delta=%.3f',
+ self.db_filename,
+ time.time() - t)
+ logger.debug('Entities transferred total: %s', self.count)
+ self._MaybeCommit()
+ return count
+
+ def ResultsComplete(self):
+ """Marks the result database as containing complete results."""
+ self.complete = True
+
+ def AllEntities(self):
+ """Yields all pairs of (id, value) from the result table."""
+ conn = sqlite3.connect(self.db_filename, isolation_level=None)
+ cursor = conn.cursor()
+
+ cursor.execute(
+ 'select id, value from result order by id')
+
+ for unused_entity_id, entity in cursor:
+ entity_proto = entity_pb.EntityProto(contents=entity)
+ yield datastore.Entity._FromPb(entity_proto)
+
+
+class _ProgressDatabase(_Database):
+ """Persistently record all progress information during an upload.
+
+ This class wraps a very simple SQLite database which records each of
+ the relevant details from a chunk of work. If the loader is
+ resumed, then data is replayed out of the database.
+ """
+
+ def __init__(self,
+ db_filename,
+ sql_type,
+ py_type,
+ signature,
+ commit_periodicity=100):
+ """Initialize the ProgressDatabase instance.
+
+ Args:
+ db_filename: The name of the SQLite database to use.
+ sql_type: A string of the SQL type to use for entity keys.
+ py_type: The python type of entity keys.
+ signature: A string identifying the important invocation options,
+ used to make sure we are not using an old database.
+ commit_periodicity: How many operations to perform between commits.
+ """
+ self.prior_key_end = None
+
+ create_table = ('create table progress (\n'
+ 'id integer primary key autoincrement,\n'
+ 'state integer not null,\n'
+ 'key_start %s,\n'
+ 'key_end %s)'
+ % (sql_type, sql_type))
+ self.py_type = py_type
+
+ index = 'create index i_state on progress (state)'
+ _Database.__init__(self,
+ db_filename,
+ create_table,
+ signature,
+ index=index,
+ commit_periodicity=commit_periodicity)
+
+ def UseProgressData(self):
+ """Returns True if the database has progress information.
+
+ Note there are two basic cases for progress information:
+ 1) All saved records indicate a successful upload. In this case, we
+ need to skip everything transmitted so far and then send the rest.
+ 2) Some records for incomplete transfer are present. These need to be
+ sent again, and then we resume sending after all the successful
+ data.
+
+ Returns:
+      True if the database has progress information.
+
+ Raises:
+ ResumeError: if there is an error retrieving rows from the database.
+ """
+ assert _RunningInThread(self.primary_thread)
+
+ cursor = self.primary_conn.cursor()
+ cursor.execute('select count(*) from progress')
+ row = cursor.fetchone()
+ if row is None:
+ raise ResumeError('Cannot retrieve progress information from database.')
+
+ return row[0] != 0
+
+ def StoreKeys(self, key_start, key_end):
+ """Record a new progress record, returning a key for later updates.
+
+ The specified progress information will be persisted into the database.
+ A unique key will be returned that identifies this progress state. The
+ key is later used to (quickly) update this record.
+
+ For the progress resumption to proceed properly, calls to StoreKeys
+ MUST specify monotonically increasing key ranges. This will result in
+    a database in which the ID, KEY_START, and KEY_END values are all
+ increasing (rather than having ranges out of order).
+
+ NOTE: the above precondition is NOT tested by this method (since it
+ would imply an additional table read or two on each invocation).
+
+ Args:
+      key_start: The starting key of the WorkItem (inclusive).
+      key_end: The end key of the WorkItem (inclusive).
+
+ Returns:
+ A string to later be used as a unique key to update this state.
+ """
+ self._OpenSecondaryConnection()
+
+ assert _RunningInThread(self.secondary_thread)
+ assert (not key_start) or isinstance(key_start, self.py_type), (
+ '%s is a %s, %s expected %s' % (key_start,
+ key_start.__class__,
+ self.__class__.__name__,
+ self.py_type))
+ assert (not key_end) or isinstance(key_end, self.py_type), (
+ '%s is a %s, %s expected %s' % (key_end,
+ key_end.__class__,
+ self.__class__.__name__,
+ self.py_type))
+ assert KeyLEQ(key_start, key_end), '%s not less than %s' % (
+ repr(key_start), repr(key_end))
+
+ self.insert_cursor.execute(
+ 'insert into progress (state, key_start, key_end) values (?, ?, ?)',
+ (STATE_READ, unicode(key_start), unicode(key_end)))
+
+ progress_key = self.insert_cursor.lastrowid
+
+ self._MaybeCommit()
+
+ return progress_key
+
+ def UpdateState(self, key, new_state):
+ """Update a specified progress record with new information.
+
+ Args:
+ key: The key for this progress record, returned from StoreKeys
+ new_state: The new state to associate with this progress record.
+ """
+ self._OpenSecondaryConnection()
+
+ assert _RunningInThread(self.secondary_thread)
+ assert isinstance(new_state, int)
+
+ self.update_cursor.execute('update progress set state=? where id=?',
+ (new_state, key))
+
+ self._MaybeCommit()
+
+ def DeleteKey(self, progress_key):
+ """Delete the entities with the given key from the result database."""
+ self._OpenSecondaryConnection()
+
+ assert _RunningInThread(self.secondary_thread)
+
+ t = time.time()
+ self.insert_cursor.execute(
+ 'delete from progress where rowid = ?', (progress_key,))
+
+ logger.debug('delete: delta=%.3f', time.time() - t)
+
+ self._MaybeCommit()
+
+ def GetProgressStatusGenerator(self):
+ """Get a generator which yields progress information.
+
+ The returned generator will yield a series of 4-tuples that specify
+ progress information about a prior run of the uploader. The 4-tuples
+ have the following values:
+
+ progress_key: The unique key to later update this record with new
+ progress information.
+ state: The last state saved for this progress record.
+ key_start: The starting key of the items for uploading (inclusive).
+ key_end: The ending key of the items for uploading (inclusive).
+
+ After all incompletely-transferred records are provided, then one
+ more 4-tuple will be generated:
+
+ None
+ DATA_CONSUMED_TO_HERE: A unique string value indicating this record
+ is being provided.
+ None
+ key_end: An integer value specifying the last data source key that
+ was handled by the previous run of the uploader.
+
+ The caller should begin uploading records which occur after key_end.
+
+ Yields:
+ Four-tuples of (progress_key, state, key_start, key_end)
+ """
+ conn = sqlite3.connect(self.db_filename, isolation_level=None)
+ cursor = conn.cursor()
+
+ cursor.execute('select max(key_end) from progress')
+
+ result = cursor.fetchone()
+ if result is not None:
+ key_end = result[0]
+ else:
+ logger.debug('No rows in progress database.')
+ return
+
+ self.prior_key_end = key_end
+
+ cursor.execute(
+ 'select id, state, key_start, key_end from progress'
+ ' where state != ?'
+ ' order by id',
+ (STATE_SENT,))
+
+ rows = cursor.fetchall()
+
+ for row in rows:
+ if row is None:
+ break
+ progress_key, state, key_start, key_end = row
+
+ yield progress_key, state, key_start, key_end
+
+ yield None, DATA_CONSUMED_TO_HERE, None, key_end
+
+
+def ProgressDatabase(db_filename, signature):
+ """Returns a database to store upload progress information."""
+ return _ProgressDatabase(db_filename, 'INTEGER', int, signature)
+
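+
+# Illustrative sketch (not part of the original module): the upload progress
+# flow is StoreKeys -> UpdateState, keyed by the row id StoreKeys returns.
+# _Database insists that lookups and updates happen on a single secondary
+# thread, so the calls run in a worker here. The filename and signature are
+# placeholders; the file is created in the current directory.
+def _ExampleProgressDatabase():
+  progress_db = ProgressDatabase('example-progress.db3', 'example-signature')
+
+  def Worker():
+    progress_key = progress_db.StoreKeys(1, 100)
+    progress_db.UpdateState(progress_key, STATE_SENT)
+    progress_db.ThreadComplete()
+  worker = threading.Thread(target=Worker)
+  worker.start()
+  worker.join()
+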
+
+class ExportProgressDatabase(_ProgressDatabase):
+ """A database to store download progress information."""
+
+ def __init__(self, db_filename, signature):
+ """Initialize an ExportProgressDatabase."""
+ _ProgressDatabase.__init__(self,
+ db_filename,
+ 'TEXT',
+ datastore.Key,
+ signature,
+ commit_periodicity=1)
+
+ def UseProgressData(self):
+ """Check if the progress database contains progress data.
+
+ Returns:
+      True if the database contains progress data.
+ """
+ return self.existing_table
+
+
+class StubProgressDatabase(object):
+ """A stub implementation of ProgressDatabase which does nothing."""
+
+ def UseProgressData(self):
+ """Whether the stub database has progress information (it doesn't)."""
+ return False
+
+ def StoreKeys(self, unused_key_start, unused_key_end):
+ """Pretend to store a key in the stub database."""
+ return 'fake-key'
+
+ def UpdateState(self, unused_key, unused_new_state):
+ """Pretend to update the state of a progress item."""
+ pass
+
+ def ThreadComplete(self):
+ """Finalize operations on the stub database (i.e. do nothing)."""
+ pass
+
+
+class _ProgressThreadBase(_ThreadBase):
+ """A thread which records progress information for the upload process.
+
+ The progress information is stored into the provided progress database.
+ This class is not responsible for replaying a prior run's progress
+ information out of the database. Separate mechanisms must be used to
+ resume a prior upload attempt.
+ """
+
+ NAME = 'progress tracking thread'
+
+ def __init__(self, progress_queue, progress_db):
+ """Initialize the ProgressTrackerThread instance.
+
+ Args:
+ progress_queue: A Queue used for tracking progress information.
+ progress_db: The database for tracking progress information; should
+ be an instance of ProgressDatabase.
+ """
+ _ThreadBase.__init__(self)
+
+ self.progress_queue = progress_queue
+ self.db = progress_db
+ self.entities_transferred = 0
+
+ def EntitiesTransferred(self):
+ """Return the total number of unique entities transferred."""
+ return self.entities_transferred
+
+ def UpdateProgress(self, item):
+ """Updates the progress information for the given item.
+
+ Args:
+ item: A work item whose new state will be recorded
+ """
+ raise NotImplementedError()
+
+ def WorkFinished(self):
+ """Performs final actions after the entity transfer is complete."""
+ raise NotImplementedError()
+
+ def PerformWork(self):
+ """Performs the work of a ProgressTrackerThread."""
+ while not self.exit_flag:
+ try:
+ item = self.progress_queue.get(block=True, timeout=1.0)
+ except Queue.Empty:
+ continue
+ if item == _THREAD_SHOULD_EXIT:
+ break
+
+ if item.state == STATE_READ and item.progress_key is None:
+ item.progress_key = self.db.StoreKeys(item.key_start, item.key_end)
+ else:
+ assert item.progress_key is not None
+ self.UpdateProgress(item)
+
+ item.progress_event.set()
+
+ self.progress_queue.task_done()
+
+ self.db.ThreadComplete()
+
+
+
+class ProgressTrackerThread(_ProgressThreadBase):
+ """A thread which records progress information for the upload process.
+
+ The progress information is stored into the provided progress database.
+ This class is not responsible for replaying a prior run's progress
+ information out of the database. Separate mechanisms must be used to
+ resume a prior upload attempt.
+ """
+ NAME = 'progress tracking thread'
+
+ def __init__(self, progress_queue, progress_db):
+ """Initialize the ProgressTrackerThread instance.
+
+ Args:
+ progress_queue: A Queue used for tracking progress information.
+ progress_db: The database for tracking progress information; should
+ be an instance of ProgressDatabase.
+ """
+ _ProgressThreadBase.__init__(self, progress_queue, progress_db)
+
+ def UpdateProgress(self, item):
+ """Update the state of the given WorkItem.
+
+ Args:
+ item: A WorkItem instance.
+ """
+ self.db.UpdateState(item.progress_key, item.state)
+ if item.state == STATE_SENT:
+ self.entities_transferred += item.count
+
+ def WorkFinished(self):
+ """Performs final actions after the entity transfer is complete."""
+ pass
+
+
+class ExportProgressThread(_ProgressThreadBase):
+ """A thread to record progress information and write record data for exports.
+
+ The progress information is stored into a provided progress database.
+ Exported results are stored in the result database and dumped to an output
+ file at the end of the download.
+ """
+
+ def __init__(self, kind, progress_queue, progress_db, result_db):
+ """Initialize the ExportProgressThread instance.
+
+ Args:
+ kind: The kind of entities being stored in the database.
+ progress_queue: A Queue used for tracking progress information.
+ progress_db: The database for tracking progress information; should
+ be an instance of ProgressDatabase.
+ result_db: The database for holding exported entities; should be an
+ instance of ResultDatabase.
+ """
+ _ProgressThreadBase.__init__(self, progress_queue, progress_db)
+
+ self.kind = kind
+ self.existing_count = result_db.existing_count
+ self.result_db = result_db
+
+ def EntitiesTransferred(self):
+ """Return the total number of unique entities transferred."""
+ return self.result_db.count
+
+ def WorkFinished(self):
+ """Write the contents of the result database."""
+ exporter = Exporter.RegisteredExporter(self.kind)
+ exporter.output_entities(self.result_db.AllEntities())
+
+ def UpdateProgress(self, item):
+ """Update the state of the given KeyRangeItem.
+
+ Args:
+      item: A KeyRangeItem instance.
+ """
+ if item.state == STATE_GOT:
+ count = self.result_db.StoreEntities(item.download_result.keys,
+ item.download_result.entities)
+ self.db.DeleteKey(item.progress_key)
+ self.entities_transferred += count
+ else:
+ self.db.UpdateState(item.progress_key, item.state)
+
+
+class MapperProgressThread(_ProgressThreadBase):
+ """A thread to record progress information for maps over the datastore."""
+
+ def __init__(self, kind, progress_queue, progress_db):
+ """Initialize the MapperProgressThread instance.
+
+ Args:
+ kind: The kind of entities being stored in the database.
+ progress_queue: A Queue used for tracking progress information.
+ progress_db: The database for tracking progress information; should
+ be an instance of ProgressDatabase.
+ """
+ _ProgressThreadBase.__init__(self, progress_queue, progress_db)
+
+ self.kind = kind
+ self.mapper = Mapper.RegisteredMapper(self.kind)
+
+ def EntitiesTransferred(self):
+ """Return the total number of unique entities transferred."""
+ return self.entities_transferred
+
+ def WorkFinished(self):
+ """Perform actions after map is complete."""
+ pass
+
+ def UpdateProgress(self, item):
+ """Update the state of the given KeyRangeItem.
+
+ Args:
+      item: A KeyRangeItem instance.
+ """
+ if item.state == STATE_GOT:
+ self.entities_transferred += item.count
+ self.db.DeleteKey(item.progress_key)
+ else:
+ self.db.UpdateState(item.progress_key, item.state)
+
+
+def ParseKey(key_string):
+ """Turn a key stored in the database into a Key or None.
+
+ Args:
+ key_string: The string representation of a Key.
+
+ Returns:
+ A datastore.Key instance or None
+ """
+ if not key_string:
+ return None
+ if key_string == 'None':
+ return None
+ return datastore.Key(encoded=key_string)
+
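+
+# Illustrative sketch (not part of the original module): progress rows store
+# either an encoded key, the literal string 'None', or an empty value, and
+# ParseKey maps the latter two back to None.
+def _ExampleParseKey():
+  assert ParseKey('') is None
+  assert ParseKey('None') is None
+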
+
+def Validate(value, typ):
+ """Checks that value is non-empty and of the right type.
+
+ Args:
+ value: any value
+ typ: a type or tuple of types
+
+ Raises:
+ ValueError: if value is None or empty.
+    TypeError: if value is not of the given type.
+ """
+ if not value:
+ raise ValueError('Value should not be empty; received %s.' % value)
+ elif not isinstance(value, typ):
+ raise TypeError('Expected a %s, but received %s (a %s).' %
+ (typ, value, value.__class__))
+
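+
+# Illustrative sketch (not part of the original module): Validate rejects
+# empty values before it checks the type.
+def _ExampleValidate():
+  Validate('Greeting', basestring)   # passes silently
+  try:
+    Validate('', basestring)         # empty, so ValueError is raised
+  except ValueError:
+    pass
+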
+
+def CheckFile(filename):
+ """Check that the given file exists and can be opened for reading.
+
+ Args:
+ filename: The name of the file.
+
+ Raises:
+    FileNotFoundError: if the given filename is not found.
+ FileNotReadableError: if the given filename is not readable.
+ """
+ if not os.path.exists(filename):
+ raise FileNotFoundError('%s: file not found' % filename)
+ elif not os.access(filename, os.R_OK):
+ raise FileNotReadableError('%s: file not readable' % filename)
+
+
+class Loader(object):
+ """A base class for creating datastore entities from input data.
+
+ To add a handler for bulk loading a new entity kind into your datastore,
+ write a subclass of this class that calls Loader.__init__ from your
+ class's __init__.
+
+ If you need to run extra code to convert entities from the input
+ data, create new properties, or otherwise modify the entities before
+ they're inserted, override handle_entity.
+
+ See the create_entity method for the creation of entities from the
+ (parsed) input data.
+ """
+
+ __loaders = {}
+ kind = None
+ __properties = None
+
+ def __init__(self, kind, properties):
+ """Constructor.
+
+ Populates this Loader's kind and properties map.
+
+ Args:
+ kind: a string containing the entity kind that this loader handles
+
+ properties: list of (name, converter) tuples.
+
+ This is used to automatically convert the input columns into
+ properties. The converter should be a function that takes one
+ argument, a string value from the input file, and returns a
+ correctly typed property value that should be inserted. The
+ tuples in this list should match the columns in your input file,
+ in order.
+
+ For example:
+ [('name', str),
+ ('id_number', int),
+ ('email', datastore_types.Email),
+ ('user', users.User),
+ ('birthdate', lambda x: datetime.datetime.fromtimestamp(float(x))),
+ ('description', datastore_types.Text),
+ ]
+ """
+ Validate(kind, (basestring, tuple))
+ self.kind = kind
+ self.__openfile = open
+ self.__create_csv_reader = csv.reader
+
+ GetImplementationClass(kind)
+
+ Validate(properties, list)
+ for name, fn in properties:
+ Validate(name, basestring)
+ assert callable(fn), (
+ 'Conversion function %s for property %s is not callable.' % (fn, name))
+
+ self.__properties = properties
+
+ @staticmethod
+ def RegisterLoader(loader):
+ """Register loader and the Loader instance for its kind.
+
+ Args:
+ loader: A Loader instance.
+ """
+ Loader.__loaders[loader.kind] = loader
+
+ def alias_old_names(self):
+ """Aliases method names so that Loaders defined with old names work."""
+ aliases = (
+ ('CreateEntity', 'create_entity'),
+ ('HandleEntity', 'handle_entity'),
+ ('GenerateKey', 'generate_key'),
+ )
+ for old_name, new_name in aliases:
+ setattr(Loader, old_name, getattr(Loader, new_name))
+ if hasattr(self.__class__, old_name) and not (
+ getattr(self.__class__, old_name).im_func ==
+ getattr(Loader, new_name).im_func):
+ if hasattr(self.__class__, new_name) and not (
+ getattr(self.__class__, new_name).im_func ==
+ getattr(Loader, new_name).im_func):
+ raise NameClashError(old_name, new_name, self.__class__)
+ setattr(self, new_name, getattr(self, old_name))
+
+ def create_entity(self, values, key_name=None, parent=None):
+ """Creates a entity from a list of property values.
+
+ Args:
+ values: list/tuple of str
+ key_name: if provided, the name for the (single) resulting entity
+ parent: A datastore.Key instance for the parent, or None
+
+ Returns:
+ list of db.Model
+
+ The returned entities are populated with the property values from the
+ argument, converted to native types using the properties map given in
+ the constructor, and passed through handle_entity. They're ready to be
+ inserted.
+
+ Raises:
+ AssertionError: if the number of values doesn't match the number
+ of properties in the properties map.
+ ValueError: if any element of values is None or empty.
+ TypeError: if values is not a list or tuple.
+ """
+ Validate(values, (list, tuple))
+ assert len(values) == len(self.__properties), (
+ 'Expected %d columns, found %d.' %
+ (len(self.__properties), len(values)))
+
+ model_class = GetImplementationClass(self.kind)
+
+ properties = {
+ 'key_name': key_name,
+ 'parent': parent,
+ }
+ for (name, converter), val in zip(self.__properties, values):
+ if converter is bool and val.lower() in ('0', 'false', 'no'):
+ val = False
+ properties[name] = converter(val)
+
+ entity = model_class(**properties)
+ entities = self.handle_entity(entity)
+
+ if entities:
+ if not isinstance(entities, (list, tuple)):
+ entities = [entities]
+
+ for entity in entities:
+ if not isinstance(entity, db.Model):
+ raise TypeError('Expected a db.Model, received %s (a %s).' %
+ (entity, entity.__class__))
+
+ return entities
+
+ def generate_key(self, i, values):
+ """Generates a key_name to be used in creating the underlying object.
+
+ The default implementation returns None.
+
+ This method can be overridden to control the key generation for
+ uploaded entities. The value returned should be None (to use a
+ server generated numeric key), or a string which neither starts
+ with a digit nor has the form __*__ (see
+ http://code.google.com/appengine/docs/python/datastore/keysandentitygroups.html),
+ or a datastore.Key instance.
+
+ If you generate your own string keys, keep in mind:
+
+ 1. The key name for each entity must be unique.
+ 2. If an entity of the same kind and key already exists in the
+ datastore, it will be overwritten.
+
+ Args:
+      i: Number corresponding to this object (assuming this method is called
+        in a loop, this is your current count).
+      values: list/tuple of str.
+
+ Returns:
+ A string to be used as the key_name for an entity.
+ """
+ return None
+
+ def handle_entity(self, entity):
+ """Subclasses can override this to add custom entity conversion code.
+
+ This is called for each entity, after its properties are populated
+ from the input but before it is stored. Subclasses can override
+ this to add custom entity handling code.
+
+ The entity to be inserted should be returned. If multiple entities
+ should be inserted, return a list of entities. If no entities
+ should be inserted, return None or [].
+
+ Args:
+ entity: db.Model
+
+ Returns:
+ db.Model or list of db.Model
+ """
+ return entity
+
+ def initialize(self, filename, loader_opts):
+ """Performs initialization and validation of the input file.
+
+ This implementation checks that the input file exists and can be
+ opened for reading.
+
+ Args:
+ filename: The string given as the --filename flag argument.
+ loader_opts: The string given as the --loader_opts flag argument.
+ """
+ CheckFile(filename)
+
+ def finalize(self):
+ """Performs finalization actions after the upload completes."""
+ pass
+
+ def generate_records(self, filename):
+ """Subclasses can override this to add custom data input code.
+
+ This method must yield fixed-length lists of strings.
+
+ The default implementation uses csv.reader to read CSV rows
+ from filename.
+
+ Args:
+ filename: The string input for the --filename option.
+
+ Yields:
+ Lists of strings.
+ """
+    return CSVGenerator(filename, openfile=self.__openfile,
+                        create_csv_reader=self.__create_csv_reader).Records()
+
+ @staticmethod
+ def RegisteredLoaders():
+ """Returns a dict of the Loader instances that have been created."""
+ return dict(Loader.__loaders)
+
+ @staticmethod
+ def RegisteredLoader(kind):
+ """Returns the loader instance for the given kind if it exists."""
+ return Loader.__loaders[kind]
+
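+
+# Illustrative sketch (not part of the original module): a minimal Loader
+# subclass for a hypothetical two-column 'Greeting' kind. The model class is
+# defined here only so GetImplementationClass can resolve the kind;
+# registration makes the loader discoverable via Loader.RegisteredLoader.
+def _ExampleRegisterGreetingLoader():
+  class Greeting(db.Model):
+    content = db.StringProperty()
+    count = db.IntegerProperty()
+
+  class GreetingLoader(Loader):
+    def __init__(self):
+      Loader.__init__(self, 'Greeting', [('content', str), ('count', int)])
+  Loader.RegisterLoader(GreetingLoader())
+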
+
+class RestoreThread(_ThreadBase):
+ """A thread to read saved entity_pbs from sqlite3."""
+ NAME = 'RestoreThread'
+ _ENTITIES_DONE = 'Entities Done'
+
+ def __init__(self, queue, filename):
+ _ThreadBase.__init__(self)
+ self.queue = queue
+ self.filename = filename
+
+ def PerformWork(self):
+ db_conn = sqlite3.connect(self.filename)
+ cursor = db_conn.cursor()
+ cursor.execute('select id, value from result')
+ for entity_id, value in cursor:
+ self.queue.put([entity_id, value], block=True)
+ self.queue.put(RestoreThread._ENTITIES_DONE, block=True)
+
+
+class RestoreLoader(Loader):
+ """A Loader which imports protobuffers from a file."""
+
+ def __init__(self, kind):
+ self.kind = kind
+
+ def initialize(self, filename, loader_opts):
+ CheckFile(filename)
+ self.queue = Queue.Queue(1000)
+ restore_thread = RestoreThread(self.queue, filename)
+ restore_thread.start()
+
+ def generate_records(self, filename):
+ while True:
+ record = self.queue.get(block=True)
+      if record is RestoreThread._ENTITIES_DONE:
+ break
+ yield record
+
+ def create_entity(self, values, key_name=None, parent=None):
+ key = StrKey(unicode(values[0], 'utf-8'))
+ entity_proto = entity_pb.EntityProto(contents=str(values[1]))
+ entity_proto.mutable_key().CopyFrom(key._Key__reference)
+ return datastore.Entity._FromPb(entity_proto)
+
+
+class Exporter(object):
+ """A base class for serializing datastore entities.
+
+ To add a handler for exporting an entity kind from your datastore,
+ write a subclass of this class that calls Exporter.__init__ from your
+ class's __init__.
+
+ If you need to run extra code to convert entities from the input
+ data, create new properties, or otherwise modify the entities before
+ they're inserted, override handle_entity.
+
+  See the output_entities method for how entity data is written out.
+ """
+
+ __exporters = {}
+ kind = None
+ __properties = None
+
+ def __init__(self, kind, properties):
+ """Constructor.
+
+    Populates this Exporter's kind and properties map.
+
+ Args:
+ kind: a string containing the entity kind that this exporter handles
+
+ properties: list of (name, converter, default) tuples.
+
+ This is used to automatically convert the entities to strings.
+ The converter should be a function that takes one argument, a property
+ value of the appropriate type, and returns a str or unicode. The default
+ is a string to be used if the property is not present, or None to fail
+ with an error if the property is missing.
+
+ For example:
+ [('name', str, None),
+ ('id_number', str, None),
+ ('email', str, ''),
+ ('user', str, None),
+ ('birthdate',
+ lambda x: str(datetime.datetime.fromtimestamp(float(x))),
+ None),
+ ('description', str, ''),
+ ]
+ """
+ Validate(kind, basestring)
+ self.kind = kind
+
+ GetImplementationClass(kind)
+
+ Validate(properties, list)
+ for name, fn, default in properties:
+ Validate(name, basestring)
+ assert callable(fn), (
+ 'Conversion function %s for property %s is not callable.' % (
+ fn, name))
+ if default:
+ Validate(default, basestring)
+
+ self.__properties = properties
+
+ @staticmethod
+ def RegisterExporter(exporter):
+ """Register exporter and the Exporter instance for its kind.
+
+ Args:
+      exporter: An Exporter instance.
+ """
+ Exporter.__exporters[exporter.kind] = exporter
+
+ def __ExtractProperties(self, entity):
+ """Converts an entity into a list of string values.
+
+ Args:
+ entity: An entity to extract the properties from.
+
+ Returns:
+ A list of the properties of the entity.
+
+ Raises:
+ MissingPropertyError: if an expected field on the entity is missing.
+ """
+ encoding = []
+ for name, fn, default in self.__properties:
+ try:
+ encoding.append(fn(entity[name]))
+ except AttributeError:
+ if default is None:
+ raise MissingPropertyError(name)
+ else:
+ encoding.append(default)
+ return encoding
+
+ def __EncodeEntity(self, entity):
+ """Convert the given entity into CSV string.
+
+ Args:
+ entity: The entity to encode.
+
+ Returns:
+ A CSV string.
+ """
+ output = StringIO.StringIO()
+ writer = csv.writer(output, lineterminator='')
+ writer.writerow(self.__ExtractProperties(entity))
+ return output.getvalue()
+
+ def __SerializeEntity(self, entity):
+ """Creates a string representation of an entity.
+
+ Args:
+ entity: The entity to serialize.
+
+ Returns:
+ A serialized representation of an entity.
+ """
+ encoding = self.__EncodeEntity(entity)
+ if not isinstance(encoding, unicode):
+ encoding = unicode(encoding, 'utf-8')
+ encoding = encoding.encode('utf-8')
+ return encoding
+
+ def output_entities(self, entity_generator):
+ """Outputs the downloaded entities.
+
+ This implementation writes CSV.
+
+ Args:
+ entity_generator: A generator that yields the downloaded entities
+ in key order.
+ """
+ CheckOutputFile(self.output_filename)
+ output_file = open(self.output_filename, 'w')
+ logger.debug('Export complete, writing to file')
+ output_file.writelines(self.__SerializeEntity(entity) + '\n'
+ for entity in entity_generator)
+
+ def initialize(self, filename, exporter_opts):
+ """Performs initialization and validation of the output file.
+
+    This implementation checks that the output file does not already exist
+    and can be opened for writing.
+
+ Args:
+ filename: The string given as the --filename flag argument.
+ exporter_opts: The string given as the --exporter_opts flag argument.
+ """
+ CheckOutputFile(filename)
+ self.output_filename = filename
+
+ def finalize(self):
+ """Performs finalization actions after the download completes."""
+ pass
+
+ @staticmethod
+ def RegisteredExporters():
+ """Returns a dictionary of the exporter instances that have been created."""
+ return dict(Exporter.__exporters)
+
+ @staticmethod
+ def RegisteredExporter(kind):
+ """Returns an exporter instance for the given kind if it exists."""
+ return Exporter.__exporters[kind]
+
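+# A minimal sketch (an assumption, not SDK code) of an Exporter subclass
+# as it might appear in a bulkloader config file. The 'Album' kind is
+# hypothetical; the property tuples follow the (name, converter, default)
+# format documented in Exporter.__init__ above:
+#
+#   class AlbumExporter(Exporter):
+#     def __init__(self):
+#       Exporter.__init__(self, 'Album',
+#                         [('title', str, None),
+#                          ('release_year', str, '')])
+#
+#   exporters = [AlbumExporter]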
+
+class DumpExporter(Exporter):
+ """An exporter which dumps protobuffers to a file."""
+
+ def __init__(self, kind, result_db_filename):
+ self.kind = kind
+ self.result_db_filename = result_db_filename
+
+ def output_entities(self, entity_generator):
+ shutil.copyfile(self.result_db_filename, self.output_filename)
+
+
+class MapperRetry(Error):
+ """An exception that indicates a non-fatal error during mapping."""
+
+
+class Mapper(object):
+ """A base class for serializing datastore entities.
+
+ To add a handler for exporting an entity kind from your datastore,
+ write a subclass of this class that calls Mapper.__init__ from your
+ class's __init__.
+
+ You need to implement to batch_apply or apply method on your subclass
+ for the map to do anything.
+ """
+
+ __mappers = {}
+ kind = None
+
+ def __init__(self, kind):
+ """Constructor.
+
+    Populates this Mapper's kind.
+
+ Args:
+ kind: a string containing the entity kind that this mapper handles
+ """
+ Validate(kind, basestring)
+ self.kind = kind
+
+ GetImplementationClass(kind)
+
+ @staticmethod
+ def RegisterMapper(mapper):
+ """Register mapper and the Mapper instance for its kind.
+
+ Args:
+ mapper: A Mapper instance.
+ """
+ Mapper.__mappers[mapper.kind] = mapper
+
+ def initialize(self, mapper_opts):
+ """Performs initialization.
+
+ Args:
+ mapper_opts: The string given as the --mapper_opts flag argument.
+ """
+ pass
+
+ def finalize(self):
+ """Performs finalization actions after the download completes."""
+ pass
+
+ def apply(self, entity):
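+    """Applies the mapper to a single entity; subclasses should override."""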
+ print 'Default map function doing nothing to %s' % entity
+
+ def batch_apply(self, entities):
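+    """Applies the mapper to a batch of entities by calling apply on each."""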
+ for entity in entities:
+ self.apply(entity)
+
+ @staticmethod
+ def RegisteredMappers():
+ """Returns a dictionary of the mapper instances that have been created."""
+ return dict(Mapper.__mappers)
+
+ @staticmethod
+ def RegisteredMapper(kind):
+ """Returns an mapper instance for the given kind if it exists."""
+ return Mapper.__mappers[kind]
+
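+# A sketch (an assumption, not SDK code) of a Mapper subclass. apply() is
+# invoked once per downloaded entity, so a config file could count
+# entities of a hypothetical 'Album' kind like this:
+#
+#   class CountingMapper(Mapper):
+#     def __init__(self):
+#       Mapper.__init__(self, 'Album')
+#       self.count = 0
+#
+#     def apply(self, entity):
+#       self.count += 1
+#
+#   mappers = [CountingMapper]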
+
+class QueueJoinThread(threading.Thread):
+ """A thread that joins a queue and exits.
+
+ Queue joins do not have a timeout. To simulate a queue join with
+ timeout, run this thread and join it with a timeout.
+ """
+
+ def __init__(self, queue):
+ """Initialize a QueueJoinThread.
+
+ Args:
+ queue: The queue for this thread to join.
+ """
+ threading.Thread.__init__(self)
+ assert isinstance(queue, (Queue.Queue, ReQueue))
+ self.queue = queue
+
+ def run(self):
+ """Perform the queue join in this thread."""
+ self.queue.join()
+
+
+def InterruptibleQueueJoin(queue,
+ thread_local,
+ thread_pool,
+ queue_join_thread_factory=QueueJoinThread,
+ check_workers=True):
+ """Repeatedly joins the given ReQueue or Queue.Queue with short timeout.
+
+ Between each timeout on the join, worker threads are checked.
+
+ Args:
+ queue: A Queue.Queue or ReQueue instance.
+ thread_local: A threading.local instance which indicates interrupts.
+ thread_pool: An AdaptiveThreadPool instance.
+ queue_join_thread_factory: Used for dependency injection.
+ check_workers: Whether to interrupt the join on worker death.
+
+ Returns:
+ True unless the queue join is interrupted by SIGINT or worker death.
+ """
+ thread = queue_join_thread_factory(queue)
+ thread.start()
+ while True:
+ thread.join(timeout=.5)
+ if not thread.isAlive():
+ return True
+ if thread_local.shut_down:
+ logger.debug('Queue join interrupted')
+ return False
+ if check_workers:
+ for worker_thread in thread_pool.Threads():
+ if not worker_thread.isAlive():
+ return False
+
+
+def ShutdownThreads(data_source_thread, thread_pool):
+ """Shuts down the worker and data source threads.
+
+ Args:
+ data_source_thread: A running DataSourceThread instance.
+ thread_pool: An AdaptiveThreadPool instance with workers registered.
+ """
+ logger.info('An error occurred. Shutting down...')
+
+ data_source_thread.exit_flag = True
+
+ thread_pool.Shutdown()
+
+ data_source_thread.join(timeout=3.0)
+ if data_source_thread.isAlive():
+ logger.warn('%s hung while trying to exit',
+ data_source_thread.GetFriendlyName())
+
+
+class BulkTransporterApp(object):
+ """Class to wrap bulk transport application functionality."""
+
+ def __init__(self,
+ arg_dict,
+ input_generator_factory,
+ throttle,
+ progress_db,
+ progresstrackerthread_factory,
+ max_queue_size=DEFAULT_QUEUE_SIZE,
+ request_manager_factory=RequestManager,
+ datasourcethread_factory=DataSourceThread,
+ progress_queue_factory=Queue.Queue,
+ thread_pool_factory=adaptive_thread_pool.AdaptiveThreadPool):
+ """Instantiate a BulkTransporterApp.
+
+    Uploads or downloads data to or from the application using HTTP requests.
+ When run, the class will spin up a number of threads to read entities
+ from the data source, pass those to a number of worker threads
+ for sending to the application, and track all of the progress in a
+ small database in case an error or pause/termination requires a
+ restart/resumption of the upload process.
+
+ Args:
+ arg_dict: Dictionary of command line options.
+ input_generator_factory: A factory that creates a WorkItem generator.
+ throttle: A Throttle instance.
+ progress_db: The database to use for replaying/recording progress.
+ progresstrackerthread_factory: Used for dependency injection.
+ max_queue_size: Maximum size of the queues before they should block.
+ request_manager_factory: Used for dependency injection.
+ datasourcethread_factory: Used for dependency injection.
+ progress_queue_factory: Used for dependency injection.
+ thread_pool_factory: Used for dependency injection.
+ """
+ self.app_id = arg_dict['app_id']
+ self.post_url = arg_dict['url']
+ self.kind = arg_dict['kind']
+ self.batch_size = arg_dict['batch_size']
+ self.input_generator_factory = input_generator_factory
+ self.num_threads = arg_dict['num_threads']
+ self.email = arg_dict['email']
+ self.passin = arg_dict['passin']
+ self.dry_run = arg_dict['dry_run']
+ self.throttle = throttle
+ self.progress_db = progress_db
+ self.progresstrackerthread_factory = progresstrackerthread_factory
+ self.max_queue_size = max_queue_size
+ self.request_manager_factory = request_manager_factory
+ self.datasourcethread_factory = datasourcethread_factory
+ self.progress_queue_factory = progress_queue_factory
+ self.thread_pool_factory = thread_pool_factory
+ (scheme,
+ self.host_port, self.url_path,
+ unused_query, unused_fragment) = urlparse.urlsplit(self.post_url)
+ self.secure = (scheme == 'https')
+
+ def Run(self):
+ """Perform the work of the BulkTransporterApp.
+
+ Raises:
+ AuthenticationError: If authentication is required and fails.
+
+ Returns:
+ Error code suitable for sys.exit, e.g. 0 on success, 1 on failure.
+ """
+ self.error = False
+ thread_pool = self.thread_pool_factory(
+ self.num_threads, queue_size=self.max_queue_size)
+
+ self.throttle.Register(threading.currentThread())
+ threading.currentThread().exit_flag = False
+
+ progress_queue = self.progress_queue_factory(self.max_queue_size)
+ request_manager = self.request_manager_factory(self.app_id,
+ self.host_port,
+ self.url_path,
+ self.kind,
+ self.throttle,
+ self.batch_size,
+ self.secure,
+ self.email,
+ self.passin,
+ self.dry_run)
+ try:
+ request_manager.Authenticate()
+ except Exception, e:
+ self.error = True
+ if not isinstance(e, urllib2.HTTPError) or (
+ e.code != 302 and e.code != 401):
+ logger.exception('Exception during authentication')
+ raise AuthenticationError()
+ if (request_manager.auth_called and
+ not request_manager.authenticated):
+ self.error = True
+ raise AuthenticationError('Authentication failed')
+
+ for thread in thread_pool.Threads():
+ self.throttle.Register(thread)
+
+ self.progress_thread = self.progresstrackerthread_factory(
+ progress_queue, self.progress_db)
+
+ if self.progress_db.UseProgressData():
+ logger.debug('Restarting upload using progress database')
+ progress_generator_factory = self.progress_db.GetProgressStatusGenerator
+ else:
+ progress_generator_factory = None
+
+ self.data_source_thread = (
+ self.datasourcethread_factory(request_manager,
+ thread_pool,
+ progress_queue,
+ self.input_generator_factory,
+ progress_generator_factory))
+
+ thread_local = threading.local()
+ thread_local.shut_down = False
+
+ def Interrupt(unused_signum, unused_frame):
+ """Shutdown gracefully in response to a signal."""
+ thread_local.shut_down = True
+ self.error = True
+
+ signal.signal(signal.SIGINT, Interrupt)
+
+ self.progress_thread.start()
+ self.data_source_thread.start()
+
+
+ while not thread_local.shut_down:
+ self.data_source_thread.join(timeout=0.25)
+
+ if self.data_source_thread.isAlive():
+ for thread in list(thread_pool.Threads()) + [self.progress_thread]:
+ if not thread.isAlive():
+ logger.info('Unexpected thread death: %s', thread.getName())
+ thread_local.shut_down = True
+ self.error = True
+ break
+ else:
+ break
+
+ def _Join(ob, msg):
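+      """Joins the given thread or queue, logging progress and timeouts."""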
+ logger.debug('Waiting for %s...', msg)
+ if isinstance(ob, threading.Thread):
+ ob.join(timeout=3.0)
+ if ob.isAlive():
+ logger.debug('Joining %s failed', ob)
+ else:
+ logger.debug('... done.')
+ elif isinstance(ob, (Queue.Queue, ReQueue)):
+ if not InterruptibleQueueJoin(ob, thread_local, thread_pool):
+ ShutdownThreads(self.data_source_thread, thread_pool)
+ else:
+ ob.join()
+ logger.debug('... done.')
+
+ if self.data_source_thread.error or thread_local.shut_down:
+ ShutdownThreads(self.data_source_thread, thread_pool)
+ else:
+ _Join(thread_pool.requeue, 'worker threads to finish')
+
+ thread_pool.Shutdown()
+ thread_pool.JoinThreads()
+ thread_pool.CheckErrors()
+ print ''
+
+ if self.progress_thread.isAlive():
+ InterruptibleQueueJoin(progress_queue, thread_local, thread_pool,
+ check_workers=False)
+ else:
+ logger.warn('Progress thread exited prematurely')
+
+ progress_queue.put(_THREAD_SHOULD_EXIT)
+ _Join(self.progress_thread, 'progress_thread to terminate')
+ self.progress_thread.CheckError()
+ if not thread_local.shut_down:
+ self.progress_thread.WorkFinished()
+
+ self.data_source_thread.CheckError()
+
+ return self.ReportStatus()
+
+ def ReportStatus(self):
+ """Display a message reporting the final status of the transfer."""
+ raise NotImplementedError()
+
+
+class BulkUploaderApp(BulkTransporterApp):
+ """Class to encapsulate bulk uploader functionality."""
+
+ def __init__(self, *args, **kwargs):
+ BulkTransporterApp.__init__(self, *args, **kwargs)
+
+ def ReportStatus(self):
+ """Display a message reporting the final status of the transfer."""
+ total_up, duration = self.throttle.TotalTransferred(
+ remote_api_throttle.BANDWIDTH_UP)
+ s_total_up, unused_duration = self.throttle.TotalTransferred(
+ remote_api_throttle.HTTPS_BANDWIDTH_UP)
+ total_up += s_total_up
+ total = total_up
+    logger.info('%d entities total, %d previously transferred',
+ self.data_source_thread.read_count,
+ self.data_source_thread.xfer_count)
+ transfer_count = self.progress_thread.EntitiesTransferred()
+ logger.info('%d entities (%d bytes) transferred in %.1f seconds',
+ transfer_count, total, duration)
+ if (self.data_source_thread.read_all and
+ transfer_count +
+ self.data_source_thread.xfer_count >=
+ self.data_source_thread.read_count):
+ logger.info('All entities successfully transferred')
+ return 0
+ else:
+ logger.info('Some entities not successfully transferred')
+ return 1
+
+
+class BulkDownloaderApp(BulkTransporterApp):
+ """Class to encapsulate bulk downloader functionality."""
+
+ def __init__(self, *args, **kwargs):
+ BulkTransporterApp.__init__(self, *args, **kwargs)
+
+ def ReportStatus(self):
+ """Display a message reporting the final status of the transfer."""
+ total_down, duration = self.throttle.TotalTransferred(
+ remote_api_throttle.BANDWIDTH_DOWN)
+ s_total_down, unused_duration = self.throttle.TotalTransferred(
+ remote_api_throttle.HTTPS_BANDWIDTH_DOWN)
+ total_down += s_total_down
+ total = total_down
+ existing_count = self.progress_thread.existing_count
+ xfer_count = self.progress_thread.EntitiesTransferred()
+ logger.info('Have %d entities, %d previously transferred',
+ xfer_count, existing_count)
+ logger.info('%d entities (%d bytes) transferred in %.1f seconds',
+ xfer_count, total, duration)
+ if self.error:
+ return 1
+ else:
+ return 0
+
+
+class BulkMapperApp(BulkTransporterApp):
+ """Class to encapsulate bulk map functionality."""
+
+ def __init__(self, *args, **kwargs):
+ BulkTransporterApp.__init__(self, *args, **kwargs)
+
+ def ReportStatus(self):
+ """Display a message reporting the final status of the transfer."""
+ total_down, duration = self.throttle.TotalTransferred(
+ remote_api_throttle.BANDWIDTH_DOWN)
+ s_total_down, unused_duration = self.throttle.TotalTransferred(
+ remote_api_throttle.HTTPS_BANDWIDTH_DOWN)
+ total_down += s_total_down
+ total = total_down
+ xfer_count = self.progress_thread.EntitiesTransferred()
+ logger.info('The following may be inaccurate if any mapper tasks '
+ 'encountered errors and had to be retried.')
+ logger.info('Applied mapper to %s entities.',
+ xfer_count)
+ logger.info('%s entities (%s bytes) transferred in %.1f seconds',
+ xfer_count, total, duration)
+ if self.error:
+ return 1
+ else:
+ return 0
+
+
+def PrintUsageExit(code):
+ """Prints usage information and exits with a status code.
+
+ Args:
+ code: Status code to pass to sys.exit() after displaying usage information.
+ """
+ print __doc__ % {'arg0': sys.argv[0]}
+ sys.stdout.flush()
+ sys.stderr.flush()
+ sys.exit(code)
+
+
+REQUIRED_OPTION = object()
+
+
+FLAG_SPEC = ['debug',
+ 'help',
+ 'url=',
+ 'filename=',
+ 'batch_size=',
+ 'kind=',
+ 'num_threads=',
+ 'bandwidth_limit=',
+ 'rps_limit=',
+ 'http_limit=',
+ 'db_filename=',
+ 'app_id=',
+ 'config_file=',
+ 'has_header',
+ 'csv_has_header',
+ 'auth_domain=',
+ 'result_db_filename=',
+ 'download',
+ 'loader_opts=',
+ 'exporter_opts=',
+ 'log_file=',
+ 'mapper_opts=',
+ 'email=',
+ 'passin',
+ 'map',
+ 'dry_run',
+ 'dump',
+ 'restore',
+ ]
+
+
+def ParseArguments(argv, die_fn=lambda: PrintUsageExit(1)):
+ """Parses command-line arguments.
+
+ Prints out a help message if -h or --help is supplied.
+
+ Args:
+ argv: List of command-line arguments.
+ die_fn: Function to invoke to end the program.
+
+ Returns:
+ A dictionary containing the value of command-line options.
+ """
+ opts, unused_args = getopt.getopt(
+ argv[1:],
+ 'h',
+ FLAG_SPEC)
+
+ arg_dict = {}
+
+ arg_dict['url'] = REQUIRED_OPTION
+ arg_dict['filename'] = None
+ arg_dict['config_file'] = None
+ arg_dict['kind'] = None
+
+ arg_dict['batch_size'] = None
+ arg_dict['num_threads'] = DEFAULT_THREAD_COUNT
+ arg_dict['bandwidth_limit'] = DEFAULT_BANDWIDTH_LIMIT
+ arg_dict['rps_limit'] = DEFAULT_RPS_LIMIT
+ arg_dict['http_limit'] = DEFAULT_REQUEST_LIMIT
+
+ arg_dict['db_filename'] = None
+ arg_dict['app_id'] = ''
+ arg_dict['auth_domain'] = 'gmail.com'
+ arg_dict['has_header'] = False
+ arg_dict['result_db_filename'] = None
+ arg_dict['download'] = False
+ arg_dict['loader_opts'] = None
+ arg_dict['exporter_opts'] = None
+ arg_dict['debug'] = False
+ arg_dict['log_file'] = None
+ arg_dict['email'] = None
+ arg_dict['passin'] = False
+ arg_dict['mapper_opts'] = None
+ arg_dict['map'] = False
+ arg_dict['dry_run'] = False
+ arg_dict['dump'] = False
+ arg_dict['restore'] = False
+
+ def ExpandFilename(filename):
+ """Expand shell variables and ~usernames in filename."""
+ return os.path.expandvars(os.path.expanduser(filename))
+
+ for option, value in opts:
+ if option == '--debug':
+ arg_dict['debug'] = True
+ elif option in ('-h', '--help'):
+ PrintUsageExit(0)
+ elif option == '--url':
+ arg_dict['url'] = value
+ elif option == '--filename':
+ arg_dict['filename'] = ExpandFilename(value)
+ elif option == '--batch_size':
+ arg_dict['batch_size'] = int(value)
+ elif option == '--kind':
+ arg_dict['kind'] = value
+ elif option == '--num_threads':
+ arg_dict['num_threads'] = int(value)
+ elif option == '--bandwidth_limit':
+ arg_dict['bandwidth_limit'] = int(value)
+ elif option == '--rps_limit':
+ arg_dict['rps_limit'] = int(value)
+ elif option == '--http_limit':
+ arg_dict['http_limit'] = int(value)
+ elif option == '--db_filename':
+ arg_dict['db_filename'] = ExpandFilename(value)
+ elif option == '--app_id':
+ arg_dict['app_id'] = value
+ elif option == '--config_file':
+ arg_dict['config_file'] = ExpandFilename(value)
+ elif option == '--auth_domain':
+ arg_dict['auth_domain'] = value
+ elif option == '--has_header':
+ arg_dict['has_header'] = True
+ elif option == '--csv_has_header':
+ print >>sys.stderr, ('--csv_has_header is deprecated, please use '
+ '--has_header.')
+ arg_dict['has_header'] = True
+ elif option == '--result_db_filename':
+ arg_dict['result_db_filename'] = ExpandFilename(value)
+ elif option == '--download':
+ arg_dict['download'] = True
+ elif option == '--loader_opts':
+ arg_dict['loader_opts'] = value
+ elif option == '--exporter_opts':
+ arg_dict['exporter_opts'] = value
+ elif option == '--log_file':
+ arg_dict['log_file'] = ExpandFilename(value)
+ elif option == '--email':
+ arg_dict['email'] = value
+ elif option == '--passin':
+ arg_dict['passin'] = True
+ elif option == '--map':
+ arg_dict['map'] = True
+ elif option == '--mapper_opts':
+ arg_dict['mapper_opts'] = value
+ elif option == '--dry_run':
+ arg_dict['dry_run'] = True
+ elif option == '--dump':
+ arg_dict['dump'] = True
+ elif option == '--restore':
+ arg_dict['restore'] = True
+
+ return ProcessArguments(arg_dict, die_fn=die_fn)
+
+
+def ThrottleLayout(bandwidth_limit, http_limit, rps_limit):
+ """Return a dictionary indicating the throttle options."""
+ bulkloader_limits = dict(remote_api_throttle.NO_LIMITS)
+ bulkloader_limits.update({
+ remote_api_throttle.BANDWIDTH_UP: bandwidth_limit,
+ remote_api_throttle.BANDWIDTH_DOWN: bandwidth_limit,
+ remote_api_throttle.REQUESTS: http_limit,
+ remote_api_throttle.HTTPS_BANDWIDTH_UP: bandwidth_limit,
+ remote_api_throttle.HTTPS_BANDWIDTH_DOWN: bandwidth_limit,
+ remote_api_throttle.HTTPS_REQUESTS: http_limit,
+ remote_api_throttle.ENTITIES_FETCHED: rps_limit,
+ remote_api_throttle.ENTITIES_MODIFIED: rps_limit,
+ })
+ return bulkloader_limits
+
+
+def CheckOutputFile(filename):
+ """Check that the given file does not exist and can be opened for writing.
+
+ Args:
+ filename: The name of the file.
+
+ Raises:
+    FileExistsError: if the given filename already exists.
+    FileNotWritableError: if the directory containing the filename is not
+      writable.
+ """
+ full_path = os.path.abspath(filename)
+ if os.path.exists(full_path):
+ raise FileExistsError('%s: output file exists' % filename)
+ elif not os.access(os.path.dirname(full_path), os.W_OK):
+ raise FileNotWritableError(
+ '%s: not writable' % os.path.dirname(full_path))
+
+
+def LoadConfig(config_file_name, exit_fn=sys.exit):
+ """Loads a config file and registers any Loader classes present.
+
+ Args:
+ config_file_name: The name of the configuration file.
+ exit_fn: Used for dependency injection.
+ """
+ if config_file_name:
+ config_file = open(config_file_name, 'r')
+ try:
+ bulkloader_config = imp.load_module(
+ 'bulkloader_config', config_file, config_file_name,
+ ('', 'r', imp.PY_SOURCE))
+ sys.modules['bulkloader_config'] = bulkloader_config
+
+ if hasattr(bulkloader_config, 'loaders'):
+ for cls in bulkloader_config.loaders:
+ Loader.RegisterLoader(cls())
+
+ if hasattr(bulkloader_config, 'exporters'):
+ for cls in bulkloader_config.exporters:
+ Exporter.RegisterExporter(cls())
+
+ if hasattr(bulkloader_config, 'mappers'):
+ for cls in bulkloader_config.mappers:
+ Mapper.RegisterMapper(cls())
+
+ except NameError, e:
+ m = re.search(r"[^']*'([^']*)'.*", str(e))
+ if m.groups() and m.group(1) == 'Loader':
+ print >>sys.stderr, """
+The config file format has changed and you appear to be using an old-style
+config file. Please make the following changes:
+
+1. At the top of the file, add this:
+
+from google.appengine.tools.bulkloader import Loader
+
+2. For each of your Loader subclasses add the following at the end of the
+   __init__ definition:
+
+self.alias_old_names()
+
+3. At the bottom of the file, add this:
+
+loaders = [MyLoader1,...,MyLoaderN]
+
+Where MyLoader1,...,MyLoaderN are the Loader subclasses you want the bulkloader
+to have access to.
+"""
+ exit_fn(1)
+ else:
+ raise
+ except Exception, e:
+ if isinstance(e, NameClashError) or 'bulkloader_config' in vars() and (
+ hasattr(bulkloader_config, 'bulkloader') and
+ isinstance(e, bulkloader_config.bulkloader.NameClashError)):
+ print >> sys.stderr, (
+          'Found both %s and %s while aliasing old names on %s.' %
+ (e.old_name, e.new_name, e.klass))
+ exit_fn(1)
+ else:
+ raise
+
+
+def GetArgument(kwargs, name, die_fn):
+ """Get the value of the key name in kwargs, or die with die_fn.
+
+ Args:
+ kwargs: A dictionary containing the options for the bulkloader.
+ name: The name of a bulkloader option.
+ die_fn: The function to call to exit the program.
+
+ Returns:
+    The value of kwargs[name] if name is in kwargs; otherwise die_fn is
+    called.
+ """
+ if name in kwargs:
+ return kwargs[name]
+ else:
+ print >>sys.stderr, '%s argument required' % name
+ die_fn()
+
+
+def _MakeSignature(app_id=None,
+ url=None,
+ kind=None,
+ db_filename=None,
+ perform_map=None,
+ download=None,
+ has_header=None,
+ result_db_filename=None,
+ dump=None,
+ restore=None):
+ """Returns a string that identifies the important options for the database."""
+ if download:
+ result_db_line = 'result_db: %s' % result_db_filename
+ else:
+ result_db_line = ''
+ return u"""
+ app_id: %s
+ url: %s
+ kind: %s
+ download: %s
+ map: %s
+ dump: %s
+ restore: %s
+ progress_db: %s
+ has_header: %s
+ %s
+ """ % (app_id, url, kind, download, perform_map, dump, restore, db_filename,
+ has_header, result_db_line)
+
+
+def ProcessArguments(arg_dict,
+ die_fn=lambda: sys.exit(1)):
+ """Processes non command-line input arguments.
+
+ Args:
+ arg_dict: Dictionary containing the values of bulkloader options.
+ die_fn: Function to call in case of an error during argument processing.
+
+ Returns:
+ A dictionary of bulkloader options.
+ """
+ app_id = GetArgument(arg_dict, 'app_id', die_fn)
+ url = GetArgument(arg_dict, 'url', die_fn)
+ dump = GetArgument(arg_dict, 'dump', die_fn)
+ restore = GetArgument(arg_dict, 'restore', die_fn)
+ filename = GetArgument(arg_dict, 'filename', die_fn)
+ batch_size = GetArgument(arg_dict, 'batch_size', die_fn)
+ kind = GetArgument(arg_dict, 'kind', die_fn)
+ db_filename = GetArgument(arg_dict, 'db_filename', die_fn)
+ config_file = GetArgument(arg_dict, 'config_file', die_fn)
+ result_db_filename = GetArgument(arg_dict, 'result_db_filename', die_fn)
+ download = GetArgument(arg_dict, 'download', die_fn)
+ log_file = GetArgument(arg_dict, 'log_file', die_fn)
+ perform_map = GetArgument(arg_dict, 'map', die_fn)
+
+ errors = []
+
+ if batch_size is None:
+ if download or perform_map:
+ arg_dict['batch_size'] = DEFAULT_DOWNLOAD_BATCH_SIZE
+ else:
+ arg_dict['batch_size'] = DEFAULT_BATCH_SIZE
+ elif batch_size <= 0:
+ errors.append('batch_size must be at least 1')
+
+ if db_filename is None:
+ arg_dict['db_filename'] = time.strftime(
+ 'bulkloader-progress-%Y%m%d.%H%M%S.sql3')
+
+ if result_db_filename is None:
+ arg_dict['result_db_filename'] = time.strftime(
+ 'bulkloader-results-%Y%m%d.%H%M%S.sql3')
+
+ if log_file is None:
+ arg_dict['log_file'] = time.strftime('bulkloader-log-%Y%m%d.%H%M%S')
+
+ required = '%s argument required'
+
+ if config_file is None and not dump and not restore:
+ errors.append('One of --config_file, --dump, or --restore is required')
+
+ if url is REQUIRED_OPTION:
+ errors.append(required % 'url')
+
+ if not filename and not perform_map:
+ errors.append(required % 'filename')
+
+ if kind is None:
+    if download or perform_map:
+ errors.append('kind argument required for this operation')
+ elif not dump and not restore:
+ errors.append(
+ 'kind argument required unless --dump or --restore is specified')
+
+ if not app_id:
+ if url and url is not REQUIRED_OPTION:
+ (unused_scheme, host_port, unused_url_path,
+ unused_query, unused_fragment) = urlparse.urlsplit(url)
+ suffix_idx = host_port.find('.appspot.com')
+ if suffix_idx > -1:
+ arg_dict['app_id'] = host_port[:suffix_idx]
+ elif host_port.split(':')[0].endswith('google.com'):
+ arg_dict['app_id'] = host_port.split('.')[0]
+ else:
+ errors.append('app_id argument required for non appspot.com domains')
+
+ if errors:
+ print >>sys.stderr, '\n'.join(errors)
+ die_fn()
+
+ return arg_dict
+
+
+def ParseKind(kind):
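+  """Converts a kind argument of the form '(kind1,kind2)' into a tuple.
+
+  Any other kind value is returned unchanged.
+  """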
+ if kind and kind[0] == '(' and kind[-1] == ')':
+ return tuple(kind[1:-1].split(','))
+ else:
+ return kind
+
+
+def _PerformBulkload(arg_dict,
+ check_file=CheckFile,
+ check_output_file=CheckOutputFile):
+ """Runs the bulkloader, given the command line options.
+
+ Args:
+ arg_dict: Dictionary of bulkloader options.
+ check_file: Used for dependency injection.
+ check_output_file: Used for dependency injection.
+
+ Returns:
+ An exit code.
+
+ Raises:
+ ConfigurationError: if inconsistent options are passed.
+ """
+ app_id = arg_dict['app_id']
+ url = arg_dict['url']
+ filename = arg_dict['filename']
+ batch_size = arg_dict['batch_size']
+ kind = arg_dict['kind']
+ num_threads = arg_dict['num_threads']
+ bandwidth_limit = arg_dict['bandwidth_limit']
+ rps_limit = arg_dict['rps_limit']
+ http_limit = arg_dict['http_limit']
+ db_filename = arg_dict['db_filename']
+ config_file = arg_dict['config_file']
+ auth_domain = arg_dict['auth_domain']
+ has_header = arg_dict['has_header']
+ download = arg_dict['download']
+ result_db_filename = arg_dict['result_db_filename']
+ loader_opts = arg_dict['loader_opts']
+ exporter_opts = arg_dict['exporter_opts']
+ mapper_opts = arg_dict['mapper_opts']
+ email = arg_dict['email']
+ passin = arg_dict['passin']
+ perform_map = arg_dict['map']
+ dump = arg_dict['dump']
+ restore = arg_dict['restore']
+
+ os.environ['AUTH_DOMAIN'] = auth_domain
+
+ kind = ParseKind(kind)
+
+ if not dump and not restore:
+ check_file(config_file)
+
+ if download and perform_map:
+ logger.error('--download and --map are mutually exclusive.')
+
+ if download or dump:
+ check_output_file(filename)
+ elif not perform_map:
+ check_file(filename)
+
+ if dump:
+ Exporter.RegisterExporter(DumpExporter(kind, result_db_filename))
+ elif restore:
+ Loader.RegisterLoader(RestoreLoader(kind))
+ else:
+ LoadConfig(config_file)
+
+ os.environ['APPLICATION_ID'] = app_id
+
+ throttle_layout = ThrottleLayout(bandwidth_limit, http_limit, rps_limit)
+ logger.info('Throttling transfers:')
+ logger.info('Bandwidth: %s bytes/second', bandwidth_limit)
+ logger.info('HTTP connections: %s/second', http_limit)
+ logger.info('Entities inserted/fetched/modified: %s/second', rps_limit)
+
+ throttle = remote_api_throttle.Throttle(layout=throttle_layout)
+ signature = _MakeSignature(app_id=app_id,
+ url=url,
+ kind=kind,
+ db_filename=db_filename,
+ download=download,
+ perform_map=perform_map,
+ has_header=has_header,
+ result_db_filename=result_db_filename,
+ dump=dump,
+ restore=restore)
+
+
+ max_queue_size = max(DEFAULT_QUEUE_SIZE, 3 * num_threads + 5)
+
+ if db_filename == 'skip':
+ progress_db = StubProgressDatabase()
+ elif not download and not perform_map and not dump:
+ progress_db = ProgressDatabase(db_filename, signature)
+ else:
+ progress_db = ExportProgressDatabase(db_filename, signature)
+
+ return_code = 1
+
+ if not download and not perform_map and not dump:
+ loader = Loader.RegisteredLoader(kind)
+ try:
+ loader.initialize(filename, loader_opts)
+ workitem_generator_factory = GetCSVGeneratorFactory(
+ kind, filename, batch_size, has_header)
+
+ app = BulkUploaderApp(arg_dict,
+ workitem_generator_factory,
+ throttle,
+ progress_db,
+ ProgressTrackerThread,
+ max_queue_size,
+ RequestManager,
+ DataSourceThread,
+ Queue.Queue)
+ try:
+ return_code = app.Run()
+ except AuthenticationError:
+ logger.info('Authentication Failed')
+ finally:
+ loader.finalize()
+ elif not perform_map:
+ result_db = ResultDatabase(result_db_filename, signature)
+ exporter = Exporter.RegisteredExporter(kind)
+ try:
+ exporter.initialize(filename, exporter_opts)
+
+ def KeyRangeGeneratorFactory(request_manager, progress_queue,
+ progress_gen):
+ return KeyRangeItemGenerator(request_manager, kind, progress_queue,
+ progress_gen, DownloadItem)
+
+ def ExportProgressThreadFactory(progress_queue, progress_db):
+ return ExportProgressThread(kind,
+ progress_queue,
+ progress_db,
+ result_db)
+
+ app = BulkDownloaderApp(arg_dict,
+ KeyRangeGeneratorFactory,
+ throttle,
+ progress_db,
+ ExportProgressThreadFactory,
+ 0,
+ RequestManager,
+ DataSourceThread,
+ Queue.Queue)
+ try:
+ return_code = app.Run()
+ except AuthenticationError:
+ logger.info('Authentication Failed')
+ finally:
+ exporter.finalize()
+ elif not download:
+ mapper = Mapper.RegisteredMapper(kind)
+ try:
+ mapper.initialize(mapper_opts)
+ def KeyRangeGeneratorFactory(request_manager, progress_queue,
+ progress_gen):
+ return KeyRangeItemGenerator(request_manager, kind, progress_queue,
+ progress_gen, MapperItem)
+
+ def MapperProgressThreadFactory(progress_queue, progress_db):
+ return MapperProgressThread(kind,
+ progress_queue,
+ progress_db)
+
+ app = BulkMapperApp(arg_dict,
+ KeyRangeGeneratorFactory,
+ throttle,
+ progress_db,
+ MapperProgressThreadFactory,
+ 0,
+ RequestManager,
+ DataSourceThread,
+ Queue.Queue)
+ try:
+ return_code = app.Run()
+ except AuthenticationError:
+ logger.info('Authentication Failed')
+ finally:
+ mapper.finalize()
+ return return_code
+
+
+def SetupLogging(arg_dict):
+ """Sets up logging for the bulkloader.
+
+ Args:
+ arg_dict: Dictionary mapping flag names to their arguments.
+ """
+ format = '[%(levelname)-8s %(asctime)s %(filename)s] %(message)s'
+ debug = arg_dict['debug']
+ log_file = arg_dict['log_file']
+
+ logger.setLevel(logging.DEBUG)
+
+ logger.propagate = False
+
+ file_handler = logging.FileHandler(log_file, 'w')
+ file_handler.setLevel(logging.DEBUG)
+ file_formatter = logging.Formatter(format)
+ file_handler.setFormatter(file_formatter)
+ logger.addHandler(file_handler)
+
+ console = logging.StreamHandler()
+ level = logging.INFO
+ if debug:
+ level = logging.DEBUG
+ console.setLevel(level)
+ console_format = '[%(levelname)-8s] %(message)s'
+ formatter = logging.Formatter(console_format)
+ console.setFormatter(formatter)
+ logger.addHandler(console)
+
+ logger.info('Logging to %s', log_file)
+
+ remote_api_throttle.logger.setLevel(level)
+ remote_api_throttle.logger.addHandler(file_handler)
+ remote_api_throttle.logger.addHandler(console)
+
+ appengine_rpc.logger.setLevel(logging.WARN)
+
+ adaptive_thread_pool.logger.setLevel(logging.DEBUG)
+ adaptive_thread_pool.logger.addHandler(console)
+ adaptive_thread_pool.logger.addHandler(file_handler)
+ adaptive_thread_pool.logger.propagate = False
+
+
+def Run(arg_dict):
+ """Sets up and runs the bulkloader, given the options as keyword arguments.
+
+ Args:
+ arg_dict: Dictionary of bulkloader options
+
+ Returns:
+ An exit code.
+ """
+ arg_dict = ProcessArguments(arg_dict)
+
+ SetupLogging(arg_dict)
+
+ return _PerformBulkload(arg_dict)
+
+
+def main(argv):
+ """Runs the importer from the command line."""
+
+ arg_dict = ParseArguments(argv)
+
+ errors = ['%s argument required' % key
+ for (key, value) in arg_dict.iteritems()
+ if value is REQUIRED_OPTION]
+ if errors:
+ print >>sys.stderr, '\n'.join(errors)
+ PrintUsageExit(1)
+
+ SetupLogging(arg_dict)
+ return _PerformBulkload(arg_dict)
+
+
+if __name__ == '__main__':
+ sys.exit(main(sys.argv))
diff --git a/google_appengine/google/appengine/tools/bulkloader.pyc b/google_appengine/google/appengine/tools/bulkloader.pyc
new file mode 100644
index 0000000..119fff9
--- /dev/null
+++ b/google_appengine/google/appengine/tools/bulkloader.pyc
Binary files differ
diff --git a/google_appengine/google/appengine/tools/dev_appserver.py b/google_appengine/google/appengine/tools/dev_appserver.py
new file mode 100755
index 0000000..b7e5f82
--- /dev/null
+++ b/google_appengine/google/appengine/tools/dev_appserver.py
@@ -0,0 +1,3542 @@
+#!/usr/bin/env python
+#
+# Copyright 2007 Google Inc.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
+"""Pure-Python application server for testing applications locally.
+
+Given a port and the paths to a valid application directory (with an 'app.yaml'
+file), the external library directory, and a relative URL to use for logins,
+creates an HTTP server that can be used to test an application locally. Uses
+stubs instead of actual APIs when SetupStubs() is called first.
+
+Example:
+ root_path = '/path/to/application/directory'
+ login_url = '/login'
+ port = 8080
+ template_dir = '/path/to/appserver/templates'
+ server = dev_appserver.CreateServer(root_path, login_url, port, template_dir)
+ server.serve_forever()
+"""
+
+
+from google.appengine.tools import os_compat
+
+import __builtin__
+import BaseHTTPServer
+import Cookie
+import base64
+import cStringIO
+import cgi
+import cgitb
+
+try:
+ import distutils.util
+except ImportError:
+ pass
+
+import dummy_thread
+import email.Utils
+import errno
+import heapq
+import httplib
+import imp
+import inspect
+import itertools
+import locale
+import logging
+import mimetools
+import mimetypes
+import os
+import pickle
+import pprint
+import random
+import select
+
+import re
+import sre_compile
+import sre_constants
+import sre_parse
+
+import socket
+import sys
+import time
+import traceback
+import types
+import urlparse
+import urllib
+
+import google
+from google.pyglib import gexcept
+
+from google.appengine.api import apiproxy_stub_map
+from google.appengine.api import appinfo
+from google.appengine.api import croninfo
+from google.appengine.api import datastore_admin
+from google.appengine.api import datastore_file_stub
+from google.appengine.api import mail_stub
+from google.appengine.api import urlfetch_stub
+from google.appengine.api import user_service_stub
+from google.appengine.api import yaml_errors
+from google.appengine.api.capabilities import capability_stub
+from google.appengine.api.labs.taskqueue import taskqueue_stub
+from google.appengine.api.memcache import memcache_stub
+from google.appengine.api.xmpp import xmpp_service_stub
+
+from google.appengine import dist
+
+from google.appengine.tools import dev_appserver_index
+from google.appengine.tools import dev_appserver_login
+
+
+PYTHON_LIB_VAR = '$PYTHON_LIB'
+DEVEL_CONSOLE_PATH = PYTHON_LIB_VAR + '/google/appengine/ext/admin'
+
+FILE_MISSING_EXCEPTIONS = frozenset([errno.ENOENT, errno.ENOTDIR])
+
+MAX_URL_LENGTH = 2047
+
+HEADER_TEMPLATE = 'logging_console_header.html'
+SCRIPT_TEMPLATE = 'logging_console.js'
+MIDDLE_TEMPLATE = 'logging_console_middle.html'
+FOOTER_TEMPLATE = 'logging_console_footer.html'
+
+DEFAULT_ENV = {
+ 'GATEWAY_INTERFACE': 'CGI/1.1',
+ 'AUTH_DOMAIN': 'gmail.com',
+ 'TZ': 'UTC',
+}
+
+DEFAULT_SELECT_DELAY = 30.0
+
+for ext, mime_type in (('.asc', 'text/plain'),
+ ('.diff', 'text/plain'),
+ ('.csv', 'text/comma-separated-values'),
+ ('.rss', 'application/rss+xml'),
+ ('.text', 'text/plain'),
+ ('.wbmp', 'image/vnd.wap.wbmp')):
+ mimetypes.add_type(mime_type, ext)
+
+MAX_RUNTIME_RESPONSE_SIZE = 10 << 20
+
+MAX_REQUEST_SIZE = 10 * 1024 * 1024
+
+API_VERSION = '1'
+
+SITE_PACKAGES = os.path.normcase(os.path.join(os.path.dirname(os.__file__),
+ 'site-packages'))
+
+
+
+class Error(Exception):
+ """Base-class for exceptions in this module."""
+
+
+class InvalidAppConfigError(Error):
+ """The supplied application configuration file is invalid."""
+
+
+class AppConfigNotFoundError(Error):
+ """Application configuration file not found."""
+
+
+class TemplatesNotLoadedError(Error):
+ """Templates for the debugging console were not loaded."""
+
+
+
+def SplitURL(relative_url):
+ """Splits a relative URL into its path and query-string components.
+
+ Args:
+ relative_url: String containing the relative URL (often starting with '/')
+ to split. Should be properly escaped as www-form-urlencoded data.
+
+ Returns:
+ Tuple (script_name, query_string) where:
+ script_name: Relative URL of the script that was accessed.
+ query_string: String containing everything after the '?' character.
+ """
+ (unused_scheme, unused_netloc, path, query,
+ unused_fragment) = urlparse.urlsplit(relative_url)
+ return path, query
+
+
+def GetFullURL(server_name, server_port, relative_url):
+ """Returns the full, original URL used to access the relative URL.
+
+ Args:
+ server_name: Name of the local host, or the value of the 'host' header
+ from the request.
+ server_port: Port on which the request was served (string or int).
+ relative_url: Relative URL that was accessed, including query string.
+
+ Returns:
+ String containing the original URL.
+ """
+ if str(server_port) != '80':
+ netloc = '%s:%s' % (server_name, server_port)
+ else:
+ netloc = server_name
+ return 'http://%s%s' % (netloc, relative_url)
+
+
+
+class URLDispatcher(object):
+ """Base-class for handling HTTP requests."""
+
+ def Dispatch(self,
+ relative_url,
+ path,
+ headers,
+ infile,
+ outfile,
+ base_env_dict=None):
+ """Dispatch and handle an HTTP request.
+
+ base_env_dict should contain at least these CGI variables:
+ REQUEST_METHOD, REMOTE_ADDR, SERVER_SOFTWARE, SERVER_NAME,
+ SERVER_PROTOCOL, SERVER_PORT
+
+ Args:
+ relative_url: String containing the URL accessed.
+ path: Local path of the resource that was matched; back-references will be
+ replaced by values matched in the relative_url. Path may be relative
+ or absolute, depending on the resource being served (e.g., static files
+ will have an absolute path; scripts will be relative).
+ headers: Instance of mimetools.Message with headers from the request.
+ infile: File-like object with input data from the request.
+ outfile: File-like object where output data should be written.
+ base_env_dict: Dictionary of CGI environment parameters if available.
+ Defaults to None.
+
+ Returns:
+ None if request handling is complete.
+ Tuple (path, headers, input_file) for an internal redirect:
+ path: Path of URL to redirect to.
+ headers: Headers to send to other dispatcher.
+ input_file: New input to send to new dispatcher.
+ """
+ raise NotImplementedError
+
+ def EndRedirect(self, dispatched_output, original_output):
+ """Process the end of an internal redirect.
+
+ This method is called after all subsequent dispatch requests have finished.
+ By default the output from the dispatched process is copied to the original.
+
+ This will not be called on dispatchers that do not return an internal
+ redirect.
+
+ Args:
+ dispatched_output: StringIO buffer containing the results from the
+        dispatched request.
+ original_output: The original output file.
+ """
+ original_output.write(dispatched_output.read())
+
+
+class URLMatcher(object):
+ """Matches an arbitrary URL using a list of URL patterns from an application.
+
+ Each URL pattern has an associated URLDispatcher instance and path to the
+ resource's location on disk. See AddURL for more details. The first pattern
+  that matches an input URL will have its associated values returned by
+ Match().
+ """
+
+ def __init__(self):
+ """Initializer."""
+ self._url_patterns = []
+
+ def AddURL(self, regex, dispatcher, path, requires_login, admin_only):
+ """Adds a URL pattern to the list of patterns.
+
+    If the supplied regex starts with a '^' or ends with a '$', an
+    InvalidAppConfigError exception will be raised. Start and end symbols
+    are implicitly added to all regexes, meaning we assume that all regexes
+ consume all input from a URL.
+
+ Args:
+ regex: String containing the regular expression pattern.
+ dispatcher: Instance of URLDispatcher that should handle requests that
+ match this regex.
+ path: Path on disk for the resource. May contain back-references like
+ r'\1', r'\2', etc, which will be replaced by the corresponding groups
+ matched by the regex if present.
+ requires_login: True if the user must be logged-in before accessing this
+ URL; False if anyone can access this URL.
+ admin_only: True if the user must be a logged-in administrator to
+ access the URL; False if anyone can access the URL.
+
+ Raises:
+ TypeError: if dispatcher is not a URLDispatcher sub-class instance.
+ InvalidAppConfigError: if regex isn't valid.
+ """
+ if not isinstance(dispatcher, URLDispatcher):
+ raise TypeError('dispatcher must be a URLDispatcher sub-class')
+
+ if regex.startswith('^') or regex.endswith('$'):
+ raise InvalidAppConfigError('regex starts with "^" or ends with "$"')
+
+ adjusted_regex = '^%s$' % regex
+
+ try:
+ url_re = re.compile(adjusted_regex)
+ except re.error, e:
+ raise InvalidAppConfigError('regex invalid: %s' % e)
+
+ match_tuple = (url_re, dispatcher, path, requires_login, admin_only)
+ self._url_patterns.append(match_tuple)
+
+ def Match(self,
+ relative_url,
+ split_url=SplitURL):
+ """Matches a URL from a request against the list of URL patterns.
+
+ The supplied relative_url may include the query string (i.e., the '?'
+ character and everything following).
+
+ Args:
+ relative_url: Relative URL being accessed in a request.
+ split_url: Used for dependency injection.
+
+ Returns:
+ Tuple (dispatcher, matched_path, requires_login, admin_only), which are
+ the corresponding values passed to AddURL when the matching URL pattern
+ was added to this matcher. The matched_path will have back-references
+ replaced using values matched by the URL pattern. If no match was found,
+ dispatcher will be None.
+ """
+ adjusted_url, unused_query_string = split_url(relative_url)
+
+ for url_tuple in self._url_patterns:
+ url_re, dispatcher, path, requires_login, admin_only = url_tuple
+ the_match = url_re.match(adjusted_url)
+
+ if the_match:
+ adjusted_path = the_match.expand(path)
+ return dispatcher, adjusted_path, requires_login, admin_only
+
+ return None, None, None, None
+
+ def GetDispatchers(self):
+ """Retrieves the URLDispatcher objects that could be matched.
+
+ Should only be used in tests.
+
+ Returns:
+ A set of URLDispatcher objects.
+ """
+ return set([url_tuple[1] for url_tuple in self._url_patterns])
+
+
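+# A sketch (an assumption, not SDK code) of how a URLMatcher is populated
+# and queried; 'script_dispatcher' and 'main.py' are hypothetical:
+#
+#   matcher = URLMatcher()
+#   matcher.AddURL(r'/admin/.*', script_dispatcher, 'main.py',
+#                  requires_login=True, admin_only=True)
+#   dispatcher, path, requires_login, admin_only = matcher.Match(
+#       '/admin/users?q=abc')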
+
+class MatcherDispatcher(URLDispatcher):
+ """Dispatcher across multiple URLMatcher instances."""
+
+ def __init__(self,
+ login_url,
+ url_matchers,
+ get_user_info=dev_appserver_login.GetUserInfo,
+ login_redirect=dev_appserver_login.LoginRedirect):
+ """Initializer.
+
+ Args:
+ login_url: Relative URL which should be used for handling user logins.
+ url_matchers: Sequence of URLMatcher objects.
+ get_user_info: Used for dependency injection.
+ login_redirect: Used for dependency injection.
+ """
+ self._login_url = login_url
+ self._url_matchers = tuple(url_matchers)
+ self._get_user_info = get_user_info
+ self._login_redirect = login_redirect
+
+ def Dispatch(self,
+ relative_url,
+ path,
+ headers,
+ infile,
+ outfile,
+ base_env_dict=None):
+ """Dispatches a request to the first matching dispatcher.
+
+ Matchers are checked in the order they were supplied to the constructor.
+ If no matcher matches, a 404 error will be written to the outfile. The
+ path variable supplied to this method is ignored.
+ """
+ cookies = ', '.join(headers.getheaders('cookie'))
+ email_addr, admin, user_id = self._get_user_info(cookies)
+
+ for matcher in self._url_matchers:
+ dispatcher, matched_path, requires_login, admin_only = matcher.Match(
+ relative_url)
+ if dispatcher is None:
+ continue
+
+ logging.debug('Matched "%s" to %s with path %s',
+ relative_url, dispatcher, matched_path)
+
+ if (requires_login or admin_only) and not email_addr:
+ logging.debug('Login required, redirecting user')
+ self._login_redirect(self._login_url,
+ base_env_dict['SERVER_NAME'],
+ base_env_dict['SERVER_PORT'],
+ relative_url,
+ outfile)
+ elif admin_only and not admin:
+ outfile.write('Status: %d Not authorized\r\n'
+ '\r\n'
+ 'Current logged in user %s is not '
+ 'authorized to view this page.'
+ % (httplib.FORBIDDEN, email_addr))
+ else:
+ forward = dispatcher.Dispatch(relative_url,
+ matched_path,
+ headers,
+ infile,
+ outfile,
+ base_env_dict=base_env_dict)
+
+ if forward:
+ new_path, new_headers, new_input = forward
+ logging.info('Internal redirection to %s', new_path)
+ new_outfile = cStringIO.StringIO()
+ self.Dispatch(new_path,
+ None,
+ new_headers,
+ new_input,
+ new_outfile,
+ dict(base_env_dict))
+ new_outfile.seek(0)
+ dispatcher.EndRedirect(new_outfile, outfile)
+
+ return
+
+ outfile.write('Status: %d URL did not match\r\n'
+ '\r\n'
+ 'Not found error: %s did not match any patterns '
+ 'in application configuration.'
+ % (httplib.NOT_FOUND, relative_url))
+
+
+
+class ApplicationLoggingHandler(logging.Handler):
+ """Python Logging handler that displays the debugging console to users."""
+
+ _COOKIE_NAME = '_ah_severity'
+
+ _TEMPLATES_INITIALIZED = False
+ _HEADER = None
+ _SCRIPT = None
+ _MIDDLE = None
+ _FOOTER = None
+
+ @staticmethod
+ def InitializeTemplates(header, script, middle, footer):
+ """Initializes the templates used to render the debugging console.
+
+ This method must be called before any ApplicationLoggingHandler instances
+ are created.
+
+ Args:
+ header: The header template that is printed first.
+ script: The script template that is printed after the logging messages.
+ middle: The middle element that's printed before the footer.
+      footer: The last element that's printed at the end of the document.
+ """
+ ApplicationLoggingHandler._HEADER = header
+ ApplicationLoggingHandler._SCRIPT = script
+ ApplicationLoggingHandler._MIDDLE = middle
+ ApplicationLoggingHandler._FOOTER = footer
+ ApplicationLoggingHandler._TEMPLATES_INITIALIZED = True
+
+ @staticmethod
+ def AreTemplatesInitialized():
+ """Returns True if InitializeTemplates has been called, False otherwise."""
+ return ApplicationLoggingHandler._TEMPLATES_INITIALIZED
+
+ def __init__(self, *args, **kwargs):
+ """Initializer.
+
+ Args:
+ args, kwargs: See logging.Handler.
+
+ Raises:
+ TemplatesNotLoadedError exception if the InitializeTemplates method was
+ not called before creating this instance.
+ """
+ if not self._TEMPLATES_INITIALIZED:
+ raise TemplatesNotLoadedError
+
+ logging.Handler.__init__(self, *args, **kwargs)
+ self._record_list = []
+ self._start_time = time.time()
+
+ def emit(self, record):
+ """Called by the logging module each time the application logs a message.
+
+ Args:
+ record: logging.LogRecord instance corresponding to the newly logged
+ message.
+ """
+ self._record_list.append(record)
+
+ def AddDebuggingConsole(self, relative_url, env, outfile):
+ """Prints an HTML debugging console to an output stream, if requested.
+
+ Args:
+ relative_url: Relative URL that was accessed, including the query string.
+ Used to determine if the parameter 'debug' was supplied, in which case
+ the console will be shown.
+ env: Dictionary containing CGI environment variables. Checks for the
+ HTTP_COOKIE entry to see if the accessing user has any logging-related
+ cookies set.
+ outfile: Output stream to which the console should be written if either
+ a debug parameter was supplied or a logging cookie is present.
+ """
+ unused_script_name, query_string = SplitURL(relative_url)
+ param_dict = cgi.parse_qs(query_string, True)
+ cookie_dict = Cookie.SimpleCookie(env.get('HTTP_COOKIE', ''))
+ if 'debug' not in param_dict and self._COOKIE_NAME not in cookie_dict:
+ return
+
+ outfile.write(self._HEADER)
+ for record in self._record_list:
+ self._PrintRecord(record, outfile)
+
+ outfile.write(self._MIDDLE)
+ outfile.write(self._SCRIPT)
+ outfile.write(self._FOOTER)
+
+ def _PrintRecord(self, record, outfile):
+ """Prints a single logging record to an output stream.
+
+ Args:
+ record: logging.LogRecord instance to print.
+ outfile: Output stream to which the LogRecord should be printed.
+ """
+ message = cgi.escape(record.getMessage())
+ level_name = logging.getLevelName(record.levelno).lower()
+ level_letter = level_name[:1].upper()
+ time_diff = record.created - self._start_time
+ outfile.write('<span class="_ah_logline_%s">\n' % level_name)
+ outfile.write('<span class="_ah_logline_%s_prefix">%2.5f %s &gt;</span>\n'
+ % (level_name, time_diff, level_letter))
+ outfile.write('%s\n' % message)
+ outfile.write('</span>\n')
+
+
+_IGNORE_REQUEST_HEADERS = frozenset(['content-type', 'content-length',
+ 'accept-encoding', 'transfer-encoding'])
+
+
+def SetupEnvironment(cgi_path,
+ relative_url,
+ headers,
+ infile,
+ split_url=SplitURL,
+ get_user_info=dev_appserver_login.GetUserInfo):
+ """Sets up environment variables for a CGI.
+
+ Args:
+ cgi_path: Full file-system path to the CGI being executed.
+ relative_url: Relative URL used to access the CGI.
+ headers: Instance of mimetools.Message containing request headers.
+ infile: File-like object with input data from the request.
+ split_url, get_user_info: Used for dependency injection.
+
+ Returns:
+ Dictionary containing CGI environment variables.
+ """
+ env = DEFAULT_ENV.copy()
+
+ script_name, query_string = split_url(relative_url)
+
+ env['SCRIPT_NAME'] = ''
+ env['QUERY_STRING'] = query_string
+ env['PATH_INFO'] = urllib.unquote(script_name)
+ env['PATH_TRANSLATED'] = cgi_path
+ env['CONTENT_TYPE'] = headers.getheader('content-type',
+ 'application/x-www-form-urlencoded')
+ env['CONTENT_LENGTH'] = headers.getheader('content-length', '')
+
+ cookies = ', '.join(headers.getheaders('cookie'))
+ email_addr, admin, user_id = get_user_info(cookies)
+ env['USER_EMAIL'] = email_addr
+ env['USER_ID'] = user_id
+ if admin:
+ env['USER_IS_ADMIN'] = '1'
+
+ for key in headers:
+ if key in _IGNORE_REQUEST_HEADERS:
+ continue
+ adjusted_name = key.replace('-', '_').upper()
+ env['HTTP_' + adjusted_name] = ', '.join(headers.getheaders(key))
+
+ PAYLOAD_HEADER = 'HTTP_X_APPENGINE_DEVELOPMENT_PAYLOAD'
+ if PAYLOAD_HEADER in env:
+ del env[PAYLOAD_HEADER]
+ new_data = base64.standard_b64decode(infile.getvalue())
+ infile.seek(0)
+ infile.truncate()
+ infile.write(new_data)
+ infile.seek(0)
+ env['CONTENT_LENGTH'] = str(len(new_data))
+
+ return env
+
+
+def NotImplementedFake(*args, **kwargs):
+ """Fake for methods/functions that are not implemented in the production
+ environment.
+ """
+ raise NotImplementedError('This class/method is not available.')
+
+
+class NotImplementedFakeClass(object):
+ """Fake class for classes that are not implemented in the production env.
+ """
+ __init__ = NotImplementedFake
+
+
+def IsEncodingsModule(module_name):
+ """Determines if the supplied module is related to encodings in any way.
+
+ Encodings-related modules cannot be reloaded, so they need to be treated
+ specially when sys.modules is modified in any way.
+
+ Args:
+ module_name: Absolute name of the module regardless of how it is imported
+ into the local namespace (e.g., foo.bar.baz).
+
+ Returns:
+ True if it's an encodings-related module; False otherwise.
+ """
+ if (module_name in ('codecs', 'encodings') or
+ module_name.startswith('encodings.')):
+ return True
+ return False
+
+
+def ClearAllButEncodingsModules(module_dict):
+ """Clear all modules in a module dictionary except for those modules that
+ are in any way related to encodings.
+
+ Args:
+ module_dict: Dictionary in the form used by sys.modules.
+ """
+ for module_name in module_dict.keys():
+ if not IsEncodingsModule(module_name):
+ del module_dict[module_name]
+
+
+def FakeURandom(n):
+ """Fake version of os.urandom."""
+ bytes = ''
+ for _ in range(n):
+ bytes += chr(random.randint(0, 255))
+ return bytes
+
+
+def FakeUname():
+ """Fake version of os.uname."""
+ return ('Linux', '', '', '', '')
+
+
+def FakeUnlink(path):
+ """Fake version of os.unlink."""
+ if os.path.isdir(path):
+    raise OSError(errno.EISDIR, "Is a directory", path)
+ else:
+ raise OSError(errno.EPERM, "Operation not permitted", path)
+
+
+def FakeReadlink(path):
+ """Fake version of os.readlink."""
+ raise OSError(errno.EINVAL, "Invalid argument", path)
+
+
+def FakeAccess(path, mode):
+ """Fake version of os.access where only reads are supported."""
+ if not os.path.exists(path) or mode != os.R_OK:
+ return False
+ else:
+ return True
+
+
+def FakeSetLocale(category, value=None, original_setlocale=locale.setlocale):
+ """Fake version of locale.setlocale that only supports the default."""
+ if value not in (None, '', 'C', 'POSIX'):
+ raise locale.Error('locale emulation only supports "C" locale')
+ return original_setlocale(category, 'C')
+
+
+def FakeOpen(filename, flags, mode=0777):
+ """Fake version of os.open."""
+ raise OSError(errno.EPERM, "Operation not permitted", filename)
+
+
+def FakeRename(src, dst):
+ """Fake version of os.rename."""
+ raise OSError(errno.EPERM, "Operation not permitted", src)
+
+
+def FakeUTime(path, times):
+ """Fake version of os.utime."""
+ raise OSError(errno.EPERM, "Operation not permitted", path)
+
+
+def FakeGetPlatform():
+ """Fake distutils.util.get_platform on OS/X. Pass-through otherwise."""
+ if sys.platform == 'darwin':
+ return 'macosx-'
+ else:
+ return distutils.util.get_platform()
+
+
+def IsPathInSubdirectories(filename,
+ subdirectories,
+ normcase=os.path.normcase):
+ """Determines if a filename is contained within one of a set of directories.
+
+ Args:
+ filename: Path of the file (relative or absolute).
+ subdirectories: Iterable collection of paths to subdirectories which the
+ given filename may be under.
+ normcase: Used for dependency injection.
+
+ Returns:
+ True if the supplied filename is in one of the given sub-directories or
+ its hierarchy of children. False otherwise.
+ """
+ file_dir = normcase(os.path.dirname(os.path.abspath(filename)))
+ for parent in subdirectories:
+ fixed_parent = normcase(os.path.abspath(parent))
+ if os.path.commonprefix([file_dir, fixed_parent]) == fixed_parent:
+ return True
+ return False
+
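+# Note that the check above is prefix-based: os.path.commonprefix() compares
+# strings character by character, so a sibling directory whose name shares a
+# prefix will also match. For example (hypothetical paths):
+#   IsPathInSubdirectories('/srv/app/handlers/x.py', ['/srv/app'])  -> True
+#   IsPathInSubdirectories('/srv/app2/x.py', ['/srv/app'])          -> True
+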
+SHARED_MODULE_PREFIXES = set([
+ 'google',
+ 'logging',
+ 'sys',
+ 'warnings',
+
+ 're',
+ 'sre_compile',
+ 'sre_constants',
+ 'sre_parse',
+
+ 'wsgiref',
+])
+
+NOT_SHARED_MODULE_PREFIXES = set([
+ 'google.appengine.ext',
+])
+
+
+def ModuleNameHasPrefix(module_name, prefix_set):
+ """Determines if a module's name belongs to a set of prefix strings.
+
+ Args:
+ module_name: String containing the fully qualified module name.
+ prefix_set: Iterable set of module name prefixes to check against.
+
+ Returns:
+ True if the module_name belongs to the prefix set or is a submodule of
+ any of the modules specified in the prefix_set. Otherwise False.
+ """
+ for prefix in prefix_set:
+ if prefix == module_name:
+ return True
+
+ if module_name.startswith(prefix + '.'):
+ return True
+
+ return False
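+
+# For example, the prefix match respects package boundaries:
+#   ModuleNameHasPrefix('google.appengine', set(['google']))  -> True
+#   ModuleNameHasPrefix('googleapis', set(['google']))        -> False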
+
+
+def SetupSharedModules(module_dict):
+ """Creates a module dictionary for the hardened part of the process.
+
+ Module dictionary will contain modules that should be shared between the
+ hardened and unhardened parts of the process.
+
+ Args:
+ module_dict: Module dictionary from which existing modules should be
+ pulled (usually sys.modules).
+
+ Returns:
+ A new module dictionary.
+ """
+ output_dict = {}
+ for module_name, module in module_dict.iteritems():
+ if module is None:
+ continue
+
+ if IsEncodingsModule(module_name):
+ output_dict[module_name] = module
+ continue
+
+ shared_prefix = ModuleNameHasPrefix(module_name, SHARED_MODULE_PREFIXES)
+ banned_prefix = ModuleNameHasPrefix(module_name, NOT_SHARED_MODULE_PREFIXES)
+
+ if shared_prefix and not banned_prefix:
+ output_dict[module_name] = module
+
+ return output_dict
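+
+# A minimal usage sketch (not part of the SDK):
+#   shared_modules = SetupSharedModules(sys.modules)
+# The result keeps the encodings machinery and shared prefixes such as
+# 'logging' and 're', but drops 'google.appengine.ext' submodules.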
+
+
+def GeneratePythonPaths(*p):
+ """Generate all valid filenames for the given file.
+
+ Args:
+ p: Positional args are the folders to the file and finally the file
+ without a suffix.
+
+ Returns:
+ A list of strings representing the given path to a file with each valid
+ suffix for this python build.
+ """
+ suffixes = imp.get_suffixes()
+ return [os.path.join(*p) + s for s, m, t in suffixes]
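+
+# For example, on a typical CPython 2 build this returns something like:
+#   GeneratePythonPaths('Crypto', 'Hash', 'MD5')
+#   -> ['Crypto/Hash/MD5.so', 'Crypto/Hash/MD5module.so',
+#       'Crypto/Hash/MD5.py', 'Crypto/Hash/MD5.pyc']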
+
+
+class FakeFile(file):
+ """File sub-class that enforces the security restrictions of the production
+ environment.
+ """
+
+ ALLOWED_MODES = frozenset(['r', 'rb', 'U', 'rU'])
+
+ ALLOWED_FILES = set(os.path.normcase(filename)
+ for filename in mimetypes.knownfiles
+ if os.path.isfile(filename))
+
+ ALLOWED_DIRS = set([
+ os.path.normcase(os.path.realpath(os.path.dirname(os.__file__))),
+ os.path.normcase(os.path.abspath(os.path.dirname(os.__file__))),
+ ])
+
+ NOT_ALLOWED_DIRS = set([
+ SITE_PACKAGES,
+ ])
+
+ ALLOWED_SITE_PACKAGE_DIRS = set(
+ os.path.normcase(os.path.abspath(os.path.join(SITE_PACKAGES, path)))
+ for path in [
+ ])
+
+ ALLOWED_SITE_PACKAGE_FILES = set(
+ os.path.normcase(os.path.abspath(os.path.join(
+ os.path.dirname(os.__file__), 'site-packages', path)))
+ for path in itertools.chain(*[
+
+ [os.path.join('Crypto')],
+ GeneratePythonPaths('Crypto', '__init__'),
+ [os.path.join('Crypto', 'Cipher')],
+ GeneratePythonPaths('Crypto', 'Cipher', '__init__'),
+ GeneratePythonPaths('Crypto', 'Cipher', 'AES'),
+ GeneratePythonPaths('Crypto', 'Cipher', 'ARC2'),
+ GeneratePythonPaths('Crypto', 'Cipher', 'ARC4'),
+ GeneratePythonPaths('Crypto', 'Cipher', 'Blowfish'),
+ GeneratePythonPaths('Crypto', 'Cipher', 'CAST'),
+ GeneratePythonPaths('Crypto', 'Cipher', 'DES'),
+ GeneratePythonPaths('Crypto', 'Cipher', 'DES3'),
+ GeneratePythonPaths('Crypto', 'Cipher', 'XOR'),
+ [os.path.join('Crypto', 'Hash')],
+ GeneratePythonPaths('Crypto', 'Hash', '__init__'),
+ GeneratePythonPaths('Crypto', 'Hash', 'HMAC'),
+ os.path.join('Crypto', 'Hash', 'MD2'),
+ os.path.join('Crypto', 'Hash', 'MD4'),
+ GeneratePythonPaths('Crypto', 'Hash', 'MD5'),
+ GeneratePythonPaths('Crypto', 'Hash', 'SHA'),
+ os.path.join('Crypto', 'Hash', 'SHA256'),
+ os.path.join('Crypto', 'Hash', 'RIPEMD'),
+ [os.path.join('Crypto', 'Protocol')],
+ GeneratePythonPaths('Crypto', 'Protocol', '__init__'),
+ GeneratePythonPaths('Crypto', 'Protocol', 'AllOrNothing'),
+ GeneratePythonPaths('Crypto', 'Protocol', 'Chaffing'),
+ [os.path.join('Crypto', 'PublicKey')],
+ GeneratePythonPaths('Crypto', 'PublicKey', '__init__'),
+ GeneratePythonPaths('Crypto', 'PublicKey', 'DSA'),
+ GeneratePythonPaths('Crypto', 'PublicKey', 'ElGamal'),
+ GeneratePythonPaths('Crypto', 'PublicKey', 'RSA'),
+ GeneratePythonPaths('Crypto', 'PublicKey', 'pubkey'),
+ GeneratePythonPaths('Crypto', 'PublicKey', 'qNEW'),
+ [os.path.join('Crypto', 'Util')],
+ GeneratePythonPaths('Crypto', 'Util', '__init__'),
+ GeneratePythonPaths('Crypto', 'Util', 'RFC1751'),
+ GeneratePythonPaths('Crypto', 'Util', 'number'),
+ GeneratePythonPaths('Crypto', 'Util', 'randpool'),
+ ]))
+
+ _original_file = file
+
+ _root_path = None
+ _application_paths = None
+ _skip_files = None
+ _static_file_config_matcher = None
+
+ _allow_skipped_files = True
+
+ _availability_cache = {}
+
+ @staticmethod
+ def SetAllowedPaths(root_path, application_paths):
+ """Configures which paths are allowed to be accessed.
+
+ Must be called at least once before any file objects are created in the
+ hardened environment.
+
+ Args:
+ root_path: Absolute path to the root of the application.
+ application_paths: List of additional paths that the application may
+ access; this must include the App Engine runtime but
+ not the Python library directories.
+ """
+ FakeFile._application_paths = (set(os.path.realpath(path)
+ for path in application_paths) |
+ set(os.path.abspath(path)
+ for path in application_paths))
+ FakeFile._application_paths.add(root_path)
+
+ FakeFile._root_path = os.path.join(root_path, '')
+
+ FakeFile._availability_cache = {}
+
+ @staticmethod
+ def SetAllowSkippedFiles(allow_skipped_files):
+ """Configures access to files matching FakeFile._skip_files.
+
+ Args:
+ allow_skipped_files: Boolean indicating whether to allow access to
+ skipped files.
+ """
+ FakeFile._allow_skipped_files = allow_skipped_files
+ FakeFile._availability_cache = {}
+
+ @staticmethod
+ def SetAllowedModule(name):
+ """Allow the use of a module based on where it is located.
+
+ Meant to be used by use_library() so that it has a link back into the
+ trusted part of the interpreter.
+
+ Args:
+ name: Name of the module to allow.
+ """
+ stream, pathname, description = imp.find_module(name)
+ pathname = os.path.normcase(os.path.abspath(pathname))
+ if stream:
+ stream.close()
+ FakeFile.ALLOWED_FILES.add(pathname)
+ FakeFile.ALLOWED_FILES.add(os.path.realpath(pathname))
+ else:
+ assert description[2] == imp.PKG_DIRECTORY
+ if pathname.startswith(SITE_PACKAGES):
+ FakeFile.ALLOWED_SITE_PACKAGE_DIRS.add(pathname)
+ FakeFile.ALLOWED_SITE_PACKAGE_DIRS.add(os.path.realpath(pathname))
+ else:
+ FakeFile.ALLOWED_DIRS.add(pathname)
+ FakeFile.ALLOWED_DIRS.add(os.path.realpath(pathname))
+
+ @staticmethod
+ def SetSkippedFiles(skip_files):
+ """Sets which files in the application directory are to be ignored.
+
+ Must be called at least once before any file objects are created in the
+ hardened environment.
+
+ Must be called whenever the configuration was updated.
+
+ Args:
+ skip_files: Object with .match() method (e.g. compiled regexp).
+ """
+ FakeFile._skip_files = skip_files
+ FakeFile._availability_cache = {}
+
+ @staticmethod
+ def SetStaticFileConfigMatcher(static_file_config_matcher):
+ """Sets StaticFileConfigMatcher instance for checking if a file is static.
+
+ Must be called at least once before any file objects are created in the
+ hardened environment.
+
+ Must be called whenever the configuration was updated.
+
+ Args:
+ static_file_config_matcher: StaticFileConfigMatcher instance.
+ """
+ FakeFile._static_file_config_matcher = static_file_config_matcher
+ FakeFile._availability_cache = {}
+
+ @staticmethod
+ def IsFileAccessible(filename, normcase=os.path.normcase):
+ """Determines if a file's path is accessible.
+
+ SetAllowedPaths(), SetSkippedFiles() and SetStaticFileConfigMatcher() must
+ be called before this method or else all file accesses will raise an error.
+
+ Args:
+ filename: Path of the file to check (relative or absolute). May be a
+ directory, in which case access for files inside that directory will
+ be checked.
+ normcase: Used for dependency injection.
+
+ Returns:
+ True if the file is accessible, False otherwise.
+ """
+ logical_filename = normcase(os.path.abspath(filename))
+
+ result = FakeFile._availability_cache.get(logical_filename)
+ if result is None:
+ result = FakeFile._IsFileAccessibleNoCache(logical_filename,
+ normcase=normcase)
+ FakeFile._availability_cache[logical_filename] = result
+ return result
+
+ @staticmethod
+ def _IsFileAccessibleNoCache(logical_filename, normcase=os.path.normcase):
+ """Determines if a file's path is accessible.
+
+ This is an internal part of the IsFileAccessible implementation.
+
+ Args:
+ logical_filename: Absolute path of the file to check.
+ normcase: Used for dependency injection.
+
+ Returns:
+ True if the file is accessible, False otherwise.
+ """
+ logical_dirfakefile = logical_filename
+ if os.path.isdir(logical_filename):
+ logical_dirfakefile = os.path.join(logical_filename, 'foo')
+
+ if IsPathInSubdirectories(logical_dirfakefile, [FakeFile._root_path],
+ normcase=normcase):
+ relative_filename = logical_dirfakefile[len(FakeFile._root_path):]
+
+ if (not FakeFile._allow_skipped_files and
+ FakeFile._skip_files.match(relative_filename)):
+ logging.warning('Blocking access to skipped file "%s"',
+ logical_filename)
+ return False
+
+ if FakeFile._static_file_config_matcher.IsStaticFile(relative_filename):
+ logging.warning('Blocking access to static file "%s"',
+ logical_filename)
+ return False
+
+ if logical_filename in FakeFile.ALLOWED_FILES:
+ return True
+
+ if logical_filename in FakeFile.ALLOWED_SITE_PACKAGE_FILES:
+ return True
+
+ if IsPathInSubdirectories(logical_dirfakefile,
+ FakeFile.ALLOWED_SITE_PACKAGE_DIRS,
+ normcase=normcase):
+ return True
+
+ allowed_dirs = FakeFile._application_paths | FakeFile.ALLOWED_DIRS
+ if (IsPathInSubdirectories(logical_dirfakefile,
+ allowed_dirs,
+ normcase=normcase) and
+ not IsPathInSubdirectories(logical_dirfakefile,
+ FakeFile.NOT_ALLOWED_DIRS,
+ normcase=normcase)):
+ return True
+
+ return False
+
+ def __init__(self, filename, mode='r', bufsize=-1, **kwargs):
+ """Initializer. See file built-in documentation."""
+ if mode not in FakeFile.ALLOWED_MODES:
+ raise IOError('invalid mode: %s' % mode)
+
+ if not FakeFile.IsFileAccessible(filename):
+ raise IOError(errno.EACCES, 'file not accessible', filename)
+
+ super(FakeFile, self).__init__(filename, mode, bufsize, **kwargs)
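+
+# A minimal configuration sketch (hypothetical values, not part of the SDK):
+# the harness must configure FakeFile before the hardened environment opens
+# any files:
+#   FakeFile.SetAllowedPaths(root_path, [sdk_runtime_path])
+#   FakeFile.SetSkippedFiles(skip_files_regex)
+#   FakeFile.SetStaticFileConfigMatcher(config_matcher)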
+
+
+from google.appengine.dist import _library
+_library.SetAllowedModule = FakeFile.SetAllowedModule
+
+
+class RestrictedPathFunction(object):
+ """Enforces access restrictions for functions that have a file or
+ directory path as their first argument."""
+
+ _original_os = os
+
+ def __init__(self, original_func):
+ """Initializer.
+
+ Args:
+ original_func: Callable that takes as its first argument the path to a
+ file or directory on disk; all subsequent arguments may be variable.
+ """
+ self._original_func = original_func
+
+ def __call__(self, path, *args, **kwargs):
+ """Enforces access permissions for the function passed to the constructor.
+ """
+ if not FakeFile.IsFileAccessible(path):
+ raise OSError(errno.EACCES, 'path not accessible', path)
+
+ return self._original_func(path, *args, **kwargs)
+
+
+def GetSubmoduleName(fullname):
+ """Determines the leaf submodule name of a full module name.
+
+ Args:
+ fullname: Fully qualified module name, e.g. 'foo.bar.baz'
+
+ Returns:
+ Submodule name, e.g. 'baz'. If the supplied module has no submodule (e.g.,
+ 'stuff'), the returned value will just be that module name ('stuff').
+ """
+ return fullname.rsplit('.', 1)[-1]
+
+
+
+class CouldNotFindModuleError(ImportError):
+ """Raised when a module could not be found.
+
+ In contrast to when a module has been found, but cannot be loaded because of
+ hardening restrictions.
+ """
+
+
+def Trace(func):
+ """Call stack logging decorator for HardenedModulesHook class.
+
+ This decorator logs the call stack of the HardenedModulesHook class as
+ it executes, indenting logging messages based on the current stack depth.
+
+ Args:
+ func: the function to decorate.
+
+ Returns:
+ The decorated function.
+ """
+
+ def Decorate(self, *args, **kwargs):
+ args_to_show = []
+ if args is not None:
+ args_to_show.extend(str(argument) for argument in args)
+ if kwargs is not None:
+ args_to_show.extend('%s=%s' % (key, value)
+ for key, value in kwargs.iteritems())
+
+ args_string = ', '.join(args_to_show)
+
+ self.log('Entering %s(%s)', func.func_name, args_string)
+ self._indent_level += 1
+ try:
+ return func(self, *args, **kwargs)
+ finally:
+ self._indent_level -= 1
+ self.log('Exiting %s(%s)', func.func_name, args_string)
+
+ return Decorate
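+
+# A minimal sketch (not part of the SDK): a class whose methods are decorated
+# with @Trace must provide log() and _indent_level, as HardenedModulesHook
+# below does:
+#
+#   class Traced(object):
+#     def __init__(self):
+#       self._indent_level = 0
+#     def log(self, message, *args):
+#       print >>sys.stderr, (self._indent_level * '  ') + (message % args)
+#     @Trace
+#     def DoWork(self, value):
+#       return value * 2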
+
+
+class HardenedModulesHook(object):
+ """Meta import hook that restricts the modules used by applications to match
+ the production environment.
+
+ Module controls supported:
+ - Disallow native/extension modules from being loaded
+ - Disallow built-in and/or Python-distributed modules from being loaded
+ - Replace modules with completely empty modules
+ - Override specific module attributes
+ - Replace one module with another
+
+ After creation, this object should be added to the front of the sys.meta_path
+ list (which may need to be created). The sys.path_importer_cache dictionary
+ should also be cleared, to prevent loading any non-restricted modules.
+
+ See PEP302 for more info on how this works:
+ http://www.python.org/dev/peps/pep-0302/
+ """
+
+ ENABLE_LOGGING = False
+
+ def log(self, message, *args):
+ """Logs an import-related message to stderr, with indentation based on
+ current call-stack depth.
+
+ Args:
+ message: Logging format string.
+ args: Positional format parameters for the logging message.
+ """
+ if HardenedModulesHook.ENABLE_LOGGING:
+ indent = self._indent_level * ' '
+ print >>sys.stderr, indent + (message % args)
+
+ _WHITE_LIST_C_MODULES = [
+ 'AES',
+ 'ARC2',
+ 'ARC4',
+ 'Blowfish',
+ 'CAST',
+ 'DES',
+ 'DES3',
+ 'MD2',
+ 'MD4',
+ 'RIPEMD',
+ 'SHA256',
+ 'XOR',
+
+ '_Crypto_Cipher__AES',
+ '_Crypto_Cipher__ARC2',
+ '_Crypto_Cipher__ARC4',
+ '_Crypto_Cipher__Blowfish',
+ '_Crypto_Cipher__CAST',
+ '_Crypto_Cipher__DES',
+ '_Crypto_Cipher__DES3',
+ '_Crypto_Cipher__XOR',
+ '_Crypto_Hash__MD2',
+ '_Crypto_Hash__MD4',
+ '_Crypto_Hash__RIPEMD',
+ '_Crypto_Hash__SHA256',
+ 'array',
+ 'binascii',
+ 'bz2',
+ 'cmath',
+ 'collections',
+ 'crypt',
+ 'cStringIO',
+ 'datetime',
+ 'errno',
+ 'exceptions',
+ 'gc',
+ 'itertools',
+ 'math',
+ 'md5',
+ 'operator',
+ 'posix',
+ 'posixpath',
+ 'pyexpat',
+ 'sha',
+ 'struct',
+ 'sys',
+ 'time',
+ 'timing',
+ 'unicodedata',
+ 'zlib',
+ '_ast',
+ '_bisect',
+ '_codecs',
+ '_codecs_cn',
+ '_codecs_hk',
+ '_codecs_iso2022',
+ '_codecs_jp',
+ '_codecs_kr',
+ '_codecs_tw',
+ '_collections',
+ '_csv',
+ '_elementtree',
+ '_functools',
+ '_hashlib',
+ '_heapq',
+ '_locale',
+ '_lsprof',
+ '_md5',
+ '_multibytecodec',
+ '_random',
+ '_sha',
+ '_sha256',
+ '_sha512',
+ '_sre',
+ '_struct',
+ '_types',
+ '_weakref',
+ '__main__',
+ ]
+
+ __CRYPTO_CIPHER_ALLOWED_MODULES = [
+ 'MODE_CBC',
+ 'MODE_CFB',
+ 'MODE_CTR',
+ 'MODE_ECB',
+ 'MODE_OFB',
+ 'block_size',
+ 'key_size',
+ 'new',
+ ]
+ _WHITE_LIST_PARTIAL_MODULES = {
+ 'Crypto.Cipher.AES': __CRYPTO_CIPHER_ALLOWED_MODULES,
+ 'Crypto.Cipher.ARC2': __CRYPTO_CIPHER_ALLOWED_MODULES,
+ 'Crypto.Cipher.Blowfish': __CRYPTO_CIPHER_ALLOWED_MODULES,
+ 'Crypto.Cipher.CAST': __CRYPTO_CIPHER_ALLOWED_MODULES,
+ 'Crypto.Cipher.DES': __CRYPTO_CIPHER_ALLOWED_MODULES,
+ 'Crypto.Cipher.DES3': __CRYPTO_CIPHER_ALLOWED_MODULES,
+
+ 'gc': [
+ 'enable',
+ 'disable',
+ 'isenabled',
+ 'collect',
+ 'get_debug',
+ 'set_threshold',
+ 'get_threshold',
+ 'get_count'
+ ],
+
+ 'os': [
+ 'access',
+ 'altsep',
+ 'curdir',
+ 'defpath',
+ 'devnull',
+ 'environ',
+ 'error',
+ 'extsep',
+ 'EX_NOHOST',
+ 'EX_NOINPUT',
+ 'EX_NOPERM',
+ 'EX_NOUSER',
+ 'EX_OK',
+ 'EX_OSERR',
+ 'EX_OSFILE',
+ 'EX_PROTOCOL',
+ 'EX_SOFTWARE',
+ 'EX_TEMPFAIL',
+ 'EX_UNAVAILABLE',
+ 'EX_USAGE',
+ 'F_OK',
+ 'getcwd',
+ 'getcwdu',
+ 'getenv',
+ 'listdir',
+ 'lstat',
+ 'name',
+ 'NGROUPS_MAX',
+ 'O_APPEND',
+ 'O_CREAT',
+ 'O_DIRECT',
+ 'O_DIRECTORY',
+ 'O_DSYNC',
+ 'O_EXCL',
+ 'O_LARGEFILE',
+ 'O_NDELAY',
+ 'O_NOCTTY',
+ 'O_NOFOLLOW',
+ 'O_NONBLOCK',
+ 'O_RDONLY',
+ 'O_RDWR',
+ 'O_RSYNC',
+ 'O_SYNC',
+ 'O_TRUNC',
+ 'O_WRONLY',
+ 'open',
+ 'pardir',
+ 'path',
+ 'pathsep',
+ 'R_OK',
+ 'readlink',
+ 'remove',
+ 'rename',
+ 'SEEK_CUR',
+ 'SEEK_END',
+ 'SEEK_SET',
+ 'sep',
+ 'stat',
+ 'stat_float_times',
+ 'stat_result',
+ 'strerror',
+ 'TMP_MAX',
+ 'unlink',
+ 'urandom',
+ 'utime',
+ 'walk',
+ 'WCOREDUMP',
+ 'WEXITSTATUS',
+ 'WIFEXITED',
+ 'WIFSIGNALED',
+ 'WIFSTOPPED',
+ 'WNOHANG',
+ 'WSTOPSIG',
+ 'WTERMSIG',
+ 'WUNTRACED',
+ 'W_OK',
+ 'X_OK',
+ ],
+ }
+
+ _MODULE_OVERRIDES = {
+ 'locale': {
+ 'setlocale': FakeSetLocale,
+ },
+
+ 'os': {
+ 'access': FakeAccess,
+ 'listdir': RestrictedPathFunction(os.listdir),
+
+ 'lstat': RestrictedPathFunction(os.stat),
+ 'open': FakeOpen,
+ 'readlink': FakeReadlink,
+ 'remove': FakeUnlink,
+ 'rename': FakeRename,
+ 'stat': RestrictedPathFunction(os.stat),
+ 'uname': FakeUname,
+ 'unlink': FakeUnlink,
+ 'urandom': FakeURandom,
+ 'utime': FakeUTime,
+ },
+
+ 'distutils.util': {
+ 'get_platform': FakeGetPlatform,
+ },
+ }
+
+ _ENABLED_FILE_TYPES = (
+ imp.PKG_DIRECTORY,
+ imp.PY_SOURCE,
+ imp.PY_COMPILED,
+ imp.C_BUILTIN,
+ )
+
+ def __init__(self,
+ module_dict,
+ imp_module=imp,
+ os_module=os,
+ dummy_thread_module=dummy_thread,
+ pickle_module=pickle):
+ """Initializer.
+
+ Args:
+ module_dict: Module dictionary to use for managing system modules.
+ Should be sys.modules.
+ imp_module, os_module, dummy_thread_module, pickle_module: References to
+ modules that exist in the dev_appserver that must be used by this class
+ in order to function, even if these modules have been unloaded from
+ sys.modules.
+ """
+ self._module_dict = module_dict
+ self._imp = imp_module
+ self._os = os_module
+ self._dummy_thread = dummy_thread_module
+ self._pickle = pickle_module
+ self._indent_level = 0
+
+ @Trace
+ def find_module(self, fullname, path=None):
+ """See PEP 302."""
+ if fullname in ('cPickle', 'thread'):
+ return self
+
+ search_path = path
+ all_modules = fullname.split('.')
+ try:
+ for index, current_module in enumerate(all_modules):
+ current_module_fullname = '.'.join(all_modules[:index + 1])
+ if (current_module_fullname == fullname and not
+ self.StubModuleExists(fullname)):
+ self.FindModuleRestricted(current_module,
+ current_module_fullname,
+ search_path)
+ else:
+ if current_module_fullname in self._module_dict:
+ module = self._module_dict[current_module_fullname]
+ else:
+ module = self.FindAndLoadModule(current_module,
+ current_module_fullname,
+ search_path)
+
+ if hasattr(module, '__path__'):
+ search_path = module.__path__
+ except CouldNotFindModuleError:
+ return None
+
+ return self
+
+ def StubModuleExists(self, name):
+ """Check if the named module has a stub replacement."""
+ if name in sys.builtin_module_names:
+ name = 'py_%s' % name
+ if name in dist.__all__:
+ return True
+ return False
+
+ def ImportStubModule(self, name):
+ """Import the stub module replacement for the specified module."""
+ if name in sys.builtin_module_names:
+ name = 'py_%s' % name
+ module = __import__(dist.__name__, {}, {}, [name])
+ return getattr(module, name)
+
+ @Trace
+ def FixModule(self, module):
+ """Prunes and overrides restricted module attributes.
+
+ Args:
+ module: The module to prune. This should be a new module whose attributes
+ reference back to the real module's __dict__ members.
+ """
+ if module.__name__ in self._WHITE_LIST_PARTIAL_MODULES:
+ allowed_symbols = self._WHITE_LIST_PARTIAL_MODULES[module.__name__]
+ for symbol in set(module.__dict__) - set(allowed_symbols):
+ if not (symbol.startswith('__') and symbol.endswith('__')):
+ del module.__dict__[symbol]
+
+ if module.__name__ in self._MODULE_OVERRIDES:
+ module.__dict__.update(self._MODULE_OVERRIDES[module.__name__])
+
+ @Trace
+ def FindModuleRestricted(self,
+ submodule,
+ submodule_fullname,
+ search_path):
+ """Locates a module while enforcing module import restrictions.
+
+ Args:
+ submodule: The short name of the submodule (i.e., the last section of
+ the fullname; for 'foo.bar' this would be 'bar').
+ submodule_fullname: The fully qualified name of the module to find (e.g.,
+ 'foo.bar').
+ search_path: List of paths to search for to find this module. Should be
+ None if the current sys.path should be used.
+
+ Returns:
+ Tuple (source_file, pathname, description) where:
+ source_file: File-like object that contains the module; in the case
+ of packages, this will be None, which implies to look at __init__.py.
+ pathname: String containing the full path of the module on disk.
+ description: Tuple returned by imp.find_module().
+ However, in the case of an import using a path hook (e.g. a zipfile),
+ source_file will be a PEP-302-style loader object, pathname will be None,
+ and description will be a tuple filled with None values.
+
+ Raises:
+ ImportError exception if the requested module was found, but importing
+ it is disallowed.
+
+ CouldNotFindModuleError exception if the requested module could not even
+ be found for import.
+ """
+ if search_path is None:
+ search_path = [None] + sys.path
+ for path_entry in search_path:
+ result = self.FindPathHook(submodule, submodule_fullname, path_entry)
+ if result is not None:
+ source_file, pathname, description = result
+ if description == (None, None, None):
+ return result
+ else:
+ break
+ else:
+ self.log('Could not find module "%s"', submodule_fullname)
+ raise CouldNotFindModuleError()
+
+ suffix, mode, file_type = description
+
+ if (file_type not in (self._imp.C_BUILTIN, self._imp.C_EXTENSION) and
+ not FakeFile.IsFileAccessible(pathname)):
+ error_message = 'Access to module file denied: %s' % pathname
+ logging.debug(error_message)
+ raise ImportError(error_message)
+
+ if (file_type not in self._ENABLED_FILE_TYPES and
+ submodule not in self._WHITE_LIST_C_MODULES):
+ error_message = ('Could not import "%s": Disallowed C-extension '
+ 'or built-in module' % submodule_fullname)
+ logging.debug(error_message)
+ raise ImportError(error_message)
+
+ return source_file, pathname, description
+
+ def FindPathHook(self, submodule, submodule_fullname, path_entry):
+ """Helper for FindModuleRestricted to find a module in a sys.path entry.
+
+ Args:
+ submodule: The short name of the submodule (the last section of the
+ fullname; for 'foo.bar' this would be 'bar').
+ submodule_fullname: The fully qualified name of the module to find.
+ path_entry: A single sys.path entry, or None representing the builtins.
+
+ Returns:
+ Either None (if nothing was found), or a triple (source_file, path_name,
+ description). See the doc string for FindModuleRestricted() for the
+ meaning of the latter.
+ """
+ if path_entry is None:
+ if submodule_fullname in sys.builtin_module_names:
+ try:
+ result = self._imp.find_module(submodule)
+ except ImportError:
+ pass
+ else:
+ source_file, pathname, description = result
+ suffix, mode, file_type = description
+ if file_type == self._imp.C_BUILTIN:
+ return result
+ return None
+
+
+ if path_entry in sys.path_importer_cache:
+ importer = sys.path_importer_cache[path_entry]
+ else:
+ importer = None
+ for hook in sys.path_hooks:
+ try:
+ importer = hook(path_entry)
+ break
+ except ImportError:
+ pass
+ sys.path_importer_cache[path_entry] = importer
+
+ if importer is None:
+ try:
+ return self._imp.find_module(submodule, [path_entry])
+ except ImportError:
+ pass
+ else:
+ loader = importer.find_module(submodule)
+ if loader is not None:
+ return (loader, None, (None, None, None))
+
+ return None
+
+ @Trace
+ def LoadModuleRestricted(self,
+ submodule_fullname,
+ source_file,
+ pathname,
+ description):
+ """Loads a module while enforcing module import restrictions.
+
+ As a byproduct, the new module will be added to the module dictionary.
+
+ Args:
+ submodule_fullname: The fully qualified name of the module to find (e.g.,
+ 'foo.bar').
+ source_file: File-like object that contains the module's source code,
+ or a PEP-302-style loader object.
+ pathname: String containing the full path of the module on disk.
+ description: Tuple returned by imp.find_module(), or (None, None, None)
+ in case source_file is a PEP-302-style loader object.
+
+ Returns:
+ The new module.
+
+ Raises:
+ ImportError exception if the specified module could not be loaded for
+ whatever reason.
+ """
+ if description == (None, None, None):
+ return source_file.load_module(submodule_fullname)
+
+ try:
+ try:
+ return self._imp.load_module(submodule_fullname,
+ source_file,
+ pathname,
+ description)
+ except:
+ if submodule_fullname in self._module_dict:
+ del self._module_dict[submodule_fullname]
+ raise
+
+ finally:
+ if source_file is not None:
+ source_file.close()
+
+ @Trace
+ def FindAndLoadModule(self,
+ submodule,
+ submodule_fullname,
+ search_path):
+ """Finds and loads a module, loads it, and adds it to the module dictionary.
+
+ Args:
+ submodule: Name of the module to import (e.g., baz).
+ submodule_fullname: Full name of the module to import (e.g., foo.bar.baz).
+ search_path: Path to use for searching for this submodule. For top-level
+ modules this should be None; otherwise it should be the __path__
+ attribute from the parent package.
+
+ Returns:
+ A new module instance that has been inserted into the module dictionary
+ supplied to __init__.
+
+ Raises:
+ ImportError exception if the module could not be loaded for whatever
+ reason (e.g., missing, not allowed).
+ """
+ module = self._imp.new_module(submodule_fullname)
+
+ if submodule_fullname == 'thread':
+ module.__dict__.update(self._dummy_thread.__dict__)
+ module.__name__ = 'thread'
+ elif submodule_fullname == 'cPickle':
+ module.__dict__.update(self._pickle.__dict__)
+ module.__name__ = 'cPickle'
+ elif submodule_fullname == 'os':
+ module.__dict__.update(self._os.__dict__)
+ elif self.StubModuleExists(submodule_fullname):
+ module = self.ImportStubModule(submodule_fullname)
+ else:
+ source_file, pathname, description = self.FindModuleRestricted(
+ submodule, submodule_fullname, search_path)
+ module = self.LoadModuleRestricted(submodule_fullname,
+ source_file,
+ pathname,
+ description)
+
+ module.__loader__ = self
+ self.FixModule(module)
+ if submodule_fullname not in self._module_dict:
+ self._module_dict[submodule_fullname] = module
+
+ if submodule_fullname == 'os':
+ os_path_name = module.path.__name__
+ os_path = self.FindAndLoadModule(os_path_name, os_path_name, search_path)
+ self._module_dict['os.path'] = os_path
+ module.__dict__['path'] = os_path
+
+ return module
+
+ @Trace
+ def GetParentPackage(self, fullname):
+ """Retrieves the parent package of a fully qualified module name.
+
+ Args:
+ fullname: Full name of the module whose parent should be retrieved (e.g.,
+ foo.bar).
+
+ Returns:
+ Module instance for the parent or None if there is no parent module.
+
+ Raises:
+ ImportError exception if the module's parent could not be found.
+ """
+ all_modules = fullname.split('.')
+ parent_module_fullname = '.'.join(all_modules[:-1])
+ if parent_module_fullname:
+ if self.find_module(fullname) is None:
+ raise ImportError('Could not find module %s' % fullname)
+
+ return self._module_dict[parent_module_fullname]
+ return None
+
+ @Trace
+ def GetParentSearchPath(self, fullname):
+ """Determines the search path of a module's parent package.
+
+ Args:
+ fullname: Full name of the module to look up (e.g., foo.bar).
+
+ Returns:
+ Tuple (submodule, search_path) where:
+ submodule: The last portion of the module name from fullname (e.g.,
+ if fullname is foo.bar, then this is bar).
+ search_path: List of paths that belong to the parent package's search
+ path or None if there is no parent package.
+
+ Raises:
+ ImportError exception if the module or its parent could not be found.
+ """
+ submodule = GetSubmoduleName(fullname)
+ parent_package = self.GetParentPackage(fullname)
+ search_path = None
+ if parent_package is not None and hasattr(parent_package, '__path__'):
+ search_path = parent_package.__path__
+ return submodule, search_path
+
+ @Trace
+ def GetModuleInfo(self, fullname):
+ """Determines the path on disk and the search path of a module or package.
+
+ Args:
+ fullname: Full name of the module to look up (e.g., foo.bar).
+
+ Returns:
+ Tuple (pathname, search_path, submodule) where:
+ pathname: String containing the full path of the module on disk,
+ or None if the module wasn't loaded from disk (e.g. from a zipfile).
+ search_path: List of paths that belong to the found package's search
+ path or None if found module is not a package.
+ submodule: The relative name of the submodule that's being imported.
+ """
+ submodule, search_path = self.GetParentSearchPath(fullname)
+ source_file, pathname, description = self.FindModuleRestricted(
+ submodule, fullname, search_path)
+ suffix, mode, file_type = description
+ module_search_path = None
+ if file_type == self._imp.PKG_DIRECTORY:
+ module_search_path = [pathname]
+ pathname = os.path.join(pathname, '__init__%spy' % os.extsep)
+ return pathname, module_search_path, submodule
+
+ @Trace
+ def load_module(self, fullname):
+ """See PEP 302."""
+ all_modules = fullname.split('.')
+ submodule = all_modules[-1]
+ parent_module_fullname = '.'.join(all_modules[:-1])
+ search_path = None
+ if parent_module_fullname and parent_module_fullname in self._module_dict:
+ parent_module = self._module_dict[parent_module_fullname]
+ if hasattr(parent_module, '__path__'):
+ search_path = parent_module.__path__
+
+ return self.FindAndLoadModule(submodule, fullname, search_path)
+
+ @Trace
+ def is_package(self, fullname):
+ """See PEP 302 extensions."""
+ submodule, search_path = self.GetParentSearchPath(fullname)
+ source_file, pathname, description = self.FindModuleRestricted(
+ submodule, fullname, search_path)
+ suffix, mode, file_type = description
+ if file_type == self._imp.PKG_DIRECTORY:
+ return True
+ return False
+
+ @Trace
+ def get_source(self, fullname):
+ """See PEP 302 extensions."""
+ full_path, search_path, submodule = self.GetModuleInfo(fullname)
+ if full_path is None:
+ return None
+ source_file = open(full_path)
+ try:
+ return source_file.read()
+ finally:
+ source_file.close()
+
+ @Trace
+ def get_code(self, fullname):
+ """See PEP 302 extensions."""
+ full_path, search_path, submodule = self.GetModuleInfo(fullname)
+ if full_path is None:
+ return None
+ source_file = open(full_path)
+ try:
+ source_code = source_file.read()
+ finally:
+ source_file.close()
+
+ source_code = source_code.replace('\r\n', '\n')
+ if not source_code.endswith('\n'):
+ source_code += '\n'
+
+ return compile(source_code, full_path, 'exec')
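+
+# A minimal installation sketch, mirroring what ExecuteCGI() does further
+# below:
+#   hook = HardenedModulesHook(sys.modules)
+#   sys.meta_path = [hook]
+#   sys.path_importer_cache.clear()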
+
+
+
+def ModuleHasValidMainFunction(module):
+ """Determines if a module has a main function that takes no arguments.
+
+ This includes functions that have arguments with defaults that are all
+ assigned, thus requiring no additional arguments in order to be called.
+
+ Args:
+ module: A types.ModuleType instance.
+
+ Returns:
+ True if the module has a valid, reusable main function; False otherwise.
+ """
+ if hasattr(module, 'main') and type(module.main) is types.FunctionType:
+ arg_names, var_args, var_kwargs, default_values = inspect.getargspec(
+ module.main)
+ if len(arg_names) == 0:
+ return True
+ if default_values is not None and len(arg_names) == len(default_values):
+ return True
+ return False
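+
+# For example, both of these count as valid, reusable main() functions:
+#   def main(): ...
+#   def main(argv=None): ...
+# whereas "def main(argv):" does not, because argv lacks a default value.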
+
+
+def GetScriptModuleName(handler_path):
+ """Determines the fully-qualified Python module name of a script on disk.
+
+ Args:
+ handler_path: CGI path stored in the application configuration (as a path
+ like 'foo/bar/baz.py'). May contain $PYTHON_LIB references.
+
+ Returns:
+ String containing the corresponding module name (e.g., 'foo.bar.baz').
+ """
+ if handler_path.startswith(PYTHON_LIB_VAR + '/'):
+ handler_path = handler_path[len(PYTHON_LIB_VAR):]
+ handler_path = os.path.normpath(handler_path)
+
+ extension_index = handler_path.rfind('.py')
+ if extension_index != -1:
+ handler_path = handler_path[:extension_index]
+ module_fullname = handler_path.replace(os.sep, '.')
+ module_fullname = module_fullname.strip('.')
+ module_fullname = re.sub(r'\.+', '.', module_fullname)
+
+ if module_fullname.endswith('.__init__'):
+ module_fullname = module_fullname[:-len('.__init__')]
+
+ return module_fullname
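+
+# For example:
+#   GetScriptModuleName('foo/bar/baz.py')      -> 'foo.bar.baz'
+#   GetScriptModuleName('foo/bar/__init__.py') -> 'foo.bar'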
+
+
+def FindMissingInitFiles(cgi_path, module_fullname, isfile=os.path.isfile):
+ """Determines which __init__.py files are missing from a module's parent
+ packages.
+
+ Args:
+ cgi_path: Absolute path of the CGI module file on disk.
+ module_fullname: Fully qualified Python module name used to import the
+ cgi_path module.
+ isfile: Used for testing.
+
+ Returns:
+ List containing the paths to the missing __init__.py files.
+ """
+ missing_init_files = []
+
+ if cgi_path.endswith('.py'):
+ module_base = os.path.dirname(cgi_path)
+ else:
+ module_base = cgi_path
+
+ depth_count = module_fullname.count('.')
+ if cgi_path.endswith('__init__.py') or not cgi_path.endswith('.py'):
+ depth_count += 1
+
+ for index in xrange(depth_count):
+ current_init_file = os.path.abspath(
+ os.path.join(module_base, '__init__.py'))
+
+ if not isfile(current_init_file):
+ missing_init_files.append(current_init_file)
+
+ module_base = os.path.abspath(os.path.join(module_base, os.pardir))
+
+ return missing_init_files
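+
+# For example, for a module imported as 'foo.bar.baz' from
+# '/app/foo/bar/baz.py' (hypothetical paths), the files checked are
+# '/app/foo/bar/__init__.py' and '/app/foo/__init__.py'.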
+
+
+def LoadTargetModule(handler_path,
+ cgi_path,
+ import_hook,
+ module_dict=sys.modules):
+ """Loads a target CGI script by importing it as a Python module.
+
+ If the module for the target CGI script has already been loaded before,
+ the new module will be loaded in its place using the same module object,
+ possibly overwriting existing module attributes.
+
+ Args:
+ handler_path: CGI path stored in the application configuration (as a path
+ like 'foo/bar/baz.py'). Should not have $PYTHON_LIB references.
+ cgi_path: Absolute path to the CGI script file on disk.
+ import_hook: Instance of HardenedModulesHook to use for module loading.
+ module_dict: Used for dependency injection.
+
+ Returns:
+ Tuple (module_fullname, script_module, module_code) where:
+ module_fullname: Fully qualified module name used to import the script.
+ script_module: The ModuleType object corresponding to the module_fullname.
+ If the module has not already been loaded, this will be an empty
+ shell of a module.
+ module_code: Code object (returned by compile built-in) corresponding
+ to the cgi_path to run. If the script_module was previously loaded
+ and has a main() function that can be reused, this will be None.
+ """
+ module_fullname = GetScriptModuleName(handler_path)
+ script_module = module_dict.get(module_fullname)
+ module_code = None
+ if script_module is not None and ModuleHasValidMainFunction(script_module):
+ logging.debug('Reusing main() function of module "%s"', module_fullname)
+ else:
+ if script_module is None:
+ script_module = imp.new_module(module_fullname)
+ script_module.__loader__ = import_hook
+
+ try:
+ module_code = import_hook.get_code(module_fullname)
+ full_path, search_path, submodule = (
+ import_hook.GetModuleInfo(module_fullname))
+ script_module.__file__ = full_path
+ if search_path is not None:
+ script_module.__path__ = search_path
+ except:
+ exc_type, exc_value, exc_tb = sys.exc_info()
+ import_error_message = str(exc_type)
+ if exc_value:
+ import_error_message += ': ' + str(exc_value)
+
+ logging.exception('Encountered error loading module "%s": %s',
+ module_fullname, import_error_message)
+ missing_inits = FindMissingInitFiles(cgi_path, module_fullname)
+ if missing_inits:
+ logging.warning('Missing package initialization files: %s',
+ ', '.join(missing_inits))
+ else:
+ logging.error('Parent package initialization files are present, '
+ 'but must be broken')
+
+ independent_load_successful = True
+
+ if not os.path.isfile(cgi_path):
+ independent_load_successful = False
+ else:
+ try:
+ source_file = open(cgi_path)
+ try:
+ module_code = compile(source_file.read(), cgi_path, 'exec')
+ script_module.__file__ = cgi_path
+ finally:
+ source_file.close()
+
+ except OSError:
+ independent_load_successful = False
+
+ if not independent_load_successful:
+ raise exc_type, exc_value, exc_tb
+
+ module_dict[module_fullname] = script_module
+
+ return module_fullname, script_module, module_code
+
+
+def ExecuteOrImportScript(handler_path, cgi_path, import_hook):
+ """Executes a CGI script by importing it as a new module.
+
+ This possibly reuses the module's main() function if it is defined and
+ takes no arguments.
+
+ Basic technique lifted from PEP 338 and Python2.5's runpy module. See:
+ http://www.python.org/dev/peps/pep-0338/
+
+ See the section entitled "Import Statements and the Main Module" to understand
+ why a module named '__main__' cannot do relative imports. To get around this,
+ the requested module's path could be added to sys.path on each request.
+
+ Args:
+ handler_path: CGI path stored in the application configuration (as a path
+ like 'foo/bar/baz.py'). Should not have $PYTHON_LIB references.
+ cgi_path: Absolute path to the CGI script file on disk.
+ import_hook: Instance of HardenedModulesHook to use for module loading.
+
+ Returns:
+ True if the response code had an error status (e.g., 404), or False if it
+ did not.
+
+ Raises:
+ Any kind of exception that could have been raised when loading the target
+ module, running a target script, or executing the application code itself.
+ """
+ module_fullname, script_module, module_code = LoadTargetModule(
+ handler_path, cgi_path, import_hook)
+ script_module.__name__ = '__main__'
+ sys.modules['__main__'] = script_module
+ try:
+ if module_code:
+ exec module_code in script_module.__dict__
+ else:
+ script_module.main()
+
+ sys.stdout.flush()
+ sys.stdout.seek(0)
+ try:
+ headers = mimetools.Message(sys.stdout)
+ finally:
+ sys.stdout.seek(0, 2)
+ status_header = headers.get('status')
+ error_response = False
+ if status_header:
+ try:
+ status_code = int(status_header.split(' ', 1)[0])
+ error_response = status_code >= 400
+ except ValueError:
+ error_response = True
+
+ if not error_response:
+ try:
+ parent_package = import_hook.GetParentPackage(module_fullname)
+ except Exception:
+ parent_package = None
+
+ if parent_package is not None:
+ submodule = GetSubmoduleName(module_fullname)
+ setattr(parent_package, submodule, script_module)
+
+ return error_response
+ finally:
+ script_module.__name__ = module_fullname
+
+
+def ExecuteCGI(root_path,
+ handler_path,
+ cgi_path,
+ env,
+ infile,
+ outfile,
+ module_dict,
+ exec_script=ExecuteOrImportScript):
+ """Executes Python file in this process as if it were a CGI.
+
+ Does not return an HTTP response line. CGIs should output headers followed by
+ the body content.
+
+ The modules in sys.modules should be the same before and after the CGI is
+ executed, with the specific exception of encodings-related modules, which
+ cannot be reloaded and thus must always stay in sys.modules.
+
+ Args:
+ root_path: Path to the root of the application.
+ handler_path: CGI path stored in the application configuration (as a path
+ like 'foo/bar/baz.py'). May contain $PYTHON_LIB references.
+ cgi_path: Absolute path to the CGI script file on disk.
+ env: Dictionary of environment variables to use for the execution.
+ infile: File-like object to read HTTP request input data from.
+ outfile: File-like object to write HTTP response data to.
+ module_dict: Dictionary in which application-loaded modules should be
+ preserved between requests. This removes the need to reload modules that
+ are reused between requests, significantly increasing load performance.
+ This dictionary must be separate from the sys.modules dictionary.
+ exec_script: Used for dependency injection.
+ """
+ old_module_dict = sys.modules.copy()
+ old_builtin = __builtin__.__dict__.copy()
+ old_argv = sys.argv
+ old_stdin = sys.stdin
+ old_stdout = sys.stdout
+ old_env = os.environ.copy()
+ old_cwd = os.getcwd()
+ old_file_type = types.FileType
+ reset_modules = False
+
+ try:
+ ClearAllButEncodingsModules(sys.modules)
+ sys.modules.update(module_dict)
+ sys.argv = [cgi_path]
+ sys.stdin = cStringIO.StringIO(infile.getvalue())
+ sys.stdout = outfile
+ os.environ.clear()
+ os.environ.update(env)
+ before_path = sys.path[:]
+ cgi_dir = os.path.normpath(os.path.dirname(cgi_path))
+ root_path = os.path.normpath(os.path.abspath(root_path))
+ if cgi_dir.startswith(root_path + os.sep):
+ os.chdir(cgi_dir)
+ else:
+ os.chdir(root_path)
+
+ hook = HardenedModulesHook(sys.modules)
+ sys.meta_path = [hook]
+ if hasattr(sys, 'path_importer_cache'):
+ sys.path_importer_cache.clear()
+
+ __builtin__.file = FakeFile
+ __builtin__.open = FakeFile
+ types.FileType = FakeFile
+
+ __builtin__.buffer = NotImplementedFakeClass
+
+ logging.debug('Executing CGI with env:\n%s', pprint.pformat(env))
+ try:
+ reset_modules = exec_script(handler_path, cgi_path, hook)
+ except SystemExit, e:
+ logging.debug('CGI exited with status: %s', e)
+ except:
+ reset_modules = True
+ raise
+
+ finally:
+ sys.meta_path = []
+ sys.path_importer_cache.clear()
+
+ _ClearTemplateCache(sys.modules)
+
+ module_dict.update(sys.modules)
+ ClearAllButEncodingsModules(sys.modules)
+ sys.modules.update(old_module_dict)
+
+ __builtin__.__dict__.update(old_builtin)
+ sys.argv = old_argv
+ sys.stdin = old_stdin
+ sys.stdout = old_stdout
+
+ sys.path[:] = before_path
+
+ os.environ.clear()
+ os.environ.update(old_env)
+ os.chdir(old_cwd)
+
+ types.FileType = old_file_type
+
+
+class CGIDispatcher(URLDispatcher):
+ """Dispatcher that executes Python CGI scripts."""
+
+ def __init__(self,
+ module_dict,
+ root_path,
+ path_adjuster,
+ setup_env=SetupEnvironment,
+ exec_cgi=ExecuteCGI,
+ create_logging_handler=ApplicationLoggingHandler):
+ """Initializer.
+
+ Args:
+ module_dict: Dictionary in which application-loaded modules should be
+ preserved between requests. This dictionary must be separate from the
+ sys.modules dictionary.
+ root_path: Path to the root of the application.
+ path_adjuster: Instance of PathAdjuster to use for finding absolute
+ paths of CGI files on disk.
+ setup_env, exec_cgi, create_logging_handler: Used for dependency
+ injection.
+ """
+ self._module_dict = module_dict
+ self._root_path = root_path
+ self._path_adjuster = path_adjuster
+ self._setup_env = setup_env
+ self._exec_cgi = exec_cgi
+ self._create_logging_handler = create_logging_handler
+
+ def Dispatch(self,
+ relative_url,
+ path,
+ headers,
+ infile,
+ outfile,
+ base_env_dict=None):
+ """Dispatches the Python CGI."""
+ handler = self._create_logging_handler()
+ logging.getLogger().addHandler(handler)
+ before_level = logging.root.level
+ try:
+ env = {}
+ if base_env_dict:
+ env.update(base_env_dict)
+ cgi_path = self._path_adjuster.AdjustPath(path)
+ env.update(self._setup_env(cgi_path, relative_url, headers, infile))
+ self._exec_cgi(self._root_path,
+ path,
+ cgi_path,
+ env,
+ infile,
+ outfile,
+ self._module_dict)
+ handler.AddDebuggingConsole(relative_url, env, outfile)
+ finally:
+ logging.root.level = before_level
+ logging.getLogger().removeHandler(handler)
+
+ def __str__(self):
+ """Returns a string representation of this dispatcher."""
+ return 'CGI dispatcher'
+
+
+class LocalCGIDispatcher(CGIDispatcher):
+ """Dispatcher that executes local functions like they're CGIs.
+
+ The contents of sys.modules will be preserved for local CGIs running this
+ dispatcher, but module hardening will still occur for any new imports. Thus,
+ be sure that any local CGIs have loaded all of their dependent modules
+ _before_ they are executed.
+ """
+
+ def __init__(self, module_dict, path_adjuster, cgi_func):
+ """Initializer.
+
+ Args:
+ module_dict: Passed to CGIDispatcher.
+ path_adjuster: Passed to CGIDispatcher.
+ cgi_func: Callable function taking no parameters that should be
+ executed in a CGI environment in the current process.
+ """
+ self._cgi_func = cgi_func
+
+ def curried_exec_script(*args, **kwargs):
+ cgi_func()
+ return False
+
+ def curried_exec_cgi(*args, **kwargs):
+ kwargs['exec_script'] = curried_exec_script
+ return ExecuteCGI(*args, **kwargs)
+
+ CGIDispatcher.__init__(self,
+ module_dict,
+ '',
+ path_adjuster,
+ exec_cgi=curried_exec_cgi)
+
+ def Dispatch(self, *args, **kwargs):
+ """Preserves sys.modules for CGIDispatcher.Dispatch."""
+ self._module_dict.update(sys.modules)
+ CGIDispatcher.Dispatch(self, *args, **kwargs)
+
+ def __str__(self):
+ """Returns a string representation of this dispatcher."""
+ return 'Local CGI dispatcher for %s' % self._cgi_func
+
+
+
+class PathAdjuster(object):
+ """Adjusts application file paths to paths relative to the application or
+ external library directories."""
+
+ def __init__(self, root_path):
+ """Initializer.
+
+ Args:
+ root_path: Path to the root of the application running on the server.
+ """
+ self._root_path = os.path.abspath(root_path)
+
+ def AdjustPath(self, path):
+ """Adjusts application file paths to relative to the application.
+
+ More precisely this method adjusts application file path to paths
+ relative to the application or external library directories.
+
+ Handler paths that start with $PYTHON_LIB will be converted to paths
+ relative to the google directory.
+
+ Args:
+ path: File path that should be adjusted.
+
+ Returns:
+ The adjusted path.
+ """
+ if path.startswith(PYTHON_LIB_VAR):
+ path = os.path.join(os.path.dirname(os.path.dirname(google.__file__)),
+ path[len(PYTHON_LIB_VAR) + 1:])
+ else:
+ path = os.path.join(self._root_path, path)
+
+ return path
+
+
+
+class StaticFileConfigMatcher(object):
+ """Keeps track of file/directory specific application configuration.
+
+ Specifically:
+ - Computes mime type based on URLMap and file extension.
+ - Decides on cache expiration time based on URLMap and default expiration.
+
+ To determine the mime type, we first see if there is any mime-type property
+ on each URLMap entry. If none is specified, we use the mimetypes module to
+ guess the mime type from the file path extension, and use
+ application/octet-stream if we can't find the mimetype.
+ """
+
+ def __init__(self,
+ url_map_list,
+ path_adjuster,
+ default_expiration):
+ """Initializer.
+
+ Args:
+ url_map_list: List of appinfo.URLMap objects.
+ If empty or None, then we always use the mime type chosen by the
+ mimetypes module.
+ path_adjuster: PathAdjuster object used to adjust application file paths.
+ default_expiration: String describing default expiration time for browser
+ based caching of static files. If set to None this disallows any
+ browser caching of static content.
+ """
+ if default_expiration is not None:
+ self._default_expiration = appinfo.ParseExpiration(default_expiration)
+ else:
+ self._default_expiration = None
+
+ self._patterns = []
+
+ if url_map_list:
+ for entry in url_map_list:
+ handler_type = entry.GetHandlerType()
+ if handler_type not in (appinfo.STATIC_FILES, appinfo.STATIC_DIR):
+ continue
+
+ if handler_type == appinfo.STATIC_FILES:
+ regex = entry.upload + '$'
+ else:
+ path = entry.static_dir
+ if path[-1] == '/':
+ path = path[:-1]
+ regex = re.escape(path + os.path.sep) + r'(.*)'
+
+ try:
+ path_re = re.compile(regex)
+ except re.error, e:
+ raise InvalidAppConfigError('regex %s does not compile: %s' %
+ (regex, e))
+
+ if self._default_expiration is None:
+ expiration = 0
+ elif entry.expiration is None:
+ expiration = self._default_expiration
+ else:
+ expiration = appinfo.ParseExpiration(entry.expiration)
+
+ self._patterns.append((path_re, entry.mime_type, expiration))
+
+ def IsStaticFile(self, path):
+ """Tests if the given path points to a "static" file.
+
+ Args:
+ path: String containing the file's path relative to the app.
+
+ Returns:
+ Boolean, True if the file was configured to be static.
+ """
+ for (path_re, _, _) in self._patterns:
+ if path_re.match(path):
+ return True
+ return False
+
+ def GetMimeType(self, path):
+ """Returns the mime type that we should use when serving the specified file.
+
+ Args:
+ path: String containing the file's path relative to the app.
+
+ Returns:
+ String containing the mime type to use. Will be 'application/octet-stream'
+ if we have no idea what it should be.
+ """
+ for (path_re, mimetype, unused_expiration) in self._patterns:
+ if mimetype is not None:
+ the_match = path_re.match(path)
+ if the_match:
+ return mimetype
+
+ unused_filename, extension = os.path.splitext(path)
+ return mimetypes.types_map.get(extension, 'application/octet-stream')
+
+ def GetExpiration(self, path):
+ """Returns the cache expiration duration to be users for the given file.
+
+ Args:
+ path: String containing the file's path relative to the app.
+
+ Returns:
+ Integer number of seconds to be used for browser cache expiration time.
+ """
+ for (path_re, unused_mimetype, expiration) in self._patterns:
+ the_match = path_re.match(path)
+ if the_match:
+ return expiration
+
+ return self._default_expiration or 0
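+
+# A minimal sketch (hypothetical values, not part of the SDK): with an empty
+# url_map_list, mime types fall back to the mimetypes module:
+#   matcher = StaticFileConfigMatcher(None, path_adjuster, None)
+#   matcher.GetMimeType('img/logo.png')  -> 'image/png'
+#   matcher.GetMimeType('data/file.xyz') -> 'application/octet-stream'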
+
+
+
+
+def ReadDataFile(data_path, openfile=file):
+ """Reads a file on disk, returning a corresponding HTTP status and data.
+
+ Args:
+ data_path: Path to the file on disk to read.
+ openfile: Used for dependency injection.
+
+ Returns:
+ Tuple (status, data) where status is an HTTP response code, and data is
+ the data read; will be an empty string if an error occurred or the
+ file was empty.
+ """
+ status = httplib.INTERNAL_SERVER_ERROR
+ data = ""
+
+ try:
+ data_file = openfile(data_path, 'rb')
+ try:
+ data = data_file.read()
+ finally:
+ data_file.close()
+ status = httplib.OK
+ except (OSError, IOError), e:
+ logging.error('Error encountered reading file "%s":\n%s', data_path, e)
+ if e.errno in FILE_MISSING_EXCEPTIONS:
+ status = httplib.NOT_FOUND
+ else:
+ status = httplib.FORBIDDEN
+
+ return status, data
+
+
+class FileDispatcher(URLDispatcher):
+ """Dispatcher that reads data files from disk."""
+
+ def __init__(self,
+ path_adjuster,
+ static_file_config_matcher,
+ read_data_file=ReadDataFile):
+ """Initializer.
+
+ Args:
+ path_adjuster: Instance of PathAdjuster to use for finding absolute
+ paths of data files on disk.
+ static_file_config_matcher: StaticFileConfigMatcher object.
+ read_data_file: Used for dependency injection.
+ """
+ self._path_adjuster = path_adjuster
+ self._static_file_config_matcher = static_file_config_matcher
+ self._read_data_file = read_data_file
+
+ def Dispatch(self,
+ relative_url,
+ path,
+ headers,
+ infile,
+ outfile,
+ base_env_dict=None):
+ """Reads the file and returns the response status and data."""
+ full_path = self._path_adjuster.AdjustPath(path)
+ status, data = self._read_data_file(full_path)
+ content_type = self._static_file_config_matcher.GetMimeType(path)
+ expiration = self._static_file_config_matcher.GetExpiration(path)
+
+ outfile.write('Status: %d\r\n' % status)
+ outfile.write('Content-type: %s\r\n' % content_type)
+ if expiration:
+ outfile.write('Expires: %s\r\n'
+ % email.Utils.formatdate(time.time() + expiration,
+ usegmt=True))
+ outfile.write('Cache-Control: public, max-age=%i\r\n' % expiration)
+ outfile.write('\r\n')
+ outfile.write(data)
+
+ def __str__(self):
+ """Returns a string representation of this dispatcher."""
+ return 'File dispatcher'
+
+
+_IGNORE_RESPONSE_HEADERS = frozenset([
+ 'content-encoding', 'accept-encoding', 'transfer-encoding',
+ 'server', 'date',
+ ])
+
+
+def IgnoreHeadersRewriter(status_code, status_message, headers, body):
+ """Ignore specific response headers.
+
+ Certain response headers cannot be modified by an Application. For a
+ complete list of these headers please see:
+
+ http://code.google.com/appengine/docs/webapp/responseclass.html#Disallowed_HTTP_Response_Headers
+
+ This rewriter simply removes those headers.
+ """
+ for h in _IGNORE_RESPONSE_HEADERS:
+ if h in headers:
+ del headers[h]
+
+ return status_code, status_message, headers, body
+
+
+def ParseStatusRewriter(status_code, status_message, headers, body):
+ """Parse status header, if it exists.
+
+ Handles the server-side 'status' header, which instructs the server to change
+ the HTTP response code accordingly. Handles the 'location' header, which
+ issues an HTTP 302 redirect to the client. (The 'content-length' header is
+ corrected separately by ContentLengthRewriter.)
+
+ If the 'status' header supplied by the client is invalid, this method will
+ set the response to a 500 with an error message as content.
+ """
+ location_value = headers.getheader('location')
+ status_value = headers.getheader('status')
+ if status_value:
+ response_status = status_value
+ del headers['status']
+ elif location_value:
+ response_status = '%d Redirecting' % httplib.FOUND
+ else:
+ return status_code, status_message, headers, body
+
+ status_parts = response_status.split(' ', 1)
+ status_code, status_message = (status_parts + [''])[:2]
+ try:
+ status_code = int(status_code)
+ except ValueError:
+ status_code = 500
+ body = cStringIO.StringIO('Error: Invalid "status" header value returned.')
+
+ return status_code, status_message, headers, body
+
+
+def CacheRewriter(status_code, status_message, headers, body):
+ """Adds default Cache-Control and Expires headers if they are missing."""
+ if 'Cache-Control' not in headers:
+ headers['Cache-Control'] = 'no-cache'
+ if 'Expires' not in headers:
+ headers['Expires'] = 'Fri, 01 Jan 1990 00:00:00 GMT'
+ return status_code, status_message, headers, body
+
+
+def ContentLengthRewriter(status_code, status_message, headers, body):
+ """Rewrite the Content-Length header.
+
+ Even though Content-Length is not a user modifiable header, App Engine
+ sends a correct Content-Length to the user based on the actual response.
+ """
+ current_position = body.tell()
+ body.seek(0, 2)
+
+ headers['Content-Length'] = str(body.tell() - current_position)
+ body.seek(current_position)
+ return status_code, status_message, headers, body
+
+
+def CreateResponseRewritersChain():
+ """Create the default response rewriter chain.
+
+ A response rewriter is a function that gets a final chance to change part
+ of the dev_appserver's response. A rewriter is not like a dispatcher in
+ that it is called after every request has been handled by the dispatchers,
+ regardless of which dispatcher was used.
+
+ The order in which rewriters are registered will be the order in which they
+ are used to rewrite the response. Modifications from earlier rewriters
+ are used as input to later rewriters.
+
+ A response rewriter is a function that can rewrite the response in any way.
+ The function can return modified values or the original values it was
+ passed.
+
+ A rewriter function has the following parameters and return values:
+
+ Args:
+ status_code: Status code of response from dev_appserver or previous
+ rewriter.
+ status_message: Text corresponding to status code.
+ headers: mimetools.Message instance with parsed headers. NOTE: These
+ headers may contain their own 'status' field, but the default
+ dev_appserver implementation will remove it. Future rewriters
+ should avoid re-introducing the status field and return new codes
+ instead.
+ body: File object containing the body of the response. The position of
+ this file may not be at the start of the file. Any content before the
+ file's position is considered not to be part of the final body.
+
+ Returns:
+ status_code: Rewritten status code or original.
+    status_message: Rewritten message or original.
+ headers: Rewritten/modified headers or original.
+ body: Rewritten/modified body or original.
+
+ Returns:
+ List of response rewriters.
+ """
+ return [IgnoreHeadersRewriter,
+ ParseStatusRewriter,
+ CacheRewriter,
+ ContentLengthRewriter,
+ ]
+
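+
+# Illustrative sketch, not part of the SDK: a minimal custom rewriter that
+# follows the contract documented above. The 'X-Dev-Server' header name is
+# invented for this example; the only requirement is that a rewriter accept
+# and return the (status_code, status_message, headers, body) tuple.
+def _ExampleServerHeaderRewriter(status_code, status_message, headers, body):
+  """Example rewriter (illustration only): tags dev_appserver responses."""
+  if 'X-Dev-Server' not in headers:
+    headers['X-Dev-Server'] = 'development'
+  return status_code, status_message, headers, body
+
+# It could be appended to the default chain, e.g.:
+#   rewriters = CreateResponseRewritersChain() + [_ExampleServerHeaderRewriter]
+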
+
+def RewriteResponse(response_file, response_rewriters=None):
+ """Allows final rewrite of dev_appserver response.
+
+ This function receives the unparsed HTTP response from the application
+ or internal handler, parses out the basic structure and feeds that structure
+ in to a chain of response rewriters.
+
+ It also makes sure the final HTTP headers are properly terminated.
+
+ For more about response rewriters, please see documentation for
+  CreateResponseRewritersChain.
+
+ Args:
+    response_file: File-like object containing the full HTTP response,
+      including the response code, all headers, and the response body.
+    response_rewriters: A list of response rewriters. If None is provided,
+      a new chain is created using CreateResponseRewritersChain.
+
+ Returns:
+ Tuple (status_code, status_message, header, body) where:
+ status_code: Integer HTTP response status (e.g., 200, 302, 404, 500)
+ status_message: String containing an informational message about the
+ response code, possibly derived from the 'status' header, if supplied.
+      header: String containing the HTTP headers of the response. Each
+        header, including the last, is terminated by CRLF; the blank line
+        that ends the header block is not included.
+ body: String containing the body of the response.
+ """
+ if response_rewriters is None:
+ response_rewriters = CreateResponseRewritersChain()
+
+ status_code = 200
+ status_message = 'Good to go'
+ headers = mimetools.Message(response_file)
+
+ for response_rewriter in response_rewriters:
+ status_code, status_message, headers, response_file = response_rewriter(
+ status_code,
+ status_message,
+ headers,
+ response_file)
+
+ header_list = []
+ for header in headers.headers:
+ header = header.rstrip('\n')
+ header = header.rstrip('\r')
+ header_list.append(header)
+
+ header_data = '\r\n'.join(header_list) + '\r\n'
+ return status_code, status_message, header_data, response_file.read()
+
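+
+# Illustrative sketch, not part of the SDK: feeding a hand-built CGI-style
+# response through RewriteResponse. The payload is invented purely for
+# demonstration; it exercises ParseStatusRewriter (the 'Status' header),
+# CacheRewriter and ContentLengthRewriter.
+def _ExampleRewriteResponseUsage():
+  """Example only; runs the default rewriter chain end to end."""
+  raw_response = cStringIO.StringIO('Status: 404 Not Found\r\n'
+                                    'Content-Type: text/plain\r\n'
+                                    '\r\n'
+                                    'missing')
+  status_code, status_message, header_data, body = (
+      RewriteResponse(raw_response))
+  assert status_code == 404
+  assert body == 'missing'
+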
+
+
+class ModuleManager(object):
+ """Manages loaded modules in the runtime.
+
+ Responsible for monitoring and reporting about file modification times.
+ Modules can be loaded from source or precompiled byte-code files. When a
+ file has source code, the ModuleManager monitors the modification time of
+ the source file even if the module itself is loaded from byte-code.
+ """
+
+ def __init__(self, modules):
+ """Initializer.
+
+ Args:
+ modules: Dictionary containing monitored modules.
+ """
+ self._modules = modules
+ self._default_modules = self._modules.copy()
+ self._save_path_hooks = sys.path_hooks[:]
+ self._modification_times = {}
+
+ @staticmethod
+ def GetModuleFile(module, is_file=os.path.isfile):
+ """Helper method to try to determine modules source file.
+
+ Args:
+ module: Module object to get file for.
+ is_file: Function used to determine if a given path is a file.
+
+ Returns:
+ Path of the module's corresponding Python source file if it exists, or
+ just the module's compiled Python file. If the module has an invalid
+ __file__ attribute, None will be returned.
+ """
+ module_file = getattr(module, '__file__', None)
+ if module_file is None:
+ return None
+
+ source_file = module_file[:module_file.rfind('py') + 2]
+
+ if is_file(source_file):
+ return source_file
+ return module.__file__
+
+ def AreModuleFilesModified(self):
+ """Determines if any monitored files have been modified.
+
+ Returns:
+ True if one or more files have been modified, False otherwise.
+ """
+ for name, (mtime, fname) in self._modification_times.iteritems():
+ if name not in self._modules:
+ continue
+
+ module = self._modules[name]
+
+ if not os.path.isfile(fname):
+ return True
+
+ if mtime != os.path.getmtime(fname):
+ return True
+
+ return False
+
+ def UpdateModuleFileModificationTimes(self):
+ """Records the current modification times of all monitored modules."""
+ self._modification_times.clear()
+ for name, module in self._modules.items():
+ if not isinstance(module, types.ModuleType):
+ continue
+ module_file = self.GetModuleFile(module)
+ if not module_file:
+ continue
+ try:
+ self._modification_times[name] = (os.path.getmtime(module_file),
+ module_file)
+ except OSError, e:
+ if e.errno not in FILE_MISSING_EXCEPTIONS:
+ raise e
+
+ def ResetModules(self):
+ """Clear modules so that when request is run they are reloaded."""
+ self._modules.clear()
+ self._modules.update(self._default_modules)
+ sys.path_hooks[:] = self._save_path_hooks
+
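+
+# Illustrative sketch, not part of the SDK: the monitoring cycle the request
+# handler drives around each request, shown here on a private copy of
+# sys.modules.
+def _ExampleModuleManagerCycle():
+  """Example only; reload modules when their source files change on disk."""
+  manager = ModuleManager(dict(sys.modules))
+  manager.UpdateModuleFileModificationTimes()
+  if manager.AreModuleFilesModified():
+    manager.ResetModules()
+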
+
+
+def _ClearTemplateCache(module_dict=sys.modules):
+ """Clear template cache in webapp.template module.
+
+ Attempts to load template module. Ignores failure. If module loads, the
+ template cache is cleared.
+
+ Args:
+ module_dict: Used for dependency injection.
+ """
+ template_module = module_dict.get('google.appengine.ext.webapp.template')
+ if template_module is not None:
+ template_module.template_cache.clear()
+
+
+
+def CreateRequestHandler(root_path,
+ login_url,
+ require_indexes=False,
+ static_caching=True):
+ """Creates a new BaseHTTPRequestHandler sub-class.
+
+ This class will be used with the Python BaseHTTPServer module's HTTP server.
+
+ Python's built-in HTTP server does not support passing context information
+ along to instances of its request handlers. This function gets around that
+ by creating a sub-class of the handler in a closure that has access to
+ this context information.
+
+ Args:
+ root_path: Path to the root of the application running on the server.
+ login_url: Relative URL which should be used for handling user logins.
+    require_indexes: True if index.yaml should be treated as authoritative
+      and read-only; default False.
+ static_caching: True if browser caching of static files should be allowed.
+
+ Returns:
+ Sub-class of BaseHTTPRequestHandler.
+ """
+ application_module_dict = SetupSharedModules(sys.modules)
+
+ if require_indexes:
+ index_yaml_updater = None
+ else:
+ index_yaml_updater = dev_appserver_index.IndexYamlUpdater(root_path)
+
+ application_config_cache = AppConfigCache()
+
+ class DevAppServerRequestHandler(BaseHTTPServer.BaseHTTPRequestHandler):
+ """Dispatches URLs using patterns from a URLMatcher.
+
+ The URLMatcher is created by loading an application's configuration file.
+ Executes CGI scripts in the local process so the scripts can use mock
+ versions of APIs.
+
+ HTTP requests that correctly specify a user info cookie
+ (dev_appserver_login.COOKIE_NAME) will have the 'USER_EMAIL' environment
+ variable set accordingly. If the user is also an admin, the
+ 'USER_IS_ADMIN' variable will exist and be set to '1'. If the user is not
+ logged in, 'USER_EMAIL' will be set to the empty string.
+
+ On each request, raises an InvalidAppConfigError exception if the
+ application configuration file in the directory specified by the root_path
+ argument is invalid.
+ """
+ server_version = 'Development/1.0'
+
+ module_dict = application_module_dict
+ module_manager = ModuleManager(application_module_dict)
+
+ config_cache = application_config_cache
+
+ rewriter_chain = CreateResponseRewritersChain()
+
+ def __init__(self, *args, **kwargs):
+ """Initializer.
+
+ Args:
+ args: Positional arguments passed to the superclass constructor.
+ kwargs: Keyword arguments passed to the superclass constructor.
+ """
+ BaseHTTPServer.BaseHTTPRequestHandler.__init__(self, *args, **kwargs)
+
+ def version_string(self):
+ """Returns server's version string used for Server HTTP header."""
+ return self.server_version
+
+ def do_GET(self):
+ """Handle GET requests."""
+ self._HandleRequest()
+
+ def do_POST(self):
+ """Handles POST requests."""
+ self._HandleRequest()
+
+ def do_PUT(self):
+ """Handle PUT requests."""
+ self._HandleRequest()
+
+ def do_HEAD(self):
+ """Handle HEAD requests."""
+ self._HandleRequest()
+
+ def do_OPTIONS(self):
+ """Handles OPTIONS requests."""
+ self._HandleRequest()
+
+ def do_DELETE(self):
+ """Handle DELETE requests."""
+ self._HandleRequest()
+
+ def do_TRACE(self):
+ """Handles TRACE requests."""
+ self._HandleRequest()
+
+ def _HandleRequest(self):
+ """Handles any type of request and prints exceptions if they occur."""
+ server_name = self.headers.get('host') or self.server.server_name
+ server_name = server_name.split(':', 1)[0]
+
+ env_dict = {
+ 'REQUEST_METHOD': self.command,
+ 'REMOTE_ADDR': self.client_address[0],
+ 'SERVER_SOFTWARE': self.server_version,
+ 'SERVER_NAME': server_name,
+ 'SERVER_PROTOCOL': self.protocol_version,
+ 'SERVER_PORT': str(self.server.server_port),
+ }
+
+ full_url = GetFullURL(server_name, self.server.server_port, self.path)
+ if len(full_url) > MAX_URL_LENGTH:
+ msg = 'Requested URI too long: %s' % full_url
+ logging.error(msg)
+ self.send_response(httplib.REQUEST_URI_TOO_LONG, msg)
+ return
+
+ tbhandler = cgitb.Hook(file=self.wfile).handle
+ try:
+ if self.module_manager.AreModuleFilesModified():
+ self.module_manager.ResetModules()
+
+ implicit_matcher = CreateImplicitMatcher(self.module_dict,
+ root_path,
+ login_url)
+ config, explicit_matcher = LoadAppConfig(root_path, self.module_dict,
+ cache=self.config_cache,
+ static_caching=static_caching)
+ if config.api_version != API_VERSION:
+ logging.error(
+ "API versions cannot be switched dynamically: %r != %r",
+ config.api_version, API_VERSION)
+ sys.exit(1)
+ env_dict['CURRENT_VERSION_ID'] = config.version + ".1"
+ env_dict['APPLICATION_ID'] = config.application
+ dispatcher = MatcherDispatcher(login_url,
+ [implicit_matcher, explicit_matcher])
+
+ if require_indexes:
+ dev_appserver_index.SetupIndexes(config.application, root_path)
+
+ infile = cStringIO.StringIO()
+ infile.write(self.rfile.read(
+ int(self.headers.get('content-length', 0))))
+ infile.seek(0)
+
+ request_size = len(infile.getvalue())
+ if request_size > MAX_REQUEST_SIZE:
+ msg = ('HTTP request was too large: %d. The limit is: %d.'
+ % (request_size, MAX_REQUEST_SIZE))
+ logging.error(msg)
+ self.send_response(httplib.REQUEST_ENTITY_TOO_LARGE, msg)
+ return
+
+ outfile = cStringIO.StringIO()
+ try:
+ dispatcher.Dispatch(self.path,
+ None,
+ self.headers,
+ infile,
+ outfile,
+ base_env_dict=env_dict)
+ finally:
+ self.module_manager.UpdateModuleFileModificationTimes()
+
+ outfile.flush()
+ outfile.seek(0)
+
+ status_code, status_message, header_data, body = (
+ RewriteResponse(outfile, self.rewriter_chain))
+
+ runtime_response_size = len(outfile.getvalue())
+ if runtime_response_size > MAX_RUNTIME_RESPONSE_SIZE:
+ status_code = 403
+ status_message = 'Forbidden'
+ new_headers = []
+ for header in header_data.split('\n'):
+ if not header.lower().startswith('content-length'):
+ new_headers.append(header)
+ header_data = '\n'.join(new_headers)
+ body = ('HTTP response was too large: %d. The limit is: %d.'
+ % (runtime_response_size, MAX_RUNTIME_RESPONSE_SIZE))
+
+ except yaml_errors.EventListenerError, e:
+ title = 'Fatal error when loading application configuration'
+ msg = '%s:\n%s' % (title, str(e))
+ logging.error(msg)
+ self.send_response(httplib.INTERNAL_SERVER_ERROR, title)
+ self.wfile.write('Content-Type: text/html\n\n')
+ self.wfile.write('<pre>%s</pre>' % cgi.escape(msg))
+ except:
+ msg = 'Exception encountered handling request'
+ logging.exception(msg)
+ self.send_response(httplib.INTERNAL_SERVER_ERROR, msg)
+ tbhandler()
+ else:
+ try:
+ self.send_response(status_code, status_message)
+ self.wfile.write(header_data)
+ self.wfile.write('\r\n')
+ if self.command != 'HEAD':
+ self.wfile.write(body)
+ elif body:
+ logging.warning('Dropping unexpected body in response '
+ 'to HEAD request')
+ except (IOError, OSError), e:
+ if e.errno != errno.EPIPE:
+ raise e
+ except socket.error, e:
+ if len(e.args) >= 1 and e.args[0] != errno.EPIPE:
+ raise e
+ else:
+ if index_yaml_updater is not None:
+ index_yaml_updater.UpdateIndexYaml()
+
+ def log_error(self, format, *args):
+ """Redirect error messages through the logging module."""
+ logging.error(format, *args)
+
+ def log_message(self, format, *args):
+ """Redirect log messages through the logging module."""
+ logging.info(format, *args)
+
+ return DevAppServerRequestHandler
+
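+
+# Illustrative sketch, not part of the SDK: the closure technique described
+# above, reduced to its essentials. 'greeting' stands in for the real context
+# (root_path, login_url, ...) captured by the generated class.
+def _ExampleHandlerFactory(greeting):
+  """Example only; returns a handler class that closes over 'greeting'."""
+  class _GreetingHandler(BaseHTTPServer.BaseHTTPRequestHandler):
+    def do_GET(self):
+      self.send_response(httplib.OK)
+      self.end_headers()
+      self.wfile.write(greeting)
+  return _GreetingHandler
+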
+
+
+def ReadAppConfig(appinfo_path, parse_app_config=appinfo.LoadSingleAppInfo):
+ """Reads app.yaml file and returns its app id and list of URLMap instances.
+
+ Args:
+ appinfo_path: String containing the path to the app.yaml file.
+ parse_app_config: Used for dependency injection.
+
+ Returns:
+ AppInfoExternal instance.
+
+  Raises:
+    InvalidAppConfigError: if the config file could not be read or the config
+      does not contain any URLMap instances.
+ """
+ try:
+ appinfo_file = file(appinfo_path, 'r')
+ except IOError, unused_e:
+ raise InvalidAppConfigError(
+ 'Application configuration could not be read from "%s"' % appinfo_path)
+ try:
+ return parse_app_config(appinfo_file)
+ finally:
+ appinfo_file.close()
+
+
+def CreateURLMatcherFromMaps(root_path,
+ url_map_list,
+ module_dict,
+ default_expiration,
+ create_url_matcher=URLMatcher,
+ create_cgi_dispatcher=CGIDispatcher,
+ create_file_dispatcher=FileDispatcher,
+ create_path_adjuster=PathAdjuster,
+ normpath=os.path.normpath):
+ """Creates a URLMatcher instance from URLMap.
+
+ Creates all of the correct URLDispatcher instances to handle the various
+ content types in the application configuration.
+
+ Args:
+ root_path: Path to the root of the application running on the server.
+ url_map_list: List of appinfo.URLMap objects to initialize this
+ matcher with. Can be an empty list if you would like to add patterns
+ manually.
+ module_dict: Dictionary in which application-loaded modules should be
+ preserved between requests. This dictionary must be separate from the
+ sys.modules dictionary.
+ default_expiration: String describing default expiration time for browser
+ based caching of static files. If set to None this disallows any
+ browser caching of static content.
+ create_url_matcher: Used for dependency injection.
+ create_cgi_dispatcher: Used for dependency injection.
+ create_file_dispatcher: Used for dependency injection.
+ create_path_adjuster: Used for dependency injection.
+ normpath: Used for dependency injection.
+
+ Returns:
+ Instance of URLMatcher with the supplied URLMap objects properly loaded.
+
+ Raises:
+ InvalidAppConfigError: if the handler in url_map_list is an unknown type.
+ """
+ url_matcher = create_url_matcher()
+ path_adjuster = create_path_adjuster(root_path)
+ cgi_dispatcher = create_cgi_dispatcher(module_dict, root_path, path_adjuster)
+ static_file_config_matcher = StaticFileConfigMatcher(url_map_list,
+ path_adjuster,
+ default_expiration)
+ file_dispatcher = create_file_dispatcher(path_adjuster,
+ static_file_config_matcher)
+
+ FakeFile.SetStaticFileConfigMatcher(static_file_config_matcher)
+
+ for url_map in url_map_list:
+ admin_only = url_map.login == appinfo.LOGIN_ADMIN
+ requires_login = url_map.login == appinfo.LOGIN_REQUIRED or admin_only
+
+ handler_type = url_map.GetHandlerType()
+ if handler_type == appinfo.HANDLER_SCRIPT:
+ dispatcher = cgi_dispatcher
+ elif handler_type in (appinfo.STATIC_FILES, appinfo.STATIC_DIR):
+ dispatcher = file_dispatcher
+ else:
+ raise InvalidAppConfigError('Unknown handler type "%s"' % handler_type)
+
+ regex = url_map.url
+ path = url_map.GetHandler()
+ if handler_type == appinfo.STATIC_DIR:
+ if regex[-1] == r'/':
+ regex = regex[:-1]
+ if path[-1] == os.path.sep:
+ path = path[:-1]
+ regex = '/'.join((re.escape(regex), '(.*)'))
+ if os.path.sep == '\\':
+ backref = r'\\1'
+ else:
+ backref = r'\1'
+ path = (normpath(path).replace('\\', '\\\\') +
+ os.path.sep + backref)
+
+ url_matcher.AddURL(regex,
+ dispatcher,
+ path,
+ requires_login, admin_only)
+
+ return url_matcher
+
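+
+# Illustrative sketch, not part of the SDK: on a POSIX system, the STATIC_DIR
+# branch above turns a handler mapping the URL '/images' to the directory
+# 'static/images' (values invented) into the stored pair:
+#   regex: '/images/(.*)'    path: 'static/images/\1'
+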
+
+class AppConfigCache(object):
+ """Cache used by LoadAppConfig.
+
+  If given to LoadAppConfig, instances of this class are used to cache the
+  contents of the app config (app.yaml or app.yml) and the Matcher created
+  from it.
+
+  Code outside LoadAppConfig should treat instances of this class as opaque
+  objects and not access their members.
+ """
+
+ path = None
+ mtime = None
+ config = None
+ matcher = None
+
+
+def LoadAppConfig(root_path,
+ module_dict,
+ cache=None,
+ static_caching=True,
+ read_app_config=ReadAppConfig,
+ create_matcher=CreateURLMatcherFromMaps):
+ """Creates a Matcher instance for an application configuration file.
+
+ Raises an InvalidAppConfigError exception if there is anything wrong with
+ the application configuration file.
+
+ Args:
+ root_path: Path to the root of the application to load.
+ module_dict: Dictionary in which application-loaded modules should be
+ preserved between requests. This dictionary must be separate from the
+ sys.modules dictionary.
+ cache: Instance of AppConfigCache or None.
+ static_caching: True if browser caching of static files should be allowed.
+ read_app_config: Used for dependency injection.
+ create_matcher: Used for dependency injection.
+
+ Returns:
+ tuple: (AppInfoExternal, URLMatcher)
+
+ Raises:
+ AppConfigNotFound: if an app.yaml file cannot be found.
+ """
+ for appinfo_path in [os.path.join(root_path, 'app.yaml'),
+ os.path.join(root_path, 'app.yml')]:
+
+ if os.path.isfile(appinfo_path):
+ if cache is not None:
+ mtime = os.path.getmtime(appinfo_path)
+ if cache.path == appinfo_path and cache.mtime == mtime:
+ return (cache.config, cache.matcher)
+
+ cache.config = cache.matcher = cache.path = None
+ cache.mtime = mtime
+
+ try:
+ config = read_app_config(appinfo_path, appinfo.LoadSingleAppInfo)
+
+ if static_caching:
+ if config.default_expiration:
+ default_expiration = config.default_expiration
+ else:
+ default_expiration = '0'
+ else:
+ default_expiration = None
+
+ matcher = create_matcher(root_path,
+ config.handlers,
+ module_dict,
+ default_expiration)
+
+ FakeFile.SetSkippedFiles(config.skip_files)
+
+ if cache is not None:
+ cache.path = appinfo_path
+ cache.config = config
+ cache.matcher = matcher
+
+ return (config, matcher)
+ except gexcept.AbstractMethod:
+ pass
+
+ raise AppConfigNotFoundError
+
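+
+# Illustrative sketch, not part of the SDK: how the request handler reuses an
+# AppConfigCache across calls. '/tmp/myapp' is an invented path that would
+# need to contain an app.yaml file.
+def _ExampleLoadAppConfigUsage():
+  """Example only; the second call is a cache hit if app.yaml is unchanged."""
+  cache = AppConfigCache()
+  config, matcher = LoadAppConfig('/tmp/myapp', {}, cache=cache)
+  config, matcher = LoadAppConfig('/tmp/myapp', {}, cache=cache)
+  return config, matcher
+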
+
+def ReadCronConfig(croninfo_path, parse_cron_config=croninfo.LoadSingleCron):
+ """Reads cron.yaml file and returns a list of CronEntry instances.
+
+ Args:
+ croninfo_path: String containing the path to the cron.yaml file.
+ parse_cron_config: Used for dependency injection.
+
+ Returns:
+ A CronInfoExternal object.
+
+  Raises:
+    InvalidAppConfigError or MalformedCronConfiguration: if the config file
+      is unreadable, empty or invalid.
+ """
+ try:
+ croninfo_file = file(croninfo_path, 'r')
+ except IOError, e:
+ raise InvalidAppConfigError(
+ 'Cron configuration could not be read from "%s": %s'
+ % (croninfo_path, e))
+ try:
+ return parse_cron_config(croninfo_file)
+ finally:
+ croninfo_file.close()
+
+
+
+def SetupStubs(app_id, **config):
+ """Sets up testing stubs of APIs.
+
+ Args:
+ app_id: Application ID being served.
+    config: Keyword arguments; recognized keywords are listed below.
+
+ Keywords:
+ root_path: Root path to the directory of the application which should
+ contain the app.yaml, indexes.yaml, and queues.yaml files.
+ login_url: Relative URL which should be used for handling user login/logout.
+ datastore_path: Path to the file to store Datastore file stub data in.
+ history_path: Path to the file to store Datastore history in.
+ clear_datastore: If the datastore and history should be cleared on startup.
+ smtp_host: SMTP host used for sending test mail.
+ smtp_port: SMTP port.
+ smtp_user: SMTP user.
+ smtp_password: SMTP password.
+ enable_sendmail: Whether to use sendmail as an alternative to SMTP.
+ show_mail_body: Whether to log the body of emails.
+ remove: Used for dependency injection.
+ trusted: True if this app can access data belonging to other apps. This
+ behavior is different from the real app server and should be left False
+ except for advanced uses of dev_appserver.
+ """
+ root_path = config.get('root_path', None)
+ login_url = config['login_url']
+ datastore_path = config['datastore_path']
+ history_path = config['history_path']
+ clear_datastore = config['clear_datastore']
+ require_indexes = config.get('require_indexes', False)
+ smtp_host = config.get('smtp_host', None)
+ smtp_port = config.get('smtp_port', 25)
+ smtp_user = config.get('smtp_user', '')
+ smtp_password = config.get('smtp_password', '')
+ enable_sendmail = config.get('enable_sendmail', False)
+ show_mail_body = config.get('show_mail_body', False)
+ remove = config.get('remove', os.remove)
+ trusted = config.get('trusted', False)
+
+ os.environ['APPLICATION_ID'] = app_id
+
+ if clear_datastore:
+ for path in (datastore_path, history_path):
+ if os.path.lexists(path):
+ logging.info('Attempting to remove file at %s', path)
+ try:
+ remove(path)
+ except OSError, e:
+ logging.warning('Removing file failed: %s', e)
+
+ apiproxy_stub_map.apiproxy = apiproxy_stub_map.APIProxyStubMap()
+
+ datastore = datastore_file_stub.DatastoreFileStub(
+ app_id, datastore_path, history_path, require_indexes=require_indexes,
+ trusted=trusted)
+ apiproxy_stub_map.apiproxy.RegisterStub('datastore_v3', datastore)
+
+ fixed_login_url = '%s?%s=%%s' % (login_url,
+ dev_appserver_login.CONTINUE_PARAM)
+ fixed_logout_url = '%s&%s' % (fixed_login_url,
+ dev_appserver_login.LOGOUT_PARAM)
+
+ apiproxy_stub_map.apiproxy.RegisterStub(
+ 'user',
+ user_service_stub.UserServiceStub(login_url=fixed_login_url,
+ logout_url=fixed_logout_url))
+
+ apiproxy_stub_map.apiproxy.RegisterStub(
+ 'urlfetch',
+ urlfetch_stub.URLFetchServiceStub())
+
+ apiproxy_stub_map.apiproxy.RegisterStub(
+ 'mail',
+ mail_stub.MailServiceStub(smtp_host,
+ smtp_port,
+ smtp_user,
+ smtp_password,
+ enable_sendmail=enable_sendmail,
+ show_mail_body=show_mail_body))
+
+ apiproxy_stub_map.apiproxy.RegisterStub(
+ 'memcache',
+ memcache_stub.MemcacheServiceStub())
+
+ apiproxy_stub_map.apiproxy.RegisterStub(
+ 'capability_service',
+ capability_stub.CapabilityServiceStub())
+
+ apiproxy_stub_map.apiproxy.RegisterStub(
+ 'taskqueue',
+ taskqueue_stub.TaskQueueServiceStub(root_path=root_path))
+
+ apiproxy_stub_map.apiproxy.RegisterStub(
+ 'xmpp',
+ xmpp_service_stub.XmppServiceStub())
+
+
+
+ try:
+ from google.appengine.api.images import images_stub
+ apiproxy_stub_map.apiproxy.RegisterStub(
+ 'images',
+ images_stub.ImagesServiceStub())
+ except ImportError, e:
+ logging.warning('Could not initialize images API; you are likely missing '
+ 'the Python "PIL" module. ImportError: %s', e)
+ from google.appengine.api.images import images_not_implemented_stub
+ apiproxy_stub_map.apiproxy.RegisterStub(
+ 'images',
+ images_not_implemented_stub.ImagesNotImplementedServiceStub())
+
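+
+# Illustrative sketch, not part of the SDK: a minimal SetupStubs call, e.g.
+# from a test harness. All values below are invented; login_url,
+# datastore_path, history_path and clear_datastore are the required keywords.
+def _ExampleSetupStubsUsage():
+  """Example only; registers the full stub set for application 'demo-app'."""
+  SetupStubs('demo-app',
+             login_url='/_ah/login',
+             datastore_path='/tmp/dev_appserver.datastore',
+             history_path='/tmp/dev_appserver.datastore.history',
+             clear_datastore=False)
+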
+
+def CreateImplicitMatcher(module_dict,
+ root_path,
+ login_url,
+ create_path_adjuster=PathAdjuster,
+ create_local_dispatcher=LocalCGIDispatcher,
+ create_cgi_dispatcher=CGIDispatcher):
+ """Creates a URLMatcher instance that handles internal URLs.
+
+ Used to facilitate handling user login/logout, debugging, info about the
+ currently running app, etc.
+
+ Args:
+ module_dict: Dictionary in the form used by sys.modules.
+ root_path: Path to the root of the application.
+ login_url: Relative URL which should be used for handling user login/logout.
+    create_path_adjuster: Used for dependency injection.
+    create_local_dispatcher: Used for dependency injection.
+    create_cgi_dispatcher: Used for dependency injection.
+
+ Returns:
+ Instance of URLMatcher with appropriate dispatchers.
+ """
+ url_matcher = URLMatcher()
+ path_adjuster = create_path_adjuster(root_path)
+
+ login_dispatcher = create_local_dispatcher(sys.modules, path_adjuster,
+ dev_appserver_login.main)
+ url_matcher.AddURL(login_url,
+ login_dispatcher,
+ '',
+ False,
+ False)
+
+ admin_dispatcher = create_cgi_dispatcher(module_dict, root_path,
+ path_adjuster)
+ url_matcher.AddURL('/_ah/admin(?:/.*)?',
+ admin_dispatcher,
+ DEVEL_CONSOLE_PATH,
+ False,
+ False)
+
+ return url_matcher
+
+
+def SetupTemplates(template_dir):
+ """Reads debugging console template files and initializes the console.
+
+ Does nothing if templates have already been initialized.
+
+ Args:
+ template_dir: Path to the directory containing the templates files.
+
+ Raises:
+ OSError or IOError if any of the template files could not be read.
+ """
+ if ApplicationLoggingHandler.AreTemplatesInitialized():
+ return
+
+ try:
+ header = open(os.path.join(template_dir, HEADER_TEMPLATE)).read()
+ script = open(os.path.join(template_dir, SCRIPT_TEMPLATE)).read()
+ middle = open(os.path.join(template_dir, MIDDLE_TEMPLATE)).read()
+ footer = open(os.path.join(template_dir, FOOTER_TEMPLATE)).read()
+ except (OSError, IOError):
+ logging.error('Could not read template files from %s', template_dir)
+ raise
+
+ ApplicationLoggingHandler.InitializeTemplates(header, script, middle, footer)
+
+
+def CreateServer(root_path,
+ login_url,
+ port,
+ template_dir,
+ serve_address='',
+ require_indexes=False,
+ allow_skipped_files=False,
+ static_caching=True,
+ python_path_list=sys.path,
+ sdk_dir=os.path.dirname(os.path.dirname(google.__file__))):
+ """Creates an new HTTPServer for an application.
+
+  The sdk_dir argument must point to the directory storing all of the SDK's
+  code so that sandboxing of module access works for any and all SDK code.
+  While typically this is where the 'google' package lives, it can be in
+  another location because of API version support.
+
+ Args:
+ root_path: String containing the path to the root directory of the
+ application where the app.yaml file is.
+ login_url: Relative URL which should be used for handling user login/logout.
+ port: Port to start the application server on.
+ template_dir: Path to the directory in which the debug console templates
+ are stored.
+ serve_address: Address on which the server should serve.
+    require_indexes: True if index.yaml should be treated as authoritative
+      and read-only; default False.
+ allow_skipped_files: True if skipped files should be accessible.
+ static_caching: True if browser caching of static files should be allowed.
+ python_path_list: Used for dependency injection.
+ sdk_dir: Directory where the SDK is stored.
+
+ Returns:
+    Instance of BaseHTTPServer.HTTPServer that's ready to start accepting
+    requests.
+ """
+ absolute_root_path = os.path.realpath(root_path)
+
+ SetupTemplates(template_dir)
+ FakeFile.SetAllowedPaths(absolute_root_path,
+ [sdk_dir,
+ template_dir])
+ FakeFile.SetAllowSkippedFiles(allow_skipped_files)
+
+ handler_class = CreateRequestHandler(absolute_root_path,
+ login_url,
+ require_indexes,
+ static_caching)
+
+ if absolute_root_path not in python_path_list:
+ python_path_list.insert(0, absolute_root_path)
+ return HTTPServerWithScheduler((serve_address, port), handler_class)
+
+
+class HTTPServerWithScheduler(BaseHTTPServer.HTTPServer):
+ """A BaseHTTPServer subclass that calls a method at a regular interval."""
+
+ def __init__(self, server_address, request_handler_class):
+ """Constructor.
+
+ Args:
+ server_address: the bind address of the server.
+ request_handler_class: class used to handle requests.
+ """
+ BaseHTTPServer.HTTPServer.__init__(self, server_address,
+ request_handler_class)
+ self._events = []
+
+ def get_request(self, time_func=time.time, select_func=select.select):
+ """Overrides the base get_request call.
+
+ Args:
+ time_func: used for testing.
+ select_func: used for testing.
+
+ Returns:
+ a (socket_object, address info) tuple.
+ """
+ while True:
+ if self._events:
+ current_time = time_func()
+ next_eta = self._events[0][0]
+ delay = next_eta - current_time
+ else:
+ delay = DEFAULT_SELECT_DELAY
+ readable, _, _ = select_func([self.socket], [], [], max(delay, 0))
+ if readable:
+ return self.socket.accept()
+ current_time = time_func()
+ if self._events and current_time >= self._events[0][0]:
+ unused_eta, runnable = heapq.heappop(self._events)
+ runnable()
+
+ def AddEvent(self, eta, runnable):
+ """Add a runnable event to be run at the specified time.
+
+ Args:
+ eta: when to run the event, in seconds since epoch.
+ runnable: a callable object.
+ """
+ heapq.heappush(self._events, (eta, runnable))
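+
+
+# Illustrative sketch, not part of the SDK: scheduling a repeating task on a
+# server built by CreateServer. The 30-second interval and the task body are
+# invented for this example.
+def _ExampleAddEventUsage(server):
+  """Example only; _Tick re-registers itself to run roughly every 30s."""
+  def _Tick():
+    logging.info('scheduler tick')
+    server.AddEvent(time.time() + 30, _Tick)
+  server.AddEvent(time.time() + 30, _Tick)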
diff --git a/google_appengine/google/appengine/tools/dev_appserver.pyc b/google_appengine/google/appengine/tools/dev_appserver.pyc
new file mode 100644
index 0000000..37585ff
--- /dev/null
+++ b/google_appengine/google/appengine/tools/dev_appserver.pyc
Binary files differ
diff --git a/google_appengine/google/appengine/tools/dev_appserver_index.py b/google_appengine/google/appengine/tools/dev_appserver_index.py
new file mode 100755
index 0000000..d69f656
--- /dev/null
+++ b/google_appengine/google/appengine/tools/dev_appserver_index.py
@@ -0,0 +1,277 @@
+#!/usr/bin/env python
+#
+# Copyright 2007 Google Inc.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
+
+"""Utilities for generating and updating index.yaml."""
+
+
+
+import os
+import logging
+
+from google.appengine.api import apiproxy_stub_map
+from google.appengine.api import datastore_admin
+from google.appengine.api import yaml_errors
+from google.appengine.datastore import datastore_index
+
+import yaml
+
+AUTO_MARKER = '\n# AUTOGENERATED\n'
+
+AUTO_COMMENT = '''
+# This index.yaml is automatically updated whenever the dev_appserver
+# detects that a new type of query is run. If you want to manage the
+# index.yaml file manually, remove the above marker line (the line
+# saying "# AUTOGENERATED"). If you want to manage some indexes
+# manually, move them above the marker line. The index.yaml file is
+# automatically uploaded to the admin console when you next deploy
+# your application using appcfg.py.
+'''
+
+
+def GenerateIndexFromHistory(query_history,
+ all_indexes=None, manual_indexes=None):
+ """Generate most of the text for index.yaml from the query history.
+
+ Args:
+    query_history: Query history, a dict mapping each query that was run to
+      the number of times it was run.
+ all_indexes: Optional datastore_index.IndexDefinitions instance
+ representing all the indexes found in the input file. May be None.
+ manual_indexes: Optional datastore_index.IndexDefinitions instance
+ containing indexes for which we should not generate output. May be None.
+
+ Returns:
+ A string representation that can safely be appended to an
+ existing index.yaml file.
+ """
+
+ all_keys = datastore_index.IndexDefinitionsToKeys(all_indexes)
+ manual_keys = datastore_index.IndexDefinitionsToKeys(manual_indexes)
+
+ indexes = dict((key, 0) for key in all_keys - manual_keys)
+
+ for query, count in query_history.iteritems():
+    required, kind, ancestor, props, num_eq_filters = (
+        datastore_index.CompositeIndexForQuery(query))
+ if required:
+ key = (kind, ancestor, props)
+ if key not in manual_keys:
+ if key in indexes:
+ indexes[key] += count
+ else:
+ indexes[key] = count
+
+ res = []
+ for (kind, ancestor, props), count in sorted(indexes.iteritems()):
+ res.append('')
+ if count == 0:
+ message = '# Unused in query history -- copied from input.'
+ elif count == 1:
+ message = '# Used once in query history.'
+ else:
+ message = '# Used %d times in query history.' % count
+ res.append(message)
+ res.append(datastore_index.IndexYamlForQuery(kind, ancestor, props))
+
+ res.append('')
+ return '\n'.join(res)
+
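+
+# Illustrative sketch, not part of the SDK: query_history maps each executed
+# query (a datastore_pb.Query) to its run count, and the return value is the
+# YAML fragment placed after the AUTOGENERATED marker, e.g. (entries
+# invented):
+#
+#   # Used 3 times in query history.
+#   - kind: Greeting
+#     properties:
+#     - name: date
+#       direction: desc
+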
+
+class IndexYamlUpdater(object):
+ """Helper class for updating index.yaml.
+
+ This class maintains some state about the query history and the
+ index.yaml file in order to minimize the number of times index.yaml
+ is actually overwritten.
+ """
+
+ index_yaml_is_manual = False
+ index_yaml_mtime = 0
+ last_history_size = 0
+
+ def __init__(self, root_path):
+ """Constructor.
+
+ Args:
+ root_path: Path to the app's root directory.
+ """
+ self.root_path = root_path
+
+ def UpdateIndexYaml(self, openfile=open):
+ """Update index.yaml.
+
+ Args:
+ openfile: Used for dependency injection.
+
+ We only ever write to index.yaml if either:
+ - it doesn't exist yet; or
+ - it contains an 'AUTOGENERATED' comment.
+
+ All indexes *before* the AUTOGENERATED comment will be written
+ back unchanged. All indexes *after* the AUTOGENERATED comment
+ will be updated with the latest query counts (query counts are
+ reset by --clear_datastore). Indexes that aren't yet in the file
+ will be appended to the AUTOGENERATED section.
+
+ We keep track of some data in order to avoid doing repetitive work:
+ - if index.yaml is fully manual, we keep track of its mtime to
+ avoid parsing it over and over;
+ - we keep track of the number of keys in the history dict since
+ the last time we updated index.yaml (or decided there was
+ nothing to update).
+ """
+ index_yaml_file = os.path.join(self.root_path, 'index.yaml')
+
+ try:
+ index_yaml_mtime = os.path.getmtime(index_yaml_file)
+ except os.error:
+ index_yaml_mtime = None
+
+ index_yaml_changed = (index_yaml_mtime != self.index_yaml_mtime)
+ self.index_yaml_mtime = index_yaml_mtime
+
+ datastore_stub = apiproxy_stub_map.apiproxy.GetStub('datastore_v3')
+ query_history = datastore_stub.QueryHistory()
+ history_changed = (len(query_history) != self.last_history_size)
+ self.last_history_size = len(query_history)
+
+ if not (index_yaml_changed or history_changed):
+ logging.debug('No need to update index.yaml')
+ return
+
+ if self.index_yaml_is_manual and not index_yaml_changed:
+ logging.debug('Will not update manual index.yaml')
+ return
+
+ if index_yaml_mtime is None:
+ index_yaml_data = None
+ else:
+ try:
+ fh = open(index_yaml_file, 'r')
+ except IOError:
+ index_yaml_data = None
+ else:
+ try:
+ index_yaml_data = fh.read()
+ finally:
+ fh.close()
+
+ self.index_yaml_is_manual = (index_yaml_data is not None and
+ AUTO_MARKER not in index_yaml_data)
+ if self.index_yaml_is_manual:
+ logging.info('Detected manual index.yaml, will not update')
+ return
+
+ if index_yaml_data is None:
+ all_indexes = None
+ else:
+ try:
+ all_indexes = datastore_index.ParseIndexDefinitions(index_yaml_data)
+ except yaml_errors.EventListenerError, e:
+ logging.error('Error parsing %s:\n%s', index_yaml_file, e)
+ return
+ except Exception, err:
+ logging.error('Error parsing %s:\n%s.%s: %s', index_yaml_file,
+ err.__class__.__module__, err.__class__.__name__, err)
+ return
+
+ if index_yaml_data is None:
+ manual_part, automatic_part = 'indexes:\n', ''
+ manual_indexes = None
+ else:
+ manual_part, automatic_part = index_yaml_data.split(AUTO_MARKER, 1)
+ try:
+ manual_indexes = datastore_index.ParseIndexDefinitions(manual_part)
+ except Exception, err:
+ logging.error('Error parsing manual part of %s: %s',
+ index_yaml_file, err)
+ return
+
+ automatic_part = GenerateIndexFromHistory(query_history,
+ all_indexes, manual_indexes)
+
+ try:
+ fh = openfile(index_yaml_file, 'w')
+ except IOError, err:
+ logging.error('Can\'t write index.yaml: %s', err)
+ return
+
+ try:
+ logging.info('Updating %s', index_yaml_file)
+ fh.write(manual_part)
+ fh.write(AUTO_MARKER)
+ fh.write(AUTO_COMMENT)
+ fh.write(automatic_part)
+ finally:
+ fh.close()
+
+ try:
+ self.index_yaml_mtime = os.path.getmtime(index_yaml_file)
+ except os.error, err:
+ logging.error('Can\'t stat index.yaml we just wrote: %s', err)
+ self.index_yaml_mtime = None
+
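+
+# Illustrative sketch, not part of the SDK: the index.yaml layout produced by
+# UpdateIndexYaml above (entries invented). Everything before the marker is
+# preserved verbatim; everything after it is regenerated:
+#
+#   indexes:
+#   - kind: Greeting            # manual section, never rewritten
+#     properties:
+#     - name: author
+#
+#   # AUTOGENERATED
+#   (AUTO_COMMENT text)
+#
+#   - kind: Greeting            # regenerated from query history
+#     properties:
+#     - name: date
+#       direction: desc
+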
+
+def SetupIndexes(app_id, root_path):
+ """Ensure that the set of existing composite indexes matches index.yaml.
+
+ Note: this is similar to the algorithm used by the admin console for
+ the same purpose.
+
+ Args:
+ app_id: Application ID being served.
+ root_path: Path to the root of the application.
+ """
+ index_yaml_file = os.path.join(root_path, 'index.yaml')
+ try:
+ fh = open(index_yaml_file, 'r')
+ except IOError:
+ index_yaml_data = None
+ else:
+ try:
+ index_yaml_data = fh.read()
+ finally:
+ fh.close()
+
+ indexes = []
+ if index_yaml_data is not None:
+ index_defs = datastore_index.ParseIndexDefinitions(index_yaml_data)
+ if index_defs is not None:
+ indexes = index_defs.indexes
+ if indexes is None:
+ indexes = []
+
+ requested_indexes = datastore_admin.IndexDefinitionsToProtos(app_id, indexes)
+
+ existing_indexes = datastore_admin.GetIndices(app_id)
+
+ requested = dict((x.definition().Encode(), x) for x in requested_indexes)
+ existing = dict((x.definition().Encode(), x) for x in existing_indexes)
+
+ created = 0
+ for key, index in requested.iteritems():
+ if key not in existing:
+ datastore_admin.CreateIndex(index)
+ created += 1
+
+ deleted = 0
+ for key, index in existing.iteritems():
+ if key not in requested:
+ datastore_admin.DeleteIndex(index)
+ deleted += 1
+
+ if created or deleted:
+ logging.info("Created %d and deleted %d index(es); total %d",
+ created, deleted, len(requested))
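+
+
+# Illustrative sketch, not part of the SDK: the reconciliation above is a set
+# difference keyed on each index's encoded definition proto:
+#   to_create = set(requested) - set(existing)
+#   to_delete = set(existing) - set(requested)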
diff --git a/google_appengine/google/appengine/tools/dev_appserver_index.pyc b/google_appengine/google/appengine/tools/dev_appserver_index.pyc
new file mode 100644
index 0000000..62d6d2c
--- /dev/null
+++ b/google_appengine/google/appengine/tools/dev_appserver_index.pyc
Binary files differ
diff --git a/google_appengine/google/appengine/tools/dev_appserver_info.py b/google_appengine/google/appengine/tools/dev_appserver_info.py
new file mode 100755
index 0000000..40f7406
--- /dev/null
+++ b/google_appengine/google/appengine/tools/dev_appserver_info.py
@@ -0,0 +1,160 @@
+#!/usr/bin/env python
+#
+# Copyright 2007 Google Inc.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
+
+"""CGI for displaying info about the currently running app in dev_appserver.
+
+This serves pages under /_ah/info/ that display information about the app
+currently running in the dev_appserver. It currently serves on these URLs:
+
+ /_ah/info/queries:
+ A list of datastore queries run so far, grouped by kind. Used to suggest
+ composite indices that should be built.
+
+ /_ah/info/index.yaml:
+ Produces an index.yaml file that can be uploaded to the real app
+ server by appcfg.py. This information is derived from the query
+ history above, by removing queries that don't need any indexes to
+ be built and by combining queries that can use the same index.
+"""
+
+
+
+import cgi
+import wsgiref.handlers
+
+from google.appengine.api import apiproxy_stub_map
+from google.appengine.datastore import datastore_pb
+from google.appengine.ext import webapp
+from google.appengine.tools import dev_appserver_index
+
+
+class QueriesHandler(webapp.RequestHandler):
+ """A handler that displays a list of the datastore queries run so far.
+ """
+
+ HEADER = """<html>
+<head><title>Query History</title></head>
+
+<body>
+<h3>Query History</h3>
+
+<p>This is a list of datastore queries your app has run. You have to
+make composite indices for these queries before deploying your app.
+This is normally done automatically by running dev_appserver, which
+will write the file index.yaml into your app's root directory, and
+then deploying your app with appcfg, which will upload that
+index.yaml.</p>
+
+<p>You can also view a 'clean' <a href="index.yaml">index.yaml</a>
+file and save that to your app's root directory.</p>
+
+<table>
+<tr><th>Times run</th><th>Query</th></tr>
+"""
+
+ ROW = """<tr><td>%(count)s</td><td>%(query)s</td></tr>"""
+
+ FOOTER = """
+</table>
+</body>
+</html>"""
+
+ def Render(self):
+ """Renders and returns the query history page HTML.
+
+ Returns:
+ A string, formatted as an HTML page.
+ """
+ history = apiproxy_stub_map.apiproxy.GetStub('datastore_v3').QueryHistory()
+ history_items = [(count, query) for query, count in history.items()]
+ history_items.sort(reverse=True)
+ rows = [self.ROW % {'query': _FormatQuery(query),
+ 'count': count}
+ for count, query in history_items]
+ return self.HEADER + '\n'.join(rows) + self.FOOTER
+
+ def get(self):
+ """Handle a GET. Just calls Render()."""
+ self.response.out.write(self.Render())
+
+
+class IndexYamlHandler(webapp.RequestHandler):
+ """A handler that renders an index.yaml file suitable for upload."""
+
+ def Render(self):
+ """Renders and returns the index.yaml file.
+
+ Returns:
+ A string, formatted as an index.yaml file.
+ """
+ datastore_stub = apiproxy_stub_map.apiproxy.GetStub('datastore_v3')
+ query_history = datastore_stub.QueryHistory()
+ body = dev_appserver_index.GenerateIndexFromHistory(query_history)
+ return 'indexes:\n' + body
+
+ def get(self):
+ """Handle a GET. Just calls Render()."""
+ self.response.headers['Content-Type'] = 'text/plain'
+ self.response.out.write(self.Render())
+
+
+def _FormatQuery(query):
+ """Format a Query protobuf as (very simple) HTML.
+
+ Args:
+ query: A datastore_pb.Query instance.
+
+ Returns:
+ A string containing formatted HTML. This is mostly the output of
+ str(query) with '<' etc. escaped, and '<br>' inserted in front of
+ Order and Filter parts.
+ """
+ res = cgi.escape(str(query))
+ res = res.replace('Order', '<br>Order')
+ res = res.replace('Filter', '<br>Filter')
+ return res
+
+
+def _DirectionToString(direction):
+ """Turn a direction enum into a string.
+
+ Args:
+ direction: ASCENDING or DESCENDING
+
+ Returns:
+ Either 'asc' or 'descending'.
+ """
+ if direction == datastore_pb.Query_Order.DESCENDING:
+ return 'descending'
+ else:
+ return 'asc'
+
+
+URL_MAP = {
+ '/_ah/info/queries': QueriesHandler,
+ '/_ah/info/index.yaml': IndexYamlHandler,
+
+}
+
+
+def main():
+ application = webapp.WSGIApplication(URL_MAP.items())
+ wsgiref.handlers.CGIHandler().run(application)
+
+
+if __name__ == '__main__':
+ main()
diff --git a/google_appengine/google/appengine/tools/dev_appserver_login.py b/google_appengine/google/appengine/tools/dev_appserver_login.py
new file mode 100755
index 0000000..e03e9a3
--- /dev/null
+++ b/google_appengine/google/appengine/tools/dev_appserver_login.py
@@ -0,0 +1,297 @@
+#!/usr/bin/env python
+#
+# Copyright 2007 Google Inc.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
+"""Helper CGI for logins/logout in the development application server.
+
+This CGI has these parameters:
+
+ continue: URL to redirect to after a login or logout has completed.
+ email: Email address to set for the client.
+ admin: If 'True', the client should be logged in as an admin.
+ action: What action to take ('Login' or 'Logout').
+
+To view the current user information and a form for logging in and out,
+supply no parameters.
+"""
+
+
+import cgi
+import Cookie
+import md5
+import os
+import sys
+import urllib
+
+
+CONTINUE_PARAM = 'continue'
+EMAIL_PARAM = 'email'
+ADMIN_PARAM = 'admin'
+ACTION_PARAM = 'action'
+
+LOGOUT_ACTION = 'Logout'
+LOGIN_ACTION = 'Login'
+
+LOGOUT_PARAM = 'action=%s' % LOGOUT_ACTION
+
+COOKIE_NAME = 'dev_appserver_login'
+
+
+def GetUserInfo(http_cookie, cookie_name=COOKIE_NAME):
+ """Get the requestor's user info from the HTTP cookie in the CGI environment.
+
+ Args:
+ http_cookie: Value of the HTTP_COOKIE environment variable.
+ cookie_name: Name of the cookie that stores the user info.
+
+  Returns:
+    Tuple (email, admin, user_id) where:
+      email: The user's email address, if any.
+      admin: True if the user is an admin; False otherwise.
+      user_id: The user's id digest string, or '' if not set.
+ """
+ cookie = Cookie.SimpleCookie(http_cookie)
+
+ cookie_value = ''
+ if cookie_name in cookie:
+ cookie_value = cookie[cookie_name].value
+
+ email, admin, user_id = (cookie_value.split(':') + ['', '', ''])[:3]
+ return email, (admin == 'True'), user_id
+
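+
+# Illustrative sketch, not part of the SDK: parsing an invented cookie value.
+# The payload format is email:admin_flag:user_id, as produced by
+# CreateCookieData below.
+def _ExampleGetUserInfoUsage():
+  """Example only."""
+  http_cookie = '%s=test@example.com:True:112038282811345670000' % COOKIE_NAME
+  email, admin, unused_user_id = GetUserInfo(http_cookie)
+  assert email == 'test@example.com'
+  assert admin
+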
+
+def CreateCookieData(email, admin):
+ """Creates cookie payload data.
+
+ Args:
+ email, admin: Parameters to incorporate into the cookie.
+
+ Returns:
+ String containing the cookie payload.
+ """
+ admin_string = 'False'
+ if admin:
+ admin_string = 'True'
+ if email:
+ user_id_digest = md5.new(email.lower()).digest()
+ user_id = '1' + ''.join(['%02d' % ord(x) for x in user_id_digest])[:20]
+ else:
+ user_id = ''
+ return '%s:%s:%s' % (email, admin_string, user_id)
+
+
+def SetUserInfoCookie(email, admin, cookie_name=COOKIE_NAME):
+ """Creates a cookie to set the user information for the requestor.
+
+ Args:
+ email: Email to set for the user.
+ admin: True if the user should be admin; False otherwise.
+ cookie_name: Name of the cookie that stores the user info.
+
+ Returns:
+ 'Set-Cookie' header for setting the user info of the requestor.
+ """
+ cookie_value = CreateCookieData(email, admin)
+ set_cookie = Cookie.SimpleCookie()
+ set_cookie[cookie_name] = cookie_value
+ set_cookie[cookie_name]['path'] = '/'
+ return '%s\r\n' % set_cookie
+
+
+def ClearUserInfoCookie(cookie_name=COOKIE_NAME):
+ """Clears the user info cookie from the requestor, logging them out.
+
+ Args:
+ cookie_name: Name of the cookie that stores the user info.
+
+ Returns:
+ 'Set-Cookie' header for clearing the user info of the requestor.
+ """
+ set_cookie = Cookie.SimpleCookie()
+ set_cookie[cookie_name] = ''
+ set_cookie[cookie_name]['path'] = '/'
+ set_cookie[cookie_name]['max-age'] = '0'
+ return '%s\r\n' % set_cookie
+
+
+LOGIN_TEMPLATE = """<html>
+<head>
+ <title>Login</title>
+</head>
+<body>
+
+<form method='get' action='%(login_url)s'
+ style='text-align:center; font: 13px sans-serif'>
+ <div style='width: 20em; margin: 1em auto;
+ text-align:left;
+ padding: 0 2em 1.25em 2em;
+ background-color: #d6e9f8;
+ border: 2px solid #67a7e3'>
+ <h3>%(login_message)s</h3>
+ <p style='padding: 0; margin: 0'>
+ <label for='email' style="width: 3em">Email:</label>
+ <input name='email' type='text' value='%(email)s' id='email'/>
+ </p>
+ <p style='margin: .5em 0 0 3em; font-size:12px'>
+ <input name='admin' type='checkbox' value='True'
+ %(admin_checked)s id='admin'/>
+ <label for='admin'>Sign in as Administrator</label>
+ </p>
+ <p style='margin-left: 3em'>
+ <input name='action' value='Login' type='submit'
+ id='submit-login' />
+ <input name='action' value='Logout' type='submit'
+ id='submit-logout' />
+ </p>
+ </div>
+ <input name='continue' type='hidden' value='%(continue_url)s'/>
+</form>
+
+</body>
+</html>
+"""
+
+
+def RenderLoginTemplate(login_url, continue_url, email, admin):
+ """Renders the login page.
+
+ Args:
+ login_url, continue_url, email, admin: Parameters passed to
+ LoginCGI.
+
+ Returns:
+ String containing the contents of the login page.
+ """
+ login_message = 'Not logged in'
+ if email:
+ login_message = 'Logged in'
+ admin_checked = ''
+ if admin:
+ admin_checked = 'checked'
+
+ template_dict = {
+
+
+ 'email': email or 'test\x40example.com',
+ 'admin_checked': admin_checked,
+ 'login_message': login_message,
+ 'login_url': login_url,
+ 'continue_url': continue_url
+ }
+
+ return LOGIN_TEMPLATE % template_dict
+
+
+def LoginRedirect(login_url,
+ hostname,
+ port,
+ relative_url,
+ outfile):
+ """Writes a login redirection URL to a user.
+
+ Args:
+ login_url: Relative URL which should be used for handling user logins.
+ hostname: Name of the host on which the webserver is running.
+ port: Port on which the webserver is running.
+ relative_url: String containing the URL accessed.
+ outfile: File-like object to which the response should be written.
+ """
+ dest_url = "http://%s:%s%s" % (hostname, port, relative_url)
+ redirect_url = 'http://%s:%s%s?%s=%s' % (hostname,
+ port,
+ login_url,
+ CONTINUE_PARAM,
+ urllib.quote(dest_url))
+ outfile.write('Status: 302 Requires login\r\n')
+ outfile.write('Location: %s\r\n\r\n' % redirect_url)
+
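+
+# Illustrative sketch, not part of the SDK: for hostname 'localhost', port
+# 8080, login_url '/_ah/login' and relative_url '/secret' (all invented),
+# the client receives a 302 pointing at:
+#   http://localhost:8080/_ah/login?continue=http%3A//localhost%3A8080/secret
+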
+
+def LoginCGI(login_url,
+ email,
+ admin,
+ action,
+ set_email,
+ set_admin,
+ continue_url,
+ outfile):
+ """Runs the login CGI.
+
+ This CGI does not care about the method at all. For both POST and GET the
+ client will be redirected to the continue URL.
+
+ Args:
+ login_url: URL used to run the CGI.
+ email: Current email address of the requesting user.
+ admin: True if the requesting user is an admin; False otherwise.
+ action: The action used to run the CGI; 'Login' for a login action, 'Logout'
+ for when a logout should occur.
+    set_email: Email to set for the user; empty if no email should be set.
+ set_admin: True if the user should be an admin; False otherwise.
+ continue_url: URL to which the user should be redirected when the CGI
+ finishes loading; defaults to the login_url with no parameters (showing
+ current status) if not supplied.
+ outfile: File-like object to which all output data should be written.
+ """
+ redirect_url = ''
+ output_headers = []
+
+ if action:
+ if action.lower() == LOGOUT_ACTION.lower():
+ output_headers.append(ClearUserInfoCookie())
+ elif set_email:
+ output_headers.append(SetUserInfoCookie(set_email, set_admin))
+
+ redirect_url = continue_url or login_url
+
+ if redirect_url:
+ outfile.write('Status: 302 Redirecting to continue URL\r\n')
+ for header in output_headers:
+ outfile.write(header)
+ outfile.write('Location: %s\r\n' % redirect_url)
+ outfile.write('\r\n')
+ else:
+ outfile.write('Status: 200\r\n')
+ outfile.write('Content-Type: text/html\r\n')
+ outfile.write('\r\n')
+ outfile.write(RenderLoginTemplate(login_url,
+ continue_url,
+ email,
+ admin))
+
+
+def main():
+ """Runs the login and logout CGI script."""
+ form = cgi.FieldStorage()
+ login_url = os.environ['PATH_INFO']
+ email = os.environ.get('USER_EMAIL', '')
+ admin = os.environ.get('USER_IS_ADMIN', '0') == '1'
+
+ action = form.getfirst(ACTION_PARAM)
+ set_email = form.getfirst(EMAIL_PARAM, '')
+ set_admin = form.getfirst(ADMIN_PARAM, '') == 'True'
+ continue_url = form.getfirst(CONTINUE_PARAM, '')
+
+ LoginCGI(login_url,
+ email,
+ admin,
+ action,
+ set_email,
+ set_admin,
+ continue_url,
+ sys.stdout)
+ return 0
+
+
+if __name__ == '__main__':
+ main()
diff --git a/google_appengine/google/appengine/tools/dev_appserver_login.pyc b/google_appengine/google/appengine/tools/dev_appserver_login.pyc
new file mode 100644
index 0000000..c4575c0
--- /dev/null
+++ b/google_appengine/google/appengine/tools/dev_appserver_login.pyc
Binary files differ
diff --git a/google_appengine/google/appengine/tools/dev_appserver_main.py b/google_appengine/google/appengine/tools/dev_appserver_main.py
new file mode 100755
index 0000000..232804a
--- /dev/null
+++ b/google_appengine/google/appengine/tools/dev_appserver_main.py
@@ -0,0 +1,498 @@
+#!/usr/bin/env python
+#
+# Copyright 2007 Google Inc.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
+"""Runs a development application server for an application.
+
+%(script)s [options] <application root>
+
+Application root must be the path to the application to run in this server.
+Must contain a valid app.yaml or app.yml file.
+
+Options:
+ --help, -h View this helpful message.
+ --debug, -d Use debug logging. (Default false)
+ --clear_datastore, -c Clear the Datastore on startup. (Default false)
+ --address=ADDRESS, -a ADDRESS
+ Address to which this server should bind. (Default
+ %(address)s).
+ --port=PORT, -p PORT Port for the server to run on. (Default %(port)s)
+ --datastore_path=PATH Path to use for storing Datastore file stub data.
+ (Default %(datastore_path)s)
+ --history_path=PATH Path to use for storing Datastore history.
+ (Default %(history_path)s)
+ --require_indexes Disallows queries that require composite indexes
+ not defined in index.yaml.
+ --smtp_host=HOSTNAME SMTP host to send test mail to. Leaving this
+ unset will disable SMTP mail sending.
+ (Default '%(smtp_host)s')
+ --smtp_port=PORT SMTP port to send test mail to.
+ (Default %(smtp_port)s)
+ --smtp_user=USER SMTP user to connect as. Stub will only attempt
+ to login if this field is non-empty.
+ (Default '%(smtp_user)s').
+ --smtp_password=PASSWORD Password for SMTP server.
+ (Default '%(smtp_password)s')
+ --enable_sendmail Enable sendmail when SMTP not configured.
+ (Default false)
+ --show_mail_body Log the body of emails in mail stub.
+ (Default false)
+ --auth_domain Authorization domain that this app runs in.
+ (Default gmail.com)
+ --debug_imports Enables debug logging for module imports, showing
+ search paths used for finding modules and any
+ errors encountered during the import process.
+ --allow_skipped_files Allow access to files matched by app.yaml's
+ skipped_files (default False)
+ --disable_static_caching Never allow the browser to cache static files.
+                             (Default: enabled if an expiration is set in
+                             app.yaml)
+"""
+
+
+
+from google.appengine.tools import os_compat
+
+import getopt
+import logging
+import os
+import re
+import signal
+import sys
+import traceback
+import tempfile
+
+logging.basicConfig(
+ level=logging.INFO,
+ format='%(levelname)-8s %(asctime)s %(filename)s:%(lineno)s] %(message)s')
+
+
+def SetGlobals():
+ """Set various global variables involving the 'google' package.
+
+ This function should not be called until sys.path has been properly set.
+ """
+ global yaml_errors, appcfg, appengine_rpc, dev_appserver, os_compat
+ from google.appengine.api import yaml_errors
+ from google.appengine.dist import py_zipimport
+ from google.appengine.tools import appcfg
+ from google.appengine.tools import appengine_rpc
+ from google.appengine.tools import dev_appserver
+ from google.appengine.tools import os_compat
+
+
+
+DEFAULT_ADMIN_CONSOLE_SERVER = 'appengine.google.com'
+
+ARG_ADDRESS = 'address'
+ARG_ADMIN_CONSOLE_SERVER = 'admin_console_server'
+ARG_ADMIN_CONSOLE_HOST = 'admin_console_host'
+ARG_AUTH_DOMAIN = 'auth_domain'
+ARG_CLEAR_DATASTORE = 'clear_datastore'
+ARG_DATASTORE_PATH = 'datastore_path'
+ARG_DEBUG_IMPORTS = 'debug_imports'
+ARG_ENABLE_SENDMAIL = 'enable_sendmail'
+ARG_SHOW_MAIL_BODY = 'show_mail_body'
+ARG_HISTORY_PATH = 'history_path'
+ARG_LOGIN_URL = 'login_url'
+ARG_LOG_LEVEL = 'log_level'
+ARG_PORT = 'port'
+ARG_REQUIRE_INDEXES = 'require_indexes'
+ARG_ALLOW_SKIPPED_FILES = 'allow_skipped_files'
+ARG_SMTP_HOST = 'smtp_host'
+ARG_SMTP_PASSWORD = 'smtp_password'
+ARG_SMTP_PORT = 'smtp_port'
+ARG_SMTP_USER = 'smtp_user'
+ARG_STATIC_CACHING = 'static_caching'
+ARG_TEMPLATE_DIR = 'template_dir'
+ARG_TRUSTED = 'trusted'
+
+SDK_PATH = os.path.dirname(
+ os.path.dirname(
+ os.path.dirname(
+ os.path.dirname(os_compat.__file__)
+ )
+ )
+ )
+
+DEFAULT_ARGS = {
+ ARG_PORT: 8080,
+ ARG_LOG_LEVEL: logging.INFO,
+ ARG_DATASTORE_PATH: os.path.join(tempfile.gettempdir(),
+ 'dev_appserver.datastore'),
+ ARG_HISTORY_PATH: os.path.join(tempfile.gettempdir(),
+ 'dev_appserver.datastore.history'),
+ ARG_LOGIN_URL: '/_ah/login',
+ ARG_CLEAR_DATASTORE: False,
+ ARG_REQUIRE_INDEXES: False,
+ ARG_TEMPLATE_DIR: os.path.join(SDK_PATH, 'templates'),
+ ARG_SMTP_HOST: '',
+ ARG_SMTP_PORT: 25,
+ ARG_SMTP_USER: '',
+ ARG_SMTP_PASSWORD: '',
+ ARG_ENABLE_SENDMAIL: False,
+ ARG_SHOW_MAIL_BODY: False,
+ ARG_AUTH_DOMAIN: 'gmail.com',
+ ARG_ADDRESS: 'localhost',
+ ARG_ADMIN_CONSOLE_SERVER: DEFAULT_ADMIN_CONSOLE_SERVER,
+ ARG_ADMIN_CONSOLE_HOST: None,
+ ARG_ALLOW_SKIPPED_FILES: False,
+ ARG_STATIC_CACHING: True,
+ ARG_TRUSTED: False,
+}
+
+API_PATHS = {'1':
+ {'google': (),
+ 'antlr3': ('lib', 'antlr3'),
+ 'django': ('lib', 'django'),
+ 'webob': ('lib', 'webob'),
+ 'yaml': ('lib', 'yaml', 'lib'),
+ }
+ }
+
+DEFAULT_API_VERSION = '1'
+
+API_PATHS['test'] = API_PATHS[DEFAULT_API_VERSION].copy()
+API_PATHS['test']['_test'] = ('nonexistent', 'test', 'path')
+
+
+def SetPaths(app_config_path):
+ """Set the interpreter to use the specified API version.
+
+ The app.yaml file is scanned for the api_version field and the value is
+ extracted. With that information, the paths in API_PATHS are added to the
+  front of sys.path to make sure that they take precedence over any other
+  paths to older versions of a package. All modules for each package set are
+  cleared out of sys.modules to make sure only the newest version is used.
+
+ Args:
+    app_config_path: Path to the app.yaml file.
+ """
+ api_version_re = re.compile(r'api_version:\s*(?P<api_version>[\w.]{1,32})')
+ api_version = None
+ app_config_file = open(app_config_path, 'r')
+ try:
+ for line in app_config_file:
+ re_match = api_version_re.match(line)
+ if re_match:
+ api_version = re_match.group('api_version')
+ break
+ finally:
+ app_config_file.close()
+
+ if api_version is None:
+ logging.error("Application configuration file missing an 'api_version' "
+ "value:\n%s" % app_config_path)
+ sys.exit(1)
+ if api_version not in API_PATHS:
+ logging.error("Value of %r for 'api_version' from the application "
+ "configuration file is not valid:\n%s" %
+ (api_version, app_config_path))
+ sys.exit(1)
+
+ if api_version == DEFAULT_API_VERSION:
+ return DEFAULT_API_VERSION
+
+  sdk_path = SDK_PATH
+ for pkg_name, path_parts in API_PATHS[api_version].iteritems():
+ for name in sys.modules.keys():
+ if name == pkg_name or name.startswith('%s.' % pkg_name):
+ del sys.modules[name]
+ pkg_path = os.path.join(sdk_path, *path_parts)
+ sys.path.insert(0, pkg_path)
+
+ return api_version
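
A minimal sketch of the flow above, using a throwaway directory and a hand-written app.yaml (both hypothetical). For the default version '1' SetPaths() returns without touching sys.path; any other known version would have its package paths prepended and the matching modules purged first.

    import os
    import tempfile

    scratch_dir = tempfile.mkdtemp()
    config_path = os.path.join(scratch_dir, 'app.yaml')
    config_file = open(config_path, 'w')
    try:
      config_file.write('application: example-app\n'
                        'version: 1\n'
                        'runtime: python\n'
                        'api_version: 1\n')
    finally:
      config_file.close()

    # '1' is DEFAULT_API_VERSION, so no paths are prepended and no
    # modules are cleared out of sys.modules.
    assert SetPaths(config_path) == '1'
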
+
+
+def PrintUsageExit(code):
+ """Prints usage information and exits with a status code.
+
+ Args:
+ code: Status code to pass to sys.exit() after displaying usage information.
+ """
+ render_dict = DEFAULT_ARGS.copy()
+ render_dict['script'] = os.path.basename(sys.argv[0])
+ print sys.modules['__main__'].__doc__ % render_dict
+ sys.stdout.flush()
+ sys.exit(code)
+
+
+def ParseArguments(argv):
+ """Parses command-line arguments.
+
+ Args:
+ argv: Command-line arguments, including the executable name, used to
+ execute this application.
+
+ Returns:
+ Tuple (args, option_dict) where:
+ args: List of command-line arguments following the executable name.
+ option_dict: Dictionary of parsed flags that maps keys from DEFAULT_ARGS
+ to their values, which are either pulled from the defaults, or from
+ command-line flags.
+ """
+ option_dict = DEFAULT_ARGS.copy()
+
+ try:
+ opts, args = getopt.gnu_getopt(
+ argv[1:],
+ 'a:cdhp:',
+ [ 'address=',
+ 'admin_console_server=',
+ 'admin_console_host=',
+ 'allow_skipped_files',
+ 'auth_domain=',
+ 'clear_datastore',
+ 'datastore_path=',
+ 'debug',
+ 'debug_imports',
+ 'enable_sendmail',
+ 'disable_static_caching',
+ 'show_mail_body',
+ 'help',
+ 'history_path=',
+ 'port=',
+ 'require_indexes',
+ 'smtp_host=',
+ 'smtp_password=',
+ 'smtp_port=',
+ 'smtp_user=',
+ 'template_dir=',
+ 'trusted',
+ ])
+ except getopt.GetoptError, e:
+ print >>sys.stderr, 'Error: %s' % e
+ PrintUsageExit(1)
+
+ for option, value in opts:
+ if option in ('-h', '--help'):
+ PrintUsageExit(0)
+
+ if option in ('-d', '--debug'):
+ option_dict[ARG_LOG_LEVEL] = logging.DEBUG
+
+ if option in ('-p', '--port'):
+ try:
+ option_dict[ARG_PORT] = int(value)
+        if not (0 < option_dict[ARG_PORT] <= 65535):
+ raise ValueError
+ except ValueError:
+ print >>sys.stderr, 'Invalid value supplied for port'
+ PrintUsageExit(1)
+
+ if option in ('-a', '--address'):
+ option_dict[ARG_ADDRESS] = value
+
+ if option == '--datastore_path':
+ option_dict[ARG_DATASTORE_PATH] = os.path.abspath(value)
+
+ if option == '--history_path':
+ option_dict[ARG_HISTORY_PATH] = os.path.abspath(value)
+
+ if option in ('-c', '--clear_datastore'):
+ option_dict[ARG_CLEAR_DATASTORE] = True
+
+ if option == '--require_indexes':
+ option_dict[ARG_REQUIRE_INDEXES] = True
+
+ if option == '--smtp_host':
+ option_dict[ARG_SMTP_HOST] = value
+
+ if option == '--smtp_port':
+ try:
+ option_dict[ARG_SMTP_PORT] = int(value)
+        if not (0 < option_dict[ARG_SMTP_PORT] <= 65535):
+ raise ValueError
+ except ValueError:
+ print >>sys.stderr, 'Invalid value supplied for SMTP port'
+ PrintUsageExit(1)
+
+ if option == '--smtp_user':
+ option_dict[ARG_SMTP_USER] = value
+
+ if option == '--smtp_password':
+ option_dict[ARG_SMTP_PASSWORD] = value
+
+ if option == '--enable_sendmail':
+ option_dict[ARG_ENABLE_SENDMAIL] = True
+
+ if option == '--show_mail_body':
+ option_dict[ARG_SHOW_MAIL_BODY] = True
+
+ if option == '--auth_domain':
+ option_dict['_DEFAULT_ENV_AUTH_DOMAIN'] = value
+
+ if option == '--debug_imports':
+ option_dict['_ENABLE_LOGGING'] = True
+
+ if option == '--template_dir':
+ option_dict[ARG_TEMPLATE_DIR] = value
+
+ if option == '--admin_console_server':
+ option_dict[ARG_ADMIN_CONSOLE_SERVER] = value.strip()
+
+ if option == '--admin_console_host':
+ option_dict[ARG_ADMIN_CONSOLE_HOST] = value
+
+ if option == '--allow_skipped_files':
+ option_dict[ARG_ALLOW_SKIPPED_FILES] = True
+
+ if option == '--disable_static_caching':
+ option_dict[ARG_STATIC_CACHING] = False
+
+ if option == '--trusted':
+ option_dict[ARG_TRUSTED] = True
+
+ return args, option_dict
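
As a quick illustration, the sketch below parses a hypothetical command line: the positional application directory survives in args, while the flags land in option_dict on top of the DEFAULT_ARGS copy.

    # Hypothetical invocation; argv[0] is the executable name and is skipped.
    argv = ['dev_appserver.py', '--port=9090', '--clear_datastore', 'myapp']
    args, option_dict = ParseArguments(argv)
    assert args == ['myapp']
    assert option_dict[ARG_PORT] == 9090
    assert option_dict[ARG_CLEAR_DATASTORE]
    assert option_dict[ARG_ADDRESS] == 'localhost'  # untouched default
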
+
+
+def MakeRpcServer(option_dict):
+ """Create a new HttpRpcServer.
+
+ Creates a new HttpRpcServer to check for updates to the SDK.
+
+ Args:
+ option_dict: The dict of command line options.
+
+ Returns:
+ A HttpRpcServer.
+ """
+ server = appengine_rpc.HttpRpcServer(
+ option_dict[ARG_ADMIN_CONSOLE_SERVER],
+ lambda: ('unused_email', 'unused_password'),
+ appcfg.GetUserAgent(),
+ appcfg.GetSourceName(),
+ host_override=option_dict[ARG_ADMIN_CONSOLE_HOST])
+ server.authenticated = True
+ return server
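
A short usage sketch, assuming SetGlobals() has already run so that the appcfg and appengine_rpc globals are bound:

    # Build the update-check server from the stock options; with the
    # defaults it points at DEFAULT_ADMIN_CONSOLE_SERVER.
    option_dict = DEFAULT_ARGS.copy()
    server = MakeRpcServer(option_dict)
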
+
+
+def SigTermHandler(signum, frame):
+ """Handler for TERM signal.
+
+ Raises a KeyboardInterrupt to perform a graceful shutdown on SIGTERM signal.
+ """
+ raise KeyboardInterrupt()
+
+
+def main(argv):
+ """Runs the development application server."""
+ args, option_dict = ParseArguments(argv)
+
+ if len(args) != 1:
+ print >>sys.stderr, 'Invalid arguments'
+ PrintUsageExit(1)
+
+ root_path = args[0]
+ for suffix in ('yaml', 'yml'):
+ path = os.path.join(root_path, 'app.%s' % suffix)
+ if os.path.exists(path):
+ api_version = SetPaths(path)
+ break
+ else:
+ logging.error("Application configuration file not found in %s" % root_path)
+ return 1
+
+ SetGlobals()
+ dev_appserver.API_VERSION = api_version
+
+ if '_DEFAULT_ENV_AUTH_DOMAIN' in option_dict:
+ auth_domain = option_dict['_DEFAULT_ENV_AUTH_DOMAIN']
+ dev_appserver.DEFAULT_ENV['AUTH_DOMAIN'] = auth_domain
+ if '_ENABLE_LOGGING' in option_dict:
+ enable_logging = option_dict['_ENABLE_LOGGING']
+ dev_appserver.HardenedModulesHook.ENABLE_LOGGING = enable_logging
+
+ log_level = option_dict[ARG_LOG_LEVEL]
+ port = option_dict[ARG_PORT]
+ datastore_path = option_dict[ARG_DATASTORE_PATH]
+ login_url = option_dict[ARG_LOGIN_URL]
+ template_dir = option_dict[ARG_TEMPLATE_DIR]
+ serve_address = option_dict[ARG_ADDRESS]
+ require_indexes = option_dict[ARG_REQUIRE_INDEXES]
+ allow_skipped_files = option_dict[ARG_ALLOW_SKIPPED_FILES]
+ static_caching = option_dict[ARG_STATIC_CACHING]
+
+ option_dict['root_path'] = os.path.realpath(root_path)
+
+ logging.getLogger().setLevel(log_level)
+
+ config = None
+ try:
+ config, matcher = dev_appserver.LoadAppConfig(root_path, {})
+ except yaml_errors.EventListenerError, e:
+ logging.error('Fatal error when loading application configuration:\n' +
+ str(e))
+ return 1
+ except dev_appserver.InvalidAppConfigError, e:
+ logging.error('Application configuration file invalid:\n%s', e)
+ return 1
+
+ if option_dict[ARG_ADMIN_CONSOLE_SERVER] != '':
+ server = MakeRpcServer(option_dict)
+ update_check = appcfg.UpdateCheck(server, config)
+ update_check.CheckSupportedVersion()
+ if update_check.AllowedToCheckForUpdates():
+ update_check.CheckForUpdates()
+
+ try:
+ dev_appserver.SetupStubs(config.application, **option_dict)
+ except:
+ exc_type, exc_value, exc_traceback = sys.exc_info()
+ logging.error(str(exc_type) + ': ' + str(exc_value))
+ logging.debug(''.join(traceback.format_exception(
+ exc_type, exc_value, exc_traceback)))
+ return 1
+
+ http_server = dev_appserver.CreateServer(
+ root_path,
+ login_url,
+ port,
+ template_dir,
+ sdk_dir=SDK_PATH,
+ serve_address=serve_address,
+ require_indexes=require_indexes,
+ allow_skipped_files=allow_skipped_files,
+ static_caching=static_caching)
+
+ signal.signal(signal.SIGTERM, SigTermHandler)
+
+ logging.info('Running application %s on port %d: http://%s:%d',
+ config.application, port, serve_address, port)
+ try:
+ try:
+ http_server.serve_forever()
+ except KeyboardInterrupt:
+ logging.info('Server interrupted by user, terminating')
+ except:
+ exc_info = sys.exc_info()
+ info_string = '\n'.join(traceback.format_exception(*exc_info))
+ logging.error('Error encountered:\n%s\nNow terminating.', info_string)
+ return 1
+ finally:
+ http_server.server_close()
+
+ return 0
+
+
+if __name__ == '__main__':
+ sys.exit(main(sys.argv))
+else:
+ SetGlobals()
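
Besides the command line, main() can be driven in-process. A minimal sketch, with a hypothetical application directory:

    # Hypothetical path; the directory must contain an app.yaml (or
    # app.yml) so that SetPaths() can find the api_version field.
    import sys
    sys.exit(main(['dev_appserver.py', '--port=8080', '/path/to/myapp']))
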
diff --git a/google_appengine/google/appengine/tools/os_compat.py b/google_appengine/google/appengine/tools/os_compat.py
new file mode 100755
index 0000000..4bfa34f
--- /dev/null
+++ b/google_appengine/google/appengine/tools/os_compat.py
@@ -0,0 +1,46 @@
+#!/usr/bin/env python
+#
+# Copyright 2007 Google Inc.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
+"""OS cross-platform compatibility tweaks.
+
+This module will, on import, change some parts of the running environment so
+that other modules do not need special handling when running on different
+operating systems, such as Linux, Mac OS X, or Windows.
+
+Some of these changes must be done before other modules are imported, so
+always import this module first.
+"""
+
+
+import os
+os.environ['TZ'] = 'UTC'
+import time
+if hasattr(time, 'tzset'):
+ time.tzset()
+
+import __builtin__
+
+
+if 'WindowsError' in __builtin__.__dict__:
+ WindowsError = WindowsError
+else:
+ class WindowsError(Exception):
+ """A fake Windows Error exception which should never be thrown."""
+
+
+ERROR_PATH_NOT_FOUND = 3
+ERROR_ACCESS_DENIED = 5
+ERROR_ALREADY_EXISTS = 183
\ No newline at end of file
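
Because this module always exports a WindowsError name, callers can write one portable except clause. A sketch with a hypothetical file path; on POSIX systems the fake class is never raised, so its clause is inert:

    import os
    from google.appengine.tools.os_compat import WindowsError

    try:
      os.remove('/tmp/example.lock')  # hypothetical file
    except WindowsError, e:
      # winerror is only populated by the real builtin on Windows.
      print 'Windows-specific failure: %d' % e.winerror
    except OSError, e:
      print 'POSIX failure: %s' % e
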
diff --git a/google_appengine/google/appengine/tools/os_compat.pyc b/google_appengine/google/appengine/tools/os_compat.pyc
new file mode 100644
index 0000000..fe0ff3e
--- /dev/null
+++ b/google_appengine/google/appengine/tools/os_compat.pyc
Binary files differ
diff --git a/google_appengine/google/appengine/tools/remote_api_shell.py b/google_appengine/google/appengine/tools/remote_api_shell.py
new file mode 100755
index 0000000..8705803
--- /dev/null
+++ b/google_appengine/google/appengine/tools/remote_api_shell.py
@@ -0,0 +1,94 @@
+#!/usr/bin/env python
+#
+# Copyright 2007 Google Inc.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
+
+"""An interactive python shell that uses remote_api.
+
+Usage:
+ remote_api_shell.py [-s HOSTNAME] APPID [PATH]
+"""
+
+
+import atexit
+import code
+import getpass
+import optparse
+import os
+import sys
+
+try:
+ import readline
+except ImportError:
+ readline = None
+
+from google.appengine.ext.remote_api import remote_api_stub
+
+from google.appengine.api import datastore
+from google.appengine.api import memcache
+from google.appengine.api import urlfetch
+from google.appengine.api import users
+from google.appengine.ext import db
+from google.appengine.ext import search
+
+
+HISTORY_PATH = os.path.expanduser('~/.remote_api_shell_history')
+DEFAULT_PATH = '/remote_api'
+BANNER = """App Engine remote_api shell
+Python %s
+The db, users, urlfetch, and memcache modules are imported.""" % sys.version
+
+
+def auth_func():
+ return (raw_input('Email: '), getpass.getpass('Password: '))
+
+
+def main(argv):
+ parser = optparse.OptionParser()
+ parser.add_option('-s', '--server', dest='server',
+ help='The hostname your app is deployed on. '
+ 'Defaults to <app_id>.appspot.com.')
+ (options, args) = parser.parse_args()
+
+ if not args or len(args) > 2:
+ print >> sys.stderr, __doc__
+ if len(args) > 2:
+ print >> sys.stderr, 'Unexpected arguments: %s' % args[2:]
+ sys.exit(1)
+
+ appid = args[0]
+ if len(args) == 2:
+ path = args[1]
+ else:
+ path = DEFAULT_PATH
+
+ remote_api_stub.ConfigureRemoteApi(appid, path, auth_func,
+ servername=options.server)
+ remote_api_stub.MaybeInvokeAuthentication()
+
+ os.environ['SERVER_SOFTWARE'] = 'Development (remote_api_shell)/1.0'
+
+ sys.ps1 = '%s> ' % appid
+ if readline is not None:
+ readline.parse_and_bind('tab: complete')
+ atexit.register(lambda: readline.write_history_file(HISTORY_PATH))
+ if os.path.exists(HISTORY_PATH):
+ readline.read_history_file(HISTORY_PATH)
+
+ code.interact(banner=BANNER, local=globals())
+
+
+if __name__ == '__main__':
+ main(sys.argv)
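
Once connected, the shell evaluates ordinary Python against the live application through remote_api. A hypothetical session (app id, email, and model names are illustrative only):

    $ remote_api_shell.py example-app
    Email: admin@example.com
    Password:
    App Engine remote_api shell
    ...
    example-app> memcache.get('greeting')
    example-app> db.GqlQuery('SELECT * FROM Greeting').count()
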
diff --git a/google_appengine/google/appengine/tools/requeue.py b/google_appengine/google/appengine/tools/requeue.py
new file mode 100755
index 0000000..986bc5c
--- /dev/null
+++ b/google_appengine/google/appengine/tools/requeue.py
@@ -0,0 +1,219 @@
+#!/usr/bin/env python
+#
+# Copyright 2007 Google Inc.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
+
+"""A thread-safe queue in which removed objects put back to the front."""
+
+
+import logging
+import Queue
+import threading
+import time
+
+logger = logging.getLogger('google.appengine.tools.requeue')
+
+
+class ReQueue(object):
+ """A special thread-safe queue.
+
+ A ReQueue allows unfinished work items to be returned with a call to
+  reput(). When an item is reput, task_done() should *not* be called
+  in addition; getting an item that has been reput does not increase
+  the number of outstanding tasks.
+
+ This class shares an interface with Queue.Queue and provides the
+ additional reput method.
+ """
+
+ def __init__(self,
+ queue_capacity,
+ requeue_capacity=None,
+ queue_factory=Queue.Queue,
+ get_time=time.time):
+ """Initialize a ReQueue instance.
+
+ Args:
+ queue_capacity: The number of items that can be put in the ReQueue.
+      requeue_capacity: The number of items that can be reput in the ReQueue.
+ queue_factory: Used for dependency injection.
+ get_time: Used for dependency injection.
+ """
+ if requeue_capacity is None:
+ requeue_capacity = queue_capacity
+
+ self.get_time = get_time
+ self.queue = queue_factory(queue_capacity)
+ self.requeue = queue_factory(requeue_capacity)
+ self.lock = threading.Lock()
+ self.put_cond = threading.Condition(self.lock)
+ self.get_cond = threading.Condition(self.lock)
+
+ def _DoWithTimeout(self,
+ action,
+ exc,
+ wait_cond,
+ done_cond,
+ lock,
+ timeout=None,
+ block=True):
+ """Performs the given action with a timeout.
+
+ The action must be non-blocking, and raise an instance of exc on a
+ recoverable failure. If the action fails with an instance of exc,
+ we wait on wait_cond before trying again. Failure after the
+ timeout is reached is propagated as an exception. Success is
+ signalled by notifying on done_cond and returning the result of
+ the action. If action raises any exception besides an instance of
+ exc, it is immediately propagated.
+
+ Args:
+ action: A callable that performs a non-blocking action.
+ exc: An exception type that is thrown by the action to indicate
+ a recoverable error.
+ wait_cond: A condition variable which should be waited on when
+ action throws exc.
+      done_cond: A condition variable to notify when the action succeeds.
+ lock: The lock used by wait_cond and done_cond.
+ timeout: A non-negative float indicating the maximum time to wait.
+ block: Whether to block if the action cannot complete immediately.
+
+ Returns:
+ The result of the action, if it is successful.
+
+ Raises:
+ ValueError: If the timeout argument is negative.
+ """
+ if timeout is not None and timeout < 0.0:
+ raise ValueError('\'timeout\' must not be a negative number')
+ if not block:
+ timeout = 0.0
+ result = None
+ success = False
+ start_time = self.get_time()
+ lock.acquire()
+ try:
+ while not success:
+ try:
+ result = action()
+ success = True
+ except Exception, e:
+ if not isinstance(e, exc):
+ raise e
+          if timeout is not None:
+            # Reset start_time so repeated failures only deduct the time
+            # elapsed since the previous attempt, not the total elapsed
+            # time again (which would double-count and expire early).
+            now = self.get_time()
+            timeout -= now - start_time
+            start_time = now
+            if timeout <= 0.0:
+              raise e
+          wait_cond.wait(timeout)
+ finally:
+ if success:
+ done_cond.notify()
+ lock.release()
+ return result
+
+ def put(self, item, block=True, timeout=None):
+ """Put an item into the requeue.
+
+ Args:
+ item: An item to add to the requeue.
+ block: Whether to block if the requeue is full.
+      timeout: Maximum seconds to wait until the queue is non-full.
+
+ Raises:
+ Queue.Full if the queue is full and the timeout expires.
+ """
+ def PutAction():
+ self.queue.put(item, block=False)
+ self._DoWithTimeout(PutAction,
+ Queue.Full,
+ self.get_cond,
+ self.put_cond,
+ self.lock,
+ timeout=timeout,
+ block=block)
+
+ def reput(self, item, block=True, timeout=None):
+ """Re-put an item back into the requeue.
+
+ Re-putting an item does not increase the number of outstanding
+ tasks, so the reput item should be uniquely associated with an
+    item that was previously removed from the requeue and for which
+    task_done() has not been called.
+
+ Args:
+ item: An item to add to the requeue.
+ block: Whether to block if the requeue is full.
+      timeout: Maximum seconds to wait until the queue is non-full.
+
+ Raises:
+      Queue.Full if the queue is full and the timeout expires.
+ """
+ def ReputAction():
+ self.requeue.put(item, block=False)
+ self._DoWithTimeout(ReputAction,
+ Queue.Full,
+ self.get_cond,
+ self.put_cond,
+ self.lock,
+ timeout=timeout,
+ block=block)
+
+ def get(self, block=True, timeout=None):
+ """Get an item from the requeue.
+
+ Args:
+ block: Whether to block if the requeue is empty.
+      timeout: Maximum seconds to wait until the requeue is non-empty.
+
+ Returns:
+ An item from the requeue.
+
+ Raises:
+ Queue.Empty if the queue is empty and the timeout expires.
+ """
+ def GetAction():
+ try:
+ result = self.requeue.get(block=False)
+ self.requeue.task_done()
+ except Queue.Empty:
+ result = self.queue.get(block=False)
+ return result
+ return self._DoWithTimeout(GetAction,
+ Queue.Empty,
+ self.put_cond,
+ self.get_cond,
+ self.lock,
+ timeout=timeout,
+ block=block)
+
+ def join(self):
+ """Blocks until all of the items in the requeue have been processed."""
+ self.queue.join()
+
+ def task_done(self):
+ """Indicate that a previously enqueued item has been fully processed."""
+ self.queue.task_done()
+
+ def empty(self):
+ """Returns true if the requeue is empty."""
+ return self.queue.empty() and self.requeue.empty()
+
+ def get_nowait(self):
+ """Try to get an item from the queue without blocking."""
+ return self.get(block=False)
+
+  def qsize(self):
+    """Returns the approximate number of items in the requeue."""
+    return self.queue.qsize() + self.requeue.qsize()
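
A producer/consumer sketch tying the pieces together: reput() returns an unfinished item to the front without inflating the outstanding-task count, so task_done() is called exactly once per put(). Item names are illustrative.

    work = ReQueue(queue_capacity=10)
    work.put('item-1')
    work.put('item-2')

    item = work.get()            # -> 'item-1'
    work.reput(item)             # unfinished; do NOT call task_done() yet
    assert work.get() == item    # reput items come back before queued ones
    work.task_done()             # 'item-1' fully processed

    assert work.get() == 'item-2'
    work.task_done()
    work.join()                  # returns immediately: nothing outstanding
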
diff --git a/google_appengine/google/appengine/tools/requeue.pyc b/google_appengine/google/appengine/tools/requeue.pyc
new file mode 100644
index 0000000..fc51eb2
--- /dev/null
+++ b/google_appengine/google/appengine/tools/requeue.pyc
Binary files differ