summaryrefslogtreecommitdiffstats
path: root/google_appengine/lib/django/django/core
diff options
context:
space:
mode:
Diffstat (limited to 'google_appengine/lib/django/django/core')
-rwxr-xr-xgoogle_appengine/lib/django/django/core/__init__.py0
-rw-r--r--google_appengine/lib/django/django/core/__init__.pycbin0 -> 158 bytes
-rwxr-xr-xgoogle_appengine/lib/django/django/core/cache/__init__.py54
-rwxr-xr-xgoogle_appengine/lib/django/django/core/cache/backends/__init__.py0
-rwxr-xr-xgoogle_appengine/lib/django/django/core/cache/backends/base.py56
-rwxr-xr-xgoogle_appengine/lib/django/django/core/cache/backends/db.py82
-rwxr-xr-xgoogle_appengine/lib/django/django/core/cache/backends/dummy.py22
-rwxr-xr-xgoogle_appengine/lib/django/django/core/cache/backends/filebased.py80
-rwxr-xr-xgoogle_appengine/lib/django/django/core/cache/backends/locmem.py47
-rwxr-xr-xgoogle_appengine/lib/django/django/core/cache/backends/memcached.py29
-rwxr-xr-xgoogle_appengine/lib/django/django/core/cache/backends/simple.py64
-rwxr-xr-xgoogle_appengine/lib/django/django/core/context_processors.py69
-rwxr-xr-xgoogle_appengine/lib/django/django/core/exceptions.py25
-rw-r--r--google_appengine/lib/django/django/core/exceptions.pycbin0 -> 1893 bytes
-rwxr-xr-xgoogle_appengine/lib/django/django/core/handler.py11
-rwxr-xr-xgoogle_appengine/lib/django/django/core/handlers/__init__.py0
-rwxr-xr-xgoogle_appengine/lib/django/django/core/handlers/base.py131
-rwxr-xr-xgoogle_appengine/lib/django/django/core/handlers/modpython.py177
-rwxr-xr-xgoogle_appengine/lib/django/django/core/handlers/profiler-hotshot.py22
-rwxr-xr-xgoogle_appengine/lib/django/django/core/handlers/wsgi.py207
-rwxr-xr-xgoogle_appengine/lib/django/django/core/mail.py108
-rwxr-xr-xgoogle_appengine/lib/django/django/core/management.py1670
-rwxr-xr-xgoogle_appengine/lib/django/django/core/paginator.py88
-rwxr-xr-xgoogle_appengine/lib/django/django/core/serializers/__init__.py90
-rwxr-xr-xgoogle_appengine/lib/django/django/core/serializers/base.py165
-rwxr-xr-xgoogle_appengine/lib/django/django/core/serializers/json.py51
-rwxr-xr-xgoogle_appengine/lib/django/django/core/serializers/python.py101
-rwxr-xr-xgoogle_appengine/lib/django/django/core/serializers/pyyaml.py36
-rwxr-xr-xgoogle_appengine/lib/django/django/core/serializers/xml_serializer.py229
-rwxr-xr-xgoogle_appengine/lib/django/django/core/servers/__init__.py0
-rwxr-xr-xgoogle_appengine/lib/django/django/core/servers/basehttp.py664
-rwxr-xr-xgoogle_appengine/lib/django/django/core/servers/fastcgi.py158
-rwxr-xr-xgoogle_appengine/lib/django/django/core/signals.py3
-rwxr-xr-xgoogle_appengine/lib/django/django/core/template_loader.py7
-rwxr-xr-xgoogle_appengine/lib/django/django/core/urlresolvers.py241
-rwxr-xr-xgoogle_appengine/lib/django/django/core/validators.py573
-rwxr-xr-xgoogle_appengine/lib/django/django/core/xheaders.py22
37 files changed, 5282 insertions, 0 deletions
diff --git a/google_appengine/lib/django/django/core/__init__.py b/google_appengine/lib/django/django/core/__init__.py
new file mode 100755
index 0000000..e69de29
--- /dev/null
+++ b/google_appengine/lib/django/django/core/__init__.py
diff --git a/google_appengine/lib/django/django/core/__init__.pyc b/google_appengine/lib/django/django/core/__init__.pyc
new file mode 100644
index 0000000..0131802
--- /dev/null
+++ b/google_appengine/lib/django/django/core/__init__.pyc
Binary files differ
diff --git a/google_appengine/lib/django/django/core/cache/__init__.py b/google_appengine/lib/django/django/core/cache/__init__.py
new file mode 100755
index 0000000..6da8e88
--- /dev/null
+++ b/google_appengine/lib/django/django/core/cache/__init__.py
@@ -0,0 +1,54 @@
+"""
+Caching framework.
+
+This package defines set of cache backends that all conform to a simple API.
+In a nutshell, a cache is a set of values -- which can be any object that
+may be pickled -- identified by string keys. For the complete API, see
+the abstract BaseCache class in django.core.cache.backends.base.
+
+Client code should not access a cache backend directly; instead it should
+either use the "cache" variable made available here, or it should use the
+get_cache() function made available here. get_cache() takes a backend URI
+(e.g. "memcached://127.0.0.1:11211/") and returns an instance of a backend
+cache class.
+
+See docs/cache.txt for information on the public API.
+"""
+
+from cgi import parse_qsl
+from django.conf import settings
+from django.core.cache.backends.base import InvalidCacheBackendError
+
BACKENDS = {
    # name for use in settings file --> name of module in "backends" directory
    'memcached': 'memcached',
    'simple': 'simple',
    'locmem': 'locmem',
    'file': 'filebased',
    'db': 'db',
    'dummy': 'dummy',
}

def get_cache(backend_uri):
    """
    Instantiate and return a cache backend for a URI such as
    "memcached://127.0.0.1:11211/" or "file:///var/tmp/cache?max_entries=500".

    The scheme selects a module from BACKENDS, the host part becomes the
    backend's first constructor argument (server list, table name or
    directory, depending on the backend) and the query string becomes the
    params dict.  Raises InvalidCacheBackendError when the URI is malformed
    or names an unknown scheme.
    """
    # Use raise Exc(msg) -- valid in every Python version -- instead of the
    # Python-2-only "raise Exc, msg" statement form.
    if ':' not in backend_uri:
        raise InvalidCacheBackendError("Backend URI must start with scheme://")
    scheme, rest = backend_uri.split(':', 1)
    if not rest.startswith('//'):
        raise InvalidCacheBackendError("Backend URI must start with scheme://")
    if scheme not in BACKENDS:
        raise InvalidCacheBackendError("%r is not a valid cache backend" % scheme)

    # Everything after "scheme://" up to an optional "?query" is the host;
    # the query string is parsed into the backend's params dict.
    host = rest[2:]
    qpos = rest.find('?')
    if qpos != -1:
        params = dict(parse_qsl(rest[qpos+1:]))
        host = rest[2:qpos]
    else:
        params = {}
    if host.endswith('/'):
        host = host[:-1]

    cache_class = getattr(__import__('django.core.cache.backends.%s' % BACKENDS[scheme], {}, {}, ['']), 'CacheClass')
    return cache_class(host, params)

# The default, settings-configured cache instance used by client code.
cache = get_cache(settings.CACHE_BACKEND)
diff --git a/google_appengine/lib/django/django/core/cache/backends/__init__.py b/google_appengine/lib/django/django/core/cache/backends/__init__.py
new file mode 100755
index 0000000..e69de29
--- /dev/null
+++ b/google_appengine/lib/django/django/core/cache/backends/__init__.py
diff --git a/google_appengine/lib/django/django/core/cache/backends/base.py b/google_appengine/lib/django/django/core/cache/backends/base.py
new file mode 100755
index 0000000..ef5f6a6
--- /dev/null
+++ b/google_appengine/lib/django/django/core/cache/backends/base.py
@@ -0,0 +1,56 @@
+"Base Cache class."
+
+from django.core.exceptions import ImproperlyConfigured
+
class InvalidCacheBackendError(ImproperlyConfigured):
    # Raised by get_cache() when a backend URI is malformed or names an
    # unknown scheme; a configuration error, hence the base class.
    pass
+
class BaseCache(object):
    """
    Abstract cache interface.  Concrete backends must override get(), set()
    and delete(); get_many() and has_key() have generic implementations
    built on get().
    """

    def __init__(self, params):
        # Fall back to a five-minute default when 'timeout' is absent or
        # cannot be coerced to an integer.
        raw_timeout = params.get('timeout', 300)
        try:
            raw_timeout = int(raw_timeout)
        except (ValueError, TypeError):
            raw_timeout = 300
        self.default_timeout = raw_timeout

    def get(self, key, default=None):
        """
        Fetch a given key from the cache. If the key does not exist, return
        default, which itself defaults to None.
        """
        raise NotImplementedError

    def set(self, key, value, timeout=None):
        """
        Set a value in the cache. If timeout is given, that timeout will be
        used for the key; otherwise the default cache timeout will be used.
        """
        raise NotImplementedError

    def delete(self, key):
        """
        Delete a key from the cache, failing silently.
        """
        raise NotImplementedError

    def get_many(self, keys):
        """
        Fetch a bunch of keys from the cache.  Backends with a native
        multi-get (e.g. memcached) override this for speed.

        Returns a dict mapping each present key to its value; missing keys
        are simply absent from the result.
        """
        found = {}
        for key in keys:
            value = self.get(key)
            if value is not None:
                found[key] = value
        return found

    def has_key(self, key):
        """
        Returns True if the key is in the cache and has not expired.
        """
        return self.get(key) is not None
diff --git a/google_appengine/lib/django/django/core/cache/backends/db.py b/google_appengine/lib/django/django/core/cache/backends/db.py
new file mode 100755
index 0000000..4a0d44a
--- /dev/null
+++ b/google_appengine/lib/django/django/core/cache/backends/db.py
@@ -0,0 +1,82 @@
+"Database cache backend."
+
+from django.core.cache.backends.base import BaseCache
+from django.db import connection, transaction, DatabaseError
+import base64, time
+from datetime import datetime
+try:
+ import cPickle as pickle
+except ImportError:
+ import pickle
+
class CacheClass(BaseCache):
    # Cache backend storing entries in a database table (self._table) with
    # columns cache_key, value (base64-encoded pickle) and expires.
    def __init__(self, table, params):
        BaseCache.__init__(self, params)
        self._table = table
        # Row count above which set() triggers a cull.
        max_entries = params.get('max_entries', 300)
        try:
            self._max_entries = int(max_entries)
        except (ValueError, TypeError):
            self._max_entries = 300
        # 1/cull_frequency of the sorted keys are dropped when culling;
        # 0 means "empty the whole table".
        cull_frequency = params.get('cull_frequency', 3)
        try:
            self._cull_frequency = int(cull_frequency)
        except (ValueError, TypeError):
            self._cull_frequency = 3

    def get(self, key, default=None):
        # Return the unpickled value for key; expired rows are deleted on
        # read and reported as a miss.
        cursor = connection.cursor()
        cursor.execute("SELECT cache_key, value, expires FROM %s WHERE cache_key = %%s" % self._table, [key])
        row = cursor.fetchone()
        if row is None:
            return default
        now = datetime.now()
        if row[2] < now:
            cursor.execute("DELETE FROM %s WHERE cache_key = %%s" % self._table, [key])
            transaction.commit_unless_managed()
            return default
        return pickle.loads(base64.decodestring(row[1]))

    def set(self, key, value, timeout=None):
        # Pickle (protocol 2) + base64 the value and upsert the row,
        # culling first when the table has grown past max_entries.
        if timeout is None:
            timeout = self.default_timeout
        cursor = connection.cursor()
        cursor.execute("SELECT COUNT(*) FROM %s" % self._table)
        num = cursor.fetchone()[0]
        now = datetime.now().replace(microsecond=0)
        exp = datetime.fromtimestamp(time.time() + timeout).replace(microsecond=0)
        if num > self._max_entries:
            self._cull(cursor, now)
        encoded = base64.encodestring(pickle.dumps(value, 2)).strip()
        cursor.execute("SELECT cache_key FROM %s WHERE cache_key = %%s" % self._table, [key])
        try:
            if cursor.fetchone():
                cursor.execute("UPDATE %s SET value = %%s, expires = %%s WHERE cache_key = %%s" % self._table, [encoded, str(exp), key])
            else:
                cursor.execute("INSERT INTO %s (cache_key, value, expires) VALUES (%%s, %%s, %%s)" % self._table, [key, encoded, str(exp)])
        except DatabaseError:
            # To be threadsafe, updates/inserts are allowed to fail silently
            pass
        else:
            transaction.commit_unless_managed()

    def delete(self, key):
        # Remove the row for key, committing immediately.
        cursor = connection.cursor()
        cursor.execute("DELETE FROM %s WHERE cache_key = %%s" % self._table, [key])
        transaction.commit_unless_managed()

    def has_key(self, key):
        # NOTE(review): unlike get(), this does not check the expires
        # column, so an expired-but-unculled row still reports True.
        cursor = connection.cursor()
        cursor.execute("SELECT cache_key FROM %s WHERE cache_key = %%s" % self._table, [key])
        return cursor.fetchone() is not None

    def _cull(self, cursor, now):
        # Drop expired rows, then if still over max_entries delete every
        # key below the 1/cull_frequency-th key in sort order.
        if self._cull_frequency == 0:
            cursor.execute("DELETE FROM %s" % self._table)
        else:
            cursor.execute("DELETE FROM %s WHERE expires < %%s" % self._table, [str(now)])
            cursor.execute("SELECT COUNT(*) FROM %s" % self._table)
            num = cursor.fetchone()[0]
            if num > self._max_entries:
                cursor.execute("SELECT cache_key FROM %s ORDER BY cache_key LIMIT 1 OFFSET %%s" % self._table, [num / self._cull_frequency])
                cursor.execute("DELETE FROM %s WHERE cache_key < %%s" % self._table, [cursor.fetchone()[0]])
diff --git a/google_appengine/lib/django/django/core/cache/backends/dummy.py b/google_appengine/lib/django/django/core/cache/backends/dummy.py
new file mode 100755
index 0000000..4c64161
--- /dev/null
+++ b/google_appengine/lib/django/django/core/cache/backends/dummy.py
@@ -0,0 +1,22 @@
+"Dummy cache backend"
+
+from django.core.cache.backends.base import BaseCache
+
class CacheClass(BaseCache):
    # A cache that never stores anything: every write is a no-op and every
    # read behaves as a miss.
    def __init__(self, *args, **kwargs):
        # Intentionally skips BaseCache.__init__ -- no state is kept.
        pass

    def get(self, key, default=None):
        # Always a miss.
        return default

    def set(self, *args, **kwargs):
        pass

    def delete(self, *args, **kwargs):
        pass

    def get_many(self, *args, **kwargs):
        # No keys are ever present.
        return {}

    def has_key(self, *args, **kwargs):
        return False
diff --git a/google_appengine/lib/django/django/core/cache/backends/filebased.py b/google_appengine/lib/django/django/core/cache/backends/filebased.py
new file mode 100755
index 0000000..faaf891
--- /dev/null
+++ b/google_appengine/lib/django/django/core/cache/backends/filebased.py
@@ -0,0 +1,80 @@
+"File-based cache backend"
+
+from django.core.cache.backends.simple import CacheClass as SimpleCacheClass
+import os, time, urllib
+try:
+ import cPickle as pickle
+except ImportError:
+ import pickle
+
class CacheClass(SimpleCacheClass):
    """
    Cache backend storing each entry as a file under self._dir.  Each file
    holds two consecutive pickles: the expiry timestamp, then the value.
    """

    def __init__(self, dir, params):
        self._dir = dir
        if not os.path.exists(self._dir):
            self._createdir()
        SimpleCacheClass.__init__(self, dir, params)
        # Only the max_entries/cull_frequency settings from the parent are
        # used; entries live on disk, not in the in-memory dicts.
        del self._cache
        del self._expire_info

    def get(self, key, default=None):
        "Return the cached value; expired files are removed and reported as a miss."
        fname = self._key_to_file(key)
        try:
            f = open(fname, 'rb')
            try:
                exp = pickle.load(f)
                if exp < time.time():
                    f.close()
                    os.remove(fname)
                else:
                    return pickle.load(f)
            finally:
                # BUGFIX: the original leaked the file handle on the hit
                # path; double-close on the expired path is harmless.
                f.close()
        except (IOError, OSError, EOFError, pickle.PickleError):
            pass
        return default

    def set(self, key, value, timeout=None):
        "Write value to key's file, culling first if there are too many files."
        fname = self._key_to_file(key)
        if timeout is None:
            timeout = self.default_timeout
        try:
            filelist = os.listdir(self._dir)
        except (IOError, OSError):
            self._createdir()
            filelist = []
        if len(filelist) > self._max_entries:
            self._cull(filelist)
        try:
            f = open(fname, 'wb')
            try:
                # Expiry timestamp first, then the payload (protocol 2).
                pickle.dump(time.time() + timeout, f, 2)
                pickle.dump(value, f, 2)
            finally:
                # BUGFIX: the original never closed the written file.
                f.close()
        except (IOError, OSError):
            pass

    def delete(self, key):
        "Remove key's file, failing silently."
        try:
            os.remove(self._key_to_file(key))
        except (IOError, OSError):
            pass

    def has_key(self, key):
        # NOTE(review): does not check expiry, so a stale file still counts
        # as present -- mirrors the original behaviour.
        return os.path.exists(self._key_to_file(key))

    def _cull(self, filelist):
        "Delete every cull_frequency-th file (or all of them when 0)."
        if self._cull_frequency == 0:
            doomed = filelist
        else:
            doomed = [k for (i, k) in enumerate(filelist) if i % self._cull_frequency == 0]
        for fname in doomed:
            try:
                os.remove(os.path.join(self._dir, fname))
            except (IOError, OSError):
                pass

    def _createdir(self):
        "Create the cache directory, raising EnvironmentError on failure."
        try:
            os.makedirs(self._dir)
        except OSError:
            # Fixed the stray trailing quote in the message and the
            # Python-2-only raise statement syntax.
            raise EnvironmentError("Cache directory '%s' does not exist and could not be created" % self._dir)

    def _key_to_file(self, key):
        "Map a cache key to a safe filename inside self._dir."
        return os.path.join(self._dir, urllib.quote_plus(key))
diff --git a/google_appengine/lib/django/django/core/cache/backends/locmem.py b/google_appengine/lib/django/django/core/cache/backends/locmem.py
new file mode 100755
index 0000000..0e21b80
--- /dev/null
+++ b/google_appengine/lib/django/django/core/cache/backends/locmem.py
@@ -0,0 +1,47 @@
+"Thread-safe in-memory cache backend."
+
+from django.core.cache.backends.simple import CacheClass as SimpleCacheClass
+from django.utils.synch import RWLock
+import copy, time
+
class CacheClass(SimpleCacheClass):
    """
    Thread-safe wrapper around the simple in-memory backend: reads take a
    reader lock, mutations take the writer lock.
    """

    def __init__(self, host, params):
        SimpleCacheClass.__init__(self, host, params)
        self._lock = RWLock()

    def get(self, key, default=None):
        "Return the cached value under a reader lock; expire stale keys under the writer lock."
        should_delete = False
        self._lock.reader_enters()
        try:
            now = time.time()
            exp = self._expire_info.get(key)
            if exp is None:
                return default
            elif exp < now:
                # Expired: deleting needs the writer lock, which must not be
                # acquired while holding the reader lock.
                should_delete = True
            else:
                return copy.deepcopy(self._cache[key])
        finally:
            self._lock.reader_leaves()
        if should_delete:
            self._lock.writer_enters()
            try:
                # BUGFIX: another thread may have deleted the key between
                # reader_leaves() and writer_enters(); the original's bare
                # del raised KeyError in that window.
                try:
                    del self._cache[key]
                    del self._expire_info[key]
                except KeyError:
                    pass
                return default
            finally:
                self._lock.writer_leaves()

    def set(self, key, value, timeout=None):
        "Store value under the writer lock."
        self._lock.writer_enters()
        try:
            SimpleCacheClass.set(self, key, value, timeout)
        finally:
            self._lock.writer_leaves()

    def delete(self, key):
        "Delete key under the writer lock, failing silently."
        self._lock.writer_enters()
        try:
            SimpleCacheClass.delete(self, key)
        finally:
            self._lock.writer_leaves()
diff --git a/google_appengine/lib/django/django/core/cache/backends/memcached.py b/google_appengine/lib/django/django/core/cache/backends/memcached.py
new file mode 100755
index 0000000..180f95d
--- /dev/null
+++ b/google_appengine/lib/django/django/core/cache/backends/memcached.py
@@ -0,0 +1,29 @@
+"Memcached cache backend"
+
+from django.core.cache.backends.base import BaseCache, InvalidCacheBackendError
+
+try:
+ import memcache
+except ImportError:
+ raise InvalidCacheBackendError, "Memcached cache backend requires the 'memcache' library"
+
class CacheClass(BaseCache):
    "Cache backend backed by one or more memcached servers."

    def __init__(self, server, params):
        # 'server' may name several hosts separated by semicolons.
        BaseCache.__init__(self, params)
        self._cache = memcache.Client(server.split(';'))

    def get(self, key, default=None):
        "Return the cached value, or default on a miss."
        value = self._cache.get(key)
        if value is not None:
            return value
        return default

    def set(self, key, value, timeout=0):
        # A falsy timeout (0/None) falls back to the configured default.
        self._cache.set(key, value, timeout or self.default_timeout)

    def delete(self, key):
        "Remove key from memcached."
        self._cache.delete(key)

    def get_many(self, keys):
        # memcached fetches several keys in a single round trip.
        return self._cache.get_multi(keys)
diff --git a/google_appengine/lib/django/django/core/cache/backends/simple.py b/google_appengine/lib/django/django/core/cache/backends/simple.py
new file mode 100755
index 0000000..175944a
--- /dev/null
+++ b/google_appengine/lib/django/django/core/cache/backends/simple.py
@@ -0,0 +1,64 @@
+"Single-process in-memory cache backend."
+
+from django.core.cache.backends.base import BaseCache
+import time
+
class CacheClass(BaseCache):
    """
    In-memory cache for a single process.  Not thread-safe on its own; the
    locmem backend wraps this class with a reader/writer lock.
    """

    def __init__(self, host, params):
        BaseCache.__init__(self, params)
        self._cache = {}          # key -> value
        self._expire_info = {}    # key -> absolute expiry time (epoch seconds)

        # Cull once the cache grows to max_entries keys ...
        max_entries = params.get('max_entries', 300)
        try:
            self._max_entries = int(max_entries)
        except (ValueError, TypeError):
            self._max_entries = 300

        # ... dropping every cull_frequency-th key (0 = drop everything).
        cull_frequency = params.get('cull_frequency', 3)
        try:
            self._cull_frequency = int(cull_frequency)
        except (ValueError, TypeError):
            self._cull_frequency = 3

    def get(self, key, default=None):
        "Return the cached value; expired entries are deleted and reported as a miss."
        now = time.time()
        exp = self._expire_info.get(key)
        if exp is None:
            return default
        elif exp < now:
            del self._cache[key]
            del self._expire_info[key]
            return default
        else:
            return self._cache[key]

    def set(self, key, value, timeout=None):
        "Store value under key for timeout seconds (default_timeout when None)."
        if len(self._cache) >= self._max_entries:
            self._cull()
        if timeout is None:
            timeout = self.default_timeout
        self._cache[key] = value
        self._expire_info[key] = time.time() + timeout

    def delete(self, key):
        "Remove key from both dicts, silently ignoring a miss."
        try:
            del self._cache[key]
        except KeyError:
            pass
        try:
            del self._expire_info[key]
        except KeyError:
            pass

    def has_key(self, key):
        # 'in' replaces dict.has_key(), which was removed in Python 3.
        # NOTE(review): expiry is not checked here, matching the original.
        return key in self._cache

    def _cull(self):
        "Evict entries: everything when cull_frequency is 0, else 1 key in N."
        if self._cull_frequency == 0:
            self._cache.clear()
            self._expire_info.clear()
        else:
            doomed = [k for (i, k) in enumerate(self._cache) if i % self._cull_frequency == 0]
            for k in doomed:
                self.delete(k)
diff --git a/google_appengine/lib/django/django/core/context_processors.py b/google_appengine/lib/django/django/core/context_processors.py
new file mode 100755
index 0000000..f4b288d
--- /dev/null
+++ b/google_appengine/lib/django/django/core/context_processors.py
@@ -0,0 +1,69 @@
+"""
+A set of request processors that return dictionaries to be merged into a
+template context. Each function takes the request object as its only parameter
+and returns a dictionary to add to the context.
+
+These are referenced from the setting TEMPLATE_CONTEXT_PROCESSORS and used by
+RequestContext.
+"""
+
+from django.conf import settings
+
def auth(request):
    """
    Context variables required by apps that use Django's authentication
    system: the current user, any queued messages, and a permission proxy.
    """
    user = request.user
    return {
        'user': user,
        'messages': user.get_and_delete_messages(),
        'perms': PermWrapper(user),
    }
+
def debug(request):
    "Expose the SQL query log to templates, but only for internal IPs with DEBUG on."
    extras = {}
    if settings.DEBUG and request.META.get('REMOTE_ADDR') in settings.INTERNAL_IPS:
        from django.db import connection
        extras['debug'] = True
        extras['sql_queries'] = connection.queries
    return extras
+
def i18n(request):
    "Expose the configured languages and the request's active language to templates."
    from django.utils import translation
    extras = {'LANGUAGES': settings.LANGUAGES}
    # Prefer the language negotiated for this request, falling back to the
    # site-wide setting.
    extras['LANGUAGE_CODE'] = getattr(request, 'LANGUAGE_CODE', settings.LANGUAGE_CODE)
    extras['LANGUAGE_BIDI'] = translation.get_language_bidi()
    return extras
+
def request(request):
    "Expose the request object itself to templates."
    return dict(request=request)
+
# PermWrapper and PermLookupDict proxy the permissions system into objects
# that the template system can understand.

class PermLookupDict(object):
    "Dict-like view of a single app's permissions for one user."

    def __init__(self, user, module_name):
        self.user = user
        self.module_name = module_name

    def __repr__(self):
        return str(self.user.get_all_permissions())

    def __getitem__(self, perm_name):
        # {{ perms.app.perm }} -> user.has_perm("app.perm")
        return self.user.has_perm("%s.%s" % (self.module_name, perm_name))

    def __nonzero__(self):
        # {% if perms.app %} -> does the user hold any permission in the app?
        return self.user.has_module_perms(self.module_name)

class PermWrapper(object):
    "Maps an app label to a PermLookupDict for the wrapped user."

    def __init__(self, user):
        self.user = user

    def __getitem__(self, module_name):
        return PermLookupDict(self.user, module_name)
diff --git a/google_appengine/lib/django/django/core/exceptions.py b/google_appengine/lib/django/django/core/exceptions.py
new file mode 100755
index 0000000..f22f67c
--- /dev/null
+++ b/google_appengine/lib/django/django/core/exceptions.py
@@ -0,0 +1,25 @@
+"Global Django exceptions"
+
class ObjectDoesNotExist(Exception):
    "A lookup matched no object."
    # Tells the template system to swallow this exception silently when it
    # escapes during variable resolution.
    silent_variable_failure = True

class SuspiciousOperation(Exception):
    "The user did something that looks malicious."

class PermissionDenied(Exception):
    "The user lacked permission for the requested action."

class ViewDoesNotExist(Exception):
    "The requested view could not be found."

class MiddlewareNotUsed(Exception):
    "Raised by a middleware constructor to opt out of this server configuration."

class ImproperlyConfigured(Exception):
    "Django's configuration is broken in some way."
diff --git a/google_appengine/lib/django/django/core/exceptions.pyc b/google_appengine/lib/django/django/core/exceptions.pyc
new file mode 100644
index 0000000..52f0758
--- /dev/null
+++ b/google_appengine/lib/django/django/core/exceptions.pyc
Binary files differ
diff --git a/google_appengine/lib/django/django/core/handler.py b/google_appengine/lib/django/django/core/handler.py
new file mode 100755
index 0000000..0394067
--- /dev/null
+++ b/google_appengine/lib/django/django/core/handler.py
@@ -0,0 +1,11 @@
+# This module is DEPRECATED!
+#
+# You should no longer be pointing your mod_python configuration
+# at "django.core.handler".
+#
+# Use "django.core.handlers.modpython" instead.
+
+from django.core.handlers.modpython import ModPythonHandler
+
def handler(req):
    # Backwards-compatibility shim: delegates to the modern mod_python
    # handler (see the deprecation note at the top of this module).
    return ModPythonHandler()(req)
diff --git a/google_appengine/lib/django/django/core/handlers/__init__.py b/google_appengine/lib/django/django/core/handlers/__init__.py
new file mode 100755
index 0000000..e69de29
--- /dev/null
+++ b/google_appengine/lib/django/django/core/handlers/__init__.py
diff --git a/google_appengine/lib/django/django/core/handlers/base.py b/google_appengine/lib/django/django/core/handlers/base.py
new file mode 100755
index 0000000..ca48b30
--- /dev/null
+++ b/google_appengine/lib/django/django/core/handlers/base.py
@@ -0,0 +1,131 @@
+from django.core import signals
+from django.dispatch import dispatcher
+from django import http
+import sys
+
class BaseHandler(object):
    # Common request machinery shared by the WSGI and mod_python handlers.
    # Subclasses call load_middleware() once, then get_response() per request.
    def __init__(self):
        # All four middleware lists start as None; load_middleware() fills
        # them, and handlers use the None-ness to lazily initialize.
        self._request_middleware = self._view_middleware = self._response_middleware = self._exception_middleware = None

    def load_middleware(self):
        """
        Populate middleware lists from settings.MIDDLEWARE_CLASSES.

        Must be called after the environment is fixed (see __call__).
        """
        from django.conf import settings
        from django.core import exceptions
        self._request_middleware = []
        self._view_middleware = []
        self._response_middleware = []
        self._exception_middleware = []
        for middleware_path in settings.MIDDLEWARE_CLASSES:
            try:
                dot = middleware_path.rindex('.')
            except ValueError:
                raise exceptions.ImproperlyConfigured, '%s isn\'t a middleware module' % middleware_path
            mw_module, mw_classname = middleware_path[:dot], middleware_path[dot+1:]
            try:
                mod = __import__(mw_module, {}, {}, [''])
            except ImportError, e:
                raise exceptions.ImproperlyConfigured, 'Error importing middleware %s: "%s"' % (mw_module, e)
            try:
                mw_class = getattr(mod, mw_classname)
            except AttributeError:
                raise exceptions.ImproperlyConfigured, 'Middleware module "%s" does not define a "%s" class' % (mw_module, mw_classname)

            # A middleware may opt out of this configuration by raising
            # MiddlewareNotUsed from its constructor.
            try:
                mw_instance = mw_class()
            except exceptions.MiddlewareNotUsed:
                continue

            # Response/exception middleware run in reverse order, hence
            # insert(0, ...) rather than append().
            if hasattr(mw_instance, 'process_request'):
                self._request_middleware.append(mw_instance.process_request)
            if hasattr(mw_instance, 'process_view'):
                self._view_middleware.append(mw_instance.process_view)
            if hasattr(mw_instance, 'process_response'):
                self._response_middleware.insert(0, mw_instance.process_response)
            if hasattr(mw_instance, 'process_exception'):
                self._exception_middleware.insert(0, mw_instance.process_exception)

    def get_response(self, request):
        "Returns an HttpResponse object for the given HttpRequest"
        from django.core import exceptions, urlresolvers
        from django.core.mail import mail_admins
        from django.conf import settings

        # Apply request middleware
        for middleware_method in self._request_middleware:
            response = middleware_method(request)
            if response:
                return response

        # Get urlconf from request object, if available. Otherwise use default.
        urlconf = getattr(request, "urlconf", settings.ROOT_URLCONF)

        resolver = urlresolvers.RegexURLResolver(r'^/', urlconf)
        try:
            callback, callback_args, callback_kwargs = resolver.resolve(request.path)

            # Apply view middleware
            for middleware_method in self._view_middleware:
                response = middleware_method(request, callback, callback_args, callback_kwargs)
                if response:
                    return response

            try:
                response = callback(request, *callback_args, **callback_kwargs)
            except Exception, e:
                # If the view raised an exception, run it through exception
                # middleware, and if the exception middleware returns a
                # response, use that. Otherwise, reraise the exception.
                for middleware_method in self._exception_middleware:
                    response = middleware_method(request, e)
                    if response:
                        return response
                raise

            # Complain if the view returned None (a common error).
            if response is None:
                try:
                    view_name = callback.func_name # If it's a function
                except AttributeError:
                    view_name = callback.__class__.__name__ + '.__call__' # If it's a class
                raise ValueError, "The view %s.%s didn't return an HttpResponse object." % (callback.__module__, view_name)

            return response
        except http.Http404, e:
            if settings.DEBUG:
                from django.views import debug
                return debug.technical_404_response(request, e)
            else:
                callback, param_dict = resolver.resolve404()
                return callback(request, **param_dict)
        except exceptions.PermissionDenied:
            return http.HttpResponseForbidden('<h1>Permission denied</h1>')
        except SystemExit:
            pass # See http://code.djangoproject.com/ticket/1023
        except: # Handle everything else, including SuspiciousOperation, etc.
            if settings.DEBUG:
                from django.views import debug
                return debug.technical_500_response(request, *sys.exc_info())
            else:
                # Get the exception info now, in case another exception is thrown later.
                exc_info = sys.exc_info()
                receivers = dispatcher.send(signal=signals.got_request_exception)
                # When DEBUG is False, send an error message to the admins.
                subject = 'Error (%s IP): %s' % ((request.META.get('REMOTE_ADDR') in settings.INTERNAL_IPS and 'internal' or 'EXTERNAL'), request.path)
                try:
                    request_repr = repr(request)
                except:
                    request_repr = "Request repr() unavailable"
                message = "%s\n\n%s" % (self._get_traceback(exc_info), request_repr)
                mail_admins(subject, message, fail_silently=True)
                # Return an HttpResponse that displays a friendly error message.
                callback, param_dict = resolver.resolve500()
                return callback(request, **param_dict)

    def _get_traceback(self, exc_info=None):
        "Helper function to return the traceback as a string"
        import traceback
        return '\n'.join(traceback.format_exception(*(exc_info or sys.exc_info())))
diff --git a/google_appengine/lib/django/django/core/handlers/modpython.py b/google_appengine/lib/django/django/core/handlers/modpython.py
new file mode 100755
index 0000000..5fc41a0
--- /dev/null
+++ b/google_appengine/lib/django/django/core/handlers/modpython.py
@@ -0,0 +1,177 @@
+from django.core.handlers.base import BaseHandler
+from django.core import signals
+from django.dispatch import dispatcher
+from django.utils import datastructures
+from django import http
+from pprint import pformat
+import os
+
+# NOTE: do *not* import settings (or any module which eventually imports
+# settings) until after ModPythonHandler has been called; otherwise os.environ
+# won't be set up correctly (with respect to settings).
+
class ModPythonRequest(http.HttpRequest):
    # HttpRequest implementation backed by a mod_python request object
    # (self._req).  GET/POST/COOKIES/FILES/META are lazily parsed from it.
    def __init__(self, req):
        self._req = req
        self.path = req.uri

    def __repr__(self):
        # Since this is called as part of error handling, we need to be very
        # robust against potentially malformed input.
        try:
            get = pformat(self.GET)
        except:
            get = '<could not parse>'
        try:
            post = pformat(self.POST)
        except:
            post = '<could not parse>'
        try:
            cookies = pformat(self.COOKIES)
        except:
            cookies = '<could not parse>'
        try:
            meta = pformat(self.META)
        except:
            meta = '<could not parse>'
        return '<ModPythonRequest\npath:%s,\nGET:%s,\nPOST:%s,\nCOOKIES:%s,\nMETA:%s>' % \
            (self.path, get, post, cookies, meta)

    def get_full_path(self):
        # Path plus the query string, when mod_python provides one.
        return '%s%s' % (self.path, self._req.args and ('?' + self._req.args) or '')

    def is_secure(self):
        # Note: modpython 3.2.10+ has req.is_https(), but we need to support previous versions
        return self._req.subprocess_env.has_key('HTTPS') and self._req.subprocess_env['HTTPS'] == 'on'

    def _load_post_and_files(self):
        "Populates self._post and self._files"
        if self._req.headers_in.has_key('content-type') and self._req.headers_in['content-type'].startswith('multipart'):
            self._post, self._files = http.parse_file_upload(self._req.headers_in, self.raw_post_data)
        else:
            self._post, self._files = http.QueryDict(self.raw_post_data), datastructures.MultiValueDict()

    def _get_request(self):
        # Lazy merged view of POST and GET (POST wins on key clashes).
        if not hasattr(self, '_request'):
            self._request = datastructures.MergeDict(self.POST, self.GET)
        return self._request

    def _get_get(self):
        # Lazily parse the query string into a QueryDict.
        if not hasattr(self, '_get'):
            self._get = http.QueryDict(self._req.args)
        return self._get

    def _set_get(self, get):
        self._get = get

    def _get_post(self):
        if not hasattr(self, '_post'):
            self._load_post_and_files()
        return self._post

    def _set_post(self, post):
        self._post = post

    def _get_cookies(self):
        # Lazily parse the Cookie header.
        if not hasattr(self, '_cookies'):
            self._cookies = http.parse_cookie(self._req.headers_in.get('cookie', ''))
        return self._cookies

    def _set_cookies(self, cookies):
        self._cookies = cookies

    def _get_files(self):
        if not hasattr(self, '_files'):
            self._load_post_and_files()
        return self._files

    def _get_meta(self):
        "Lazy loader that returns self.META dictionary"
        if not hasattr(self, '_meta'):
            self._meta = {
                'AUTH_TYPE':         self._req.ap_auth_type,
                'CONTENT_LENGTH':    self._req.clength, # This may be wrong
                'CONTENT_TYPE':      self._req.content_type, # This may be wrong
                'GATEWAY_INTERFACE': 'CGI/1.1',
                'PATH_INFO':         self._req.path_info,
                'PATH_TRANSLATED':   None, # Not supported
                'QUERY_STRING':      self._req.args,
                'REMOTE_ADDR':       self._req.connection.remote_ip,
                'REMOTE_HOST':       None, # DNS lookups not supported
                'REMOTE_IDENT':      self._req.connection.remote_logname,
                'REMOTE_USER':       self._req.user,
                'REQUEST_METHOD':    self._req.method,
                'SCRIPT_NAME':       None, # Not supported
                'SERVER_NAME':       self._req.server.server_hostname,
                'SERVER_PORT':       self._req.server.port,
                'SERVER_PROTOCOL':   self._req.protocol,
                'SERVER_SOFTWARE':   'mod_python'
            }
            # Expose request headers as CGI-style HTTP_* keys.
            for key, value in self._req.headers_in.items():
                key = 'HTTP_' + key.upper().replace('-', '_')
                self._meta[key] = value
        return self._meta

    def _get_raw_post_data(self):
        # Read and cache the request body exactly once.
        try:
            return self._raw_post_data
        except AttributeError:
            self._raw_post_data = self._req.read()
            return self._raw_post_data

    def _get_method(self):
        return self.META['REQUEST_METHOD'].upper()

    GET = property(_get_get, _set_get)
    POST = property(_get_post, _set_post)
    COOKIES = property(_get_cookies, _set_cookies)
    FILES = property(_get_files)
    META = property(_get_meta)
    REQUEST = property(_get_request)
    raw_post_data = property(_get_raw_post_data)
    method = property(_get_method)
+
class ModPythonHandler(BaseHandler):
    # Adapts BaseHandler to the mod_python request cycle: builds a
    # ModPythonRequest, runs middleware + the view, and copies the resulting
    # HttpResponse back onto the mod_python req object.
    def __call__(self, req):
        # mod_python fakes the environ, and thus doesn't process SetEnv. This fixes that
        os.environ.update(req.subprocess_env)

        # now that the environ works we can see the correct settings, so imports
        # that use settings now can work
        from django.conf import settings

        # if we need to set up middleware, now that settings works we can do it now.
        if self._request_middleware is None:
            self.load_middleware()

        dispatcher.send(signal=signals.request_started)
        try:
            request = ModPythonRequest(req)
            response = self.get_response(request)

            # Apply response middleware
            for middleware_method in self._response_middleware:
                response = middleware_method(request, response)

        finally:
            dispatcher.send(signal=signals.request_finished)

        # Convert our custom HttpResponse object back into the mod_python req.
        req.content_type = response['Content-Type']
        for key, value in response.headers.items():
            if key != 'Content-Type':
                req.headers_out[key] = value
        for c in response.cookies.values():
            req.headers_out.add('Set-Cookie', c.output(header=''))
        req.status = response.status_code
        try:
            for chunk in response:
                req.write(chunk)
        finally:
            response.close()

        return 0 # mod_python.apache.OK
+
def handler(req):
    # mod_python hooks into this function.
    # A fresh ModPythonHandler is constructed per request; middleware state
    # is loaded lazily on first call.
    return ModPythonHandler()(req)
diff --git a/google_appengine/lib/django/django/core/handlers/profiler-hotshot.py b/google_appengine/lib/django/django/core/handlers/profiler-hotshot.py
new file mode 100755
index 0000000..6cf94b0
--- /dev/null
+++ b/google_appengine/lib/django/django/core/handlers/profiler-hotshot.py
@@ -0,0 +1,22 @@
+import hotshot, time, os
+from django.core.handlers.modpython import ModPythonHandler
+
+PROFILE_DATA_DIR = "/var/log/cmsprofile"
+
+def handler(req):
+    '''
+    Handler that uses hotshot to store profile data.
+
+    Stores profile data in PROFILE_DATA_DIR. Since hotshot has no way (that I
+    know of) to append profile data to a single file, each request gets its own
+    profile. The file names are in the format <url>.<n>.prof where <url> is
+    the request path with "/" replaced by ".", and <n> is a timestamp with
+    millisecond precision (time.time() formatted with %.3f) to prevent
+    overwriting files.
+
+    Use the gather_profile_stats.py script to gather these individual request
+    profiles into aggregated profiles by request path.
+    '''
+    # Strip leading/trailing slashes first so "/foo/bar/" becomes "foo.bar".
+    profname = "%s.%.3f.prof" % (req.uri.strip("/").replace('/', '.'), time.time())
+    profname = os.path.join(PROFILE_DATA_DIR, profname)
+    prof = hotshot.Profile(profname)
+    # runcall returns whatever the wrapped handler returns (the mod_python
+    # status code), so profiling is transparent to mod_python.
+    return prof.runcall(ModPythonHandler(), req)
diff --git a/google_appengine/lib/django/django/core/handlers/wsgi.py b/google_appengine/lib/django/django/core/handlers/wsgi.py
new file mode 100755
index 0000000..71cfecd
--- /dev/null
+++ b/google_appengine/lib/django/django/core/handlers/wsgi.py
@@ -0,0 +1,207 @@
+from django.core.handlers.base import BaseHandler
+from django.core import signals
+from django.dispatch import dispatcher
+from django.utils import datastructures
+from django import http
+from pprint import pformat
+from shutil import copyfileobj
+try:
+ from cStringIO import StringIO
+except ImportError:
+ from StringIO import StringIO
+
+# See http://www.w3.org/Protocols/rfc2616/rfc2616-sec10.html
+# Maps HTTP status code -> RFC 2616 reason phrase; used by WSGIHandler to
+# build the "<code> <phrase>" status line passed to start_response(). Codes
+# not listed here fall back to 'UNKNOWN STATUS CODE'.
+STATUS_CODE_TEXT = {
+    100: 'CONTINUE',
+    101: 'SWITCHING PROTOCOLS',
+    200: 'OK',
+    201: 'CREATED',
+    202: 'ACCEPTED',
+    203: 'NON-AUTHORITATIVE INFORMATION',
+    204: 'NO CONTENT',
+    205: 'RESET CONTENT',
+    206: 'PARTIAL CONTENT',
+    300: 'MULTIPLE CHOICES',
+    301: 'MOVED PERMANENTLY',
+    302: 'FOUND',
+    303: 'SEE OTHER',
+    304: 'NOT MODIFIED',
+    305: 'USE PROXY',
+    306: 'RESERVED',
+    307: 'TEMPORARY REDIRECT',
+    400: 'BAD REQUEST',
+    401: 'UNAUTHORIZED',
+    402: 'PAYMENT REQUIRED',
+    403: 'FORBIDDEN',
+    404: 'NOT FOUND',
+    405: 'METHOD NOT ALLOWED',
+    406: 'NOT ACCEPTABLE',
+    407: 'PROXY AUTHENTICATION REQUIRED',
+    408: 'REQUEST TIMEOUT',
+    409: 'CONFLICT',
+    410: 'GONE',
+    411: 'LENGTH REQUIRED',
+    412: 'PRECONDITION FAILED',
+    413: 'REQUEST ENTITY TOO LARGE',
+    414: 'REQUEST-URI TOO LONG',
+    415: 'UNSUPPORTED MEDIA TYPE',
+    416: 'REQUESTED RANGE NOT SATISFIABLE',
+    417: 'EXPECTATION FAILED',
+    500: 'INTERNAL SERVER ERROR',
+    501: 'NOT IMPLEMENTED',
+    502: 'BAD GATEWAY',
+    503: 'SERVICE UNAVAILABLE',
+    504: 'GATEWAY TIMEOUT',
+    505: 'HTTP VERSION NOT SUPPORTED',
+}
+
+def safe_copyfileobj(fsrc, fdst, length=16*1024, size=0):
+    """
+    A version of shutil.copyfileobj that will not read more than 'size' bytes.
+    This makes it safe from clients sending more than CONTENT_LENGTH bytes of
+    data in the body.
+
+    fsrc -- file-like object read in chunks of at most 'length' bytes.
+    fdst -- file-like object the data is written to.
+    size -- maximum total number of bytes to copy; when it is 0 (the default)
+            nothing is copied at all. Copying also stops early if fsrc is
+            exhausted (read() returns an empty string).
+    """
+    if not size:
+        return
+    while size > 0:
+        buf = fsrc.read(min(length, size))
+        if not buf:
+            break
+        fdst.write(buf)
+        size -= len(buf)
+
+class WSGIRequest(http.HttpRequest):
+    """HttpRequest backed by a WSGI environ dict.
+
+    GET, POST, COOKIES, FILES, REQUEST and raw_post_data are parsed lazily on
+    first access and cached on the instance (in _get, _post, etc.)."""
+
+    def __init__(self, environ):
+        self.environ = environ
+        self.path = environ['PATH_INFO']
+        # META is the raw environ itself, per the HttpRequest contract.
+        self.META = environ
+        self.method = environ['REQUEST_METHOD'].upper()
+
+    def __repr__(self):
+        # Since this is called as part of error handling, we need to be very
+        # robust against potentially malformed input.
+        try:
+            get = pformat(self.GET)
+        except:
+            get = '<could not parse>'
+        try:
+            post = pformat(self.POST)
+        except:
+            post = '<could not parse>'
+        try:
+            cookies = pformat(self.COOKIES)
+        except:
+            cookies = '<could not parse>'
+        try:
+            meta = pformat(self.META)
+        except:
+            meta = '<could not parse>'
+        return '<WSGIRequest\nGET:%s,\nPOST:%s,\nCOOKIES:%s,\nMETA:%s>' % \
+            (get, post, cookies, meta)
+
+    def get_full_path(self):
+        # Append "?<query>" only when QUERY_STRING is present and non-empty.
+        return '%s%s' % (self.path, self.environ.get('QUERY_STRING', '') and ('?' + self.environ.get('QUERY_STRING', '')) or '')
+
+    def is_secure(self):
+        return self.environ.has_key('HTTPS') and self.environ['HTTPS'] == 'on'
+
+    def _load_post_and_files(self):
+        # Populates self._post and self._files
+        if self.method == 'POST':
+            if self.environ.get('CONTENT_TYPE', '').startswith('multipart'):
+                # parse_file_upload wants the HTTP_* headers plus Content-Type.
+                header_dict = dict([(k, v) for k, v in self.environ.items() if k.startswith('HTTP_')])
+                header_dict['Content-Type'] = self.environ.get('CONTENT_TYPE', '')
+                self._post, self._files = http.parse_file_upload(header_dict, self.raw_post_data)
+            else:
+                self._post, self._files = http.QueryDict(self.raw_post_data), datastructures.MultiValueDict()
+        else:
+            # Non-POST requests get empty POST/FILES containers.
+            self._post, self._files = http.QueryDict(''), datastructures.MultiValueDict()
+
+    def _get_request(self):
+        # Merged POST+GET lookup; POST takes precedence in MergeDict order.
+        if not hasattr(self, '_request'):
+            self._request = datastructures.MergeDict(self.POST, self.GET)
+        return self._request
+
+    def _get_get(self):
+        if not hasattr(self, '_get'):
+            # The WSGI spec says 'QUERY_STRING' may be absent.
+            self._get = http.QueryDict(self.environ.get('QUERY_STRING', ''))
+        return self._get
+
+    def _set_get(self, get):
+        self._get = get
+
+    def _get_post(self):
+        if not hasattr(self, '_post'):
+            self._load_post_and_files()
+        return self._post
+
+    def _set_post(self, post):
+        self._post = post
+
+    def _get_cookies(self):
+        if not hasattr(self, '_cookies'):
+            self._cookies = http.parse_cookie(self.environ.get('HTTP_COOKIE', ''))
+        return self._cookies
+
+    def _set_cookies(self, cookies):
+        self._cookies = cookies
+
+    def _get_files(self):
+        if not hasattr(self, '_files'):
+            self._load_post_and_files()
+        return self._files
+
+    def _get_raw_post_data(self):
+        try:
+            return self._raw_post_data
+        except AttributeError:
+            buf = StringIO()
+            try:
+                # CONTENT_LENGTH might be absent if POST doesn't have content at all (lighttpd)
+                content_length = int(self.environ.get('CONTENT_LENGTH', 0))
+            except ValueError: # if CONTENT_LENGTH was empty string or not an integer
+                content_length = 0
+            # safe_copyfileobj caps the read at content_length, so a client
+            # cannot force us to buffer more than it declared.
+            safe_copyfileobj(self.environ['wsgi.input'], buf, size=content_length)
+            self._raw_post_data = buf.getvalue()
+            buf.close()
+            return self._raw_post_data
+
+    # Lazily-computed attributes; the setters allow the parsed values to be
+    # replaced wholesale (FILES and raw_post_data are read-only).
+    GET = property(_get_get, _set_get)
+    POST = property(_get_post, _set_post)
+    COOKIES = property(_get_cookies, _set_cookies)
+    FILES = property(_get_files)
+    REQUEST = property(_get_request)
+    raw_post_data = property(_get_raw_post_data)
+
+class WSGIHandler(BaseHandler):
+    """WSGI application object: wraps the environ in a WSGIRequest, runs the
+    middleware/view machinery, and returns the HttpResponse (which is itself
+    iterable, satisfying the WSGI response-body protocol)."""
+
+    def __call__(self, environ, start_response):
+        from django.conf import settings
+
+        # Set up middleware if needed. We couldn't do this earlier, because
+        # settings weren't available.
+        if self._request_middleware is None:
+            self.load_middleware()
+
+        dispatcher.send(signal=signals.request_started)
+        try:
+            request = WSGIRequest(environ)
+            response = self.get_response(request)
+
+            # Apply response middleware
+            for middleware_method in self._response_middleware:
+                response = middleware_method(request, response)
+
+        finally:
+            # request_finished fires even if get_response/middleware raised.
+            dispatcher.send(signal=signals.request_finished)
+
+        try:
+            status_text = STATUS_CODE_TEXT[response.status_code]
+        except KeyError:
+            status_text = 'UNKNOWN STATUS CODE'
+        status = '%s %s' % (response.status_code, status_text)
+        # One Set-Cookie header per cookie, appended after the normal headers.
+        response_headers = response.headers.items()
+        for c in response.cookies.values():
+            response_headers.append(('Set-Cookie', c.output(header='')))
+        start_response(status, response_headers)
+        return response
diff --git a/google_appengine/lib/django/django/core/mail.py b/google_appengine/lib/django/django/core/mail.py
new file mode 100755
index 0000000..b9966c2
--- /dev/null
+++ b/google_appengine/lib/django/django/core/mail.py
@@ -0,0 +1,108 @@
+# Use this module for e-mailing.
+
+from django.conf import settings
+from email.MIMEText import MIMEText
+from email.Header import Header
+from email.Utils import formatdate
+import smtplib
+import socket
+import time
+import random
+
+# Cache the hostname, but do it lazily: socket.getfqdn() can take a couple of
+# seconds, which slows down the restart of the server.
+class CachedDnsName(object):
+    """Lazily computes and caches socket.getfqdn(); str() yields the FQDN.
+    Used below to build Message-ID headers without paying the DNS lookup
+    cost at import time."""
+
+    def __str__(self):
+        return self.get_fqdn()
+
+    def get_fqdn(self):
+        # First call does the (potentially slow) lookup; later calls hit the cache.
+        if not hasattr(self, '_fqdn'):
+            self._fqdn = socket.getfqdn()
+        return self._fqdn
+
+DNS_NAME = CachedDnsName()
+
+class BadHeaderError(ValueError):
+    # Raised by SafeMIMEText when a header value contains a newline.
+    pass
+
+class SafeMIMEText(MIMEText):
+    def __setitem__(self, name, val):
+        "Forbids multi-line headers, to prevent header injection."
+        if '\n' in val or '\r' in val:
+            raise BadHeaderError, "Header values can't contain newlines (got %r for header %r)" % (val, name)
+        if name == "Subject":
+            # Subject goes through email.Header so non-ASCII text is encoded
+            # using the project's DEFAULT_CHARSET.
+            val = Header(val, settings.DEFAULT_CHARSET)
+        MIMEText.__setitem__(self, name, val)
+
+def send_mail(subject, message, from_email, recipient_list, fail_silently=False, auth_user=None, auth_password=None):
+    """
+    Easy wrapper for sending a single message to a recipient list. All members
+    of the recipient list will see the other recipients in the 'To' field.
+
+    If auth_user is None, the EMAIL_HOST_USER setting is used.
+    If auth_password is None, the EMAIL_HOST_PASSWORD setting is used.
+
+    Returns the number of messages sent (0 or 1), as reported by
+    send_mass_mail.
+    """
+    if auth_user is None:
+        auth_user = settings.EMAIL_HOST_USER
+    if auth_password is None:
+        auth_password = settings.EMAIL_HOST_PASSWORD
+    return send_mass_mail([[subject, message, from_email, recipient_list]], fail_silently, auth_user, auth_password)
+
+def send_mass_mail(datatuple, fail_silently=False, auth_user=None, auth_password=None):
+    """
+    Given a datatuple of (subject, message, from_email, recipient_list), sends
+    each message to each recipient list. Returns the number of e-mails sent.
+
+    If from_email is None, the DEFAULT_FROM_EMAIL setting is used.
+    If auth_user and auth_password are set, they're used to log in.
+    If auth_user is None, the EMAIL_HOST_USER setting is used.
+    If auth_password is None, the EMAIL_HOST_PASSWORD setting is used.
+    """
+    if auth_user is None:
+        auth_user = settings.EMAIL_HOST_USER
+    if auth_password is None:
+        auth_password = settings.EMAIL_HOST_PASSWORD
+    try:
+        server = smtplib.SMTP(settings.EMAIL_HOST, settings.EMAIL_PORT)
+        if auth_user and auth_password:
+            server.login(auth_user, auth_password)
+    except:
+        # Deliberate best-effort: with fail_silently a connect/login failure
+        # means "0 messages sent" rather than an exception.
+        if fail_silently:
+            return
+        raise
+    num_sent = 0
+    for subject, message, from_email, recipient_list in datatuple:
+        if not recipient_list:
+            continue
+        from_email = from_email or settings.DEFAULT_FROM_EMAIL
+        msg = SafeMIMEText(message, 'plain', settings.DEFAULT_CHARSET)
+        msg['Subject'] = subject
+        msg['From'] = from_email
+        msg['To'] = ', '.join(recipient_list)
+        msg['Date'] = formatdate()
+        try:
+            random_bits = str(random.getrandbits(64))
+        except AttributeError: # Python 2.3 doesn't have random.getrandbits().
+            random_bits = ''.join([random.choice('1234567890') for i in range(19)])
+        # Unique Message-ID: whole-second timestamp (%d truncates the float),
+        # 64 random bits, and this host's cached FQDN.
+        msg['Message-ID'] = "<%d.%s@%s>" % (time.time(), random_bits, DNS_NAME)
+        try:
+            server.sendmail(from_email, recipient_list, msg.as_string())
+            num_sent += 1
+        except:
+            # A failed message is simply not counted when fail_silently is set.
+            if not fail_silently:
+                raise
+    try:
+        server.quit()
+    except:
+        if fail_silently:
+            return
+        raise
+    return num_sent
+
+def mail_admins(subject, message, fail_silently=False):
+    "Sends a message to the admins, as defined by the ADMINS setting."
+    # a[1] is the second item of each ADMINS entry (the e-mail address of the
+    # (name, address) pair).
+    send_mail(settings.EMAIL_SUBJECT_PREFIX + subject, message, settings.SERVER_EMAIL, [a[1] for a in settings.ADMINS], fail_silently)
+
+def mail_managers(subject, message, fail_silently=False):
+    "Sends a message to the managers, as defined by the MANAGERS setting."
+    send_mail(settings.EMAIL_SUBJECT_PREFIX + subject, message, settings.SERVER_EMAIL, [a[1] for a in settings.MANAGERS], fail_silently)
diff --git a/google_appengine/lib/django/django/core/management.py b/google_appengine/lib/django/django/core/management.py
new file mode 100755
index 0000000..68c85c7
--- /dev/null
+++ b/google_appengine/lib/django/django/core/management.py
@@ -0,0 +1,1670 @@
+# Django management-related functions, including "CREATE TABLE" generation and
+# development-server initialization.
+
+import django
+from django.core.exceptions import ImproperlyConfigured
+import os, re, shutil, sys, textwrap
+from optparse import OptionParser
+from django.utils import termcolors
+
+# For Python 2.3
+if not hasattr(__builtins__, 'set'):
+ from sets import Set as set
+
+# Template snippet rendered per model — presumably for the admin app index
+# page (add/change links gated on permissions); confirm at the usage site.
+MODULE_TEMPLATE = ''' {%% if perms.%(app)s.%(addperm)s or perms.%(app)s.%(changeperm)s %%}
+    <tr>
+        <th>{%% if perms.%(app)s.%(changeperm)s %%}<a href="%(app)s/%(mod)s/">{%% endif %%}%(name)s{%% if perms.%(app)s.%(changeperm)s %%}</a>{%% endif %%}</th>
+        <td class="x50">{%% if perms.%(app)s.%(addperm)s %%}<a href="%(app)s/%(mod)s/add/" class="addlink">{%% endif %%}Add{%% if perms.%(app)s.%(addperm)s %%}</a>{%% endif %%}</td>
+        <td class="x75">{%% if perms.%(app)s.%(changeperm)s %%}<a href="%(app)s/%(mod)s/" class="changelink">{%% endif %%}Change{%% if perms.%(app)s.%(changeperm)s %%}</a>{%% endif %%}</td>
+    </tr>
+    {%% endif %%}'''
+
+# Usage string shared by every action that takes app names on the command line.
+APP_ARGS = '[appname ...]'
+
+# Use django.__path__[0] because we don't know into which directory
+# django has been installed.
+PROJECT_TEMPLATE_DIR = os.path.join(django.__path__[0], 'conf', '%s_template')
+
+INVALID_PROJECT_NAMES = ('django', 'site', 'test')
+
+# Set up the terminal color scheme.
+# 'style' is a plain namespace object whose attributes are termcolors
+# formatting callables; disable_termcolors() below swaps it for a no-op.
+class dummy: pass
+style = dummy()
+style.ERROR = termcolors.make_style(fg='red', opts=('bold',))
+style.ERROR_OUTPUT = termcolors.make_style(fg='red', opts=('bold',))
+style.NOTICE = termcolors.make_style(fg='red')
+style.SQL_FIELD = termcolors.make_style(fg='green', opts=('bold',))
+style.SQL_COLTYPE = termcolors.make_style(fg='green')
+style.SQL_KEYWORD = termcolors.make_style(fg='yellow')
+style.SQL_TABLE = termcolors.make_style(opts=('bold',))
+del dummy
+
+def disable_termcolors():
+    "Replaces the global 'style' with an object whose every attribute is the identity function."
+    class dummy:
+        def __getattr__(self, attr):
+            return lambda x: x
+    global style
+    style = dummy()
+
+# Disable terminal coloring on Windows, Pocket PC, or if somebody's piping the output.
+if sys.platform == 'win32' or sys.platform == 'Pocket PC' or not sys.stdout.isatty():
+    disable_termcolors()
+
+def _is_valid_dir_name(s):
+    # Valid names are one or more word characters ([A-Za-z0-9_]) only.
+    return bool(re.search(r'^\w+$', s))
+
+def _get_installed_models(table_list):
+    "Gets a set of all models that are installed, given a list of existing tables"
+    from django.db import models
+    all_models = []
+    for app in models.get_apps():
+        for model in models.get_models(app):
+            all_models.append(model)
+    # A model counts as installed when its table physically exists.
+    return set([m for m in all_models if m._meta.db_table in table_list])
+
+def _get_table_list():
+    "Gets a list of all db tables that are physically installed."
+    from django.db import connection, get_introspection_module
+    cursor = connection.cursor()
+    return get_introspection_module().get_table_list(cursor)
+
+def _get_sequence_list():
+    "Returns a list of information about all DB sequences for all models in all apps"
+    from django.db import models
+
+    apps = models.get_apps()
+    sequence_list = []
+
+    for app in apps:
+        for model in models.get_models(app):
+            for f in model._meta.fields:
+                if isinstance(f, models.AutoField):
+                    sequence_list.append({'table':model._meta.db_table,'column':f.column,})
+                    break # Only one AutoField is allowed per model, so don't bother continuing.
+
+            # m2m join tables have an implicit sequence; column is unknown here.
+            for f in model._meta.many_to_many:
+                sequence_list.append({'table':f.m2m_db_table(),'column':None,})
+
+    return sequence_list
+
+# If the foreign key points to an AutoField, a PositiveIntegerField or a
+# PositiveSmallIntegerField, the foreign key should be an IntegerField, not the
+# referred field type. Otherwise, the foreign key should be the same type of
+# field as the field to which it points.
+get_rel_data_type = lambda f: (f.get_internal_type() in ('AutoField', 'PositiveIntegerField', 'PositiveSmallIntegerField')) and 'IntegerField' or f.get_internal_type()
+
+def get_version():
+    "Returns the version as a human-format string."
+    from django import VERSION
+    # VERSION is a tuple; the last item is a string suffix (e.g. a pre-release
+    # tag) appended with a dash when non-empty.
+    v = '.'.join([str(i) for i in VERSION[:-1]])
+    if VERSION[-1]:
+        v += '-' + VERSION[-1]
+    return v
+
+def get_sql_create(app):
+    "Returns a list of the CREATE TABLE SQL statements for the given app."
+    from django.db import get_creation_module, models
+    data_types = get_creation_module().DATA_TYPES
+
+    if not data_types:
+        # This must be the "dummy" database backend, which means the user
+        # hasn't set DATABASE_ENGINE.
+        sys.stderr.write(style.ERROR("Error: Django doesn't know which syntax to use for your SQL statements,\n" +
+            "because you haven't specified the DATABASE_ENGINE setting.\n" +
+            "Edit your settings file and change DATABASE_ENGINE to something like 'postgresql' or 'mysql'.\n"))
+        sys.exit(1)
+
+    # Get installed models, so we generate REFERENCES right.
+    # We trim models from the current app so that the sqlreset command does not
+    # generate invalid SQL (leaving models out of known_models is harmless, so
+    # we can be conservative).
+    app_models = models.get_models(app)
+    final_output = []
+    known_models = set([model for model in _get_installed_models(_get_table_list()) if model not in app_models])
+    pending_references = {}
+
+    for model in app_models:
+        output, references = _get_sql_model_create(model, known_models)
+        final_output.extend(output)
+        for refto, refs in references.items():
+            pending_references.setdefault(refto,[]).extend(refs)
+        # Emit ALTER TABLE statements for any earlier models that were waiting
+        # on this model's table to exist.
+        final_output.extend(_get_sql_for_pending_references(model, pending_references))
+        # Keep track of the fact that we've created the table for this model.
+        known_models.add(model)
+
+    # Create the many-to-many join tables.
+    for model in app_models:
+        final_output.extend(_get_many_to_many_sql_for_model(model))
+
+    # Handle references to tables that are from other apps
+    # but don't exist physically
+    not_installed_models = set(pending_references.keys())
+    if not_installed_models:
+        alter_sql = []
+        for model in not_installed_models:
+            # These are emitted commented out, since they cannot execute yet.
+            alter_sql.extend(['-- ' + sql for sql in
+                _get_sql_for_pending_references(model, pending_references)])
+        if alter_sql:
+            final_output.append('-- The following references should be added but depend on non-existent tables:')
+            final_output.extend(alter_sql)
+
+    return final_output
+get_sql_create.help_doc = "Prints the CREATE TABLE SQL statements for the given app name(s)."
+get_sql_create.args = APP_ARGS
+
+def _get_sql_model_create(model, known_models=set()):
+    """
+    Get the SQL required to create a single model.
+
+    Returns list_of_sql, pending_references_dict
+
+    known_models: models whose tables already exist; a ForeignKey to one of
+    them gets an inline REFERENCES clause, while a ForeignKey to any other
+    model is recorded in the returned pending_references dict (keyed by the
+    target model) for later ALTER TABLE generation.
+    """
+    from django.db import backend, get_creation_module, models
+    data_types = get_creation_module().DATA_TYPES
+
+    opts = model._meta
+    final_output = []
+    table_output = []
+    pending_references = {}
+    for f in opts.fields:
+        if isinstance(f, (models.ForeignKey, models.OneToOneField)):
+            # Column type mirrors the referenced field (see get_rel_data_type).
+            rel_field = f.rel.get_related_field()
+            data_type = get_rel_data_type(rel_field)
+        else:
+            rel_field = f
+            data_type = f.get_internal_type()
+        col_type = data_types[data_type]
+        if col_type is not None:
+            # Make the definition (e.g. 'foo VARCHAR(30)') for this field.
+            field_output = [style.SQL_FIELD(backend.quote_name(f.column)),
+                style.SQL_COLTYPE(col_type % rel_field.__dict__)]
+            field_output.append(style.SQL_KEYWORD('%sNULL' % (not f.null and 'NOT ' or '')))
+            if f.unique:
+                field_output.append(style.SQL_KEYWORD('UNIQUE'))
+            if f.primary_key:
+                field_output.append(style.SQL_KEYWORD('PRIMARY KEY'))
+            if f.rel:
+                if f.rel.to in known_models:
+                    field_output.append(style.SQL_KEYWORD('REFERENCES') + ' ' + \
+                        style.SQL_TABLE(backend.quote_name(f.rel.to._meta.db_table)) + ' (' + \
+                        style.SQL_FIELD(backend.quote_name(f.rel.to._meta.get_field(f.rel.field_name).column)) + ')' +
+                        backend.get_deferrable_sql()
+                    )
+                else:
+                    # We haven't yet created the table to which this field
+                    # is related, so save it for later.
+                    # NOTE(review): list.append() returns None, so 'pr' is
+                    # always None and never used.
+                    pr = pending_references.setdefault(f.rel.to, []).append((model, f))
+            table_output.append(' '.join(field_output))
+    if opts.order_with_respect_to:
+        # Implicit '_order' integer column for order_with_respect_to models.
+        table_output.append(style.SQL_FIELD(backend.quote_name('_order')) + ' ' + \
+            style.SQL_COLTYPE(data_types['IntegerField']) + ' ' + \
+            style.SQL_KEYWORD('NULL'))
+    for field_constraints in opts.unique_together:
+        table_output.append(style.SQL_KEYWORD('UNIQUE') + ' (%s)' % \
+            ", ".join([backend.quote_name(style.SQL_FIELD(opts.get_field(f).column)) for f in field_constraints]))
+
+    full_statement = [style.SQL_KEYWORD('CREATE TABLE') + ' ' + style.SQL_TABLE(backend.quote_name(opts.db_table)) + ' (']
+    for i, line in enumerate(table_output): # Combine and add commas.
+        full_statement.append('    %s%s' % (line, i < len(table_output)-1 and ',' or ''))
+    full_statement.append(');')
+    final_output.append('\n'.join(full_statement))
+
+    return final_output, pending_references
+
+def _get_sql_for_pending_references(model, pending_references):
+    """
+    Get any ALTER TABLE statements to add constraints after the fact.
+
+    Consumes (and deletes) the pending_references entry for 'model', emitting
+    one ADD CONSTRAINT ... FOREIGN KEY per recorded (referencing model, field)
+    pair. Returns [] when the backend doesn't support constraints.
+    """
+    from django.db import backend, get_creation_module
+    # NOTE(review): data_types is assigned but never used in this function.
+    data_types = get_creation_module().DATA_TYPES
+
+    final_output = []
+    if backend.supports_constraints:
+        opts = model._meta
+        if model in pending_references:
+            for rel_class, f in pending_references[model]:
+                rel_opts = rel_class._meta
+                r_table = rel_opts.db_table
+                r_col = f.column
+                table = opts.db_table
+                col = opts.get_field(f.rel.field_name).column
+                # For MySQL, r_name must be unique in the first 64 characters.
+                # So we are careful with character usage here.
+                r_name = '%s_refs_%s_%x' % (r_col, col, abs(hash((r_table, table))))
+                final_output.append(style.SQL_KEYWORD('ALTER TABLE') + ' %s ADD CONSTRAINT %s FOREIGN KEY (%s) REFERENCES %s (%s)%s;' % \
+                    (backend.quote_name(r_table), r_name,
+                    backend.quote_name(r_col), backend.quote_name(table), backend.quote_name(col),
+                    backend.get_deferrable_sql()))
+            del pending_references[model]
+    return final_output
+
+def _get_many_to_many_sql_for_model(model):
+    """Returns CREATE TABLE statements for the join tables of every
+    (non-generic) ManyToManyField on 'model': an 'id' AutoField primary key,
+    one FK column per side, and a UNIQUE constraint over the pair."""
+    from django.db import backend, get_creation_module
+    from django.db.models import GenericRel
+
+    data_types = get_creation_module().DATA_TYPES
+
+    opts = model._meta
+    final_output = []
+    for f in opts.many_to_many:
+        if not isinstance(f.rel, GenericRel):
+            table_output = [style.SQL_KEYWORD('CREATE TABLE') + ' ' + \
+                style.SQL_TABLE(backend.quote_name(f.m2m_db_table())) + ' (']
+            table_output.append('    %s %s %s,' % \
+                (style.SQL_FIELD(backend.quote_name('id')),
+                style.SQL_COLTYPE(data_types['AutoField']),
+                style.SQL_KEYWORD('NOT NULL PRIMARY KEY')))
+            # Column referencing this model's primary key.
+            table_output.append('    %s %s %s %s (%s)%s,' % \
+                (style.SQL_FIELD(backend.quote_name(f.m2m_column_name())),
+                style.SQL_COLTYPE(data_types[get_rel_data_type(opts.pk)] % opts.pk.__dict__),
+                style.SQL_KEYWORD('NOT NULL REFERENCES'),
+                style.SQL_TABLE(backend.quote_name(opts.db_table)),
+                style.SQL_FIELD(backend.quote_name(opts.pk.column)),
+                backend.get_deferrable_sql()))
+            # Column referencing the related model's primary key.
+            table_output.append('    %s %s %s %s (%s)%s,' % \
+                (style.SQL_FIELD(backend.quote_name(f.m2m_reverse_name())),
+                style.SQL_COLTYPE(data_types[get_rel_data_type(f.rel.to._meta.pk)] % f.rel.to._meta.pk.__dict__),
+                style.SQL_KEYWORD('NOT NULL REFERENCES'),
+                style.SQL_TABLE(backend.quote_name(f.rel.to._meta.db_table)),
+                style.SQL_FIELD(backend.quote_name(f.rel.to._meta.pk.column)),
+                backend.get_deferrable_sql()))
+            table_output.append('    %s (%s, %s)' % \
+                (style.SQL_KEYWORD('UNIQUE'),
+                style.SQL_FIELD(backend.quote_name(f.m2m_column_name())),
+                style.SQL_FIELD(backend.quote_name(f.m2m_reverse_name()))))
+            table_output.append(');')
+            final_output.append('\n'.join(table_output))
+    return final_output
+
+def get_sql_delete(app):
+    "Returns a list of the DROP TABLE SQL statements for the given app."
+    from django.db import backend, connection, models, get_introspection_module
+    introspection = get_introspection_module()
+
+    # This should work even if a connection isn't available
+    try:
+        cursor = connection.cursor()
+    except:
+        # Deliberate best-effort: without a connection we simply assume no
+        # tables exist and emit nothing to drop.
+        cursor = None
+
+    # Figure out which tables already exist
+    if cursor:
+        table_names = introspection.get_table_list(cursor)
+    else:
+        table_names = []
+
+    output = []
+
+    # Output DROP TABLE statements for standard application tables.
+    to_delete = set()
+
+    # First pass: record which FK constraints must be dropped before their
+    # target tables, keyed by the target model.
+    references_to_delete = {}
+    app_models = models.get_models(app)
+    for model in app_models:
+        if cursor and model._meta.db_table in table_names:
+            # The table exists, so it needs to be dropped
+            opts = model._meta
+            for f in opts.fields:
+                if f.rel and f.rel.to not in to_delete:
+                    references_to_delete.setdefault(f.rel.to, []).append( (model, f) )
+
+            to_delete.add(model)
+
+    for model in app_models:
+        if cursor and model._meta.db_table in table_names:
+            # Drop the table now
+            output.append('%s %s;' % (style.SQL_KEYWORD('DROP TABLE'),
+                style.SQL_TABLE(backend.quote_name(model._meta.db_table))))
+            if backend.supports_constraints and references_to_delete.has_key(model):
+                for rel_class, f in references_to_delete[model]:
+                    table = rel_class._meta.db_table
+                    col = f.column
+                    r_table = model._meta.db_table
+                    r_col = model._meta.get_field(f.rel.field_name).column
+                    # Constraint name must match the one generated by
+                    # _get_sql_for_pending_references above.
+                    output.append('%s %s %s %s;' % \
+                        (style.SQL_KEYWORD('ALTER TABLE'),
+                        style.SQL_TABLE(backend.quote_name(table)),
+                        style.SQL_KEYWORD(backend.get_drop_foreignkey_sql()),
+                        style.SQL_FIELD(backend.quote_name('%s_refs_%s_%x' % (col, r_col, abs(hash((table, r_table))))))))
+                del references_to_delete[model]
+
+    # Output DROP TABLE statements for many-to-many tables.
+    for model in app_models:
+        opts = model._meta
+        for f in opts.many_to_many:
+            if cursor and f.m2m_db_table() in table_names:
+                output.append("%s %s;" % (style.SQL_KEYWORD('DROP TABLE'),
+                    style.SQL_TABLE(backend.quote_name(f.m2m_db_table()))))
+
+    app_label = app_models[0]._meta.app_label
+
+    # Close database connection explicitly, in case this output is being piped
+    # directly into a database client, to avoid locking issues.
+    if cursor:
+        cursor.close()
+        connection.close()
+
+    return output[::-1] # Reverse it, to deal with table dependencies.
+get_sql_delete.help_doc = "Prints the DROP TABLE SQL statements for the given app name(s)."
+get_sql_delete.args = APP_ARGS
+
+def get_sql_reset(app):
+    "Returns a list of the DROP TABLE SQL, then the CREATE TABLE SQL, for the given module."
+    return get_sql_delete(app) + get_sql_all(app)
+get_sql_reset.help_doc = "Prints the DROP TABLE SQL, then the CREATE TABLE SQL, for the given app name(s)."
+get_sql_reset.args = APP_ARGS
+
+def get_sql_flush():
+    "Returns a list of the SQL statements used to flush the database"
+    from django.db import backend
+    # The backend decides how to empty every installed table and reset every
+    # sequence; 'style' is passed through for output coloring.
+    statements = backend.get_sql_flush(style, _get_table_list(), _get_sequence_list())
+    return statements
+get_sql_flush.help_doc = "Returns a list of the SQL statements required to return all tables in the database to the state they were in just after they were installed."
+get_sql_flush.args = ''
+
+def get_custom_sql_for_model(model):
+    """Returns custom SQL statements for one model, read from
+    <app>/sql/<modelname>.<engine>.sql and <app>/sql/<modelname>.sql
+    (both are used when both exist), split into individual statements."""
+    from django.db import models
+    from django.conf import settings
+
+    opts = model._meta
+    app_dir = os.path.normpath(os.path.join(os.path.dirname(models.get_app(model._meta.app_label).__file__), 'sql'))
+    output = []
+
+    # Some backends can't execute more than one SQL statement at a time,
+    # so split into separate statements.
+    statements = re.compile(r";[ \t]*$", re.M)
+
+    # Find custom SQL, if it's available.
+    sql_files = [os.path.join(app_dir, "%s.%s.sql" % (opts.object_name.lower(), settings.DATABASE_ENGINE)),
+                 os.path.join(app_dir, "%s.sql" % opts.object_name.lower())]
+    for sql_file in sql_files:
+        if os.path.exists(sql_file):
+            fp = open(sql_file, 'U')
+            for statement in statements.split(fp.read()):
+                # Remove any comments from the file
+                # NOTE(review): inside a character class '\Z' is not the
+                # end-of-string anchor; this likely strips '--' comments only
+                # up to a newline (or a literal 'Z') — confirm intent.
+                statement = re.sub(r"--.*[\n\Z]", "", statement)
+                if statement.strip():
+                    output.append(statement + ";")
+            fp.close()
+
+    return output
+
+def get_custom_sql(app):
+    "Returns a list of the custom table modifying SQL statements for the given app."
+    from django.db.models import get_models
+    output = []
+
+    app_models = get_models(app)
+    # NOTE(review): app_dir is computed but unused here; each model recomputes
+    # its own path in get_custom_sql_for_model.
+    app_dir = os.path.normpath(os.path.join(os.path.dirname(app.__file__), 'sql'))
+
+    for model in app_models:
+        output.extend(get_custom_sql_for_model(model))
+
+    return output
+get_custom_sql.help_doc = "Prints the custom table modifying SQL statements for the given app name(s)."
+get_custom_sql.args = APP_ARGS
+
+def get_sql_initial_data(apps):
+    "Returns a list of the initial INSERT SQL statements for the given app."
+    # Deprecated stub: always returns an error string pointing at 'sqlcustom'.
+    return style.ERROR("This action has been renamed. Try './manage.py sqlcustom %s'." % ' '.join(apps and apps or ['app1', 'app2']))
+get_sql_initial_data.help_doc = "RENAMED: see 'sqlcustom'"
+get_sql_initial_data.args = ''
+
+def get_sql_sequence_reset(app):
+    "Returns a list of the SQL statements to reset PostgreSQL sequences for the given app."
+    from django.db import backend, models
+    output = []
+    for model in models.get_models(app):
+        for f in model._meta.fields:
+            if isinstance(f, models.AutoField):
+                # setval('<table>_<col>_seq', max(<col>)) — PostgreSQL naming.
+                output.append("%s setval('%s', (%s max(%s) %s %s));" % \
+                    (style.SQL_KEYWORD('SELECT'),
+                    style.SQL_FIELD('%s_%s_seq' % (model._meta.db_table, f.column)),
+                    style.SQL_KEYWORD('SELECT'),
+                    style.SQL_FIELD(backend.quote_name(f.column)),
+                    style.SQL_KEYWORD('FROM'),
+                    style.SQL_TABLE(backend.quote_name(model._meta.db_table))))
+                break # Only one AutoField is allowed per model, so don't bother continuing.
+        for f in model._meta.many_to_many:
+            # m2m join tables always use an 'id' column / '<table>_id_seq'.
+            output.append("%s setval('%s', (%s max(%s) %s %s));" % \
+                (style.SQL_KEYWORD('SELECT'),
+                style.SQL_FIELD('%s_id_seq' % f.m2m_db_table()),
+                style.SQL_KEYWORD('SELECT'),
+                style.SQL_FIELD(backend.quote_name('id')),
+                style.SQL_KEYWORD('FROM'),
+                style.SQL_TABLE(f.m2m_db_table())))
+    return output
+get_sql_sequence_reset.help_doc = "Prints the SQL statements for resetting PostgreSQL sequences for the given app name(s)."
+get_sql_sequence_reset.args = APP_ARGS
+
+def get_sql_indexes(app):
+    "Returns a list of the CREATE INDEX SQL statements for all models in the given app."
+    from django.db import models
+    output = []
+    for model in models.get_models(app):
+        output.extend(get_sql_indexes_for_model(model))
+    return output
+get_sql_indexes.help_doc = "Prints the CREATE INDEX SQL statements for the given model module name(s)."
+get_sql_indexes.args = APP_ARGS
+
+def get_sql_indexes_for_model(model):
+    "Returns the CREATE INDEX SQL statements for a single model"
+    from django.db import backend
+    output = []
+
+    for f in model._meta.fields:
+        if f.db_index:
+            unique = f.unique and 'UNIQUE ' or ''
+            output.append(
+                style.SQL_KEYWORD('CREATE %sINDEX' % unique) + ' ' + \
+                style.SQL_TABLE('%s_%s' % (model._meta.db_table, f.column)) + ' ' + \
+                style.SQL_KEYWORD('ON') + ' ' + \
+                style.SQL_TABLE(backend.quote_name(model._meta.db_table)) + ' ' + \
+                "(%s);" % style.SQL_FIELD(backend.quote_name(f.column))
+            )
+    return output
+
+def get_sql_all(app):
+    "Returns a list of CREATE TABLE SQL, initial-data inserts, and CREATE INDEX SQL for the given module."
+    return get_sql_create(app) + get_custom_sql(app) + get_sql_indexes(app)
+get_sql_all.help_doc = "Prints the CREATE TABLE, initial-data and CREATE INDEX SQL statements for the given model module name(s)."
+get_sql_all.args = APP_ARGS
+
+def _emit_post_sync_signal(created_models, verbosity, interactive):
+    """Sends the post_syncdb signal once per installed app, passing along the
+    set of models whose tables were just created plus the verbosity and
+    interactive flags from the calling command."""
+    from django.db import models
+    from django.dispatch import dispatcher
+    # Emit the post_sync signal for every application.
+    for app in models.get_apps():
+        # App name is the next-to-last dotted component, e.g. 'auth' from
+        # 'django.contrib.auth.models'.
+        app_name = app.__name__.split('.')[-2]
+        if verbosity >= 2:
+            print "Running post-sync handlers for application", app_name
+        dispatcher.send(signal=models.signals.post_syncdb, sender=app,
+            app=app, created_models=created_models,
+            verbosity=verbosity, interactive=interactive)
+
+def syncdb(verbosity=1, interactive=True):
+ "Creates the database tables for all apps in INSTALLED_APPS whose tables haven't already been created."
+ from django.db import connection, transaction, models, get_creation_module
+ from django.conf import settings
+
+ disable_termcolors()
+
+ # First, try validating the models.
+ _check_for_validation_errors()
+
+ # Import the 'management' module within each installed app, to register
+ # dispatcher events.
+ for app_name in settings.INSTALLED_APPS:
+ try:
+ __import__(app_name + '.management', {}, {}, [''])
+ except ImportError:
+ pass
+
+ data_types = get_creation_module().DATA_TYPES
+
+ cursor = connection.cursor()
+
+ # Get a list of all existing database tables,
+ # so we know what needs to be added.
+ table_list = _get_table_list()
+
+ # Get a list of already installed *models* so that references work right.
+ seen_models = _get_installed_models(table_list)
+ created_models = set()
+ pending_references = {}
+
+ # Create the tables for each model
+ for app in models.get_apps():
+ app_name = app.__name__.split('.')[-2]
+ model_list = models.get_models(app)
+ for model in model_list:
+ # Create the model's database table, if it doesn't already exist.
+ if verbosity >= 2:
+ print "Processing %s.%s model" % (app_name, model._meta.object_name)
+ if model._meta.db_table in table_list:
+ continue
+ sql, references = _get_sql_model_create(model, seen_models)
+ seen_models.add(model)
+ created_models.add(model)
+ for refto, refs in references.items():
+ pending_references.setdefault(refto, []).extend(refs)
+ sql.extend(_get_sql_for_pending_references(model, pending_references))
+ if verbosity >= 1:
+ print "Creating table %s" % model._meta.db_table
+ for statement in sql:
+ cursor.execute(statement)
+ table_list.append(model._meta.db_table)
+
+ # Create the m2m tables. This must be done after all tables have been created
+ # to ensure that all referred tables will exist.
+ for app in models.get_apps():
+ app_name = app.__name__.split('.')[-2]
+ model_list = models.get_models(app)
+ for model in model_list:
+ if model in created_models:
+ sql = _get_many_to_many_sql_for_model(model)
+ if sql:
+ if verbosity >= 2:
+ print "Creating many-to-many tables for %s.%s model" % (app_name, model._meta.object_name)
+ for statement in sql:
+ cursor.execute(statement)
+
+ transaction.commit_unless_managed()
+
+ # Send the post_syncdb signal, so individual apps can do whatever they need
+ # to do at this point.
+ _emit_post_sync_signal(created_models, verbosity, interactive)
+
+ # Install custom SQL for the app (but only if this
+ # is a model we've just created)
+ for app in models.get_apps():
+ for model in models.get_models(app):
+ if model in created_models:
+ custom_sql = get_custom_sql_for_model(model)
+ if custom_sql:
+ if verbosity >= 1:
+ print "Installing custom SQL for %s.%s model" % (app_name, model._meta.object_name)
+ try:
+ for sql in custom_sql:
+ cursor.execute(sql)
+ except Exception, e:
+ sys.stderr.write("Failed to install custom SQL for %s.%s model: %s" % \
+ (app_name, model._meta.object_name, e))
+ transaction.rollback_unless_managed()
+ else:
+ transaction.commit_unless_managed()
+
+ # Install SQL indicies for all newly created models
+ for app in models.get_apps():
+ app_name = app.__name__.split('.')[-2]
+ for model in models.get_models(app):
+ if model in created_models:
+ index_sql = get_sql_indexes_for_model(model)
+ if index_sql:
+ if verbosity >= 1:
+ print "Installing index for %s.%s model" % (app_name, model._meta.object_name)
+ try:
+ for sql in index_sql:
+ cursor.execute(sql)
+ except Exception, e:
+ sys.stderr.write("Failed to install index for %s.%s model: %s" % \
+ (app_name, model._meta.object_name, e))
+ transaction.rollback_unless_managed()
+ else:
+ transaction.commit_unless_managed()
+
+ # Install the 'initialdata' fixture, using format discovery
+ load_data(['initial_data'], verbosity=verbosity)
+syncdb.help_doc = "Create the database tables for all apps in INSTALLED_APPS whose tables haven't already been created."
+syncdb.args = '[--verbosity] [--interactive]'
+
def get_admin_index(app):
    "Returns admin-index template snippet (in list form) for the given app."
    from django.utils.text import capfirst
    from django.db.models import get_models
    app_models = get_models(app)
    app_label = app_models[0]._meta.app_label
    # Wrap the whole module table in a permission check for this app.
    snippet = [
        '{%% if perms.%s %%}' % app_label,
        '<div class="module"><h2>%s</h2><table>' % app_label.title(),
    ]
    for model in app_models:
        # Only models registered with the admin get a row.
        if not model._meta.admin:
            continue
        snippet.append(MODULE_TEMPLATE % {
            'app': app_label,
            'mod': model._meta.module_name,
            'name': capfirst(model._meta.verbose_name_plural),
            'addperm': model._meta.get_add_permission(),
            'changeperm': model._meta.get_change_permission(),
        })
    snippet.append('</table></div>')
    snippet.append('{% endif %}')
    return snippet
get_admin_index.help_doc = "Prints the admin-index template snippet for the given app name(s)."
get_admin_index.args = APP_ARGS
+
+def _module_to_dict(module, omittable=lambda k: k.startswith('_')):
+ "Converts a module namespace to a Python dictionary. Used by get_settings_diff."
+ return dict([(k, repr(v)) for k, v in module.__dict__.items() if not omittable(k)])
+
+def diffsettings():
+ """
+ Displays differences between the current settings.py and Django's
+ default settings. Settings that don't appear in the defaults are
+ followed by "###".
+ """
+ # Inspired by Postfix's "postconf -n".
+ from django.conf import settings, global_settings
+
+ user_settings = _module_to_dict(settings._target)
+ default_settings = _module_to_dict(global_settings)
+
+ output = []
+ keys = user_settings.keys()
+ keys.sort()
+ for key in keys:
+ if key not in default_settings:
+ output.append("%s = %s ###" % (key, user_settings[key]))
+ elif user_settings[key] != default_settings[key]:
+ output.append("%s = %s" % (key, user_settings[key]))
+ print '\n'.join(output)
+diffsettings.args = ""
+
def reset(app, interactive=True):
    "Executes the equivalent of 'get_sql_reset' in the current database."
    from django.db import connection, transaction
    from django.conf import settings
    # App "name" is the second-to-last component of the module path
    # (e.g. 'myproject.polls.models' -> 'polls').
    app_name = app.__name__.split('.')[-2]

    disable_termcolors()

    # First, try validating the models.
    _check_for_validation_errors(app)
    # SQL to drop and re-create this app's tables, generated before prompting.
    sql_list = get_sql_reset(app)

    # Destructive operation: require explicit confirmation unless the caller
    # opted out (e.g. scripted use with --noinput).
    if interactive:
        confirm = raw_input("""
You have requested a database reset.
This will IRREVERSIBLY DESTROY any data for
the "%s" application in the database "%s".
Are you sure you want to do this?

Type 'yes' to continue, or 'no' to cancel: """ % (app_name, settings.DATABASE_NAME))
    else:
        confirm = 'yes'

    if confirm == 'yes':
        try:
            cursor = connection.cursor()
            for sql in sql_list:
                cursor.execute(sql)
        except Exception, e:
            # Print a friendly hint plus the raw DB error, roll back whatever
            # partially executed, and exit non-zero.
            sys.stderr.write(style.ERROR("""Error: %s couldn't be reset. Possible reasons:
  * The database isn't running or isn't configured correctly.
  * At least one of the database tables doesn't exist.
  * The SQL was invalid.
Hint: Look at the output of 'django-admin.py sqlreset %s'. That's the SQL this command wasn't able to run.
The full error: """ % (app_name, app_name)) + style.ERROR_OUTPUT(str(e)) + '\n')
            transaction.rollback_unless_managed()
            sys.exit(1)
        transaction.commit_unless_managed()
    else:
        print "Reset cancelled."
reset.help_doc = "Executes ``sqlreset`` for the given app(s) in the current database."
reset.args = '[--interactive]' + APP_ARGS
+
+def flush(verbosity=1, interactive=True):
+ "Returns all tables in the database to the same state they were in immediately after syncdb."
+ from django.conf import settings
+ from django.db import connection, transaction, models
+ from django.dispatch import dispatcher
+
+ disable_termcolors()
+
+ # First, try validating the models.
+ _check_for_validation_errors()
+
+ # Import the 'management' module within each installed app, to register
+ # dispatcher events.
+ for app_name in settings.INSTALLED_APPS:
+ try:
+ __import__(app_name + '.management', {}, {}, [''])
+ except ImportError:
+ pass
+
+ sql_list = get_sql_flush()
+
+ if interactive:
+ confirm = raw_input("""
+You have requested a flush of the database.
+This will IRREVERSIBLY DESTROY all data currently in the database,
+and return each table to the state it was in after syncdb.
+Are you sure you want to do this?
+
+Type 'yes' to continue, or 'no' to cancel: """)
+ else:
+ confirm = 'yes'
+
+ if confirm == 'yes':
+ try:
+ cursor = connection.cursor()
+ for sql in sql_list:
+ cursor.execute(sql)
+ except Exception, e:
+ sys.stderr.write(style.ERROR("""Error: Database %s couldn't be flushed. Possible reasons:
+ * The database isn't running or isn't configured correctly.
+ * At least one of the expected database tables doesn't exist.
+ * The SQL was invalid.
+Hint: Look at the output of 'django-admin.py sqlflush'. That's the SQL this command wasn't able to run.
+The full error: """ % settings.DATABASE_NAME + style.ERROR_OUTPUT(str(e)) + '\n'))
+ transaction.rollback_unless_managed()
+ sys.exit(1)
+ transaction.commit_unless_managed()
+
+ # Emit the post sync signal. This allows individual
+ # applications to respond as if the database had been
+ # sync'd from scratch.
+ _emit_post_sync_signal(models.get_models(), verbosity, interactive)
+
+ # Reinstall the initial_data fixture
+ load_data(['initial_data'], verbosity=verbosity)
+
+ else:
+ print "Flush cancelled."
+flush.help_doc = "Executes ``sqlflush`` on the current database."
+flush.args = '[--verbosity] [--interactive]'
+
+def _start_helper(app_or_project, name, directory, other_name=''):
+ other = {'project': 'app', 'app': 'project'}[app_or_project]
+ if not _is_valid_dir_name(name):
+ sys.stderr.write(style.ERROR("Error: %r is not a valid %s name. Please use only numbers, letters and underscores.\n" % (name, app_or_project)))
+ sys.exit(1)
+ top_dir = os.path.join(directory, name)
+ try:
+ os.mkdir(top_dir)
+ except OSError, e:
+ sys.stderr.write(style.ERROR("Error: %s\n" % e))
+ sys.exit(1)
+ template_dir = PROJECT_TEMPLATE_DIR % app_or_project
+ for d, subdirs, files in os.walk(template_dir):
+ relative_dir = d[len(template_dir)+1:].replace('%s_name' % app_or_project, name)
+ if relative_dir:
+ os.mkdir(os.path.join(top_dir, relative_dir))
+ for i, subdir in enumerate(subdirs):
+ if subdir.startswith('.'):
+ del subdirs[i]
+ for f in files:
+ if f.endswith('.pyc'):
+ continue
+ path_old = os.path.join(d, f)
+ path_new = os.path.join(top_dir, relative_dir, f.replace('%s_name' % app_or_project, name))
+ fp_old = open(path_old, 'r')
+ fp_new = open(path_new, 'w')
+ fp_new.write(fp_old.read().replace('{{ %s_name }}' % app_or_project, name).replace('{{ %s_name }}' % other, other_name))
+ fp_old.close()
+ fp_new.close()
+ try:
+ shutil.copymode(path_old, path_new)
+ except OSError:
+ sys.stderr.write(style.NOTICE("Notice: Couldn't set permission bits on %s. You're probably using an uncommon filesystem setup. No problem.\n" % path_new))
+
def startproject(project_name, directory):
    "Creates a Django project for the given project_name in the given directory."
    from random import choice
    if project_name in INVALID_PROJECT_NAMES:
        # Bug fix: %r already adds quotes around the name; the original
        # "'%r'" printed the name with doubled quotes.
        sys.stderr.write(style.ERROR("Error: %r conflicts with the name of an existing Python module and cannot be used as a project name. Please try another name.\n" % project_name))
        sys.exit(1)
    _start_helper('project', project_name, directory)
    # Create a random SECRET_KEY hash, and put it in the main settings.
    # NOTE(review): random.choice is not cryptographically strong; kept for
    # compatibility with the era's Django behavior.
    main_settings_file = os.path.join(directory, project_name, 'settings.py')
    fp = open(main_settings_file, 'r')
    settings_contents = fp.read()
    fp.close()  # Bug fix: the read handle was never closed in the original.
    secret_key = ''.join([choice('abcdefghijklmnopqrstuvwxyz0123456789!@#$%^&*(-_=+)') for i in range(50)])
    # Splice the key in between the empty quotes of SECRET_KEY = ''.
    settings_contents = re.sub(r"(?<=SECRET_KEY = ')'", secret_key + "'", settings_contents)
    fp = open(main_settings_file, 'w')
    fp.write(settings_contents)
    fp.close()
startproject.help_doc = "Creates a Django project directory structure for the given project name in the current directory."
startproject.args = "[projectname]"
+
def startapp(app_name, directory):
    "Creates a Django app for the given app_name in the given directory."
    # Determine the project_name a bit naively -- by looking at the name of
    # the parent directory.
    parent_dir = os.path.normpath(os.path.join(directory, '..'))
    project_name = os.path.basename(parent_dir)
    # Refuse to shadow the project itself.
    if os.path.basename(directory) == app_name:
        sys.stderr.write(style.ERROR("Error: You cannot create an app with the same name (%r) as your project.\n" % app_name))
        sys.exit(1)
    _start_helper('app', app_name, directory, project_name)
startapp.help_doc = "Creates a Django app directory structure for the given app name in the current directory."
startapp.args = "[appname]"
+
def inspectdb():
    """
    Generator that introspects the tables in the given database name and
    returns a Django model, one line at a time. Yields source lines for a
    complete models module, one class per database table.
    """
    from django.db import connection, get_introspection_module
    import keyword

    introspection_module = get_introspection_module()

    # 'some_table_name' -> 'SomeTableName'
    table2model = lambda table_name: table_name.title().replace('_', '')

    cursor = connection.cursor()
    yield "# This is an auto-generated Django model module."
    yield "# You'll have to do the following manually to clean this up:"
    yield "#     * Rearrange models' order"
    yield "#     * Make sure each model has one field with primary_key=True"
    yield "# Feel free to rename the models, but don't rename db_table values or field names."
    yield "#"
    yield "# Also note: You'll have to insert the output of 'django-admin.py sqlcustom [appname]'"
    yield "# into your database."
    yield ''
    yield 'from django.db import models'
    yield ''
    for table_name in introspection_module.get_table_list(cursor):
        yield 'class %s(models.Model):' % table2model(table_name)
        # Relations/indexes are optional backend features; degrade gracefully.
        try:
            relations = introspection_module.get_relations(cursor, table_name)
        except NotImplementedError:
            relations = {}
        try:
            indexes = introspection_module.get_indexes(cursor, table_name)
        except NotImplementedError:
            indexes = {}
        for i, row in enumerate(introspection_module.get_table_description(cursor, table_name)):
            att_name = row[0]
            comment_notes = [] # Holds Field notes, to be displayed in a Python comment.
            extra_params = {}  # Holds Field parameters such as 'db_column'.

            # Sanitize column names that aren't valid Python identifiers.
            if ' ' in att_name:
                extra_params['db_column'] = att_name
                att_name = att_name.replace(' ', '')
                comment_notes.append('Field renamed to remove spaces.')
            if keyword.iskeyword(att_name):
                extra_params['db_column'] = att_name
                att_name += '_field'
                comment_notes.append('Field renamed because it was a Python reserved word.')

            # Idiom fix: 'in' replaces the deprecated dict.has_key().
            if i in relations:
                rel_to = relations[i][1] == table_name and "'self'" or table2model(relations[i][1])
                field_type = 'ForeignKey(%s' % rel_to
                if att_name.endswith('_id'):
                    att_name = att_name[:-3]
                else:
                    extra_params['db_column'] = att_name
            else:
                try:
                    field_type = introspection_module.DATA_TYPES_REVERSE[row[1]]
                except KeyError:
                    field_type = 'TextField'
                    comment_notes.append('This field type is a guess.')

            # This is a hook for DATA_TYPES_REVERSE to return a tuple of
            # (field_type, extra_params_dict).
            if type(field_type) is tuple:
                field_type, new_params = field_type
                extra_params.update(new_params)

            # Add maxlength for all CharFields.
            if field_type == 'CharField' and row[3]:
                extra_params['maxlength'] = row[3]

            if field_type == 'FloatField':
                extra_params['max_digits'] = row[4]
                extra_params['decimal_places'] = row[5]

            # Add primary_key and unique, if necessary.
            column_name = extra_params.get('db_column', att_name)
            if column_name in indexes:
                if indexes[column_name]['primary_key']:
                    extra_params['primary_key'] = True
                elif indexes[column_name]['unique']:
                    extra_params['unique'] = True

            field_type += '('

            # Don't output 'id = meta.AutoField(primary_key=True)', because
            # that's assumed if it doesn't exist.
            if att_name == 'id' and field_type == 'AutoField(' and extra_params == {'primary_key': True}:
                continue

            # Add 'null' and 'blank', if the 'null_ok' flag was present in the
            # table description.
            if row[6]: # If it's NULL...
                extra_params['blank'] = True
                if not field_type in ('TextField(', 'CharField('):
                    extra_params['null'] = True

            field_desc = '%s = models.%s' % (att_name, field_type)
            if extra_params:
                if not field_desc.endswith('('):
                    field_desc += ', '
                field_desc += ', '.join(['%s=%r' % (k, v) for k, v in extra_params.items()])
            field_desc += ')'
            if comment_notes:
                field_desc += ' # ' + ' '.join(comment_notes)
            yield '    %s' % field_desc
        yield '    class Meta:'
        yield '        db_table = %r' % table_name
        yield ''
inspectdb.help_doc = "Introspects the database tables in the given database and outputs a Django model module."
inspectdb.args = ""
+
class ModelErrorCollection:
    """Accumulates (context, error) pairs and echoes each one to outfile."""

    def __init__(self, outfile=sys.stdout):
        self.errors = []
        self.outfile = outfile

    def add(self, context, error):
        # Record the error and immediately report it, styled, to the stream.
        message = "%s: %s\n" % (context, error)
        self.errors.append((context, error))
        self.outfile.write(style.ERROR(message))
+
def get_validation_errors(outfile, app=None):
    """
    Validates all models that are part of the specified app. If no app name is provided,
    validates all models of all installed apps. Writes errors, if any, to outfile.
    Returns number of errors.
    """
    from django.conf import settings
    from django.db import models, connection
    from django.db.models.loading import get_app_errors
    from django.db.models.fields.related import RelatedObject

    e = ModelErrorCollection(outfile)

    # Errors recorded while the app cache was populated (import failures etc).
    for (app_name, error) in get_app_errors().items():
        e.add(app_name, error)

    # Validate each model class in the requested app (or all apps).
    for cls in models.get_models(app):
        opts = cls._meta

        # Do field-specific validation.
        for f in opts.fields:
            if f.name == 'id' and not f.primary_key and opts.pk.name == 'id':
                e.add(opts, '"%s": You can\'t use "id" as a field name, because each model automatically gets an "id" field if none of the fields have primary_key=True. You need to either remove/rename your "id" field or add primary_key=True to a field.' % f.name)
            if isinstance(f, models.CharField) and f.maxlength in (None, 0):
                e.add(opts, '"%s": CharFields require a "maxlength" attribute.' % f.name)
            if isinstance(f, models.FloatField):
                if f.decimal_places is None:
                    e.add(opts, '"%s": FloatFields require a "decimal_places" attribute.' % f.name)
                if f.max_digits is None:
                    e.add(opts, '"%s": FloatFields require a "max_digits" attribute.' % f.name)
            if isinstance(f, models.FileField) and not f.upload_to:
                e.add(opts, '"%s": FileFields require an "upload_to" attribute.' % f.name)
            if isinstance(f, models.ImageField):
                # ImageField needs PIL at runtime; only the import is checked here.
                try:
                    from PIL import Image
                except ImportError:
                    e.add(opts, '"%s": To use ImageFields, you need to install the Python Imaging Library. Get it at http://www.pythonware.com/products/pil/ .' % f.name)
            if f.prepopulate_from is not None and type(f.prepopulate_from) not in (list, tuple):
                e.add(opts, '"%s": prepopulate_from should be a list or tuple.' % f.name)
            if f.choices:
                if not hasattr(f.choices, '__iter__'):
                    e.add(opts, '"%s": "choices" should be iterable (e.g., a tuple or list).' % f.name)
                else:
                    for c in f.choices:
                        if not type(c) in (tuple, list) or len(c) != 2:
                            e.add(opts, '"%s": "choices" should be a sequence of two-tuples.' % f.name)
            if f.db_index not in (None, True, False):
                e.add(opts, '"%s": "db_index" should be either None, True or False.' % f.name)

            # Check that maxlength <= 255 if using older MySQL versions.
            if settings.DATABASE_ENGINE == 'mysql':
                db_version = connection.get_server_version()
                if db_version < (5, 0, 3) and isinstance(f, (models.CharField, models.CommaSeparatedIntegerField, models.SlugField)) and f.maxlength > 255:
                    e.add(opts, '"%s": %s cannot have a "maxlength" greater than 255 when you are using a version of MySQL prior to 5.0.3 (you are using %s).' % (f.name, f.__class__.__name__, '.'.join([str(n) for n in db_version[:3]])))

            # Check to see if the related field will clash with any
            # existing fields, m2m fields, m2m related objects or related objects
            if f.rel:
                rel_opts = f.rel.to._meta
                if f.rel.to not in models.get_models():
                    e.add(opts, "'%s' has relation with model %s, which has not been installed" % (f.name, rel_opts.object_name))

                # The reverse accessor and reverse query name this FK creates
                # on the related model; each must not collide with anything
                # already defined there.
                rel_name = RelatedObject(f.rel.to, cls, f).get_accessor_name()
                rel_query_name = f.related_query_name()
                for r in rel_opts.fields:
                    if r.name == rel_name:
                        e.add(opts, "Accessor for field '%s' clashes with field '%s.%s'. Add a related_name argument to the definition for '%s'." % (f.name, rel_opts.object_name, r.name, f.name))
                    if r.name == rel_query_name:
                        e.add(opts, "Reverse query name for field '%s' clashes with field '%s.%s'. Add a related_name argument to the definition for '%s'." % (f.name, rel_opts.object_name, r.name, f.name))
                for r in rel_opts.many_to_many:
                    if r.name == rel_name:
                        e.add(opts, "Accessor for field '%s' clashes with m2m field '%s.%s'. Add a related_name argument to the definition for '%s'." % (f.name, rel_opts.object_name, r.name, f.name))
                    if r.name == rel_query_name:
                        e.add(opts, "Reverse query name for field '%s' clashes with m2m field '%s.%s'. Add a related_name argument to the definition for '%s'." % (f.name, rel_opts.object_name, r.name, f.name))
                for r in rel_opts.get_all_related_many_to_many_objects():
                    if r.get_accessor_name() == rel_name:
                        e.add(opts, "Accessor for field '%s' clashes with related m2m field '%s.%s'. Add a related_name argument to the definition for '%s'." % (f.name, rel_opts.object_name, r.get_accessor_name(), f.name))
                    if r.get_accessor_name() == rel_query_name:
                        e.add(opts, "Reverse query name for field '%s' clashes with related m2m field '%s.%s'. Add a related_name argument to the definition for '%s'." % (f.name, rel_opts.object_name, r.get_accessor_name(), f.name))
                for r in rel_opts.get_all_related_objects():
                    # Skip this field's own related object.
                    if r.field is not f:
                        if r.get_accessor_name() == rel_name:
                            e.add(opts, "Accessor for field '%s' clashes with related field '%s.%s'. Add a related_name argument to the definition for '%s'." % (f.name, rel_opts.object_name, r.get_accessor_name(), f.name))
                        if r.get_accessor_name() == rel_query_name:
                            e.add(opts, "Reverse query name for field '%s' clashes with related field '%s.%s'. Add a related_name argument to the definition for '%s'." % (f.name, rel_opts.object_name, r.get_accessor_name(), f.name))


        for i, f in enumerate(opts.many_to_many):
            # Check to see if the related m2m field will clash with any
            # existing fields, m2m fields, m2m related objects or related objects
            rel_opts = f.rel.to._meta
            if f.rel.to not in models.get_models():
                e.add(opts, "'%s' has m2m relation with model %s, which has not been installed" % (f.name, rel_opts.object_name))

            rel_name = RelatedObject(f.rel.to, cls, f).get_accessor_name()
            rel_query_name = f.related_query_name()
            # If rel_name is none, there is no reverse accessor.
            # (This only occurs for symmetrical m2m relations to self).
            # If this is the case, there are no clashes to check for this field, as
            # there are no reverse descriptors for this field.
            if rel_name is not None:
                for r in rel_opts.fields:
                    if r.name == rel_name:
                        e.add(opts, "Accessor for m2m field '%s' clashes with field '%s.%s'. Add a related_name argument to the definition for '%s'." % (f.name, rel_opts.object_name, r.name, f.name))
                    if r.name == rel_query_name:
                        e.add(opts, "Reverse query name for m2m field '%s' clashes with field '%s.%s'. Add a related_name argument to the definition for '%s'." % (f.name, rel_opts.object_name, r.name, f.name))
                for r in rel_opts.many_to_many:
                    if r.name == rel_name:
                        e.add(opts, "Accessor for m2m field '%s' clashes with m2m field '%s.%s'. Add a related_name argument to the definition for '%s'." % (f.name, rel_opts.object_name, r.name, f.name))
                    if r.name == rel_query_name:
                        e.add(opts, "Reverse query name for m2m field '%s' clashes with m2m field '%s.%s'. Add a related_name argument to the definition for '%s'." % (f.name, rel_opts.object_name, r.name, f.name))
                for r in rel_opts.get_all_related_many_to_many_objects():
                    # Skip this m2m field's own related object.
                    if r.field is not f:
                        if r.get_accessor_name() == rel_name:
                            e.add(opts, "Accessor for m2m field '%s' clashes with related m2m field '%s.%s'. Add a related_name argument to the definition for '%s'." % (f.name, rel_opts.object_name, r.get_accessor_name(), f.name))
                        if r.get_accessor_name() == rel_query_name:
                            e.add(opts, "Reverse query name for m2m field '%s' clashes with related m2m field '%s.%s'. Add a related_name argument to the definition for '%s'." % (f.name, rel_opts.object_name, r.get_accessor_name(), f.name))
                for r in rel_opts.get_all_related_objects():
                    if r.get_accessor_name() == rel_name:
                        e.add(opts, "Accessor for m2m field '%s' clashes with related field '%s.%s'. Add a related_name argument to the definition for '%s'." % (f.name, rel_opts.object_name, r.get_accessor_name(), f.name))
                    if r.get_accessor_name() == rel_query_name:
                        e.add(opts, "Reverse query name for m2m field '%s' clashes with related field '%s.%s'. Add a related_name argument to the definition for '%s'." % (f.name, rel_opts.object_name, r.get_accessor_name(), f.name))

        # Check admin attribute.
        if opts.admin is not None:
            if not isinstance(opts.admin, models.AdminOptions):
                e.add(opts, '"admin" attribute, if given, must be set to a models.AdminOptions() instance.')
            else:
                # list_display
                if not isinstance(opts.admin.list_display, (list, tuple)):
                    e.add(opts, '"admin.list_display", if given, must be set to a list or tuple.')
                else:
                    for fn in opts.admin.list_display:
                        try:
                            f = opts.get_field(fn)
                        except models.FieldDoesNotExist:
                            # Not a field; callables/attributes are acceptable.
                            if not hasattr(cls, fn):
                                e.add(opts, '"admin.list_display" refers to %r, which isn\'t an attribute, method or property.' % fn)
                        else:
                            if isinstance(f, models.ManyToManyField):
                                e.add(opts, '"admin.list_display" doesn\'t support ManyToManyFields (%r).' % fn)
                # list_display_links
                if opts.admin.list_display_links and not opts.admin.list_display:
                    e.add(opts, '"admin.list_display" must be defined for "admin.list_display_links" to be used.')
                if not isinstance(opts.admin.list_display_links, (list, tuple)):
                    e.add(opts, '"admin.list_display_links", if given, must be set to a list or tuple.')
                else:
                    for fn in opts.admin.list_display_links:
                        try:
                            f = opts.get_field(fn)
                        except models.FieldDoesNotExist:
                            if not hasattr(cls, fn):
                                e.add(opts, '"admin.list_display_links" refers to %r, which isn\'t an attribute, method or property.' % fn)
                        if fn not in opts.admin.list_display:
                            e.add(opts, '"admin.list_display_links" refers to %r, which is not defined in "admin.list_display".' % fn)
                # list_filter
                if not isinstance(opts.admin.list_filter, (list, tuple)):
                    e.add(opts, '"admin.list_filter", if given, must be set to a list or tuple.')
                else:
                    for fn in opts.admin.list_filter:
                        try:
                            f = opts.get_field(fn)
                        except models.FieldDoesNotExist:
                            e.add(opts, '"admin.list_filter" refers to %r, which isn\'t a field.' % fn)
                # date_hierarchy
                if opts.admin.date_hierarchy:
                    try:
                        f = opts.get_field(opts.admin.date_hierarchy)
                    except models.FieldDoesNotExist:
                        e.add(opts, '"admin.date_hierarchy" refers to %r, which isn\'t a field.' % opts.admin.date_hierarchy)

        # Check ordering attribute.
        if opts.ordering:
            for field_name in opts.ordering:
                if field_name == '?': continue
                if field_name.startswith('-'):
                    field_name = field_name[1:]
                if opts.order_with_respect_to and field_name == '_order':
                    continue
                if '.' in field_name: continue # Skip ordering in the format 'table.field'.
                try:
                    opts.get_field(field_name, many_to_many=False)
                except models.FieldDoesNotExist:
                    e.add(opts, '"ordering" refers to "%s", a field that doesn\'t exist.' % field_name)

        # Check core=True, if needed.
        for related in opts.get_followed_related_objects():
            if not related.edit_inline:
                continue
            # StopIteration is used as an early exit once a core field is found.
            try:
                for f in related.opts.fields:
                    if f.core:
                        raise StopIteration
                e.add(related.opts, "At least one field in %s should have core=True, because it's being edited inline by %s.%s." % (related.opts.object_name, opts.module_name, opts.object_name))
            except StopIteration:
                pass

        # Check unique_together.
        for ut in opts.unique_together:
            for field_name in ut:
                try:
                    f = opts.get_field(field_name, many_to_many=True)
                except models.FieldDoesNotExist:
                    e.add(opts, '"unique_together" refers to %s, a field that doesn\'t exist. Check your syntax.' % field_name)
                else:
                    if isinstance(f.rel, models.ManyToManyRel):
                        e.add(opts, '"unique_together" refers to %s. ManyToManyFields are not supported in unique_together.' % f.name)

    return len(e.errors)
+
def validate(outfile=sys.stdout, silent_success=False):
    "Validates all installed models."
    try:
        num_errors = get_validation_errors(outfile)
        # Report the count unless the caller asked for silence on success.
        if not (silent_success and num_errors == 0):
            plural = num_errors != 1 and 's' or ''
            outfile.write('%s error%s found.\n' % (num_errors, plural))
    except ImproperlyConfigured:
        outfile.write("Skipping validation because things aren't configured properly.")
validate.args = ''
+
def _check_for_validation_errors(app=None):
    """Check that an app has no validation errors, and exit with errors if it does."""
    try:
        from cStringIO import StringIO
    except ImportError:
        from StringIO import StringIO
    buf = StringIO()
    # Capture the validation report; only surface it if something failed.
    if not get_validation_errors(buf, app):
        return
    if app:
        header = "Error: %s couldn't be installed, because there were errors in your model:\n" % app
    else:
        header = "Error: Couldn't install apps, because there were errors in one or more models:\n"
    sys.stderr.write(style.ERROR(header))
    buf.seek(0)
    sys.stderr.write(buf.read())
    sys.exit(1)
+
+def runserver(addr, port, use_reloader=True, admin_media_dir=''):
+ "Starts a lightweight Web server for development."
+ from django.core.servers.basehttp import run, AdminMediaHandler, WSGIServerException
+ from django.core.handlers.wsgi import WSGIHandler
+ if not addr:
+ addr = '127.0.0.1'
+ if not port.isdigit():
+ sys.stderr.write(style.ERROR("Error: %r is not a valid port number.\n" % port))
+ sys.exit(1)
+ quit_command = sys.platform == 'win32' and 'CTRL-BREAK' or 'CONTROL-C'
+ def inner_run():
+ from django.conf import settings
+ print "Validating models..."
+ validate()
+ print "\nDjango version %s, using settings %r" % (get_version(), settings.SETTINGS_MODULE)
+ print "Development server is running at http://%s:%s/" % (addr, port)
+ print "Quit the server with %s." % quit_command
+ try:
+ handler = AdminMediaHandler(WSGIHandler(), admin_media_path)
+ run(addr, int(port), handler)
+ except WSGIServerException, e:
+ # Use helpful error messages instead of ugly tracebacks.
+ ERRORS = {
+ 13: "You don't have permission to access that port.",
+ 98: "That port is already in use.",
+ 99: "That IP address can't be assigned-to.",
+ }
+ try:
+ error_text = ERRORS[e.args[0].args[0]]
+ except (AttributeError, KeyError):
+ error_text = str(e)
+ sys.stderr.write(style.ERROR("Error: %s" % error_text) + '\n')
+ sys.exit(1)
+ except KeyboardInterrupt:
+ sys.exit(0)
+ if use_reloader:
+ from django.utils import autoreload
+ autoreload.main(inner_run)
+ else:
+ inner_run()
+runserver.args = '[--noreload] [--adminmedia=ADMIN_MEDIA_PATH] [optional port number, or ipaddr:port]'
+
def createcachetable(tablename):
    "Creates the table needed to use the SQL cache backend"
    from django.db import backend, connection, transaction, get_creation_module, models
    data_types = get_creation_module().DATA_TYPES
    fields = (
        # "key" is a reserved word in MySQL, so use "cache_key" instead.
        models.CharField(name='cache_key', maxlength=255, unique=True, primary_key=True),
        models.TextField(name='value'),
        models.DateTimeField(name='expires', db_index=True),
    )
    table_output = []
    index_output = []
    for f in fields:
        # Column definition: quoted name, backend column type, then constraints.
        column = [backend.quote_name(f.name), data_types[f.get_internal_type()] % f.__dict__]
        if f.null:
            column.append("NULL")
        else:
            column.append("NOT NULL")
        if f.unique:
            column.append("UNIQUE")
        if f.primary_key:
            column.append("PRIMARY KEY")
        if f.db_index:
            if f.unique:
                index_kind = "UNIQUE "
            else:
                index_kind = ""
            index_output.append("CREATE %sINDEX %s_%s ON %s (%s);" % \
                (index_kind, tablename, f.name, backend.quote_name(tablename),
                backend.quote_name(f.name)))
        table_output.append(" ".join(column))
    # Assemble the CREATE TABLE statement with comma-separated column lines.
    statement_lines = ["CREATE TABLE %s (" % backend.quote_name(tablename)]
    last_index = len(table_output) - 1
    for i, line in enumerate(statement_lines and table_output):
        if i < last_index:
            statement_lines.append('    %s,' % line)
        else:
            statement_lines.append('    %s' % line)
    statement_lines.append(');')
    curs = connection.cursor()
    curs.execute("\n".join(statement_lines))
    for statement in index_output:
        curs.execute(statement)
    transaction.commit_unless_managed()
createcachetable.args = "[tablename]"
+
def run_shell(use_plain=False):
    "Runs a Python interactive interpreter. Tries to use IPython, if it's available."
    # XXX: (Temporary) workaround for ticket #1796: force early loading of all
    # models from installed apps.
    from django.db.models.loading import get_models
    loaded_models = get_models()

    if not use_plain:
        # Prefer IPython when it is installed; fall back to the plain shell
        # on any ImportError raised while setting it up.
        try:
            import IPython
            # Explicitly pass an empty argv so IPython doesn't pick up the
            # arguments that were given to this script via sys.argv.
            IPython.Shell.IPShell(argv=[]).mainloop()
            return
        except ImportError:
            pass
    # Plain interpreter: either explicitly requested, or IPython is missing.
    import code
    try:
        # Activate rlcompleter when readline is available, because it's handy.
        import readline
    except ImportError:
        pass
    else:
        import rlcompleter
        readline.parse_and_bind("tab:complete")
    code.interact()
run_shell.args = '[--plain]'
+
def dbshell():
    "Runs the command-line client for the current DATABASE_ENGINE."
    # runshell() is backend-specific: it execs the right client (psql,
    # mysql, sqlite3, ...) for the configured database.
    from django.db import runshell
    runshell()
dbshell.args = ""
+
def runfcgi(args):
    "Runs this project as a FastCGI application. Requires flup."
    from django.conf import settings
    from django.utils import translation
    # Activate the current language, because it won't get activated later.
    try:
        translation.activate(settings.LANGUAGE_CODE)
    except AttributeError:
        # Settings without LANGUAGE_CODE: skip activation rather than fail.
        pass
    from django.core.servers.fastcgi import runfastcgi
    runfastcgi(args)
runfcgi.args = '[various KEY=val options, use `runfcgi help` for help]'
+
def test(app_labels, verbosity=1):
    """
    Runs the test suite for the specified applications, or for every
    installed application when no app labels are given.
    """
    from django.conf import settings
    from django.db.models import get_app, get_apps

    if len(app_labels) == 0:
        app_list = get_apps()
    else:
        app_list = [get_app(app_label) for app_label in app_labels]

    test_path = settings.TEST_RUNNER.split('.')
    # Allow for Python 2.5 relative paths
    if len(test_path) > 1:
        test_module_name = '.'.join(test_path[:-1])
    else:
        test_module_name = '.'
    # Pass the fromlist as a proper list; passing a bare string only worked
    # by accident of __import__ treating any non-empty sequence as truthy.
    test_module = __import__(test_module_name, {}, {}, [test_path[-1]])
    test_runner = getattr(test_module, test_path[-1])

    # The runner returns the number of failures; propagate it as the exit code.
    failures = test_runner(app_list, verbosity)
    if failures:
        sys.exit(failures)

test.help_doc = 'Runs the test suite for the specified applications, or the entire site if no apps are specified'
test.args = '[--verbosity] ' + APP_ARGS
+
+def load_data(fixture_labels, verbosity=1):
+ "Installs the provided fixture file(s) as data in the database."
+ from django.db.models import get_apps
+ from django.core import serializers
+ from django.db import connection, transaction
+ from django.conf import settings
+ import sys
+
+ # Keep a count of the installed objects and fixtures
+ count = [0,0]
+
+ humanize = lambda dirname: dirname and "'%s'" % dirname or 'absolute path'
+
+ # Get a cursor (even though we don't need one yet). This has
+ # the side effect of initializing the test database (if
+ # it isn't already initialized).
+ cursor = connection.cursor()
+
+ # Start transaction management. All fixtures are installed in a
+ # single transaction to ensure that all references are resolved.
+ transaction.commit_unless_managed()
+ transaction.enter_transaction_management()
+ transaction.managed(True)
+
+ app_fixtures = [os.path.join(os.path.dirname(app.__file__),'fixtures') for app in get_apps()]
+ for fixture_label in fixture_labels:
+ if verbosity > 0:
+ print "Loading '%s' fixtures..." % fixture_label
+ for fixture_dir in app_fixtures + list(settings.FIXTURE_DIRS) + ['']:
+ if verbosity > 1:
+ print "Checking %s for fixtures..." % humanize(fixture_dir)
+ parts = fixture_label.split('.')
+ if len(parts) == 1:
+ fixture_name = fixture_label
+ formats = serializers.get_serializer_formats()
+ else:
+ fixture_name, format = '.'.join(parts[:-1]), parts[-1]
+ formats = [format]
+
+ label_found = False
+ for format in formats:
+ serializer = serializers.get_serializer(format)
+ if verbosity > 1:
+ print "Trying %s for %s fixture '%s'..." % \
+ (humanize(fixture_dir), format, fixture_name)
+ try:
+ full_path = os.path.join(fixture_dir, '.'.join([fixture_name, format]))
+ fixture = open(full_path, 'r')
+ if label_found:
+ fixture.close()
+ print style.ERROR("Multiple fixtures named '%s' in %s. Aborting." %
+ (fixture_name, humanize(fixture_dir)))
+ transaction.rollback()
+ transaction.leave_transaction_management()
+ return
+ else:
+ count[1] += 1
+ if verbosity > 0:
+ print "Installing %s fixture '%s' from %s." % \
+ (format, fixture_name, humanize(fixture_dir))
+ try:
+ objects = serializers.deserialize(format, fixture)
+ for obj in objects:
+ count[0] += 1
+ obj.save()
+ label_found = True
+ except Exception, e:
+ fixture.close()
+ sys.stderr.write(
+ style.ERROR("Problem installing fixture '%s': %s\n" %
+ (full_path, str(e))))
+ transaction.rollback()
+ transaction.leave_transaction_management()
+ return
+ fixture.close()
+ except:
+ if verbosity > 1:
+ print "No %s fixture '%s' in %s." % \
+ (format, fixture_name, humanize(fixture_dir))
+ if count[0] == 0:
+ if verbosity > 0:
+ print "No fixtures found."
+ else:
+ if verbosity > 0:
+ print "Installed %d object(s) from %d fixture(s)" % tuple(count)
+ transaction.commit()
+ transaction.leave_transaction_management()
+
+load_data.help_doc = 'Installs the named fixture(s) in the database'
+load_data.args = "[--verbosity] fixture, fixture, ..."
+
+def dump_data(app_labels, format='json', indent=None):
+ "Output the current contents of the database as a fixture of the given format"
+ from django.db.models import get_app, get_apps, get_models
+ from django.core import serializers
+
+ if len(app_labels) == 0:
+ app_list = get_apps()
+ else:
+ app_list = [get_app(app_label) for app_label in app_labels]
+
+ # Check that the serialization format exists; this is a shortcut to
+ # avoid collating all the objects and _then_ failing.
+ try:
+ serializers.get_serializer(format)
+ except KeyError:
+ sys.stderr.write(style.ERROR("Unknown serialization format: %s\n" % format))
+
+ objects = []
+ for app in app_list:
+ for model in get_models(app):
+ objects.extend(model.objects.all())
+ try:
+ return serializers.serialize(format, objects, indent=indent)
+ except Exception, e:
+ sys.stderr.write(style.ERROR("Unable to serialize database: %s\n" % e))
+dump_data.help_doc = 'Output the contents of the database as a fixture of the given format'
+dump_data.args = '[--format]' + APP_ARGS
+
+# Utilities for command-line script
+
# Maps each command-line action name to the function that implements it.
# execute_from_command_line() dispatches through this table; execute_manager()
# copies and tweaks it for manage.py.
DEFAULT_ACTION_MAPPING = {
    'adminindex': get_admin_index,
    'createcachetable' : createcachetable,
    'dbshell': dbshell,
    'diffsettings': diffsettings,
    'dumpdata': dump_data,
    'flush': flush,
    'inspectdb': inspectdb,
    'loaddata': load_data,
    'reset': reset,
    'runfcgi': runfcgi,
    'runserver': runserver,
    'shell': run_shell,
    'sql': get_sql_create,
    'sqlall': get_sql_all,
    'sqlclear': get_sql_delete,
    'sqlcustom': get_custom_sql,
    'sqlflush': get_sql_flush,
    'sqlindexes': get_sql_indexes,
    'sqlinitialdata': get_sql_initial_data,
    'sqlreset': get_sql_reset,
    'sqlsequencereset': get_sql_sequence_reset,
    'startapp': startapp,
    'startproject': startproject,
    'syncdb': syncdb,
    'validate': validate,
    'test':test,
}

# Actions whose output must NOT be wrapped in BEGIN/COMMIT by the generic
# SQL-printing branch of execute_from_command_line().
NO_SQL_TRANSACTION = (
    'adminindex',
    'createcachetable',
    'dbshell',
    'diffsettings',
    'reset',
    'sqlindexes',
    'syncdb',
)
+
class DjangoOptionParser(OptionParser):
    # OptionParser subclass that prints full help to stderr and exits
    # non-zero; used when positional-argument validation fails.
    def print_usage_and_exit(self):
        self.print_help(sys.stderr)
        sys.exit(1)
+
def get_usage(action_mapping):
    """
    Returns a usage string. Doesn't do the options stuff, because optparse
    takes care of that.
    """
    usage = ["%prog action [options]\nactions:"]
    for action in sorted(action_mapping.keys()):
        func = action_mapping[action]
        usage.append(" %s %s" % (action, func.args))
        # Prefer an explicit help_doc attribute; otherwise fall back to the
        # function's own docstring.
        doc = getattr(func, 'help_doc', textwrap.dedent(func.__doc__.strip()))
        usage.extend(textwrap.wrap(doc, initial_indent=' ', subsequent_indent=' '))
        usage.append("")
    # Drop the final empty entry so the string doesn't end with a blank line.
    return '\n'.join(usage[:-1])
+
def print_error(msg, cmd):
    # Write a styled error message plus a pointer to "<cmd> --help",
    # then terminate with a non-zero exit code.
    sys.stderr.write(style.ERROR('Error: %s' % msg) + '\nRun "%s --help" for help.\n' % cmd)
    sys.exit(1)
+
def execute_from_command_line(action_mapping=DEFAULT_ACTION_MAPPING, argv=None):
    """
    Parse argv, validate the positional action, and dispatch to the matching
    action function. This is the entry point behind django-admin.py;
    execute_manager() calls it with a tweaked action_mapping for manage.py.
    """
    # Use sys.argv if we've not passed in a custom argv
    if argv is None:
        argv = sys.argv

    # Parse the command-line arguments. optparse handles the dirty work.
    parser = DjangoOptionParser(usage=get_usage(action_mapping), version=get_version())
    parser.add_option('--settings',
        help='Python path to settings module, e.g. "myproject.settings.main". If this isn\'t provided, the DJANGO_SETTINGS_MODULE environment variable will be used.')
    parser.add_option('--pythonpath',
        help='Lets you manually add a directory the Python path, e.g. "/home/djangoprojects/myproject".')
    parser.add_option('--plain', action='store_true', dest='plain',
        help='Tells Django to use plain Python, not IPython, for "shell" command.')
    parser.add_option('--noinput', action='store_false', dest='interactive', default=True,
        help='Tells Django to NOT prompt the user for input of any kind.')
    parser.add_option('--noreload', action='store_false', dest='use_reloader', default=True,
        help='Tells Django to NOT use the auto-reloader when running the development server.')
    parser.add_option('--format', default='json', dest='format',
        help='Specifies the output serialization format for fixtures')
    parser.add_option('--indent', default=None, dest='indent',
        type='int', help='Specifies the indent level to use when pretty-printing output')
    # NOTE(review): the trailing commas on the next two statements turn them
    # into 1-tuples. Harmless at runtime, but they should be removed.
    parser.add_option('--verbosity', action='store', dest='verbosity', default='1',
        type='choice', choices=['0', '1', '2'],
        help='Verbosity level; 0=minimal output, 1=normal output, 2=all output'),
    parser.add_option('--adminmedia', dest='admin_media_path', default='', help='Specifies the directory from which to serve admin media for runserver.'),

    options, args = parser.parse_args(argv[1:])

    # Take care of options.
    if options.settings:
        os.environ['DJANGO_SETTINGS_MODULE'] = options.settings
    if options.pythonpath:
        sys.path.insert(0, options.pythonpath)

    # Run the appropriate action. Unfortunately, optparse can't handle
    # positional arguments, so this has to parse/validate them.
    try:
        action = args[0]
    except IndexError:
        parser.print_usage_and_exit()
    if not action_mapping.has_key(action):
        print_error("Your action, %r, was invalid." % action, argv[0])

    # Switch to English, because django-admin.py creates database content
    # like permissions, and those shouldn't contain any translations.
    # But only do this if we should have a working settings file.
    if action not in ('startproject', 'startapp'):
        from django.utils import translation
        translation.activate('en-us')

    # Dispatch: each branch knows which positional args and options its
    # action function expects.
    if action == 'shell':
        action_mapping[action](options.plain is True)
    elif action in ('validate', 'diffsettings', 'dbshell'):
        action_mapping[action]()
    elif action in ('flush', 'syncdb'):
        action_mapping[action](int(options.verbosity), options.interactive)
    elif action == 'inspectdb':
        try:
            for line in action_mapping[action]():
                print line
        except NotImplementedError:
            sys.stderr.write(style.ERROR("Error: %r isn't supported for the currently selected database backend.\n" % action))
            sys.exit(1)
    elif action == 'createcachetable':
        try:
            action_mapping[action](args[1])
        except IndexError:
            parser.print_usage_and_exit()
    elif action in ('test', 'loaddata'):
        try:
            action_mapping[action](args[1:], int(options.verbosity))
        except IndexError:
            parser.print_usage_and_exit()
    elif action == 'dumpdata':
        try:
            print action_mapping[action](args[1:], options.format, options.indent)
        except IndexError:
            parser.print_usage_and_exit()
    elif action in ('startapp', 'startproject'):
        try:
            name = args[1]
        except IndexError:
            parser.print_usage_and_exit()
        action_mapping[action](name, os.getcwd())
    elif action == 'runserver':
        if len(args) < 2:
            addr = ''
            port = '8000'
        else:
            # "ipaddr:port" or just "port".
            try:
                addr, port = args[1].split(':')
            except ValueError:
                addr, port = '', args[1]
        action_mapping[action](addr, port, options.use_reloader, options.admin_media_path)
    elif action == 'runfcgi':
        action_mapping[action](args[1:])
    elif action == 'sqlinitialdata':
        print action_mapping[action](args[1:])
    elif action == 'sqlflush':
        print '\n'.join(action_mapping[action]())
    else:
        # Remaining actions are per-app SQL commands: they take app labels
        # and print SQL, wrapped in BEGIN/COMMIT unless exempted.
        from django.db import models
        validate(silent_success=True)
        try:
            mod_list = [models.get_app(app_label) for app_label in args[1:]]
        except ImportError, e:
            sys.stderr.write(style.ERROR("Error: %s. Are you sure your INSTALLED_APPS setting is correct?\n" % e))
            sys.exit(1)
        if not mod_list:
            parser.print_usage_and_exit()
        if action not in NO_SQL_TRANSACTION:
            print style.SQL_KEYWORD("BEGIN;")
        for mod in mod_list:
            if action == 'reset':
                output = action_mapping[action](mod, options.interactive)
            else:
                output = action_mapping[action](mod)
            if output:
                print '\n'.join(output)
        if action not in NO_SQL_TRANSACTION:
            print style.SQL_KEYWORD("COMMIT;")
+
def setup_environ(settings_mod):
    """
    Configure the runtime environment. This can also be used by external
    scripts wanting to set up a similar environment to manage.py.
    Returns the project directory (the directory containing settings_mod).
    """
    # Add this project to sys.path so that it's importable in the conventional
    # way. For example, if this file (manage.py) lives in a directory
    # "myproject", this code would add "/path/to/myproject" to sys.path.
    project_directory = os.path.dirname(settings_mod.__file__)
    project_name = os.path.basename(project_directory)
    sys.path.append(os.path.join(project_directory, '..'))
    # Import the project package to verify it's importable, then remove the
    # temporary sys.path entry again.
    project_module = __import__(project_name, {}, {}, [''])
    sys.path.pop()

    # Set DJANGO_SETTINGS_MODULE appropriately.
    os.environ['DJANGO_SETTINGS_MODULE'] = '%s.settings' % project_name
    return project_directory
+
def execute_manager(settings_mod, argv=None):
    """
    Entry point for a project's manage.py: set up the environment for
    settings_mod, adapt the default action table, and dispatch argv.
    """
    project_directory = setup_environ(settings_mod)
    action_mapping = DEFAULT_ACTION_MAPPING.copy()

    # Remove the "startproject" command from the action_mapping, because that's
    # a django-admin.py command, not a manage.py command.
    del action_mapping['startproject']

    # Override the startapp handler so that it always uses the
    # project_directory, not the current working directory (which is default).
    action_mapping['startapp'] = lambda app_name, directory: startapp(app_name, project_directory)
    action_mapping['startapp'].__doc__ = startapp.__doc__
    action_mapping['startapp'].help_doc = startapp.help_doc
    action_mapping['startapp'].args = startapp.args

    # Run the django-admin.py command.
    execute_from_command_line(action_mapping, argv)
diff --git a/google_appengine/lib/django/django/core/paginator.py b/google_appengine/lib/django/django/core/paginator.py
new file mode 100755
index 0000000..380808a
--- /dev/null
+++ b/google_appengine/lib/django/django/core/paginator.py
@@ -0,0 +1,88 @@
class InvalidPage(Exception):
    "Raised by ObjectPaginator when a nonexistent or invalid page is requested."
    pass

class ObjectPaginator(object):
    """
    This class makes pagination easy. Feed it a QuerySet or list, plus the number
    of objects you want on each page. Then read the hits and pages properties to
    see how many pages it involves. Call get_page with a page number (starting
    at 0) to get back a list of objects for that page.

    Finally, check if a page number has a next/prev page using
    has_next_page(page_number) and has_previous_page(page_number).

    Use orphans to avoid small final pages. For example:
    13 records, num_per_page=10, orphans=2 --> pages==2, len(self.get_page(0))==10
    12 records, num_per_page=10, orphans=2 --> pages==1, len(self.get_page(0))==12
    """
    def __init__(self, query_set, num_per_page, orphans=0):
        self.query_set = query_set
        self.num_per_page = num_per_page
        self.orphans = orphans
        # Both are computed lazily by the hits/pages properties below.
        self._hits = self._pages = None

    def validate_page_number(self, page_number):
        """
        Coerce page_number to an int and check it's in range; raises
        InvalidPage otherwise. Returns the validated integer.
        """
        try:
            page_number = int(page_number)
        except (TypeError, ValueError):
            # TypeError covers non-numeric values such as None, which
            # previously leaked out instead of raising InvalidPage.
            raise InvalidPage
        if page_number < 0 or page_number > self.pages - 1:
            raise InvalidPage
        return page_number

    def get_page(self, page_number):
        """Return the list of objects on the given 0-based page."""
        page_number = self.validate_page_number(page_number)
        bottom = page_number * self.num_per_page
        top = bottom + self.num_per_page
        # Fold trailing orphans into the last page.
        if top + self.orphans >= self.hits:
            top = self.hits
        return self.query_set[bottom:top]

    def has_next_page(self, page_number):
        "Does page $page_number have a 'next' page?"
        return page_number < self.pages - 1

    def has_previous_page(self, page_number):
        "Does page $page_number have a 'previous' page?"
        return page_number > 0

    def first_on_page(self, page_number):
        """
        Returns the 1-based index of the first object on the given page,
        relative to total objects found (hits).
        """
        page_number = self.validate_page_number(page_number)
        return (self.num_per_page * page_number) + 1

    def last_on_page(self, page_number):
        """
        Returns the 1-based index of the last object on the given page,
        relative to total objects found (hits).
        """
        page_number = self.validate_page_number(page_number)
        page_number += 1 # 1-base
        if page_number == self.pages:
            return self.hits
        return page_number * self.num_per_page

    def _get_hits(self):
        if self._hits is None:
            # Try .count() or fall back to len().
            try:
                self._hits = int(self.query_set.count())
            except (AttributeError, TypeError, ValueError):
                # AttributeError if query_set has no object count.
                # TypeError if query_set.count() required arguments.
                # ValueError if int() fails.
                self._hits = len(self.query_set)
        return self._hits

    def _get_pages(self):
        if self._pages is None:
            # Objects beyond the first page, minus orphans absorbed into it.
            hits = (self.hits - 1 - self.orphans)
            if hits < 1:
                hits = 0
            self._pages = hits // self.num_per_page + 1
        return self._pages

    hits = property(_get_hits)
    pages = property(_get_pages)
diff --git a/google_appengine/lib/django/django/core/serializers/__init__.py b/google_appengine/lib/django/django/core/serializers/__init__.py
new file mode 100755
index 0000000..494393f
--- /dev/null
+++ b/google_appengine/lib/django/django/core/serializers/__init__.py
@@ -0,0 +1,90 @@
+"""
+Interfaces for serializing Django objects.
+
+Usage::
+
+ >>> from django.core import serializers
+ >>> json = serializers.serialize("json", some_query_set)
+ >>> objects = list(serializers.deserialize("json", json))
+
+To add your own serializers, use the SERIALIZATION_MODULES setting::
+
+ SERIALIZATION_MODULES = {
+ "csv" : "path.to.csv.serializer",
+ "txt" : "path.to.txt.serializer",
+ }
+
+"""
+
+from django.conf import settings
+
+# Built-in serializers
BUILTIN_SERIALIZERS = {
    "xml" : "django.core.serializers.xml_serializer",
    "python" : "django.core.serializers.python",
    "json" : "django.core.serializers.json",
}

# Check for PyYaml and register the serializer if it's available.
try:
    import yaml
    BUILTIN_SERIALIZERS["yaml"] = "django.core.serializers.pyyaml"
except ImportError:
    pass

# Maps format name -> serializer module; populated lazily by _load_serializers().
_serializers = {}
+
def register_serializer(format, serializer_module):
    """Register a new serializer by passing in a module name."""
    # The non-empty fromlist makes __import__ return the leaf submodule
    # itself rather than the top-level package.
    module = __import__(serializer_module, {}, {}, [''])
    _serializers[format] = module
+
def unregister_serializer(format):
    """Remove a previously registered serializer (KeyError if unknown)."""
    _serializers.pop(format)
+
def get_serializer(format):
    """Return the Serializer class for *format*; raises KeyError if unknown."""
    if not _serializers:
        _load_serializers()
    return _serializers[format].Serializer
+
def get_serializer_formats():
    """Return the names of all registered serialization formats."""
    if not _serializers:
        _load_serializers()
    return _serializers.keys()
+
def get_deserializer(format):
    """Return the Deserializer for *format*; raises KeyError if unknown."""
    if not _serializers:
        _load_serializers()
    return _serializers[format].Deserializer
+
def serialize(format, queryset, **options):
    """
    Serialize a queryset (or any iterator that returns database objects) using
    a certain serializer.
    """
    serializer = get_serializer(format)()
    serializer.serialize(queryset, **options)
    return serializer.getvalue()
+
def deserialize(format, stream_or_string):
    """
    Deserialize a stream or a string. Returns an iterator that yields ``(obj,
    m2m_relation_dict)``, where ``obj`` is an instantiated -- but *unsaved* --
    object, and ``m2m_relation_dict`` is a dictionary of ``{m2m_field_name :
    list_of_related_objects}``.
    """
    return get_deserializer(format)(stream_or_string)
+
def _load_serializers():
    """
    Register built-in and settings-defined serializers. This is done lazily so
    that user code has a chance to (e.g.) set up custom settings without
    needing to be careful of import order.
    """
    for format, module_name in BUILTIN_SERIALIZERS.items():
        register_serializer(format, module_name)
    # User-supplied serializers override/extend the built-ins.
    for format, module_name in getattr(settings, "SERIALIZATION_MODULES", {}).items():
        register_serializer(format, module_name)
diff --git a/google_appengine/lib/django/django/core/serializers/base.py b/google_appengine/lib/django/django/core/serializers/base.py
new file mode 100755
index 0000000..8e610ad
--- /dev/null
+++ b/google_appengine/lib/django/django/core/serializers/base.py
@@ -0,0 +1,165 @@
+"""
+Module for abstract serializer/unserializer base classes.
+"""
+
+try:
+ from cStringIO import StringIO
+except ImportError:
+ from StringIO import StringIO
+from django.db import models
+
class SerializationError(Exception):
    """Something bad happened during serialization (e.g. a non-model object)."""
    pass
+
class DeserializationError(Exception):
    """Something bad happened during deserialization (e.g. a bad model id)."""
    pass
+
class Serializer(object):
    """
    Abstract serializer base class.

    serialize() drives the iteration over objects and their fields;
    subclasses implement the start_*/end_*/handle_* hooks below.
    """

    def serialize(self, queryset, **options):
        """
        Serialize a queryset.

        Recognized options: "stream" (defaults to a fresh StringIO) and
        "fields" (optional whitelist of field names to serialize).
        """
        self.options = options

        self.stream = options.get("stream", StringIO())
        self.selected_fields = options.get("fields")

        self.start_serialization()
        for obj in queryset:
            self.start_object(obj)
            for field in obj._meta.fields:
                if field.serialize:
                    if field.rel is None:
                        if self.selected_fields is None or field.attname in self.selected_fields:
                            self.handle_field(obj, field)
                    else:
                        # NOTE(review): [:-3] presumably strips an "_id"
                        # suffix from FK attnames so the "fields" option can
                        # use the plain field name -- confirm against Field.
                        if self.selected_fields is None or field.attname[:-3] in self.selected_fields:
                            self.handle_fk_field(obj, field)
            for field in obj._meta.many_to_many:
                if field.serialize:
                    if self.selected_fields is None or field.attname in self.selected_fields:
                        self.handle_m2m_field(obj, field)
            self.end_object(obj)
        self.end_serialization()
        return self.getvalue()

    def get_string_value(self, obj, field):
        """
        Convert a field's value to a string.
        """
        if isinstance(field, models.DateTimeField):
            value = getattr(obj, field.name).strftime("%Y-%m-%d %H:%M:%S")
        elif isinstance(field, models.FileField):
            # FileFields serialize as their URL; None when the accessor is absent.
            value = getattr(obj, "get_%s_url" % field.name, lambda: None)()
        else:
            value = field.flatten_data(follow=None, obj=obj).get(field.name, "")
        return str(value)

    def start_serialization(self):
        """
        Called when serializing of the queryset starts.
        """
        raise NotImplementedError

    def end_serialization(self):
        """
        Called when serializing of the queryset ends.
        """
        pass

    def start_object(self, obj):
        """
        Called when serializing of an object starts.
        """
        raise NotImplementedError

    def end_object(self, obj):
        """
        Called when serializing of an object ends.
        """
        pass

    def handle_field(self, obj, field):
        """
        Called to handle each individual (non-relational) field on an object.
        """
        raise NotImplementedError

    def handle_fk_field(self, obj, field):
        """
        Called to handle a ForeignKey field.
        """
        raise NotImplementedError

    def handle_m2m_field(self, obj, field):
        """
        Called to handle a ManyToManyField.
        """
        raise NotImplementedError

    def getvalue(self):
        """
        Return the fully serialized queryset.
        """
        return self.stream.getvalue()
+
class Deserializer(object):
    """
    Abstract base deserializer class.

    Instances are iterators; subclasses implement next() to yield the
    deserialized items one at a time.
    """

    def __init__(self, stream_or_string, **options):
        """
        Init this serializer given a stream or a string
        """
        self.options = options
        # Strings are wrapped so subclasses can always read self.stream.
        if isinstance(stream_or_string, basestring):
            self.stream = StringIO(stream_or_string)
        else:
            self.stream = stream_or_string
        # hack to make sure that the models have all been loaded before
        # deserialization starts (otherwise subclass calls to get_model()
        # and friends might fail...)
        models.get_apps()

    def __iter__(self):
        return self

    def next(self):
        """Iteration iterface -- return the next item in the stream"""
        raise NotImplementedError
+
class DeserializedObject(object):
    """
    A deserialized model.

    Wraps a not-yet-saved model instance together with any many-to-many
    data that was deserialized alongside it.

    Call ``save()`` to write the object (plus the many-to-many data) to the
    database; ``save(save_m2m=False)`` writes only the regular fields.
    """

    def __init__(self, obj, m2m_data=None):
        self.object = obj
        self.m2m_data = m2m_data

    def __repr__(self):
        return "<DeserializedObject: %s>" % str(self.object)

    def save(self, save_m2m=True):
        self.object.save()
        if save_m2m and self.m2m_data:
            for accessor_name in self.m2m_data:
                setattr(self.object, accessor_name, self.m2m_data[accessor_name])

        # prevent a second (possibly accidental) call to save() from saving
        # the m2m data twice.
        self.m2m_data = None
diff --git a/google_appengine/lib/django/django/core/serializers/json.py b/google_appengine/lib/django/django/core/serializers/json.py
new file mode 100755
index 0000000..15770f1
--- /dev/null
+++ b/google_appengine/lib/django/django/core/serializers/json.py
@@ -0,0 +1,51 @@
+"""
+Serialize data to/from JSON
+"""
+
+import datetime
+from django.utils import simplejson
+from django.core.serializers.python import Serializer as PythonSerializer
+from django.core.serializers.python import Deserializer as PythonDeserializer
+try:
+ from cStringIO import StringIO
+except ImportError:
+ from StringIO import StringIO
+
class Serializer(PythonSerializer):
    """
    Convert a queryset to JSON.
    """
    def end_serialization(self):
        # self.objects is the plain-Python structure built by the parent
        # PythonSerializer; dump it as JSON once iteration is complete.
        simplejson.dump(self.objects, self.stream, cls=DateTimeAwareJSONEncoder, **self.options)

    def getvalue(self):
        return self.stream.getvalue()
+
def Deserializer(stream_or_string, **options):
    """
    Deserialize a stream or string of JSON data.
    Generator yielding DeserializedObject instances via PythonDeserializer.
    """
    if isinstance(stream_or_string, basestring):
        stream = StringIO(stream_or_string)
    else:
        stream = stream_or_string
    for obj in PythonDeserializer(simplejson.load(stream)):
        yield obj
+
class DateTimeAwareJSONEncoder(simplejson.JSONEncoder):
    """
    JSONEncoder subclass that knows how to encode date/time types
    (as strftime-formatted strings; everything else defers to the base class).
    """

    DATE_FORMAT = "%Y-%m-%d"
    TIME_FORMAT = "%H:%M:%S"

    def default(self, o):
        # Check datetime before date: datetime is a date subclass.
        if isinstance(o, datetime.datetime):
            return o.strftime("%s %s" % (self.DATE_FORMAT, self.TIME_FORMAT))
        elif isinstance(o, datetime.date):
            return o.strftime(self.DATE_FORMAT)
        elif isinstance(o, datetime.time):
            return o.strftime(self.TIME_FORMAT)
        else:
            return super(DateTimeAwareJSONEncoder, self).default(o)
diff --git a/google_appengine/lib/django/django/core/serializers/python.py b/google_appengine/lib/django/django/core/serializers/python.py
new file mode 100755
index 0000000..29ce6bf
--- /dev/null
+++ b/google_appengine/lib/django/django/core/serializers/python.py
@@ -0,0 +1,101 @@
+"""
+A Python "serializer". Doesn't do much serializing per se -- just converts to
+and from basic Python data types (lists, dicts, strings, etc.). Useful as a basis for
+other serializers.
+"""
+
+from django.conf import settings
+from django.core.serializers import base
+from django.db import models
+
class Serializer(base.Serializer):
    """
    Serializes a QuerySet to basic Python objects: a list of
    {"model", "pk", "fields"} dicts, one per object.
    """

    def start_serialization(self):
        self._current = None
        self.objects = []

    def end_serialization(self):
        pass

    def start_object(self, obj):
        # Accumulates this object's field values until end_object().
        self._current = {}

    def end_object(self, obj):
        self.objects.append({
            "model"  : str(obj._meta),
            "pk"     : str(obj._get_pk_val()),
            "fields" : self._current
        })
        self._current = None

    def handle_field(self, obj, field):
        self._current[field.name] = getattr(obj, field.name)

    def handle_fk_field(self, obj, field):
        # Foreign keys are represented by the related object's pk (or None).
        related = getattr(obj, field.name)
        if related is not None:
            related = related._get_pk_val()
        self._current[field.name] = related

    def handle_m2m_field(self, obj, field):
        # M2M relations become a list of related pks.
        self._current[field.name] = [related._get_pk_val() for related in getattr(obj, field.name).iterator()]

    def getvalue(self):
        return self.objects
+
def Deserializer(object_list, **options):
    """
    Deserialize simple Python objects back into Django ORM instances.

    It's expected that you pass the Python objects themselves (instead of a
    stream or a string) to the constructor. Yields DeserializedObject
    instances; recognizes an "encoding" option (default: settings.DEFAULT_CHARSET).
    """
    # Make sure all models are loaded before get_model()/get_field() calls.
    models.get_apps()
    for d in object_list:
        # Look up the model and starting build a dict of data for it.
        Model = _get_model(d["model"])
        data = {Model._meta.pk.attname : Model._meta.pk.to_python(d["pk"])}
        m2m_data = {}

        # Handle each field
        for (field_name, field_value) in d["fields"].iteritems():
            # Encode unicode values to bytestrings before to_python().
            if isinstance(field_value, unicode):
                field_value = field_value.encode(options.get("encoding", settings.DEFAULT_CHARSET))

            field = Model._meta.get_field(field_name)

            # Handle M2M relations: convert each pk via the related model's
            # pk field; the actual relation is saved later by
            # DeserializedObject.save().
            if field.rel and isinstance(field.rel, models.ManyToManyRel):
                pks = []
                m2m_convert = field.rel.to._meta.pk.to_python
                for pk in field_value:
                    if isinstance(pk, unicode):
                        pks.append(m2m_convert(pk.encode(options.get("encoding", settings.DEFAULT_CHARSET))))
                    else:
                        pks.append(m2m_convert(pk))
                m2m_data[field.name] = pks

            # Handle FK fields
            elif field.rel and isinstance(field.rel, models.ManyToOneRel):
                data[field.attname] = field.rel.to._meta.pk.to_python(field_value)

            # Handle all other fields
            else:
                data[field.name] = field.to_python(field_value)

        yield base.DeserializedObject(Model(**data), m2m_data)
+
def _get_model(model_identifier):
    """
    Helper to look up a model from an "app_label.module_name" string.
    Raises DeserializationError for malformed or unknown identifiers.
    """
    try:
        # get_model takes (app_label, model_name); a malformed identifier
        # yields the wrong argument count, hence the TypeError guard.
        Model = models.get_model(*model_identifier.split("."))
    except TypeError:
        Model = None
    if Model is None:
        raise base.DeserializationError("Invalid model identifier: '%s'" % model_identifier)
    return Model
diff --git a/google_appengine/lib/django/django/core/serializers/pyyaml.py b/google_appengine/lib/django/django/core/serializers/pyyaml.py
new file mode 100755
index 0000000..fa3dec9
--- /dev/null
+++ b/google_appengine/lib/django/django/core/serializers/pyyaml.py
@@ -0,0 +1,36 @@
+"""
+YAML serializer.
+
+Requires PyYaml (http://pyyaml.org/), but that's checked for in __init__.
+"""
+
+import datetime
+from django.core.serializers.python import Serializer as PythonSerializer
+from django.core.serializers.python import Deserializer as PythonDeserializer
+try:
+ from cStringIO import StringIO
+except ImportError:
+ from StringIO import StringIO
+import yaml
+
class Serializer(PythonSerializer):
    """
    Convert a queryset to YAML.
    """
    def end_serialization(self):
        # self.objects is the plain-Python structure built by the parent
        # PythonSerializer; dump it as YAML once iteration is complete.
        yaml.dump(self.objects, self.stream, **self.options)

    def getvalue(self):
        return self.stream.getvalue()
+
def Deserializer(stream_or_string, **options):
    """
    Deserialize a stream or string of YAML data.
    Generator yielding DeserializedObject instances via PythonDeserializer.
    """
    if isinstance(stream_or_string, basestring):
        stream = StringIO(stream_or_string)
    else:
        stream = stream_or_string
    # NOTE(review): yaml.load can execute arbitrary Python for untrusted
    # input; prefer yaml.safe_load if fixtures may come from untrusted sources.
    for obj in PythonDeserializer(yaml.load(stream)):
        yield obj
+
diff --git a/google_appengine/lib/django/django/core/serializers/xml_serializer.py b/google_appengine/lib/django/django/core/serializers/xml_serializer.py
new file mode 100755
index 0000000..3a0fdb5
--- /dev/null
+++ b/google_appengine/lib/django/django/core/serializers/xml_serializer.py
@@ -0,0 +1,229 @@
+"""
+XML serializer.
+"""
+
+from django.conf import settings
+from django.core.serializers import base
+from django.db import models
+from django.utils.xmlutils import SimplerXMLGenerator
+from xml.dom import pulldom
+
+class Serializer(base.Serializer):
+    """
+    Serializes a QuerySet to XML.
+    """
+
+    def indent(self, level):
+        # Emit a newline plus (indent * level) spaces, but only when the
+        # caller asked for pretty-printing via the "indent" option.
+        if self.options.get('indent', None) is not None:
+            self.xml.ignorableWhitespace('\n' + ' ' * self.options.get('indent', None) * level)
+
+    def start_serialization(self):
+        """
+        Start serialization -- open the XML document and the root element.
+        """
+        self.xml = SimplerXMLGenerator(self.stream, self.options.get("encoding", settings.DEFAULT_CHARSET))
+        self.xml.startDocument()
+        self.xml.startElement("django-objects", {"version" : "1.0"})
+
+    def end_serialization(self):
+        """
+        End serialization -- end the document.
+        """
+        self.indent(0)
+        self.xml.endElement("django-objects")
+        self.xml.endDocument()
+
+    def start_object(self, obj):
+        """
+        Called as each object is handled.
+        """
+        # Anything without _meta can't be a Django model instance.
+        if not hasattr(obj, "_meta"):
+            raise base.SerializationError("Non-model object (%s) encountered during serialization" % type(obj))
+
+        self.indent(1)
+        self.xml.startElement("object", {
+            "pk" : str(obj._get_pk_val()),
+            "model" : str(obj._meta),
+        })
+
+    def end_object(self, obj):
+        """
+        Called after handling all fields for an object.
+        """
+        self.indent(1)
+        self.xml.endElement("object")
+
+    def handle_field(self, obj, field):
+        """
+        Called to handle each field on an object (except for ForeignKeys and
+        ManyToManyFields)
+        """
+        self.indent(2)
+        self.xml.startElement("field", {
+            "name" : field.name,
+            "type" : field.get_internal_type()
+        })
+
+        # Get a "string version" of the object's data (this is handled by the
+        # serializer base class).  A NULL value is encoded as an empty
+        # <None/> child element rather than as text.
+        if getattr(obj, field.name) is not None:
+            value = self.get_string_value(obj, field)
+            self.xml.characters(str(value))
+        else:
+            self.xml.addQuickElement("None")
+
+        self.xml.endElement("field")
+
+    def handle_fk_field(self, obj, field):
+        """
+        Called to handle a ForeignKey (we need to treat them slightly
+        differently from regular fields).
+        """
+        # Only the related object's PK is written, not its data.
+        self._start_relational_field(field)
+        related = getattr(obj, field.name)
+        if related is not None:
+            self.xml.characters(str(related._get_pk_val()))
+        else:
+            self.xml.addQuickElement("None")
+        self.xml.endElement("field")
+
+    def handle_m2m_field(self, obj, field):
+        """
+        Called to handle a ManyToManyField. Related objects are only
+        serialized as references to the object's PK (i.e. the related *data*
+        is not dumped, just the relation).
+        """
+        self._start_relational_field(field)
+        for relobj in getattr(obj, field.name).iterator():
+            self.xml.addQuickElement("object", attrs={"pk" : str(relobj._get_pk_val())})
+        self.xml.endElement("field")
+
+    def _start_relational_field(self, field):
+        """
+        Helper to output the <field> element for relational fields
+        """
+        self.indent(2)
+        self.xml.startElement("field", {
+            "name" : field.name,
+            "rel" : field.rel.__class__.__name__,
+            "to" : str(field.rel.to._meta),
+        })
+
+class Deserializer(base.Deserializer):
+    """
+    Deserialize XML.
+    """
+
+    def __init__(self, stream_or_string, **options):
+        super(Deserializer, self).__init__(stream_or_string, **options)
+        self.encoding = self.options.get("encoding", settings.DEFAULT_CHARSET)
+        # pulldom gives us a lazy event stream so large documents aren't
+        # fully loaded; each <object> subtree is expanded on demand in next().
+        self.event_stream = pulldom.parse(self.stream)
+
+    def next(self):
+        # Iterator protocol (Python 2): scan forward to the next <object>
+        # element, expand it into a DOM subtree, and deserialize it.
+        for event, node in self.event_stream:
+            if event == "START_ELEMENT" and node.nodeName == "object":
+                self.event_stream.expandNode(node)
+                return self._handle_object(node)
+        raise StopIteration
+
+    def _handle_object(self, node):
+        """
+        Convert an <object> node to a DeserializedObject.
+        """
+        # Look up the model using the model loading mechanism. If this fails, bail.
+        Model = self._get_model_from_node(node, "model")
+
+        # Start building a data dictionary from the object. If the node is
+        # missing the pk attribute, bail.
+        pk = node.getAttribute("pk")
+        if not pk:
+            raise base.DeserializationError("<object> node is missing the 'pk' attribute")
+
+        data = {Model._meta.pk.attname : Model._meta.pk.to_python(pk)}
+
+        # Also start building a dict of m2m data (this is saved as
+        # {m2m_accessor_attribute : [list_of_related_objects]})
+        m2m_data = {}
+
+        # Deseralize each field.
+        for field_node in node.getElementsByTagName("field"):
+            # If the field is missing the name attribute, bail (are you
+            # sensing a pattern here?)
+            field_name = field_node.getAttribute("name")
+            if not field_name:
+                raise base.DeserializationError("<field> node is missing the 'name' attribute")
+
+            # Get the field from the Model. This will raise a
+            # FieldDoesNotExist if, well, the field doesn't exist, which will
+            # be propagated correctly.
+            field = Model._meta.get_field(field_name)
+
+            # As is usually the case, relation fields get the special treatment.
+            if field.rel and isinstance(field.rel, models.ManyToManyRel):
+                m2m_data[field.name] = self._handle_m2m_field_node(field_node, field)
+            elif field.rel and isinstance(field.rel, models.ManyToOneRel):
+                # FK values are stored under attname (e.g. "author_id").
+                data[field.attname] = self._handle_fk_field_node(field_node, field)
+            else:
+                # A single <None/> child marks a NULL value.
+                if len(field_node.childNodes) == 1 and field_node.childNodes[0].nodeName == 'None':
+                    value = None
+                else:
+                    value = field.to_python(getInnerText(field_node).strip().encode(self.encoding))
+                data[field.name] = value
+
+        # Return a DeserializedObject so that the m2m data has a place to live.
+        return base.DeserializedObject(Model(**data), m2m_data)
+
+    def _handle_fk_field_node(self, node, field):
+        """
+        Handle a <field> node for a ForeignKey
+        """
+        # Check if there is a child node named 'None', returning None if so.
+        if len(node.childNodes) == 1 and node.childNodes[0].nodeName == 'None':
+            return None
+        else:
+            # Coerce the text through the *related* model's PK field.
+            return field.rel.to._meta.pk.to_python(
+                       getInnerText(node).strip().encode(self.encoding))
+
+    def _handle_m2m_field_node(self, node, field):
+        """
+        Handle a <field> node for a ManyToManyField
+        """
+        return [field.rel.to._meta.pk.to_python(
+                    c.getAttribute("pk").encode(self.encoding))
+                    for c in node.getElementsByTagName("object")]
+
+    def _get_model_from_node(self, node, attr):
+        """
+        Helper to look up a model from a <object model=...> or a <field
+        rel=... to=...> node.
+        """
+        model_identifier = node.getAttribute(attr)
+        if not model_identifier:
+            raise base.DeserializationError(
+                "<%s> node is missing the required '%s' attribute" \
+                    % (node.nodeName, attr))
+        # get_model raises TypeError when the identifier has no dot.
+        try:
+            Model = models.get_model(*model_identifier.split("."))
+        except TypeError:
+            Model = None
+        if Model is None:
+            raise base.DeserializationError(
+                "<%s> node has invalid model identifier: '%s'" % \
+                    (node.nodeName, model_identifier))
+        return Model
+
+
+def getInnerText(node):
+    """
+    Get all the inner text of a DOM node (recursively).
+    """
+    # inspired by http://mail.python.org/pipermail/xml-sig/2005-March/011022.html
+    inner_text = []
+    for child in node.childNodes:
+        # Collect raw character data directly; recurse into child elements;
+        # ignore anything else (comments, processing instructions, ...).
+        if child.nodeType == child.TEXT_NODE or child.nodeType == child.CDATA_SECTION_NODE:
+            inner_text.append(child.data)
+        elif child.nodeType == child.ELEMENT_NODE:
+            inner_text.extend(getInnerText(child))
+        else:
+           pass
+    return "".join(inner_text) \ No newline at end of file
diff --git a/google_appengine/lib/django/django/core/servers/__init__.py b/google_appengine/lib/django/django/core/servers/__init__.py
new file mode 100755
index 0000000..e69de29
--- /dev/null
+++ b/google_appengine/lib/django/django/core/servers/__init__.py
diff --git a/google_appengine/lib/django/django/core/servers/basehttp.py b/google_appengine/lib/django/django/core/servers/basehttp.py
new file mode 100755
index 0000000..27051d4
--- /dev/null
+++ b/google_appengine/lib/django/django/core/servers/basehttp.py
@@ -0,0 +1,664 @@
+"""
+BaseHTTPServer that implements the Python WSGI protocol (PEP 333, rev 1.21).
+
+Adapted from wsgiref.simple_server: http://svn.eby-sarna.com/wsgiref/
+
+This is a simple server for use in testing or debugging Django apps. It hasn't
+been reviewed for security issues. Don't use it for production use.
+"""
+
+from BaseHTTPServer import BaseHTTPRequestHandler, HTTPServer
+from types import ListType, StringType
+import os, re, sys, time, urllib
+
+from django.utils._os import safe_join
+
+__version__ = "0.1"
+__all__ = ['WSGIServer','WSGIRequestHandler','demo_app']
+
+# Server identification strings, combined into the default SERVER_SOFTWARE
+# value used by ServerHandler below.
+server_version = "WSGIServer/" + __version__
+sys_version = "Python/" + sys.version.split()[0]
+software_version = server_version + ' ' + sys_version
+
+class WSGIServerException(Exception):
+    # Raised by WSGIServer.server_bind to wrap any error that occurs while
+    # binding the listening socket.
+    pass
+
+class FileWrapper(object):
+    """Wrapper to convert file-like objects to iterables"""
+
+    def __init__(self, filelike, blksize=8192):
+        self.filelike = filelike
+        self.blksize = blksize
+        # Expose the wrapped object's close() so the server can release it.
+        if hasattr(filelike,'close'):
+            self.close = filelike.close
+
+    def __getitem__(self,key):
+        # Sequence protocol support for pre-iterator consumers; the key is
+        # ignored -- each access just reads the next block.
+        data = self.filelike.read(self.blksize)
+        if data:
+            return data
+        raise IndexError
+
+    def __iter__(self):
+        return self
+
+    def next(self):
+        # Python 2 iterator protocol: yield fixed-size blocks until EOF.
+        data = self.filelike.read(self.blksize)
+        if data:
+            return data
+        raise StopIteration
+
+# Regular expression that matches `special' characters in parameters, the
+# existence of which force quoting of the parameter value.
+tspecials = re.compile(r'[ \(\)<>@,;:\\"/\[\]\?=]')
+
+def _formatparam(param, value=None, quote=1):
+    """Convenience function to format and return a key=value pair.
+
+    This will quote the value if needed or if quote is true.
+    """
+    # With the default quote=1 the value is always quoted; pass quote=0 to
+    # quote only when tspecials characters are present.
+    if value is not None and len(value) > 0:
+        if quote or tspecials.search(value):
+            value = value.replace('\\', '\\\\').replace('"', r'\"')
+            return '%s="%s"' % (param, value)
+        else:
+            return '%s=%s' % (param, value)
+    else:
+        # No value: emit the bare parameter name.
+        return param
+
+class Headers(object):
+    """Manage a collection of HTTP response headers"""
+    def __init__(self,headers):
+        if type(headers) is not ListType:
+            raise TypeError("Headers must be a list of name/value tuples")
+        # The list is stored by reference, so mutations here are visible to
+        # the caller's list as well.
+        self._headers = headers
+
+    def __len__(self):
+        """Return the total number of headers, including duplicates."""
+        return len(self._headers)
+
+    def __setitem__(self, name, val):
+        """Set the value of a header."""
+        # Replace semantics: drop any existing occurrences first.
+        del self[name]
+        self._headers.append((name, val))
+
+    def __delitem__(self,name):
+        """Delete all occurrences of a header, if present.
+
+        Does *not* raise an exception if the header is missing.
+        """
+        name = name.lower()
+        self._headers[:] = [kv for kv in self._headers if kv[0].lower()<>name]
+
+    def __getitem__(self,name):
+        """Get the first header value for 'name'
+
+        Return None if the header is missing instead of raising an exception.
+
+        Note that if the header appears multiple times, exactly which
+        occurrence gets returned is undefined.  Use get_all() to get all
+        the values matching a header field name.
+        """
+        return self.get(name)
+
+    def has_key(self, name):
+        """Return true if the message contains the header."""
+        return self.get(name) is not None
+
+    __contains__ = has_key
+
+    def get_all(self, name):
+        """Return a list of all the values for the named field.
+
+        These will be sorted in the order they appeared in the original header
+        list or were added to this instance, and may contain duplicates. Any
+        fields deleted and re-inserted are always appended to the header list.
+        If no fields exist with the given name, returns an empty list.
+        """
+        name = name.lower()
+        return [kv[1] for kv in self._headers if kv[0].lower()==name]
+
+
+    def get(self,name,default=None):
+        """Get the first header value for 'name', or return 'default'"""
+        # Case-insensitive linear scan; first match wins.
+        name = name.lower()
+        for k,v in self._headers:
+            if k.lower()==name:
+                return v
+        return default
+
+    def keys(self):
+        """Return a list of all the header field names.
+
+        These will be sorted in the order they appeared in the original header
+        list, or were added to this instance, and may contain duplicates.
+        Any fields deleted and re-inserted are always appended to the header
+        list.
+        """
+        return [k for k, v in self._headers]
+
+    def values(self):
+        """Return a list of all header values.
+
+        These will be sorted in the order they appeared in the original header
+        list, or were added to this instance, and may contain duplicates.
+        Any fields deleted and re-inserted are always appended to the header
+        list.
+        """
+        return [v for k, v in self._headers]
+
+    def items(self):
+        """Get all the header fields and values.
+
+        These will be sorted in the order they were in the original header
+        list, or were added to this instance, and may contain duplicates.
+        Any fields deleted and re-inserted are always appended to the header
+        list.
+        """
+        return self._headers[:]
+
+    def __repr__(self):
+        return "Headers(%s)" % `self._headers`
+
+    def __str__(self):
+        """str() returns the formatted headers, complete with end line,
+        suitable for direct HTTP transmission."""
+        # The two trailing empty strings produce the blank line that
+        # terminates the header section.
+        return '\r\n'.join(["%s: %s" % kv for kv in self._headers]+['',''])
+
+    def setdefault(self,name,value):
+        """Return first matching header value for 'name', or 'value'
+
+        If there is no header named 'name', add a new header with name 'name'
+        and value 'value'."""
+        result = self.get(name)
+        if result is None:
+            self._headers.append((name,value))
+            return value
+        else:
+            return result
+
+    def add_header(self, _name, _value, **_params):
+        """Extended header setting.
+
+        _name is the header field to add. keyword arguments can be used to set
+        additional parameters for the header field, with underscores converted
+        to dashes. Normally the parameter will be added as key="value" unless
+        value is None, in which case only the key will be added.
+
+        Example:
+
+        h.add_header('content-disposition', 'attachment', filename='bud.gif')
+
+        Note that unlike the corresponding 'email.Message' method, this does
+        *not* handle '(charset, language, value)' tuples: all values must be
+        strings or None.
+        """
+        parts = []
+        if _value is not None:
+            parts.append(_value)
+        for k, v in _params.items():
+            if v is None:
+                parts.append(k.replace('_', '-'))
+            else:
+                parts.append(_formatparam(k.replace('_', '-'), v))
+        self._headers.append((_name, "; ".join(parts)))
+
+def guess_scheme(environ):
+    """Return a guess for whether 'wsgi.url_scheme' should be 'http' or 'https'
+    """
+    # Mirrors the CGI convention: servers set HTTPS to 'yes'/'on'/'1' when
+    # the request arrived over SSL.
+    if environ.get("HTTPS") in ('yes','on','1'):
+        return 'https'
+    else:
+        return 'http'
+
+# Set (as a dict for O(1) lookup) of the HTTP/1.1 hop-by-hop headers listed
+# in RFC 2616 section 13.5.1; _hoppish is its bound has_key method.
+_hoppish = {
+    'connection':1, 'keep-alive':1, 'proxy-authenticate':1,
+    'proxy-authorization':1, 'te':1, 'trailers':1, 'transfer-encoding':1,
+    'upgrade':1
+}.has_key
+
+def is_hop_by_hop(header_name):
+    """Return true if 'header_name' is an HTTP/1.1 "Hop-by-Hop" header"""
+    return _hoppish(header_name.lower())
+
+class ServerHandler(object):
+    """Manage the invocation of a WSGI application"""
+
+    # Configuration parameters; can override per-subclass or per-instance
+    wsgi_version = (1,0)
+    wsgi_multithread = True
+    wsgi_multiprocess = True
+    wsgi_run_once = False
+
+    origin_server = True    # We are transmitting direct to client
+    http_version = "1.0"   # Version that should be used for response
+    server_software = software_version
+
+    # os_environ is used to supply configuration from the OS environment:
+    # by default it's a copy of 'os.environ' as of import time, but you can
+    # override this in e.g. your __init__ method.
+    os_environ = dict(os.environ.items())
+
+    # Collaborator classes
+    wsgi_file_wrapper = FileWrapper     # set to None to disable
+    headers_class = Headers             # must be a Headers-like class
+
+    # Error handling (also per-subclass or per-instance)
+    traceback_limit = None  # Print entire traceback to self.get_stderr()
+    error_status = "500 INTERNAL SERVER ERROR"
+    error_headers = [('Content-Type','text/plain')]
+
+    # State variables (don't mess with these)
+    status = result = None
+    headers_sent = False
+    headers = None
+    bytes_sent = 0
+
+    def __init__(self, stdin, stdout, stderr, environ, multithread=True,
+        multiprocess=False):
+        self.stdin = stdin
+        self.stdout = stdout
+        self.stderr = stderr
+        self.base_env = environ
+        self.wsgi_multithread = multithread
+        self.wsgi_multiprocess = multiprocess
+
+    def run(self, application):
+        """Invoke the application"""
+        # Note to self: don't move the close()!  Asynchronous servers shouldn't
+        # call close() from finish_response(), so if you close() anywhere but
+        # the double-error branch here, you'll break asynchronous servers by
+        # prematurely closing.  Async servers must return from 'run()' without
+        # closing if there might still be output to iterate over.
+        try:
+            self.setup_environ()
+            self.result = application(self.environ, self.start_response)
+            self.finish_response()
+        except:
+            try:
+                self.handle_error()
+            except:
+                # If we get an error handling an error, just give up already!
+                self.close()
+                raise   # ...and let the actual server figure it out.
+
+    def setup_environ(self):
+        """Set up the environment for one request"""
+
+        env = self.environ = self.os_environ.copy()
+        self.add_cgi_vars()
+
+        env['wsgi.input']        = self.get_stdin()
+        env['wsgi.errors']       = self.get_stderr()
+        env['wsgi.version']      = self.wsgi_version
+        env['wsgi.run_once']     = self.wsgi_run_once
+        env['wsgi.url_scheme']   = self.get_scheme()
+        env['wsgi.multithread']  = self.wsgi_multithread
+        env['wsgi.multiprocess'] = self.wsgi_multiprocess
+
+        if self.wsgi_file_wrapper is not None:
+            env['wsgi.file_wrapper'] = self.wsgi_file_wrapper
+
+        if self.origin_server and self.server_software:
+            env.setdefault('SERVER_SOFTWARE',self.server_software)
+
+    def finish_response(self):
+        """Send any iterable data, then close self and the iterable
+
+        Subclasses intended for use in asynchronous servers will
+        want to redefine this method, such that it sets up callbacks
+        in the event loop to iterate over the data, and to call
+        'self.close()' once the response is finished.
+        """
+        # NOTE(review): upstream wsgiref uses "or" here, not "and"; with
+        # "and", a wrapped-file result is neither iterated nor finished when
+        # sendfile() returns False (the default) -- confirm this is intended.
+        if not self.result_is_file() and not self.sendfile():
+            for data in self.result:
+                self.write(data)
+            self.finish_content()
+        self.close()
+
+    def get_scheme(self):
+        """Return the URL scheme being used"""
+        return guess_scheme(self.environ)
+
+    def set_content_length(self):
+        """Compute Content-Length or switch to chunked encoding if possible"""
+        try:
+            blocks = len(self.result)
+        except (TypeError,AttributeError,NotImplementedError):
+            pass
+        else:
+            # Only safe to infer a length when the iterable is exactly one
+            # block, in which case bytes_sent already equals its size.
+            if blocks==1:
+                self.headers['Content-Length'] = str(self.bytes_sent)
+                return
+        # XXX Try for chunked encoding if origin server and client is 1.1
+
+    def cleanup_headers(self):
+        """Make any necessary header changes or defaults
+
+        Subclasses can extend this to add other defaults.
+        """
+        if not self.headers.has_key('Content-Length'):
+            self.set_content_length()
+
+    def start_response(self, status, headers,exc_info=None):
+        """'start_response()' callable as specified by PEP 333"""
+
+        if exc_info:
+            try:
+                if self.headers_sent:
+                    # Re-raise original exception if headers sent
+                    raise exc_info[0], exc_info[1], exc_info[2]
+            finally:
+                exc_info = None        # avoid dangling circular ref
+        elif self.headers is not None:
+            raise AssertionError("Headers already set!")
+
+        assert type(status) is StringType,"Status must be a string"
+        assert len(status)>=4,"Status must be at least 4 characters"
+        assert int(status[:3]),"Status message must begin w/3-digit code"
+        assert status[3]==" ", "Status message must have a space after code"
+        if __debug__:
+            for name,val in headers:
+                assert type(name) is StringType,"Header names must be strings"
+                assert type(val) is StringType,"Header values must be strings"
+                assert not is_hop_by_hop(name),"Hop-by-hop headers not allowed"
+        self.status = status
+        self.headers = self.headers_class(headers)
+        return self.write
+
+    def send_preamble(self):
+        """Transmit version/status/date/server, via self._write()"""
+        if self.origin_server:
+            if self.client_is_modern():
+                self._write('HTTP/%s %s\r\n' % (self.http_version,self.status))
+                if not self.headers.has_key('Date'):
+                    self._write(
+                        'Date: %s\r\n' % time.asctime(time.gmtime(time.time()))
+                    )
+                if self.server_software and not self.headers.has_key('Server'):
+                    self._write('Server: %s\r\n' % self.server_software)
+        else:
+            # Non-origin (gateway) mode: emit a CGI-style Status header.
+            self._write('Status: %s\r\n' % self.status)
+
+    def write(self, data):
+        """'write()' callable as specified by PEP 333"""
+
+        assert type(data) is StringType,"write() argument must be string"
+
+        if not self.status:
+            raise AssertionError("write() before start_response()")
+
+        elif not self.headers_sent:
+            # Before the first output, send the stored headers
+            self.bytes_sent = len(data)    # make sure we know content-length
+            self.send_headers()
+        else:
+            self.bytes_sent += len(data)
+
+        # XXX check Content-Length and truncate if too many bytes written?
+        self._write(data)
+        self._flush()
+
+    def sendfile(self):
+        """Platform-specific file transmission
+
+        Override this method in subclasses to support platform-specific
+        file transmission.  It is only called if the application's
+        return iterable ('self.result') is an instance of
+        'self.wsgi_file_wrapper'.
+
+        This method should return a true value if it was able to actually
+        transmit the wrapped file-like object using a platform-specific
+        approach.  It should return a false value if normal iteration
+        should be used instead.  An exception can be raised to indicate
+        that transmission was attempted, but failed.
+
+        NOTE: this method should call 'self.send_headers()' if
+        'self.headers_sent' is false and it is going to attempt direct
+        transmission of the file.
+        """
+        return False   # No platform-specific transmission by default
+
+    def finish_content(self):
+        """Ensure headers and content have both been sent"""
+        if not self.headers_sent:
+            # Empty body: declare a zero length so the client doesn't hang.
+            self.headers['Content-Length'] = "0"
+            self.send_headers()
+        else:
+            pass # XXX check if content-length was too short?
+
+    def close(self):
+        # Log the request, then release the result iterable and reset all
+        # per-request state so the handler could be reused.
+        try:
+            self.request_handler.log_request(self.status.split(' ',1)[0], self.bytes_sent)
+        finally:
+            try:
+                if hasattr(self.result,'close'):
+                    self.result.close()
+            finally:
+                self.result = self.headers = self.status = self.environ = None
+                self.bytes_sent = 0; self.headers_sent = False
+
+    def send_headers(self):
+        """Transmit headers to the client, via self._write()"""
+        self.cleanup_headers()
+        self.headers_sent = True
+        if not self.origin_server or self.client_is_modern():
+            self.send_preamble()
+            self._write(str(self.headers))
+
+    def result_is_file(self):
+        """True if 'self.result' is an instance of 'self.wsgi_file_wrapper'"""
+        wrapper = self.wsgi_file_wrapper
+        return wrapper is not None and isinstance(self.result,wrapper)
+
+    def client_is_modern(self):
+        """True if client can accept status and headers"""
+        return self.environ['SERVER_PROTOCOL'].upper() != 'HTTP/0.9'
+
+    def log_exception(self,exc_info):
+        """Log the 'exc_info' tuple in the server log
+
+        Subclasses may override to retarget the output or change its format.
+        """
+        try:
+            from traceback import print_exception
+            stderr = self.get_stderr()
+            print_exception(
+                exc_info[0], exc_info[1], exc_info[2],
+                self.traceback_limit, stderr
+            )
+            stderr.flush()
+        finally:
+            # Drop the traceback reference to avoid a circular reference.
+            exc_info = None
+
+    def handle_error(self):
+        """Log current error, and send error output to client if possible"""
+        self.log_exception(sys.exc_info())
+        if not self.headers_sent:
+            self.result = self.error_output(self.environ, self.start_response)
+            self.finish_response()
+        # XXX else: attempt advanced recovery techniques for HTML or text?
+
+    def error_output(self, environ, start_response):
+        # Minimal WSGI app returning the traceback as a plain-text 500 body.
+        import traceback
+        start_response(self.error_status, self.error_headers[:], sys.exc_info())
+        return ['\n'.join(traceback.format_exception(*sys.exc_info()))]
+
+    # Pure abstract methods; *must* be overridden in subclasses
+
+    def _write(self,data):
+        # First call rebinds _write to the stream's bound write method so
+        # subsequent calls skip this wrapper.
+        self.stdout.write(data)
+        self._write = self.stdout.write
+
+    def _flush(self):
+        # Same memoization trick as _write.
+        self.stdout.flush()
+        self._flush = self.stdout.flush
+
+    def get_stdin(self):
+        return self.stdin
+
+    def get_stderr(self):
+        return self.stderr
+
+    def add_cgi_vars(self):
+        self.environ.update(self.base_env)
+
+class WSGIServer(HTTPServer):
+    """BaseHTTPServer that implements the Python WSGI protocol"""
+    application = None
+
+    def server_bind(self):
+        """Override server_bind to store the server name."""
+        # Wrap bind errors (e.g. port in use) in WSGIServerException so
+        # callers can present a friendly message.
+        try:
+            HTTPServer.server_bind(self)
+        except Exception, e:
+            raise WSGIServerException, e
+        self.setup_environ()
+
+    def setup_environ(self):
+        # Set up base environment shared by every request; per-request keys
+        # are layered on top in WSGIRequestHandler.get_environ().
+        env = self.base_environ = {}
+        env['SERVER_NAME'] = self.server_name
+        env['GATEWAY_INTERFACE'] = 'CGI/1.1'
+        env['SERVER_PORT'] = str(self.server_port)
+        env['REMOTE_HOST']=''
+        env['CONTENT_LENGTH']=''
+        env['SCRIPT_NAME'] = ''
+
+    def get_app(self):
+        return self.application
+
+    def set_app(self,application):
+        self.application = application
+
+class WSGIRequestHandler(BaseHTTPRequestHandler):
+    server_version = "WSGIServer/" + __version__
+
+    def __init__(self, *args, **kwargs):
+        # Cache the admin media prefix up front; log_message uses it to
+        # filter admin-media requests out of the console log.
+        from django.conf import settings
+        self.admin_media_prefix = settings.ADMIN_MEDIA_PREFIX
+        BaseHTTPRequestHandler.__init__(self, *args, **kwargs)
+
+    def get_environ(self):
+        # Build the per-request CGI/WSGI environ on top of the server's base.
+        env = self.server.base_environ.copy()
+        env['SERVER_PROTOCOL'] = self.request_version
+        env['REQUEST_METHOD'] = self.command
+        if '?' in self.path:
+            path,query = self.path.split('?',1)
+        else:
+            path,query = self.path,''
+
+        env['PATH_INFO'] = urllib.unquote(path)
+        env['QUERY_STRING'] = query
+        env['REMOTE_ADDR'] = self.client_address[0]
+
+        if self.headers.typeheader is None:
+            env['CONTENT_TYPE'] = self.headers.type
+        else:
+            env['CONTENT_TYPE'] = self.headers.typeheader
+
+        length = self.headers.getheader('content-length')
+        if length:
+            env['CONTENT_LENGTH'] = length
+
+        # Translate remaining headers to HTTP_* environ keys, joining
+        # repeated headers with commas per the CGI convention.
+        for h in self.headers.headers:
+            k,v = h.split(':',1)
+            k=k.replace('-','_').upper(); v=v.strip()
+            if k in env:
+                continue                    # skip content length, type,etc.
+            if 'HTTP_'+k in env:
+                env['HTTP_'+k] += ','+v     # comma-separate multiple headers
+            else:
+                env['HTTP_'+k] = v
+        return env
+
+    def get_stderr(self):
+        return sys.stderr
+
+    def handle(self):
+        """Handle a single HTTP request"""
+        self.raw_requestline = self.rfile.readline()
+        if not self.parse_request(): # An error code has been sent, just exit
+            return
+        handler = ServerHandler(self.rfile, self.wfile, self.get_stderr(), self.get_environ())
+        handler.request_handler = self      # backpointer for logging
+        handler.run(self.server.get_app())
+
+    def log_message(self, format, *args):
+        # Don't bother logging requests for admin images or the favicon.
+        if self.path.startswith(self.admin_media_prefix) or self.path == '/favicon.ico':
+            return
+        sys.stderr.write("[%s] %s\n" % (self.log_date_time_string(), format % args))
+
+class AdminMediaHandler(object):
+    """
+    WSGI middleware that intercepts calls to the admin media directory, as
+    defined by the ADMIN_MEDIA_PREFIX setting, and serves those images.
+    Use this ONLY LOCALLY, for development! This hasn't been tested for
+    security and is not super efficient.
+    """
+    def __init__(self, application, media_dir=None):
+        from django.conf import settings
+        self.application = application
+        # Default to the admin media shipped inside the django package.
+        if not media_dir:
+            import django
+            self.media_dir = \
+                os.path.join(django.__path__[0], 'contrib', 'admin', 'media')
+        else:
+            self.media_dir = media_dir
+        self.media_url = settings.ADMIN_MEDIA_PREFIX
+
+    def file_path(self, url):
+        """
+        Returns the path to the media file on disk for the given URL.
+
+        The passed URL is assumed to begin with ADMIN_MEDIA_PREFIX.  If the
+        resultant file path is outside the media directory, then a ValueError
+        is raised.
+        """
+        # Remove ADMIN_MEDIA_PREFIX.
+        relative_url = url[len(self.media_url):]
+        relative_path = urllib.url2pathname(relative_url)
+        # safe_join raises ValueError on path traversal outside media_dir.
+        return safe_join(self.media_dir, relative_path)
+
+    def __call__(self, environ, start_response):
+        import os.path
+
+        # Ignore requests that aren't under ADMIN_MEDIA_PREFIX. Also ignore
+        # all requests if ADMIN_MEDIA_PREFIX isn't a relative URL.
+        if self.media_url.startswith('http://') or self.media_url.startswith('https://') \
+            or not environ['PATH_INFO'].startswith(self.media_url):
+            return self.application(environ, start_response)
+
+        # Find the admin file and serve it up, if it exists and is readable.
+        try:
+            file_path = self.file_path(environ['PATH_INFO'])
+        except ValueError: # Resulting file path was not valid.
+            status = '404 NOT FOUND'
+            headers = {'Content-type': 'text/plain'}
+            output = ['Page not found: %s' % environ['PATH_INFO']]
+            start_response(status, headers.items())
+            return output
+        if not os.path.exists(file_path):
+            status = '404 NOT FOUND'
+            headers = {'Content-type': 'text/plain'}
+            output = ['Page not found: %s' % environ['PATH_INFO']]
+        else:
+            try:
+                fp = open(file_path, 'rb')
+            except IOError:
+                # NOTE(review): IOError is mapped to 401; 403 FORBIDDEN would
+                # arguably be more accurate -- confirm before changing.
+                status = '401 UNAUTHORIZED'
+                headers = {'Content-type': 'text/plain'}
+                output = ['Permission denied: %s' % environ['PATH_INFO']]
+            else:
+                # Whole file is read into memory -- acceptable for small
+                # development-only media files.
+                status = '200 OK'
+                headers = {}
+                output = [fp.read()]
+                fp.close()
+        start_response(status, headers.items())
+        return output
+
+def run(addr, port, wsgi_handler):
+    # Bind a WSGIServer on (addr, port) and serve the given WSGI application
+    # until interrupted. This call blocks forever.
+    server_address = (addr, port)
+    httpd = WSGIServer(server_address, WSGIRequestHandler)
+    httpd.set_app(wsgi_handler)
+    httpd.serve_forever()
diff --git a/google_appengine/lib/django/django/core/servers/fastcgi.py b/google_appengine/lib/django/django/core/servers/fastcgi.py
new file mode 100755
index 0000000..649dd69
--- /dev/null
+++ b/google_appengine/lib/django/django/core/servers/fastcgi.py
@@ -0,0 +1,158 @@
+"""
+FastCGI server that implements the WSGI protocol.
+
+Uses the flup python package: http://www.saddi.com/software/flup/
+
+This is an adaptation of the flup package to add FastCGI server support
+to run Django apps from Web servers that support the FastCGI protocol.
+This module can be run standalone or from the django-admin / manage.py
+scripts using the "runfcgi" directive.
+
+Run with the extra option "help" for a list of additional options you can
+pass to this server.
+"""
+
+import sys, os
+
+__version__ = "0.1"
+__all__ = ["runfastcgi"]
+
+# Usage text printed by fastcgi_help(); also serves as the option reference
+# for the runfcgi management command.
+FASTCGI_HELP = r"""runfcgi:
+  Run this project as a fastcgi application. To do this, the
+  flup package from http://www.saddi.com/software/flup/ is
+  required.
+
+Usage:
+   django-admin.py runfcgi --settings=yourproject.settings [fcgi settings]
+   manage.py runfcgi [fcgi settings]
+
+Optional Fcgi settings: (setting=value)
+  host=HOSTNAME        hostname to listen on..
+  port=PORTNUM         port to listen on.
+  socket=FILE          UNIX socket to listen on.
+  method=IMPL          prefork or threaded (default prefork)
+  maxrequests=NUMBER   number of requests a child handles before it is
+                       killed and a new child is forked (0 = no limit).
+  maxspare=NUMBER      max number of spare processes / threads
+  minspare=NUMBER      min number of spare processes / threads.
+  maxchildren=NUMBER   hard limit number of processes / threads
+  daemonize=BOOL       whether to detach from terminal.
+  pidfile=FILE         write the spawned process-id to this file.
+  workdir=DIRECTORY    change to this directory when daemonizing
+
+Examples:
+  Run a "standard" fastcgi process on a file-descriptor
+  (for webservers which spawn your processes for you)
+    $ manage.py runfcgi method=threaded
+
+  Run a fastcgi server on a TCP host/port
+    $ manage.py runfcgi method=prefork host=127.0.0.1 port=8025
+
+  Run a fastcgi server on a UNIX domain socket (posix platforms only)
+    $ manage.py runfcgi method=prefork socket=/tmp/fcgi.sock
+
+  Run a fastCGI as a daemon and write the spawned PID in a file
+    $ manage.py runfcgi socket=/tmp/fcgi.sock method=prefork \
+        daemonize=true pidfile=/var/run/django-fcgi.pid
+
+"""
+
+# Defaults for every recognized runfcgi option; runfastcgi() copies this
+# dict and overlays caller-supplied values on top.
+FASTCGI_OPTIONS = {
+    'host': None,
+    'port': None,
+    'socket': None,
+    'method': 'fork',
+    'daemonize': None,
+    'workdir': '/',
+    'pidfile': None,
+    'maxspare': 5,
+    'minspare': 2,
+    'maxchildren': 50,
+    'maxrequests': 0,
+}
+
+def fastcgi_help(message=None):
+    # Print the generic usage text, then an optional extra message.
+    # Returns False so callers can "return fastcgi_help(...)" to signal failure.
+    print FASTCGI_HELP
+    if message:
+        print message
+    return False
+
+def runfastcgi(argset=[], **kwargs):
+    # Entry point for the "runfcgi" command.  argset holds "key=value"
+    # strings from the command line; kwargs come from programmatic callers.
+    # NOTE(review): the mutable default argset=[] is only read here, never
+    # mutated, so the shared-default pitfall doesn't bite -- but confirm.
+    options = FASTCGI_OPTIONS.copy()
+    options.update(kwargs)
+    for x in argset:
+        if "=" in x:
+            k, v = x.split('=', 1)
+        else:
+            k, v = x, True          # bare flag, e.g. "help"
+        options[k.lower()] = v
+
+    if "help" in options:
+        return fastcgi_help()
+
+    try:
+        import flup
+    except ImportError, e:
+        print >> sys.stderr, "ERROR: %s" % e
+        print >> sys.stderr, "  Unable to load the flup package.  In order to run django"
+        print >> sys.stderr, "  as a FastCGI application, you will need to get flup from"
+        print >> sys.stderr, "  http://www.saddi.com/software/flup/   If you've already"
+        print >> sys.stderr, "  installed flup, then make sure you have it in your PYTHONPATH."
+        return False
+
+    # Pick the flup server implementation and translate our option names
+    # into flup's camelCase keyword arguments.
+    if options['method'] in ('prefork', 'fork'):
+        from flup.server.fcgi_fork import WSGIServer
+        wsgi_opts = {
+            'maxSpare': int(options["maxspare"]),
+            'minSpare': int(options["minspare"]),
+            'maxChildren': int(options["maxchildren"]),
+            'maxRequests': int(options["maxrequests"]),
+        }
+    elif options['method'] in ('thread', 'threaded'):
+        from flup.server.fcgi import WSGIServer
+        wsgi_opts = {
+            'maxSpare': int(options["maxspare"]),
+            'minSpare': int(options["minspare"]),
+            'maxThreads': int(options["maxchildren"]),
+        }
+    else:
+        return fastcgi_help("ERROR: Implementation must be one of prefork or thread.")
+
+    wsgi_opts['debug'] = False # Turn off flup tracebacks
+
+    # Prep up and go
+    from django.core.handlers.wsgi import WSGIHandler
+
+    # Exactly one of host+port / socket / neither (fd passed by the web
+    # server) is allowed; any other combination is an error.
+    if options["host"] and options["port"] and not options["socket"]:
+        wsgi_opts['bindAddress'] = (options["host"], int(options["port"]))
+    elif options["socket"] and not options["host"] and not options["port"]:
+        wsgi_opts['bindAddress'] = options["socket"]
+    elif not options["socket"] and not options["host"] and not options["port"]:
+        wsgi_opts['bindAddress'] = None
+    else:
+        return fastcgi_help("Invalid combination of host, port, socket.")
+
+    if options["daemonize"] is None:
+        # Default to daemonizing if we're running on a socket/named pipe.
+        daemonize = (wsgi_opts['bindAddress'] is not None)
+    else:
+        if options["daemonize"].lower() in ('true', 'yes', 't'):
+            daemonize = True
+        elif options["daemonize"].lower() in ('false', 'no', 'f'):
+            daemonize = False
+        else:
+            return fastcgi_help("ERROR: Invalid option for daemonize parameter.")
+
+    if daemonize:
+        from django.utils.daemonize import become_daemon
+        become_daemon(our_home_dir=options["workdir"])
+
+    # Write the PID file after daemonizing so it records the daemon's PID.
+    if options["pidfile"]:
+        fp = open(options["pidfile"], "w")
+        fp.write("%d\n" % os.getpid())
+        fp.close()
+
+    WSGIServer(WSGIHandler(), **wsgi_opts).run()
+
+if __name__ == '__main__':
+ runfastcgi(sys.argv[1:])
diff --git a/google_appengine/lib/django/django/core/signals.py b/google_appengine/lib/django/django/core/signals.py
new file mode 100755
index 0000000..7a23607
--- /dev/null
+++ b/google_appengine/lib/django/django/core/signals.py
@@ -0,0 +1,3 @@
# Signal sentinels for the request lifecycle. Each is a plain, unique
# object(); presumably the signal dispatcher keys on object identity, so no
# state or behavior is needed here -- TODO confirm against the dispatcher.
request_started = object()
request_finished = object()
got_request_exception = object()
diff --git a/google_appengine/lib/django/django/core/template_loader.py b/google_appengine/lib/django/django/core/template_loader.py
new file mode 100755
index 0000000..ee86178
--- /dev/null
+++ b/google_appengine/lib/django/django/core/template_loader.py
@@ -0,0 +1,7 @@
+# This module is DEPRECATED!
+#
+# You should no longer be using django.template_loader.
+#
+# Use django.template.loader instead.
+
+from django.template.loader import *
diff --git a/google_appengine/lib/django/django/core/urlresolvers.py b/google_appengine/lib/django/django/core/urlresolvers.py
new file mode 100755
index 0000000..3f1004c
--- /dev/null
+++ b/google_appengine/lib/django/django/core/urlresolvers.py
@@ -0,0 +1,241 @@
+"""
+This module converts requested URLs to callback view functions.
+
+RegexURLResolver is the main class here. Its resolve() method takes a URL (as
+a string) and returns a tuple in this format:
+
+ (view_function, function_args, function_kwargs)
+"""
+
+from django.http import Http404
+from django.core.exceptions import ImproperlyConfigured, ViewDoesNotExist
+import re
+
class Resolver404(Http404):
    """
    Raised when a RegexURLResolver fails to match a path. Its argument is a
    dict with 'tried' (the patterns attempted) and 'path' keys (see
    RegexURLResolver.resolve).
    """
    pass
+
class NoReverseMatch(Exception):
    """Raised when a reverse URL lookup cannot be satisfied by any pattern."""
    # The template engine checks this flag and swallows the exception rather
    # than erroring out when it occurs during template variable resolution.
    silent_variable_failure = True
+
def get_mod_func(callback):
    """
    Split a dotted path on its last dot into (module path, attribute name).

    'django.views.news.stories.story_detail' ->
    ('django.views.news.stories', 'story_detail'); a string containing no
    dot comes back unchanged, paired with an empty attribute name.
    """
    mod_name, dot, func_name = callback.rpartition('.')
    if not dot:
        # No dot at all: the whole string is treated as the "module" part.
        return callback, ''
    return mod_name, func_name
+
def reverse_helper(regex, *args, **kwargs):
    """
    Perform a "reverse" lookup: build the URL for the given args/kwargs by
    substituting them into the compiled regular expression *regex*.

    >>> reverse_helper(re.compile('^places/(\d+)/$'), 3)
    'places/3/'
    >>> reverse_helper(re.compile('^places/(?P<id>\d+)/$'), id=3)
    'places/3/'
    >>> reverse_helper(re.compile('^people/(?P<state>\w\w)/(\w+)/$'), 'adrian', state='il')
    'people/il/adrian/'

    Raises NoReverseMatch if the args/kwargs aren't valid for the regex.
    """
    checker = MatchChecker(args, kwargs)
    # TODO: Handle nested parentheses in the following pattern.
    filled = re.sub(r'\(([^)]+)\)', checker, regex.pattern)
    # The anchors have no place in a generated URL; strip them out.
    return filled.replace('^', '').replace('$', '')
+
class MatchChecker(object):
    """
    Callable used as the re.sub replacement function in reverse
    RegexURLPattern lookups: each parenthesized group in the pattern is
    replaced with the corresponding positional/keyword argument, after the
    value is checked against the group's own sub-pattern.
    """
    def __init__(self, args, kwargs):
        self.args = args
        self.kwargs = kwargs
        # Index of the next positional argument to consume.
        self.current_arg = 0

    def _next_positional(self):
        # Pull the next unconsumed positional argument; raise NoReverseMatch
        # when the caller did not supply enough of them.
        try:
            value = self.args[self.current_arg]
            self.current_arg += 1
        except IndexError:
            # The arg wasn't passed in.
            raise NoReverseMatch('Not enough positional arguments passed in')
        return value

    def __call__(self, match_obj):
        # match_obj.group(1) is the contents of the parentheses. Figure out
        # whether it's a named or an unnamed group.
        group_body = match_obj.group(1)
        named = re.search(r'^\?P<(\w+)>(.*?)$', group_body)
        if named:
            # Named group: named.group(1) is the name, named.group(2) the
            # regex. Prefer the keyword argument, falling back to the
            # positional list when the value was passed positionally.
            if named.group(1) in self.kwargs:
                value = self.kwargs[named.group(1)]
            else:
                value = self._next_positional()
            test_regex = named.group(2)
        else:
            # Unnamed group: always consumes the next positional argument.
            value = self._next_positional()
            test_regex = group_body
        # re.match is used deliberately: the value has to match the group's
        # pattern from the very start of the string.
        if not re.match(test_regex + '$', str(value)): # TODO: Unicode?
            raise NoReverseMatch("Value %r didn't match regular expression %r" % (value, test_regex))
        return str(value) # TODO: Unicode?
+
class RegexURLPattern(object):
    """
    A single urlconf entry: maps one regular expression to one view.

    The view may be given either as a callable or as a dotted-path string;
    in the string case the import is deferred until the view is first
    accessed (see the ``callback`` property).
    """
    def __init__(self, regex, callback, default_args=None):
        # regex is a string representing a regular expression.
        # callback is either a string like 'foo.views.news.stories.story_detail'
        # which represents the path to a module and a view function name, or a
        # callable object (view).
        self.regex = re.compile(regex)
        if callable(callback):
            self._callback = callback
        else:
            # Remember the dotted path; resolved lazily by _get_callback.
            self._callback = None
            self._callback_str = callback
        self.default_args = default_args or {}

    def resolve(self, path):
        # Returns (view, args, kwargs) when this pattern matches ``path``;
        # falls through (returning None) when it doesn't.
        match = self.regex.search(path)
        if match:
            # If there are any named groups, use those as kwargs, ignoring
            # non-named groups. Otherwise, pass all non-named arguments as
            # positional arguments.
            kwargs = match.groupdict()
            if kwargs:
                args = ()
            else:
                args = match.groups()
            # In both cases, pass any extra_kwargs as **kwargs.
            kwargs.update(self.default_args)

            return self.callback, args, kwargs

    def _get_callback(self):
        # Import the view lazily and cache it, so loading a urlconf does not
        # import every view module up front.
        if self._callback is not None:
            return self._callback
        mod_name, func_name = get_mod_func(self._callback_str)
        try:
            self._callback = getattr(__import__(mod_name, {}, {}, ['']), func_name)
        except ImportError, e:
            raise ViewDoesNotExist, "Could not import %s. Error was: %s" % (mod_name, str(e))
        except AttributeError, e:
            raise ViewDoesNotExist, "Tried %s in module %s. Error was: %s" % (func_name, mod_name, str(e))
        return self._callback
    callback = property(_get_callback)

    def reverse(self, viewname, *args, **kwargs):
        # Only reverse when the dotted path actually names this pattern's
        # view; otherwise signal the caller to try the next pattern.
        mod_name, func_name = get_mod_func(viewname)
        try:
            lookup_view = getattr(__import__(mod_name, {}, {}, ['']), func_name)
        except (ImportError, AttributeError):
            raise NoReverseMatch
        if lookup_view != self.callback:
            raise NoReverseMatch
        return self.reverse_helper(*args, **kwargs)

    def reverse_helper(self, *args, **kwargs):
        # Fill this pattern's regex with the given arguments (delegates to
        # the module-level reverse_helper).
        return reverse_helper(self.regex, *args, **kwargs)
+
class RegexURLResolver(object):
    """
    Resolves a URL by stripping its own regex prefix from the path and then
    trying each entry in the urlconf module named by ``urlconf_name``.
    """
    def __init__(self, regex, urlconf_name, default_kwargs=None):
        # regex is a string representing a regular expression.
        # urlconf_name is a string representing the module containing urlconfs.
        self.regex = re.compile(regex)
        self.urlconf_name = urlconf_name
        self.callback = None
        self.default_kwargs = default_kwargs or {}

    def resolve(self, path):
        # Returns (view, args, kwargs) from the first matching pattern.
        # Raises Resolver404 -- carrying the list of patterns tried -- when
        # the prefix matches but no pattern does; falls through (returning
        # None) when the prefix itself doesn't match.
        tried = []
        match = self.regex.search(path)
        if match:
            new_path = path[match.end():]
            for pattern in self.urlconf_module.urlpatterns:
                try:
                    sub_match = pattern.resolve(new_path)
                except Resolver404, e:
                    # A nested resolver failed; record its attempts, prefixed
                    # with the nested pattern, for the final 404 report.
                    tried.extend([(pattern.regex.pattern + ' ' + t) for t in e.args[0]['tried']])
                else:
                    if sub_match:
                        # Merge kwargs: resolver defaults, then the matched
                        # pattern's kwargs, then this resolver's named groups.
                        sub_match_dict = dict(self.default_kwargs, **sub_match[2])
                        return sub_match[0], sub_match[1], dict(match.groupdict(), **sub_match_dict)
                    tried.append(pattern.regex.pattern)
            raise Resolver404, {'tried': tried, 'path': new_path}

    def _get_urlconf_module(self):
        # Import the urlconf module lazily and cache it on the instance.
        try:
            return self._urlconf_module
        except AttributeError:
            try:
                self._urlconf_module = __import__(self.urlconf_name, {}, {}, [''])
            except ValueError, e:
                # Invalid urlconf_name, such as "foo.bar." (note trailing period)
                raise ImproperlyConfigured, "Error while importing URLconf %r: %s" % (self.urlconf_name, e)
            return self._urlconf_module
    urlconf_module = property(_get_urlconf_module)

    def _get_url_patterns(self):
        return self.urlconf_module.urlpatterns
    url_patterns = property(_get_url_patterns)

    def _resolve_special(self, view_type):
        # Look up the dotted path stored in handler404/handler500 on the
        # urlconf module and import the view; returns (view, {}).
        callback = getattr(self.urlconf_module, 'handler%s' % view_type)
        mod_name, func_name = get_mod_func(callback)
        try:
            return getattr(__import__(mod_name, {}, {}, ['']), func_name), {}
        except (ImportError, AttributeError), e:
            raise ViewDoesNotExist, "Tried %s. Error was: %s" % (callback, str(e))

    def resolve404(self):
        # The view used when no URL pattern matched.
        return self._resolve_special('404')

    def resolve500(self):
        # The view used for server errors.
        return self._resolve_special('500')

    def reverse(self, lookup_view, *args, **kwargs):
        # Returns the URL fragment for lookup_view (dotted path or callable)
        # with the given arguments, searching every pattern in this urlconf.
        # Raises NoReverseMatch when no pattern can produce it.
        if not callable(lookup_view):
            mod_name, func_name = get_mod_func(lookup_view)
            try:
                lookup_view = getattr(__import__(mod_name, {}, {}, ['']), func_name)
            except (ImportError, AttributeError):
                raise NoReverseMatch
        for pattern in self.urlconf_module.urlpatterns:
            if isinstance(pattern, RegexURLResolver):
                try:
                    return pattern.reverse_helper(lookup_view, *args, **kwargs)
                except NoReverseMatch:
                    continue
            elif pattern.callback == lookup_view:
                try:
                    return pattern.reverse_helper(*args, **kwargs)
                except NoReverseMatch:
                    continue
        raise NoReverseMatch

    def reverse_helper(self, lookup_view, *args, **kwargs):
        # Reverse within this resolver, then prepend this resolver's own
        # regex filled in with the same arguments.
        sub_match = self.reverse(lookup_view, *args, **kwargs)
        result = reverse_helper(self.regex, *args, **kwargs)
        return result + sub_match
+
def resolve(path, urlconf=None):
    """
    Resolve *path* to (view_function, args, kwargs) using *urlconf*,
    defaulting to settings.ROOT_URLCONF when none is given.
    """
    if urlconf is None:
        # Imported lazily so merely importing this module doesn't require
        # settings to be configured.
        from django.conf import settings
        urlconf = settings.ROOT_URLCONF
    return RegexURLResolver(r'^/', urlconf).resolve(path)
+
def reverse(viewname, urlconf=None, args=None, kwargs=None):
    """
    Return the URL (with a leading slash) that maps to *viewname* with the
    given args/kwargs, searching *urlconf* (default: settings.ROOT_URLCONF).
    """
    if urlconf is None:
        # Lazy import: settings are only needed when no urlconf is supplied.
        from django.conf import settings
        urlconf = settings.ROOT_URLCONF
    positional = args or []
    keyword = kwargs or {}
    return '/' + RegexURLResolver(r'^/', urlconf).reverse(viewname, *positional, **keyword)
diff --git a/google_appengine/lib/django/django/core/validators.py b/google_appengine/lib/django/django/core/validators.py
new file mode 100755
index 0000000..bd7d790
--- /dev/null
+++ b/google_appengine/lib/django/django/core/validators.py
@@ -0,0 +1,573 @@
+"""
+A library of validators that return None and raise ValidationError when the
+provided data isn't valid.
+
+Validators may be callable classes, and they may have an 'always_test'
+attribute. If an 'always_test' attribute exists (regardless of value), the
+validator will *always* be run, regardless of whether its associated
+form field is required.
+"""
+
+import urllib2
+from django.conf import settings
+from django.utils.translation import gettext, gettext_lazy, ngettext
+from django.utils.functional import Promise, lazy
+import re
+
# Building blocks for the ANSI date/time patterns below.
_datere = r'\d{4}-\d{1,2}-\d{1,2}'
_timere = r'(?:[01]?[0-9]|2[0-3]):[0-5][0-9](?::[0-5][0-9])?'
alnum_re = re.compile(r'^\w+$')          # letters, digits and underscores
alnumurl_re = re.compile(r'^[-\w/]+$')   # ...plus dashes and slashes
ansi_date_re = re.compile('^%s$' % _datere)
ansi_time_re = re.compile('^%s$' % _timere)
ansi_datetime_re = re.compile('^%s %s$' % (_datere, _timere))
# Loose RFC 2822 addr-spec check: a dot-atom or quoted-string local part,
# followed by a dotted domain with a 2-6 letter TLD.
email_re = re.compile(
    r"(^[-!#$%&'*+/=?^_`{}|~0-9A-Z]+(\.[-!#$%&'*+/=?^_`{}|~0-9A-Z]+)*" # dot-atom
    # quoted-string. Bug fix: the escaped-character class originally read
    # \001-011 (i.e. the range \001-'0' plus two literal '1's), which
    # wrongly admitted newline/CR after a backslash; the intended range is
    # \001-\011.
    r'|^"([\001-\010\013\014\016-\037!#-\[\]-\177]|\\[\001-\011\013\014\016-\177])*"' # quoted-string
    r')@(?:[A-Z0-9-]+\.)+[A-Z]{2,6}$', re.IGNORECASE) # domain
integer_re = re.compile(r'^-?\d+$')
ip4_re = re.compile(r'^(25[0-5]|2[0-4]\d|[0-1]?\d?\d)(\.(25[0-5]|2[0-4]\d|[0-1]?\d?\d)){3}$')
# U.S. phone letters exclude Q and Z ([A-PR-Y]).
phone_re = re.compile(r'^[A-PR-Y0-9]{3}-[A-PR-Y0-9]{3}-[A-PR-Y0-9]{4}$', re.IGNORECASE)
slug_re = re.compile(r'^[-\w]+$')
url_re = re.compile(r'^https?://\S+$')
+
+lazy_inter = lazy(lambda a,b: str(a) % b, str)
+
class ValidationError(Exception):
    """
    Raised by validators when field data is invalid. Messages are always
    stored as a list, whether constructed from a single string or a list.
    """
    def __init__(self, message):
        "ValidationError can be passed a string or a list."
        if isinstance(message, list):
            self.messages = message
            return
        assert isinstance(message, (basestring, Promise)), ("%s should be a string" % repr(message))
        self.messages = [message]

    def __str__(self):
        # This is needed because, without a __str__(), printing an exception
        # instance would result in this:
        #   AttributeError: ValidationError instance has no attribute 'args'
        # See http://www.python.org/doc/current/tut/node10.html#handling
        return str(self.messages)
+
class CriticalValidationError(Exception):
    """
    Validation failure variant; stores its messages as a list exactly like
    ValidationError.
    """
    def __init__(self, message):
        "ValidationError can be passed a string or a list."
        self.messages = message if isinstance(message, list) else None
        if self.messages is None:
            assert isinstance(message, (basestring, Promise)), ("'%s' should be a string" % message)
            self.messages = [message]

    def __str__(self):
        return str(self.messages)
+
# Simple single-field validators. Each takes (field_data, all_data) and
# raises ValidationError with a translated message when the data is invalid.

def isAlphaNumeric(field_data, all_data):
    # \w also admits the underscore, hence the wording of the message.
    if not alnum_re.search(field_data):
        raise ValidationError, gettext("This value must contain only letters, numbers and underscores.")

def isAlphaNumericURL(field_data, all_data):
    if not alnumurl_re.search(field_data):
        raise ValidationError, gettext("This value must contain only letters, numbers, underscores, dashes or slashes.")

def isSlug(field_data, all_data):
    if not slug_re.search(field_data):
        raise ValidationError, gettext("This value must contain only letters, numbers, underscores or hyphens.")

def isLowerCase(field_data, all_data):
    if field_data.lower() != field_data:
        raise ValidationError, gettext("Uppercase letters are not allowed here.")

def isUpperCase(field_data, all_data):
    if field_data.upper() != field_data:
        raise ValidationError, gettext("Lowercase letters are not allowed here.")

def isCommaSeparatedIntegerList(field_data, all_data):
    # Note: whitespace around items and a fully blank string both fail,
    # since int('') and int(' 1 ') -- wait, int strips whitespace, but
    # int('') raises -- so a blank field_data ('' -> ['']) is rejected here.
    for supposed_int in field_data.split(','):
        try:
            int(supposed_int)
        except ValueError:
            raise ValidationError, gettext("Enter only digits separated by commas.")

def isCommaSeparatedEmailList(field_data, all_data):
    """
    Checks that field_data is a string of e-mail addresses separated by commas.
    Blank field_data values will not throw a validation error, and whitespace
    is allowed around the commas.
    """
    # NOTE(review): a fully blank string splits to [''], which fails
    # isValidEmail -- the "blank won't throw" claim above seems to rely on
    # the caller skipping validation of empty optional fields; verify.
    for supposed_email in field_data.split(','):
        try:
            isValidEmail(supposed_email.strip(), '')
        except ValidationError:
            raise ValidationError, gettext("Enter valid e-mail addresses separated by commas.")

def isValidIPAddress4(field_data, all_data):
    if not ip4_re.search(field_data):
        raise ValidationError, gettext("Please enter a valid IP address.")

def isNotEmpty(field_data, all_data):
    # Whitespace-only values count as empty.
    if field_data.strip() == '':
        raise ValidationError, gettext("Empty values are not allowed here.")

def isOnlyDigits(field_data, all_data):
    if not field_data.isdigit():
        raise ValidationError, gettext("Non-numeric characters aren't allowed here.")

def isNotOnlyDigits(field_data, all_data):
    if field_data.isdigit():
        raise ValidationError, gettext("This value can't be comprised solely of digits.")

def isInteger(field_data, all_data):
    # This differs from isOnlyDigits because this accepts the negative sign
    if not integer_re.search(field_data):
        raise ValidationError, gettext("Enter a whole number.")

def isOnlyLetters(field_data, all_data):
    if not field_data.isalpha():
        raise ValidationError, gettext("Only alphabetical characters are allowed here.")
+
def _isValidDate(date_string):
    """
    A helper function used by isValidANSIDate and isValidANSIDatetime to
    check if the date is valid. The date string is assumed to already be in
    YYYY-MM-DD format.
    """
    from datetime import date
    # Could use time.strptime here and catch errors, but datetime.date below
    # produces much friendlier error messages.
    year, month, day = map(int, date_string.split('-'))
    # This check is needed because strftime is used when saving the date
    # value to the database, and strftime requires that the year be >=1900.
    if year < 1900:
        raise ValidationError, gettext('Year must be 1900 or later.')
    try:
        date(year, month, day)
    except ValueError, e:
        # datetime's own message (e.g. "day is out of range for month") is
        # passed through gettext for translation.
        msg = gettext('Invalid date: %s') % gettext(str(e))
        raise ValidationError, msg

def isValidANSIDate(field_data, all_data):
    # Format check first, then calendar validity.
    if not ansi_date_re.search(field_data):
        raise ValidationError, gettext('Enter a valid date in YYYY-MM-DD format.')
    _isValidDate(field_data)

def isValidANSITime(field_data, all_data):
    if not ansi_time_re.search(field_data):
        raise ValidationError, gettext('Enter a valid time in HH:MM format.')

def isValidANSIDatetime(field_data, all_data):
    if not ansi_datetime_re.search(field_data):
        raise ValidationError, gettext('Enter a valid date/time in YYYY-MM-DD HH:MM format.')
    # Only the date part needs calendar validation; the regex fully
    # constrains the time part.
    _isValidDate(field_data.split()[0])
+
def isValidEmail(field_data, all_data):
    if not email_re.search(field_data):
        raise ValidationError, gettext('Enter a valid e-mail address.')

def isValidImage(field_data, all_data):
    """
    Checks that the file-upload field data contains a valid image (GIF, JPG,
    PNG, possibly others -- whatever the Python Imaging Library supports).
    """
    from PIL import Image
    from cStringIO import StringIO
    try:
        # field_data is expected to be an upload dict with a 'content' key;
        # an unsubscriptable value means no file arrived.
        content = field_data['content']
    except TypeError:
        raise ValidationError, gettext("No file was submitted. Check the encoding type on the form.")
    try:
        Image.open(StringIO(content))
    except IOError: # Python Imaging Library doesn't recognize it as an image
        raise ValidationError, gettext("Upload a valid image. The file you uploaded was either not an image or a corrupted image.")

def isValidImageURL(field_data, all_data):
    # Fetches the URL (network access). A retrieval failure surfaces as
    # URLMimeTypeCheck.CouldNotRetrieve (itself a ValidationError subclass),
    # which deliberately propagates uncaught here.
    uc = URLMimeTypeCheck(('image/jpeg', 'image/gif', 'image/png'))
    try:
        uc(field_data, all_data)
    except URLMimeTypeCheck.InvalidContentType:
        raise ValidationError, gettext("The URL %s does not point to a valid image.") % field_data

def isValidPhone(field_data, all_data):
    # U.S.-style XXX-XXX-XXXX; the letter classes exclude Q and Z.
    if not phone_re.search(field_data):
        raise ValidationError, gettext('Phone numbers must be in XXX-XXX-XXXX format. "%s" is invalid.') % field_data

def isValidQuicktimeVideoURL(field_data, all_data):
    "Checks that the given URL is a video that can be played by QuickTime (qt, mpeg)"
    uc = URLMimeTypeCheck(('video/quicktime', 'video/mpeg',))
    try:
        uc(field_data, all_data)
    except URLMimeTypeCheck.InvalidContentType:
        raise ValidationError, gettext("The URL %s does not point to a valid QuickTime video.") % field_data

def isValidURL(field_data, all_data):
    if not url_re.search(field_data):
        raise ValidationError, gettext("A valid URL is required.")

def isValidHTML(field_data, all_data):
    # Posts the fragment to the W3C validator service (network access).
    import urllib, urllib2
    try:
        u = urllib2.urlopen('http://validator.w3.org/check', urllib.urlencode({'fragment': field_data, 'output': 'xml'}))
    except:
        # Validator or Internet connection is unavailable. Fail silently.
        return
    html_is_valid = (u.headers.get('x-w3c-validator-status', 'Invalid') == 'Valid')
    if html_is_valid:
        return
    from xml.dom.minidom import parseString
    error_messages = [e.firstChild.wholeText for e in parseString(u.read()).getElementsByTagName('messages')[0].getElementsByTagName('msg')]
    raise ValidationError, gettext("Valid HTML is required. Specific errors are:\n%s") % "\n".join(error_messages)

def isWellFormedXml(field_data, all_data):
    from xml.dom.minidom import parseString
    try:
        parseString(field_data)
    except Exception, e: # Naked except because we're not sure what will be thrown
        raise ValidationError, gettext("Badly formed XML: %s") % str(e)

def isWellFormedXmlFragment(field_data, all_data):
    # Wrap in a dummy root element so a bare fragment parses as a document.
    isWellFormedXml('<root>%s</root>' % field_data, all_data)
+
+def isExistingURL(field_data, all_data):
+ try:
+ headers = {
+ "Accept" : "text/xml,application/xml,application/xhtml+xml,text/html;q=0.9,text/plain;q=0.8,image/png,*/*;q=0.5",
+ "Accept-Language" : "en-us,en;q=0.5",
+ "Accept-Charset": "ISO-8859-1,utf-8;q=0.7,*;q=0.7",
+ "Connection" : "close",
+ "User-Agent": settings.URL_VALIDATOR_USER_AGENT
+ }
+ req = urllib2.Request(field_data,None, headers)
+ u = urllib2.urlopen(req)
+ except ValueError:
+ raise ValidationError, _("Invalid URL: %s") % field_data
+ except urllib2.HTTPError, e:
+ # 401s are valid; they just mean authorization is required.
+ # 301 and 302 are redirects; they just mean look somewhere else.
+ if str(e.code) not in ('401','301','302'):
+ raise ValidationError, _("The URL %s is a broken link.") % field_data
+ except: # urllib2.URLError, httplib.InvalidURL, etc.
+ raise ValidationError, _("The URL %s is a broken link.") % field_data
+
def isValidUSState(field_data, all_data):
    "Checks that the given string is a valid two-letter U.S. state abbreviation"
    # The list also covers DC plus armed-forces (AA/AE/AP) and territory
    # (AS, FM, GU, MH, MP, PR, PW, VI) codes; the comparison is
    # case-insensitive via .upper().
    states = ['AA', 'AE', 'AK', 'AL', 'AP', 'AR', 'AS', 'AZ', 'CA', 'CO', 'CT', 'DC', 'DE', 'FL', 'FM', 'GA', 'GU', 'HI', 'IA', 'ID', 'IL', 'IN', 'KS', 'KY', 'LA', 'MA', 'MD', 'ME', 'MH', 'MI', 'MN', 'MO', 'MP', 'MS', 'MT', 'NC', 'ND', 'NE', 'NH', 'NJ', 'NM', 'NV', 'NY', 'OH', 'OK', 'OR', 'PA', 'PR', 'PW', 'RI', 'SC', 'SD', 'TN', 'TX', 'UT', 'VA', 'VI', 'VT', 'WA', 'WI', 'WV', 'WY']
    if field_data.upper() not in states:
        raise ValidationError, gettext("Enter a valid U.S. state abbreviation.")

def hasNoProfanities(field_data, all_data):
    """
    Checks that the given string has no profanities in it. This does a simple
    check for whether each profanity exists within the string, so 'fuck' will
    catch 'motherfucker' as well. Raises a ValidationError such as:
        Watch your mouth! The words "f--k" and "s--t" are not allowed here.
    """
    field_data = field_data.lower() # normalize
    words_seen = [w for w in settings.PROFANITIES_LIST if w in field_data]
    if words_seen:
        from django.utils.text import get_text_list
        plural = len(words_seen) > 1
        # Each word is displayed with its middle letters replaced by dashes.
        raise ValidationError, ngettext("Watch your mouth! The word %s is not allowed here.",
            "Watch your mouth! The words %s are not allowed here.", plural) % \
            get_text_list(['"%s%s%s"' % (i[0], '-'*(len(i)-2), i[-1]) for i in words_seen], 'and')
+
class AlwaysMatchesOtherField(object):
    # Validates that this field's value equals another field's value
    # (e.g. password confirmation).
    def __init__(self, other_field_name, error_message=None):
        self.other = other_field_name
        self.error_message = error_message or lazy_inter(gettext_lazy("This field must match the '%s' field."), self.other)
        # Run even when this field is optional (see the module docstring
        # for the always_test convention).
        self.always_test = True

    def __call__(self, field_data, all_data):
        if field_data != all_data[self.other]:
            raise ValidationError, self.error_message

class ValidateIfOtherFieldEquals(object):
    # Applies validator_list only when another field holds a given value.
    def __init__(self, other_field, other_value, validator_list):
        self.other_field, self.other_value = other_field, other_value
        self.validator_list = validator_list
        self.always_test = True

    def __call__(self, field_data, all_data):
        if all_data.has_key(self.other_field) and all_data[self.other_field] == self.other_value:
            for v in self.validator_list:
                v(field_data, all_data)

class RequiredIfOtherFieldNotGiven(object):
    # At least one of this field and the other field must be filled in.
    def __init__(self, other_field_name, error_message=gettext_lazy("Please enter something for at least one field.")):
        self.other, self.error_message = other_field_name, error_message
        self.always_test = True

    def __call__(self, field_data, all_data):
        if not all_data.get(self.other, False) and not field_data:
            raise ValidationError, self.error_message

class RequiredIfOtherFieldsGiven(object):
    # This field becomes required as soon as any of the listed fields is
    # filled in.
    def __init__(self, other_field_names, error_message=gettext_lazy("Please enter both fields or leave them both empty.")):
        self.other, self.error_message = other_field_names, error_message
        self.always_test = True

    def __call__(self, field_data, all_data):
        for field in self.other:
            if all_data.get(field, False) and not field_data:
                raise ValidationError, self.error_message

class RequiredIfOtherFieldGiven(RequiredIfOtherFieldsGiven):
    "Like RequiredIfOtherFieldsGiven, but takes a single field name instead of a list."
    def __init__(self, other_field_name, error_message=gettext_lazy("Please enter both fields or leave them both empty.")):
        RequiredIfOtherFieldsGiven.__init__(self, [other_field_name], error_message)
+
class RequiredIfOtherFieldEquals(object):
    # This field becomes required when another field holds a given value.
    def __init__(self, other_field, other_value, error_message=None, other_label=None):
        self.other_field = other_field
        self.other_value = other_value
        # other_label only affects the default message; it falls back to
        # the raw comparison value.
        other_label = other_label or other_value
        self.error_message = error_message or lazy_inter(gettext_lazy("This field must be given if %(field)s is %(value)s"), {
            'field': other_field, 'value': other_label})
        # Run even when this field is optional (module always_test convention).
        self.always_test = True

    def __call__(self, field_data, all_data):
        if all_data.has_key(self.other_field) and all_data[self.other_field] == self.other_value and not field_data:
            raise ValidationError(self.error_message)

class RequiredIfOtherFieldDoesNotEqual(object):
    # Inverse of RequiredIfOtherFieldEquals: required when the other field
    # holds anything BUT the given value.
    def __init__(self, other_field, other_value, other_label=None, error_message=None):
        self.other_field = other_field
        self.other_value = other_value
        other_label = other_label or other_value
        self.error_message = error_message or lazy_inter(gettext_lazy("This field must be given if %(field)s is not %(value)s"), {
            'field': other_field, 'value': other_label})
        self.always_test = True

    def __call__(self, field_data, all_data):
        if all_data.has_key(self.other_field) and all_data[self.other_field] != self.other_value and not field_data:
            raise ValidationError(self.error_message)
+
class IsLessThanOtherField(object):
    # NOTE(review): despite the name, this only fails for strictly greater
    # values, so it actually enforces "less than or equal to" the other
    # field; comparison uses the raw field values as given.
    def __init__(self, other_field_name, error_message):
        self.other, self.error_message = other_field_name, error_message

    def __call__(self, field_data, all_data):
        if field_data > all_data[self.other]:
            raise ValidationError, self.error_message
+
class UniqueAmongstFieldsWithPrefix(object):
    """
    Validates that this field's value differs from the value of every
    *other* field whose name starts with ``prefix``.
    """
    def __init__(self, field_name, prefix, error_message):
        self.field_name, self.prefix = field_name, prefix
        self.error_message = error_message or gettext_lazy("Duplicate values are not allowed.")

    def __call__(self, field_data, all_data):
        for field_name, value in all_data.items():
            # Bug fix: the original ignored ``prefix`` entirely and compared
            # against every other field in the form.
            if field_name != self.field_name and field_name.startswith(self.prefix) and value == field_data:
                raise ValidationError(self.error_message)
+
class NumberIsInRange(object):
    """
    Validator that tests if a value is in a range (inclusive). Either bound
    may be None, meaning unbounded on that side.
    """
    def __init__(self, lower=None, upper=None, error_message=''):
        self.lower, self.upper = lower, upper
        if not error_message:
            # Bug fix: compare against None rather than truthiness so a
            # bound of 0 still selects the right message.
            if lower is not None and upper is not None:
                self.error_message = gettext("This value must be between %(lower)s and %(upper)s.") % {'lower': lower, 'upper': upper}
            elif lower is not None:
                self.error_message = gettext("This value must be at least %s.") % lower
            elif upper is not None:
                self.error_message = gettext("This value must be no more than %s.") % upper
        else:
            self.error_message = error_message

    def __call__(self, field_data, all_data):
        # Try to make the value numeric. If this fails, we assume another
        # validator will catch the problem.
        try:
            val = float(field_data)
        except ValueError:
            return

        # Now validate. Bug fix: the original tested "self.lower and ..."
        # / "self.upper and ...", which silently skipped validation
        # whenever a bound was 0.
        if self.lower is not None and val < self.lower:
            raise ValidationError(self.error_message)
        if self.upper is not None and val > self.upper:
            raise ValidationError(self.error_message)
+
class IsAPowerOf(object):
    """
    >>> v = IsAPowerOf(2)
    >>> v(4, None)
    >>> v(8, None)
    >>> v(16, None)
    >>> v(17, None)
    django.core.validators.ValidationError: ['This value must be a power of 2.']
    """
    def __init__(self, power_of):
        self.power_of = power_of

    def __call__(self, field_data, all_data):
        from math import log
        # The log ratio is integral exactly when field_data is a power of
        # power_of. NOTE(review): float log may be imprecise for very large
        # inputs -- verify if exactness matters there.
        val = log(int(field_data)) / log(self.power_of)
        if val != int(val):
            raise ValidationError, gettext("This value must be a power of %s.") % self.power_of

class IsValidFloat(object):
    """Validates that a decimal string fits within max_digits / decimal_places."""
    def __init__(self, max_digits, decimal_places):
        self.max_digits, self.decimal_places = max_digits, decimal_places

    def __call__(self, field_data, all_data):
        data = str(field_data)
        try:
            float(data)
        except ValueError:
            raise ValidationError, gettext("Please enter a valid decimal number.")
        # Negative floats require more space to input.
        max_allowed_length = data.startswith('-') and (self.max_digits + 2) or (self.max_digits + 1)
        if len(data) > max_allowed_length:
            raise ValidationError, ngettext("Please enter a valid decimal number with at most %s total digit.",
                "Please enter a valid decimal number with at most %s total digits.", self.max_digits) % self.max_digits
        # Check the whole-number part, accounting for how many decimal
        # digits (if any) were actually entered.
        if (not '.' in data and len(data) > (max_allowed_length - self.decimal_places - 1)) or ('.' in data and len(data) > (max_allowed_length - (self.decimal_places - len(data.split('.')[1])))):
            raise ValidationError, ngettext( "Please enter a valid decimal number with a whole part of at most %s digit.",
                "Please enter a valid decimal number with a whole part of at most %s digits.", str(self.max_digits-self.decimal_places)) % str(self.max_digits-self.decimal_places)
        if '.' in data and len(data.split('.')[1]) > self.decimal_places:
            raise ValidationError, ngettext("Please enter a valid decimal number with at most %s decimal place.",
                "Please enter a valid decimal number with at most %s decimal places.", self.decimal_places) % self.decimal_places
+
class HasAllowableSize(object):
    """
    Checks that the file-upload field data is a certain size. min_size and
    max_size are measurements in bytes.
    """
    def __init__(self, min_size=None, max_size=None, min_error_message=None, max_error_message=None):
        self.min_size, self.max_size = min_size, max_size
        self.min_error_message = min_error_message or lazy_inter(gettext_lazy("Make sure your uploaded file is at least %s bytes big."), min_size)
        self.max_error_message = max_error_message or lazy_inter(gettext_lazy("Make sure your uploaded file is at most %s bytes big."), max_size)

    def __call__(self, field_data, all_data):
        try:
            # field_data is expected to be an upload dict with a 'content'
            # key; an unsubscriptable value means no file arrived.
            content = field_data['content']
        except TypeError:
            raise ValidationError, gettext_lazy("No file was submitted. Check the encoding type on the form.")
        if self.min_size is not None and len(content) < self.min_size:
            raise ValidationError, self.min_error_message
        if self.max_size is not None and len(content) > self.max_size:
            raise ValidationError, self.max_error_message
+
class MatchesRegularExpression(object):
    """
    Checks that the field matches the given regular-expression. The regex
    should be in string format, not already compiled.
    """
    def __init__(self, regexp, error_message=gettext_lazy("The format for this field is wrong.")):
        self.regexp = re.compile(regexp)
        self.error_message = error_message

    def __call__(self, field_data, all_data):
        if not self.regexp.search(field_data):
            raise ValidationError(self.error_message)

class AnyValidator(object):
    """
    This validator tries all given validators. If any one of them succeeds,
    validation passes. If none of them succeeds, the given message is thrown
    as a validation error. The message is rather unspecific, so it's best to
    specify one on instantiation.
    """
    def __init__(self, validator_list=None, error_message=gettext_lazy("This field is invalid.")):
        if validator_list is None: validator_list = []
        self.validator_list = validator_list
        self.error_message = error_message
        # Inherit always_test from any sub-validator so this wrapper runs
        # whenever one of them would (mere presence of the attribute counts;
        # see the module docstring).
        for v in validator_list:
            if hasattr(v, 'always_test'):
                self.always_test = True

    def __call__(self, field_data, all_data):
        for v in self.validator_list:
            try:
                v(field_data, all_data)
                return
            except ValidationError, e:
                # Failure of one validator just moves on to the next.
                pass
        raise ValidationError(self.error_message)
+
class URLMimeTypeCheck(object):
    "Checks that the provided URL points to a document with a listed mime type"
    # Both sub-exceptions derive from ValidationError so callers may catch
    # them specifically or as plain validation failures.
    class CouldNotRetrieve(ValidationError):
        pass
    class InvalidContentType(ValidationError):
        pass

    def __init__(self, mime_type_list):
        self.mime_type_list = mime_type_list

    def __call__(self, field_data, all_data):
        import urllib2
        try:
            isValidURL(field_data, all_data)
        except ValidationError:
            # URL-syntax errors are re-raised unchanged.
            raise
        try:
            # Network access: only the headers are inspected.
            info = urllib2.urlopen(field_data).info()
        except (urllib2.HTTPError, urllib2.URLError):
            raise URLMimeTypeCheck.CouldNotRetrieve, gettext("Could not retrieve anything from %s.") % field_data
        content_type = info['content-type']
        if content_type not in self.mime_type_list:
            raise URLMimeTypeCheck.InvalidContentType, gettext("The URL %(url)s returned the invalid Content-Type header '%(contenttype)s'.") % {
                'url': field_data, 'contenttype': content_type}
+
+class RelaxNGCompact(object):
+ "Validate against a Relax NG compact schema"
+ def __init__(self, schema_path, additional_root_element=None):
+ self.schema_path = schema_path
+ self.additional_root_element = additional_root_element
+
+ def __call__(self, field_data, all_data):
+ import os, tempfile
+ if self.additional_root_element:
+ field_data = '<%(are)s>%(data)s\n</%(are)s>' % {
+ 'are': self.additional_root_element,
+ 'data': field_data
+ }
+ filename = tempfile.mktemp() # Insecure, but nothing else worked
+ fp = open(filename, 'w')
+ fp.write(field_data)
+ fp.close()
+ if not os.path.exists(settings.JING_PATH):
+ raise Exception, "%s not found!" % settings.JING_PATH
+ p = os.popen('%s -c %s %s' % (settings.JING_PATH, self.schema_path, filename))
+ errors = [line.strip() for line in p.readlines()]
+ p.close()
+ os.unlink(filename)
+ display_errors = []
+ lines = field_data.split('\n')
+ for error in errors:
+ ignored, line, level, message = error.split(':', 3)
+ # Scrape the Jing error messages to reword them more nicely.
+ m = re.search(r'Expected "(.*?)" to terminate element starting on line (\d+)', message)
+ if m:
+ display_errors.append(_('Please close the unclosed %(tag)s tag from line %(line)s. (Line starts with "%(start)s".)') % \
+ {'tag':m.group(1).replace('/', ''), 'line':m.group(2), 'start':lines[int(m.group(2)) - 1][:30]})
+ continue
+ if message.strip() == 'text not allowed here':
+ display_errors.append(_('Some text starting on line %(line)s is not allowed in that context. (Line starts with "%(start)s".)') % \
+ {'line':line, 'start':lines[int(line) - 1][:30]})
+ continue
+ m = re.search(r'\s*attribute "(.*?)" not allowed at this point; ignored', message)
+ if m:
+ display_errors.append(_('"%(attr)s" on line %(line)s is an invalid attribute. (Line starts with "%(start)s".)') % \
+ {'attr':m.group(1), 'line':line, 'start':lines[int(line) - 1][:30]})
+ continue
+ m = re.search(r'\s*unknown element "(.*?)"', message)
+ if m:
+ display_errors.append(_('"<%(tag)s>" on line %(line)s is an invalid tag. (Line starts with "%(start)s".)') % \
+ {'tag':m.group(1), 'line':line, 'start':lines[int(line) - 1][:30]})
+ continue
+ if message.strip() == 'required attributes missing':
+ display_errors.append(_('A tag on line %(line)s is missing one or more required attributes. (Line starts with "%(start)s".)') % \
+ {'line':line, 'start':lines[int(line) - 1][:30]})
+ continue
+ m = re.search(r'\s*bad value for attribute "(.*?)"', message)
+ if m:
+ display_errors.append(_('The "%(attr)s" attribute on line %(line)s has an invalid value. (Line starts with "%(start)s".)') % \
+ {'attr':m.group(1), 'line':line, 'start':lines[int(line) - 1][:30]})
+ continue
+ # Failing all those checks, use the default error message.
+ display_error = 'Line %s: %s [%s]' % (line, message, level.strip())
+ display_errors.append(display_error)
+ if len(display_errors) > 0:
+ raise ValidationError, display_errors
diff --git a/google_appengine/lib/django/django/core/xheaders.py b/google_appengine/lib/django/django/core/xheaders.py
new file mode 100755
index 0000000..3beb930
--- /dev/null
+++ b/google_appengine/lib/django/django/core/xheaders.py
@@ -0,0 +1,22 @@
+"""
Pages in Django are served up with custom HTTP headers containing useful
+information about those pages -- namely, the content type and object ID.
+
+This module contains utility functions for retrieving and doing interesting
+things with these special "X-Headers" (so called because the HTTP spec demands
that custom headers are prefixed with "X-").
+
+Next time you're at slashdot.org, watch out for X-Fry and X-Bender. :)
+"""
+
def populate_xheaders(request, response, model, object_id):
    """
    Add the "X-Object-Type" and "X-Object-Id" headers to *response* for the
    given model and object_id, but only when the requester is trusted: its
    IP address is listed in the INTERNAL_IPS setting, or it is an
    authenticated staff user.
    """
    from django.conf import settings
    # Short-circuits: the user checks only run for non-internal addresses.
    trusted = request.META.get('REMOTE_ADDR') in settings.INTERNAL_IPS or (
        hasattr(request, 'user')
        and request.user.is_authenticated()
        and request.user.is_staff)
    if not trusted:
        return
    response['X-Object-Type'] = "%s.%s" % (model._meta.app_label, model._meta.object_name.lower())
    response['X-Object-Id'] = str(object_id)