Diffstat (limited to 'google-appengine/google/appengine/tools/appcfg.py')
-rwxr-xr-x  google-appengine/google/appengine/tools/appcfg.py  171
1 file changed, 133 insertions(+), 38 deletions(-)
diff --git a/google-appengine/google/appengine/tools/appcfg.py b/google-appengine/google/appengine/tools/appcfg.py
index c6aacc2..4f828ab 100755
--- a/google-appengine/google/appengine/tools/appcfg.py
+++ b/google-appengine/google/appengine/tools/appcfg.py
@@ -204,12 +204,14 @@ def GetVersionObject(isfile=os.path.isfile, open_fn=open):
return version
-def RetryWithBackoff(initial_delay, backoff_factor, max_tries, callable_func):
+def RetryWithBackoff(initial_delay, backoff_factor, max_delay, max_tries,
+ callable_func):
"""Calls a function multiple times, backing off more and more each time.
Args:
initial_delay: Initial delay after first try, in seconds.
backoff_factor: Delay will be multiplied by this factor after each try.
+ max_delay: Maximum delay between tries, in seconds; the delay is capped at this value.
max_tries: Maximum number of tries.
callable_func: The method to call, will pass no arguments.
@@ -220,12 +222,18 @@ def RetryWithBackoff(initial_delay, backoff_factor, max_tries, callable_func):
Whatever the function raises--an exception will immediately stop retries.
"""
delay = initial_delay
- while not callable_func() and max_tries > 0:
+ if callable_func():
+ return True
+ while max_tries > 1:
StatusUpdate('Will check again in %s seconds.' % delay)
time.sleep(delay)
delay *= backoff_factor
+ if max_delay and delay > max_delay:
+ delay = max_delay
max_tries -= 1
- return max_tries > 0
+ if callable_func():
+ return True
+ return False
def _VersionList(release):
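The reworked loop above checks the callable once up front, then caps the exponential delay at max_delay before each later try. A minimal standalone sketch of the same pattern (retry_with_backoff and the print are stand-ins for the SDK's function and StatusUpdate):

import time

def retry_with_backoff(initial_delay, backoff_factor, max_delay, max_tries, func):
  # First attempt costs no delay at all.
  if func():
    return True
  delay = initial_delay
  while max_tries > 1:
    print('Will check again in %s seconds.' % delay)
    time.sleep(delay)
    # Grow the delay geometrically, but never past the cap.
    delay = min(delay * backoff_factor, max_delay)
    max_tries -= 1
    if func():
      return True
  return False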
@@ -603,6 +611,11 @@ class DosEntryUpload(object):
def DoUpload(self):
"""Uploads the dos entries."""
+ StatusUpdate('Uploading DOS entries.')
+ self.server.Send('/api/dos/update',
+ app_id=self.config.application,
+ version=self.config.version,
+ payload=self.dos.ToYAML())
class IndexOperation(object):
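The new DoUpload body is a single round trip through the RPC server, mirroring the other uploaders. A hedged sketch of the call, with a fake server object standing in for the SDK's RPC server and an illustrative dos.yaml payload:

class FakeRpcServer(object):
  """Stand-in for the SDK's RPC server; just records what would be POSTed."""

  def Send(self, url, payload=None, **kwds):
    print('POST %s %r' % (url, kwds))
    print(payload)
    return ''

server = FakeRpcServer()
server.Send('/api/dos/update',
            app_id='my-app',    # hypothetical application id
            version='1',        # hypothetical version
            payload='blacklist:\n- subnet: 1.2.3.4\n')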
@@ -764,7 +777,8 @@ class LogsRequester(object):
"""Provide facilities to export request logs."""
def __init__(self, server, config, output_file,
- num_days, append, severity, now, vhost, include_vhost):
+ num_days, append, severity, end, vhost, include_vhost,
+ time_func=time.time):
"""Constructor.
Args:
@@ -775,9 +789,10 @@ class LogsRequester(object):
num_days: Number of days worth of logs to export; 0 for all available.
append: True if appending to an existing file.
severity: App log severity to request (0-4); None for no app logs.
- now: POSIX timestamp used for calculating valid dates for num_days.
+ end: date object representing last day of logs to return.
vhost: The virtual host of log messages to get. None for all hosts.
include_vhost: If true, the virtual host is included in log messages.
+ time_func: Function that returns a POSIX timestamp representing now (for testing).
"""
self.server = server
self.config = config
@@ -793,15 +808,18 @@ class LogsRequester(object):
if self.append:
self.sentinel = FindSentinel(self.output_file)
self.write_mode = 'a'
+
+ self.skip_until = False
+ now = PacificDate(time_func())
+ if end < now:
+ self.skip_until = end
+ else:
+ end = now
+
self.valid_dates = None
if self.num_days:
- patterns = []
- now = PacificTime(now)
- for i in xrange(self.num_days):
- then = time.gmtime(now - 24*3600 * i)
- patterns.append(re.escape(time.strftime('%d/%m/%Y', then)))
- patterns.append(re.escape(time.strftime('%d/%b/%Y', then)))
- self.valid_dates = re.compile(r'[^[]+\[(' + '|'.join(patterns) + r'):')
+ start = end - datetime.timedelta(self.num_days - 1)
+ self.valid_dates = (start, end)
def DownloadLogs(self):
"""Download the requested logs.
@@ -813,13 +831,14 @@ class LogsRequester(object):
StatusUpdate('Downloading request logs for %s %s.' %
(self.config.application, self.version_id))
tf = tempfile.TemporaryFile()
- offset = None
+ last_offset = None
try:
while True:
try:
- offset = self.RequestLogLines(tf, offset)
- if not offset:
+ new_offset = self.RequestLogLines(tf, last_offset)
+ if not new_offset or new_offset == last_offset:
break
+ last_offset = new_offset
except KeyboardInterrupt:
StatusUpdate('Keyboard interrupt; saving data downloaded so far.')
break
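Keeping last_offset and stopping when the server echoes the same offset back guards against an endless loop if pagination stops advancing. A minimal sketch of the guard, with a fake fetch standing in for RequestLogLines:

def fetch(offset):
  # Returns the next offset, or the same one when the server is stuck.
  pages = {None: 'a', 'a': 'b', 'b': 'b'}
  return pages[offset]

last_offset = None
while True:
  new_offset = fetch(last_offset)
  if not new_offset or new_offset == last_offset:
    break    # finished, or the server stopped advancing
  last_offset = new_offset
print(last_offset)    # -> 'b'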
@@ -858,7 +877,7 @@ class LogsRequester(object):
logging.info('Request with offset %r.', offset)
kwds = {'app_id': self.config.application,
'version': self.version_id,
- 'limit': 100,
+ 'limit': 1000,
}
if offset:
kwds['offset'] = offset
@@ -882,14 +901,27 @@ class LogsRequester(object):
del lines[-1]
valid_dates = self.valid_dates
sentinel = self.sentinel
+ skip_until = self.skip_until
len_sentinel = None
if sentinel:
len_sentinel = len(sentinel)
for line in lines:
- if ((sentinel and
- line.startswith(sentinel) and
- line[len_sentinel : len_sentinel+1] in ('', '\0')) or
- (valid_dates and not valid_dates.match(line))):
+ if (sentinel and
+ line.startswith(sentinel) and
+ line[len_sentinel : len_sentinel+1] in ('', '\0')):
+ return None
+
+ linedate = DateOfLogLine(line)
+ if not linedate:
+ continue
+
+ if skip_until:
+ if linedate > skip_until:
+ continue
+ else:
+ self.skip_until = skip_until = False
+
+ if valid_dates and not valid_dates[0] <= linedate <= valid_dates[1]:
return None
tf.write(line + '\n')
if not lines:
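The per-line filter now works on parsed dates: lines newer than the requested end day are skipped while skip_until is set, and the first line outside the valid window ends the download (the real method signals that by returning None). A condensed sketch over (date, text) pairs, newest first as the logs API returns them:

import datetime

def filter_lines(dated_lines, skip_until, valid_dates):
  kept = []
  for linedate, line in dated_lines:
    if skip_until:
      if linedate > skip_until:
        continue            # still newer than the requested end date
      skip_until = None     # reached the window; stop skipping
    if valid_dates and not valid_dates[0] <= linedate <= valid_dates[1]:
      break                 # ran past the oldest wanted day
    kept.append(line)
  return kept

d = datetime.date
print(filter_lines(
    [(d(2011, 2, 16), 'too new'), (d(2011, 2, 14), 'kept'),
     (d(2011, 2, 13), 'kept too'), (d(2011, 2, 7), 'too old')],
    skip_until=d(2011, 2, 14),
    valid_dates=(d(2011, 2, 8), d(2011, 2, 14))))
# -> ['kept', 'kept too']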
@@ -897,6 +929,35 @@ class LogsRequester(object):
return offset
+def DateOfLogLine(line):
+ """Returns a date object representing the log line's timestamp.
+
+ Args:
+ line: a log line string.
+ Returns:
+ A date object representing the timestamp or None if parsing fails.
+ """
+ m = re.compile(r'[^[]+\[(\d+/[A-Za-z]+/\d+):[^\d]*').match(line)
+ if not m:
+ return None
+ try:
+ return datetime.date(*time.strptime(m.group(1), '%d/%b/%Y')[:3])
+ except ValueError:
+ return None
+
+
+def PacificDate(now):
+ """For a UTC timestamp, return the date in the US/Pacific timezone.
+
+ Args:
+ now: A posix timestamp giving current UTC time.
+
+ Returns:
+ A date object representing what day it is in the US/Pacific timezone.
+ """
+ return datetime.date(*time.gmtime(PacificTime(now))[:3])
+
+
def PacificTime(now):
"""Helper to return the number of seconds between UTC and Pacific time.
@@ -1314,6 +1375,38 @@ class AppVersionUpload(object):
else:
self.blob_batcher.AddToBatch(path, payload, mime_type)
+ def Precompile(self):
+ """Handle bytecode precompilation."""
+ StatusUpdate('Precompilation starting.')
+ files = []
+ while True:
+ if files:
+ StatusUpdate('Precompilation: %d files left.' % len(files))
+ files = self.PrecompileBatch(files)
+ if not files:
+ break
+ StatusUpdate('Precompilation completed.')
+
+ def PrecompileBatch(self, files):
+ """Precompile a batch of files.
+
+ Args:
+ files: Either an empty list (for the initial request) or a list
+ of files to be precompiled.
+
+ Returns:
+ Either an empty list (if no more files need to be precompiled)
+ or a list of files to be precompiled subsequently.
+ """
+ payload = LIST_DELIMITER.join(files)
+ response = self.server.Send('/api/appversion/precompile',
+ app_id=self.app_id,
+ version=self.version,
+ payload=payload)
+ if not response:
+ return []
+ return response.split(LIST_DELIMITER)
+
def Commit(self):
"""Commits the transaction, making the new app version available.
@@ -1331,7 +1424,7 @@ class AppVersionUpload(object):
try:
self.Deploy()
- if not RetryWithBackoff(1, 2, 8, self.IsReady):
+ if not RetryWithBackoff(1, 2, 60, 20, self.IsReady):
logging.warning('Version still not ready to serve, aborting.')
raise Exception('Version not ready.')
self.StartServing()
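With the new arguments (initial delay 1 s, factor 2, cap 60 s, 20 tries) the worst-case wait is easy to compute:

delays, delay = [], 1
for _ in range(19):    # 20 tries leave at most 19 sleeps between them
  delays.append(delay)
  delay = min(delay * 2, 60)
print(delays)          # 1, 2, 4, 8, 16, 32, then 60 thirteen times
print(sum(delays))     # 843 seconds, roughly 14 minutes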
@@ -1457,6 +1550,10 @@ class AppVersionUpload(object):
self.blob_batcher.Flush()
StatusUpdate('Uploaded %d files and blobs' % num_files)
+ if (self.config.derived_file_type and
+ appinfo.PYTHON_PRECOMPILED in self.config.derived_file_type):
+ self.Precompile()
+
self.Commit()
except KeyboardInterrupt:
@@ -2121,7 +2218,7 @@ class AppCfgApp(object):
try:
end_date = self._ParseEndDate(self.options.end_date)
- except ValueError:
+ except (TypeError, ValueError):
self.parser.error('End date must be in the format YYYY-MM-DD.')
basepath = self.args[0]
@@ -2137,20 +2234,19 @@ class AppCfgApp(object):
logs_requester.DownloadLogs()
def _ParseEndDate(self, date, time_func=time.time):
- """Translates a user-readable end date to a POSIX timestamp.
+ """Translates an ISO 8601 date to a date object.
Args:
- date: A utc date string as YYYY-MM-DD.
+ date: A date string as YYYY-MM-DD.
time_func: time.time() function for testing.
Returns:
- A POSIX timestamp representing the last moment of that day.
- If no date is given, returns a timestamp representing now.
+ A date object representing the last day of logs to get.
+ If no date is given, returns today in the US/Pacific timezone.
"""
if not date:
- return time_func()
- struct_time = time.strptime('%s' % date, '%Y-%m-%d')
- return calendar.timegm(struct_time) + 86400
+ return PacificDate(time_func())
+ return datetime.date(*[int(i) for i in date.split('-')])
def _RequestLogsOptions(self, parser):
"""Adds request_logs-specific options to 'parser'.
@@ -2161,7 +2257,7 @@ class AppCfgApp(object):
parser.add_option('-n', '--num_days', type='int', dest='num_days',
action='store', default=None,
help='Number of days worth of log data to get. '
- 'The cut-off point is midnight UTC. '
+ 'The cut-off point is midnight US/Pacific. '
'Use 0 to get all available logs. '
'Default is 1, unless --append is also given; '
'then the default is 0.')
@@ -2526,14 +2622,13 @@ in production as well as restart any indexes that were not completed."""),
The 'update_queue' command will update any new, removed or changed task queue
definitions from the optional queue.yaml file."""),
-
-
-
-
-
-
-
-
+ 'update_dos': Action(
+ function='UpdateDos',
+ usage='%prog [options] update_dos <directory>',
+ short_desc='Update application dos definitions.',
+ long_desc="""
+The 'update_dos' command will update any new, removed or changed dos
+definitions from the optional dos.yaml file."""),
'vacuum_indexes': Action(
function='VacuumIndexes',