# Copyright 2011 Google Inc. All Rights Reserved.
#
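"""Django views for the job dashboard.

Jobs, job groups, logs and machine lists are fetched over XML-RPC from a
local server, unpickled and rendered with Django templates.
"""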
__author__ = 'kbaclawski@google.com (Krystian Baclawski)'

from collections import namedtuple
import glob
import gzip
import os.path
import pickle
import time
import xmlrpclib

from django import forms
from django.http import HttpResponseRedirect
from django.shortcuts import render_to_response
from django.template import Context
from django.views import static

Link = namedtuple('Link', 'href name')


def GetServerConnection():
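  """Returns an XML-RPC proxy to the job server.

  Note: the server address is hard-coded to localhost:8000.
  """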
  return xmlrpclib.Server('http://localhost:8000')


def MakeDefaultContext(*args):
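  """Builds a template context with the navigation links shared by all pages.

  Any dictionaries passed as arguments are merged into the context.
  """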
  context = Context({'links': [
      Link('/job-group', 'Job Groups'), Link('/machine', 'Machines')
  ]})

  for arg in args:
    context.update(arg)

  return context


class JobInfo(object):
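  """Wraps a single job unpickled from the XML-RPC server."""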

  def __init__(self, job_id):
    self._job = pickle.loads(GetServerConnection().GetJob(job_id))

  def GetAttributes(self):
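    """Returns the job's attributes grouped under 'text', 'link' and 'code' keys."""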
    job = self._job

    group = [Link('/job-group/%d' % job.group.id, job.group.label)]

    predecessors = [Link('/job/%d' % pred.id, pred.label)
                    for pred in job.predecessors]

    successors = [Link('/job/%d' % succ.id, succ.label)
                  for succ in job.successors]

    machines = [Link('/machine/%s' % mach.hostname, mach.hostname)
                for mach in job.machines]

    logs = [Link('/job/%d/log' % job.id, 'Log')]

    commands = enumerate(job.PrettyFormatCommand().split('\n'), start=1)

    return {'text': [('Label', job.label), ('Directory', job.work_dir)],
            'link': [('Group', group), ('Predecessors', predecessors),
                     ('Successors', successors), ('Machines', machines),
                     ('Logs', logs)],
            'code': [('Command', commands)]}

  def GetTimeline(self):
    return [{'started': evlog.GetTimeStartedFormatted(),
             'state_from': evlog.event.from_,
             'state_to': evlog.event.to_,
             'elapsed': evlog.GetTimeElapsedRounded()}
            for evlog in self._job.timeline.GetTransitionEventHistory()]

  def GetLog(self):
    """Returns the job log as a list of (datetime, stream, message) tuples.

    An empty list is returned if the gzipped log file cannot be opened.
    """
    log_path = os.path.join(self._job.logs_dir,
                            '%s.gz' % self._job.log_filename_prefix)

    try:
      log = gzip.open(log_path, 'r')
    except IOError:
      # The log file may not exist or be readable yet.
      return []

    # There's a good chance that the file is not closed yet, so EOF handling
    # and CRC calculation would fail; monkey patch the _read_eof method to
    # skip both.
    log._read_eof = lambda: None

    def SplitLine(line):
      # Each line has the form '<datetime> <stream>: <message>'.
      prefix, msg = line.split(': ', 1)
      datetime, stream = prefix.rsplit(' ', 1)

      return datetime, stream, msg

    try:
      return map(SplitLine, log.readlines())
    finally:
      log.close()


class JobGroupInfo(object):
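  """Wraps a single job group unpickled from the XML-RPC server."""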

  def __init__(self, job_group_id):
    self._job_group = pickle.loads(GetServerConnection().GetJobGroup(
        job_group_id))

  def GetAttributes(self):
    group = self._job_group

    home_dir = [Link('/job-group/%d/files/' % group.id, group.home_dir)]

    return {'text': [('Label', group.label),
                     ('Time submitted', time.ctime(group.time_submitted)),
                     ('State', group.status),
                     ('Cleanup on completion', group.cleanup_on_completion),
                     ('Cleanup on failure', group.cleanup_on_failure)],
            'link': [('Directory', home_dir)]}

  def _GetJobStatus(self, job):
    status_map = {'SUCCEEDED': 'success', 'FAILED': 'failure'}
    return status_map.get(str(job.status), None)

  def GetJobList(self):
    return [{'id': job.id,
             'label': job.label,
             'state': job.status,
             'status': self._GetJobStatus(job),
             'elapsed': job.timeline.GetTotalTime()}
            for job in self._job_group.jobs]

  def GetHomeDirectory(self):
    return self._job_group.home_dir

  def GetReportList(self):
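    """Returns the contents of per-job report.html files, oldest first.

    Jobs whose report is missing or unreadable are skipped.
    """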
    job_dir_pattern = os.path.join(self._job_group.home_dir, 'job-*')

    filenames = []

    for job_dir in glob.glob(job_dir_pattern):
      filename = os.path.join(job_dir, 'report.html')

      if os.access(filename, os.F_OK):
        filenames.append(filename)

    reports = []

    for filename in sorted(filenames, key=lambda f: os.stat(f).st_ctime):
      try:
        with open(filename, 'r') as report:
          reports.append(report.read())
      except IOError:
        pass

    return reports


class JobGroupListInfo(object):
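  """Wraps the list of all job groups unpickled from the XML-RPC server."""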

  def __init__(self):
    self._all_job_groups = pickle.loads(GetServerConnection().GetAllJobGroups())

  def _GetJobGroupState(self, group):
    return str(group.status)

  def _GetJobGroupStatus(self, group):
    status_map = {'SUCCEEDED': 'success', 'FAILED': 'failure'}
    return status_map.get(self._GetJobGroupState(group), None)

  def GetList(self):
    return [{'id': group.id,
             'label': group.label,
             'submitted': time.ctime(group.time_submitted),
             'state': self._GetJobGroupState(group),
             'status': self._GetJobGroupStatus(group)}
            for group in self._all_job_groups]

  def GetLabelList(self):
    return sorted(set(group.label for group in self._all_job_groups))


def JobPageHandler(request, job_id):
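  """Renders the detail page of a single job."""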
  job = JobInfo(int(job_id))

  ctx = MakeDefaultContext({
      'job_id': job_id,
      'attributes': job.GetAttributes(),
      'timeline': job.GetTimeline()
  })

  return render_to_response('job.html', ctx)


def LogPageHandler(request, job_id):
  job = JobInfo(int(job_id))

  ctx = MakeDefaultContext({'job_id': job_id, 'log_lines': job.GetLog()})

  return render_to_response('job_log.html', ctx)


def JobGroupPageHandler(request, job_group_id):
  group = JobGroupInfo(int(job_group_id))

  ctx = MakeDefaultContext({
      'group_id': job_group_id,
      'attributes': group.GetAttributes(),
      'job_list': group.GetJobList(),
      'reports': group.GetReportList()
  })

  return render_to_response('job_group.html', ctx)


def JobGroupFilesPageHandler(request, job_group_id, path):
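  """Serves a file from the job group's home directory.

  Uses django.views.static.serve, which is intended for development rather
  than production use.
  """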
  group = JobGroupInfo(int(job_group_id))

  return static.serve(request,
                      path,
                      document_root=group.GetHomeDirectory(),
                      show_indexes=True)


class FilterJobGroupsForm(forms.Form):
  label = forms.ChoiceField(label='Filter by label:', required=False)


def JobGroupListPageHandler(request):
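  """Renders the list of job groups, optionally filtered by label.

  The filter is submitted via POST; the '*' choice disables filtering.
  """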
  groups = JobGroupListInfo()
  group_list = groups.GetList()

  field = FilterJobGroupsForm.base_fields['label']
  field.choices = [('*', '--- no filtering ---')]
  field.choices.extend([(label, label) for label in groups.GetLabelList()])

  if request.method == 'POST':
    form = FilterJobGroupsForm(request.POST)

    if form.is_valid():
      label = form.cleaned_data['label']

      if label != '*':
        group_list = [group for group in group_list if group['label'] == label]
  else:
    form = FilterJobGroupsForm(initial={'label': '*'})

  ctx = MakeDefaultContext({'filter': form, 'groups': group_list})

  return render_to_response('job_group_list.html', ctx)


def MachineListPageHandler(request):
  machine_list = pickle.loads(GetServerConnection().GetMachineList())

  return render_to_response('machine_list.html',
                            MakeDefaultContext({'machines': machine_list}))


def DefaultPageHandler(request):
  return HttpResponseRedirect('/job-group')