base_job.py revision d656d56473f50b9c1a9f5e2b2f5a9472181ee342
1import os, copy, logging, errno, fcntl, time, re, weakref, traceback
2import tarfile
3import cPickle as pickle
4from autotest_lib.client.common_lib import autotemp, error, log
5
6
class job_directory(object):
    """Represents a job.*dir directory."""


    class JobDirectoryException(error.AutotestError):
        """Generic job_directory exception superclass."""


    class MissingDirectoryException(JobDirectoryException):
        """Raised when a directory required by the job does not exist."""
        def __init__(self, path):
            Exception.__init__(self, 'Directory %s does not exist' % path)


    class UncreatableDirectoryException(JobDirectoryException):
        """Raised when a directory required by the job is missing and cannot
        be created."""
        def __init__(self, path, error):
            # 'error' is the original OSError raised by os.makedirs; it is
            # folded into the message so the root cause is not lost
            msg = 'Creation of directory %s failed with exception %s'
            msg %= (path, error)
            Exception.__init__(self, msg)


    class UnwritableDirectoryException(JobDirectoryException):
        """Raised when a writable directory required by the job exists
        but is not writable."""
        def __init__(self, path):
            msg = 'Directory %s exists but is not writable' % path
            Exception.__init__(self, msg)


    def __init__(self, path, is_writable=False):
        """
        Instantiate a job directory.

        @param path: The path of the directory. If None a temporary directory
            will be created instead.
        @param is_writable: If True, expect the directory to be writable.

        @raise MissingDirectoryException: raised if is_writable=False and the
            directory does not exist (including the path=None case, where
            there is no existing directory to fall back on).
        @raise UnwritableDirectoryException: raised if is_writable=True and
            the directory exists but is not writable.
        @raise UncreatableDirectoryException: raised if is_writable=True, the
            directory does not exist and it cannot be created.
        """
        if path is None:
            if is_writable:
                # no path given but writability required: back this job
                # directory with a fresh temporary directory
                self._tempdir = autotemp.tempdir(unique_id='autotest')
                self.path = self._tempdir.name
            else:
                raise self.MissingDirectoryException(path)
        else:
            self._tempdir = None
            self.path = path
        self._ensure_valid(is_writable)


    def _ensure_valid(self, is_writable):
        """
        Ensure that self.path is a valid directory.

        Will check if the directory at self.path exists and, when
        is_writable=True, that it is writable, creating it first if
        necessary. Creation will still fail if the path is rooted in a
        non-writable directory, or if a file already exists at the given
        location.

        @param is_writable A boolean indicating that the directory should
            not only exist, but also be writable.

        @raises MissingDirectoryException raised if is_writable=False and the
            directory does not exist.
        @raises UnwritableDirectoryException raised if is_writable=True and
            the directory is not writable.
        @raises UncreatableDirectoryException raised if is_writable=True, the
            directory does not exist and it cannot be created
        """
        # ensure the directory exists
        if is_writable:
            try:
                os.makedirs(self.path)
            except OSError, e:
                # EEXIST on an already-present directory is fine (the
                # makedirs call is just best-effort); anything else is fatal
                if e.errno != errno.EEXIST or not os.path.isdir(self.path):
                    raise self.UncreatableDirectoryException(self.path, e)
        elif not os.path.isdir(self.path):
            raise self.MissingDirectoryException(self.path)

        # if is_writable=True, also check that the directory is writable
        if is_writable and not os.access(self.path, os.W_OK):
            raise self.UnwritableDirectoryException(self.path)


    @staticmethod
    def property_factory(attribute):
        """
        Create a job.*dir -> job._*dir.path property accessor.

        @param attribute A string with the name of the attribute this is
            exposed as. '_'+attribute must then be attribute that holds
            either None or a job_directory-like object.

        @returns A read-only property object that exposes a job_directory path
        """
        @property
        def dir_property(self):
            underlying_attribute = getattr(self, '_' + attribute)
            if underlying_attribute is None:
                return None
            else:
                return underlying_attribute.path
        return dir_property
119
120
121# decorator for use with job_state methods
def with_backing_lock(method):
    """Decorate a job_state method with a backing-file lock/unlock cycle.

    The wrapper takes the backing file lock before invoking the method and
    releases it afterwards. If an enclosing call already holds the lock
    (self._backing_file_lock is set), the lock is left untouched so that
    nested decorated calls do not release it prematurely.
    """
    def locked_call(self, *args, **dargs):
        must_lock = self._backing_file_lock is None
        if must_lock:
            self._lock_backing_file()
        try:
            return method(self, *args, **dargs)
        finally:
            if must_lock:
                self._unlock_backing_file()
    locked_call.__name__ = method.__name__
    locked_call.__doc__ = method.__doc__
    return locked_call
141
142
143# decorator for use with job_state methods
def with_backing_file(method):
    """Decorate a job_state method with a lock-read-call-write-unlock cycle.

    Any method that reads or writes persistent state should be wrapped by
    this decorator: the in-memory state is refreshed from the backing file
    before the call and flushed back to it afterwards, with the whole
    sequence running under the backing file lock.
    """
    @with_backing_lock
    def synced_call(self, *args, **dargs):
        self._read_from_backing_file()
        try:
            return method(self, *args, **dargs)
        finally:
            self._write_to_backing_file()
    synced_call.__name__ = method.__name__
    synced_call.__doc__ = method.__doc__
    return synced_call
163
164
165
class job_state(object):
    """A class for managing explicit job and user state, optionally persistent.

    The class allows you to save state by name (like a dictionary). Any state
    stored in this class should be picklable and deep copyable. While this is
    not enforced it is recommended that only valid python identifiers be used
    as names. Additionally, the namespace 'stateful_property' is used for
    storing the value associated with properties constructed using the
    property_factory method.
    """

    # sentinel used by get() to distinguish "no default supplied" from an
    # explicit default value of None
    NO_DEFAULT = object()
    PICKLE_PROTOCOL = 2  # highest protocol available in python 2.4


    def __init__(self):
        """Initialize the job state."""
        # in-memory state, laid out as {namespace: {name: value}}
        self._state = {}
        # path to the persistent backing file, or None if syncing is disabled
        self._backing_file = None
        # becomes True after the backing file has been read at least once
        self._backing_file_initialized = False
        # open file object holding the flock while a locked section runs
        self._backing_file_lock = None


    def _lock_backing_file(self):
        """Acquire a lock on the backing file."""
        if self._backing_file:
            # append mode creates the file if it does not exist yet
            self._backing_file_lock = open(self._backing_file, 'a')
            fcntl.flock(self._backing_file_lock, fcntl.LOCK_EX)


    def _unlock_backing_file(self):
        """Release a lock on the backing file."""
        if self._backing_file_lock:
            fcntl.flock(self._backing_file_lock, fcntl.LOCK_UN)
            self._backing_file_lock.close()
            self._backing_file_lock = None


    def read_from_file(self, file_path, merge=True):
        """Read in any state from the file at file_path.

        When merge=True, any state specified only in-memory will be preserved.
        Any state specified on-disk will be set in-memory, even if an in-memory
        setting already exists.

        @param file_path: The path where the state should be read from. It must
            exist but it can be empty.
        @param merge: If true, merge the on-disk state with the in-memory
            state. If false, replace the in-memory state with the on-disk
            state.

        @warning: This method is intentionally concurrency-unsafe. It makes no
            attempt to control concurrent access to the file at file_path.
        """

        # we can assume that the file exists
        if os.path.getsize(file_path) == 0:
            # an empty file cannot be unpickled; treat it as empty state
            on_disk_state = {}
        else:
            on_disk_state = pickle.load(open(file_path))

        if merge:
            # merge the on-disk state with the in-memory state, with the
            # on-disk values winning any conflict
            for namespace, namespace_dict in on_disk_state.iteritems():
                in_memory_namespace = self._state.setdefault(namespace, {})
                for name, value in namespace_dict.iteritems():
                    if name in in_memory_namespace:
                        if in_memory_namespace[name] != value:
                            logging.info('Persistent value of %s.%s from %s '
                                         'overridding existing in-memory '
                                         'value', namespace, name, file_path)
                            in_memory_namespace[name] = value
                        else:
                            logging.debug('Value of %s.%s is unchanged, '
                                          'skipping import', namespace, name)
                    else:
                        logging.debug('Importing %s.%s from state file %s',
                                      namespace, name, file_path)
                        in_memory_namespace[name] = value
        else:
            # just replace the in-memory state with the on-disk state
            self._state = on_disk_state

        # lock the backing file before we refresh it; the decorator is
        # applied on the fly because this method itself must stay undecorated
        # (it is called from inside _read_from_backing_file)
        with_backing_lock(self.__class__._write_to_backing_file)(self)


    def write_to_file(self, file_path):
        """Write out the current state to the given path.

        @param file_path: The path where the state should be written out to.
            Must be writable.

        @warning: This method is intentionally concurrency-unsafe. It makes no
            attempt to control concurrent access to the file at file_path.
        """
        outfile = open(file_path, 'w')
        try:
            pickle.dump(self._state, outfile, self.PICKLE_PROTOCOL)
        finally:
            outfile.close()


    def _read_from_backing_file(self):
        """Refresh the current state from the backing file.

        If the backing file has never been read before (indicated by checking
        self._backing_file_initialized) it will merge the file with the
        in-memory state, rather than overwriting it.
        """
        if self._backing_file:
            merge_backing_file = not self._backing_file_initialized
            self.read_from_file(self._backing_file, merge=merge_backing_file)
            self._backing_file_initialized = True


    def _write_to_backing_file(self):
        """Flush the current state to the backing file."""
        if self._backing_file:
            self.write_to_file(self._backing_file)


    @with_backing_file
    def _synchronize_backing_file(self):
        """Synchronizes the contents of the in-memory and on-disk state."""
        # state is implicitly synchronized in _with_backing_file methods
        pass


    def set_backing_file(self, file_path):
        """Change the path used as the backing file for the persistent state.

        When a new backing file is specified if a file already exists then
        its contents will be added into the current state, with conflicts
        between the file and memory being resolved in favor of the file
        contents. The file will then be kept in sync with the (combined)
        in-memory state. The syncing can be disabled by setting this to None.

        @param file_path: A path on the filesystem that can be read from and
            written to, or None to turn off the backing store.
        """
        # first sync flushes the current state to the old backing file
        self._synchronize_backing_file()
        self._backing_file = file_path
        self._backing_file_initialized = False
        # second sync merge-loads the new file and writes the result back
        self._synchronize_backing_file()


    @with_backing_file
    def get(self, namespace, name, default=NO_DEFAULT):
        """Returns the value associated with a particular name.

        @param namespace: The namespace that the property should be stored in.
        @param name: The name the value was saved with.
        @param default: A default value to return if no state is currently
            associated with var.

        @return: A deep copy of the value associated with name. Note that this
            explicitly returns a deep copy to avoid problems with mutable
            values; mutations are not persisted or shared.
        @raise KeyError: raised when no state is associated with var and a
            default value is not provided.
        """
        if self.has(namespace, name):
            return copy.deepcopy(self._state[namespace][name])
        elif default is self.NO_DEFAULT:
            raise KeyError('No key %s in namespace %s' % (name, namespace))
        else:
            return default


    @with_backing_file
    def set(self, namespace, name, value):
        """Saves the value given with the provided name.

        @param namespace: The namespace that the property should be stored in.
        @param name: The name the value should be saved with.
        @param value: The value to save.
        """
        namespace_dict = self._state.setdefault(namespace, {})
        # store a deep copy so later caller-side mutation cannot leak in
        namespace_dict[name] = copy.deepcopy(value)
        logging.debug('Persistent state %s.%s now set to %r', namespace,
                      name, value)


    @with_backing_file
    def has(self, namespace, name):
        """Return a boolean indicating if namespace.name is defined.

        @param namespace: The namespace to check for a definition.
        @param name: The name to check for a definition.

        @return: True if the given name is defined in the given namespace and
            False otherwise.
        """
        return namespace in self._state and name in self._state[namespace]


    @with_backing_file
    def discard(self, namespace, name):
        """If namespace.name is a defined value, deletes it.

        @param namespace: The namespace that the property is stored in.
        @param name: The name the value is saved with.
        """
        if self.has(namespace, name):
            del self._state[namespace][name]
            # drop the namespace dict entirely once it becomes empty
            if len(self._state[namespace]) == 0:
                del self._state[namespace]
            logging.debug('Persistent state %s.%s deleted', namespace, name)
        else:
            logging.debug(
                'Persistent state %s.%s not defined so nothing is discarded',
                namespace, name)


    @with_backing_file
    def discard_namespace(self, namespace):
        """Delete all defined namespace.* names.

        @param namespace: The namespace to be cleared.
        """
        if namespace in self._state:
            del self._state[namespace]
        logging.debug('Persistent state %s.* deleted', namespace)


    @staticmethod
    def property_factory(state_attribute, property_attribute, default,
                         namespace='global_properties'):
        """
        Create a property object for an attribute using self.get and self.set.

        @param state_attribute: A string with the name of the attribute on
            job that contains the job_state instance.
        @param property_attribute: A string with the name of the attribute
            this property is exposed as.
        @param default: A default value that should be used for this property
            if it is not set.
        @param namespace: The namespace to store the attribute value in.

        @return: A read-write property object that performs self.get calls
            to read the value and self.set calls to set it.
        """
        def getter(job):
            state = getattr(job, state_attribute)
            return state.get(namespace, property_attribute, default)
        def setter(job, value):
            state = getattr(job, state_attribute)
            state.set(namespace, property_attribute, value)
        return property(getter, setter)
416
417
class status_log_entry(object):
    """Represents a single status log entry."""

    # placeholder rendered in place of a missing subdir/operation field
    RENDERED_NONE_VALUE = '----'
    TIMESTAMP_FIELD = 'timestamp'
    LOCALTIME_FIELD = 'localtime'

    # non-space whitespace is forbidden in any fields
    BAD_CHAR_REGEX = re.compile(r'[\t\n\r\v\f]')

    def __init__(self, status_code, subdir, operation, message, fields,
                 timestamp=None):
        """Construct a status.log entry.

        @param status_code: A message status code. Must match the codes
            accepted by autotest_lib.common_lib.log.is_valid_status.
        @param subdir: A valid job subdirectory, or None.
        @param operation: Description of the operation, or None.
        @param message: A printable string describing event to be recorded.
        @param fields: A dictionary of arbitrary alphanumeric key=value pairs
            to be included in the log, or None.
        @param timestamp: An optional integer timestamp, in the same format
            as a time.time() timestamp. If unspecified, the current time is
            used.

        @raise ValueError: if any of the parameters are invalid
        """

        if not log.is_valid_status(status_code):
            raise ValueError('status code %r is not valid' % status_code)
        self.status_code = status_code

        if subdir and self.BAD_CHAR_REGEX.search(subdir):
            raise ValueError('Invalid character in subdir string')
        self.subdir = subdir

        if operation and self.BAD_CHAR_REGEX.search(operation):
            raise ValueError('Invalid character in operation string')
        self.operation = operation

        # break the message line into a single-line message that goes into the
        # database, and a block of additional lines that goes into the status
        # log but will never be parsed
        message_lines = message.split('\n')
        self.message = message_lines[0].replace('\t', ' ' * 8)
        self.extra_message_lines = message_lines[1:]
        if self.BAD_CHAR_REGEX.search(self.message):
            raise ValueError('Invalid character in message %r' % self.message)

        # copy the caller's dict so later mutations do not leak in
        if not fields:
            self.fields = {}
        else:
            self.fields = fields.copy()
        for key, value in self.fields.iteritems():
            if type(value) is int:
                value = str(value)
            # NOTE: the str() conversion above only feeds the validation
            # check below; the value stored in self.fields keeps its
            # original type
            if self.BAD_CHAR_REGEX.search(key + value):
                raise ValueError('Invalid character in %r=%r field'
                                 % (key, value))

        # build up the timestamp
        if timestamp is None:
            timestamp = int(time.time())
        self.fields[self.TIMESTAMP_FIELD] = str(timestamp)
        self.fields[self.LOCALTIME_FIELD] = time.strftime(
            '%b %d %H:%M:%S', time.localtime(timestamp))


    def is_start(self):
        """Indicates if this status log is the start of a new nested block.

        @return: A boolean indicating if this entry starts a new nested block.
        """
        return self.status_code == 'START'


    def is_end(self):
        """Indicates if this status log is the end of a nested block.

        @return: A boolean indicating if this entry ends a nested block.
        """
        return self.status_code.startswith('END ')


    def render(self):
        """Render the status log entry into a text string.

        @return: A text string suitable for writing into a status log file.
        """
        # combine all the log line data into a tab-delimited string
        subdir = self.subdir or self.RENDERED_NONE_VALUE
        operation = self.operation or self.RENDERED_NONE_VALUE
        extra_fields = ['%s=%s' % field for field in self.fields.iteritems()]
        line_items = [self.status_code, subdir, operation]
        line_items += extra_fields + [self.message]
        first_line = '\t'.join(line_items)

        # append the extra unparsable lines, two-space indented
        all_lines = [first_line]
        all_lines += ['  ' + line for line in self.extra_message_lines]
        return '\n'.join(all_lines)


    @classmethod
    def parse(cls, line):
        """Parse a status log entry from a text string.

        This method is the inverse of render; it should always be true that
        parse(entry.render()) produces a new status_log_entry equivalent to
        entry.

        @return: A new status_log_entry instance with fields extracted from the
            given status line. If the line is an extra message line then None
            is returned.
        """
        # extra message lines are always prepended with two spaces
        if line.startswith('  '):
            return None

        line = line.lstrip('\t')  # ignore indentation
        entry_parts = line.split('\t')
        if len(entry_parts) < 4:
            raise ValueError('%r is not a valid status line' % line)
        status_code, subdir, operation = entry_parts[:3]
        if subdir == cls.RENDERED_NONE_VALUE:
            subdir = None
        if operation == cls.RENDERED_NONE_VALUE:
            operation = None
        # the first three parts and the trailing message are fixed; every
        # part in between is a key=value field
        message = entry_parts[-1]
        fields = dict(part.split('=', 1) for part in entry_parts[3:-1])
        if cls.TIMESTAMP_FIELD in fields:
            timestamp = int(fields[cls.TIMESTAMP_FIELD])
        else:
            timestamp = None
        return cls(status_code, subdir, operation, message, fields, timestamp)
553
554
class status_indenter(object):
    """Abstract interface that a status log indenter should use."""

    @property
    def indent(self):
        """The current indentation level, as an integer."""
        raise NotImplementedError


    def increment(self):
        """Increase indentation by one level."""
        raise NotImplementedError


    def decrement(self):
        """Decrease indentation by one level."""
        # Fix: this was previously a silent no-op, unlike indent and
        # increment, so a subclass that forgot to override it would quietly
        # corrupt indentation tracking instead of failing loudly.
        raise NotImplementedError
570
571
class status_logger(object):
    """Represents a status log file. Responsible for translating messages
    into on-disk status log lines.

    @property global_filename: The filename to write top-level logs to.
    @property subdir_filename: The filename to write subdir-level logs to.
    """
    def __init__(self, job, indenter, global_filename='status',
                 subdir_filename='status', record_hook=None,
                 tap_writer=None):
        """Construct a logger instance.

        @param job: A reference to the job object this is logging for. Only a
            weak reference to the job is held, to avoid a
            status_logger <-> job circular reference.
        @param indenter: A status_indenter instance, for tracking the
            indentation level.
        @param global_filename: An optional filename to initialize the
            self.global_filename attribute.
        @param subdir_filename: An optional filename to initialize the
            self.subdir_filename attribute.
        @param record_hook: An optional function to be called before an entry
            is logged. The function should expect a single parameter, a
            copy of the status_log_entry object.
        @param tap_writer: An instance of the class TAPReport for additionally
            writing TAP files.
        """
        self._jobref = weakref.ref(job)
        self._indenter = indenter
        self.global_filename = global_filename
        self.subdir_filename = subdir_filename
        self._record_hook = record_hook
        # fall back to a disabled TAP writer when none was supplied
        if tap_writer is None:
            tap_writer = TAPReport(None)
        self._tap_writer = tap_writer


    def render_entry(self, log_entry):
        """Render a status_log_entry as it would be written to a log file.

        @param log_entry: A status_log_entry instance to be rendered.

        @return: The status log entry, rendered as it would be written to the
            logs (including indentation).
        """
        # END entries are written one level shallower than the block they
        # close
        depth = self._indenter.indent
        if log_entry.is_end():
            depth -= 1
        return '\t' * depth + log_entry.render().rstrip('\n')


    def record_entry(self, log_entry, log_in_subdir=True):
        """Record a status_log_entry into the appropriate status log files.

        @param log_entry: A status_log_entry instance to be recorded into the
                status logs.
        @param log_in_subdir: A boolean that indicates (when true) that subdir
                logs should be written into the subdirectory status log file.
        """
        # promote the weak job reference to a strong one for the duration of
        # the method; bail out if the job has already been collected
        job = self._jobref()
        if job is None:
            logging.warning('Something attempted to write a status log entry '
                            'after its job terminated, ignoring the attempt.')
            logging.warning(traceback.format_stack())
            return

        # call the record hook if one was given
        if self._record_hook:
            self._record_hook(log_entry)

        # collect every file this entry should be appended to
        destinations = [os.path.join(job.resultdir, self.global_filename)]
        if log_in_subdir and log_entry.subdir:
            destinations.append(os.path.join(job.resultdir, log_entry.subdir,
                                             self.subdir_filename))

        # append the rendered entry to each destination log
        rendered = self.render_entry(log_entry)
        for destination in destinations:
            log_file = open(destination, 'a')
            try:
                log_file.write(rendered + '\n')
            finally:
                log_file.close()

        # hand END entries over to the TAP writer, if reporting is enabled
        if log_entry.is_end() and self._tap_writer.do_tap_report:
            self._tap_writer.record(log_entry, self._indenter.indent,
                                    destinations)

        # START/END entries adjust the indentation of everything that follows
        if log_entry.is_start():
            self._indenter.increment()
        elif log_entry.is_end():
            self._indenter.decrement()
669
670
671class TAPReport(object):
672    """
673    Deal with TAP reporting for the Autotest client.
674    """
675
676    job_statuses = {
677        "TEST_NA": False,
678        "ABORT": False,
679        "ERROR": False,
680        "FAIL": False,
681        "WARN": False,
682        "GOOD": True,
683        "START": True,
684        "END GOOD": True,
685        "ALERT": False,
686        "RUNNING": False,
687        "NOSTATUS": False
688    }
689
690
691    def __init__(self, enable, resultdir=None, global_filename='status'):
692        """
693        @param enable: Set self.do_tap_report to trigger TAP reporting.
694        @param resultdir: Path where the TAP report files will be written.
695        @param global_filename: File name of the status files .tap extensions
696                will be appended.
697        """
698        self.do_tap_report = enable
699        if resultdir is not None:
700            self.resultdir = os.path.abspath(resultdir)
701        self._reports_container = {}
702        self._keyval_container = {} # {'path1': [entries],}
703        self.global_filename = global_filename
704
705
706    @classmethod
707    def tap_ok(self, success, counter, message):
708        """
709        return a TAP message string.
710
711        @param success: True for positive message string.
712        @param counter: number of TAP line in plan.
713        @param message: additional message to report in TAP line.
714        """
715        if success:
716            message = "ok %s - %s" % (counter, message)
717        else:
718            message = "not ok %s - %s" % (counter, message)
719        return message
720
721
722    def record(self, log_entry, indent, log_files):
723        """
724        Append a job-level status event to self._reports_container. All
725        events will be written to TAP log files at the end of the test run.
726        Otherwise, it's impossilble to determine the TAP plan.
727
728        @param log_entry: A string status code describing the type of status
729                entry being recorded. It must pass log.is_valid_status to be
730                considered valid.
731        @param indent: Level of the log_entry to determine the operation if
732                log_entry.operation is not given.
733        @param log_files: List of full path of files the TAP report will be
734                written to at the end of the test.
735        """
736        for log_file in log_files:
737            log_file_path = os.path.dirname(log_file)
738            key = log_file_path.split(self.resultdir, 1)[1].strip(os.sep)
739            if not key:
740                key = 'root'
741
742            if not self._reports_container.has_key(key):
743                self._reports_container[key] = []
744
745            if log_entry.operation:
746                operation = log_entry.operation
747            elif indent == 1:
748                operation = "job"
749            else:
750                operation = "unknown"
751            entry = self.tap_ok(
752                self.job_statuses.get(log_entry.status_code, False),
753                len(self._reports_container[key]) + 1, operation + "\n"
754            )
755            self._reports_container[key].append(entry)
756
757
758    def record_keyval(self, path, dictionary, type_tag=None):
759        """
760        Append a key-value pairs of dictionary to self._keyval_container in
761        TAP format. Once finished write out the keyval.tap file to the file
762        system.
763
764        If type_tag is None, then the key must be composed of alphanumeric
765        characters (or dashes + underscores). However, if type-tag is not
766        null then the keys must also have "{type_tag}" as a suffix. At
767        the moment the only valid values of type_tag are "attr" and "perf".
768
769        @param path: The full path of the keyval.tap file to be created
770        @param dictionary: The keys and values.
771        @param type_tag: The type of the values
772        """
773        self._keyval_container.setdefault(path, [0, []])
774        self._keyval_container[path][0] += 1
775
776        if type_tag is None:
777            key_regex = re.compile(r'^[-\.\w]+$')
778        else:
779            if type_tag not in ('attr', 'perf'):
780                raise ValueError('Invalid type tag: %s' % type_tag)
781            escaped_tag = re.escape(type_tag)
782            key_regex = re.compile(r'^[-\.\w]+\{%s\}$' % escaped_tag)
783        self._keyval_container[path][1].extend([
784            self.tap_ok(True, self._keyval_container[path][0], "results"),
785            "\n  ---\n",
786        ])
787        try:
788            for key in sorted(dictionary.keys()):
789                if not key_regex.search(key):
790                    raise ValueError('Invalid key: %s' % key)
791                self._keyval_container[path][1].append(
792                    '  %s: %s\n' % (key.replace('{', '_').rstrip('}'),
793                                    dictionary[key])
794                )
795        finally:
796            self._keyval_container[path][1].append("  ...\n")
797        self._write_keyval()
798
799
800    def _write_reports(self):
801        """
802        Write TAP reports to file.
803        """
804        for key in self._reports_container.keys():
805            if key == 'root':
806                sub_dir = ''
807            else:
808                sub_dir = key
809            tap_fh = open(os.sep.join(
810                [self.resultdir, sub_dir, self.global_filename]
811            ) + ".tap", 'w')
812            tap_fh.write('1..' + str(len(self._reports_container[key])) + '\n')
813            tap_fh.writelines(self._reports_container[key])
814            tap_fh.close()
815
816
817    def _write_keyval(self):
818        """
819        Write the self._keyval_container key values to a file.
820        """
821        for path in self._keyval_container.keys():
822            tap_fh = open(path + ".tap", 'w')
823            tap_fh.write('1..' + str(self._keyval_container[path][0]) + '\n')
824            tap_fh.writelines(self._keyval_container[path][1])
825            tap_fh.close()
826
827
    def write(self):
        """
        Write the TAP reports to files.

        Public entry point; currently just flushes the per-directory test
        reports (keyvals are flushed separately by record_keyval).
        """
        self._write_reports()
833
834
835    def _write_tap_archive(self):
836        """
837        Write a tar archive containing all the TAP files and
838        a meta.yml containing the file names.
839        """
840        os.chdir(self.resultdir)
841        tap_files = []
842        for rel_path, d, files in os.walk('.'):
843            tap_files.extend(["/".join(
844                [rel_path, f]) for f in files if f.endswith('.tap')])
845        meta_yaml = open('meta.yml', 'w')
846        meta_yaml.write('file_order:\n')
847        tap_tar = tarfile.open(self.resultdir + '/tap.tar.gz', 'w:gz')
848        for f in tap_files:
849            meta_yaml.write("  - " + f.lstrip('./') + "\n")
850            tap_tar.add(f)
851        meta_yaml.close()
852        tap_tar.add('meta.yml')
853        tap_tar.close()
854
855
856class base_job(object):
857    """An abstract base class for the various autotest job classes.
858
859    @property autodir: The top level autotest directory.
860    @property clientdir: The autotest client directory.
861    @property serverdir: The autotest server directory. [OPTIONAL]
862    @property resultdir: The directory where results should be written out.
863        [WRITABLE]
864
865    @property pkgdir: The job packages directory. [WRITABLE]
866    @property tmpdir: The job temporary directory. [WRITABLE]
867    @property testdir: The job test directory. [WRITABLE]
868    @property site_testdir: The job site test directory. [WRITABLE]
869
870    @property bindir: The client bin/ directory.
871    @property configdir: The client config/ directory.
872    @property profdir: The client profilers/ directory.
873    @property toolsdir: The client tools/ directory.
874
875    @property conmuxdir: The conmux directory. [OPTIONAL]
876
877    @property control: A path to the control file to be executed. [OPTIONAL]
878    @property hosts: A set of all live Host objects currently in use by the
879        job. Code running in the context of a local client can safely assume
880        that this set contains only a single entry.
881    @property machines: A list of the machine names associated with the job.
882    @property user: The user executing the job.
883    @property tag: A tag identifying the job. Often used by the scheduler to
884        give a name of the form NUMBER-USERNAME/HOSTNAME.
    @property args: A list of additional miscellaneous command-line arguments
886        provided when starting the job.
887
888    @property last_boot_tag: The label of the kernel from the last reboot.
889        [OPTIONAL,PERSISTENT]
890    @property automatic_test_tag: A string which, if set, will be automatically
891        added to the test name when running tests.
892
893    @property default_profile_only: A boolean indicating the default value of
894        profile_only used by test.execute. [PERSISTENT]
895    @property drop_caches: A boolean indicating if caches should be dropped
896        before each test is executed.
897    @property drop_caches_between_iterations: A boolean indicating if caches
898        should be dropped before each test iteration is executed.
899    @property run_test_cleanup: A boolean indicating if test.cleanup should be
900        run by default after a test completes, if the run_cleanup argument is
901        not specified. [PERSISTENT]
902
903    @property num_tests_run: The number of tests run during the job. [OPTIONAL]
904    @property num_tests_failed: The number of tests failed during the job.
905        [OPTIONAL]
906
907    @property bootloader: An instance of the boottool class. May not be
908        available on job instances where access to the bootloader is not
909        available (e.g. on the server running a server job). [OPTIONAL]
910    @property harness: An instance of the client test harness. Only available
911        in contexts where client test execution happens. [OPTIONAL]
912    @property logging: An instance of the logging manager associated with the
913        job.
914    @property profilers: An instance of the profiler manager associated with
915        the job.
916    @property sysinfo: An instance of the sysinfo object. Only available in
917        contexts where it's possible to collect sysinfo.
918    @property warning_manager: A class for managing which types of WARN
        messages should be logged and which should be suppressed. [OPTIONAL]
920    @property warning_loggers: A set of readable streams that will be monitored
921        for WARN messages to be logged. [OPTIONAL]
922
923    Abstract methods:
924        _find_base_directories [CLASSMETHOD]
925            Returns the location of autodir, clientdir and serverdir
926
927        _find_resultdir
928            Returns the location of resultdir. Gets a copy of any parameters
929            passed into base_job.__init__. Can return None to indicate that
930            no resultdir is to be used.
931
932        _get_status_logger
933            Returns a status_logger instance for recording job status logs.
934    """
935
    # capture the dependency on several helper classes with factories, so
    # subclasses and tests can substitute their own implementations
    _job_directory = job_directory
    _job_state = job_state


    # all the job directory attributes
    autodir = _job_directory.property_factory('autodir')
    clientdir = _job_directory.property_factory('clientdir')
    serverdir = _job_directory.property_factory('serverdir')
    resultdir = _job_directory.property_factory('resultdir')
    pkgdir = _job_directory.property_factory('pkgdir')
    tmpdir = _job_directory.property_factory('tmpdir')
    testdir = _job_directory.property_factory('testdir')
    site_testdir = _job_directory.property_factory('site_testdir')
    bindir = _job_directory.property_factory('bindir')
    configdir = _job_directory.property_factory('configdir')
    profdir = _job_directory.property_factory('profdir')
    toolsdir = _job_directory.property_factory('toolsdir')
    conmuxdir = _job_directory.property_factory('conmuxdir')


    # all the generic persistent properties (backed by self._state)
    tag = _job_state.property_factory('_state', 'tag', '')
    default_profile_only = _job_state.property_factory(
        '_state', 'default_profile_only', False)
    run_test_cleanup = _job_state.property_factory(
        '_state', 'run_test_cleanup', True)
    last_boot_tag = _job_state.property_factory(
        '_state', 'last_boot_tag', None)
    automatic_test_tag = _job_state.property_factory(
        '_state', 'automatic_test_tag', None)

    # the use_sequence_number property
    # _sequence_number doubles as both the flag and the counter: None means
    # sequence numbering is off, an integer is the next number to use
    _sequence_number = _job_state.property_factory(
        '_state', '_sequence_number', None)
    def _get_use_sequence_number(self):
        """Sequence numbering is considered on iff a counter is set."""
        return bool(self._sequence_number)
    def _set_use_sequence_number(self, value):
        """Enabling starts the counter at 1; disabling clears it."""
        if value:
            self._sequence_number = 1
        else:
            self._sequence_number = None
    use_sequence_number = property(_get_use_sequence_number,
                                   _set_use_sequence_number)
980
981
982    def __init__(self, *args, **dargs):
983        # initialize the base directories, all others are relative to these
984        autodir, clientdir, serverdir = self._find_base_directories()
985        self._autodir = self._job_directory(autodir)
986        self._clientdir = self._job_directory(clientdir)
987        if serverdir:
988            self._serverdir = self._job_directory(serverdir)
989        else:
990            self._serverdir = None
991
992        # initialize all the other directories relative to the base ones
993        self._initialize_dir_properties()
994        self._resultdir = self._job_directory(
995            self._find_resultdir(*args, **dargs), True)
996        self._execution_contexts = []
997
998        # initialize all the job state
999        self._state = self._job_state()
1000
1001        # initialize tap reporting
1002        if dargs.has_key('options'):
1003            self._tap = self._tap_init(dargs['options'].tap_report)
1004        else:
1005            self._tap = self._tap_init(False)
1006
    @classmethod
    def _find_base_directories(cls):
        """Locate the autotest base directories.

        Abstract hook; subclasses must return a 3-tuple of
        (autodir, clientdir, serverdir), where serverdir may be None in
        client-only contexts (see __init__).

        @raise NotImplementedError: always, in this base class.
        """
        raise NotImplementedError()
1010
1011
1012    def _initialize_dir_properties(self):
1013        """
1014        Initializes all the secondary self.*dir properties. Requires autodir,
1015        clientdir and serverdir to already be initialized.
1016        """
1017        # create some stubs for use as shortcuts
1018        def readonly_dir(*args):
1019            return self._job_directory(os.path.join(*args))
1020        def readwrite_dir(*args):
1021            return self._job_directory(os.path.join(*args), True)
1022
1023        # various client-specific directories
1024        self._bindir = readonly_dir(self.clientdir, 'bin')
1025        self._configdir = readonly_dir(self.clientdir, 'config')
1026        self._profdir = readonly_dir(self.clientdir, 'profilers')
1027        self._pkgdir = readwrite_dir(self.clientdir, 'packages')
1028        self._toolsdir = readonly_dir(self.clientdir, 'tools')
1029
1030        # directories which are in serverdir on a server, clientdir on a client
1031        if self.serverdir:
1032            root = self.serverdir
1033        else:
1034            root = self.clientdir
1035        self._tmpdir = readwrite_dir(root, 'tmp')
1036        self._testdir = readwrite_dir(root, 'tests')
1037        self._site_testdir = readwrite_dir(root, 'site_tests')
1038
1039        # various server-specific directories
1040        if self.serverdir:
1041            self._conmuxdir = readonly_dir(self.autodir, 'conmux')
1042        else:
1043            self._conmuxdir = None
1044
1045
    def _find_resultdir(self, *args, **dargs):
        """Locate the results directory for this job.

        Abstract hook; subclasses receive a copy of the arguments passed
        to base_job.__init__ and may return None to indicate that no
        resultdir is to be used (see the class docstring).

        @raise NotImplementedError: always, in this base class.
        """
        raise NotImplementedError()
1048
1049
1050    def push_execution_context(self, resultdir):
1051        """
1052        Save off the current context of the job and change to the given one.
1053
1054        In practice method just changes the resultdir, but it may become more
1055        extensive in the future. The expected use case is for when a child
1056        job needs to be executed in some sort of nested context (for example
1057        the way parallel_simple does). The original context can be restored
1058        with a pop_execution_context call.
1059
1060        @param resultdir: The new resultdir, relative to the current one.
1061        """
1062        new_dir = self._job_directory(
1063            os.path.join(self.resultdir, resultdir), True)
1064        self._execution_contexts.append(self._resultdir)
1065        self._resultdir = new_dir
1066
1067
1068    def pop_execution_context(self):
1069        """
1070        Reverse the effects of the previous push_execution_context call.
1071
1072        @raise IndexError: raised when the stack of contexts is empty.
1073        """
1074        if not self._execution_contexts:
1075            raise IndexError('No old execution context to restore')
1076        self._resultdir = self._execution_contexts.pop()
1077
1078
1079    def get_state(self, name, default=_job_state.NO_DEFAULT):
1080        """Returns the value associated with a particular name.
1081
1082        @param name: The name the value was saved with.
1083        @param default: A default value to return if no state is currently
1084            associated with var.
1085
1086        @return: A deep copy of the value associated with name. Note that this
1087            explicitly returns a deep copy to avoid problems with mutable
1088            values; mutations are not persisted or shared.
1089        @raise KeyError: raised when no state is associated with var and a
1090            default value is not provided.
1091        """
1092        try:
1093            return self._state.get('public', name, default=default)
1094        except KeyError:
1095            raise KeyError(name)
1096
1097
    def set_state(self, name, value):
        """Saves the value given with the provided name.

        @param name: The name the value should be saved with.
        @param value: The value to save.
        """
        # job state is namespaced; user-visible values all live in 'public'
        self._state.set('public', name, value)
1105
1106
1107    def _build_tagged_test_name(self, testname, dargs):
1108        """Builds the fully tagged testname and subdirectory for job.run_test.
1109
1110        @param testname: The base name of the test
1111        @param dargs: The ** arguments passed to run_test. And arguments
1112            consumed by this method will be removed from the dictionary.
1113
1114        @return: A 3-tuple of the full name of the test, the subdirectory it
1115            should be stored in, and the full tag of the subdir.
1116        """
1117        tag_parts = []
1118
1119        # build up the parts of the tag used for the test name
1120        base_tag = dargs.pop('tag', None)
1121        if base_tag:
1122            tag_parts.append(str(base_tag))
1123        if self.use_sequence_number:
1124            tag_parts.append('_%02d_' % self._sequence_number)
1125            self._sequence_number += 1
1126        if self.automatic_test_tag:
1127            tag_parts.append(self.automatic_test_tag)
1128        full_testname = '.'.join([testname] + tag_parts)
1129
1130        # build up the subdir and tag as well
1131        subdir_tag = dargs.pop('subdir_tag', None)
1132        if subdir_tag:
1133            tag_parts.append(subdir_tag)
1134        subdir = '.'.join([testname] + tag_parts)
1135        tag = '.'.join(tag_parts)
1136
1137        return full_testname, subdir, tag
1138
1139
1140    def _make_test_outputdir(self, subdir):
1141        """Creates an output directory for a test to run it.
1142
1143        @param subdir: The subdirectory of the test. Generally computed by
1144            _build_tagged_test_name.
1145
1146        @return: A job_directory instance corresponding to the outputdir of
1147            the test.
1148        @raise TestError: If the output directory is invalid.
1149        """
1150        # explicitly check that this subdirectory is new
1151        path = os.path.join(self.resultdir, subdir)
1152        if os.path.exists(path):
1153            msg = ('%s already exists; multiple tests cannot run with the '
1154                   'same subdirectory' % subdir)
1155            raise error.TestError(msg)
1156
1157        # create the outputdir and raise a TestError if it isn't valid
1158        try:
1159            outputdir = self._job_directory(path, True)
1160            return outputdir
1161        except self._job_directory.JobDirectoryException, e:
1162            logging.exception('%s directory creation failed with %s',
1163                              subdir, e)
1164            raise error.TestError('%s directory creation failed' % subdir)
1165
    def _tap_init(self, enable):
        """Initialize TAP reporting

        @param enable: Whether the returned reporter should actually
            generate TAP reports.

        @return: A TAPReport instance bound to this job's resultdir.
        """
        return TAPReport(enable, resultdir=self.resultdir)
1170
1171
    def record(self, status_code, subdir, operation, status='',
               optional_fields=None):
        """Record a job-level status event.

        Logs an event noteworthy to the Autotest job as a whole. Messages will
        be written into a global status log file, as well as a subdir-local
        status log file (if subdir is specified).

        @param status_code: A string status code describing the type of status
            entry being recorded. It must pass log.is_valid_status to be
            considered valid.
        @param subdir: A specific results subdirectory this also applies to, or
            None. If not None the subdirectory must exist.
        @param operation: A string describing the operation that was run.
        @param status: An optional human-readable message describing the status
            entry, for example an error message or "completed successfully".
        @param optional_fields: An optional dictionary of additional named
            fields to be included with the status message. Timestamp and
            localtime entries are generated with the current time and added
            to this dictionary automatically.
        """
        entry = status_log_entry(status_code, subdir, operation, status,
                                 optional_fields)
        self.record_entry(entry)
1196
1197
    def record_entry(self, entry, log_in_subdir=True):
        """Record a job-level status event, using a status_log_entry.

        This is the same as self.record but using an existing status log
        entry object rather than constructing one for you.

        @param entry: A status_log_entry object
        @param log_in_subdir: A boolean that indicates (when true) that subdir
                logs should be written into the subdirectory status log file.
        """
        # delegate to the logger provided by the subclass (abstract hook)
        self._get_status_logger().record_entry(entry, log_in_subdir)
1209