base_job.py revision 2a89dac0b6e319ef58d41c7a591c3d88cf6dd8a1
import os, copy, logging, errno, fcntl, time, re, weakref, traceback
import cPickle as pickle

from autotest_lib.client.common_lib import autotemp, error, log


class job_directory(object):
    """Represents a job.*dir directory."""


    class JobDirectoryException(error.AutotestError):
        """Generic job_directory exception superclass."""


    class MissingDirectoryException(JobDirectoryException):
        """Raised when a directory required by the job does not exist."""
        def __init__(self, path):
            Exception.__init__(self, 'Directory %s does not exist' % path)


    class UncreatableDirectoryException(JobDirectoryException):
        """Raised when a directory required by the job is missing and cannot
        be created."""
        def __init__(self, path, error):
            msg = 'Creation of directory %s failed with exception %s'
            msg %= (path, error)
            Exception.__init__(self, msg)


    class UnwritableDirectoryException(JobDirectoryException):
        """Raised when a writable directory required by the job exists
        but is not writable."""
        def __init__(self, path):
            msg = 'Directory %s exists but is not writable' % path
            Exception.__init__(self, msg)


    def __init__(self, path, is_writable=False):
        """
        Instantiate a job directory.

        @param path: The path of the directory. If None a temporary directory
            will be created instead.
        @param is_writable: If True, expect the directory to be writable.

        @raise MissingDirectoryException: raised if is_writable=False and the
            directory does not exist.
        @raise UnwritableDirectoryException: raised if is_writable=True and
            the directory exists but is not writable.
        @raise UncreatableDirectoryException: raised if is_writable=True, the
            directory does not exist and it cannot be created.
        """
        if path is None:
            if is_writable:
                self._tempdir = autotemp.tempdir(unique_id='autotest')
                self.path = self._tempdir.name
            else:
                raise self.MissingDirectoryException(path)
        else:
            self._tempdir = None
            self.path = path
        self._ensure_valid(is_writable)


    def _ensure_valid(self, is_writable):
        """
        Ensure that this is a valid directory.

        Checks that the directory at self.path exists. If is_writable is
        True it also creates the directory if necessary and verifies that
        it is writable. Creation will still fail if the path is rooted in a
        non-writable directory, or if a file already exists at the given
        location.

        @param is_writable: A boolean indicating that the directory should
            not only exist, but also be writable.

        @raise MissingDirectoryException: raised if is_writable=False and the
            directory does not exist.
        @raise UnwritableDirectoryException: raised if is_writable=True and
            the directory is not writable.
        @raise UncreatableDirectoryException: raised if is_writable=True, the
            directory does not exist and it cannot be created.
        """
        # ensure the directory exists
        if is_writable:
            try:
                os.makedirs(self.path)
            except OSError, e:
                if e.errno != errno.EEXIST or not os.path.isdir(self.path):
                    raise self.UncreatableDirectoryException(self.path, e)
        elif not os.path.isdir(self.path):
            raise self.MissingDirectoryException(self.path)

        # if is_writable=True, also check that the directory is writable
        if is_writable and not os.access(self.path, os.W_OK):
            raise self.UnwritableDirectoryException(self.path)


    @staticmethod
    def property_factory(attribute):
        """
        Create a job.*dir -> job._*dir.path property accessor.

        @param attribute A string with the name of the attribute this is
            exposed as. '_'+attribute must then be an attribute that holds
            either None or a job_directory-like object.

        @returns A read-only property object that exposes a job_directory path
        """
        @property
        def dir_property(self):
            underlying_attribute = getattr(self, '_' + attribute)
            if underlying_attribute is None:
                return None
            else:
                return underlying_attribute.path
        return dir_property


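# Illustrative sketch (not part of the original module): how a job-like class
# can combine job_directory with job_directory.property_factory. The class
# name and path below are hypothetical.
#
#     class my_job(object):
#         resultdir = job_directory.property_factory('resultdir')
#
#         def __init__(self):
#             # writable directory, created on demand if missing
#             self._resultdir = job_directory('/tmp/my_results', True)
#
#     my_job().resultdir   # -> '/tmp/my_results' (read-only view of the path)

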
# decorator for use with job_state methods
def with_backing_lock(method):
    """A decorator to perform a lock-*-unlock cycle.

    When applied to a method, this decorator will automatically wrap
    calls to the method in a backing file lock before the call, followed
    by a backing file unlock after it.
    """
    def wrapped_method(self, *args, **dargs):
        already_have_lock = self._backing_file_lock is not None
        if not already_have_lock:
            self._lock_backing_file()
        try:
            return method(self, *args, **dargs)
        finally:
            if not already_have_lock:
                self._unlock_backing_file()
    wrapped_method.__name__ = method.__name__
    wrapped_method.__doc__ = method.__doc__
    return wrapped_method


# decorator for use with job_state methods
def with_backing_file(method):
    """A decorator to perform a lock-read-*-write-unlock cycle.

    When applied to a method, this decorator will automatically wrap
    calls to the method in a lock-and-read before the call followed by a
    write-and-unlock. Any operation that is reading or writing state
    should be decorated with this decorator to ensure that backing file
    state is consistently maintained.
    """
    @with_backing_lock
    def wrapped_method(self, *args, **dargs):
        self._read_from_backing_file()
        try:
            return method(self, *args, **dargs)
        finally:
            self._write_to_backing_file()
    wrapped_method.__name__ = method.__name__
    wrapped_method.__doc__ = method.__doc__
    return wrapped_method



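# Illustrative sketch (not part of the original module): the effective call
# sequence when a job_state method (defined below) is decorated with
# @with_backing_file. Names are shorthand for the methods above and below.
#
#     self._lock_backing_file()          # flock the backing file (outermost)
#     self._read_from_backing_file()     # pull on-disk state into memory
#     try:
#         result = method(self, ...)     # the decorated operation itself
#     finally:
#         self._write_to_backing_file()  # flush state back to disk
#         self._unlock_backing_file()    # release the flock

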
class job_state(object):
    """A class for managing explicit job and user state, optionally persistent.

    The class allows you to save state by name (like a dictionary). Any state
    stored in this class should be picklable and deep copyable. While this is
    not enforced, it is recommended that only valid python identifiers be used
    as names. Additionally, the namespace 'stateful_property' is used for
    storing the values associated with properties constructed using the
    property_factory method.
    """

    NO_DEFAULT = object()
    PICKLE_PROTOCOL = 2  # highest protocol available in python 2.4


    def __init__(self):
        """Initialize the job state."""
        self._state = {}
        self._backing_file = None
        self._backing_file_initialized = False
        self._backing_file_lock = None


    def _lock_backing_file(self):
        """Acquire a lock on the backing file."""
        if self._backing_file:
            self._backing_file_lock = open(self._backing_file, 'a')
            fcntl.flock(self._backing_file_lock, fcntl.LOCK_EX)


    def _unlock_backing_file(self):
        """Release a lock on the backing file."""
        if self._backing_file_lock:
            fcntl.flock(self._backing_file_lock, fcntl.LOCK_UN)
            self._backing_file_lock.close()
            self._backing_file_lock = None


    def read_from_file(self, file_path, merge=True):
        """Read in any state from the file at file_path.

        When merge=True, any state specified only in-memory will be preserved.
        Any state specified on-disk will be set in-memory, even if an in-memory
        setting already exists.

        @param file_path: The path where the state should be read from. It must
            exist but it can be empty.
        @param merge: If true, merge the on-disk state with the in-memory
            state. If false, replace the in-memory state with the on-disk
            state.

        @warning: This method is intentionally concurrency-unsafe. It makes no
            attempt to control concurrent access to the file at file_path.
        """

        # we can assume that the file exists
        if os.path.getsize(file_path) == 0:
            on_disk_state = {}
        else:
            on_disk_state = pickle.load(open(file_path))

        if merge:
            # merge the on-disk state with the in-memory state
            for namespace, namespace_dict in on_disk_state.iteritems():
                in_memory_namespace = self._state.setdefault(namespace, {})
                for name, value in namespace_dict.iteritems():
                    if name in in_memory_namespace:
                        if in_memory_namespace[name] != value:
                            logging.info('Persistent value of %s.%s from %s '
                                         'overriding existing in-memory '
                                         'value', namespace, name, file_path)
                            in_memory_namespace[name] = value
                        else:
                            logging.debug('Value of %s.%s is unchanged, '
                                          'skipping import', namespace, name)
                    else:
                        logging.debug('Importing %s.%s from state file %s',
                                      namespace, name, file_path)
                        in_memory_namespace[name] = value
        else:
            # just replace the in-memory state with the on-disk state
            self._state = on_disk_state

        # lock the backing file before we refresh it
        with_backing_lock(self.__class__._write_to_backing_file)(self)


    def write_to_file(self, file_path):
        """Write out the current state to the given path.

        @param file_path: The path where the state should be written out to.
            Must be writable.

        @warning: This method is intentionally concurrency-unsafe. It makes no
            attempt to control concurrent access to the file at file_path.
        """
        outfile = open(file_path, 'w')
        try:
            pickle.dump(self._state, outfile, self.PICKLE_PROTOCOL)
        finally:
            outfile.close()


    def _read_from_backing_file(self):
        """Refresh the current state from the backing file.

        If the backing file has never been read before (indicated by checking
        self._backing_file_initialized) it will merge the file with the
        in-memory state, rather than overwriting it.
        """
        if self._backing_file:
            merge_backing_file = not self._backing_file_initialized
            self.read_from_file(self._backing_file, merge=merge_backing_file)
            self._backing_file_initialized = True


    def _write_to_backing_file(self):
        """Flush the current state to the backing file."""
        if self._backing_file:
            self.write_to_file(self._backing_file)


    @with_backing_file
    def _synchronize_backing_file(self):
        """Synchronizes the contents of the in-memory and on-disk state."""
        # state is implicitly synchronized in with_backing_file methods
        pass


    def set_backing_file(self, file_path):
        """Change the path used as the backing file for the persistent state.

        When a new backing file is specified, if the file already exists then
        its contents will be merged into the current state, with conflicts
        between the file and memory being resolved in favor of the file
        contents. The file will then be kept in sync with the (combined)
        in-memory state. The syncing can be disabled by setting this to None.

        @param file_path: A path on the filesystem that can be read from and
            written to, or None to turn off the backing store.
        """
        self._synchronize_backing_file()
        self._backing_file = file_path
        self._backing_file_initialized = False
        self._synchronize_backing_file()


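    # Illustrative usage sketch (not part of the original module); the state
    # file path is hypothetical.
    #
    #     state = job_state()
    #     state.set_backing_file('/tmp/job_state.pickle')
    #     state.set('my_namespace', 'build_id', 42)           # synced to disk
    #     state.get('my_namespace', 'build_id')               # -> 42
    #     state.get('my_namespace', 'missing', default=None)  # -> None
    #     state.set_backing_file(None)                        # stop syncing

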
    @with_backing_file
    def get(self, namespace, name, default=NO_DEFAULT):
        """Returns the value associated with a particular name.

        @param namespace: The namespace that the property should be stored in.
        @param name: The name the value was saved with.
        @param default: A default value to return if no state is currently
            associated with name.

        @return: A deep copy of the value associated with name. Note that this
            explicitly returns a deep copy to avoid problems with mutable
            values; mutations are not persisted or shared.
        @raise KeyError: raised when no state is associated with name and a
            default value is not provided.
        """
        if self.has(namespace, name):
            return copy.deepcopy(self._state[namespace][name])
        elif default is self.NO_DEFAULT:
            raise KeyError('No key %s in namespace %s' % (name, namespace))
        else:
            return default


    @with_backing_file
    def set(self, namespace, name, value):
        """Saves the value given with the provided name.

        @param namespace: The namespace that the property should be stored in.
        @param name: The name the value should be saved with.
        @param value: The value to save.
        """
        namespace_dict = self._state.setdefault(namespace, {})
        namespace_dict[name] = copy.deepcopy(value)
        logging.debug('Persistent state %s.%s now set to %r', namespace,
                      name, value)


    @with_backing_file
    def has(self, namespace, name):
        """Return a boolean indicating if namespace.name is defined.

        @param namespace: The namespace to check for a definition.
        @param name: The name to check for a definition.

        @return: True if the given name is defined in the given namespace and
            False otherwise.
        """
        return namespace in self._state and name in self._state[namespace]


    @with_backing_file
    def discard(self, namespace, name):
        """If namespace.name is a defined value, deletes it.

        @param namespace: The namespace that the property is stored in.
        @param name: The name the value is saved with.
        """
        if self.has(namespace, name):
            del self._state[namespace][name]
            if len(self._state[namespace]) == 0:
                del self._state[namespace]
            logging.debug('Persistent state %s.%s deleted', namespace, name)
        else:
            logging.debug(
                'Persistent state %s.%s not defined so nothing is discarded',
                namespace, name)


    @with_backing_file
    def discard_namespace(self, namespace):
        """Delete all defined namespace.* names.

        @param namespace: The namespace to be cleared.
        """
        if namespace in self._state:
            del self._state[namespace]
        logging.debug('Persistent state %s.* deleted', namespace)


    @staticmethod
    def property_factory(state_attribute, property_attribute, default,
                         namespace='global_properties'):
        """
        Create a property object for an attribute using self.get and self.set.

        @param state_attribute: A string with the name of the attribute on
            job that contains the job_state instance.
        @param property_attribute: A string with the name of the attribute
            this property is exposed as.
        @param default: A default value that should be used for this property
            if it is not set.
        @param namespace: The namespace to store the attribute value in.

        @return: A read-write property object that performs self.get calls
            to read the value and self.set calls to set it.
        """
        def getter(job):
            state = getattr(job, state_attribute)
            return state.get(namespace, property_attribute, default)
        def setter(job, value):
            state = getattr(job, state_attribute)
            state.set(namespace, property_attribute, value)
        return property(getter, setter)


class status_log_entry(object):
    """Represents a single status log entry."""

    RENDERED_NONE_VALUE = '----'
    TIMESTAMP_FIELD = 'timestamp'
    LOCALTIME_FIELD = 'localtime'

    def __init__(self, status_code, subdir, operation, message, fields,
                 timestamp=None):
        """Construct a status.log entry.

        @param status_code: A message status code. Must match the codes
            accepted by autotest_lib.client.common_lib.log.is_valid_status.
        @param subdir: A valid job subdirectory, or None.
        @param operation: Description of the operation, or None.
        @param message: A printable string describing the event to be
            recorded.
        @param fields: A dictionary of arbitrary alphanumeric key=value pairs
            to be included in the log, or None.
        @param timestamp: An optional integer timestamp, in the same format
            as a time.time() timestamp. If unspecified, the current time is
            used.

        @raise ValueError: if any of the parameters are invalid
        """
        # non-space whitespace is forbidden in any fields
        bad_char_regex = r'[\t\n\r\v\f]'

        if not log.is_valid_status(status_code):
            raise ValueError('status code %r is not valid' % status_code)
        self.status_code = status_code

        if subdir and re.search(bad_char_regex, subdir):
            raise ValueError('Invalid character in subdir string')
        self.subdir = subdir

        if operation and re.search(bad_char_regex, operation):
            raise ValueError('Invalid character in operation string')
        self.operation = operation

        # break the message line into a single-line message that goes into the
        # database, and a block of additional lines that goes into the status
        # log but will never be parsed
        message_lines = message.split('\n')
        self.message = message_lines[0].replace('\t', ' ' * 8)
        self.extra_message_lines = message_lines[1:]
        if re.search(bad_char_regex, self.message):
            raise ValueError('Invalid character in message %r' % self.message)

        if not fields:
            self.fields = {}
        else:
            self.fields = fields.copy()
        for key, value in self.fields.iteritems():
            if re.search(bad_char_regex, key + value):
                raise ValueError('Invalid character in %r=%r field'
                                 % (key, value))

        # build up the timestamp
        if timestamp is None:
            timestamp = int(time.time())
        self.fields[self.TIMESTAMP_FIELD] = str(timestamp)
        self.fields[self.LOCALTIME_FIELD] = time.strftime(
            '%b %d %H:%M:%S', time.localtime(timestamp))


    def is_start(self):
        """Indicates if this entry is the start of a new nested block.

        @return: A boolean indicating if this entry starts a new nested block.
        """
        return self.status_code == 'START'


    def is_end(self):
        """Indicates if this entry is the end of a nested block.

        @return: A boolean indicating if this entry ends a nested block.
        """
        return self.status_code.startswith('END ')


    def render(self):
        """Render the status log entry into a text string.

        @return: A text string suitable for writing into a status log file.
        """
        # combine all the log line data into a tab-delimited string
        subdir = self.subdir or self.RENDERED_NONE_VALUE
        operation = self.operation or self.RENDERED_NONE_VALUE
        extra_fields = ['%s=%s' % field for field in self.fields.iteritems()]
        line_items = [self.status_code, subdir, operation]
        line_items += extra_fields + [self.message]
        first_line = '\t'.join(line_items)

        # append the extra unparsable lines, two-space indented
        all_lines = [first_line]
        all_lines += ['  ' + line for line in self.extra_message_lines]
        return '\n'.join(all_lines)


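    # Illustrative sketch (not part of the original module) of the rendered,
    # tab-delimited format; the field values shown here are made up.
    #
    #     entry = status_log_entry('GOOD', None, 'sleeptest',
    #                              'completed successfully', None)
    #     entry.render()
    #     # -> 'GOOD\t----\t----\ttimestamp=...\tlocaltime=...\t'
    #     #    'completed successfully'
    #
    # and parse() below is its inverse: parse(entry.render()) reconstructs an
    # equivalent entry.

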
    @classmethod
    def parse(cls, line):
        """Parse a status log entry from a text string.

        This method is the inverse of render; it should always be true that
        parse(entry.render()) produces a new status_log_entry equivalent to
        entry.

        @return: A new status_log_entry instance with fields extracted from the
            given status line. If the line is an extra message line then None
            is returned.
        """
        # extra message lines are always prepended with two spaces
        if line.startswith('  '):
            return None

        line = line.lstrip('\t')  # ignore indentation
        entry_parts = line.split('\t')
        if len(entry_parts) < 4:
            raise ValueError('%r is not a valid status line' % line)
        status_code, subdir, operation = entry_parts[:3]
        if subdir == cls.RENDERED_NONE_VALUE:
            subdir = None
        if operation == cls.RENDERED_NONE_VALUE:
            operation = None
        message = entry_parts[-1]
        fields = dict(part.split('=', 1) for part in entry_parts[3:-1])
        if cls.TIMESTAMP_FIELD in fields:
            timestamp = int(fields[cls.TIMESTAMP_FIELD])
        else:
            timestamp = None
        return cls(status_code, subdir, operation, message, fields, timestamp)


class status_indenter(object):
    """Abstract interface that a status log indenter should use."""

    @property
    def indent(self):
        """The current indentation level."""
        raise NotImplementedError


    def increment(self):
        """Increase indentation by one level."""
        raise NotImplementedError


    def decrement(self):
        """Decrease indentation by one level."""
        raise NotImplementedError


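# Illustrative sketch (not part of the original module): a minimal in-memory
# indenter satisfying the status_indenter interface. Real implementations may
# need to share the indentation level across processes.
#
#     class simple_indenter(status_indenter):
#         def __init__(self):
#             self._indent = 0
#         @property
#         def indent(self):
#             return self._indent
#         def increment(self):
#             self._indent += 1
#         def decrement(self):
#             self._indent = max(0, self._indent - 1)

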
class status_logger(object):
    """Represents a status log file. Responsible for translating messages
    into on-disk status log lines.

    @property global_filename: The filename to write top-level logs to.
    @property subdir_filename: The filename to write subdir-level logs to.
    """
    def __init__(self, job, indenter, global_filename='status',
                 subdir_filename='status', record_hook=None):
        """Construct a logger instance.

        @param job: A reference to the job object this is logging for. Only a
            weak reference to the job is held, to avoid a
            status_logger <-> job circular reference.
        @param indenter: A status_indenter instance, for tracking the
            indentation level.
        @param global_filename: An optional filename to initialize the
            self.global_filename attribute.
        @param subdir_filename: An optional filename to initialize the
            self.subdir_filename attribute.
        @param record_hook: An optional function to be called before an entry
            is logged. The function should expect a single parameter, a
            copy of the status_log_entry object.
        """
        self._jobref = weakref.ref(job)
        self._indenter = indenter
        self.global_filename = global_filename
        self.subdir_filename = subdir_filename
        self._record_hook = record_hook

    @staticmethod
    def _indent_multiline_text(multiline_string, num_tabs):
        """Indents all the lines of a multiline block of text.

        @param multiline_string: A string to indent
        @param num_tabs: The number of tabs to prepend to each line

        @return: A copy of multiline_string with each line prepended with
            num_tabs hard tabs.
        """
        prefix = '\t' * num_tabs
        # indent every line after the first
        indented = multiline_string.rstrip('\n').replace('\n', '\n' + prefix)
        # stick an indent on the first line as well
        return prefix + indented


    def render_entry(self, log_entry):
        """Render a status_log_entry as it would be written to a log file.

        @param log_entry: A status_log_entry instance to be rendered.

        @return: The status log entry, rendered as it would be written to the
            logs (including indentation).
        """
        if log_entry.is_end():
            indent = self._indenter.indent - 1
        else:
            indent = self._indenter.indent
        return self._indent_multiline_text(log_entry.render(), indent)


    def record_entry(self, log_entry, log_in_subdir=True):
        """Record a status_log_entry into the appropriate status log files.

        @param log_entry: A status_log_entry instance to be recorded into the
                status logs.
        @param log_in_subdir: A boolean that indicates (when true) that subdir
                logs should be written into the subdirectory status log file.
        """
        # acquire a strong reference for the duration of the method
        job = self._jobref()
        if job is None:
            logging.warning('Something attempted to write a status log entry '
                            'after its job terminated, ignoring the attempt.')
            logging.warning(traceback.format_stack())
            return

        # call the record hook if one was given
        if self._record_hook:
            self._record_hook(log_entry)

        # figure out where we need to log to
        log_files = [os.path.join(job.resultdir, self.global_filename)]
        if log_in_subdir and log_entry.subdir:
            log_files.append(os.path.join(job.resultdir, log_entry.subdir,
                                          self.subdir_filename))

        # write the entry out to the log files
        log_text = self.render_entry(log_entry)
        for log_file in log_files:
            fileobj = open(log_file, 'a')
            try:
                print >> fileobj, log_text
            finally:
                fileobj.close()

        # adjust the indentation if this was a START or END entry
        if log_entry.is_start():
            self._indenter.increment()
        elif log_entry.is_end():
            self._indenter.decrement()


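# Illustrative usage sketch for status_logger above (not part of the original
# module), assuming a job object with a valid resultdir, an existing
# 'sleeptest' results subdirectory and a concrete status_indenter instance:
#
#     logger = status_logger(job, my_indenter)
#     logger.record_entry(status_log_entry(
#         'START', 'sleeptest', 'sleeptest', '', None))     # indent level +1
#     logger.record_entry(status_log_entry(
#         'GOOD', 'sleeptest', 'sleeptest', 'completed successfully', None))
#     logger.record_entry(status_log_entry(
#         'END GOOD', 'sleeptest', 'sleeptest', '', None))  # indent level -1

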
class base_job(object):
    """An abstract base class for the various autotest job classes.

    @property autodir: The top level autotest directory.
    @property clientdir: The autotest client directory.
    @property serverdir: The autotest server directory. [OPTIONAL]
    @property resultdir: The directory where results should be written out.
        [WRITABLE]

    @property pkgdir: The job packages directory. [WRITABLE]
    @property tmpdir: The job temporary directory. [WRITABLE]
    @property testdir: The job test directory. [WRITABLE]
    @property site_testdir: The job site test directory. [WRITABLE]

    @property bindir: The client bin/ directory.
    @property configdir: The client config/ directory.
    @property profdir: The client profilers/ directory.
    @property toolsdir: The client tools/ directory.

    @property conmuxdir: The conmux directory. [OPTIONAL]

    @property control: A path to the control file to be executed. [OPTIONAL]
    @property hosts: A set of all live Host objects currently in use by the
        job. Code running in the context of a local client can safely assume
        that this set contains only a single entry.
    @property machines: A list of the machine names associated with the job.
    @property user: The user executing the job.
    @property tag: A tag identifying the job. Often used by the scheduler to
        give a name of the form NUMBER-USERNAME/HOSTNAME.
    @property args: A list of additional miscellaneous command-line arguments
        provided when starting the job.

    @property last_boot_tag: The label of the kernel from the last reboot.
        [OPTIONAL,PERSISTENT]
    @property automatic_test_tag: A string which, if set, will be automatically
        added to the test name when running tests.

    @property default_profile_only: A boolean indicating the default value of
        profile_only used by test.execute. [PERSISTENT]
    @property drop_caches: A boolean indicating if caches should be dropped
        before each test is executed.
    @property drop_caches_between_iterations: A boolean indicating if caches
        should be dropped before each test iteration is executed.
    @property run_test_cleanup: A boolean indicating if test.cleanup should be
        run by default after a test completes, if the run_cleanup argument is
        not specified. [PERSISTENT]

    @property num_tests_run: The number of tests run during the job. [OPTIONAL]
    @property num_tests_failed: The number of tests failed during the job.
        [OPTIONAL]

    @property bootloader: An instance of the boottool class. May not be
        available on job instances where access to the bootloader is not
        available (e.g. on the server running a server job). [OPTIONAL]
    @property harness: An instance of the client test harness. Only available
        in contexts where client test execution happens. [OPTIONAL]
    @property logging: An instance of the logging manager associated with the
        job.
    @property profilers: An instance of the profiler manager associated with
        the job.
    @property sysinfo: An instance of the sysinfo object. Only available in
        contexts where it's possible to collect sysinfo.
    @property warning_manager: A class for managing which types of WARN
        messages should be logged and which should be suppressed. [OPTIONAL]
    @property warning_loggers: A set of readable streams that will be monitored
        for WARN messages to be logged. [OPTIONAL]

    Abstract methods:
        _find_base_directories [CLASSMETHOD]
            Returns the location of autodir, clientdir and serverdir

        _find_resultdir
            Returns the location of resultdir. Gets a copy of any parameters
            passed into base_job.__init__. Can return None to indicate that
            no resultdir is to be used.

        _get_status_logger
            Returns a status_logger instance for recording job status logs.
    """

    # capture the dependency on several helper classes with factories
    _job_directory = job_directory
    _job_state = job_state


    # all the job directory attributes
    autodir = _job_directory.property_factory('autodir')
    clientdir = _job_directory.property_factory('clientdir')
    serverdir = _job_directory.property_factory('serverdir')
    resultdir = _job_directory.property_factory('resultdir')
    pkgdir = _job_directory.property_factory('pkgdir')
    tmpdir = _job_directory.property_factory('tmpdir')
    testdir = _job_directory.property_factory('testdir')
    site_testdir = _job_directory.property_factory('site_testdir')
    bindir = _job_directory.property_factory('bindir')
    configdir = _job_directory.property_factory('configdir')
    profdir = _job_directory.property_factory('profdir')
    toolsdir = _job_directory.property_factory('toolsdir')
    conmuxdir = _job_directory.property_factory('conmuxdir')


    # all the generic persistent properties
    tag = _job_state.property_factory('_state', 'tag', '')
    default_profile_only = _job_state.property_factory(
        '_state', 'default_profile_only', False)
    run_test_cleanup = _job_state.property_factory(
        '_state', 'run_test_cleanup', True)
    last_boot_tag = _job_state.property_factory(
        '_state', 'last_boot_tag', None)
    automatic_test_tag = _job_state.property_factory(
        '_state', 'automatic_test_tag', None)

    # the use_sequence_number property
    _sequence_number = _job_state.property_factory(
        '_state', '_sequence_number', None)
    def _get_use_sequence_number(self):
        return bool(self._sequence_number)
    def _set_use_sequence_number(self, value):
        if value:
            self._sequence_number = 1
        else:
            self._sequence_number = None
    use_sequence_number = property(_get_use_sequence_number,
                                   _set_use_sequence_number)


    def __init__(self, *args, **dargs):
        # initialize the base directories, all others are relative to these
        autodir, clientdir, serverdir = self._find_base_directories()
        self._autodir = self._job_directory(autodir)
        self._clientdir = self._job_directory(clientdir)
        if serverdir:
            self._serverdir = self._job_directory(serverdir)
        else:
            self._serverdir = None

        # initialize all the other directories relative to the base ones
        self._initialize_dir_properties()
        self._resultdir = self._job_directory(
            self._find_resultdir(*args, **dargs), True)
        self._execution_contexts = []

        # initialize all the job state
        self._state = self._job_state()


    @classmethod
    def _find_base_directories(cls):
        raise NotImplementedError()


    def _initialize_dir_properties(self):
        """
        Initializes all the secondary self.*dir properties. Requires autodir,
        clientdir and serverdir to already be initialized.
        """
        # create some stubs for use as shortcuts
        def readonly_dir(*args):
            return self._job_directory(os.path.join(*args))
        def readwrite_dir(*args):
            return self._job_directory(os.path.join(*args), True)

        # various client-specific directories
        self._bindir = readonly_dir(self.clientdir, 'bin')
        self._configdir = readonly_dir(self.clientdir, 'config')
        self._profdir = readonly_dir(self.clientdir, 'profilers')
        self._pkgdir = readwrite_dir(self.clientdir, 'packages')
        self._toolsdir = readonly_dir(self.clientdir, 'tools')

        # directories which are in serverdir on a server, clientdir on a client
        if self.serverdir:
            root = self.serverdir
        else:
            root = self.clientdir
        self._tmpdir = readwrite_dir(root, 'tmp')
        self._testdir = readwrite_dir(root, 'tests')
        self._site_testdir = readwrite_dir(root, 'site_tests')

        # various server-specific directories
        if self.serverdir:
            self._conmuxdir = readonly_dir(self.autodir, 'conmux')
        else:
            self._conmuxdir = None


    def _find_resultdir(self, *args, **dargs):
        raise NotImplementedError()


    def push_execution_context(self, resultdir):
        """
        Save off the current context of the job and change to the given one.

        In practice this method just changes the resultdir, but it may become
        more extensive in the future. The expected use case is for when a
        child job needs to be executed in some sort of nested context (for
        example the way parallel_simple does). The original context can be
        restored with a pop_execution_context call.

        @param resultdir: The new resultdir, relative to the current one.
        """
        new_dir = self._job_directory(
            os.path.join(self.resultdir, resultdir), True)
        self._execution_contexts.append(self._resultdir)
        self._resultdir = new_dir


    def pop_execution_context(self):
        """
        Reverse the effects of the previous push_execution_context call.

        @raise IndexError: raised when the stack of contexts is empty.
        """
        if not self._execution_contexts:
            raise IndexError('No old execution context to restore')
        self._resultdir = self._execution_contexts.pop()


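    # Illustrative usage sketch (not part of the original module); the subdir
    # name is hypothetical:
    #
    #     job.push_execution_context('subjob0')   # results now go to
    #                                             # <resultdir>/subjob0
    #     try:
    #         pass                                # run the nested work here
    #     finally:
    #         job.pop_execution_context()         # restore the old resultdir

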
    def get_state(self, name, default=_job_state.NO_DEFAULT):
        """Returns the value associated with a particular name.

        @param name: The name the value was saved with.
        @param default: A default value to return if no state is currently
            associated with name.

        @return: A deep copy of the value associated with name. Note that this
            explicitly returns a deep copy to avoid problems with mutable
            values; mutations are not persisted or shared.
        @raise KeyError: raised when no state is associated with name and a
            default value is not provided.
        """
        try:
            return self._state.get('public', name, default=default)
        except KeyError:
            raise KeyError(name)


    def set_state(self, name, value):
        """Saves the value given with the provided name.

        @param name: The name the value should be saved with.
        @param value: The value to save.
        """
        self._state.set('public', name, value)


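    # Illustrative sketch (not part of the original module): get_state and
    # set_state keep values in the 'public' namespace of the job_state
    # object, so for example:
    #
    #     job.set_state('my_counter', 3)
    #     job.get_state('my_counter')              # -> 3
    #     job.get_state('missing', default=None)   # -> None

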
    def _build_tagged_test_name(self, testname, dargs):
        """Builds the fully tagged testname and subdirectory for job.run_test.

        @param testname: The base name of the test
        @param dargs: The ** arguments passed to run_test. Any arguments
            consumed by this method will be removed from the dictionary.

        @return: A 3-tuple of the full name of the test, the subdirectory it
            should be stored in, and the full tag of the subdir.
        """
        tag_parts = []

        # build up the parts of the tag used for the test name
        base_tag = dargs.pop('tag', None)
        if base_tag:
            tag_parts.append(str(base_tag))
        if self.use_sequence_number:
            tag_parts.append('_%02d_' % self._sequence_number)
            self._sequence_number += 1
        if self.automatic_test_tag:
            tag_parts.append(self.automatic_test_tag)
        full_testname = '.'.join([testname] + tag_parts)

        # build up the subdir and tag as well
        subdir_tag = dargs.pop('subdir_tag', None)
        if subdir_tag:
            tag_parts.append(subdir_tag)
        subdir = '.'.join([testname] + tag_parts)
        tag = '.'.join(tag_parts)

        return full_testname, subdir, tag


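    # Worked example (illustrative, not part of the original module), with
    # use_sequence_number off and no automatic_test_tag:
    #
    #     _build_tagged_test_name('sleeptest',
    #                             {'tag': 'short', 'subdir_tag': 'run1'})
    #     # -> ('sleeptest.short', 'sleeptest.short.run1', 'short.run1')

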
    def _make_test_outputdir(self, subdir):
        """Creates an output directory for a test to run in.

        @param subdir: The subdirectory of the test. Generally computed by
            _build_tagged_test_name.

        @return: A job_directory instance corresponding to the outputdir of
            the test.
        @raise TestError: If the output directory is invalid.
        """
        # explicitly check that this subdirectory is new
        path = os.path.join(self.resultdir, subdir)
        if os.path.exists(path):
            msg = ('%s already exists; multiple tests cannot run with the '
                   'same subdirectory' % subdir)
            raise error.TestError(msg)

        # create the outputdir and raise a TestError if it isn't valid
        try:
            outputdir = self._job_directory(path, True)
            return outputdir
        except self._job_directory.JobDirectoryException, e:
            logging.exception('%s directory creation failed with %s',
                              subdir, e)
            raise error.TestError('%s directory creation failed' % subdir)


    def record(self, status_code, subdir, operation, status='',
               optional_fields=None):
        """Record a job-level status event.

        Logs an event noteworthy to the Autotest job as a whole. Messages will
        be written into a global status log file, as well as a subdir-local
        status log file (if subdir is specified).

        @param status_code: A string status code describing the type of status
            entry being recorded. It must pass log.is_valid_status to be
            considered valid.
        @param subdir: A specific results subdirectory this also applies to, or
            None. If not None the subdirectory must exist.
        @param operation: A string describing the operation that was run.
        @param status: An optional human-readable message describing the status
            entry, for example an error message or "completed successfully".
        @param optional_fields: An optional dictionary of additional named
            fields to be included with the status message. Timestamp and
            localtime entries are always generated with the current time and
            added to this dictionary.
        """
        entry = status_log_entry(status_code, subdir, operation, status,
                                 optional_fields)
        self.record_entry(entry)


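    # Illustrative usage sketch (not part of the original module); the
    # operation and status strings are made up, and the 'sleeptest' results
    # subdirectory is assumed to exist:
    #
    #     job.record('START', 'sleeptest', 'sleeptest')
    #     job.record('GOOD', 'sleeptest', 'sleeptest',
    #                'completed successfully')
    #     job.record('END GOOD', 'sleeptest', 'sleeptest')

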
    def record_entry(self, entry, log_in_subdir=True):
        """Record a job-level status event, using a status_log_entry.

        This is the same as self.record but using an existing status log
        entry object rather than constructing one for you.

        @param entry: A status_log_entry object
        @param log_in_subdir: A boolean that indicates (when true) that subdir
                logs should be written into the subdirectory status log file.
        """
        self._get_status_logger().record_entry(entry, log_in_subdir)
