runtime.cc revision 9583fbcf597eff6d0b3c5359b8e8d5f70ed82c40
1/*
2 * Copyright (C) 2011 The Android Open Source Project
3 *
4 * Licensed under the Apache License, Version 2.0 (the "License");
5 * you may not use this file except in compliance with the License.
6 * You may obtain a copy of the License at
7 *
8 *      http://www.apache.org/licenses/LICENSE-2.0
9 *
10 * Unless required by applicable law or agreed to in writing, software
11 * distributed under the License is distributed on an "AS IS" BASIS,
12 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
13 * See the License for the specific language governing permissions and
14 * limitations under the License.
15 */
16
17#include "runtime.h"
18
19// sys/mount.h has to come before linux/fs.h due to redefinition of MS_RDONLY, MS_BIND, etc
20#include <sys/mount.h>
21#include <linux/fs.h>
22
23#include <signal.h>
24#include <sys/syscall.h>
25
26#include <cstdio>
27#include <cstdlib>
28#include <limits>
29#include <vector>
30
31#include "arch/arm/registers_arm.h"
32#include "arch/mips/registers_mips.h"
33#include "arch/x86/registers_x86.h"
34#include "arch/x86_64/registers_x86_64.h"
35#include "atomic.h"
36#include "class_linker.h"
37#include "debugger.h"
38#include "gc/accounting/card_table-inl.h"
39#include "gc/heap.h"
40#include "gc/space/space.h"
41#include "image.h"
42#include "instrumentation.h"
43#include "intern_table.h"
44#include "invoke_arg_array_builder.h"
45#include "jni_internal.h"
46#include "mirror/art_field-inl.h"
47#include "mirror/art_method-inl.h"
48#include "mirror/array.h"
49#include "mirror/class-inl.h"
50#include "mirror/class_loader.h"
51#include "mirror/stack_trace_element.h"
52#include "mirror/throwable.h"
53#include "monitor.h"
54#include "parsed_options.h"
55#include "oat_file.h"
56#include "ScopedLocalRef.h"
57#include "scoped_thread_state_change.h"
58#include "signal_catcher.h"
59#include "signal_set.h"
60#include "sirt_ref.h"
61#include "thread.h"
62#include "thread_list.h"
63#include "trace.h"
64#include "transaction.h"
65#include "profiler.h"
66#include "UniquePtr.h"
67#include "verifier/method_verifier.h"
68#include "well_known_classes.h"
69
70#include "JniConstants.h"  // Last to avoid LOG redefinition in ics-mr1-plus-art.
71
72namespace art {
73
74Runtime* Runtime::instance_ = NULL;
75
// Constructs a Runtime with every subsystem pointer null and all flags at
// their defaults. Real initialization happens later in Runtime::Init();
// this constructor must stay cheap and side-effect free.
Runtime::Runtime()
    : compiler_callbacks_(nullptr),
      is_zygote_(false),
      is_concurrent_gc_enabled_(true),
      is_explicit_gc_disabled_(false),
      default_stack_size_(0),
      heap_(nullptr),
      max_spins_before_thin_lock_inflation_(Monitor::kDefaultMaxSpinsBeforeThinLockInflation),
      monitor_list_(nullptr),
      monitor_pool_(nullptr),
      thread_list_(nullptr),
      intern_table_(nullptr),
      class_linker_(nullptr),
      signal_catcher_(nullptr),
      java_vm_(nullptr),
      pre_allocated_OutOfMemoryError_(nullptr),
      resolution_method_(nullptr),
      imt_conflict_method_(nullptr),
      default_imt_(nullptr),
      fault_message_lock_("Fault message lock"),
      fault_message_(""),
      method_verifiers_lock_("Method verifiers lock"),
      threads_being_born_(0),
      // The condition variable is tied to runtime_shutdown_lock_; ~Runtime()
      // waits on it for in-flight thread births to complete.
      shutdown_cond_(new ConditionVariable("Runtime shutdown", *Locks::runtime_shutdown_lock_)),
      shutting_down_(false),
      shutting_down_started_(false),
      started_(false),
      finished_starting_(false),
      vfprintf_(nullptr),
      exit_(nullptr),
      abort_(nullptr),
      stats_enabled_(false),
      profile_(false),
      profile_period_s_(0),
      profile_duration_s_(0),
      profile_interval_us_(0),
      profile_backoff_coefficient_(0),
      method_trace_(false),
      method_trace_file_size_(0),
      instrumentation_(),
      use_compile_time_class_path_(false),
      main_thread_group_(nullptr),
      system_thread_group_(nullptr),
      system_class_loader_(nullptr),
      dump_gc_performance_on_shutdown_(false),
      preinitialization_transaction(nullptr) {
  // Callee-save methods are created/installed later; start with empty slots.
  for (int i = 0; i < Runtime::kLastCalleeSaveType; i++) {
    callee_save_methods_[i] = nullptr;
  }
}
126
// Tears the runtime down in dependency order: waits out in-flight thread
// births, stops tracing and internal threads, then deletes subsystems.
// The deletion order below is significant.
Runtime::~Runtime() {
  if (dump_gc_performance_on_shutdown_) {
    // This can't be called from the Heap destructor below because it
    // could call RosAlloc::InspectAll() which needs the thread_list
    // to be still alive.
    heap_->DumpGcPerformanceInfo(LOG(INFO));
  }

  Thread* self = Thread::Current();
  {
    // Flag shutdown and wait until no more threads are in the middle of
    // being born; EndThreadBirth() broadcasts shutdown_cond_ when the
    // count reaches zero.
    MutexLock mu(self, *Locks::runtime_shutdown_lock_);
    shutting_down_started_ = true;
    while (threads_being_born_ > 0) {
      shutdown_cond_->Wait(self);
    }
    shutting_down_ = true;
  }
  Trace::Shutdown();

  // Make sure to let the GC complete if it is running.
  heap_->WaitForGcToComplete(self);
  heap_->DeleteThreadPool();

  // Make sure our internal threads are dead before we start tearing down things they're using.
  Dbg::StopJdwp();
  delete signal_catcher_;

  // Make sure all other non-daemon threads have terminated, and all daemon threads are suspended.
  delete thread_list_;
  delete monitor_list_;
  delete monitor_pool_;
  delete class_linker_;
  delete heap_;
  delete intern_table_;
  delete java_vm_;
  Thread::Shutdown();
  QuasiAtomic::Shutdown();
  verifier::MethodVerifier::Shutdown();
  // TODO: acquire a static mutex on Runtime to avoid racing.
  CHECK(instance_ == nullptr || instance_ == this);
  instance_ = nullptr;
}
169
// Helper streamed by Runtime::Abort() (via Dumpable) to describe the abort:
// the aborting thread, any pending exception, and all other threads.
// Must tolerate being run very early (no runtime) or recursively.
struct AbortState {
  void Dump(std::ostream& os) {
    if (gAborting > 1) {
      // A previous abort is already dumping; avoid recursing into
      // thread/lock machinery that may itself be the problem.
      os << "Runtime aborting --- recursively, so no thread-specific detail!\n";
      return;
    }
    gAborting++;
    os << "Runtime aborting...\n";
    if (Runtime::Current() == NULL) {
      os << "(Runtime does not yet exist!)\n";
      return;
    }
    Thread* self = Thread::Current();
    if (self == NULL) {
      os << "(Aborting thread was not attached to runtime!)\n";
    } else {
      // TODO: we're aborting and the ScopedObjectAccess may attempt to acquire the mutator_lock_
      //       which may block indefinitely if there's a misbehaving thread holding it exclusively.
      //       The code below should be made robust to this.
      ScopedObjectAccess soa(self);
      os << "Aborting thread:\n";
      self->Dump(os);
      if (self->IsExceptionPending()) {
        ThrowLocation throw_location;
        mirror::Throwable* exception = self->GetException(&throw_location);
        os << "Pending exception " << PrettyTypeOf(exception)
            << " thrown by '" << throw_location.Dump() << "'\n"
            << exception->Dump();
      }
    }
    DumpAllThreads(os, self);
  }

  // Dumps all threads. Deliberately does not take the usual locks (we are
  // aborting; a wedged thread may hold them), but reports which locks were
  // not held so the dump can be read with appropriate suspicion.
  void DumpAllThreads(std::ostream& os, Thread* self) NO_THREAD_SAFETY_ANALYSIS {
    bool tll_already_held = Locks::thread_list_lock_->IsExclusiveHeld(self);
    bool ml_already_held = Locks::mutator_lock_->IsSharedHeld(self);
    if (!tll_already_held || !ml_already_held) {
      os << "Dumping all threads without appropriate locks held:"
          << (!tll_already_held ? " thread list lock" : "")
          << (!ml_already_held ? " mutator lock" : "")
          << "\n";
    }
    os << "All threads:\n";
    Runtime::Current()->GetThreadList()->DumpLocked(os);
  }
};
216
// Aborts the process after dumping diagnostic state. Never returns.
void Runtime::Abort() {
  gAborting++;  // set before taking any locks

  // Ensure that we don't have multiple threads trying to abort at once,
  // which would result in significantly worse diagnostics.
  MutexLock mu(Thread::Current(), *Locks::abort_lock_);

  // Get any pending output out of the way.
  fflush(NULL);

  // Many people have difficulty distinguishing aborts from crashes,
  // so be explicit.
  AbortState state;
  LOG(INTERNAL_FATAL) << Dumpable<AbortState>(state);

  // Call the abort hook if we have one.
  if (Runtime::Current() != NULL && Runtime::Current()->abort_ != NULL) {
    LOG(INTERNAL_FATAL) << "Calling abort hook...";
    Runtime::Current()->abort_();
    // notreached
    LOG(INTERNAL_FATAL) << "Unexpectedly returned from abort hook!";
  }

#if defined(__GLIBC__)
  // TODO: we ought to be able to use pthread_kill(3) here (or abort(3),
  // which POSIX defines in terms of raise(3), which POSIX defines in terms
  // of pthread_kill(3)). On Linux, though, libcorkscrew can't unwind through
  // libpthread, which means the stacks we dump would be useless. Calling
  // tgkill(2) directly avoids that.
  syscall(__NR_tgkill, getpid(), GetTid(), SIGABRT);
  // TODO: LLVM installs its own SIGABRT handler so exit to be safe... Can we disable that in LLVM?
  // If not, we could use sigaction(3) before calling tgkill(2) and lose this call to exit(3).
  exit(1);
#else
  abort();
#endif
  // notreached
}
255
256bool Runtime::PreZygoteFork() {
257  heap_->PreZygoteFork();
258  return true;
259}
260
261void Runtime::CallExitHook(jint status) {
262  if (exit_ != NULL) {
263    ScopedThreadStateChange tsc(Thread::Current(), kNative);
264    exit_(status);
265    LOG(WARNING) << "Exit hook returned instead of exiting!";
266  }
267}
268
// Sweeps all categories of system-held weak references (interned strings,
// monitors, JNI weak globals, debugger object registry), clearing entries
// whose referents the supplied callback reports as dead.
void Runtime::SweepSystemWeaks(IsMarkedCallback* visitor, void* arg) {
  GetInternTable()->SweepInternTableWeaks(visitor, arg);
  GetMonitorList()->SweepMonitorList(visitor, arg);
  GetJavaVM()->SweepJniWeakGlobals(visitor, arg);
  Dbg::UpdateObjectPointers(visitor, arg);
}
275
// Creates and initializes the singleton Runtime instance. Returns false if
// a runtime already exists or if initialization fails (in which case the
// partially-constructed instance is deleted and instance_ reset).
bool Runtime::Create(const Options& options, bool ignore_unrecognized) {
  // TODO: acquire a static mutex on Runtime to avoid racing.
  if (Runtime::instance_ != NULL) {
    return false;
  }
  InitLogging(NULL);  // Calls Locks::Init() as a side effect.
  instance_ = new Runtime;
  if (!instance_->Init(options, ignore_unrecognized)) {
    delete instance_;
    instance_ = NULL;
    return false;
  }
  return true;
}
290
// Creates the Java-side system class loader by calling
// ClassLoader.getSystemClassLoader(), installs it as the current thread's
// class loader override and as Thread.contextClassLoader, and returns a new
// global reference to it. Returns NULL when running with a compile-time
// class path (e.g. in the compiler), where no system loader is wanted.
jobject CreateSystemClassLoader() {
  if (Runtime::Current()->UseCompileTimeClassPath()) {
    return NULL;
  }

  ScopedObjectAccess soa(Thread::Current());
  ClassLinker* cl = Runtime::Current()->GetClassLinker();

  SirtRef<mirror::Class> class_loader_class(
      soa.Self(), soa.Decode<mirror::Class*>(WellKnownClasses::java_lang_ClassLoader));
  CHECK(cl->EnsureInitialized(class_loader_class, true, true));

  mirror::ArtMethod* getSystemClassLoader =
      class_loader_class->FindDirectMethod("getSystemClassLoader", "()Ljava/lang/ClassLoader;");
  CHECK(getSystemClassLoader != NULL);

  // Invoke the static method with no arguments; "L" is the return shorty.
  JValue result;
  ArgArray arg_array(nullptr, 0);
  InvokeWithArgArray(soa, getSystemClassLoader, &arg_array, &result, "L");
  SirtRef<mirror::ClassLoader> class_loader(soa.Self(),
                                            down_cast<mirror::ClassLoader*>(result.GetL()));
  CHECK(class_loader.get() != nullptr);
  JNIEnv* env = soa.Self()->GetJniEnv();
  ScopedLocalRef<jobject> system_class_loader(env,
                                              soa.AddLocalReference<jobject>(class_loader.get()));
  CHECK(system_class_loader.get() != nullptr);

  soa.Self()->SetClassLoaderOverride(class_loader.get());

  SirtRef<mirror::Class> thread_class(
      soa.Self(),
      soa.Decode<mirror::Class*>(WellKnownClasses::java_lang_Thread));
  CHECK(cl->EnsureInitialized(thread_class, true, true));

  mirror::ArtField* contextClassLoader =
      thread_class->FindDeclaredInstanceField("contextClassLoader", "Ljava/lang/ClassLoader;");
  CHECK(contextClassLoader != NULL);

  // We can't run in a transaction yet.
  contextClassLoader->SetObject<false>(soa.Self()->GetPeer(), class_loader.get());

  return env->NewGlobalRef(system_class_loader.get());
}
334
// Completes runtime startup after Init(): registers native methods, creates
// thread groups, performs zygote or post-fork setup, starts daemon threads,
// and creates the system class loader. Returns false on failure.
bool Runtime::Start() {
  VLOG(startup) << "Runtime::Start entering";

  // Restore main thread state to kNative as expected by native code.
  Thread* self = Thread::Current();
  self->TransitionFromRunnableToSuspended(kNative);

  started_ = true;

  // InitNativeMethods needs to be after started_ so that the classes
  // it touches will have methods linked to the oat file if necessary.
  InitNativeMethods();

  // Initialize well known thread group values that may be accessed by threads while attaching.
  InitThreadGroups(self);

  Thread::FinishStartup();

  if (is_zygote_) {
    if (!InitZygote()) {
      return false;
    }
  } else {
    // Non-zygote runtimes do the post-fork work (thread pool, signal
    // catcher, JDWP) immediately.
    DidForkFromZygote();
  }

  StartDaemonThreads();

  system_class_loader_ = CreateSystemClassLoader();

  self->GetJniEnv()->locals.AssertEmpty();

  VLOG(startup) << "Runtime::Start exiting";

  finished_starting_ = true;

  if (profile_) {
    // User has asked for a profile using -Xprofile
    StartProfiler(profile_output_filename_.c_str(), true);
  }

  return true;
}
378
379void Runtime::EndThreadBirth() EXCLUSIVE_LOCKS_REQUIRED(Locks::runtime_shutdown_lock_) {
380  DCHECK_GT(threads_being_born_, 0U);
381  threads_being_born_--;
382  if (shutting_down_started_ && threads_being_born_ == 0) {
383    shutdown_cond_->Broadcast(Thread::Current());
384  }
385}
386
387// Do zygote-mode-only initialization.
388bool Runtime::InitZygote() {
389  // zygote goes into its own process group
390  setpgid(0, 0);
391
392  // See storage config details at http://source.android.com/tech/storage/
393  // Create private mount namespace shared by all children
394  if (unshare(CLONE_NEWNS) == -1) {
395    PLOG(WARNING) << "Failed to unshare()";
396    return false;
397  }
398
399  // Mark rootfs as being a slave so that changes from default
400  // namespace only flow into our children.
401  if (mount("rootfs", "/", NULL, (MS_SLAVE | MS_REC), NULL) == -1) {
402    PLOG(WARNING) << "Failed to mount() rootfs as MS_SLAVE";
403    return false;
404  }
405
406  // Create a staging tmpfs that is shared by our children; they will
407  // bind mount storage into their respective private namespaces, which
408  // are isolated from each other.
409  const char* target_base = getenv("EMULATED_STORAGE_TARGET");
410  if (target_base != NULL) {
411    if (mount("tmpfs", target_base, "tmpfs", MS_NOSUID | MS_NODEV,
412              "uid=0,gid=1028,mode=0751") == -1) {
413      LOG(WARNING) << "Failed to mount tmpfs to " << target_base;
414      return false;
415    }
416  }
417
418  return true;
419}
420
// Post-fork setup for an app process (also run directly for non-zygote
// runtimes): clears the zygote flag, creates the GC thread pool, starts the
// signal catcher, and starts JDWP last (see comment below).
void Runtime::DidForkFromZygote() {
  is_zygote_ = false;

  // Create the thread pool.
  heap_->CreateThreadPool();

  StartSignalCatcher();

  // Start the JDWP thread. If the command-line debugger flags specified "suspend=y",
  // this will pause the runtime, so we probably want this to come last.
  Dbg::StartJdwp();
}
433
434void Runtime::StartSignalCatcher() {
435  if (!is_zygote_) {
436    signal_catcher_ = new SignalCatcher(stack_trace_file_);
437  }
438}
439
// Returns whether the runtime is shutting down, taking the shutdown lock so
// the flag is read consistently with respect to ~Runtime().
bool Runtime::IsShuttingDown(Thread* self) {
  MutexLock mu(self, *Locks::runtime_shutdown_lock_);
  return IsShuttingDownLocked();
}
444
// Starts the Java daemon threads by invoking java.lang.Daemons.start() via
// JNI. Fatal if the call throws.
void Runtime::StartDaemonThreads() {
  VLOG(startup) << "Runtime::StartDaemonThreads entering";

  Thread* self = Thread::Current();

  // Must be in the kNative state for calling native methods.
  CHECK_EQ(self->GetState(), kNative);

  JNIEnv* env = self->GetJniEnv();
  env->CallStaticVoidMethod(WellKnownClasses::java_lang_Daemons,
                            WellKnownClasses::java_lang_Daemons_start);
  if (env->ExceptionCheck()) {
    env->ExceptionDescribe();
    LOG(FATAL) << "Error starting java.lang.Daemons";
  }

  VLOG(startup) << "Runtime::StartDaemonThreads exiting";
}
463
464bool Runtime::Init(const Options& raw_options, bool ignore_unrecognized) {
465  CHECK_EQ(sysconf(_SC_PAGE_SIZE), kPageSize);
466
467  UniquePtr<ParsedOptions> options(ParsedOptions::Create(raw_options, ignore_unrecognized));
468  if (options.get() == NULL) {
469    LOG(ERROR) << "Failed to parse options";
470    return false;
471  }
472  VLOG(startup) << "Runtime::Init -verbose:startup enabled";
473
474  QuasiAtomic::Startup();
475
476  Monitor::Init(options->lock_profiling_threshold_, options->hook_is_sensitive_thread_);
477
478  boot_class_path_string_ = options->boot_class_path_string_;
479  class_path_string_ = options->class_path_string_;
480  properties_ = options->properties_;
481
482  compiler_callbacks_ = options->compiler_callbacks_;
483  is_zygote_ = options->is_zygote_;
484  is_explicit_gc_disabled_ = options->is_explicit_gc_disabled_;
485
486  vfprintf_ = options->hook_vfprintf_;
487  exit_ = options->hook_exit_;
488  abort_ = options->hook_abort_;
489
490  default_stack_size_ = options->stack_size_;
491  stack_trace_file_ = options->stack_trace_file_;
492
493  max_spins_before_thin_lock_inflation_ = options->max_spins_before_thin_lock_inflation_;
494
495  monitor_list_ = new MonitorList;
496  monitor_pool_ = MonitorPool::Create();
497  thread_list_ = new ThreadList;
498  intern_table_ = new InternTable;
499
500
501  if (options->interpreter_only_) {
502    GetInstrumentation()->ForceInterpretOnly();
503  }
504
505  heap_ = new gc::Heap(options->heap_initial_size_,
506                       options->heap_growth_limit_,
507                       options->heap_min_free_,
508                       options->heap_max_free_,
509                       options->heap_target_utilization_,
510                       options->heap_maximum_size_,
511                       options->image_,
512                       options->collector_type_,
513                       options->background_collector_type_,
514                       options->parallel_gc_threads_,
515                       options->conc_gc_threads_,
516                       options->low_memory_mode_,
517                       options->long_pause_log_threshold_,
518                       options->long_gc_log_threshold_,
519                       options->ignore_max_footprint_,
520                       options->use_tlab_,
521                       options->verify_pre_gc_heap_,
522                       options->verify_post_gc_heap_,
523                       options->verify_pre_gc_rosalloc_,
524                       options->verify_post_gc_rosalloc_);
525
526  dump_gc_performance_on_shutdown_ = options->dump_gc_performance_on_shutdown_;
527
528  BlockSignals();
529  InitPlatformSignalHandlers();
530
531  java_vm_ = new JavaVMExt(this, options.get());
532
533  Thread::Startup();
534
535  // ClassLinker needs an attached thread, but we can't fully attach a thread without creating
536  // objects. We can't supply a thread group yet; it will be fixed later. Since we are the main
537  // thread, we do not get a java peer.
538  Thread* self = Thread::Attach("main", false, NULL, false);
539  CHECK_EQ(self->thin_lock_thread_id_, ThreadList::kMainThreadId);
540  CHECK(self != NULL);
541
542  // Set us to runnable so tools using a runtime can allocate and GC by default
543  self->TransitionFromSuspendedToRunnable();
544
545  // Now we're attached, we can take the heap locks and validate the heap.
546  GetHeap()->EnableObjectValidation();
547
548  CHECK_GE(GetHeap()->GetContinuousSpaces().size(), 1U);
549  class_linker_ = new ClassLinker(intern_table_);
550  if (GetHeap()->HasImageSpace()) {
551    class_linker_->InitFromImage();
552  } else {
553    CHECK(options->boot_class_path_ != NULL);
554    CHECK_NE(options->boot_class_path_->size(), 0U);
555    class_linker_->InitFromCompiler(*options->boot_class_path_);
556  }
557  CHECK(class_linker_ != NULL);
558  verifier::MethodVerifier::Init();
559
560  method_trace_ = options->method_trace_;
561  method_trace_file_ = options->method_trace_file_;
562  method_trace_file_size_ = options->method_trace_file_size_;
563
564  // Extract the profile options.
565  // TODO: move into a Trace options struct?
566  profile_period_s_ = options->profile_period_s_;
567  profile_duration_s_ = options->profile_duration_s_;
568  profile_interval_us_ = options->profile_interval_us_;
569  profile_backoff_coefficient_ = options->profile_backoff_coefficient_;
570  profile_ = options->profile_;
571  profile_output_filename_ = options->profile_output_filename_;
572  // TODO: move this to just be an Trace::Start argument
573  Trace::SetDefaultClockSource(options->profile_clock_source_);
574
575  if (options->method_trace_) {
576    Trace::Start(options->method_trace_file_.c_str(), -1, options->method_trace_file_size_, 0,
577                 false, false, 0);
578  }
579
580  // Pre-allocate an OutOfMemoryError for the double-OOME case.
581  self->ThrowNewException(ThrowLocation(), "Ljava/lang/OutOfMemoryError;",
582                          "OutOfMemoryError thrown while trying to throw OutOfMemoryError; "
583                          "no stack available");
584  pre_allocated_OutOfMemoryError_ = self->GetException(NULL);
585  self->ClearException();
586
587  VLOG(startup) << "Runtime::Init exiting";
588  return true;
589}
590
// Registers all native methods: JniConstants and well-known classes first,
// then the runtime's built-in natives, then libjavacore (loaded manually
// because it implements System.loadLibrary itself). Fatal on failure.
void Runtime::InitNativeMethods() {
  VLOG(startup) << "Runtime::InitNativeMethods entering";
  Thread* self = Thread::Current();
  JNIEnv* env = self->GetJniEnv();

  // Must be in the kNative state for calling native methods (JNI_OnLoad code).
  CHECK_EQ(self->GetState(), kNative);

  // First set up JniConstants, which is used by both the runtime's built-in native
  // methods and libcore.
  JniConstants::init(env);
  WellKnownClasses::Init(env);

  // Then set up the native methods provided by the runtime itself.
  RegisterRuntimeNativeMethods(env);

  // Then set up libcore, which is just a regular JNI library with a regular JNI_OnLoad.
  // Most JNI libraries can just use System.loadLibrary, but libcore can't because it's
  // the library that implements System.loadLibrary!
  {
    std::string mapped_name(StringPrintf(OS_SHARED_LIB_FORMAT_STR, "javacore"));
    std::string reason;
    self->TransitionFromSuspendedToRunnable();
    // A null class loader means the library is loaded with the boot loader.
    SirtRef<mirror::ClassLoader> class_loader(self, nullptr);
    if (!instance_->java_vm_->LoadNativeLibrary(mapped_name, class_loader, &reason)) {
      LOG(FATAL) << "LoadNativeLibrary failed for \"" << mapped_name << "\": " << reason;
    }
    self->TransitionFromRunnableToSuspended(kNative);
  }

  // Initialize well known classes that may invoke runtime native methods.
  WellKnownClasses::LateInit(env);

  VLOG(startup) << "Runtime::InitNativeMethods exiting";
}
626
// Caches global references to ThreadGroup.mainThreadGroup and
// ThreadGroup.systemThreadGroup so later-attaching threads can use them.
// The groups may legitimately be absent when running as the compiler.
void Runtime::InitThreadGroups(Thread* self) {
  JNIEnvExt* env = self->GetJniEnv();
  // Restores the local reference frame on scope exit.
  ScopedJniEnvLocalRefState env_state(env);
  main_thread_group_ =
      env->NewGlobalRef(env->GetStaticObjectField(
          WellKnownClasses::java_lang_ThreadGroup,
          WellKnownClasses::java_lang_ThreadGroup_mainThreadGroup));
  CHECK(main_thread_group_ != NULL || IsCompiler());
  system_thread_group_ =
      env->NewGlobalRef(env->GetStaticObjectField(
          WellKnownClasses::java_lang_ThreadGroup,
          WellKnownClasses::java_lang_ThreadGroup_systemThreadGroup));
  CHECK(system_thread_group_ != NULL || IsCompiler());
}
641
// Returns the cached global ref to ThreadGroup.mainThreadGroup (set in
// InitThreadGroups); may only be null when running as the compiler.
jobject Runtime::GetMainThreadGroup() const {
  CHECK(main_thread_group_ != NULL || IsCompiler());
  return main_thread_group_;
}
646
// Returns the cached global ref to ThreadGroup.systemThreadGroup (set in
// InitThreadGroups); may only be null when running as the compiler.
jobject Runtime::GetSystemThreadGroup() const {
  CHECK(system_thread_group_ != NULL || IsCompiler());
  return system_thread_group_;
}
651
// Returns the system class loader created during Start(); may only be null
// when running as the compiler.
jobject Runtime::GetSystemClassLoader() const {
  CHECK(system_class_loader_ != NULL || IsCompiler());
  return system_class_loader_;
}
656
// Registers the runtime's built-in native methods. Each register_* function
// is defined in its own native/*.cc file; the macro declares and calls it.
// Registration order matters only for Throwable (see comment below).
void Runtime::RegisterRuntimeNativeMethods(JNIEnv* env) {
#define REGISTER(FN) extern void FN(JNIEnv*); FN(env)
  // Register Throwable first so that registration of other native methods can throw exceptions
  REGISTER(register_java_lang_Throwable);
  REGISTER(register_dalvik_system_DexFile);
  REGISTER(register_dalvik_system_VMDebug);
  REGISTER(register_dalvik_system_VMRuntime);
  REGISTER(register_dalvik_system_VMStack);
  REGISTER(register_dalvik_system_Zygote);
  REGISTER(register_java_lang_Class);
  REGISTER(register_java_lang_DexCache);
  REGISTER(register_java_lang_Object);
  REGISTER(register_java_lang_Runtime);
  REGISTER(register_java_lang_String);
  REGISTER(register_java_lang_System);
  REGISTER(register_java_lang_Thread);
  REGISTER(register_java_lang_VMClassLoader);
  REGISTER(register_java_lang_reflect_Array);
  REGISTER(register_java_lang_reflect_Constructor);
  REGISTER(register_java_lang_reflect_Field);
  REGISTER(register_java_lang_reflect_Method);
  REGISTER(register_java_lang_reflect_Proxy);
  REGISTER(register_java_util_concurrent_atomic_AtomicLong);
  REGISTER(register_org_apache_harmony_dalvik_ddmc_DdmServer);
  REGISTER(register_org_apache_harmony_dalvik_ddmc_DdmVmInternal);
  REGISTER(register_sun_misc_Unsafe);
#undef REGISTER
}
685
// Writes the SIGQUIT diagnostic dump: per-subsystem summaries followed by
// all thread stacks and lock states. Output order is part of the format.
void Runtime::DumpForSigQuit(std::ostream& os) {
  GetClassLinker()->DumpForSigQuit(os);
  GetInternTable()->DumpForSigQuit(os);
  GetJavaVM()->DumpForSigQuit(os);
  GetHeap()->DumpForSigQuit(os);
  os << "\n";

  thread_list_->DumpForSigQuit(os);
  BaseMutex::DumpAll(os);
}
696
// Dumps the owner tids of key runtime locks, but only if at least one of the
// thread-list/classes/dex locks is held.
// NOTE(review): mutator_lock_owner is printed but deliberately (?) not part
// of the condition below, so nothing is printed if only the mutator lock is
// held -- confirm this is the intended behavior.
void Runtime::DumpLockHolders(std::ostream& os) {
  uint64_t mutator_lock_owner = Locks::mutator_lock_->GetExclusiveOwnerTid();
  pid_t thread_list_lock_owner = GetThreadList()->GetLockOwner();
  pid_t classes_lock_owner = GetClassLinker()->GetClassesLockOwner();
  pid_t dex_lock_owner = GetClassLinker()->GetDexLockOwner();
  if ((thread_list_lock_owner | classes_lock_owner | dex_lock_owner) != 0) {
    os << "Mutator lock exclusive owner tid: " << mutator_lock_owner << "\n"
       << "ThreadList lock owner tid: " << thread_list_lock_owner << "\n"
       << "ClassLinker classes lock owner tid: " << classes_lock_owner << "\n"
       << "ClassLinker dex lock owner tid: " << dex_lock_owner << "\n";
  }
}
709
710void Runtime::SetStatsEnabled(bool new_state) {
711  if (new_state == true) {
712    GetStats()->Clear(~0);
713    // TODO: wouldn't it make more sense to clear _all_ threads' stats?
714    Thread::Current()->GetStats()->Clear(~0);
715    GetInstrumentation()->InstrumentQuickAllocEntryPoints();
716  } else {
717    GetInstrumentation()->UninstrumentQuickAllocEntryPoints();
718  }
719  stats_enabled_ = new_state;
720}
721
// Clears statistic counters: the low 16 bits of |kinds| select global stats,
// the high 16 bits select the current thread's stats.
void Runtime::ResetStats(int kinds) {
  GetStats()->Clear(kinds & 0xffff);
  // TODO: wouldn't it make more sense to clear _all_ threads' stats?
  Thread::Current()->GetStats()->Clear(kinds >> 16);
}
727
728int32_t Runtime::GetStat(int kind) {
729  RuntimeStats* stats;
730  if (kind < (1<<16)) {
731    stats = GetStats();
732  } else {
733    stats = Thread::Current()->GetStats();
734    kind >>= 16;
735  }
736  switch (kind) {
737  case KIND_ALLOCATED_OBJECTS:
738    return stats->allocated_objects;
739  case KIND_ALLOCATED_BYTES:
740    return stats->allocated_bytes;
741  case KIND_FREED_OBJECTS:
742    return stats->freed_objects;
743  case KIND_FREED_BYTES:
744    return stats->freed_bytes;
745  case KIND_GC_INVOCATIONS:
746    return stats->gc_for_alloc_count;
747  case KIND_CLASS_INIT_COUNT:
748    return stats->class_init_count;
749  case KIND_CLASS_INIT_TIME:
750    // Convert ns to us, reduce to 32 bits.
751    return static_cast<int>(stats->class_init_time_ns / 1000);
752  case KIND_EXT_ALLOCATED_OBJECTS:
753  case KIND_EXT_ALLOCATED_BYTES:
754  case KIND_EXT_FREED_OBJECTS:
755  case KIND_EXT_FREED_BYTES:
756    return 0;  // backward compatibility
757  default:
758    LOG(FATAL) << "Unknown statistic " << kind;
759    return -1;  // unreachable
760  }
761}
762
763void Runtime::BlockSignals() {
764  SignalSet signals;
765  signals.Add(SIGPIPE);
766  // SIGQUIT is used to dump the runtime's state (including stack traces).
767  signals.Add(SIGQUIT);
768  // SIGUSR1 is used to initiate a GC.
769  signals.Add(SIGUSR1);
770  signals.Block();
771}
772
773bool Runtime::AttachCurrentThread(const char* thread_name, bool as_daemon, jobject thread_group,
774                                  bool create_peer) {
775  bool success = Thread::Attach(thread_name, as_daemon, thread_group, create_peer) != NULL;
776  if (thread_name == NULL) {
777    LOG(WARNING) << *Thread::Current() << " attached without supplying a name";
778  }
779  return success;
780}
781
// Detaches the calling thread from the runtime. Fatal if the thread is not
// attached or is still executing managed code.
void Runtime::DetachCurrentThread() {
  Thread* self = Thread::Current();
  if (self == NULL) {
    LOG(FATAL) << "attempting to detach thread that is not attached";
  }
  if (self->HasManagedStack()) {
    LOG(FATAL) << *Thread::Current() << " attempting to detach while still running code";
  }
  thread_list_->Unregister(self);
}
792
793  mirror::Throwable* Runtime::GetPreAllocatedOutOfMemoryError() const {
794  if (pre_allocated_OutOfMemoryError_ == NULL) {
795    LOG(ERROR) << "Failed to return pre-allocated OOME";
796  }
797  return pre_allocated_OutOfMemoryError_;
798}
799
// Visits roots that may be scanned concurrently with the mutator: the intern
// table, class linker tables, and any active pre-initialization transaction.
void Runtime::VisitConcurrentRoots(RootCallback* callback, void* arg, bool only_dirty,
                                   bool clean_dirty) {
  intern_table_->VisitRoots(callback, arg, only_dirty, clean_dirty);
  class_linker_->VisitRoots(callback, arg, only_dirty, clean_dirty);
  // TODO: is it the right place ?
  if (preinitialization_transaction != nullptr) {
    preinitialization_transaction->VisitRoots(callback, arg);
  }
}
809
// Visits runtime-global roots that do not belong to any particular thread:
// class mirrors' cached statics, primitive array classes, JNI globals, the
// runtime's internal method references, and active method verifiers.
void Runtime::VisitNonThreadRoots(RootCallback* callback, void* arg) {
  // Visit the classes held as static in mirror classes.
  mirror::ArtField::VisitRoots(callback, arg);
  mirror::ArtMethod::VisitRoots(callback, arg);
  mirror::Class::VisitRoots(callback, arg);
  mirror::StackTraceElement::VisitRoots(callback, arg);
  mirror::String::VisitRoots(callback, arg);
  mirror::Throwable::VisitRoots(callback, arg);
  // Visit all the primitive array types classes.
  mirror::PrimitiveArray<uint8_t>::VisitRoots(callback, arg);   // BooleanArray
  mirror::PrimitiveArray<int8_t>::VisitRoots(callback, arg);    // ByteArray
  mirror::PrimitiveArray<uint16_t>::VisitRoots(callback, arg);  // CharArray
  mirror::PrimitiveArray<double>::VisitRoots(callback, arg);    // DoubleArray
  mirror::PrimitiveArray<float>::VisitRoots(callback, arg);     // FloatArray
  mirror::PrimitiveArray<int32_t>::VisitRoots(callback, arg);   // IntArray
  mirror::PrimitiveArray<int64_t>::VisitRoots(callback, arg);   // LongArray
  mirror::PrimitiveArray<int16_t>::VisitRoots(callback, arg);   // ShortArray
  java_vm_->VisitRoots(callback, arg);
  if (pre_allocated_OutOfMemoryError_ != nullptr) {
    callback(reinterpret_cast<mirror::Object**>(&pre_allocated_OutOfMemoryError_), arg, 0,
             kRootVMInternal);
    DCHECK(pre_allocated_OutOfMemoryError_ != nullptr);
  }
  callback(reinterpret_cast<mirror::Object**>(&resolution_method_), arg, 0, kRootVMInternal);
  DCHECK(resolution_method_ != nullptr);
  if (HasImtConflictMethod()) {
    callback(reinterpret_cast<mirror::Object**>(&imt_conflict_method_), arg, 0, kRootVMInternal);
  }
  if (HasDefaultImt()) {
    callback(reinterpret_cast<mirror::Object**>(&default_imt_), arg, 0, kRootVMInternal);
  }

  // Callee-save slots may still be empty early in startup; skip nulls.
  for (int i = 0; i < Runtime::kLastCalleeSaveType; i++) {
    if (callee_save_methods_[i] != nullptr) {
      callback(reinterpret_cast<mirror::Object**>(&callee_save_methods_[i]), arg, 0,
               kRootVMInternal);
    }
  }
  {
    MutexLock mu(Thread::Current(), method_verifiers_lock_);
    for (verifier::MethodVerifier* verifier : method_verifiers_) {
      verifier->VisitRoots(callback, arg);
    }
  }
}
855
// Visits the roots that must not be scanned concurrently with the mutator:
// first the per-thread roots (stacks, JNI locals, etc. via the thread list),
// then the runtime's remaining non-thread roots.
void Runtime::VisitNonConcurrentRoots(RootCallback* callback, void* arg) {
  thread_list_->VisitRoots(callback, arg);
  VisitNonThreadRoots(callback, arg);
}
860
// Visits all runtime roots by delegating to the concurrent and non-concurrent
// visitors. |only_dirty| and |clean_dirty| are forwarded unchanged to the
// concurrent-root visit (card-dirtiness filtering for the GC).
void Runtime::VisitRoots(RootCallback* callback, void* arg, bool only_dirty, bool clean_dirty) {
  VisitConcurrentRoots(callback, arg, only_dirty, clean_dirty);
  VisitNonConcurrentRoots(callback, arg);
}
865
866mirror::ObjectArray<mirror::ArtMethod>* Runtime::CreateDefaultImt(ClassLinker* cl) {
867  Thread* self = Thread::Current();
868  SirtRef<mirror::ObjectArray<mirror::ArtMethod> > imtable(self, cl->AllocArtMethodArray(self, 64));
869  mirror::ArtMethod* imt_conflict_method = Runtime::Current()->GetImtConflictMethod();
870  for (size_t i = 0; i < static_cast<size_t>(imtable->GetLength()); i++) {
871    imtable->Set<false>(i, imt_conflict_method);
872  }
873  return imtable.get();
874}
875
876mirror::ArtMethod* Runtime::CreateImtConflictMethod() {
877  Thread* self = Thread::Current();
878  Runtime* runtime = Runtime::Current();
879  ClassLinker* class_linker = runtime->GetClassLinker();
880  SirtRef<mirror::ArtMethod> method(self, class_linker->AllocArtMethod(self));
881  method->SetDeclaringClass(mirror::ArtMethod::GetJavaLangReflectArtMethod());
882  // TODO: use a special method for imt conflict method saves.
883  method->SetDexMethodIndex(DexFile::kDexNoIndex);
884  // When compiling, the code pointer will get set later when the image is loaded.
885  if (runtime->IsCompiler()) {
886    method->SetEntryPointFromPortableCompiledCode(nullptr);
887    method->SetEntryPointFromQuickCompiledCode(nullptr);
888  } else {
889    method->SetEntryPointFromPortableCompiledCode(GetPortableImtConflictTrampoline(class_linker));
890    method->SetEntryPointFromQuickCompiledCode(GetQuickImtConflictTrampoline(class_linker));
891  }
892  return method.get();
893}
894
895mirror::ArtMethod* Runtime::CreateResolutionMethod() {
896  Thread* self = Thread::Current();
897  Runtime* runtime = Runtime::Current();
898  ClassLinker* class_linker = runtime->GetClassLinker();
899  SirtRef<mirror::ArtMethod> method(self, class_linker->AllocArtMethod(self));
900  method->SetDeclaringClass(mirror::ArtMethod::GetJavaLangReflectArtMethod());
901  // TODO: use a special method for resolution method saves
902  method->SetDexMethodIndex(DexFile::kDexNoIndex);
903  // When compiling, the code pointer will get set later when the image is loaded.
904  if (runtime->IsCompiler()) {
905    method->SetEntryPointFromPortableCompiledCode(nullptr);
906    method->SetEntryPointFromQuickCompiledCode(nullptr);
907  } else {
908    method->SetEntryPointFromPortableCompiledCode(GetPortableResolutionTrampoline(class_linker));
909    method->SetEntryPointFromQuickCompiledCode(GetQuickResolutionTrampoline(class_linker));
910  }
911  return method.get();
912}
913
// Creates a dummy ArtMethod that records the callee-save frame layout (frame
// size plus core/FP spill masks) for |instruction_set|. |type| selects how
// much is preserved: the reference-holding callee saves are always spilled,
// argument registers are added for kRefsAndArgs, and the remaining registers
// (including FP registers on ARM) are added for kSaveAll. The frame always
// holds one extra slot for the Method* and is rounded up to kStackAlignment.
mirror::ArtMethod* Runtime::CreateCalleeSaveMethod(InstructionSet instruction_set,
                                                   CalleeSaveType type) {
  Thread* self = Thread::Current();
  Runtime* runtime = Runtime::Current();
  ClassLinker* class_linker = runtime->GetClassLinker();
  SirtRef<mirror::ArtMethod> method(self, class_linker->AllocArtMethod(self));
  method->SetDeclaringClass(mirror::ArtMethod::GetJavaLangReflectArtMethod());
  // TODO: use a special method for callee saves
  method->SetDexMethodIndex(DexFile::kDexNoIndex);
  // Callee-save methods carry only frame information, never code.
  method->SetEntryPointFromPortableCompiledCode(nullptr);
  method->SetEntryPointFromQuickCompiledCode(nullptr);
  if ((instruction_set == kThumb2) || (instruction_set == kArm)) {
    // Reference-holding callee-save registers: R5-R8, R10, R11.
    uint32_t ref_spills = (1 << art::arm::R5) | (1 << art::arm::R6)  | (1 << art::arm::R7) |
                          (1 << art::arm::R8) | (1 << art::arm::R10) | (1 << art::arm::R11);
    // Argument registers, spilled only for kRefsAndArgs.
    uint32_t arg_spills = (1 << art::arm::R1) | (1 << art::arm::R2) | (1 << art::arm::R3);
    // Remaining core registers, spilled only for kSaveAll.
    uint32_t all_spills = (1 << art::arm::R4) | (1 << art::arm::R9);
    // LR is always part of the core spills.
    uint32_t core_spills = ref_spills | (type == kRefsAndArgs ? arg_spills : 0) |
                           (type == kSaveAll ? all_spills : 0) | (1 << art::arm::LR);
    // All 32 single-precision VFP registers, S0-S31 (kSaveAll only).
    uint32_t fp_all_spills = (1 << art::arm::S0)  | (1 << art::arm::S1)  | (1 << art::arm::S2) |
                             (1 << art::arm::S3)  | (1 << art::arm::S4)  | (1 << art::arm::S5) |
                             (1 << art::arm::S6)  | (1 << art::arm::S7)  | (1 << art::arm::S8) |
                             (1 << art::arm::S9)  | (1 << art::arm::S10) | (1 << art::arm::S11) |
                             (1 << art::arm::S12) | (1 << art::arm::S13) | (1 << art::arm::S14) |
                             (1 << art::arm::S15) | (1 << art::arm::S16) | (1 << art::arm::S17) |
                             (1 << art::arm::S18) | (1 << art::arm::S19) | (1 << art::arm::S20) |
                             (1 << art::arm::S21) | (1 << art::arm::S22) | (1 << art::arm::S23) |
                             (1 << art::arm::S24) | (1 << art::arm::S25) | (1 << art::arm::S26) |
                             (1 << art::arm::S27) | (1 << art::arm::S28) | (1 << art::arm::S29) |
                             (1 << art::arm::S30) | (1 << art::arm::S31);
    uint32_t fp_spills = type == kSaveAll ? fp_all_spills : 0;
    // Frame = spilled GPRs + spilled FPRs + Method*, rounded to stack alignment.
    size_t frame_size = RoundUp((__builtin_popcount(core_spills) /* gprs */ +
                                 __builtin_popcount(fp_spills) /* fprs */ +
                                 1 /* Method* */) * kPointerSize, kStackAlignment);
    method->SetFrameSizeInBytes(frame_size);
    method->SetCoreSpillMask(core_spills);
    method->SetFpSpillMask(fp_spills);
  } else if (instruction_set == kMips) {
    // Reference-holding callee saves: S2-S7, GP, FP.
    uint32_t ref_spills = (1 << art::mips::S2) | (1 << art::mips::S3) | (1 << art::mips::S4) |
                          (1 << art::mips::S5) | (1 << art::mips::S6) | (1 << art::mips::S7) |
                          (1 << art::mips::GP) | (1 << art::mips::FP);
    uint32_t arg_spills = (1 << art::mips::A1) | (1 << art::mips::A2) | (1 << art::mips::A3);
    uint32_t all_spills = (1 << art::mips::S0) | (1 << art::mips::S1);
    // RA is always part of the core spills.
    uint32_t core_spills = ref_spills | (type == kRefsAndArgs ? arg_spills : 0) |
                           (type == kSaveAll ? all_spills : 0) | (1 << art::mips::RA);
    // NOTE(review): three extra words are reserved unless this is the
    // kRefsAndArgs frame — presumably padding/argument space required by the
    // MIPS quick entrypoints; confirm against those stubs.
    size_t frame_size = RoundUp((__builtin_popcount(core_spills) /* gprs */ +
                                (type == kRefsAndArgs ? 0 : 3) + 1 /* Method* */) *
                                kPointerSize, kStackAlignment);
    method->SetFrameSizeInBytes(frame_size);
    method->SetCoreSpillMask(core_spills);
    method->SetFpSpillMask(0);
  } else if (instruction_set == kX86) {
    // Reference-holding callee saves: EBP, ESI, EDI.
    uint32_t ref_spills = (1 << art::x86::EBP) | (1 << art::x86::ESI) | (1 << art::x86::EDI);
    uint32_t arg_spills = (1 << art::x86::ECX) | (1 << art::x86::EDX) | (1 << art::x86::EBX);
    uint32_t core_spills = ref_spills | (type == kRefsAndArgs ? arg_spills : 0) |
                         (1 << art::x86::kNumberOfCpuRegisters);  // fake return address callee save
    size_t frame_size = RoundUp((__builtin_popcount(core_spills) /* gprs */ +
                                 1 /* Method* */) * kPointerSize, kStackAlignment);
    method->SetFrameSizeInBytes(frame_size);
    method->SetCoreSpillMask(core_spills);
    method->SetFpSpillMask(0);
  } else if (instruction_set == kX86_64) {
    // Reference-holding callee saves: RBX, RBP, R12-R15.
    uint32_t ref_spills =
        (1 << art::x86_64::RBX) | (1 << art::x86_64::RBP) | (1 << art::x86_64::R12) |
        (1 << art::x86_64::R13) | (1 << art::x86_64::R14) | (1 << art::x86_64::R15);
    uint32_t arg_spills =
        (1 << art::x86_64::RSI) | (1 << art::x86_64::RDX) | (1 << art::x86_64::RCX) |
        (1 << art::x86_64::R8) | (1 << art::x86_64::R9);
    uint32_t core_spills = ref_spills | (type == kRefsAndArgs ? arg_spills : 0) |
        (1 << art::x86_64::kNumberOfCpuRegisters);  // fake return address callee save
    // XMM argument registers, spilled only for kRefsAndArgs.
    uint32_t fp_arg_spills =
        (1 << art::x86_64::XMM0) | (1 << art::x86_64::XMM1) | (1 << art::x86_64::XMM2) |
        (1 << art::x86_64::XMM3) | (1 << art::x86_64::XMM4) | (1 << art::x86_64::XMM5) |
        (1 << art::x86_64::XMM6) | (1 << art::x86_64::XMM7);
    uint32_t fp_spills = (type == kRefsAndArgs ? fp_arg_spills : 0);
    size_t frame_size = RoundUp((__builtin_popcount(core_spills) /* gprs */ +
                                 __builtin_popcount(fp_spills) /* fprs */ +
                                 1 /* Method* */) * kPointerSize, kStackAlignment);
    method->SetFrameSizeInBytes(frame_size);
    method->SetCoreSpillMask(core_spills);
    method->SetFpSpillMask(fp_spills);
  } else {
    UNIMPLEMENTED(FATAL) << instruction_set;
  }
  return method.get();
}
999
// Forbids creation of new entries in each of the system-weak containers:
// the monitor list, the intern table, JNI weak globals, and the debugger's
// object registry. Paired with AllowNewSystemWeaks().
void Runtime::DisallowNewSystemWeaks() {
  monitor_list_->DisallowNewMonitors();
  intern_table_->DisallowNewInterns();
  java_vm_->DisallowNewWeakGlobals();
  Dbg::DisallowNewObjectRegistryObjects();
}
1006
// Re-enables creation of system-weak entries in the same containers that
// DisallowNewSystemWeaks() blocked.
void Runtime::AllowNewSystemWeaks() {
  monitor_list_->AllowNewMonitors();
  intern_table_->AllowNewInterns();
  java_vm_->AllowNewWeakGlobals();
  Dbg::AllowNewObjectRegistryObjects();
}
1013
// Installs |method| as the callee-save method for |type|. The index is only
// bounds-checked in debug builds.
void Runtime::SetCalleeSaveMethod(mirror::ArtMethod* method, CalleeSaveType type) {
  DCHECK_LT(static_cast<int>(type), static_cast<int>(kLastCalleeSaveType));
  callee_save_methods_[type] = method;
}
1018
1019const std::vector<const DexFile*>& Runtime::GetCompileTimeClassPath(jobject class_loader) {
1020  if (class_loader == NULL) {
1021    return GetClassLinker()->GetBootClassPath();
1022  }
1023  CHECK(UseCompileTimeClassPath());
1024  CompileTimeClassPaths::const_iterator it = compile_time_class_paths_.find(class_loader);
1025  CHECK(it != compile_time_class_paths_.end());
1026  return it->second;
1027}
1028
// Registers the compile-time class path for |class_loader| and switches the
// runtime into compile-time-class-path mode. Must be called before the
// runtime starts (CHECKed). The vector is stored by value in the map.
void Runtime::SetCompileTimeClassPath(jobject class_loader,
                                      std::vector<const DexFile*>& class_path) {
  CHECK(!IsStarted());
  use_compile_time_class_path_ = true;
  compile_time_class_paths_.Put(class_loader, class_path);
}
1035
1036void Runtime::AddMethodVerifier(verifier::MethodVerifier* verifier) {
1037  DCHECK(verifier != nullptr);
1038  MutexLock mu(Thread::Current(), method_verifiers_lock_);
1039  method_verifiers_.insert(verifier);
1040}
1041
1042void Runtime::RemoveMethodVerifier(verifier::MethodVerifier* verifier) {
1043  DCHECK(verifier != nullptr);
1044  MutexLock mu(Thread::Current(), method_verifiers_lock_);
1045  auto it = method_verifiers_.find(verifier);
1046  CHECK(it != method_verifiers_.end());
1047  method_verifiers_.erase(it);
1048}
1049
// Starts the background method-sampling profiler using the period, duration,
// interval and backoff values configured on this runtime. |appDir| is where
// profile output goes; |startImmediately| is forwarded to the profiler.
void Runtime::StartProfiler(const char *appDir, bool startImmediately) {
  BackgroundMethodSamplingProfiler::Start(profile_period_s_, profile_duration_s_, appDir,
                                          profile_interval_us_, profile_backoff_coefficient_,
                                          startImmediately);
}
1055
// Transaction support.
// TODO: move these definitions to the header file so they can be inlined.
// Returns true while a compile-time class-initialization transaction is
// installed (between EnterTransactionMode() and ExitTransactionMode()).
bool Runtime::IsActiveTransaction() const {
  return preinitialization_transaction != nullptr;
}
1061
// Installs |transaction| as the active transaction. Compiler-only, and
// transactions do not nest (both DCHECKed).
void Runtime::EnterTransactionMode(Transaction* transaction) {
  DCHECK(IsCompiler());
  DCHECK(transaction != nullptr);
  DCHECK(!IsActiveTransaction());
  preinitialization_transaction = transaction;
}
1068
// Clears the active transaction. Compiler-only; a transaction must currently
// be installed (both DCHECKed). Does not delete the transaction object.
void Runtime::ExitTransactionMode() {
  DCHECK(IsCompiler());
  DCHECK(IsActiveTransaction());
  preinitialization_transaction = nullptr;
}
1074
// Forwards a 32-bit field write to the active transaction log. Requires the
// compiler runtime with an active transaction (DCHECKed).
void Runtime::RecordWriteField32(mirror::Object* obj, MemberOffset field_offset,
                                 uint32_t value, bool is_volatile) const {
  DCHECK(IsCompiler());
  DCHECK(IsActiveTransaction());
  preinitialization_transaction->RecordWriteField32(obj, field_offset, value, is_volatile);
}
1081
// Forwards a 64-bit field write to the active transaction log. Requires the
// compiler runtime with an active transaction (DCHECKed).
void Runtime::RecordWriteField64(mirror::Object* obj, MemberOffset field_offset,
                                 uint64_t value, bool is_volatile) const {
  DCHECK(IsCompiler());
  DCHECK(IsActiveTransaction());
  preinitialization_transaction->RecordWriteField64(obj, field_offset, value, is_volatile);
}
1088
// Forwards a reference field write to the active transaction log. Requires
// the compiler runtime with an active transaction (DCHECKed).
void Runtime::RecordWriteFieldReference(mirror::Object* obj, MemberOffset field_offset,
                                        mirror::Object* value, bool is_volatile) const {
  DCHECK(IsCompiler());
  DCHECK(IsActiveTransaction());
  preinitialization_transaction->RecordWriteFieldReference(obj, field_offset, value, is_volatile);
}
1095
// Forwards an array element write to the active transaction log. Requires the
// compiler runtime with an active transaction (DCHECKed).
void Runtime::RecordWriteArray(mirror::Array* array, size_t index, uint64_t value) const {
  DCHECK(IsCompiler());
  DCHECK(IsActiveTransaction());
  preinitialization_transaction->RecordWriteArray(array, index, value);
}
1101
// Forwards a strong intern-table insertion to the active transaction log.
// Requires the compiler runtime with an active transaction (DCHECKed).
void Runtime::RecordStrongStringInsertion(mirror::String* s, uint32_t hash_code) const {
  DCHECK(IsCompiler());
  DCHECK(IsActiveTransaction());
  preinitialization_transaction->RecordStrongStringInsertion(s, hash_code);
}
1107
// Forwards a weak intern-table insertion to the active transaction log.
// Requires the compiler runtime with an active transaction (DCHECKed).
void Runtime::RecordWeakStringInsertion(mirror::String* s, uint32_t hash_code) const {
  DCHECK(IsCompiler());
  DCHECK(IsActiveTransaction());
  preinitialization_transaction->RecordWeakStringInsertion(s, hash_code);
}
1113
// Forwards a strong intern-table removal to the active transaction log.
// Requires the compiler runtime with an active transaction (DCHECKed).
void Runtime::RecordStrongStringRemoval(mirror::String* s, uint32_t hash_code) const {
  DCHECK(IsCompiler());
  DCHECK(IsActiveTransaction());
  preinitialization_transaction->RecordStrongStringRemoval(s, hash_code);
}
1119
// Forwards a weak intern-table removal to the active transaction log.
// Requires the compiler runtime with an active transaction (DCHECKed).
void Runtime::RecordWeakStringRemoval(mirror::String* s, uint32_t hash_code) const {
  DCHECK(IsCompiler());
  DCHECK(IsActiveTransaction());
  preinitialization_transaction->RecordWeakStringRemoval(s, hash_code);
}
1125
// Stores |message| as the runtime's fault message, guarded by
// fault_message_lock_.
void Runtime::SetFaultMessage(const std::string& message) {
  MutexLock mu(Thread::Current(), fault_message_lock_);
  fault_message_ = message;
}
1130
1131}  // namespace art
1132