// Copyright 2012 the V8 project authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.

#include "src/v8.h"

#if V8_TARGET_ARCH_MIPS

// Note on Mips implementation:
//
// The result_register() for mips is the 'v0' register, which is defined
// by the ABI to contain function return values. However, the first
// parameter to a function is defined to be 'a0'. So there are many
// places where we have to move a previous result in v0 to a0 for the
// next call: mov(a0, v0). This is not needed on the other architectures.

#include "src/code-factory.h"
#include "src/code-stubs.h"
#include "src/codegen.h"
#include "src/compiler.h"
#include "src/debug.h"
#include "src/full-codegen.h"
#include "src/ic/ic.h"
#include "src/isolate-inl.h"
#include "src/parser.h"
#include "src/scopes.h"

#include "src/mips/code-stubs-mips.h"
#include "src/mips/macro-assembler-mips.h"

namespace v8 {
namespace internal {

#define __ ACCESS_MASM(masm_)


// A patch site is a location in the code which it is possible to patch. This
// class has a number of methods to emit the code which is patchable and the
// method EmitPatchInfo to record a marker back to the patchable code. This
// marker is an andi zero_reg, rx, #yyyy instruction, and rx * 0x0000ffff + yyyy
// (raw 16 bit immediate value is used) is the delta from the pc to the first
// instruction of the patchable code.
// The marker instruction is effectively a NOP (dest is zero_reg) and will
// never be emitted by normal code.
class JumpPatchSite BASE_EMBEDDED {
 public:
  explicit JumpPatchSite(MacroAssembler* masm) : masm_(masm) {
#ifdef DEBUG
    info_emitted_ = false;
#endif
  }

  ~JumpPatchSite() {
    DCHECK(patch_site_.is_bound() == info_emitted_);
  }

  // When initially emitting this, ensure that a jump is always generated to
  // skip the inlined smi code.
  void EmitJumpIfNotSmi(Register reg, Label* target) {
    DCHECK(!patch_site_.is_bound() && !info_emitted_);
    Assembler::BlockTrampolinePoolScope block_trampoline_pool(masm_);
    __ bind(&patch_site_);
    __ andi(at, reg, 0);
    // Always taken before patched.
    __ BranchShort(target, eq, at, Operand(zero_reg));
  }

  // When initially emitting this, ensure that a jump is never generated to
  // skip the inlined smi code.
  void EmitJumpIfSmi(Register reg, Label* target) {
    Assembler::BlockTrampolinePoolScope block_trampoline_pool(masm_);
    DCHECK(!patch_site_.is_bound() && !info_emitted_);
    __ bind(&patch_site_);
    __ andi(at, reg, 0);
    // Never taken before patched.
    __ BranchShort(target, ne, at, Operand(zero_reg));
  }

  void EmitPatchInfo() {
    if (patch_site_.is_bound()) {
      int delta_to_patch_site = masm_->InstructionsGeneratedSince(&patch_site_);
      Register reg = Register::from_code(delta_to_patch_site / kImm16Mask);
      __ andi(zero_reg, reg, delta_to_patch_site % kImm16Mask);
#ifdef DEBUG
      info_emitted_ = true;
#endif
    } else {
      __ nop();  // Signals no inlined code.
    }
  }

 private:
  MacroAssembler* masm_;
  Label patch_site_;
#ifdef DEBUG
  bool info_emitted_;
#endif
};
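
// A worked example of the encoding above (illustrative values only): if
// EmitPatchInfo runs 7 instructions after the patch site was bound, then
// delta_to_patch_site == 7 and, with kImm16Mask == 0xffff, reg is
// Register::from_code(7 / 0xffff), i.e. code 0 (zero_reg). The emitted marker
// is "andi zero_reg, zero_reg, 7", from which a patcher recovers the delta as
// reg_code * 0xffff + imm16 == 0 * 0xffff + 7 == 7.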


// Generate code for a JS function. On entry to the function the receiver
// and arguments have been pushed on the stack left to right. The actual
// argument count matches the formal parameter count expected by the
// function.
//
// The live registers are:
//   o a1: the JS function object being called (i.e. ourselves)
//   o cp: our context
//   o fp: our caller's frame pointer
//   o sp: stack pointer
//   o ra: return address
//
// The function builds a JS frame. Please see JavaScriptFrameConstants in
// frames-mips.h for its layout.
void FullCodeGenerator::Generate() {
  CompilationInfo* info = info_;
  handler_table_ =
      isolate()->factory()->NewFixedArray(function()->handler_count(), TENURED);

  profiling_counter_ = isolate()->factory()->NewCell(
      Handle<Smi>(Smi::FromInt(FLAG_interrupt_budget), isolate()));
  SetFunctionPosition(function());
  Comment cmnt(masm_, "[ function compiled by full code generator");

  ProfileEntryHookStub::MaybeCallEntryHook(masm_);

#ifdef DEBUG
  if (strlen(FLAG_stop_at) > 0 &&
      info->function()->name()->IsUtf8EqualTo(CStrVector(FLAG_stop_at))) {
    __ stop("stop-at");
  }
#endif

  // Sloppy mode functions and builtins need to replace the receiver with the
  // global proxy when called as functions (without an explicit receiver
  // object).
  if (info->strict_mode() == SLOPPY && !info->is_native()) {
    Label ok;
    int receiver_offset = info->scope()->num_parameters() * kPointerSize;
    __ lw(at, MemOperand(sp, receiver_offset));
    __ LoadRoot(a2, Heap::kUndefinedValueRootIndex);
    __ Branch(&ok, ne, a2, Operand(at));

    __ lw(a2, GlobalObjectOperand());
    __ lw(a2, FieldMemOperand(a2, GlobalObject::kGlobalProxyOffset));

    __ sw(a2, MemOperand(sp, receiver_offset));

    __ bind(&ok);
  }

  // Open a frame scope to indicate that there is a frame on the stack. The
  // MANUAL indicates that the scope shouldn't actually generate code to set up
  // the frame (that is done below).
  FrameScope frame_scope(masm_, StackFrame::MANUAL);

  info->set_prologue_offset(masm_->pc_offset());
  __ Prologue(info->IsCodePreAgingActive());
  info->AddNoFrameRange(0, masm_->pc_offset());

  { Comment cmnt(masm_, "[ Allocate locals");
    int locals_count = info->scope()->num_stack_slots();
    // Generators allocate locals, if any, in context slots.
    DCHECK(!info->function()->is_generator() || locals_count == 0);
    if (locals_count > 0) {
      if (locals_count >= 128) {
        Label ok;
        __ Subu(t5, sp, Operand(locals_count * kPointerSize));
        __ LoadRoot(a2, Heap::kRealStackLimitRootIndex);
        __ Branch(&ok, hs, t5, Operand(a2));
        __ InvokeBuiltin(Builtins::STACK_OVERFLOW, CALL_FUNCTION);
        __ bind(&ok);
      }
      __ LoadRoot(t5, Heap::kUndefinedValueRootIndex);
      int kMaxPushes = FLAG_optimize_for_size ? 4 : 32;
      if (locals_count >= kMaxPushes) {
        int loop_iterations = locals_count / kMaxPushes;
        __ li(a2, Operand(loop_iterations));
        Label loop_header;
        __ bind(&loop_header);
        // Do pushes.
        __ Subu(sp, sp, Operand(kMaxPushes * kPointerSize));
        for (int i = 0; i < kMaxPushes; i++) {
          __ sw(t5, MemOperand(sp, i * kPointerSize));
        }
        // Continue loop if not done.
        __ Subu(a2, a2, Operand(1));
        __ Branch(&loop_header, ne, a2, Operand(zero_reg));
      }
      int remaining = locals_count % kMaxPushes;
      // Emit the remaining pushes.
      __ Subu(sp, sp, Operand(remaining * kPointerSize));
      for (int i = 0; i < remaining; i++) {
        __ sw(t5, MemOperand(sp, i * kPointerSize));
      }
    }
  }

  bool function_in_register = true;

  // Possibly allocate a local context.
  int heap_slots = info->scope()->num_heap_slots() - Context::MIN_CONTEXT_SLOTS;
  if (heap_slots > 0) {
    Comment cmnt(masm_, "[ Allocate context");
    // Argument to NewContext is the function, which is still in a1.
    bool need_write_barrier = true;
    if (FLAG_harmony_scoping && info->scope()->is_global_scope()) {
      __ push(a1);
      __ Push(info->scope()->GetScopeInfo());
      __ CallRuntime(Runtime::kNewGlobalContext, 2);
    } else if (heap_slots <= FastNewContextStub::kMaximumSlots) {
      FastNewContextStub stub(isolate(), heap_slots);
      __ CallStub(&stub);
      // Result of FastNewContextStub is always in new space.
      need_write_barrier = false;
    } else {
      __ push(a1);
      __ CallRuntime(Runtime::kNewFunctionContext, 1);
    }
    function_in_register = false;
    // Context is returned in v0. It replaces the context passed to us.
    // It's saved in the stack and kept live in cp.
    __ mov(cp, v0);
    __ sw(v0, MemOperand(fp, StandardFrameConstants::kContextOffset));
    // Copy any necessary parameters into the context.
    int num_parameters = info->scope()->num_parameters();
    for (int i = 0; i < num_parameters; i++) {
      Variable* var = scope()->parameter(i);
      if (var->IsContextSlot()) {
        int parameter_offset = StandardFrameConstants::kCallerSPOffset +
                               (num_parameters - 1 - i) * kPointerSize;
        // Load parameter from stack.
        __ lw(a0, MemOperand(fp, parameter_offset));
        // Store it in the context.
        MemOperand target = ContextOperand(cp, var->index());
        __ sw(a0, target);

        // Update the write barrier.
        if (need_write_barrier) {
          __ RecordWriteContextSlot(
              cp, target.offset(), a0, a3, kRAHasBeenSaved, kDontSaveFPRegs);
        } else if (FLAG_debug_code) {
          Label done;
          __ JumpIfInNewSpace(cp, a0, &done);
          __ Abort(kExpectedNewSpaceObject);
          __ bind(&done);
        }
      }
    }
  }

  Variable* arguments = scope()->arguments();
  if (arguments != NULL) {
    // Function uses arguments object.
    Comment cmnt(masm_, "[ Allocate arguments object");
    if (!function_in_register) {
      // Load this again, if it's used by the local context below.
      __ lw(a3, MemOperand(fp, JavaScriptFrameConstants::kFunctionOffset));
    } else {
      __ mov(a3, a1);
    }
    // Receiver is just before the parameters on the caller's stack.
    int num_parameters = info->scope()->num_parameters();
    int offset = num_parameters * kPointerSize;
    __ Addu(a2, fp,
            Operand(StandardFrameConstants::kCallerSPOffset + offset));
    __ li(a1, Operand(Smi::FromInt(num_parameters)));
    __ Push(a3, a2, a1);

    // Arguments to ArgumentsAccessStub:
    //   function, receiver address, parameter count.
    // The stub will rewrite receiver and parameter count if the previous
    // stack frame was an arguments adapter frame.
    ArgumentsAccessStub::Type type;
    if (strict_mode() == STRICT) {
      type = ArgumentsAccessStub::NEW_STRICT;
    } else if (function()->has_duplicate_parameters()) {
      type = ArgumentsAccessStub::NEW_SLOPPY_SLOW;
    } else {
      type = ArgumentsAccessStub::NEW_SLOPPY_FAST;
    }
    ArgumentsAccessStub stub(isolate(), type);
    __ CallStub(&stub);

    SetVar(arguments, v0, a1, a2);
  }

  if (FLAG_trace) {
    __ CallRuntime(Runtime::kTraceEnter, 0);
  }

  // Visit the declarations and body unless there is an illegal
  // redeclaration.
  if (scope()->HasIllegalRedeclaration()) {
    Comment cmnt(masm_, "[ Declarations");
    scope()->VisitIllegalRedeclaration(this);

  } else {
    PrepareForBailoutForId(BailoutId::FunctionEntry(), NO_REGISTERS);
    { Comment cmnt(masm_, "[ Declarations");
      // For named function expressions, declare the function name as a
      // constant.
      if (scope()->is_function_scope() && scope()->function() != NULL) {
        VariableDeclaration* function = scope()->function();
        DCHECK(function->proxy()->var()->mode() == CONST ||
               function->proxy()->var()->mode() == CONST_LEGACY);
        DCHECK(function->proxy()->var()->location() != Variable::UNALLOCATED);
        VisitVariableDeclaration(function);
      }
      VisitDeclarations(scope()->declarations());
    }

    { Comment cmnt(masm_, "[ Stack check");
      PrepareForBailoutForId(BailoutId::Declarations(), NO_REGISTERS);
      Label ok;
      __ LoadRoot(at, Heap::kStackLimitRootIndex);
      __ Branch(&ok, hs, sp, Operand(at));
      Handle<Code> stack_check = isolate()->builtins()->StackCheck();
      PredictableCodeSizeScope predictable(masm_,
          masm_->CallSize(stack_check, RelocInfo::CODE_TARGET));
      __ Call(stack_check, RelocInfo::CODE_TARGET);
      __ bind(&ok);
    }

    { Comment cmnt(masm_, "[ Body");
      DCHECK(loop_depth() == 0);
      VisitStatements(function()->body());
      DCHECK(loop_depth() == 0);
    }
  }

  // Always emit a 'return undefined' in case control fell off the end of
  // the body.
  { Comment cmnt(masm_, "[ return <undefined>;");
    __ LoadRoot(v0, Heap::kUndefinedValueRootIndex);
  }
  EmitReturnSequence();
}


void FullCodeGenerator::ClearAccumulator() {
  DCHECK(Smi::FromInt(0) == 0);
  __ mov(v0, zero_reg);
}


void FullCodeGenerator::EmitProfilingCounterDecrement(int delta) {
  __ li(a2, Operand(profiling_counter_));
  __ lw(a3, FieldMemOperand(a2, Cell::kValueOffset));
  __ Subu(a3, a3, Operand(Smi::FromInt(delta)));
  __ sw(a3, FieldMemOperand(a2, Cell::kValueOffset));
}


void FullCodeGenerator::EmitProfilingCounterReset() {
  int reset_value = FLAG_interrupt_budget;
  if (info_->is_debug()) {
    // Detect debug break requests as soon as possible.
    reset_value = FLAG_interrupt_budget >> 4;
  }
  __ li(a2, Operand(profiling_counter_));
  __ li(a3, Operand(Smi::FromInt(reset_value)));
  __ sw(a3, FieldMemOperand(a2, Cell::kValueOffset));
}
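
// The two helpers above maintain the interrupt budget: a Smi counter stored
// in a Cell, decremented on back edges and returns by a weight derived from
// code distance. As a hypothetical example (the multiplier is illustrative,
// not the actual kCodeSizeMultiplier), a back edge 1200 bytes behind the
// current pc with a multiplier of 100 costs
// Min(kMaxBackEdgeWeight, Max(1, 1200 / 100)), i.e. 12 units assuming
// kMaxBackEdgeWeight >= 12; when the counter drops below zero, the
// InterruptCheck builtin runs and the counter is reset.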


void FullCodeGenerator::EmitBackEdgeBookkeeping(IterationStatement* stmt,
                                                Label* back_edge_target) {
  // The generated code is used in Deoptimizer::PatchStackCheckCodeAt so we need
  // to make sure it is constant. Branch may emit a skip-or-jump sequence
  // instead of the normal Branch. It seems that the "skip" part of that
  // sequence is about as long as this Branch would be so it is safe to ignore
  // that.
  Assembler::BlockTrampolinePoolScope block_trampoline_pool(masm_);
  Comment cmnt(masm_, "[ Back edge bookkeeping");
  Label ok;
  DCHECK(back_edge_target->is_bound());
  int distance = masm_->SizeOfCodeGeneratedSince(back_edge_target);
  int weight = Min(kMaxBackEdgeWeight,
                   Max(1, distance / kCodeSizeMultiplier));
  EmitProfilingCounterDecrement(weight);
  __ slt(at, a3, zero_reg);
  __ beq(at, zero_reg, &ok);
  // Call will emit a li t9 first, so it is safe to use the delay slot.
  __ Call(isolate()->builtins()->InterruptCheck(), RelocInfo::CODE_TARGET);
  // Record a mapping of this PC offset to the OSR id. This is used to find
  // the AST id from the unoptimized code in order to use it as a key into
  // the deoptimization input data found in the optimized code.
  RecordBackEdge(stmt->OsrEntryId());
  EmitProfilingCounterReset();

  __ bind(&ok);
  PrepareForBailoutForId(stmt->EntryId(), NO_REGISTERS);
  // Record a mapping of the OSR id to this PC. This is used if the OSR
  // entry becomes the target of a bailout. We don't expect it to be, but
  // we want it to work if it is.
  PrepareForBailoutForId(stmt->OsrEntryId(), NO_REGISTERS);
}


void FullCodeGenerator::EmitReturnSequence() {
  Comment cmnt(masm_, "[ Return sequence");
  if (return_label_.is_bound()) {
    __ Branch(&return_label_);
  } else {
    __ bind(&return_label_);
    if (FLAG_trace) {
      // Push the return value on the stack as the parameter.
      // Runtime::TraceExit returns its parameter in v0.
      __ push(v0);
      __ CallRuntime(Runtime::kTraceExit, 1);
    }
    // Pretend that the exit is a backwards jump to the entry.
    int weight = 1;
    if (info_->ShouldSelfOptimize()) {
      weight = FLAG_interrupt_budget / FLAG_self_opt_count;
    } else {
      int distance = masm_->pc_offset();
      weight = Min(kMaxBackEdgeWeight,
                   Max(1, distance / kCodeSizeMultiplier));
    }
    EmitProfilingCounterDecrement(weight);
    Label ok;
    __ Branch(&ok, ge, a3, Operand(zero_reg));
    __ push(v0);
    __ Call(isolate()->builtins()->InterruptCheck(),
            RelocInfo::CODE_TARGET);
    __ pop(v0);
    EmitProfilingCounterReset();
    __ bind(&ok);

#ifdef DEBUG
    // Add a label for checking the size of the code used for returning.
    Label check_exit_codesize;
    masm_->bind(&check_exit_codesize);
#endif
    // Make sure that the constant pool is not emitted inside of the return
    // sequence.
    { Assembler::BlockTrampolinePoolScope block_trampoline_pool(masm_);
      // Here we use masm_-> instead of the __ macro to avoid the code coverage
      // tool from instrumenting as we rely on the code size here.
      int32_t sp_delta = (info_->scope()->num_parameters() + 1) * kPointerSize;
      CodeGenerator::RecordPositions(masm_, function()->end_position() - 1);
      __ RecordJSReturn();
      masm_->mov(sp, fp);
      int no_frame_start = masm_->pc_offset();
      masm_->MultiPop(static_cast<RegList>(fp.bit() | ra.bit()));
      masm_->Addu(sp, sp, Operand(sp_delta));
      masm_->Jump(ra);
      info_->AddNoFrameRange(no_frame_start, masm_->pc_offset());
    }

#ifdef DEBUG
    // Check that the size of the code used for returning is large enough
    // for the debugger's requirements.
    DCHECK(Assembler::kJSReturnSequenceInstructions <=
           masm_->InstructionsGeneratedSince(&check_exit_codesize));
#endif
  }
}
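
// Note on the sp adjustment in EmitReturnSequence above: sp_delta drops the
// receiver together with the formal parameters. For instance, a function with
// two declared parameters on MIPS32 (kPointerSize == 4) releases
// (2 + 1) * 4 == 12 bytes of stack after the frame is torn down.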


void FullCodeGenerator::EffectContext::Plug(Variable* var) const {
  DCHECK(var->IsStackAllocated() || var->IsContextSlot());
}


void FullCodeGenerator::AccumulatorValueContext::Plug(Variable* var) const {
  DCHECK(var->IsStackAllocated() || var->IsContextSlot());
  codegen()->GetVar(result_register(), var);
}


void FullCodeGenerator::StackValueContext::Plug(Variable* var) const {
  DCHECK(var->IsStackAllocated() || var->IsContextSlot());
  codegen()->GetVar(result_register(), var);
  __ push(result_register());
}


void FullCodeGenerator::TestContext::Plug(Variable* var) const {
  // For simplicity we always test the accumulator register.
  codegen()->GetVar(result_register(), var);
  codegen()->PrepareForBailoutBeforeSplit(condition(), false, NULL, NULL);
  codegen()->DoTest(this);
}


void FullCodeGenerator::EffectContext::Plug(Heap::RootListIndex index) const {
}


void FullCodeGenerator::AccumulatorValueContext::Plug(
    Heap::RootListIndex index) const {
  __ LoadRoot(result_register(), index);
}


void FullCodeGenerator::StackValueContext::Plug(
    Heap::RootListIndex index) const {
  __ LoadRoot(result_register(), index);
  __ push(result_register());
}


void FullCodeGenerator::TestContext::Plug(Heap::RootListIndex index) const {
  codegen()->PrepareForBailoutBeforeSplit(condition(),
                                          true,
                                          true_label_,
                                          false_label_);
  if (index == Heap::kUndefinedValueRootIndex ||
      index == Heap::kNullValueRootIndex ||
      index == Heap::kFalseValueRootIndex) {
    if (false_label_ != fall_through_) __ Branch(false_label_);
  } else if (index == Heap::kTrueValueRootIndex) {
    if (true_label_ != fall_through_) __ Branch(true_label_);
  } else {
    __ LoadRoot(result_register(), index);
    codegen()->DoTest(this);
  }
}


void FullCodeGenerator::EffectContext::Plug(Handle<Object> lit) const {
}


void FullCodeGenerator::AccumulatorValueContext::Plug(
    Handle<Object> lit) const {
  __ li(result_register(), Operand(lit));
}


void FullCodeGenerator::StackValueContext::Plug(Handle<Object> lit) const {
  // Immediates cannot be pushed directly.
  __ li(result_register(), Operand(lit));
  __ push(result_register());
}


void FullCodeGenerator::TestContext::Plug(Handle<Object> lit) const {
  codegen()->PrepareForBailoutBeforeSplit(condition(),
                                          true,
                                          true_label_,
                                          false_label_);
  DCHECK(!lit->IsUndetectableObject());  // There are no undetectable literals.
  if (lit->IsUndefined() || lit->IsNull() || lit->IsFalse()) {
    if (false_label_ != fall_through_) __ Branch(false_label_);
  } else if (lit->IsTrue() || lit->IsJSObject()) {
    if (true_label_ != fall_through_) __ Branch(true_label_);
  } else if (lit->IsString()) {
    if (String::cast(*lit)->length() == 0) {
      if (false_label_ != fall_through_) __ Branch(false_label_);
    } else {
      if (true_label_ != fall_through_) __ Branch(true_label_);
    }
  } else if (lit->IsSmi()) {
    if (Smi::cast(*lit)->value() == 0) {
      if (false_label_ != fall_through_) __ Branch(false_label_);
    } else {
      if (true_label_ != fall_through_) __ Branch(true_label_);
    }
  } else {
    // For simplicity we always test the accumulator register.
    __ li(result_register(), Operand(lit));
    codegen()->DoTest(this);
  }
}


void FullCodeGenerator::EffectContext::DropAndPlug(int count,
                                                   Register reg) const {
  DCHECK(count > 0);
  __ Drop(count);
}


void FullCodeGenerator::AccumulatorValueContext::DropAndPlug(
    int count,
    Register reg) const {
  DCHECK(count > 0);
  __ Drop(count);
  __ Move(result_register(), reg);
}


void FullCodeGenerator::StackValueContext::DropAndPlug(int count,
                                                       Register reg) const {
  DCHECK(count > 0);
  if (count > 1) __ Drop(count - 1);
  __ sw(reg, MemOperand(sp, 0));
}


void FullCodeGenerator::TestContext::DropAndPlug(int count,
                                                 Register reg) const {
  DCHECK(count > 0);
  // For simplicity we always test the accumulator register.
  __ Drop(count);
  __ Move(result_register(), reg);
  codegen()->PrepareForBailoutBeforeSplit(condition(), false, NULL, NULL);
  codegen()->DoTest(this);
}


void FullCodeGenerator::EffectContext::Plug(Label* materialize_true,
                                            Label* materialize_false) const {
  DCHECK(materialize_true == materialize_false);
  __ bind(materialize_true);
}


void FullCodeGenerator::AccumulatorValueContext::Plug(
    Label* materialize_true,
    Label* materialize_false) const {
  Label done;
  __ bind(materialize_true);
  __ LoadRoot(result_register(), Heap::kTrueValueRootIndex);
  __ Branch(&done);
  __ bind(materialize_false);
  __ LoadRoot(result_register(), Heap::kFalseValueRootIndex);
  __ bind(&done);
}


void FullCodeGenerator::StackValueContext::Plug(
    Label* materialize_true,
    Label* materialize_false) const {
  Label done;
  __ bind(materialize_true);
  __ LoadRoot(at, Heap::kTrueValueRootIndex);
  // Push the value as the following branch can clobber at in long branch mode.
  __ push(at);
  __ Branch(&done);
  __ bind(materialize_false);
  __ LoadRoot(at, Heap::kFalseValueRootIndex);
  __ push(at);
  __ bind(&done);
}


void FullCodeGenerator::TestContext::Plug(Label* materialize_true,
                                          Label* materialize_false) const {
  DCHECK(materialize_true == true_label_);
  DCHECK(materialize_false == false_label_);
}


void FullCodeGenerator::EffectContext::Plug(bool flag) const {
}


void FullCodeGenerator::AccumulatorValueContext::Plug(bool flag) const {
  Heap::RootListIndex value_root_index =
      flag ? Heap::kTrueValueRootIndex : Heap::kFalseValueRootIndex;
  __ LoadRoot(result_register(), value_root_index);
}


void FullCodeGenerator::StackValueContext::Plug(bool flag) const {
  Heap::RootListIndex value_root_index =
      flag ? Heap::kTrueValueRootIndex : Heap::kFalseValueRootIndex;
  __ LoadRoot(at, value_root_index);
  __ push(at);
}


void FullCodeGenerator::TestContext::Plug(bool flag) const {
  codegen()->PrepareForBailoutBeforeSplit(condition(),
                                          true,
                                          true_label_,
                                          false_label_);
  if (flag) {
    if (true_label_ != fall_through_) __ Branch(true_label_);
  } else {
    if (false_label_ != fall_through_) __ Branch(false_label_);
  }
}


void FullCodeGenerator::DoTest(Expression* condition,
                               Label* if_true,
                               Label* if_false,
                               Label* fall_through) {
  __ mov(a0, result_register());
  Handle<Code> ic = ToBooleanStub::GetUninitialized(isolate());
  CallIC(ic, condition->test_id());
  __ mov(at, zero_reg);
  Split(ne, v0, Operand(at), if_true, if_false, fall_through);
}


void FullCodeGenerator::Split(Condition cc,
                              Register lhs,
                              const Operand& rhs,
                              Label* if_true,
                              Label* if_false,
                              Label* fall_through) {
  if (if_false == fall_through) {
    __ Branch(if_true, cc, lhs, rhs);
  } else if (if_true == fall_through) {
    __ Branch(if_false, NegateCondition(cc), lhs, rhs);
  } else {
    __ Branch(if_true, cc, lhs, rhs);
    __ Branch(if_false);
  }
}
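
// For example, a caller whose false label is also the fall-through, as in
//   Split(eq, v0, Operand(at), if_true, if_false, if_false);
// gets a single conditional branch to if_true; the second, unconditional
// branch is only emitted when neither label is the fall-through.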


MemOperand FullCodeGenerator::StackOperand(Variable* var) {
  DCHECK(var->IsStackAllocated());
  // Offset is negative because higher indexes are at lower addresses.
  int offset = -var->index() * kPointerSize;
  // Adjust by a (parameter or local) base offset.
  if (var->IsParameter()) {
    offset += (info_->scope()->num_parameters() + 1) * kPointerSize;
  } else {
    offset += JavaScriptFrameConstants::kLocal0Offset;
  }
  return MemOperand(fp, offset);
}


MemOperand FullCodeGenerator::VarOperand(Variable* var, Register scratch) {
  DCHECK(var->IsContextSlot() || var->IsStackAllocated());
  if (var->IsContextSlot()) {
    int context_chain_length = scope()->ContextChainLength(var->scope());
    __ LoadContext(scratch, context_chain_length);
    return ContextOperand(scratch, var->index());
  } else {
    return StackOperand(var);
  }
}


void FullCodeGenerator::GetVar(Register dest, Variable* var) {
  // Use destination as scratch.
  MemOperand location = VarOperand(var, dest);
  __ lw(dest, location);
}


void FullCodeGenerator::SetVar(Variable* var,
                               Register src,
                               Register scratch0,
                               Register scratch1) {
  DCHECK(var->IsContextSlot() || var->IsStackAllocated());
  DCHECK(!scratch0.is(src));
  DCHECK(!scratch0.is(scratch1));
  DCHECK(!scratch1.is(src));
  MemOperand location = VarOperand(var, scratch0);
  __ sw(src, location);
  // Emit the write barrier code if the location is in the heap.
  if (var->IsContextSlot()) {
    __ RecordWriteContextSlot(scratch0,
                              location.offset(),
                              src,
                              scratch1,
                              kRAHasBeenSaved,
                              kDontSaveFPRegs);
  }
}


void FullCodeGenerator::PrepareForBailoutBeforeSplit(Expression* expr,
                                                     bool should_normalize,
                                                     Label* if_true,
                                                     Label* if_false) {
  // Only prepare for bailouts before splits if we're in a test
  // context. Otherwise, we let the Visit function deal with the
  // preparation to avoid preparing with the same AST id twice.
  if (!context()->IsTest() || !info_->IsOptimizable()) return;

  Label skip;
  if (should_normalize) __ Branch(&skip);
  PrepareForBailout(expr, TOS_REG);
  if (should_normalize) {
    __ LoadRoot(t0, Heap::kTrueValueRootIndex);
    Split(eq, a0, Operand(t0), if_true, if_false, NULL);
    __ bind(&skip);
  }
}


void FullCodeGenerator::EmitDebugCheckDeclarationContext(Variable* variable) {
  // The variable in the declaration always resides in the current function
  // context.
  DCHECK_EQ(0, scope()->ContextChainLength(variable->scope()));
  if (generate_debug_code_) {
    // Check that we're not inside a with or catch context.
    __ lw(a1, FieldMemOperand(cp, HeapObject::kMapOffset));
    __ LoadRoot(t0, Heap::kWithContextMapRootIndex);
    __ Check(ne, kDeclarationInWithContext,
        a1, Operand(t0));
    __ LoadRoot(t0, Heap::kCatchContextMapRootIndex);
    __ Check(ne, kDeclarationInCatchContext,
        a1, Operand(t0));
  }
}


void FullCodeGenerator::VisitVariableDeclaration(
    VariableDeclaration* declaration) {
  // If it was not possible to allocate the variable at compile time, we
  // need to "declare" it at runtime to make sure it actually exists in the
  // local context.
  VariableProxy* proxy = declaration->proxy();
  VariableMode mode = declaration->mode();
  Variable* variable = proxy->var();
  bool hole_init = mode == LET || mode == CONST || mode == CONST_LEGACY;
  switch (variable->location()) {
    case Variable::UNALLOCATED:
      globals_->Add(variable->name(), zone());
      globals_->Add(variable->binding_needs_init()
                        ? isolate()->factory()->the_hole_value()
                        : isolate()->factory()->undefined_value(),
                    zone());
      break;

    case Variable::PARAMETER:
    case Variable::LOCAL:
      if (hole_init) {
        Comment cmnt(masm_, "[ VariableDeclaration");
        __ LoadRoot(t0, Heap::kTheHoleValueRootIndex);
        __ sw(t0, StackOperand(variable));
      }
      break;

    case Variable::CONTEXT:
      if (hole_init) {
        Comment cmnt(masm_, "[ VariableDeclaration");
        EmitDebugCheckDeclarationContext(variable);
        __ LoadRoot(at, Heap::kTheHoleValueRootIndex);
        __ sw(at, ContextOperand(cp, variable->index()));
        // No write barrier since the_hole_value is in old space.
        PrepareForBailoutForId(proxy->id(), NO_REGISTERS);
      }
      break;

    case Variable::LOOKUP: {
      Comment cmnt(masm_, "[ VariableDeclaration");
      __ li(a2, Operand(variable->name()));
      // Declaration nodes are always introduced in one of four modes.
      DCHECK(IsDeclaredVariableMode(mode));
      PropertyAttributes attr =
          IsImmutableVariableMode(mode) ? READ_ONLY : NONE;
      __ li(a1, Operand(Smi::FromInt(attr)));
      // Push initial value, if any.
      // Note: For variables we must not push an initial value (such as
      // 'undefined') because we may have a (legal) redeclaration and we
      // must not destroy the current value.
      if (hole_init) {
        __ LoadRoot(a0, Heap::kTheHoleValueRootIndex);
        __ Push(cp, a2, a1, a0);
      } else {
        DCHECK(Smi::FromInt(0) == 0);
        __ mov(a0, zero_reg);  // Smi::FromInt(0) indicates no initial value.
        __ Push(cp, a2, a1, a0);
      }
      __ CallRuntime(Runtime::kDeclareLookupSlot, 4);
      break;
    }
  }
}


void FullCodeGenerator::VisitFunctionDeclaration(
    FunctionDeclaration* declaration) {
  VariableProxy* proxy = declaration->proxy();
  Variable* variable = proxy->var();
  switch (variable->location()) {
    case Variable::UNALLOCATED: {
      globals_->Add(variable->name(), zone());
      Handle<SharedFunctionInfo> function =
          Compiler::BuildFunctionInfo(declaration->fun(), script(), info_);
      // Check for stack-overflow exception.
      if (function.is_null()) return SetStackOverflow();
      globals_->Add(function, zone());
      break;
    }

    case Variable::PARAMETER:
    case Variable::LOCAL: {
      Comment cmnt(masm_, "[ FunctionDeclaration");
      VisitForAccumulatorValue(declaration->fun());
      __ sw(result_register(), StackOperand(variable));
      break;
    }

    case Variable::CONTEXT: {
      Comment cmnt(masm_, "[ FunctionDeclaration");
      EmitDebugCheckDeclarationContext(variable);
      VisitForAccumulatorValue(declaration->fun());
      __ sw(result_register(), ContextOperand(cp, variable->index()));
      int offset = Context::SlotOffset(variable->index());
      // We know that we have written a function, which is not a smi.
      __ RecordWriteContextSlot(cp,
                                offset,
                                result_register(),
                                a2,
                                kRAHasBeenSaved,
                                kDontSaveFPRegs,
                                EMIT_REMEMBERED_SET,
                                OMIT_SMI_CHECK);
      PrepareForBailoutForId(proxy->id(), NO_REGISTERS);
      break;
    }

    case Variable::LOOKUP: {
      Comment cmnt(masm_, "[ FunctionDeclaration");
      __ li(a2, Operand(variable->name()));
      __ li(a1, Operand(Smi::FromInt(NONE)));
      __ Push(cp, a2, a1);
      // Push initial value for function declaration.
      VisitForStackValue(declaration->fun());
      __ CallRuntime(Runtime::kDeclareLookupSlot, 4);
      break;
    }
  }
}


void FullCodeGenerator::VisitModuleDeclaration(ModuleDeclaration* declaration) {
  Variable* variable = declaration->proxy()->var();
  DCHECK(variable->location() == Variable::CONTEXT);
  DCHECK(variable->interface()->IsFrozen());

  Comment cmnt(masm_, "[ ModuleDeclaration");
  EmitDebugCheckDeclarationContext(variable);

  // Load instance object.
  __ LoadContext(a1, scope_->ContextChainLength(scope_->GlobalScope()));
  __ lw(a1, ContextOperand(a1, variable->interface()->Index()));
  __ lw(a1, ContextOperand(a1, Context::EXTENSION_INDEX));

  // Assign it.
  __ sw(a1, ContextOperand(cp, variable->index()));
  // We know that we have written a module, which is not a smi.
  __ RecordWriteContextSlot(cp,
                            Context::SlotOffset(variable->index()),
                            a1,
                            a3,
                            kRAHasBeenSaved,
                            kDontSaveFPRegs,
                            EMIT_REMEMBERED_SET,
                            OMIT_SMI_CHECK);
  PrepareForBailoutForId(declaration->proxy()->id(), NO_REGISTERS);

  // Traverse into body.
  Visit(declaration->module());
}


void FullCodeGenerator::VisitImportDeclaration(ImportDeclaration* declaration) {
  VariableProxy* proxy = declaration->proxy();
  Variable* variable = proxy->var();
  switch (variable->location()) {
    case Variable::UNALLOCATED:
      // TODO(rossberg)
      break;

    case Variable::CONTEXT: {
      Comment cmnt(masm_, "[ ImportDeclaration");
      EmitDebugCheckDeclarationContext(variable);
      // TODO(rossberg)
      break;
    }

    case Variable::PARAMETER:
    case Variable::LOCAL:
    case Variable::LOOKUP:
      UNREACHABLE();
  }
}


void FullCodeGenerator::VisitExportDeclaration(ExportDeclaration* declaration) {
  // TODO(rossberg)
}


void FullCodeGenerator::DeclareGlobals(Handle<FixedArray> pairs) {
  // Call the runtime to declare the globals.
  // The context is the first argument.
  __ li(a1, Operand(pairs));
  __ li(a0, Operand(Smi::FromInt(DeclareGlobalsFlags())));
  __ Push(cp, a1, a0);
  __ CallRuntime(Runtime::kDeclareGlobals, 3);
  // Return value is ignored.
}


void FullCodeGenerator::DeclareModules(Handle<FixedArray> descriptions) {
  // Call the runtime to declare the modules.
  __ Push(descriptions);
  __ CallRuntime(Runtime::kDeclareModules, 1);
  // Return value is ignored.
}
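

// VisitSwitchStatement below lowers a switch to a chain of '===' tests: the
// tag value stays on the stack, each non-default clause compares it against
// the clause label (with an inlined smi fast path recorded via JumpPatchSite),
// and the case bodies are emitted afterwards in source order.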
void FullCodeGenerator::VisitSwitchStatement(SwitchStatement* stmt) {
  Comment cmnt(masm_, "[ SwitchStatement");
  Breakable nested_statement(this, stmt);
  SetStatementPosition(stmt);

  // Keep the switch value on the stack until a case matches.
  VisitForStackValue(stmt->tag());
  PrepareForBailoutForId(stmt->EntryId(), NO_REGISTERS);

  ZoneList<CaseClause*>* clauses = stmt->cases();
  CaseClause* default_clause = NULL;  // Can occur anywhere in the list.

  Label next_test;  // Recycled for each test.
  // Compile all the tests with branches to their bodies.
  for (int i = 0; i < clauses->length(); i++) {
    CaseClause* clause = clauses->at(i);
    clause->body_target()->Unuse();

    // The default is not a test, but remember it as final fall through.
    if (clause->is_default()) {
      default_clause = clause;
      continue;
    }

    Comment cmnt(masm_, "[ Case comparison");
    __ bind(&next_test);
    next_test.Unuse();

    // Compile the label expression.
    VisitForAccumulatorValue(clause->label());
    __ mov(a0, result_register());  // CompareStub requires args in a0, a1.

    // Perform the comparison as if via '==='.
    __ lw(a1, MemOperand(sp, 0));  // Switch value.
    bool inline_smi_code = ShouldInlineSmiCase(Token::EQ_STRICT);
    JumpPatchSite patch_site(masm_);
    if (inline_smi_code) {
      Label slow_case;
      __ or_(a2, a1, a0);
      patch_site.EmitJumpIfNotSmi(a2, &slow_case);

      __ Branch(&next_test, ne, a1, Operand(a0));
      __ Drop(1);  // Switch value is no longer needed.
      __ Branch(clause->body_target());

      __ bind(&slow_case);
    }

    // Record position before stub call for type feedback.
    SetSourcePosition(clause->position());
    Handle<Code> ic =
        CodeFactory::CompareIC(isolate(), Token::EQ_STRICT).code();
    CallIC(ic, clause->CompareId());
    patch_site.EmitPatchInfo();

    Label skip;
    __ Branch(&skip);
    PrepareForBailout(clause, TOS_REG);
    __ LoadRoot(at, Heap::kTrueValueRootIndex);
    __ Branch(&next_test, ne, v0, Operand(at));
    __ Drop(1);
    __ Branch(clause->body_target());
    __ bind(&skip);

    __ Branch(&next_test, ne, v0, Operand(zero_reg));
    __ Drop(1);  // Switch value is no longer needed.
    __ Branch(clause->body_target());
  }

  // Discard the test value and jump to the default if present, otherwise to
  // the end of the statement.
  __ bind(&next_test);
  __ Drop(1);  // Switch value is no longer needed.
  if (default_clause == NULL) {
    __ Branch(nested_statement.break_label());
  } else {
    __ Branch(default_clause->body_target());
  }

  // Compile all the case bodies.
  for (int i = 0; i < clauses->length(); i++) {
    Comment cmnt(masm_, "[ Case body");
    CaseClause* clause = clauses->at(i);
    __ bind(clause->body_target());
    PrepareForBailoutForId(clause->EntryId(), NO_REGISTERS);
    VisitStatements(clause->statements());
  }

  __ bind(nested_statement.break_label());
  PrepareForBailoutForId(stmt->ExitId(), NO_REGISTERS);
}


void FullCodeGenerator::VisitForInStatement(ForInStatement* stmt) {
  Comment cmnt(masm_, "[ ForInStatement");
  int slot = stmt->ForInFeedbackSlot();
  SetStatementPosition(stmt);

  Label loop, exit;
  ForIn loop_statement(this, stmt);
  increment_loop_depth();

  // Get the object to enumerate over. If the object is null or undefined, skip
  // over the loop. See ECMA-262 version 5, section 12.6.4.
  VisitForAccumulatorValue(stmt->enumerable());
  __ mov(a0, result_register());  // Result as param to InvokeBuiltin below.
  __ LoadRoot(at, Heap::kUndefinedValueRootIndex);
  __ Branch(&exit, eq, a0, Operand(at));
  Register null_value = t1;
  __ LoadRoot(null_value, Heap::kNullValueRootIndex);
  __ Branch(&exit, eq, a0, Operand(null_value));
  PrepareForBailoutForId(stmt->PrepareId(), TOS_REG);
  __ mov(a0, v0);
  // Convert the object to a JS object.
  Label convert, done_convert;
  __ JumpIfSmi(a0, &convert);
  __ GetObjectType(a0, a1, a1);
  __ Branch(&done_convert, ge, a1, Operand(FIRST_SPEC_OBJECT_TYPE));
  __ bind(&convert);
  __ push(a0);
  __ InvokeBuiltin(Builtins::TO_OBJECT, CALL_FUNCTION);
  __ mov(a0, v0);
  __ bind(&done_convert);
  __ push(a0);

  // Check for proxies.
  Label call_runtime;
  STATIC_ASSERT(FIRST_JS_PROXY_TYPE == FIRST_SPEC_OBJECT_TYPE);
  __ GetObjectType(a0, a1, a1);
  __ Branch(&call_runtime, le, a1, Operand(LAST_JS_PROXY_TYPE));

  // Check cache validity in generated code. This is a fast case for
  // the JSObject::IsSimpleEnum cache validity checks. If we cannot
  // guarantee cache validity, call the runtime system to check cache
  // validity or get the property names in a fixed array.
  __ CheckEnumCache(null_value, &call_runtime);

  // The enum cache is valid. Load the map of the object being
  // iterated over and use the cache for the iteration.
  Label use_cache;
  __ lw(v0, FieldMemOperand(a0, HeapObject::kMapOffset));
  __ Branch(&use_cache);

  // Get the set of properties to enumerate.
  __ bind(&call_runtime);
  __ push(a0);  // Duplicate the enumerable object on the stack.
  __ CallRuntime(Runtime::kGetPropertyNamesFast, 1);

  // If we got a map from the runtime call, we can do a fast
  // modification check. Otherwise, we got a fixed array, and we have
  // to do a slow check.
  Label fixed_array;
  __ lw(a2, FieldMemOperand(v0, HeapObject::kMapOffset));
  __ LoadRoot(at, Heap::kMetaMapRootIndex);
  __ Branch(&fixed_array, ne, a2, Operand(at));

  // We got a map in register v0. Get the enumeration cache from it.
  Label no_descriptors;
  __ bind(&use_cache);

  __ EnumLength(a1, v0);
  __ Branch(&no_descriptors, eq, a1, Operand(Smi::FromInt(0)));

  __ LoadInstanceDescriptors(v0, a2);
  __ lw(a2, FieldMemOperand(a2, DescriptorArray::kEnumCacheOffset));
  __ lw(a2, FieldMemOperand(a2, DescriptorArray::kEnumCacheBridgeCacheOffset));

  // Set up the four remaining stack slots.
  __ li(a0, Operand(Smi::FromInt(0)));
  // Push map, enumeration cache, enumeration cache length (as smi) and zero.
  __ Push(v0, a2, a1, a0);
  __ jmp(&loop);

  __ bind(&no_descriptors);
  __ Drop(1);
  __ jmp(&exit);

  // We got a fixed array in register v0. Iterate through that.
  Label non_proxy;
  __ bind(&fixed_array);

  __ li(a1, FeedbackVector());
  __ li(a2, Operand(TypeFeedbackVector::MegamorphicSentinel(isolate())));
  __ sw(a2, FieldMemOperand(a1, FixedArray::OffsetOfElementAt(slot)));

  __ li(a1, Operand(Smi::FromInt(1)));  // Smi indicates slow check
  __ lw(a2, MemOperand(sp, 0 * kPointerSize));  // Get enumerated object
  STATIC_ASSERT(FIRST_JS_PROXY_TYPE == FIRST_SPEC_OBJECT_TYPE);
  __ GetObjectType(a2, a3, a3);
  __ Branch(&non_proxy, gt, a3, Operand(LAST_JS_PROXY_TYPE));
  __ li(a1, Operand(Smi::FromInt(0)));  // Zero indicates proxy
  __ bind(&non_proxy);
  __ Push(a1, v0);  // Smi and array
  __ lw(a1, FieldMemOperand(v0, FixedArray::kLengthOffset));
  __ li(a0, Operand(Smi::FromInt(0)));
  __ Push(a1, a0);  // Fixed array length (as smi) and initial index.
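
  // At this point the for-in state occupies five stack slots (a sketch for
  // reference; one kPointerSize each):
  //   sp[0] : current index (smi)
  //   sp[1] : array length (smi)
  //   sp[2] : FixedArray of keys (or enum cache)
  //   sp[3] : map of the enumerable, or the Smi(1)/Smi(0) slow-check marker
  //   sp[4] : the enumerable object itself
  // The loads below and the final Drop(5) rely on this layout.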

  // Generate code for doing the condition check.
  PrepareForBailoutForId(stmt->BodyId(), NO_REGISTERS);
  __ bind(&loop);
  // Load the current count to a0, load the length to a1.
  __ lw(a0, MemOperand(sp, 0 * kPointerSize));
  __ lw(a1, MemOperand(sp, 1 * kPointerSize));
  __ Branch(loop_statement.break_label(), hs, a0, Operand(a1));

  // Get the current entry of the array into register a3.
  __ lw(a2, MemOperand(sp, 2 * kPointerSize));
  __ Addu(a2, a2, Operand(FixedArray::kHeaderSize - kHeapObjectTag));
  __ sll(t0, a0, kPointerSizeLog2 - kSmiTagSize);
  __ addu(t0, a2, t0);  // Array base + scaled (smi) index.
  __ lw(a3, MemOperand(t0));  // Current entry.

  // Get the expected map from the stack or a smi in the
  // permanent slow case into register a2.
  __ lw(a2, MemOperand(sp, 3 * kPointerSize));

  // Check if the expected map still matches that of the enumerable.
  // If not, we may have to filter the key.
  Label update_each;
  __ lw(a1, MemOperand(sp, 4 * kPointerSize));
  __ lw(t0, FieldMemOperand(a1, HeapObject::kMapOffset));
  __ Branch(&update_each, eq, t0, Operand(a2));

  // For proxies, no filtering is done.
  // TODO(rossberg): What if only a prototype is a proxy? Not specified yet.
  DCHECK_EQ(Smi::FromInt(0), 0);
  __ Branch(&update_each, eq, a2, Operand(zero_reg));

  // Convert the entry to a string or (smi) 0 if it isn't a property
  // any more. If the property has been removed while iterating, we
  // just skip it.
  __ Push(a1, a3);  // Enumerable and current entry.
  __ InvokeBuiltin(Builtins::FILTER_KEY, CALL_FUNCTION);
  __ mov(a3, result_register());
  __ Branch(loop_statement.continue_label(), eq, a3, Operand(zero_reg));

  // Update the 'each' property or variable from the possibly filtered
  // entry in register a3.
  __ bind(&update_each);
  __ mov(result_register(), a3);
  // Perform the assignment as if via '='.
  { EffectContext context(this);
    EmitAssignment(stmt->each());
  }

  // Generate code for the body of the loop.
  Visit(stmt->body());

  // Generate code for going to the next element by incrementing
  // the index (smi) stored on top of the stack.
  __ bind(loop_statement.continue_label());
  __ pop(a0);
  __ Addu(a0, a0, Operand(Smi::FromInt(1)));
  __ push(a0);

  EmitBackEdgeBookkeeping(stmt, &loop);
  __ Branch(&loop);

  // Remove the pointers stored on the stack.
  __ bind(loop_statement.break_label());
  __ Drop(5);

  // Exit and decrement the loop depth.
  PrepareForBailoutForId(stmt->ExitId(), NO_REGISTERS);
  __ bind(&exit);
  decrement_loop_depth();
}


void FullCodeGenerator::VisitForOfStatement(ForOfStatement* stmt) {
  Comment cmnt(masm_, "[ ForOfStatement");
  SetStatementPosition(stmt);

  Iteration loop_statement(this, stmt);
  increment_loop_depth();

  // var iterator = iterable[Symbol.iterator]();
  VisitForEffect(stmt->assign_iterator());

  // Loop entry.
  __ bind(loop_statement.continue_label());

  // result = iterator.next()
  VisitForEffect(stmt->next_result());

  // if (result.done) break;
  Label result_not_done;
  VisitForControl(stmt->result_done(),
                  loop_statement.break_label(),
                  &result_not_done,
                  &result_not_done);
  __ bind(&result_not_done);

  // each = result.value
  VisitForEffect(stmt->assign_each());

  // Generate code for the body of the loop.
  Visit(stmt->body());

  // Check stack before looping.
  PrepareForBailoutForId(stmt->BackEdgeId(), NO_REGISTERS);
  EmitBackEdgeBookkeeping(stmt, loop_statement.continue_label());
  __ jmp(loop_statement.continue_label());

  // Exit and decrement the loop depth.
  PrepareForBailoutForId(stmt->ExitId(), NO_REGISTERS);
  __ bind(loop_statement.break_label());
  decrement_loop_depth();
}


void FullCodeGenerator::EmitNewClosure(Handle<SharedFunctionInfo> info,
                                       bool pretenure) {
  // Use the fast case closure allocation code that allocates in new
  // space for nested functions that don't need literals cloning. If
  // we're running with the --always-opt or the --prepare-always-opt
  // flag, we need to use the runtime function so that the new function
  // we are creating here gets a chance to have its code optimized and
  // doesn't just get a copy of the existing unoptimized code.
  if (!FLAG_always_opt &&
      !FLAG_prepare_always_opt &&
      !pretenure &&
      scope()->is_function_scope() &&
      info->num_literals() == 0) {
    FastNewClosureStub stub(isolate(), info->strict_mode(), info->kind());
    __ li(a2, Operand(info));
    __ CallStub(&stub);
  } else {
    __ li(a0, Operand(info));
    __ LoadRoot(a1, pretenure ? Heap::kTrueValueRootIndex
                              : Heap::kFalseValueRootIndex);
    __ Push(cp, a0, a1);
    __ CallRuntime(Runtime::kNewClosure, 3);
  }
  context()->Plug(v0);
}


void FullCodeGenerator::VisitVariableProxy(VariableProxy* expr) {
  Comment cmnt(masm_, "[ VariableProxy");
  EmitVariableLoad(expr);
}


void FullCodeGenerator::EmitLoadHomeObject(SuperReference* expr) {
  Comment cmnt(masm_, "[ SuperReference ");

  __ lw(LoadDescriptor::ReceiverRegister(),
        MemOperand(fp, JavaScriptFrameConstants::kFunctionOffset));

  Handle<Symbol> home_object_symbol(isolate()->heap()->home_object_symbol());
  __ li(LoadDescriptor::NameRegister(), home_object_symbol);

  CallLoadIC(NOT_CONTEXTUAL, expr->HomeObjectFeedbackId());

  Label done;
  __ Branch(&done, ne, v0, Operand(isolate()->factory()->undefined_value()));
  __ CallRuntime(Runtime::kThrowNonMethodError, 0);
  __ bind(&done);
}


void FullCodeGenerator::EmitLoadGlobalCheckExtensions(VariableProxy* proxy,
                                                      TypeofState typeof_state,
                                                      Label* slow) {
  Register current = cp;
  Register next = a1;
  Register temp = a2;

  Scope* s = scope();
  while (s != NULL) {
    if (s->num_heap_slots() > 0) {
      if (s->calls_sloppy_eval()) {
        // Check that extension is NULL.
        __ lw(temp, ContextOperand(current, Context::EXTENSION_INDEX));
        __ Branch(slow, ne, temp, Operand(zero_reg));
      }
      // Load next context in chain.
      __ lw(next, ContextOperand(current, Context::PREVIOUS_INDEX));
      // Walk the rest of the chain without clobbering cp.
      current = next;
    }
    // If no outer scope calls eval, we do not need to check more
    // context extensions.
    if (!s->outer_scope_calls_sloppy_eval() || s->is_eval_scope()) break;
    s = s->outer_scope();
  }

  if (s->is_eval_scope()) {
    Label loop, fast;
    if (!current.is(next)) {
      __ Move(next, current);
    }
    __ bind(&loop);
    // Terminate at native context.
    __ lw(temp, FieldMemOperand(next, HeapObject::kMapOffset));
    __ LoadRoot(t0, Heap::kNativeContextMapRootIndex);
    __ Branch(&fast, eq, temp, Operand(t0));
    // Check that extension is NULL.
    __ lw(temp, ContextOperand(next, Context::EXTENSION_INDEX));
    __ Branch(slow, ne, temp, Operand(zero_reg));
    // Load next context in chain.
    __ lw(next, ContextOperand(next, Context::PREVIOUS_INDEX));
    __ Branch(&loop);
    __ bind(&fast);
  }

  __ lw(LoadDescriptor::ReceiverRegister(), GlobalObjectOperand());
  __ li(LoadDescriptor::NameRegister(), Operand(proxy->var()->name()));
  if (FLAG_vector_ics) {
    __ li(VectorLoadICDescriptor::SlotRegister(),
          Operand(Smi::FromInt(proxy->VariableFeedbackSlot())));
  }

  ContextualMode mode = (typeof_state == INSIDE_TYPEOF)
      ? NOT_CONTEXTUAL
      : CONTEXTUAL;
  CallLoadIC(mode);
}


MemOperand FullCodeGenerator::ContextSlotOperandCheckExtensions(Variable* var,
                                                                Label* slow) {
  DCHECK(var->IsContextSlot());
  Register context = cp;
  Register next = a3;
  Register temp = t0;

  for (Scope* s = scope(); s != var->scope(); s = s->outer_scope()) {
    if (s->num_heap_slots() > 0) {
      if (s->calls_sloppy_eval()) {
        // Check that extension is NULL.
        __ lw(temp, ContextOperand(context, Context::EXTENSION_INDEX));
        __ Branch(slow, ne, temp, Operand(zero_reg));
      }
      __ lw(next, ContextOperand(context, Context::PREVIOUS_INDEX));
      // Walk the rest of the chain without clobbering cp.
      context = next;
    }
  }
  // Check that last extension is NULL.
  __ lw(temp, ContextOperand(context, Context::EXTENSION_INDEX));
  __ Branch(slow, ne, temp, Operand(zero_reg));

  // This function is used only for loads, not stores, so it's safe to
  // return a cp-based operand (the write barrier cannot be allowed to
  // destroy the cp register).
  return ContextOperand(context, var->index());
}


void FullCodeGenerator::EmitDynamicLookupFastCase(VariableProxy* proxy,
                                                  TypeofState typeof_state,
                                                  Label* slow,
                                                  Label* done) {
  // Generate fast-case code for variables that might be shadowed by
  // eval-introduced variables. Eval is used a lot without
  // introducing variables. In those cases, we do not want to
  // perform a runtime call for all variables in the scope
  // containing the eval.
  Variable* var = proxy->var();
  if (var->mode() == DYNAMIC_GLOBAL) {
    EmitLoadGlobalCheckExtensions(proxy, typeof_state, slow);
    __ Branch(done);
  } else if (var->mode() == DYNAMIC_LOCAL) {
    Variable* local = var->local_if_not_shadowed();
    __ lw(v0, ContextSlotOperandCheckExtensions(local, slow));
    if (local->mode() == LET || local->mode() == CONST ||
        local->mode() == CONST_LEGACY) {
      __ LoadRoot(at, Heap::kTheHoleValueRootIndex);
      __ subu(at, v0, at);  // Sub as compare: at == 0 on eq.
      if (local->mode() == CONST_LEGACY) {
        __ LoadRoot(a0, Heap::kUndefinedValueRootIndex);
        __ Movz(v0, a0, at);  // Conditional move: return Undefined if TheHole.
      } else {  // LET || CONST
        __ Branch(done, ne, at, Operand(zero_reg));
        __ li(a0, Operand(var->name()));
        __ push(a0);
        __ CallRuntime(Runtime::kThrowReferenceError, 1);
      }
    }
    __ Branch(done);
  }
}


void FullCodeGenerator::EmitVariableLoad(VariableProxy* proxy) {
  // Record position before possible IC call.
  SetSourcePosition(proxy->position());
  Variable* var = proxy->var();

  // Three cases: global variables, lookup variables, and all other types of
  // variables.
  switch (var->location()) {
    case Variable::UNALLOCATED: {
      Comment cmnt(masm_, "[ Global variable");
      __ lw(LoadDescriptor::ReceiverRegister(), GlobalObjectOperand());
      __ li(LoadDescriptor::NameRegister(), Operand(var->name()));
      if (FLAG_vector_ics) {
        __ li(VectorLoadICDescriptor::SlotRegister(),
              Operand(Smi::FromInt(proxy->VariableFeedbackSlot())));
      }
      CallLoadIC(CONTEXTUAL);
      context()->Plug(v0);
      break;
    }

    case Variable::PARAMETER:
    case Variable::LOCAL:
    case Variable::CONTEXT: {
      Comment cmnt(masm_, var->IsContextSlot() ? "[ Context variable"
                                               : "[ Stack variable");
      if (var->binding_needs_init()) {
        // var->scope() may be NULL when the proxy is located in eval code and
        // refers to a potential outside binding. Currently those bindings are
        // always looked up dynamically, i.e. in that case
        //     var->location() == LOOKUP.
        // always holds.
        DCHECK(var->scope() != NULL);

        // Check if the binding really needs an initialization check. The check
        // can be skipped in the following situation: we have a LET or CONST
        // binding in harmony mode, both the Variable and the VariableProxy have
        // the same declaration scope (i.e. they are both in global code, in the
        // same function or in the same eval code) and the VariableProxy is in
        // the source physically located after the initializer of the variable.
        //
        // We cannot skip any initialization checks for CONST in non-harmony
        // mode because const variables may be declared but never initialized:
        //   if (false) { const x; }; var y = x;
        //
        // The condition on the declaration scopes is a conservative check for
        // nested functions that access a binding and are called before the
        // binding is initialized:
        //   function() { f(); let x = 1; function f() { x = 2; } }
        //
        bool skip_init_check;
        if (var->scope()->DeclarationScope() != scope()->DeclarationScope()) {
          skip_init_check = false;
        } else {
          // Check that we always have valid source position.
          DCHECK(var->initializer_position() != RelocInfo::kNoPosition);
          DCHECK(proxy->position() != RelocInfo::kNoPosition);
          skip_init_check = var->mode() != CONST_LEGACY &&
              var->initializer_position() < proxy->position();
        }

        if (!skip_init_check) {
          // Let and const need a read barrier.
          GetVar(v0, var);
          __ LoadRoot(at, Heap::kTheHoleValueRootIndex);
          __ subu(at, v0, at);  // Sub as compare: at == 0 on eq.
          if (var->mode() == LET || var->mode() == CONST) {
            // Throw a reference error when using an uninitialized let/const
            // binding in harmony mode.
            Label done;
            __ Branch(&done, ne, at, Operand(zero_reg));
            __ li(a0, Operand(var->name()));
            __ push(a0);
            __ CallRuntime(Runtime::kThrowReferenceError, 1);
            __ bind(&done);
          } else {
            // Uninitialized const bindings outside of harmony mode are unholed.
            DCHECK(var->mode() == CONST_LEGACY);
            __ LoadRoot(a0, Heap::kUndefinedValueRootIndex);
            __ Movz(v0, a0, at);  // Conditional move: Undefined if TheHole.
          }
          context()->Plug(v0);
          break;
        }
      }
      context()->Plug(var);
      break;
    }

    case Variable::LOOKUP: {
      Comment cmnt(masm_, "[ Lookup variable");
      Label done, slow;
      // Generate code for loading from variables potentially shadowed
      // by eval-introduced variables.
      EmitDynamicLookupFastCase(proxy, NOT_INSIDE_TYPEOF, &slow, &done);
      __ bind(&slow);
      __ li(a1, Operand(var->name()));
      __ Push(cp, a1);  // Context and name.
      __ CallRuntime(Runtime::kLoadLookupSlot, 2);
      __ bind(&done);
      context()->Plug(v0);
    }
  }
}


void FullCodeGenerator::VisitRegExpLiteral(RegExpLiteral* expr) {
  Comment cmnt(masm_, "[ RegExpLiteral");
  Label materialized;
  // Registers will be used as follows:
  // t1 = materialized value (RegExp literal)
  // t0 = JS function, literals array
  // a3 = literal index
  // a2 = RegExp pattern
  // a1 = RegExp flags
  // a0 = RegExp literal clone
  __ lw(a0, MemOperand(fp, JavaScriptFrameConstants::kFunctionOffset));
  __ lw(t0, FieldMemOperand(a0, JSFunction::kLiteralsOffset));
  int literal_offset =
      FixedArray::kHeaderSize + expr->literal_index() * kPointerSize;
  __ lw(t1, FieldMemOperand(t0, literal_offset));
  __ LoadRoot(at, Heap::kUndefinedValueRootIndex);
  __ Branch(&materialized, ne, t1, Operand(at));

  // Create regexp literal using runtime function.
  // Result will be in v0.
  __ li(a3, Operand(Smi::FromInt(expr->literal_index())));
  __ li(a2, Operand(expr->pattern()));
  __ li(a1, Operand(expr->flags()));
  __ Push(t0, a3, a2, a1);
  __ CallRuntime(Runtime::kMaterializeRegExpLiteral, 4);
  __ mov(t1, v0);

  __ bind(&materialized);
  int size = JSRegExp::kSize + JSRegExp::kInObjectFieldCount * kPointerSize;
  Label allocated, runtime_allocate;
  __ Allocate(size, v0, a2, a3, &runtime_allocate, TAG_OBJECT);
  __ jmp(&allocated);

  __ bind(&runtime_allocate);
  __ li(a0, Operand(Smi::FromInt(size)));
  __ Push(t1, a0);
  __ CallRuntime(Runtime::kAllocateInNewSpace, 1);
  __ pop(t1);

  __ bind(&allocated);

  // After this, registers are used as follows:
  // v0: Newly allocated regexp.
  // t1: Materialized regexp.
  // a2: temp.
  __ CopyFields(v0, t1, a2.bit(), size / kPointerSize);
  context()->Plug(v0);
}


void FullCodeGenerator::EmitAccessor(Expression* expression) {
  if (expression == NULL) {
    __ LoadRoot(a1, Heap::kNullValueRootIndex);
    __ push(a1);
  } else {
    VisitForStackValue(expression);
  }
}


void FullCodeGenerator::VisitObjectLiteral(ObjectLiteral* expr) {
  Comment cmnt(masm_, "[ ObjectLiteral");

  expr->BuildConstantProperties(isolate());
  Handle<FixedArray> constant_properties = expr->constant_properties();
  __ lw(a3, MemOperand(fp, JavaScriptFrameConstants::kFunctionOffset));
  __ lw(a3, FieldMemOperand(a3, JSFunction::kLiteralsOffset));
  __ li(a2, Operand(Smi::FromInt(expr->literal_index())));
  __ li(a1, Operand(constant_properties));
  int flags = expr->fast_elements()
      ? ObjectLiteral::kFastElements
      : ObjectLiteral::kNoFlags;
  flags |= expr->has_function()
      ? ObjectLiteral::kHasFunction
      : ObjectLiteral::kNoFlags;
  __ li(a0, Operand(Smi::FromInt(flags)));
  int properties_count = constant_properties->length() / 2;
  if (expr->may_store_doubles() || expr->depth() > 1 ||
      masm()->serializer_enabled() || flags != ObjectLiteral::kFastElements ||
      properties_count > FastCloneShallowObjectStub::kMaximumClonedProperties) {
    __ Push(a3, a2, a1, a0);
    __ CallRuntime(Runtime::kCreateObjectLiteral, 4);
  } else {
    FastCloneShallowObjectStub stub(isolate(), properties_count);
    __ CallStub(&stub);
  }

  // If result_saved is true the result is on top of the stack. If
  // result_saved is false the result is in v0.
  bool result_saved = false;

  // Mark all computed expressions that are bound to a key that
  // is shadowed by a later occurrence of the same key. For the
  // marked expressions, no store code is emitted.
  expr->CalculateEmitStore(zone());

  AccessorTable accessor_table(zone());
  for (int i = 0; i < expr->properties()->length(); i++) {
    ObjectLiteral::Property* property = expr->properties()->at(i);
    if (property->IsCompileTimeValue()) continue;

    Literal* key = property->key();
    Expression* value = property->value();
    if (!result_saved) {
      __ push(v0);  // Save result on stack.
      result_saved = true;
    }
    switch (property->kind()) {
      case ObjectLiteral::Property::CONSTANT:
        UNREACHABLE();
      case ObjectLiteral::Property::MATERIALIZED_LITERAL:
        DCHECK(!CompileTimeValue::IsCompileTimeValue(property->value()));
        // Fall through.
      case ObjectLiteral::Property::COMPUTED:
        if (key->value()->IsInternalizedString()) {
          if (property->emit_store()) {
            VisitForAccumulatorValue(value);
            __ mov(StoreDescriptor::ValueRegister(), result_register());
            DCHECK(StoreDescriptor::ValueRegister().is(a0));
            __ li(StoreDescriptor::NameRegister(), Operand(key->value()));
            __ lw(StoreDescriptor::ReceiverRegister(), MemOperand(sp));
            CallStoreIC(key->LiteralFeedbackId());
            PrepareForBailoutForId(key->id(), NO_REGISTERS);
          } else {
            VisitForEffect(value);
          }
          break;
        }
        // Duplicate receiver on stack.
        __ lw(a0, MemOperand(sp));
        __ push(a0);
        VisitForStackValue(key);
        VisitForStackValue(value);
        if (property->emit_store()) {
          __ li(a0, Operand(Smi::FromInt(SLOPPY)));  // PropertyAttributes.
          __ push(a0);
          __ CallRuntime(Runtime::kSetProperty, 4);
        } else {
          __ Drop(3);
        }
        break;
      case ObjectLiteral::Property::PROTOTYPE:
        // Duplicate receiver on stack.
        __ lw(a0, MemOperand(sp));
        __ push(a0);
        VisitForStackValue(value);
        if (property->emit_store()) {
          __ CallRuntime(Runtime::kSetPrototype, 2);
        } else {
          __ Drop(2);
        }
        break;
      case ObjectLiteral::Property::GETTER:
        accessor_table.lookup(key)->second->getter = value;
        break;
      case ObjectLiteral::Property::SETTER:
        accessor_table.lookup(key)->second->setter = value;
        break;
    }
  }

  // Emit code to define accessors, using only a single call to the runtime for
  // each pair of corresponding getters and setters.
  for (AccessorTable::Iterator it = accessor_table.begin();
       it != accessor_table.end();
       ++it) {
    __ lw(a0, MemOperand(sp));  // Duplicate receiver.
    __ push(a0);
    VisitForStackValue(it->first);
    EmitAccessor(it->second->getter);
    EmitAccessor(it->second->setter);
    __ li(a0, Operand(Smi::FromInt(NONE)));
    __ push(a0);
    __ CallRuntime(Runtime::kDefineAccessorPropertyUnchecked, 5);
  }

  if (expr->has_function()) {
    DCHECK(result_saved);
    __ lw(a0, MemOperand(sp));
    __ push(a0);
    __ CallRuntime(Runtime::kToFastProperties, 1);
  }

  if (result_saved) {
    context()->PlugTOS();
  } else {
    context()->Plug(v0);
  }
}


void FullCodeGenerator::VisitArrayLiteral(ArrayLiteral* expr) {
  Comment cmnt(masm_, "[ ArrayLiteral");

  expr->BuildConstantElements(isolate());
  int flags = expr->depth() == 1
      ? ArrayLiteral::kShallowElements
      : ArrayLiteral::kNoFlags;

  ZoneList<Expression*>* subexprs = expr->values();
  int length = subexprs->length();

  Handle<FixedArray> constant_elements = expr->constant_elements();
  DCHECK_EQ(2, constant_elements->length());
  ElementsKind constant_elements_kind =
      static_cast<ElementsKind>(Smi::cast(constant_elements->get(0))->value());
  bool has_fast_elements =
      IsFastObjectElementsKind(constant_elements_kind);
  Handle<FixedArrayBase> constant_elements_values(
      FixedArrayBase::cast(constant_elements->get(1)));

  AllocationSiteMode allocation_site_mode = TRACK_ALLOCATION_SITE;
  if (has_fast_elements && !FLAG_allocation_site_pretenuring) {
    // If the only customer of allocation sites is transitioning, then
    // we can turn it off if we don't have anywhere else to transition to.
    allocation_site_mode = DONT_TRACK_ALLOCATION_SITE;
  }

  __ mov(a0, result_register());
  __ lw(a3, MemOperand(fp, JavaScriptFrameConstants::kFunctionOffset));
  __ lw(a3, FieldMemOperand(a3, JSFunction::kLiteralsOffset));
  __ li(a2, Operand(Smi::FromInt(expr->literal_index())));
  __ li(a1, Operand(constant_elements));
  if (expr->depth() > 1 || length > JSObject::kInitialMaxFastElementArray) {
    __ li(a0, Operand(Smi::FromInt(flags)));
    __ Push(a3, a2, a1, a0);
    __ CallRuntime(Runtime::kCreateArrayLiteral, 4);
  } else {
    FastCloneShallowArrayStub stub(isolate(), allocation_site_mode);
    __ CallStub(&stub);
  }

  bool result_saved = false;  // Is the result saved to the stack?

  // Emit code to evaluate all the non-constant subexpressions and to store
  // them into the newly cloned array.
  for (int i = 0; i < length; i++) {
    Expression* subexpr = subexprs->at(i);
    // If the subexpression is a literal or a simple materialized literal it
    // is already set in the cloned array.
    if (CompileTimeValue::IsCompileTimeValue(subexpr)) continue;

    if (!result_saved) {
      __ push(v0);  // array literal
      __ Push(Smi::FromInt(expr->literal_index()));
      result_saved = true;
    }

    VisitForAccumulatorValue(subexpr);

    if (IsFastObjectElementsKind(constant_elements_kind)) {
      int offset = FixedArray::kHeaderSize + (i * kPointerSize);
      __ lw(t2, MemOperand(sp, kPointerSize));  // Copy of array literal.
      __ lw(a1, FieldMemOperand(t2, JSObject::kElementsOffset));
      __ sw(result_register(), FieldMemOperand(a1, offset));
      // Update the write barrier for the array store.
      __ RecordWriteField(a1, offset, result_register(), a2,
                          kRAHasBeenSaved, kDontSaveFPRegs,
                          EMIT_REMEMBERED_SET, INLINE_SMI_CHECK);
    } else {
      __ li(a3, Operand(Smi::FromInt(i)));
      __ mov(a0, result_register());
      StoreArrayLiteralElementStub stub(isolate());
      __ CallStub(&stub);
    }

    PrepareForBailoutForId(expr->GetIdForElement(i), NO_REGISTERS);
  }
  if (result_saved) {
    __ Pop();  // literal index
    context()->PlugTOS();
  } else {
    context()->Plug(v0);
  }
}


void FullCodeGenerator::VisitAssignment(Assignment* expr) {
  DCHECK(expr->target()->IsValidReferenceExpression());

  Comment cmnt(masm_, "[ Assignment");

  // Left-hand side can only be a property, a global or a (parameter or local)
  // slot.
  enum LhsKind { VARIABLE, NAMED_PROPERTY, KEYED_PROPERTY };
  LhsKind assign_type = VARIABLE;
  Property* property = expr->target()->AsProperty();
  if (property != NULL) {
    assign_type = (property->key()->IsPropertyName())
        ? NAMED_PROPERTY
        : KEYED_PROPERTY;
  }

  // Evaluate LHS expression.
  switch (assign_type) {
    case VARIABLE:
      // Nothing to do here.
      break;
    case NAMED_PROPERTY:
      if (expr->is_compound()) {
        // We need the receiver both on the stack and in the register.
        VisitForStackValue(property->obj());
        __ lw(LoadDescriptor::ReceiverRegister(), MemOperand(sp, 0));
      } else {
        VisitForStackValue(property->obj());
      }
      break;
    case KEYED_PROPERTY:
      // We need the key and receiver on both the stack and in v0 and a1.
      if (expr->is_compound()) {
        VisitForStackValue(property->obj());
        VisitForStackValue(property->key());
        __ lw(LoadDescriptor::ReceiverRegister(),
              MemOperand(sp, 1 * kPointerSize));
        __ lw(LoadDescriptor::NameRegister(), MemOperand(sp, 0));
      } else {
        VisitForStackValue(property->obj());
        VisitForStackValue(property->key());
      }
      break;
  }

  // For compound assignments we need another deoptimization point after the
  // variable/property load.
  if (expr->is_compound()) {
    { AccumulatorValueContext context(this);
      switch (assign_type) {
        case VARIABLE:
          EmitVariableLoad(expr->target()->AsVariableProxy());
          PrepareForBailout(expr->target(), TOS_REG);
          break;
        case NAMED_PROPERTY:
          EmitNamedPropertyLoad(property);
          PrepareForBailoutForId(property->LoadId(), TOS_REG);
          break;
        case KEYED_PROPERTY:
          EmitKeyedPropertyLoad(property);
          PrepareForBailoutForId(property->LoadId(), TOS_REG);
          break;
      }
    }

    Token::Value op = expr->binary_op();
    __ push(v0);  // Left operand goes on the stack.
    VisitForAccumulatorValue(expr->value());

    OverwriteMode mode = expr->value()->ResultOverwriteAllowed()
        ? OVERWRITE_RIGHT
        : NO_OVERWRITE;
    SetSourcePosition(expr->position() + 1);
    AccumulatorValueContext context(this);
    if (ShouldInlineSmiCase(op)) {
      EmitInlineSmiBinaryOp(expr->binary_operation(),
                            op,
                            mode,
                            expr->target(),
                            expr->value());
    } else {
      EmitBinaryOp(expr->binary_operation(), op, mode);
    }

    // Deoptimization point in case the binary operation may have side effects.
    PrepareForBailout(expr->binary_operation(), TOS_REG);
  } else {
    VisitForAccumulatorValue(expr->value());
  }

  // Record source position before possible IC call.
  SetSourcePosition(expr->position());

  // Store the value.
  switch (assign_type) {
    case VARIABLE:
      EmitVariableAssignment(expr->target()->AsVariableProxy()->var(),
                             expr->op());
      PrepareForBailoutForId(expr->AssignmentId(), TOS_REG);
      context()->Plug(v0);
      break;
    case NAMED_PROPERTY:
      EmitNamedPropertyAssignment(expr);
      break;
    case KEYED_PROPERTY:
      EmitKeyedPropertyAssignment(expr);
      break;
  }
}


void FullCodeGenerator::VisitYield(Yield* expr) {
  Comment cmnt(masm_, "[ Yield");
  // Evaluate yielded value first; the initial iterator definition depends on
  // this. It stays on the stack while we update the iterator.
  VisitForStackValue(expr->expression());

  switch (expr->yield_kind()) {
    case Yield::kSuspend:
      // Pop value from top-of-stack slot; box result into result register.
      EmitCreateIteratorResult(false);
      __ push(result_register());
      // Fall through.
    case Yield::kInitial: {
      Label suspend, continuation, post_runtime, resume;

      __ jmp(&suspend);

      __ bind(&continuation);
      __ jmp(&resume);

      __ bind(&suspend);
      VisitForAccumulatorValue(expr->generator_object());
      DCHECK(continuation.pos() > 0 && Smi::IsValid(continuation.pos()));
      __ li(a1, Operand(Smi::FromInt(continuation.pos())));
      __ sw(a1, FieldMemOperand(v0, JSGeneratorObject::kContinuationOffset));
      __ sw(cp, FieldMemOperand(v0, JSGeneratorObject::kContextOffset));
      __ mov(a1, cp);
      __ RecordWriteField(v0, JSGeneratorObject::kContextOffset, a1, a2,
                          kRAHasBeenSaved, kDontSaveFPRegs);
      __ Addu(a1, fp, Operand(StandardFrameConstants::kExpressionsOffset));
      __ Branch(&post_runtime, eq, sp, Operand(a1));
      __ push(v0);  // generator object
      __ CallRuntime(Runtime::kSuspendJSGeneratorObject, 1);
      __ lw(cp, MemOperand(fp, StandardFrameConstants::kContextOffset));
      __ bind(&post_runtime);
      __ pop(result_register());
      EmitReturnSequence();

      __ bind(&resume);
      context()->Plug(result_register());
      break;
    }

    case Yield::kFinal: {
      VisitForAccumulatorValue(expr->generator_object());
      __ li(a1, Operand(Smi::FromInt(JSGeneratorObject::kGeneratorClosed)));
      __ sw(a1, FieldMemOperand(result_register(),
                                JSGeneratorObject::kContinuationOffset));
      // Pop value from top-of-stack slot, box result into result register.
      EmitCreateIteratorResult(true);
      EmitUnwindBeforeReturn();
      EmitReturnSequence();
      break;
    }

    case Yield::kDelegating: {
      VisitForStackValue(expr->generator_object());

      // Initial stack layout is as follows:
      // [sp + 1 * kPointerSize] iter
      // [sp + 0 * kPointerSize] g

      Label l_catch, l_try, l_suspend, l_continuation, l_resume;
      Label l_next, l_call;
      Register load_receiver = LoadDescriptor::ReceiverRegister();
      Register load_name = LoadDescriptor::NameRegister();

      // Initial send value is undefined.
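      // A rough JS-level sketch of the delegation loop emitted below (the
      // l_* labels); illustrative only, not the exact spec algorithm:
      //   var received = undefined;
      //   while (true) {
      //     result = iter.next(received);  // l_next/l_call; l_catch routes
      //                                    // exceptions to iter.throw(e).
      //     if (result.done) break;        // tested after l_call returns.
      //     received = yield result;       // l_try: result is re-used as the
      //                                    // iterator result, no re-boxing.
      //   }
      //   // The value of the whole yield* expression is result.value.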
      __ LoadRoot(a0, Heap::kUndefinedValueRootIndex);
      __ Branch(&l_next);

      // catch (e) { receiver = iter; f = 'throw'; arg = e; goto l_call; }
      __ bind(&l_catch);
      __ mov(a0, v0);
      handler_table()->set(expr->index(), Smi::FromInt(l_catch.pos()));
      __ LoadRoot(load_name, Heap::kthrow_stringRootIndex);  // "throw"
      __ lw(a3, MemOperand(sp, 1 * kPointerSize));           // iter
      __ Push(load_name, a3, a0);                  // "throw", iter, except
      __ jmp(&l_call);

      // try { received = %yield result }
      // Shuffle the received result above a try handler and yield it without
      // re-boxing.
      __ bind(&l_try);
      __ pop(a0);                                        // result
      __ PushTryHandler(StackHandler::CATCH, expr->index());
      const int handler_size = StackHandlerConstants::kSize;
      __ push(a0);                                       // result
      __ jmp(&l_suspend);
      __ bind(&l_continuation);
      __ mov(a0, v0);
      __ jmp(&l_resume);
      __ bind(&l_suspend);
      const int generator_object_depth = kPointerSize + handler_size;
      __ lw(a0, MemOperand(sp, generator_object_depth));
      __ push(a0);                                       // g
      DCHECK(l_continuation.pos() > 0 && Smi::IsValid(l_continuation.pos()));
      __ li(a1, Operand(Smi::FromInt(l_continuation.pos())));
      __ sw(a1, FieldMemOperand(a0, JSGeneratorObject::kContinuationOffset));
      __ sw(cp, FieldMemOperand(a0, JSGeneratorObject::kContextOffset));
      __ mov(a1, cp);
      __ RecordWriteField(a0, JSGeneratorObject::kContextOffset, a1, a2,
                          kRAHasBeenSaved, kDontSaveFPRegs);
      __ CallRuntime(Runtime::kSuspendJSGeneratorObject, 1);
      __ lw(cp, MemOperand(fp, StandardFrameConstants::kContextOffset));
      __ pop(v0);                                        // result
      EmitReturnSequence();
      __ mov(a0, v0);
      __ bind(&l_resume);                                // received in a0
      __ PopTryHandler();

      // receiver = iter; f = 'next'; arg = received;
      __ bind(&l_next);

      __ LoadRoot(load_name, Heap::knext_stringRootIndex);  // "next"
      __ lw(a3, MemOperand(sp, 1 * kPointerSize));          // iter
      __ Push(load_name, a3, a0);                  // "next", iter, received

      // result = receiver[f](arg);
      __ bind(&l_call);
      __ lw(load_receiver, MemOperand(sp, kPointerSize));
      __ lw(load_name, MemOperand(sp, 2 * kPointerSize));
      if (FLAG_vector_ics) {
        __ li(VectorLoadICDescriptor::SlotRegister(),
              Operand(Smi::FromInt(expr->KeyedLoadFeedbackSlot())));
      }
      Handle<Code> ic = CodeFactory::KeyedLoadIC(isolate()).code();
      CallIC(ic, TypeFeedbackId::None());
      __ mov(a0, v0);
      __ mov(a1, a0);
      __ sw(a1, MemOperand(sp, 2 * kPointerSize));
      CallFunctionStub stub(isolate(), 1, CALL_AS_METHOD);
      __ CallStub(&stub);

      __ lw(cp, MemOperand(fp, StandardFrameConstants::kContextOffset));
      __ Drop(1);  // The function is still on the stack; drop it.

      // if (!result.done) goto l_try;
      __ Move(load_receiver, v0);

      __ push(load_receiver);                               // save result
      __ LoadRoot(load_name, Heap::kdone_stringRootIndex);  // "done"
      if (FLAG_vector_ics) {
        __ li(VectorLoadICDescriptor::SlotRegister(),
              Operand(Smi::FromInt(expr->DoneFeedbackSlot())));
      }
      CallLoadIC(NOT_CONTEXTUAL);                           // v0=result.done
      __ mov(a0, v0);
      Handle<Code> bool_ic = ToBooleanStub::GetUninitialized(isolate());
      CallIC(bool_ic);
      __ Branch(&l_try, eq, v0, Operand(zero_reg));

      // result.value
      __ pop(load_receiver);                                 // result
      __ LoadRoot(load_name, Heap::kvalue_stringRootIndex);  // "value"
      if (FLAG_vector_ics) {
        __ li(VectorLoadICDescriptor::SlotRegister(),
              Operand(Smi::FromInt(expr->ValueFeedbackSlot())));
      }
      CallLoadIC(NOT_CONTEXTUAL);                            // v0=result.value
      context()->DropAndPlug(2, v0);                         // drop iter and g
      break;
    }
  }
}


void FullCodeGenerator::EmitGeneratorResume(Expression *generator,
    Expression *value,
    JSGeneratorObject::ResumeMode resume_mode) {
  // The value stays in a0, and is ultimately read by the resumed generator, as
  // if CallRuntime(Runtime::kSuspendJSGeneratorObject) returned it. Or it
  // is read to throw the value when the resumed generator is already closed.
  // a1 will hold the generator object until the activation has been resumed.
  VisitForStackValue(generator);
  VisitForAccumulatorValue(value);
  __ pop(a1);

  // Check generator state.
  Label wrong_state, closed_state, done;
  __ lw(a3, FieldMemOperand(a1, JSGeneratorObject::kContinuationOffset));
  STATIC_ASSERT(JSGeneratorObject::kGeneratorExecuting < 0);
  STATIC_ASSERT(JSGeneratorObject::kGeneratorClosed == 0);
  __ Branch(&closed_state, eq, a3, Operand(zero_reg));
  __ Branch(&wrong_state, lt, a3, Operand(zero_reg));

  // Load suspended function and context.
  __ lw(cp, FieldMemOperand(a1, JSGeneratorObject::kContextOffset));
  __ lw(t0, FieldMemOperand(a1, JSGeneratorObject::kFunctionOffset));

  // Load receiver and store as the first argument.
  __ lw(a2, FieldMemOperand(a1, JSGeneratorObject::kReceiverOffset));
  __ push(a2);

  // Push holes for the rest of the arguments to the generator function.
  __ lw(a3, FieldMemOperand(t0, JSFunction::kSharedFunctionInfoOffset));
  __ lw(a3,
        FieldMemOperand(a3, SharedFunctionInfo::kFormalParameterCountOffset));
  __ LoadRoot(a2, Heap::kTheHoleValueRootIndex);
  Label push_argument_holes, push_frame;
  __ bind(&push_argument_holes);
  __ Subu(a3, a3, Operand(Smi::FromInt(1)));
  __ Branch(&push_frame, lt, a3, Operand(zero_reg));
  __ push(a2);
  __ jmp(&push_argument_holes);

  // Enter a new JavaScript frame, and initialize its slots as they were when
  // the generator was suspended.
  Label resume_frame;
  __ bind(&push_frame);
  __ Call(&resume_frame);
  __ jmp(&done);
  __ bind(&resume_frame);
  // ra = return address.
  // fp = caller's frame pointer.
  // cp = callee's context,
  // t0 = callee's JS function.
  __ Push(ra, fp, cp, t0);
  // Adjust FP to point to saved FP.
  __ Addu(fp, sp, 2 * kPointerSize);

  // Load the operand stack size.
  __ lw(a3, FieldMemOperand(a1, JSGeneratorObject::kOperandStackOffset));
  __ lw(a3, FieldMemOperand(a3, FixedArray::kLengthOffset));
  __ SmiUntag(a3);

  // If we are sending a value and there is no operand stack, we can jump back
  // in directly.
  if (resume_mode == JSGeneratorObject::NEXT) {
    Label slow_resume;
    __ Branch(&slow_resume, ne, a3, Operand(zero_reg));
    __ lw(a3, FieldMemOperand(t0, JSFunction::kCodeEntryOffset));
    __ lw(a2, FieldMemOperand(a1, JSGeneratorObject::kContinuationOffset));
    __ SmiUntag(a2);
    __ Addu(a3, a3, Operand(a2));
    __ li(a2, Operand(Smi::FromInt(JSGeneratorObject::kGeneratorExecuting)));
    __ sw(a2, FieldMemOperand(a1, JSGeneratorObject::kContinuationOffset));
    __ Jump(a3);
    __ bind(&slow_resume);
  }

  // Otherwise, we push holes for the operand stack and call the runtime to fix
  // up the stack and the handlers.
  Label push_operand_holes, call_resume;
  __ bind(&push_operand_holes);
  __ Subu(a3, a3, Operand(1));
  __ Branch(&call_resume, lt, a3, Operand(zero_reg));
  __ push(a2);
  __ Branch(&push_operand_holes);
  __ bind(&call_resume);
  DCHECK(!result_register().is(a1));
  __ Push(a1, result_register());
  __ Push(Smi::FromInt(resume_mode));
  __ CallRuntime(Runtime::kResumeJSGeneratorObject, 3);
  // Not reached: the runtime call returns elsewhere.
  __ stop("not-reached");

  // Reach here when generator is closed.
  __ bind(&closed_state);
  if (resume_mode == JSGeneratorObject::NEXT) {
    // Return completed iterator result when generator is closed.
    __ LoadRoot(a2, Heap::kUndefinedValueRootIndex);
    __ push(a2);
    // Pop value from top-of-stack slot; box result into result register.
    EmitCreateIteratorResult(true);
  } else {
    // Throw the provided value.
    __ push(a0);
    __ CallRuntime(Runtime::kThrow, 1);
  }
  __ jmp(&done);

  // Throw error if we attempt to operate on a running generator.
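  // (Illustrative example of what lands here: re-entrant resumption, e.g.
  //   var g; function* gen() { g.next(); yield 1; }
  //   g = gen(); g.next();  // throws, since gen is mid-execution,
  // assuming kThrowGeneratorStateError raises the corresponding TypeError.)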
  __ bind(&wrong_state);
  __ push(a1);
  __ CallRuntime(Runtime::kThrowGeneratorStateError, 1);

  __ bind(&done);
  context()->Plug(result_register());
}


void FullCodeGenerator::EmitCreateIteratorResult(bool done) {
  Label gc_required;
  Label allocated;

  const int instance_size = 5 * kPointerSize;
  DCHECK_EQ(isolate()->native_context()->iterator_result_map()->instance_size(),
            instance_size);

  __ Allocate(instance_size, v0, a2, a3, &gc_required, TAG_OBJECT);
  __ jmp(&allocated);

  __ bind(&gc_required);
  __ Push(Smi::FromInt(instance_size));
  __ CallRuntime(Runtime::kAllocateInNewSpace, 1);
  __ lw(context_register(),
        MemOperand(fp, StandardFrameConstants::kContextOffset));

  __ bind(&allocated);
  __ lw(a1, ContextOperand(cp, Context::GLOBAL_OBJECT_INDEX));
  __ lw(a1, FieldMemOperand(a1, GlobalObject::kNativeContextOffset));
  __ lw(a1, ContextOperand(a1, Context::ITERATOR_RESULT_MAP_INDEX));
  __ pop(a2);
  __ li(a3, Operand(isolate()->factory()->ToBoolean(done)));
  __ li(t0, Operand(isolate()->factory()->empty_fixed_array()));
  __ sw(a1, FieldMemOperand(v0, HeapObject::kMapOffset));
  __ sw(t0, FieldMemOperand(v0, JSObject::kPropertiesOffset));
  __ sw(t0, FieldMemOperand(v0, JSObject::kElementsOffset));
  __ sw(a2,
        FieldMemOperand(v0, JSGeneratorObject::kResultValuePropertyOffset));
  __ sw(a3,
        FieldMemOperand(v0, JSGeneratorObject::kResultDonePropertyOffset));

  // Only the value field needs a write barrier, as the other values are in the
  // root set.
  __ RecordWriteField(v0, JSGeneratorObject::kResultValuePropertyOffset,
                      a2, a3, kRAHasBeenSaved, kDontSaveFPRegs);
}


void FullCodeGenerator::EmitNamedPropertyLoad(Property* prop) {
  SetSourcePosition(prop->position());
  Literal* key = prop->key()->AsLiteral();

  __ li(LoadDescriptor::NameRegister(), Operand(key->value()));
  if (FLAG_vector_ics) {
    __ li(VectorLoadICDescriptor::SlotRegister(),
          Operand(Smi::FromInt(prop->PropertyFeedbackSlot())));
    CallLoadIC(NOT_CONTEXTUAL);
  } else {
    CallLoadIC(NOT_CONTEXTUAL, prop->PropertyFeedbackId());
  }
}


void FullCodeGenerator::EmitNamedSuperPropertyLoad(Property* prop) {
  SetSourcePosition(prop->position());
  Literal* key = prop->key()->AsLiteral();
  DCHECK(!key->value()->IsSmi());
  DCHECK(prop->IsSuperAccess());

  SuperReference* super_ref = prop->obj()->AsSuperReference();
  EmitLoadHomeObject(super_ref);
  __ Push(v0);
  VisitForStackValue(super_ref->this_var());
  __ Push(key->value());
  __ CallRuntime(Runtime::kLoadFromSuper, 3);
}


void FullCodeGenerator::EmitKeyedPropertyLoad(Property* prop) {
  SetSourcePosition(prop->position());
  Handle<Code> ic = CodeFactory::KeyedLoadIC(isolate()).code();
  if (FLAG_vector_ics) {
    __ li(VectorLoadICDescriptor::SlotRegister(),
          Operand(Smi::FromInt(prop->PropertyFeedbackSlot())));
    CallIC(ic);
  } else {
    CallIC(ic, prop->PropertyFeedbackId());
  }
}


void FullCodeGenerator::EmitInlineSmiBinaryOp(BinaryOperation* expr,
                                              Token::Value op,
                                              OverwriteMode mode,
                                              Expression* left_expr,
                                              Expression* right_expr) {
  Label done, smi_case, stub_call;

  Register scratch1 = a2;
  Register scratch2 = a3;

  // Get the arguments.
  Register left = a1;
  Register right = a0;
  __ pop(left);
  __ mov(a0, result_register());

  // Perform combined smi check on both operands.
  __ Or(scratch1, left, Operand(right));
  STATIC_ASSERT(kSmiTag == 0);
  JumpPatchSite patch_site(masm_);
  patch_site.EmitJumpIfSmi(scratch1, &smi_case);

  __ bind(&stub_call);
  Handle<Code> code = CodeFactory::BinaryOpIC(isolate(), op, mode).code();
  CallIC(code, expr->BinaryOperationFeedbackId());
  patch_site.EmitPatchInfo();
  __ jmp(&done);

  __ bind(&smi_case);
  // Smi case. This code works the same way as the smi-smi case in the type
  // recording binary operation stub.
  switch (op) {
    case Token::SAR:
      __ GetLeastBitsFromSmi(scratch1, right, 5);
      __ srav(right, left, scratch1);
      __ And(v0, right, Operand(~kSmiTagMask));
      break;
    case Token::SHL: {
      __ SmiUntag(scratch1, left);
      __ GetLeastBitsFromSmi(scratch2, right, 5);
      __ sllv(scratch1, scratch1, scratch2);
      // Check that the signed result fits in a smi.
      __ Addu(scratch2, scratch1, Operand(0x40000000));
      __ Branch(&stub_call, lt, scratch2, Operand(zero_reg));
      __ SmiTag(v0, scratch1);
      break;
    }
    case Token::SHR: {
      __ SmiUntag(scratch1, left);
      __ GetLeastBitsFromSmi(scratch2, right, 5);
      __ srlv(scratch1, scratch1, scratch2);
      // The unsigned result does not fit in a smi if the top two bits are set.
      __ And(scratch2, scratch1, 0xc0000000);
      __ Branch(&stub_call, ne, scratch2, Operand(zero_reg));
      __ SmiTag(v0, scratch1);
      break;
    }
    case Token::ADD:
      __ AdduAndCheckForOverflow(v0, left, right, scratch1);
      __ BranchOnOverflow(&stub_call, scratch1);
      break;
    case Token::SUB:
      __ SubuAndCheckForOverflow(v0, left, right, scratch1);
      __ BranchOnOverflow(&stub_call, scratch1);
      break;
    case Token::MUL: {
      __ SmiUntag(scratch1, right);
      __ Mul(scratch2, v0, left, scratch1);
      // Overflow check: the high word must be the sign extension of the low.
      __ sra(scratch1, v0, 31);
      __ Branch(&stub_call, ne, scratch1, Operand(scratch2));
      __ Branch(&done, ne, v0, Operand(zero_reg));
      // The product is zero; take the stub path if the operand signs imply -0.
      __ Addu(scratch2, right, left);
      __ Branch(&stub_call, lt, scratch2, Operand(zero_reg));
      DCHECK(Smi::FromInt(0) == 0);
      __ mov(v0, zero_reg);
      break;
    }
    case Token::BIT_OR:
      __ Or(v0, left, Operand(right));
      break;
    case Token::BIT_AND:
      __ And(v0, left, Operand(right));
      break;
    case Token::BIT_XOR:
      __ Xor(v0, left, Operand(right));
      break;
    default:
      UNREACHABLE();
  }

  __ bind(&done);
  context()->Plug(v0);
}


void FullCodeGenerator::EmitBinaryOp(BinaryOperation* expr,
                                     Token::Value op,
                                     OverwriteMode mode) {
  __ mov(a0, result_register());
  __ pop(a1);
  Handle<Code> code = CodeFactory::BinaryOpIC(isolate(), op, mode).code();
  JumpPatchSite patch_site(masm_);  // unbound, signals no inlined smi code.
  CallIC(code, expr->BinaryOperationFeedbackId());
  patch_site.EmitPatchInfo();
  context()->Plug(v0);
}


void FullCodeGenerator::EmitAssignment(Expression* expr) {
  DCHECK(expr->IsValidReferenceExpression());

  // Left-hand side can only be a property, a global or a (parameter or local)
  // slot.
  enum LhsKind { VARIABLE, NAMED_PROPERTY, KEYED_PROPERTY };
  LhsKind assign_type = VARIABLE;
  Property* prop = expr->AsProperty();
  if (prop != NULL) {
    assign_type = (prop->key()->IsPropertyName())
        ? NAMED_PROPERTY
        : KEYED_PROPERTY;
  }

  switch (assign_type) {
    case VARIABLE: {
      Variable* var = expr->AsVariableProxy()->var();
      EffectContext context(this);
      EmitVariableAssignment(var, Token::ASSIGN);
      break;
    }
    case NAMED_PROPERTY: {
      __ push(result_register());  // Preserve value.
      VisitForAccumulatorValue(prop->obj());
      __ mov(StoreDescriptor::ReceiverRegister(), result_register());
      __ pop(StoreDescriptor::ValueRegister());  // Restore value.
      __ li(StoreDescriptor::NameRegister(),
            Operand(prop->key()->AsLiteral()->value()));
      CallStoreIC();
      break;
    }
    case KEYED_PROPERTY: {
      __ push(result_register());  // Preserve value.
      VisitForStackValue(prop->obj());
      VisitForAccumulatorValue(prop->key());
      __ mov(StoreDescriptor::NameRegister(), result_register());
      __ Pop(StoreDescriptor::ValueRegister(),
             StoreDescriptor::ReceiverRegister());
      Handle<Code> ic =
          CodeFactory::KeyedStoreIC(isolate(), strict_mode()).code();
      CallIC(ic);
      break;
    }
  }
  context()->Plug(v0);
}


void FullCodeGenerator::EmitStoreToStackLocalOrContextSlot(
    Variable* var, MemOperand location) {
  __ sw(result_register(), location);
  if (var->IsContextSlot()) {
    // RecordWrite may destroy all its register arguments.
    __ Move(a3, result_register());
    int offset = Context::SlotOffset(var->index());
    __ RecordWriteContextSlot(
        a1, offset, a3, a2, kRAHasBeenSaved, kDontSaveFPRegs);
  }
}


void FullCodeGenerator::EmitVariableAssignment(Variable* var, Token::Value op) {
  if (var->IsUnallocated()) {
    // Global var, const, or let.
    __ mov(StoreDescriptor::ValueRegister(), result_register());
    __ li(StoreDescriptor::NameRegister(), Operand(var->name()));
    __ lw(StoreDescriptor::ReceiverRegister(), GlobalObjectOperand());
    CallStoreIC();

  } else if (op == Token::INIT_CONST_LEGACY) {
    // Const initializers need a write barrier.
    DCHECK(!var->IsParameter());  // No const parameters.
    if (var->IsLookupSlot()) {
      __ li(a0, Operand(var->name()));
      __ Push(v0, cp, a0);  // Value, context and name.
      __ CallRuntime(Runtime::kInitializeLegacyConstLookupSlot, 3);
    } else {
      DCHECK(var->IsStackAllocated() || var->IsContextSlot());
      Label skip;
      MemOperand location = VarOperand(var, a1);
      __ lw(a2, location);
      __ LoadRoot(at, Heap::kTheHoleValueRootIndex);
      __ Branch(&skip, ne, a2, Operand(at));
      EmitStoreToStackLocalOrContextSlot(var, location);
      __ bind(&skip);
    }

  } else if (var->mode() == LET && op != Token::INIT_LET) {
    // Non-initializing assignment to let variable needs a write barrier.
    DCHECK(!var->IsLookupSlot());
    DCHECK(var->IsStackAllocated() || var->IsContextSlot());
    Label assign;
    MemOperand location = VarOperand(var, a1);
    __ lw(a3, location);
    __ LoadRoot(t0, Heap::kTheHoleValueRootIndex);
    __ Branch(&assign, ne, a3, Operand(t0));
    __ li(a3, Operand(var->name()));
    __ push(a3);
    __ CallRuntime(Runtime::kThrowReferenceError, 1);
    // Perform the assignment.
    __ bind(&assign);
    EmitStoreToStackLocalOrContextSlot(var, location);

  } else if (!var->is_const_mode() || op == Token::INIT_CONST) {
    if (var->IsLookupSlot()) {
      // Assignment to var.
      __ li(a1, Operand(var->name()));
      __ li(a0, Operand(Smi::FromInt(strict_mode())));
      __ Push(v0, cp, a1, a0);  // Value, context, name, strict mode.
      __ CallRuntime(Runtime::kStoreLookupSlot, 4);
    } else {
      // Assignment to var or initializing assignment to let/const in harmony
      // mode.
      DCHECK((var->IsStackAllocated() || var->IsContextSlot()));
      MemOperand location = VarOperand(var, a1);
      if (generate_debug_code_ && op == Token::INIT_LET) {
        // Check for an uninitialized let binding.
        __ lw(a2, location);
        __ LoadRoot(t0, Heap::kTheHoleValueRootIndex);
        __ Check(eq, kLetBindingReInitialization, a2, Operand(t0));
      }
      EmitStoreToStackLocalOrContextSlot(var, location);
    }
  }
  // Non-initializing assignments to consts are ignored.
}


void FullCodeGenerator::EmitNamedPropertyAssignment(Assignment* expr) {
  // Assignment to a property, using a named store IC.
  Property* prop = expr->target()->AsProperty();
  DCHECK(prop != NULL);
  DCHECK(prop->key()->IsLiteral());

  // Record source code position before IC call.
  SetSourcePosition(expr->position());
  __ mov(StoreDescriptor::ValueRegister(), result_register());
  __ li(StoreDescriptor::NameRegister(),
        Operand(prop->key()->AsLiteral()->value()));
  __ pop(StoreDescriptor::ReceiverRegister());
  CallStoreIC(expr->AssignmentFeedbackId());

  PrepareForBailoutForId(expr->AssignmentId(), TOS_REG);
  context()->Plug(v0);
}


void FullCodeGenerator::EmitKeyedPropertyAssignment(Assignment* expr) {
  // Assignment to a property, using a keyed store IC.

  // Record source code position before IC call.
  SetSourcePosition(expr->position());
  // Call keyed store IC.
  // The arguments are:
  // - a0 is the value,
  // - a1 is the key,
  // - a2 is the receiver.
  __ mov(StoreDescriptor::ValueRegister(), result_register());
  __ Pop(StoreDescriptor::ReceiverRegister(), StoreDescriptor::NameRegister());
  DCHECK(StoreDescriptor::ValueRegister().is(a0));

  Handle<Code> ic = CodeFactory::KeyedStoreIC(isolate(), strict_mode()).code();
  CallIC(ic, expr->AssignmentFeedbackId());

  PrepareForBailoutForId(expr->AssignmentId(), TOS_REG);
  context()->Plug(v0);
}


void FullCodeGenerator::VisitProperty(Property* expr) {
  Comment cmnt(masm_, "[ Property");
  Expression* key = expr->key();

  if (key->IsPropertyName()) {
    if (!expr->IsSuperAccess()) {
      VisitForAccumulatorValue(expr->obj());
      __ Move(LoadDescriptor::ReceiverRegister(), v0);
      EmitNamedPropertyLoad(expr);
    } else {
      EmitNamedSuperPropertyLoad(expr);
    }
    PrepareForBailoutForId(expr->LoadId(), TOS_REG);
    context()->Plug(v0);
  } else {
    VisitForStackValue(expr->obj());
    VisitForAccumulatorValue(expr->key());
    __ Move(LoadDescriptor::NameRegister(), v0);
    __ pop(LoadDescriptor::ReceiverRegister());
    EmitKeyedPropertyLoad(expr);
    context()->Plug(v0);
  }
}


void FullCodeGenerator::CallIC(Handle<Code> code,
                               TypeFeedbackId id) {
  ic_total_count_++;
  __ Call(code, RelocInfo::CODE_TARGET, id);
}


// Code common for calls using the IC.
void FullCodeGenerator::EmitCallWithLoadIC(Call* expr) {
  Expression* callee = expr->expression();

  CallICState::CallType call_type =
      callee->IsVariableProxy() ? CallICState::FUNCTION : CallICState::METHOD;

  // Get the target function.
  if (call_type == CallICState::FUNCTION) {
    { StackValueContext context(this);
      EmitVariableLoad(callee->AsVariableProxy());
      PrepareForBailout(callee, NO_REGISTERS);
    }
    // Push undefined as receiver. This is patched in the method prologue if it
    // is a sloppy mode method.
    __ Push(isolate()->factory()->undefined_value());
  } else {
    // Load the function from the receiver.
    DCHECK(callee->IsProperty());
    DCHECK(!callee->AsProperty()->IsSuperAccess());
    __ lw(LoadDescriptor::ReceiverRegister(), MemOperand(sp, 0));
    EmitNamedPropertyLoad(callee->AsProperty());
    PrepareForBailoutForId(callee->AsProperty()->LoadId(), TOS_REG);
    // Push the target function under the receiver.
    __ lw(at, MemOperand(sp, 0));
    __ push(at);
    __ sw(v0, MemOperand(sp, kPointerSize));
  }

  EmitCall(expr, call_type);
}


void FullCodeGenerator::EmitSuperCallWithLoadIC(Call* expr) {
  Expression* callee = expr->expression();
  DCHECK(callee->IsProperty());
  Property* prop = callee->AsProperty();
  DCHECK(prop->IsSuperAccess());

  SetSourcePosition(prop->position());
  Literal* key = prop->key()->AsLiteral();
  DCHECK(!key->value()->IsSmi());
  // Load the function from the receiver.
  const Register scratch = a1;
  SuperReference* super_ref = prop->obj()->AsSuperReference();
  EmitLoadHomeObject(super_ref);
  __ Push(v0);
  VisitForAccumulatorValue(super_ref->this_var());
  __ Push(v0);
  __ lw(scratch, MemOperand(sp, kPointerSize));
  __ Push(scratch, v0);
  __ Push(key->value());

  // Stack here:
  //  - home_object
  //  - this (receiver)
  //  - home_object <-- LoadFromSuper will pop here and below.
  //  - this (receiver)
  //  - key
  __ CallRuntime(Runtime::kLoadFromSuper, 3);

  // Replace home_object with target function.
  __ sw(v0, MemOperand(sp, kPointerSize));

  // Stack here:
  // - target function
  // - this (receiver)
  EmitCall(expr, CallICState::METHOD);
}


// Code common for calls using the IC.
void FullCodeGenerator::EmitKeyedCallWithLoadIC(Call* expr,
                                                Expression* key) {
  // Load the key.
  VisitForAccumulatorValue(key);

  Expression* callee = expr->expression();

  // Load the function from the receiver.
  DCHECK(callee->IsProperty());
  __ lw(LoadDescriptor::ReceiverRegister(), MemOperand(sp, 0));
  __ Move(LoadDescriptor::NameRegister(), v0);
  EmitKeyedPropertyLoad(callee->AsProperty());
  PrepareForBailoutForId(callee->AsProperty()->LoadId(), TOS_REG);

  // Push the target function under the receiver.
  __ lw(at, MemOperand(sp, 0));
  __ push(at);
  __ sw(v0, MemOperand(sp, kPointerSize));

  EmitCall(expr, CallICState::METHOD);
}


void FullCodeGenerator::EmitCall(Call* expr, CallICState::CallType call_type) {
  // Load the arguments.
  ZoneList<Expression*>* args = expr->arguments();
  int arg_count = args->length();
  { PreservePositionScope scope(masm()->positions_recorder());
    for (int i = 0; i < arg_count; i++) {
      VisitForStackValue(args->at(i));
    }
  }

  // Record source position of the IC call.
  SetSourcePosition(expr->position());
  Handle<Code> ic = CallIC::initialize_stub(
      isolate(), arg_count, call_type);
  __ li(a3, Operand(Smi::FromInt(expr->CallFeedbackSlot())));
  __ lw(a1, MemOperand(sp, (arg_count + 1) * kPointerSize));
  // Don't assign a type feedback id to the IC, since type feedback is provided
  // by the vector above.
  CallIC(ic);

  RecordJSReturnSite(expr);
  // Restore context register.
  __ lw(cp, MemOperand(fp, StandardFrameConstants::kContextOffset));
  context()->DropAndPlug(1, v0);
}


void FullCodeGenerator::EmitResolvePossiblyDirectEval(int arg_count) {
  // t3: copy of the first argument or undefined if it doesn't exist.
  if (arg_count > 0) {
    __ lw(t3, MemOperand(sp, arg_count * kPointerSize));
  } else {
    __ LoadRoot(t3, Heap::kUndefinedValueRootIndex);
  }

  // t2: the enclosing JS function (the function containing the eval call).
  __ lw(t2, MemOperand(fp, JavaScriptFrameConstants::kFunctionOffset));

  // t1: the receiver of the enclosing function.
  int receiver_offset = 2 + info_->scope()->num_parameters();
  __ lw(t1, MemOperand(fp, receiver_offset * kPointerSize));

  // t0: the strict mode.
  __ li(t0, Operand(Smi::FromInt(strict_mode())));

  // a1: the start position of the scope the call resides in.
  __ li(a1, Operand(Smi::FromInt(scope()->start_position())));

  // Do the runtime call.
  __ Push(t3);
  __ Push(t2, t1, t0, a1);
  __ CallRuntime(Runtime::kResolvePossiblyDirectEval, 6);
}


void FullCodeGenerator::VisitCall(Call* expr) {
#ifdef DEBUG
  // We want to verify that RecordJSReturnSite gets called on all paths
  // through this function. Avoid early returns.
  expr->return_is_recorded_ = false;
#endif

  Comment cmnt(masm_, "[ Call");
  Expression* callee = expr->expression();
  Call::CallType call_type = expr->GetCallType(isolate());

  if (call_type == Call::POSSIBLY_EVAL_CALL) {
    // In a call to eval, we first call RuntimeHidden_ResolvePossiblyDirectEval
    // to resolve the function we need to call and the receiver of the
    // call. Then we call the resolved function using the given
    // arguments.
    ZoneList<Expression*>* args = expr->arguments();
    int arg_count = args->length();

    { PreservePositionScope pos_scope(masm()->positions_recorder());
      VisitForStackValue(callee);
      __ LoadRoot(a2, Heap::kUndefinedValueRootIndex);
      __ push(a2);  // Reserved receiver slot.

      // Push the arguments.
      for (int i = 0; i < arg_count; i++) {
        VisitForStackValue(args->at(i));
      }

      // Push a copy of the function (found below the arguments) and
      // resolve eval.
      __ lw(a1, MemOperand(sp, (arg_count + 1) * kPointerSize));
      __ push(a1);
      EmitResolvePossiblyDirectEval(arg_count);

      // The runtime call returns a pair of values in v0 (function) and
      // v1 (receiver). Touch up the stack with the right values.
      __ sw(v0, MemOperand(sp, (arg_count + 1) * kPointerSize));
      __ sw(v1, MemOperand(sp, arg_count * kPointerSize));
    }
    // Record source position for debugger.
    SetSourcePosition(expr->position());
    CallFunctionStub stub(isolate(), arg_count, NO_CALL_FUNCTION_FLAGS);
    __ lw(a1, MemOperand(sp, (arg_count + 1) * kPointerSize));
    __ CallStub(&stub);
    RecordJSReturnSite(expr);
    // Restore context register.
    __ lw(cp, MemOperand(fp, StandardFrameConstants::kContextOffset));
    context()->DropAndPlug(1, v0);
  } else if (call_type == Call::GLOBAL_CALL) {
    EmitCallWithLoadIC(expr);
  } else if (call_type == Call::LOOKUP_SLOT_CALL) {
    // Call to a lookup slot (dynamically introduced variable).
    VariableProxy* proxy = callee->AsVariableProxy();
    Label slow, done;

    { PreservePositionScope scope(masm()->positions_recorder());
      // Generate code for loading from variables potentially shadowed
      // by eval-introduced variables.
      EmitDynamicLookupFastCase(proxy, NOT_INSIDE_TYPEOF, &slow, &done);
    }

    __ bind(&slow);
    // Call the runtime to find the function to call (returned in v0)
    // and the object holding it (returned in v1).
    DCHECK(!context_register().is(a2));
    __ li(a2, Operand(proxy->name()));
    __ Push(context_register(), a2);
    __ CallRuntime(Runtime::kLoadLookupSlot, 2);
    __ Push(v0, v1);  // Function, receiver.

    // If fast case code has been generated, emit code to push the
    // function and receiver and have the slow path jump around this
    // code.
    if (done.is_linked()) {
      Label call;
      __ Branch(&call);
      __ bind(&done);
      // Push function.
      __ push(v0);
      // The receiver is implicitly the global receiver. Indicate this
      // by passing undefined to the call function stub.
      __ LoadRoot(a1, Heap::kUndefinedValueRootIndex);
      __ push(a1);
      __ bind(&call);
    }

    // The receiver is either the global receiver or an object found
    // by LoadContextSlot.
    EmitCall(expr);
  } else if (call_type == Call::PROPERTY_CALL) {
    Property* property = callee->AsProperty();
    bool is_named_call = property->key()->IsPropertyName();
    // super.x() is handled in EmitSuperCallWithLoadIC.
    if (property->IsSuperAccess() && is_named_call) {
      EmitSuperCallWithLoadIC(expr);
    } else {
      {
        PreservePositionScope scope(masm()->positions_recorder());
        VisitForStackValue(property->obj());
      }
      if (is_named_call) {
        EmitCallWithLoadIC(expr);
      } else {
        EmitKeyedCallWithLoadIC(expr, property->key());
      }
    }
  } else {
    DCHECK(call_type == Call::OTHER_CALL);
    // Call to an arbitrary expression not handled specially above.
    { PreservePositionScope scope(masm()->positions_recorder());
      VisitForStackValue(callee);
    }
    __ LoadRoot(a1, Heap::kUndefinedValueRootIndex);
    __ push(a1);
    // Emit function call.
    EmitCall(expr);
  }

#ifdef DEBUG
  // RecordJSReturnSite should have been called.
  DCHECK(expr->return_is_recorded_);
#endif
}


void FullCodeGenerator::VisitCallNew(CallNew* expr) {
  Comment cmnt(masm_, "[ CallNew");
  // According to ECMA-262, section 11.2.2, page 44, the function
  // expression in new calls must be evaluated before the
  // arguments.

  // Push constructor on the stack. If it's not a function it's used as
  // receiver for CALL_NON_FUNCTION, otherwise the value on the stack is
  // ignored.
  VisitForStackValue(expr->expression());

  // Push the arguments ("left-to-right") on the stack.
  ZoneList<Expression*>* args = expr->arguments();
  int arg_count = args->length();
  for (int i = 0; i < arg_count; i++) {
    VisitForStackValue(args->at(i));
  }

  // Call the construct call builtin that handles allocation and
  // constructor invocation.
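  // (Illustrative: for 'new Foo(1, 2)' the stack now holds Foo, 1, 2, and the
  // construct stub below is entered with a0 = 2 (argument count), a1 = Foo.)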
  SetSourcePosition(expr->position());

  // Load function and argument count into a1 and a0.
  __ li(a0, Operand(arg_count));
  __ lw(a1, MemOperand(sp, arg_count * kPointerSize));

  // Record call targets in unoptimized code.
  if (FLAG_pretenuring_call_new) {
    EnsureSlotContainsAllocationSite(expr->AllocationSiteFeedbackSlot());
    DCHECK(expr->AllocationSiteFeedbackSlot() ==
           expr->CallNewFeedbackSlot() + 1);
  }

  __ li(a2, FeedbackVector());
  __ li(a3, Operand(Smi::FromInt(expr->CallNewFeedbackSlot())));

  CallConstructStub stub(isolate(), RECORD_CONSTRUCTOR_TARGET);
  __ Call(stub.GetCode(), RelocInfo::CONSTRUCT_CALL);
  PrepareForBailoutForId(expr->ReturnId(), TOS_REG);
  context()->Plug(v0);
}


void FullCodeGenerator::EmitIsSmi(CallRuntime* expr) {
  ZoneList<Expression*>* args = expr->arguments();
  DCHECK(args->length() == 1);

  VisitForAccumulatorValue(args->at(0));

  Label materialize_true, materialize_false;
  Label* if_true = NULL;
  Label* if_false = NULL;
  Label* fall_through = NULL;
  context()->PrepareTest(&materialize_true, &materialize_false,
                         &if_true, &if_false, &fall_through);

  PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
  __ SmiTst(v0, t0);
  Split(eq, t0, Operand(zero_reg), if_true, if_false, fall_through);

  context()->Plug(if_true, if_false);
}


void FullCodeGenerator::EmitIsNonNegativeSmi(CallRuntime* expr) {
  ZoneList<Expression*>* args = expr->arguments();
  DCHECK(args->length() == 1);

  VisitForAccumulatorValue(args->at(0));

  Label materialize_true, materialize_false;
  Label* if_true = NULL;
  Label* if_false = NULL;
  Label* fall_through = NULL;
  context()->PrepareTest(&materialize_true, &materialize_false,
                         &if_true, &if_false, &fall_through);

  PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
  __ NonNegativeSmiTst(v0, at);
  Split(eq, at, Operand(zero_reg), if_true, if_false, fall_through);

  context()->Plug(if_true, if_false);
}


void FullCodeGenerator::EmitIsObject(CallRuntime* expr) {
  ZoneList<Expression*>* args = expr->arguments();
  DCHECK(args->length() == 1);

  VisitForAccumulatorValue(args->at(0));

  Label materialize_true, materialize_false;
  Label* if_true = NULL;
  Label* if_false = NULL;
  Label* fall_through = NULL;
  context()->PrepareTest(&materialize_true, &materialize_false,
                         &if_true, &if_false, &fall_through);

  __ JumpIfSmi(v0, if_false);
  __ LoadRoot(at, Heap::kNullValueRootIndex);
  __ Branch(if_true, eq, v0, Operand(at));
  __ lw(a2, FieldMemOperand(v0, HeapObject::kMapOffset));
  // Undetectable objects behave like undefined when tested with typeof.
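  // (The classic case is document.all in browsers: it is a real object, yet
  // typeof document.all == "undefined", so the check here must reject it.)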
  __ lbu(a1, FieldMemOperand(a2, Map::kBitFieldOffset));
  __ And(at, a1, Operand(1 << Map::kIsUndetectable));
  __ Branch(if_false, ne, at, Operand(zero_reg));
  __ lbu(a1, FieldMemOperand(a2, Map::kInstanceTypeOffset));
  __ Branch(if_false, lt, a1, Operand(FIRST_NONCALLABLE_SPEC_OBJECT_TYPE));
  PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
  Split(le, a1, Operand(LAST_NONCALLABLE_SPEC_OBJECT_TYPE),
        if_true, if_false, fall_through);

  context()->Plug(if_true, if_false);
}


void FullCodeGenerator::EmitIsSpecObject(CallRuntime* expr) {
  ZoneList<Expression*>* args = expr->arguments();
  DCHECK(args->length() == 1);

  VisitForAccumulatorValue(args->at(0));

  Label materialize_true, materialize_false;
  Label* if_true = NULL;
  Label* if_false = NULL;
  Label* fall_through = NULL;
  context()->PrepareTest(&materialize_true, &materialize_false,
                         &if_true, &if_false, &fall_through);

  __ JumpIfSmi(v0, if_false);
  __ GetObjectType(v0, a1, a1);
  PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
  Split(ge, a1, Operand(FIRST_SPEC_OBJECT_TYPE),
        if_true, if_false, fall_through);

  context()->Plug(if_true, if_false);
}


void FullCodeGenerator::EmitIsUndetectableObject(CallRuntime* expr) {
  ZoneList<Expression*>* args = expr->arguments();
  DCHECK(args->length() == 1);

  VisitForAccumulatorValue(args->at(0));

  Label materialize_true, materialize_false;
  Label* if_true = NULL;
  Label* if_false = NULL;
  Label* fall_through = NULL;
  context()->PrepareTest(&materialize_true, &materialize_false,
                         &if_true, &if_false, &fall_through);

  __ JumpIfSmi(v0, if_false);
  __ lw(a1, FieldMemOperand(v0, HeapObject::kMapOffset));
  __ lbu(a1, FieldMemOperand(a1, Map::kBitFieldOffset));
  PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
  __ And(at, a1, Operand(1 << Map::kIsUndetectable));
  Split(ne, at, Operand(zero_reg), if_true, if_false, fall_through);

  context()->Plug(if_true, if_false);
}


void FullCodeGenerator::EmitIsStringWrapperSafeForDefaultValueOf(
    CallRuntime* expr) {
  ZoneList<Expression*>* args = expr->arguments();
  DCHECK(args->length() == 1);

  VisitForAccumulatorValue(args->at(0));

  Label materialize_true, materialize_false, skip_lookup;
  Label* if_true = NULL;
  Label* if_false = NULL;
  Label* fall_through = NULL;
  context()->PrepareTest(&materialize_true, &materialize_false,
                         &if_true, &if_false, &fall_through);

  __ AssertNotSmi(v0);

  __ lw(a1, FieldMemOperand(v0, HeapObject::kMapOffset));
  __ lbu(t0, FieldMemOperand(a1, Map::kBitField2Offset));
  __ And(t0, t0, 1 << Map::kStringWrapperSafeForDefaultValueOf);
  __ Branch(&skip_lookup, ne, t0, Operand(zero_reg));

  // Check for fast case object. Generate false result for slow case object.
  __ lw(a2, FieldMemOperand(v0, JSObject::kPropertiesOffset));
  __ lw(a2, FieldMemOperand(a2, HeapObject::kMapOffset));
  __ LoadRoot(t0, Heap::kHashTableMapRootIndex);
  __ Branch(if_false, eq, a2, Operand(t0));

  // Look for valueOf name in the descriptor array, and indicate false if
  // found. Since we omit an enumeration index check, if it is added via a
  // transition that shares its descriptor array, this is a false positive.
  Label entry, loop, done;

  // Skip loop if no descriptors are valid.
  __ NumberOfOwnDescriptors(a3, a1);
  __ Branch(&done, eq, a3, Operand(zero_reg));

  __ LoadInstanceDescriptors(a1, t0);
  // t0: descriptor array.
  // a3: valid entries in the descriptor array.
  STATIC_ASSERT(kSmiTag == 0);
  STATIC_ASSERT(kSmiTagSize == 1);
  STATIC_ASSERT(kPointerSize == 4);
  __ li(at, Operand(DescriptorArray::kDescriptorSize));
  __ Mul(a3, a3, at);
  // Calculate location of the first key name.
  __ Addu(t0, t0, Operand(DescriptorArray::kFirstOffset - kHeapObjectTag));
  // Calculate the end of the descriptor array.
  __ mov(a2, t0);
  __ sll(t1, a3, kPointerSizeLog2);
  __ Addu(a2, a2, t1);

  // Loop through all the keys in the descriptor array. If one of these is the
  // string "valueOf" the result is false.
  // The use of t2 to store the valueOf string assumes that it is not otherwise
  // used in the loop below.
  __ li(t2, Operand(isolate()->factory()->value_of_string()));
  __ jmp(&entry);
  __ bind(&loop);
  __ lw(a3, MemOperand(t0, 0));
  __ Branch(if_false, eq, a3, Operand(t2));
  __ Addu(t0, t0, Operand(DescriptorArray::kDescriptorSize * kPointerSize));
  __ bind(&entry);
  __ Branch(&loop, ne, t0, Operand(a2));

  __ bind(&done);

  // Set the bit in the map to indicate that there is no local valueOf field.
  __ lbu(a2, FieldMemOperand(a1, Map::kBitField2Offset));
  __ Or(a2, a2, Operand(1 << Map::kStringWrapperSafeForDefaultValueOf));
  __ sb(a2, FieldMemOperand(a1, Map::kBitField2Offset));

  __ bind(&skip_lookup);

  // If a valueOf property is not found on the object check that its
  // prototype is the un-modified String prototype. If not result is false.
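  // (Illustrative: a fresh 'new String("a")' stays on the fast path only
  // while String.prototype is untouched; after something like
  //   String.prototype.valueOf = function() { return "b"; };
  // the prototype map check below fails and the result is false.)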
  __ lw(a2, FieldMemOperand(a1, Map::kPrototypeOffset));
  __ JumpIfSmi(a2, if_false);
  __ lw(a2, FieldMemOperand(a2, HeapObject::kMapOffset));
  __ lw(a3, ContextOperand(cp, Context::GLOBAL_OBJECT_INDEX));
  __ lw(a3, FieldMemOperand(a3, GlobalObject::kNativeContextOffset));
  __ lw(a3, ContextOperand(a3, Context::STRING_FUNCTION_PROTOTYPE_MAP_INDEX));
  PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
  Split(eq, a2, Operand(a3), if_true, if_false, fall_through);

  context()->Plug(if_true, if_false);
}


void FullCodeGenerator::EmitIsFunction(CallRuntime* expr) {
  ZoneList<Expression*>* args = expr->arguments();
  DCHECK(args->length() == 1);

  VisitForAccumulatorValue(args->at(0));

  Label materialize_true, materialize_false;
  Label* if_true = NULL;
  Label* if_false = NULL;
  Label* fall_through = NULL;
  context()->PrepareTest(&materialize_true, &materialize_false,
                         &if_true, &if_false, &fall_through);

  __ JumpIfSmi(v0, if_false);
  __ GetObjectType(v0, a1, a2);
  PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
  __ Branch(if_true, eq, a2, Operand(JS_FUNCTION_TYPE));
  __ Branch(if_false);

  context()->Plug(if_true, if_false);
}


void FullCodeGenerator::EmitIsMinusZero(CallRuntime* expr) {
  ZoneList<Expression*>* args = expr->arguments();
  DCHECK(args->length() == 1);

  VisitForAccumulatorValue(args->at(0));

  Label materialize_true, materialize_false;
  Label* if_true = NULL;
  Label* if_false = NULL;
  Label* fall_through = NULL;
  context()->PrepareTest(&materialize_true, &materialize_false,
                         &if_true, &if_false, &fall_through);

  __ CheckMap(v0, a1, Heap::kHeapNumberMapRootIndex, if_false, DO_SMI_CHECK);
  __ lw(a2, FieldMemOperand(v0, HeapNumber::kExponentOffset));
  __ lw(a1, FieldMemOperand(v0, HeapNumber::kMantissaOffset));
  __ li(t0, 0x80000000);
  Label not_nan;
  __ Branch(&not_nan, ne, a2, Operand(t0));
  __ mov(t0, zero_reg);
  __ mov(a2, a1);
  __ bind(&not_nan);

  PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
  Split(eq, a2, Operand(t0), if_true, if_false, fall_through);

  context()->Plug(if_true, if_false);
}


void FullCodeGenerator::EmitIsArray(CallRuntime* expr) {
  ZoneList<Expression*>* args = expr->arguments();
  DCHECK(args->length() == 1);

  VisitForAccumulatorValue(args->at(0));

  Label materialize_true, materialize_false;
  Label* if_true = NULL;
  Label* if_false = NULL;
  Label* fall_through = NULL;
  context()->PrepareTest(&materialize_true, &materialize_false,
                         &if_true, &if_false, &fall_through);

  __ JumpIfSmi(v0, if_false);
  __ GetObjectType(v0, a1, a1);
  PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
  Split(eq, a1, Operand(JS_ARRAY_TYPE),
        if_true, if_false, fall_through);

  context()->Plug(if_true, if_false);
}


void FullCodeGenerator::EmitIsRegExp(CallRuntime* expr) {
  ZoneList<Expression*>* args = expr->arguments();
  DCHECK(args->length() == 1);

  VisitForAccumulatorValue(args->at(0));

  Label materialize_true, materialize_false;
  Label* if_true = NULL;
  Label* if_false = NULL;
  Label* fall_through = NULL;
  context()->PrepareTest(&materialize_true, &materialize_false,
                         &if_true, &if_false, &fall_through);

  __ JumpIfSmi(v0, if_false);
  __ GetObjectType(v0, a1, a1);
  PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
  Split(eq, a1, Operand(JS_REGEXP_TYPE), if_true, if_false, fall_through);

  context()->Plug(if_true, if_false);
}


void FullCodeGenerator::EmitIsConstructCall(CallRuntime* expr) {
  DCHECK(expr->arguments()->length() == 0);

  Label materialize_true, materialize_false;
  Label* if_true = NULL;
  Label* if_false = NULL;
  Label* fall_through = NULL;
  context()->PrepareTest(&materialize_true, &materialize_false,
                         &if_true, &if_false, &fall_through);

  // Get the frame pointer for the calling frame.
  __ lw(a2, MemOperand(fp, StandardFrameConstants::kCallerFPOffset));

  // Skip the arguments adaptor frame if it exists.
  Label check_frame_marker;
  __ lw(a1, MemOperand(a2, StandardFrameConstants::kContextOffset));
  __ Branch(&check_frame_marker, ne,
            a1, Operand(Smi::FromInt(StackFrame::ARGUMENTS_ADAPTOR)));
  __ lw(a2, MemOperand(a2, StandardFrameConstants::kCallerFPOffset));

  // Check the marker in the calling frame.
  __ bind(&check_frame_marker);
  __ lw(a1, MemOperand(a2, StandardFrameConstants::kMarkerOffset));
  PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
  Split(eq, a1, Operand(Smi::FromInt(StackFrame::CONSTRUCT)),
        if_true, if_false, fall_through);

  context()->Plug(if_true, if_false);
}


void FullCodeGenerator::EmitObjectEquals(CallRuntime* expr) {
  ZoneList<Expression*>* args = expr->arguments();
  DCHECK(args->length() == 2);

  // Load the two objects into registers and perform the comparison.
  VisitForStackValue(args->at(0));
  VisitForAccumulatorValue(args->at(1));

  Label materialize_true, materialize_false;
  Label* if_true = NULL;
  Label* if_false = NULL;
  Label* fall_through = NULL;
  context()->PrepareTest(&materialize_true, &materialize_false,
                         &if_true, &if_false, &fall_through);

  __ pop(a1);
  PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
  Split(eq, v0, Operand(a1), if_true, if_false, fall_through);

  context()->Plug(if_true, if_false);
}


void FullCodeGenerator::EmitArguments(CallRuntime* expr) {
  ZoneList<Expression*>* args = expr->arguments();
  DCHECK(args->length() == 1);

  // ArgumentsAccessStub expects the key in a1 and the formal
  // parameter count in a0.
  VisitForAccumulatorValue(args->at(0));
  __ mov(a1, v0);
  __ li(a0, Operand(Smi::FromInt(info_->scope()->num_parameters())));
  ArgumentsAccessStub stub(isolate(), ArgumentsAccessStub::READ_ELEMENT);
  __ CallStub(&stub);
  context()->Plug(v0);
}


void FullCodeGenerator::EmitArgumentsLength(CallRuntime* expr) {
  DCHECK(expr->arguments()->length() == 0);
  Label exit;
  // Get the number of formal parameters.
  __ li(v0, Operand(Smi::FromInt(info_->scope()->num_parameters())));

  // Check if the calling frame is an arguments adaptor frame.
  __ lw(a2, MemOperand(fp, StandardFrameConstants::kCallerFPOffset));
  __ lw(a3, MemOperand(a2, StandardFrameConstants::kContextOffset));
  __ Branch(&exit, ne, a3,
            Operand(Smi::FromInt(StackFrame::ARGUMENTS_ADAPTOR)));

  // Arguments adaptor case: Read the arguments length from the
  // adaptor frame.
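  // Note (explanatory, not in the original source): an arguments adaptor
  // frame is interposed by the runtime when a call's actual argument count
  // differs from the callee's formal parameter count; it records the actual
  // count, which is why the length is read from there instead of using the
  // formal count loaded above.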
  __ lw(v0, MemOperand(a2, ArgumentsAdaptorFrameConstants::kLengthOffset));

  __ bind(&exit);
  context()->Plug(v0);
}


void FullCodeGenerator::EmitClassOf(CallRuntime* expr) {
  ZoneList<Expression*>* args = expr->arguments();
  DCHECK(args->length() == 1);
  Label done, null, function, non_function_constructor;

  VisitForAccumulatorValue(args->at(0));

  // If the object is a smi, we return null.
  __ JumpIfSmi(v0, &null);

  // Check that the object is a JS object but take special care of JS
  // functions to make sure they have 'Function' as their class.
  // Assume that there are only two callable types, and one of them is at
  // either end of the type range for JS object types. Saves extra comparisons.
  STATIC_ASSERT(NUM_OF_CALLABLE_SPEC_OBJECT_TYPES == 2);
  __ GetObjectType(v0, v0, a1);  // Map is now in v0.
  __ Branch(&null, lt, a1, Operand(FIRST_SPEC_OBJECT_TYPE));

  STATIC_ASSERT(FIRST_NONCALLABLE_SPEC_OBJECT_TYPE ==
                FIRST_SPEC_OBJECT_TYPE + 1);
  __ Branch(&function, eq, a1, Operand(FIRST_SPEC_OBJECT_TYPE));

  STATIC_ASSERT(LAST_NONCALLABLE_SPEC_OBJECT_TYPE ==
                LAST_SPEC_OBJECT_TYPE - 1);
  __ Branch(&function, eq, a1, Operand(LAST_SPEC_OBJECT_TYPE));
  // Assume that there is no larger type.
  STATIC_ASSERT(LAST_NONCALLABLE_SPEC_OBJECT_TYPE == LAST_TYPE - 1);

  // Check if the constructor in the map is a JS function.
  __ lw(v0, FieldMemOperand(v0, Map::kConstructorOffset));
  __ GetObjectType(v0, a1, a1);
  __ Branch(&non_function_constructor, ne, a1, Operand(JS_FUNCTION_TYPE));

  // v0 now contains the constructor function. Grab the
  // instance class name from there.
  __ lw(v0, FieldMemOperand(v0, JSFunction::kSharedFunctionInfoOffset));
  __ lw(v0, FieldMemOperand(v0, SharedFunctionInfo::kInstanceClassNameOffset));
  __ Branch(&done);

  // Functions have class 'Function'.
  __ bind(&function);
  __ LoadRoot(v0, Heap::kFunction_stringRootIndex);
  __ jmp(&done);

  // Objects with a non-function constructor have class 'Object'.
  __ bind(&non_function_constructor);
  __ LoadRoot(v0, Heap::kObject_stringRootIndex);
  __ jmp(&done);

  // Non-JS objects have class null.
  __ bind(&null);
  __ LoadRoot(v0, Heap::kNullValueRootIndex);

  // All done.
  __ bind(&done);

  context()->Plug(v0);
}


void FullCodeGenerator::EmitSubString(CallRuntime* expr) {
  // Load the arguments on the stack and call the stub.
  SubStringStub stub(isolate());
  ZoneList<Expression*>* args = expr->arguments();
  DCHECK(args->length() == 3);
  VisitForStackValue(args->at(0));
  VisitForStackValue(args->at(1));
  VisitForStackValue(args->at(2));
  __ CallStub(&stub);
  context()->Plug(v0);
}


void FullCodeGenerator::EmitRegExpExec(CallRuntime* expr) {
  // Load the arguments on the stack and call the stub.
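  // Note (assumption, not in the original source): the four stack arguments
  // are presumably (regexp, subject, previous index, last match info), the
  // layout RegExpExecStub expects for the %_RegExpExec intrinsic.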
  RegExpExecStub stub(isolate());
  ZoneList<Expression*>* args = expr->arguments();
  DCHECK(args->length() == 4);
  VisitForStackValue(args->at(0));
  VisitForStackValue(args->at(1));
  VisitForStackValue(args->at(2));
  VisitForStackValue(args->at(3));
  __ CallStub(&stub);
  context()->Plug(v0);
}


void FullCodeGenerator::EmitValueOf(CallRuntime* expr) {
  ZoneList<Expression*>* args = expr->arguments();
  DCHECK(args->length() == 1);

  VisitForAccumulatorValue(args->at(0));  // Load the object.

  Label done;
  // If the object is a smi return the object.
  __ JumpIfSmi(v0, &done);
  // If the object is not a value type, return the object.
  __ GetObjectType(v0, a1, a1);
  __ Branch(&done, ne, a1, Operand(JS_VALUE_TYPE));

  __ lw(v0, FieldMemOperand(v0, JSValue::kValueOffset));

  __ bind(&done);
  context()->Plug(v0);
}


void FullCodeGenerator::EmitDateField(CallRuntime* expr) {
  ZoneList<Expression*>* args = expr->arguments();
  DCHECK(args->length() == 2);
  DCHECK_NE(NULL, args->at(1)->AsLiteral());
  Smi* index = Smi::cast(*(args->at(1)->AsLiteral()->value()));

  VisitForAccumulatorValue(args->at(0));  // Load the object.

  Label runtime, done, not_date_object;
  Register object = v0;
  Register result = v0;
  Register scratch0 = t5;
  Register scratch1 = a1;

  __ JumpIfSmi(object, &not_date_object);
  __ GetObjectType(object, scratch1, scratch1);
  __ Branch(&not_date_object, ne, scratch1, Operand(JS_DATE_TYPE));

  if (index->value() == 0) {
    __ lw(result, FieldMemOperand(object, JSDate::kValueOffset));
    __ jmp(&done);
  } else {
    if (index->value() < JSDate::kFirstUncachedField) {
      ExternalReference stamp = ExternalReference::date_cache_stamp(isolate());
      __ li(scratch1, Operand(stamp));
      __ lw(scratch1, MemOperand(scratch1));
      __ lw(scratch0, FieldMemOperand(object, JSDate::kCacheStampOffset));
      __ Branch(&runtime, ne, scratch1, Operand(scratch0));
      __ lw(result, FieldMemOperand(object, JSDate::kValueOffset +
                                            kPointerSize * index->value()));
      __ jmp(&done);
    }
    __ bind(&runtime);
    __ PrepareCallCFunction(2, scratch1);
    __ li(a1, Operand(index));
    __ Move(a0, object);
    __ CallCFunction(ExternalReference::get_date_field_function(isolate()), 2);
    __ jmp(&done);
  }

  __ bind(&not_date_object);
  __ CallRuntime(Runtime::kThrowNotDateError, 0);
  __ bind(&done);
  context()->Plug(v0);
}


void FullCodeGenerator::EmitOneByteSeqStringSetChar(CallRuntime* expr) {
  ZoneList<Expression*>* args = expr->arguments();
  DCHECK_EQ(3, args->length());

  Register string = v0;
  Register index = a1;
  Register value = a2;

  VisitForStackValue(args->at(0));        // index
  VisitForStackValue(args->at(1));        // value
  VisitForAccumulatorValue(args->at(2));  // string
  __ Pop(index, value);

  if (FLAG_debug_code) {
    __ SmiTst(value, at);
    __ Check(eq, kNonSmiValue, at, Operand(zero_reg));
    __ SmiTst(index, at);
    __ Check(eq, kNonSmiIndex, at, Operand(zero_reg));
    __ SmiUntag(index, index);
    static const uint32_t one_byte_seq_type = kSeqStringTag | kOneByteStringTag;
    Register scratch = t5;
    __ EmitSeqStringSetCharCheck(
        string, index, value, scratch, one_byte_seq_type);
    __ SmiTag(index, index);
  }

  __ SmiUntag(value, value);
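  // Note (explanatory, not in the original source): the address of character
  // i in a sequential one-byte string is
  //   string + SeqOneByteString::kHeaderSize - kHeapObjectTag + i,
  // i.e. the untagged object address plus the fixed header, plus the
  // untagged index computed below.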
  __ Addu(at,
          string,
          Operand(SeqOneByteString::kHeaderSize - kHeapObjectTag));
  __ SmiUntag(index);
  __ Addu(at, at, index);
  __ sb(value, MemOperand(at));
  context()->Plug(string);
}


void FullCodeGenerator::EmitTwoByteSeqStringSetChar(CallRuntime* expr) {
  ZoneList<Expression*>* args = expr->arguments();
  DCHECK_EQ(3, args->length());

  Register string = v0;
  Register index = a1;
  Register value = a2;

  VisitForStackValue(args->at(0));        // index
  VisitForStackValue(args->at(1));        // value
  VisitForAccumulatorValue(args->at(2));  // string
  __ Pop(index, value);

  if (FLAG_debug_code) {
    __ SmiTst(value, at);
    __ Check(eq, kNonSmiValue, at, Operand(zero_reg));
    __ SmiTst(index, at);
    __ Check(eq, kNonSmiIndex, at, Operand(zero_reg));
    __ SmiUntag(index, index);
    static const uint32_t two_byte_seq_type = kSeqStringTag | kTwoByteStringTag;
    Register scratch = t5;
    __ EmitSeqStringSetCharCheck(
        string, index, value, scratch, two_byte_seq_type);
    __ SmiTag(index, index);
  }

  __ SmiUntag(value, value);
  __ Addu(at,
          string,
          Operand(SeqTwoByteString::kHeaderSize - kHeapObjectTag));
  __ Addu(at, at, index);
  STATIC_ASSERT(kSmiTagSize == 1 && kSmiTag == 0);
  __ sh(value, MemOperand(at));
  context()->Plug(string);
}


void FullCodeGenerator::EmitMathPow(CallRuntime* expr) {
  // Load the arguments on the stack and call the runtime function.
  ZoneList<Expression*>* args = expr->arguments();
  DCHECK(args->length() == 2);
  VisitForStackValue(args->at(0));
  VisitForStackValue(args->at(1));
  MathPowStub stub(isolate(), MathPowStub::ON_STACK);
  __ CallStub(&stub);
  context()->Plug(v0);
}


void FullCodeGenerator::EmitSetValueOf(CallRuntime* expr) {
  ZoneList<Expression*>* args = expr->arguments();
  DCHECK(args->length() == 2);

  VisitForStackValue(args->at(0));        // Load the object.
  VisitForAccumulatorValue(args->at(1));  // Load the value.
  __ pop(a1);  // v0 = value. a1 = object.

  Label done;
  // If the object is a smi, return the value.
  __ JumpIfSmi(a1, &done);

  // If the object is not a value type, return the value.
  __ GetObjectType(a1, a2, a2);
  __ Branch(&done, ne, a2, Operand(JS_VALUE_TYPE));

  // Store the value.
  __ sw(v0, FieldMemOperand(a1, JSValue::kValueOffset));
  // Update the write barrier. Save the value as it will be
  // overwritten by the write barrier code and is needed afterward.
  __ mov(a2, v0);
  __ RecordWriteField(
      a1, JSValue::kValueOffset, a2, a3, kRAHasBeenSaved, kDontSaveFPRegs);

  __ bind(&done);
  context()->Plug(v0);
}


void FullCodeGenerator::EmitNumberToString(CallRuntime* expr) {
  ZoneList<Expression*>* args = expr->arguments();
  DCHECK_EQ(args->length(), 1);

  // Load the argument into a0 and call the stub.
  VisitForAccumulatorValue(args->at(0));
  __ mov(a0, result_register());

  NumberToStringStub stub(isolate());
  __ CallStub(&stub);
  context()->Plug(v0);
}


void FullCodeGenerator::EmitStringCharFromCode(CallRuntime* expr) {
  ZoneList<Expression*>* args = expr->arguments();
  DCHECK(args->length() == 1);

  VisitForAccumulatorValue(args->at(0));

  Label done;
  StringCharFromCodeGenerator generator(v0, a1);
  generator.GenerateFast(masm_);
  __ jmp(&done);

  NopRuntimeCallHelper call_helper;
  generator.GenerateSlow(masm_, call_helper);

  __ bind(&done);
  context()->Plug(a1);
}


void FullCodeGenerator::EmitStringCharCodeAt(CallRuntime* expr) {
  ZoneList<Expression*>* args = expr->arguments();
  DCHECK(args->length() == 2);

  VisitForStackValue(args->at(0));
  VisitForAccumulatorValue(args->at(1));
  __ mov(a0, result_register());

  Register object = a1;
  Register index = a0;
  Register result = v0;

  __ pop(object);

  Label need_conversion;
  Label index_out_of_range;
  Label done;
  StringCharCodeAtGenerator generator(object,
                                      index,
                                      result,
                                      &need_conversion,
                                      &need_conversion,
                                      &index_out_of_range,
                                      STRING_INDEX_IS_NUMBER);
  generator.GenerateFast(masm_);
  __ jmp(&done);

  __ bind(&index_out_of_range);
  // When the index is out of range, the spec requires us to return
  // NaN.
  __ LoadRoot(result, Heap::kNanValueRootIndex);
  __ jmp(&done);

  __ bind(&need_conversion);
  // Load the undefined value into the result register, which will
  // trigger conversion.
  __ LoadRoot(result, Heap::kUndefinedValueRootIndex);
  __ jmp(&done);

  NopRuntimeCallHelper call_helper;
  generator.GenerateSlow(masm_, call_helper);

  __ bind(&done);
  context()->Plug(result);
}


void FullCodeGenerator::EmitStringCharAt(CallRuntime* expr) {
  ZoneList<Expression*>* args = expr->arguments();
  DCHECK(args->length() == 2);

  VisitForStackValue(args->at(0));
  VisitForAccumulatorValue(args->at(1));
  __ mov(a0, result_register());

  Register object = a1;
  Register index = a0;
  Register scratch = a3;
  Register result = v0;

  __ pop(object);

  Label need_conversion;
  Label index_out_of_range;
  Label done;
  StringCharAtGenerator generator(object,
                                  index,
                                  scratch,
                                  result,
                                  &need_conversion,
                                  &need_conversion,
                                  &index_out_of_range,
                                  STRING_INDEX_IS_NUMBER);
  generator.GenerateFast(masm_);
  __ jmp(&done);

  __ bind(&index_out_of_range);
  // When the index is out of range, the spec requires us to return
  // the empty string.
  __ LoadRoot(result, Heap::kempty_stringRootIndex);
  __ jmp(&done);

  __ bind(&need_conversion);
  // Move smi zero into the result register, which will trigger
  // conversion.
  __ li(result, Operand(Smi::FromInt(0)));
  __ jmp(&done);

  NopRuntimeCallHelper call_helper;
  generator.GenerateSlow(masm_, call_helper);

  __ bind(&done);
  context()->Plug(result);
}


void FullCodeGenerator::EmitStringAdd(CallRuntime* expr) {
  ZoneList<Expression*>* args = expr->arguments();
  DCHECK_EQ(2, args->length());
  VisitForStackValue(args->at(0));
  VisitForAccumulatorValue(args->at(1));

  __ pop(a1);
  __ mov(a0, result_register());  // StringAddStub requires args in a0, a1.
  StringAddStub stub(isolate(), STRING_ADD_CHECK_BOTH, NOT_TENURED);
  __ CallStub(&stub);
  context()->Plug(v0);
}


void FullCodeGenerator::EmitStringCompare(CallRuntime* expr) {
  ZoneList<Expression*>* args = expr->arguments();
  DCHECK_EQ(2, args->length());

  VisitForStackValue(args->at(0));
  VisitForStackValue(args->at(1));

  StringCompareStub stub(isolate());
  __ CallStub(&stub);
  context()->Plug(v0);
}


void FullCodeGenerator::EmitCallFunction(CallRuntime* expr) {
  ZoneList<Expression*>* args = expr->arguments();
  DCHECK(args->length() >= 2);

  int arg_count = args->length() - 2;  // 2 ~ receiver and function.
  for (int i = 0; i < arg_count + 1; i++) {
    VisitForStackValue(args->at(i));
  }
  VisitForAccumulatorValue(args->last());  // Function.

  Label runtime, done;
  // Check for non-function argument (including proxy).
  __ JumpIfSmi(v0, &runtime);
  __ GetObjectType(v0, a1, a1);
  __ Branch(&runtime, ne, a1, Operand(JS_FUNCTION_TYPE));

  // InvokeFunction requires the function in a1. Move it in there.
  __ mov(a1, result_register());
  ParameterCount count(arg_count);
  __ InvokeFunction(a1, count, CALL_FUNCTION, NullCallWrapper());
  __ lw(cp, MemOperand(fp, StandardFrameConstants::kContextOffset));
  __ jmp(&done);

  __ bind(&runtime);
  __ push(v0);
  __ CallRuntime(Runtime::kCall, args->length());
  __ bind(&done);

  context()->Plug(v0);
}


void FullCodeGenerator::EmitRegExpConstructResult(CallRuntime* expr) {
  RegExpConstructResultStub stub(isolate());
  ZoneList<Expression*>* args = expr->arguments();
  DCHECK(args->length() == 3);
  VisitForStackValue(args->at(0));
  VisitForStackValue(args->at(1));
  VisitForAccumulatorValue(args->at(2));
  __ mov(a0, result_register());
  __ pop(a1);
  __ pop(a2);
  __ CallStub(&stub);
  context()->Plug(v0);
}


void FullCodeGenerator::EmitGetFromCache(CallRuntime* expr) {
  ZoneList<Expression*>* args = expr->arguments();
  DCHECK_EQ(2, args->length());

  DCHECK_NE(NULL, args->at(0)->AsLiteral());
  int cache_id = Smi::cast(*(args->at(0)->AsLiteral()->value()))->value();

  Handle<FixedArray> jsfunction_result_caches(
      isolate()->native_context()->jsfunction_result_caches());
  if (jsfunction_result_caches->length() <= cache_id) {
    __ Abort(kAttemptToUseUndefinedCache);
    __ LoadRoot(v0, Heap::kUndefinedValueRootIndex);
    context()->Plug(v0);
    return;
  }

  VisitForAccumulatorValue(args->at(1));

  Register key = v0;
  Register cache = a1;
  __ lw(cache, ContextOperand(cp, Context::GLOBAL_OBJECT_INDEX));
  __ lw(cache, FieldMemOperand(cache, GlobalObject::kNativeContextOffset));
  __ lw(cache,
        ContextOperand(
            cache, Context::JSFUNCTION_RESULT_CACHES_INDEX));
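  // Note (explanatory, not in the original source): a JSFunctionResultCache
  // is a FixedArray of (key, value) pairs plus a "finger" index marking the
  // most recently hit entry. The fast path below only probes the entry the
  // finger points at; any other key falls through to the runtime lookup.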
  __ lw(cache,
        FieldMemOperand(cache, FixedArray::OffsetOfElementAt(cache_id)));


  Label done, not_found;
  STATIC_ASSERT(kSmiTag == 0 && kSmiTagSize == 1);
  __ lw(a2, FieldMemOperand(cache, JSFunctionResultCache::kFingerOffset));
  // a2 now holds finger offset as a smi.
  __ Addu(a3, cache, Operand(FixedArray::kHeaderSize - kHeapObjectTag));
  // a3 now points to the start of fixed array elements.
  __ sll(at, a2, kPointerSizeLog2 - kSmiTagSize);
  __ addu(a3, a3, at);
  // a3 now points to key of indexed element of cache.
  __ lw(a2, MemOperand(a3));
  __ Branch(&not_found, ne, key, Operand(a2));

  __ lw(v0, MemOperand(a3, kPointerSize));
  __ Branch(&done);

  __ bind(&not_found);
  // Call runtime to perform the lookup.
  __ Push(cache, key);
  __ CallRuntime(Runtime::kGetFromCache, 2);

  __ bind(&done);
  context()->Plug(v0);
}


void FullCodeGenerator::EmitHasCachedArrayIndex(CallRuntime* expr) {
  ZoneList<Expression*>* args = expr->arguments();
  VisitForAccumulatorValue(args->at(0));

  Label materialize_true, materialize_false;
  Label* if_true = NULL;
  Label* if_false = NULL;
  Label* fall_through = NULL;
  context()->PrepareTest(&materialize_true, &materialize_false,
                         &if_true, &if_false, &fall_through);

  __ lw(a0, FieldMemOperand(v0, String::kHashFieldOffset));
  __ And(a0, a0, Operand(String::kContainsCachedArrayIndexMask));

  PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
  Split(eq, a0, Operand(zero_reg), if_true, if_false, fall_through);

  context()->Plug(if_true, if_false);
}


void FullCodeGenerator::EmitGetCachedArrayIndex(CallRuntime* expr) {
  ZoneList<Expression*>* args = expr->arguments();
  DCHECK(args->length() == 1);
  VisitForAccumulatorValue(args->at(0));

  __ AssertString(v0);

  __ lw(v0, FieldMemOperand(v0, String::kHashFieldOffset));
  __ IndexFromHash(v0, v0);

  context()->Plug(v0);
}


void FullCodeGenerator::EmitFastOneByteArrayJoin(CallRuntime* expr) {
  Label bailout, done, one_char_separator, long_separator,
      non_trivial_array, not_size_one_array, loop,
      empty_separator_loop, one_char_separator_loop,
      one_char_separator_loop_entry, long_separator_loop;
  ZoneList<Expression*>* args = expr->arguments();
  DCHECK(args->length() == 2);
  VisitForStackValue(args->at(1));
  VisitForAccumulatorValue(args->at(0));

  // All aliases of the same register have disjoint lifetimes.
  Register array = v0;
  Register elements = no_reg;  // Will be v0.
  Register result = no_reg;  // Will be v0.
  Register separator = a1;
  Register array_length = a2;
  Register result_pos = no_reg;  // Will be a2.
  Register string_length = a3;
  Register string = t0;
  Register element = t1;
  Register elements_end = t2;
  Register scratch1 = t3;
  Register scratch2 = t5;
  Register scratch3 = t4;

  // Separator operand is on the stack.
  __ pop(separator);

  // Check that the array is a JSArray.
  __ JumpIfSmi(array, &bailout);
  __ GetObjectType(array, scratch1, scratch2);
  __ Branch(&bailout, ne, scratch2, Operand(JS_ARRAY_TYPE));

  // Check that the array has fast elements.
  __ CheckFastElements(scratch1, scratch2, &bailout);

  // If the array has length zero, return the empty string.
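  // Note (explanatory, not in the original source): this zero-length check
  // is the first of several early outs. After it, the join proceeds in two
  // phases: one validation pass that checks every element is a flat one-byte
  // string and sums the lengths, then a single allocation filled by one of
  // three copy loops specialized for an empty, one-character, or longer
  // separator.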
  __ lw(array_length, FieldMemOperand(array, JSArray::kLengthOffset));
  __ SmiUntag(array_length);
  __ Branch(&non_trivial_array, ne, array_length, Operand(zero_reg));
  __ LoadRoot(v0, Heap::kempty_stringRootIndex);
  __ Branch(&done);

  __ bind(&non_trivial_array);

  // Get the FixedArray containing array's elements.
  elements = array;
  __ lw(elements, FieldMemOperand(array, JSArray::kElementsOffset));
  array = no_reg;  // End of array's live range.

  // Check that all array elements are sequential one-byte strings, and
  // accumulate the sum of their lengths, as a smi-encoded value.
  __ mov(string_length, zero_reg);
  __ Addu(element,
          elements, Operand(FixedArray::kHeaderSize - kHeapObjectTag));
  __ sll(elements_end, array_length, kPointerSizeLog2);
  __ Addu(elements_end, element, elements_end);
  // Loop condition: while (element < elements_end).
  // Live values in registers:
  //   elements: Fixed array of strings.
  //   array_length: Length of the fixed array of strings (not smi)
  //   separator: Separator string
  //   string_length: Accumulated sum of string lengths (smi).
  //   element: Current array element.
  //   elements_end: Array end.
  if (generate_debug_code_) {
    __ Assert(gt, kNoEmptyArraysHereInEmitFastOneByteArrayJoin, array_length,
              Operand(zero_reg));
  }
  __ bind(&loop);
  __ lw(string, MemOperand(element));
  __ Addu(element, element, kPointerSize);
  __ JumpIfSmi(string, &bailout);
  __ lw(scratch1, FieldMemOperand(string, HeapObject::kMapOffset));
  __ lbu(scratch1, FieldMemOperand(scratch1, Map::kInstanceTypeOffset));
  __ JumpIfInstanceTypeIsNotSequentialOneByte(scratch1, scratch2, &bailout);
  __ lw(scratch1, FieldMemOperand(string, SeqOneByteString::kLengthOffset));
  __ AdduAndCheckForOverflow(string_length, string_length, scratch1, scratch3);
  __ BranchOnOverflow(&bailout, scratch3);
  __ Branch(&loop, lt, element, Operand(elements_end));

  // If array_length is 1, return elements[0], a string.
  __ Branch(&not_size_one_array, ne, array_length, Operand(1));
  __ lw(v0, FieldMemOperand(elements, FixedArray::kHeaderSize));
  __ Branch(&done);

  __ bind(&not_size_one_array);

  // Live values in registers:
  //   separator: Separator string
  //   array_length: Length of the array.
  //   string_length: Sum of string lengths (smi).
  //   elements: FixedArray of strings.

  // Check that the separator is a flat one-byte string.
  __ JumpIfSmi(separator, &bailout);
  __ lw(scratch1, FieldMemOperand(separator, HeapObject::kMapOffset));
  __ lbu(scratch1, FieldMemOperand(scratch1, Map::kInstanceTypeOffset));
  __ JumpIfInstanceTypeIsNotSequentialOneByte(scratch1, scratch2, &bailout);

  // Add (separator length times array_length) - separator length to the
  // string_length to get the length of the result string. array_length is not
  // smi but the other values are, so the result is a smi.
  __ lw(scratch1, FieldMemOperand(separator, SeqOneByteString::kLengthOffset));
  __ Subu(string_length, string_length, Operand(scratch1));
  __ Mul(scratch3, scratch2, array_length, scratch1);
  // Check for smi overflow. No overflow if higher 33 bits of 64-bit result are
  // zero.
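  // Note (assumption about the Mul macro's operand order, not in the
  // original source): the Mul above is taken to produce the 64-bit product
  // of array_length and the smi-tagged separator length, with the high word
  // in scratch3 and the low word in scratch2. The product fits in a smi only
  // if the high word is zero and the sign bit of the low word is clear,
  // which the two branches below verify before the overflow-checked add.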
  __ Branch(&bailout, ne, scratch3, Operand(zero_reg));
  __ And(scratch3, scratch2, Operand(0x80000000));
  __ Branch(&bailout, ne, scratch3, Operand(zero_reg));
  __ AdduAndCheckForOverflow(string_length, string_length, scratch2, scratch3);
  __ BranchOnOverflow(&bailout, scratch3);
  __ SmiUntag(string_length);

  // Get first element in the array to free up the elements register to be used
  // for the result.
  __ Addu(element,
          elements, Operand(FixedArray::kHeaderSize - kHeapObjectTag));
  result = elements;  // End of live range for elements.
  elements = no_reg;
  // Live values in registers:
  //   element: First array element
  //   separator: Separator string
  //   string_length: Length of result string (not smi)
  //   array_length: Length of the array.
  __ AllocateOneByteString(result, string_length, scratch1, scratch2,
                           elements_end, &bailout);
  // Prepare for looping. Set up elements_end to end of the array. Set
  // result_pos to the position of the result where to write the first
  // character.
  __ sll(elements_end, array_length, kPointerSizeLog2);
  __ Addu(elements_end, element, elements_end);
  result_pos = array_length;  // End of live range for array_length.
  array_length = no_reg;
  __ Addu(result_pos,
          result,
          Operand(SeqOneByteString::kHeaderSize - kHeapObjectTag));

  // Check the length of the separator.
  __ lw(scratch1, FieldMemOperand(separator, SeqOneByteString::kLengthOffset));
  __ li(at, Operand(Smi::FromInt(1)));
  __ Branch(&one_char_separator, eq, scratch1, Operand(at));
  __ Branch(&long_separator, gt, scratch1, Operand(at));

  // Empty separator case.
  __ bind(&empty_separator_loop);
  // Live values in registers:
  //   result_pos: the position to which we are currently copying characters.
  //   element: Current array element.
  //   elements_end: Array end.

  // Copy next array element to the result.
  __ lw(string, MemOperand(element));
  __ Addu(element, element, kPointerSize);
  __ lw(string_length, FieldMemOperand(string, String::kLengthOffset));
  __ SmiUntag(string_length);
  __ Addu(string, string, SeqOneByteString::kHeaderSize - kHeapObjectTag);
  __ CopyBytes(string, result_pos, string_length, scratch1);
  // End while (element < elements_end).
  __ Branch(&empty_separator_loop, lt, element, Operand(elements_end));
  DCHECK(result.is(v0));
  __ Branch(&done);

  // One-character separator case.
  __ bind(&one_char_separator);
  // Replace separator with its one-byte character value.
  __ lbu(separator, FieldMemOperand(separator, SeqOneByteString::kHeaderSize));
  // Jump into the loop after the code that copies the separator, so the first
  // element is not preceded by a separator.
  __ jmp(&one_char_separator_loop_entry);

  __ bind(&one_char_separator_loop);
  // Live values in registers:
  //   result_pos: the position to which we are currently copying characters.
  //   element: Current array element.
  //   elements_end: Array end.
  //   separator: Single separator one-byte char (in lower byte).

  // Copy the separator character to the result.
  __ sb(separator, MemOperand(result_pos));
  __ Addu(result_pos, result_pos, 1);

  // Copy next array element to the result.
  __ bind(&one_char_separator_loop_entry);
  __ lw(string, MemOperand(element));
  __ Addu(element, element, kPointerSize);
  __ lw(string_length, FieldMemOperand(string, String::kLengthOffset));
  __ SmiUntag(string_length);
  __ Addu(string, string, SeqOneByteString::kHeaderSize - kHeapObjectTag);
  __ CopyBytes(string, result_pos, string_length, scratch1);
  // End while (element < elements_end).
  __ Branch(&one_char_separator_loop, lt, element, Operand(elements_end));
  DCHECK(result.is(v0));
  __ Branch(&done);

  // Long separator case (separator is more than one character). Entry is at
  // the label long_separator below.
  __ bind(&long_separator_loop);
  // Live values in registers:
  //   result_pos: the position to which we are currently copying characters.
  //   element: Current array element.
  //   elements_end: Array end.
  //   separator: Separator string.

  // Copy the separator to the result.
  __ lw(string_length, FieldMemOperand(separator, String::kLengthOffset));
  __ SmiUntag(string_length);
  __ Addu(string,
          separator,
          Operand(SeqOneByteString::kHeaderSize - kHeapObjectTag));
  __ CopyBytes(string, result_pos, string_length, scratch1);

  __ bind(&long_separator);
  __ lw(string, MemOperand(element));
  __ Addu(element, element, kPointerSize);
  __ lw(string_length, FieldMemOperand(string, String::kLengthOffset));
  __ SmiUntag(string_length);
  __ Addu(string, string, SeqOneByteString::kHeaderSize - kHeapObjectTag);
  __ CopyBytes(string, result_pos, string_length, scratch1);
  // End while (element < elements_end).
  __ Branch(&long_separator_loop, lt, element, Operand(elements_end));
  DCHECK(result.is(v0));
  __ Branch(&done);

  __ bind(&bailout);
  __ LoadRoot(v0, Heap::kUndefinedValueRootIndex);
  __ bind(&done);
  context()->Plug(v0);
}


void FullCodeGenerator::EmitDebugIsActive(CallRuntime* expr) {
  DCHECK(expr->arguments()->length() == 0);
  ExternalReference debug_is_active =
      ExternalReference::debug_is_active_address(isolate());
  __ li(at, Operand(debug_is_active));
  __ lb(v0, MemOperand(at));
  __ SmiTag(v0);
  context()->Plug(v0);
}


void FullCodeGenerator::VisitCallRuntime(CallRuntime* expr) {
  if (expr->function() != NULL &&
      expr->function()->intrinsic_type == Runtime::INLINE) {
    Comment cmnt(masm_, "[ InlineRuntimeCall");
    EmitInlineRuntimeCall(expr);
    return;
  }

  Comment cmnt(masm_, "[ CallRuntime");
  ZoneList<Expression*>* args = expr->arguments();
  int arg_count = args->length();

  if (expr->is_jsruntime()) {
    // Push the builtins object as the receiver.
    Register receiver = LoadDescriptor::ReceiverRegister();
    __ lw(receiver, GlobalObjectOperand());
    __ lw(receiver, FieldMemOperand(receiver, GlobalObject::kBuiltinsOffset));
    __ push(receiver);

    // Load the function from the receiver.
    __ li(LoadDescriptor::NameRegister(), Operand(expr->name()));
    if (FLAG_vector_ics) {
      __ li(VectorLoadICDescriptor::SlotRegister(),
            Operand(Smi::FromInt(expr->CallRuntimeFeedbackSlot())));
      CallLoadIC(NOT_CONTEXTUAL);
    } else {
      CallLoadIC(NOT_CONTEXTUAL, expr->CallRuntimeFeedbackId());
    }

    // Push the target function under the receiver.
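    // Note (explanatory, not in the original source): at this point the
    // builtins object sits on top of the stack and v0 holds the loaded
    // function. The three instructions below duplicate the receiver on top
    // and overwrite the old slot with the function, leaving the stack as
    // [..., function, receiver], the layout CallFunctionStub expects.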
    __ lw(at, MemOperand(sp, 0));
    __ push(at);
    __ sw(v0, MemOperand(sp, kPointerSize));

    // Push the arguments ("left-to-right").
    int arg_count = args->length();
    for (int i = 0; i < arg_count; i++) {
      VisitForStackValue(args->at(i));
    }

    // Record source position of the IC call.
    SetSourcePosition(expr->position());
    CallFunctionStub stub(isolate(), arg_count, NO_CALL_FUNCTION_FLAGS);
    __ lw(a1, MemOperand(sp, (arg_count + 1) * kPointerSize));
    __ CallStub(&stub);

    // Restore context register.
    __ lw(cp, MemOperand(fp, StandardFrameConstants::kContextOffset));

    context()->DropAndPlug(1, v0);
  } else {
    // Push the arguments ("left-to-right").
    for (int i = 0; i < arg_count; i++) {
      VisitForStackValue(args->at(i));
    }

    // Call the C runtime function.
    __ CallRuntime(expr->function(), arg_count);
    context()->Plug(v0);
  }
}


void FullCodeGenerator::VisitUnaryOperation(UnaryOperation* expr) {
  switch (expr->op()) {
    case Token::DELETE: {
      Comment cmnt(masm_, "[ UnaryOperation (DELETE)");
      Property* property = expr->expression()->AsProperty();
      VariableProxy* proxy = expr->expression()->AsVariableProxy();

      if (property != NULL) {
        VisitForStackValue(property->obj());
        VisitForStackValue(property->key());
        __ li(a1, Operand(Smi::FromInt(strict_mode())));
        __ push(a1);
        __ InvokeBuiltin(Builtins::DELETE, CALL_FUNCTION);
        context()->Plug(v0);
      } else if (proxy != NULL) {
        Variable* var = proxy->var();
        // Delete of an unqualified identifier is disallowed in strict mode
        // but "delete this" is allowed.
        DCHECK(strict_mode() == SLOPPY || var->is_this());
        if (var->IsUnallocated()) {
          __ lw(a2, GlobalObjectOperand());
          __ li(a1, Operand(var->name()));
          __ li(a0, Operand(Smi::FromInt(SLOPPY)));
          __ Push(a2, a1, a0);
          __ InvokeBuiltin(Builtins::DELETE, CALL_FUNCTION);
          context()->Plug(v0);
        } else if (var->IsStackAllocated() || var->IsContextSlot()) {
          // Result of deleting non-global, non-dynamic variables is false.
          // The subexpression does not have side effects.
          context()->Plug(var->is_this());
        } else {
          // Non-global variable. Call the runtime to try to delete from the
          // context where the variable was introduced.
          DCHECK(!context_register().is(a2));
          __ li(a2, Operand(var->name()));
          __ Push(context_register(), a2);
          __ CallRuntime(Runtime::kDeleteLookupSlot, 2);
          context()->Plug(v0);
        }
      } else {
        // Result of deleting non-property, non-variable reference is true.
        // The subexpression may have side effects.
        VisitForEffect(expr->expression());
        context()->Plug(true);
      }
      break;
    }

    case Token::VOID: {
      Comment cmnt(masm_, "[ UnaryOperation (VOID)");
      VisitForEffect(expr->expression());
      context()->Plug(Heap::kUndefinedValueRootIndex);
      break;
    }

    case Token::NOT: {
      Comment cmnt(masm_, "[ UnaryOperation (NOT)");
      if (context()->IsEffect()) {
        // Unary NOT has no side effects so it's only necessary to visit the
        // subexpression. Match the optimizing compiler by not branching.
        VisitForEffect(expr->expression());
      } else if (context()->IsTest()) {
        const TestContext* test = TestContext::cast(context());
        // The labels are swapped for the recursive call.
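        // Note (explanatory, not in the original source): passing the false
        // label where the true label is expected (and vice versa) makes the
        // subexpression's control flow land on the opposite outcome, which
        // implements logical negation without materializing a boolean value.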
        VisitForControl(expr->expression(),
                        test->false_label(),
                        test->true_label(),
                        test->fall_through());
        context()->Plug(test->true_label(), test->false_label());
      } else {
        // We handle value contexts explicitly rather than simply visiting
        // for control and plugging the control flow into the context,
        // because we need to prepare a pair of extra administrative AST ids
        // for the optimizing compiler.
        DCHECK(context()->IsAccumulatorValue() || context()->IsStackValue());
        Label materialize_true, materialize_false, done;
        VisitForControl(expr->expression(),
                        &materialize_false,
                        &materialize_true,
                        &materialize_true);
        __ bind(&materialize_true);
        PrepareForBailoutForId(expr->MaterializeTrueId(), NO_REGISTERS);
        __ LoadRoot(v0, Heap::kTrueValueRootIndex);
        if (context()->IsStackValue()) __ push(v0);
        __ jmp(&done);
        __ bind(&materialize_false);
        PrepareForBailoutForId(expr->MaterializeFalseId(), NO_REGISTERS);
        __ LoadRoot(v0, Heap::kFalseValueRootIndex);
        if (context()->IsStackValue()) __ push(v0);
        __ bind(&done);
      }
      break;
    }

    case Token::TYPEOF: {
      Comment cmnt(masm_, "[ UnaryOperation (TYPEOF)");
      { StackValueContext context(this);
        VisitForTypeofValue(expr->expression());
      }
      __ CallRuntime(Runtime::kTypeof, 1);
      context()->Plug(v0);
      break;
    }

    default:
      UNREACHABLE();
  }
}


void FullCodeGenerator::VisitCountOperation(CountOperation* expr) {
  DCHECK(expr->expression()->IsValidReferenceExpression());

  Comment cmnt(masm_, "[ CountOperation");
  SetSourcePosition(expr->position());

  // Expression can only be a property, a global or a (parameter or local)
  // slot.
  enum LhsKind { VARIABLE, NAMED_PROPERTY, KEYED_PROPERTY };
  LhsKind assign_type = VARIABLE;
  Property* prop = expr->expression()->AsProperty();
  // In case of a property we use the uninitialized expression context
  // of the key to detect a named property.
  if (prop != NULL) {
    assign_type =
        (prop->key()->IsPropertyName()) ? NAMED_PROPERTY : KEYED_PROPERTY;
  }

  // Evaluate expression and get value.
  if (assign_type == VARIABLE) {
    DCHECK(expr->expression()->AsVariableProxy()->var() != NULL);
    AccumulatorValueContext context(this);
    EmitVariableLoad(expr->expression()->AsVariableProxy());
  } else {
    // Reserve space for result of postfix operation.
    if (expr->is_postfix() && !context()->IsEffect()) {
      __ li(at, Operand(Smi::FromInt(0)));
      __ push(at);
    }
    if (assign_type == NAMED_PROPERTY) {
      // Put the object both on the stack and in the register.
      VisitForStackValue(prop->obj());
      __ lw(LoadDescriptor::ReceiverRegister(), MemOperand(sp, 0));
      EmitNamedPropertyLoad(prop);
    } else {
      VisitForStackValue(prop->obj());
      VisitForStackValue(prop->key());
      __ lw(LoadDescriptor::ReceiverRegister(),
            MemOperand(sp, 1 * kPointerSize));
      __ lw(LoadDescriptor::NameRegister(), MemOperand(sp, 0));
      EmitKeyedPropertyLoad(prop);
    }
  }

  // We need a second deoptimization point after loading the value
  // in case evaluating the property load may have a side effect.
  if (assign_type == VARIABLE) {
    PrepareForBailout(expr->expression(), TOS_REG);
  } else {
    PrepareForBailoutForId(prop->LoadId(), TOS_REG);
  }

  // Inline smi case if we are in a loop.
  Label stub_call, done;
  JumpPatchSite patch_site(masm_);

  int count_value = expr->op() == Token::INC ? 1 : -1;
  __ mov(a0, v0);
  if (ShouldInlineSmiCase(expr->op())) {
    Label slow;
    patch_site.EmitJumpIfNotSmi(v0, &slow);

    // Save result for postfix expressions.
    if (expr->is_postfix()) {
      if (!context()->IsEffect()) {
        // Save the result on the stack. If we have a named or keyed property
        // we store the result under the receiver that is currently on top
        // of the stack.
        switch (assign_type) {
          case VARIABLE:
            __ push(v0);
            break;
          case NAMED_PROPERTY:
            __ sw(v0, MemOperand(sp, kPointerSize));
            break;
          case KEYED_PROPERTY:
            __ sw(v0, MemOperand(sp, 2 * kPointerSize));
            break;
        }
      }
    }

    Register scratch1 = a1;
    Register scratch2 = t0;
    __ li(scratch1, Operand(Smi::FromInt(count_value)));
    __ AdduAndCheckForOverflow(v0, v0, scratch1, scratch2);
    __ BranchOnNoOverflow(&done, scratch2);
    // Call stub. Undo operation first.
    __ Move(v0, a0);
    __ jmp(&stub_call);
    __ bind(&slow);
  }
  ToNumberStub convert_stub(isolate());
  __ CallStub(&convert_stub);

  // Save result for postfix expressions.
  if (expr->is_postfix()) {
    if (!context()->IsEffect()) {
      // Save the result on the stack. If we have a named or keyed property
      // we store the result under the receiver that is currently on top
      // of the stack.
      switch (assign_type) {
        case VARIABLE:
          __ push(v0);
          break;
        case NAMED_PROPERTY:
          __ sw(v0, MemOperand(sp, kPointerSize));
          break;
        case KEYED_PROPERTY:
          __ sw(v0, MemOperand(sp, 2 * kPointerSize));
          break;
      }
    }
  }

  __ bind(&stub_call);
  __ mov(a1, v0);
  __ li(a0, Operand(Smi::FromInt(count_value)));

  // Record position before stub call.
  SetSourcePosition(expr->position());

  Handle<Code> code =
      CodeFactory::BinaryOpIC(isolate(), Token::ADD, NO_OVERWRITE).code();
  CallIC(code, expr->CountBinOpFeedbackId());
  patch_site.EmitPatchInfo();
  __ bind(&done);

  // Store the value returned in v0.
  switch (assign_type) {
    case VARIABLE:
      if (expr->is_postfix()) {
        { EffectContext context(this);
          EmitVariableAssignment(expr->expression()->AsVariableProxy()->var(),
                                 Token::ASSIGN);
          PrepareForBailoutForId(expr->AssignmentId(), TOS_REG);
          context.Plug(v0);
        }
        // For all contexts except EffectContext we have the result on
        // top of the stack.
        if (!context()->IsEffect()) {
          context()->PlugTOS();
        }
      } else {
        EmitVariableAssignment(expr->expression()->AsVariableProxy()->var(),
                               Token::ASSIGN);
        PrepareForBailoutForId(expr->AssignmentId(), TOS_REG);
        context()->Plug(v0);
      }
      break;
    case NAMED_PROPERTY: {
      __ mov(StoreDescriptor::ValueRegister(), result_register());
      __ li(StoreDescriptor::NameRegister(),
            Operand(prop->key()->AsLiteral()->value()));
      __ pop(StoreDescriptor::ReceiverRegister());
      CallStoreIC(expr->CountStoreFeedbackId());
      PrepareForBailoutForId(expr->AssignmentId(), TOS_REG);
      if (expr->is_postfix()) {
        if (!context()->IsEffect()) {
          context()->PlugTOS();
        }
      } else {
        context()->Plug(v0);
      }
      break;
    }
    case KEYED_PROPERTY: {
      __ mov(StoreDescriptor::ValueRegister(), result_register());
      __ Pop(StoreDescriptor::ReceiverRegister(),
             StoreDescriptor::NameRegister());
      Handle<Code> ic =
          CodeFactory::KeyedStoreIC(isolate(), strict_mode()).code();
      CallIC(ic, expr->CountStoreFeedbackId());
      PrepareForBailoutForId(expr->AssignmentId(), TOS_REG);
      if (expr->is_postfix()) {
        if (!context()->IsEffect()) {
          context()->PlugTOS();
        }
      } else {
        context()->Plug(v0);
      }
      break;
    }
  }
}


void FullCodeGenerator::VisitForTypeofValue(Expression* expr) {
  DCHECK(!context()->IsEffect());
  DCHECK(!context()->IsTest());
  VariableProxy* proxy = expr->AsVariableProxy();
  if (proxy != NULL && proxy->var()->IsUnallocated()) {
    Comment cmnt(masm_, "[ Global variable");
    __ lw(LoadDescriptor::ReceiverRegister(), GlobalObjectOperand());
    __ li(LoadDescriptor::NameRegister(), Operand(proxy->name()));
    if (FLAG_vector_ics) {
      __ li(VectorLoadICDescriptor::SlotRegister(),
            Operand(Smi::FromInt(proxy->VariableFeedbackSlot())));
    }
    // Use a regular load, not a contextual load, to avoid a reference
    // error.
    CallLoadIC(NOT_CONTEXTUAL);
    PrepareForBailout(expr, TOS_REG);
    context()->Plug(v0);
  } else if (proxy != NULL && proxy->var()->IsLookupSlot()) {
    Comment cmnt(masm_, "[ Lookup slot");
    Label done, slow;

    // Generate code for loading from variables potentially shadowed
    // by eval-introduced variables.
    EmitDynamicLookupFastCase(proxy, INSIDE_TYPEOF, &slow, &done);

    __ bind(&slow);
    __ li(a0, Operand(proxy->name()));
    __ Push(cp, a0);
    __ CallRuntime(Runtime::kLoadLookupSlotNoReferenceError, 2);
    PrepareForBailout(expr, TOS_REG);
    __ bind(&done);

    context()->Plug(v0);
  } else {
    // This expression cannot throw a reference error at the top level.
    VisitInDuplicateContext(expr);
  }
}

void FullCodeGenerator::EmitLiteralCompareTypeof(Expression* expr,
                                                 Expression* sub_expr,
                                                 Handle<String> check) {
  Label materialize_true, materialize_false;
  Label* if_true = NULL;
  Label* if_false = NULL;
  Label* fall_through = NULL;
  context()->PrepareTest(&materialize_true, &materialize_false,
                         &if_true, &if_false, &fall_through);

  { AccumulatorValueContext context(this);
    VisitForTypeofValue(sub_expr);
  }
  PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);

  Factory* factory = isolate()->factory();
  if (String::Equals(check, factory->number_string())) {
    __ JumpIfSmi(v0, if_true);
    __ lw(v0, FieldMemOperand(v0, HeapObject::kMapOffset));
    __ LoadRoot(at, Heap::kHeapNumberMapRootIndex);
    Split(eq, v0, Operand(at), if_true, if_false, fall_through);
  } else if (String::Equals(check, factory->string_string())) {
    __ JumpIfSmi(v0, if_false);
    // Check for undetectable objects => false.
    __ GetObjectType(v0, v0, a1);
    __ Branch(if_false, ge, a1, Operand(FIRST_NONSTRING_TYPE));
    __ lbu(a1, FieldMemOperand(v0, Map::kBitFieldOffset));
    __ And(a1, a1, Operand(1 << Map::kIsUndetectable));
    Split(eq, a1, Operand(zero_reg),
          if_true, if_false, fall_through);
  } else if (String::Equals(check, factory->symbol_string())) {
    __ JumpIfSmi(v0, if_false);
    __ GetObjectType(v0, v0, a1);
    Split(eq, a1, Operand(SYMBOL_TYPE), if_true, if_false, fall_through);
  } else if (String::Equals(check, factory->boolean_string())) {
    __ LoadRoot(at, Heap::kTrueValueRootIndex);
    __ Branch(if_true, eq, v0, Operand(at));
    __ LoadRoot(at, Heap::kFalseValueRootIndex);
    Split(eq, v0, Operand(at), if_true, if_false, fall_through);
  } else if (String::Equals(check, factory->undefined_string())) {
    __ LoadRoot(at, Heap::kUndefinedValueRootIndex);
    __ Branch(if_true, eq, v0, Operand(at));
    __ JumpIfSmi(v0, if_false);
    // Check for undetectable objects => true.
    __ lw(v0, FieldMemOperand(v0, HeapObject::kMapOffset));
    __ lbu(a1, FieldMemOperand(v0, Map::kBitFieldOffset));
    __ And(a1, a1, Operand(1 << Map::kIsUndetectable));
    Split(ne, a1, Operand(zero_reg), if_true, if_false, fall_through);
  } else if (String::Equals(check, factory->function_string())) {
    __ JumpIfSmi(v0, if_false);
    STATIC_ASSERT(NUM_OF_CALLABLE_SPEC_OBJECT_TYPES == 2);
    __ GetObjectType(v0, v0, a1);
    __ Branch(if_true, eq, a1, Operand(JS_FUNCTION_TYPE));
    Split(eq, a1, Operand(JS_FUNCTION_PROXY_TYPE),
          if_true, if_false, fall_through);
  } else if (String::Equals(check, factory->object_string())) {
    __ JumpIfSmi(v0, if_false);
    __ LoadRoot(at, Heap::kNullValueRootIndex);
    __ Branch(if_true, eq, v0, Operand(at));
    // Check for JS objects => true.
    __ GetObjectType(v0, v0, a1);
    __ Branch(if_false, lt, a1, Operand(FIRST_NONCALLABLE_SPEC_OBJECT_TYPE));
    __ lbu(a1, FieldMemOperand(v0, Map::kInstanceTypeOffset));
    __ Branch(if_false, gt, a1, Operand(LAST_NONCALLABLE_SPEC_OBJECT_TYPE));
    // Check for undetectable objects => false.
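    // Note (explanatory, not in the original source): the undetectable bit
    // exists for objects like document.all, which must masquerade as
    // undefined. Such objects already answered "undefined" in the branch
    // above, so they must not also be classified as "object" here.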
    __ lbu(a1, FieldMemOperand(v0, Map::kBitFieldOffset));
    __ And(a1, a1, Operand(1 << Map::kIsUndetectable));
    Split(eq, a1, Operand(zero_reg), if_true, if_false, fall_through);
  } else {
    if (if_false != fall_through) __ jmp(if_false);
  }
  context()->Plug(if_true, if_false);
}


void FullCodeGenerator::VisitCompareOperation(CompareOperation* expr) {
  Comment cmnt(masm_, "[ CompareOperation");
  SetSourcePosition(expr->position());

  // First we try a fast inlined version of the compare when one of
  // the operands is a literal.
  if (TryLiteralCompare(expr)) return;

  // Always perform the comparison for its control flow. Pack the result
  // into the expression's context after the comparison is performed.
  Label materialize_true, materialize_false;
  Label* if_true = NULL;
  Label* if_false = NULL;
  Label* fall_through = NULL;
  context()->PrepareTest(&materialize_true, &materialize_false,
                         &if_true, &if_false, &fall_through);

  Token::Value op = expr->op();
  VisitForStackValue(expr->left());
  switch (op) {
    case Token::IN:
      VisitForStackValue(expr->right());
      __ InvokeBuiltin(Builtins::IN, CALL_FUNCTION);
      PrepareForBailoutBeforeSplit(expr, false, NULL, NULL);
      __ LoadRoot(t0, Heap::kTrueValueRootIndex);
      Split(eq, v0, Operand(t0), if_true, if_false, fall_through);
      break;

    case Token::INSTANCEOF: {
      VisitForStackValue(expr->right());
      InstanceofStub stub(isolate(), InstanceofStub::kNoFlags);
      __ CallStub(&stub);
      PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
      // The stub returns 0 for true.
      Split(eq, v0, Operand(zero_reg), if_true, if_false, fall_through);
      break;
    }

    default: {
      VisitForAccumulatorValue(expr->right());
      Condition cc = CompareIC::ComputeCondition(op);
      __ mov(a0, result_register());
      __ pop(a1);

      bool inline_smi_code = ShouldInlineSmiCase(op);
      JumpPatchSite patch_site(masm_);
      if (inline_smi_code) {
        Label slow_case;
        __ Or(a2, a0, Operand(a1));
        patch_site.EmitJumpIfNotSmi(a2, &slow_case);
        Split(cc, a1, Operand(a0), if_true, if_false, NULL);
        __ bind(&slow_case);
      }
      // Record position and call the compare IC.
      SetSourcePosition(expr->position());
      Handle<Code> ic = CodeFactory::CompareIC(isolate(), op).code();
      CallIC(ic, expr->CompareOperationFeedbackId());
      patch_site.EmitPatchInfo();
      PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
      Split(cc, v0, Operand(zero_reg), if_true, if_false, fall_through);
    }
  }

  // Convert the result of the comparison into one expected for this
  // expression's context.
  context()->Plug(if_true, if_false);
}


void FullCodeGenerator::EmitLiteralCompareNil(CompareOperation* expr,
                                              Expression* sub_expr,
                                              NilValue nil) {
  Label materialize_true, materialize_false;
  Label* if_true = NULL;
  Label* if_false = NULL;
  Label* fall_through = NULL;
  context()->PrepareTest(&materialize_true, &materialize_false,
                         &if_true, &if_false, &fall_through);

  VisitForAccumulatorValue(sub_expr);
  PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
  __ mov(a0, result_register());
  if (expr->op() == Token::EQ_STRICT) {
    Heap::RootListIndex nil_value = nil == kNullValue ?
        Heap::kNullValueRootIndex :
        Heap::kUndefinedValueRootIndex;
    __ LoadRoot(a1, nil_value);
    Split(eq, a0, Operand(a1), if_true, if_false, fall_through);
  } else {
    Handle<Code> ic = CompareNilICStub::GetUninitialized(isolate(), nil);
    CallIC(ic, expr->CompareOperationFeedbackId());
    Split(ne, v0, Operand(zero_reg), if_true, if_false, fall_through);
  }
  context()->Plug(if_true, if_false);
}


void FullCodeGenerator::VisitThisFunction(ThisFunction* expr) {
  __ lw(v0, MemOperand(fp, JavaScriptFrameConstants::kFunctionOffset));
  context()->Plug(v0);
}


Register FullCodeGenerator::result_register() {
  return v0;
}


Register FullCodeGenerator::context_register() {
  return cp;
}


void FullCodeGenerator::StoreToFrameField(int frame_offset, Register value) {
  DCHECK_EQ(POINTER_SIZE_ALIGN(frame_offset), frame_offset);
  __ sw(value, MemOperand(fp, frame_offset));
}


void FullCodeGenerator::LoadContextField(Register dst, int context_index) {
  __ lw(dst, ContextOperand(cp, context_index));
}


void FullCodeGenerator::PushFunctionArgumentForContextAllocation() {
  Scope* declaration_scope = scope()->DeclarationScope();
  if (declaration_scope->is_global_scope() ||
      declaration_scope->is_module_scope()) {
    // Contexts nested in the native context have a canonical empty function
    // as their closure, not the anonymous closure containing the global
    // code. Pass a smi sentinel and let the runtime look up the empty
    // function.
    __ li(at, Operand(Smi::FromInt(0)));
  } else if (declaration_scope->is_eval_scope()) {
    // Contexts created by a call to eval have the same closure as the
    // context calling eval, not the anonymous closure containing the eval
    // code. Fetch it from the context.
    __ lw(at, ContextOperand(cp, Context::CLOSURE_INDEX));
  } else {
    DCHECK(declaration_scope->is_function_scope());
    __ lw(at, MemOperand(fp, JavaScriptFrameConstants::kFunctionOffset));
  }
  __ push(at);
}


// ----------------------------------------------------------------------------
// Non-local control flow support.

void FullCodeGenerator::EnterFinallyBlock() {
  DCHECK(!result_register().is(a1));
  // Store result register while executing finally block.
  __ push(result_register());
  // Cook return address in link register to stack (smi encoded Code* delta).
  __ Subu(a1, ra, Operand(masm_->CodeObject()));
  DCHECK_EQ(1, kSmiTagSize + kSmiShiftSize);
  STATIC_ASSERT(0 == kSmiTag);
  __ Addu(a1, a1, Operand(a1));  // Convert to smi.

  // Store cooked return address while executing finally block.
  __ push(a1);

  // Store pending message while executing finally block.
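  // Note (explanatory, not in the original source): together with the two
  // pushes above, the three pushes below give the finally block a five-slot
  // save area (result, cooked return address, pending message object, flag,
  // and script); ExitFinallyBlock below pops the same slots in reverse
  // order.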
  // Store pending message while executing finally block.
  ExternalReference pending_message_obj =
      ExternalReference::address_of_pending_message_obj(isolate());
  __ li(at, Operand(pending_message_obj));
  __ lw(a1, MemOperand(at));
  __ push(a1);

  ExternalReference has_pending_message =
      ExternalReference::address_of_has_pending_message(isolate());
  __ li(at, Operand(has_pending_message));
  __ lw(a1, MemOperand(at));
  __ SmiTag(a1);
  __ push(a1);

  ExternalReference pending_message_script =
      ExternalReference::address_of_pending_message_script(isolate());
  __ li(at, Operand(pending_message_script));
  __ lw(a1, MemOperand(at));
  __ push(a1);
}


void FullCodeGenerator::ExitFinallyBlock() {
  DCHECK(!result_register().is(a1));
  // Restore pending message from stack.
  __ pop(a1);
  ExternalReference pending_message_script =
      ExternalReference::address_of_pending_message_script(isolate());
  __ li(at, Operand(pending_message_script));
  __ sw(a1, MemOperand(at));

  __ pop(a1);
  __ SmiUntag(a1);
  ExternalReference has_pending_message =
      ExternalReference::address_of_has_pending_message(isolate());
  __ li(at, Operand(has_pending_message));
  __ sw(a1, MemOperand(at));

  __ pop(a1);
  ExternalReference pending_message_obj =
      ExternalReference::address_of_pending_message_obj(isolate());
  __ li(at, Operand(pending_message_obj));
  __ sw(a1, MemOperand(at));

  // Restore the cooked return address from the stack.
  __ pop(a1);

  // Restore the result register from the stack.
  __ pop(result_register());

  // Uncook the return address and return.
  DCHECK_EQ(1, kSmiTagSize + kSmiShiftSize);
  __ sra(a1, a1, 1);  // Un-smi-tag value.
  __ Addu(at, a1, Operand(masm_->CodeObject()));
  __ Jump(at);
}


#undef __

#define __ ACCESS_MASM(masm())
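
// On a non-local transfer out of a try..finally (break, continue, or
// return), Exit unwinds the operand stack down to the try handler and then
// enters the finally code as a subroutine: the Call below records a return
// address in ra, which EnterFinallyBlock saves (cooked) so that the finally
// block can return here before the transfer continues to its real target.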
FullCodeGenerator::NestedStatement* FullCodeGenerator::TryFinally::Exit(
    int* stack_depth,
    int* context_length) {
  // The macros used here must preserve the result register.

  // Because the handler block contains the context of the finally
  // code, we can restore it directly from there for the finally code
  // rather than iteratively unwinding contexts via their previous
  // links.
  __ Drop(*stack_depth);  // Down to the handler block.
  if (*context_length > 0) {
    // Restore the context to its dedicated register and the stack.
    __ lw(cp, MemOperand(sp, StackHandlerConstants::kContextOffset));
    __ sw(cp, MemOperand(fp, StandardFrameConstants::kContextOffset));
  }
  __ PopTryHandler();
  __ Call(finally_entry_);

  *stack_depth = 0;
  *context_length = 0;
  return previous_;
}


#undef __


void BackEdgeTable::PatchAt(Code* unoptimized_code,
                            Address pc,
                            BackEdgeState target_state,
                            Code* replacement_code) {
  static const int kInstrSize = Assembler::kInstrSize;
  Address branch_address = pc - 6 * kInstrSize;
  CodePatcher patcher(branch_address, 1);

  switch (target_state) {
    case INTERRUPT:
      // slt  at, a3, zero_reg (in case of count based interrupts)
      // beq  at, zero_reg, ok
      // lui  t9, <interrupt stub address> upper
      // ori  t9, <interrupt stub address> lower
      // jalr t9
      // nop
      // ok-label ----- pc_after points here
      patcher.masm()->slt(at, a3, zero_reg);
      break;
    case ON_STACK_REPLACEMENT:
    case OSR_AFTER_STACK_CHECK:
      // addiu at, zero_reg, 1
      // beq   at, zero_reg, ok  ;; Not changed
      // lui   t9, <on-stack replacement address> upper
      // ori   t9, <on-stack replacement address> lower
      // jalr  t9  ;; Not changed
      // nop  ;; Not changed
      // ok-label ----- pc_after points here
      patcher.masm()->addiu(at, zero_reg, 1);
      break;
  }
  Address pc_immediate_load_address = pc - 4 * kInstrSize;
  // Replace the stack check address in the load-immediate (lui/ori pair)
  // with the entry address of the replacement code.
  Assembler::set_target_address_at(pc_immediate_load_address,
                                   replacement_code->entry());

  unoptimized_code->GetHeap()->incremental_marking()->RecordCodeTargetPatch(
      unoptimized_code, pc_immediate_load_address, replacement_code);
}


BackEdgeTable::BackEdgeState BackEdgeTable::GetBackEdgeState(
    Isolate* isolate,
    Code* unoptimized_code,
    Address pc) {
  static const int kInstrSize = Assembler::kInstrSize;
  Address branch_address = pc - 6 * kInstrSize;
  Address pc_immediate_load_address = pc - 4 * kInstrSize;

  DCHECK(Assembler::IsBeq(Assembler::instr_at(pc - 5 * kInstrSize)));
  if (!Assembler::IsAddImmediate(Assembler::instr_at(branch_address))) {
    DCHECK(reinterpret_cast<uint32_t>(
               Assembler::target_address_at(pc_immediate_load_address)) ==
           reinterpret_cast<uint32_t>(
               isolate->builtins()->InterruptCheck()->entry()));
    return INTERRUPT;
  }

  DCHECK(Assembler::IsAddImmediate(Assembler::instr_at(branch_address)));

  if (reinterpret_cast<uint32_t>(
          Assembler::target_address_at(pc_immediate_load_address)) ==
      reinterpret_cast<uint32_t>(
          isolate->builtins()->OnStackReplacement()->entry())) {
    return ON_STACK_REPLACEMENT;
  }

  DCHECK(reinterpret_cast<uint32_t>(
             Assembler::target_address_at(pc_immediate_load_address)) ==
         reinterpret_cast<uint32_t>(
             isolate->builtins()->OsrAfterStackCheck()->entry()));
  return OSR_AFTER_STACK_CHECK;
}


} }  // namespace v8::internal

#endif  // V8_TARGET_ARCH_MIPS