// Copyright 2012 the V8 project authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.

#include "src/v8.h"

#if V8_TARGET_ARCH_ARM

#include "src/code-factory.h"
#include "src/code-stubs.h"
#include "src/codegen.h"
#include "src/compiler.h"
#include "src/debug.h"
#include "src/full-codegen.h"
#include "src/ic/ic.h"
#include "src/isolate-inl.h"
#include "src/parser.h"
#include "src/scopes.h"

#include "src/arm/code-stubs-arm.h"
#include "src/arm/macro-assembler-arm.h"

namespace v8 {
namespace internal {

#define __ ACCESS_MASM(masm_)


// A patch site is a location in the code that can be patched. This class has
// a number of methods to emit the patchable code and the method EmitPatchInfo
// to record a marker back to the patchable code. This marker is a
// cmp rx, #yyy instruction, and x * 0x00000fff + yyy (the raw 12 bit
// immediate value) is the delta from the pc to the first instruction of the
// patchable code.
class JumpPatchSite BASE_EMBEDDED {
 public:
  explicit JumpPatchSite(MacroAssembler* masm) : masm_(masm) {
#ifdef DEBUG
    info_emitted_ = false;
#endif
  }

  ~JumpPatchSite() {
    DCHECK(patch_site_.is_bound() == info_emitted_);
  }

  // When initially emitting this, ensure that a jump is always generated to
  // skip the inlined smi code.
  void EmitJumpIfNotSmi(Register reg, Label* target) {
    DCHECK(!patch_site_.is_bound() && !info_emitted_);
    Assembler::BlockConstPoolScope block_const_pool(masm_);
    __ bind(&patch_site_);
    __ cmp(reg, Operand(reg));
    __ b(eq, target);  // Always taken before patched.
  }

  // When initially emitting this, ensure that a jump is never generated to
  // skip the inlined smi code.
  void EmitJumpIfSmi(Register reg, Label* target) {
    DCHECK(!patch_site_.is_bound() && !info_emitted_);
    Assembler::BlockConstPoolScope block_const_pool(masm_);
    __ bind(&patch_site_);
    __ cmp(reg, Operand(reg));
    __ b(ne, target);  // Never taken before patched.
  }

  void EmitPatchInfo() {
    // Block literal pool emission whilst recording patch site information.
    Assembler::BlockConstPoolScope block_const_pool(masm_);
    if (patch_site_.is_bound()) {
      int delta_to_patch_site = masm_->InstructionsGeneratedSince(&patch_site_);
      Register reg;
      reg.set_code(delta_to_patch_site / kOff12Mask);
      __ cmp_raw_immediate(reg, delta_to_patch_site % kOff12Mask);
#ifdef DEBUG
      info_emitted_ = true;
#endif
    } else {
      __ nop();  // Signals no inlined code.
    }
  }

 private:
  MacroAssembler* masm_;
  Label patch_site_;
#ifdef DEBUG
  bool info_emitted_;
#endif
};
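
// Illustrative example (assuming kOff12Mask == 0xfff, the 12-bit immediate
// mask): a patch site 5000 instructions before the marker would be recorded
// by EmitPatchInfo as "cmp r1, #905", since 5000 == 1 * 0xfff + 905; the
// patcher recovers the delta from the register code and the raw 12-bit
// immediate.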

// Generate code for a JS function. On entry to the function the receiver
// and arguments have been pushed on the stack left to right. The actual
// argument count matches the formal parameter count expected by the
// function.
//
// The live registers are:
//   o r1: the JS function object being called (i.e., ourselves)
//   o cp: our context
//   o pp: our caller's constant pool pointer (if FLAG_enable_ool_constant_pool)
//   o fp: our caller's frame pointer
//   o sp: stack pointer
//   o lr: return address
//
// The function builds a JS frame. Please see JavaScriptFrameConstants in
// frames-arm.h for its layout.
void FullCodeGenerator::Generate() {
  CompilationInfo* info = info_;
  handler_table_ =
      isolate()->factory()->NewFixedArray(function()->handler_count(), TENURED);

  profiling_counter_ = isolate()->factory()->NewCell(
      Handle<Smi>(Smi::FromInt(FLAG_interrupt_budget), isolate()));
  SetFunctionPosition(function());
  Comment cmnt(masm_, "[ function compiled by full code generator");

  ProfileEntryHookStub::MaybeCallEntryHook(masm_);

#ifdef DEBUG
  if (strlen(FLAG_stop_at) > 0 &&
      info->function()->name()->IsUtf8EqualTo(CStrVector(FLAG_stop_at))) {
    __ stop("stop-at");
  }
#endif

  // Sloppy mode functions and builtins need to replace the receiver with the
  // global proxy when called as functions (without an explicit receiver
  // object).
  if (info->strict_mode() == SLOPPY && !info->is_native()) {
    Label ok;
    int receiver_offset = info->scope()->num_parameters() * kPointerSize;
    __ ldr(r2, MemOperand(sp, receiver_offset));
    __ CompareRoot(r2, Heap::kUndefinedValueRootIndex);
    __ b(ne, &ok);

    __ ldr(r2, GlobalObjectOperand());
    __ ldr(r2, FieldMemOperand(r2, GlobalObject::kGlobalProxyOffset));

    __ str(r2, MemOperand(sp, receiver_offset));

    __ bind(&ok);
  }

  // Open a frame scope to indicate that there is a frame on the stack. The
  // MANUAL indicates that the scope shouldn't actually generate code to set up
  // the frame (that is done below).
  FrameScope frame_scope(masm_, StackFrame::MANUAL);

  info->set_prologue_offset(masm_->pc_offset());
  __ Prologue(info->IsCodePreAgingActive());
  info->AddNoFrameRange(0, masm_->pc_offset());

  { Comment cmnt(masm_, "[ Allocate locals");
    int locals_count = info->scope()->num_stack_slots();
    // Generators allocate locals, if any, in context slots.
    DCHECK(!info->function()->is_generator() || locals_count == 0);
    if (locals_count > 0) {
      if (locals_count >= 128) {
        Label ok;
        __ sub(r9, sp, Operand(locals_count * kPointerSize));
        __ LoadRoot(r2, Heap::kRealStackLimitRootIndex);
        __ cmp(r9, Operand(r2));
        __ b(hs, &ok);
        __ InvokeBuiltin(Builtins::STACK_OVERFLOW, CALL_FUNCTION);
        __ bind(&ok);
      }
      __ LoadRoot(r9, Heap::kUndefinedValueRootIndex);
      int kMaxPushes = FLAG_optimize_for_size ? 4 : 32;
      if (locals_count >= kMaxPushes) {
        int loop_iterations = locals_count / kMaxPushes;
        __ mov(r2, Operand(loop_iterations));
        Label loop_header;
        __ bind(&loop_header);
        // Do pushes.
        for (int i = 0; i < kMaxPushes; i++) {
          __ push(r9);
        }
        // Continue loop if not done.
        __ sub(r2, r2, Operand(1), SetCC);
        __ b(&loop_header, ne);
      }
      int remaining = locals_count % kMaxPushes;
      // Emit the remaining pushes.
      for (int i = 0; i < remaining; i++) {
        __ push(r9);
      }
    }
  }

  bool function_in_register = true;

  // Possibly allocate a local context.
  int heap_slots = info->scope()->num_heap_slots() - Context::MIN_CONTEXT_SLOTS;
  if (heap_slots > 0) {
    // Argument to NewContext is the function, which is still in r1.
    Comment cmnt(masm_, "[ Allocate context");
    bool need_write_barrier = true;
    if (FLAG_harmony_scoping && info->scope()->is_global_scope()) {
      __ push(r1);
      __ Push(info->scope()->GetScopeInfo());
      __ CallRuntime(Runtime::kNewGlobalContext, 2);
    } else if (heap_slots <= FastNewContextStub::kMaximumSlots) {
      FastNewContextStub stub(isolate(), heap_slots);
      __ CallStub(&stub);
      // Result of FastNewContextStub is always in new space.
      need_write_barrier = false;
    } else {
      __ push(r1);
      __ CallRuntime(Runtime::kNewFunctionContext, 1);
    }
    function_in_register = false;
    // Context is returned in r0. It replaces the context passed to us.
    // It's saved in the stack and kept live in cp.
    __ mov(cp, r0);
    __ str(r0, MemOperand(fp, StandardFrameConstants::kContextOffset));
    // Copy any necessary parameters into the context.
    int num_parameters = info->scope()->num_parameters();
    for (int i = 0; i < num_parameters; i++) {
      Variable* var = scope()->parameter(i);
      if (var->IsContextSlot()) {
        int parameter_offset = StandardFrameConstants::kCallerSPOffset +
            (num_parameters - 1 - i) * kPointerSize;
        // Load parameter from stack.
        __ ldr(r0, MemOperand(fp, parameter_offset));
        // Store it in the context.
        MemOperand target = ContextOperand(cp, var->index());
        __ str(r0, target);

        // Update the write barrier.
        if (need_write_barrier) {
          __ RecordWriteContextSlot(
              cp, target.offset(), r0, r3, kLRHasBeenSaved, kDontSaveFPRegs);
        } else if (FLAG_debug_code) {
          Label done;
          __ JumpIfInNewSpace(cp, r0, &done);
          __ Abort(kExpectedNewSpaceObject);
          __ bind(&done);
        }
      }
    }
  }

  Variable* arguments = scope()->arguments();
  if (arguments != NULL) {
    // Function uses arguments object.
    Comment cmnt(masm_, "[ Allocate arguments object");
    if (!function_in_register) {
      // Load this again, if it's used by the local context below.
      __ ldr(r3, MemOperand(fp, JavaScriptFrameConstants::kFunctionOffset));
    } else {
      __ mov(r3, r1);
    }
    // Receiver is just before the parameters on the caller's stack.
    int num_parameters = info->scope()->num_parameters();
    int offset = num_parameters * kPointerSize;
    __ add(r2, fp,
           Operand(StandardFrameConstants::kCallerSPOffset + offset));
    __ mov(r1, Operand(Smi::FromInt(num_parameters)));
    __ Push(r3, r2, r1);

    // Arguments to ArgumentsAccessStub:
    //   function, receiver address, parameter count.
    // The stub will rewrite receiver and parameter count if the previous
    // stack frame was an arguments adapter frame.
    ArgumentsAccessStub::Type type;
    if (strict_mode() == STRICT) {
      type = ArgumentsAccessStub::NEW_STRICT;
    } else if (function()->has_duplicate_parameters()) {
      type = ArgumentsAccessStub::NEW_SLOPPY_SLOW;
    } else {
      type = ArgumentsAccessStub::NEW_SLOPPY_FAST;
    }
    ArgumentsAccessStub stub(isolate(), type);
    __ CallStub(&stub);

    SetVar(arguments, r0, r1, r2);
  }

  if (FLAG_trace) {
    __ CallRuntime(Runtime::kTraceEnter, 0);
  }

  // Visit the declarations and body unless there is an illegal
  // redeclaration.
  if (scope()->HasIllegalRedeclaration()) {
    Comment cmnt(masm_, "[ Declarations");
    scope()->VisitIllegalRedeclaration(this);

  } else {
    PrepareForBailoutForId(BailoutId::FunctionEntry(), NO_REGISTERS);
    { Comment cmnt(masm_, "[ Declarations");
      // For named function expressions, declare the function name as a
      // constant.
      if (scope()->is_function_scope() && scope()->function() != NULL) {
        VariableDeclaration* function = scope()->function();
        DCHECK(function->proxy()->var()->mode() == CONST ||
               function->proxy()->var()->mode() == CONST_LEGACY);
        DCHECK(function->proxy()->var()->location() != Variable::UNALLOCATED);
        VisitVariableDeclaration(function);
      }
      VisitDeclarations(scope()->declarations());
    }

    { Comment cmnt(masm_, "[ Stack check");
      PrepareForBailoutForId(BailoutId::Declarations(), NO_REGISTERS);
      Label ok;
      __ LoadRoot(ip, Heap::kStackLimitRootIndex);
      __ cmp(sp, Operand(ip));
      __ b(hs, &ok);
      Handle<Code> stack_check = isolate()->builtins()->StackCheck();
      PredictableCodeSizeScope predictable(masm_,
          masm_->CallSize(stack_check, RelocInfo::CODE_TARGET));
      __ Call(stack_check, RelocInfo::CODE_TARGET);
      __ bind(&ok);
    }

    { Comment cmnt(masm_, "[ Body");
      DCHECK(loop_depth() == 0);
      VisitStatements(function()->body());
      DCHECK(loop_depth() == 0);
    }
  }

  // Always emit a 'return undefined' in case control fell off the end of
  // the body.
  { Comment cmnt(masm_, "[ return <undefined>;");
    __ LoadRoot(r0, Heap::kUndefinedValueRootIndex);
  }
  EmitReturnSequence();

  // Force emit the constant pool, so it doesn't get emitted in the middle
  // of the back edge table.
  masm()->CheckConstPool(true, false);
}


void FullCodeGenerator::ClearAccumulator() {
  __ mov(r0, Operand(Smi::FromInt(0)));
}


void FullCodeGenerator::EmitProfilingCounterDecrement(int delta) {
  __ mov(r2, Operand(profiling_counter_));
  __ ldr(r3, FieldMemOperand(r2, Cell::kValueOffset));
  __ sub(r3, r3, Operand(Smi::FromInt(delta)), SetCC);
  __ str(r3, FieldMemOperand(r2, Cell::kValueOffset));
}
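
// Note: EmitProfilingCounterDecrement uses SetCC so that callers can branch
// on the resulting flags. EmitBackEdgeBookkeeping and EmitReturnSequence
// below branch on 'pl' (counter still non-negative) and otherwise call the
// InterruptCheck builtin before resetting the counter with
// EmitProfilingCounterReset.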

#ifdef CAN_USE_ARMV7_INSTRUCTIONS
static const int kProfileCounterResetSequenceLength = 5 * Assembler::kInstrSize;
#else
static const int kProfileCounterResetSequenceLength = 7 * Assembler::kInstrSize;
#endif


void FullCodeGenerator::EmitProfilingCounterReset() {
  Assembler::BlockConstPoolScope block_const_pool(masm_);
  PredictableCodeSizeScope predictable_code_size_scope(
      masm_, kProfileCounterResetSequenceLength);
  Label start;
  __ bind(&start);
  int reset_value = FLAG_interrupt_budget;
  if (info_->is_debug()) {
    // Detect debug break requests as soon as possible.
    reset_value = FLAG_interrupt_budget >> 4;
  }
  __ mov(r2, Operand(profiling_counter_));
  // The mov instruction above can be either 1 to 3 (for ARMv7) or 1 to 5
  // instructions (for ARMv6) depending upon whether it is an extended constant
  // pool - insert nop to compensate.
  int expected_instr_count =
      (kProfileCounterResetSequenceLength / Assembler::kInstrSize) - 2;
  DCHECK(masm_->InstructionsGeneratedSince(&start) <= expected_instr_count);
  while (masm_->InstructionsGeneratedSince(&start) != expected_instr_count) {
    __ nop();
  }
  __ mov(r3, Operand(Smi::FromInt(reset_value)));
  __ str(r3, FieldMemOperand(r2, Cell::kValueOffset));
}


void FullCodeGenerator::EmitBackEdgeBookkeeping(IterationStatement* stmt,
                                                Label* back_edge_target) {
  Comment cmnt(masm_, "[ Back edge bookkeeping");
  // Block literal pools whilst emitting back edge code.
  Assembler::BlockConstPoolScope block_const_pool(masm_);
  Label ok;

  DCHECK(back_edge_target->is_bound());
  int distance = masm_->SizeOfCodeGeneratedSince(back_edge_target);
  int weight = Min(kMaxBackEdgeWeight,
                   Max(1, distance / kCodeSizeMultiplier));
  EmitProfilingCounterDecrement(weight);
  __ b(pl, &ok);
  __ Call(isolate()->builtins()->InterruptCheck(), RelocInfo::CODE_TARGET);

  // Record a mapping of this PC offset to the OSR id. This is used to find
  // the AST id from the unoptimized code in order to use it as a key into
  // the deoptimization input data found in the optimized code.
  RecordBackEdge(stmt->OsrEntryId());

  EmitProfilingCounterReset();

  __ bind(&ok);
  PrepareForBailoutForId(stmt->EntryId(), NO_REGISTERS);
  // Record a mapping of the OSR id to this PC. This is used if the OSR
  // entry becomes the target of a bailout. We don't expect it to be, but
  // we want it to work if it is.
  PrepareForBailoutForId(stmt->OsrEntryId(), NO_REGISTERS);
}


void FullCodeGenerator::EmitReturnSequence() {
  Comment cmnt(masm_, "[ Return sequence");
  if (return_label_.is_bound()) {
    __ b(&return_label_);
  } else {
    __ bind(&return_label_);
    if (FLAG_trace) {
      // Push the return value on the stack as the parameter.
      // Runtime::TraceExit returns its parameter in r0.
      __ push(r0);
      __ CallRuntime(Runtime::kTraceExit, 1);
    }
    // Pretend that the exit is a backwards jump to the entry.
    int weight = 1;
    if (info_->ShouldSelfOptimize()) {
      weight = FLAG_interrupt_budget / FLAG_self_opt_count;
    } else {
      int distance = masm_->pc_offset();
      weight = Min(kMaxBackEdgeWeight,
                   Max(1, distance / kCodeSizeMultiplier));
    }
    EmitProfilingCounterDecrement(weight);
    Label ok;
    __ b(pl, &ok);
    __ push(r0);
    __ Call(isolate()->builtins()->InterruptCheck(),
            RelocInfo::CODE_TARGET);
    __ pop(r0);
    EmitProfilingCounterReset();
    __ bind(&ok);

#ifdef DEBUG
    // Add a label for checking the size of the code used for returning.
    Label check_exit_codesize;
    __ bind(&check_exit_codesize);
#endif
    // Make sure that the constant pool is not emitted inside of the return
    // sequence.
    { Assembler::BlockConstPoolScope block_const_pool(masm_);
      int32_t sp_delta = (info_->scope()->num_parameters() + 1) * kPointerSize;
      CodeGenerator::RecordPositions(masm_, function()->end_position() - 1);
      // TODO(svenpanne) The code below is sometimes 4 words, sometimes 5!
      PredictableCodeSizeScope predictable(masm_, -1);
      __ RecordJSReturn();
      int no_frame_start = __ LeaveFrame(StackFrame::JAVA_SCRIPT);
      { ConstantPoolUnavailableScope constant_pool_unavailable(masm_);
        __ add(sp, sp, Operand(sp_delta));
        __ Jump(lr);
        info_->AddNoFrameRange(no_frame_start, masm_->pc_offset());
      }
    }

#ifdef DEBUG
    // Check that the size of the code used for returning is large enough
    // for the debugger's requirements.
    DCHECK(Assembler::kJSReturnSequenceInstructions <=
           masm_->InstructionsGeneratedSince(&check_exit_codesize));
#endif
  }
}
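
// The Plug/DropAndPlug overloads below implement full-codegen's expression
// contexts: EffectContext discards the value, AccumulatorValueContext leaves
// it in the result register (r0), StackValueContext pushes it onto the stack,
// and TestContext branches on it via DoTest.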

void FullCodeGenerator::EffectContext::Plug(Variable* var) const {
  DCHECK(var->IsStackAllocated() || var->IsContextSlot());
}


void FullCodeGenerator::AccumulatorValueContext::Plug(Variable* var) const {
  DCHECK(var->IsStackAllocated() || var->IsContextSlot());
  codegen()->GetVar(result_register(), var);
}


void FullCodeGenerator::StackValueContext::Plug(Variable* var) const {
  DCHECK(var->IsStackAllocated() || var->IsContextSlot());
  codegen()->GetVar(result_register(), var);
  __ push(result_register());
}


void FullCodeGenerator::TestContext::Plug(Variable* var) const {
  DCHECK(var->IsStackAllocated() || var->IsContextSlot());
  // For simplicity we always test the accumulator register.
  codegen()->GetVar(result_register(), var);
  codegen()->PrepareForBailoutBeforeSplit(condition(), false, NULL, NULL);
  codegen()->DoTest(this);
}


void FullCodeGenerator::EffectContext::Plug(Heap::RootListIndex index) const {
}


void FullCodeGenerator::AccumulatorValueContext::Plug(
    Heap::RootListIndex index) const {
  __ LoadRoot(result_register(), index);
}


void FullCodeGenerator::StackValueContext::Plug(
    Heap::RootListIndex index) const {
  __ LoadRoot(result_register(), index);
  __ push(result_register());
}


void FullCodeGenerator::TestContext::Plug(Heap::RootListIndex index) const {
  codegen()->PrepareForBailoutBeforeSplit(condition(),
                                          true,
                                          true_label_,
                                          false_label_);
  if (index == Heap::kUndefinedValueRootIndex ||
      index == Heap::kNullValueRootIndex ||
      index == Heap::kFalseValueRootIndex) {
    if (false_label_ != fall_through_) __ b(false_label_);
  } else if (index == Heap::kTrueValueRootIndex) {
    if (true_label_ != fall_through_) __ b(true_label_);
  } else {
    __ LoadRoot(result_register(), index);
    codegen()->DoTest(this);
  }
}


void FullCodeGenerator::EffectContext::Plug(Handle<Object> lit) const {
}


void FullCodeGenerator::AccumulatorValueContext::Plug(
    Handle<Object> lit) const {
  __ mov(result_register(), Operand(lit));
}


void FullCodeGenerator::StackValueContext::Plug(Handle<Object> lit) const {
  // Immediates cannot be pushed directly.
  __ mov(result_register(), Operand(lit));
  __ push(result_register());
}


void FullCodeGenerator::TestContext::Plug(Handle<Object> lit) const {
  codegen()->PrepareForBailoutBeforeSplit(condition(),
                                          true,
                                          true_label_,
                                          false_label_);
  DCHECK(!lit->IsUndetectableObject());  // There are no undetectable literals.
  if (lit->IsUndefined() || lit->IsNull() || lit->IsFalse()) {
    if (false_label_ != fall_through_) __ b(false_label_);
  } else if (lit->IsTrue() || lit->IsJSObject()) {
    if (true_label_ != fall_through_) __ b(true_label_);
  } else if (lit->IsString()) {
    if (String::cast(*lit)->length() == 0) {
      if (false_label_ != fall_through_) __ b(false_label_);
    } else {
      if (true_label_ != fall_through_) __ b(true_label_);
    }
  } else if (lit->IsSmi()) {
    if (Smi::cast(*lit)->value() == 0) {
      if (false_label_ != fall_through_) __ b(false_label_);
    } else {
      if (true_label_ != fall_through_) __ b(true_label_);
    }
  } else {
    // For simplicity we always test the accumulator register.
    __ mov(result_register(), Operand(lit));
    codegen()->DoTest(this);
  }
}


void FullCodeGenerator::EffectContext::DropAndPlug(int count,
                                                   Register reg) const {
  DCHECK(count > 0);
  __ Drop(count);
}


void FullCodeGenerator::AccumulatorValueContext::DropAndPlug(
    int count,
    Register reg) const {
  DCHECK(count > 0);
  __ Drop(count);
  __ Move(result_register(), reg);
}


void FullCodeGenerator::StackValueContext::DropAndPlug(int count,
                                                       Register reg) const {
  DCHECK(count > 0);
  if (count > 1) __ Drop(count - 1);
  __ str(reg, MemOperand(sp, 0));
}


void FullCodeGenerator::TestContext::DropAndPlug(int count,
                                                 Register reg) const {
  DCHECK(count > 0);
  // For simplicity we always test the accumulator register.
  __ Drop(count);
  __ Move(result_register(), reg);
  codegen()->PrepareForBailoutBeforeSplit(condition(), false, NULL, NULL);
  codegen()->DoTest(this);
}


void FullCodeGenerator::EffectContext::Plug(Label* materialize_true,
                                            Label* materialize_false) const {
  DCHECK(materialize_true == materialize_false);
  __ bind(materialize_true);
}


void FullCodeGenerator::AccumulatorValueContext::Plug(
    Label* materialize_true,
    Label* materialize_false) const {
  Label done;
  __ bind(materialize_true);
  __ LoadRoot(result_register(), Heap::kTrueValueRootIndex);
  __ jmp(&done);
  __ bind(materialize_false);
  __ LoadRoot(result_register(), Heap::kFalseValueRootIndex);
  __ bind(&done);
}


void FullCodeGenerator::StackValueContext::Plug(
    Label* materialize_true,
    Label* materialize_false) const {
  Label done;
  __ bind(materialize_true);
  __ LoadRoot(ip, Heap::kTrueValueRootIndex);
  __ jmp(&done);
  __ bind(materialize_false);
  __ LoadRoot(ip, Heap::kFalseValueRootIndex);
  __ bind(&done);
  __ push(ip);
}


void FullCodeGenerator::TestContext::Plug(Label* materialize_true,
                                          Label* materialize_false) const {
  DCHECK(materialize_true == true_label_);
  DCHECK(materialize_false == false_label_);
}


void FullCodeGenerator::EffectContext::Plug(bool flag) const {
}


void FullCodeGenerator::AccumulatorValueContext::Plug(bool flag) const {
  Heap::RootListIndex value_root_index =
      flag ? Heap::kTrueValueRootIndex : Heap::kFalseValueRootIndex;
  __ LoadRoot(result_register(), value_root_index);
}


void FullCodeGenerator::StackValueContext::Plug(bool flag) const {
  Heap::RootListIndex value_root_index =
      flag ? Heap::kTrueValueRootIndex : Heap::kFalseValueRootIndex;
  __ LoadRoot(ip, value_root_index);
  __ push(ip);
}


void FullCodeGenerator::TestContext::Plug(bool flag) const {
  codegen()->PrepareForBailoutBeforeSplit(condition(),
                                          true,
                                          true_label_,
                                          false_label_);
  if (flag) {
    if (true_label_ != fall_through_) __ b(true_label_);
  } else {
    if (false_label_ != fall_through_) __ b(false_label_);
  }
}


void FullCodeGenerator::DoTest(Expression* condition,
                               Label* if_true,
                               Label* if_false,
                               Label* fall_through) {
  Handle<Code> ic = ToBooleanStub::GetUninitialized(isolate());
  CallIC(ic, condition->test_id());
  __ tst(result_register(), result_register());
  Split(ne, if_true, if_false, fall_through);
}


void FullCodeGenerator::Split(Condition cond,
                              Label* if_true,
                              Label* if_false,
                              Label* fall_through) {
  if (if_false == fall_through) {
    __ b(cond, if_true);
  } else if (if_true == fall_through) {
    __ b(NegateCondition(cond), if_false);
  } else {
    __ b(cond, if_true);
    __ b(if_false);
  }
}
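
// Note: Split (above) never emits a branch to the fall-through target. For
// example, Split(eq, if_true, if_false, if_false) emits a single
// "b eq, if_true" and simply falls through to the false case.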

MemOperand FullCodeGenerator::StackOperand(Variable* var) {
  DCHECK(var->IsStackAllocated());
  // Offset is negative because higher indexes are at lower addresses.
  int offset = -var->index() * kPointerSize;
  // Adjust by a (parameter or local) base offset.
  if (var->IsParameter()) {
    offset += (info_->scope()->num_parameters() + 1) * kPointerSize;
  } else {
    offset += JavaScriptFrameConstants::kLocal0Offset;
  }
  return MemOperand(fp, offset);
}


MemOperand FullCodeGenerator::VarOperand(Variable* var, Register scratch) {
  DCHECK(var->IsContextSlot() || var->IsStackAllocated());
  if (var->IsContextSlot()) {
    int context_chain_length = scope()->ContextChainLength(var->scope());
    __ LoadContext(scratch, context_chain_length);
    return ContextOperand(scratch, var->index());
  } else {
    return StackOperand(var);
  }
}


void FullCodeGenerator::GetVar(Register dest, Variable* var) {
  // Use destination as scratch.
  MemOperand location = VarOperand(var, dest);
  __ ldr(dest, location);
}


void FullCodeGenerator::SetVar(Variable* var,
                               Register src,
                               Register scratch0,
                               Register scratch1) {
  DCHECK(var->IsContextSlot() || var->IsStackAllocated());
  DCHECK(!scratch0.is(src));
  DCHECK(!scratch0.is(scratch1));
  DCHECK(!scratch1.is(src));
  MemOperand location = VarOperand(var, scratch0);
  __ str(src, location);

  // Emit the write barrier code if the location is in the heap.
  if (var->IsContextSlot()) {
    __ RecordWriteContextSlot(scratch0,
                              location.offset(),
                              src,
                              scratch1,
                              kLRHasBeenSaved,
                              kDontSaveFPRegs);
  }
}


void FullCodeGenerator::PrepareForBailoutBeforeSplit(Expression* expr,
                                                     bool should_normalize,
                                                     Label* if_true,
                                                     Label* if_false) {
  // Only prepare for bailouts before splits if we're in a test
  // context. Otherwise, we let the Visit function deal with the
  // preparation to avoid preparing with the same AST id twice.
  if (!context()->IsTest() || !info_->IsOptimizable()) return;

  Label skip;
  if (should_normalize) __ b(&skip);
  PrepareForBailout(expr, TOS_REG);
  if (should_normalize) {
    __ LoadRoot(ip, Heap::kTrueValueRootIndex);
    __ cmp(r0, ip);
    Split(eq, if_true, if_false, NULL);
    __ bind(&skip);
  }
}


void FullCodeGenerator::EmitDebugCheckDeclarationContext(Variable* variable) {
  // The variable in the declaration always resides in the current function
  // context.
  DCHECK_EQ(0, scope()->ContextChainLength(variable->scope()));
  if (generate_debug_code_) {
    // Check that we're not inside a with or catch context.
    __ ldr(r1, FieldMemOperand(cp, HeapObject::kMapOffset));
    __ CompareRoot(r1, Heap::kWithContextMapRootIndex);
    __ Check(ne, kDeclarationInWithContext);
    __ CompareRoot(r1, Heap::kCatchContextMapRootIndex);
    __ Check(ne, kDeclarationInCatchContext);
  }
}


void FullCodeGenerator::VisitVariableDeclaration(
    VariableDeclaration* declaration) {
  // If it was not possible to allocate the variable at compile time, we
  // need to "declare" it at runtime to make sure it actually exists in the
  // local context.
  VariableProxy* proxy = declaration->proxy();
  VariableMode mode = declaration->mode();
  Variable* variable = proxy->var();
  bool hole_init = mode == LET || mode == CONST || mode == CONST_LEGACY;
  switch (variable->location()) {
    case Variable::UNALLOCATED:
      globals_->Add(variable->name(), zone());
      globals_->Add(variable->binding_needs_init()
                        ? isolate()->factory()->the_hole_value()
                        : isolate()->factory()->undefined_value(),
                    zone());
      break;

    case Variable::PARAMETER:
    case Variable::LOCAL:
      if (hole_init) {
        Comment cmnt(masm_, "[ VariableDeclaration");
        __ LoadRoot(ip, Heap::kTheHoleValueRootIndex);
        __ str(ip, StackOperand(variable));
      }
      break;

    case Variable::CONTEXT:
      if (hole_init) {
        Comment cmnt(masm_, "[ VariableDeclaration");
        EmitDebugCheckDeclarationContext(variable);
        __ LoadRoot(ip, Heap::kTheHoleValueRootIndex);
        __ str(ip, ContextOperand(cp, variable->index()));
        // No write barrier since the_hole_value is in old space.
        PrepareForBailoutForId(proxy->id(), NO_REGISTERS);
      }
      break;

    case Variable::LOOKUP: {
      Comment cmnt(masm_, "[ VariableDeclaration");
      __ mov(r2, Operand(variable->name()));
      // Declaration nodes are always introduced in one of four modes.
      DCHECK(IsDeclaredVariableMode(mode));
      PropertyAttributes attr =
          IsImmutableVariableMode(mode) ? READ_ONLY : NONE;
      __ mov(r1, Operand(Smi::FromInt(attr)));
      // Push initial value, if any.
      // Note: For variables we must not push an initial value (such as
      // 'undefined') because we may have a (legal) redeclaration and we
      // must not destroy the current value.
      if (hole_init) {
        __ LoadRoot(r0, Heap::kTheHoleValueRootIndex);
        __ Push(cp, r2, r1, r0);
      } else {
        __ mov(r0, Operand(Smi::FromInt(0)));  // Indicates no initial value.
        __ Push(cp, r2, r1, r0);
      }
      __ CallRuntime(Runtime::kDeclareLookupSlot, 4);
      break;
    }
  }
}


void FullCodeGenerator::VisitFunctionDeclaration(
    FunctionDeclaration* declaration) {
  VariableProxy* proxy = declaration->proxy();
  Variable* variable = proxy->var();
  switch (variable->location()) {
    case Variable::UNALLOCATED: {
      globals_->Add(variable->name(), zone());
      Handle<SharedFunctionInfo> function =
          Compiler::BuildFunctionInfo(declaration->fun(), script(), info_);
      // Check for stack-overflow exception.
      if (function.is_null()) return SetStackOverflow();
      globals_->Add(function, zone());
      break;
    }

    case Variable::PARAMETER:
    case Variable::LOCAL: {
      Comment cmnt(masm_, "[ FunctionDeclaration");
      VisitForAccumulatorValue(declaration->fun());
      __ str(result_register(), StackOperand(variable));
      break;
    }

    case Variable::CONTEXT: {
      Comment cmnt(masm_, "[ FunctionDeclaration");
      EmitDebugCheckDeclarationContext(variable);
      VisitForAccumulatorValue(declaration->fun());
      __ str(result_register(), ContextOperand(cp, variable->index()));
      int offset = Context::SlotOffset(variable->index());
      // We know that we have written a function, which is not a smi.
      __ RecordWriteContextSlot(cp,
                                offset,
                                result_register(),
                                r2,
                                kLRHasBeenSaved,
                                kDontSaveFPRegs,
                                EMIT_REMEMBERED_SET,
                                OMIT_SMI_CHECK);
      PrepareForBailoutForId(proxy->id(), NO_REGISTERS);
      break;
    }

    case Variable::LOOKUP: {
      Comment cmnt(masm_, "[ FunctionDeclaration");
      __ mov(r2, Operand(variable->name()));
      __ mov(r1, Operand(Smi::FromInt(NONE)));
      __ Push(cp, r2, r1);
      // Push initial value for function declaration.
      VisitForStackValue(declaration->fun());
      __ CallRuntime(Runtime::kDeclareLookupSlot, 4);
      break;
    }
  }
}


void FullCodeGenerator::VisitModuleDeclaration(ModuleDeclaration* declaration) {
  Variable* variable = declaration->proxy()->var();
  DCHECK(variable->location() == Variable::CONTEXT);
  DCHECK(variable->interface()->IsFrozen());

  Comment cmnt(masm_, "[ ModuleDeclaration");
  EmitDebugCheckDeclarationContext(variable);

  // Load instance object.
  __ LoadContext(r1, scope_->ContextChainLength(scope_->GlobalScope()));
  __ ldr(r1, ContextOperand(r1, variable->interface()->Index()));
  __ ldr(r1, ContextOperand(r1, Context::EXTENSION_INDEX));

  // Assign it.
  __ str(r1, ContextOperand(cp, variable->index()));
  // We know that we have written a module, which is not a smi.
  __ RecordWriteContextSlot(cp,
                            Context::SlotOffset(variable->index()),
                            r1,
                            r3,
                            kLRHasBeenSaved,
                            kDontSaveFPRegs,
                            EMIT_REMEMBERED_SET,
                            OMIT_SMI_CHECK);
  PrepareForBailoutForId(declaration->proxy()->id(), NO_REGISTERS);

  // Traverse into body.
  Visit(declaration->module());
}


void FullCodeGenerator::VisitImportDeclaration(ImportDeclaration* declaration) {
  VariableProxy* proxy = declaration->proxy();
  Variable* variable = proxy->var();
  switch (variable->location()) {
    case Variable::UNALLOCATED:
      // TODO(rossberg)
      break;

    case Variable::CONTEXT: {
      Comment cmnt(masm_, "[ ImportDeclaration");
      EmitDebugCheckDeclarationContext(variable);
      // TODO(rossberg)
      break;
    }

    case Variable::PARAMETER:
    case Variable::LOCAL:
    case Variable::LOOKUP:
      UNREACHABLE();
  }
}


void FullCodeGenerator::VisitExportDeclaration(ExportDeclaration* declaration) {
  // TODO(rossberg)
}


void FullCodeGenerator::DeclareGlobals(Handle<FixedArray> pairs) {
  // Call the runtime to declare the globals.
  // The context is the first argument.
  __ mov(r1, Operand(pairs));
  __ mov(r0, Operand(Smi::FromInt(DeclareGlobalsFlags())));
  __ Push(cp, r1, r0);
  __ CallRuntime(Runtime::kDeclareGlobals, 3);
  // Return value is ignored.
}


void FullCodeGenerator::DeclareModules(Handle<FixedArray> descriptions) {
  // Call the runtime to declare the modules.
  __ Push(descriptions);
  __ CallRuntime(Runtime::kDeclareModules, 1);
  // Return value is ignored.
}


void FullCodeGenerator::VisitSwitchStatement(SwitchStatement* stmt) {
  Comment cmnt(masm_, "[ SwitchStatement");
  Breakable nested_statement(this, stmt);
  SetStatementPosition(stmt);

  // Keep the switch value on the stack until a case matches.
  VisitForStackValue(stmt->tag());
  PrepareForBailoutForId(stmt->EntryId(), NO_REGISTERS);

  ZoneList<CaseClause*>* clauses = stmt->cases();
  CaseClause* default_clause = NULL;  // Can occur anywhere in the list.

  Label next_test;  // Recycled for each test.
  // Compile all the tests with branches to their bodies.
  for (int i = 0; i < clauses->length(); i++) {
    CaseClause* clause = clauses->at(i);
    clause->body_target()->Unuse();

    // The default is not a test, but remember it as final fall through.
    if (clause->is_default()) {
      default_clause = clause;
      continue;
    }

    Comment cmnt(masm_, "[ Case comparison");
    __ bind(&next_test);
    next_test.Unuse();

    // Compile the label expression.
    VisitForAccumulatorValue(clause->label());

    // Perform the comparison as if via '==='.
    __ ldr(r1, MemOperand(sp, 0));  // Switch value.
    bool inline_smi_code = ShouldInlineSmiCase(Token::EQ_STRICT);
    JumpPatchSite patch_site(masm_);
    if (inline_smi_code) {
      Label slow_case;
      __ orr(r2, r1, r0);
      patch_site.EmitJumpIfNotSmi(r2, &slow_case);

      __ cmp(r1, r0);
      __ b(ne, &next_test);
      __ Drop(1);  // Switch value is no longer needed.
      __ b(clause->body_target());
      __ bind(&slow_case);
    }

    // Record position before stub call for type feedback.
    SetSourcePosition(clause->position());
    Handle<Code> ic =
        CodeFactory::CompareIC(isolate(), Token::EQ_STRICT).code();
    CallIC(ic, clause->CompareId());
    patch_site.EmitPatchInfo();

    Label skip;
    __ b(&skip);
    PrepareForBailout(clause, TOS_REG);
    __ LoadRoot(ip, Heap::kTrueValueRootIndex);
    __ cmp(r0, ip);
    __ b(ne, &next_test);
    __ Drop(1);
    __ jmp(clause->body_target());
    __ bind(&skip);

    __ cmp(r0, Operand::Zero());
    __ b(ne, &next_test);
    __ Drop(1);  // Switch value is no longer needed.
    __ b(clause->body_target());
  }

  // Discard the test value and jump to the default if present, otherwise to
  // the end of the statement.
  __ bind(&next_test);
  __ Drop(1);  // Switch value is no longer needed.
  if (default_clause == NULL) {
    __ b(nested_statement.break_label());
  } else {
    __ b(default_clause->body_target());
  }

  // Compile all the case bodies.
  for (int i = 0; i < clauses->length(); i++) {
    Comment cmnt(masm_, "[ Case body");
    CaseClause* clause = clauses->at(i);
    __ bind(clause->body_target());
    PrepareForBailoutForId(clause->EntryId(), NO_REGISTERS);
    VisitStatements(clause->statements());
  }

  __ bind(nested_statement.break_label());
  PrepareForBailoutForId(stmt->ExitId(), NO_REGISTERS);
}
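
// The for-in loop below keeps five values on the stack, from the top: the
// current index (smi), the length (smi), the fixed array of keys, the map of
// the enumerable (or a smi flag in the slow/proxy case), and the enumerable
// object itself. The MemOperand(sp, n * kPointerSize) loads in the loop body
// index into this layout, and Drop(5) removes it on exit.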

void FullCodeGenerator::VisitForInStatement(ForInStatement* stmt) {
  Comment cmnt(masm_, "[ ForInStatement");
  int slot = stmt->ForInFeedbackSlot();
  SetStatementPosition(stmt);

  Label loop, exit;
  ForIn loop_statement(this, stmt);
  increment_loop_depth();

  // Get the object to enumerate over. If the object is null or undefined, skip
  // over the loop. See ECMA-262 version 5, section 12.6.4.
  VisitForAccumulatorValue(stmt->enumerable());
  __ LoadRoot(ip, Heap::kUndefinedValueRootIndex);
  __ cmp(r0, ip);
  __ b(eq, &exit);
  Register null_value = r5;
  __ LoadRoot(null_value, Heap::kNullValueRootIndex);
  __ cmp(r0, null_value);
  __ b(eq, &exit);

  PrepareForBailoutForId(stmt->PrepareId(), TOS_REG);

  // Convert the object to a JS object.
  Label convert, done_convert;
  __ JumpIfSmi(r0, &convert);
  __ CompareObjectType(r0, r1, r1, FIRST_SPEC_OBJECT_TYPE);
  __ b(ge, &done_convert);
  __ bind(&convert);
  __ push(r0);
  __ InvokeBuiltin(Builtins::TO_OBJECT, CALL_FUNCTION);
  __ bind(&done_convert);
  __ push(r0);

  // Check for proxies.
  Label call_runtime;
  STATIC_ASSERT(FIRST_JS_PROXY_TYPE == FIRST_SPEC_OBJECT_TYPE);
  __ CompareObjectType(r0, r1, r1, LAST_JS_PROXY_TYPE);
  __ b(le, &call_runtime);

  // Check cache validity in generated code. This is a fast case for
  // the JSObject::IsSimpleEnum cache validity checks. If we cannot
  // guarantee cache validity, call the runtime system to check cache
  // validity or get the property names in a fixed array.
  __ CheckEnumCache(null_value, &call_runtime);

  // The enum cache is valid. Load the map of the object being
  // iterated over and use the cache for the iteration.
  Label use_cache;
  __ ldr(r0, FieldMemOperand(r0, HeapObject::kMapOffset));
  __ b(&use_cache);

  // Get the set of properties to enumerate.
  __ bind(&call_runtime);
  __ push(r0);  // Duplicate the enumerable object on the stack.
  __ CallRuntime(Runtime::kGetPropertyNamesFast, 1);

  // If we got a map from the runtime call, we can do a fast
  // modification check. Otherwise, we got a fixed array, and we have
  // to do a slow check.
  Label fixed_array;
  __ ldr(r2, FieldMemOperand(r0, HeapObject::kMapOffset));
  __ LoadRoot(ip, Heap::kMetaMapRootIndex);
  __ cmp(r2, ip);
  __ b(ne, &fixed_array);

  // We got a map in register r0. Get the enumeration cache from it.
  Label no_descriptors;
  __ bind(&use_cache);

  __ EnumLength(r1, r0);
  __ cmp(r1, Operand(Smi::FromInt(0)));
  __ b(eq, &no_descriptors);

  __ LoadInstanceDescriptors(r0, r2);
  __ ldr(r2, FieldMemOperand(r2, DescriptorArray::kEnumCacheOffset));
  __ ldr(r2, FieldMemOperand(r2, DescriptorArray::kEnumCacheBridgeCacheOffset));

  // Set up the four remaining stack slots.
  __ push(r0);  // Map.
  __ mov(r0, Operand(Smi::FromInt(0)));
  // Push enumeration cache, enumeration cache length (as smi) and zero.
  __ Push(r2, r1, r0);
  __ jmp(&loop);

  __ bind(&no_descriptors);
  __ Drop(1);
  __ jmp(&exit);

  // We got a fixed array in register r0. Iterate through that.
  Label non_proxy;
  __ bind(&fixed_array);

  __ Move(r1, FeedbackVector());
  __ mov(r2, Operand(TypeFeedbackVector::MegamorphicSentinel(isolate())));
  __ str(r2, FieldMemOperand(r1, FixedArray::OffsetOfElementAt(slot)));

  __ mov(r1, Operand(Smi::FromInt(1)));  // Smi indicates slow check
  __ ldr(r2, MemOperand(sp, 0 * kPointerSize));  // Get enumerated object
  STATIC_ASSERT(FIRST_JS_PROXY_TYPE == FIRST_SPEC_OBJECT_TYPE);
  __ CompareObjectType(r2, r3, r3, LAST_JS_PROXY_TYPE);
  __ b(gt, &non_proxy);
  __ mov(r1, Operand(Smi::FromInt(0)));  // Zero indicates proxy
  __ bind(&non_proxy);
  __ Push(r1, r0);  // Smi and array
  __ ldr(r1, FieldMemOperand(r0, FixedArray::kLengthOffset));
  __ mov(r0, Operand(Smi::FromInt(0)));
  __ Push(r1, r0);  // Fixed array length (as smi) and initial index.

  // Generate code for doing the condition check.
  PrepareForBailoutForId(stmt->BodyId(), NO_REGISTERS);
  __ bind(&loop);
  // Load the current count to r0, load the length to r1.
  __ Ldrd(r0, r1, MemOperand(sp, 0 * kPointerSize));
  __ cmp(r0, r1);  // Compare to the array length.
  __ b(hs, loop_statement.break_label());

  // Get the current entry of the array into register r3.
  __ ldr(r2, MemOperand(sp, 2 * kPointerSize));
  __ add(r2, r2, Operand(FixedArray::kHeaderSize - kHeapObjectTag));
  __ ldr(r3, MemOperand::PointerAddressFromSmiKey(r2, r0));

  // Get the expected map from the stack or a smi in the
  // permanent slow case into register r2.
  __ ldr(r2, MemOperand(sp, 3 * kPointerSize));

  // Check if the expected map still matches that of the enumerable.
  // If not, we may have to filter the key.
  Label update_each;
  __ ldr(r1, MemOperand(sp, 4 * kPointerSize));
  __ ldr(r4, FieldMemOperand(r1, HeapObject::kMapOffset));
  __ cmp(r4, Operand(r2));
  __ b(eq, &update_each);

  // For proxies, no filtering is done.
  // TODO(rossberg): What if only a prototype is a proxy? Not specified yet.
  __ cmp(r2, Operand(Smi::FromInt(0)));
  __ b(eq, &update_each);

  // Convert the entry to a string or (smi) 0 if it isn't a property
  // any more. If the property has been removed while iterating, we
  // just skip it.
  __ push(r1);  // Enumerable.
  __ push(r3);  // Current entry.
  __ InvokeBuiltin(Builtins::FILTER_KEY, CALL_FUNCTION);
  __ mov(r3, Operand(r0), SetCC);
  __ b(eq, loop_statement.continue_label());

  // Update the 'each' property or variable from the possibly filtered
  // entry in register r3.
  __ bind(&update_each);
  __ mov(result_register(), r3);
  // Perform the assignment as if via '='.
  { EffectContext context(this);
    EmitAssignment(stmt->each());
  }

  // Generate code for the body of the loop.
  Visit(stmt->body());

  // Generate code for going to the next element by incrementing
  // the index (smi) stored on top of the stack.
  __ bind(loop_statement.continue_label());
  __ pop(r0);
  __ add(r0, r0, Operand(Smi::FromInt(1)));
  __ push(r0);

  EmitBackEdgeBookkeeping(stmt, &loop);
  __ b(&loop);

  // Remove the pointers stored on the stack.
  __ bind(loop_statement.break_label());
  __ Drop(5);

  // Exit and decrement the loop depth.
  PrepareForBailoutForId(stmt->ExitId(), NO_REGISTERS);
  __ bind(&exit);
  decrement_loop_depth();
}


void FullCodeGenerator::VisitForOfStatement(ForOfStatement* stmt) {
  Comment cmnt(masm_, "[ ForOfStatement");
  SetStatementPosition(stmt);

  Iteration loop_statement(this, stmt);
  increment_loop_depth();

  // var iterator = iterable[Symbol.iterator]();
  VisitForEffect(stmt->assign_iterator());

  // Loop entry.
  __ bind(loop_statement.continue_label());

  // result = iterator.next()
  VisitForEffect(stmt->next_result());

  // if (result.done) break;
  Label result_not_done;
  VisitForControl(stmt->result_done(),
                  loop_statement.break_label(),
                  &result_not_done,
                  &result_not_done);
  __ bind(&result_not_done);

  // each = result.value
  VisitForEffect(stmt->assign_each());

  // Generate code for the body of the loop.
  Visit(stmt->body());

  // Check stack before looping.
  PrepareForBailoutForId(stmt->BackEdgeId(), NO_REGISTERS);
  EmitBackEdgeBookkeeping(stmt, loop_statement.continue_label());
  __ jmp(loop_statement.continue_label());

  // Exit and decrement the loop depth.
  PrepareForBailoutForId(stmt->ExitId(), NO_REGISTERS);
  __ bind(loop_statement.break_label());
  decrement_loop_depth();
}


void FullCodeGenerator::EmitNewClosure(Handle<SharedFunctionInfo> info,
                                       bool pretenure) {
  // Use the fast case closure allocation code that allocates in new
  // space for nested functions that don't need literals cloning. If
  // we're running with the --always-opt or the --prepare-always-opt
  // flag, we need to use the runtime function so that the new function
  // we are creating here gets a chance to have its code optimized and
  // doesn't just get a copy of the existing unoptimized code.
  if (!FLAG_always_opt &&
      !FLAG_prepare_always_opt &&
      !pretenure &&
      scope()->is_function_scope() &&
      info->num_literals() == 0) {
    FastNewClosureStub stub(isolate(), info->strict_mode(), info->kind());
    __ mov(r2, Operand(info));
    __ CallStub(&stub);
  } else {
    __ mov(r0, Operand(info));
    __ LoadRoot(r1, pretenure ? Heap::kTrueValueRootIndex
                              : Heap::kFalseValueRootIndex);
    __ Push(cp, r0, r1);
    __ CallRuntime(Runtime::kNewClosure, 3);
  }
  context()->Plug(r0);
}


void FullCodeGenerator::VisitVariableProxy(VariableProxy* expr) {
  Comment cmnt(masm_, "[ VariableProxy");
  EmitVariableLoad(expr);
}


void FullCodeGenerator::EmitLoadHomeObject(SuperReference* expr) {
  Comment cnmt(masm_, "[ SuperReference ");

  __ ldr(LoadDescriptor::ReceiverRegister(),
         MemOperand(fp, JavaScriptFrameConstants::kFunctionOffset));

  Handle<Symbol> home_object_symbol(isolate()->heap()->home_object_symbol());
  __ Move(LoadDescriptor::NameRegister(), home_object_symbol);

  CallLoadIC(NOT_CONTEXTUAL, expr->HomeObjectFeedbackId());

  __ cmp(r0, Operand(isolate()->factory()->undefined_value()));
  Label done;
  __ b(ne, &done);
  __ CallRuntime(Runtime::kThrowNonMethodError, 0);
  __ bind(&done);
}


void FullCodeGenerator::EmitLoadGlobalCheckExtensions(VariableProxy* proxy,
                                                      TypeofState typeof_state,
                                                      Label* slow) {
  Register current = cp;
  Register next = r1;
  Register temp = r2;

  Scope* s = scope();
  while (s != NULL) {
    if (s->num_heap_slots() > 0) {
      if (s->calls_sloppy_eval()) {
        // Check that extension is NULL.
        __ ldr(temp, ContextOperand(current, Context::EXTENSION_INDEX));
        __ tst(temp, temp);
        __ b(ne, slow);
      }
      // Load next context in chain.
      __ ldr(next, ContextOperand(current, Context::PREVIOUS_INDEX));
      // Walk the rest of the chain without clobbering cp.
      current = next;
    }
    // If no outer scope calls eval, we do not need to check more
    // context extensions.
    if (!s->outer_scope_calls_sloppy_eval() || s->is_eval_scope()) break;
    s = s->outer_scope();
  }

  if (s->is_eval_scope()) {
    Label loop, fast;
    if (!current.is(next)) {
      __ Move(next, current);
    }
    __ bind(&loop);
    // Terminate at native context.
    __ ldr(temp, FieldMemOperand(next, HeapObject::kMapOffset));
    __ LoadRoot(ip, Heap::kNativeContextMapRootIndex);
    __ cmp(temp, ip);
    __ b(eq, &fast);
    // Check that extension is NULL.
    __ ldr(temp, ContextOperand(next, Context::EXTENSION_INDEX));
    __ tst(temp, temp);
    __ b(ne, slow);
    // Load next context in chain.
    __ ldr(next, ContextOperand(next, Context::PREVIOUS_INDEX));
    __ b(&loop);
    __ bind(&fast);
  }

  __ ldr(LoadDescriptor::ReceiverRegister(), GlobalObjectOperand());
  __ mov(LoadDescriptor::NameRegister(), Operand(proxy->var()->name()));
  if (FLAG_vector_ics) {
    __ mov(VectorLoadICDescriptor::SlotRegister(),
           Operand(Smi::FromInt(proxy->VariableFeedbackSlot())));
  }

  ContextualMode mode = (typeof_state == INSIDE_TYPEOF)
      ? NOT_CONTEXTUAL
      : CONTEXTUAL;
  CallLoadIC(mode);
}
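
// Note: the extension checks above (and in ContextSlotOperandCheckExtensions
// below) exist because a sloppy-mode eval in an enclosing scope can introduce
// new bindings at runtime; a non-NULL context extension object means such
// bindings may shadow the statically resolved slot, so the lookup must fall
// back to the slow path.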

MemOperand FullCodeGenerator::ContextSlotOperandCheckExtensions(Variable* var,
                                                                Label* slow) {
  DCHECK(var->IsContextSlot());
  Register context = cp;
  Register next = r3;
  Register temp = r4;

  for (Scope* s = scope(); s != var->scope(); s = s->outer_scope()) {
    if (s->num_heap_slots() > 0) {
      if (s->calls_sloppy_eval()) {
        // Check that extension is NULL.
        __ ldr(temp, ContextOperand(context, Context::EXTENSION_INDEX));
        __ tst(temp, temp);
        __ b(ne, slow);
      }
      __ ldr(next, ContextOperand(context, Context::PREVIOUS_INDEX));
      // Walk the rest of the chain without clobbering cp.
      context = next;
    }
  }
  // Check that last extension is NULL.
  __ ldr(temp, ContextOperand(context, Context::EXTENSION_INDEX));
  __ tst(temp, temp);
  __ b(ne, slow);

  // This function is used only for loads, not stores, so it's safe to
  // return a cp-based operand (the write barrier cannot be allowed to
  // destroy the cp register).
  return ContextOperand(context, var->index());
}


void FullCodeGenerator::EmitDynamicLookupFastCase(VariableProxy* proxy,
                                                  TypeofState typeof_state,
                                                  Label* slow,
                                                  Label* done) {
  // Generate fast-case code for variables that might be shadowed by
  // eval-introduced variables. Eval is used a lot without
  // introducing variables. In those cases, we do not want to
  // perform a runtime call for all variables in the scope
  // containing the eval.
  Variable* var = proxy->var();
  if (var->mode() == DYNAMIC_GLOBAL) {
    EmitLoadGlobalCheckExtensions(proxy, typeof_state, slow);
    __ jmp(done);
  } else if (var->mode() == DYNAMIC_LOCAL) {
    Variable* local = var->local_if_not_shadowed();
    __ ldr(r0, ContextSlotOperandCheckExtensions(local, slow));
    if (local->mode() == LET || local->mode() == CONST ||
        local->mode() == CONST_LEGACY) {
      __ CompareRoot(r0, Heap::kTheHoleValueRootIndex);
      if (local->mode() == CONST_LEGACY) {
        __ LoadRoot(r0, Heap::kUndefinedValueRootIndex, eq);
      } else {  // LET || CONST
        __ b(ne, done);
        __ mov(r0, Operand(var->name()));
        __ push(r0);
        __ CallRuntime(Runtime::kThrowReferenceError, 1);
      }
    }
    __ jmp(done);
  }
}


void FullCodeGenerator::EmitVariableLoad(VariableProxy* proxy) {
  // Record position before possible IC call.
  SetSourcePosition(proxy->position());
  Variable* var = proxy->var();

  // Three cases: global variables, lookup variables, and all other types of
  // variables.
  switch (var->location()) {
    case Variable::UNALLOCATED: {
      Comment cmnt(masm_, "[ Global variable");
      __ ldr(LoadDescriptor::ReceiverRegister(), GlobalObjectOperand());
      __ mov(LoadDescriptor::NameRegister(), Operand(var->name()));
      if (FLAG_vector_ics) {
        __ mov(VectorLoadICDescriptor::SlotRegister(),
               Operand(Smi::FromInt(proxy->VariableFeedbackSlot())));
      }
      CallLoadIC(CONTEXTUAL);
      context()->Plug(r0);
      break;
    }

    case Variable::PARAMETER:
    case Variable::LOCAL:
    case Variable::CONTEXT: {
      Comment cmnt(masm_, var->IsContextSlot() ? "[ Context variable"
                                               : "[ Stack variable");
      if (var->binding_needs_init()) {
        // var->scope() may be NULL when the proxy is located in eval code and
        // refers to a potential outside binding. Currently those bindings are
        // always looked up dynamically, i.e. in that case
        //     var->location() == LOOKUP.
        // always holds.
        DCHECK(var->scope() != NULL);

        // Check if the binding really needs an initialization check. The check
        // can be skipped in the following situation: we have a LET or CONST
        // binding in harmony mode, both the Variable and the VariableProxy have
        // the same declaration scope (i.e. they are both in global code, in the
        // same function or in the same eval code) and the VariableProxy is in
        // the source physically located after the initializer of the variable.
        //
        // We cannot skip any initialization checks for CONST in non-harmony
        // mode because const variables may be declared but never initialized:
        //   if (false) { const x; }; var y = x;
        //
        // The condition on the declaration scopes is a conservative check for
        // nested functions that access a binding and are called before the
        // binding is initialized:
        //   function() { f(); let x = 1; function f() { x = 2; } }
        //
        bool skip_init_check;
        if (var->scope()->DeclarationScope() != scope()->DeclarationScope()) {
          skip_init_check = false;
        } else {
          // Check that we always have valid source position.
          DCHECK(var->initializer_position() != RelocInfo::kNoPosition);
          DCHECK(proxy->position() != RelocInfo::kNoPosition);
          skip_init_check = var->mode() != CONST_LEGACY &&
              var->initializer_position() < proxy->position();
        }

        if (!skip_init_check) {
          // Let and const need a read barrier.
          GetVar(r0, var);
          __ CompareRoot(r0, Heap::kTheHoleValueRootIndex);
          if (var->mode() == LET || var->mode() == CONST) {
            // Throw a reference error when using an uninitialized let/const
            // binding in harmony mode.
            Label done;
            __ b(ne, &done);
            __ mov(r0, Operand(var->name()));
            __ push(r0);
            __ CallRuntime(Runtime::kThrowReferenceError, 1);
            __ bind(&done);
          } else {
            // Uninitialized const bindings outside of harmony mode are unholed.
            DCHECK(var->mode() == CONST_LEGACY);
            __ LoadRoot(r0, Heap::kUndefinedValueRootIndex, eq);
          }
          context()->Plug(r0);
          break;
        }
      }
      context()->Plug(var);
      break;
    }

    case Variable::LOOKUP: {
      Comment cmnt(masm_, "[ Lookup variable");
      Label done, slow;
      // Generate code for loading from variables potentially shadowed
      // by eval-introduced variables.
      EmitDynamicLookupFastCase(proxy, NOT_INSIDE_TYPEOF, &slow, &done);
      __ bind(&slow);
      __ mov(r1, Operand(var->name()));
      __ Push(cp, r1);  // Context and name.
      __ CallRuntime(Runtime::kLoadLookupSlot, 2);
      __ bind(&done);
      context()->Plug(r0);
    }
  }
}
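
// VisitRegExpLiteral below caches the materialized RegExp in the function's
// literals array: the slot starts out undefined, is filled via
// Runtime::kMaterializeRegExpLiteral on first evaluation, and every
// evaluation then returns a fresh shallow copy of the cached object.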

void FullCodeGenerator::VisitRegExpLiteral(RegExpLiteral* expr) {
  Comment cmnt(masm_, "[ RegExpLiteral");
  Label materialized;
  // Registers will be used as follows:
  // r5 = materialized value (RegExp literal)
  // r4 = JS function, literals array
  // r3 = literal index
  // r2 = RegExp pattern
  // r1 = RegExp flags
  // r0 = RegExp literal clone
  __ ldr(r0, MemOperand(fp, JavaScriptFrameConstants::kFunctionOffset));
  __ ldr(r4, FieldMemOperand(r0, JSFunction::kLiteralsOffset));
  int literal_offset =
      FixedArray::kHeaderSize + expr->literal_index() * kPointerSize;
  __ ldr(r5, FieldMemOperand(r4, literal_offset));
  __ LoadRoot(ip, Heap::kUndefinedValueRootIndex);
  __ cmp(r5, ip);
  __ b(ne, &materialized);

  // Create regexp literal using runtime function.
  // Result will be in r0.
  __ mov(r3, Operand(Smi::FromInt(expr->literal_index())));
  __ mov(r2, Operand(expr->pattern()));
  __ mov(r1, Operand(expr->flags()));
  __ Push(r4, r3, r2, r1);
  __ CallRuntime(Runtime::kMaterializeRegExpLiteral, 4);
  __ mov(r5, r0);

  __ bind(&materialized);
  int size = JSRegExp::kSize + JSRegExp::kInObjectFieldCount * kPointerSize;
  Label allocated, runtime_allocate;
  __ Allocate(size, r0, r2, r3, &runtime_allocate, TAG_OBJECT);
  __ jmp(&allocated);

  __ bind(&runtime_allocate);
  __ mov(r0, Operand(Smi::FromInt(size)));
  __ Push(r5, r0);
  __ CallRuntime(Runtime::kAllocateInNewSpace, 1);
  __ pop(r5);

  __ bind(&allocated);
  // After this, registers are used as follows:
  // r0: Newly allocated regexp.
  // r5: Materialized regexp.
  // r2: temp.
  __ CopyFields(r0, r5, d0, size / kPointerSize);
  context()->Plug(r0);
}


void FullCodeGenerator::EmitAccessor(Expression* expression) {
  if (expression == NULL) {
    __ LoadRoot(r1, Heap::kNullValueRootIndex);
    __ push(r1);
  } else {
    VisitForStackValue(expression);
  }
}


void FullCodeGenerator::VisitObjectLiteral(ObjectLiteral* expr) {
  Comment cmnt(masm_, "[ ObjectLiteral");

  expr->BuildConstantProperties(isolate());
  Handle<FixedArray> constant_properties = expr->constant_properties();
  __ ldr(r3, MemOperand(fp, JavaScriptFrameConstants::kFunctionOffset));
  __ ldr(r3, FieldMemOperand(r3, JSFunction::kLiteralsOffset));
  __ mov(r2, Operand(Smi::FromInt(expr->literal_index())));
  __ mov(r1, Operand(constant_properties));
  int flags = expr->fast_elements()
      ? ObjectLiteral::kFastElements
      : ObjectLiteral::kNoFlags;
  flags |= expr->has_function()
      ? ObjectLiteral::kHasFunction
      : ObjectLiteral::kNoFlags;
  __ mov(r0, Operand(Smi::FromInt(flags)));
  int properties_count = constant_properties->length() / 2;
  if (expr->may_store_doubles() || expr->depth() > 1 ||
      masm()->serializer_enabled() || flags != ObjectLiteral::kFastElements ||
      properties_count > FastCloneShallowObjectStub::kMaximumClonedProperties) {
    __ Push(r3, r2, r1, r0);
    __ CallRuntime(Runtime::kCreateObjectLiteral, 4);
  } else {
    FastCloneShallowObjectStub stub(isolate(), properties_count);
    __ CallStub(&stub);
  }

  // If result_saved is true the result is on top of the stack. If
  // result_saved is false the result is in r0.
  bool result_saved = false;

  // Mark all computed expressions that are bound to a key that
  // is shadowed by a later occurrence of the same key. For the
  // marked expressions, no store code is emitted.
  expr->CalculateEmitStore(zone());

  AccessorTable accessor_table(zone());
  for (int i = 0; i < expr->properties()->length(); i++) {
    ObjectLiteral::Property* property = expr->properties()->at(i);
    if (property->IsCompileTimeValue()) continue;

    Literal* key = property->key();
    Expression* value = property->value();
    if (!result_saved) {
      __ push(r0);  // Save result on stack
      result_saved = true;
    }
    switch (property->kind()) {
      case ObjectLiteral::Property::CONSTANT:
        UNREACHABLE();
      case ObjectLiteral::Property::MATERIALIZED_LITERAL:
        DCHECK(!CompileTimeValue::IsCompileTimeValue(property->value()));
        // Fall through.
1721 case ObjectLiteral::Property::COMPUTED: 1722 if (key->value()->IsInternalizedString()) { 1723 if (property->emit_store()) { 1724 VisitForAccumulatorValue(value); 1725 DCHECK(StoreDescriptor::ValueRegister().is(r0)); 1726 __ mov(StoreDescriptor::NameRegister(), Operand(key->value())); 1727 __ ldr(StoreDescriptor::ReceiverRegister(), MemOperand(sp)); 1728 CallStoreIC(key->LiteralFeedbackId()); 1729 PrepareForBailoutForId(key->id(), NO_REGISTERS); 1730 } else { 1731 VisitForEffect(value); 1732 } 1733 break; 1734 } 1735 // Duplicate receiver on stack. 1736 __ ldr(r0, MemOperand(sp)); 1737 __ push(r0); 1738 VisitForStackValue(key); 1739 VisitForStackValue(value); 1740 if (property->emit_store()) { 1741 __ mov(r0, Operand(Smi::FromInt(SLOPPY))); // PropertyAttributes 1742 __ push(r0); 1743 __ CallRuntime(Runtime::kSetProperty, 4); 1744 } else { 1745 __ Drop(3); 1746 } 1747 break; 1748 case ObjectLiteral::Property::PROTOTYPE: 1749 // Duplicate receiver on stack. 1750 __ ldr(r0, MemOperand(sp)); 1751 __ push(r0); 1752 VisitForStackValue(value); 1753 if (property->emit_store()) { 1754 __ CallRuntime(Runtime::kSetPrototype, 2); 1755 } else { 1756 __ Drop(2); 1757 } 1758 break; 1759 1760 case ObjectLiteral::Property::GETTER: 1761 accessor_table.lookup(key)->second->getter = value; 1762 break; 1763 case ObjectLiteral::Property::SETTER: 1764 accessor_table.lookup(key)->second->setter = value; 1765 break; 1766 } 1767 } 1768 1769 // Emit code to define accessors, using only a single call to the runtime for 1770 // each pair of corresponding getters and setters. 1771 for (AccessorTable::Iterator it = accessor_table.begin(); 1772 it != accessor_table.end(); 1773 ++it) { 1774 __ ldr(r0, MemOperand(sp)); // Duplicate receiver. 1775 __ push(r0); 1776 VisitForStackValue(it->first); 1777 EmitAccessor(it->second->getter); 1778 EmitAccessor(it->second->setter); 1779 __ mov(r0, Operand(Smi::FromInt(NONE))); 1780 __ push(r0); 1781 __ CallRuntime(Runtime::kDefineAccessorPropertyUnchecked, 5); 1782 } 1783 1784 if (expr->has_function()) { 1785 DCHECK(result_saved); 1786 __ ldr(r0, MemOperand(sp)); 1787 __ push(r0); 1788 __ CallRuntime(Runtime::kToFastProperties, 1); 1789 } 1790 1791 if (result_saved) { 1792 context()->PlugTOS(); 1793 } else { 1794 context()->Plug(r0); 1795 } 1796} 1797 1798 1799void FullCodeGenerator::VisitArrayLiteral(ArrayLiteral* expr) { 1800 Comment cmnt(masm_, "[ ArrayLiteral"); 1801 1802 expr->BuildConstantElements(isolate()); 1803 int flags = expr->depth() == 1 1804 ? ArrayLiteral::kShallowElements 1805 : ArrayLiteral::kNoFlags; 1806 1807 ZoneList<Expression*>* subexprs = expr->values(); 1808 int length = subexprs->length(); 1809 Handle<FixedArray> constant_elements = expr->constant_elements(); 1810 DCHECK_EQ(2, constant_elements->length()); 1811 ElementsKind constant_elements_kind = 1812 static_cast<ElementsKind>(Smi::cast(constant_elements->get(0))->value()); 1813 bool has_fast_elements = IsFastObjectElementsKind(constant_elements_kind); 1814 Handle<FixedArrayBase> constant_elements_values( 1815 FixedArrayBase::cast(constant_elements->get(1))); 1816 1817 AllocationSiteMode allocation_site_mode = TRACK_ALLOCATION_SITE; 1818 if (has_fast_elements && !FLAG_allocation_site_pretenuring) { 1819 // If the only customer of allocation sites is transitioning, then 1820 // we can turn it off if we don't have anywhere else to transition to. 
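// Roughly: fast object elements are already the most general fast
// ElementsKind, so there is no further kind to transition to; without
// pretenuring feedback the allocation site would carry no useful
// information for this literal.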
1821 allocation_site_mode = DONT_TRACK_ALLOCATION_SITE; 1822 } 1823 1824 __ ldr(r3, MemOperand(fp, JavaScriptFrameConstants::kFunctionOffset)); 1825 __ ldr(r3, FieldMemOperand(r3, JSFunction::kLiteralsOffset)); 1826 __ mov(r2, Operand(Smi::FromInt(expr->literal_index()))); 1827 __ mov(r1, Operand(constant_elements)); 1828 if (expr->depth() > 1 || length > JSObject::kInitialMaxFastElementArray) { 1829 __ mov(r0, Operand(Smi::FromInt(flags))); 1830 __ Push(r3, r2, r1, r0); 1831 __ CallRuntime(Runtime::kCreateArrayLiteral, 4); 1832 } else { 1833 FastCloneShallowArrayStub stub(isolate(), allocation_site_mode); 1834 __ CallStub(&stub); 1835 } 1836 1837 bool result_saved = false; // Is the result saved to the stack? 1838 1839 // Emit code to evaluate all the non-constant subexpressions and to store 1840 // them into the newly cloned array. 1841 for (int i = 0; i < length; i++) { 1842 Expression* subexpr = subexprs->at(i); 1843 // If the subexpression is a literal or a simple materialized literal it 1844 // is already set in the cloned array. 1845 if (CompileTimeValue::IsCompileTimeValue(subexpr)) continue; 1846 1847 if (!result_saved) { 1848 __ push(r0); 1849 __ Push(Smi::FromInt(expr->literal_index())); 1850 result_saved = true; 1851 } 1852 VisitForAccumulatorValue(subexpr); 1853 1854 if (IsFastObjectElementsKind(constant_elements_kind)) { 1855 int offset = FixedArray::kHeaderSize + (i * kPointerSize); 1856 __ ldr(r6, MemOperand(sp, kPointerSize)); // Copy of array literal. 1857 __ ldr(r1, FieldMemOperand(r6, JSObject::kElementsOffset)); 1858 __ str(result_register(), FieldMemOperand(r1, offset)); 1859 // Update the write barrier for the array store. 1860 __ RecordWriteField(r1, offset, result_register(), r2, 1861 kLRHasBeenSaved, kDontSaveFPRegs, 1862 EMIT_REMEMBERED_SET, INLINE_SMI_CHECK); 1863 } else { 1864 __ mov(r3, Operand(Smi::FromInt(i))); 1865 StoreArrayLiteralElementStub stub(isolate()); 1866 __ CallStub(&stub); 1867 } 1868 1869 PrepareForBailoutForId(expr->GetIdForElement(i), NO_REGISTERS); 1870 } 1871 1872 if (result_saved) { 1873 __ pop(); // literal index 1874 context()->PlugTOS(); 1875 } else { 1876 context()->Plug(r0); 1877 } 1878} 1879 1880 1881void FullCodeGenerator::VisitAssignment(Assignment* expr) { 1882 DCHECK(expr->target()->IsValidReferenceExpression()); 1883 1884 Comment cmnt(masm_, "[ Assignment"); 1885 1886 // Left-hand side can only be a property, a global or a (parameter or local) 1887 // slot. 1888 enum LhsKind { VARIABLE, NAMED_PROPERTY, KEYED_PROPERTY }; 1889 LhsKind assign_type = VARIABLE; 1890 Property* property = expr->target()->AsProperty(); 1891 if (property != NULL) { 1892 assign_type = (property->key()->IsPropertyName()) 1893 ? NAMED_PROPERTY 1894 : KEYED_PROPERTY; 1895 } 1896 1897 // Evaluate LHS expression. 1898 switch (assign_type) { 1899 case VARIABLE: 1900 // Nothing to do here. 1901 break; 1902 case NAMED_PROPERTY: 1903 if (expr->is_compound()) { 1904 // We need the receiver both on the stack and in the register. 
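// For example, an illustrative compound assignment like obj.x += 1
// reads obj.x through the register copy of the receiver and later
// stores through the copy left on the stack.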
1905 VisitForStackValue(property->obj()); 1906 __ ldr(LoadDescriptor::ReceiverRegister(), MemOperand(sp, 0)); 1907 } else { 1908 VisitForStackValue(property->obj()); 1909 } 1910 break; 1911 case KEYED_PROPERTY: 1912 if (expr->is_compound()) { 1913 VisitForStackValue(property->obj()); 1914 VisitForStackValue(property->key()); 1915 __ ldr(LoadDescriptor::ReceiverRegister(), 1916 MemOperand(sp, 1 * kPointerSize)); 1917 __ ldr(LoadDescriptor::NameRegister(), MemOperand(sp, 0)); 1918 } else { 1919 VisitForStackValue(property->obj()); 1920 VisitForStackValue(property->key()); 1921 } 1922 break; 1923 } 1924 1925 // For compound assignments we need another deoptimization point after the 1926 // variable/property load. 1927 if (expr->is_compound()) { 1928 { AccumulatorValueContext context(this); 1929 switch (assign_type) { 1930 case VARIABLE: 1931 EmitVariableLoad(expr->target()->AsVariableProxy()); 1932 PrepareForBailout(expr->target(), TOS_REG); 1933 break; 1934 case NAMED_PROPERTY: 1935 EmitNamedPropertyLoad(property); 1936 PrepareForBailoutForId(property->LoadId(), TOS_REG); 1937 break; 1938 case KEYED_PROPERTY: 1939 EmitKeyedPropertyLoad(property); 1940 PrepareForBailoutForId(property->LoadId(), TOS_REG); 1941 break; 1942 } 1943 } 1944 1945 Token::Value op = expr->binary_op(); 1946 __ push(r0); // Left operand goes on the stack. 1947 VisitForAccumulatorValue(expr->value()); 1948 1949 OverwriteMode mode = expr->value()->ResultOverwriteAllowed() 1950 ? OVERWRITE_RIGHT 1951 : NO_OVERWRITE; 1952 SetSourcePosition(expr->position() + 1); 1953 AccumulatorValueContext context(this); 1954 if (ShouldInlineSmiCase(op)) { 1955 EmitInlineSmiBinaryOp(expr->binary_operation(), 1956 op, 1957 mode, 1958 expr->target(), 1959 expr->value()); 1960 } else { 1961 EmitBinaryOp(expr->binary_operation(), op, mode); 1962 } 1963 1964 // Deoptimization point in case the binary operation may have side effects. 1965 PrepareForBailout(expr->binary_operation(), TOS_REG); 1966 } else { 1967 VisitForAccumulatorValue(expr->value()); 1968 } 1969 1970 // Record source position before possible IC call. 1971 SetSourcePosition(expr->position()); 1972 1973 // Store the value. 1974 switch (assign_type) { 1975 case VARIABLE: 1976 EmitVariableAssignment(expr->target()->AsVariableProxy()->var(), 1977 expr->op()); 1978 PrepareForBailoutForId(expr->AssignmentId(), TOS_REG); 1979 context()->Plug(r0); 1980 break; 1981 case NAMED_PROPERTY: 1982 EmitNamedPropertyAssignment(expr); 1983 break; 1984 case KEYED_PROPERTY: 1985 EmitKeyedPropertyAssignment(expr); 1986 break; 1987 } 1988} 1989 1990 1991void FullCodeGenerator::VisitYield(Yield* expr) { 1992 Comment cmnt(masm_, "[ Yield"); 1993 // Evaluate yielded value first; the initial iterator definition depends on 1994 // this. It stays on the stack while we update the iterator. 1995 VisitForStackValue(expr->expression()); 1996 1997 switch (expr->yield_kind()) { 1998 case Yield::kSuspend: 1999 // Pop value from top-of-stack slot; box result into result register. 2000 EmitCreateIteratorResult(false); 2001 __ push(result_register()); 2002 // Fall through. 
2003 case Yield::kInitial: { 2004 Label suspend, continuation, post_runtime, resume; 2005 2006 __ jmp(&suspend); 2007 2008 __ bind(&continuation); 2009 __ jmp(&resume); 2010 2011 __ bind(&suspend); 2012 VisitForAccumulatorValue(expr->generator_object()); 2013 DCHECK(continuation.pos() > 0 && Smi::IsValid(continuation.pos())); 2014 __ mov(r1, Operand(Smi::FromInt(continuation.pos()))); 2015 __ str(r1, FieldMemOperand(r0, JSGeneratorObject::kContinuationOffset)); 2016 __ str(cp, FieldMemOperand(r0, JSGeneratorObject::kContextOffset)); 2017 __ mov(r1, cp); 2018 __ RecordWriteField(r0, JSGeneratorObject::kContextOffset, r1, r2, 2019 kLRHasBeenSaved, kDontSaveFPRegs); 2020 __ add(r1, fp, Operand(StandardFrameConstants::kExpressionsOffset)); 2021 __ cmp(sp, r1); 2022 __ b(eq, &post_runtime); 2023 __ push(r0); // generator object 2024 __ CallRuntime(Runtime::kSuspendJSGeneratorObject, 1); 2025 __ ldr(cp, MemOperand(fp, StandardFrameConstants::kContextOffset)); 2026 __ bind(&post_runtime); 2027 __ pop(result_register()); 2028 EmitReturnSequence(); 2029 2030 __ bind(&resume); 2031 context()->Plug(result_register()); 2032 break; 2033 } 2034 2035 case Yield::kFinal: { 2036 VisitForAccumulatorValue(expr->generator_object()); 2037 __ mov(r1, Operand(Smi::FromInt(JSGeneratorObject::kGeneratorClosed))); 2038 __ str(r1, FieldMemOperand(result_register(), 2039 JSGeneratorObject::kContinuationOffset)); 2040 // Pop value from top-of-stack slot, box result into result register. 2041 EmitCreateIteratorResult(true); 2042 EmitUnwindBeforeReturn(); 2043 EmitReturnSequence(); 2044 break; 2045 } 2046 2047 case Yield::kDelegating: { 2048 VisitForStackValue(expr->generator_object()); 2049 2050 // Initial stack layout is as follows: 2051 // [sp + 1 * kPointerSize] iter 2052 // [sp + 0 * kPointerSize] g 2053 2054 Label l_catch, l_try, l_suspend, l_continuation, l_resume; 2055 Label l_next, l_call, l_loop; 2056 Register load_receiver = LoadDescriptor::ReceiverRegister(); 2057 Register load_name = LoadDescriptor::NameRegister(); 2058 2059 // Initial send value is undefined. 2060 __ LoadRoot(r0, Heap::kUndefinedValueRootIndex); 2061 __ b(&l_next); 2062 2063 // catch (e) { receiver = iter; f = 'throw'; arg = e; goto l_call; } 2064 __ bind(&l_catch); 2065 handler_table()->set(expr->index(), Smi::FromInt(l_catch.pos())); 2066 __ LoadRoot(load_name, Heap::kthrow_stringRootIndex); // "throw" 2067 __ ldr(r3, MemOperand(sp, 1 * kPointerSize)); // iter 2068 __ Push(load_name, r3, r0); // "throw", iter, except 2069 __ jmp(&l_call); 2070 2071 // try { received = %yield result } 2072 // Shuffle the received result above a try handler and yield it without 2073 // re-boxing. 
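// A rough sketch of the delegation loop emitted below ('g' is the
// generator, 'iter' the delegate; labels shown for orientation):
//   let received = undefined, f = 'next';
//   for (;;) {
//     let result = iter[f](received);       // l_call
//     if (result.done) break;               // l_loop
//     received = yield result.value;        // l_try ... l_resume
//     f = 'next';                           // l_next (catch sets 'throw')
//   }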
2074 __ bind(&l_try); 2075 __ pop(r0); // result 2076 __ PushTryHandler(StackHandler::CATCH, expr->index()); 2077 const int handler_size = StackHandlerConstants::kSize; 2078 __ push(r0); // result 2079 __ jmp(&l_suspend); 2080 __ bind(&l_continuation); 2081 __ jmp(&l_resume); 2082 __ bind(&l_suspend); 2083 const int generator_object_depth = kPointerSize + handler_size; 2084 __ ldr(r0, MemOperand(sp, generator_object_depth)); 2085 __ push(r0); // g 2086 DCHECK(l_continuation.pos() > 0 && Smi::IsValid(l_continuation.pos())); 2087 __ mov(r1, Operand(Smi::FromInt(l_continuation.pos()))); 2088 __ str(r1, FieldMemOperand(r0, JSGeneratorObject::kContinuationOffset)); 2089 __ str(cp, FieldMemOperand(r0, JSGeneratorObject::kContextOffset)); 2090 __ mov(r1, cp); 2091 __ RecordWriteField(r0, JSGeneratorObject::kContextOffset, r1, r2, 2092 kLRHasBeenSaved, kDontSaveFPRegs); 2093 __ CallRuntime(Runtime::kSuspendJSGeneratorObject, 1); 2094 __ ldr(cp, MemOperand(fp, StandardFrameConstants::kContextOffset)); 2095 __ pop(r0); // result 2096 EmitReturnSequence(); 2097 __ bind(&l_resume); // received in r0 2098 __ PopTryHandler(); 2099 2100 // receiver = iter; f = 'next'; arg = received; 2101 __ bind(&l_next); 2102 2103 __ LoadRoot(load_name, Heap::knext_stringRootIndex); // "next" 2104 __ ldr(r3, MemOperand(sp, 1 * kPointerSize)); // iter 2105 __ Push(load_name, r3, r0); // "next", iter, received 2106 2107 // result = receiver[f](arg); 2108 __ bind(&l_call); 2109 __ ldr(load_receiver, MemOperand(sp, kPointerSize)); 2110 __ ldr(load_name, MemOperand(sp, 2 * kPointerSize)); 2111 if (FLAG_vector_ics) { 2112 __ mov(VectorLoadICDescriptor::SlotRegister(), 2113 Operand(Smi::FromInt(expr->KeyedLoadFeedbackSlot()))); 2114 } 2115 Handle<Code> ic = CodeFactory::KeyedLoadIC(isolate()).code(); 2116 CallIC(ic, TypeFeedbackId::None()); 2117 __ mov(r1, r0); 2118 __ str(r1, MemOperand(sp, 2 * kPointerSize)); 2119 CallFunctionStub stub(isolate(), 1, CALL_AS_METHOD); 2120 __ CallStub(&stub); 2121 2122 __ ldr(cp, MemOperand(fp, StandardFrameConstants::kContextOffset)); 2123 __ Drop(1); // The function is still on the stack; drop it. 2124 2125 // if (!result.done) goto l_try; 2126 __ bind(&l_loop); 2127 __ Move(load_receiver, r0); 2128 2129 __ push(load_receiver); // save result 2130 __ LoadRoot(load_name, Heap::kdone_stringRootIndex); // "done" 2131 if (FLAG_vector_ics) { 2132 __ mov(VectorLoadICDescriptor::SlotRegister(), 2133 Operand(Smi::FromInt(expr->DoneFeedbackSlot()))); 2134 } 2135 CallLoadIC(NOT_CONTEXTUAL); // r0=result.done 2136 Handle<Code> bool_ic = ToBooleanStub::GetUninitialized(isolate()); 2137 CallIC(bool_ic); 2138 __ cmp(r0, Operand(0)); 2139 __ b(eq, &l_try); 2140 2141 // result.value 2142 __ pop(load_receiver); // result 2143 __ LoadRoot(load_name, Heap::kvalue_stringRootIndex); // "value" 2144 if (FLAG_vector_ics) { 2145 __ mov(VectorLoadICDescriptor::SlotRegister(), 2146 Operand(Smi::FromInt(expr->ValueFeedbackSlot()))); 2147 } 2148 CallLoadIC(NOT_CONTEXTUAL); // r0=result.value 2149 context()->DropAndPlug(2, r0); // drop iter and g 2150 break; 2151 } 2152 } 2153} 2154 2155 2156void FullCodeGenerator::EmitGeneratorResume(Expression *generator, 2157 Expression *value, 2158 JSGeneratorObject::ResumeMode resume_mode) { 2159 // The value stays in r0, and is ultimately read by the resumed generator, as 2160 // if CallRuntime(Runtime::kSuspendJSGeneratorObject) returned it. Or it 2161 // is read to throw the value when the resumed generator is already closed. 
2162 // r1 will hold the generator object until the activation has been resumed. 2163 VisitForStackValue(generator); 2164 VisitForAccumulatorValue(value); 2165 __ pop(r1); 2166 2167 // Check generator state. 2168 Label wrong_state, closed_state, done; 2169 __ ldr(r3, FieldMemOperand(r1, JSGeneratorObject::kContinuationOffset)); 2170 STATIC_ASSERT(JSGeneratorObject::kGeneratorExecuting < 0); 2171 STATIC_ASSERT(JSGeneratorObject::kGeneratorClosed == 0); 2172 __ cmp(r3, Operand(Smi::FromInt(0))); 2173 __ b(eq, &closed_state); 2174 __ b(lt, &wrong_state); 2175 2176 // Load suspended function and context. 2177 __ ldr(cp, FieldMemOperand(r1, JSGeneratorObject::kContextOffset)); 2178 __ ldr(r4, FieldMemOperand(r1, JSGeneratorObject::kFunctionOffset)); 2179 2180 // Load receiver and store as the first argument. 2181 __ ldr(r2, FieldMemOperand(r1, JSGeneratorObject::kReceiverOffset)); 2182 __ push(r2); 2183 2184 // Push holes for the rest of the arguments to the generator function. 2185 __ ldr(r3, FieldMemOperand(r4, JSFunction::kSharedFunctionInfoOffset)); 2186 __ ldr(r3, 2187 FieldMemOperand(r3, SharedFunctionInfo::kFormalParameterCountOffset)); 2188 __ LoadRoot(r2, Heap::kTheHoleValueRootIndex); 2189 Label push_argument_holes, push_frame; 2190 __ bind(&push_argument_holes); 2191 __ sub(r3, r3, Operand(Smi::FromInt(1)), SetCC); 2192 __ b(mi, &push_frame); 2193 __ push(r2); 2194 __ jmp(&push_argument_holes); 2195 2196 // Enter a new JavaScript frame, and initialize its slots as they were when 2197 // the generator was suspended. 2198 Label resume_frame; 2199 __ bind(&push_frame); 2200 __ bl(&resume_frame); 2201 __ jmp(&done); 2202 __ bind(&resume_frame); 2203 // lr = return address. 2204 // fp = caller's frame pointer. 2205 // pp = caller's constant pool (if FLAG_enable_ool_constant_pool), 2206 // cp = callee's context, 2207 // r4 = callee's JS function. 2208 __ PushFixedFrame(r4); 2209 // Adjust FP to point to saved FP. 2210 __ add(fp, sp, Operand(StandardFrameConstants::kFixedFrameSizeFromFp)); 2211 2212 // Load the operand stack size. 2213 __ ldr(r3, FieldMemOperand(r1, JSGeneratorObject::kOperandStackOffset)); 2214 __ ldr(r3, FieldMemOperand(r3, FixedArray::kLengthOffset)); 2215 __ SmiUntag(r3); 2216 2217 // If we are sending a value and there is no operand stack, we can jump back 2218 // in directly. 2219 if (resume_mode == JSGeneratorObject::NEXT) { 2220 Label slow_resume; 2221 __ cmp(r3, Operand(0)); 2222 __ b(ne, &slow_resume); 2223 __ ldr(r3, FieldMemOperand(r4, JSFunction::kCodeEntryOffset)); 2224 2225 { ConstantPoolUnavailableScope constant_pool_unavailable(masm_); 2226 if (FLAG_enable_ool_constant_pool) { 2227 // Load the new code object's constant pool pointer. 2228 __ ldr(pp, 2229 MemOperand(r3, Code::kConstantPoolOffset - Code::kHeaderSize)); 2230 } 2231 2232 __ ldr(r2, FieldMemOperand(r1, JSGeneratorObject::kContinuationOffset)); 2233 __ SmiUntag(r2); 2234 __ add(r3, r3, r2); 2235 __ mov(r2, Operand(Smi::FromInt(JSGeneratorObject::kGeneratorExecuting))); 2236 __ str(r2, FieldMemOperand(r1, JSGeneratorObject::kContinuationOffset)); 2237 __ Jump(r3); 2238 } 2239 __ bind(&slow_resume); 2240 } 2241 2242 // Otherwise, we push holes for the operand stack and call the runtime to fix 2243 // up the stack and the handlers. 
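// Roughly: while (r3-- > 0) push(the_hole); r2 still holds the hole
// value loaded for the argument holes above, and the runtime call then
// replaces the holes with the saved operand stack contents.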
2244 Label push_operand_holes, call_resume; 2245 __ bind(&push_operand_holes); 2246 __ sub(r3, r3, Operand(1), SetCC); 2247 __ b(mi, &call_resume); 2248 __ push(r2); 2249 __ b(&push_operand_holes); 2250 __ bind(&call_resume); 2251 DCHECK(!result_register().is(r1)); 2252 __ Push(r1, result_register()); 2253 __ Push(Smi::FromInt(resume_mode)); 2254 __ CallRuntime(Runtime::kResumeJSGeneratorObject, 3); 2255 // Not reached: the runtime call returns elsewhere. 2256 __ stop("not-reached"); 2257 2258 // Reach here when generator is closed. 2259 __ bind(&closed_state); 2260 if (resume_mode == JSGeneratorObject::NEXT) { 2261 // Return completed iterator result when generator is closed. 2262 __ LoadRoot(r2, Heap::kUndefinedValueRootIndex); 2263 __ push(r2); 2264 // Pop value from top-of-stack slot; box result into result register. 2265 EmitCreateIteratorResult(true); 2266 } else { 2267 // Throw the provided value. 2268 __ push(r0); 2269 __ CallRuntime(Runtime::kThrow, 1); 2270 } 2271 __ jmp(&done); 2272 2273 // Throw error if we attempt to operate on a running generator. 2274 __ bind(&wrong_state); 2275 __ push(r1); 2276 __ CallRuntime(Runtime::kThrowGeneratorStateError, 1); 2277 2278 __ bind(&done); 2279 context()->Plug(result_register()); 2280} 2281 2282 2283void FullCodeGenerator::EmitCreateIteratorResult(bool done) { 2284 Label gc_required; 2285 Label allocated; 2286 2287 Handle<Map> map(isolate()->native_context()->iterator_result_map()); 2288 2289 __ Allocate(map->instance_size(), r0, r2, r3, &gc_required, TAG_OBJECT); 2290 __ jmp(&allocated); 2291 2292 __ bind(&gc_required); 2293 __ Push(Smi::FromInt(map->instance_size())); 2294 __ CallRuntime(Runtime::kAllocateInNewSpace, 1); 2295 __ ldr(context_register(), 2296 MemOperand(fp, StandardFrameConstants::kContextOffset)); 2297 2298 __ bind(&allocated); 2299 __ mov(r1, Operand(map)); 2300 __ pop(r2); 2301 __ mov(r3, Operand(isolate()->factory()->ToBoolean(done))); 2302 __ mov(r4, Operand(isolate()->factory()->empty_fixed_array())); 2303 DCHECK_EQ(map->instance_size(), 5 * kPointerSize); 2304 __ str(r1, FieldMemOperand(r0, HeapObject::kMapOffset)); 2305 __ str(r4, FieldMemOperand(r0, JSObject::kPropertiesOffset)); 2306 __ str(r4, FieldMemOperand(r0, JSObject::kElementsOffset)); 2307 __ str(r2, 2308 FieldMemOperand(r0, JSGeneratorObject::kResultValuePropertyOffset)); 2309 __ str(r3, 2310 FieldMemOperand(r0, JSGeneratorObject::kResultDonePropertyOffset)); 2311 2312 // Only the value field needs a write barrier, as the other values are in the 2313 // root set. 
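// (The map, the empty fixed array and the boolean are long-lived
// old-space values, so storing them cannot create an old-to-new
// pointer; only the freshly popped value in r2 can, hence the single
// barrier below.)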
2314 __ RecordWriteField(r0, JSGeneratorObject::kResultValuePropertyOffset, 2315 r2, r3, kLRHasBeenSaved, kDontSaveFPRegs); 2316} 2317 2318 2319void FullCodeGenerator::EmitNamedPropertyLoad(Property* prop) { 2320 SetSourcePosition(prop->position()); 2321 Literal* key = prop->key()->AsLiteral(); 2322 2323 __ mov(LoadDescriptor::NameRegister(), Operand(key->value())); 2324 if (FLAG_vector_ics) { 2325 __ mov(VectorLoadICDescriptor::SlotRegister(), 2326 Operand(Smi::FromInt(prop->PropertyFeedbackSlot()))); 2327 CallLoadIC(NOT_CONTEXTUAL); 2328 } else { 2329 CallLoadIC(NOT_CONTEXTUAL, prop->PropertyFeedbackId()); 2330 } 2331} 2332 2333 2334void FullCodeGenerator::EmitNamedSuperPropertyLoad(Property* prop) { 2335 SetSourcePosition(prop->position()); 2336 Literal* key = prop->key()->AsLiteral(); 2337 DCHECK(!key->value()->IsSmi()); 2338 DCHECK(prop->IsSuperAccess()); 2339 2340 SuperReference* super_ref = prop->obj()->AsSuperReference(); 2341 EmitLoadHomeObject(super_ref); 2342 __ Push(r0); 2343 VisitForStackValue(super_ref->this_var()); 2344 __ Push(key->value()); 2345 __ CallRuntime(Runtime::kLoadFromSuper, 3); 2346} 2347 2348 2349void FullCodeGenerator::EmitKeyedPropertyLoad(Property* prop) { 2350 SetSourcePosition(prop->position()); 2351 Handle<Code> ic = CodeFactory::KeyedLoadIC(isolate()).code(); 2352 if (FLAG_vector_ics) { 2353 __ mov(VectorLoadICDescriptor::SlotRegister(), 2354 Operand(Smi::FromInt(prop->PropertyFeedbackSlot()))); 2355 CallIC(ic); 2356 } else { 2357 CallIC(ic, prop->PropertyFeedbackId()); 2358 } 2359} 2360 2361 2362void FullCodeGenerator::EmitInlineSmiBinaryOp(BinaryOperation* expr, 2363 Token::Value op, 2364 OverwriteMode mode, 2365 Expression* left_expr, 2366 Expression* right_expr) { 2367 Label done, smi_case, stub_call; 2368 2369 Register scratch1 = r2; 2370 Register scratch2 = r3; 2371 2372 // Get the arguments. 2373 Register left = r1; 2374 Register right = r0; 2375 __ pop(left); 2376 2377 // Perform combined smi check on both operands. 2378 __ orr(scratch1, left, Operand(right)); 2379 STATIC_ASSERT(kSmiTag == 0); 2380 JumpPatchSite patch_site(masm_); 2381 patch_site.EmitJumpIfSmi(scratch1, &smi_case); 2382 2383 __ bind(&stub_call); 2384 Handle<Code> code = CodeFactory::BinaryOpIC(isolate(), op, mode).code(); 2385 CallIC(code, expr->BinaryOperationFeedbackId()); 2386 patch_site.EmitPatchInfo(); 2387 __ jmp(&done); 2388 2389 __ bind(&smi_case); 2390 // Smi case. 
This code works the same way as the smi-smi case in the type
2391 // recording binary operation stub.
2392 switch (op) {
2393 case Token::SAR:
2394 __ GetLeastBitsFromSmi(scratch1, right, 5);
2395 __ mov(right, Operand(left, ASR, scratch1));
2396 __ bic(right, right, Operand(kSmiTagMask));
2397 break;
2398 case Token::SHL: {
2399 __ SmiUntag(scratch1, left);
2400 __ GetLeastBitsFromSmi(scratch2, right, 5);
2401 __ mov(scratch1, Operand(scratch1, LSL, scratch2));
2402 __ TrySmiTag(right, scratch1, &stub_call);
2403 break;
2404 }
2405 case Token::SHR: {
2406 __ SmiUntag(scratch1, left);
2407 __ GetLeastBitsFromSmi(scratch2, right, 5);
2408 __ mov(scratch1, Operand(scratch1, LSR, scratch2));
2409 __ tst(scratch1, Operand(0xc0000000));
2410 __ b(ne, &stub_call);
2411 __ SmiTag(right, scratch1);
2412 break;
2413 }
2414 case Token::ADD:
2415 __ add(scratch1, left, Operand(right), SetCC);
2416 __ b(vs, &stub_call);
2417 __ mov(right, scratch1);
2418 break;
2419 case Token::SUB:
2420 __ sub(scratch1, left, Operand(right), SetCC);
2421 __ b(vs, &stub_call);
2422 __ mov(right, scratch1);
2423 break;
2424 case Token::MUL: {
2425 __ SmiUntag(ip, right);
2426 __ smull(scratch1, scratch2, left, ip);
2427 __ mov(ip, Operand(scratch1, ASR, 31));
2428 __ cmp(ip, Operand(scratch2));
2429 __ b(ne, &stub_call);
2430 __ cmp(scratch1, Operand::Zero());
2431 __ mov(right, Operand(scratch1), LeaveCC, ne);
2432 __ b(ne, &done);
2433 __ add(scratch2, right, Operand(left), SetCC);
2434 __ mov(right, Operand(Smi::FromInt(0)), LeaveCC, pl);
2435 __ b(mi, &stub_call);
2436 break;
2437 }
2438 case Token::BIT_OR:
2439 __ orr(right, left, Operand(right));
2440 break;
2441 case Token::BIT_AND:
2442 __ and_(right, left, Operand(right));
2443 break;
2444 case Token::BIT_XOR:
2445 __ eor(right, left, Operand(right));
2446 break;
2447 default:
2448 UNREACHABLE();
2449 }
2450
2451 __ bind(&done);
2452 context()->Plug(r0);
2453}
2454
2455
2456void FullCodeGenerator::EmitBinaryOp(BinaryOperation* expr,
2457 Token::Value op,
2458 OverwriteMode mode) {
2459 __ pop(r1);
2460 Handle<Code> code = CodeFactory::BinaryOpIC(isolate(), op, mode).code();
2461 JumpPatchSite patch_site(masm_); // unbound, signals no inlined smi code.
2462 CallIC(code, expr->BinaryOperationFeedbackId());
2463 patch_site.EmitPatchInfo();
2464 context()->Plug(r0);
2465}
2466
2467
2468void FullCodeGenerator::EmitAssignment(Expression* expr) {
2469 DCHECK(expr->IsValidReferenceExpression());
2470
2471 // Left-hand side can only be a property, a global or a (parameter or local)
2472 // slot.
2473 enum LhsKind { VARIABLE, NAMED_PROPERTY, KEYED_PROPERTY };
2474 LhsKind assign_type = VARIABLE;
2475 Property* prop = expr->AsProperty();
2476 if (prop != NULL) {
2477 assign_type = (prop->key()->IsPropertyName())
2478 ? NAMED_PROPERTY
2479 : KEYED_PROPERTY;
2480 }
2481
2482 switch (assign_type) {
2483 case VARIABLE: {
2484 Variable* var = expr->AsVariableProxy()->var();
2485 EffectContext context(this);
2486 EmitVariableAssignment(var, Token::ASSIGN);
2487 break;
2488 }
2489 case NAMED_PROPERTY: {
2490 __ push(r0); // Preserve value.
2491 VisitForAccumulatorValue(prop->obj());
2492 __ Move(StoreDescriptor::ReceiverRegister(), r0);
2493 __ pop(StoreDescriptor::ValueRegister()); // Restore value.
2494 __ mov(StoreDescriptor::NameRegister(),
2495 Operand(prop->key()->AsLiteral()->value()));
2496 CallStoreIC();
2497 break;
2498 }
2499 case KEYED_PROPERTY: {
2500 __ push(r0); // Preserve value.
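// Visiting the object and key subexpressions below may clobber r0, so
// the value being assigned is stashed on the stack and popped back
// into the value register once both have been evaluated.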
2501 VisitForStackValue(prop->obj()); 2502 VisitForAccumulatorValue(prop->key()); 2503 __ Move(StoreDescriptor::NameRegister(), r0); 2504 __ Pop(StoreDescriptor::ValueRegister(), 2505 StoreDescriptor::ReceiverRegister()); 2506 Handle<Code> ic = 2507 CodeFactory::KeyedStoreIC(isolate(), strict_mode()).code(); 2508 CallIC(ic); 2509 break; 2510 } 2511 } 2512 context()->Plug(r0); 2513} 2514 2515 2516void FullCodeGenerator::EmitStoreToStackLocalOrContextSlot( 2517 Variable* var, MemOperand location) { 2518 __ str(result_register(), location); 2519 if (var->IsContextSlot()) { 2520 // RecordWrite may destroy all its register arguments. 2521 __ mov(r3, result_register()); 2522 int offset = Context::SlotOffset(var->index()); 2523 __ RecordWriteContextSlot( 2524 r1, offset, r3, r2, kLRHasBeenSaved, kDontSaveFPRegs); 2525 } 2526} 2527 2528 2529void FullCodeGenerator::EmitVariableAssignment(Variable* var, Token::Value op) { 2530 if (var->IsUnallocated()) { 2531 // Global var, const, or let. 2532 __ mov(StoreDescriptor::NameRegister(), Operand(var->name())); 2533 __ ldr(StoreDescriptor::ReceiverRegister(), GlobalObjectOperand()); 2534 CallStoreIC(); 2535 2536 } else if (op == Token::INIT_CONST_LEGACY) { 2537 // Const initializers need a write barrier. 2538 DCHECK(!var->IsParameter()); // No const parameters. 2539 if (var->IsLookupSlot()) { 2540 __ push(r0); 2541 __ mov(r0, Operand(var->name())); 2542 __ Push(cp, r0); // Context and name. 2543 __ CallRuntime(Runtime::kInitializeLegacyConstLookupSlot, 3); 2544 } else { 2545 DCHECK(var->IsStackAllocated() || var->IsContextSlot()); 2546 Label skip; 2547 MemOperand location = VarOperand(var, r1); 2548 __ ldr(r2, location); 2549 __ CompareRoot(r2, Heap::kTheHoleValueRootIndex); 2550 __ b(ne, &skip); 2551 EmitStoreToStackLocalOrContextSlot(var, location); 2552 __ bind(&skip); 2553 } 2554 2555 } else if (var->mode() == LET && op != Token::INIT_LET) { 2556 // Non-initializing assignment to let variable needs a write barrier. 2557 DCHECK(!var->IsLookupSlot()); 2558 DCHECK(var->IsStackAllocated() || var->IsContextSlot()); 2559 Label assign; 2560 MemOperand location = VarOperand(var, r1); 2561 __ ldr(r3, location); 2562 __ CompareRoot(r3, Heap::kTheHoleValueRootIndex); 2563 __ b(ne, &assign); 2564 __ mov(r3, Operand(var->name())); 2565 __ push(r3); 2566 __ CallRuntime(Runtime::kThrowReferenceError, 1); 2567 // Perform the assignment. 2568 __ bind(&assign); 2569 EmitStoreToStackLocalOrContextSlot(var, location); 2570 2571 } else if (!var->is_const_mode() || op == Token::INIT_CONST) { 2572 if (var->IsLookupSlot()) { 2573 // Assignment to var. 2574 __ push(r0); // Value. 2575 __ mov(r1, Operand(var->name())); 2576 __ mov(r0, Operand(Smi::FromInt(strict_mode()))); 2577 __ Push(cp, r1, r0); // Context, name, strict mode. 2578 __ CallRuntime(Runtime::kStoreLookupSlot, 4); 2579 } else { 2580 // Assignment to var or initializing assignment to let/const in harmony 2581 // mode. 2582 DCHECK((var->IsStackAllocated() || var->IsContextSlot())); 2583 MemOperand location = VarOperand(var, r1); 2584 if (generate_debug_code_ && op == Token::INIT_LET) { 2585 // Check for an uninitialized let binding. 2586 __ ldr(r2, location); 2587 __ CompareRoot(r2, Heap::kTheHoleValueRootIndex); 2588 __ Check(eq, kLetBindingReInitialization); 2589 } 2590 EmitStoreToStackLocalOrContextSlot(var, location); 2591 } 2592 } 2593 // Non-initializing assignments to consts are ignored. 
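// For example, with illustrative source 'const x = 1; x = 2;' in
// sloppy mode the second assignment is silently dropped and x stays 1.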
2594} 2595 2596 2597void FullCodeGenerator::EmitNamedPropertyAssignment(Assignment* expr) { 2598 // Assignment to a property, using a named store IC. 2599 Property* prop = expr->target()->AsProperty(); 2600 DCHECK(prop != NULL); 2601 DCHECK(prop->key()->IsLiteral()); 2602 2603 // Record source code position before IC call. 2604 SetSourcePosition(expr->position()); 2605 __ mov(StoreDescriptor::NameRegister(), 2606 Operand(prop->key()->AsLiteral()->value())); 2607 __ pop(StoreDescriptor::ReceiverRegister()); 2608 CallStoreIC(expr->AssignmentFeedbackId()); 2609 2610 PrepareForBailoutForId(expr->AssignmentId(), TOS_REG); 2611 context()->Plug(r0); 2612} 2613 2614 2615void FullCodeGenerator::EmitKeyedPropertyAssignment(Assignment* expr) { 2616 // Assignment to a property, using a keyed store IC. 2617 2618 // Record source code position before IC call. 2619 SetSourcePosition(expr->position()); 2620 __ Pop(StoreDescriptor::ReceiverRegister(), StoreDescriptor::NameRegister()); 2621 DCHECK(StoreDescriptor::ValueRegister().is(r0)); 2622 2623 Handle<Code> ic = CodeFactory::KeyedStoreIC(isolate(), strict_mode()).code(); 2624 CallIC(ic, expr->AssignmentFeedbackId()); 2625 2626 PrepareForBailoutForId(expr->AssignmentId(), TOS_REG); 2627 context()->Plug(r0); 2628} 2629 2630 2631void FullCodeGenerator::VisitProperty(Property* expr) { 2632 Comment cmnt(masm_, "[ Property"); 2633 Expression* key = expr->key(); 2634 2635 if (key->IsPropertyName()) { 2636 if (!expr->IsSuperAccess()) { 2637 VisitForAccumulatorValue(expr->obj()); 2638 __ Move(LoadDescriptor::ReceiverRegister(), r0); 2639 EmitNamedPropertyLoad(expr); 2640 } else { 2641 EmitNamedSuperPropertyLoad(expr); 2642 } 2643 PrepareForBailoutForId(expr->LoadId(), TOS_REG); 2644 context()->Plug(r0); 2645 } else { 2646 VisitForStackValue(expr->obj()); 2647 VisitForAccumulatorValue(expr->key()); 2648 __ Move(LoadDescriptor::NameRegister(), r0); 2649 __ pop(LoadDescriptor::ReceiverRegister()); 2650 EmitKeyedPropertyLoad(expr); 2651 context()->Plug(r0); 2652 } 2653} 2654 2655 2656void FullCodeGenerator::CallIC(Handle<Code> code, 2657 TypeFeedbackId ast_id) { 2658 ic_total_count_++; 2659 // All calls must have a predictable size in full-codegen code to ensure that 2660 // the debugger can patch them correctly. 2661 __ Call(code, RelocInfo::CODE_TARGET, ast_id, al, 2662 NEVER_INLINE_TARGET_ADDRESS); 2663} 2664 2665 2666// Code common for calls using the IC. 2667void FullCodeGenerator::EmitCallWithLoadIC(Call* expr) { 2668 Expression* callee = expr->expression(); 2669 2670 CallICState::CallType call_type = 2671 callee->IsVariableProxy() ? CallICState::FUNCTION : CallICState::METHOD; 2672 2673 // Get the target function. 2674 if (call_type == CallICState::FUNCTION) { 2675 { StackValueContext context(this); 2676 EmitVariableLoad(callee->AsVariableProxy()); 2677 PrepareForBailout(callee, NO_REGISTERS); 2678 } 2679 // Push undefined as receiver. This is patched in the method prologue if it 2680 // is a sloppy mode method. 2681 __ Push(isolate()->factory()->undefined_value()); 2682 } else { 2683 // Load the function from the receiver. 2684 DCHECK(callee->IsProperty()); 2685 DCHECK(!callee->AsProperty()->IsSuperAccess()); 2686 __ ldr(LoadDescriptor::ReceiverRegister(), MemOperand(sp, 0)); 2687 EmitNamedPropertyLoad(callee->AsProperty()); 2688 PrepareForBailoutForId(callee->AsProperty()->LoadId(), TOS_REG); 2689 // Push the target function under the receiver. 
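// A sketch of the stack rewrite performed below (top of stack first):
//   before: [ receiver, ... ]
//   after:  [ receiver, function, ... ]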
2690 __ ldr(ip, MemOperand(sp, 0));
2691 __ push(ip);
2692 __ str(r0, MemOperand(sp, kPointerSize));
2693 }
2694
2695 EmitCall(expr, call_type);
2696}
2697
2698
2699void FullCodeGenerator::EmitSuperCallWithLoadIC(Call* expr) {
2700 Expression* callee = expr->expression();
2701 DCHECK(callee->IsProperty());
2702 Property* prop = callee->AsProperty();
2703 DCHECK(prop->IsSuperAccess());
2704
2705 SetSourcePosition(prop->position());
2706 Literal* key = prop->key()->AsLiteral();
2707 DCHECK(!key->value()->IsSmi());
2708 // Load the function from the receiver.
2709 const Register scratch = r1;
2710 SuperReference* super_ref = prop->obj()->AsSuperReference();
2711 EmitLoadHomeObject(super_ref);
2712 __ Push(r0);
2713 VisitForAccumulatorValue(super_ref->this_var());
2714 __ Push(r0);
2715 __ ldr(scratch, MemOperand(sp, kPointerSize));
2716 __ Push(scratch);
2717 __ Push(r0);
2718 __ Push(key->value());
2719
2720 // Stack here:
2721 // - home_object
2722 // - this (receiver)
2723 // - home_object <-- LoadFromSuper will pop here and below.
2724 // - this (receiver)
2725 // - key
2726 __ CallRuntime(Runtime::kLoadFromSuper, 3);
2727
2728 // Replace home_object with target function.
2729 __ str(r0, MemOperand(sp, kPointerSize));
2730
2731 // Stack here:
2732 // - target function
2733 // - this (receiver)
2734 EmitCall(expr, CallICState::METHOD);
2735}
2736
2737
2738// Code common for calls using the IC.
2739void FullCodeGenerator::EmitKeyedCallWithLoadIC(Call* expr,
2740 Expression* key) {
2741 // Load the key.
2742 VisitForAccumulatorValue(key);
2743
2744 Expression* callee = expr->expression();
2745
2746 // Load the function from the receiver.
2747 DCHECK(callee->IsProperty());
2748 __ ldr(LoadDescriptor::ReceiverRegister(), MemOperand(sp, 0));
2749 __ Move(LoadDescriptor::NameRegister(), r0);
2750 EmitKeyedPropertyLoad(callee->AsProperty());
2751 PrepareForBailoutForId(callee->AsProperty()->LoadId(), TOS_REG);
2752
2753 // Push the target function under the receiver.
2754 __ ldr(ip, MemOperand(sp, 0));
2755 __ push(ip);
2756 __ str(r0, MemOperand(sp, kPointerSize));
2757
2758 EmitCall(expr, CallICState::METHOD);
2759}
2760
2761
2762void FullCodeGenerator::EmitCall(Call* expr, CallICState::CallType call_type) {
2763 // Load the arguments.
2764 ZoneList<Expression*>* args = expr->arguments();
2765 int arg_count = args->length();
2766 { PreservePositionScope scope(masm()->positions_recorder());
2767 for (int i = 0; i < arg_count; i++) {
2768 VisitForStackValue(args->at(i));
2769 }
2770 }
2771
2772 // Record source position of the IC call.
2773 SetSourcePosition(expr->position());
2774 Handle<Code> ic = CallIC::initialize_stub(
2775 isolate(), arg_count, call_type);
2776 __ mov(r3, Operand(Smi::FromInt(expr->CallFeedbackSlot())));
2777 __ ldr(r1, MemOperand(sp, (arg_count + 1) * kPointerSize));
2778 // Don't assign a type feedback id to the IC, since type feedback is provided
2779 // by the vector above.
2780 CallIC(ic);
2781
2782 RecordJSReturnSite(expr);
2783 // Restore context register.
2784 __ ldr(cp, MemOperand(fp, StandardFrameConstants::kContextOffset));
2785 context()->DropAndPlug(1, r0);
2786}
2787
2788
2789void FullCodeGenerator::EmitResolvePossiblyDirectEval(int arg_count) {
2790 // r5: copy of the first argument or undefined if it doesn't exist.
2791 if (arg_count > 0) {
2792 __ ldr(r5, MemOperand(sp, arg_count * kPointerSize));
2793 } else {
2794 __ LoadRoot(r5, Heap::kUndefinedValueRootIndex);
2795 }
2796
2797 // r4: the function of the enclosing frame.
2798 __ ldr(r4, MemOperand(fp, JavaScriptFrameConstants::kFunctionOffset));
2799
2800 // r3: the receiver of the enclosing function.
2801 int receiver_offset = 2 + info_->scope()->num_parameters();
2802 __ ldr(r3, MemOperand(fp, receiver_offset * kPointerSize));
2803
2804 // r2: strict mode.
2805 __ mov(r2, Operand(Smi::FromInt(strict_mode())));
2806
2807 // r1: the start position of the scope the call resides in.
2808 __ mov(r1, Operand(Smi::FromInt(scope()->start_position())));
2809
2810 // Do the runtime call; the callee itself was already pushed by VisitCall,
making six arguments in total.
2811 __ Push(r5);
2812 __ Push(r4, r3, r2, r1);
2813 __ CallRuntime(Runtime::kResolvePossiblyDirectEval, 6);
2814}
2815
2816
2817void FullCodeGenerator::VisitCall(Call* expr) {
2818#ifdef DEBUG
2819 // We want to verify that RecordJSReturnSite gets called on all paths
2820 // through this function. Avoid early returns.
2821 expr->return_is_recorded_ = false;
2822#endif
2823
2824 Comment cmnt(masm_, "[ Call");
2825 Expression* callee = expr->expression();
2826 Call::CallType call_type = expr->GetCallType(isolate());
2827
2828 if (call_type == Call::POSSIBLY_EVAL_CALL) {
2829 // In a call to eval, we first call Runtime::kResolvePossiblyDirectEval
2830 // to resolve the function we need to call and the receiver of the
2831 // call. Then we call the resolved function using the given
2832 // arguments.
2833 ZoneList<Expression*>* args = expr->arguments();
2834 int arg_count = args->length();
2835
2836 { PreservePositionScope pos_scope(masm()->positions_recorder());
2837 VisitForStackValue(callee);
2838 __ LoadRoot(r2, Heap::kUndefinedValueRootIndex);
2839 __ push(r2); // Reserved receiver slot.
2840
2841 // Push the arguments.
2842 for (int i = 0; i < arg_count; i++) {
2843 VisitForStackValue(args->at(i));
2844 }
2845
2846 // Push a copy of the function (found below the arguments) and
2847 // resolve eval.
2848 __ ldr(r1, MemOperand(sp, (arg_count + 1) * kPointerSize));
2849 __ push(r1);
2850 EmitResolvePossiblyDirectEval(arg_count);
2851
2852 // The runtime call returns a pair of values in r0 (function) and
2853 // r1 (receiver). Touch up the stack with the right values.
2854 __ str(r0, MemOperand(sp, (arg_count + 1) * kPointerSize));
2855 __ str(r1, MemOperand(sp, arg_count * kPointerSize));
2856 }
2857
2858 // Record source position for debugger.
2859 SetSourcePosition(expr->position());
2860 CallFunctionStub stub(isolate(), arg_count, NO_CALL_FUNCTION_FLAGS);
2861 __ ldr(r1, MemOperand(sp, (arg_count + 1) * kPointerSize));
2862 __ CallStub(&stub);
2863 RecordJSReturnSite(expr);
2864 // Restore context register.
2865 __ ldr(cp, MemOperand(fp, StandardFrameConstants::kContextOffset));
2866 context()->DropAndPlug(1, r0);
2867 } else if (call_type == Call::GLOBAL_CALL) {
2868 EmitCallWithLoadIC(expr);
2869
2870 } else if (call_type == Call::LOOKUP_SLOT_CALL) {
2871 // Call to a lookup slot (dynamically introduced variable).
2872 VariableProxy* proxy = callee->AsVariableProxy();
2873 Label slow, done;
2874
2875 { PreservePositionScope scope(masm()->positions_recorder());
2876 // Generate code for loading from variables potentially shadowed
2877 // by eval-introduced variables.
2878 EmitDynamicLookupFastCase(proxy, NOT_INSIDE_TYPEOF, &slow, &done);
2879 }
2880
2881 __ bind(&slow);
2882 // Call the runtime to find the function to call (returned in r0)
2883 // and the object holding it (returned in r1).
2884 DCHECK(!context_register().is(r2));
2885 __ mov(r2, Operand(proxy->name()));
2886 __ Push(context_register(), r2);
2887 __ CallRuntime(Runtime::kLoadLookupSlot, 2);
2888 __ Push(r0, r1); // Function, receiver.
2889
2890 // If fast case code has been generated, emit code to push the
2891 // function and receiver and have the slow path jump around this
2892 // code.
2893 if (done.is_linked()) {
2894 Label call;
2895 __ b(&call);
2896 __ bind(&done);
2897 // Push function.
2898 __ push(r0);
2899 // The receiver is implicitly the global receiver. Indicate this
2900 // by passing undefined to the call function stub.
2901 __ LoadRoot(r1, Heap::kUndefinedValueRootIndex);
2902 __ push(r1);
2903 __ bind(&call);
2904 }
2905
2906 // The receiver is either the global receiver or an object found
2907 // by LoadContextSlot.
2908 EmitCall(expr);
2909 } else if (call_type == Call::PROPERTY_CALL) {
2910 Property* property = callee->AsProperty();
2911 bool is_named_call = property->key()->IsPropertyName();
2912 // super.x() is handled in EmitSuperCallWithLoadIC.
2913 if (property->IsSuperAccess() && is_named_call) {
2914 EmitSuperCallWithLoadIC(expr);
2915 } else {
2916 {
2917 PreservePositionScope scope(masm()->positions_recorder());
2918 VisitForStackValue(property->obj());
2919 }
2920 if (is_named_call) {
2921 EmitCallWithLoadIC(expr);
2922 } else {
2923 EmitKeyedCallWithLoadIC(expr, property->key());
2924 }
2925 }
2926 } else {
2927 DCHECK(call_type == Call::OTHER_CALL);
2928 // Call to an arbitrary expression not handled specially above.
2929 { PreservePositionScope scope(masm()->positions_recorder());
2930 VisitForStackValue(callee);
2931 }
2932 __ LoadRoot(r1, Heap::kUndefinedValueRootIndex);
2933 __ push(r1);
2934 // Emit function call.
2935 EmitCall(expr);
2936 }
2937
2938#ifdef DEBUG
2939 // RecordJSReturnSite should have been called.
2940 DCHECK(expr->return_is_recorded_);
2941#endif
2942}
2943
2944
2945void FullCodeGenerator::VisitCallNew(CallNew* expr) {
2946 Comment cmnt(masm_, "[ CallNew");
2947 // According to ECMA-262, section 11.2.2, page 44, the function
2948 // expression in new calls must be evaluated before the
2949 // arguments.
2950
2951 // Push constructor on the stack. If it's not a function it's used as
2952 // receiver for CALL_NON_FUNCTION, otherwise the value on the stack is
2953 // ignored.
2954 VisitForStackValue(expr->expression());
2955
2956 // Push the arguments ("left-to-right") on the stack.
2957 ZoneList<Expression*>* args = expr->arguments();
2958 int arg_count = args->length();
2959 for (int i = 0; i < arg_count; i++) {
2960 VisitForStackValue(args->at(i));
2961 }
2962
2963 // Call the construct call builtin that handles allocation and
2964 // constructor invocation.
2965 SetSourcePosition(expr->position());
2966
2967 // Load function and argument count into r1 and r0.
2968 __ mov(r0, Operand(arg_count));
2969 __ ldr(r1, MemOperand(sp, arg_count * kPointerSize));
2970
2971 // Record call targets in unoptimized code.
2972 if (FLAG_pretenuring_call_new) { 2973 EnsureSlotContainsAllocationSite(expr->AllocationSiteFeedbackSlot()); 2974 DCHECK(expr->AllocationSiteFeedbackSlot() == 2975 expr->CallNewFeedbackSlot() + 1); 2976 } 2977 2978 __ Move(r2, FeedbackVector()); 2979 __ mov(r3, Operand(Smi::FromInt(expr->CallNewFeedbackSlot()))); 2980 2981 CallConstructStub stub(isolate(), RECORD_CONSTRUCTOR_TARGET); 2982 __ Call(stub.GetCode(), RelocInfo::CONSTRUCT_CALL); 2983 PrepareForBailoutForId(expr->ReturnId(), TOS_REG); 2984 context()->Plug(r0); 2985} 2986 2987 2988void FullCodeGenerator::EmitIsSmi(CallRuntime* expr) { 2989 ZoneList<Expression*>* args = expr->arguments(); 2990 DCHECK(args->length() == 1); 2991 2992 VisitForAccumulatorValue(args->at(0)); 2993 2994 Label materialize_true, materialize_false; 2995 Label* if_true = NULL; 2996 Label* if_false = NULL; 2997 Label* fall_through = NULL; 2998 context()->PrepareTest(&materialize_true, &materialize_false, 2999 &if_true, &if_false, &fall_through); 3000 3001 PrepareForBailoutBeforeSplit(expr, true, if_true, if_false); 3002 __ SmiTst(r0); 3003 Split(eq, if_true, if_false, fall_through); 3004 3005 context()->Plug(if_true, if_false); 3006} 3007 3008 3009void FullCodeGenerator::EmitIsNonNegativeSmi(CallRuntime* expr) { 3010 ZoneList<Expression*>* args = expr->arguments(); 3011 DCHECK(args->length() == 1); 3012 3013 VisitForAccumulatorValue(args->at(0)); 3014 3015 Label materialize_true, materialize_false; 3016 Label* if_true = NULL; 3017 Label* if_false = NULL; 3018 Label* fall_through = NULL; 3019 context()->PrepareTest(&materialize_true, &materialize_false, 3020 &if_true, &if_false, &fall_through); 3021 3022 PrepareForBailoutBeforeSplit(expr, true, if_true, if_false); 3023 __ NonNegativeSmiTst(r0); 3024 Split(eq, if_true, if_false, fall_through); 3025 3026 context()->Plug(if_true, if_false); 3027} 3028 3029 3030void FullCodeGenerator::EmitIsObject(CallRuntime* expr) { 3031 ZoneList<Expression*>* args = expr->arguments(); 3032 DCHECK(args->length() == 1); 3033 3034 VisitForAccumulatorValue(args->at(0)); 3035 3036 Label materialize_true, materialize_false; 3037 Label* if_true = NULL; 3038 Label* if_false = NULL; 3039 Label* fall_through = NULL; 3040 context()->PrepareTest(&materialize_true, &materialize_false, 3041 &if_true, &if_false, &fall_through); 3042 3043 __ JumpIfSmi(r0, if_false); 3044 __ LoadRoot(ip, Heap::kNullValueRootIndex); 3045 __ cmp(r0, ip); 3046 __ b(eq, if_true); 3047 __ ldr(r2, FieldMemOperand(r0, HeapObject::kMapOffset)); 3048 // Undetectable objects behave like undefined when tested with typeof. 
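// (The classic example is document.all in browser embedders: it is
// callable, yet typeof document.all evaluates to "undefined".)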
3049 __ ldrb(r1, FieldMemOperand(r2, Map::kBitFieldOffset)); 3050 __ tst(r1, Operand(1 << Map::kIsUndetectable)); 3051 __ b(ne, if_false); 3052 __ ldrb(r1, FieldMemOperand(r2, Map::kInstanceTypeOffset)); 3053 __ cmp(r1, Operand(FIRST_NONCALLABLE_SPEC_OBJECT_TYPE)); 3054 __ b(lt, if_false); 3055 __ cmp(r1, Operand(LAST_NONCALLABLE_SPEC_OBJECT_TYPE)); 3056 PrepareForBailoutBeforeSplit(expr, true, if_true, if_false); 3057 Split(le, if_true, if_false, fall_through); 3058 3059 context()->Plug(if_true, if_false); 3060} 3061 3062 3063void FullCodeGenerator::EmitIsSpecObject(CallRuntime* expr) { 3064 ZoneList<Expression*>* args = expr->arguments(); 3065 DCHECK(args->length() == 1); 3066 3067 VisitForAccumulatorValue(args->at(0)); 3068 3069 Label materialize_true, materialize_false; 3070 Label* if_true = NULL; 3071 Label* if_false = NULL; 3072 Label* fall_through = NULL; 3073 context()->PrepareTest(&materialize_true, &materialize_false, 3074 &if_true, &if_false, &fall_through); 3075 3076 __ JumpIfSmi(r0, if_false); 3077 __ CompareObjectType(r0, r1, r1, FIRST_SPEC_OBJECT_TYPE); 3078 PrepareForBailoutBeforeSplit(expr, true, if_true, if_false); 3079 Split(ge, if_true, if_false, fall_through); 3080 3081 context()->Plug(if_true, if_false); 3082} 3083 3084 3085void FullCodeGenerator::EmitIsUndetectableObject(CallRuntime* expr) { 3086 ZoneList<Expression*>* args = expr->arguments(); 3087 DCHECK(args->length() == 1); 3088 3089 VisitForAccumulatorValue(args->at(0)); 3090 3091 Label materialize_true, materialize_false; 3092 Label* if_true = NULL; 3093 Label* if_false = NULL; 3094 Label* fall_through = NULL; 3095 context()->PrepareTest(&materialize_true, &materialize_false, 3096 &if_true, &if_false, &fall_through); 3097 3098 __ JumpIfSmi(r0, if_false); 3099 __ ldr(r1, FieldMemOperand(r0, HeapObject::kMapOffset)); 3100 __ ldrb(r1, FieldMemOperand(r1, Map::kBitFieldOffset)); 3101 __ tst(r1, Operand(1 << Map::kIsUndetectable)); 3102 PrepareForBailoutBeforeSplit(expr, true, if_true, if_false); 3103 Split(ne, if_true, if_false, fall_through); 3104 3105 context()->Plug(if_true, if_false); 3106} 3107 3108 3109void FullCodeGenerator::EmitIsStringWrapperSafeForDefaultValueOf( 3110 CallRuntime* expr) { 3111 ZoneList<Expression*>* args = expr->arguments(); 3112 DCHECK(args->length() == 1); 3113 3114 VisitForAccumulatorValue(args->at(0)); 3115 3116 Label materialize_true, materialize_false, skip_lookup; 3117 Label* if_true = NULL; 3118 Label* if_false = NULL; 3119 Label* fall_through = NULL; 3120 context()->PrepareTest(&materialize_true, &materialize_false, 3121 &if_true, &if_false, &fall_through); 3122 3123 __ AssertNotSmi(r0); 3124 3125 __ ldr(r1, FieldMemOperand(r0, HeapObject::kMapOffset)); 3126 __ ldrb(ip, FieldMemOperand(r1, Map::kBitField2Offset)); 3127 __ tst(ip, Operand(1 << Map::kStringWrapperSafeForDefaultValueOf)); 3128 __ b(ne, &skip_lookup); 3129 3130 // Check for fast case object. Generate false result for slow case object. 3131 __ ldr(r2, FieldMemOperand(r0, JSObject::kPropertiesOffset)); 3132 __ ldr(r2, FieldMemOperand(r2, HeapObject::kMapOffset)); 3133 __ LoadRoot(ip, Heap::kHashTableMapRootIndex); 3134 __ cmp(r2, ip); 3135 __ b(eq, if_false); 3136 3137 // Look for valueOf name in the descriptor array, and indicate false if 3138 // found. Since we omit an enumeration index check, if it is added via a 3139 // transition that shares its descriptor array, this is a false positive. 3140 Label entry, loop, done; 3141 3142 // Skip loop if no descriptors are valid. 
3143 __ NumberOfOwnDescriptors(r3, r1); 3144 __ cmp(r3, Operand::Zero()); 3145 __ b(eq, &done); 3146 3147 __ LoadInstanceDescriptors(r1, r4); 3148 // r4: descriptor array. 3149 // r3: valid entries in the descriptor array. 3150 __ mov(ip, Operand(DescriptorArray::kDescriptorSize)); 3151 __ mul(r3, r3, ip); 3152 // Calculate location of the first key name. 3153 __ add(r4, r4, Operand(DescriptorArray::kFirstOffset - kHeapObjectTag)); 3154 // Calculate the end of the descriptor array. 3155 __ mov(r2, r4); 3156 __ add(r2, r2, Operand(r3, LSL, kPointerSizeLog2)); 3157 3158 // Loop through all the keys in the descriptor array. If one of these is the 3159 // string "valueOf" the result is false. 3160 // The use of ip to store the valueOf string assumes that it is not otherwise 3161 // used in the loop below. 3162 __ mov(ip, Operand(isolate()->factory()->value_of_string())); 3163 __ jmp(&entry); 3164 __ bind(&loop); 3165 __ ldr(r3, MemOperand(r4, 0)); 3166 __ cmp(r3, ip); 3167 __ b(eq, if_false); 3168 __ add(r4, r4, Operand(DescriptorArray::kDescriptorSize * kPointerSize)); 3169 __ bind(&entry); 3170 __ cmp(r4, Operand(r2)); 3171 __ b(ne, &loop); 3172 3173 __ bind(&done); 3174 3175 // Set the bit in the map to indicate that there is no local valueOf field. 3176 __ ldrb(r2, FieldMemOperand(r1, Map::kBitField2Offset)); 3177 __ orr(r2, r2, Operand(1 << Map::kStringWrapperSafeForDefaultValueOf)); 3178 __ strb(r2, FieldMemOperand(r1, Map::kBitField2Offset)); 3179 3180 __ bind(&skip_lookup); 3181 3182 // If a valueOf property is not found on the object check that its 3183 // prototype is the un-modified String prototype. If not result is false. 3184 __ ldr(r2, FieldMemOperand(r1, Map::kPrototypeOffset)); 3185 __ JumpIfSmi(r2, if_false); 3186 __ ldr(r2, FieldMemOperand(r2, HeapObject::kMapOffset)); 3187 __ ldr(r3, ContextOperand(cp, Context::GLOBAL_OBJECT_INDEX)); 3188 __ ldr(r3, FieldMemOperand(r3, GlobalObject::kNativeContextOffset)); 3189 __ ldr(r3, ContextOperand(r3, Context::STRING_FUNCTION_PROTOTYPE_MAP_INDEX)); 3190 __ cmp(r2, r3); 3191 PrepareForBailoutBeforeSplit(expr, true, if_true, if_false); 3192 Split(eq, if_true, if_false, fall_through); 3193 3194 context()->Plug(if_true, if_false); 3195} 3196 3197 3198void FullCodeGenerator::EmitIsFunction(CallRuntime* expr) { 3199 ZoneList<Expression*>* args = expr->arguments(); 3200 DCHECK(args->length() == 1); 3201 3202 VisitForAccumulatorValue(args->at(0)); 3203 3204 Label materialize_true, materialize_false; 3205 Label* if_true = NULL; 3206 Label* if_false = NULL; 3207 Label* fall_through = NULL; 3208 context()->PrepareTest(&materialize_true, &materialize_false, 3209 &if_true, &if_false, &fall_through); 3210 3211 __ JumpIfSmi(r0, if_false); 3212 __ CompareObjectType(r0, r1, r2, JS_FUNCTION_TYPE); 3213 PrepareForBailoutBeforeSplit(expr, true, if_true, if_false); 3214 Split(eq, if_true, if_false, fall_through); 3215 3216 context()->Plug(if_true, if_false); 3217} 3218 3219 3220void FullCodeGenerator::EmitIsMinusZero(CallRuntime* expr) { 3221 ZoneList<Expression*>* args = expr->arguments(); 3222 DCHECK(args->length() == 1); 3223 3224 VisitForAccumulatorValue(args->at(0)); 3225 3226 Label materialize_true, materialize_false; 3227 Label* if_true = NULL; 3228 Label* if_false = NULL; 3229 Label* fall_through = NULL; 3230 context()->PrepareTest(&materialize_true, &materialize_false, 3231 &if_true, &if_false, &fall_through); 3232 3233 __ CheckMap(r0, r1, Heap::kHeapNumberMapRootIndex, if_false, DO_SMI_CHECK); 3234 __ ldr(r2, FieldMemOperand(r0, 
HeapNumber::kExponentOffset)); 3235 __ ldr(r1, FieldMemOperand(r0, HeapNumber::kMantissaOffset)); 3236 __ cmp(r2, Operand(0x80000000)); 3237 __ cmp(r1, Operand(0x00000000), eq); 3238 3239 PrepareForBailoutBeforeSplit(expr, true, if_true, if_false); 3240 Split(eq, if_true, if_false, fall_through); 3241 3242 context()->Plug(if_true, if_false); 3243} 3244 3245 3246void FullCodeGenerator::EmitIsArray(CallRuntime* expr) { 3247 ZoneList<Expression*>* args = expr->arguments(); 3248 DCHECK(args->length() == 1); 3249 3250 VisitForAccumulatorValue(args->at(0)); 3251 3252 Label materialize_true, materialize_false; 3253 Label* if_true = NULL; 3254 Label* if_false = NULL; 3255 Label* fall_through = NULL; 3256 context()->PrepareTest(&materialize_true, &materialize_false, 3257 &if_true, &if_false, &fall_through); 3258 3259 __ JumpIfSmi(r0, if_false); 3260 __ CompareObjectType(r0, r1, r1, JS_ARRAY_TYPE); 3261 PrepareForBailoutBeforeSplit(expr, true, if_true, if_false); 3262 Split(eq, if_true, if_false, fall_through); 3263 3264 context()->Plug(if_true, if_false); 3265} 3266 3267 3268void FullCodeGenerator::EmitIsRegExp(CallRuntime* expr) { 3269 ZoneList<Expression*>* args = expr->arguments(); 3270 DCHECK(args->length() == 1); 3271 3272 VisitForAccumulatorValue(args->at(0)); 3273 3274 Label materialize_true, materialize_false; 3275 Label* if_true = NULL; 3276 Label* if_false = NULL; 3277 Label* fall_through = NULL; 3278 context()->PrepareTest(&materialize_true, &materialize_false, 3279 &if_true, &if_false, &fall_through); 3280 3281 __ JumpIfSmi(r0, if_false); 3282 __ CompareObjectType(r0, r1, r1, JS_REGEXP_TYPE); 3283 PrepareForBailoutBeforeSplit(expr, true, if_true, if_false); 3284 Split(eq, if_true, if_false, fall_through); 3285 3286 context()->Plug(if_true, if_false); 3287} 3288 3289 3290 3291void FullCodeGenerator::EmitIsConstructCall(CallRuntime* expr) { 3292 DCHECK(expr->arguments()->length() == 0); 3293 3294 Label materialize_true, materialize_false; 3295 Label* if_true = NULL; 3296 Label* if_false = NULL; 3297 Label* fall_through = NULL; 3298 context()->PrepareTest(&materialize_true, &materialize_false, 3299 &if_true, &if_false, &fall_through); 3300 3301 // Get the frame pointer for the calling frame. 3302 __ ldr(r2, MemOperand(fp, StandardFrameConstants::kCallerFPOffset)); 3303 3304 // Skip the arguments adaptor frame if it exists. 3305 __ ldr(r1, MemOperand(r2, StandardFrameConstants::kContextOffset)); 3306 __ cmp(r1, Operand(Smi::FromInt(StackFrame::ARGUMENTS_ADAPTOR))); 3307 __ ldr(r2, MemOperand(r2, StandardFrameConstants::kCallerFPOffset), eq); 3308 3309 // Check the marker in the calling frame. 3310 __ ldr(r1, MemOperand(r2, StandardFrameConstants::kMarkerOffset)); 3311 __ cmp(r1, Operand(Smi::FromInt(StackFrame::CONSTRUCT))); 3312 PrepareForBailoutBeforeSplit(expr, true, if_true, if_false); 3313 Split(eq, if_true, if_false, fall_through); 3314 3315 context()->Plug(if_true, if_false); 3316} 3317 3318 3319void FullCodeGenerator::EmitObjectEquals(CallRuntime* expr) { 3320 ZoneList<Expression*>* args = expr->arguments(); 3321 DCHECK(args->length() == 2); 3322 3323 // Load the two objects into registers and perform the comparison. 
3324 VisitForStackValue(args->at(0));
3325 VisitForAccumulatorValue(args->at(1));
3326
3327 Label materialize_true, materialize_false;
3328 Label* if_true = NULL;
3329 Label* if_false = NULL;
3330 Label* fall_through = NULL;
3331 context()->PrepareTest(&materialize_true, &materialize_false,
3332 &if_true, &if_false, &fall_through);
3333
3334 __ pop(r1);
3335 __ cmp(r0, r1);
3336 PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
3337 Split(eq, if_true, if_false, fall_through);
3338
3339 context()->Plug(if_true, if_false);
3340}
3341
3342
3343void FullCodeGenerator::EmitArguments(CallRuntime* expr) {
3344 ZoneList<Expression*>* args = expr->arguments();
3345 DCHECK(args->length() == 1);
3346
3347 // ArgumentsAccessStub expects the key in r1 and the formal
3348 // parameter count in r0.
3349 VisitForAccumulatorValue(args->at(0));
3350 __ mov(r1, r0);
3351 __ mov(r0, Operand(Smi::FromInt(info_->scope()->num_parameters())));
3352 ArgumentsAccessStub stub(isolate(), ArgumentsAccessStub::READ_ELEMENT);
3353 __ CallStub(&stub);
3354 context()->Plug(r0);
3355}
3356
3357
3358void FullCodeGenerator::EmitArgumentsLength(CallRuntime* expr) {
3359 DCHECK(expr->arguments()->length() == 0);
3360
3361 // Get the number of formal parameters.
3362 __ mov(r0, Operand(Smi::FromInt(info_->scope()->num_parameters())));
3363
3364 // Check if the calling frame is an arguments adaptor frame.
3365 __ ldr(r2, MemOperand(fp, StandardFrameConstants::kCallerFPOffset));
3366 __ ldr(r3, MemOperand(r2, StandardFrameConstants::kContextOffset));
3367 __ cmp(r3, Operand(Smi::FromInt(StackFrame::ARGUMENTS_ADAPTOR)));
3368
3369 // Arguments adaptor case: Read the arguments length from the
3370 // adaptor frame.
3371 __ ldr(r0, MemOperand(r2, ArgumentsAdaptorFrameConstants::kLengthOffset), eq);
3372
3373 context()->Plug(r0);
3374}
3375
3376
3377void FullCodeGenerator::EmitClassOf(CallRuntime* expr) {
3378 ZoneList<Expression*>* args = expr->arguments();
3379 DCHECK(args->length() == 1);
3380 Label done, null, function, non_function_constructor;
3381
3382 VisitForAccumulatorValue(args->at(0));
3383
3384 // If the object is a smi, we return null.
3385 __ JumpIfSmi(r0, &null);
3386
3387 // Check that the object is a JS object but take special care of JS
3388 // functions to make sure they have 'Function' as their class.
3389 // Assume that there are only two callable types, and one of them is at
3390 // either end of the type range for JS object types. Saves extra comparisons.
3391 STATIC_ASSERT(NUM_OF_CALLABLE_SPEC_OBJECT_TYPES == 2);
3392 __ CompareObjectType(r0, r0, r1, FIRST_SPEC_OBJECT_TYPE);
3393 // Map is now in r0.
3394 __ b(lt, &null);
3395 STATIC_ASSERT(FIRST_NONCALLABLE_SPEC_OBJECT_TYPE ==
3396 FIRST_SPEC_OBJECT_TYPE + 1);
3397 __ b(eq, &function);
3398
3399 __ cmp(r1, Operand(LAST_SPEC_OBJECT_TYPE));
3400 STATIC_ASSERT(LAST_NONCALLABLE_SPEC_OBJECT_TYPE ==
3401 LAST_SPEC_OBJECT_TYPE - 1);
3402 __ b(eq, &function);
3403 // Assume that there is no larger type.
3404 STATIC_ASSERT(LAST_NONCALLABLE_SPEC_OBJECT_TYPE == LAST_TYPE - 1);
3405
3406 // Check if the constructor in the map is a JS function.
3407 __ ldr(r0, FieldMemOperand(r0, Map::kConstructorOffset));
3408 __ CompareObjectType(r0, r1, r1, JS_FUNCTION_TYPE);
3409 __ b(ne, &non_function_constructor);
3410
3411 // r0 now contains the constructor function. Grab the
3412 // instance class name from there.
3413 __ ldr(r0, FieldMemOperand(r0, JSFunction::kSharedFunctionInfoOffset)); 3414 __ ldr(r0, FieldMemOperand(r0, SharedFunctionInfo::kInstanceClassNameOffset)); 3415 __ b(&done); 3416 3417 // Functions have class 'Function'. 3418 __ bind(&function); 3419 __ LoadRoot(r0, Heap::kFunction_stringRootIndex); 3420 __ jmp(&done); 3421 3422 // Objects with a non-function constructor have class 'Object'. 3423 __ bind(&non_function_constructor); 3424 __ LoadRoot(r0, Heap::kObject_stringRootIndex); 3425 __ jmp(&done); 3426 3427 // Non-JS objects have class null. 3428 __ bind(&null); 3429 __ LoadRoot(r0, Heap::kNullValueRootIndex); 3430 3431 // All done. 3432 __ bind(&done); 3433 3434 context()->Plug(r0); 3435} 3436 3437 3438void FullCodeGenerator::EmitSubString(CallRuntime* expr) { 3439 // Load the arguments on the stack and call the stub. 3440 SubStringStub stub(isolate()); 3441 ZoneList<Expression*>* args = expr->arguments(); 3442 DCHECK(args->length() == 3); 3443 VisitForStackValue(args->at(0)); 3444 VisitForStackValue(args->at(1)); 3445 VisitForStackValue(args->at(2)); 3446 __ CallStub(&stub); 3447 context()->Plug(r0); 3448} 3449 3450 3451void FullCodeGenerator::EmitRegExpExec(CallRuntime* expr) { 3452 // Load the arguments on the stack and call the stub. 3453 RegExpExecStub stub(isolate()); 3454 ZoneList<Expression*>* args = expr->arguments(); 3455 DCHECK(args->length() == 4); 3456 VisitForStackValue(args->at(0)); 3457 VisitForStackValue(args->at(1)); 3458 VisitForStackValue(args->at(2)); 3459 VisitForStackValue(args->at(3)); 3460 __ CallStub(&stub); 3461 context()->Plug(r0); 3462} 3463 3464 3465void FullCodeGenerator::EmitValueOf(CallRuntime* expr) { 3466 ZoneList<Expression*>* args = expr->arguments(); 3467 DCHECK(args->length() == 1); 3468 VisitForAccumulatorValue(args->at(0)); // Load the object. 3469 3470 Label done; 3471 // If the object is a smi return the object. 3472 __ JumpIfSmi(r0, &done); 3473 // If the object is not a value type, return the object. 3474 __ CompareObjectType(r0, r1, r1, JS_VALUE_TYPE); 3475 __ ldr(r0, FieldMemOperand(r0, JSValue::kValueOffset), eq); 3476 3477 __ bind(&done); 3478 context()->Plug(r0); 3479} 3480 3481 3482void FullCodeGenerator::EmitDateField(CallRuntime* expr) { 3483 ZoneList<Expression*>* args = expr->arguments(); 3484 DCHECK(args->length() == 2); 3485 DCHECK_NE(NULL, args->at(1)->AsLiteral()); 3486 Smi* index = Smi::cast(*(args->at(1)->AsLiteral()->value())); 3487 3488 VisitForAccumulatorValue(args->at(0)); // Load the object. 
3489
3490 Label runtime, done, not_date_object;
3491 Register object = r0;
3492 Register result = r0;
3493 Register scratch0 = r9;
3494 Register scratch1 = r1;
3495
3496 __ JumpIfSmi(object, &not_date_object);
3497 __ CompareObjectType(object, scratch1, scratch1, JS_DATE_TYPE);
3498 __ b(ne, &not_date_object);
3499
3500 if (index->value() == 0) {
3501 __ ldr(result, FieldMemOperand(object, JSDate::kValueOffset));
3502 __ jmp(&done);
3503 } else {
3504 if (index->value() < JSDate::kFirstUncachedField) {
3505 ExternalReference stamp = ExternalReference::date_cache_stamp(isolate());
3506 __ mov(scratch1, Operand(stamp));
3507 __ ldr(scratch1, MemOperand(scratch1));
3508 __ ldr(scratch0, FieldMemOperand(object, JSDate::kCacheStampOffset));
3509 __ cmp(scratch1, scratch0);
3510 __ b(ne, &runtime);
3511 __ ldr(result, FieldMemOperand(object, JSDate::kValueOffset +
3512 kPointerSize * index->value()));
3513 __ jmp(&done);
3514 }
3515 __ bind(&runtime);
3516 __ PrepareCallCFunction(2, scratch1);
3517 __ mov(r1, Operand(index));
3518 __ CallCFunction(ExternalReference::get_date_field_function(isolate()), 2);
3519 __ jmp(&done);
3520 }
3521
3522 __ bind(&not_date_object);
3523 __ CallRuntime(Runtime::kThrowNotDateError, 0);
3524 __ bind(&done);
3525 context()->Plug(r0);
3526}
3527
3528
3529void FullCodeGenerator::EmitOneByteSeqStringSetChar(CallRuntime* expr) {
3530 ZoneList<Expression*>* args = expr->arguments();
3531 DCHECK_EQ(3, args->length());
3532
3533 Register string = r0;
3534 Register index = r1;
3535 Register value = r2;
3536
3537 VisitForStackValue(args->at(0)); // index
3538 VisitForStackValue(args->at(1)); // value
3539 VisitForAccumulatorValue(args->at(2)); // string
3540 __ Pop(index, value);
3541
3542 if (FLAG_debug_code) {
3543 __ SmiTst(value);
3544 __ Check(eq, kNonSmiValue);
3545 __ SmiTst(index);
3546 __ Check(eq, kNonSmiIndex);
3547 __ SmiUntag(index, index);
3548 static const uint32_t one_byte_seq_type = kSeqStringTag | kOneByteStringTag;
3549 __ EmitSeqStringSetCharCheck(string, index, value, one_byte_seq_type);
3550 __ SmiTag(index, index);
3551 }
3552
3553 __ SmiUntag(value, value);
3554 __ add(ip,
3555 string,
3556 Operand(SeqOneByteString::kHeaderSize - kHeapObjectTag));
3557 __ strb(value, MemOperand(ip, index, LSR, kSmiTagSize));
3558 context()->Plug(string);
3559}
3560
3561
3562void FullCodeGenerator::EmitTwoByteSeqStringSetChar(CallRuntime* expr) {
3563 ZoneList<Expression*>* args = expr->arguments();
3564 DCHECK_EQ(3, args->length());
3565
3566 Register string = r0;
3567 Register index = r1;
3568 Register value = r2;
3569
3570 VisitForStackValue(args->at(0)); // index
3571 VisitForStackValue(args->at(1)); // value
3572 VisitForAccumulatorValue(args->at(2)); // string
3573 __ Pop(index, value);
3574
3575 if (FLAG_debug_code) {
3576 __ SmiTst(value);
3577 __ Check(eq, kNonSmiValue);
3578 __ SmiTst(index);
3579 __ Check(eq, kNonSmiIndex);
3580 __ SmiUntag(index, index);
3581 static const uint32_t two_byte_seq_type = kSeqStringTag | kTwoByteStringTag;
3582 __ EmitSeqStringSetCharCheck(string, index, value, two_byte_seq_type);
3583 __ SmiTag(index, index);
3584 }
3585
3586 __ SmiUntag(value, value);
3587 __ add(ip,
3588 string,
3589 Operand(SeqTwoByteString::kHeaderSize - kHeapObjectTag));
3590 STATIC_ASSERT(kSmiTagSize == 1 && kSmiTag == 0);
3591 __ strh(value, MemOperand(ip, index));
3592 context()->Plug(string);
3593}
3594
3595
3596
3597void FullCodeGenerator::EmitMathPow(CallRuntime* expr) {
3598 // Load the arguments on the stack and call the runtime function.
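// (With MathPowStub::ON_STACK both operands are taken from the stack and
// the stub returns the result in r0.)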
3599 ZoneList<Expression*>* args = expr->arguments(); 3600 DCHECK(args->length() == 2); 3601 VisitForStackValue(args->at(0)); 3602 VisitForStackValue(args->at(1)); 3603 MathPowStub stub(isolate(), MathPowStub::ON_STACK); 3604 __ CallStub(&stub); 3605 context()->Plug(r0); 3606} 3607 3608 3609void FullCodeGenerator::EmitSetValueOf(CallRuntime* expr) { 3610 ZoneList<Expression*>* args = expr->arguments(); 3611 DCHECK(args->length() == 2); 3612 VisitForStackValue(args->at(0)); // Load the object. 3613 VisitForAccumulatorValue(args->at(1)); // Load the value. 3614 __ pop(r1); // r0 = value. r1 = object. 3615 3616 Label done; 3617 // If the object is a smi, return the value. 3618 __ JumpIfSmi(r1, &done); 3619 3620 // If the object is not a value type, return the value. 3621 __ CompareObjectType(r1, r2, r2, JS_VALUE_TYPE); 3622 __ b(ne, &done); 3623 3624 // Store the value. 3625 __ str(r0, FieldMemOperand(r1, JSValue::kValueOffset)); 3626 // Update the write barrier. Save the value as it will be 3627 // overwritten by the write barrier code and is needed afterward. 3628 __ mov(r2, r0); 3629 __ RecordWriteField( 3630 r1, JSValue::kValueOffset, r2, r3, kLRHasBeenSaved, kDontSaveFPRegs); 3631 3632 __ bind(&done); 3633 context()->Plug(r0); 3634} 3635 3636 3637void FullCodeGenerator::EmitNumberToString(CallRuntime* expr) { 3638 ZoneList<Expression*>* args = expr->arguments(); 3639 DCHECK_EQ(args->length(), 1); 3640 // Load the argument into r0 and call the stub. 3641 VisitForAccumulatorValue(args->at(0)); 3642 3643 NumberToStringStub stub(isolate()); 3644 __ CallStub(&stub); 3645 context()->Plug(r0); 3646} 3647 3648 3649void FullCodeGenerator::EmitStringCharFromCode(CallRuntime* expr) { 3650 ZoneList<Expression*>* args = expr->arguments(); 3651 DCHECK(args->length() == 1); 3652 VisitForAccumulatorValue(args->at(0)); 3653 3654 Label done; 3655 StringCharFromCodeGenerator generator(r0, r1); 3656 generator.GenerateFast(masm_); 3657 __ jmp(&done); 3658 3659 NopRuntimeCallHelper call_helper; 3660 generator.GenerateSlow(masm_, call_helper); 3661 3662 __ bind(&done); 3663 context()->Plug(r1); 3664} 3665 3666 3667void FullCodeGenerator::EmitStringCharCodeAt(CallRuntime* expr) { 3668 ZoneList<Expression*>* args = expr->arguments(); 3669 DCHECK(args->length() == 2); 3670 VisitForStackValue(args->at(0)); 3671 VisitForAccumulatorValue(args->at(1)); 3672 3673 Register object = r1; 3674 Register index = r0; 3675 Register result = r3; 3676 3677 __ pop(object); 3678 3679 Label need_conversion; 3680 Label index_out_of_range; 3681 Label done; 3682 StringCharCodeAtGenerator generator(object, 3683 index, 3684 result, 3685 &need_conversion, 3686 &need_conversion, 3687 &index_out_of_range, 3688 STRING_INDEX_IS_NUMBER); 3689 generator.GenerateFast(masm_); 3690 __ jmp(&done); 3691 3692 __ bind(&index_out_of_range); 3693 // When the index is out of range, the spec requires us to return 3694 // NaN. 3695 __ LoadRoot(result, Heap::kNanValueRootIndex); 3696 __ jmp(&done); 3697 3698 __ bind(&need_conversion); 3699 // Load the undefined value into the result register, which will 3700 // trigger conversion. 
3701 __ LoadRoot(result, Heap::kUndefinedValueRootIndex); 3702 __ jmp(&done); 3703 3704 NopRuntimeCallHelper call_helper; 3705 generator.GenerateSlow(masm_, call_helper); 3706 3707 __ bind(&done); 3708 context()->Plug(result); 3709} 3710 3711 3712void FullCodeGenerator::EmitStringCharAt(CallRuntime* expr) { 3713 ZoneList<Expression*>* args = expr->arguments(); 3714 DCHECK(args->length() == 2); 3715 VisitForStackValue(args->at(0)); 3716 VisitForAccumulatorValue(args->at(1)); 3717 3718 Register object = r1; 3719 Register index = r0; 3720 Register scratch = r3; 3721 Register result = r0; 3722 3723 __ pop(object); 3724 3725 Label need_conversion; 3726 Label index_out_of_range; 3727 Label done; 3728 StringCharAtGenerator generator(object, 3729 index, 3730 scratch, 3731 result, 3732 &need_conversion, 3733 &need_conversion, 3734 &index_out_of_range, 3735 STRING_INDEX_IS_NUMBER); 3736 generator.GenerateFast(masm_); 3737 __ jmp(&done); 3738 3739 __ bind(&index_out_of_range); 3740 // When the index is out of range, the spec requires us to return 3741 // the empty string. 3742 __ LoadRoot(result, Heap::kempty_stringRootIndex); 3743 __ jmp(&done); 3744 3745 __ bind(&need_conversion); 3746 // Move smi zero into the result register, which will trigger 3747 // conversion. 3748 __ mov(result, Operand(Smi::FromInt(0))); 3749 __ jmp(&done); 3750 3751 NopRuntimeCallHelper call_helper; 3752 generator.GenerateSlow(masm_, call_helper); 3753 3754 __ bind(&done); 3755 context()->Plug(result); 3756} 3757 3758 3759void FullCodeGenerator::EmitStringAdd(CallRuntime* expr) { 3760 ZoneList<Expression*>* args = expr->arguments(); 3761 DCHECK_EQ(2, args->length()); 3762 VisitForStackValue(args->at(0)); 3763 VisitForAccumulatorValue(args->at(1)); 3764 3765 __ pop(r1); 3766 StringAddStub stub(isolate(), STRING_ADD_CHECK_BOTH, NOT_TENURED); 3767 __ CallStub(&stub); 3768 context()->Plug(r0); 3769} 3770 3771 3772void FullCodeGenerator::EmitStringCompare(CallRuntime* expr) { 3773 ZoneList<Expression*>* args = expr->arguments(); 3774 DCHECK_EQ(2, args->length()); 3775 VisitForStackValue(args->at(0)); 3776 VisitForStackValue(args->at(1)); 3777 3778 StringCompareStub stub(isolate()); 3779 __ CallStub(&stub); 3780 context()->Plug(r0); 3781} 3782 3783 3784void FullCodeGenerator::EmitCallFunction(CallRuntime* expr) { 3785 ZoneList<Expression*>* args = expr->arguments(); 3786 DCHECK(args->length() >= 2); 3787 3788 int arg_count = args->length() - 2; // 2 ~ receiver and function. 3789 for (int i = 0; i < arg_count + 1; i++) { 3790 VisitForStackValue(args->at(i)); 3791 } 3792 VisitForAccumulatorValue(args->last()); // Function. 3793 3794 Label runtime, done; 3795 // Check for non-function argument (including proxy). 3796 __ JumpIfSmi(r0, &runtime); 3797 __ CompareObjectType(r0, r1, r1, JS_FUNCTION_TYPE); 3798 __ b(ne, &runtime); 3799 3800 // InvokeFunction requires the function in r1. Move it in there. 
3801 __ mov(r1, result_register());
3802 ParameterCount count(arg_count);
3803 __ InvokeFunction(r1, count, CALL_FUNCTION, NullCallWrapper());
3804 __ ldr(cp, MemOperand(fp, StandardFrameConstants::kContextOffset));
3805 __ jmp(&done);
3806
3807 __ bind(&runtime);
3808 __ push(r0);
3809 __ CallRuntime(Runtime::kCall, args->length());
3810 __ bind(&done);
3811
3812 context()->Plug(r0);
3813}
3814
3815
3816void FullCodeGenerator::EmitRegExpConstructResult(CallRuntime* expr) {
3817 RegExpConstructResultStub stub(isolate());
3818 ZoneList<Expression*>* args = expr->arguments();
3819 DCHECK(args->length() == 3);
3820 VisitForStackValue(args->at(0));
3821 VisitForStackValue(args->at(1));
3822 VisitForAccumulatorValue(args->at(2));
3823 __ pop(r1);
3824 __ pop(r2);
3825 __ CallStub(&stub);
3826 context()->Plug(r0);
3827}
3828
3829
3830void FullCodeGenerator::EmitGetFromCache(CallRuntime* expr) {
3831 ZoneList<Expression*>* args = expr->arguments();
3832 DCHECK_EQ(2, args->length());
3833 DCHECK_NE(NULL, args->at(0)->AsLiteral());
3834 int cache_id = Smi::cast(*(args->at(0)->AsLiteral()->value()))->value();
3835
3836 Handle<FixedArray> jsfunction_result_caches(
3837 isolate()->native_context()->jsfunction_result_caches());
3838 if (jsfunction_result_caches->length() <= cache_id) {
3839 __ Abort(kAttemptToUseUndefinedCache);
3840 __ LoadRoot(r0, Heap::kUndefinedValueRootIndex);
3841 context()->Plug(r0);
3842 return;
3843 }
3844
3845 VisitForAccumulatorValue(args->at(1));
3846
3847 Register key = r0;
3848 Register cache = r1;
3849 __ ldr(cache, ContextOperand(cp, Context::GLOBAL_OBJECT_INDEX));
3850 __ ldr(cache, FieldMemOperand(cache, GlobalObject::kNativeContextOffset));
3851 __ ldr(cache, ContextOperand(cache, Context::JSFUNCTION_RESULT_CACHES_INDEX));
3852 __ ldr(cache,
3853 FieldMemOperand(cache, FixedArray::OffsetOfElementAt(cache_id)));
3854
3855
3856 Label done, not_found;
3857 __ ldr(r2, FieldMemOperand(cache, JSFunctionResultCache::kFingerOffset));
3858 // r2 now holds finger offset as a smi.
3859 __ add(r3, cache, Operand(FixedArray::kHeaderSize - kHeapObjectTag));
3860 // r3 now points to the start of fixed array elements.
3861 __ ldr(r2, MemOperand::PointerAddressFromSmiKey(r3, r2, PreIndex));
3862 // Note side effect of PreIndex: r3 now points to the key of the pair.
3863 __ cmp(key, r2);
3864 __ b(ne, &not_found);
3865
3866 __ ldr(r0, MemOperand(r3, kPointerSize));
3867 __ b(&done);
3868
3869 __ bind(&not_found);
3870 // Call runtime to perform the lookup.
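// (Runtime::kGetFromCache searches the whole cache, computing and caching
// a new entry on a miss, and updates the finger; the inline fast path
// above only probed the single key/value pair the finger points at.)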
3871 __ Push(cache, key); 3872 __ CallRuntime(Runtime::kGetFromCache, 2); 3873 3874 __ bind(&done); 3875 context()->Plug(r0); 3876} 3877 3878 3879void FullCodeGenerator::EmitHasCachedArrayIndex(CallRuntime* expr) { 3880 ZoneList<Expression*>* args = expr->arguments(); 3881 VisitForAccumulatorValue(args->at(0)); 3882 3883 Label materialize_true, materialize_false; 3884 Label* if_true = NULL; 3885 Label* if_false = NULL; 3886 Label* fall_through = NULL; 3887 context()->PrepareTest(&materialize_true, &materialize_false, 3888 &if_true, &if_false, &fall_through); 3889 3890 __ ldr(r0, FieldMemOperand(r0, String::kHashFieldOffset)); 3891 __ tst(r0, Operand(String::kContainsCachedArrayIndexMask)); 3892 PrepareForBailoutBeforeSplit(expr, true, if_true, if_false); 3893 Split(eq, if_true, if_false, fall_through); 3894 3895 context()->Plug(if_true, if_false); 3896} 3897 3898 3899void FullCodeGenerator::EmitGetCachedArrayIndex(CallRuntime* expr) { 3900 ZoneList<Expression*>* args = expr->arguments(); 3901 DCHECK(args->length() == 1); 3902 VisitForAccumulatorValue(args->at(0)); 3903 3904 __ AssertString(r0); 3905 3906 __ ldr(r0, FieldMemOperand(r0, String::kHashFieldOffset)); 3907 __ IndexFromHash(r0, r0); 3908 3909 context()->Plug(r0); 3910} 3911 3912 3913void FullCodeGenerator::EmitFastOneByteArrayJoin(CallRuntime* expr) { 3914 Label bailout, done, one_char_separator, long_separator, non_trivial_array, 3915 not_size_one_array, loop, empty_separator_loop, one_char_separator_loop, 3916 one_char_separator_loop_entry, long_separator_loop; 3917 ZoneList<Expression*>* args = expr->arguments(); 3918 DCHECK(args->length() == 2); 3919 VisitForStackValue(args->at(1)); 3920 VisitForAccumulatorValue(args->at(0)); 3921 3922 // All aliases of the same register have disjoint lifetimes. 3923 Register array = r0; 3924 Register elements = no_reg; // Will be r0. 3925 Register result = no_reg; // Will be r0. 3926 Register separator = r1; 3927 Register array_length = r2; 3928 Register result_pos = no_reg; // Will be r2 3929 Register string_length = r3; 3930 Register string = r4; 3931 Register element = r5; 3932 Register elements_end = r6; 3933 Register scratch = r9; 3934 3935 // Separator operand is on the stack. 3936 __ pop(separator); 3937 3938 // Check that the array is a JSArray. 3939 __ JumpIfSmi(array, &bailout); 3940 __ CompareObjectType(array, scratch, array_length, JS_ARRAY_TYPE); 3941 __ b(ne, &bailout); 3942 3943 // Check that the array has fast elements. 3944 __ CheckFastElements(scratch, array_length, &bailout); 3945 3946 // If the array has length zero, return the empty string. 3947 __ ldr(array_length, FieldMemOperand(array, JSArray::kLengthOffset)); 3948 __ SmiUntag(array_length, SetCC); 3949 __ b(ne, &non_trivial_array); 3950 __ LoadRoot(r0, Heap::kempty_stringRootIndex); 3951 __ b(&done); 3952 3953 __ bind(&non_trivial_array); 3954 3955 // Get the FixedArray containing array's elements. 3956 elements = array; 3957 __ ldr(elements, FieldMemOperand(array, JSArray::kElementsOffset)); 3958 array = no_reg; // End of array's live range. 3959 3960 // Check that all array elements are sequential one-byte strings, and 3961 // accumulate the sum of their lengths, as a smi-encoded value. 3962 __ mov(string_length, Operand::Zero()); 3963 __ add(element, 3964 elements, Operand(FixedArray::kHeaderSize - kHeapObjectTag)); 3965 __ add(elements_end, element, Operand(array_length, LSL, kPointerSizeLog2)); 3966 // Loop condition: while (element < elements_end). 
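// Each iteration checks that the current element is a sequential one-byte
// string and accumulates its length into string_length as a smi, bailing
// out to the runtime on any other element kind or on smi overflow.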
3967 // Live values in registers:
3968 // elements: Fixed array of strings.
3969 // array_length: Length of the fixed array of strings (not smi)
3970 // separator: Separator string
3971 // string_length: Accumulated sum of string lengths (smi).
3972 // element: Current array element.
3973 // elements_end: Array end.
3974 if (generate_debug_code_) {
3975 __ cmp(array_length, Operand::Zero());
3976 __ Assert(gt, kNoEmptyArraysHereInEmitFastOneByteArrayJoin);
3977 }
3978 __ bind(&loop);
3979 __ ldr(string, MemOperand(element, kPointerSize, PostIndex));
3980 __ JumpIfSmi(string, &bailout);
3981 __ ldr(scratch, FieldMemOperand(string, HeapObject::kMapOffset));
3982 __ ldrb(scratch, FieldMemOperand(scratch, Map::kInstanceTypeOffset));
3983 __ JumpIfInstanceTypeIsNotSequentialOneByte(scratch, scratch, &bailout);
3984 __ ldr(scratch, FieldMemOperand(string, SeqOneByteString::kLengthOffset));
3985 __ add(string_length, string_length, Operand(scratch), SetCC);
3986 __ b(vs, &bailout);
3987 __ cmp(element, elements_end);
3988 __ b(lt, &loop);
3989
3990 // If array_length is 1, return elements[0], a string.
3991 __ cmp(array_length, Operand(1));
3992 __ b(ne, &not_size_one_array);
3993 __ ldr(r0, FieldMemOperand(elements, FixedArray::kHeaderSize));
3994 __ b(&done);
3995
3996 __ bind(&not_size_one_array);
3997
3998 // Live values in registers:
3999 // separator: Separator string
4000 // array_length: Length of the array.
4001 // string_length: Sum of string lengths (smi).
4002 // elements: FixedArray of strings.
4003
4004 // Check that the separator is a flat one-byte string.
4005 __ JumpIfSmi(separator, &bailout);
4006 __ ldr(scratch, FieldMemOperand(separator, HeapObject::kMapOffset));
4007 __ ldrb(scratch, FieldMemOperand(scratch, Map::kInstanceTypeOffset));
4008 __ JumpIfInstanceTypeIsNotSequentialOneByte(scratch, scratch, &bailout);
4009
4010 // Add (separator length times array_length) - separator length to the
4011 // string_length to get the length of the result string. array_length is not
4012 // a smi but the other values are, so the result is a smi.
4013 __ ldr(scratch, FieldMemOperand(separator, SeqOneByteString::kLengthOffset));
4014 __ sub(string_length, string_length, Operand(scratch));
4015 __ smull(scratch, ip, array_length, scratch);
4016 // Check for smi overflow. No overflow if higher 33 bits of 64-bit result are
4017 // zero.
4018 __ cmp(ip, Operand::Zero());
4019 __ b(ne, &bailout);
4020 __ tst(scratch, Operand(0x80000000));
4021 __ b(ne, &bailout);
4022 __ add(string_length, string_length, Operand(scratch), SetCC);
4023 __ b(vs, &bailout);
4024 __ SmiUntag(string_length);
4025
4026 // Get first element in the array to free up the elements register to be used
4027 // for the result.
4028 __ add(element,
4029 elements, Operand(FixedArray::kHeaderSize - kHeapObjectTag));
4030 result = elements; // End of live range for elements.
4031 elements = no_reg;
4032 // Live values in registers:
4033 // element: First array element
4034 // separator: Separator string
4035 // string_length: Length of result string (not smi)
4036 // array_length: Length of the array.
4037 __ AllocateOneByteString(result, string_length, scratch,
4038 string, // used as scratch
4039 elements_end, // used as scratch
4040 &bailout);
4041 // Prepare for looping. Set up elements_end to the end of the array. Set
4042 // result_pos to the position in the result where the first character
4043 // will be written.
4044 __ add(elements_end, element, Operand(array_length, LSL, kPointerSizeLog2)); 4045 result_pos = array_length; // End of live range for array_length. 4046 array_length = no_reg; 4047 __ add(result_pos, 4048 result, 4049 Operand(SeqOneByteString::kHeaderSize - kHeapObjectTag)); 4050 4051 // Check the length of the separator. 4052 __ ldr(scratch, FieldMemOperand(separator, SeqOneByteString::kLengthOffset)); 4053 __ cmp(scratch, Operand(Smi::FromInt(1))); 4054 __ b(eq, &one_char_separator); 4055 __ b(gt, &long_separator); 4056 4057 // Empty separator case 4058 __ bind(&empty_separator_loop); 4059 // Live values in registers: 4060 // result_pos: the position to which we are currently copying characters. 4061 // element: Current array element. 4062 // elements_end: Array end. 4063 4064 // Copy next array element to the result. 4065 __ ldr(string, MemOperand(element, kPointerSize, PostIndex)); 4066 __ ldr(string_length, FieldMemOperand(string, String::kLengthOffset)); 4067 __ SmiUntag(string_length); 4068 __ add(string, 4069 string, 4070 Operand(SeqOneByteString::kHeaderSize - kHeapObjectTag)); 4071 __ CopyBytes(string, result_pos, string_length, scratch); 4072 __ cmp(element, elements_end); 4073 __ b(lt, &empty_separator_loop); // End while (element < elements_end). 4074 DCHECK(result.is(r0)); 4075 __ b(&done); 4076 4077 // One-character separator case 4078 __ bind(&one_char_separator); 4079 // Replace separator with its one-byte character value. 4080 __ ldrb(separator, FieldMemOperand(separator, SeqOneByteString::kHeaderSize)); 4081 // Jump into the loop after the code that copies the separator, so the first 4082 // element is not preceded by a separator 4083 __ jmp(&one_char_separator_loop_entry); 4084 4085 __ bind(&one_char_separator_loop); 4086 // Live values in registers: 4087 // result_pos: the position to which we are currently copying characters. 4088 // element: Current array element. 4089 // elements_end: Array end. 4090 // separator: Single separator one-byte char (in lower byte). 4091 4092 // Copy the separator character to the result. 4093 __ strb(separator, MemOperand(result_pos, 1, PostIndex)); 4094 4095 // Copy next array element to the result. 4096 __ bind(&one_char_separator_loop_entry); 4097 __ ldr(string, MemOperand(element, kPointerSize, PostIndex)); 4098 __ ldr(string_length, FieldMemOperand(string, String::kLengthOffset)); 4099 __ SmiUntag(string_length); 4100 __ add(string, 4101 string, 4102 Operand(SeqOneByteString::kHeaderSize - kHeapObjectTag)); 4103 __ CopyBytes(string, result_pos, string_length, scratch); 4104 __ cmp(element, elements_end); 4105 __ b(lt, &one_char_separator_loop); // End while (element < elements_end). 4106 DCHECK(result.is(r0)); 4107 __ b(&done); 4108 4109 // Long separator case (separator is more than one character). Entry is at the 4110 // label long_separator below. 4111 __ bind(&long_separator_loop); 4112 // Live values in registers: 4113 // result_pos: the position to which we are currently copying characters. 4114 // element: Current array element. 4115 // elements_end: Array end. 4116 // separator: Separator string. 4117 4118 // Copy the separator to the result. 
4119 __ ldr(string_length, FieldMemOperand(separator, String::kLengthOffset)); 4120 __ SmiUntag(string_length); 4121 __ add(string, 4122 separator, 4123 Operand(SeqOneByteString::kHeaderSize - kHeapObjectTag)); 4124 __ CopyBytes(string, result_pos, string_length, scratch); 4125 4126 __ bind(&long_separator); 4127 __ ldr(string, MemOperand(element, kPointerSize, PostIndex)); 4128 __ ldr(string_length, FieldMemOperand(string, String::kLengthOffset)); 4129 __ SmiUntag(string_length); 4130 __ add(string, 4131 string, 4132 Operand(SeqOneByteString::kHeaderSize - kHeapObjectTag)); 4133 __ CopyBytes(string, result_pos, string_length, scratch); 4134 __ cmp(element, elements_end); 4135 __ b(lt, &long_separator_loop); // End while (element < elements_end). 4136 DCHECK(result.is(r0)); 4137 __ b(&done); 4138 4139 __ bind(&bailout); 4140 __ LoadRoot(r0, Heap::kUndefinedValueRootIndex); 4141 __ bind(&done); 4142 context()->Plug(r0); 4143} 4144 4145 4146void FullCodeGenerator::EmitDebugIsActive(CallRuntime* expr) { 4147 DCHECK(expr->arguments()->length() == 0); 4148 ExternalReference debug_is_active = 4149 ExternalReference::debug_is_active_address(isolate()); 4150 __ mov(ip, Operand(debug_is_active)); 4151 __ ldrb(r0, MemOperand(ip)); 4152 __ SmiTag(r0); 4153 context()->Plug(r0); 4154} 4155 4156 4157void FullCodeGenerator::VisitCallRuntime(CallRuntime* expr) { 4158 if (expr->function() != NULL && 4159 expr->function()->intrinsic_type == Runtime::INLINE) { 4160 Comment cmnt(masm_, "[ InlineRuntimeCall"); 4161 EmitInlineRuntimeCall(expr); 4162 return; 4163 } 4164 4165 Comment cmnt(masm_, "[ CallRuntime"); 4166 ZoneList<Expression*>* args = expr->arguments(); 4167 int arg_count = args->length(); 4168 4169 if (expr->is_jsruntime()) { 4170 // Push the builtins object as the receiver. 4171 Register receiver = LoadDescriptor::ReceiverRegister(); 4172 __ ldr(receiver, GlobalObjectOperand()); 4173 __ ldr(receiver, FieldMemOperand(receiver, GlobalObject::kBuiltinsOffset)); 4174 __ push(receiver); 4175 4176 // Load the function from the receiver. 4177 __ mov(LoadDescriptor::NameRegister(), Operand(expr->name())); 4178 if (FLAG_vector_ics) { 4179 __ mov(VectorLoadICDescriptor::SlotRegister(), 4180 Operand(Smi::FromInt(expr->CallRuntimeFeedbackSlot()))); 4181 CallLoadIC(NOT_CONTEXTUAL); 4182 } else { 4183 CallLoadIC(NOT_CONTEXTUAL, expr->CallRuntimeFeedbackId()); 4184 } 4185 4186 // Push the target function under the receiver. 4187 __ ldr(ip, MemOperand(sp, 0)); 4188 __ push(ip); 4189 __ str(r0, MemOperand(sp, kPointerSize)); 4190 4191 // Push the arguments ("left-to-right"). 4192 int arg_count = args->length(); 4193 for (int i = 0; i < arg_count; i++) { 4194 VisitForStackValue(args->at(i)); 4195 } 4196 4197 // Record source position of the IC call. 4198 SetSourcePosition(expr->position()); 4199 CallFunctionStub stub(isolate(), arg_count, NO_CALL_FUNCTION_FLAGS); 4200 __ ldr(r1, MemOperand(sp, (arg_count + 1) * kPointerSize)); 4201 __ CallStub(&stub); 4202 4203 // Restore context register. 4204 __ ldr(cp, MemOperand(fp, StandardFrameConstants::kContextOffset)); 4205 4206 context()->DropAndPlug(1, r0); 4207 } else { 4208 // Push the arguments ("left-to-right"). 4209 for (int i = 0; i < arg_count; i++) { 4210 VisitForStackValue(args->at(i)); 4211 } 4212 4213 // Call the C runtime function. 
4214 __ CallRuntime(expr->function(), arg_count); 4215 context()->Plug(r0); 4216 } 4217} 4218 4219 4220void FullCodeGenerator::VisitUnaryOperation(UnaryOperation* expr) { 4221 switch (expr->op()) { 4222 case Token::DELETE: { 4223 Comment cmnt(masm_, "[ UnaryOperation (DELETE)"); 4224 Property* property = expr->expression()->AsProperty(); 4225 VariableProxy* proxy = expr->expression()->AsVariableProxy(); 4226 4227 if (property != NULL) { 4228 VisitForStackValue(property->obj()); 4229 VisitForStackValue(property->key()); 4230 __ mov(r1, Operand(Smi::FromInt(strict_mode()))); 4231 __ push(r1); 4232 __ InvokeBuiltin(Builtins::DELETE, CALL_FUNCTION); 4233 context()->Plug(r0); 4234 } else if (proxy != NULL) { 4235 Variable* var = proxy->var(); 4236 // Delete of an unqualified identifier is disallowed in strict mode 4237 // but "delete this" is allowed. 4238 DCHECK(strict_mode() == SLOPPY || var->is_this()); 4239 if (var->IsUnallocated()) { 4240 __ ldr(r2, GlobalObjectOperand()); 4241 __ mov(r1, Operand(var->name())); 4242 __ mov(r0, Operand(Smi::FromInt(SLOPPY))); 4243 __ Push(r2, r1, r0); 4244 __ InvokeBuiltin(Builtins::DELETE, CALL_FUNCTION); 4245 context()->Plug(r0); 4246 } else if (var->IsStackAllocated() || var->IsContextSlot()) { 4247 // Result of deleting non-global, non-dynamic variables is false. 4248 // The subexpression does not have side effects. 4249 context()->Plug(var->is_this()); 4250 } else { 4251 // Non-global variable. Call the runtime to try to delete from the 4252 // context where the variable was introduced. 4253 DCHECK(!context_register().is(r2)); 4254 __ mov(r2, Operand(var->name())); 4255 __ Push(context_register(), r2); 4256 __ CallRuntime(Runtime::kDeleteLookupSlot, 2); 4257 context()->Plug(r0); 4258 } 4259 } else { 4260 // Result of deleting non-property, non-variable reference is true. 4261 // The subexpression may have side effects. 4262 VisitForEffect(expr->expression()); 4263 context()->Plug(true); 4264 } 4265 break; 4266 } 4267 4268 case Token::VOID: { 4269 Comment cmnt(masm_, "[ UnaryOperation (VOID)"); 4270 VisitForEffect(expr->expression()); 4271 context()->Plug(Heap::kUndefinedValueRootIndex); 4272 break; 4273 } 4274 4275 case Token::NOT: { 4276 Comment cmnt(masm_, "[ UnaryOperation (NOT)"); 4277 if (context()->IsEffect()) { 4278 // Unary NOT has no side effects so it's only necessary to visit the 4279 // subexpression. Match the optimizing compiler by not branching. 4280 VisitForEffect(expr->expression()); 4281 } else if (context()->IsTest()) { 4282 const TestContext* test = TestContext::cast(context()); 4283 // The labels are swapped for the recursive call. 4284 VisitForControl(expr->expression(), 4285 test->false_label(), 4286 test->true_label(), 4287 test->fall_through()); 4288 context()->Plug(test->true_label(), test->false_label()); 4289 } else { 4290 // We handle value contexts explicitly rather than simply visiting 4291 // for control and plugging the control flow into the context, 4292 // because we need to prepare a pair of extra administrative AST ids 4293 // for the optimizing compiler. 
4294 DCHECK(context()->IsAccumulatorValue() || context()->IsStackValue());
4295 Label materialize_true, materialize_false, done;
4296 VisitForControl(expr->expression(),
4297 &materialize_false,
4298 &materialize_true,
4299 &materialize_true);
4300 __ bind(&materialize_true);
4301 PrepareForBailoutForId(expr->MaterializeTrueId(), NO_REGISTERS);
4302 __ LoadRoot(r0, Heap::kTrueValueRootIndex);
4303 if (context()->IsStackValue()) __ push(r0);
4304 __ jmp(&done);
4305 __ bind(&materialize_false);
4306 PrepareForBailoutForId(expr->MaterializeFalseId(), NO_REGISTERS);
4307 __ LoadRoot(r0, Heap::kFalseValueRootIndex);
4308 if (context()->IsStackValue()) __ push(r0);
4309 __ bind(&done);
4310 }
4311 break;
4312 }
4313
4314 case Token::TYPEOF: {
4315 Comment cmnt(masm_, "[ UnaryOperation (TYPEOF)");
4316 { StackValueContext context(this);
4317 VisitForTypeofValue(expr->expression());
4318 }
4319 __ CallRuntime(Runtime::kTypeof, 1);
4320 context()->Plug(r0);
4321 break;
4322 }
4323
4324 default:
4325 UNREACHABLE();
4326 }
4327}
4328
4329
4330void FullCodeGenerator::VisitCountOperation(CountOperation* expr) {
4331 DCHECK(expr->expression()->IsValidReferenceExpression());
4332
4333 Comment cmnt(masm_, "[ CountOperation");
4334 SetSourcePosition(expr->position());
4335
4336 // Expression can only be a property, a global or a (parameter or local)
4337 // slot.
4338 enum LhsKind { VARIABLE, NAMED_PROPERTY, KEYED_PROPERTY };
4339 LhsKind assign_type = VARIABLE;
4340 Property* prop = expr->expression()->AsProperty();
4341 // In case of a property we use the uninitialized expression context
4342 // of the key to detect a named property.
4343 if (prop != NULL) {
4344 assign_type =
4345 (prop->key()->IsPropertyName()) ? NAMED_PROPERTY : KEYED_PROPERTY;
4346 }
4347
4348 // Evaluate expression and get value.
4349 if (assign_type == VARIABLE) {
4350 DCHECK(expr->expression()->AsVariableProxy()->var() != NULL);
4351 AccumulatorValueContext context(this);
4352 EmitVariableLoad(expr->expression()->AsVariableProxy());
4353 } else {
4354 // Reserve space for result of postfix operation.
4355 if (expr->is_postfix() && !context()->IsEffect()) {
4356 __ mov(ip, Operand(Smi::FromInt(0)));
4357 __ push(ip);
4358 }
4359 if (assign_type == NAMED_PROPERTY) {
4360 // Put the object both on the stack and in the register.
4361 VisitForStackValue(prop->obj());
4362 __ ldr(LoadDescriptor::ReceiverRegister(), MemOperand(sp, 0));
4363 EmitNamedPropertyLoad(prop);
4364 } else {
4365 VisitForStackValue(prop->obj());
4366 VisitForStackValue(prop->key());
4367 __ ldr(LoadDescriptor::ReceiverRegister(),
4368 MemOperand(sp, 1 * kPointerSize));
4369 __ ldr(LoadDescriptor::NameRegister(), MemOperand(sp, 0));
4370 EmitKeyedPropertyLoad(prop);
4371 }
4372 }
4373
4374 // We need a second deoptimization point after loading the value
4375 // in case evaluating the property load may have a side effect.
4376 if (assign_type == VARIABLE) {
4377 PrepareForBailout(expr->expression(), TOS_REG);
4378 } else {
4379 PrepareForBailoutForId(prop->LoadId(), TOS_REG);
4380 }
4381
4382 // Inline smi case if we are in a loop.
4383 Label stub_call, done;
4384 JumpPatchSite patch_site(masm_);
4385
4386 int count_value = expr->op() == Token::INC ? 1 : -1;
4387 if (ShouldInlineSmiCase(expr->op())) {
4388 Label slow;
4389 patch_site.EmitJumpIfNotSmi(r0, &slow);
4390
4391 // Save result for postfix expressions.
4392 if (expr->is_postfix()) {
4393 if (!context()->IsEffect()) {
4394 // Save the result on the stack.
If we have a named or keyed property
4395 // we store the result under the receiver that is currently on top
4396 // of the stack.
4397 switch (assign_type) {
4398 case VARIABLE:
4399 __ push(r0);
4400 break;
4401 case NAMED_PROPERTY:
4402 __ str(r0, MemOperand(sp, kPointerSize));
4403 break;
4404 case KEYED_PROPERTY:
4405 __ str(r0, MemOperand(sp, 2 * kPointerSize));
4406 break;
4407 }
4408 }
4409 }
4410
4411 __ add(r0, r0, Operand(Smi::FromInt(count_value)), SetCC);
4412 __ b(vc, &done);
4413 // Call stub. Undo operation first.
4414 __ sub(r0, r0, Operand(Smi::FromInt(count_value)));
4415 __ jmp(&stub_call);
4416 __ bind(&slow);
4417 }
4418 ToNumberStub convert_stub(isolate());
4419 __ CallStub(&convert_stub);
4420
4421 // Save result for postfix expressions.
4422 if (expr->is_postfix()) {
4423 if (!context()->IsEffect()) {
4424 // Save the result on the stack. If we have a named or keyed property
4425 // we store the result under the receiver that is currently on top
4426 // of the stack.
4427 switch (assign_type) {
4428 case VARIABLE:
4429 __ push(r0);
4430 break;
4431 case NAMED_PROPERTY:
4432 __ str(r0, MemOperand(sp, kPointerSize));
4433 break;
4434 case KEYED_PROPERTY:
4435 __ str(r0, MemOperand(sp, 2 * kPointerSize));
4436 break;
4437 }
4438 }
4439 }
4440
4441
4442 __ bind(&stub_call);
4443 __ mov(r1, r0);
4444 __ mov(r0, Operand(Smi::FromInt(count_value)));
4445
4446 // Record position before stub call.
4447 SetSourcePosition(expr->position());
4448
4449 Handle<Code> code =
4450 CodeFactory::BinaryOpIC(isolate(), Token::ADD, NO_OVERWRITE).code();
4451 CallIC(code, expr->CountBinOpFeedbackId());
4452 patch_site.EmitPatchInfo();
4453 __ bind(&done);
4454
4455 // Store the value returned in r0.
4456 switch (assign_type) {
4457 case VARIABLE:
4458 if (expr->is_postfix()) {
4459 { EffectContext context(this);
4460 EmitVariableAssignment(expr->expression()->AsVariableProxy()->var(),
4461 Token::ASSIGN);
4462 PrepareForBailoutForId(expr->AssignmentId(), TOS_REG);
4463 context.Plug(r0);
4464 }
4465 // For all contexts except EffectContext, we have the result on
4466 // top of the stack.
4467 if (!context()->IsEffect()) { 4468 context()->PlugTOS(); 4469 } 4470 } else { 4471 EmitVariableAssignment(expr->expression()->AsVariableProxy()->var(), 4472 Token::ASSIGN); 4473 PrepareForBailoutForId(expr->AssignmentId(), TOS_REG); 4474 context()->Plug(r0); 4475 } 4476 break; 4477 case NAMED_PROPERTY: { 4478 __ mov(StoreDescriptor::NameRegister(), 4479 Operand(prop->key()->AsLiteral()->value())); 4480 __ pop(StoreDescriptor::ReceiverRegister()); 4481 CallStoreIC(expr->CountStoreFeedbackId()); 4482 PrepareForBailoutForId(expr->AssignmentId(), TOS_REG); 4483 if (expr->is_postfix()) { 4484 if (!context()->IsEffect()) { 4485 context()->PlugTOS(); 4486 } 4487 } else { 4488 context()->Plug(r0); 4489 } 4490 break; 4491 } 4492 case KEYED_PROPERTY: { 4493 __ Pop(StoreDescriptor::ReceiverRegister(), 4494 StoreDescriptor::NameRegister()); 4495 Handle<Code> ic = 4496 CodeFactory::KeyedStoreIC(isolate(), strict_mode()).code(); 4497 CallIC(ic, expr->CountStoreFeedbackId()); 4498 PrepareForBailoutForId(expr->AssignmentId(), TOS_REG); 4499 if (expr->is_postfix()) { 4500 if (!context()->IsEffect()) { 4501 context()->PlugTOS(); 4502 } 4503 } else { 4504 context()->Plug(r0); 4505 } 4506 break; 4507 } 4508 } 4509} 4510 4511 4512void FullCodeGenerator::VisitForTypeofValue(Expression* expr) { 4513 DCHECK(!context()->IsEffect()); 4514 DCHECK(!context()->IsTest()); 4515 VariableProxy* proxy = expr->AsVariableProxy(); 4516 if (proxy != NULL && proxy->var()->IsUnallocated()) { 4517 Comment cmnt(masm_, "[ Global variable"); 4518 __ ldr(LoadDescriptor::ReceiverRegister(), GlobalObjectOperand()); 4519 __ mov(LoadDescriptor::NameRegister(), Operand(proxy->name())); 4520 if (FLAG_vector_ics) { 4521 __ mov(VectorLoadICDescriptor::SlotRegister(), 4522 Operand(Smi::FromInt(proxy->VariableFeedbackSlot()))); 4523 } 4524 // Use a regular load, not a contextual load, to avoid a reference 4525 // error. 4526 CallLoadIC(NOT_CONTEXTUAL); 4527 PrepareForBailout(expr, TOS_REG); 4528 context()->Plug(r0); 4529 } else if (proxy != NULL && proxy->var()->IsLookupSlot()) { 4530 Comment cmnt(masm_, "[ Lookup slot"); 4531 Label done, slow; 4532 4533 // Generate code for loading from variables potentially shadowed 4534 // by eval-introduced variables. 4535 EmitDynamicLookupFastCase(proxy, INSIDE_TYPEOF, &slow, &done); 4536 4537 __ bind(&slow); 4538 __ mov(r0, Operand(proxy->name())); 4539 __ Push(cp, r0); 4540 __ CallRuntime(Runtime::kLoadLookupSlotNoReferenceError, 2); 4541 PrepareForBailout(expr, TOS_REG); 4542 __ bind(&done); 4543 4544 context()->Plug(r0); 4545 } else { 4546 // This expression cannot throw a reference error at the top level. 
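// (Only the unallocated-global and lookup-slot cases handled above can
// raise a reference error for typeof; everything else is visited
// normally, in a duplicate of the current expression context.)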
4547 VisitInDuplicateContext(expr); 4548 } 4549} 4550 4551 4552void FullCodeGenerator::EmitLiteralCompareTypeof(Expression* expr, 4553 Expression* sub_expr, 4554 Handle<String> check) { 4555 Label materialize_true, materialize_false; 4556 Label* if_true = NULL; 4557 Label* if_false = NULL; 4558 Label* fall_through = NULL; 4559 context()->PrepareTest(&materialize_true, &materialize_false, 4560 &if_true, &if_false, &fall_through); 4561 4562 { AccumulatorValueContext context(this); 4563 VisitForTypeofValue(sub_expr); 4564 } 4565 PrepareForBailoutBeforeSplit(expr, true, if_true, if_false); 4566 4567 Factory* factory = isolate()->factory(); 4568 if (String::Equals(check, factory->number_string())) { 4569 __ JumpIfSmi(r0, if_true); 4570 __ ldr(r0, FieldMemOperand(r0, HeapObject::kMapOffset)); 4571 __ LoadRoot(ip, Heap::kHeapNumberMapRootIndex); 4572 __ cmp(r0, ip); 4573 Split(eq, if_true, if_false, fall_through); 4574 } else if (String::Equals(check, factory->string_string())) { 4575 __ JumpIfSmi(r0, if_false); 4576 // Check for undetectable objects => false. 4577 __ CompareObjectType(r0, r0, r1, FIRST_NONSTRING_TYPE); 4578 __ b(ge, if_false); 4579 __ ldrb(r1, FieldMemOperand(r0, Map::kBitFieldOffset)); 4580 __ tst(r1, Operand(1 << Map::kIsUndetectable)); 4581 Split(eq, if_true, if_false, fall_through); 4582 } else if (String::Equals(check, factory->symbol_string())) { 4583 __ JumpIfSmi(r0, if_false); 4584 __ CompareObjectType(r0, r0, r1, SYMBOL_TYPE); 4585 Split(eq, if_true, if_false, fall_through); 4586 } else if (String::Equals(check, factory->boolean_string())) { 4587 __ CompareRoot(r0, Heap::kTrueValueRootIndex); 4588 __ b(eq, if_true); 4589 __ CompareRoot(r0, Heap::kFalseValueRootIndex); 4590 Split(eq, if_true, if_false, fall_through); 4591 } else if (String::Equals(check, factory->undefined_string())) { 4592 __ CompareRoot(r0, Heap::kUndefinedValueRootIndex); 4593 __ b(eq, if_true); 4594 __ JumpIfSmi(r0, if_false); 4595 // Check for undetectable objects => true. 4596 __ ldr(r0, FieldMemOperand(r0, HeapObject::kMapOffset)); 4597 __ ldrb(r1, FieldMemOperand(r0, Map::kBitFieldOffset)); 4598 __ tst(r1, Operand(1 << Map::kIsUndetectable)); 4599 Split(ne, if_true, if_false, fall_through); 4600 4601 } else if (String::Equals(check, factory->function_string())) { 4602 __ JumpIfSmi(r0, if_false); 4603 STATIC_ASSERT(NUM_OF_CALLABLE_SPEC_OBJECT_TYPES == 2); 4604 __ CompareObjectType(r0, r0, r1, JS_FUNCTION_TYPE); 4605 __ b(eq, if_true); 4606 __ cmp(r1, Operand(JS_FUNCTION_PROXY_TYPE)); 4607 Split(eq, if_true, if_false, fall_through); 4608 } else if (String::Equals(check, factory->object_string())) { 4609 __ JumpIfSmi(r0, if_false); 4610 __ CompareRoot(r0, Heap::kNullValueRootIndex); 4611 __ b(eq, if_true); 4612 // Check for JS objects => true. 4613 __ CompareObjectType(r0, r0, r1, FIRST_NONCALLABLE_SPEC_OBJECT_TYPE); 4614 __ b(lt, if_false); 4615 __ CompareInstanceType(r0, r1, LAST_NONCALLABLE_SPEC_OBJECT_TYPE); 4616 __ b(gt, if_false); 4617 // Check for undetectable objects => false. 
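// (Undetectable objects, e.g. document.all, answer "undefined" to typeof
// and therefore must not be classified as "object" here.)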
4618 __ ldrb(r1, FieldMemOperand(r0, Map::kBitFieldOffset)); 4619 __ tst(r1, Operand(1 << Map::kIsUndetectable)); 4620 Split(eq, if_true, if_false, fall_through); 4621 } else { 4622 if (if_false != fall_through) __ jmp(if_false); 4623 } 4624 context()->Plug(if_true, if_false); 4625} 4626 4627 4628void FullCodeGenerator::VisitCompareOperation(CompareOperation* expr) { 4629 Comment cmnt(masm_, "[ CompareOperation"); 4630 SetSourcePosition(expr->position()); 4631 4632 // First we try a fast inlined version of the compare when one of 4633 // the operands is a literal. 4634 if (TryLiteralCompare(expr)) return; 4635 4636 // Always perform the comparison for its control flow. Pack the result 4637 // into the expression's context after the comparison is performed. 4638 Label materialize_true, materialize_false; 4639 Label* if_true = NULL; 4640 Label* if_false = NULL; 4641 Label* fall_through = NULL; 4642 context()->PrepareTest(&materialize_true, &materialize_false, 4643 &if_true, &if_false, &fall_through); 4644 4645 Token::Value op = expr->op(); 4646 VisitForStackValue(expr->left()); 4647 switch (op) { 4648 case Token::IN: 4649 VisitForStackValue(expr->right()); 4650 __ InvokeBuiltin(Builtins::IN, CALL_FUNCTION); 4651 PrepareForBailoutBeforeSplit(expr, false, NULL, NULL); 4652 __ LoadRoot(ip, Heap::kTrueValueRootIndex); 4653 __ cmp(r0, ip); 4654 Split(eq, if_true, if_false, fall_through); 4655 break; 4656 4657 case Token::INSTANCEOF: { 4658 VisitForStackValue(expr->right()); 4659 InstanceofStub stub(isolate(), InstanceofStub::kNoFlags); 4660 __ CallStub(&stub); 4661 PrepareForBailoutBeforeSplit(expr, true, if_true, if_false); 4662 // The stub returns 0 for true. 4663 __ tst(r0, r0); 4664 Split(eq, if_true, if_false, fall_through); 4665 break; 4666 } 4667 4668 default: { 4669 VisitForAccumulatorValue(expr->right()); 4670 Condition cond = CompareIC::ComputeCondition(op); 4671 __ pop(r1); 4672 4673 bool inline_smi_code = ShouldInlineSmiCase(op); 4674 JumpPatchSite patch_site(masm_); 4675 if (inline_smi_code) { 4676 Label slow_case; 4677 __ orr(r2, r0, Operand(r1)); 4678 patch_site.EmitJumpIfNotSmi(r2, &slow_case); 4679 __ cmp(r1, r0); 4680 Split(cond, if_true, if_false, NULL); 4681 __ bind(&slow_case); 4682 } 4683 4684 // Record position and call the compare IC. 4685 SetSourcePosition(expr->position()); 4686 Handle<Code> ic = CodeFactory::CompareIC(isolate(), op).code(); 4687 CallIC(ic, expr->CompareOperationFeedbackId()); 4688 patch_site.EmitPatchInfo(); 4689 PrepareForBailoutBeforeSplit(expr, true, if_true, if_false); 4690 __ cmp(r0, Operand::Zero()); 4691 Split(cond, if_true, if_false, fall_through); 4692 } 4693 } 4694 4695 // Convert the result of the comparison into one expected for this 4696 // expression's context. 4697 context()->Plug(if_true, if_false); 4698} 4699 4700 4701void FullCodeGenerator::EmitLiteralCompareNil(CompareOperation* expr, 4702 Expression* sub_expr, 4703 NilValue nil) { 4704 Label materialize_true, materialize_false; 4705 Label* if_true = NULL; 4706 Label* if_false = NULL; 4707 Label* fall_through = NULL; 4708 context()->PrepareTest(&materialize_true, &materialize_false, 4709 &if_true, &if_false, &fall_through); 4710 4711 VisitForAccumulatorValue(sub_expr); 4712 PrepareForBailoutBeforeSplit(expr, true, if_true, if_false); 4713 if (expr->op() == Token::EQ_STRICT) { 4714 Heap::RootListIndex nil_value = nil == kNullValue ? 
4715 Heap::kNullValueRootIndex :
4716 Heap::kUndefinedValueRootIndex;
4717 __ LoadRoot(r1, nil_value);
4718 __ cmp(r0, r1);
4719 Split(eq, if_true, if_false, fall_through);
4720 } else {
4721 Handle<Code> ic = CompareNilICStub::GetUninitialized(isolate(), nil);
4722 CallIC(ic, expr->CompareOperationFeedbackId());
4723 __ cmp(r0, Operand(0));
4724 Split(ne, if_true, if_false, fall_through);
4725 }
4726 context()->Plug(if_true, if_false);
4727}
4728
4729
4730void FullCodeGenerator::VisitThisFunction(ThisFunction* expr) {
4731 __ ldr(r0, MemOperand(fp, JavaScriptFrameConstants::kFunctionOffset));
4732 context()->Plug(r0);
4733}
4734
4735
4736Register FullCodeGenerator::result_register() {
4737 return r0;
4738}
4739
4740
4741Register FullCodeGenerator::context_register() {
4742 return cp;
4743}
4744
4745
4746void FullCodeGenerator::StoreToFrameField(int frame_offset, Register value) {
4747 DCHECK_EQ(POINTER_SIZE_ALIGN(frame_offset), frame_offset);
4748 __ str(value, MemOperand(fp, frame_offset));
4749}
4750
4751
4752void FullCodeGenerator::LoadContextField(Register dst, int context_index) {
4753 __ ldr(dst, ContextOperand(cp, context_index));
4754}
4755
4756
4757void FullCodeGenerator::PushFunctionArgumentForContextAllocation() {
4758 Scope* declaration_scope = scope()->DeclarationScope();
4759 if (declaration_scope->is_global_scope() ||
4760 declaration_scope->is_module_scope()) {
4761 // Contexts nested in the native context have a canonical empty function
4762 // as their closure, not the anonymous closure containing the global
4763 // code. Pass a smi sentinel and let the runtime look up the empty
4764 // function.
4765 __ mov(ip, Operand(Smi::FromInt(0)));
4766 } else if (declaration_scope->is_eval_scope()) {
4767 // Contexts created by a call to eval have the same closure as the
4768 // context calling eval, not the anonymous closure containing the eval
4769 // code. Fetch it from the context.
4770 __ ldr(ip, ContextOperand(cp, Context::CLOSURE_INDEX));
4771 } else {
4772 DCHECK(declaration_scope->is_function_scope());
4773 __ ldr(ip, MemOperand(fp, JavaScriptFrameConstants::kFunctionOffset));
4774 }
4775 __ push(ip);
4776}
4777
4778
4779// ----------------------------------------------------------------------------
4780// Non-local control flow support.
4781
4782void FullCodeGenerator::EnterFinallyBlock() {
4783 DCHECK(!result_register().is(r1));
4784 // Store result register while executing finally block.
4785 __ push(result_register());
4786 // Cook the return address in the link register to the stack (as a
4787 // smi-encoded Code* delta).
4787 __ sub(r1, lr, Operand(masm_->CodeObject()));
4788 __ SmiTag(r1);
4789
4790 // Store cooked return address while executing finally block.
4791 __ push(r1);
4792
4793 // Store pending message while executing finally block.
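// (The pending message state is saved here and restored by
// ExitFinallyBlock so that whatever the finally block itself executes
// cannot clobber the message of an exception propagating through it.)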
4794 ExternalReference pending_message_obj =
4795 ExternalReference::address_of_pending_message_obj(isolate());
4796 __ mov(ip, Operand(pending_message_obj));
4797 __ ldr(r1, MemOperand(ip));
4798 __ push(r1);
4799
4800 ExternalReference has_pending_message =
4801 ExternalReference::address_of_has_pending_message(isolate());
4802 __ mov(ip, Operand(has_pending_message));
4803 STATIC_ASSERT(sizeof(bool) == 1); // NOLINT(runtime/sizeof)
4804 __ ldrb(r1, MemOperand(ip));
4805 __ SmiTag(r1);
4806 __ push(r1);
4807
4808 ExternalReference pending_message_script =
4809 ExternalReference::address_of_pending_message_script(isolate());
4810 __ mov(ip, Operand(pending_message_script));
4811 __ ldr(r1, MemOperand(ip));
4812 __ push(r1);
4813}
4814
4815
4816void FullCodeGenerator::ExitFinallyBlock() {
4817 DCHECK(!result_register().is(r1));
4818 // Restore pending message from stack.
4819 __ pop(r1);
4820 ExternalReference pending_message_script =
4821 ExternalReference::address_of_pending_message_script(isolate());
4822 __ mov(ip, Operand(pending_message_script));
4823 __ str(r1, MemOperand(ip));
4824
4825 __ pop(r1);
4826 __ SmiUntag(r1);
4827 ExternalReference has_pending_message =
4828 ExternalReference::address_of_has_pending_message(isolate());
4829 __ mov(ip, Operand(has_pending_message));
4830 STATIC_ASSERT(sizeof(bool) == 1); // NOLINT(runtime/sizeof)
4831 __ strb(r1, MemOperand(ip));
4832
4833 __ pop(r1);
4834 ExternalReference pending_message_obj =
4835 ExternalReference::address_of_pending_message_obj(isolate());
4836 __ mov(ip, Operand(pending_message_obj));
4837 __ str(r1, MemOperand(ip));
4838
4839 // Restore cooked return address from stack.
4840 __ pop(r1);
4841
4842 // Restore the result register, then uncook the return address and return.
4843 __ pop(result_register());
4844 __ SmiUntag(r1);
4845 __ add(pc, r1, Operand(masm_->CodeObject()));
4846}
4847
4848
4849#undef __
4850
4851#define __ ACCESS_MASM(masm())
4852
4853FullCodeGenerator::NestedStatement* FullCodeGenerator::TryFinally::Exit(
4854 int* stack_depth,
4855 int* context_length) {
4856 // The macros used here must preserve the result register.
4857
4858 // Because the handler block contains the context of the finally
4859 // code, we can restore it directly from there for the finally code
4860 // rather than iteratively unwinding contexts via their previous
4861 // links.
4862 __ Drop(*stack_depth); // Down to the handler block.
4863 if (*context_length > 0) {
4864 // Restore the context to its dedicated register and the stack.
4865 __ ldr(cp, MemOperand(sp, StackHandlerConstants::kContextOffset));
4866 __ str(cp, MemOperand(fp, StandardFrameConstants::kContextOffset));
4867 }
4868 __ PopTryHandler();
4869 __ bl(finally_entry_);
4870
4871 *stack_depth = 0;
4872 *context_length = 0;
4873 return previous_;
4874}
4875
4876
4877#undef __
4878
4879
4880static Address GetInterruptImmediateLoadAddress(Address pc) {
4881 Address load_address = pc - 2 * Assembler::kInstrSize;
4882 if (!FLAG_enable_ool_constant_pool) {
4883 DCHECK(Assembler::IsLdrPcImmediateOffset(Memory::int32_at(load_address)));
4884 } else if (Assembler::IsLdrPpRegOffset(Memory::int32_at(load_address))) {
4885 // This is an extended constant pool lookup.
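// (The pool offset is materialized in ip by a two-instruction movw/movt
// pair on ARMv7 and by a four-instruction mov/orr/orr/orr sequence
// otherwise, which is why the checks below step back a different number
// of instructions.)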
4886 if (CpuFeatures::IsSupported(ARMv7)) {
4887 load_address -= 2 * Assembler::kInstrSize;
4888 DCHECK(Assembler::IsMovW(Memory::int32_at(load_address)));
4889 DCHECK(Assembler::IsMovT(
4890 Memory::int32_at(load_address + Assembler::kInstrSize)));
4891 } else {
4892 load_address -= 4 * Assembler::kInstrSize;
4893 DCHECK(Assembler::IsMovImmed(Memory::int32_at(load_address)));
4894 DCHECK(Assembler::IsOrrImmed(
4895 Memory::int32_at(load_address + Assembler::kInstrSize)));
4896 DCHECK(Assembler::IsOrrImmed(
4897 Memory::int32_at(load_address + 2 * Assembler::kInstrSize)));
4898 DCHECK(Assembler::IsOrrImmed(
4899 Memory::int32_at(load_address + 3 * Assembler::kInstrSize)));
4900 }
4901 } else if (CpuFeatures::IsSupported(ARMv7) &&
4902 Assembler::IsMovT(Memory::int32_at(load_address))) {
4903 // This is a movw / movt immediate load.
4904 load_address -= Assembler::kInstrSize;
4905 DCHECK(Assembler::IsMovW(Memory::int32_at(load_address)));
4906 } else if (!CpuFeatures::IsSupported(ARMv7) &&
4907 Assembler::IsOrrImmed(Memory::int32_at(load_address))) {
4908 // This is a mov / orr immediate load.
4909 load_address -= 3 * Assembler::kInstrSize;
4910 DCHECK(Assembler::IsMovImmed(Memory::int32_at(load_address)));
4911 DCHECK(Assembler::IsOrrImmed(
4912 Memory::int32_at(load_address + Assembler::kInstrSize)));
4913 DCHECK(Assembler::IsOrrImmed(
4914 Memory::int32_at(load_address + 2 * Assembler::kInstrSize)));
4915 } else {
4916 // This is a small constant pool lookup.
4917 DCHECK(Assembler::IsLdrPpImmediateOffset(Memory::int32_at(load_address)));
4918 }
4919 return load_address;
4920}
4921
4922
4923void BackEdgeTable::PatchAt(Code* unoptimized_code,
4924 Address pc,
4925 BackEdgeState target_state,
4926 Code* replacement_code) {
4927 Address pc_immediate_load_address = GetInterruptImmediateLoadAddress(pc);
4928 Address branch_address = pc_immediate_load_address - Assembler::kInstrSize;
4929 CodePatcher patcher(branch_address, 1);
4930 switch (target_state) {
4931 case INTERRUPT:
4932 {
4933 // <decrement profiling counter>
4934 // bpl ok
4935 // ; load interrupt stub address into ip - either of (for ARMv7):
4936 // ; <small cp load> | <extended cp load> | <immediate load>
4937 // ldr ip, [pc/pp, #imm] | movw ip, #imm | movw ip, #imm
4938 // | movt ip, #imm | movw ip, #imm
4939 // | ldr ip, [pp, ip]
4940 // ; or (for ARMv6):
4941 // ; <small cp load> | <extended cp load> | <immediate load>
4942 // ldr ip, [pc/pp, #imm] | mov ip, #imm | mov ip, #imm
4943 // | orr ip, ip, #imm | orr ip, ip, #imm
4944 // | orr ip, ip, #imm | orr ip, ip, #imm
4945 // | orr ip, ip, #imm | orr ip, ip, #imm
4946 // blx ip
4947 // <reset profiling counter>
4948 // ok-label
4949
4950 // Calculate branch offset to the ok-label - this is the difference
4951 // between the branch address and |pc| (which points at <blx ip>) plus
4952 // kProfileCounterResetSequence instructions.
4953 int branch_offset = pc - Instruction::kPCReadOffset - branch_address +
4954 kProfileCounterResetSequenceLength;
4955 patcher.masm()->b(branch_offset, pl);
4956 break;
4957 }
4958 case ON_STACK_REPLACEMENT:
4959 case OSR_AFTER_STACK_CHECK:
4960 // <decrement profiling counter>
4961 // mov r0, r0 (NOP)
4962 // ; load on-stack replacement address into ip - either of (for ARMv7):
4963 // ; <small cp load> | <extended cp load> | <immediate load>
4964 // ldr ip, [pc/pp, #imm] | movw ip, #imm | movw ip, #imm
4965 // | movt ip, #imm | movw ip, #imm
4966 // | ldr ip, [pp, ip]
4967 // ; or (for ARMv6):
4968 // ; <small cp load> | <extended cp load> |
<immediate load>
4969 // ldr ip, [pc/pp, #imm] | mov ip, #imm | mov ip, #imm
4970 // | orr ip, ip, #imm | orr ip, ip, #imm
4971 // | orr ip, ip, #imm | orr ip, ip, #imm
4972 // | orr ip, ip, #imm | orr ip, ip, #imm
4973 // blx ip
4974 // <reset profiling counter>
4975 // ok-label
4976 patcher.masm()->nop();
4977 break;
4978 }
4979
4980 // Replace the call address.
4981 Assembler::set_target_address_at(pc_immediate_load_address, unoptimized_code,
4982 replacement_code->entry());
4983
4984 unoptimized_code->GetHeap()->incremental_marking()->RecordCodeTargetPatch(
4985 unoptimized_code, pc_immediate_load_address, replacement_code);
4986}
4987
4988
4989BackEdgeTable::BackEdgeState BackEdgeTable::GetBackEdgeState(
4990 Isolate* isolate,
4991 Code* unoptimized_code,
4992 Address pc) {
4993 DCHECK(Assembler::IsBlxIp(Memory::int32_at(pc - Assembler::kInstrSize)));
4994
4995 Address pc_immediate_load_address = GetInterruptImmediateLoadAddress(pc);
4996 Address branch_address = pc_immediate_load_address - Assembler::kInstrSize;
4997 Address interrupt_address = Assembler::target_address_at(
4998 pc_immediate_load_address, unoptimized_code);
4999
5000 if (Assembler::IsBranch(Assembler::instr_at(branch_address))) {
5001 DCHECK(interrupt_address ==
5002 isolate->builtins()->InterruptCheck()->entry());
5003 return INTERRUPT;
5004 }
5005
5006 DCHECK(Assembler::IsNop(Assembler::instr_at(branch_address)));
5007
5008 if (interrupt_address ==
5009 isolate->builtins()->OnStackReplacement()->entry()) {
5010 return ON_STACK_REPLACEMENT;
5011 }
5012
5013 DCHECK(interrupt_address ==
5014 isolate->builtins()->OsrAfterStackCheck()->entry());
5015 return OSR_AFTER_STACK_CHECK;
5016}
5017
5018
5019} }  // namespace v8::internal
5020
5021#endif  // V8_TARGET_ARCH_ARM
5022