// Copyright 2014 the V8 project authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.

#if V8_TARGET_ARCH_PPC

#include "src/ast/compile-time-value.h"
#include "src/ast/scopes.h"
#include "src/builtins/builtins-constructor.h"
#include "src/code-factory.h"
#include "src/code-stubs.h"
#include "src/codegen.h"
#include "src/compilation-info.h"
#include "src/compiler.h"
#include "src/debug/debug.h"
#include "src/full-codegen/full-codegen.h"
#include "src/ic/ic.h"

#include "src/ppc/code-stubs-ppc.h"
#include "src/ppc/macro-assembler-ppc.h"

namespace v8 {
namespace internal {

#define __ ACCESS_MASM(masm())

// A patch site is a location in the code which it is possible to patch. This
// class has a number of methods to emit the code which is patchable and the
// method EmitPatchInfo to record a marker back to the patchable code. This
// marker is a cmpi rx, #yyy instruction, and x * 0x0000ffff + yyy (raw 16 bit
// immediate value is used) is the delta from the pc to the first instruction
// of the patchable code.
// See PatchInlinedSmiCode in ic-ppc.cc for the code that patches it.
class JumpPatchSite BASE_EMBEDDED {
 public:
  explicit JumpPatchSite(MacroAssembler* masm) : masm_(masm) {
#ifdef DEBUG
    info_emitted_ = false;
#endif
  }

  ~JumpPatchSite() { DCHECK(patch_site_.is_bound() == info_emitted_); }

  // When initially emitting this, ensure that a jump is always generated to
  // skip the inlined smi code.
  void EmitJumpIfNotSmi(Register reg, Label* target) {
    DCHECK(!patch_site_.is_bound() && !info_emitted_);
    Assembler::BlockTrampolinePoolScope block_trampoline_pool(masm_);
    __ bind(&patch_site_);
    __ cmp(reg, reg, cr0);
    __ beq(target, cr0);  // Always taken before patched.
  }

  // When initially emitting this, ensure that a jump is never generated to
  // skip the inlined smi code.
  void EmitJumpIfSmi(Register reg, Label* target) {
    Assembler::BlockTrampolinePoolScope block_trampoline_pool(masm_);
    DCHECK(!patch_site_.is_bound() && !info_emitted_);
    __ bind(&patch_site_);
    __ cmp(reg, reg, cr0);
    __ bne(target, cr0);  // Never taken before patched.
  }

  void EmitPatchInfo() {
    if (patch_site_.is_bound()) {
      int delta_to_patch_site = masm_->InstructionsGeneratedSince(&patch_site_);
      Register reg;
      // The register code carries the high bits of the offset.
      reg.set_code(delta_to_patch_site / kOff16Mask);
      __ cmpi(reg, Operand(delta_to_patch_site % kOff16Mask));
#ifdef DEBUG
      info_emitted_ = true;
#endif
    } else {
      __ nop();  // Signals no inlined code.
    }
  }

 private:
  MacroAssembler* masm() { return masm_; }
  MacroAssembler* masm_;
  Label patch_site_;
#ifdef DEBUG
  bool info_emitted_;
#endif
};

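// An illustrative instance of the marker encoding above (with kOff16Mask ==
// 0x0000ffff, as the class comment states): a delta of 131084 from the patch
// site is recorded as "cmpi r2, 14", since 131084 / 0xffff == 2 becomes the
// register code and 131084 % 0xffff == 14 the 16 bit immediate.
// PatchInlinedSmiCode decodes this to locate the patchable instructions.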

// Generate code for a JS function. On entry to the function the receiver
// and arguments have been pushed on the stack left to right. The actual
// argument count matches the formal parameter count expected by the
// function.
//
// The live registers are:
// o r4: the JS function object being called (i.e., ourselves)
// o r6: the new target value
// o cp: our context
// o fp: our caller's frame pointer (aka r31)
// o sp: stack pointer
// o lr: return address
// o ip: our own function entry (required by the prologue)
//
// The function builds a JS frame. Please see JavaScriptFrameConstants in
// frames-ppc.h for its layout.
void FullCodeGenerator::Generate() {
  CompilationInfo* info = info_;
  profiling_counter_ = isolate()->factory()->NewCell(
      Handle<Smi>(Smi::FromInt(FLAG_interrupt_budget), isolate()));
  SetFunctionPosition(literal());
  Comment cmnt(masm_, "[ function compiled by full code generator");

  ProfileEntryHookStub::MaybeCallEntryHook(masm_);

  if (FLAG_debug_code && info->ExpectsJSReceiverAsReceiver()) {
    int receiver_offset = info->scope()->num_parameters() * kPointerSize;
    __ LoadP(r5, MemOperand(sp, receiver_offset), r0);
    __ AssertNotSmi(r5);
    __ CompareObjectType(r5, r5, no_reg, FIRST_JS_RECEIVER_TYPE);
    __ Assert(ge, kSloppyFunctionExpectsJSReceiverReceiver);
  }

  // Open a frame scope to indicate that there is a frame on the stack. The
  // MANUAL indicates that the scope shouldn't actually generate code to set up
  // the frame (that is done below).
  FrameScope frame_scope(masm_, StackFrame::MANUAL);
  int prologue_offset = masm_->pc_offset();

  if (prologue_offset) {
    // Prologue logic requires its starting address in ip and the
    // corresponding offset from the function entry.
    prologue_offset += Instruction::kInstrSize;
    __ addi(ip, ip, Operand(prologue_offset));
  }
  info->set_prologue_offset(prologue_offset);
  __ Prologue(info->GeneratePreagedPrologue(), ip, prologue_offset);

  // Increment invocation count for the function.
  {
    Comment cmnt(masm_, "[ Increment invocation count");
    __ LoadP(r7, FieldMemOperand(r4, JSFunction::kFeedbackVectorOffset));
    __ LoadP(r7, FieldMemOperand(r7, Cell::kValueOffset));
    __ LoadP(r8, FieldMemOperand(
                     r7, FeedbackVector::kInvocationCountIndex * kPointerSize +
                             FeedbackVector::kHeaderSize));
    __ AddSmiLiteral(r8, r8, Smi::FromInt(1), r0);
    __ StoreP(r8,
              FieldMemOperand(
                  r7, FeedbackVector::kInvocationCountIndex * kPointerSize +
                          FeedbackVector::kHeaderSize),
              r0);
  }

  {
    Comment cmnt(masm_, "[ Allocate locals");
    int locals_count = info->scope()->num_stack_slots();
    OperandStackDepthIncrement(locals_count);
    if (locals_count > 0) {
      if (locals_count >= 128) {
        Label ok;
        __ Add(ip, sp, -(locals_count * kPointerSize), r0);
        __ LoadRoot(r5, Heap::kRealStackLimitRootIndex);
        __ cmpl(ip, r5);
        __ bc_short(ge, &ok);
        __ CallRuntime(Runtime::kThrowStackOverflow);
        __ bind(&ok);
      }
      __ LoadRoot(ip, Heap::kUndefinedValueRootIndex);
      int kMaxPushes = FLAG_optimize_for_size ? 4 : 32;
      if (locals_count >= kMaxPushes) {
        int loop_iterations = locals_count / kMaxPushes;
        __ mov(r5, Operand(loop_iterations));
        __ mtctr(r5);
        Label loop_header;
        __ bind(&loop_header);
        // Do pushes.
        for (int i = 0; i < kMaxPushes; i++) {
          __ push(ip);
        }
        // Continue loop if not done.
        __ bdnz(&loop_header);
      }
      int remaining = locals_count % kMaxPushes;
      // Emit the remaining pushes.
      for (int i = 0; i < remaining; i++) {
        __ push(ip);
      }
    }
  }
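
  // A worked example of the batched pushes above: with locals_count == 70
  // and kMaxPushes == 32 (the non-size-optimized value), the CTR loop runs
  // 70 / 32 == 2 iterations of 32 pushes each, and the unrolled tail emits
  // the remaining 70 % 32 == 6 pushes.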

  bool function_in_register_r4 = true;

  // Possibly allocate a local context.
  if (info->scope()->NeedsContext()) {
    // Argument to NewContext is the function, which is still in r4.
    Comment cmnt(masm_, "[ Allocate context");
    bool need_write_barrier = true;
    int slots = info->scope()->num_heap_slots() - Context::MIN_CONTEXT_SLOTS;
    if (info->scope()->is_script_scope()) {
      __ push(r4);
      __ Push(info->scope()->scope_info());
      __ CallRuntime(Runtime::kNewScriptContext);
      PrepareForBailoutForId(BailoutId::ScriptContext(),
                             BailoutState::TOS_REGISTER);
      // The new target value is not used; clobbering is safe.
      DCHECK_NULL(info->scope()->new_target_var());
    } else {
      if (info->scope()->new_target_var() != nullptr) {
        __ push(r6);  // Preserve new target.
      }
      if (slots <=
          ConstructorBuiltinsAssembler::MaximumFunctionContextSlots()) {
        Callable callable = CodeFactory::FastNewFunctionContext(
            isolate(), info->scope()->scope_type());
        __ mov(FastNewFunctionContextDescriptor::SlotsRegister(),
               Operand(slots));
        __ Call(callable.code(), RelocInfo::CODE_TARGET);
        // Result of the FastNewFunctionContext builtin is always in new space.
        need_write_barrier = false;
      } else {
        __ push(r4);
        __ Push(Smi::FromInt(info->scope()->scope_type()));
        __ CallRuntime(Runtime::kNewFunctionContext);
      }
      if (info->scope()->new_target_var() != nullptr) {
        __ pop(r6);  // Restore new target.
      }
    }
    function_in_register_r4 = false;
    // Context is returned in r3. It replaces the context passed to us.
    // It's saved on the stack and kept live in cp.
    __ mr(cp, r3);
    __ StoreP(r3, MemOperand(fp, StandardFrameConstants::kContextOffset));
    // Copy any necessary parameters into the context.
    int num_parameters = info->scope()->num_parameters();
    int first_parameter = info->scope()->has_this_declaration() ? -1 : 0;
    for (int i = first_parameter; i < num_parameters; i++) {
      Variable* var =
          (i == -1) ? info->scope()->receiver() : info->scope()->parameter(i);
      if (var->IsContextSlot()) {
        int parameter_offset = StandardFrameConstants::kCallerSPOffset +
                               (num_parameters - 1 - i) * kPointerSize;
        // Load parameter from stack.
        __ LoadP(r3, MemOperand(fp, parameter_offset), r0);
        // Store it in the context.
        MemOperand target = ContextMemOperand(cp, var->index());
        __ StoreP(r3, target, r0);

        // Update the write barrier.
        if (need_write_barrier) {
          __ RecordWriteContextSlot(cp, target.offset(), r3, r5,
                                    kLRHasBeenSaved, kDontSaveFPRegs);
        } else if (FLAG_debug_code) {
          Label done;
          __ JumpIfInNewSpace(cp, r3, &done);
          __ Abort(kExpectedNewSpaceObject);
          __ bind(&done);
        }
      }
    }
  }
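
  // A worked example of the parameter offsets used above: with
  // num_parameters == 3, parameter 2 (the rightmost) is read from
  // fp + kCallerSPOffset, parameter 0 from
  // fp + kCallerSPOffset + 2 * kPointerSize, and the receiver (i == -1)
  // from the slot just above parameter 0.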

  // The registers holding this function and the new target are both trashed
  // in case we bail out here. But since that can happen only when new target
  // is not used and we allocate a context, the value of
  // |function_in_register_r4| is correct.
  PrepareForBailoutForId(BailoutId::FunctionContext(),
                         BailoutState::NO_REGISTERS);

  // We don't support new.target and rest parameters here.
  DCHECK_NULL(info->scope()->new_target_var());
  DCHECK_NULL(info->scope()->rest_parameter());
  DCHECK_NULL(info->scope()->this_function_var());

  Variable* arguments = info->scope()->arguments();
  if (arguments != NULL) {
    // Function uses arguments object.
    Comment cmnt(masm_, "[ Allocate arguments object");
    if (!function_in_register_r4) {
      // Load this again, if it's used by the local context below.
      __ LoadP(r4, MemOperand(fp, JavaScriptFrameConstants::kFunctionOffset));
    }
    if (is_strict(language_mode()) || !has_simple_parameters()) {
      Callable callable = CodeFactory::FastNewStrictArguments(isolate());
      __ Call(callable.code(), RelocInfo::CODE_TARGET);
      RestoreContext();
    } else if (literal()->has_duplicate_parameters()) {
      __ Push(r4);
      __ CallRuntime(Runtime::kNewSloppyArguments_Generic);
    } else {
      Callable callable = CodeFactory::FastNewSloppyArguments(isolate());
      __ Call(callable.code(), RelocInfo::CODE_TARGET);
      RestoreContext();
    }

    SetVar(arguments, r3, r4, r5);
  }

  if (FLAG_trace) {
    __ CallRuntime(Runtime::kTraceEnter);
  }

  // Visit the declarations and body.
  PrepareForBailoutForId(BailoutId::FunctionEntry(),
                         BailoutState::NO_REGISTERS);
  {
    Comment cmnt(masm_, "[ Declarations");
    VisitDeclarations(scope()->declarations());
  }

  // Assert that the declarations do not use ICs. Otherwise the debugger
  // won't be able to redirect a PC at an IC to the correct IC in newly
  // recompiled code.
  DCHECK_EQ(0, ic_total_count_);

  {
    Comment cmnt(masm_, "[ Stack check");
    PrepareForBailoutForId(BailoutId::Declarations(),
                           BailoutState::NO_REGISTERS);
    Label ok;
    __ LoadRoot(ip, Heap::kStackLimitRootIndex);
    __ cmpl(sp, ip);
    __ bc_short(ge, &ok);
    __ Call(isolate()->builtins()->StackCheck(), RelocInfo::CODE_TARGET);
    __ bind(&ok);
  }

  {
    Comment cmnt(masm_, "[ Body");
    DCHECK(loop_depth() == 0);
    VisitStatements(literal()->body());
    DCHECK(loop_depth() == 0);
  }

  // Always emit a 'return undefined' in case control fell off the end of
  // the body.
  {
    Comment cmnt(masm_, "[ return <undefined>;");
    __ LoadRoot(r3, Heap::kUndefinedValueRootIndex);
  }
  EmitReturnSequence();

  if (HasStackOverflow()) {
    masm_->AbortConstantPoolBuilding();
  }
}


void FullCodeGenerator::ClearAccumulator() {
  __ LoadSmiLiteral(r3, Smi::kZero);
}


void FullCodeGenerator::EmitProfilingCounterDecrement(int delta) {
  __ mov(r5, Operand(profiling_counter_));
  __ LoadP(r6, FieldMemOperand(r5, Cell::kValueOffset));
  __ SubSmiLiteral(r6, r6, Smi::FromInt(delta), r0);
  __ StoreP(r6, FieldMemOperand(r5, Cell::kValueOffset), r0);
}


void FullCodeGenerator::EmitProfilingCounterReset() {
  int reset_value = FLAG_interrupt_budget;
  __ mov(r5, Operand(profiling_counter_));
  __ LoadSmiLiteral(r6, Smi::FromInt(reset_value));
  __ StoreP(r6, FieldMemOperand(r5, Cell::kValueOffset), r0);
}


void FullCodeGenerator::EmitBackEdgeBookkeeping(IterationStatement* stmt,
                                                Label* back_edge_target) {
  Comment cmnt(masm_, "[ Back edge bookkeeping");
  Label ok;

  DCHECK(back_edge_target->is_bound());
  int distance = masm_->SizeOfCodeGeneratedSince(back_edge_target) +
                 kCodeSizeMultiplier / 2;
  int weight = Min(kMaxBackEdgeWeight, Max(1, distance / kCodeSizeMultiplier));
  EmitProfilingCounterDecrement(weight);
  {
    Assembler::BlockTrampolinePoolScope block_trampoline_pool(masm_);
    Assembler::BlockConstantPoolEntrySharingScope prevent_entry_sharing(masm_);
    // BackEdgeTable::PatchAt manipulates this sequence.
    __ cmpi(r6, Operand::Zero());
    __ bc_short(ge, &ok);
    __ Call(isolate()->builtins()->InterruptCheck(), RelocInfo::CODE_TARGET);
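
    // The patchable part is the branch and call above: BackEdgeTable
    // rewrites them to toggle between the interrupt check and on-stack
    // replacement. Constant pool entry sharing is blocked here because the
    // call target constant gets patched, so its entry must not be shared
    // with other call sites.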
    // Record a mapping of this PC offset to the OSR id. This is used to find
    // the AST id from the unoptimized code in order to use it as a key into
    // the deoptimization input data found in the optimized code.
    RecordBackEdge(stmt->OsrEntryId());
  }
  EmitProfilingCounterReset();

  __ bind(&ok);
  PrepareForBailoutForId(stmt->EntryId(), BailoutState::NO_REGISTERS);
  // Record a mapping of the OSR id to this PC. This is used if the OSR
  // entry becomes the target of a bailout. We don't expect it to be, but
  // we want it to work if it is.
  PrepareForBailoutForId(stmt->OsrEntryId(), BailoutState::NO_REGISTERS);
}

void FullCodeGenerator::EmitProfilingCounterHandlingForReturnSequence(
    bool is_tail_call) {
  // Pretend that the exit is a backwards jump to the entry.
  int weight = 1;
  if (info_->ShouldSelfOptimize()) {
    weight = FLAG_interrupt_budget / FLAG_self_opt_count;
  } else {
    int distance = masm_->pc_offset() + kCodeSizeMultiplier / 2;
    weight = Min(kMaxBackEdgeWeight, Max(1, distance / kCodeSizeMultiplier));
  }
  EmitProfilingCounterDecrement(weight);
  Label ok;
  __ cmpi(r6, Operand::Zero());
  __ bge(&ok);
  // Don't need to save result register if we are going to do a tail call.
  if (!is_tail_call) {
    __ push(r3);
  }
  __ Call(isolate()->builtins()->InterruptCheck(), RelocInfo::CODE_TARGET);
  if (!is_tail_call) {
    __ pop(r3);
  }
  EmitProfilingCounterReset();
  __ bind(&ok);
}

void FullCodeGenerator::EmitReturnSequence() {
  Comment cmnt(masm_, "[ Return sequence");
  if (return_label_.is_bound()) {
    __ b(&return_label_);
  } else {
    __ bind(&return_label_);
    if (FLAG_trace) {
      // Push the return value on the stack as the parameter.
      // Runtime::TraceExit returns its parameter in r3.
      __ push(r3);
      __ CallRuntime(Runtime::kTraceExit);
    }
    EmitProfilingCounterHandlingForReturnSequence(false);
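
    // Schematically, the code emitted below is (illustrative only):
    //   LeaveFrame(JAVA_SCRIPT, sp_delta)  ;; restore sp/fp/lr and drop the
    //                                      ;; receiver plus arg_count slots
    //   blr                                ;; return to the caller via lr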
    // Make sure that the constant pool is not emitted inside of the return
    // sequence.
    {
      Assembler::BlockTrampolinePoolScope block_trampoline_pool(masm_);
      int32_t arg_count = info_->scope()->num_parameters() + 1;
      int32_t sp_delta = arg_count * kPointerSize;
      SetReturnPosition(literal());
      __ LeaveFrame(StackFrame::JAVA_SCRIPT, sp_delta);
      __ blr();
    }
  }
}

void FullCodeGenerator::RestoreContext() {
  __ LoadP(cp, MemOperand(fp, StandardFrameConstants::kContextOffset));
}

void FullCodeGenerator::StackValueContext::Plug(Variable* var) const {
  DCHECK(var->IsStackAllocated() || var->IsContextSlot());
  codegen()->GetVar(result_register(), var);
  codegen()->PushOperand(result_register());
}


void FullCodeGenerator::EffectContext::Plug(Heap::RootListIndex index) const {}


void FullCodeGenerator::AccumulatorValueContext::Plug(
    Heap::RootListIndex index) const {
  __ LoadRoot(result_register(), index);
}


void FullCodeGenerator::StackValueContext::Plug(
    Heap::RootListIndex index) const {
  __ LoadRoot(result_register(), index);
  codegen()->PushOperand(result_register());
}


void FullCodeGenerator::TestContext::Plug(Heap::RootListIndex index) const {
  codegen()->PrepareForBailoutBeforeSplit(condition(), true, true_label_,
                                          false_label_);
  if (index == Heap::kUndefinedValueRootIndex ||
      index == Heap::kNullValueRootIndex ||
      index == Heap::kFalseValueRootIndex) {
    if (false_label_ != fall_through_) __ b(false_label_);
  } else if (index == Heap::kTrueValueRootIndex) {
    if (true_label_ != fall_through_) __ b(true_label_);
  } else {
    __ LoadRoot(result_register(), index);
    codegen()->DoTest(this);
  }
}


void FullCodeGenerator::EffectContext::Plug(Handle<Object> lit) const {}


void FullCodeGenerator::AccumulatorValueContext::Plug(
    Handle<Object> lit) const {
  __ mov(result_register(), Operand(lit));
}


void FullCodeGenerator::StackValueContext::Plug(Handle<Object> lit) const {
  // Immediates cannot be pushed directly.
  __ mov(result_register(), Operand(lit));
  codegen()->PushOperand(result_register());
}


void FullCodeGenerator::TestContext::Plug(Handle<Object> lit) const {
  codegen()->PrepareForBailoutBeforeSplit(condition(), true, true_label_,
                                          false_label_);
  DCHECK(lit->IsNullOrUndefined(isolate()) || !lit->IsUndetectable());
  if (lit->IsNullOrUndefined(isolate()) || lit->IsFalse(isolate())) {
    if (false_label_ != fall_through_) __ b(false_label_);
  } else if (lit->IsTrue(isolate()) || lit->IsJSObject()) {
    if (true_label_ != fall_through_) __ b(true_label_);
  } else if (lit->IsString()) {
    if (String::cast(*lit)->length() == 0) {
      if (false_label_ != fall_through_) __ b(false_label_);
    } else {
      if (true_label_ != fall_through_) __ b(true_label_);
    }
  } else if (lit->IsSmi()) {
    if (Smi::cast(*lit)->value() == 0) {
      if (false_label_ != fall_through_) __ b(false_label_);
    } else {
      if (true_label_ != fall_through_) __ b(true_label_);
    }
  } else {
    // For simplicity we always test the accumulator register.
    __ mov(result_register(), Operand(lit));
    codegen()->DoTest(this);
  }
}


void FullCodeGenerator::StackValueContext::DropAndPlug(int count,
                                                       Register reg) const {
  DCHECK(count > 0);
  if (count > 1) codegen()->DropOperands(count - 1);
  __ StoreP(reg, MemOperand(sp, 0));
}


void FullCodeGenerator::EffectContext::Plug(Label* materialize_true,
                                            Label* materialize_false) const {
  DCHECK(materialize_true == materialize_false);
  __ bind(materialize_true);
}


void FullCodeGenerator::AccumulatorValueContext::Plug(
    Label* materialize_true, Label* materialize_false) const {
  Label done;
  __ bind(materialize_true);
  __ LoadRoot(result_register(), Heap::kTrueValueRootIndex);
  __ b(&done);
  __ bind(materialize_false);
  __ LoadRoot(result_register(), Heap::kFalseValueRootIndex);
  __ bind(&done);
}


void FullCodeGenerator::StackValueContext::Plug(
    Label* materialize_true, Label* materialize_false) const {
  Label done;
  __ bind(materialize_true);
  __ LoadRoot(ip, Heap::kTrueValueRootIndex);
  __ b(&done);
  __ bind(materialize_false);
  __ LoadRoot(ip, Heap::kFalseValueRootIndex);
  __ bind(&done);
  codegen()->PushOperand(ip);
}


void FullCodeGenerator::TestContext::Plug(Label* materialize_true,
                                          Label* materialize_false) const {
  DCHECK(materialize_true == true_label_);
  DCHECK(materialize_false == false_label_);
}


void FullCodeGenerator::AccumulatorValueContext::Plug(bool flag) const {
  Heap::RootListIndex value_root_index =
      flag ? Heap::kTrueValueRootIndex : Heap::kFalseValueRootIndex;
  __ LoadRoot(result_register(), value_root_index);
}


void FullCodeGenerator::StackValueContext::Plug(bool flag) const {
  Heap::RootListIndex value_root_index =
      flag ? Heap::kTrueValueRootIndex : Heap::kFalseValueRootIndex;
  __ LoadRoot(ip, value_root_index);
  codegen()->PushOperand(ip);
}


void FullCodeGenerator::TestContext::Plug(bool flag) const {
  codegen()->PrepareForBailoutBeforeSplit(condition(), true, true_label_,
                                          false_label_);
  if (flag) {
    if (true_label_ != fall_through_) __ b(true_label_);
  } else {
    if (false_label_ != fall_through_) __ b(false_label_);
  }
}


void FullCodeGenerator::DoTest(Expression* condition, Label* if_true,
                               Label* if_false, Label* fall_through) {
  Handle<Code> ic = ToBooleanICStub::GetUninitialized(isolate());
  CallIC(ic, condition->test_id());
  __ CompareRoot(result_register(), Heap::kTrueValueRootIndex);
  Split(eq, if_true, if_false, fall_through);
}


void FullCodeGenerator::Split(Condition cond, Label* if_true, Label* if_false,
                              Label* fall_through, CRegister cr) {
  if (if_false == fall_through) {
    __ b(cond, if_true, cr);
  } else if (if_true == fall_through) {
    __ b(NegateCondition(cond), if_false, cr);
  } else {
    __ b(cond, if_true, cr);
    __ b(if_false);
  }
}

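// For example, Split(eq, if_true, if_false, if_false) emits just
// "beq if_true" and lets execution fall through to the if_false code,
// while a NULL fall_through matches neither label and so produces both a
// conditional and an unconditional branch.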

MemOperand FullCodeGenerator::StackOperand(Variable* var) {
  DCHECK(var->IsStackAllocated());
  // Offset is negative because higher indexes are at lower addresses.
  int offset = -var->index() * kPointerSize;
  // Adjust by a (parameter or local) base offset.
  if (var->IsParameter()) {
    offset += (info_->scope()->num_parameters() + 1) * kPointerSize;
  } else {
    offset += JavaScriptFrameConstants::kLocal0Offset;
  }
  return MemOperand(fp, offset);
}


MemOperand FullCodeGenerator::VarOperand(Variable* var, Register scratch) {
  DCHECK(var->IsContextSlot() || var->IsStackAllocated());
  if (var->IsContextSlot()) {
    int context_chain_length = scope()->ContextChainLength(var->scope());
    __ LoadContext(scratch, context_chain_length);
    return ContextMemOperand(scratch, var->index());
  } else {
    return StackOperand(var);
  }
}


void FullCodeGenerator::GetVar(Register dest, Variable* var) {
  // Use destination as scratch.
  MemOperand location = VarOperand(var, dest);
  __ LoadP(dest, location, r0);
}


void FullCodeGenerator::SetVar(Variable* var, Register src, Register scratch0,
                               Register scratch1) {
  DCHECK(var->IsContextSlot() || var->IsStackAllocated());
  DCHECK(!scratch0.is(src));
  DCHECK(!scratch0.is(scratch1));
  DCHECK(!scratch1.is(src));
  MemOperand location = VarOperand(var, scratch0);
  __ StoreP(src, location, r0);

  // Emit the write barrier code if the location is in the heap.
  if (var->IsContextSlot()) {
    __ RecordWriteContextSlot(scratch0, location.offset(), src, scratch1,
                              kLRHasBeenSaved, kDontSaveFPRegs);
  }
}


void FullCodeGenerator::PrepareForBailoutBeforeSplit(Expression* expr,
                                                     bool should_normalize,
                                                     Label* if_true,
                                                     Label* if_false) {
  // Only prepare for bailouts before splits if we're in a test
  // context. Otherwise, we let the Visit function deal with the
  // preparation to avoid preparing with the same AST id twice.
  if (!context()->IsTest()) return;

  Label skip;
  if (should_normalize) __ b(&skip);
  PrepareForBailout(expr, BailoutState::TOS_REGISTER);
  if (should_normalize) {
    __ LoadRoot(ip, Heap::kTrueValueRootIndex);
    __ cmp(r3, ip);
    Split(eq, if_true, if_false, NULL);
    __ bind(&skip);
  }
}


void FullCodeGenerator::EmitDebugCheckDeclarationContext(Variable* variable) {
  // The variable in the declaration always resides in the current function
  // context.
  DCHECK_EQ(0, scope()->ContextChainLength(variable->scope()));
  if (FLAG_debug_code) {
    // Check that we're not inside a with or catch context.
    __ LoadP(r4, FieldMemOperand(cp, HeapObject::kMapOffset));
    __ CompareRoot(r4, Heap::kWithContextMapRootIndex);
    __ Check(ne, kDeclarationInWithContext);
    __ CompareRoot(r4, Heap::kCatchContextMapRootIndex);
    __ Check(ne, kDeclarationInCatchContext);
  }
}


void FullCodeGenerator::VisitVariableDeclaration(
    VariableDeclaration* declaration) {
  VariableProxy* proxy = declaration->proxy();
  Variable* variable = proxy->var();
  switch (variable->location()) {
    case VariableLocation::UNALLOCATED: {
      DCHECK(!variable->binding_needs_init());
      // Each global declaration contributes four entries to the declarations
      // array: the name, the feedback vector slot, the literal slot (unused
      // here), and the initial value (undefined for plain declarations).
      globals_->Add(variable->name(), zone());
      FeedbackSlot slot = proxy->VariableFeedbackSlot();
      DCHECK(!slot.IsInvalid());
      globals_->Add(handle(Smi::FromInt(slot.ToInt()), isolate()), zone());
      globals_->Add(isolate()->factory()->undefined_value(), zone());
      globals_->Add(isolate()->factory()->undefined_value(), zone());
      break;
    }
    case VariableLocation::PARAMETER:
    case VariableLocation::LOCAL:
      if (variable->binding_needs_init()) {
        Comment cmnt(masm_, "[ VariableDeclaration");
        __ LoadRoot(ip, Heap::kTheHoleValueRootIndex);
        __ StoreP(ip, StackOperand(variable));
      }
      break;

    case VariableLocation::CONTEXT:
      if (variable->binding_needs_init()) {
        Comment cmnt(masm_, "[ VariableDeclaration");
        EmitDebugCheckDeclarationContext(variable);
        __ LoadRoot(ip, Heap::kTheHoleValueRootIndex);
        __ StoreP(ip, ContextMemOperand(cp, variable->index()), r0);
        // No write barrier since the_hole_value is in old space.
        PrepareForBailoutForId(proxy->id(), BailoutState::NO_REGISTERS);
      }
      break;

    case VariableLocation::LOOKUP:
    case VariableLocation::MODULE:
      UNREACHABLE();
  }
}


void FullCodeGenerator::VisitFunctionDeclaration(
    FunctionDeclaration* declaration) {
  VariableProxy* proxy = declaration->proxy();
  Variable* variable = proxy->var();
  switch (variable->location()) {
    case VariableLocation::UNALLOCATED: {
      globals_->Add(variable->name(), zone());
      FeedbackSlot slot = proxy->VariableFeedbackSlot();
      DCHECK(!slot.IsInvalid());
      globals_->Add(handle(Smi::FromInt(slot.ToInt()), isolate()), zone());

      // We need the slot where the literals array lives, too.
      slot = declaration->fun()->LiteralFeedbackSlot();
      DCHECK(!slot.IsInvalid());
      globals_->Add(handle(Smi::FromInt(slot.ToInt()), isolate()), zone());

      Handle<SharedFunctionInfo> function =
          Compiler::GetSharedFunctionInfo(declaration->fun(), script(), info_);
      // Check for stack-overflow exception.
      if (function.is_null()) return SetStackOverflow();
      globals_->Add(function, zone());
      break;
    }

    case VariableLocation::PARAMETER:
    case VariableLocation::LOCAL: {
      Comment cmnt(masm_, "[ FunctionDeclaration");
      VisitForAccumulatorValue(declaration->fun());
      __ StoreP(result_register(), StackOperand(variable));
      break;
    }

    case VariableLocation::CONTEXT: {
      Comment cmnt(masm_, "[ FunctionDeclaration");
      EmitDebugCheckDeclarationContext(variable);
      VisitForAccumulatorValue(declaration->fun());
      __ StoreP(result_register(), ContextMemOperand(cp, variable->index()),
                r0);
      int offset = Context::SlotOffset(variable->index());
      // We know that we have written a function, which is not a smi.
      __ RecordWriteContextSlot(cp, offset, result_register(), r5,
                                kLRHasBeenSaved, kDontSaveFPRegs,
                                EMIT_REMEMBERED_SET, OMIT_SMI_CHECK);
      PrepareForBailoutForId(proxy->id(), BailoutState::NO_REGISTERS);
      break;
    }

    case VariableLocation::LOOKUP:
    case VariableLocation::MODULE:
      UNREACHABLE();
  }
}


void FullCodeGenerator::DeclareGlobals(Handle<FixedArray> pairs) {
  // Call the runtime to declare the globals.
  __ mov(r4, Operand(pairs));
  __ LoadSmiLiteral(r3, Smi::FromInt(DeclareGlobalsFlags()));
  __ EmitLoadFeedbackVector(r5);
  __ Push(r4, r3, r5);
  __ CallRuntime(Runtime::kDeclareGlobals);
  // Return value is ignored.
}


void FullCodeGenerator::VisitSwitchStatement(SwitchStatement* stmt) {
  Comment cmnt(masm_, "[ SwitchStatement");
  Breakable nested_statement(this, stmt);
  SetStatementPosition(stmt);

  // Keep the switch value on the stack until a case matches.
  VisitForStackValue(stmt->tag());
  PrepareForBailoutForId(stmt->EntryId(), BailoutState::NO_REGISTERS);

  ZoneList<CaseClause*>* clauses = stmt->cases();
  CaseClause* default_clause = NULL;  // Can occur anywhere in the list.

  Label next_test;  // Recycled for each test.
  // Compile all the tests with branches to their bodies.
  for (int i = 0; i < clauses->length(); i++) {
    CaseClause* clause = clauses->at(i);
    clause->body_target()->Unuse();

    // The default is not a test, but remember it as final fall through.
    if (clause->is_default()) {
      default_clause = clause;
      continue;
    }

    Comment cmnt(masm_, "[ Case comparison");
    __ bind(&next_test);
    next_test.Unuse();

    // Compile the label expression.
    VisitForAccumulatorValue(clause->label());

    // Perform the comparison as if via '==='.
    __ LoadP(r4, MemOperand(sp, 0));  // Switch value.
    bool inline_smi_code = ShouldInlineSmiCase(Token::EQ_STRICT);
    JumpPatchSite patch_site(masm_);
    if (inline_smi_code) {
      Label slow_case;
      __ orx(r5, r4, r3);
      patch_site.EmitJumpIfNotSmi(r5, &slow_case);

      __ cmp(r4, r3);
      __ bne(&next_test);
      __ Drop(1);  // Switch value is no longer needed.
      __ b(clause->body_target());
      __ bind(&slow_case);
    }

    // Record position before stub call for type feedback.
    SetExpressionPosition(clause);
    Handle<Code> ic =
        CodeFactory::CompareIC(isolate(), Token::EQ_STRICT).code();
    CallIC(ic, clause->CompareId());
    patch_site.EmitPatchInfo();

    Label skip;
    __ b(&skip);
    PrepareForBailout(clause, BailoutState::TOS_REGISTER);
    __ LoadRoot(ip, Heap::kTrueValueRootIndex);
    __ cmp(r3, ip);
    __ bne(&next_test);
    __ Drop(1);
    __ b(clause->body_target());
    __ bind(&skip);

    __ cmpi(r3, Operand::Zero());
    __ bne(&next_test);
    __ Drop(1);  // Switch value is no longer needed.
    __ b(clause->body_target());
  }

  // Discard the test value and jump to the default if present, otherwise to
  // the end of the statement.
  __ bind(&next_test);
  DropOperands(1);  // Switch value is no longer needed.
  if (default_clause == NULL) {
    __ b(nested_statement.break_label());
  } else {
    __ b(default_clause->body_target());
  }

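  // The bodies are emitted in source order below with no separating jump
  // between them, so a case body that does not itself end in a break falls
  // through into the next case's body, matching JavaScript semantics.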
  // Compile all the case bodies.
  for (int i = 0; i < clauses->length(); i++) {
    Comment cmnt(masm_, "[ Case body");
    CaseClause* clause = clauses->at(i);
    __ bind(clause->body_target());
    PrepareForBailoutForId(clause->EntryId(), BailoutState::NO_REGISTERS);
    VisitStatements(clause->statements());
  }

  __ bind(nested_statement.break_label());
  PrepareForBailoutForId(stmt->ExitId(), BailoutState::NO_REGISTERS);
}


void FullCodeGenerator::VisitForInStatement(ForInStatement* stmt) {
  Comment cmnt(masm_, "[ ForInStatement");
  SetStatementPosition(stmt, SKIP_BREAK);

  FeedbackSlot slot = stmt->ForInFeedbackSlot();

  // Get the object to enumerate over.
  SetExpressionAsStatementPosition(stmt->enumerable());
  VisitForAccumulatorValue(stmt->enumerable());
  OperandStackDepthIncrement(5);

  Label loop, exit;
  Iteration loop_statement(this, stmt);
  increment_loop_depth();

  // If the object is null or undefined, skip over the loop, otherwise convert
  // it to a JS receiver. See ECMA-262 version 5, section 12.6.4.
  Label convert, done_convert;
  __ JumpIfSmi(r3, &convert);
  __ CompareObjectType(r3, r4, r4, FIRST_JS_RECEIVER_TYPE);
  __ bge(&done_convert);
  __ CompareRoot(r3, Heap::kNullValueRootIndex);
  __ beq(&exit);
  __ CompareRoot(r3, Heap::kUndefinedValueRootIndex);
  __ beq(&exit);
  __ bind(&convert);
  __ Call(isolate()->builtins()->ToObject(), RelocInfo::CODE_TARGET);
  RestoreContext();
  __ bind(&done_convert);
  PrepareForBailoutForId(stmt->ToObjectId(), BailoutState::TOS_REGISTER);
  __ push(r3);

  // Check cache validity in generated code. If we cannot guarantee cache
  // validity, call the runtime system to check cache validity or get the
  // property names in a fixed array. Note: Proxies never have an enum cache,
  // so will always take the slow path.
  Label call_runtime;
  __ CheckEnumCache(&call_runtime);

  // The enum cache is valid. Load the map of the object being
  // iterated over and use the cache for the iteration.
  Label use_cache;
  __ LoadP(r3, FieldMemOperand(r3, HeapObject::kMapOffset));
  __ b(&use_cache);

  // Get the set of properties to enumerate.
  __ bind(&call_runtime);
  __ push(r3);  // Duplicate the enumerable object on the stack.
  __ CallRuntime(Runtime::kForInEnumerate);
  PrepareForBailoutForId(stmt->EnumId(), BailoutState::TOS_REGISTER);

  // If we got a map from the runtime call, we can do a fast
  // modification check. Otherwise, we got a fixed array, and we have
  // to do a slow check.
  Label fixed_array;
  __ LoadP(r5, FieldMemOperand(r3, HeapObject::kMapOffset));
  __ LoadRoot(ip, Heap::kMetaMapRootIndex);
  __ cmp(r5, ip);
  __ bne(&fixed_array);

  // We got a map in register r3. Get the enumeration cache from it.
  Label no_descriptors;
  __ bind(&use_cache);

  __ EnumLength(r4, r3);
  __ CmpSmiLiteral(r4, Smi::kZero, r0);
  __ beq(&no_descriptors);

  __ LoadInstanceDescriptors(r3, r5);
  __ LoadP(r5, FieldMemOperand(r5, DescriptorArray::kEnumCacheOffset));
  __ LoadP(r5,
           FieldMemOperand(r5, DescriptorArray::kEnumCacheBridgeCacheOffset));

  // Set up the four remaining stack slots.
  __ push(r3);  // Map.
  __ LoadSmiLiteral(r3, Smi::kZero);
  // Push enumeration cache, enumeration cache length (as smi) and zero.
  __ Push(r5, r4, r3);
  __ b(&loop);

  __ bind(&no_descriptors);
  __ Drop(1);
  __ b(&exit);

  // We got a fixed array in register r3. Iterate through that.
  __ bind(&fixed_array);

  __ LoadSmiLiteral(r4, Smi::FromInt(1));  // Smi(1) indicates slow check.
  __ Push(r4, r3);  // Smi and array.
  __ LoadP(r4, FieldMemOperand(r3, FixedArray::kLengthOffset));
  __ Push(r4);  // Fixed array length (as smi).
  PrepareForBailoutForId(stmt->PrepareId(), BailoutState::NO_REGISTERS);
  __ LoadSmiLiteral(r3, Smi::kZero);
  __ Push(r3);  // Initial index.

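  // At this point both paths have materialized the five loop slots; a sketch
  // of the operand stack, from the top down (derived from the loads below):
  //   sp[0]: current index (smi)
  //   sp[1]: length of the enum cache / fixed array (smi)
  //   sp[2]: enum cache or fixed array of keys
  //   sp[3]: expected receiver map, or Smi(1) for the slow path
  //   sp[4]: the enumerable object itself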
  // Generate code for doing the condition check.
  __ bind(&loop);
  SetExpressionAsStatementPosition(stmt->each());

  // Load the current count to r3, load the length to r4.
  __ LoadP(r3, MemOperand(sp, 0 * kPointerSize));
  __ LoadP(r4, MemOperand(sp, 1 * kPointerSize));
  __ cmpl(r3, r4);  // Compare to the array length.
  __ bge(loop_statement.break_label());

  // Get the current entry of the array into register r6.
  __ LoadP(r5, MemOperand(sp, 2 * kPointerSize));
  __ addi(r5, r5, Operand(FixedArray::kHeaderSize - kHeapObjectTag));
  __ SmiToPtrArrayOffset(r6, r3);
  __ LoadPX(r6, MemOperand(r6, r5));

  // Get the expected map from the stack, or the sentinel smi for the
  // permanent slow case, into register r5.
  __ LoadP(r5, MemOperand(sp, 3 * kPointerSize));

  // Check if the expected map still matches that of the enumerable.
  // If not, we may have to filter the key.
  Label update_each;
  __ LoadP(r4, MemOperand(sp, 4 * kPointerSize));
  __ LoadP(r7, FieldMemOperand(r4, HeapObject::kMapOffset));
  __ cmp(r7, r5);
  __ beq(&update_each);

  // We need to filter the key, record slow-path here.
  int const vector_index = SmiFromSlot(slot)->value();
  __ EmitLoadFeedbackVector(r3);
  __ mov(r5, Operand(FeedbackVector::MegamorphicSentinel(isolate())));
  __ StoreP(
      r5, FieldMemOperand(r3, FixedArray::OffsetOfElementAt(vector_index)), r0);

  // Convert the entry to a string or (smi) 0 if it isn't a property
  // any more. If the property has been removed while iterating, we
  // just skip it.
  __ Push(r4, r6);  // Enumerable and current entry.
  __ CallRuntime(Runtime::kForInFilter);
  PrepareForBailoutForId(stmt->FilterId(), BailoutState::TOS_REGISTER);
  __ mr(r6, r3);
  __ LoadRoot(r0, Heap::kUndefinedValueRootIndex);
  __ cmp(r3, r0);
  __ beq(loop_statement.continue_label());

  // Update the 'each' property or variable from the possibly filtered
  // entry in register r6.
  __ bind(&update_each);
  __ mr(result_register(), r6);
  // Perform the assignment as if via '='.
  {
    EffectContext context(this);
    EmitAssignment(stmt->each(), stmt->EachFeedbackSlot());
    PrepareForBailoutForId(stmt->AssignmentId(), BailoutState::NO_REGISTERS);
  }

  // Both Crankshaft and Turbofan expect BodyId to be right before stmt->body().
  PrepareForBailoutForId(stmt->BodyId(), BailoutState::NO_REGISTERS);
  // Generate code for the body of the loop.
  Visit(stmt->body());

  // Generate code for going to the next element by incrementing
  // the index (smi) stored on top of the stack.
  __ bind(loop_statement.continue_label());
  PrepareForBailoutForId(stmt->IncrementId(), BailoutState::NO_REGISTERS);
  __ pop(r3);
  __ AddSmiLiteral(r3, r3, Smi::FromInt(1), r0);
  __ push(r3);

  EmitBackEdgeBookkeeping(stmt, &loop);
  __ b(&loop);

  // Remove the pointers stored on the stack.
  __ bind(loop_statement.break_label());
  DropOperands(5);

  // Exit and decrement the loop depth.
  PrepareForBailoutForId(stmt->ExitId(), BailoutState::NO_REGISTERS);
  __ bind(&exit);
  decrement_loop_depth();
}

void FullCodeGenerator::EmitSetHomeObject(Expression* initializer, int offset,
                                          FeedbackSlot slot) {
  DCHECK(NeedsHomeObject(initializer));
  __ LoadP(StoreDescriptor::ReceiverRegister(), MemOperand(sp));
  __ LoadP(StoreDescriptor::ValueRegister(),
           MemOperand(sp, offset * kPointerSize));
  CallStoreIC(slot, isolate()->factory()->home_object_symbol());
}

void FullCodeGenerator::EmitSetHomeObjectAccumulator(Expression* initializer,
                                                     int offset,
                                                     FeedbackSlot slot) {
  DCHECK(NeedsHomeObject(initializer));
  __ Move(StoreDescriptor::ReceiverRegister(), r3);
  __ LoadP(StoreDescriptor::ValueRegister(),
           MemOperand(sp, offset * kPointerSize));
  CallStoreIC(slot, isolate()->factory()->home_object_symbol());
}

void FullCodeGenerator::EmitVariableLoad(VariableProxy* proxy,
                                         TypeofMode typeof_mode) {
  // Record position before possible IC call.
  SetExpressionPosition(proxy);
  PrepareForBailoutForId(proxy->BeforeId(), BailoutState::NO_REGISTERS);
  Variable* var = proxy->var();

  // Two cases: global variables and all other types of variables.
  switch (var->location()) {
    case VariableLocation::UNALLOCATED: {
      Comment cmnt(masm_, "[ Global variable");
      EmitGlobalVariableLoad(proxy, typeof_mode);
      context()->Plug(r3);
      break;
    }

    case VariableLocation::PARAMETER:
    case VariableLocation::LOCAL:
    case VariableLocation::CONTEXT: {
      DCHECK_EQ(NOT_INSIDE_TYPEOF, typeof_mode);
      Comment cmnt(masm_, var->IsContextSlot() ? "[ Context variable"
                                               : "[ Stack variable");
      if (proxy->hole_check_mode() == HoleCheckMode::kRequired) {
        // Throw a reference error when using an uninitialized let/const
        // binding in harmony mode.
        Label done;
        GetVar(r3, var);
        __ CompareRoot(r3, Heap::kTheHoleValueRootIndex);
        __ bne(&done);
        __ mov(r3, Operand(var->name()));
        __ push(r3);
        __ CallRuntime(Runtime::kThrowReferenceError);
        __ bind(&done);
        context()->Plug(r3);
        break;
      }
      context()->Plug(var);
      break;
    }

    case VariableLocation::LOOKUP:
    case VariableLocation::MODULE:
      UNREACHABLE();
  }
}


void FullCodeGenerator::EmitAccessor(ObjectLiteralProperty* property) {
  Expression* expression = (property == NULL) ? NULL : property->value();
  if (expression == NULL) {
    __ LoadRoot(r4, Heap::kNullValueRootIndex);
    PushOperand(r4);
  } else {
    VisitForStackValue(expression);
    if (NeedsHomeObject(expression)) {
      DCHECK(property->kind() == ObjectLiteral::Property::GETTER ||
             property->kind() == ObjectLiteral::Property::SETTER);
      // The home object (the duplicated literal) sits at operand stack depth
      // 2 for a getter ([getter, key, literal]) and depth 3 for a setter
      // ([setter, getter, key, literal]).
      int offset =
          property->kind() == ObjectLiteral::Property::GETTER ? 2 : 3;
      EmitSetHomeObject(expression, offset, property->GetSlot());
    }
  }
}


void FullCodeGenerator::VisitObjectLiteral(ObjectLiteral* expr) {
  Comment cmnt(masm_, "[ ObjectLiteral");

  Handle<BoilerplateDescription> constant_properties =
      expr->GetOrBuildConstantProperties(isolate());
  __ LoadP(r6, MemOperand(fp, JavaScriptFrameConstants::kFunctionOffset));
  __ LoadSmiLiteral(r5, SmiFromSlot(expr->literal_slot()));
  __ mov(r4, Operand(constant_properties));
  int flags = expr->ComputeFlags();
  __ LoadSmiLiteral(r3, Smi::FromInt(flags));
  if (MustCreateObjectLiteralWithRuntime(expr)) {
    __ Push(r6, r5, r4, r3);
    __ CallRuntime(Runtime::kCreateObjectLiteral);
  } else {
    Callable callable = CodeFactory::FastCloneShallowObject(
        isolate(), expr->properties_count());
    __ Call(callable.code(), RelocInfo::CODE_TARGET);
    RestoreContext();
  }
  PrepareForBailoutForId(expr->CreateLiteralId(), BailoutState::TOS_REGISTER);

  // If result_saved is true the result is on top of the stack. If
  // result_saved is false the result is in r3.
  bool result_saved = false;

  AccessorTable accessor_table(zone());
  for (int i = 0; i < expr->properties()->length(); i++) {
    ObjectLiteral::Property* property = expr->properties()->at(i);
    DCHECK(!property->is_computed_name());
    if (property->IsCompileTimeValue()) continue;

    Literal* key = property->key()->AsLiteral();
    Expression* value = property->value();
    if (!result_saved) {
      PushOperand(r3);  // Save result on stack.
      result_saved = true;
    }
    switch (property->kind()) {
      case ObjectLiteral::Property::SPREAD:
      case ObjectLiteral::Property::CONSTANT:
        UNREACHABLE();
      case ObjectLiteral::Property::MATERIALIZED_LITERAL:
        DCHECK(!CompileTimeValue::IsCompileTimeValue(property->value()));
      // Fall through.
      case ObjectLiteral::Property::COMPUTED:
        // It is safe to use [[Put]] here because the boilerplate already
        // contains computed properties with an uninitialized value.
        if (key->IsStringLiteral()) {
          DCHECK(key->IsPropertyName());
          if (property->emit_store()) {
            VisitForAccumulatorValue(value);
            DCHECK(StoreDescriptor::ValueRegister().is(r3));
            __ LoadP(StoreDescriptor::ReceiverRegister(), MemOperand(sp));
            CallStoreIC(property->GetSlot(0), key->value(), true);
            PrepareForBailoutForId(key->id(), BailoutState::NO_REGISTERS);

            if (NeedsHomeObject(value)) {
              EmitSetHomeObjectAccumulator(value, 0, property->GetSlot(1));
            }
          } else {
            VisitForEffect(value);
          }
          break;
        }
        // Duplicate receiver on stack.
        __ LoadP(r3, MemOperand(sp));
        PushOperand(r3);
        VisitForStackValue(key);
        VisitForStackValue(value);
        if (property->emit_store()) {
          if (NeedsHomeObject(value)) {
            EmitSetHomeObject(value, 2, property->GetSlot());
          }
          __ LoadSmiLiteral(r3, Smi::FromInt(SLOPPY));  // PropertyAttributes
          PushOperand(r3);
          CallRuntimeWithOperands(Runtime::kSetProperty);
        } else {
          DropOperands(3);
        }
        break;
      case ObjectLiteral::Property::PROTOTYPE:
        // Duplicate receiver on stack.
        __ LoadP(r3, MemOperand(sp));
        PushOperand(r3);
        VisitForStackValue(value);
        DCHECK(property->emit_store());
        CallRuntimeWithOperands(Runtime::kInternalSetPrototype);
        PrepareForBailoutForId(expr->GetIdForPropertySet(i),
                               BailoutState::NO_REGISTERS);
        break;
      case ObjectLiteral::Property::GETTER:
        if (property->emit_store()) {
          AccessorTable::Iterator it = accessor_table.lookup(key);
          it->second->bailout_id = expr->GetIdForPropertySet(i);
          it->second->getter = property;
        }
        break;
      case ObjectLiteral::Property::SETTER:
        if (property->emit_store()) {
          AccessorTable::Iterator it = accessor_table.lookup(key);
          it->second->bailout_id = expr->GetIdForPropertySet(i);
          it->second->setter = property;
        }
        break;
    }
  }

  // Emit code to define accessors, using only a single call to the runtime
  // for each pair of corresponding getters and setters.
  for (AccessorTable::Iterator it = accessor_table.begin();
       it != accessor_table.end(); ++it) {
    __ LoadP(r3, MemOperand(sp));  // Duplicate receiver.
    PushOperand(r3);
    VisitForStackValue(it->first);
    EmitAccessor(it->second->getter);
    EmitAccessor(it->second->setter);
    __ LoadSmiLiteral(r3, Smi::FromInt(NONE));
    PushOperand(r3);
    CallRuntimeWithOperands(Runtime::kDefineAccessorPropertyUnchecked);
    PrepareForBailoutForId(it->second->bailout_id, BailoutState::NO_REGISTERS);
  }

  if (result_saved) {
    context()->PlugTOS();
  } else {
    context()->Plug(r3);
  }
}


void FullCodeGenerator::VisitArrayLiteral(ArrayLiteral* expr) {
  Comment cmnt(masm_, "[ ArrayLiteral");

  Handle<ConstantElementsPair> constant_elements =
      expr->GetOrBuildConstantElements(isolate());

  __ LoadP(r6, MemOperand(fp, JavaScriptFrameConstants::kFunctionOffset));
  __ LoadSmiLiteral(r5, SmiFromSlot(expr->literal_slot()));
  __ mov(r4, Operand(constant_elements));
  if (MustCreateArrayLiteralWithRuntime(expr)) {
    __ LoadSmiLiteral(r3, Smi::FromInt(expr->ComputeFlags()));
    __ Push(r6, r5, r4, r3);
    __ CallRuntime(Runtime::kCreateArrayLiteral);
  } else {
    Callable callable =
        CodeFactory::FastCloneShallowArray(isolate(), TRACK_ALLOCATION_SITE);
    __ Call(callable.code(), RelocInfo::CODE_TARGET);
    RestoreContext();
  }
  PrepareForBailoutForId(expr->CreateLiteralId(), BailoutState::TOS_REGISTER);

  bool result_saved = false;  // Is the result saved to the stack?
  ZoneList<Expression*>* subexprs = expr->values();
  int length = subexprs->length();

  // Emit code to evaluate all the non-constant subexpressions and to store
  // them into the newly cloned array.
  for (int array_index = 0; array_index < length; array_index++) {
    Expression* subexpr = subexprs->at(array_index);
    DCHECK(!subexpr->IsSpread());
    // If the subexpression is a literal or a simple materialized literal it
    // is already set in the cloned array.
    if (CompileTimeValue::IsCompileTimeValue(subexpr)) continue;

    if (!result_saved) {
      PushOperand(r3);
      result_saved = true;
    }
    VisitForAccumulatorValue(subexpr);

    __ LoadSmiLiteral(StoreDescriptor::NameRegister(),
                      Smi::FromInt(array_index));
    __ LoadP(StoreDescriptor::ReceiverRegister(), MemOperand(sp, 0));
    CallKeyedStoreIC(expr->LiteralFeedbackSlot());

    PrepareForBailoutForId(expr->GetIdForElement(array_index),
                           BailoutState::NO_REGISTERS);
  }

  if (result_saved) {
    context()->PlugTOS();
  } else {
    context()->Plug(r3);
  }
}


void FullCodeGenerator::VisitAssignment(Assignment* expr) {
  DCHECK(expr->target()->IsValidReferenceExpressionOrThis());

  Comment cmnt(masm_, "[ Assignment");

  Property* property = expr->target()->AsProperty();
  LhsKind assign_type = Property::GetAssignType(property);

  // Evaluate LHS expression.
  switch (assign_type) {
    case VARIABLE:
      // Nothing to do here.
      break;
    case NAMED_PROPERTY:
      if (expr->is_compound()) {
        // We need the receiver both on the stack and in the register.
        VisitForStackValue(property->obj());
        __ LoadP(LoadDescriptor::ReceiverRegister(), MemOperand(sp, 0));
      } else {
        VisitForStackValue(property->obj());
      }
      break;
    case KEYED_PROPERTY:
      if (expr->is_compound()) {
        VisitForStackValue(property->obj());
        VisitForStackValue(property->key());
        __ LoadP(LoadDescriptor::ReceiverRegister(),
                 MemOperand(sp, 1 * kPointerSize));
        __ LoadP(LoadDescriptor::NameRegister(), MemOperand(sp, 0));
      } else {
        VisitForStackValue(property->obj());
        VisitForStackValue(property->key());
      }
      break;
    case NAMED_SUPER_PROPERTY:
    case KEYED_SUPER_PROPERTY:
      UNREACHABLE();
      break;
  }

  // For compound assignments we need another deoptimization point after the
  // variable/property load.
  if (expr->is_compound()) {
    {
      AccumulatorValueContext context(this);
      switch (assign_type) {
        case VARIABLE:
          EmitVariableLoad(expr->target()->AsVariableProxy());
          PrepareForBailout(expr->target(), BailoutState::TOS_REGISTER);
          break;
        case NAMED_PROPERTY:
          EmitNamedPropertyLoad(property);
          PrepareForBailoutForId(property->LoadId(),
                                 BailoutState::TOS_REGISTER);
          break;
        case KEYED_PROPERTY:
          EmitKeyedPropertyLoad(property);
          PrepareForBailoutForId(property->LoadId(),
                                 BailoutState::TOS_REGISTER);
          break;
        case NAMED_SUPER_PROPERTY:
        case KEYED_SUPER_PROPERTY:
          UNREACHABLE();
          break;
      }
    }

    Token::Value op = expr->binary_op();
    PushOperand(r3);  // Left operand goes on the stack.
    VisitForAccumulatorValue(expr->value());

    AccumulatorValueContext context(this);
    if (ShouldInlineSmiCase(op)) {
      EmitInlineSmiBinaryOp(expr->binary_operation(), op, expr->target(),
                            expr->value());
    } else {
      EmitBinaryOp(expr->binary_operation(), op);
    }

    // Deoptimization point in case the binary operation may have side effects.
    PrepareForBailout(expr->binary_operation(), BailoutState::TOS_REGISTER);
  } else {
    VisitForAccumulatorValue(expr->value());
  }

  SetExpressionPosition(expr);

  // Store the value.
  switch (assign_type) {
    case VARIABLE: {
      VariableProxy* proxy = expr->target()->AsVariableProxy();
      EmitVariableAssignment(proxy->var(), expr->op(), expr->AssignmentSlot(),
                             proxy->hole_check_mode());
      PrepareForBailoutForId(expr->AssignmentId(), BailoutState::TOS_REGISTER);
      context()->Plug(r3);
      break;
    }
    case NAMED_PROPERTY:
      EmitNamedPropertyAssignment(expr);
      break;
    case KEYED_PROPERTY:
      EmitKeyedPropertyAssignment(expr);
      break;
    case NAMED_SUPER_PROPERTY:
    case KEYED_SUPER_PROPERTY:
      UNREACHABLE();
      break;
  }
}


void FullCodeGenerator::VisitYield(Yield* expr) {
  // Resumable functions are not supported.
  UNREACHABLE();
}

void FullCodeGenerator::PushOperands(Register reg1, Register reg2) {
  OperandStackDepthIncrement(2);
  __ Push(reg1, reg2);
}

void FullCodeGenerator::PushOperands(Register reg1, Register reg2,
                                     Register reg3) {
  OperandStackDepthIncrement(3);
  __ Push(reg1, reg2, reg3);
}

void FullCodeGenerator::PushOperands(Register reg1, Register reg2,
                                     Register reg3, Register reg4) {
  OperandStackDepthIncrement(4);
  __ Push(reg1, reg2, reg3, reg4);
}

void FullCodeGenerator::PopOperands(Register reg1, Register reg2) {
  OperandStackDepthDecrement(2);
  __ Pop(reg1, reg2);
}

void FullCodeGenerator::EmitOperandStackDepthCheck() {
  if (FLAG_debug_code) {
    int expected_diff = StandardFrameConstants::kFixedFrameSizeFromFp +
                        operand_stack_depth_ * kPointerSize;
    __ sub(r3, fp, sp);
    __ mov(ip, Operand(expected_diff));
    __ cmp(r3, ip);
    __ Assert(eq, kUnexpectedStackDepth);
  }
}

void FullCodeGenerator::EmitCreateIteratorResult(bool done) {
  Label allocate, done_allocate;

  __ Allocate(JSIteratorResult::kSize, r3, r5, r6, &allocate,
              NO_ALLOCATION_FLAGS);
  __ b(&done_allocate);

  __ bind(&allocate);
  __ Push(Smi::FromInt(JSIteratorResult::kSize));
  __ CallRuntime(Runtime::kAllocateInNewSpace);

  __ bind(&done_allocate);
  __ LoadNativeContextSlot(Context::ITERATOR_RESULT_MAP_INDEX, r4);
  PopOperand(r5);
  __ LoadRoot(r6,
              done ? Heap::kTrueValueRootIndex : Heap::kFalseValueRootIndex);
  __ LoadRoot(r7, Heap::kEmptyFixedArrayRootIndex);
  __ StoreP(r4, FieldMemOperand(r3, HeapObject::kMapOffset), r0);
  __ StoreP(r7, FieldMemOperand(r3, JSObject::kPropertiesOffset), r0);
  __ StoreP(r7, FieldMemOperand(r3, JSObject::kElementsOffset), r0);
  __ StoreP(r5, FieldMemOperand(r3, JSIteratorResult::kValueOffset), r0);
  __ StoreP(r6, FieldMemOperand(r3, JSIteratorResult::kDoneOffset), r0);
}


void FullCodeGenerator::EmitInlineSmiBinaryOp(BinaryOperation* expr,
                                              Token::Value op,
                                              Expression* left_expr,
                                              Expression* right_expr) {
  Label done, smi_case, stub_call;

  Register scratch1 = r5;
  Register scratch2 = r6;

  // Get the arguments.
  Register left = r4;
  Register right = r3;
  PopOperand(left);

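  // The combined check below relies on kSmiTag == 0: OR-ing the two values
  // leaves a zero tag bit only if both operands have a zero tag bit, i.e.
  // only if both are smis.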
  // Perform combined smi check on both operands.
  __ orx(scratch1, left, right);
  STATIC_ASSERT(kSmiTag == 0);
  JumpPatchSite patch_site(masm_);
  patch_site.EmitJumpIfSmi(scratch1, &smi_case);

  __ bind(&stub_call);
  Handle<Code> code = CodeFactory::BinaryOpIC(isolate(), op).code();
  CallIC(code, expr->BinaryOperationFeedbackId());
  patch_site.EmitPatchInfo();
  __ b(&done);

  __ bind(&smi_case);
  // Smi case. This code works the same way as the smi-smi case in the type
  // recording binary operation stub.
  switch (op) {
    case Token::SAR:
      __ GetLeastBitsFromSmi(scratch1, right, 5);
      __ ShiftRightArith(right, left, scratch1);
      __ ClearRightImm(right, right, Operand(kSmiTagSize + kSmiShiftSize));
      break;
    case Token::SHL: {
      __ GetLeastBitsFromSmi(scratch2, right, 5);
#if V8_TARGET_ARCH_PPC64
      __ ShiftLeft_(right, left, scratch2);
#else
      __ SmiUntag(scratch1, left);
      __ ShiftLeft_(scratch1, scratch1, scratch2);
      // Check that the *signed* result fits in a smi.
      __ JumpIfNotSmiCandidate(scratch1, scratch2, &stub_call);
      __ SmiTag(right, scratch1);
#endif
      break;
    }
    case Token::SHR: {
      __ SmiUntag(scratch1, left);
      __ GetLeastBitsFromSmi(scratch2, right, 5);
      __ srw(scratch1, scratch1, scratch2);
      // Unsigned shift is not allowed to produce a negative number.
      __ JumpIfNotUnsignedSmiCandidate(scratch1, r0, &stub_call);
      __ SmiTag(right, scratch1);
      break;
    }
    case Token::ADD: {
      __ AddAndCheckForOverflow(scratch1, left, right, scratch2, r0);
      __ BranchOnOverflow(&stub_call);
      __ mr(right, scratch1);
      break;
    }
    case Token::SUB: {
      __ SubAndCheckForOverflow(scratch1, left, right, scratch2, r0);
      __ BranchOnOverflow(&stub_call);
      __ mr(right, scratch1);
      break;
    }
    case Token::MUL: {
      Label mul_zero;
#if V8_TARGET_ARCH_PPC64
      // Remove tag from both operands.
      __ SmiUntag(ip, right);
      __ SmiUntag(r0, left);
      __ Mul(scratch1, r0, ip);
      // Check for overflowing the smi range - no overflow if higher 33 bits
      // of the result are identical.
      __ TestIfInt32(scratch1, r0);
      __ bne(&stub_call);
#else
      __ SmiUntag(ip, right);
      __ mullw(scratch1, left, ip);
      __ mulhw(scratch2, left, ip);
      // Check for overflowing the smi range - no overflow if higher 33 bits
      // of the result are identical.
      __ TestIfInt32(scratch2, scratch1, ip);
      __ bne(&stub_call);
#endif
      // Go slow on zero result to handle -0.
      __ cmpi(scratch1, Operand::Zero());
      __ beq(&mul_zero);
#if V8_TARGET_ARCH_PPC64
      __ SmiTag(right, scratch1);
#else
      __ mr(right, scratch1);
#endif
      __ b(&done);
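      // Example of why a zero product goes slow: in JavaScript (-3) * 0
      // must produce -0, which no smi can represent. Adding the (tagged)
      // operands below distinguishes the cases: a negative sum means one
      // operand was negative, so the stub must be used to produce -0.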
      __ bind(&mul_zero);
      __ add(scratch2, right, left);
      __ cmpi(scratch2, Operand::Zero());
      __ blt(&stub_call);
      __ LoadSmiLiteral(right, Smi::kZero);
      break;
    }
    case Token::BIT_OR:
      __ orx(right, left, right);
      break;
    case Token::BIT_AND:
      __ and_(right, left, right);
      break;
    case Token::BIT_XOR:
      __ xor_(right, left, right);
      break;
    default:
      UNREACHABLE();
  }

  __ bind(&done);
  context()->Plug(r3);
}

void FullCodeGenerator::EmitBinaryOp(BinaryOperation* expr, Token::Value op) {
  PopOperand(r4);
  Handle<Code> code = CodeFactory::BinaryOpIC(isolate(), op).code();
  JumpPatchSite patch_site(masm_);  // unbound, signals no inlined smi code.
  CallIC(code, expr->BinaryOperationFeedbackId());
  patch_site.EmitPatchInfo();
  context()->Plug(r3);
}

void FullCodeGenerator::EmitAssignment(Expression* expr, FeedbackSlot slot) {
  DCHECK(expr->IsValidReferenceExpressionOrThis());

  Property* prop = expr->AsProperty();
  LhsKind assign_type = Property::GetAssignType(prop);

  switch (assign_type) {
    case VARIABLE: {
      VariableProxy* proxy = expr->AsVariableProxy();
      EffectContext context(this);
      EmitVariableAssignment(proxy->var(), Token::ASSIGN, slot,
                             proxy->hole_check_mode());
      break;
    }
    case NAMED_PROPERTY: {
      PushOperand(r3);  // Preserve value.
      VisitForAccumulatorValue(prop->obj());
      __ Move(StoreDescriptor::ReceiverRegister(), r3);
      PopOperand(StoreDescriptor::ValueRegister());  // Restore value.
      CallStoreIC(slot, prop->key()->AsLiteral()->value());
      break;
    }
    case KEYED_PROPERTY: {
      PushOperand(r3);  // Preserve value.
      VisitForStackValue(prop->obj());
      VisitForAccumulatorValue(prop->key());
      __ Move(StoreDescriptor::NameRegister(), r3);
      PopOperands(StoreDescriptor::ValueRegister(),
                  StoreDescriptor::ReceiverRegister());
      CallKeyedStoreIC(slot);
      break;
    }
    case NAMED_SUPER_PROPERTY:
    case KEYED_SUPER_PROPERTY:
      UNREACHABLE();
      break;
  }
  context()->Plug(r3);
}


void FullCodeGenerator::EmitStoreToStackLocalOrContextSlot(
    Variable* var, MemOperand location) {
  __ StoreP(result_register(), location, r0);
  if (var->IsContextSlot()) {
    // RecordWrite may destroy all its register arguments.
    __ mr(r6, result_register());
    int offset = Context::SlotOffset(var->index());
    __ RecordWriteContextSlot(r4, offset, r6, r5, kLRHasBeenSaved,
                              kDontSaveFPRegs);
  }
}

void FullCodeGenerator::EmitVariableAssignment(Variable* var, Token::Value op,
                                               FeedbackSlot slot,
                                               HoleCheckMode hole_check_mode) {
  if (var->IsUnallocated()) {
    // Global var, const, or let.
    __ LoadGlobalObject(StoreDescriptor::ReceiverRegister());
    CallStoreIC(slot, var->name());

  } else if (IsLexicalVariableMode(var->mode()) && op != Token::INIT) {
    DCHECK(!var->IsLookupSlot());
    DCHECK(var->IsStackAllocated() || var->IsContextSlot());
    MemOperand location = VarOperand(var, r4);
    // Perform an initialization check for lexically declared variables.
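    // The hole marks a let/const binding that is still in its temporal dead
    // zone: an assignment that observes the hole must throw a ReferenceError
    // rather than store the value.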
    if (hole_check_mode == HoleCheckMode::kRequired) {
      Label assign;
      __ LoadP(r6, location);
      __ CompareRoot(r6, Heap::kTheHoleValueRootIndex);
      __ bne(&assign);
      __ mov(r6, Operand(var->name()));
      __ push(r6);
      __ CallRuntime(Runtime::kThrowReferenceError);
      __ bind(&assign);
    }
    if (var->mode() != CONST) {
      EmitStoreToStackLocalOrContextSlot(var, location);
    } else if (var->throw_on_const_assignment(language_mode())) {
      __ CallRuntime(Runtime::kThrowConstAssignError);
    }
  } else if (var->is_this() && var->mode() == CONST && op == Token::INIT) {
    // Initializing assignment to const {this} needs a write barrier.
    DCHECK(var->IsStackAllocated() || var->IsContextSlot());
    Label uninitialized_this;
    MemOperand location = VarOperand(var, r4);
    __ LoadP(r6, location);
    __ CompareRoot(r6, Heap::kTheHoleValueRootIndex);
    __ beq(&uninitialized_this);
    __ mov(r4, Operand(var->name()));
    __ push(r4);
    __ CallRuntime(Runtime::kThrowReferenceError);
    __ bind(&uninitialized_this);
    EmitStoreToStackLocalOrContextSlot(var, location);

  } else {
    DCHECK(var->mode() != CONST || op == Token::INIT);
    DCHECK(var->IsStackAllocated() || var->IsContextSlot());
    DCHECK(!var->IsLookupSlot());
    // Assignment to var or initializing assignment to let/const in harmony
    // mode.
    MemOperand location = VarOperand(var, r4);
    if (FLAG_debug_code && var->mode() == LET && op == Token::INIT) {
      // Check for an uninitialized let binding.
      __ LoadP(r5, location);
      __ CompareRoot(r5, Heap::kTheHoleValueRootIndex);
      __ Check(eq, kLetBindingReInitialization);
    }
    EmitStoreToStackLocalOrContextSlot(var, location);
  }
}


void FullCodeGenerator::EmitNamedPropertyAssignment(Assignment* expr) {
  // Assignment to a property, using a named store IC.
  Property* prop = expr->target()->AsProperty();
  DCHECK(prop != NULL);
  DCHECK(prop->key()->IsLiteral());

  PopOperand(StoreDescriptor::ReceiverRegister());
  CallStoreIC(expr->AssignmentSlot(), prop->key()->AsLiteral()->value());

  PrepareForBailoutForId(expr->AssignmentId(), BailoutState::TOS_REGISTER);
  context()->Plug(r3);
}


void FullCodeGenerator::EmitKeyedPropertyAssignment(Assignment* expr) {
  // Assignment to a property, using a keyed store IC.
  PopOperands(StoreDescriptor::ReceiverRegister(),
              StoreDescriptor::NameRegister());
  DCHECK(StoreDescriptor::ValueRegister().is(r3));

  CallKeyedStoreIC(expr->AssignmentSlot());

  PrepareForBailoutForId(expr->AssignmentId(), BailoutState::TOS_REGISTER);
  context()->Plug(r3);
}

// Code common for calls using the IC.
void FullCodeGenerator::EmitCallWithLoadIC(Call* expr) {
  Expression* callee = expr->expression();

  // Get the target function.
  ConvertReceiverMode convert_mode;
  if (callee->IsVariableProxy()) {
    {
      StackValueContext context(this);
      EmitVariableLoad(callee->AsVariableProxy());
      PrepareForBailout(callee, BailoutState::NO_REGISTERS);
    }
    // Push undefined as receiver. This is patched in the method prologue if
    // it is a sloppy mode method.
    __ LoadRoot(r0, Heap::kUndefinedValueRootIndex);
    PushOperand(r0);
    convert_mode = ConvertReceiverMode::kNullOrUndefined;
  } else {
    // Load the function from the receiver.
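    // The receiver sits on top of the stack and the load IC leaves the
    // callee in r3; a copy of the receiver is pushed and the callee is
    // stored into the old receiver slot, producing the [function, receiver]
    // layout that the call sequence expects.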
    DCHECK(callee->IsProperty());
    DCHECK(!callee->AsProperty()->IsSuperAccess());
    __ LoadP(LoadDescriptor::ReceiverRegister(), MemOperand(sp, 0));
    EmitNamedPropertyLoad(callee->AsProperty());
    PrepareForBailoutForId(callee->AsProperty()->LoadId(),
                           BailoutState::TOS_REGISTER);
    // Push the target function under the receiver.
    __ LoadP(r0, MemOperand(sp, 0));
    PushOperand(r0);
    __ StoreP(r3, MemOperand(sp, kPointerSize));
    convert_mode = ConvertReceiverMode::kNotNullOrUndefined;
  }

  EmitCall(expr, convert_mode);
}


// Code common for calls using the IC.
void FullCodeGenerator::EmitKeyedCallWithLoadIC(Call* expr, Expression* key) {
  // Load the key.
  VisitForAccumulatorValue(key);

  Expression* callee = expr->expression();

  // Load the function from the receiver.
  DCHECK(callee->IsProperty());
  __ LoadP(LoadDescriptor::ReceiverRegister(), MemOperand(sp, 0));
  __ Move(LoadDescriptor::NameRegister(), r3);
  EmitKeyedPropertyLoad(callee->AsProperty());
  PrepareForBailoutForId(callee->AsProperty()->LoadId(),
                         BailoutState::TOS_REGISTER);

  // Push the target function under the receiver.
  __ LoadP(ip, MemOperand(sp, 0));
  PushOperand(ip);
  __ StoreP(r3, MemOperand(sp, kPointerSize));

  EmitCall(expr, ConvertReceiverMode::kNotNullOrUndefined);
}


void FullCodeGenerator::EmitCall(Call* expr, ConvertReceiverMode mode) {
  // Load the arguments.
  ZoneList<Expression*>* args = expr->arguments();
  int arg_count = args->length();
  for (int i = 0; i < arg_count; i++) {
    VisitForStackValue(args->at(i));
  }

  PrepareForBailoutForId(expr->CallId(), BailoutState::NO_REGISTERS);
  SetCallPosition(expr, expr->tail_call_mode());
  if (expr->tail_call_mode() == TailCallMode::kAllow) {
    if (FLAG_trace) {
      __ CallRuntime(Runtime::kTraceTailCall);
    }
    // Update profiling counters before the tail call since we will
    // not return to this function.
    EmitProfilingCounterHandlingForReturnSequence(true);
  }
  Handle<Code> code =
      CodeFactory::CallICTrampoline(isolate(), mode, expr->tail_call_mode())
          .code();
  __ mov(r6, Operand(IntFromSlot(expr->CallFeedbackICSlot())));
  __ LoadP(r4, MemOperand(sp, (arg_count + 1) * kPointerSize), r0);
  __ mov(r3, Operand(arg_count));
  CallIC(code);
  OperandStackDepthDecrement(arg_count + 1);

  RecordJSReturnSite(expr);
  RestoreContext();
  context()->DropAndPlug(1, r3);
}

void FullCodeGenerator::VisitCallNew(CallNew* expr) {
  Comment cmnt(masm_, "[ CallNew");
  // According to ECMA-262, section 11.2.2, page 44, the function
  // expression in new calls must be evaluated before the
  // arguments.

  // Push constructor on the stack. If it's not a function it's used as
  // receiver for CALL_NON_FUNCTION, otherwise the value on the stack is
  // ignored.
  DCHECK(!expr->expression()->IsSuperPropertyReference());
  VisitForStackValue(expr->expression());

  // Push the arguments ("left-to-right") on the stack.
  ZoneList<Expression*>* args = expr->arguments();
  int arg_count = args->length();
  for (int i = 0; i < arg_count; i++) {
    VisitForStackValue(args->at(i));
  }

  // Call the construct call builtin that handles allocation and
  // constructor invocation.
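  // The construct stub expects the argument count in r3, the constructor in
  // r4, the feedback vector in r5 and the slot index (as a smi) in r6; all
  // four are set up below.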
  SetConstructCallPosition(expr);

  // Load function and argument count into r4 and r3.
  __ mov(r3, Operand(arg_count));
  __ LoadP(r4, MemOperand(sp, arg_count * kPointerSize), r0);

  // Record call targets in unoptimized code.
  __ EmitLoadFeedbackVector(r5);
  __ LoadSmiLiteral(r6, SmiFromSlot(expr->CallNewFeedbackSlot()));

  CallConstructStub stub(isolate());
  CallIC(stub.GetCode());
  OperandStackDepthDecrement(arg_count + 1);
  PrepareForBailoutForId(expr->ReturnId(), BailoutState::TOS_REGISTER);
  RestoreContext();
  context()->Plug(r3);
}


void FullCodeGenerator::EmitIsSmi(CallRuntime* expr) {
  ZoneList<Expression*>* args = expr->arguments();
  DCHECK(args->length() == 1);

  VisitForAccumulatorValue(args->at(0));

  Label materialize_true, materialize_false;
  Label* if_true = NULL;
  Label* if_false = NULL;
  Label* fall_through = NULL;
  context()->PrepareTest(&materialize_true, &materialize_false, &if_true,
                         &if_false, &fall_through);

  PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
  __ TestIfSmi(r3, r0);
  Split(eq, if_true, if_false, fall_through, cr0);

  context()->Plug(if_true, if_false);
}


void FullCodeGenerator::EmitIsJSReceiver(CallRuntime* expr) {
  ZoneList<Expression*>* args = expr->arguments();
  DCHECK(args->length() == 1);

  VisitForAccumulatorValue(args->at(0));

  Label materialize_true, materialize_false;
  Label* if_true = NULL;
  Label* if_false = NULL;
  Label* fall_through = NULL;
  context()->PrepareTest(&materialize_true, &materialize_false, &if_true,
                         &if_false, &fall_through);

  __ JumpIfSmi(r3, if_false);
  __ CompareObjectType(r3, r4, r4, FIRST_JS_RECEIVER_TYPE);
  PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
  Split(ge, if_true, if_false, fall_through);

  context()->Plug(if_true, if_false);
}


void FullCodeGenerator::EmitIsArray(CallRuntime* expr) {
  ZoneList<Expression*>* args = expr->arguments();
  DCHECK(args->length() == 1);

  VisitForAccumulatorValue(args->at(0));

  Label materialize_true, materialize_false;
  Label* if_true = NULL;
  Label* if_false = NULL;
  Label* fall_through = NULL;
  context()->PrepareTest(&materialize_true, &materialize_false, &if_true,
                         &if_false, &fall_through);

  __ JumpIfSmi(r3, if_false);
  __ CompareObjectType(r3, r4, r4, JS_ARRAY_TYPE);
  PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
  Split(eq, if_true, if_false, fall_through);

  context()->Plug(if_true, if_false);
}


void FullCodeGenerator::EmitIsTypedArray(CallRuntime* expr) {
  ZoneList<Expression*>* args = expr->arguments();
  DCHECK(args->length() == 1);

  VisitForAccumulatorValue(args->at(0));

  Label materialize_true, materialize_false;
  Label* if_true = NULL;
  Label* if_false = NULL;
  Label* fall_through = NULL;
  context()->PrepareTest(&materialize_true, &materialize_false, &if_true,
                         &if_false, &fall_through);

  __ JumpIfSmi(r3, if_false);
  __ CompareObjectType(r3, r4, r4, JS_TYPED_ARRAY_TYPE);
  PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
  Split(eq, if_true, if_false, fall_through);

  context()->Plug(if_true, if_false);
}


void FullCodeGenerator::EmitIsJSProxy(CallRuntime* expr) {
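  // %_IsJSProxy(value): smis fail the test, everything else is compared
  // against JS_PROXY_TYPE, mirroring the type predicates above.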
  ZoneList<Expression*>* args = expr->arguments();
  DCHECK(args->length() == 1);

  VisitForAccumulatorValue(args->at(0));

  Label materialize_true, materialize_false;
  Label* if_true = NULL;
  Label* if_false = NULL;
  Label* fall_through = NULL;
  context()->PrepareTest(&materialize_true, &materialize_false, &if_true,
                         &if_false, &fall_through);

  __ JumpIfSmi(r3, if_false);
  __ CompareObjectType(r3, r4, r4, JS_PROXY_TYPE);
  PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
  Split(eq, if_true, if_false, fall_through);

  context()->Plug(if_true, if_false);
}


void FullCodeGenerator::EmitClassOf(CallRuntime* expr) {
  ZoneList<Expression*>* args = expr->arguments();
  DCHECK(args->length() == 1);
  Label done, null, function, non_function_constructor;

  VisitForAccumulatorValue(args->at(0));

  // If the object is not a JSReceiver, we return null.
  __ JumpIfSmi(r3, &null);
  STATIC_ASSERT(LAST_JS_RECEIVER_TYPE == LAST_TYPE);
  __ CompareObjectType(r3, r3, r4, FIRST_JS_RECEIVER_TYPE);
  // Map is now in r3.
  __ blt(&null);

  // Return 'Function' for JSFunction and JSBoundFunction objects.
  __ cmpli(r4, Operand(FIRST_FUNCTION_TYPE));
  STATIC_ASSERT(LAST_FUNCTION_TYPE == LAST_TYPE);
  __ bge(&function);

  // Check if the constructor in the map is a JS function.
  Register instance_type = r5;
  __ GetMapConstructor(r3, r3, r4, instance_type);
  __ cmpi(instance_type, Operand(JS_FUNCTION_TYPE));
  __ bne(&non_function_constructor);

  // r3 now contains the constructor function. Grab the
  // instance class name from there.
  __ LoadP(r3, FieldMemOperand(r3, JSFunction::kSharedFunctionInfoOffset));
  __ LoadP(r3,
           FieldMemOperand(r3, SharedFunctionInfo::kInstanceClassNameOffset));
  __ b(&done);

  // Functions have class 'Function'.
  __ bind(&function);
  __ LoadRoot(r3, Heap::kFunction_stringRootIndex);
  __ b(&done);

  // Objects with a non-function constructor have class 'Object'.
  __ bind(&non_function_constructor);
  __ LoadRoot(r3, Heap::kObject_stringRootIndex);
  __ b(&done);

  // Non-JS objects have class null.
  __ bind(&null);
  __ LoadRoot(r3, Heap::kNullValueRootIndex);

  // All done.
  __ bind(&done);

  context()->Plug(r3);
}


void FullCodeGenerator::EmitStringCharCodeAt(CallRuntime* expr) {
  ZoneList<Expression*>* args = expr->arguments();
  DCHECK(args->length() == 2);
  VisitForStackValue(args->at(0));
  VisitForAccumulatorValue(args->at(1));

  Register object = r4;
  Register index = r3;
  Register result = r6;

  PopOperand(object);

  Label need_conversion;
  Label index_out_of_range;
  Label done;
  StringCharCodeAtGenerator generator(object, index, result, &need_conversion,
                                      &need_conversion, &index_out_of_range);
  generator.GenerateFast(masm_);
  __ b(&done);

  __ bind(&index_out_of_range);
  // When the index is out of range, the spec requires us to return
  // NaN.
  __ LoadRoot(result, Heap::kNanValueRootIndex);
  __ b(&done);

  __ bind(&need_conversion);
  // Load the undefined value into the result register, which will
  // trigger conversion.
  __ LoadRoot(result, Heap::kUndefinedValueRootIndex);
  __ b(&done);

  NopRuntimeCallHelper call_helper;
  generator.GenerateSlow(masm_, NOT_PART_OF_IC_HANDLER, call_helper);

  __ bind(&done);
  context()->Plug(result);
}


void FullCodeGenerator::EmitCall(CallRuntime* expr) {
  ZoneList<Expression*>* args = expr->arguments();
  DCHECK_LE(2, args->length());
  // Push target, receiver and arguments onto the stack.
  for (Expression* const arg : *args) {
    VisitForStackValue(arg);
  }
  PrepareForBailoutForId(expr->CallId(), BailoutState::NO_REGISTERS);
  // Move target to r4.
  int const argc = args->length() - 2;
  __ LoadP(r4, MemOperand(sp, (argc + 1) * kPointerSize));
  // Call the target.
  __ mov(r3, Operand(argc));
  __ Call(isolate()->builtins()->Call(), RelocInfo::CODE_TARGET);
  OperandStackDepthDecrement(argc + 1);
  RestoreContext();
  // Discard the function left on TOS.
  context()->DropAndPlug(1, r3);
}

void FullCodeGenerator::EmitGetSuperConstructor(CallRuntime* expr) {
  ZoneList<Expression*>* args = expr->arguments();
  DCHECK_EQ(1, args->length());
  VisitForAccumulatorValue(args->at(0));
  __ AssertFunction(r3);
  __ LoadP(r3, FieldMemOperand(r3, HeapObject::kMapOffset));
  __ LoadP(r3, FieldMemOperand(r3, Map::kPrototypeOffset));
  context()->Plug(r3);
}

void FullCodeGenerator::EmitDebugIsActive(CallRuntime* expr) {
  DCHECK(expr->arguments()->length() == 0);
  ExternalReference debug_is_active =
      ExternalReference::debug_is_active_address(isolate());
  __ mov(ip, Operand(debug_is_active));
  __ lbz(r3, MemOperand(ip));
  __ SmiTag(r3);
  context()->Plug(r3);
}


void FullCodeGenerator::EmitCreateIterResultObject(CallRuntime* expr) {
  ZoneList<Expression*>* args = expr->arguments();
  DCHECK_EQ(2, args->length());
  VisitForStackValue(args->at(0));
  VisitForStackValue(args->at(1));

  Label runtime, done;

  __ Allocate(JSIteratorResult::kSize, r3, r5, r6, &runtime,
              NO_ALLOCATION_FLAGS);
  __ LoadNativeContextSlot(Context::ITERATOR_RESULT_MAP_INDEX, r4);
  __ Pop(r5, r6);
  __ LoadRoot(r7, Heap::kEmptyFixedArrayRootIndex);
  __ StoreP(r4, FieldMemOperand(r3, HeapObject::kMapOffset), r0);
  __ StoreP(r7, FieldMemOperand(r3, JSObject::kPropertiesOffset), r0);
  __ StoreP(r7, FieldMemOperand(r3, JSObject::kElementsOffset), r0);
  __ StoreP(r5, FieldMemOperand(r3, JSIteratorResult::kValueOffset), r0);
  __ StoreP(r6, FieldMemOperand(r3, JSIteratorResult::kDoneOffset), r0);
  STATIC_ASSERT(JSIteratorResult::kSize == 5 * kPointerSize);
  __ b(&done);

  __ bind(&runtime);
  CallRuntimeWithOperands(Runtime::kCreateIterResultObject);

  __ bind(&done);
  context()->Plug(r3);
}


void FullCodeGenerator::EmitLoadJSRuntimeFunction(CallRuntime* expr) {
  // Push function.
  __ LoadNativeContextSlot(expr->context_index(), r3);
  PushOperand(r3);

  // Push undefined as the receiver.
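  // The resulting stack layout is [function, receiver, args...]; the call
  // sequence in EmitCallJSRuntimeFunction finds the function again at
  // sp + (arg_count + 1) * kPointerSize.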
  __ LoadRoot(r3, Heap::kUndefinedValueRootIndex);
  PushOperand(r3);
}


void FullCodeGenerator::EmitCallJSRuntimeFunction(CallRuntime* expr) {
  ZoneList<Expression*>* args = expr->arguments();
  int arg_count = args->length();

  SetCallPosition(expr);
  __ LoadP(r4, MemOperand(sp, (arg_count + 1) * kPointerSize), r0);
  __ mov(r3, Operand(arg_count));
  __ Call(isolate()->builtins()->Call(ConvertReceiverMode::kNullOrUndefined),
          RelocInfo::CODE_TARGET);
  OperandStackDepthDecrement(arg_count + 1);
  RestoreContext();
}


void FullCodeGenerator::VisitUnaryOperation(UnaryOperation* expr) {
  switch (expr->op()) {
    case Token::DELETE: {
      Comment cmnt(masm_, "[ UnaryOperation (DELETE)");
      Property* property = expr->expression()->AsProperty();
      VariableProxy* proxy = expr->expression()->AsVariableProxy();

      if (property != NULL) {
        VisitForStackValue(property->obj());
        VisitForStackValue(property->key());
        CallRuntimeWithOperands(is_strict(language_mode())
                                    ? Runtime::kDeleteProperty_Strict
                                    : Runtime::kDeleteProperty_Sloppy);
        context()->Plug(r3);
      } else if (proxy != NULL) {
        Variable* var = proxy->var();
        // Delete of an unqualified identifier is disallowed in strict mode
        // but "delete this" is allowed.
        bool is_this = var->is_this();
        DCHECK(is_sloppy(language_mode()) || is_this);
        if (var->IsUnallocated()) {
          __ LoadGlobalObject(r5);
          __ mov(r4, Operand(var->name()));
          __ Push(r5, r4);
          __ CallRuntime(Runtime::kDeleteProperty_Sloppy);
          context()->Plug(r3);
        } else {
          DCHECK(!var->IsLookupSlot());
          DCHECK(var->IsStackAllocated() || var->IsContextSlot());
          // Result of deleting non-global, non-dynamic variables is false.
          // The subexpression does not have side effects.
          context()->Plug(is_this);
        }
      } else {
        // Result of deleting non-property, non-variable reference is true.
        // The subexpression may have side effects.
        VisitForEffect(expr->expression());
        context()->Plug(true);
      }
      break;
    }

    case Token::VOID: {
      Comment cmnt(masm_, "[ UnaryOperation (VOID)");
      VisitForEffect(expr->expression());
      context()->Plug(Heap::kUndefinedValueRootIndex);
      break;
    }

    case Token::NOT: {
      Comment cmnt(masm_, "[ UnaryOperation (NOT)");
      if (context()->IsEffect()) {
        // Unary NOT has no side effects so it's only necessary to visit the
        // subexpression. Match the optimizing compiler by not branching.
        VisitForEffect(expr->expression());
      } else if (context()->IsTest()) {
        const TestContext* test = TestContext::cast(context());
        // The labels are swapped for the recursive call.
        VisitForControl(expr->expression(), test->false_label(),
                        test->true_label(), test->fall_through());
        context()->Plug(test->true_label(), test->false_label());
      } else {
        // We handle value contexts explicitly rather than simply visiting
        // for control and plugging the control flow into the context,
        // because we need to prepare a pair of extra administrative AST ids
        // for the optimizing compiler.
        DCHECK(context()->IsAccumulatorValue() || context()->IsStackValue());
        Label materialize_true, materialize_false, done;
        VisitForControl(expr->expression(), &materialize_false,
                        &materialize_true, &materialize_true);
        if (!context()->IsAccumulatorValue()) OperandStackDepthIncrement(1);
        __ bind(&materialize_true);
        PrepareForBailoutForId(expr->MaterializeTrueId(),
                               BailoutState::NO_REGISTERS);
        __ LoadRoot(r3, Heap::kTrueValueRootIndex);
        if (context()->IsStackValue()) __ push(r3);
        __ b(&done);
        __ bind(&materialize_false);
        PrepareForBailoutForId(expr->MaterializeFalseId(),
                               BailoutState::NO_REGISTERS);
        __ LoadRoot(r3, Heap::kFalseValueRootIndex);
        if (context()->IsStackValue()) __ push(r3);
        __ bind(&done);
      }
      break;
    }

    case Token::TYPEOF: {
      Comment cmnt(masm_, "[ UnaryOperation (TYPEOF)");
      {
        AccumulatorValueContext context(this);
        VisitForTypeofValue(expr->expression());
      }
      __ mr(r6, r3);
      __ Call(isolate()->builtins()->Typeof(), RelocInfo::CODE_TARGET);
      context()->Plug(r3);
      break;
    }

    default:
      UNREACHABLE();
  }
}


void FullCodeGenerator::VisitCountOperation(CountOperation* expr) {
  DCHECK(expr->expression()->IsValidReferenceExpressionOrThis());

  Comment cmnt(masm_, "[ CountOperation");

  Property* prop = expr->expression()->AsProperty();
  LhsKind assign_type = Property::GetAssignType(prop);

  // Evaluate expression and get value.
  if (assign_type == VARIABLE) {
    DCHECK(expr->expression()->AsVariableProxy()->var() != NULL);
    AccumulatorValueContext context(this);
    EmitVariableLoad(expr->expression()->AsVariableProxy());
  } else {
    // Reserve space for result of postfix operation.
    if (expr->is_postfix() && !context()->IsEffect()) {
      __ LoadSmiLiteral(ip, Smi::kZero);
      PushOperand(ip);
    }
    switch (assign_type) {
      case NAMED_PROPERTY: {
        // Put the object both on the stack and in the register.
        VisitForStackValue(prop->obj());
        __ LoadP(LoadDescriptor::ReceiverRegister(), MemOperand(sp, 0));
        EmitNamedPropertyLoad(prop);
        break;
      }

      case KEYED_PROPERTY: {
        VisitForStackValue(prop->obj());
        VisitForStackValue(prop->key());
        __ LoadP(LoadDescriptor::ReceiverRegister(),
                 MemOperand(sp, 1 * kPointerSize));
        __ LoadP(LoadDescriptor::NameRegister(), MemOperand(sp, 0));
        EmitKeyedPropertyLoad(prop);
        break;
      }

      case NAMED_SUPER_PROPERTY:
      case KEYED_SUPER_PROPERTY:
      case VARIABLE:
        UNREACHABLE();
    }
  }

  // We need a second deoptimization point after loading the value
  // in case evaluating the property load has a side effect.
  if (assign_type == VARIABLE) {
    PrepareForBailout(expr->expression(), BailoutState::TOS_REGISTER);
  } else {
    PrepareForBailoutForId(prop->LoadId(), BailoutState::TOS_REGISTER);
  }

  // Inline smi case if we are in a loop.
  Label stub_call, done;
  JumpPatchSite patch_site(masm_);

  int count_value = expr->op() == Token::INC ? 1 : -1;
  if (ShouldInlineSmiCase(expr->op())) {
    Label slow;
    patch_site.EmitJumpIfNotSmi(r3, &slow);

    // Save result for postfix expressions.
    if (expr->is_postfix()) {
      if (!context()->IsEffect()) {
        // Save the result on the stack. If we have a named or keyed property
        // we store the result under the receiver that is currently on top
        // of the stack.
        switch (assign_type) {
          case VARIABLE:
            __ push(r3);
            break;
          case NAMED_PROPERTY:
            __ StoreP(r3, MemOperand(sp, kPointerSize));
            break;
          case KEYED_PROPERTY:
            __ StoreP(r3, MemOperand(sp, 2 * kPointerSize));
            break;
          case NAMED_SUPER_PROPERTY:
          case KEYED_SUPER_PROPERTY:
            UNREACHABLE();
            break;
        }
      }
    }

    Register scratch1 = r4;
    Register scratch2 = r5;
    __ LoadSmiLiteral(scratch1, Smi::FromInt(count_value));
    __ AddAndCheckForOverflow(r3, r3, scratch1, scratch2, r0);
    __ BranchOnNoOverflow(&done);
    // Call stub. Undo operation first.
    __ sub(r3, r3, scratch1);
    __ b(&stub_call);
    __ bind(&slow);
  }

  // Convert old value into a number.
  __ Call(isolate()->builtins()->ToNumber(), RelocInfo::CODE_TARGET);
  RestoreContext();
  PrepareForBailoutForId(expr->ToNumberId(), BailoutState::TOS_REGISTER);

  // Save result for postfix expressions.
  if (expr->is_postfix()) {
    if (!context()->IsEffect()) {
      // Save the result on the stack. If we have a named or keyed property
      // we store the result under the receiver that is currently on top
      // of the stack.
      switch (assign_type) {
        case VARIABLE:
          PushOperand(r3);
          break;
        case NAMED_PROPERTY:
          __ StoreP(r3, MemOperand(sp, kPointerSize));
          break;
        case KEYED_PROPERTY:
          __ StoreP(r3, MemOperand(sp, 2 * kPointerSize));
          break;
        case NAMED_SUPER_PROPERTY:
        case KEYED_SUPER_PROPERTY:
          UNREACHABLE();
          break;
      }
    }
  }

  __ bind(&stub_call);
  __ mr(r4, r3);
  __ LoadSmiLiteral(r3, Smi::FromInt(count_value));

  SetExpressionPosition(expr);

  Handle<Code> code = CodeFactory::BinaryOpIC(isolate(), Token::ADD).code();
  CallIC(code, expr->CountBinOpFeedbackId());
  patch_site.EmitPatchInfo();
  __ bind(&done);

  // Store the value returned in r3.
  switch (assign_type) {
    case VARIABLE: {
      VariableProxy* proxy = expr->expression()->AsVariableProxy();
      if (expr->is_postfix()) {
        {
          EffectContext context(this);
          EmitVariableAssignment(proxy->var(), Token::ASSIGN, expr->CountSlot(),
                                 proxy->hole_check_mode());
          PrepareForBailoutForId(expr->AssignmentId(),
                                 BailoutState::TOS_REGISTER);
          context.Plug(r3);
        }
        // For all contexts except the effect context we have the result on
        // top of the stack.
        if (!context()->IsEffect()) {
          context()->PlugTOS();
        }
      } else {
        EmitVariableAssignment(proxy->var(), Token::ASSIGN, expr->CountSlot(),
                               proxy->hole_check_mode());
        PrepareForBailoutForId(expr->AssignmentId(),
                               BailoutState::TOS_REGISTER);
        context()->Plug(r3);
      }
      break;
    }
    case NAMED_PROPERTY: {
      PopOperand(StoreDescriptor::ReceiverRegister());
      CallStoreIC(expr->CountSlot(), prop->key()->AsLiteral()->value());
      PrepareForBailoutForId(expr->AssignmentId(), BailoutState::TOS_REGISTER);
      if (expr->is_postfix()) {
        if (!context()->IsEffect()) {
          context()->PlugTOS();
        }
      } else {
        context()->Plug(r3);
      }
      break;
    }
    case KEYED_PROPERTY: {
      PopOperands(StoreDescriptor::ReceiverRegister(),
                  StoreDescriptor::NameRegister());
      CallKeyedStoreIC(expr->CountSlot());
      PrepareForBailoutForId(expr->AssignmentId(), BailoutState::TOS_REGISTER);
      if (expr->is_postfix()) {
        if (!context()->IsEffect()) {
          context()->PlugTOS();
        }
      } else {
        context()->Plug(r3);
      }
      break;
    }
    case NAMED_SUPER_PROPERTY:
    case KEYED_SUPER_PROPERTY:
      UNREACHABLE();
      break;
  }
}


void FullCodeGenerator::EmitLiteralCompareTypeof(Expression* expr,
                                                 Expression* sub_expr,
                                                 Handle<String> check) {
  Label materialize_true, materialize_false;
  Label* if_true = NULL;
  Label* if_false = NULL;
  Label* fall_through = NULL;
  context()->PrepareTest(&materialize_true, &materialize_false, &if_true,
                         &if_false, &fall_through);

  {
    AccumulatorValueContext context(this);
    VisitForTypeofValue(sub_expr);
  }
  PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);

  Factory* factory = isolate()->factory();
  if (String::Equals(check, factory->number_string())) {
    __ JumpIfSmi(r3, if_true);
    __ LoadP(r3, FieldMemOperand(r3, HeapObject::kMapOffset));
    __ LoadRoot(ip, Heap::kHeapNumberMapRootIndex);
    __ cmp(r3, ip);
    Split(eq, if_true, if_false, fall_through);
  } else if (String::Equals(check, factory->string_string())) {
    __ JumpIfSmi(r3, if_false);
    __ CompareObjectType(r3, r3, r4, FIRST_NONSTRING_TYPE);
    Split(lt, if_true, if_false, fall_through);
  } else if (String::Equals(check, factory->symbol_string())) {
    __ JumpIfSmi(r3, if_false);
    __ CompareObjectType(r3, r3, r4, SYMBOL_TYPE);
    Split(eq, if_true, if_false, fall_through);
  } else if (String::Equals(check, factory->boolean_string())) {
    __ CompareRoot(r3, Heap::kTrueValueRootIndex);
    __ beq(if_true);
    __ CompareRoot(r3, Heap::kFalseValueRootIndex);
    Split(eq, if_true, if_false, fall_through);
  } else if (String::Equals(check, factory->undefined_string())) {
    __ CompareRoot(r3, Heap::kNullValueRootIndex);
    __ beq(if_false);
    __ JumpIfSmi(r3, if_false);
    // Check for undetectable objects => true.
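    // Undetectable objects (e.g. document.all) masquerade as undefined, so
    // the map's undetectable bit must also answer true here.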
    __ LoadP(r3, FieldMemOperand(r3, HeapObject::kMapOffset));
    __ lbz(r4, FieldMemOperand(r3, Map::kBitFieldOffset));
    __ andi(r0, r4, Operand(1 << Map::kIsUndetectable));
    Split(ne, if_true, if_false, fall_through, cr0);

  } else if (String::Equals(check, factory->function_string())) {
    __ JumpIfSmi(r3, if_false);
    __ LoadP(r3, FieldMemOperand(r3, HeapObject::kMapOffset));
    __ lbz(r4, FieldMemOperand(r3, Map::kBitFieldOffset));
    __ andi(r4, r4,
            Operand((1 << Map::kIsCallable) | (1 << Map::kIsUndetectable)));
    __ cmpi(r4, Operand(1 << Map::kIsCallable));
    Split(eq, if_true, if_false, fall_through);
  } else if (String::Equals(check, factory->object_string())) {
    __ JumpIfSmi(r3, if_false);
    __ CompareRoot(r3, Heap::kNullValueRootIndex);
    __ beq(if_true);
    STATIC_ASSERT(LAST_JS_RECEIVER_TYPE == LAST_TYPE);
    __ CompareObjectType(r3, r3, r4, FIRST_JS_RECEIVER_TYPE);
    __ blt(if_false);
    // Check for callable or undetectable objects => false.
    __ lbz(r4, FieldMemOperand(r3, Map::kBitFieldOffset));
    __ andi(r0, r4,
            Operand((1 << Map::kIsCallable) | (1 << Map::kIsUndetectable)));
    Split(eq, if_true, if_false, fall_through, cr0);
  } else {
    if (if_false != fall_through) __ b(if_false);
  }
  context()->Plug(if_true, if_false);
}


void FullCodeGenerator::VisitCompareOperation(CompareOperation* expr) {
  Comment cmnt(masm_, "[ CompareOperation");

  // First we try a fast inlined version of the compare when one of
  // the operands is a literal.
  if (TryLiteralCompare(expr)) return;

  // Always perform the comparison for its control flow. Pack the result
  // into the expression's context after the comparison is performed.
  Label materialize_true, materialize_false;
  Label* if_true = NULL;
  Label* if_false = NULL;
  Label* fall_through = NULL;
  context()->PrepareTest(&materialize_true, &materialize_false, &if_true,
                         &if_false, &fall_through);

  Token::Value op = expr->op();
  VisitForStackValue(expr->left());
  switch (op) {
    case Token::IN:
      VisitForStackValue(expr->right());
      SetExpressionPosition(expr);
      EmitHasProperty();
      PrepareForBailoutBeforeSplit(expr, false, NULL, NULL);
      __ CompareRoot(r3, Heap::kTrueValueRootIndex);
      Split(eq, if_true, if_false, fall_through);
      break;

    case Token::INSTANCEOF: {
      VisitForAccumulatorValue(expr->right());
      SetExpressionPosition(expr);
      PopOperand(r4);
      __ Call(isolate()->builtins()->InstanceOf(), RelocInfo::CODE_TARGET);
      RestoreContext();
      PrepareForBailoutBeforeSplit(expr, false, NULL, NULL);
      __ CompareRoot(r3, Heap::kTrueValueRootIndex);
      Split(eq, if_true, if_false, fall_through);
      break;
    }

    default: {
      VisitForAccumulatorValue(expr->right());
      SetExpressionPosition(expr);
      Condition cond = CompareIC::ComputeCondition(op);
      PopOperand(r4);

      bool inline_smi_code = ShouldInlineSmiCase(op);
      JumpPatchSite patch_site(masm_);
      if (inline_smi_code) {
        Label slow_case;
        __ orx(r5, r3, r4);
        patch_site.EmitJumpIfNotSmi(r5, &slow_case);
        __ cmp(r4, r3);
        Split(cond, if_true, if_false, NULL);
        __ bind(&slow_case);
      }

      Handle<Code> ic = CodeFactory::CompareIC(isolate(), op).code();
      CallIC(ic, expr->CompareOperationFeedbackId());
      patch_site.EmitPatchInfo();
      PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
      __ cmpi(r3, Operand::Zero());
      Split(cond, if_true, if_false, fall_through);
    }
  }

  // Convert the result of the comparison into one expected for this
  // expression's context.
  context()->Plug(if_true, if_false);
}


void FullCodeGenerator::EmitLiteralCompareNil(CompareOperation* expr,
                                              Expression* sub_expr,
                                              NilValue nil) {
  Label materialize_true, materialize_false;
  Label* if_true = NULL;
  Label* if_false = NULL;
  Label* fall_through = NULL;
  context()->PrepareTest(&materialize_true, &materialize_false, &if_true,
                         &if_false, &fall_through);

  VisitForAccumulatorValue(sub_expr);
  PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
  if (expr->op() == Token::EQ_STRICT) {
    Heap::RootListIndex nil_value = nil == kNullValue
                                        ? Heap::kNullValueRootIndex
                                        : Heap::kUndefinedValueRootIndex;
    __ LoadRoot(r4, nil_value);
    __ cmp(r3, r4);
    Split(eq, if_true, if_false, fall_through);
  } else {
    __ JumpIfSmi(r3, if_false);
    __ LoadP(r3, FieldMemOperand(r3, HeapObject::kMapOffset));
    __ lbz(r4, FieldMemOperand(r3, Map::kBitFieldOffset));
    __ andi(r0, r4, Operand(1 << Map::kIsUndetectable));
    Split(ne, if_true, if_false, fall_through, cr0);
  }
  context()->Plug(if_true, if_false);
}


Register FullCodeGenerator::result_register() { return r3; }


Register FullCodeGenerator::context_register() { return cp; }

void FullCodeGenerator::LoadFromFrameField(int frame_offset, Register value) {
  DCHECK_EQ(static_cast<int>(POINTER_SIZE_ALIGN(frame_offset)), frame_offset);
  __ LoadP(value, MemOperand(fp, frame_offset), r0);
}

void FullCodeGenerator::StoreToFrameField(int frame_offset, Register value) {
  DCHECK_EQ(static_cast<int>(POINTER_SIZE_ALIGN(frame_offset)), frame_offset);
  __ StoreP(value, MemOperand(fp, frame_offset), r0);
}


void FullCodeGenerator::LoadContextField(Register dst, int context_index) {
  __ LoadP(dst, ContextMemOperand(cp, context_index), r0);
}


void FullCodeGenerator::PushFunctionArgumentForContextAllocation() {
  DeclarationScope* closure_scope = scope()->GetClosureScope();
  if (closure_scope->is_script_scope() || closure_scope->is_module_scope()) {
    // Contexts nested in the native context have a canonical empty function
    // as their closure, not the anonymous closure containing the global
    // code.
    __ LoadNativeContextSlot(Context::CLOSURE_INDEX, ip);
  } else if (closure_scope->is_eval_scope()) {
    // Contexts created by a call to eval have the same closure as the
    // context calling eval, not the anonymous closure containing the eval
    // code. Fetch it from the context.
    __ LoadP(ip, ContextMemOperand(cp, Context::CLOSURE_INDEX));
  } else {
    DCHECK(closure_scope->is_function_scope());
    __ LoadP(ip, MemOperand(fp, JavaScriptFrameConstants::kFunctionOffset));
  }
  PushOperand(ip);
}


#undef __


void BackEdgeTable::PatchAt(Code* unoptimized_code, Address pc,
                            BackEdgeState target_state,
                            Code* replacement_code) {
  Address mov_address = Assembler::target_address_from_return_address(pc);
  Address cmp_address = mov_address - 2 * Assembler::kInstrSize;
  Isolate* isolate = unoptimized_code->GetIsolate();
  CodePatcher patcher(isolate, cmp_address, 1);

  switch (target_state) {
    case INTERRUPT: {
      //  <decrement profiling counter>
      //  cmpi    r6, 0
      //  bge     <ok>            ;; not changed
      //  mov     r12, <interrupt stub address>
      //  mtlr    r12
      //  blrl
      //  <reset profiling counter>
      //  ok-label
      patcher.masm()->cmpi(r6, Operand::Zero());
      break;
    }
    case ON_STACK_REPLACEMENT:
      //  <decrement profiling counter>
      //  crset
      //  bge     <ok>            ;; not changed
      //  mov     r12, <on-stack replacement address>
      //  mtlr    r12
      //  blrl
      //  <reset profiling counter>
      //  ok-label ----- pc_after points here

      // Set the LT bit such that bge is a NOP.
      patcher.masm()->crset(Assembler::encode_crbit(cr7, CR_LT));
      break;
  }

  // Replace the stack check address in the mov sequence with the
  // entry address of the replacement code.
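  // The cmpi/crset patched above records which state the back edge is in;
  // the mov target rewritten below selects the builtin that the patched
  // sequence actually calls.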
  Assembler::set_target_address_at(isolate, mov_address, unoptimized_code,
                                   replacement_code->entry());

  unoptimized_code->GetHeap()->incremental_marking()->RecordCodeTargetPatch(
      unoptimized_code, mov_address, replacement_code);
}


BackEdgeTable::BackEdgeState BackEdgeTable::GetBackEdgeState(
    Isolate* isolate, Code* unoptimized_code, Address pc) {
  Address mov_address = Assembler::target_address_from_return_address(pc);
  Address cmp_address = mov_address - 2 * Assembler::kInstrSize;
#ifdef DEBUG
  Address interrupt_address =
      Assembler::target_address_at(mov_address, unoptimized_code);
#endif

  if (Assembler::IsCmpImmediate(Assembler::instr_at(cmp_address))) {
    DCHECK(interrupt_address == isolate->builtins()->InterruptCheck()->entry());
    return INTERRUPT;
  }

  DCHECK(Assembler::IsCrSet(Assembler::instr_at(cmp_address)));

  DCHECK(interrupt_address ==
         isolate->builtins()->OnStackReplacement()->entry());
  return ON_STACK_REPLACEMENT;
}
}  // namespace internal
}  // namespace v8
#endif  // V8_TARGET_ARCH_PPC