// Copyright 2013 the V8 project authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.

#if V8_TARGET_ARCH_ARM64

#include "src/ast/compile-time-value.h"
#include "src/ast/scopes.h"
#include "src/builtins/builtins-constructor.h"
#include "src/code-factory.h"
#include "src/code-stubs.h"
#include "src/codegen.h"
#include "src/compilation-info.h"
#include "src/compiler.h"
#include "src/debug/debug.h"
#include "src/full-codegen/full-codegen.h"
#include "src/ic/ic.h"

#include "src/arm64/code-stubs-arm64.h"
#include "src/arm64/frames-arm64.h"
#include "src/arm64/macro-assembler-arm64.h"

namespace v8 {
namespace internal {

#define __ ACCESS_MASM(masm())

class JumpPatchSite BASE_EMBEDDED {
 public:
  explicit JumpPatchSite(MacroAssembler* masm) : masm_(masm), reg_(NoReg) {
#ifdef DEBUG
    info_emitted_ = false;
#endif
  }

  ~JumpPatchSite() {
    if (patch_site_.is_bound()) {
      DCHECK(info_emitted_);
    } else {
      DCHECK(reg_.IsNone());
    }
  }

  void EmitJumpIfNotSmi(Register reg, Label* target) {
    // This code will be patched by PatchInlinedSmiCode, in ic-arm64.cc.
    InstructionAccurateScope scope(masm_, 1);
    DCHECK(!info_emitted_);
    DCHECK(reg.Is64Bits());
    DCHECK(!reg.Is(csp));
    reg_ = reg;
    __ bind(&patch_site_);
    __ tbz(xzr, 0, target);   // Always taken before patched.
  }

  void EmitJumpIfSmi(Register reg, Label* target) {
    // This code will be patched by PatchInlinedSmiCode, in ic-arm64.cc.
    InstructionAccurateScope scope(masm_, 1);
    DCHECK(!info_emitted_);
    DCHECK(reg.Is64Bits());
    DCHECK(!reg.Is(csp));
    reg_ = reg;
    __ bind(&patch_site_);
    __ tbnz(xzr, 0, target);  // Never taken before patched.
  }

  void EmitJumpIfEitherNotSmi(Register reg1, Register reg2, Label* target) {
    UseScratchRegisterScope temps(masm_);
    Register temp = temps.AcquireX();
    __ Orr(temp, reg1, reg2);
    EmitJumpIfNotSmi(temp, target);
  }

  void EmitPatchInfo() {
    Assembler::BlockPoolsScope scope(masm_);
    InlineSmiCheckInfo::Emit(masm_, reg_, &patch_site_);
#ifdef DEBUG
    info_emitted_ = true;
#endif
  }

 private:
  MacroAssembler* masm() { return masm_; }
  MacroAssembler* masm_;
  Label patch_site_;
  Register reg_;
#ifdef DEBUG
  bool info_emitted_;
#endif
};


// Generate code for a JS function. On entry to the function the receiver
// and arguments have been pushed on the stack left to right. The actual
// argument count matches the formal parameter count expected by the
// function.
//
// The live registers are:
//   - x1: the JS function object being called (i.e. ourselves).
//   - x3: the new target value
//   - cp: our context.
//   - fp: our caller's frame pointer.
//   - jssp: stack pointer.
//   - lr: return address.
//
// The function builds a JS frame. See JavaScriptFrameConstants in
// frames-arm64.h for its layout.
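//
// A sketch of the frame this establishes, derived from the patchable
// prologue sequence emitted below
// (Push(lr, fp, cp, x1); Add(fp, jssp, 2 * kPointerSize)):
//
//   fp + 2 * kPointerSize : caller SP (receiver and arguments from here up).
//   fp + 1 * kPointerSize : lr (return address).
//   fp + 0                : caller's saved fp.
//   fp - 1 * kPointerSize : cp (context).
//   fp - 2 * kPointerSize : x1 (JSFunction), with stack locals below.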
void FullCodeGenerator::Generate() {
  CompilationInfo* info = info_;
  profiling_counter_ = isolate()->factory()->NewCell(
      Handle<Smi>(Smi::FromInt(FLAG_interrupt_budget), isolate()));
  SetFunctionPosition(literal());
  Comment cmnt(masm_, "[ Function compiled by full code generator");

  ProfileEntryHookStub::MaybeCallEntryHook(masm_);

  if (FLAG_debug_code && info->ExpectsJSReceiverAsReceiver()) {
    int receiver_offset = info->scope()->num_parameters() * kXRegSize;
    __ Peek(x10, receiver_offset);
    __ AssertNotSmi(x10);
    __ CompareObjectType(x10, x10, x11, FIRST_JS_RECEIVER_TYPE);
    __ Assert(ge, kSloppyFunctionExpectsJSReceiverReceiver);
  }

  // Open a frame scope to indicate that there is a frame on the stack.
  // The MANUAL indicates that the scope shouldn't actually generate code
  // to set up the frame because we do it manually below.
  FrameScope frame_scope(masm_, StackFrame::MANUAL);

  // This call emits the following sequence in a way that can be patched for
  // code ageing support:
  //   Push(lr, fp, cp, x1);
  //   Add(fp, jssp, 2 * kPointerSize);
  info->set_prologue_offset(masm_->pc_offset());
  __ Prologue(info->GeneratePreagedPrologue());

  // Increment invocation count for the function.
  {
    Comment cmnt(masm_, "[ Increment invocation count");
    __ Ldr(x11, FieldMemOperand(x1, JSFunction::kFeedbackVectorOffset));
    __ Ldr(x11, FieldMemOperand(x11, Cell::kValueOffset));
    __ Ldr(x10, FieldMemOperand(
                    x11, FeedbackVector::kInvocationCountIndex * kPointerSize +
                             FeedbackVector::kHeaderSize));
    __ Add(x10, x10, Operand(Smi::FromInt(1)));
    __ Str(x10, FieldMemOperand(
                    x11, FeedbackVector::kInvocationCountIndex * kPointerSize +
                             FeedbackVector::kHeaderSize));
  }

  // Reserve space on the stack for locals.
  { Comment cmnt(masm_, "[ Allocate locals");
    int locals_count = info->scope()->num_stack_slots();
    OperandStackDepthIncrement(locals_count);
    if (locals_count > 0) {
      if (locals_count >= 128) {
        Label ok;
        DCHECK(jssp.Is(__ StackPointer()));
        __ Sub(x10, jssp, locals_count * kPointerSize);
        __ CompareRoot(x10, Heap::kRealStackLimitRootIndex);
        __ B(hs, &ok);
        __ CallRuntime(Runtime::kThrowStackOverflow);
        __ Bind(&ok);
      }
      __ LoadRoot(x10, Heap::kUndefinedValueRootIndex);
      if (FLAG_optimize_for_size) {
        __ PushMultipleTimes(x10, locals_count);
      } else {
        const int kMaxPushes = 32;
        if (locals_count >= kMaxPushes) {
          int loop_iterations = locals_count / kMaxPushes;
          __ Mov(x2, loop_iterations);
          Label loop_header;
          __ Bind(&loop_header);
          // Do pushes.
          __ PushMultipleTimes(x10, kMaxPushes);
          __ Subs(x2, x2, 1);
          __ B(ne, &loop_header);
        }
        int remaining = locals_count % kMaxPushes;
        // Emit the remaining pushes.
        __ PushMultipleTimes(x10, remaining);
      }
    }
  }

  bool function_in_register_x1 = true;

  if (info->scope()->NeedsContext()) {
    // Argument to NewContext is the function, which is still in x1.
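    // Three allocation paths follow: script scopes always go through the
    // runtime, function scopes with few enough slots use the
    // FastNewFunctionContext builtin, and anything larger falls back to
    // Runtime::kNewFunctionContext.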
    Comment cmnt(masm_, "[ Allocate context");
    bool need_write_barrier = true;
    int slots = info->scope()->num_heap_slots() - Context::MIN_CONTEXT_SLOTS;
    if (info->scope()->is_script_scope()) {
      __ Mov(x10, Operand(info->scope()->scope_info()));
      __ Push(x1, x10);
      __ CallRuntime(Runtime::kNewScriptContext);
      PrepareForBailoutForId(BailoutId::ScriptContext(),
                             BailoutState::TOS_REGISTER);
      // The new target value is not used, clobbering is safe.
      DCHECK_NULL(info->scope()->new_target_var());
    } else {
      if (info->scope()->new_target_var() != nullptr) {
        __ Push(x3);  // Preserve new target.
      }
      if (slots <=
          ConstructorBuiltinsAssembler::MaximumFunctionContextSlots()) {
        Callable callable = CodeFactory::FastNewFunctionContext(
            isolate(), info->scope()->scope_type());
        __ Mov(FastNewFunctionContextDescriptor::SlotsRegister(), slots);
        __ Call(callable.code(), RelocInfo::CODE_TARGET);
        // Result of the FastNewFunctionContext builtin is always in new space.
        need_write_barrier = false;
      } else {
        __ Push(x1);
        __ Push(Smi::FromInt(info->scope()->scope_type()));
        __ CallRuntime(Runtime::kNewFunctionContext);
      }
      if (info->scope()->new_target_var() != nullptr) {
        __ Pop(x3);  // Restore new target.
      }
    }
    function_in_register_x1 = false;
    // Context is returned in x0. It replaces the context passed to us.
    // It's saved in the stack and kept live in cp.
    __ Mov(cp, x0);
    __ Str(x0, MemOperand(fp, StandardFrameConstants::kContextOffset));
    // Copy any necessary parameters into the context.
    int num_parameters = info->scope()->num_parameters();
    int first_parameter = info->scope()->has_this_declaration() ? -1 : 0;
    for (int i = first_parameter; i < num_parameters; i++) {
      Variable* var =
          (i == -1) ? info->scope()->receiver() : info->scope()->parameter(i);
      if (var->IsContextSlot()) {
        int parameter_offset = StandardFrameConstants::kCallerSPOffset +
                               (num_parameters - 1 - i) * kPointerSize;
        // Load parameter from stack.
        __ Ldr(x10, MemOperand(fp, parameter_offset));
        // Store it in the context.
        MemOperand target = ContextMemOperand(cp, var->index());
        __ Str(x10, target);

        // Update the write barrier.
        if (need_write_barrier) {
          __ RecordWriteContextSlot(cp, static_cast<int>(target.offset()), x10,
                                    x11, kLRHasBeenSaved, kDontSaveFPRegs);
        } else if (FLAG_debug_code) {
          Label done;
          __ JumpIfInNewSpace(cp, &done);
          __ Abort(kExpectedNewSpaceObject);
          __ bind(&done);
        }
      }
    }
  }

  // The registers holding this function and the new target are both trashed
  // if we bail out here. But since that can happen only when the new target
  // is not used and we allocate a context, the value of
  // |function_in_register_x1| is correct.
  PrepareForBailoutForId(BailoutId::FunctionContext(),
                         BailoutState::NO_REGISTERS);

  // We don't support new.target and rest parameters here.
  DCHECK_NULL(info->scope()->new_target_var());
  DCHECK_NULL(info->scope()->rest_parameter());
  DCHECK_NULL(info->scope()->this_function_var());

  Variable* arguments = info->scope()->arguments();
  if (arguments != NULL) {
    // Function uses arguments object.
    Comment cmnt(masm_, "[ Allocate arguments object");
    if (!function_in_register_x1) {
      // Load this again, if it's used by the local context below.
      __ Ldr(x1, MemOperand(fp, JavaScriptFrameConstants::kFunctionOffset));
    }
    if (is_strict(language_mode()) || !has_simple_parameters()) {
      Callable callable = CodeFactory::FastNewStrictArguments(isolate());
      __ Call(callable.code(), RelocInfo::CODE_TARGET);
      RestoreContext();
    } else if (literal()->has_duplicate_parameters()) {
      __ Push(x1);
      __ CallRuntime(Runtime::kNewSloppyArguments_Generic);
    } else {
      Callable callable = CodeFactory::FastNewSloppyArguments(isolate());
      __ Call(callable.code(), RelocInfo::CODE_TARGET);
      RestoreContext();
    }

    SetVar(arguments, x0, x1, x2);
  }

  if (FLAG_trace) {
    __ CallRuntime(Runtime::kTraceEnter);
  }

  // Visit the declarations and body.
  PrepareForBailoutForId(BailoutId::FunctionEntry(),
                         BailoutState::NO_REGISTERS);
  {
    Comment cmnt(masm_, "[ Declarations");
    VisitDeclarations(scope()->declarations());
  }

  // Assert that the declarations do not use ICs. Otherwise the debugger
  // won't be able to redirect a PC at an IC to the correct IC in newly
  // recompiled code.
  DCHECK_EQ(0, ic_total_count_);

  {
    Comment cmnt(masm_, "[ Stack check");
    PrepareForBailoutForId(BailoutId::Declarations(),
                           BailoutState::NO_REGISTERS);
    Label ok;
    DCHECK(jssp.Is(__ StackPointer()));
    __ CompareRoot(jssp, Heap::kStackLimitRootIndex);
    __ B(hs, &ok);
    PredictableCodeSizeScope predictable(masm_,
                                         Assembler::kCallSizeWithRelocation);
    __ Call(isolate()->builtins()->StackCheck(), RelocInfo::CODE_TARGET);
    __ Bind(&ok);
  }

  {
    Comment cmnt(masm_, "[ Body");
    DCHECK(loop_depth() == 0);
    VisitStatements(literal()->body());
    DCHECK(loop_depth() == 0);
  }

  // Always emit a 'return undefined' in case control fell off the end of
  // the body.
  { Comment cmnt(masm_, "[ return <undefined>;");
    __ LoadRoot(x0, Heap::kUndefinedValueRootIndex);
  }
  EmitReturnSequence();

  // Force emission of the pools, so they don't get emitted in the middle
  // of the back edge table.
  masm()->CheckVeneerPool(true, false);
  masm()->CheckConstPool(true, false);
}

void FullCodeGenerator::ClearAccumulator() { __ Mov(x0, Smi::kZero); }

void FullCodeGenerator::EmitProfilingCounterDecrement(int delta) {
  __ Mov(x2, Operand(profiling_counter_));
  __ Ldr(x3, FieldMemOperand(x2, Cell::kValueOffset));
  __ Subs(x3, x3, Smi::FromInt(delta));
  __ Str(x3, FieldMemOperand(x2, Cell::kValueOffset));
}


void FullCodeGenerator::EmitProfilingCounterReset() {
  int reset_value = FLAG_interrupt_budget;
  __ Mov(x2, Operand(profiling_counter_));
  __ Mov(x3, Smi::FromInt(reset_value));
  __ Str(x3, FieldMemOperand(x2, Cell::kValueOffset));
}


void FullCodeGenerator::EmitBackEdgeBookkeeping(IterationStatement* stmt,
                                                Label* back_edge_target) {
  DCHECK(jssp.Is(__ StackPointer()));
  Comment cmnt(masm_, "[ Back edge bookkeeping");
  // Block literal pools whilst emitting back edge code.
  Assembler::BlockPoolsScope block_const_pool(masm_);
  Label ok;

  DCHECK(back_edge_target->is_bound());
  // We want to do a round rather than a floor of distance/kCodeSizeMultiplier
  // to reduce the absolute error due to the integer division. To do that,
  // we add kCodeSizeMultiplier/2 to the distance (equivalent to adding 0.5 to
  // the result).
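  // For example, taking kCodeSizeMultiplier as 100 purely for illustration,
  // a distance of 160 bytes would floor to a weight of 1, but rounds to
  // (160 + 50) / 100 == 2, the nearer value.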
  int distance =
      static_cast<int>(masm_->SizeOfCodeGeneratedSince(back_edge_target) +
                       kCodeSizeMultiplier / 2);
  int weight = Min(kMaxBackEdgeWeight,
                   Max(1, distance / kCodeSizeMultiplier));
  EmitProfilingCounterDecrement(weight);
  __ B(pl, &ok);
  __ Call(isolate()->builtins()->InterruptCheck(), RelocInfo::CODE_TARGET);

  // Record a mapping of this PC offset to the OSR id. This is used to find
  // the AST id from the unoptimized code in order to use it as a key into
  // the deoptimization input data found in the optimized code.
  RecordBackEdge(stmt->OsrEntryId());

  EmitProfilingCounterReset();

  __ Bind(&ok);
  PrepareForBailoutForId(stmt->EntryId(), BailoutState::NO_REGISTERS);
  // Record a mapping of the OSR id to this PC. This is used if the OSR
  // entry becomes the target of a bailout. We don't expect it to be, but
  // we want it to work if it is.
  PrepareForBailoutForId(stmt->OsrEntryId(), BailoutState::NO_REGISTERS);
}

void FullCodeGenerator::EmitProfilingCounterHandlingForReturnSequence(
    bool is_tail_call) {
  // Pretend that the exit is a backwards jump to the entry.
  int weight = 1;
  if (info_->ShouldSelfOptimize()) {
    weight = FLAG_interrupt_budget / FLAG_self_opt_count;
  } else {
    int distance = masm_->pc_offset() + kCodeSizeMultiplier / 2;
    weight = Min(kMaxBackEdgeWeight, Max(1, distance / kCodeSizeMultiplier));
  }
  EmitProfilingCounterDecrement(weight);
  Label ok;
  __ B(pl, &ok);
  // We don't need to save the result register if we are going to do a tail
  // call.
  if (!is_tail_call) {
    __ Push(x0);
  }
  __ Call(isolate()->builtins()->InterruptCheck(), RelocInfo::CODE_TARGET);
  if (!is_tail_call) {
    __ Pop(x0);
  }
  EmitProfilingCounterReset();
  __ Bind(&ok);
}

void FullCodeGenerator::EmitReturnSequence() {
  Comment cmnt(masm_, "[ Return sequence");

  if (return_label_.is_bound()) {
    __ B(&return_label_);

  } else {
    __ Bind(&return_label_);
    if (FLAG_trace) {
      // Push the return value on the stack as the parameter.
      // Runtime::TraceExit returns its parameter in x0.
      __ Push(result_register());
      __ CallRuntime(Runtime::kTraceExit);
      DCHECK(x0.Is(result_register()));
    }
    EmitProfilingCounterHandlingForReturnSequence(false);

    SetReturnPosition(literal());
    const Register& current_sp = __ StackPointer();
    // Nothing ensures 16-byte alignment here.
    DCHECK(!current_sp.Is(csp));
    __ Mov(current_sp, fp);
    __ Ldp(fp, lr, MemOperand(current_sp, 2 * kXRegSize, PostIndex));
    // Drop the arguments and receiver and return.
    // TODO(all): This implementation is overkill as it supports 2**31+1
    // arguments, consider how to improve it without creating a security
    // hole.
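    // The byte count to drop is emitted as a 64-bit literal directly after
    // the Ret below, i.e. three instructions ahead of the pc-relative load
    // (the load itself, the Add and the Ret), so any argument count fits
    // without changing the size of this sequence.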
    __ ldr_pcrel(ip0, (3 * kInstructionSize) >> kLoadLiteralScaleLog2);
    __ Add(current_sp, current_sp, ip0);
    __ Ret();
    int32_t arg_count = info_->scope()->num_parameters() + 1;
    __ dc64(kXRegSize * arg_count);
  }
}

void FullCodeGenerator::RestoreContext() {
  __ Ldr(cp, MemOperand(fp, StandardFrameConstants::kContextOffset));
}

void FullCodeGenerator::StackValueContext::Plug(Variable* var) const {
  DCHECK(var->IsStackAllocated() || var->IsContextSlot());
  codegen()->GetVar(result_register(), var);
  codegen()->PushOperand(result_register());
}


void FullCodeGenerator::EffectContext::Plug(Heap::RootListIndex index) const {
  // Root values have no side effects.
}


void FullCodeGenerator::AccumulatorValueContext::Plug(
    Heap::RootListIndex index) const {
  __ LoadRoot(result_register(), index);
}


void FullCodeGenerator::StackValueContext::Plug(
    Heap::RootListIndex index) const {
  __ LoadRoot(result_register(), index);
  codegen()->PushOperand(result_register());
}


void FullCodeGenerator::TestContext::Plug(Heap::RootListIndex index) const {
  codegen()->PrepareForBailoutBeforeSplit(condition(), true, true_label_,
                                          false_label_);
  if (index == Heap::kUndefinedValueRootIndex ||
      index == Heap::kNullValueRootIndex ||
      index == Heap::kFalseValueRootIndex) {
    if (false_label_ != fall_through_) __ B(false_label_);
  } else if (index == Heap::kTrueValueRootIndex) {
    if (true_label_ != fall_through_) __ B(true_label_);
  } else {
    __ LoadRoot(result_register(), index);
    codegen()->DoTest(this);
  }
}


void FullCodeGenerator::EffectContext::Plug(Handle<Object> lit) const {
}


void FullCodeGenerator::AccumulatorValueContext::Plug(
    Handle<Object> lit) const {
  __ Mov(result_register(), Operand(lit));
}


void FullCodeGenerator::StackValueContext::Plug(Handle<Object> lit) const {
  // Immediates cannot be pushed directly.
  __ Mov(result_register(), Operand(lit));
  codegen()->PushOperand(result_register());
}


void FullCodeGenerator::TestContext::Plug(Handle<Object> lit) const {
  codegen()->PrepareForBailoutBeforeSplit(condition(),
                                          true,
                                          true_label_,
                                          false_label_);
  DCHECK(lit->IsNullOrUndefined(isolate()) || !lit->IsUndetectable());
  if (lit->IsNullOrUndefined(isolate()) || lit->IsFalse(isolate())) {
    if (false_label_ != fall_through_) __ B(false_label_);
  } else if (lit->IsTrue(isolate()) || lit->IsJSObject()) {
    if (true_label_ != fall_through_) __ B(true_label_);
  } else if (lit->IsString()) {
    if (String::cast(*lit)->length() == 0) {
      if (false_label_ != fall_through_) __ B(false_label_);
    } else {
      if (true_label_ != fall_through_) __ B(true_label_);
    }
  } else if (lit->IsSmi()) {
    if (Smi::cast(*lit)->value() == 0) {
      if (false_label_ != fall_through_) __ B(false_label_);
    } else {
      if (true_label_ != fall_through_) __ B(true_label_);
    }
  } else {
    // For simplicity we always test the accumulator register.
    __ Mov(result_register(), Operand(lit));
    codegen()->DoTest(this);
  }
}


void FullCodeGenerator::StackValueContext::DropAndPlug(int count,
                                                       Register reg) const {
  DCHECK(count > 0);
  if (count > 1) codegen()->DropOperands(count - 1);
  __ Poke(reg, 0);
}


void FullCodeGenerator::EffectContext::Plug(Label* materialize_true,
                                            Label* materialize_false) const {
  DCHECK(materialize_true == materialize_false);
  __ Bind(materialize_true);
}


void FullCodeGenerator::AccumulatorValueContext::Plug(
    Label* materialize_true,
    Label* materialize_false) const {
  Label done;
  __ Bind(materialize_true);
  __ LoadRoot(result_register(), Heap::kTrueValueRootIndex);
  __ B(&done);
  __ Bind(materialize_false);
  __ LoadRoot(result_register(), Heap::kFalseValueRootIndex);
  __ Bind(&done);
}


void FullCodeGenerator::StackValueContext::Plug(
    Label* materialize_true,
    Label* materialize_false) const {
  Label done;
  __ Bind(materialize_true);
  __ LoadRoot(x10, Heap::kTrueValueRootIndex);
  __ B(&done);
  __ Bind(materialize_false);
  __ LoadRoot(x10, Heap::kFalseValueRootIndex);
  __ Bind(&done);
  codegen()->PushOperand(x10);
}


void FullCodeGenerator::TestContext::Plug(Label* materialize_true,
                                          Label* materialize_false) const {
  DCHECK(materialize_true == true_label_);
  DCHECK(materialize_false == false_label_);
}


void FullCodeGenerator::AccumulatorValueContext::Plug(bool flag) const {
  Heap::RootListIndex value_root_index =
      flag ? Heap::kTrueValueRootIndex : Heap::kFalseValueRootIndex;
  __ LoadRoot(result_register(), value_root_index);
}


void FullCodeGenerator::StackValueContext::Plug(bool flag) const {
  Heap::RootListIndex value_root_index =
      flag ? Heap::kTrueValueRootIndex : Heap::kFalseValueRootIndex;
  __ LoadRoot(x10, value_root_index);
  codegen()->PushOperand(x10);
}


void FullCodeGenerator::TestContext::Plug(bool flag) const {
  codegen()->PrepareForBailoutBeforeSplit(condition(),
                                          true,
                                          true_label_,
                                          false_label_);
  if (flag) {
    if (true_label_ != fall_through_) {
      __ B(true_label_);
    }
  } else {
    if (false_label_ != fall_through_) {
      __ B(false_label_);
    }
  }
}


void FullCodeGenerator::DoTest(Expression* condition,
                               Label* if_true,
                               Label* if_false,
                               Label* fall_through) {
  Handle<Code> ic = ToBooleanICStub::GetUninitialized(isolate());
  CallIC(ic, condition->test_id());
  __ CompareRoot(result_register(), Heap::kTrueValueRootIndex);
  Split(eq, if_true, if_false, fall_through);
}


// If (cond), branch to if_true.
// If (!cond), branch to if_false.
// fall_through is used as an optimization in cases where only one branch
// instruction is necessary.
void FullCodeGenerator::Split(Condition cond,
                              Label* if_true,
                              Label* if_false,
                              Label* fall_through) {
  if (if_false == fall_through) {
    __ B(cond, if_true);
  } else if (if_true == fall_through) {
    DCHECK(if_false != fall_through);
    __ B(NegateCondition(cond), if_false);
  } else {
    __ B(cond, if_true);
    __ B(if_false);
  }
}


MemOperand FullCodeGenerator::StackOperand(Variable* var) {
  // Offset is negative because higher indexes are at lower addresses.
  int offset = -var->index() * kXRegSize;
  // Adjust by a (parameter or local) base offset.
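  // For the i-th (0-based) parameter, var->index() is i, so this works out to
  //   fp + (num_parameters + 1 - i) * kPointerSize,
  // i.e. the caller-pushed argument slots above the saved fp and lr, with the
  // receiver one slot above the first parameter.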
  if (var->IsParameter()) {
    offset += (info_->scope()->num_parameters() + 1) * kPointerSize;
  } else {
    offset += JavaScriptFrameConstants::kLocal0Offset;
  }
  return MemOperand(fp, offset);
}


MemOperand FullCodeGenerator::VarOperand(Variable* var, Register scratch) {
  DCHECK(var->IsContextSlot() || var->IsStackAllocated());
  if (var->IsContextSlot()) {
    int context_chain_length = scope()->ContextChainLength(var->scope());
    __ LoadContext(scratch, context_chain_length);
    return ContextMemOperand(scratch, var->index());
  } else {
    return StackOperand(var);
  }
}


void FullCodeGenerator::GetVar(Register dest, Variable* var) {
  // Use destination as scratch.
  MemOperand location = VarOperand(var, dest);
  __ Ldr(dest, location);
}


void FullCodeGenerator::SetVar(Variable* var,
                               Register src,
                               Register scratch0,
                               Register scratch1) {
  DCHECK(var->IsContextSlot() || var->IsStackAllocated());
  DCHECK(!AreAliased(src, scratch0, scratch1));
  MemOperand location = VarOperand(var, scratch0);
  __ Str(src, location);

  // Emit the write barrier code if the location is in the heap.
  if (var->IsContextSlot()) {
    // scratch0 contains the correct context.
    __ RecordWriteContextSlot(scratch0, static_cast<int>(location.offset()),
                              src, scratch1, kLRHasBeenSaved, kDontSaveFPRegs);
  }
}


void FullCodeGenerator::PrepareForBailoutBeforeSplit(Expression* expr,
                                                     bool should_normalize,
                                                     Label* if_true,
                                                     Label* if_false) {
  // Only prepare for bailouts before splits if we're in a test
  // context. Otherwise, we let the Visit function deal with the
  // preparation to avoid preparing with the same AST id twice.
  if (!context()->IsTest()) return;

  // TODO(all): Investigate to see if there is something to work on here.
  Label skip;
  if (should_normalize) {
    __ B(&skip);
  }
  PrepareForBailout(expr, BailoutState::TOS_REGISTER);
  if (should_normalize) {
    __ CompareRoot(x0, Heap::kTrueValueRootIndex);
    Split(eq, if_true, if_false, NULL);
    __ Bind(&skip);
  }
}


void FullCodeGenerator::EmitDebugCheckDeclarationContext(Variable* variable) {
  // The variable in the declaration always resides in the current function
  // context.
  DCHECK_EQ(0, scope()->ContextChainLength(variable->scope()));
  if (FLAG_debug_code) {
    // Check that we're not inside a with or catch context.
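    // With and catch contexts are identified by dedicated maps, so comparing
    // the current context's map against those two root maps rules both out.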
    __ Ldr(x1, FieldMemOperand(cp, HeapObject::kMapOffset));
    __ CompareRoot(x1, Heap::kWithContextMapRootIndex);
    __ Check(ne, kDeclarationInWithContext);
    __ CompareRoot(x1, Heap::kCatchContextMapRootIndex);
    __ Check(ne, kDeclarationInCatchContext);
  }
}


void FullCodeGenerator::VisitVariableDeclaration(
    VariableDeclaration* declaration) {
  VariableProxy* proxy = declaration->proxy();
  Variable* variable = proxy->var();
  switch (variable->location()) {
    case VariableLocation::UNALLOCATED: {
      DCHECK(!variable->binding_needs_init());
      globals_->Add(variable->name(), zone());
      FeedbackSlot slot = proxy->VariableFeedbackSlot();
      DCHECK(!slot.IsInvalid());
      globals_->Add(handle(Smi::FromInt(slot.ToInt()), isolate()), zone());
      globals_->Add(isolate()->factory()->undefined_value(), zone());
      globals_->Add(isolate()->factory()->undefined_value(), zone());
      break;
    }
    case VariableLocation::PARAMETER:
    case VariableLocation::LOCAL:
      if (variable->binding_needs_init()) {
        Comment cmnt(masm_, "[ VariableDeclaration");
        __ LoadRoot(x10, Heap::kTheHoleValueRootIndex);
        __ Str(x10, StackOperand(variable));
      }
      break;

    case VariableLocation::CONTEXT:
      if (variable->binding_needs_init()) {
        Comment cmnt(masm_, "[ VariableDeclaration");
        EmitDebugCheckDeclarationContext(variable);
        __ LoadRoot(x10, Heap::kTheHoleValueRootIndex);
        __ Str(x10, ContextMemOperand(cp, variable->index()));
        // No write barrier since the_hole_value is in old space.
        PrepareForBailoutForId(proxy->id(), BailoutState::NO_REGISTERS);
      }
      break;

    case VariableLocation::LOOKUP:
    case VariableLocation::MODULE:
      UNREACHABLE();
  }
}


void FullCodeGenerator::VisitFunctionDeclaration(
    FunctionDeclaration* declaration) {
  VariableProxy* proxy = declaration->proxy();
  Variable* variable = proxy->var();
  switch (variable->location()) {
    case VariableLocation::UNALLOCATED: {
      globals_->Add(variable->name(), zone());
      FeedbackSlot slot = proxy->VariableFeedbackSlot();
      DCHECK(!slot.IsInvalid());
      globals_->Add(handle(Smi::FromInt(slot.ToInt()), isolate()), zone());

      // We need the slot where the literals array lives, too.
      slot = declaration->fun()->LiteralFeedbackSlot();
      DCHECK(!slot.IsInvalid());
      globals_->Add(handle(Smi::FromInt(slot.ToInt()), isolate()), zone());

      Handle<SharedFunctionInfo> function =
          Compiler::GetSharedFunctionInfo(declaration->fun(), script(), info_);
      // Check for stack overflow exception.
      if (function.is_null()) return SetStackOverflow();
      globals_->Add(function, zone());
      break;
    }

    case VariableLocation::PARAMETER:
    case VariableLocation::LOCAL: {
      Comment cmnt(masm_, "[ Function Declaration");
      VisitForAccumulatorValue(declaration->fun());
      __ Str(result_register(), StackOperand(variable));
      break;
    }

    case VariableLocation::CONTEXT: {
      Comment cmnt(masm_, "[ Function Declaration");
      EmitDebugCheckDeclarationContext(variable);
      VisitForAccumulatorValue(declaration->fun());
      __ Str(result_register(), ContextMemOperand(cp, variable->index()));
      int offset = Context::SlotOffset(variable->index());
      // We know that we have written a function, which is not a smi.
      __ RecordWriteContextSlot(cp,
                                offset,
                                result_register(),
                                x2,
                                kLRHasBeenSaved,
                                kDontSaveFPRegs,
                                EMIT_REMEMBERED_SET,
                                OMIT_SMI_CHECK);
      PrepareForBailoutForId(proxy->id(), BailoutState::NO_REGISTERS);
      break;
    }

    case VariableLocation::LOOKUP:
    case VariableLocation::MODULE:
      UNREACHABLE();
  }
}


void FullCodeGenerator::DeclareGlobals(Handle<FixedArray> pairs) {
  // Call the runtime to declare the globals.
  __ Mov(x11, Operand(pairs));
  Register flags = xzr;
  if (Smi::FromInt(DeclareGlobalsFlags())) {
    flags = x10;
    __ Mov(flags, Smi::FromInt(DeclareGlobalsFlags()));
  }
  __ EmitLoadFeedbackVector(x12);
  __ Push(x11, flags, x12);
  __ CallRuntime(Runtime::kDeclareGlobals);
  // Return value is ignored.
}


void FullCodeGenerator::VisitSwitchStatement(SwitchStatement* stmt) {
  ASM_LOCATION("FullCodeGenerator::VisitSwitchStatement");
  Comment cmnt(masm_, "[ SwitchStatement");
  Breakable nested_statement(this, stmt);
  SetStatementPosition(stmt);

  // Keep the switch value on the stack until a case matches.
  VisitForStackValue(stmt->tag());
  PrepareForBailoutForId(stmt->EntryId(), BailoutState::NO_REGISTERS);

  ZoneList<CaseClause*>* clauses = stmt->cases();
  CaseClause* default_clause = NULL;  // Can occur anywhere in the list.

  Label next_test;  // Recycled for each test.
  // Compile all the tests with branches to their bodies.
  for (int i = 0; i < clauses->length(); i++) {
    CaseClause* clause = clauses->at(i);
    clause->body_target()->Unuse();

    // The default is not a test, but remember it as the final fall-through.
    if (clause->is_default()) {
      default_clause = clause;
      continue;
    }

    Comment cmnt(masm_, "[ Case comparison");
    __ Bind(&next_test);
    next_test.Unuse();

    // Compile the label expression.
    VisitForAccumulatorValue(clause->label());

    // Perform the comparison as if via '==='.
    __ Peek(x1, 0);  // Switch value.

    JumpPatchSite patch_site(masm_);
    if (ShouldInlineSmiCase(Token::EQ_STRICT)) {
      Label slow_case;
      patch_site.EmitJumpIfEitherNotSmi(x0, x1, &slow_case);
      __ Cmp(x1, x0);
      __ B(ne, &next_test);
      __ Drop(1);  // Switch value is no longer needed.
      __ B(clause->body_target());
      __ Bind(&slow_case);
    }

    // Record position before stub call for type feedback.
    SetExpressionPosition(clause);
    Handle<Code> ic =
        CodeFactory::CompareIC(isolate(), Token::EQ_STRICT).code();
    CallIC(ic, clause->CompareId());
    patch_site.EmitPatchInfo();

    Label skip;
    __ B(&skip);
    PrepareForBailout(clause, BailoutState::TOS_REGISTER);
    __ JumpIfNotRoot(x0, Heap::kTrueValueRootIndex, &next_test);
    __ Drop(1);
    __ B(clause->body_target());
    __ Bind(&skip);

    __ Cbnz(x0, &next_test);
    __ Drop(1);  // Switch value is no longer needed.
    __ B(clause->body_target());
  }

  // Discard the test value and jump to the default if present, otherwise to
  // the end of the statement.
  __ Bind(&next_test);
  DropOperands(1);  // Switch value is no longer needed.
  if (default_clause == NULL) {
    __ B(nested_statement.break_label());
  } else {
    __ B(default_clause->body_target());
  }

  // Compile all the case bodies.
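  // The bodies are emitted back to back: each test above branches to its
  // clause's body_target(), and control falling off the end of one body
  // simply continues into the next, giving the fall-through semantics that
  // switch requires.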
  for (int i = 0; i < clauses->length(); i++) {
    Comment cmnt(masm_, "[ Case body");
    CaseClause* clause = clauses->at(i);
    __ Bind(clause->body_target());
    PrepareForBailoutForId(clause->EntryId(), BailoutState::NO_REGISTERS);
    VisitStatements(clause->statements());
  }

  __ Bind(nested_statement.break_label());
  PrepareForBailoutForId(stmt->ExitId(), BailoutState::NO_REGISTERS);
}


void FullCodeGenerator::VisitForInStatement(ForInStatement* stmt) {
  ASM_LOCATION("FullCodeGenerator::VisitForInStatement");
  Comment cmnt(masm_, "[ ForInStatement");
  SetStatementPosition(stmt, SKIP_BREAK);

  FeedbackSlot slot = stmt->ForInFeedbackSlot();

  // TODO(all): This visitor probably needs better comments and a revisit.

  // Get the object to enumerate over.
  SetExpressionAsStatementPosition(stmt->enumerable());
  VisitForAccumulatorValue(stmt->enumerable());
  OperandStackDepthIncrement(5);

  Label loop, exit;
  Iteration loop_statement(this, stmt);
  increment_loop_depth();

  // If the object is null or undefined, skip over the loop, otherwise convert
  // it to a JS receiver. See ECMA-262 version 5, section 12.6.4.
  Label convert, done_convert;
  __ JumpIfSmi(x0, &convert);
  __ JumpIfObjectType(x0, x10, x11, FIRST_JS_RECEIVER_TYPE, &done_convert, ge);
  __ JumpIfRoot(x0, Heap::kNullValueRootIndex, &exit);
  __ JumpIfRoot(x0, Heap::kUndefinedValueRootIndex, &exit);
  __ Bind(&convert);
  __ Call(isolate()->builtins()->ToObject(), RelocInfo::CODE_TARGET);
  RestoreContext();
  __ Bind(&done_convert);
  PrepareForBailoutForId(stmt->ToObjectId(), BailoutState::TOS_REGISTER);
  __ Push(x0);

  // Check cache validity in generated code. If we cannot guarantee cache
  // validity, call the runtime system to check cache validity or get the
  // property names in a fixed array. Note: Proxies never have an enum cache,
  // so will always take the slow path.
  Label call_runtime;
  __ CheckEnumCache(x0, x15, x10, x11, x12, x13, &call_runtime);

  // The enum cache is valid. Load the map of the object being
  // iterated over and use the cache for the iteration.
  Label use_cache;
  __ Ldr(x0, FieldMemOperand(x0, HeapObject::kMapOffset));
  __ B(&use_cache);

  // Get the set of properties to enumerate.
  __ Bind(&call_runtime);
  __ Push(x0);  // Duplicate the enumerable object on the stack.
  __ CallRuntime(Runtime::kForInEnumerate);
  PrepareForBailoutForId(stmt->EnumId(), BailoutState::TOS_REGISTER);

  // If we got a map from the runtime call, we can do a fast
  // modification check. Otherwise, we got a fixed array, and we have
  // to do a slow check.
  Label fixed_array, no_descriptors;
  __ Ldr(x2, FieldMemOperand(x0, HeapObject::kMapOffset));
  __ JumpIfNotRoot(x2, Heap::kMetaMapRootIndex, &fixed_array);

  // We got a map in register x0. Get the enumeration cache from it.
  __ Bind(&use_cache);

  __ EnumLengthUntagged(x1, x0);
  __ Cbz(x1, &no_descriptors);

  __ LoadInstanceDescriptors(x0, x2);
  __ Ldr(x2, FieldMemOperand(x2, DescriptorArray::kEnumCacheOffset));
  __ Ldr(x2,
         FieldMemOperand(x2, DescriptorArray::kEnumCacheBridgeCacheOffset));

  // Set up the four remaining stack slots.
  __ SmiTag(x1);
  // Map, enumeration cache, enum cache length, zero (both last as smis).
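  // Together with the enumerable object pushed earlier, this establishes the
  // five-slot loop state that the Peeks below rely on, from the top of the
  // stack down:
  //   [0] index (smi)         [1] length (smi)
  //   [2] array to iterate    [3] expected map, or Smi(1) on the slow path
  //   [4] the enumerable object itself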
  __ Push(x0, x2, x1, xzr);
  __ B(&loop);

  __ Bind(&no_descriptors);
  __ Drop(1);
  __ B(&exit);

  // We got a fixed array in register x0. Iterate through that.
  __ Bind(&fixed_array);

  __ Mov(x1, Smi::FromInt(1));  // Smi(1) indicates slow check.
  __ Ldr(x2, FieldMemOperand(x0, FixedArray::kLengthOffset));
  __ Push(x1, x0, x2);  // Smi and array, fixed array length (as smi).
  PrepareForBailoutForId(stmt->PrepareId(), BailoutState::NO_REGISTERS);
  __ Push(xzr);  // Initial index.

  // Generate code for doing the condition check.
  __ Bind(&loop);
  SetExpressionAsStatementPosition(stmt->each());

  // Load the current count to x0, load the length to x1.
  __ PeekPair(x0, x1, 0);
  __ Cmp(x0, x1);  // Compare to the array length.
  __ B(hs, loop_statement.break_label());

  // Get the current entry of the array into register x0.
  __ Peek(x10, 2 * kXRegSize);
  __ Add(x10, x10, Operand::UntagSmiAndScale(x0, kPointerSizeLog2));
  __ Ldr(x0, MemOperand(x10, FixedArray::kHeaderSize - kHeapObjectTag));

  // Get the expected map from the stack or a smi in the
  // permanent slow case into register x2.
  __ Peek(x2, 3 * kXRegSize);

  // Check if the expected map still matches that of the enumerable.
  // If not, we may have to filter the key.
  Label update_each;
  __ Peek(x1, 4 * kXRegSize);
  __ Ldr(x11, FieldMemOperand(x1, HeapObject::kMapOffset));
  __ Cmp(x11, x2);
  __ B(eq, &update_each);

  // We need to filter the key, record slow-path here.
  int const vector_index = SmiFromSlot(slot)->value();
  __ EmitLoadFeedbackVector(x3);
  __ Mov(x10, Operand(FeedbackVector::MegamorphicSentinel(isolate())));
  __ Str(x10,
         FieldMemOperand(x3, FixedArray::OffsetOfElementAt(vector_index)));

  // x0 contains the key. The receiver in x1 is the second argument to the
  // ForInFilter. ForInFilter returns undefined if the receiver doesn't
  // have the key or returns the name-converted key.
  __ Call(isolate()->builtins()->ForInFilter(), RelocInfo::CODE_TARGET);
  RestoreContext();
  PrepareForBailoutForId(stmt->FilterId(), BailoutState::TOS_REGISTER);
  __ CompareRoot(result_register(), Heap::kUndefinedValueRootIndex);
  __ B(eq, loop_statement.continue_label());

  // Update the 'each' property or variable from the possibly filtered
  // entry in register x0.
  __ Bind(&update_each);
  // Perform the assignment as if via '='.
  { EffectContext context(this);
    EmitAssignment(stmt->each(), stmt->EachFeedbackSlot());
    PrepareForBailoutForId(stmt->AssignmentId(), BailoutState::NO_REGISTERS);
  }

  // Both Crankshaft and Turbofan expect BodyId to be right before
  // stmt->body().
  PrepareForBailoutForId(stmt->BodyId(), BailoutState::NO_REGISTERS);
  // Generate code for the body of the loop.
  Visit(stmt->body());

  // Generate code for going to the next element by incrementing
  // the index (smi) stored on top of the stack.
  __ Bind(loop_statement.continue_label());
  PrepareForBailoutForId(stmt->IncrementId(), BailoutState::NO_REGISTERS);
  // TODO(all): We could use a callee saved register to avoid popping.
  __ Pop(x0);
  __ Add(x0, x0, Smi::FromInt(1));
  __ Push(x0);

  EmitBackEdgeBookkeeping(stmt, &loop);
  __ B(&loop);

  // Remove the pointers stored on the stack.
  __ Bind(loop_statement.break_label());
  DropOperands(5);

  // Exit and decrement the loop depth.
  PrepareForBailoutForId(stmt->ExitId(), BailoutState::NO_REGISTERS);
  __ Bind(&exit);
  decrement_loop_depth();
}

void FullCodeGenerator::EmitSetHomeObject(Expression* initializer, int offset,
                                          FeedbackSlot slot) {
  DCHECK(NeedsHomeObject(initializer));
  __ Peek(StoreDescriptor::ReceiverRegister(), 0);
  __ Peek(StoreDescriptor::ValueRegister(), offset * kPointerSize);
  CallStoreIC(slot, isolate()->factory()->home_object_symbol());
}

void FullCodeGenerator::EmitSetHomeObjectAccumulator(Expression* initializer,
                                                     int offset,
                                                     FeedbackSlot slot) {
  DCHECK(NeedsHomeObject(initializer));
  __ Move(StoreDescriptor::ReceiverRegister(), x0);
  __ Peek(StoreDescriptor::ValueRegister(), offset * kPointerSize);
  CallStoreIC(slot, isolate()->factory()->home_object_symbol());
}

void FullCodeGenerator::EmitVariableLoad(VariableProxy* proxy,
                                         TypeofMode typeof_mode) {
  // Record position before possible IC call.
  SetExpressionPosition(proxy);
  PrepareForBailoutForId(proxy->BeforeId(), BailoutState::NO_REGISTERS);
  Variable* var = proxy->var();

  // Two cases: global variables and all other types of variables.
  switch (var->location()) {
    case VariableLocation::UNALLOCATED: {
      Comment cmnt(masm_, "Global variable");
      EmitGlobalVariableLoad(proxy, typeof_mode);
      context()->Plug(x0);
      break;
    }

    case VariableLocation::PARAMETER:
    case VariableLocation::LOCAL:
    case VariableLocation::CONTEXT: {
      DCHECK_EQ(NOT_INSIDE_TYPEOF, typeof_mode);
      Comment cmnt(masm_, var->IsContextSlot()
                              ? "Context variable"
                              : "Stack variable");
      if (proxy->hole_check_mode() == HoleCheckMode::kRequired) {
        // Throw a reference error when using an uninitialized let/const
        // binding in harmony mode.
        Label done;
        GetVar(x0, var);
        __ JumpIfNotRoot(x0, Heap::kTheHoleValueRootIndex, &done);
        __ Mov(x0, Operand(var->name()));
        __ Push(x0);
        __ CallRuntime(Runtime::kThrowReferenceError);
        __ Bind(&done);
        context()->Plug(x0);
        break;
      }
      context()->Plug(var);
      break;
    }

    case VariableLocation::LOOKUP:
    case VariableLocation::MODULE:
      UNREACHABLE();
  }
}


void FullCodeGenerator::EmitAccessor(ObjectLiteralProperty* property) {
  Expression* expression = (property == NULL) ? NULL : property->value();
  if (expression == NULL) {
    __ LoadRoot(x10, Heap::kNullValueRootIndex);
    PushOperand(x10);
  } else {
    VisitForStackValue(expression);
    if (NeedsHomeObject(expression)) {
      DCHECK(property->kind() == ObjectLiteral::Property::GETTER ||
             property->kind() == ObjectLiteral::Property::SETTER);
      int offset = property->kind() == ObjectLiteral::Property::GETTER ?
          2 : 3;
      EmitSetHomeObject(expression, offset, property->GetSlot());
    }
  }
}


void FullCodeGenerator::VisitObjectLiteral(ObjectLiteral* expr) {
  Comment cmnt(masm_, "[ ObjectLiteral");

  Handle<BoilerplateDescription> constant_properties =
      expr->GetOrBuildConstantProperties(isolate());
  __ Ldr(x3, MemOperand(fp, JavaScriptFrameConstants::kFunctionOffset));
  __ Mov(x2, SmiFromSlot(expr->literal_slot()));
  __ Mov(x1, Operand(constant_properties));
  int flags = expr->ComputeFlags();
  __ Mov(x0, Smi::FromInt(flags));
  if (MustCreateObjectLiteralWithRuntime(expr)) {
    __ Push(x3, x2, x1, x0);
    __ CallRuntime(Runtime::kCreateObjectLiteral);
  } else {
    Callable callable = CodeFactory::FastCloneShallowObject(
        isolate(), expr->properties_count());
    __ Call(callable.code(), RelocInfo::CODE_TARGET);
    RestoreContext();
  }
  PrepareForBailoutForId(expr->CreateLiteralId(), BailoutState::TOS_REGISTER);

  // If result_saved is true the result is on top of the stack. If
  // result_saved is false the result is in x0.
  bool result_saved = false;

  AccessorTable accessor_table(zone());
  for (int i = 0; i < expr->properties()->length(); i++) {
    ObjectLiteral::Property* property = expr->properties()->at(i);
    DCHECK(!property->is_computed_name());
    if (property->IsCompileTimeValue()) continue;

    Literal* key = property->key()->AsLiteral();
    Expression* value = property->value();
    if (!result_saved) {
      PushOperand(x0);  // Save result on stack.
      result_saved = true;
    }
    switch (property->kind()) {
      case ObjectLiteral::Property::SPREAD:
      case ObjectLiteral::Property::CONSTANT:
        UNREACHABLE();
      case ObjectLiteral::Property::MATERIALIZED_LITERAL:
        DCHECK(!CompileTimeValue::IsCompileTimeValue(property->value()));
        // Fall through.
      case ObjectLiteral::Property::COMPUTED:
        // It is safe to use [[Put]] here because the boilerplate already
        // contains computed properties with an uninitialized value.
        if (key->IsStringLiteral()) {
          DCHECK(key->IsPropertyName());
          if (property->emit_store()) {
            VisitForAccumulatorValue(value);
            DCHECK(StoreDescriptor::ValueRegister().is(x0));
            __ Peek(StoreDescriptor::ReceiverRegister(), 0);
            CallStoreIC(property->GetSlot(0), key->value(), true);
            PrepareForBailoutForId(key->id(), BailoutState::NO_REGISTERS);

            if (NeedsHomeObject(value)) {
              EmitSetHomeObjectAccumulator(value, 0, property->GetSlot(1));
            }
          } else {
            VisitForEffect(value);
          }
          break;
        }
        __ Peek(x0, 0);
        PushOperand(x0);
        VisitForStackValue(key);
        VisitForStackValue(value);
        if (property->emit_store()) {
          if (NeedsHomeObject(value)) {
            EmitSetHomeObject(value, 2, property->GetSlot());
          }
          __ Mov(x0, Smi::FromInt(SLOPPY));  // Language mode
          PushOperand(x0);
          CallRuntimeWithOperands(Runtime::kSetProperty);
        } else {
          DropOperands(3);
        }
        break;
      case ObjectLiteral::Property::PROTOTYPE:
        DCHECK(property->emit_store());
        // Duplicate receiver on stack.
        __ Peek(x0, 0);
        PushOperand(x0);
        VisitForStackValue(value);
        CallRuntimeWithOperands(Runtime::kInternalSetPrototype);
        PrepareForBailoutForId(expr->GetIdForPropertySet(i),
                               BailoutState::NO_REGISTERS);
        break;
      case ObjectLiteral::Property::GETTER:
        if (property->emit_store()) {
          AccessorTable::Iterator it = accessor_table.lookup(key);
          it->second->bailout_id = expr->GetIdForPropertySet(i);
          it->second->getter = property;
        }
        break;
      case ObjectLiteral::Property::SETTER:
        if (property->emit_store()) {
          AccessorTable::Iterator it = accessor_table.lookup(key);
          it->second->bailout_id = expr->GetIdForPropertySet(i);
          it->second->setter = property;
        }
        break;
    }
  }

  // Emit code to define accessors, using only a single call to the runtime
  // for each pair of corresponding getters and setters.
  for (AccessorTable::Iterator it = accessor_table.begin();
       it != accessor_table.end();
       ++it) {
    __ Peek(x10, 0);  // Duplicate receiver.
    PushOperand(x10);
    VisitForStackValue(it->first);
    EmitAccessor(it->second->getter);
    EmitAccessor(it->second->setter);
    __ Mov(x10, Smi::FromInt(NONE));
    PushOperand(x10);
    CallRuntimeWithOperands(Runtime::kDefineAccessorPropertyUnchecked);
    PrepareForBailoutForId(it->second->bailout_id, BailoutState::NO_REGISTERS);
  }

  if (result_saved) {
    context()->PlugTOS();
  } else {
    context()->Plug(x0);
  }
}


void FullCodeGenerator::VisitArrayLiteral(ArrayLiteral* expr) {
  Comment cmnt(masm_, "[ ArrayLiteral");

  Handle<ConstantElementsPair> constant_elements =
      expr->GetOrBuildConstantElements(isolate());

  __ Ldr(x3, MemOperand(fp, JavaScriptFrameConstants::kFunctionOffset));
  __ Mov(x2, SmiFromSlot(expr->literal_slot()));
  __ Mov(x1, Operand(constant_elements));
  if (MustCreateArrayLiteralWithRuntime(expr)) {
    __ Mov(x0, Smi::FromInt(expr->ComputeFlags()));
    __ Push(x3, x2, x1, x0);
    __ CallRuntime(Runtime::kCreateArrayLiteral);
  } else {
    Callable callable =
        CodeFactory::FastCloneShallowArray(isolate(), TRACK_ALLOCATION_SITE);
    __ Call(callable.code(), RelocInfo::CODE_TARGET);
    RestoreContext();
  }
  PrepareForBailoutForId(expr->CreateLiteralId(), BailoutState::TOS_REGISTER);

  bool result_saved = false;  // Is the result saved to the stack?
  ZoneList<Expression*>* subexprs = expr->values();
  int length = subexprs->length();

  // Emit code to evaluate all the non-constant subexpressions and to store
  // them into the newly cloned array.
  for (int array_index = 0; array_index < length; array_index++) {
    Expression* subexpr = subexprs->at(array_index);
    DCHECK(!subexpr->IsSpread());

    // If the subexpression is a literal or a simple materialized literal it
    // is already set in the cloned array.
    if (CompileTimeValue::IsCompileTimeValue(subexpr)) continue;

    if (!result_saved) {
      PushOperand(x0);
      result_saved = true;
    }
    VisitForAccumulatorValue(subexpr);

    __ Mov(StoreDescriptor::NameRegister(), Smi::FromInt(array_index));
    __ Peek(StoreDescriptor::ReceiverRegister(), 0);
    CallKeyedStoreIC(expr->LiteralFeedbackSlot());

    PrepareForBailoutForId(expr->GetIdForElement(array_index),
                           BailoutState::NO_REGISTERS);
  }

  if (result_saved) {
    context()->PlugTOS();
  } else {
    context()->Plug(x0);
  }
}


void FullCodeGenerator::VisitAssignment(Assignment* expr) {
  DCHECK(expr->target()->IsValidReferenceExpressionOrThis());

  Comment cmnt(masm_, "[ Assignment");

  Property* property = expr->target()->AsProperty();
  LhsKind assign_type = Property::GetAssignType(property);

  // Evaluate LHS expression.
  switch (assign_type) {
    case VARIABLE:
      // Nothing to do here.
      break;
    case NAMED_PROPERTY:
      if (expr->is_compound()) {
        // We need the receiver both on the stack and in the register.
        VisitForStackValue(property->obj());
        __ Peek(LoadDescriptor::ReceiverRegister(), 0);
      } else {
        VisitForStackValue(property->obj());
      }
      break;
    case KEYED_PROPERTY:
      if (expr->is_compound()) {
        VisitForStackValue(property->obj());
        VisitForStackValue(property->key());
        __ Peek(LoadDescriptor::ReceiverRegister(), 1 * kPointerSize);
        __ Peek(LoadDescriptor::NameRegister(), 0);
      } else {
        VisitForStackValue(property->obj());
        VisitForStackValue(property->key());
      }
      break;
    case NAMED_SUPER_PROPERTY:
    case KEYED_SUPER_PROPERTY:
      UNREACHABLE();
      break;
  }

  // For compound assignments we need another deoptimization point after the
  // variable/property load.
  if (expr->is_compound()) {
    { AccumulatorValueContext context(this);
      switch (assign_type) {
        case VARIABLE:
          EmitVariableLoad(expr->target()->AsVariableProxy());
          PrepareForBailout(expr->target(), BailoutState::TOS_REGISTER);
          break;
        case NAMED_PROPERTY:
          EmitNamedPropertyLoad(property);
          PrepareForBailoutForId(property->LoadId(),
                                 BailoutState::TOS_REGISTER);
          break;
        case KEYED_PROPERTY:
          EmitKeyedPropertyLoad(property);
          PrepareForBailoutForId(property->LoadId(),
                                 BailoutState::TOS_REGISTER);
          break;
        case NAMED_SUPER_PROPERTY:
        case KEYED_SUPER_PROPERTY:
          UNREACHABLE();
          break;
      }
    }

    Token::Value op = expr->binary_op();
    PushOperand(x0);  // Left operand goes on the stack.
    VisitForAccumulatorValue(expr->value());

    AccumulatorValueContext context(this);
    if (ShouldInlineSmiCase(op)) {
      EmitInlineSmiBinaryOp(expr->binary_operation(),
                            op,
                            expr->target(),
                            expr->value());
    } else {
      EmitBinaryOp(expr->binary_operation(), op);
    }

    // Deoptimization point in case the binary operation may have side
    // effects.
    PrepareForBailout(expr->binary_operation(), BailoutState::TOS_REGISTER);
  } else {
    VisitForAccumulatorValue(expr->value());
  }

  SetExpressionPosition(expr);

  // Store the value.
  switch (assign_type) {
    case VARIABLE: {
      VariableProxy* proxy = expr->target()->AsVariableProxy();
      EmitVariableAssignment(proxy->var(), expr->op(), expr->AssignmentSlot(),
                             proxy->hole_check_mode());
      PrepareForBailoutForId(expr->AssignmentId(), BailoutState::TOS_REGISTER);
      context()->Plug(x0);
      break;
    }
    case NAMED_PROPERTY:
      EmitNamedPropertyAssignment(expr);
      break;
    case KEYED_PROPERTY:
      EmitKeyedPropertyAssignment(expr);
      break;
    case NAMED_SUPER_PROPERTY:
    case KEYED_SUPER_PROPERTY:
      UNREACHABLE();
      break;
  }
}


void FullCodeGenerator::EmitInlineSmiBinaryOp(BinaryOperation* expr,
                                              Token::Value op,
                                              Expression* left_expr,
                                              Expression* right_expr) {
  Label done, both_smis, stub_call;

  // Get the arguments.
  Register left = x1;
  Register right = x0;
  Register result = x0;
  PopOperand(left);

  // Perform combined smi check on both operands.
  __ Orr(x10, left, right);
  JumpPatchSite patch_site(masm_);
  patch_site.EmitJumpIfSmi(x10, &both_smis);

  __ Bind(&stub_call);

  Handle<Code> code = CodeFactory::BinaryOpIC(isolate(), op).code();
  {
    Assembler::BlockPoolsScope scope(masm_);
    CallIC(code, expr->BinaryOperationFeedbackId());
    patch_site.EmitPatchInfo();
  }
  __ B(&done);

  __ Bind(&both_smis);
  // Smi case. This code works in the same way as the smi-smi case in the type
  // recording binary operation stub, see
  // BinaryOpStub::GenerateSmiSmiOperation for comments.
  // TODO(all): That doesn't exist any more. Where are the comments?
  //
  // The set of operations that needs to be supported here is controlled by
  // FullCodeGenerator::ShouldInlineSmiCase().
  switch (op) {
    case Token::SAR:
      __ Ubfx(right, right, kSmiShift, 5);
      __ Asr(result, left, right);
      __ Bic(result, result, kSmiShiftMask);
      break;
    case Token::SHL:
      __ Ubfx(right, right, kSmiShift, 5);
      __ Lsl(result, left, right);
      break;
    case Token::SHR:
      // If `left >>> right` >= 0x80000000, the result is not representable
      // in a signed 32-bit smi.
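      // On arm64 a smi carries its 32-bit payload in the upper word
      // (kSmiShift == 32), so the Ubfx below extracts the low five bits of
      // the untagged shift count directly from the tagged value, and Bic
      // clears whatever the shift left behind in the tag half.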
      __ Ubfx(right, right, kSmiShift, 5);
      __ Lsr(x10, left, right);
      __ Tbnz(x10, kXSignBit, &stub_call);
      __ Bic(result, x10, kSmiShiftMask);
      break;
    case Token::ADD:
      __ Adds(x10, left, right);
      __ B(vs, &stub_call);
      __ Mov(result, x10);
      break;
    case Token::SUB:
      __ Subs(x10, left, right);
      __ B(vs, &stub_call);
      __ Mov(result, x10);
      break;
    case Token::MUL: {
      Label not_minus_zero, done;
      STATIC_ASSERT(static_cast<unsigned>(kSmiShift) == (kXRegSizeInBits / 2));
      STATIC_ASSERT(kSmiTag == 0);
      __ Smulh(x10, left, right);
      __ Cbnz(x10, &not_minus_zero);
      __ Eor(x11, left, right);
      __ Tbnz(x11, kXSignBit, &stub_call);
      __ Mov(result, x10);
      __ B(&done);
      __ Bind(&not_minus_zero);
      __ Cls(x11, x10);
      __ Cmp(x11, kXRegSizeInBits - kSmiShift);
      __ B(lt, &stub_call);
      __ SmiTag(result, x10);
      __ Bind(&done);
      break;
    }
    case Token::BIT_OR:
      __ Orr(result, left, right);
      break;
    case Token::BIT_AND:
      __ And(result, left, right);
      break;
    case Token::BIT_XOR:
      __ Eor(result, left, right);
      break;
    default:
      UNREACHABLE();
  }

  __ Bind(&done);
  context()->Plug(x0);
}


void FullCodeGenerator::EmitBinaryOp(BinaryOperation* expr, Token::Value op) {
  PopOperand(x1);
  Handle<Code> code = CodeFactory::BinaryOpIC(isolate(), op).code();
  JumpPatchSite patch_site(masm_);  // Unbound, signals no inlined smi code.
  {
    Assembler::BlockPoolsScope scope(masm_);
    CallIC(code, expr->BinaryOperationFeedbackId());
    patch_site.EmitPatchInfo();
  }
  context()->Plug(x0);
}

void FullCodeGenerator::EmitAssignment(Expression* expr, FeedbackSlot slot) {
  DCHECK(expr->IsValidReferenceExpressionOrThis());

  Property* prop = expr->AsProperty();
  LhsKind assign_type = Property::GetAssignType(prop);

  switch (assign_type) {
    case VARIABLE: {
      VariableProxy* proxy = expr->AsVariableProxy();
      EffectContext context(this);
      EmitVariableAssignment(proxy->var(), Token::ASSIGN, slot,
                             proxy->hole_check_mode());
      break;
    }
    case NAMED_PROPERTY: {
      PushOperand(x0);  // Preserve value.
      VisitForAccumulatorValue(prop->obj());
      // TODO(all): We could introduce a VisitForRegValue(reg, expr) to avoid
      // this copy.
      __ Mov(StoreDescriptor::ReceiverRegister(), x0);
      PopOperand(StoreDescriptor::ValueRegister());  // Restore value.
      CallStoreIC(slot, prop->key()->AsLiteral()->value());
      break;
    }
    case KEYED_PROPERTY: {
      PushOperand(x0);  // Preserve value.
      VisitForStackValue(prop->obj());
      VisitForAccumulatorValue(prop->key());
      __ Mov(StoreDescriptor::NameRegister(), x0);
      PopOperands(StoreDescriptor::ReceiverRegister(),
                  StoreDescriptor::ValueRegister());
      CallKeyedStoreIC(slot);
      break;
    }
    case NAMED_SUPER_PROPERTY:
    case KEYED_SUPER_PROPERTY:
      UNREACHABLE();
      break;
  }
  context()->Plug(x0);
}


void FullCodeGenerator::EmitStoreToStackLocalOrContextSlot(
    Variable* var, MemOperand location) {
  __ Str(result_register(), location);
  if (var->IsContextSlot()) {
    // RecordWrite may destroy all its register arguments.
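    // The value is therefore copied into x10 first; x1 is expected to still
    // hold the context, since the callers in this file compute |location|
    // with VarOperand(var, x1).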
    __ Mov(x10, result_register());
    int offset = Context::SlotOffset(var->index());
    __ RecordWriteContextSlot(
        x1, offset, x10, x11, kLRHasBeenSaved, kDontSaveFPRegs);
  }
}

void FullCodeGenerator::EmitVariableAssignment(Variable* var, Token::Value op,
                                               FeedbackSlot slot,
                                               HoleCheckMode hole_check_mode) {
  ASM_LOCATION("FullCodeGenerator::EmitVariableAssignment");
  if (var->IsUnallocated()) {
    // Global var, const, or let.
    __ LoadGlobalObject(StoreDescriptor::ReceiverRegister());
    CallStoreIC(slot, var->name());

  } else if (IsLexicalVariableMode(var->mode()) && op != Token::INIT) {
    DCHECK(!var->IsLookupSlot());
    DCHECK(var->IsStackAllocated() || var->IsContextSlot());
    MemOperand location = VarOperand(var, x1);
    // Perform an initialization check for lexically declared variables.
    if (var->binding_needs_init()) {
      Label assign;
      __ Ldr(x10, location);
      __ JumpIfNotRoot(x10, Heap::kTheHoleValueRootIndex, &assign);
      __ Mov(x10, Operand(var->name()));
      __ Push(x10);
      __ CallRuntime(Runtime::kThrowReferenceError);
      __ Bind(&assign);
    }
    if (var->mode() != CONST) {
      EmitStoreToStackLocalOrContextSlot(var, location);
    } else if (var->throw_on_const_assignment(language_mode())) {
      __ CallRuntime(Runtime::kThrowConstAssignError);
    }
  } else if (var->is_this() && var->mode() == CONST && op == Token::INIT) {
    // Initializing assignment to const {this} needs a write barrier.
    DCHECK(var->IsStackAllocated() || var->IsContextSlot());
    Label uninitialized_this;
    MemOperand location = VarOperand(var, x1);
    __ Ldr(x10, location);
    __ JumpIfRoot(x10, Heap::kTheHoleValueRootIndex, &uninitialized_this);
    __ Mov(x0, Operand(var->name()));
    __ Push(x0);
    __ CallRuntime(Runtime::kThrowReferenceError);
    __ bind(&uninitialized_this);
    EmitStoreToStackLocalOrContextSlot(var, location);

  } else {
    DCHECK(var->mode() != CONST || op == Token::INIT);
    DCHECK(var->IsStackAllocated() || var->IsContextSlot());
    DCHECK(!var->IsLookupSlot());
    // Assignment to var or initializing assignment to let/const in harmony
    // mode.
    MemOperand location = VarOperand(var, x1);
    if (FLAG_debug_code && var->mode() == LET && op == Token::INIT) {
      __ Ldr(x10, location);
      __ CompareRoot(x10, Heap::kTheHoleValueRootIndex);
      __ Check(eq, kLetBindingReInitialization);
    }
    EmitStoreToStackLocalOrContextSlot(var, location);
  }
}


void FullCodeGenerator::EmitNamedPropertyAssignment(Assignment* expr) {
  ASM_LOCATION("FullCodeGenerator::EmitNamedPropertyAssignment");
  // Assignment to a property, using a named store IC.
  Property* prop = expr->target()->AsProperty();
  DCHECK(prop != NULL);
  DCHECK(prop->key()->IsLiteral());

  PopOperand(StoreDescriptor::ReceiverRegister());
  CallStoreIC(expr->AssignmentSlot(), prop->key()->AsLiteral()->value());

  PrepareForBailoutForId(expr->AssignmentId(), BailoutState::TOS_REGISTER);
  context()->Plug(x0);
}


void FullCodeGenerator::EmitKeyedPropertyAssignment(Assignment* expr) {
  ASM_LOCATION("FullCodeGenerator::EmitKeyedPropertyAssignment");
  // Assignment to a property, using a keyed store IC.

  // TODO(all): Could we pass this in registers rather than on the stack?
  PopOperands(StoreDescriptor::NameRegister(),
              StoreDescriptor::ReceiverRegister());
  DCHECK(StoreDescriptor::ValueRegister().is(x0));

  CallKeyedStoreIC(expr->AssignmentSlot());

  PrepareForBailoutForId(expr->AssignmentId(), BailoutState::TOS_REGISTER);
  context()->Plug(x0);
}

// Code common for calls using the IC.
void FullCodeGenerator::EmitCallWithLoadIC(Call* expr) {
  ASM_LOCATION("FullCodeGenerator::EmitCallWithLoadIC");
  Expression* callee = expr->expression();

  // Get the target function.
  ConvertReceiverMode convert_mode;
  if (callee->IsVariableProxy()) {
    { StackValueContext context(this);
      EmitVariableLoad(callee->AsVariableProxy());
      PrepareForBailout(callee, BailoutState::NO_REGISTERS);
    }
    // Push undefined as receiver. This is patched in the method prologue if it
    // is a sloppy mode method.
    {
      UseScratchRegisterScope temps(masm_);
      Register temp = temps.AcquireX();
      __ LoadRoot(temp, Heap::kUndefinedValueRootIndex);
      PushOperand(temp);
    }
    convert_mode = ConvertReceiverMode::kNullOrUndefined;
  } else {
    // Load the function from the receiver.
    DCHECK(callee->IsProperty());
    DCHECK(!callee->AsProperty()->IsSuperAccess());
    __ Peek(LoadDescriptor::ReceiverRegister(), 0);
    EmitNamedPropertyLoad(callee->AsProperty());
    PrepareForBailoutForId(callee->AsProperty()->LoadId(),
                           BailoutState::TOS_REGISTER);
    // Push the target function under the receiver.
    PopOperand(x10);
    PushOperands(x0, x10);
    convert_mode = ConvertReceiverMode::kNotNullOrUndefined;
  }

  EmitCall(expr, convert_mode);
}


// Code common for calls using the IC.
void FullCodeGenerator::EmitKeyedCallWithLoadIC(Call* expr,
                                                Expression* key) {
  ASM_LOCATION("FullCodeGenerator::EmitKeyedCallWithLoadIC");
  // Load the key.
  VisitForAccumulatorValue(key);

  Expression* callee = expr->expression();

  // Load the function from the receiver.
  DCHECK(callee->IsProperty());
  __ Peek(LoadDescriptor::ReceiverRegister(), 0);
  __ Move(LoadDescriptor::NameRegister(), x0);
  EmitKeyedPropertyLoad(callee->AsProperty());
  PrepareForBailoutForId(callee->AsProperty()->LoadId(),
                         BailoutState::TOS_REGISTER);

  // Push the target function under the receiver.
  PopOperand(x10);
  PushOperands(x0, x10);

  EmitCall(expr, ConvertReceiverMode::kNotNullOrUndefined);
}


void FullCodeGenerator::EmitCall(Call* expr, ConvertReceiverMode mode) {
  ASM_LOCATION("FullCodeGenerator::EmitCall");
  // Load the arguments.
  ZoneList<Expression*>* args = expr->arguments();
  int arg_count = args->length();
  for (int i = 0; i < arg_count; i++) {
    VisitForStackValue(args->at(i));
  }

  PrepareForBailoutForId(expr->CallId(), BailoutState::NO_REGISTERS);
  SetCallPosition(expr, expr->tail_call_mode());
  if (expr->tail_call_mode() == TailCallMode::kAllow) {
    if (FLAG_trace) {
      __ CallRuntime(Runtime::kTraceTailCall);
    }
    // Update profiling counters before the tail call since we will
    // not return to this function.
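    // (Roughly: the helper decrements the interrupt budget and calls into
    // the runtime to reset it when it underflows, as the normal return
    // sequence does; it has to run now because the tail call never comes
    // back through our epilogue. See its definition earlier in this file
    // for the authoritative sequence.)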
    EmitProfilingCounterHandlingForReturnSequence(true);
  }
  Handle<Code> code =
      CodeFactory::CallICTrampoline(isolate(), mode, expr->tail_call_mode())
          .code();
  __ Mov(x3, IntFromSlot(expr->CallFeedbackICSlot()));
  __ Peek(x1, (arg_count + 1) * kXRegSize);
  __ Mov(x0, arg_count);
  CallIC(code);
  OperandStackDepthDecrement(arg_count + 1);

  RecordJSReturnSite(expr);
  RestoreContext();
  context()->DropAndPlug(1, x0);
}

void FullCodeGenerator::VisitCallNew(CallNew* expr) {
  Comment cmnt(masm_, "[ CallNew");
  // According to ECMA-262, section 11.2.2, page 44, the function
  // expression in new calls must be evaluated before the
  // arguments.

  // Push constructor on the stack. If it's not a function it's used as
  // receiver for CALL_NON_FUNCTION, otherwise the value on the stack is
  // ignored.
  DCHECK(!expr->expression()->IsSuperPropertyReference());
  VisitForStackValue(expr->expression());

  // Push the arguments ("left-to-right") on the stack.
  ZoneList<Expression*>* args = expr->arguments();
  int arg_count = args->length();
  for (int i = 0; i < arg_count; i++) {
    VisitForStackValue(args->at(i));
  }

  // Call the construct call builtin that handles allocation and
  // constructor invocation.
  SetConstructCallPosition(expr);

  // Load function and argument count into x1 and x0.
  __ Mov(x0, arg_count);
  __ Peek(x1, arg_count * kXRegSize);

  // Record call targets in unoptimized code.
  __ EmitLoadFeedbackVector(x2);
  __ Mov(x3, SmiFromSlot(expr->CallNewFeedbackSlot()));

  CallConstructStub stub(isolate());
  CallIC(stub.GetCode());
  OperandStackDepthDecrement(arg_count + 1);
  PrepareForBailoutForId(expr->ReturnId(), BailoutState::TOS_REGISTER);
  RestoreContext();
  context()->Plug(x0);
}


void FullCodeGenerator::EmitIsSmi(CallRuntime* expr) {
  ZoneList<Expression*>* args = expr->arguments();
  DCHECK(args->length() == 1);

  VisitForAccumulatorValue(args->at(0));

  Label materialize_true, materialize_false;
  Label* if_true = NULL;
  Label* if_false = NULL;
  Label* fall_through = NULL;
  context()->PrepareTest(&materialize_true, &materialize_false,
                         &if_true, &if_false, &fall_through);

  PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
  __ TestAndSplit(x0, kSmiTagMask, if_true, if_false, fall_through);

  context()->Plug(if_true, if_false);
}


void FullCodeGenerator::EmitIsJSReceiver(CallRuntime* expr) {
  ZoneList<Expression*>* args = expr->arguments();
  DCHECK(args->length() == 1);

  VisitForAccumulatorValue(args->at(0));

  Label materialize_true, materialize_false;
  Label* if_true = NULL;
  Label* if_false = NULL;
  Label* fall_through = NULL;
  context()->PrepareTest(&materialize_true, &materialize_false,
                         &if_true, &if_false, &fall_through);

  __ JumpIfSmi(x0, if_false);
  __ CompareObjectType(x0, x10, x11, FIRST_JS_RECEIVER_TYPE);
  PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
  Split(ge, if_true, if_false, fall_through);

  context()->Plug(if_true, if_false);
}


void FullCodeGenerator::EmitIsArray(CallRuntime* expr) {
  ZoneList<Expression*>* args = expr->arguments();
  DCHECK(args->length() == 1);

  VisitForAccumulatorValue(args->at(0));

  Label materialize_true, materialize_false;
  Label* if_true = NULL;
  Label* if_false = NULL;
  Label* fall_through = NULL;
  context()->PrepareTest(&materialize_true, &materialize_false,
                         &if_true, &if_false, &fall_through);

  __ JumpIfSmi(x0, if_false);
  __ CompareObjectType(x0, x10, x11, JS_ARRAY_TYPE);
  PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
  Split(eq, if_true, if_false, fall_through);

  context()->Plug(if_true, if_false);
}


void FullCodeGenerator::EmitIsTypedArray(CallRuntime* expr) {
  ZoneList<Expression*>* args = expr->arguments();
  DCHECK(args->length() == 1);

  VisitForAccumulatorValue(args->at(0));

  Label materialize_true, materialize_false;
  Label* if_true = NULL;
  Label* if_false = NULL;
  Label* fall_through = NULL;
  context()->PrepareTest(&materialize_true, &materialize_false, &if_true,
                         &if_false, &fall_through);

  __ JumpIfSmi(x0, if_false);
  __ CompareObjectType(x0, x10, x11, JS_TYPED_ARRAY_TYPE);
  PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
  Split(eq, if_true, if_false, fall_through);

  context()->Plug(if_true, if_false);
}


void FullCodeGenerator::EmitIsJSProxy(CallRuntime* expr) {
  ZoneList<Expression*>* args = expr->arguments();
  DCHECK(args->length() == 1);

  VisitForAccumulatorValue(args->at(0));

  Label materialize_true, materialize_false;
  Label* if_true = NULL;
  Label* if_false = NULL;
  Label* fall_through = NULL;
  context()->PrepareTest(&materialize_true, &materialize_false, &if_true,
                         &if_false, &fall_through);

  __ JumpIfSmi(x0, if_false);
  __ CompareObjectType(x0, x10, x11, JS_PROXY_TYPE);
  PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
  Split(eq, if_true, if_false, fall_through);

  context()->Plug(if_true, if_false);
}


void FullCodeGenerator::EmitClassOf(CallRuntime* expr) {
  ASM_LOCATION("FullCodeGenerator::EmitClassOf");
  ZoneList<Expression*>* args = expr->arguments();
  DCHECK(args->length() == 1);
  Label done, null, function, non_function_constructor;

  VisitForAccumulatorValue(args->at(0));

  // If the object is not a JSReceiver, we return null.
  __ JumpIfSmi(x0, &null);
  STATIC_ASSERT(LAST_JS_RECEIVER_TYPE == LAST_TYPE);
  __ CompareObjectType(x0, x10, x11, FIRST_JS_RECEIVER_TYPE);
  // x10: object's map.
  // x11: object's type.
  __ B(lt, &null);

  // Return 'Function' for JSFunction objects.
  __ Cmp(x11, FIRST_FUNCTION_TYPE);
  STATIC_ASSERT(LAST_FUNCTION_TYPE == LAST_TYPE);
  __ B(hs, &function);

  // Check if the constructor in the map is a JS function.
  Register instance_type = x14;
  __ GetMapConstructor(x12, x10, x13, instance_type);
  __ Cmp(instance_type, JS_FUNCTION_TYPE);
  __ B(ne, &non_function_constructor);

  // x12 now contains the constructor function. Grab the
  // instance class name from there.
  __ Ldr(x13, FieldMemOperand(x12, JSFunction::kSharedFunctionInfoOffset));
  __ Ldr(x0,
         FieldMemOperand(x13, SharedFunctionInfo::kInstanceClassNameOffset));
  __ B(&done);

  // Functions have class 'Function'.
  __ Bind(&function);
  __ LoadRoot(x0, Heap::kFunction_stringRootIndex);
  __ B(&done);

  // Objects with a non-function constructor have class 'Object'.
  __ Bind(&non_function_constructor);
  __ LoadRoot(x0, Heap::kObject_stringRootIndex);
  __ B(&done);

  // Non-JS objects have class null.
  __ Bind(&null);
  __ LoadRoot(x0, Heap::kNullValueRootIndex);

  // All done.
  __ Bind(&done);

  context()->Plug(x0);
}


void FullCodeGenerator::EmitStringCharCodeAt(CallRuntime* expr) {
  ZoneList<Expression*>* args = expr->arguments();
  DCHECK(args->length() == 2);

  VisitForStackValue(args->at(0));
  VisitForAccumulatorValue(args->at(1));

  Register object = x1;
  Register index = x0;
  Register result = x3;

  PopOperand(object);

  Label need_conversion;
  Label index_out_of_range;
  Label done;
  StringCharCodeAtGenerator generator(object, index, result, &need_conversion,
                                      &need_conversion, &index_out_of_range);
  generator.GenerateFast(masm_);
  __ B(&done);

  __ Bind(&index_out_of_range);
  // When the index is out of range, the spec requires us to return NaN.
  __ LoadRoot(result, Heap::kNanValueRootIndex);
  __ B(&done);

  __ Bind(&need_conversion);
  // Load the undefined value into the result register, which will
  // trigger conversion.
  __ LoadRoot(result, Heap::kUndefinedValueRootIndex);
  __ B(&done);

  NopRuntimeCallHelper call_helper;
  generator.GenerateSlow(masm_, NOT_PART_OF_IC_HANDLER, call_helper);

  __ Bind(&done);
  context()->Plug(result);
}


void FullCodeGenerator::EmitCall(CallRuntime* expr) {
  ASM_LOCATION("FullCodeGenerator::EmitCall");
  ZoneList<Expression*>* args = expr->arguments();
  DCHECK_LE(2, args->length());
  // Push target, receiver and arguments onto the stack.
  for (Expression* const arg : *args) {
    VisitForStackValue(arg);
  }
  PrepareForBailoutForId(expr->CallId(), BailoutState::NO_REGISTERS);
  // Move target to x1.
  int const argc = args->length() - 2;
  __ Peek(x1, (argc + 1) * kXRegSize);
  // Call the target.
  __ Mov(x0, argc);
  __ Call(isolate()->builtins()->Call(), RelocInfo::CODE_TARGET);
  OperandStackDepthDecrement(argc + 1);
  RestoreContext();
  // Discard the function left on TOS.
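  // DropAndPlug(1, x0) pops the one leftover slot (the target pushed above)
  // from the operand stack and then plugs x0, the call result, into the
  // expression context; a bare Plug(x0) would leave the operand stack one
  // slot too deep.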
  context()->DropAndPlug(1, x0);
}

void FullCodeGenerator::EmitGetSuperConstructor(CallRuntime* expr) {
  ZoneList<Expression*>* args = expr->arguments();
  DCHECK_EQ(1, args->length());
  VisitForAccumulatorValue(args->at(0));
  __ AssertFunction(x0);
  __ Ldr(x0, FieldMemOperand(x0, HeapObject::kMapOffset));
  __ Ldr(x0, FieldMemOperand(x0, Map::kPrototypeOffset));
  context()->Plug(x0);
}

void FullCodeGenerator::EmitDebugIsActive(CallRuntime* expr) {
  DCHECK(expr->arguments()->length() == 0);
  ExternalReference debug_is_active =
      ExternalReference::debug_is_active_address(isolate());
  __ Mov(x10, debug_is_active);
  __ Ldrb(x0, MemOperand(x10));
  __ SmiTag(x0);
  context()->Plug(x0);
}


void FullCodeGenerator::EmitCreateIterResultObject(CallRuntime* expr) {
  ZoneList<Expression*>* args = expr->arguments();
  DCHECK_EQ(2, args->length());
  VisitForStackValue(args->at(0));
  VisitForStackValue(args->at(1));

  Label runtime, done;

  Register result = x0;
  __ Allocate(JSIteratorResult::kSize, result, x10, x11, &runtime,
              NO_ALLOCATION_FLAGS);
  Register map_reg = x1;
  Register result_value = x2;
  Register boolean_done = x3;
  Register empty_fixed_array = x4;
  Register untagged_result = x5;
  __ LoadNativeContextSlot(Context::ITERATOR_RESULT_MAP_INDEX, map_reg);
  __ Pop(boolean_done);
  __ Pop(result_value);
  __ LoadRoot(empty_fixed_array, Heap::kEmptyFixedArrayRootIndex);
  STATIC_ASSERT(JSObject::kPropertiesOffset + kPointerSize ==
                JSObject::kElementsOffset);
  STATIC_ASSERT(JSIteratorResult::kValueOffset + kPointerSize ==
                JSIteratorResult::kDoneOffset);
  __ ObjectUntag(untagged_result, result);
  __ Str(map_reg, MemOperand(untagged_result, HeapObject::kMapOffset));
  __ Stp(empty_fixed_array, empty_fixed_array,
         MemOperand(untagged_result, JSObject::kPropertiesOffset));
  __ Stp(result_value, boolean_done,
         MemOperand(untagged_result, JSIteratorResult::kValueOffset));
  STATIC_ASSERT(JSIteratorResult::kSize == 5 * kPointerSize);
  __ B(&done);

  __ Bind(&runtime);
  CallRuntimeWithOperands(Runtime::kCreateIterResultObject);

  __ Bind(&done);
  context()->Plug(x0);
}


void FullCodeGenerator::EmitLoadJSRuntimeFunction(CallRuntime* expr) {
  // Push function.
  __ LoadNativeContextSlot(expr->context_index(), x0);
  PushOperand(x0);

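  // JS runtime functions (the %-style intrinsics that are lowered to calls
  // through the native context) are invoked like ordinary calls with an
  // undefined receiver, so no real receiver object is needed here.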
  // Push undefined as the receiver.
  __ LoadRoot(x0, Heap::kUndefinedValueRootIndex);
  PushOperand(x0);
}


void FullCodeGenerator::EmitCallJSRuntimeFunction(CallRuntime* expr) {
  ZoneList<Expression*>* args = expr->arguments();
  int arg_count = args->length();

  SetCallPosition(expr);
  __ Peek(x1, (arg_count + 1) * kPointerSize);
  __ Mov(x0, arg_count);
  __ Call(isolate()->builtins()->Call(ConvertReceiverMode::kNullOrUndefined),
          RelocInfo::CODE_TARGET);
  OperandStackDepthDecrement(arg_count + 1);
  RestoreContext();
}


void FullCodeGenerator::VisitUnaryOperation(UnaryOperation* expr) {
  switch (expr->op()) {
    case Token::DELETE: {
      Comment cmnt(masm_, "[ UnaryOperation (DELETE)");
      Property* property = expr->expression()->AsProperty();
      VariableProxy* proxy = expr->expression()->AsVariableProxy();

      if (property != NULL) {
        VisitForStackValue(property->obj());
        VisitForStackValue(property->key());
        CallRuntimeWithOperands(is_strict(language_mode())
                                    ? Runtime::kDeleteProperty_Strict
                                    : Runtime::kDeleteProperty_Sloppy);
        context()->Plug(x0);
      } else if (proxy != NULL) {
        Variable* var = proxy->var();
        // Delete of an unqualified identifier is disallowed in strict mode
        // but "delete this" is allowed.
        bool is_this = var->is_this();
        DCHECK(is_sloppy(language_mode()) || is_this);
        if (var->IsUnallocated()) {
          __ LoadGlobalObject(x12);
          __ Mov(x11, Operand(var->name()));
          __ Push(x12, x11);
          __ CallRuntime(Runtime::kDeleteProperty_Sloppy);
          context()->Plug(x0);
        } else {
          DCHECK(!var->IsLookupSlot());
          DCHECK(var->IsStackAllocated() || var->IsContextSlot());
          // Result of deleting non-global, non-dynamic variables is false.
          // The subexpression does not have side effects.
          context()->Plug(is_this);
        }
      } else {
        // Result of deleting non-property, non-variable reference is true.
        // The subexpression may have side effects.
        VisitForEffect(expr->expression());
        context()->Plug(true);
      }
      break;
    }
    case Token::VOID: {
      Comment cmnt(masm_, "[ UnaryOperation (VOID)");
      VisitForEffect(expr->expression());
      context()->Plug(Heap::kUndefinedValueRootIndex);
      break;
    }
    case Token::NOT: {
      Comment cmnt(masm_, "[ UnaryOperation (NOT)");
      if (context()->IsEffect()) {
        // Unary NOT has no side effects so it's only necessary to visit the
        // subexpression. Match the optimizing compiler by not branching.
        VisitForEffect(expr->expression());
      } else if (context()->IsTest()) {
        const TestContext* test = TestContext::cast(context());
        // The labels are swapped for the recursive call.
        VisitForControl(expr->expression(),
                        test->false_label(),
                        test->true_label(),
                        test->fall_through());
        context()->Plug(test->true_label(), test->false_label());
      } else {
        DCHECK(context()->IsAccumulatorValue() || context()->IsStackValue());
        // TODO(jbramley): This could be much more efficient using (for
        // example) the CSEL instruction.
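        // Roughly, a CSEL version would evaluate the subexpression into the
        // flags and then materialize the boolean in one go:
        //   __ LoadRoot(x10, Heap::kTrueValueRootIndex);
        //   __ LoadRoot(x11, Heap::kFalseValueRootIndex);
        //   __ Csel(result_register(), x10, x11, <inverted condition>);
        // (A sketch only; the per-label bailout bookkeeping below is why the
        // branchy form is kept.)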
        Label materialize_true, materialize_false, done;
        VisitForControl(expr->expression(),
                        &materialize_false,
                        &materialize_true,
                        &materialize_true);
        if (!context()->IsAccumulatorValue()) OperandStackDepthIncrement(1);

        __ Bind(&materialize_true);
        PrepareForBailoutForId(expr->MaterializeTrueId(),
                               BailoutState::NO_REGISTERS);
        __ LoadRoot(result_register(), Heap::kTrueValueRootIndex);
        __ B(&done);

        __ Bind(&materialize_false);
        PrepareForBailoutForId(expr->MaterializeFalseId(),
                               BailoutState::NO_REGISTERS);
        __ LoadRoot(result_register(), Heap::kFalseValueRootIndex);
        __ B(&done);

        __ Bind(&done);
        if (context()->IsStackValue()) {
          __ Push(result_register());
        }
      }
      break;
    }
    case Token::TYPEOF: {
      Comment cmnt(masm_, "[ UnaryOperation (TYPEOF)");
      {
        AccumulatorValueContext context(this);
        VisitForTypeofValue(expr->expression());
      }
      __ Mov(x3, x0);
      __ Call(isolate()->builtins()->Typeof(), RelocInfo::CODE_TARGET);
      context()->Plug(x0);
      break;
    }
    default:
      UNREACHABLE();
  }
}


void FullCodeGenerator::VisitCountOperation(CountOperation* expr) {
  DCHECK(expr->expression()->IsValidReferenceExpressionOrThis());

  Comment cmnt(masm_, "[ CountOperation");

  Property* prop = expr->expression()->AsProperty();
  LhsKind assign_type = Property::GetAssignType(prop);

  // Evaluate expression and get value.
  if (assign_type == VARIABLE) {
    DCHECK(expr->expression()->AsVariableProxy()->var() != NULL);
    AccumulatorValueContext context(this);
    EmitVariableLoad(expr->expression()->AsVariableProxy());
  } else {
    // Reserve space for result of postfix operation.
    if (expr->is_postfix() && !context()->IsEffect()) {
      PushOperand(xzr);
    }
    switch (assign_type) {
      case NAMED_PROPERTY: {
        // Put the object both on the stack and in the register.
        VisitForStackValue(prop->obj());
        __ Peek(LoadDescriptor::ReceiverRegister(), 0);
        EmitNamedPropertyLoad(prop);
        break;
      }

      case KEYED_PROPERTY: {
        VisitForStackValue(prop->obj());
        VisitForStackValue(prop->key());
        __ Peek(LoadDescriptor::ReceiverRegister(), 1 * kPointerSize);
        __ Peek(LoadDescriptor::NameRegister(), 0);
        EmitKeyedPropertyLoad(prop);
        break;
      }

      case NAMED_SUPER_PROPERTY:
      case KEYED_SUPER_PROPERTY:
      case VARIABLE:
        UNREACHABLE();
    }
  }

  // We need a second deoptimization point after loading the value
  // in case evaluating the property load may have a side effect.
  if (assign_type == VARIABLE) {
    PrepareForBailout(expr->expression(), BailoutState::TOS_REGISTER);
  } else {
    PrepareForBailoutForId(prop->LoadId(), BailoutState::TOS_REGISTER);
  }

  // Inline smi case if we are in a loop.
  Label stub_call, done;
  JumpPatchSite patch_site(masm_);

  int count_value = expr->op() == Token::INC ? 1 : -1;
  if (ShouldInlineSmiCase(expr->op())) {
    Label slow;
    patch_site.EmitJumpIfNotSmi(x0, &slow);

    // Save result for postfix expressions.
    if (expr->is_postfix()) {
      if (!context()->IsEffect()) {
        // Save the result on the stack. If we have a named or keyed property
        // we store the result under the receiver that is currently on top of
        // the stack.
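        // The xzr slot reserved earlier exists only for the property cases,
        // so the operand stack here is (top of stack last):
        //   NAMED_PROPERTY:  [.., reserved, receiver]
        //   KEYED_PROPERTY:  [.., reserved, receiver, key]
        // The pokes below write the old value into the reserved slot, while
        // the VARIABLE case simply pushes it.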
        switch (assign_type) {
          case VARIABLE:
            __ Push(x0);
            break;
          case NAMED_PROPERTY:
            __ Poke(x0, kPointerSize);
            break;
          case KEYED_PROPERTY:
            __ Poke(x0, kPointerSize * 2);
            break;
          case NAMED_SUPER_PROPERTY:
          case KEYED_SUPER_PROPERTY:
            UNREACHABLE();
            break;
        }
      }
    }

    __ Adds(x0, x0, Smi::FromInt(count_value));
    __ B(vc, &done);
    // Call stub. Undo operation first.
    __ Sub(x0, x0, Smi::FromInt(count_value));
    __ B(&stub_call);
    __ Bind(&slow);
  }

  // Convert old value into a number.
  __ Call(isolate()->builtins()->ToNumber(), RelocInfo::CODE_TARGET);
  RestoreContext();
  PrepareForBailoutForId(expr->ToNumberId(), BailoutState::TOS_REGISTER);

  // Save result for postfix expressions.
  if (expr->is_postfix()) {
    if (!context()->IsEffect()) {
      // Save the result on the stack. If we have a named or keyed property
      // we store the result under the receiver that is currently on top
      // of the stack.
      switch (assign_type) {
        case VARIABLE:
          PushOperand(x0);
          break;
        case NAMED_PROPERTY:
          __ Poke(x0, kXRegSize);
          break;
        case KEYED_PROPERTY:
          __ Poke(x0, 2 * kXRegSize);
          break;
        case NAMED_SUPER_PROPERTY:
        case KEYED_SUPER_PROPERTY:
          UNREACHABLE();
          break;
      }
    }
  }

  __ Bind(&stub_call);
  __ Mov(x1, x0);
  __ Mov(x0, Smi::FromInt(count_value));

  SetExpressionPosition(expr);

  {
    Assembler::BlockPoolsScope scope(masm_);
    Handle<Code> code = CodeFactory::BinaryOpIC(isolate(), Token::ADD).code();
    CallIC(code, expr->CountBinOpFeedbackId());
    patch_site.EmitPatchInfo();
  }
  __ Bind(&done);

  // Store the value returned in x0.
  switch (assign_type) {
    case VARIABLE: {
      VariableProxy* proxy = expr->expression()->AsVariableProxy();
      if (expr->is_postfix()) {
        { EffectContext context(this);
          EmitVariableAssignment(proxy->var(), Token::ASSIGN, expr->CountSlot(),
                                 proxy->hole_check_mode());
          PrepareForBailoutForId(expr->AssignmentId(),
                                 BailoutState::TOS_REGISTER);
          context.Plug(x0);
        }
        // For all contexts except EffectContext we have the result on
        // top of the stack.
        if (!context()->IsEffect()) {
          context()->PlugTOS();
        }
      } else {
        EmitVariableAssignment(proxy->var(), Token::ASSIGN, expr->CountSlot(),
                               proxy->hole_check_mode());
        PrepareForBailoutForId(expr->AssignmentId(),
                               BailoutState::TOS_REGISTER);
        context()->Plug(x0);
      }
      break;
    }
    case NAMED_PROPERTY: {
      PopOperand(StoreDescriptor::ReceiverRegister());
      CallStoreIC(expr->CountSlot(), prop->key()->AsLiteral()->value());
      PrepareForBailoutForId(expr->AssignmentId(), BailoutState::TOS_REGISTER);
      if (expr->is_postfix()) {
        if (!context()->IsEffect()) {
          context()->PlugTOS();
        }
      } else {
        context()->Plug(x0);
      }
      break;
    }
    case KEYED_PROPERTY: {
      PopOperand(StoreDescriptor::NameRegister());
      PopOperand(StoreDescriptor::ReceiverRegister());
      CallKeyedStoreIC(expr->CountSlot());
      PrepareForBailoutForId(expr->AssignmentId(), BailoutState::TOS_REGISTER);
      if (expr->is_postfix()) {
        if (!context()->IsEffect()) {
          context()->PlugTOS();
        }
      } else {
        context()->Plug(x0);
      }
      break;
    }
    case NAMED_SUPER_PROPERTY:
    case KEYED_SUPER_PROPERTY:
      UNREACHABLE();
      break;
  }
}


void FullCodeGenerator::EmitLiteralCompareTypeof(Expression* expr,
                                                 Expression* sub_expr,
                                                 Handle<String> check) {
  ASM_LOCATION("FullCodeGenerator::EmitLiteralCompareTypeof");
  Comment cmnt(masm_, "[ EmitLiteralCompareTypeof");
  Label materialize_true, materialize_false;
  Label* if_true = NULL;
  Label* if_false = NULL;
  Label* fall_through = NULL;
  context()->PrepareTest(&materialize_true, &materialize_false,
                         &if_true, &if_false, &fall_through);

  { AccumulatorValueContext context(this);
    VisitForTypeofValue(sub_expr);
  }
  PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);

  Factory* factory = isolate()->factory();
  if (String::Equals(check, factory->number_string())) {
    ASM_LOCATION("FullCodeGenerator::EmitLiteralCompareTypeof number_string");
    __ JumpIfSmi(x0, if_true);
    __ Ldr(x0, FieldMemOperand(x0, HeapObject::kMapOffset));
    __ CompareRoot(x0, Heap::kHeapNumberMapRootIndex);
    Split(eq, if_true, if_false, fall_through);
  } else if (String::Equals(check, factory->string_string())) {
    ASM_LOCATION("FullCodeGenerator::EmitLiteralCompareTypeof string_string");
    __ JumpIfSmi(x0, if_false);
    __ CompareObjectType(x0, x0, x1, FIRST_NONSTRING_TYPE);
    Split(lt, if_true, if_false, fall_through);
  } else if (String::Equals(check, factory->symbol_string())) {
    ASM_LOCATION("FullCodeGenerator::EmitLiteralCompareTypeof symbol_string");
    __ JumpIfSmi(x0, if_false);
    __ CompareObjectType(x0, x0, x1, SYMBOL_TYPE);
    Split(eq, if_true, if_false, fall_through);
  } else if (String::Equals(check, factory->boolean_string())) {
    ASM_LOCATION("FullCodeGenerator::EmitLiteralCompareTypeof boolean_string");
    __ JumpIfRoot(x0, Heap::kTrueValueRootIndex, if_true);
    __ CompareRoot(x0, Heap::kFalseValueRootIndex);
    Split(eq, if_true, if_false, fall_through);
  } else if (String::Equals(check, factory->undefined_string())) {
    ASM_LOCATION(
        "FullCodeGenerator::EmitLiteralCompareTypeof undefined_string");
    __ JumpIfRoot(x0, Heap::kNullValueRootIndex, if_false);
    __ JumpIfSmi(x0, if_false);
    // Check for undetectable objects => true.
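    // "Undetectable" objects are marked by a map bit that makes typeof
    // report "undefined" (document.all in browser embedders being the
    // canonical example), so the bit test below sends them to if_true.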
    __ Ldr(x0, FieldMemOperand(x0, HeapObject::kMapOffset));
    __ Ldrb(x1, FieldMemOperand(x0, Map::kBitFieldOffset));
    __ TestAndSplit(x1, 1 << Map::kIsUndetectable, if_false, if_true,
                    fall_through);
  } else if (String::Equals(check, factory->function_string())) {
    ASM_LOCATION("FullCodeGenerator::EmitLiteralCompareTypeof function_string");
    __ JumpIfSmi(x0, if_false);
    __ Ldr(x0, FieldMemOperand(x0, HeapObject::kMapOffset));
    __ Ldrb(x1, FieldMemOperand(x0, Map::kBitFieldOffset));
    __ And(x1, x1, (1 << Map::kIsCallable) | (1 << Map::kIsUndetectable));
    __ CompareAndSplit(x1, Operand(1 << Map::kIsCallable), eq, if_true,
                       if_false, fall_through);
  } else if (String::Equals(check, factory->object_string())) {
    ASM_LOCATION("FullCodeGenerator::EmitLiteralCompareTypeof object_string");
    __ JumpIfSmi(x0, if_false);
    __ JumpIfRoot(x0, Heap::kNullValueRootIndex, if_true);
    STATIC_ASSERT(LAST_JS_RECEIVER_TYPE == LAST_TYPE);
    __ JumpIfObjectType(x0, x10, x11, FIRST_JS_RECEIVER_TYPE, if_false, lt);
    // Check for callable or undetectable objects => false.
    __ Ldrb(x10, FieldMemOperand(x10, Map::kBitFieldOffset));
    __ TestAndSplit(x10, (1 << Map::kIsCallable) | (1 << Map::kIsUndetectable),
                    if_true, if_false, fall_through);
  } else {
    ASM_LOCATION("FullCodeGenerator::EmitLiteralCompareTypeof other");
    if (if_false != fall_through) __ B(if_false);
  }
  context()->Plug(if_true, if_false);
}


void FullCodeGenerator::VisitCompareOperation(CompareOperation* expr) {
  Comment cmnt(masm_, "[ CompareOperation");

  // Try to generate an optimized comparison with a literal value.
  // TODO(jbramley): This only checks common values like NaN or undefined.
  // Should it also handle ARM64 immediate operands?
  if (TryLiteralCompare(expr)) {
    return;
  }

  // Assign labels according to context()->PrepareTest.
  Label materialize_true;
  Label materialize_false;
  Label* if_true = NULL;
  Label* if_false = NULL;
  Label* fall_through = NULL;
  context()->PrepareTest(&materialize_true, &materialize_false,
                         &if_true, &if_false, &fall_through);

  Token::Value op = expr->op();
  VisitForStackValue(expr->left());
  switch (op) {
    case Token::IN:
      VisitForStackValue(expr->right());
      SetExpressionPosition(expr);
      EmitHasProperty();
      PrepareForBailoutBeforeSplit(expr, false, NULL, NULL);
      __ CompareRoot(x0, Heap::kTrueValueRootIndex);
      Split(eq, if_true, if_false, fall_through);
      break;

    case Token::INSTANCEOF: {
      VisitForAccumulatorValue(expr->right());
      SetExpressionPosition(expr);
      PopOperand(x1);
      __ Call(isolate()->builtins()->InstanceOf(), RelocInfo::CODE_TARGET);
      RestoreContext();
      PrepareForBailoutBeforeSplit(expr, false, NULL, NULL);
      __ CompareRoot(x0, Heap::kTrueValueRootIndex);
      Split(eq, if_true, if_false, fall_through);
      break;
    }

    default: {
      VisitForAccumulatorValue(expr->right());
      SetExpressionPosition(expr);
      Condition cond = CompareIC::ComputeCondition(op);

      // Pop the stack value.
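      // The left operand was spilled to the operand stack by
      // VisitForStackValue above; pop it so the comparison sees left in x1
      // and right in x0. The patch site that follows records an inlined smi
      // check which the CompareIC can later patch to take the fast Cmp path.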
      PopOperand(x1);

      JumpPatchSite patch_site(masm_);
      if (ShouldInlineSmiCase(op)) {
        Label slow_case;
        patch_site.EmitJumpIfEitherNotSmi(x0, x1, &slow_case);
        __ Cmp(x1, x0);
        Split(cond, if_true, if_false, NULL);
        __ Bind(&slow_case);
      }

      Handle<Code> ic = CodeFactory::CompareIC(isolate(), op).code();
      CallIC(ic, expr->CompareOperationFeedbackId());
      patch_site.EmitPatchInfo();
      PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
      __ CompareAndSplit(x0, 0, cond, if_true, if_false, fall_through);
    }
  }

  // Convert the result of the comparison into one expected for this
  // expression's context.
  context()->Plug(if_true, if_false);
}


void FullCodeGenerator::EmitLiteralCompareNil(CompareOperation* expr,
                                              Expression* sub_expr,
                                              NilValue nil) {
  ASM_LOCATION("FullCodeGenerator::EmitLiteralCompareNil");
  Label materialize_true, materialize_false;
  Label* if_true = NULL;
  Label* if_false = NULL;
  Label* fall_through = NULL;
  context()->PrepareTest(&materialize_true, &materialize_false,
                         &if_true, &if_false, &fall_through);

  VisitForAccumulatorValue(sub_expr);
  PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);

  if (expr->op() == Token::EQ_STRICT) {
    Heap::RootListIndex nil_value = nil == kNullValue ?
        Heap::kNullValueRootIndex :
        Heap::kUndefinedValueRootIndex;
    __ CompareRoot(x0, nil_value);
    Split(eq, if_true, if_false, fall_through);
  } else {
    __ JumpIfSmi(x0, if_false);
    __ Ldr(x0, FieldMemOperand(x0, HeapObject::kMapOffset));
    __ Ldrb(x1, FieldMemOperand(x0, Map::kBitFieldOffset));
    __ TestAndSplit(x1, 1 << Map::kIsUndetectable, if_false, if_true,
                    fall_through);
  }

  context()->Plug(if_true, if_false);
}


void FullCodeGenerator::VisitYield(Yield* expr) {
  // Resumable functions are not supported.
  UNREACHABLE();
}

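// The operand-stack helpers below wrap raw pushes and pops so that
// operand_stack_depth_, the compile-time model of the operand stack, stays
// in sync; EmitOperandStackDepthCheck asserts the model against the actual
// fp-to-jssp distance in debug builds.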
void FullCodeGenerator::PushOperands(Register reg1, Register reg2) {
  OperandStackDepthIncrement(2);
  __ Push(reg1, reg2);
}

void FullCodeGenerator::PushOperands(Register reg1, Register reg2,
                                     Register reg3) {
  OperandStackDepthIncrement(3);
  __ Push(reg1, reg2, reg3);
}

void FullCodeGenerator::PopOperands(Register reg1, Register reg2) {
  OperandStackDepthDecrement(2);
  __ Pop(reg1, reg2);
}

void FullCodeGenerator::EmitOperandStackDepthCheck() {
  if (FLAG_debug_code) {
    int expected_diff = StandardFrameConstants::kFixedFrameSizeFromFp +
                        operand_stack_depth_ * kPointerSize;
    __ Sub(x0, fp, jssp);
    __ Cmp(x0, Operand(expected_diff));
    __ Assert(eq, kUnexpectedStackDepth);
  }
}

void FullCodeGenerator::EmitCreateIteratorResult(bool done) {
  Label allocate, done_allocate;

  // Allocate and populate an object with this form: { value: VAL, done: DONE }

  Register result = x0;
  __ Allocate(JSIteratorResult::kSize, result, x10, x11, &allocate,
              NO_ALLOCATION_FLAGS);
  __ B(&done_allocate);

  __ Bind(&allocate);
  __ Push(Smi::FromInt(JSIteratorResult::kSize));
  __ CallRuntime(Runtime::kAllocateInNewSpace);

  __ Bind(&done_allocate);
  Register map_reg = x1;
  Register result_value = x2;
  Register boolean_done = x3;
  Register empty_fixed_array = x4;
  Register untagged_result = x5;
  __ LoadNativeContextSlot(Context::ITERATOR_RESULT_MAP_INDEX, map_reg);
  PopOperand(result_value);
  __ LoadRoot(boolean_done,
              done ? Heap::kTrueValueRootIndex : Heap::kFalseValueRootIndex);
  __ LoadRoot(empty_fixed_array, Heap::kEmptyFixedArrayRootIndex);
  STATIC_ASSERT(JSObject::kPropertiesOffset + kPointerSize ==
                JSObject::kElementsOffset);
  STATIC_ASSERT(JSIteratorResult::kValueOffset + kPointerSize ==
                JSIteratorResult::kDoneOffset);
  __ ObjectUntag(untagged_result, result);
  __ Str(map_reg, MemOperand(untagged_result, HeapObject::kMapOffset));
  __ Stp(empty_fixed_array, empty_fixed_array,
         MemOperand(untagged_result, JSObject::kPropertiesOffset));
  __ Stp(result_value, boolean_done,
         MemOperand(untagged_result, JSIteratorResult::kValueOffset));
  STATIC_ASSERT(JSIteratorResult::kSize == 5 * kPointerSize);
}


// TODO(all): I don't like this method.
// It seems to me that in too many places x0 is used in place of this.
// Also, this function is not suitable for all places where x0 should be
// abstracted (eg. when used as an argument). But some places assume that the
// first argument register is x0, and use this function instead.
// Considering that most of the register allocation is hard-coded in the
// FullCodeGen, that it is unlikely we will need to change it extensively, and
// that abstracting the allocation through functions would not yield any
// performance benefit, I think the existence of this function is debatable.
Register FullCodeGenerator::result_register() {
  return x0;
}


Register FullCodeGenerator::context_register() {
  return cp;
}

void FullCodeGenerator::LoadFromFrameField(int frame_offset, Register value) {
  DCHECK(POINTER_SIZE_ALIGN(frame_offset) == frame_offset);
  __ Ldr(value, MemOperand(fp, frame_offset));
}

void FullCodeGenerator::StoreToFrameField(int frame_offset, Register value) {
  DCHECK(POINTER_SIZE_ALIGN(frame_offset) == frame_offset);
  __ Str(value, MemOperand(fp, frame_offset));
}


void FullCodeGenerator::LoadContextField(Register dst, int context_index) {
  __ Ldr(dst, ContextMemOperand(cp, context_index));
}


void FullCodeGenerator::PushFunctionArgumentForContextAllocation() {
  DeclarationScope* closure_scope = scope()->GetClosureScope();
  if (closure_scope->is_script_scope() ||
      closure_scope->is_module_scope()) {
    // Contexts nested in the native context have a canonical empty function
    // as their closure, not the anonymous closure containing the global
    // code.
    DCHECK(kSmiTag == 0);
    __ LoadNativeContextSlot(Context::CLOSURE_INDEX, x10);
  } else if (closure_scope->is_eval_scope()) {
    // Contexts created by a call to eval have the same closure as the
    // context calling eval, not the anonymous closure containing the eval
    // code. Fetch it from the context.
    __ Ldr(x10, ContextMemOperand(cp, Context::CLOSURE_INDEX));
  } else {
    DCHECK(closure_scope->is_function_scope());
    __ Ldr(x10, MemOperand(fp, JavaScriptFrameConstants::kFunctionOffset));
  }
  PushOperand(x10);
}


#undef __


void BackEdgeTable::PatchAt(Code* unoptimized_code,
                            Address pc,
                            BackEdgeState target_state,
                            Code* replacement_code) {
  // Turn the jump into a nop.
  Address branch_address = pc - 3 * kInstructionSize;
  Isolate* isolate = unoptimized_code->GetIsolate();
  PatchingAssembler patcher(isolate, branch_address, 1);

  DCHECK(Instruction::Cast(branch_address)
             ->IsNop(Assembler::INTERRUPT_CODE_NOP) ||
         (Instruction::Cast(branch_address)->IsCondBranchImm() &&
          Instruction::Cast(branch_address)->ImmPCOffset() ==
              6 * kInstructionSize));

  switch (target_state) {
    case INTERRUPT:
      //  <decrement profiling counter>
      //  .. .. .. ..  b.pl ok
      //  .. .. .. ..  ldr x16, pc+<interrupt stub address>
      //  .. .. .. ..  blr x16
      //  ... more instructions.
      //  ok-label
      // Jump offset is 6 instructions.
      patcher.b(6, pl);
      break;
    case ON_STACK_REPLACEMENT:
      //  <decrement profiling counter>
      //  .. .. .. ..  mov x0, x0 (NOP)
      //  .. .. .. ..  ldr x16, pc+<on-stack replacement address>
      //  .. .. .. ..  blr x16
      patcher.nop(Assembler::INTERRUPT_CODE_NOP);
      break;
  }

  // Replace the call address.
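  // The ldr in the sequences above is a PC-relative literal load: it reads
  // the stub entry point from a 64-bit literal slot embedded in the code.
  // Patching the call target therefore rewrites that data slot rather than
  // any instruction, which is why the sequence length never changes.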
  Instruction* load = Instruction::Cast(pc)->preceding(2);
  Address interrupt_address_pointer =
      reinterpret_cast<Address>(load) + load->ImmPCOffset();
  DCHECK((Memory::uint64_at(interrupt_address_pointer) ==
          reinterpret_cast<uint64_t>(
              isolate->builtins()->OnStackReplacement()->entry())) ||
         (Memory::uint64_at(interrupt_address_pointer) ==
          reinterpret_cast<uint64_t>(
              isolate->builtins()->InterruptCheck()->entry())));
  Memory::uint64_at(interrupt_address_pointer) =
      reinterpret_cast<uint64_t>(replacement_code->entry());

  unoptimized_code->GetHeap()->incremental_marking()->RecordCodeTargetPatch(
      unoptimized_code, reinterpret_cast<Address>(load), replacement_code);
}


BackEdgeTable::BackEdgeState BackEdgeTable::GetBackEdgeState(
    Isolate* isolate,
    Code* unoptimized_code,
    Address pc) {
  // TODO(jbramley): There should be some extra assertions here (as in the ARM
  // back-end), but this function is gone in bleeding_edge so it might not
  // matter anyway.
  Instruction* jump_or_nop = Instruction::Cast(pc)->preceding(3);

  if (jump_or_nop->IsNop(Assembler::INTERRUPT_CODE_NOP)) {
    Instruction* load = Instruction::Cast(pc)->preceding(2);
    uint64_t entry = Memory::uint64_at(reinterpret_cast<Address>(load) +
                                       load->ImmPCOffset());
    if (entry == reinterpret_cast<uint64_t>(
                     isolate->builtins()->OnStackReplacement()->entry())) {
      return ON_STACK_REPLACEMENT;
    } else {
      UNREACHABLE();
    }
  }

  return INTERRUPT;
}


}  // namespace internal
}  // namespace v8

#endif  // V8_TARGET_ARCH_ARM64