Lines Matching defs:in

5  * you may not use this file except in compliance with the License.
10 * Unless required by applicable law or agreed to in writing, software
70 // Live registers will be restored in the catch block if caught.
189 // Live registers will be restored in the catch block if caught.
455 // barrier. The field `obj.field` in the object `obj` holding this
464 // reference (different from `ref`) in `obj.field`).
494 << "Unexpected instruction in read barrier marking slow path: "
511 // and output in R0):
532 // Should the reference in `ref_` be unpoisoned prior to marking it?
539 // and if needed, atomically updating the field `obj.field` in the
547 // another object reference (different from `ref`) in `obj.field`).
579 << "Unexpected instruction in read barrier marking and field updating slow path: "
602 // and output in R0):
619 // update the field in the holder (`*field_addr`).
623 // LOCK CMPXCHGL instruction in the compare-and-set (CAS)
636 // reference) to be in EAX. Save RAX beforehand, and move the
637 // expected value (stored in `temp1_`) into EAX.
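A minimal sketch (not ART's code) of the LOCK CMPXCHGL convention these lines describe: CMPXCHG implicitly compares against EAX, which is why RAX is saved and the expected value (`temp1_`) is moved into EAX first. In the GCC/Clang inline asm below, the "+a" constraint does that pinning:

#include <cstdint>

// Sketch: LOCK CMPXCHGL compares *addr with EAX; on equality it stores
// `desired` and sets ZF, otherwise it loads the current value into EAX.
// "=@ccz" (GCC 6+/Clang) captures ZF as the success result.
bool CompareAndSet32(uint32_t* addr, uint32_t expected, uint32_t desired) {
  bool success;
  asm volatile("lock cmpxchgl %3, %1"
               : "=@ccz"(success), "+m"(*addr), "+a"(expected)
               : "r"(desired)
               : "memory");
  return success;
}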
701 // Should the reference in `ref_` be unpoisoned prior to marking it?
751 << "Unexpected instruction in read barrier for heap reference slow path: "
764 // Compute real offset and store it in index_.
795 // The initial register stored in `index_` has already been
796 // saved in the call to art::SlowPathCode::SaveLiveRegisters
800 // Shifting the index value contained in `index_reg` by the
801 // scale factor (2) cannot overflow in practice, as the
812 // (as in the case of ArrayGet), as it is actually an offset
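A hedged sketch of the offset computation implied here, assuming 4-byte heap references as in the ArrayGet case: the element offset is data_offset + (index << 2), and with the runtime's cap on array sizes the shift by the scale factor (2) stays well inside 32 bits, which is why it "cannot overflow in practice":

#include <cstdint>

// Sketch: element offset for an array of 32-bit heap references.
inline uint32_t RefArrayElementOffset(uint32_t data_offset, uint32_t index) {
  return data_offset + (index << 2);  // TIMES_4 scale for 4-byte references
}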
905 << "Unexpected instruction in read barrier for GC root slow path: "
980 Location callee_method = temp; // For all kinds except kRecursive, callee will be in temp.
1036 // intrinsics may have put the receiver in a different register. In the intrinsics
1048 // However this is not required in practice, as this is an
1052 // concurrent copying collector may not in the future).
1310 // do this in HCurrentMethod, as the instruction might have been removed
1311 // in the SSA graph.
1860 // MaybeRecordNativeDebugInfo is already called implicitly in CodeGenerator::Compile.
1870 // Handle the long/FP comparisons made in instruction simplification.
2455 // However this is not required in practice, as this is an
2459 // concurrent copying collector may not in the future).
2510 Location in = locations->InAt(0);
2513 DCHECK(in.IsRegister());
2514 DCHECK(in.Equals(out));
2519 DCHECK(in.IsRegister());
2520 DCHECK(in.Equals(out));
2525 DCHECK(in.Equals(out));
2536 DCHECK(in.Equals(out));
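These repeated DCHECKs reflect x86-64's read-modify-write unary encodings; a self-contained illustration of why the input and output locations must coincide:

#include <cstdint>

// NEG takes a single operand that is both source and destination
// (e.g. `negl %eax`), so the generator requires out to reuse in,
// which is exactly what `in.Equals(out)` verifies.
inline int32_t Negate(int32_t v) {
  asm("negl %0" : "+r"(v));  // one operand: source and destination coincide
  return v;
}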
2687 Location in = locations->InAt(0);
2700 if (in.IsRegister()) {
2701 __ movzxb(out.AsRegister<CpuRegister>(), in.AsRegister<CpuRegister>());
2702 } else if (in.IsStackSlot() || in.IsDoubleStackSlot()) {
2704 Address(CpuRegister(RSP), in.GetStackIndex()));
2707 Immediate(static_cast<uint8_t>(Int64FromConstant(in.GetConstant()))));
2724 if (in.IsRegister()) {
2725 __ movsxb(out.AsRegister<CpuRegister>(), in.AsRegister<CpuRegister>());
2726 } else if (in.IsStackSlot() || in.IsDoubleStackSlot()) {
2728 Address(CpuRegister(RSP), in.GetStackIndex()));
2731 Immediate(static_cast<int8_t>(Int64FromConstant(in.GetConstant()))));
2747 if (in.IsRegister()) {
2748 __ movzxw(out.AsRegister<CpuRegister>(), in.AsRegister<CpuRegister>());
2749 } else if (in.IsStackSlot() || in.IsDoubleStackSlot()) {
2751 Address(CpuRegister(RSP), in.GetStackIndex()));
2754 Immediate(static_cast<uint16_t>(Int64FromConstant(in.GetConstant()))));
2769 if (in.IsRegister()) {
2770 __ movsxw(out.AsRegister<CpuRegister>(), in.AsRegister<CpuRegister>());
2771 } else if (in.IsStackSlot() || in.IsDoubleStackSlot()) {
2773 Address(CpuRegister(RSP), in.GetStackIndex()));
2776 Immediate(static_cast<int16_t>(Int64FromConstant(in.GetConstant()))));
2789 if (in.IsRegister()) {
2790 __ movl(out.AsRegister<CpuRegister>(), in.AsRegister<CpuRegister>());
2791 } else if (in.IsDoubleStackSlot()) {
2793 Address(CpuRegister(RSP), in.GetStackIndex()));
2795 DCHECK(in.IsConstant());
2796 DCHECK(in.GetConstant()->IsLongConstant());
2797 int64_t value = in.GetConstant()->AsLongConstant()->GetValue();
2803 XmmRegister input = in.AsFpuRegister<XmmRegister>();
2824 XmmRegister input = in.AsFpuRegister<XmmRegister>();
2859 DCHECK(in.IsRegister());
2860 __ movsxd(out.AsRegister<CpuRegister>(), in.AsRegister<CpuRegister>());
2864 XmmRegister input = in.AsFpuRegister<XmmRegister>();
2885 XmmRegister input = in.AsFpuRegister<XmmRegister>();
2919 if (in.IsRegister()) {
2920 __ cvtsi2ss(out.AsFpuRegister<XmmRegister>(), in.AsRegister<CpuRegister>(), false);
2921 } else if (in.IsConstant()) {
2922 int32_t v = in.GetConstant()->AsIntConstant()->GetValue();
2927 Address(CpuRegister(RSP), in.GetStackIndex()), false);
2932 if (in.IsRegister()) {
2933 __ cvtsi2ss(out.AsFpuRegister<XmmRegister>(), in.AsRegister<CpuRegister>(), true);
2934 } else if (in.IsConstant()) {
2935 int64_t v = in.GetConstant()->AsLongConstant()->GetValue();
2940 Address(CpuRegister(RSP), in.GetStackIndex()), true);
2945 if (in.IsFpuRegister()) {
2946 __ cvtsd2ss(out.AsFpuRegister<XmmRegister>(), in.AsFpuRegister<XmmRegister>());
2947 } else if (in.IsConstant()) {
2948 double v = in.GetConstant()->AsDoubleConstant()->GetValue();
2953 Address(CpuRegister(RSP), in.GetStackIndex()));
2971 if (in.IsRegister()) {
2972 __ cvtsi2sd(out.AsFpuRegister<XmmRegister>(), in.AsRegister<CpuRegister>(), false);
2973 } else if (in.IsConstant()) {
2974 int32_t v = in.GetConstant()->AsIntConstant()->GetValue();
2979 Address(CpuRegister(RSP), in.GetStackIndex()), false);
2984 if (in.IsRegister()) {
2985 __ cvtsi2sd(out.AsFpuRegister<XmmRegister>(), in.AsRegister<CpuRegister>(), true);
2986 } else if (in.IsConstant()) {
2987 int64_t v = in.GetConstant()->AsLongConstant()->GetValue();
2992 Address(CpuRegister(RSP), in.GetStackIndex()), true);
2997 if (in.IsFpuRegister()) {
2998 __ cvtss2sd(out.AsFpuRegister<XmmRegister>(), in.AsFpuRegister<XmmRegister>());
2999 } else if (in.IsConstant()) {
3000 float v = in.GetConstant()->AsFloatConstant()->GetValue();
3005 Address(CpuRegister(RSP), in.GetStackIndex()));
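The conversion cases above pick sign- or zero-extending moves for integral narrowing and the SSE cvt* family for floating point. A standalone sketch where each C++ cast compiles to the instruction the generator emits for the register-input case:

#include <cstdint>
#include <cstdio>

// Each cast below maps to an instruction named in the cases above.
int main() {
  int32_t i = 0x12345680;
  int8_t   b  = static_cast<int8_t>(i);    // movsxb: sign-extend the low byte
  uint8_t  ub = static_cast<uint8_t>(i);   // movzxb: zero-extend the low byte
  uint16_t c  = static_cast<uint16_t>(i);  // movzxw: zero-extend 16 bits (char)
  int16_t  s  = static_cast<int16_t>(i);   // movsxw: sign-extend 16 bits (short)
  int64_t  l  = static_cast<int64_t>(i);   // movsxd: sign-extend int to long
  float    f  = static_cast<float>(i);     // cvtsi2ss: int -> float
  double   d  = static_cast<double>(f);    // cvtss2sd: float -> double
  float    g  = static_cast<float>(d);     // cvtsd2ss: double -> float
  printf("%d %u %u %d %lld %f %f %f\n", b, ub, c, s, (long long)l, f, d, g);
  return 0;
}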
3034 // We can use a leaq or addq if the constant can fit in an immediate.
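A small sketch of the leaq alternative: unlike addq, leaq with an immediate displacement neither requires destination == source nor touches the flags:

#include <cstdint>

// dst = src + 42 in one instruction, flags untouched, src preserved.
inline int64_t AddConst(int64_t src) {
  int64_t dst;
  asm("leaq 42(%1), %0" : "=r"(dst) : "r"(src));
  return dst;
}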
3283 // The constant may have ended up in a register, so test explicitly to avoid
3299 // The constant may have ended up in a register, so test explicitly to avoid
3395 // Load the values to the FP stack in reverse order, using temporaries if needed.
3700 // We need to save the numerator while we tweak rax and rdx. As we are using imul in a way
3701 // which enforces results to be in RAX and RDX, things are simpler if we use RDX also as
3781 // Intel uses rdx:rax as the dividend and puts the remainder in rdx
3783 // We need to save the numerator while we tweak eax and edx. As we are using imul in a way
3784 // which enforces results to be in RAX and RDX, things are simpler if we use EAX also as
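A sketch (not ART code) of the division convention behind both comments: idiv takes RDX:RAX as the dividend and leaves the quotient in RAX and the remainder in RDX, so both registers are implicitly clobbered:

#include <cstdint>

// Faults on divisor == 0 and on INT64_MIN / -1, which generated
// code guards against separately.
int64_t Remainder(int64_t dividend, int64_t divisor) {
  int64_t quotient;   // RAX output; idiv always writes it
  int64_t remainder;  // RDX output
  asm("cqto\n\t"      // sign-extend RAX into RDX:RAX (the full dividend)
      "idivq %[d]"
      : "=a"(quotient), "=d"(remainder)
      : "a"(dividend), [d] "r"(divisor));
  return remainder;
}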
3888 // The shift count needs to be in CL.
3964 // The shift count needs to be in CL (unless it is a constant).
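A minimal illustration of the CL constraint: variable x86-64 shifts accept their count only in CL, and the hardware's 5-bit masking of 32-bit shift counts matches Java's shift semantics, so no extra masking is needed:

#include <cstdint>

// The "c" constraint pins `count` to RCX, making its low byte CL.
inline uint32_t ShiftLeft(uint32_t value, uint32_t count) {
  asm("shll %%cl, %0" : "+r"(value) : "c"(count));
  return value;
}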
4188 * For those cases, all we need to ensure is that there is a scheduling barrier in place.
4277 // Note that a potential implicit null check is handled in this
4321 // Potential implicit null checks, in the case of reference
4322 // fields, are handled in the previous switch statement.
4329 // Memory barriers, in the case of references, are also handled
4330 // in the previous switch statement.
4732 // Note that a potential implicit null check is handled in this
4781 // Potential implicit null checks, in the case of reference
4782 // arrays, are handled in the previous switch statement.
4897 // even in the case where a class object is in the from-space
4900 // false negative, in which case we would take the ArraySet
4911 // nor the object reference in `register_value->klass`, as
5042 // Mask out most significant bit in case the array is String's array of char.
5091 // Address the length field in the array.
5098 // the string compression flag) with the in-memory length and avoid the temporary.
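A sketch under the encoding these comments imply, with the compression flag stored in the most significant bit of the in-memory length word:

#include <cstdint>

// Masking out the flag bit recovers the character count.
inline int32_t GetStringLength(uint32_t count_field) {
  return static_cast<int32_t>(count_field & 0x7FFFFFFFu);  // drop the flag bit
}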
5163 // registers in full width (since the runtime only saves/restores lower part).
6186 // If the class reference currently in `temp` is null, jump to the slow path to throw the
6226 // If the class reference currently in `temp` is not null, jump
6481 // in the following move operation, as we will need it for the
6636 // inserted after the original load. However, in fast path based
6650 // Note: the original implementation in ReadBarrier::Barrier is
6852 // Is the value in range?
6856 // We are in the range of the table.
6857 // Load the address of the jump table in the constant area.
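A sketch (not ART code) of this packed-switch shape: subtracting the lower bound reduces the two-sided range test to a single unsigned comparison before indexing the jump table:

#include <cstdint>

void Dispatch(int32_t value, int32_t lower_bound, uint32_t num_entries,
              void (*const table[])(), void (*default_target)()) {
  uint32_t index =
      static_cast<uint32_t>(value) - static_cast<uint32_t>(lower_bound);
  if (index >= num_entries) {   // "Is the value in range?"
    default_target();
    return;
  }
  table[index]();               // indirect jump through the jump table
}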
6940 // Value won't fit in an int.
7014 // Patch in the right value.
7018 // Location in constant area that the fixup refers to.
7023 * Class to handle late fixup of offsets to a jump table that will be created in the
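A hypothetical sketch of that late-fixup pattern (the names are illustrative, not ART's fixup API): patch sites and constant-area offsets are recorded during emission and rewritten once the constant area's final position is known:

#include <cstdint>
#include <cstring>
#include <vector>

struct Fixup {
  size_t patch_site;       // offset in the code buffer to rewrite
  size_t constant_offset;  // location in the constant area the fixup refers to
};

void ApplyFixups(std::vector<uint8_t>& code,
                 const std::vector<Fixup>& fixups,
                 size_t constant_area_start) {
  for (const Fixup& f : fixups) {
    uint32_t value =
        static_cast<uint32_t>(constant_area_start + f.constant_offset);
    // "Patch in the right value" at each recorded site.
    std::memcpy(code.data() + f.patch_site, &value, sizeof(value));
  }
}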
7139 // Didn't fit in a register. Do it in pieces.
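A sketch of "doing it in pieces": x86-64 has no store-64-bit-immediate-to-memory encoding (movq to memory takes at most a sign-extended 32-bit immediate), so a constant that doesn't fit is written as two 32-bit halves:

#include <cstdint>
#include <cstring>

void StoreInt64Immediate(void* addr, int64_t value) {
  uint32_t low = static_cast<uint32_t>(value);
  uint32_t high = static_cast<uint32_t>(static_cast<uint64_t>(value) >> 32);
  std::memcpy(addr, &low, sizeof(low));                                // movl imm32 -> [addr]
  std::memcpy(static_cast<uint8_t*>(addr) + 4, &high, sizeof(high));   // movl imm32 -> [addr+4]
}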