codegen-x64.cc revision 3fb3ca8c7ca439d408449a395897395c0faae8d1
// Copyright 2011 the V8 project authors. All rights reserved.
// Redistribution and use in source and binary forms, with or without
// modification, are permitted provided that the following conditions are
// met:
//
//     * Redistributions of source code must retain the above copyright
//       notice, this list of conditions and the following disclaimer.
//     * Redistributions in binary form must reproduce the above
//       copyright notice, this list of conditions and the following
//       disclaimer in the documentation and/or other materials provided
//       with the distribution.
//     * Neither the name of Google Inc. nor the names of its
//       contributors may be used to endorse or promote products derived
//       from this software without specific prior written permission.
//
// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.

#include "v8.h"

#if defined(V8_TARGET_ARCH_X64)

#include "codegen.h"

namespace v8 {
namespace internal {

// -------------------------------------------------------------------------
// Platform-specific RuntimeCallHelper functions.
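// StubRuntimeCallHelper brackets a call into the runtime made from generated
// stub code: BeforeCall enters an internal frame and AfterCall leaves it
// again, so a proper frame exists for the duration of the call.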

void StubRuntimeCallHelper::BeforeCall(MacroAssembler* masm) const {
  masm->EnterInternalFrame();
}


void StubRuntimeCallHelper::AfterCall(MacroAssembler* masm) const {
  masm->LeaveInternalFrame();
}


#define __ masm.

#ifdef _WIN64
typedef double (*ModuloFunction)(double, double);
// Define custom fmod implementation.
ModuloFunction CreateModuloFunction() {
  size_t actual_size;
  byte* buffer = static_cast<byte*>(OS::Allocate(Assembler::kMinimalBufferSize,
                                                 &actual_size,
                                                 true));
  CHECK(buffer);
  Assembler masm(NULL, buffer, static_cast<int>(actual_size));
  // Generated code is put into a fixed, unmovable buffer, and not into
  // the V8 heap. We can't, and don't, refer to any relocatable addresses
  // (e.g. the JavaScript NaN object).

  // Windows 64 ABI passes double arguments in xmm0, xmm1 and
  // returns result in xmm0.
  // Argument backing space is allocated on the stack above
  // the return address.

  // Compute x mod y.
  // Load y and x (use argument backing store as temporary storage).
  __ movsd(Operand(rsp, kPointerSize * 2), xmm1);
  __ movsd(Operand(rsp, kPointerSize), xmm0);
  __ fld_d(Operand(rsp, kPointerSize * 2));
  __ fld_d(Operand(rsp, kPointerSize));

  // Clear exception flags before operation.
  {
    Label no_exceptions;
    __ fwait();
    __ fnstsw_ax();
    // Clear if Invalid Operand or Zero Division exceptions are set.
    __ testb(rax, Immediate(5));
    __ j(zero, &no_exceptions);
    __ fnclex();
    __ bind(&no_exceptions);
  }

  // Compute st(0) % st(1)
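  // fprem produces only a partial remainder when the exponents of the two
  // operands differ by more than 63, and signals this by setting the C2
  // status flag, so the operation is repeated until C2 is clear.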
  {
    Label partial_remainder_loop;
    __ bind(&partial_remainder_loop);
    __ fprem();
    __ fwait();
    __ fnstsw_ax();
    __ testl(rax, Immediate(0x400 /* C2 */));
    // If C2 is set, computation only has partial result. Loop to
    // continue computation.
    __ j(not_zero, &partial_remainder_loop);
  }

  Label valid_result;
  Label return_result;
  // If Invalid Operand or Zero Division exceptions are set,
  // return NaN.
  __ testb(rax, Immediate(5));
  __ j(zero, &valid_result);
  __ fstp(0);  // Drop result in st(0).
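  // 0x7ff8000000000000 is the bit pattern of the canonical quiet NaN.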
  int64_t kNaNValue = V8_INT64_C(0x7ff8000000000000);
  __ movq(rcx, kNaNValue, RelocInfo::NONE);
  __ movq(Operand(rsp, kPointerSize), rcx);
  __ movsd(xmm0, Operand(rsp, kPointerSize));
  __ jmp(&return_result);

  // If result is valid, return that.
  __ bind(&valid_result);
  __ fstp_d(Operand(rsp, kPointerSize));
  __ movsd(xmm0, Operand(rsp, kPointerSize));

  // Clean up FPU stack and exceptions and return xmm0.
  __ bind(&return_result);
  __ fstp(0);  // Unload y.

  Label clear_exceptions;
  __ testb(rax, Immediate(0x3f /* Any Exception */));
  __ j(not_zero, &clear_exceptions);
  __ ret(0);
  __ bind(&clear_exceptions);
  __ fnclex();
  __ ret(0);

  CodeDesc desc;
  masm.GetCode(&desc);
  OS::ProtectCode(buffer, actual_size);
  // Call the function from C++ through this pointer.
  return FUNCTION_CAST<ModuloFunction>(buffer);
}
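
// A minimal usage sketch (illustrative only, not part of the original file):
// the pointer returned by CreateModuloFunction is called like any C function
// taking two doubles, e.g.
//
//   ModuloFunction mod = CreateModuloFunction();
//   double remainder = mod(5.5, 2.0);  // 1.5, same result as fmod(5.5, 2.0)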

#endif


#undef __

} }  // namespace v8::internal

#endif  // V8_TARGET_ARCH_X64