// Copyright 2012 the V8 project authors. All rights reserved.
// Redistribution and use in source and binary forms, with or without
// modification, are permitted provided that the following conditions are
// met:
//
//     * Redistributions of source code must retain the above copyright
//       notice, this list of conditions and the following disclaimer.
//     * Redistributions in binary form must reproduce the above
//       copyright notice, this list of conditions and the following
//       disclaimer in the documentation and/or other materials provided
//       with the distribution.
//     * Neither the name of Google Inc. nor the names of its
//       contributors may be used to endorse or promote products derived
//       from this software without specific prior written permission.
//
// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.

#include "v8.h"

#if defined(V8_TARGET_ARCH_ARM)

#include "codegen.h"
#include "macro-assembler.h"

namespace v8 {
namespace internal {

#define __ ACCESS_MASM(masm)

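// On ARM, the transcendental functions and sqrt simply fall back to the C
// library implementations; no specialized native code is generated for them.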
UnaryMathFunction CreateTranscendentalFunction(TranscendentalCache::Type type) {
  switch (type) {
    case TranscendentalCache::SIN: return &sin;
    case TranscendentalCache::COS: return &cos;
    case TranscendentalCache::TAN: return &tan;
    case TranscendentalCache::LOG: return &log;
    default: UNIMPLEMENTED();
  }
  return NULL;
}


UnaryMathFunction CreateSqrtFunction() {
  return &sqrt;
}

// -------------------------------------------------------------------------
// Platform-specific RuntimeCallHelper functions.

void StubRuntimeCallHelper::BeforeCall(MacroAssembler* masm) const {
  masm->EnterFrame(StackFrame::INTERNAL);
  ASSERT(!masm->has_frame());
  masm->set_has_frame(true);
}


void StubRuntimeCallHelper::AfterCall(MacroAssembler* masm) const {
  masm->LeaveFrame(StackFrame::INTERNAL);
  ASSERT(masm->has_frame());
  masm->set_has_frame(false);
}


// -------------------------------------------------------------------------
// Code generators

void ElementsTransitionGenerator::GenerateSmiOnlyToObject(
    MacroAssembler* masm) {
  // ----------- S t a t e -------------
  //  -- r0    : value
  //  -- r1    : key
  //  -- r2    : receiver
  //  -- lr    : return address
  //  -- r3    : target map, scratch for subsequent call
  //  -- r4    : scratch (elements)
  // -----------------------------------
  // Set transitioned map.
  __ str(r3, FieldMemOperand(r2, HeapObject::kMapOffset));
  __ RecordWriteField(r2,
                      HeapObject::kMapOffset,
                      r3,
                      r9,
                      kLRHasNotBeenSaved,
                      kDontSaveFPRegs,
                      EMIT_REMEMBERED_SET,
                      OMIT_SMI_CHECK);
}


void ElementsTransitionGenerator::GenerateSmiOnlyToDouble(
    MacroAssembler* masm, Label* fail) {
  // ----------- S t a t e -------------
  //  -- r0    : value
  //  -- r1    : key
  //  -- r2    : receiver
  //  -- lr    : return address
  //  -- r3    : target map, scratch for subsequent call
  //  -- r4    : scratch (elements)
  // -----------------------------------
  Label loop, entry, convert_hole, gc_required, only_change_map, done;
  bool vfp3_supported = CpuFeatures::IsSupported(VFP3);

  // Check for empty arrays, which only require a map transition and no changes
  // to the backing store.
  __ ldr(r4, FieldMemOperand(r2, JSObject::kElementsOffset));
  __ CompareRoot(r4, Heap::kEmptyFixedArrayRootIndex);
  __ b(eq, &only_change_map);

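  // Save lr; it is reused below as a scratch register for the allocation size.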
  __ push(lr);
  __ ldr(r5, FieldMemOperand(r4, FixedArray::kLengthOffset));
  // r4: source FixedArray
  // r5: number of elements (smi-tagged)

  // Allocate new FixedDoubleArray.
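  // r5 holds the length as a smi (length * 2), so shifting it left by two more
  // bits gives length * 8, i.e. length * kDoubleSize bytes of element data.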
  __ mov(lr, Operand(FixedDoubleArray::kHeaderSize));
  __ add(lr, lr, Operand(r5, LSL, 2));
  __ AllocateInNewSpace(lr, r6, r7, r9, &gc_required, NO_ALLOCATION_FLAGS);
  // r6: destination FixedDoubleArray, not tagged as heap object
  // Set destination FixedDoubleArray's length and map.
  __ LoadRoot(r9, Heap::kFixedDoubleArrayMapRootIndex);
  __ str(r5, MemOperand(r6, FixedDoubleArray::kLengthOffset));
  __ str(r9, MemOperand(r6, HeapObject::kMapOffset));

  // Update receiver's map.
  __ str(r3, FieldMemOperand(r2, HeapObject::kMapOffset));
  __ RecordWriteField(r2,
                      HeapObject::kMapOffset,
                      r3,
                      r9,
                      kLRHasBeenSaved,
                      kDontSaveFPRegs,
                      OMIT_REMEMBERED_SET,
                      OMIT_SMI_CHECK);
  // Replace receiver's backing store with newly created FixedDoubleArray.
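  // Add the heap object tag so that r3 holds a properly tagged pointer to the
  // new FixedDoubleArray.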
  __ add(r3, r6, Operand(kHeapObjectTag));
  __ str(r3, FieldMemOperand(r2, JSObject::kElementsOffset));
  __ RecordWriteField(r2,
                      JSObject::kElementsOffset,
                      r3,
                      r9,
                      kLRHasBeenSaved,
                      kDontSaveFPRegs,
                      EMIT_REMEMBERED_SET,
                      OMIT_SMI_CHECK);

  // Prepare for conversion loop.
  __ add(r3, r4, Operand(FixedArray::kHeaderSize - kHeapObjectTag));
  __ add(r7, r6, Operand(FixedDoubleArray::kHeaderSize));
  __ add(r6, r7, Operand(r5, LSL, 2));
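  // Holes in the source array are written to the destination as the canonical
  // hole NaN, assembled here from its lower and upper 32-bit halves.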
  __ mov(r4, Operand(kHoleNanLower32));
  __ mov(r5, Operand(kHoleNanUpper32));
  // r3: begin of source FixedArray element fields, not tagged
  // r4: kHoleNanLower32
  // r5: kHoleNanUpper32
  // r6: end of destination FixedDoubleArray, not tagged
  // r7: begin of FixedDoubleArray element fields, not tagged
  if (!vfp3_supported) __ Push(r1, r0);

  __ b(&entry);

  __ bind(&only_change_map);
  // This path is reached via the branch above, before lr is pushed, so lr has
  // not been saved here.
  __ str(r3, FieldMemOperand(r2, HeapObject::kMapOffset));
  __ RecordWriteField(r2,
                      HeapObject::kMapOffset,
                      r3,
                      r9,
                      kLRHasNotBeenSaved,
                      kDontSaveFPRegs,
                      OMIT_REMEMBERED_SET,
                      OMIT_SMI_CHECK);
  __ b(&done);

  // Call into runtime if GC is required.
  __ bind(&gc_required);
  __ pop(lr);
  __ b(fail);

  // Convert and copy elements.
  __ bind(&loop);
  __ ldr(r9, MemOperand(r3, 4, PostIndex));
  // r9: current element
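  // In a smi-only array every non-smi element must be the hole, so a failed
  // smi check below means the current element is the hole.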
  __ UntagAndJumpIfNotSmi(r9, r9, &convert_hole);

  // Normal smi, convert to double and store.
  if (vfp3_supported) {
    CpuFeatures::Scope scope(VFP3);
    __ vmov(s0, r9);
    __ vcvt_f64_s32(d0, s0);
    __ vstr(d0, r7, 0);
    __ add(r7, r7, Operand(8));
  } else {
    FloatingPointHelper::ConvertIntToDouble(masm,
                                            r9,
                                            FloatingPointHelper::kCoreRegisters,
                                            d0,
                                            r0,
                                            r1,
                                            lr,
                                            s0);
    __ Strd(r0, r1, MemOperand(r7, 8, PostIndex));
  }
  __ b(&entry);

  // Hole found, store the-hole NaN.
  __ bind(&convert_hole);
  if (FLAG_debug_code) {
    // Restore the heap object pointer that was "smi-untagged" above: re-tag
    // and set the low bit, then verify that it really is the hole.
    __ SmiTag(r9);
    __ orr(r9, r9, Operand(1));
    __ CompareRoot(r9, Heap::kTheHoleValueRootIndex);
    __ Assert(eq, "object found in smi-only array");
  }
  __ Strd(r4, r5, MemOperand(r7, 8, PostIndex));

  __ bind(&entry);
  __ cmp(r7, r6);
  __ b(lt, &loop);

  if (!vfp3_supported) __ Pop(r1, r0);
  __ pop(lr);
  __ bind(&done);
}


void ElementsTransitionGenerator::GenerateDoubleToObject(
    MacroAssembler* masm, Label* fail) {
  // ----------- S t a t e -------------
  //  -- r0    : value
  //  -- r1    : key
  //  -- r2    : receiver
  //  -- lr    : return address
  //  -- r3    : target map, scratch for subsequent call
  //  -- r4    : scratch (elements)
  // -----------------------------------
  Label entry, loop, convert_hole, gc_required, only_change_map;

  // Check for empty arrays, which only require a map transition and no changes
  // to the backing store.
  __ ldr(r4, FieldMemOperand(r2, JSObject::kElementsOffset));
  __ CompareRoot(r4, Heap::kEmptyFixedArrayRootIndex);
  __ b(eq, &only_change_map);

  __ push(lr);
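  // r0-r3 still hold value, key, receiver and target map; save them so they
  // can be restored on the GC path and after the conversion loop.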
  __ Push(r3, r2, r1, r0);
  __ ldr(r5, FieldMemOperand(r4, FixedArray::kLengthOffset));
  // r4: source FixedDoubleArray
  // r5: number of elements (smi-tagged)

  // Allocate new FixedArray.
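  // r5 holds the length as a smi (length * 2), so shifting it left by one more
  // bit gives length * 4, i.e. length * kPointerSize bytes of element data.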
  __ mov(r0, Operand(FixedDoubleArray::kHeaderSize));
  __ add(r0, r0, Operand(r5, LSL, 1));
  __ AllocateInNewSpace(r0, r6, r7, r9, &gc_required, NO_ALLOCATION_FLAGS);
  // r6: destination FixedArray, not tagged as heap object
  // Set destination FixedArray's length and map.
  __ LoadRoot(r9, Heap::kFixedArrayMapRootIndex);
  __ str(r5, MemOperand(r6, FixedDoubleArray::kLengthOffset));
  __ str(r9, MemOperand(r6, HeapObject::kMapOffset));

  // Prepare for conversion loop.
  __ add(r4, r4, Operand(FixedDoubleArray::kHeaderSize - kHeapObjectTag + 4));
  __ add(r3, r6, Operand(FixedArray::kHeaderSize));
  __ add(r6, r6, Operand(kHeapObjectTag));
  __ add(r5, r3, Operand(r5, LSL, 1));
  __ LoadRoot(r7, Heap::kTheHoleValueRootIndex);
  __ LoadRoot(r9, Heap::kHeapNumberMapRootIndex);
  // r4 is offset by 4 so that post-indexed loads fetch each element's upper
  // 32 bits, which is what the hole check below compares against.
  // r3: begin of destination FixedArray element fields, not tagged
  // r4: begin of source FixedDoubleArray element fields, not tagged, +4
  // r5: end of destination FixedArray, not tagged
  // r6: destination FixedArray
  // r7: the-hole pointer
  // r9: heap number map
  __ b(&entry);

  // Call into runtime if GC is required.
  __ bind(&gc_required);
  __ Pop(r3, r2, r1, r0);
  __ pop(lr);
  __ b(fail);

  __ bind(&loop);
  __ ldr(r1, MemOperand(r4, 8, PostIndex));
  // r1: current element's upper 32 bits
  // r4: address of next element's upper 32 bits
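  // Only the upper 32 bits are compared; they are sufficient to distinguish
  // the hole NaN from ordinary doubles.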
  __ cmp(r1, Operand(kHoleNanUpper32));
  __ b(eq, &convert_hole);

  // Non-hole double, copy value into a heap number.
  __ AllocateHeapNumber(r2, r0, lr, r9, &gc_required);
  // r2: new heap number
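  // r4 has already been post-incremented past this element's upper word, so
  // the lower word of the current element is at r4 - 12.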
  __ ldr(r0, MemOperand(r4, 12, NegOffset));
  __ Strd(r0, r1, FieldMemOperand(r2, HeapNumber::kValueOffset));
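  // Keep the address of the destination slot in r0 for the write barrier;
  // the store below post-increments r3.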
  __ mov(r0, r3);
  __ str(r2, MemOperand(r3, 4, PostIndex));
  __ RecordWrite(r6,
                 r0,
                 r2,
                 kLRHasBeenSaved,
                 kDontSaveFPRegs,
                 EMIT_REMEMBERED_SET,
                 OMIT_SMI_CHECK);
  __ b(&entry);

  // Replace the-hole NaN with the-hole pointer.
  __ bind(&convert_hole);
  __ str(r7, MemOperand(r3, 4, PostIndex));

  __ bind(&entry);
  __ cmp(r3, r5);
  __ b(lt, &loop);

  __ Pop(r3, r2, r1, r0);
  // Replace receiver's backing store with newly created and filled FixedArray.
  __ str(r6, FieldMemOperand(r2, JSObject::kElementsOffset));
  __ RecordWriteField(r2,
                      JSObject::kElementsOffset,
                      r6,
                      r9,
                      kLRHasBeenSaved,
                      kDontSaveFPRegs,
                      EMIT_REMEMBERED_SET,
                      OMIT_SMI_CHECK);
  __ pop(lr);

  __ bind(&only_change_map);
  // Update receiver's map.
  __ str(r3, FieldMemOperand(r2, HeapObject::kMapOffset));
  __ RecordWriteField(r2,
                      HeapObject::kMapOffset,
                      r3,
                      r9,
                      kLRHasNotBeenSaved,
                      kDontSaveFPRegs,
                      OMIT_REMEMBERED_SET,
                      OMIT_SMI_CHECK);
}


void StringCharLoadGenerator::Generate(MacroAssembler* masm,
                                       Register string,
                                       Register index,
                                       Register result,
                                       Label* call_runtime) {
  // Fetch the instance type of the receiver into result register.
  __ ldr(result, FieldMemOperand(string, HeapObject::kMapOffset));
  __ ldrb(result, FieldMemOperand(result, Map::kInstanceTypeOffset));

  // We need special handling for indirect strings.
  Label check_sequential;
  __ tst(result, Operand(kIsIndirectStringMask));
  __ b(eq, &check_sequential);

  // Dispatch on the indirect string shape: slice or cons.
  Label cons_string;
  __ tst(result, Operand(kSlicedNotConsMask));
  __ b(eq, &cons_string);

  // Handle slices.
  Label indirect_string_loaded;
  __ ldr(result, FieldMemOperand(string, SlicedString::kOffsetOffset));
  __ ldr(string, FieldMemOperand(string, SlicedString::kParentOffset));
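  // The slice offset in result is a smi; shift it right by the tag size to
  // untag it before adding it to the index.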
  __ add(index, index, Operand(result, ASR, kSmiTagSize));
  __ jmp(&indirect_string_loaded);

  // Handle cons strings.
  // Check whether the right hand side is the empty string (i.e. if
  // this is really a flat string in a cons string). If that is not
  // the case we would rather go to the runtime system now to flatten
  // the string.
  __ bind(&cons_string);
  __ ldr(result, FieldMemOperand(string, ConsString::kSecondOffset));
  __ CompareRoot(result, Heap::kEmptyStringRootIndex);
  __ b(ne, call_runtime);
  // Get the first of the two strings and load its instance type.
  __ ldr(string, FieldMemOperand(string, ConsString::kFirstOffset));

  __ bind(&indirect_string_loaded);
  __ ldr(result, FieldMemOperand(string, HeapObject::kMapOffset));
  __ ldrb(result, FieldMemOperand(result, Map::kInstanceTypeOffset));

  // Distinguish sequential and external strings. Only these two string
  // representations can reach here (slices and flat cons strings have been
  // reduced to the underlying sequential or external string).
  Label external_string, check_encoding;
  __ bind(&check_sequential);
  STATIC_ASSERT(kSeqStringTag == 0);
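  // Sequential strings have a zero representation tag, so any set bit in the
  // representation field means the string is external.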
  __ tst(result, Operand(kStringRepresentationMask));
  __ b(ne, &external_string);

  // Prepare sequential strings.
  STATIC_ASSERT(SeqTwoByteString::kHeaderSize == SeqAsciiString::kHeaderSize);
  __ add(string,
         string,
         Operand(SeqTwoByteString::kHeaderSize - kHeapObjectTag));
  __ jmp(&check_encoding);

  // Handle external strings.
  __ bind(&external_string);
  if (FLAG_debug_code) {
    // Assert that we do not have a cons or slice (indirect strings) here.
    // Sequential strings have already been ruled out.
    __ tst(result, Operand(kIsIndirectStringMask));
    __ Assert(eq, "external string expected, but not found");
  }
  // Rule out short external strings.
  STATIC_CHECK(kShortExternalStringTag != 0);
  __ tst(result, Operand(kShortExternalStringMask));
  __ b(ne, call_runtime);
  __ ldr(string, FieldMemOperand(string, ExternalString::kResourceDataOffset));

  Label ascii, done;
  __ bind(&check_encoding);
  STATIC_ASSERT(kTwoByteStringTag == 0);
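  // The two-byte encoding tag is zero, so a set encoding bit means the string
  // is ASCII (one-byte).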
  __ tst(result, Operand(kStringEncodingMask));
  __ b(ne, &ascii);
  // Two-byte string.
  __ ldrh(result, MemOperand(string, index, LSL, 1));
  __ jmp(&done);
  __ bind(&ascii);
  // Ascii string.
  __ ldrb(result, MemOperand(string, index));
  __ bind(&done);
}

#undef __

} }  // namespace v8::internal

#endif  // V8_TARGET_ARCH_ARM