// Copyright 2016, VIXL authors
// All rights reserved.
//
// Redistribution and use in source and binary forms, with or without
// modification, are permitted provided that the following conditions are met:
//
//   * Redistributions of source code must retain the above copyright notice,
//     this list of conditions and the following disclaimer.
//   * Redistributions in binary form must reproduce the above copyright notice,
//     this list of conditions and the following disclaimer in the documentation
//     and/or other materials provided with the distribution.
//   * Neither the name of ARM Limited nor the names of its contributors may be
//     used to endorse or promote products derived from this software without
//     specific prior written permission.
//
// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND
// ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
// WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
// DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE LIABLE
// FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL
// DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR
// SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER
// CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY,
// OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.

#include "test-runner.h"

#ifdef VIXL_INCLUDE_TARGET_AARCH32
#include "aarch32/macro-assembler-aarch32.h"
#endif

#ifdef VIXL_INCLUDE_TARGET_AARCH64
#include "aarch64/macro-assembler-aarch64.h"
#endif

#define TEST(name) TEST_(SCOPES_##name)

#ifdef VIXL_INCLUDE_TARGET_A32
#define TEST_A32(name) TEST(name)
#else
// Do not add this test to the harness.
#define TEST_A32(name) void Test##name()
#endif

#define __ masm.
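// The `__` shorthand keeps the test bodies close to assembly listings:
// `__ Mov(...)` expands to `masm.Mov(...)`.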

namespace vixl {

// This file contains tests for code generation scopes.
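// Broadly, the three scope types exercised here behave as follows:
//  - CodeBufferCheckScope checks that the code emitted inside the scope fits
//    in the space reserved when the scope was opened.
//  - EmissionCheckScope additionally manages the literal pool, and may emit
//    it when the scope is opened.
//  - ExactAssemblyScope accepts only raw assembler instructions, which must
//    fill the reserved space exactly.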

#ifdef VIXL_INCLUDE_TARGET_AARCH32
TEST(CodeBufferCheckScope_basic_32) {
  aarch32::MacroAssembler masm;

  {
    CodeBufferCheckScope scope(&masm, aarch32::kA32InstructionSizeInBytes);
    __ Mov(aarch32::r0, 0);
  }

  masm.FinalizeCode();
}
#endif  // VIXL_INCLUDE_TARGET_AARCH32


#ifdef VIXL_INCLUDE_TARGET_AARCH64
TEST(CodeBufferCheckScope_basic_64) {
  aarch64::MacroAssembler masm;

  {
    CodeBufferCheckScope scope(&masm, aarch64::kInstructionSize);
    __ Mov(aarch64::x0, 0);
  }

  masm.FinalizeCode();
}
#endif  // VIXL_INCLUDE_TARGET_AARCH64


#ifdef VIXL_INCLUDE_TARGET_AARCH32
TEST(CodeBufferCheckScope_assembler_use_32) {
  aarch32::MacroAssembler masm;

  {
    CodeBufferCheckScope scope(&masm, 2 * aarch32::kA32InstructionSizeInBytes);
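    // Both the macro-assembler (`Mov`) and the raw assembler (`mov`) can be
    // used inside the scope; both count towards the reserved space.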
    __ Mov(aarch32::r0, 0);
    __ mov(aarch32::r1, 1);
  }

  masm.FinalizeCode();
}
#endif  // VIXL_INCLUDE_TARGET_AARCH32


#ifdef VIXL_INCLUDE_TARGET_AARCH64
TEST(CodeBufferCheckScope_assembler_use_64) {
  aarch64::MacroAssembler masm;

  {
    CodeBufferCheckScope scope(&masm, 2 * aarch64::kInstructionSize);
    __ Mov(aarch64::x0, 0);
    __ movz(aarch64::x1, 1);
  }

  masm.FinalizeCode();
}
#endif  // VIXL_INCLUDE_TARGET_AARCH64


#ifdef VIXL_INCLUDE_TARGET_AARCH32
TEST(CodeBufferCheckScope_Open_32) {
  aarch32::MacroAssembler masm;

  {
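    // A default-constructed scope is inactive: emission is only checked once
    // Open() has been called.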
    CodeBufferCheckScope scope;
    __ Mov(aarch32::r0, 0);
    scope.Open(&masm, aarch32::kA32InstructionSizeInBytes);
    __ Mov(aarch32::r1, 1);
  }

  masm.FinalizeCode();
}
#endif  // VIXL_INCLUDE_TARGET_AARCH32


#ifdef VIXL_INCLUDE_TARGET_AARCH64
TEST(CodeBufferCheckScope_Open_64) {
  aarch64::MacroAssembler masm;

  {
    CodeBufferCheckScope scope;
    __ Mov(aarch64::x0, 0);
    scope.Open(&masm, aarch64::kInstructionSize);
    __ Mov(aarch64::x1, 1);
  }

  masm.FinalizeCode();
}
#endif  // VIXL_INCLUDE_TARGET_AARCH64


#ifdef VIXL_INCLUDE_TARGET_AARCH32
TEST(CodeBufferCheckScope_Close_32) {
  aarch32::MacroAssembler masm;

  {
    CodeBufferCheckScope scope(&masm, aarch32::kA32InstructionSizeInBytes);
    __ Mov(aarch32::r0, 0);
    scope.Close();
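    // Once the scope is closed, emission is no longer checked against the
    // space it had reserved.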
    __ Mov(aarch32::r1, 1);
  }

  masm.FinalizeCode();
}
#endif  // VIXL_INCLUDE_TARGET_AARCH32


#ifdef VIXL_INCLUDE_TARGET_AARCH64
TEST(CodeBufferCheckScope_Close_64) {
  aarch64::MacroAssembler masm;

  {
    CodeBufferCheckScope scope(&masm, aarch64::kInstructionSize);
    __ Mov(aarch64::x0, 0);
    scope.Close();
    __ Mov(aarch64::x1, 1);
  }

  masm.FinalizeCode();
}
#endif  // VIXL_INCLUDE_TARGET_AARCH64


#ifdef VIXL_INCLUDE_TARGET_AARCH32
TEST(CodeBufferCheckScope_Open_Close_32) {
  aarch32::MacroAssembler masm;

  {
    CodeBufferCheckScope scope;
    __ Mov(aarch32::r0, 0);
    scope.Open(&masm, aarch32::kA32InstructionSizeInBytes);
    __ Mov(aarch32::r1, 1);
    scope.Close();
    __ Mov(aarch32::r2, 2);
  }

  masm.FinalizeCode();
}
#endif  // VIXL_INCLUDE_TARGET_AARCH32


#ifdef VIXL_INCLUDE_TARGET_AARCH64
TEST(CodeBufferCheckScope_Open_Close_64) {
  aarch64::MacroAssembler masm;

  {
    CodeBufferCheckScope scope;
    __ Mov(aarch64::x0, 0);
    scope.Open(&masm, aarch64::kInstructionSize);
    __ Mov(aarch64::x1, 1);
    scope.Close();
    __ Mov(aarch64::x2, 2);
  }

  masm.FinalizeCode();
}
#endif  // VIXL_INCLUDE_TARGET_AARCH64


#ifdef VIXL_INCLUDE_TARGET_AARCH32
TEST(EmissionCheckScope_basic_32) {
  aarch32::MacroAssembler masm;

  {
    EmissionCheckScope scope(&masm, aarch32::kA32InstructionSizeInBytes);
    __ Mov(aarch32::r0, 0);
  }

  masm.FinalizeCode();
}
#endif  // VIXL_INCLUDE_TARGET_AARCH32


#ifdef VIXL_INCLUDE_TARGET_AARCH64
TEST(EmissionCheckScope_basic_64) {
  aarch64::MacroAssembler masm;

  {
    EmissionCheckScope scope(&masm, aarch64::kInstructionSize);
    __ Mov(aarch64::x0, 0);
  }

  masm.FinalizeCode();
}
#endif  // VIXL_INCLUDE_TARGET_AARCH64


#ifdef VIXL_INCLUDE_TARGET_AARCH32
TEST(EmissionCheckScope_Open_32) {
  aarch32::MacroAssembler masm;

  {
    EmissionCheckScope scope;
    __ Mov(aarch32::r0, 0);
    scope.Open(&masm, aarch32::kA32InstructionSizeInBytes);
    __ Mov(aarch32::r1, 1);
  }

  masm.FinalizeCode();
}
#endif  // VIXL_INCLUDE_TARGET_AARCH32


#ifdef VIXL_INCLUDE_TARGET_AARCH64
TEST(EmissionCheckScope_Open_64) {
  aarch64::MacroAssembler masm;

  {
    EmissionCheckScope scope;
    __ Mov(aarch64::x0, 0);
    scope.Open(&masm, aarch64::kInstructionSize);
    __ Mov(aarch64::x1, 1);
  }

  masm.FinalizeCode();
}
#endif  // VIXL_INCLUDE_TARGET_AARCH64


#ifdef VIXL_INCLUDE_TARGET_AARCH32
TEST(EmissionCheckScope_Close_32) {
  aarch32::MacroAssembler masm;

  {
    EmissionCheckScope scope(&masm, aarch32::kA32InstructionSizeInBytes);
    __ Mov(aarch32::r0, 0);
    scope.Close();
    __ Mov(aarch32::r1, 1);
  }

  masm.FinalizeCode();
}
#endif  // VIXL_INCLUDE_TARGET_AARCH32


#ifdef VIXL_INCLUDE_TARGET_AARCH64
TEST(EmissionCheckScope_Close_64) {
  aarch64::MacroAssembler masm;

  {
    EmissionCheckScope scope(&masm, aarch64::kInstructionSize);
    __ Mov(aarch64::x0, 0);
    scope.Close();
    __ Mov(aarch64::x1, 1);
  }

  masm.FinalizeCode();
}
#endif  // VIXL_INCLUDE_TARGET_AARCH64


#ifdef VIXL_INCLUDE_TARGET_AARCH32
TEST(EmissionCheckScope_Open_Close_32) {
  aarch32::MacroAssembler masm;

  {
    EmissionCheckScope scope;
    __ Mov(aarch32::r0, 0);
    scope.Open(&masm, aarch32::kA32InstructionSizeInBytes);
    __ Mov(aarch32::r1, 1);
    scope.Close();
    __ Mov(aarch32::r2, 2);
  }

  masm.FinalizeCode();
}
#endif  // VIXL_INCLUDE_TARGET_AARCH32


#ifdef VIXL_INCLUDE_TARGET_AARCH64
TEST(EmissionCheckScope_Open_Close_64) {
  aarch64::MacroAssembler masm;

  {
    EmissionCheckScope scope;
    __ Mov(aarch64::x0, 0);
    scope.Open(&masm, aarch64::kInstructionSize);
    __ Mov(aarch64::x1, 1);
    scope.Close();
    __ Mov(aarch64::x2, 2);
  }

  masm.FinalizeCode();
}
#endif  // VIXL_INCLUDE_TARGET_AARCH64


#ifdef VIXL_INCLUDE_TARGET_AARCH32

#define ASSERT_LITERAL_POOL_SIZE_32(expected) \
  VIXL_CHECK((expected) == masm.GetLiteralPoolSize())

TEST_A32(EmissionCheckScope_emit_pool_32) {
  aarch32::MacroAssembler masm;

  // Make sure the pool is empty.
  masm.EmitLiteralPool(aarch32::MacroAssembler::kBranchRequired);
  ASSERT_LITERAL_POOL_SIZE_32(0);

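  // Loading a 64-bit immediate with Ldrd goes through the literal pool,
  // adding an 8-byte entry.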
  __ Ldrd(aarch32::r0, aarch32::r1, 0x1234567890abcdef);
  ASSERT_LITERAL_POOL_SIZE_32(8);

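  // The A32 Ldrd (literal) form only has an 8-bit offset, so the pool must
  // be emitted before the load drifts more than 255 bytes away.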
  const int kLdrdRange = 255;
  const int kLessThanLdrdRange = 100;

  {
    // Check that opening the scope with a reserved space well below the limit
    // at which the literal pool would be generated does not force the
    // emission of the pool.
    EmissionCheckScope scope(&masm,
                             kLessThanLdrdRange,
                             EmissionCheckScope::kMaximumSize);
    ASSERT_LITERAL_POOL_SIZE_32(8);
  }

  {
    // Check that the scope forces emission of the pool if necessary.
    EmissionCheckScope scope(&masm,
                             kLdrdRange + 1,
                             EmissionCheckScope::kMaximumSize);
    ASSERT_LITERAL_POOL_SIZE_32(0);
  }

  masm.FinalizeCode();
}
#endif  // VIXL_INCLUDE_TARGET_AARCH32


#ifdef VIXL_INCLUDE_TARGET_AARCH64

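// Note: on AArch64, GetLiteralPoolSize() appears to account for one extra
// instruction (presumably the branch over the pool), hence the adjustment
// below.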
#define ASSERT_LITERAL_POOL_SIZE_64(expected)            \
  VIXL_CHECK(((expected) + aarch64::kInstructionSize) == \
             masm.GetLiteralPoolSize())

TEST(EmissionCheckScope_emit_pool_64) {
  aarch64::MacroAssembler masm;

  // Make sure the pool is empty.
  masm.EmitLiteralPool(aarch64::LiteralPool::kBranchRequired);
  ASSERT_LITERAL_POOL_SIZE_64(0);

  __ Ldr(aarch64::x0, 0x1234567890abcdef);
  ASSERT_LITERAL_POOL_SIZE_64(8);

  {
    // Check that opening the scope with a reserved space well below the limit
    // at which the literal pool would be generated does not force the
    // emission of the pool.
    EmissionCheckScope scope(&masm,
                             10 * aarch64::kInstructionSize,
                             EmissionCheckScope::kMaximumSize);
    ASSERT_LITERAL_POOL_SIZE_64(8);
  }

  {
    // Check that the scope forces emission of the pool if necessary.
    EmissionCheckScope scope(&masm,
                             aarch64::kMaxLoadLiteralRange + 1,
                             EmissionCheckScope::kMaximumSize);
    ASSERT_LITERAL_POOL_SIZE_64(0);
  }

  masm.FinalizeCode();
}
#endif  // VIXL_INCLUDE_TARGET_AARCH64


#ifdef VIXL_INCLUDE_TARGET_AARCH32
TEST_A32(EmissionCheckScope_emit_pool_on_Open_32) {
  aarch32::MacroAssembler masm;

  // Make sure the pool is empty.
  masm.EmitLiteralPool(aarch32::MacroAssembler::kBranchRequired);
  ASSERT_LITERAL_POOL_SIZE_32(0);

  __ Ldrd(aarch32::r0, aarch32::r1, 0x1234567890abcdef);
  ASSERT_LITERAL_POOL_SIZE_32(8);

  const int kLdrdRange = 255;
  const int kLessThanLdrdRange = 100;

  {
    // Check that opening the scope with a reserved space well below the limit
    // at which the literal pool would be generated does not force the
    // emission of the pool.
    EmissionCheckScope scope;
    scope.Open(&masm, kLessThanLdrdRange, EmissionCheckScope::kMaximumSize);
    ASSERT_LITERAL_POOL_SIZE_32(8);
  }

  {
    // Check that the scope forces emission of the pool if necessary.
    EmissionCheckScope scope;
    scope.Open(&masm, kLdrdRange + 1, EmissionCheckScope::kMaximumSize);
    ASSERT_LITERAL_POOL_SIZE_32(0);
  }

  masm.FinalizeCode();
}
#endif  // VIXL_INCLUDE_TARGET_AARCH32


#ifdef VIXL_INCLUDE_TARGET_AARCH64
TEST(EmissionCheckScope_emit_pool_on_Open_64) {
  aarch64::MacroAssembler masm;

  // Make sure the pool is empty.
  masm.EmitLiteralPool(aarch64::LiteralPool::kBranchRequired);
  ASSERT_LITERAL_POOL_SIZE_64(0);

  __ Ldr(aarch64::x0, 0x1234567890abcdef);
  ASSERT_LITERAL_POOL_SIZE_64(8);

  {
    // Check that opening the scope with a reserved space well below the limit
    // at which the literal pool would be generated does not force the
    // emission of the pool.
    EmissionCheckScope scope;
    scope.Open(&masm,
               10 * aarch64::kInstructionSize,
               EmissionCheckScope::kMaximumSize);
    ASSERT_LITERAL_POOL_SIZE_64(8);
  }

  {
    // Check that the scope forces emission of the pool if necessary.
    EmissionCheckScope scope;
    scope.Open(&masm,
               aarch64::kMaxLoadLiteralRange + 1,
               EmissionCheckScope::kMaximumSize);
    ASSERT_LITERAL_POOL_SIZE_64(0);
  }

  masm.FinalizeCode();
}
#endif  // VIXL_INCLUDE_TARGET_AARCH64


#ifdef VIXL_INCLUDE_TARGET_AARCH32
TEST_A32(ExactAssemblyScope_basic_32) {
  aarch32::MacroAssembler masm;

  {
    ExactAssemblyScope scope(&masm, aarch32::kA32InstructionSizeInBytes);
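    // Only raw assembler instructions are allowed inside an
    // ExactAssemblyScope, hence `nop()` rather than a macro instruction.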
    __ nop();
  }

  masm.FinalizeCode();
}
#endif  // VIXL_INCLUDE_TARGET_AARCH32


#ifdef VIXL_INCLUDE_TARGET_AARCH64
TEST(ExactAssemblyScope_basic_64) {
  aarch64::MacroAssembler masm;

  {
    ExactAssemblyScope scope(&masm, aarch64::kInstructionSize);
    __ nop();
  }

  masm.FinalizeCode();
}
#endif  // VIXL_INCLUDE_TARGET_AARCH64


#ifdef VIXL_INCLUDE_TARGET_AARCH32
TEST_A32(ExactAssemblyScope_Open_32) {
  aarch32::MacroAssembler masm;

  {
    ExactAssemblyScope scope;
    __ Mov(aarch32::r0, 0);
    scope.Open(&masm, aarch32::kA32InstructionSizeInBytes);
    __ mov(aarch32::r1, 1);
  }

  masm.FinalizeCode();
}
#endif  // VIXL_INCLUDE_TARGET_AARCH32


#ifdef VIXL_INCLUDE_TARGET_AARCH64
TEST(ExactAssemblyScope_Open_64) {
  aarch64::MacroAssembler masm;

  {
    ExactAssemblyScope scope;
    __ Mov(aarch64::x0, 0);
    scope.Open(&masm, aarch64::kInstructionSize);
    __ movz(aarch64::x1, 1);
  }

  masm.FinalizeCode();
}
#endif  // VIXL_INCLUDE_TARGET_AARCH64


#ifdef VIXL_INCLUDE_TARGET_AARCH32
TEST_A32(ExactAssemblyScope_Close_32) {
  aarch32::MacroAssembler masm;

  {
    ExactAssemblyScope scope(&masm, aarch32::kA32InstructionSizeInBytes);
    __ mov(aarch32::r0, 0);
    scope.Close();
    __ Mov(aarch32::r1, 1);
  }

  masm.FinalizeCode();
}
#endif  // VIXL_INCLUDE_TARGET_AARCH32


#ifdef VIXL_INCLUDE_TARGET_AARCH64
TEST(ExactAssemblyScope_Close_64) {
  aarch64::MacroAssembler masm;

  {
    ExactAssemblyScope scope(&masm, aarch64::kInstructionSize);
    __ movz(aarch64::x0, 0);
    scope.Close();
    __ Mov(aarch64::x1, 1);
  }

  masm.FinalizeCode();
}
#endif  // VIXL_INCLUDE_TARGET_AARCH64


#ifdef VIXL_INCLUDE_TARGET_AARCH32
TEST_A32(ExactAssemblyScope_Open_Close_32) {
  aarch32::MacroAssembler masm;

  {
    ExactAssemblyScope scope;
    __ Mov(aarch32::r0, 0);
    scope.Open(&masm, aarch32::kA32InstructionSizeInBytes);
    __ mov(aarch32::r1, 1);
    scope.Close();
    __ Mov(aarch32::r2, 2);
  }

  masm.FinalizeCode();
}
#endif  // VIXL_INCLUDE_TARGET_AARCH32


#ifdef VIXL_INCLUDE_TARGET_AARCH64
TEST(ExactAssemblyScope_Open_Close_64) {
  aarch64::MacroAssembler masm;

  {
    ExactAssemblyScope scope;
    __ Mov(aarch64::x0, 0);
    scope.Open(&masm, aarch64::kInstructionSize);
    __ movz(aarch64::x1, 1);
    scope.Close();
    __ Mov(aarch64::x2, 2);
  }

  masm.FinalizeCode();
}
#endif  // VIXL_INCLUDE_TARGET_AARCH64


#ifdef VIXL_INCLUDE_TARGET_AARCH32
TEST_A32(ExactAssemblyScope_32) {
  aarch32::MacroAssembler masm;

  // By default macro instructions are allowed.
  VIXL_CHECK(!masm.ArePoolsBlocked());
  VIXL_ASSERT(!masm.AllowAssembler());
  VIXL_ASSERT(masm.AllowMacroInstructions());
  {
    ExactAssemblyScope scope1(&masm, 2 * aarch32::kA32InstructionSizeInBytes);
    VIXL_CHECK(masm.ArePoolsBlocked());
    VIXL_ASSERT(masm.AllowAssembler());
    VIXL_ASSERT(!masm.AllowMacroInstructions());
    __ nop();
    {
      ExactAssemblyScope scope2(&masm, 1 * aarch32::kA32InstructionSizeInBytes);
      VIXL_CHECK(masm.ArePoolsBlocked());
      VIXL_ASSERT(masm.AllowAssembler());
      VIXL_ASSERT(!masm.AllowMacroInstructions());
      __ nop();
    }
    VIXL_CHECK(masm.ArePoolsBlocked());
    VIXL_ASSERT(masm.AllowAssembler());
    VIXL_ASSERT(!masm.AllowMacroInstructions());
  }
  VIXL_CHECK(!masm.ArePoolsBlocked());
  VIXL_ASSERT(!masm.AllowAssembler());
  VIXL_ASSERT(masm.AllowMacroInstructions());

  {
    ExactAssemblyScope scope(&masm, 2 * aarch32::kA32InstructionSizeInBytes);
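    // The two raw instructions below must fill the reserved space exactly.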
    __ add(aarch32::r0, aarch32::r0, aarch32::r0);
    __ sub(aarch32::r0, aarch32::r0, aarch32::r0);
  }

  masm.FinalizeCode();
}
#endif  // VIXL_INCLUDE_TARGET_AARCH32


#ifdef VIXL_INCLUDE_TARGET_AARCH64
TEST(ExactAssemblyScope_64) {
  aarch64::MacroAssembler masm;

  // By default macro instructions are allowed.
  VIXL_CHECK(!masm.ArePoolsBlocked());
  VIXL_ASSERT(!masm.AllowAssembler());
  VIXL_ASSERT(masm.AllowMacroInstructions());
  {
    ExactAssemblyScope scope1(&masm, 2 * aarch64::kInstructionSize);
    VIXL_CHECK(masm.ArePoolsBlocked());
    VIXL_ASSERT(masm.AllowAssembler());
    VIXL_ASSERT(!masm.AllowMacroInstructions());
    __ nop();
    {
      ExactAssemblyScope scope2(&masm, 1 * aarch64::kInstructionSize);
      VIXL_CHECK(masm.ArePoolsBlocked());
      VIXL_ASSERT(masm.AllowAssembler());
      VIXL_ASSERT(!masm.AllowMacroInstructions());
      __ nop();
    }
    VIXL_CHECK(masm.ArePoolsBlocked());
    VIXL_ASSERT(masm.AllowAssembler());
    VIXL_ASSERT(!masm.AllowMacroInstructions());
  }
  VIXL_CHECK(!masm.ArePoolsBlocked());
  VIXL_ASSERT(!masm.AllowAssembler());
  VIXL_ASSERT(masm.AllowMacroInstructions());

  {
    ExactAssemblyScope scope(&masm, 2 * aarch64::kInstructionSize);
    __ add(aarch64::x0, aarch64::x0, aarch64::x0);
    __ sub(aarch64::x0, aarch64::x0, aarch64::x0);
  }

  masm.FinalizeCode();
}
#endif  // VIXL_INCLUDE_TARGET_AARCH64


#ifdef VIXL_INCLUDE_TARGET_AARCH32
TEST_A32(ExactAssemblyScope_scope_with_pools_32) {
  aarch32::MacroAssembler masm;

  ASSERT_LITERAL_POOL_SIZE_32(0);

  __ Ldrd(aarch32::r0, aarch32::r1, 0x1234567890abcdef);

  ASSERT_LITERAL_POOL_SIZE_32(8);

  const int32_t kLdrdRange = 255;
  const int32_t n_nops = (kLdrdRange / aarch32::kA32InstructionSizeInBytes) + 1;
  {
    // The literal pool should be generated when opening this scope, as
    // otherwise the `Ldrd` will run out of range when we generate the `nop`
    // instructions below.
    ExactAssemblyScope scope(&masm,
                             n_nops * aarch32::kA32InstructionSizeInBytes);

    // Although the pool must be empty at this point, we do not check it
    // here, because we want this regression test to fail while or after the
    // nops are generated.

    for (int32_t i = 0; i < n_nops; ++i) {
      __ nop();
    }
  }

  ASSERT_LITERAL_POOL_SIZE_32(0);

  masm.FinalizeCode();
}
#endif  // VIXL_INCLUDE_TARGET_AARCH32


#ifdef VIXL_INCLUDE_TARGET_AARCH64
TEST(ExactAssemblyScope_scope_with_pools_64) {
  aarch64::MacroAssembler masm;

  ASSERT_LITERAL_POOL_SIZE_64(0);

  __ Ldr(aarch64::x10, 0x1234567890abcdef);

  ASSERT_LITERAL_POOL_SIZE_64(8);

  const int64_t n_nops =
      aarch64::kMaxLoadLiteralRange / aarch64::kInstructionSize;
  {
    // The literal pool should be generated when opening this scope, as
    // otherwise the `Ldr` will run out of range when we generate the `nop`
    // instructions below.
    ExactAssemblyScope scope(&masm, n_nops * aarch64::kInstructionSize);

    // Although the pool must be empty at this point, we do not check it
    // here, because we want this regression test to fail while or after the
    // nops are generated.

    for (int64_t i = 0; i < n_nops; ++i) {
      __ nop();
    }
  }

  ASSERT_LITERAL_POOL_SIZE_64(0);

  masm.FinalizeCode();
}
#endif  // VIXL_INCLUDE_TARGET_AARCH64


}  // namespace vixl