// RUN: %clang_cc1 %s -O3 -triple=x86_64-apple-darwin -target-feature +bmi -emit-llvm -o - | FileCheck %s

// Don't include mm_malloc.h, it's system specific.
#define __MM_MALLOC_H

#include <x86intrin.h>

// The double underscore intrinsics are for compatibility with
// AMD's BMI interface. The single underscore intrinsics
// are for compatibility with Intel's BMI interface.
// Apart from the underscores, the interfaces are identical
// except in one case: although the 'bextr' register-form
// instruction is identical in hardware, the AMD and Intel
// intrinsics are different!
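//
// Concretely (an illustrative sketch, not part of the checked output): the
// AMD form packs the start bit and field length into a single control
// operand, while the Intel form takes them as separate arguments:
//
//   __bextr_u32(src, (len << 8) | start);   /* AMD: two operands    */
//   _bextr_u32(src, start, len);            /* Intel: three operands */
//
// Both are expected to lower to the same @llvm.x86.bmi.bextr intrinsic,
// which is what the CHECK lines below verify.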

unsigned short test__tzcnt_u16(unsigned short __X) {
  // CHECK: @llvm.cttz.i16
  return __tzcnt_u16(__X);
}

unsigned int test__andn_u32(unsigned int __X, unsigned int __Y) {
  // CHECK: [[DEST:%.*]] = xor i32 %{{.*}}, -1
  // CHECK-NEXT: %{{.*}} = and i32 %{{.*}}, [[DEST]]
  return __andn_u32(__X, __Y);
}

unsigned int test__bextr_u32(unsigned int __X, unsigned int __Y) {
  // CHECK: @llvm.x86.bmi.bextr.32
  return __bextr_u32(__X, __Y);
}

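// The blsi/blsmsk/blsr intrinsics are expected to be emitted as plain bit
// arithmetic rather than target intrinsic calls, so the CHECK patterns below
// match the equivalent idioms: blsi isolates the lowest set bit (x & -x),
// blsmsk builds a mask up to and including the lowest set bit (x ^ (x - 1)),
// and blsr clears the lowest set bit (x & (x - 1)).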
unsigned int test__blsi_u32(unsigned int __X) {
  // CHECK: [[DEST:%.*]] = sub i32 0, [[SRC:%.*]]
  // CHECK-NEXT: %{{.*}} = and i32 [[SRC]], [[DEST]]
  return __blsi_u32(__X);
}

unsigned int test__blsmsk_u32(unsigned int __X) {
  // CHECK: [[DEST:%.*]] = add i32 [[SRC:%.*]], -1
  // CHECK-NEXT: %{{.*}} = xor i32 [[DEST]], [[SRC]]
  return __blsmsk_u32(__X);
}

unsigned int test__blsr_u32(unsigned int __X) {
  // CHECK: [[DEST:%.*]] = add i32 [[SRC:%.*]], -1
  // CHECK-NEXT: %{{.*}} = and i32 [[DEST]], [[SRC]]
  return __blsr_u32(__X);
}

unsigned int test__tzcnt_u32(unsigned int __X) {
  // CHECK: @llvm.cttz.i32
  return __tzcnt_u32(__X);
}

unsigned long long test__andn_u64(unsigned long long __X, unsigned long long __Y) {
  // CHECK: [[DEST:%.*]] = xor i64 %{{.*}}, -1
  // CHECK-NEXT: %{{.*}} = and i64 %{{.*}}, [[DEST]]
  return __andn_u64(__X, __Y);
}

unsigned long long test__bextr_u64(unsigned long long __X, unsigned long long __Y) {
  // CHECK: @llvm.x86.bmi.bextr.64
  return __bextr_u64(__X, __Y);
}

unsigned long long test__blsi_u64(unsigned long long __X) {
  // CHECK: [[DEST:%.*]] = sub i64 0, [[SRC:%.*]]
  // CHECK-NEXT: %{{.*}} = and i64 [[SRC]], [[DEST]]
  return __blsi_u64(__X);
}

unsigned long long test__blsmsk_u64(unsigned long long __X) {
  // CHECK: [[DEST:%.*]] = add i64 [[SRC:%.*]], -1
  // CHECK-NEXT: %{{.*}} = xor i64 [[DEST]], [[SRC]]
  return __blsmsk_u64(__X);
}

unsigned long long test__blsr_u64(unsigned long long __X) {
  // CHECK: [[DEST:%.*]] = add i64 [[SRC:%.*]], -1
  // CHECK-NEXT: %{{.*}} = and i64 [[DEST]], [[SRC]]
  return __blsr_u64(__X);
}

unsigned long long test__tzcnt_u64(unsigned long long __X) {
  // CHECK: @llvm.cttz.i64
  return __tzcnt_u64(__X);
}

// Intel intrinsics
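//
// These mirror the double-underscore tests above. The only interface
// difference is _bextr_u32/_bextr_u64, which take the start bit and length
// as separate operands rather than a packed control word.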

unsigned short test_tzcnt_u16(unsigned short __X) {
  // CHECK: @llvm.cttz.i16
  return _tzcnt_u16(__X);
}

unsigned int test_andn_u32(unsigned int __X, unsigned int __Y) {
  // CHECK: [[DEST:%.*]] = xor i32 %{{.*}}, -1
  // CHECK-NEXT: %{{.*}} = and i32 %{{.*}}, [[DEST]]
  return _andn_u32(__X, __Y);
}

unsigned int test_bextr_u32(unsigned int __X, unsigned int __Y,
                            unsigned int __Z) {
  // CHECK: @llvm.x86.bmi.bextr.32
  return _bextr_u32(__X, __Y, __Z);
}

unsigned int test_blsi_u32(unsigned int __X) {
  // CHECK: [[DEST:%.*]] = sub i32 0, [[SRC:%.*]]
  // CHECK-NEXT: %{{.*}} = and i32 [[SRC]], [[DEST]]
  return _blsi_u32(__X);
}

unsigned int test_blsmsk_u32(unsigned int __X) {
  // CHECK: [[DEST:%.*]] = add i32 [[SRC:%.*]], -1
  // CHECK-NEXT: %{{.*}} = xor i32 [[DEST]], [[SRC]]
  return _blsmsk_u32(__X);
}

unsigned int test_blsr_u32(unsigned int __X) {
  // CHECK: [[DEST:%.*]] = add i32 [[SRC:%.*]], -1
  // CHECK-NEXT: %{{.*}} = and i32 [[DEST]], [[SRC]]
  return _blsr_u32(__X);
}

unsigned int test_tzcnt_u32(unsigned int __X) {
  // CHECK: @llvm.cttz.i32
  return _tzcnt_u32(__X);
}

unsigned long long test_andn_u64(unsigned long long __X, unsigned long long __Y) {
  // CHECK: [[DEST:%.*]] = xor i64 %{{.*}}, -1
  // CHECK-NEXT: %{{.*}} = and i64 %{{.*}}, [[DEST]]
  return _andn_u64(__X, __Y);
}

unsigned long long test_bextr_u64(unsigned long long __X, unsigned int __Y,
                                  unsigned int __Z) {
  // CHECK: @llvm.x86.bmi.bextr.64
  return _bextr_u64(__X, __Y, __Z);
}

unsigned long long test_blsi_u64(unsigned long long __X) {
  // CHECK: [[DEST:%.*]] = sub i64 0, [[SRC:%.*]]
  // CHECK-NEXT: %{{.*}} = and i64 [[SRC]], [[DEST]]
  return _blsi_u64(__X);
}

unsigned long long test_blsmsk_u64(unsigned long long __X) {
  // CHECK: [[DEST:%.*]] = add i64 [[SRC:%.*]], -1
  // CHECK-NEXT: %{{.*}} = xor i64 [[DEST]], [[SRC]]
  return _blsmsk_u64(__X);
}

unsigned long long test_blsr_u64(unsigned long long __X) {
  // CHECK: [[DEST:%.*]] = add i64 [[SRC:%.*]], -1
  // CHECK-NEXT: %{{.*}} = and i64 [[DEST]], [[SRC]]
  return _blsr_u64(__X);
}

unsigned long long test_tzcnt_u64(unsigned long long __X) {
  // CHECK: @llvm.cttz.i64
  return _tzcnt_u64(__X);
}