@@ -10,10 +10,11 @@ body: |
     liveins: $w0
     ; CHECK-LABEL: name: s32_legal
     ; CHECK: liveins: $w0
-    ; CHECK: %copy:_(s32) = COPY $w0
-    ; CHECK: %bitreverse:_(s32) = G_BITREVERSE %copy
-    ; CHECK: $w0 = COPY %bitreverse(s32)
-    ; CHECK: RET_ReallyLR implicit $w0
+    ; CHECK-NEXT: {{ $}}
+    ; CHECK-NEXT: %copy:_(s32) = COPY $w0
+    ; CHECK-NEXT: %bitreverse:_(s32) = G_BITREVERSE %copy
+    ; CHECK-NEXT: $w0 = COPY %bitreverse(s32)
+    ; CHECK-NEXT: RET_ReallyLR implicit $w0
     %copy:_(s32) = COPY $w0
     %bitreverse:_(s32) = G_BITREVERSE %copy
     $w0 = COPY %bitreverse
@@ -27,10 +28,11 @@ body: |
     liveins: $x0
     ; CHECK-LABEL: name: s64_legal
     ; CHECK: liveins: $x0
-    ; CHECK: %copy:_(s64) = COPY $x0
-    ; CHECK: %bitreverse:_(s64) = G_BITREVERSE %copy
-    ; CHECK: $x0 = COPY %bitreverse(s64)
-    ; CHECK: RET_ReallyLR implicit $x0
+    ; CHECK-NEXT: {{ $}}
+    ; CHECK-NEXT: %copy:_(s64) = COPY $x0
+    ; CHECK-NEXT: %bitreverse:_(s64) = G_BITREVERSE %copy
+    ; CHECK-NEXT: $x0 = COPY %bitreverse(s64)
+    ; CHECK-NEXT: RET_ReallyLR implicit $x0
     %copy:_(s64) = COPY $x0
     %bitreverse:_(s64) = G_BITREVERSE %copy
     $x0 = COPY %bitreverse
@@ -44,10 +46,11 @@ body: |
     liveins: $x0
     ; CHECK-LABEL: name: v8s8_legal
     ; CHECK: liveins: $x0
-    ; CHECK: %vec:_(<8 x s8>) = G_IMPLICIT_DEF
-    ; CHECK: %bitreverse:_(<8 x s8>) = G_BITREVERSE %vec
-    ; CHECK: $x0 = COPY %bitreverse(<8 x s8>)
-    ; CHECK: RET_ReallyLR implicit $x0
+    ; CHECK-NEXT: {{ $}}
+    ; CHECK-NEXT: %vec:_(<8 x s8>) = G_IMPLICIT_DEF
+    ; CHECK-NEXT: %bitreverse:_(<8 x s8>) = G_BITREVERSE %vec
+    ; CHECK-NEXT: $x0 = COPY %bitreverse(<8 x s8>)
+    ; CHECK-NEXT: RET_ReallyLR implicit $x0
     %vec:_(<8 x s8>) = G_IMPLICIT_DEF
     %bitreverse:_(<8 x s8>) = G_BITREVERSE %vec
     $x0 = COPY %bitreverse
@@ -61,10 +64,11 @@ body: |
     liveins: $q0
     ; CHECK-LABEL: name: v16s8_legal
     ; CHECK: liveins: $q0
-    ; CHECK: %vec:_(<16 x s8>) = G_IMPLICIT_DEF
-    ; CHECK: %bitreverse:_(<16 x s8>) = G_BITREVERSE %vec
-    ; CHECK: $q0 = COPY %bitreverse(<16 x s8>)
-    ; CHECK: RET_ReallyLR implicit $q0
+    ; CHECK-NEXT: {{ $}}
+    ; CHECK-NEXT: %vec:_(<16 x s8>) = G_IMPLICIT_DEF
+    ; CHECK-NEXT: %bitreverse:_(<16 x s8>) = G_BITREVERSE %vec
+    ; CHECK-NEXT: $q0 = COPY %bitreverse(<16 x s8>)
+    ; CHECK-NEXT: RET_ReallyLR implicit $q0
     %vec:_(<16 x s8>) = G_IMPLICIT_DEF
     %bitreverse:_(<16 x s8>) = G_BITREVERSE %vec
     $q0 = COPY %bitreverse
@@ -78,14 +82,15 @@ body: |
     liveins: $b0
     ; CHECK-LABEL: name: s8_widen
     ; CHECK: liveins: $b0
-    ; CHECK: %copy:_(s8) = COPY $b0
-    ; CHECK: [[ANYEXT:%[0-9]+]]:_(s32) = G_ANYEXT %copy(s8)
-    ; CHECK: [[BITREVERSE:%[0-9]+]]:_(s32) = G_BITREVERSE [[ANYEXT]]
-    ; CHECK: [[C:%[0-9]+]]:_(s64) = G_CONSTANT i64 24
-    ; CHECK: [[LSHR:%[0-9]+]]:_(s32) = G_LSHR [[BITREVERSE]], [[C]](s64)
-    ; CHECK: %bitreverse:_(s8) = G_TRUNC [[LSHR]](s32)
-    ; CHECK: $b0 = COPY %bitreverse(s8)
-    ; CHECK: RET_ReallyLR implicit $b0
+    ; CHECK-NEXT: {{ $}}
+    ; CHECK-NEXT: %copy:_(s8) = COPY $b0
+    ; CHECK-NEXT: [[ANYEXT:%[0-9]+]]:_(s32) = G_ANYEXT %copy(s8)
+    ; CHECK-NEXT: [[BITREVERSE:%[0-9]+]]:_(s32) = G_BITREVERSE [[ANYEXT]]
+    ; CHECK-NEXT: [[C:%[0-9]+]]:_(s64) = G_CONSTANT i64 24
+    ; CHECK-NEXT: [[LSHR:%[0-9]+]]:_(s32) = G_LSHR [[BITREVERSE]], [[C]](s64)
+    ; CHECK-NEXT: %bitreverse:_(s8) = G_TRUNC [[LSHR]](s32)
+    ; CHECK-NEXT: $b0 = COPY %bitreverse(s8)
+    ; CHECK-NEXT: RET_ReallyLR implicit $b0
     %copy:_(s8) = COPY $b0
     %bitreverse:_(s8) = G_BITREVERSE %copy
     $b0 = COPY %bitreverse
@@ -99,14 +104,15 @@ body: |
     liveins: $b0
     ; CHECK-LABEL: name: s3_widen
     ; CHECK: liveins: $b0
-    ; CHECK: %copy:_(s8) = COPY $b0
-    ; CHECK: [[ANYEXT:%[0-9]+]]:_(s32) = G_ANYEXT %copy(s8)
-    ; CHECK: [[BITREVERSE:%[0-9]+]]:_(s32) = G_BITREVERSE [[ANYEXT]]
-    ; CHECK: [[C:%[0-9]+]]:_(s64) = G_CONSTANT i64 29
-    ; CHECK: [[LSHR:%[0-9]+]]:_(s32) = G_LSHR [[BITREVERSE]], [[C]](s64)
-    ; CHECK: %ext:_(s8) = G_TRUNC [[LSHR]](s32)
-    ; CHECK: $b0 = COPY %ext(s8)
-    ; CHECK: RET_ReallyLR implicit $b0
+    ; CHECK-NEXT: {{ $}}
+    ; CHECK-NEXT: %copy:_(s8) = COPY $b0
+    ; CHECK-NEXT: [[ANYEXT:%[0-9]+]]:_(s32) = G_ANYEXT %copy(s8)
+    ; CHECK-NEXT: [[BITREVERSE:%[0-9]+]]:_(s32) = G_BITREVERSE [[ANYEXT]]
+    ; CHECK-NEXT: [[C:%[0-9]+]]:_(s64) = G_CONSTANT i64 29
+    ; CHECK-NEXT: [[LSHR:%[0-9]+]]:_(s32) = G_LSHR [[BITREVERSE]], [[C]](s64)
+    ; CHECK-NEXT: %ext:_(s8) = G_TRUNC [[LSHR]](s32)
+    ; CHECK-NEXT: $b0 = COPY %ext(s8)
+    ; CHECK-NEXT: RET_ReallyLR implicit $b0
     %copy:_(s8) = COPY $b0
     %trunc:_(s3) = G_TRUNC %copy
     %bitreverse:_(s3) = G_BITREVERSE %trunc
@@ -122,14 +128,61 @@ body: |
     liveins: $q0
     ; CHECK-LABEL: name: s128_narrow
     ; CHECK: liveins: $q0
-    ; CHECK: %copy:_(s128) = COPY $q0
-    ; CHECK: [[UV:%[0-9]+]]:_(s64), [[UV1:%[0-9]+]]:_(s64) = G_UNMERGE_VALUES %copy(s128)
-    ; CHECK: [[BITREVERSE:%[0-9]+]]:_(s64) = G_BITREVERSE [[UV1]]
-    ; CHECK: [[BITREVERSE1:%[0-9]+]]:_(s64) = G_BITREVERSE [[UV]]
-    ; CHECK: %bitreverse:_(s128) = G_MERGE_VALUES [[BITREVERSE]](s64), [[BITREVERSE1]](s64)
-    ; CHECK: $q0 = COPY %bitreverse(s128)
-    ; CHECK: RET_ReallyLR implicit $q0
+    ; CHECK-NEXT: {{ $}}
+    ; CHECK-NEXT: %copy:_(s128) = COPY $q0
+    ; CHECK-NEXT: [[UV:%[0-9]+]]:_(s64), [[UV1:%[0-9]+]]:_(s64) = G_UNMERGE_VALUES %copy(s128)
+    ; CHECK-NEXT: [[BITREVERSE:%[0-9]+]]:_(s64) = G_BITREVERSE [[UV1]]
+    ; CHECK-NEXT: [[BITREVERSE1:%[0-9]+]]:_(s64) = G_BITREVERSE [[UV]]
+    ; CHECK-NEXT: %bitreverse:_(s128) = G_MERGE_VALUES [[BITREVERSE]](s64), [[BITREVERSE1]](s64)
+    ; CHECK-NEXT: $q0 = COPY %bitreverse(s128)
+    ; CHECK-NEXT: RET_ReallyLR implicit $q0
     %copy:_(s128) = COPY $q0
     %bitreverse:_(s128) = G_BITREVERSE %copy
     $q0 = COPY %bitreverse
     RET_ReallyLR implicit $q0
+...
+---
+name: v4s16
+tracksRegLiveness: true
+body: |
+  bb.0:
+    liveins: $d0
+    ; CHECK-LABEL: name: v4s16
+    ; CHECK: liveins: $d0
+    ; CHECK-NEXT: {{ $}}
+    ; CHECK-NEXT: %vec:_(<4 x s16>) = COPY $d0
+    ; CHECK-NEXT: [[BSWAP:%[0-9]+]]:_(<4 x s16>) = G_BSWAP %vec
+    ; CHECK-NEXT: [[C:%[0-9]+]]:_(s16) = G_CONSTANT i16 4
+    ; CHECK-NEXT: [[BUILD_VECTOR:%[0-9]+]]:_(<4 x s16>) = G_BUILD_VECTOR [[C]](s16), [[C]](s16), [[C]](s16), [[C]](s16)
+    ; CHECK-NEXT: [[C1:%[0-9]+]]:_(s16) = G_CONSTANT i16 -3856
+    ; CHECK-NEXT: [[BUILD_VECTOR1:%[0-9]+]]:_(<4 x s16>) = G_BUILD_VECTOR [[C1]](s16), [[C1]](s16), [[C1]](s16), [[C1]](s16)
+    ; CHECK-NEXT: [[AND:%[0-9]+]]:_(<4 x s16>) = G_AND [[BSWAP]], [[BUILD_VECTOR1]]
+    ; CHECK-NEXT: [[LSHR:%[0-9]+]]:_(<4 x s16>) = G_LSHR [[AND]], [[BUILD_VECTOR]](<4 x s16>)
+    ; CHECK-NEXT: [[SHL:%[0-9]+]]:_(<4 x s16>) = G_SHL [[BSWAP]], [[BUILD_VECTOR]](<4 x s16>)
+    ; CHECK-NEXT: [[AND1:%[0-9]+]]:_(<4 x s16>) = G_AND [[SHL]], [[BUILD_VECTOR1]]
+    ; CHECK-NEXT: [[OR:%[0-9]+]]:_(<4 x s16>) = G_OR [[LSHR]], [[AND1]]
+    ; CHECK-NEXT: [[C2:%[0-9]+]]:_(s16) = G_CONSTANT i16 2
+    ; CHECK-NEXT: [[BUILD_VECTOR2:%[0-9]+]]:_(<4 x s16>) = G_BUILD_VECTOR [[C2]](s16), [[C2]](s16), [[C2]](s16), [[C2]](s16)
+    ; CHECK-NEXT: [[C3:%[0-9]+]]:_(s16) = G_CONSTANT i16 -13108
+    ; CHECK-NEXT: [[BUILD_VECTOR3:%[0-9]+]]:_(<4 x s16>) = G_BUILD_VECTOR [[C3]](s16), [[C3]](s16), [[C3]](s16), [[C3]](s16)
+    ; CHECK-NEXT: [[AND2:%[0-9]+]]:_(<4 x s16>) = G_AND [[OR]], [[BUILD_VECTOR3]]
+    ; CHECK-NEXT: [[LSHR1:%[0-9]+]]:_(<4 x s16>) = G_LSHR [[AND2]], [[BUILD_VECTOR2]](<4 x s16>)
+    ; CHECK-NEXT: [[SHL1:%[0-9]+]]:_(<4 x s16>) = G_SHL [[OR]], [[BUILD_VECTOR2]](<4 x s16>)
+    ; CHECK-NEXT: [[AND3:%[0-9]+]]:_(<4 x s16>) = G_AND [[SHL1]], [[BUILD_VECTOR3]]
+    ; CHECK-NEXT: [[OR1:%[0-9]+]]:_(<4 x s16>) = G_OR [[LSHR1]], [[AND3]]
+    ; CHECK-NEXT: [[C4:%[0-9]+]]:_(s16) = G_CONSTANT i16 1
+    ; CHECK-NEXT: [[BUILD_VECTOR4:%[0-9]+]]:_(<4 x s16>) = G_BUILD_VECTOR [[C4]](s16), [[C4]](s16), [[C4]](s16), [[C4]](s16)
+    ; CHECK-NEXT: [[C5:%[0-9]+]]:_(s16) = G_CONSTANT i16 -21846
+    ; CHECK-NEXT: [[BUILD_VECTOR5:%[0-9]+]]:_(<4 x s16>) = G_BUILD_VECTOR [[C5]](s16), [[C5]](s16), [[C5]](s16), [[C5]](s16)
+    ; CHECK-NEXT: [[AND4:%[0-9]+]]:_(<4 x s16>) = G_AND [[OR1]], [[BUILD_VECTOR5]]
+    ; CHECK-NEXT: [[LSHR2:%[0-9]+]]:_(<4 x s16>) = G_LSHR [[AND4]], [[BUILD_VECTOR4]](<4 x s16>)
+    ; CHECK-NEXT: [[SHL2:%[0-9]+]]:_(<4 x s16>) = G_SHL [[OR1]], [[BUILD_VECTOR4]](<4 x s16>)
+    ; CHECK-NEXT: [[AND5:%[0-9]+]]:_(<4 x s16>) = G_AND [[SHL2]], [[BUILD_VECTOR5]]
+    ; CHECK-NEXT: %bitreverse:_(<4 x s16>) = G_OR [[LSHR2]], [[AND5]]
+    ; CHECK-NEXT: $d0 = COPY %bitreverse(<4 x s16>)
+    ; CHECK-NEXT: RET_ReallyLR implicit $q0
+    %vec:_(<4 x s16>) = COPY $d0
+    %bitreverse:_(<4 x s16>) = G_BITREVERSE %vec
+    $d0 = COPY %bitreverse
+    RET_ReallyLR implicit $q0
+...