Lines Matching full:sub2
19 ; CHECK: %2.sub2:sgpr_128 = S_MOV_B32 -1
22 ; CHECK: undef %3.sub0:sgpr_128 = COPY [[S_LOAD_DWORDX4_IMM]].sub2
24 ; CHECK: %3.sub2:sgpr_128 = COPY %2.sub2
32 ; CHECK: undef %47.sub2:vreg_128 = V_LSHRREV_B32_e32 16, [[BUFFER_LOAD_DWORDX4_OFFSET]].sub1, implicit $exec
34 ; CHECK: undef %52.sub2:vreg_128 = V_LSHRREV_B32_e32 16, [[BUFFER_LOAD_DWORDX4_OFFSET]].sub0, implicit $exec
36 ; CHECK: undef %57.sub2:vreg_128 = V_LSHRREV_B32_e32 16, [[BUFFER_LOAD_DWORDX4_OFFSET]].sub3, implicit $exec
38 ; CHECK: undef %62.sub2:vreg_128 = V_LSHRREV_B32_e32 16, [[BUFFER_LOAD_DWORDX4_OFFSET]].sub2, implicit $exec
40 ; CHECK: undef %67.sub2:vreg_128 = V_LSHRREV_B32_e32 16, [[BUFFER_LOAD_DWORDX4_OFFSET1]].sub1, implicit $exec
41 ; CHECK: undef %71.sub2:vreg_128 = V_LSHRREV_B32_e32 16, [[BUFFER_LOAD_DWORDX4_OFFSET1]].sub0, implicit $exec
43 ; CHECK: undef %76.sub2:vreg_128 = V_LSHRREV_B32_e32 16, [[BUFFER_LOAD_DWORDX4_OFFSET1]].sub3, implicit $exec
45 ; CHECK: undef %81.sub2:vreg_128 = V_LSHRREV_B32_e32 16, [[BUFFER_LOAD_DWORDX4_OFFSET1]].sub2, implicit $exec
47 ; CHECK: undef %86.sub2:vreg_128 = V_LSHRREV_B32_e32 16, [[BUFFER_LOAD_DWORDX4_OFFSET2]].sub1, implicit $exec
48 ; CHECK: undef %90.sub2:vreg_128 = V_LSHRREV_B32_e32 16, [[BUFFER_LOAD_DWORDX4_OFFSET2]].sub0, implicit $exec
50 ; CHECK: undef %95.sub2:vreg_128 = V_LSHRREV_B32_e32 16, [[BUFFER_LOAD_DWORDX4_OFFSET2]].sub3, implicit $exec
52 ; CHECK: undef %100.sub2:vreg_128 = V_LSHRREV_B32_e32 16, [[BUFFER_LOAD_DWORDX4_OFFSET2]].sub2, implicit $exec
54 ; CHECK: undef %105.sub2:vreg_128 = V_LSHRREV_B32_e32 16, [[BUFFER_LOAD_DWORDX4_OFFSET3]].sub1, implicit $exec
55 ; CHECK: undef %109.sub2:vreg_128 = V_LSHRREV_B32_e32 16, [[BUFFER_LOAD_DWORDX4_OFFSET3]].sub0, implicit $exec
56 ; CHECK: undef %113.sub2:vreg_128 = V_LSHRREV_B32_e32 16, [[BUFFER_LOAD_DWORDX4_OFFSET3]].sub3, implicit $exec
57 ; CHECK: undef %117.sub2:vreg_128 = V_LSHRREV_B32_e32 16, [[BUFFER_LOAD_DWORDX4_OFFSET3]].sub2, implicit $exec
60 ; CHECK: undef %122.sub2:vreg_128 = V_LSHRREV_B32_e32 16, [[BUFFER_LOAD_DWORDX4_OFFSET4]].sub1, implicit $exec
61 ; CHECK: undef %126.sub2:vreg_128 = V_LSHRREV_B32_e32 16, [[BUFFER_LOAD_DWORDX4_OFFSET4]].sub0, implicit $exec
62 ; CHECK: undef %130.sub2:vreg_128 = V_LSHRREV_B32_e32 16, [[BUFFER_LOAD_DWORDX4_OFFSET4]].sub3, implicit $exec
64 ; CHECK: undef %135.sub2:vreg_128 = V_LSHRREV_B32_e32 16, [[BUFFER_LOAD_DWORDX4_OFFSET4]].sub2, implicit $exec
67 ; CHECK: undef %140.sub2:vreg_128 = V_LSHRREV_B32_e32 16, [[BUFFER_LOAD_DWORDX4_OFFSET5]].sub1, implicit $exec
68 ; CHECK: undef %144.sub2:vreg_128 = V_LSHRREV_B32_e32 16, [[BUFFER_LOAD_DWORDX4_OFFSET5]].sub0, implicit $exec
70 ; CHECK: undef %149.sub2:vreg_128 = V_LSHRREV_B32_e32 16, [[BUFFER_LOAD_DWORDX4_OFFSET5]].sub3, implicit $exec
72 ; CHECK: undef %154.sub2:vreg_128 = V_LSHRREV_B32_e32 16, [[BUFFER_LOAD_DWORDX4_OFFSET5]].sub2, implicit $exec
74 ; CHECK: undef %158.sub2:vreg_128 = V_LSHRREV_B32_e32 16, [[BUFFER_LOAD_DWORDX4_OFFSET6]].sub1, implicit $exec
75 ; CHECK: undef %36.sub2:vreg_128 = V_LSHRREV_B32_e32 16, [[BUFFER_LOAD_DWORDX4_OFFSET6]].sub0, implicit $exec
76 ; CHECK: undef %37.sub2:vreg_128 = V_LSHRREV_B32_e32 16, [[BUFFER_LOAD_DWORDX4_OFFSET6]].sub3, implicit $exec
77 ; CHECK: undef %38.sub2:vreg_128 = V_LSHRREV_B32_e32 16, [[BUFFER_LOAD_DWORDX4_OFFSET6]].sub2, implicit $exec
79 ; CHECK: undef %40.sub2:vreg_128 = V_LSHRREV_B32_e32 16, [[BUFFER_LOAD_DWORDX4_OFFSET7]].sub1, implicit $exec
80 ; CHECK: undef %41.sub2:vreg_128 = V_LSHRREV_B32_e32 16, [[BUFFER_LOAD_DWORDX4_OFFSET7]].sub0, implicit $exec
81 ; CHECK: undef %42.sub2:vreg_128 = V_LSHRREV_B32_e32 16, [[BUFFER_LOAD_DWORDX4_OFFSET7]].sub3, implicit $exec
82 ; CHECK: undef %43.sub2:vreg_128 = V_LSHRREV_B32_e32 16, [[BUFFER_LOAD_DWORDX4_OFFSET7]].sub2, implicit $exec
94 …].sub0:vreg_128 = V_AND_B32_e32 [[S_MOV_B32_]], [[BUFFER_LOAD_DWORDX4_OFFSET]].sub2, implicit $exec
96 ; CHECK: undef %68.sub2:vreg_128 = COPY %67.sub2
105 ….sub0:vreg_128 = V_AND_B32_e32 [[S_MOV_B32_]], [[BUFFER_LOAD_DWORDX4_OFFSET1]].sub2, implicit $exec
107 ; CHECK: undef %87.sub2:vreg_128 = COPY %86.sub2
116 ….sub0:vreg_128 = V_AND_B32_e32 [[S_MOV_B32_]], [[BUFFER_LOAD_DWORDX4_OFFSET2]].sub2, implicit $exec
118 ; CHECK: undef %106.sub2:vreg_128 = COPY %105.sub2
120 ; CHECK: undef %110.sub2:vreg_128 = COPY %109.sub2
122 ; CHECK: undef %114.sub2:vreg_128 = COPY %113.sub2
125 ….sub0:vreg_128 = V_AND_B32_e32 [[S_MOV_B32_]], [[BUFFER_LOAD_DWORDX4_OFFSET3]].sub2, implicit $exec
127 ; CHECK: undef %123.sub2:vreg_128 = COPY %122.sub2
129 ; CHECK: undef %127.sub2:vreg_128 = COPY %126.sub2
135 ….sub0:vreg_128 = V_AND_B32_e32 [[S_MOV_B32_]], [[BUFFER_LOAD_DWORDX4_OFFSET4]].sub2, implicit $exec
137 ; CHECK: undef %141.sub2:vreg_128 = COPY %140.sub2
145 ; CHECK: undef %155.sub2:vreg_128 = COPY %154.sub2
146 ….sub0:vreg_128 = V_AND_B32_e32 [[S_MOV_B32_]], [[BUFFER_LOAD_DWORDX4_OFFSET5]].sub2, implicit $exec
147 ; CHECK: undef %159.sub2:vreg_128 = COPY %158.sub2
151 ; CHECK: %38.sub0:vreg_128 = V_AND_B32_e32 [[S_MOV_B32_]], [[BUFFER_LOAD_DWORDX4_OFFSET6]].sub2, implicit $exec
155 ; CHECK: %43.sub0:vreg_128 = V_AND_B32_e32 [[S_MOV_B32_]], [[BUFFER_LOAD_DWORDX4_OFFSET7]].sub2, implicit $exec
178 ; CHECK: internal %157.sub2:vreg_128 = COPY %159.sub2
184 ; CHECK: internal %153.sub2:vreg_128 = COPY %155.sub2
191 ; CHECK: internal %148.sub2:vreg_128 = COPY [[SI_SPILL_V128_RESTORE15]].sub2
198 ; CHECK: internal %143.sub2:vreg_128 = COPY [[SI_SPILL_V128_RESTORE16]].sub2
204 ; CHECK: internal %139.sub2:vreg_128 = COPY %141.sub2
211 ; CHECK: internal %134.sub2:vreg_128 = COPY [[SI_SPILL_V128_RESTORE17]].sub2
218 ; CHECK: internal %129.sub2:vreg_128 = COPY [[SI_SPILL_V128_RESTORE18]].sub2
224 ; CHECK: internal %125.sub2:vreg_128 = COPY %127.sub2
230 ; CHECK: internal %121.sub2:vreg_128 = COPY %123.sub2
237 ; CHECK: internal %116.sub2:vreg_128 = COPY [[SI_SPILL_V128_RESTORE19]].sub2
243 ; CHECK: internal %112.sub2:vreg_128 = COPY %114.sub2
249 ; CHECK: internal %108.sub2:vreg_128 = COPY %110.sub2
255 ; CHECK: internal %104.sub2:vreg_128 = COPY %106.sub2
262 ; CHECK: internal %99.sub2:vreg_128 = COPY [[SI_SPILL_V128_RESTORE20]].sub2
269 ; CHECK: internal %94.sub2:vreg_128 = COPY [[SI_SPILL_V128_RESTORE21]].sub2
276 ; CHECK: internal %89.sub2:vreg_128 = COPY [[SI_SPILL_V128_RESTORE22]].sub2
282 ; CHECK: internal %85.sub2:vreg_128 = COPY %87.sub2
289 ; CHECK: internal %80.sub2:vreg_128 = COPY [[SI_SPILL_V128_RESTORE23]].sub2
296 ; CHECK: internal %75.sub2:vreg_128 = COPY [[SI_SPILL_V128_RESTORE24]].sub2
303 ; CHECK: internal %70.sub2:vreg_128 = COPY [[SI_SPILL_V128_RESTORE25]].sub2
309 ; CHECK: internal %66.sub2:vreg_128 = COPY %68.sub2
316 ; CHECK: internal %61.sub2:vreg_128 = COPY [[SI_SPILL_V128_RESTORE26]].sub2
323 ; CHECK: internal %56.sub2:vreg_128 = COPY [[SI_SPILL_V128_RESTORE27]].sub2
330 ; CHECK: internal %51.sub2:vreg_128 = COPY [[SI_SPILL_V128_RESTORE28]].sub2
337 ; CHECK: internal %46.sub2:vreg_128 = COPY [[SI_SPILL_V128_RESTORE29]].sub2
346 %2.sub2:sgpr_128 = S_MOV_B32 -1
349 undef %3.sub0:sgpr_128 = COPY %1.sub2
351 %3.sub2:sgpr_128 = COPY %2.sub2
359 undef %8.sub2:vreg_128 = V_LSHRREV_B32_e32 16, %7.sub1, implicit $exec
360 undef %9.sub2:vreg_128 = V_LSHRREV_B32_e32 16, %7.sub0, implicit $exec
361 undef %10.sub2:vreg_128 = V_LSHRREV_B32_e32 16, %7.sub3, implicit $exec
362 undef %11.sub2:vreg_128 = V_LSHRREV_B32_e32 16, %7.sub2, implicit $exec
363 undef %12.sub2:vreg_128 = V_LSHRREV_B32_e32 16, %5.sub1, implicit $exec
364 undef %13.sub2:vreg_128 = V_LSHRREV_B32_e32 16, %5.sub0, implicit $exec
365 undef %14.sub2:vreg_128 = V_LSHRREV_B32_e32 16, %5.sub3, implicit $exec
366 undef %15.sub2:vreg_128 = V_LSHRREV_B32_e32 16, %5.sub2, implicit $exec
367 undef %16.sub2:vreg_128 = V_LSHRREV_B32_e32 16, %4.sub1, implicit $exec
368 undef %17.sub2:vreg_128 = V_LSHRREV_B32_e32 16, %4.sub0, implicit $exec
369 undef %18.sub2:vreg_128 = V_LSHRREV_B32_e32 16, %4.sub3, implicit $exec
370 undef %19.sub2:vreg_128 = V_LSHRREV_B32_e32 16, %4.sub2, implicit $exec
371 undef %20.sub2:vreg_128 = V_LSHRREV_B32_e32 16, %6.sub1, implicit $exec
372 undef %21.sub2:vreg_128 = V_LSHRREV_B32_e32 16, %6.sub0, implicit $exec
373 undef %22.sub2:vreg_128 = V_LSHRREV_B32_e32 16, %6.sub3, implicit $exec
374 undef %23.sub2:vreg_128 = V_LSHRREV_B32_e32 16, %6.sub2, implicit $exec
376 undef %25.sub2:vreg_128 = V_LSHRREV_B32_e32 16, %24.sub1, implicit $exec
377 undef %26.sub2:vreg_128 = V_LSHRREV_B32_e32 16, %24.sub0, implicit $exec
378 undef %27.sub2:vreg_128 = V_LSHRREV_B32_e32 16, %24.sub3, implicit $exec
379 undef %28.sub2:vreg_128 = V_LSHRREV_B32_e32 16, %24.sub2, implicit $exec
381 undef %30.sub2:vreg_128 = V_LSHRREV_B32_e32 16, %29.sub1, implicit $exec
382 undef %31.sub2:vreg_128 = V_LSHRREV_B32_e32 16, %29.sub0, implicit $exec
383 undef %32.sub2:vreg_128 = V_LSHRREV_B32_e32 16, %29.sub3, implicit $exec
384 undef %33.sub2:vreg_128 = V_LSHRREV_B32_e32 16, %29.sub2, implicit $exec
386 undef %35.sub2:vreg_128 = V_LSHRREV_B32_e32 16, %34.sub1, implicit $exec
387 undef %36.sub2:vreg_128 = V_LSHRREV_B32_e32 16, %34.sub0, implicit $exec
388 undef %37.sub2:vreg_128 = V_LSHRREV_B32_e32 16, %34.sub3, implicit $exec
389 undef %38.sub2:vreg_128 = V_LSHRREV_B32_e32 16, %34.sub2, implicit $exec
391 undef %40.sub2:vreg_128 = V_LSHRREV_B32_e32 16, %39.sub1, implicit $exec
392 undef %41.sub2:vreg_128 = V_LSHRREV_B32_e32 16, %39.sub0, implicit $exec
393 undef %42.sub2:vreg_128 = V_LSHRREV_B32_e32 16, %39.sub3, implicit $exec
394 undef %43.sub2:vreg_128 = V_LSHRREV_B32_e32 16, %39.sub2, implicit $exec
399 %11.sub0:vreg_128 = V_AND_B32_e32 %44, %7.sub2, implicit $exec
403 %15.sub0:vreg_128 = V_AND_B32_e32 %44, %5.sub2, implicit $exec
407 %19.sub0:vreg_128 = V_AND_B32_e32 %44, %4.sub2, implicit $exec
411 %23.sub0:vreg_128 = V_AND_B32_e32 %44, %6.sub2, implicit $exec
415 %28.sub0:vreg_128 = V_AND_B32_e32 %44, %24.sub2, implicit $exec
419 %33.sub0:vreg_128 = V_AND_B32_e32 %44, %29.sub2, implicit $exec
423 %38.sub0:vreg_128 = V_AND_B32_e32 %44, %34.sub2, implicit $exec
427 %43.sub0:vreg_128 = V_AND_B32_e32 %44, %39.sub2, implicit $exec