Lines Matching refs:dst_base
3105 uintptr_t dst_base = reinterpret_cast<uintptr_t>(dst); in TEST() local
3109 __ Mov(x18, dst_base); in TEST()
3135 ASSERT_EQUAL_64(dst_base, x18); in TEST()
3147 uintptr_t dst_base = reinterpret_cast<uintptr_t>(dst); in TEST() local
3156 __ Mov(x23, dst_base); in TEST()
3158 __ Mov(x25, dst_base); in TEST()
3160 __ Mov(x27, dst_base); in TEST()
3175 ASSERT_EQUAL_64(dst_base, x23); in TEST()
3179 ASSERT_EQUAL_64(dst_base + 4096 * sizeof(dst[0]), x25); in TEST()
3183 ASSERT_EQUAL_64(dst_base + 6144 * sizeof(dst[0]), x27); in TEST()
3195 uintptr_t dst_base = reinterpret_cast<uintptr_t>(dst); in TEST() local
3199 __ Mov(x18, dst_base); in TEST()
3201 __ Mov(x20, dst_base); in TEST()
3203 __ Mov(x22, dst_base + 40); in TEST()
3205 __ Mov(x24, dst_base); in TEST()
3207 __ Mov(x26, dst_base); in TEST()
3233 ASSERT_EQUAL_64(dst_base + 12, x18); in TEST()
3235 ASSERT_EQUAL_64(dst_base + 16, x20); in TEST()
3237 ASSERT_EQUAL_64(dst_base + 36, x22); in TEST()
3239 ASSERT_EQUAL_64(dst_base + 25, x24); in TEST()
3241 ASSERT_EQUAL_64(dst_base + 41, x26); in TEST()
3253 uintptr_t dst_base = reinterpret_cast<uintptr_t>(dst); in TEST() local
3257 __ Mov(x18, dst_base + 12); in TEST()
3259 __ Mov(x20, dst_base + 16); in TEST()
3261 __ Mov(x22, dst_base + 32); in TEST()
3263 __ Mov(x24, dst_base + 25); in TEST()
3265 __ Mov(x26, dst_base + 41); in TEST()
3291 ASSERT_EQUAL_64(dst_base + 24, x18); in TEST()
3293 ASSERT_EQUAL_64(dst_base + 32, x20); in TEST()
3295 ASSERT_EQUAL_64(dst_base, x22); in TEST()
3297 ASSERT_EQUAL_64(dst_base + 30, x24); in TEST()
3299 ASSERT_EQUAL_64(dst_base, x26); in TEST()
3393 uintptr_t dst_base = reinterpret_cast<uintptr_t>(dst); in TEST() local
3397 __ Mov(x17, dst_base); in TEST()
3399 __ Mov(x19, dst_base + 3 * sizeof(dst[0])); in TEST()
3400 __ Mov(x20, dst_base + 4 * sizeof(dst[0])); in TEST()
3440 uintptr_t dst_base = reinterpret_cast<uintptr_t>(dst); in TEST() local
3444 __ Mov(x18, dst_base); in TEST()
3446 __ Mov(x20, dst_base); in TEST()
3448 __ Mov(x22, dst_base); in TEST()
3466 ASSERT_EQUAL_64(dst_base + sizeof(dst[0]), x18); in TEST()
3468 ASSERT_EQUAL_64(dst_base + 2 * sizeof(dst[0]), x20); in TEST()
3470 ASSERT_EQUAL_64(dst_base, x22); in TEST()
3482 uintptr_t dst_base = reinterpret_cast<uintptr_t>(dst); in TEST() local
3486 __ Mov(x18, dst_base); in TEST()
3488 __ Mov(x20, dst_base); in TEST()
3490 __ Mov(x22, dst_base); in TEST()
3508 ASSERT_EQUAL_64(dst_base + sizeof(dst[0]), x18); in TEST()
3510 ASSERT_EQUAL_64(dst_base + 2 * sizeof(dst[0]), x20); in TEST()
3512 ASSERT_EQUAL_64(dst_base, x22); in TEST()
3524 uintptr_t dst_base = reinterpret_cast<uintptr_t>(dst); in TEST() local
3528 __ Mov(x18, dst_base); in TEST()
3530 __ Mov(x20, dst_base); in TEST()
3532 __ Mov(x22, dst_base); in TEST()
3550 ASSERT_EQUAL_64(dst_base + sizeof(dst[0]), x18); in TEST()
3552 ASSERT_EQUAL_64(dst_base + 2 * sizeof(dst[0]), x20); in TEST()
3554 ASSERT_EQUAL_64(dst_base, x22); in TEST()
3566 uintptr_t dst_base = reinterpret_cast<uintptr_t>(dst); in TEST() local
3570 __ Mov(x18, dst_base); in TEST()
3572 __ Mov(x20, dst_base); in TEST()
3574 __ Mov(x22, dst_base); in TEST()
3592 ASSERT_EQUAL_64(dst_base + sizeof(dst[0]), x18); in TEST()
3594 ASSERT_EQUAL_64(dst_base + 2 * sizeof(dst[0]), x20); in TEST()
3596 ASSERT_EQUAL_64(dst_base, x22); in TEST()
3613 uintptr_t dst_base = reinterpret_cast<uintptr_t>(dst); in TEST() local
3617 __ Mov(x18, dst_base); in TEST()
3619 __ Mov(x20, dst_base); in TEST()
3621 __ Mov(x22, dst_base); in TEST()
3642 ASSERT_EQUAL_64(dst_base + 16, x18); in TEST()
3644 ASSERT_EQUAL_64(dst_base + 32, x20); in TEST()
3646 ASSERT_EQUAL_64(dst_base, x22); in TEST()
3663 uintptr_t dst_base = reinterpret_cast<uintptr_t>(dst); in TEST() local
3669 __ Mov(x20, dst_base - 1); in TEST()
5642 uintptr_t dst_base = reinterpret_cast<uintptr_t>(dst); in TEST() local
5645 __ Mov(x17, dst_base); in TEST()
5646 __ Mov(x18, dst_base); in TEST()
5735 uintptr_t dst_base = reinterpret_cast<uintptr_t>(dst); in TEST() local
5738 __ Mov(x17, dst_base); in TEST()
5739 __ Mov(x18, dst_base); in TEST()
5834 uintptr_t dst_base = reinterpret_cast<uintptr_t>(dst); in TEST() local
5837 __ Mov(x17, dst_base); in TEST()
5838 __ Mov(x18, dst_base); in TEST()
7116 uintptr_t dst_base = reinterpret_cast<uintptr_t>(dst); in TEST() local
7120 __ Mov(x17, dst_base); in TEST()
7133 ASSERT_EQUAL_64(dst_base + sizeof(dst[1]), x17); in TEST()
7145 uintptr_t dst_base = reinterpret_cast<uintptr_t>(dst); in TEST() local
7149 __ Mov(x17, dst_base); in TEST()
7162 ASSERT_EQUAL_64(dst_base + sizeof(dst[1]), x17); in TEST()
7177 uintptr_t dst_base = reinterpret_cast<uintptr_t>(dst); in TEST() local
7181 __ Mov(x17, dst_base); in TEST()
7197 ASSERT_EQUAL_64(dst_base + 2 * sizeof(dst[1]), x17); in TEST()
7211 uintptr_t dst_base = reinterpret_cast<uintptr_t>(dst); in TEST() local
7215 __ Mov(x17, dst_base); in TEST()
7217 __ Mov(x19, dst_base + 56); in TEST()
7250 ASSERT_EQUAL_64(dst_base, x17); in TEST()
7252 ASSERT_EQUAL_64(dst_base + 56, x19); in TEST()
7266 uintptr_t dst_base = reinterpret_cast<uintptr_t>(dst); in TEST() local
7273 __ Mov(x21, dst_base - base_offset); in TEST()
7275 __ Mov(x19, dst_base + base_offset + 56); in TEST()
7308 ASSERT_EQUAL_64(dst_base - base_offset, x21); in TEST()
7310 ASSERT_EQUAL_64(dst_base + base_offset + 56, x19); in TEST()
7325 uintptr_t dst_base = reinterpret_cast<uintptr_t>(dst); in TEST() local
7329 __ Mov(x17, dst_base); in TEST()
7331 __ Mov(x19, dst_base + 64); in TEST()
7385 ASSERT_EQUAL_64(dst_base, x17); in TEST()
7387 ASSERT_EQUAL_64(dst_base + 64, x19); in TEST()
7400 uintptr_t dst_base = reinterpret_cast<uintptr_t>(dst); in TEST() local
7404 __ Mov(x17, dst_base); in TEST()
7406 __ Mov(x19, dst_base + 24); in TEST()
7434 ASSERT_EQUAL_64(dst_base, x17); in TEST()
7436 ASSERT_EQUAL_64(dst_base + 24, x19); in TEST()
7448 uintptr_t dst_base = reinterpret_cast<uintptr_t>(dst); in TEST() local
7452 __ Mov(x17, dst_base); in TEST()
7454 __ Mov(x19, dst_base + 48); in TEST()
7482 ASSERT_EQUAL_64(dst_base, x17); in TEST()
7484 ASSERT_EQUAL_64(dst_base + 48, x19); in TEST()
7498 uintptr_t dst_base = reinterpret_cast<uintptr_t>(dst); in TEST() local
7502 __ Mov(x17, dst_base); in TEST()
7503 __ Mov(x18, dst_base + 16); in TEST()
7534 ASSERT_EQUAL_64(dst_base, x17); in TEST()
7535 ASSERT_EQUAL_64(dst_base + 16, x18); in TEST()
7537 ASSERT_EQUAL_64(dst_base + 4, x20); in TEST()
7539 ASSERT_EQUAL_64(dst_base + 24, x22); in TEST()
7553 uintptr_t dst_base = reinterpret_cast<uintptr_t>(dst); in TEST() local
7560 __ Mov(x25, dst_base + base_offset); in TEST()
7561 __ Mov(x18, dst_base + base_offset + 16); in TEST()
7568 __ Mov(x25, dst_base + base_offset + 4); in TEST()
7577 __ Mov(x18, dst_base + base_offset + 16 + 8); in TEST()
7597 ASSERT_EQUAL_64(dst_base, x25); in TEST()
7598 ASSERT_EQUAL_64(dst_base + 16, x18); in TEST()
7600 ASSERT_EQUAL_64(dst_base + 4, x20); in TEST()
7602 ASSERT_EQUAL_64(dst_base + 24, x22); in TEST()
7617 uintptr_t dst_base = reinterpret_cast<uintptr_t>(dst); in TEST() local
7621 __ Mov(x17, dst_base); in TEST()
7622 __ Mov(x18, dst_base + 16); in TEST()
7653 ASSERT_EQUAL_64(dst_base, x17); in TEST()
7654 ASSERT_EQUAL_64(dst_base + 16, x18); in TEST()
7656 ASSERT_EQUAL_64(dst_base + 4, x20); in TEST()
7658 ASSERT_EQUAL_64(dst_base + 24, x22); in TEST()
7673 uintptr_t dst_base = reinterpret_cast<uintptr_t>(dst); in TEST() local
7680 __ Mov(x25, dst_base); in TEST()
7681 __ Mov(x18, dst_base + 16); in TEST()
7717 ASSERT_EQUAL_64(dst_base - base_offset, x25); in TEST()
7718 ASSERT_EQUAL_64(dst_base - base_offset + 16, x18); in TEST()
7720 ASSERT_EQUAL_64(dst_base - base_offset + 4, x20); in TEST()
7722 ASSERT_EQUAL_64(dst_base - base_offset + 24, x22); in TEST()
7754 uintptr_t dst_base = reinterpret_cast<uintptr_t>(dst); in TEST() local
7758 __ Mov(x18, dst_base); in TEST()
7760 __ Mov(x20, dst_base + 32); in TEST()
7761 __ Mov(x21, dst_base + 40); in TEST()
7784 ASSERT_EQUAL_64(dst_base, x18); in TEST()
7786 ASSERT_EQUAL_64(dst_base + 32, x20); in TEST()
7798 uintptr_t dst_base = reinterpret_cast<uintptr_t>(dst); in TEST() local
7802 __ Mov(x18, dst_base); in TEST()
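
The matches above all follow the same shape: a TEST() body takes the address of a local dst buffer as dst_base, moves it into one or more base registers, runs loads/stores with writeback (pre/post-index) addressing, and then checks the updated base registers with ASSERT_EQUAL_64. A minimal sketch of that pattern is shown below; only the Mov and ASSERT_EQUAL_64 lines are taken from the matched source, while the buffer size, the post-indexed Str, and the SETUP/START/END/RUN harness macros are assumptions about the surrounding test code.

// Sketch only: illustrates the pattern the matched lines come from.
// The store instruction and harness macros are assumed, not part of the matches.
TEST(str_postindex_sketch) {
  SETUP();

  uint64_t dst[3] = {0, 0, 0};
  uintptr_t dst_base = reinterpret_cast<uintptr_t>(dst);

  START();
  __ Mov(x0, 0x0123456789abcdef);
  __ Mov(x17, dst_base);
  // Hypothetical post-indexed store: writes to *x17, then advances x17 by 8.
  __ Str(x0, MemOperand(x17, 8, PostIndex));
  END();

  RUN();

  // The writeback leaves the base register one element past dst_base,
  // which is what the ASSERT_EQUAL_64(dst_base + ..., xN) lines above verify.
  ASSERT_EQUAL_64(dst_base + sizeof(dst[0]), x17);
}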