/*
 * Copyright (C) 2023 The Android Open Source Project
 * All rights reserved.
 *
 * Redistribution and use in source and binary forms, with or without
 * modification, are permitted provided that the following conditions
 * are met:
 *  * Redistributions of source code must retain the above copyright
 *    notice, this list of conditions and the following disclaimer.
 *  * Redistributions in binary form must reproduce the above copyright
 *    notice, this list of conditions and the following disclaimer in
 *    the documentation and/or other materials provided with the
 *    distribution.
 *
 * THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
 * "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
 * LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS
 * FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE
 * COPYRIGHT OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT,
 * INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING,
 * BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS
 * OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED
 * AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY,
 * OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT
 * OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF
 * SUCH DAMAGE.
 */

#include <gtest/gtest.h>

#include <errno.h>
#include <stdint.h>

#if __has_include(<sys/hwprobe.h>)
#include <sys/hwprobe.h>
#include <sys/syscall.h>
#endif

#if defined(__riscv)
#include <riscv_vector.h>

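// Helpers for the misaligned access tests below. They're noinline so the
// compiler can't fold the accesses away against the `tmp` and `dst` globals.

// Misaligned 64-bit load via a plain pointer cast.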
__attribute__((noinline))
uint64_t scalar_cast(uint8_t const* p) {
  return *(uint64_t const*)p;
}

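// The same misaligned load written as a memcpy; an 8-byte __builtin_memcpy
// is typically lowered to a single (possibly misaligned) load.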
__attribute__((noinline))
uint64_t scalar_memcpy(uint8_t const* p) {
  uint64_t r;
  __builtin_memcpy(&r, p, sizeof(r));
  return r;
}

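// A 16-byte memcpy that the compiler may vectorize, exercising misaligned
// vector loads and stores.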
__attribute__((noinline))
uint64_t vector_memcpy(uint8_t* d, uint8_t const* p) {
  __builtin_memcpy(d, p, 16);
  return *(uint64_t const*)d;
}

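// Explicit RVV load/store of 16 byte-sized elements via the vector intrinsics.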
__attribute__((noinline))
uint64_t vector_ldst(uint8_t* d, uint8_t const* p) {
  __riscv_vse8(d, __riscv_vle8_v_u8m1(p, 16), 16);
  return *(uint64_t const*)d;
}

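// RVV load/store with 64-bit elements at byte-misaligned addresses. The
// requested vl of 16 is clamped to VLMAX by the hardware (two elements when
// VLEN is 128, which keeps the access within the 24-byte buffers below).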
__attribute__((noinline))
uint64_t vector_ldst64(uint8_t* d, uint8_t const* p) {
  __riscv_vse64((unsigned long*)d, __riscv_vle64_v_u64m1((const unsigned long*)p, 16), 16);
  return *(uint64_t const*)d;
}

// Storage for the scalar and vector unaligned access tests; the tests add
// one byte to these arrays' addresses to get deliberately misaligned pointers.
uint64_t tmp[3] = {1, 1, 1};
uint64_t dst[3] = {1, 1, 1};
#endif

TEST(sys_hwprobe, __riscv_hwprobe_misaligned_scalar) {
#if defined(__riscv)
  uint8_t* p = (uint8_t*)tmp + 1;
  ASSERT_NE(0U, scalar_cast(p));
  ASSERT_NE(0U, scalar_memcpy(p));
#else
  GTEST_SKIP() << "__riscv_hwprobe requires riscv64";
#endif
}

TEST(sys_hwprobe, __riscv_hwprobe_misaligned_vector) {
#if defined(__riscv)
  uint8_t* p = (uint8_t*)tmp + 1;
  uint8_t* d = (uint8_t*)dst + 1;

  ASSERT_NE(0U, vector_ldst(d, p));
  ASSERT_NE(0U, vector_memcpy(d, p));
  ASSERT_NE(0U, vector_ldst64(d, p));
#else
  GTEST_SKIP() << "__riscv_hwprobe requires riscv64";
#endif
}

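// Check that the kernel reports the extensions and fast misaligned access
// that the tests above rely on.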
TEST(sys_hwprobe, __riscv_hwprobe) {
#if defined(__riscv) && __has_include(<sys/hwprobe.h>)
  riscv_hwprobe probes[] = {{.key = RISCV_HWPROBE_KEY_IMA_EXT_0},
                            {.key = RISCV_HWPROBE_KEY_CPUPERF_0}};
  ASSERT_EQ(0, __riscv_hwprobe(probes, 2, 0, nullptr, 0));
  EXPECT_EQ(RISCV_HWPROBE_KEY_IMA_EXT_0, probes[0].key);
  EXPECT_TRUE((probes[0].value & RISCV_HWPROBE_IMA_FD) != 0);
  EXPECT_TRUE((probes[0].value & RISCV_HWPROBE_IMA_C) != 0);
  EXPECT_TRUE((probes[0].value & RISCV_HWPROBE_IMA_V) != 0);
  EXPECT_TRUE((probes[0].value & RISCV_HWPROBE_EXT_ZBA) != 0);
  EXPECT_TRUE((probes[0].value & RISCV_HWPROBE_EXT_ZBB) != 0);
  EXPECT_TRUE((probes[0].value & RISCV_HWPROBE_EXT_ZBS) != 0);

  EXPECT_EQ(RISCV_HWPROBE_KEY_CPUPERF_0, probes[1].key);
  EXPECT_TRUE((probes[1].value & RISCV_HWPROBE_MISALIGNED_MASK) == RISCV_HWPROBE_MISALIGNED_FAST);
#else
  GTEST_SKIP() << "__riscv_hwprobe requires riscv64";
#endif
}

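// __riscv_hwprobe() is implemented via the vdso where possible; it should
// return the same answers as the raw syscall.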
TEST(sys_hwprobe, __riscv_hwprobe_syscall_vdso) {
#if defined(__riscv) && __has_include(<sys/hwprobe.h>)
  riscv_hwprobe probes_vdso[] = {{.key = RISCV_HWPROBE_KEY_IMA_EXT_0},
                                 {.key = RISCV_HWPROBE_KEY_CPUPERF_0}};
  ASSERT_EQ(0, __riscv_hwprobe(probes_vdso, 2, 0, nullptr, 0));

  riscv_hwprobe probes_syscall[] = {{.key = RISCV_HWPROBE_KEY_IMA_EXT_0},
                                    {.key = RISCV_HWPROBE_KEY_CPUPERF_0}};
  ASSERT_EQ(0, syscall(SYS_riscv_hwprobe, probes_syscall, 2, 0, nullptr, 0));

  // Check we got the same answers from the vdso and the syscall.
  EXPECT_EQ(RISCV_HWPROBE_KEY_IMA_EXT_0, probes_syscall[0].key);
  EXPECT_EQ(probes_vdso[0].key, probes_syscall[0].key);
  EXPECT_EQ(probes_vdso[0].value, probes_syscall[0].value);
  EXPECT_EQ(RISCV_HWPROBE_KEY_CPUPERF_0, probes_syscall[1].key);
  EXPECT_EQ(probes_vdso[1].key, probes_syscall[1].key);
  EXPECT_EQ(probes_vdso[1].value, probes_syscall[1].value);
#else
  GTEST_SKIP() << "__riscv_hwprobe requires riscv64";
#endif
}

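// ~0 is not a valid flags value, so the call should fail with EINVAL.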
TEST(sys_hwprobe, __riscv_hwprobe_fail) {
#if defined(__riscv) && __has_include(<sys/hwprobe.h>)
  riscv_hwprobe probes[] = {};
  ASSERT_EQ(EINVAL, __riscv_hwprobe(probes, 0, 0, nullptr, ~0));
#else
  GTEST_SKIP() << "__riscv_hwprobe requires riscv64";
#endif
}