// Mock test for cpuinfo on the Xiaomi Redmi 2A (Leadcore LC1860): verifies the
// reported topology (5 Cortex-A7 processors/cores in a 4+1 cluster split, one
// package), ISA features, and the cache hierarchy (per-core L1, two L2 caches,
// no L3/L4) against the mocked filesystem dump.
#include <cstring>	// strnlen, used in the PACKAGES.name test
#include <string>	// std::string, used in the PACKAGES.name test

#include <gtest/gtest.h>

#include <cpuinfo.h>
#include <cpuinfo-mock.h>

TEST(PROCESSORS, count) {
	ASSERT_EQ(5, cpuinfo_get_processors_count());
}

TEST(PROCESSORS, non_null) {
	ASSERT_TRUE(cpuinfo_get_processors());
}

TEST(PROCESSORS, smt_id) {
	for (uint32_t i = 0; i < cpuinfo_get_processors_count(); i++) {
		ASSERT_EQ(0, cpuinfo_get_processor(i)->smt_id);
	}
}

TEST(PROCESSORS, core) {
	for (uint32_t i = 0; i < cpuinfo_get_processors_count(); i++) {
		ASSERT_EQ(cpuinfo_get_core(i), cpuinfo_get_processor(i)->core);
	}
}

TEST(PROCESSORS, cluster) {
	for (uint32_t i = 0; i < cpuinfo_get_processors_count(); i++) {
		switch (i) {
			case 0:
			case 1:
			case 2:
			case 3:
				ASSERT_EQ(cpuinfo_get_cluster(0), cpuinfo_get_processor(i)->cluster);
				break;
			case 4:
				ASSERT_EQ(cpuinfo_get_cluster(1), cpuinfo_get_processor(i)->cluster);
				break;
		}
	}
}

TEST(PROCESSORS, package) {
	for (uint32_t i = 0; i < cpuinfo_get_processors_count(); i++) {
		ASSERT_EQ(cpuinfo_get_package(0), cpuinfo_get_processor(i)->package);
	}
}

TEST(PROCESSORS, linux_id) {
	for (uint32_t i = 0; i < cpuinfo_get_processors_count(); i++) {
		ASSERT_EQ(i, cpuinfo_get_processor(i)->linux_id);
	}
}

TEST(PROCESSORS, l1i) {
	for (uint32_t i = 0; i < cpuinfo_get_processors_count(); i++) {
		ASSERT_EQ(cpuinfo_get_l1i_cache(i), cpuinfo_get_processor(i)->cache.l1i);
	}
}

TEST(PROCESSORS, l1d) {
	for (uint32_t i = 0; i < cpuinfo_get_processors_count(); i++) {
		ASSERT_EQ(cpuinfo_get_l1d_cache(i), cpuinfo_get_processor(i)->cache.l1d);
	}
}

TEST(PROCESSORS, l2) {
	for (uint32_t i = 0; i < cpuinfo_get_processors_count(); i++) {
		switch (i) {
			case 0:
			case 1:
			case 2:
			case 3:
				ASSERT_EQ(cpuinfo_get_l2_cache(0), cpuinfo_get_processor(i)->cache.l2);
				break;
			case 4:
				ASSERT_EQ(cpuinfo_get_l2_cache(1), cpuinfo_get_processor(i)->cache.l2);
				break;
		}
	}
}

TEST(PROCESSORS, l3) {
	for (uint32_t i = 0; i < cpuinfo_get_processors_count(); i++) {
		ASSERT_FALSE(cpuinfo_get_processor(i)->cache.l3);
	}
}

TEST(PROCESSORS, l4) {
	for (uint32_t i = 0; i < cpuinfo_get_processors_count(); i++) {
		ASSERT_FALSE(cpuinfo_get_processor(i)->cache.l4);
	}
}

TEST(CORES, count) {
	ASSERT_EQ(5, cpuinfo_get_cores_count());
}

TEST(CORES, non_null) {
	ASSERT_TRUE(cpuinfo_get_cores());
}

TEST(CORES, processor_start) {
	for (uint32_t i = 0; i < cpuinfo_get_cores_count(); i++) {
		ASSERT_EQ(i, cpuinfo_get_core(i)->processor_start);
	}
}

TEST(CORES, processor_count) {
	for (uint32_t i = 0; i < cpuinfo_get_cores_count(); i++) {
		ASSERT_EQ(1, cpuinfo_get_core(i)->processor_count);
	}
}

TEST(CORES, core_id) {
	for (uint32_t i = 0; i < cpuinfo_get_cores_count(); i++) {
		ASSERT_EQ(i, cpuinfo_get_core(i)->core_id);
	}
}

TEST(CORES, cluster) {
	for (uint32_t i = 0; i < cpuinfo_get_cores_count(); i++) {
		switch (i) {
			case 0:
			case 1:
			case 2:
			case 3:
				ASSERT_EQ(cpuinfo_get_cluster(0), cpuinfo_get_core(i)->cluster);
				break;
			case 4:
				ASSERT_EQ(cpuinfo_get_cluster(1), cpuinfo_get_core(i)->cluster);
				break;
		}
	}
}

TEST(CORES, package) {
	for (uint32_t i = 0; i < cpuinfo_get_cores_count(); i++) {
		ASSERT_EQ(cpuinfo_get_package(0), cpuinfo_get_core(i)->package);
	}
}

TEST(CORES, vendor) {
	for (uint32_t i = 0; i < cpuinfo_get_cores_count(); i++) {
		ASSERT_EQ(cpuinfo_vendor_arm, cpuinfo_get_core(i)->vendor);
	}
}

TEST(CORES, uarch) {
	for (uint32_t i = 0; i < cpuinfo_get_cores_count(); i++) {
		ASSERT_EQ(cpuinfo_uarch_cortex_a7, cpuinfo_get_core(i)->uarch);
	}
}

TEST(CORES, midr) {
	for (uint32_t i = 0; i < cpuinfo_get_cores_count(); i++) {
		ASSERT_EQ(UINT32_C(0x410FC075), cpuinfo_get_core(i)->midr);
	}
}

TEST(CORES, DISABLED_frequency) {
	for (uint32_t i = 0; i < cpuinfo_get_cores_count(); i++) {
		ASSERT_EQ(UINT64_C(1495000000), cpuinfo_get_core(i)->frequency);
	}
}

TEST(CLUSTERS, count) {
	ASSERT_EQ(2, cpuinfo_get_clusters_count());
}

TEST(CLUSTERS, non_null) {
	ASSERT_TRUE(cpuinfo_get_clusters());
}

TEST(CLUSTERS, processor_start) {
	for (uint32_t i = 0; i < cpuinfo_get_clusters_count(); i++) {
		switch (i) {
			case 0:
				ASSERT_EQ(0, cpuinfo_get_cluster(i)->processor_start);
				break;
			case 1:
				ASSERT_EQ(4, cpuinfo_get_cluster(i)->processor_start);
				break;
		}
	}
}

TEST(CLUSTERS, processor_count) {
	for (uint32_t i = 0; i < cpuinfo_get_clusters_count(); i++) {
		switch (i) {
			case 0:
				ASSERT_EQ(4, cpuinfo_get_cluster(i)->processor_count);
				break;
			case 1:
				ASSERT_EQ(1, cpuinfo_get_cluster(i)->processor_count);
				break;
		}
	}
}

TEST(CLUSTERS, core_start) {
	for (uint32_t i = 0; i < cpuinfo_get_clusters_count(); i++) {
		switch (i) {
			case 0:
				ASSERT_EQ(0, cpuinfo_get_cluster(i)->core_start);
				break;
			case 1:
				ASSERT_EQ(4, cpuinfo_get_cluster(i)->core_start);
				break;
		}
	}
}

TEST(CLUSTERS, core_count) {
	for (uint32_t i = 0; i < cpuinfo_get_clusters_count(); i++) {
		switch (i) {
			case 0:
				ASSERT_EQ(4, cpuinfo_get_cluster(i)->core_count);
				break;
			case 1:
				ASSERT_EQ(1, cpuinfo_get_cluster(i)->core_count);
				break;
		}
	}
}

TEST(CLUSTERS, cluster_id) {
	for (uint32_t i = 0; i < cpuinfo_get_clusters_count(); i++) {
		ASSERT_EQ(i, cpuinfo_get_cluster(i)->cluster_id);
	}
}

TEST(CLUSTERS, package) {
	for (uint32_t i = 0; i < cpuinfo_get_clusters_count(); i++) {
		ASSERT_EQ(cpuinfo_get_package(0), cpuinfo_get_cluster(i)->package);
	}
}

TEST(CLUSTERS, vendor) {
	for (uint32_t i = 0; i < cpuinfo_get_clusters_count(); i++) {
		ASSERT_EQ(cpuinfo_vendor_arm, cpuinfo_get_cluster(i)->vendor);
	}
}

TEST(CLUSTERS, uarch) {
	for (uint32_t i = 0; i < cpuinfo_get_clusters_count(); i++) {
		ASSERT_EQ(cpuinfo_uarch_cortex_a7, cpuinfo_get_cluster(i)->uarch);
	}
}

TEST(CLUSTERS, midr) {
	for (uint32_t i = 0; i < cpuinfo_get_clusters_count(); i++) {
		ASSERT_EQ(UINT32_C(0x410FC075), cpuinfo_get_cluster(i)->midr);
	}
}

TEST(CLUSTERS, DISABLED_frequency) {
	for (uint32_t i = 0; i < cpuinfo_get_clusters_count(); i++) {
		ASSERT_EQ(UINT64_C(1495000000), cpuinfo_get_cluster(i)->frequency);
	}
}

TEST(PACKAGES, count) {
	ASSERT_EQ(1, cpuinfo_get_packages_count());
}

TEST(PACKAGES, name) {
	for (uint32_t i = 0; i < cpuinfo_get_packages_count(); i++) {
		ASSERT_EQ("Leadcore LC1860",
			std::string(cpuinfo_get_package(i)->name,
				strnlen(cpuinfo_get_package(i)->name, CPUINFO_PACKAGE_NAME_MAX)));
	}
}

TEST(PACKAGES, processor_start) {
	for (uint32_t i = 0; i < cpuinfo_get_packages_count(); i++) {
		ASSERT_EQ(0, cpuinfo_get_package(i)->processor_start);
	}
}

TEST(PACKAGES, processor_count) {
	for (uint32_t i = 0; i < cpuinfo_get_packages_count(); i++) {
		ASSERT_EQ(5, cpuinfo_get_package(i)->processor_count);
	}
}

TEST(PACKAGES, core_start) {
	for (uint32_t i = 0; i < cpuinfo_get_packages_count(); i++) {
		ASSERT_EQ(0, cpuinfo_get_package(i)->core_start);
	}
}

TEST(PACKAGES, core_count) {
	for (uint32_t i = 0; i < cpuinfo_get_packages_count(); i++) {
		ASSERT_EQ(5, cpuinfo_get_package(i)->core_count);
	}
}

TEST(PACKAGES, cluster_start) {
	for (uint32_t i = 0; i < cpuinfo_get_packages_count(); i++) {
		ASSERT_EQ(0, cpuinfo_get_package(i)->cluster_start);
	}
}

TEST(PACKAGES, cluster_count) {
	for (uint32_t i = 0; i < cpuinfo_get_packages_count(); i++) {
		ASSERT_EQ(2, cpuinfo_get_package(i)->cluster_count);
	}
}

TEST(ISA, thumb) {
	ASSERT_TRUE(cpuinfo_has_arm_thumb());
}

TEST(ISA, thumb2) {
	ASSERT_TRUE(cpuinfo_has_arm_thumb2());
}

TEST(ISA, armv5e) {
	ASSERT_TRUE(cpuinfo_has_arm_v5e());
}

TEST(ISA, armv6) {
	ASSERT_TRUE(cpuinfo_has_arm_v6());
}

TEST(ISA, armv6k) {
	ASSERT_TRUE(cpuinfo_has_arm_v6k());
}

TEST(ISA, armv7) {
	ASSERT_TRUE(cpuinfo_has_arm_v7());
}

TEST(ISA, armv7mp) {
	ASSERT_TRUE(cpuinfo_has_arm_v7mp());
}

TEST(ISA, idiv) {
	ASSERT_TRUE(cpuinfo_has_arm_idiv());
}

TEST(ISA, vfpv2) {
	ASSERT_FALSE(cpuinfo_has_arm_vfpv2());
}

TEST(ISA, vfpv3) {
	ASSERT_TRUE(cpuinfo_has_arm_vfpv3());
}

TEST(ISA, vfpv3_d32) {
	ASSERT_TRUE(cpuinfo_has_arm_vfpv3_d32());
}

TEST(ISA, vfpv3_fp16) {
	ASSERT_TRUE(cpuinfo_has_arm_vfpv3_fp16());
}

TEST(ISA, vfpv3_fp16_d32) {
	ASSERT_TRUE(cpuinfo_has_arm_vfpv3_fp16_d32());
}

TEST(ISA, vfpv4) {
	ASSERT_TRUE(cpuinfo_has_arm_vfpv4());
}

TEST(ISA, vfpv4_d32) {
	ASSERT_TRUE(cpuinfo_has_arm_vfpv4_d32());
}

TEST(ISA, wmmx) {
	ASSERT_FALSE(cpuinfo_has_arm_wmmx());
}

TEST(ISA, wmmx2) {
	ASSERT_FALSE(cpuinfo_has_arm_wmmx2());
}

TEST(ISA, neon) {
	ASSERT_TRUE(cpuinfo_has_arm_neon());
}

TEST(ISA, neon_fp16) {
	ASSERT_TRUE(cpuinfo_has_arm_neon_fp16());
}

TEST(ISA, neon_fma) {
	ASSERT_TRUE(cpuinfo_has_arm_neon_fma());
}

TEST(ISA, atomics) {
	ASSERT_FALSE(cpuinfo_has_arm_atomics());
}

TEST(ISA, neon_rdm) {
	ASSERT_FALSE(cpuinfo_has_arm_neon_rdm());
}

TEST(ISA, fp16_arith) {
	ASSERT_FALSE(cpuinfo_has_arm_fp16_arith());
}

TEST(ISA, neon_fp16_arith) {
	ASSERT_FALSE(cpuinfo_has_arm_neon_fp16_arith());
}

TEST(ISA, neon_dot) {
	ASSERT_FALSE(cpuinfo_has_arm_neon_dot());
}

TEST(ISA, jscvt) {
	ASSERT_FALSE(cpuinfo_has_arm_jscvt());
}

TEST(ISA, fcma) {
	ASSERT_FALSE(cpuinfo_has_arm_fcma());
}

TEST(ISA, aes) {
	ASSERT_FALSE(cpuinfo_has_arm_aes());
}

TEST(ISA, sha1) {
	ASSERT_FALSE(cpuinfo_has_arm_sha1());
}

TEST(ISA, sha2) {
	ASSERT_FALSE(cpuinfo_has_arm_sha2());
}

TEST(ISA, pmull) {
	ASSERT_FALSE(cpuinfo_has_arm_pmull());
}

TEST(ISA, crc32) {
	ASSERT_FALSE(cpuinfo_has_arm_crc32());
}

TEST(L1I, count) {
	ASSERT_EQ(5, cpuinfo_get_l1i_caches_count());
}

TEST(L1I, non_null) {
	ASSERT_TRUE(cpuinfo_get_l1i_caches());
}

TEST(L1I, size) {
	for (uint32_t i = 0; i < cpuinfo_get_l1i_caches_count(); i++) {
		ASSERT_EQ(32 * 1024, cpuinfo_get_l1i_cache(i)->size);
	}
}

TEST(L1I, associativity) {
	for (uint32_t i = 0; i < cpuinfo_get_l1i_caches_count(); i++) {
		ASSERT_EQ(2, cpuinfo_get_l1i_cache(i)->associativity);
	}
}

TEST(L1I, sets) {
	for (uint32_t i = 0; i < cpuinfo_get_l1i_caches_count(); i++) {
		ASSERT_EQ(cpuinfo_get_l1i_cache(i)->size,
			cpuinfo_get_l1i_cache(i)->sets * cpuinfo_get_l1i_cache(i)->line_size * cpuinfo_get_l1i_cache(i)->partitions * cpuinfo_get_l1i_cache(i)->associativity);
	}
}

TEST(L1I, partitions) {
	for (uint32_t i = 0; i < cpuinfo_get_l1i_caches_count(); i++) {
		ASSERT_EQ(1, cpuinfo_get_l1i_cache(i)->partitions);
	}
}

TEST(L1I, line_size) {
	for (uint32_t i = 0; i < cpuinfo_get_l1i_caches_count(); i++) {
		ASSERT_EQ(32, cpuinfo_get_l1i_cache(i)->line_size);
	}
}

TEST(L1I, flags) {
	for (uint32_t i = 0; i < cpuinfo_get_l1i_caches_count(); i++) {
		ASSERT_EQ(0, cpuinfo_get_l1i_cache(i)->flags);
	}
}

TEST(L1I, processors) {
	for (uint32_t i = 0; i < cpuinfo_get_l1i_caches_count(); i++) {
		ASSERT_EQ(i, cpuinfo_get_l1i_cache(i)->processor_start);
		ASSERT_EQ(1, cpuinfo_get_l1i_cache(i)->processor_count);
	}
}

TEST(L1D, count) {
	ASSERT_EQ(5, cpuinfo_get_l1d_caches_count());
}

TEST(L1D, non_null) {
	ASSERT_TRUE(cpuinfo_get_l1d_caches());
}

TEST(L1D, size) {
	for (uint32_t i = 0; i < cpuinfo_get_l1d_caches_count(); i++) {
		ASSERT_EQ(32 * 1024, cpuinfo_get_l1d_cache(i)->size);
	}
}

TEST(L1D, associativity) {
	for (uint32_t i = 0; i < cpuinfo_get_l1d_caches_count(); i++) {
		ASSERT_EQ(4, cpuinfo_get_l1d_cache(i)->associativity);
	}
}

TEST(L1D, sets) {
	for (uint32_t i = 0; i < cpuinfo_get_l1d_caches_count(); i++) {
		ASSERT_EQ(cpuinfo_get_l1d_cache(i)->size,
			cpuinfo_get_l1d_cache(i)->sets * cpuinfo_get_l1d_cache(i)->line_size * cpuinfo_get_l1d_cache(i)->partitions * cpuinfo_get_l1d_cache(i)->associativity);
	}
}

TEST(L1D, partitions) {
	for (uint32_t i = 0; i < cpuinfo_get_l1d_caches_count(); i++) {
		ASSERT_EQ(1, cpuinfo_get_l1d_cache(i)->partitions);
	}
}

TEST(L1D, line_size) {
	for (uint32_t i = 0; i < cpuinfo_get_l1d_caches_count(); i++) {
		ASSERT_EQ(64, cpuinfo_get_l1d_cache(i)->line_size);
	}
}

TEST(L1D, flags) {
	for (uint32_t i = 0; i < cpuinfo_get_l1d_caches_count(); i++) {
		ASSERT_EQ(0, cpuinfo_get_l1d_cache(i)->flags);
	}
}

TEST(L1D, processors) {
	for (uint32_t i = 0; i < cpuinfo_get_l1d_caches_count(); i++) {
		ASSERT_EQ(i, cpuinfo_get_l1d_cache(i)->processor_start);
		ASSERT_EQ(1, cpuinfo_get_l1d_cache(i)->processor_count);
	}
}

TEST(L2, count) {
	ASSERT_EQ(2, cpuinfo_get_l2_caches_count());
}

TEST(L2, non_null) {
	ASSERT_TRUE(cpuinfo_get_l2_caches());
}

TEST(L2, size) {
	for (uint32_t i = 0; i < cpuinfo_get_l2_caches_count(); i++) {
		switch (i) {
			case 0:
				ASSERT_EQ(512 * 1024, cpuinfo_get_l2_cache(i)->size);
				break;
			case 1:
				ASSERT_EQ(128 * 1024, cpuinfo_get_l2_cache(i)->size);
				break;
		}
	}
}

TEST(L2, associativity) {
	for (uint32_t i = 0; i < cpuinfo_get_l2_caches_count(); i++) {
		ASSERT_EQ(8, cpuinfo_get_l2_cache(i)->associativity);
	}
}

TEST(L2, sets) {
	for (uint32_t i = 0; i < cpuinfo_get_l2_caches_count(); i++) {
		ASSERT_EQ(cpuinfo_get_l2_cache(i)->size,
			cpuinfo_get_l2_cache(i)->sets * cpuinfo_get_l2_cache(i)->line_size * cpuinfo_get_l2_cache(i)->partitions * cpuinfo_get_l2_cache(i)->associativity);
	}
}

TEST(L2, partitions) {
	for (uint32_t i = 0; i < cpuinfo_get_l2_caches_count(); i++) {
		ASSERT_EQ(1, cpuinfo_get_l2_cache(i)->partitions);
	}
}

TEST(L2, line_size) {
	for (uint32_t i = 0; i < cpuinfo_get_l2_caches_count(); i++) {
		ASSERT_EQ(64, cpuinfo_get_l2_cache(i)->line_size);
	}
}

TEST(L2, flags) {
	for (uint32_t i = 0; i < cpuinfo_get_l2_caches_count(); i++) {
		ASSERT_EQ(0, cpuinfo_get_l2_cache(i)->flags);
	}
}

TEST(L2, processors) {
	for (uint32_t i = 0; i < cpuinfo_get_l2_caches_count(); i++) {
		switch (i) {
			case 0:
				ASSERT_EQ(0, cpuinfo_get_l2_cache(i)->processor_start);
				ASSERT_EQ(4, cpuinfo_get_l2_cache(i)->processor_count);
				break;
			case 1:
				ASSERT_EQ(4, cpuinfo_get_l2_cache(i)->processor_start);
				ASSERT_EQ(1, cpuinfo_get_l2_cache(i)->processor_count);
				break;
		}
	}
}

TEST(L3, none) {
	ASSERT_EQ(0, cpuinfo_get_l3_caches_count());
	ASSERT_FALSE(cpuinfo_get_l3_caches());
}

TEST(L4, none) {
	ASSERT_EQ(0, cpuinfo_get_l4_caches_count());
	ASSERT_FALSE(cpuinfo_get_l4_caches());
}

/* Mock data header providing the `filesystem` (and, on Android, `properties`) dumps for this device */
#include <xiaomi-redmi-2a.h>

int main(int argc, char* argv[]) {
#if CPUINFO_ARCH_ARM
	/* Use the mocked HWCAP value for this device */
	cpuinfo_set_hwcap(UINT32_C(0x0007B0D7));
#endif
	/* Route /proc and /sys reads through the mocked filesystem dump */
	cpuinfo_mock_filesystem(filesystem);
#ifdef __ANDROID__
	cpuinfo_mock_android_properties(properties);
#endif
	cpuinfo_initialize();
	::testing::InitGoogleTest(&argc, argv);
	return RUN_ALL_TESTS();
}