#include <gtest/gtest.h>

#include <cpuinfo.h>
#include <cpuinfo-mock.h>

/*
 * Mock test for the Huawei Mate 20 (HiSilicon Kirin 980):
 * one package with three clusters — 2x Cortex-A76 big cores, 2x Cortex-A76 middle cores,
 * and 4x Cortex-A55 little cores — per-core L1/L2 caches and a shared 4 MB L3 cache.
 */

TEST(PROCESSORS, count) {
	ASSERT_EQ(8, cpuinfo_get_processors_count());
}

TEST(PROCESSORS, non_null) {
	ASSERT_TRUE(cpuinfo_get_processors());
}

TEST(PROCESSORS, smt_id) {
	for (uint32_t i = 0; i < cpuinfo_get_processors_count(); i++) {
		ASSERT_EQ(0, cpuinfo_get_processor(i)->smt_id);
	}
}

TEST(PROCESSORS, core) {
	for (uint32_t i = 0; i < cpuinfo_get_processors_count(); i++) {
		ASSERT_EQ(cpuinfo_get_core(i), cpuinfo_get_processor(i)->core);
	}
}

TEST(PROCESSORS, cluster) {
	for (uint32_t i = 0; i < cpuinfo_get_processors_count(); i++) {
		switch (i) {
			case 0:
			case 1:
				ASSERT_EQ(cpuinfo_get_cluster(0), cpuinfo_get_processor(i)->cluster);
				break;
			case 2:
			case 3:
				ASSERT_EQ(cpuinfo_get_cluster(1), cpuinfo_get_processor(i)->cluster);
				break;
			case 4:
			case 5:
			case 6:
			case 7:
				ASSERT_EQ(cpuinfo_get_cluster(2), cpuinfo_get_processor(i)->cluster);
				break;
		}
	}
}

TEST(PROCESSORS, package) {
	for (uint32_t i = 0; i < cpuinfo_get_processors_count(); i++) {
		ASSERT_EQ(cpuinfo_get_package(0), cpuinfo_get_processor(i)->package);
	}
}

TEST(PROCESSORS, linux_id) {
	for (uint32_t i = 0; i < cpuinfo_get_processors_count(); i++) {
		switch (i) {
			case 0:
			case 1:
				ASSERT_EQ(i + 6, cpuinfo_get_processor(i)->linux_id);
				break;
			case 2:
			case 3:
				ASSERT_EQ(i + 2, cpuinfo_get_processor(i)->linux_id);
				break;
			case 4:
			case 5:
			case 6:
			case 7:
				ASSERT_EQ(i - 4, cpuinfo_get_processor(i)->linux_id);
				break;
		}
	}
}

TEST(PROCESSORS, l1i) {
	for (uint32_t i = 0; i < cpuinfo_get_processors_count(); i++) {
		ASSERT_EQ(cpuinfo_get_l1i_cache(i), cpuinfo_get_processor(i)->cache.l1i);
	}
}

TEST(PROCESSORS, l1d) {
	for (uint32_t i = 0; i < cpuinfo_get_processors_count(); i++) {
		ASSERT_EQ(cpuinfo_get_l1d_cache(i), cpuinfo_get_processor(i)->cache.l1d);
	}
}

TEST(PROCESSORS, l2) {
	for (uint32_t i = 0; i < cpuinfo_get_processors_count(); i++) {
		ASSERT_EQ(cpuinfo_get_l2_cache(i), cpuinfo_get_processor(i)->cache.l2);
	}
}

TEST(PROCESSORS, l3) {
	for (uint32_t i = 0; i < cpuinfo_get_processors_count(); i++) {
		ASSERT_EQ(cpuinfo_get_l3_cache(0), cpuinfo_get_processor(i)->cache.l3);
	}
}

TEST(PROCESSORS, l4) {
	for (uint32_t i = 0; i < cpuinfo_get_processors_count(); i++) {
		ASSERT_FALSE(cpuinfo_get_processor(i)->cache.l4);
	}
}

TEST(CORES, count) {
	ASSERT_EQ(8, cpuinfo_get_cores_count());
}

TEST(CORES, non_null) {
	ASSERT_TRUE(cpuinfo_get_cores());
}

TEST(CORES, processor_start) {
	for (uint32_t i = 0; i < cpuinfo_get_cores_count(); i++) {
		ASSERT_EQ(i, cpuinfo_get_core(i)->processor_start);
	}
}

TEST(CORES, processor_count) {
	for (uint32_t i = 0; i < cpuinfo_get_cores_count(); i++) {
		ASSERT_EQ(1, cpuinfo_get_core(i)->processor_count);
	}
}

TEST(CORES, core_id) {
	for (uint32_t i = 0; i < cpuinfo_get_cores_count(); i++) {
		ASSERT_EQ(i, cpuinfo_get_core(i)->core_id);
	}
}

TEST(CORES, cluster) {
	for (uint32_t i = 0; i < cpuinfo_get_cores_count(); i++) {
		switch (i) {
			case 0:
			case 1:
				ASSERT_EQ(cpuinfo_get_cluster(0), cpuinfo_get_core(i)->cluster);
				break;
			case 2:
			case 3:
				ASSERT_EQ(cpuinfo_get_cluster(1), cpuinfo_get_core(i)->cluster);
				break;
			case 4:
			case 5:
			case 6:
			case 7:
				ASSERT_EQ(cpuinfo_get_cluster(2), cpuinfo_get_core(i)->cluster);
				break;
		}
	}
}

TEST(CORES, package) {
	for (uint32_t i = 0; i < cpuinfo_get_cores_count(); i++) {
		ASSERT_EQ(cpuinfo_get_package(0), cpuinfo_get_core(i)->package);
	}
}

TEST(CORES, vendor) {
	for (uint32_t i = 0; i < cpuinfo_get_cores_count(); i++) {
		ASSERT_EQ(cpuinfo_vendor_arm, cpuinfo_get_core(i)->vendor);
	}
}

TEST(CORES, uarch) {
	for (uint32_t i = 0; i < cpuinfo_get_cores_count(); i++) {
		switch (i) {
			case 0:
			case 1:
			case 2:
			case 3:
				ASSERT_EQ(cpuinfo_uarch_cortex_a76, cpuinfo_get_core(i)->uarch);
				break;
			case 4:
			case 5:
			case 6:
			case 7:
				ASSERT_EQ(cpuinfo_uarch_cortex_a55, cpuinfo_get_core(i)->uarch);
				break;
		}
	}
}

TEST(CORES, midr) {
	for (uint32_t i = 0; i < cpuinfo_get_cores_count(); i++) {
		switch (i) {
			case 0:
			case 1:
			case 2:
			case 3:
				ASSERT_EQ(UINT32_C(0x481FD400), cpuinfo_get_core(i)->midr);
				break;
			case 4:
			case 5:
			case 6:
			case 7:
				ASSERT_EQ(UINT32_C(0x411FD050), cpuinfo_get_core(i)->midr);
				break;
		}
	}
}

TEST(CORES, DISABLED_frequency) {
	for (uint32_t i = 0; i < cpuinfo_get_cores_count(); i++) {
		switch (i) {
			case 0:
			case 1:
				ASSERT_EQ(UINT64_C(2600000000), cpuinfo_get_core(i)->frequency);
				break;
			case 2:
			case 3:
				ASSERT_EQ(UINT64_C(1901000000), cpuinfo_get_core(i)->frequency);
				break;
			case 4:
			case 5:
			case 6:
			case 7:
				ASSERT_EQ(UINT64_C(1805000000), cpuinfo_get_core(i)->frequency);
				break;
		}
	}
}

TEST(CLUSTERS, count) {
	ASSERT_EQ(3, cpuinfo_get_clusters_count());
}

TEST(CLUSTERS, non_null) {
	ASSERT_TRUE(cpuinfo_get_clusters());
}

TEST(CLUSTERS, processor_start) {
	for (uint32_t i = 0; i < cpuinfo_get_clusters_count(); i++) {
		switch (i) {
			case 0:
				ASSERT_EQ(0, cpuinfo_get_cluster(i)->processor_start);
				break;
			case 1:
				ASSERT_EQ(2, cpuinfo_get_cluster(i)->processor_start);
				break;
			case 2:
				ASSERT_EQ(4, cpuinfo_get_cluster(i)->processor_start);
				break;
		}
	}
}

TEST(CLUSTERS, processor_count) {
	for (uint32_t i = 0; i < cpuinfo_get_clusters_count(); i++) {
		switch (i) {
			case 0:
			case 1:
				ASSERT_EQ(2, cpuinfo_get_cluster(i)->processor_count);
				break;
			case 2:
				ASSERT_EQ(4, cpuinfo_get_cluster(i)->processor_count);
				break;
		}
	}
}

TEST(CLUSTERS, core_start) {
	for (uint32_t i = 0; i < cpuinfo_get_clusters_count(); i++) {
		switch (i) {
			case 0:
				ASSERT_EQ(0, cpuinfo_get_cluster(i)->core_start);
				break;
			case 1:
				ASSERT_EQ(2, cpuinfo_get_cluster(i)->core_start);
				break;
			case 2:
				ASSERT_EQ(4, cpuinfo_get_cluster(i)->core_start);
				break;
		}
	}
}

TEST(CLUSTERS, core_count) {
	for (uint32_t i = 0; i < cpuinfo_get_clusters_count(); i++) {
		switch (i) {
			case 0:
			case 1:
				ASSERT_EQ(2, cpuinfo_get_cluster(i)->core_count);
				break;
			case 2:
				ASSERT_EQ(4, cpuinfo_get_cluster(i)->core_count);
				break;
		}
	}
}

TEST(CLUSTERS, cluster_id) {
	for (uint32_t i = 0; i < cpuinfo_get_clusters_count(); i++) {
		ASSERT_EQ(i, cpuinfo_get_cluster(i)->cluster_id);
	}
}

TEST(CLUSTERS, package) {
	for (uint32_t i = 0; i < cpuinfo_get_clusters_count(); i++) {
		ASSERT_EQ(cpuinfo_get_package(0), cpuinfo_get_cluster(i)->package);
	}
}

TEST(CLUSTERS, vendor) {
	for (uint32_t i = 0; i < cpuinfo_get_clusters_count(); i++) {
		ASSERT_EQ(cpuinfo_vendor_arm, cpuinfo_get_cluster(i)->vendor);
	}
}

TEST(CLUSTERS, uarch) {
	for (uint32_t i = 0; i < cpuinfo_get_clusters_count(); i++) {
		switch (i) {
			case 0:
				ASSERT_EQ(cpuinfo_uarch_cortex_a76, cpuinfo_get_cluster(i)->uarch);
				break;
			case 1:
				ASSERT_EQ(cpuinfo_uarch_cortex_a76, cpuinfo_get_cluster(i)->uarch);
				break;
			case 2:
				ASSERT_EQ(cpuinfo_uarch_cortex_a55, cpuinfo_get_cluster(i)->uarch);
				break;
		}
	}
}

TEST(CLUSTERS, midr) {
	for (uint32_t i = 0; i < cpuinfo_get_clusters_count(); i++) {
		switch (i) {
			case 0:
				ASSERT_EQ(UINT32_C(0x481FD400), cpuinfo_get_cluster(i)->midr);
				break;
			case 1:
				ASSERT_EQ(UINT32_C(0x481FD400), cpuinfo_get_cluster(i)->midr);
				break;
			case 2:
				ASSERT_EQ(UINT32_C(0x411FD050), cpuinfo_get_cluster(i)->midr);
				break;
		}
	}
}

TEST(CLUSTERS, DISABLED_frequency) {
	for (uint32_t i = 0; i < cpuinfo_get_clusters_count(); i++) {
		switch (i) {
			case 0:
				ASSERT_EQ(UINT64_C(2600000000), cpuinfo_get_cluster(i)->frequency);
				break;
			case 1:
				ASSERT_EQ(UINT64_C(1901000000), cpuinfo_get_cluster(i)->frequency);
				break;
			case 2:
				ASSERT_EQ(UINT64_C(1805000000), cpuinfo_get_cluster(i)->frequency);
				break;
		}
	}
}

TEST(PACKAGES, count) {
	ASSERT_EQ(1, cpuinfo_get_packages_count());
}

TEST(PACKAGES, name) {
	for (uint32_t i = 0; i < cpuinfo_get_packages_count(); i++) {
		ASSERT_EQ("HiSilicon Kirin 980",
			std::string(cpuinfo_get_package(i)->name,
				strnlen(cpuinfo_get_package(i)->name, CPUINFO_PACKAGE_NAME_MAX)));
	}
}

TEST(PACKAGES, processor_start) {
	for (uint32_t i = 0; i < cpuinfo_get_packages_count(); i++) {
		ASSERT_EQ(0, cpuinfo_get_package(i)->processor_start);
	}
}

TEST(PACKAGES, processor_count) {
	for (uint32_t i = 0; i < cpuinfo_get_packages_count(); i++) {
		ASSERT_EQ(8, cpuinfo_get_package(i)->processor_count);
	}
}

TEST(PACKAGES, core_start) {
	for (uint32_t i = 0; i < cpuinfo_get_packages_count(); i++) {
		ASSERT_EQ(0, cpuinfo_get_package(i)->core_start);
	}
}

TEST(PACKAGES, core_count) {
	for (uint32_t i = 0; i < cpuinfo_get_packages_count(); i++) {
		ASSERT_EQ(8, cpuinfo_get_package(i)->core_count);
	}
}

TEST(PACKAGES, cluster_start) {
	for (uint32_t i = 0; i < cpuinfo_get_packages_count(); i++) {
		ASSERT_EQ(0, cpuinfo_get_package(i)->cluster_start);
	}
}

TEST(PACKAGES, cluster_count) {
	for (uint32_t i = 0; i < cpuinfo_get_packages_count(); i++) {
		ASSERT_EQ(3, cpuinfo_get_package(i)->cluster_count);
	}
}

TEST(ISA, thumb) {
#if CPUINFO_ARCH_ARM
	ASSERT_TRUE(cpuinfo_has_arm_thumb());
#elif CPUINFO_ARCH_ARM64
	ASSERT_FALSE(cpuinfo_has_arm_thumb());
#endif
}

TEST(ISA, thumb2) {
#if CPUINFO_ARCH_ARM
	ASSERT_TRUE(cpuinfo_has_arm_thumb2());
#elif CPUINFO_ARCH_ARM64
	ASSERT_FALSE(cpuinfo_has_arm_thumb2());
#endif
}

TEST(ISA, armv5e) {
#if CPUINFO_ARCH_ARM
	ASSERT_TRUE(cpuinfo_has_arm_v5e());
#elif CPUINFO_ARCH_ARM64
	ASSERT_FALSE(cpuinfo_has_arm_v5e());
#endif
}

TEST(ISA, armv6) {
#if CPUINFO_ARCH_ARM
	ASSERT_TRUE(cpuinfo_has_arm_v6());
#elif CPUINFO_ARCH_ARM64
	ASSERT_FALSE(cpuinfo_has_arm_v6());
#endif
}

TEST(ISA, armv6k) {
#if CPUINFO_ARCH_ARM
	ASSERT_TRUE(cpuinfo_has_arm_v6k());
#elif CPUINFO_ARCH_ARM64
	ASSERT_FALSE(cpuinfo_has_arm_v6k());
#endif
}

TEST(ISA, armv7) {
#if CPUINFO_ARCH_ARM
	ASSERT_TRUE(cpuinfo_has_arm_v7());
#elif CPUINFO_ARCH_ARM64
	ASSERT_FALSE(cpuinfo_has_arm_v7());
#endif
}

TEST(ISA, armv7mp) {
#if CPUINFO_ARCH_ARM
	ASSERT_TRUE(cpuinfo_has_arm_v7mp());
#elif CPUINFO_ARCH_ARM64
	ASSERT_FALSE(cpuinfo_has_arm_v7mp());
#endif
}

TEST(ISA, idiv) {
	ASSERT_TRUE(cpuinfo_has_arm_idiv());
}

TEST(ISA, vfpv2) {
	ASSERT_FALSE(cpuinfo_has_arm_vfpv2());
}

TEST(ISA, vfpv3) {
	ASSERT_TRUE(cpuinfo_has_arm_vfpv3());
}

TEST(ISA, vfpv3_d32) {
	ASSERT_TRUE(cpuinfo_has_arm_vfpv3_d32());
}

TEST(ISA, vfpv3_fp16) {
	ASSERT_TRUE(cpuinfo_has_arm_vfpv3_fp16());
}

TEST(ISA, vfpv3_fp16_d32) {
	ASSERT_TRUE(cpuinfo_has_arm_vfpv3_fp16_d32());
}

TEST(ISA, vfpv4) {
	ASSERT_TRUE(cpuinfo_has_arm_vfpv4());
}

TEST(ISA, vfpv4_d32) {
	ASSERT_TRUE(cpuinfo_has_arm_vfpv4_d32());
}

TEST(ISA, wmmx) {
	ASSERT_FALSE(cpuinfo_has_arm_wmmx());
}

TEST(ISA, wmmx2) {
	ASSERT_FALSE(cpuinfo_has_arm_wmmx2());
}

TEST(ISA, neon) {
	ASSERT_TRUE(cpuinfo_has_arm_neon());
}

TEST(ISA, neon_fp16) {
	ASSERT_TRUE(cpuinfo_has_arm_neon_fp16());
}

TEST(ISA, neon_fma) {
	ASSERT_TRUE(cpuinfo_has_arm_neon_fma());
}

TEST(ISA, atomics) {
#if CPUINFO_ARCH_ARM
	ASSERT_FALSE(cpuinfo_has_arm_atomics());
#elif CPUINFO_ARCH_ARM64
	ASSERT_TRUE(cpuinfo_has_arm_atomics());
#endif
}

TEST(ISA, neon_rdm) {
	ASSERT_TRUE(cpuinfo_has_arm_neon_rdm());
}

TEST(ISA, fp16_arith) {
	ASSERT_TRUE(cpuinfo_has_arm_fp16_arith());
}

TEST(ISA, neon_fp16_arith) {
	ASSERT_TRUE(cpuinfo_has_arm_neon_fp16_arith());
}

TEST(ISA, neon_dot) {
	ASSERT_TRUE(cpuinfo_has_arm_neon_dot());
}

TEST(ISA, jscvt) {
	ASSERT_FALSE(cpuinfo_has_arm_jscvt());
}

TEST(ISA, fcma) {
	ASSERT_FALSE(cpuinfo_has_arm_fcma());
}

TEST(ISA, aes) {
	ASSERT_TRUE(cpuinfo_has_arm_aes());
}

TEST(ISA, sha1) {
	ASSERT_TRUE(cpuinfo_has_arm_sha1());
}

TEST(ISA, sha2) {
	ASSERT_TRUE(cpuinfo_has_arm_sha2());
}

TEST(ISA, pmull) {
	ASSERT_TRUE(cpuinfo_has_arm_pmull());
}

TEST(ISA, crc32) {
	ASSERT_TRUE(cpuinfo_has_arm_crc32());
}

TEST(L1I, count) {
	ASSERT_EQ(8, cpuinfo_get_l1i_caches_count());
}

TEST(L1I, non_null) {
	ASSERT_TRUE(cpuinfo_get_l1i_caches());
}

TEST(L1I, size) {
	for (uint32_t i = 0; i < cpuinfo_get_l1i_caches_count(); i++) {
		switch (i) {
			case 0:
			case 1:
			case 2:
			case 3:
				ASSERT_EQ(64 * 1024, cpuinfo_get_l1i_cache(i)->size);
				break;
			case 4:
			case 5:
			case 6:
			case 7:
				ASSERT_EQ(32 * 1024, cpuinfo_get_l1i_cache(i)->size);
				break;
		}
	}
}

TEST(L1I, associativity) {
	for (uint32_t i = 0; i < cpuinfo_get_l1i_caches_count(); i++) {
		ASSERT_EQ(4, cpuinfo_get_l1i_cache(i)->associativity);
	}
}

TEST(L1I, sets) {
	for (uint32_t i = 0; i < cpuinfo_get_l1i_caches_count(); i++) {
		ASSERT_EQ(cpuinfo_get_l1i_cache(i)->size,
			cpuinfo_get_l1i_cache(i)->sets * cpuinfo_get_l1i_cache(i)->line_size *
				cpuinfo_get_l1i_cache(i)->partitions * cpuinfo_get_l1i_cache(i)->associativity);
	}
}

TEST(L1I, partitions) {
	for (uint32_t i = 0; i < cpuinfo_get_l1i_caches_count(); i++) {
		ASSERT_EQ(1, cpuinfo_get_l1i_cache(i)->partitions);
	}
}

TEST(L1I, line_size) {
	for (uint32_t i = 0; i < cpuinfo_get_l1i_caches_count(); i++) {
		ASSERT_EQ(64, cpuinfo_get_l1i_cache(i)->line_size);
	}
}

TEST(L1I, flags) {
	for (uint32_t i = 0; i < cpuinfo_get_l1i_caches_count(); i++) {
		ASSERT_EQ(0, cpuinfo_get_l1i_cache(i)->flags);
	}
}

TEST(L1I, processors) {
	for (uint32_t i = 0; i < cpuinfo_get_l1i_caches_count(); i++) {
		ASSERT_EQ(i, cpuinfo_get_l1i_cache(i)->processor_start);
		ASSERT_EQ(1, cpuinfo_get_l1i_cache(i)->processor_count);
	}
}

TEST(L1D, count) {
	ASSERT_EQ(8, cpuinfo_get_l1d_caches_count());
}

TEST(L1D, non_null) {
	ASSERT_TRUE(cpuinfo_get_l1d_caches());
}

TEST(L1D, size) {
	for (uint32_t i = 0; i < cpuinfo_get_l1d_caches_count(); i++) {
		switch (i) {
			case 0:
			case 1:
			case 2:
			case 3:
				ASSERT_EQ(64 * 1024, cpuinfo_get_l1d_cache(i)->size);
				break;
			case 4:
			case 5:
			case 6:
			case 7:
				ASSERT_EQ(32 * 1024, cpuinfo_get_l1d_cache(i)->size);
				break;
		}
	}
}

TEST(L1D, associativity) {
	for (uint32_t i = 0; i < cpuinfo_get_l1d_caches_count(); i++) {
		ASSERT_EQ(4, cpuinfo_get_l1d_cache(i)->associativity);
	}
}

TEST(L1D, sets) {
	for (uint32_t i = 0; i < cpuinfo_get_l1d_caches_count(); i++) {
		ASSERT_EQ(cpuinfo_get_l1d_cache(i)->size,
			cpuinfo_get_l1d_cache(i)->sets * cpuinfo_get_l1d_cache(i)->line_size *
				cpuinfo_get_l1d_cache(i)->partitions * cpuinfo_get_l1d_cache(i)->associativity);
	}
}

TEST(L1D, partitions) {
	for (uint32_t i = 0; i < cpuinfo_get_l1d_caches_count(); i++) {
		ASSERT_EQ(1, cpuinfo_get_l1d_cache(i)->partitions);
	}
}

TEST(L1D, line_size) {
	for (uint32_t i = 0; i < cpuinfo_get_l1d_caches_count(); i++) {
		ASSERT_EQ(64, cpuinfo_get_l1d_cache(i)->line_size);
	}
}

TEST(L1D, flags) {
	for (uint32_t i = 0; i < cpuinfo_get_l1d_caches_count(); i++) {
		ASSERT_EQ(0, cpuinfo_get_l1d_cache(i)->flags);
	}
}

TEST(L1D, processors) {
	for (uint32_t i = 0; i < cpuinfo_get_l1d_caches_count(); i++) {
		ASSERT_EQ(i, cpuinfo_get_l1d_cache(i)->processor_start);
		ASSERT_EQ(1, cpuinfo_get_l1d_cache(i)->processor_count);
	}
}

TEST(L2, count) {
	ASSERT_EQ(8, cpuinfo_get_l2_caches_count());
}

TEST(L2, non_null) {
	ASSERT_TRUE(cpuinfo_get_l2_caches());
}

TEST(L2, size) {
	for (uint32_t i = 0; i < cpuinfo_get_l2_caches_count(); i++) {
		switch (i) {
			case 0:
			case 1:
			case 2:
			case 3:
				ASSERT_EQ(512 * 1024, cpuinfo_get_l2_cache(i)->size);
				break;
			case 4:
			case 5:
			case 6:
			case 7:
				ASSERT_EQ(128 * 1024, cpuinfo_get_l2_cache(i)->size);
				break;
		}
	}
}

TEST(L2, associativity) {
	for (uint32_t i = 0; i < cpuinfo_get_l2_caches_count(); i++) {
		switch (i) {
			case 0:
			case 1:
			case 2:
			case 3:
				ASSERT_EQ(8, cpuinfo_get_l2_cache(i)->associativity);
				break;
			case 4:
			case 5:
			case 6:
			case 7:
				ASSERT_EQ(4, cpuinfo_get_l2_cache(i)->associativity);
				break;
		}
	}
}

TEST(L2, sets) {
	for (uint32_t i = 0; i < cpuinfo_get_l2_caches_count(); i++) {
		ASSERT_EQ(cpuinfo_get_l2_cache(i)->size,
			cpuinfo_get_l2_cache(i)->sets * cpuinfo_get_l2_cache(i)->line_size *
				cpuinfo_get_l2_cache(i)->partitions * cpuinfo_get_l2_cache(i)->associativity);
	}
}

TEST(L2, partitions) {
	for (uint32_t i = 0; i < cpuinfo_get_l2_caches_count(); i++) {
		ASSERT_EQ(1, cpuinfo_get_l2_cache(i)->partitions);
	}
}

TEST(L2, line_size) {
	for (uint32_t i = 0; i < cpuinfo_get_l2_caches_count(); i++) {
		ASSERT_EQ(64, cpuinfo_get_l2_cache(i)->line_size);
	}
}

TEST(L2, flags) {
	for (uint32_t i = 0; i < cpuinfo_get_l2_caches_count(); i++) {
		switch (i) {
			case 0:
			case 1:
			case 2:
			case 3:
				ASSERT_EQ(CPUINFO_CACHE_INCLUSIVE, cpuinfo_get_l2_cache(i)->flags);
				break;
			case 4:
			case 5:
			case 6:
			case 7:
				ASSERT_EQ(0, cpuinfo_get_l2_cache(i)->flags);
				break;
		}
	}
}

TEST(L2, processors) {
	for (uint32_t i = 0; i < cpuinfo_get_l2_caches_count(); i++) {
		ASSERT_EQ(i, cpuinfo_get_l2_cache(i)->processor_start);
		ASSERT_EQ(1, cpuinfo_get_l2_cache(i)->processor_count);
	}
}

TEST(L3, count) {
	ASSERT_EQ(1, cpuinfo_get_l3_caches_count());
}

TEST(L3, non_null) {
	ASSERT_TRUE(cpuinfo_get_l3_caches());
}

TEST(L3, size) {
	for (uint32_t i = 0; i < cpuinfo_get_l3_caches_count(); i++) {
		ASSERT_EQ(4 * 1024 * 1024, cpuinfo_get_l3_cache(i)->size);
	}
}

TEST(L3, associativity) {
	for (uint32_t i = 0; i < cpuinfo_get_l3_caches_count(); i++) {
		ASSERT_EQ(16, cpuinfo_get_l3_cache(i)->associativity);
	}
}

TEST(L3, sets) {
	for (uint32_t i = 0; i < cpuinfo_get_l3_caches_count(); i++) {
		ASSERT_EQ(cpuinfo_get_l3_cache(i)->size,
			cpuinfo_get_l3_cache(i)->sets * cpuinfo_get_l3_cache(i)->line_size *
				cpuinfo_get_l3_cache(i)->partitions * cpuinfo_get_l3_cache(i)->associativity);
	}
}

TEST(L3, partitions) {
	for (uint32_t i = 0; i < cpuinfo_get_l3_caches_count(); i++) {
		ASSERT_EQ(1, cpuinfo_get_l3_cache(i)->partitions);
	}
}

TEST(L3, line_size) {
	for (uint32_t i = 0; i < cpuinfo_get_l3_caches_count(); i++) {
		ASSERT_EQ(64, cpuinfo_get_l3_cache(i)->line_size);
	}
}

TEST(L3, flags) {
	for (uint32_t i = 0; i < cpuinfo_get_l3_caches_count(); i++) {
		ASSERT_EQ(0, cpuinfo_get_l3_cache(i)->flags);
	}
}

TEST(L4, none) {
	ASSERT_EQ(0, cpuinfo_get_l4_caches_count());
	ASSERT_FALSE(cpuinfo_get_l4_caches());
}

#include <huawei-mate-20.h>

int main(int argc, char* argv[]) {
	/* Replay the HWCAP bits reported by the device's kernel */
#if CPUINFO_ARCH_ARM
	cpuinfo_set_hwcap(UINT32_C(0x0037B0D6));
	cpuinfo_set_hwcap2(UINT32_C(0x0000001F));
#elif CPUINFO_ARCH_ARM64
	cpuinfo_set_hwcap(UINT32_C(0x000007FF));
#endif
	/* Replay the /proc and /sys snapshot and Android properties captured in huawei-mate-20.h */
	cpuinfo_mock_filesystem(filesystem);
#ifdef __ANDROID__
	cpuinfo_mock_android_properties(properties);
#endif
	cpuinfo_initialize();
	::testing::InitGoogleTest(&argc, argv);
	return RUN_ALL_TESTS();
}