/*
 * Mock test for the Huawei Mate 10 (HiSilicon Kirin 970):
 * one package with two clusters of 4x ARM Cortex-A73 and 4x ARM Cortex-A53 cores.
 */
#include <gtest/gtest.h>

#include <cstdint>
#include <cstring>
#include <string>

#include <cpuinfo.h>
#include <cpuinfo-mock.h>


TEST(PROCESSORS, count) {
	ASSERT_EQ(8, cpuinfo_get_processors_count());
}

TEST(PROCESSORS, non_null) {
	ASSERT_TRUE(cpuinfo_get_processors());
}

TEST(PROCESSORS, smt_id) {
	for (uint32_t i = 0; i < cpuinfo_get_processors_count(); i++) {
		ASSERT_EQ(0, cpuinfo_get_processor(i)->smt_id);
	}
}

TEST(PROCESSORS, core) {
	for (uint32_t i = 0; i < cpuinfo_get_processors_count(); i++) {
		ASSERT_EQ(cpuinfo_get_core(i), cpuinfo_get_processor(i)->core);
	}
}

TEST(PROCESSORS, cluster) {
	for (uint32_t i = 0; i < cpuinfo_get_processors_count(); i++) {
		switch (i) {
			case 0:
			case 1:
			case 2:
			case 3:
				ASSERT_EQ(cpuinfo_get_cluster(0), cpuinfo_get_processor(i)->cluster);
				break;
			case 4:
			case 5:
			case 6:
			case 7:
				ASSERT_EQ(cpuinfo_get_cluster(1), cpuinfo_get_processor(i)->cluster);
				break;
		}
	}
}

TEST(PROCESSORS, package) {
	for (uint32_t i = 0; i < cpuinfo_get_processors_count(); i++) {
		ASSERT_EQ(cpuinfo_get_package(0), cpuinfo_get_processor(i)->package);
	}
}

TEST(PROCESSORS, linux_id) {
	for (uint32_t i = 0; i < cpuinfo_get_processors_count(); i++) {
		switch (i) {
			case 0:
			case 1:
			case 2:
			case 3:
				ASSERT_EQ(i + 4, cpuinfo_get_processor(i)->linux_id);
				break;
			case 4:
			case 5:
			case 6:
			case 7:
				ASSERT_EQ(i - 4, cpuinfo_get_processor(i)->linux_id);
				break;
		}
	}
}

TEST(PROCESSORS, l1i) {
	for (uint32_t i = 0; i < cpuinfo_get_processors_count(); i++) {
		ASSERT_EQ(cpuinfo_get_l1i_cache(i), cpuinfo_get_processor(i)->cache.l1i);
	}
}

TEST(PROCESSORS, l1d) {
	for (uint32_t i = 0; i < cpuinfo_get_processors_count(); i++) {
		ASSERT_EQ(cpuinfo_get_l1d_cache(i), cpuinfo_get_processor(i)->cache.l1d);
	}
}

TEST(PROCESSORS, l2) {
	for (uint32_t i = 0; i < cpuinfo_get_processors_count(); i++) {
		switch (i) {
			case 0:
			case 1:
			case 2:
			case 3:
				ASSERT_EQ(cpuinfo_get_l2_cache(0), cpuinfo_get_processor(i)->cache.l2);
				break;
			case 4:
			case 5:
			case 6:
			case 7:
				ASSERT_EQ(cpuinfo_get_l2_cache(1), cpuinfo_get_processor(i)->cache.l2);
				break;
		}
	}
}

TEST(PROCESSORS, l3) {
	for (uint32_t i = 0; i < cpuinfo_get_processors_count(); i++) {
		ASSERT_FALSE(cpuinfo_get_processor(i)->cache.l3);
	}
}

TEST(PROCESSORS, l4) {
	for (uint32_t i = 0; i < cpuinfo_get_processors_count(); i++) {
		ASSERT_FALSE(cpuinfo_get_processor(i)->cache.l4);
	}
}

TEST(CORES, count) {
	ASSERT_EQ(8, cpuinfo_get_cores_count());
}

TEST(CORES, non_null) {
	ASSERT_TRUE(cpuinfo_get_cores());
}

TEST(CORES, processor_start) {
	for (uint32_t i = 0; i < cpuinfo_get_cores_count(); i++) {
		ASSERT_EQ(i, cpuinfo_get_core(i)->processor_start);
	}
}

TEST(CORES, processor_count) {
	for (uint32_t i = 0; i < cpuinfo_get_cores_count(); i++) {
		ASSERT_EQ(1, cpuinfo_get_core(i)->processor_count);
	}
}

TEST(CORES, core_id) {
	for (uint32_t i = 0; i < cpuinfo_get_cores_count(); i++) {
		ASSERT_EQ(i, cpuinfo_get_core(i)->core_id);
	}
}

TEST(CORES, cluster) {
	for (uint32_t i = 0; i < cpuinfo_get_cores_count(); i++) {
		switch (i) {
			case 0:
			case 1:
			case 2:
			case 3:
				ASSERT_EQ(cpuinfo_get_cluster(0), cpuinfo_get_core(i)->cluster);
				break;
			case 4:
			case 5:
			case 6:
			case 7:
				ASSERT_EQ(cpuinfo_get_cluster(1), cpuinfo_get_core(i)->cluster);
				break;
		}
	}
}

TEST(CORES, package) {
	for (uint32_t i = 0; i < cpuinfo_get_cores_count(); i++) {
		ASSERT_EQ(cpuinfo_get_package(0), cpuinfo_get_core(i)->package);
	}
}

TEST(CORES, vendor) {
	for (uint32_t i = 0; i < cpuinfo_get_cores_count(); i++) {
		ASSERT_EQ(cpuinfo_vendor_arm, cpuinfo_get_core(i)->vendor);
	}
}

TEST(CORES, uarch) {
	for (uint32_t i = 0; i < cpuinfo_get_cores_count(); i++) {
		switch (i) {
			case 0:
			case 1:
			case 2:
			case 3:
				ASSERT_EQ(cpuinfo_uarch_cortex_a73, cpuinfo_get_core(i)->uarch);
				break;
			case 4:
			case 5:
			case 6:
			case 7:
				ASSERT_EQ(cpuinfo_uarch_cortex_a53, cpuinfo_get_core(i)->uarch);
				break;
		}
	}
}

TEST(CORES, midr) {
	for (uint32_t i = 0; i < cpuinfo_get_cores_count(); i++) {
		switch (i) {
			case 0:
			case 1:
			case 2:
			case 3:
				ASSERT_EQ(UINT32_C(0x410FD092), cpuinfo_get_core(i)->midr);
				break;
			case 4:
			case 5:
			case 6:
			case 7:
				ASSERT_EQ(UINT32_C(0x410FD034), cpuinfo_get_core(i)->midr);
				break;
		}
	}
}

TEST(CORES, DISABLED_frequency) {
	for (uint32_t i = 0; i < cpuinfo_get_cores_count(); i++) {
		switch (i) {
			case 0:
			case 1:
			case 2:
			case 3:
				ASSERT_EQ(UINT64_C(2362000000), cpuinfo_get_core(i)->frequency);
				break;
			case 4:
			case 5:
			case 6:
			case 7:
				ASSERT_EQ(UINT64_C(1844000000), cpuinfo_get_core(i)->frequency);
				break;
		}
	}
}

TEST(CLUSTERS, count) {
	ASSERT_EQ(2, cpuinfo_get_clusters_count());
}

TEST(CLUSTERS, non_null) {
	ASSERT_TRUE(cpuinfo_get_clusters());
}

TEST(CLUSTERS, processor_start) {
	for (uint32_t i = 0; i < cpuinfo_get_clusters_count(); i++) {
		switch (i) {
			case 0:
				ASSERT_EQ(0, cpuinfo_get_cluster(i)->processor_start);
				break;
			case 1:
				ASSERT_EQ(4, cpuinfo_get_cluster(i)->processor_start);
				break;
		}
	}
}

TEST(CLUSTERS, processor_count) {
	for (uint32_t i = 0; i < cpuinfo_get_clusters_count(); i++) {
		ASSERT_EQ(4, cpuinfo_get_cluster(i)->processor_count);
	}
}

TEST(CLUSTERS, core_start) {
	for (uint32_t i = 0; i < cpuinfo_get_clusters_count(); i++) {
		switch (i) {
			case 0:
				ASSERT_EQ(0, cpuinfo_get_cluster(i)->core_start);
				break;
			case 1:
				ASSERT_EQ(4, cpuinfo_get_cluster(i)->core_start);
				break;
		}
	}
}

TEST(CLUSTERS, core_count) {
	for (uint32_t i = 0; i < cpuinfo_get_clusters_count(); i++) {
		ASSERT_EQ(4, cpuinfo_get_cluster(i)->core_count);
	}
}

TEST(CLUSTERS, cluster_id) {
	for (uint32_t i = 0; i < cpuinfo_get_clusters_count(); i++) {
		ASSERT_EQ(i, cpuinfo_get_cluster(i)->cluster_id);
	}
}

TEST(CLUSTERS, package) {
	for (uint32_t i = 0; i < cpuinfo_get_clusters_count(); i++) {
		ASSERT_EQ(cpuinfo_get_package(0), cpuinfo_get_cluster(i)->package);
	}
}

TEST(CLUSTERS, vendor) {
	for (uint32_t i = 0; i < cpuinfo_get_clusters_count(); i++) {
		ASSERT_EQ(cpuinfo_vendor_arm, cpuinfo_get_cluster(i)->vendor);
	}
}

TEST(CLUSTERS, uarch) {
	for (uint32_t i = 0; i < cpuinfo_get_clusters_count(); i++) {
		switch (i) {
			case 0:
				ASSERT_EQ(cpuinfo_uarch_cortex_a73, cpuinfo_get_cluster(i)->uarch);
				break;
			case 1:
				ASSERT_EQ(cpuinfo_uarch_cortex_a53, cpuinfo_get_cluster(i)->uarch);
				break;
		}
	}
}

TEST(CLUSTERS, midr) {
	for (uint32_t i = 0; i < cpuinfo_get_clusters_count(); i++) {
		switch (i) {
			case 0:
				ASSERT_EQ(UINT32_C(0x410FD092), cpuinfo_get_cluster(i)->midr);
				break;
			case 1:
				ASSERT_EQ(UINT32_C(0x410FD034), cpuinfo_get_cluster(i)->midr);
				break;
		}
	}
}

TEST(CLUSTERS, DISABLED_frequency) {
	for (uint32_t i = 0; i < cpuinfo_get_clusters_count(); i++) {
		switch (i) {
			case 0:
				ASSERT_EQ(UINT64_C(2362000000), cpuinfo_get_cluster(i)->frequency);
				break;
			case 1:
				ASSERT_EQ(UINT64_C(1844000000), cpuinfo_get_cluster(i)->frequency);
				break;
		}
	}
}

TEST(PACKAGES, count) {
	ASSERT_EQ(1, cpuinfo_get_packages_count());
}

TEST(PACKAGES, name) {
	for (uint32_t i = 0; i < cpuinfo_get_packages_count(); i++) {
		ASSERT_EQ("HiSilicon Kirin 970",
			std::string(cpuinfo_get_package(i)->name,
				strnlen(cpuinfo_get_package(i)->name, CPUINFO_PACKAGE_NAME_MAX)));
	}
}

TEST(PACKAGES, processor_start) {
	for (uint32_t i = 0; i < cpuinfo_get_packages_count(); i++) {
		ASSERT_EQ(0, cpuinfo_get_package(i)->processor_start);
	}
}

TEST(PACKAGES, processor_count) {
	for (uint32_t i = 0; i < cpuinfo_get_packages_count(); i++) {
		ASSERT_EQ(8, cpuinfo_get_package(i)->processor_count);
	}
}

TEST(PACKAGES, core_start) {
	for (uint32_t i = 0; i < cpuinfo_get_packages_count(); i++) {
		ASSERT_EQ(0, cpuinfo_get_package(i)->core_start);
	}
}

TEST(PACKAGES, core_count) {
	for (uint32_t i = 0; i < cpuinfo_get_packages_count(); i++) {
		ASSERT_EQ(8, cpuinfo_get_package(i)->core_count);
	}
}

TEST(PACKAGES, cluster_start) {
	for (uint32_t i = 0; i < cpuinfo_get_packages_count(); i++) {
		ASSERT_EQ(0, cpuinfo_get_package(i)->cluster_start);
	}
}

TEST(PACKAGES, cluster_count) {
	for (uint32_t i = 0; i < cpuinfo_get_packages_count(); i++) {
		ASSERT_EQ(2, cpuinfo_get_package(i)->cluster_count);
	}
}

TEST(ISA, thumb) {
#if CPUINFO_ARCH_ARM
	ASSERT_TRUE(cpuinfo_has_arm_thumb());
#elif CPUINFO_ARCH_ARM64
	ASSERT_FALSE(cpuinfo_has_arm_thumb());
#endif
}

TEST(ISA, thumb2) {
#if CPUINFO_ARCH_ARM
	ASSERT_TRUE(cpuinfo_has_arm_thumb2());
#elif CPUINFO_ARCH_ARM64
	ASSERT_FALSE(cpuinfo_has_arm_thumb2());
#endif
}

TEST(ISA, armv5e) {
#if CPUINFO_ARCH_ARM
	ASSERT_TRUE(cpuinfo_has_arm_v5e());
#elif CPUINFO_ARCH_ARM64
	ASSERT_FALSE(cpuinfo_has_arm_v5e());
#endif
}

TEST(ISA, armv6) {
#if CPUINFO_ARCH_ARM
	ASSERT_TRUE(cpuinfo_has_arm_v6());
#elif CPUINFO_ARCH_ARM64
	ASSERT_FALSE(cpuinfo_has_arm_v6());
#endif
}

TEST(ISA, armv6k) {
#if CPUINFO_ARCH_ARM
	ASSERT_TRUE(cpuinfo_has_arm_v6k());
#elif CPUINFO_ARCH_ARM64
	ASSERT_FALSE(cpuinfo_has_arm_v6k());
#endif
}

TEST(ISA, armv7) {
#if CPUINFO_ARCH_ARM
	ASSERT_TRUE(cpuinfo_has_arm_v7());
#elif CPUINFO_ARCH_ARM64
	ASSERT_FALSE(cpuinfo_has_arm_v7());
#endif
}

TEST(ISA, armv7mp) {
#if CPUINFO_ARCH_ARM
	ASSERT_TRUE(cpuinfo_has_arm_v7mp());
#elif CPUINFO_ARCH_ARM64
	ASSERT_FALSE(cpuinfo_has_arm_v7mp());
#endif
}

TEST(ISA, idiv) {
	ASSERT_TRUE(cpuinfo_has_arm_idiv());
}

TEST(ISA, vfpv2) {
	ASSERT_FALSE(cpuinfo_has_arm_vfpv2());
}

TEST(ISA, vfpv3) {
	ASSERT_TRUE(cpuinfo_has_arm_vfpv3());
}

TEST(ISA, vfpv3_d32) {
	ASSERT_TRUE(cpuinfo_has_arm_vfpv3_d32());
}

TEST(ISA, vfpv3_fp16) {
	ASSERT_TRUE(cpuinfo_has_arm_vfpv3_fp16());
}

TEST(ISA, vfpv3_fp16_d32) {
	ASSERT_TRUE(cpuinfo_has_arm_vfpv3_fp16_d32());
}

TEST(ISA, vfpv4) {
	ASSERT_TRUE(cpuinfo_has_arm_vfpv4());
}

TEST(ISA, vfpv4_d32) {
	ASSERT_TRUE(cpuinfo_has_arm_vfpv4_d32());
}

TEST(ISA, wmmx) {
	ASSERT_FALSE(cpuinfo_has_arm_wmmx());
}

TEST(ISA, wmmx2) {
	ASSERT_FALSE(cpuinfo_has_arm_wmmx2());
}

TEST(ISA, neon) {
	ASSERT_TRUE(cpuinfo_has_arm_neon());
}

TEST(ISA, neon_fp16) {
	ASSERT_TRUE(cpuinfo_has_arm_neon_fp16());
}

TEST(ISA, neon_fma) {
	ASSERT_TRUE(cpuinfo_has_arm_neon_fma());
}

TEST(ISA, atomics) {
	ASSERT_FALSE(cpuinfo_has_arm_atomics());
}

TEST(ISA, neon_rdm) {
	ASSERT_FALSE(cpuinfo_has_arm_neon_rdm());
}

TEST(ISA, fp16_arith) {
	ASSERT_FALSE(cpuinfo_has_arm_fp16_arith());
}

TEST(ISA, neon_fp16_arith) {
	ASSERT_FALSE(cpuinfo_has_arm_neon_fp16_arith());
}

TEST(ISA, neon_dot) {
	ASSERT_FALSE(cpuinfo_has_arm_neon_dot());
}

TEST(ISA, jscvt) {
	ASSERT_FALSE(cpuinfo_has_arm_jscvt());
}

TEST(ISA, fcma) {
	ASSERT_FALSE(cpuinfo_has_arm_fcma());
}

TEST(ISA, aes) {
	ASSERT_TRUE(cpuinfo_has_arm_aes());
}

TEST(ISA, sha1) {
	ASSERT_TRUE(cpuinfo_has_arm_sha1());
}

TEST(ISA, sha2) {
	ASSERT_TRUE(cpuinfo_has_arm_sha2());
}

TEST(ISA, pmull) {
	ASSERT_TRUE(cpuinfo_has_arm_pmull());
}

TEST(ISA, crc32) {
	ASSERT_TRUE(cpuinfo_has_arm_crc32());
}

TEST(L1I, count) {
	ASSERT_EQ(8, cpuinfo_get_l1i_caches_count());
}

TEST(L1I, non_null) {
	ASSERT_TRUE(cpuinfo_get_l1i_caches());
}

TEST(L1I, size) {
	for (uint32_t i = 0; i < cpuinfo_get_l1i_caches_count(); i++) {
		switch (i) {
			case 0:
			case 1:
			case 2:
			case 3:
				ASSERT_EQ(64 * 1024, cpuinfo_get_l1i_cache(i)->size);
				break;
			case 4:
			case 5:
			case 6:
			case 7:
				ASSERT_EQ(32 * 1024, cpuinfo_get_l1i_cache(i)->size);
				break;
		}
	}
}

TEST(L1I, associativity) {
	for (uint32_t i = 0; i < cpuinfo_get_l1i_caches_count(); i++) {
		switch (i) {
			case 0:
			case 1:
			case 2:
			case 3:
				ASSERT_EQ(4, cpuinfo_get_l1i_cache(i)->associativity);
				break;
			case 4:
			case 5:
			case 6:
			case 7:
				ASSERT_EQ(2, cpuinfo_get_l1i_cache(i)->associativity);
				break;
		}
	}
}

TEST(L1I, sets) {
	for (uint32_t i = 0; i < cpuinfo_get_l1i_caches_count(); i++) {
		ASSERT_EQ(cpuinfo_get_l1i_cache(i)->size,
			cpuinfo_get_l1i_cache(i)->sets * cpuinfo_get_l1i_cache(i)->line_size * cpuinfo_get_l1i_cache(i)->partitions * cpuinfo_get_l1i_cache(i)->associativity);
	}
}

TEST(L1I, partitions) {
	for (uint32_t i = 0; i < cpuinfo_get_l1i_caches_count(); i++) {
		ASSERT_EQ(1, cpuinfo_get_l1i_cache(i)->partitions);
	}
}

TEST(L1I, line_size) {
	for (uint32_t i = 0; i < cpuinfo_get_l1i_caches_count(); i++) {
		ASSERT_EQ(64, cpuinfo_get_l1i_cache(i)->line_size);
	}
}

TEST(L1I, flags) {
	for (uint32_t i = 0; i < cpuinfo_get_l1i_caches_count(); i++) {
		ASSERT_EQ(0, cpuinfo_get_l1i_cache(i)->flags);
	}
}

TEST(L1I, processors) {
	for (uint32_t i = 0; i < cpuinfo_get_l1i_caches_count(); i++) {
		ASSERT_EQ(i, cpuinfo_get_l1i_cache(i)->processor_start);
		ASSERT_EQ(1, cpuinfo_get_l1i_cache(i)->processor_count);
	}
}

TEST(L1D, count) {
	ASSERT_EQ(8, cpuinfo_get_l1d_caches_count());
}

TEST(L1D, non_null) {
	ASSERT_TRUE(cpuinfo_get_l1d_caches());
}

TEST(L1D, size) {
	for (uint32_t i = 0; i < cpuinfo_get_l1d_caches_count(); i++) {
		switch (i) {
			case 0:
			case 1:
			case 2:
			case 3:
				ASSERT_EQ(64 * 1024, cpuinfo_get_l1d_cache(i)->size);
				break;
			case 4:
			case 5:
			case 6:
			case 7:
				ASSERT_EQ(32 * 1024, cpuinfo_get_l1d_cache(i)->size);
				break;
		}
	}
}

TEST(L1D, associativity) {
	for (uint32_t i = 0; i < cpuinfo_get_l1d_caches_count(); i++) {
		switch (i) {
			case 0:
			case 1:
			case 2:
			case 3:
				ASSERT_EQ(16, cpuinfo_get_l1d_cache(i)->associativity);
				break;
			case 4:
			case 5:
			case 6:
			case 7:
				ASSERT_EQ(4, cpuinfo_get_l1d_cache(i)->associativity);
				break;
		}
	}
}

TEST(L1D, sets) {
	for (uint32_t i = 0; i < cpuinfo_get_l1d_caches_count(); i++) {
		ASSERT_EQ(cpuinfo_get_l1d_cache(i)->size,
			cpuinfo_get_l1d_cache(i)->sets * cpuinfo_get_l1d_cache(i)->line_size * cpuinfo_get_l1d_cache(i)->partitions * cpuinfo_get_l1d_cache(i)->associativity);
	}
}

TEST(L1D, partitions) {
	for (uint32_t i = 0; i < cpuinfo_get_l1d_caches_count(); i++) {
		ASSERT_EQ(1, cpuinfo_get_l1d_cache(i)->partitions);
	}
}

TEST(L1D, line_size) {
	for (uint32_t i = 0; i < cpuinfo_get_l1d_caches_count(); i++) {
		ASSERT_EQ(64, cpuinfo_get_l1d_cache(i)->line_size);
	}
}

TEST(L1D, flags) {
	for (uint32_t i = 0; i < cpuinfo_get_l1d_caches_count(); i++) {
		ASSERT_EQ(0, cpuinfo_get_l1d_cache(i)->flags);
	}
}

TEST(L1D, processors) {
	for (uint32_t i = 0; i < cpuinfo_get_l1d_caches_count(); i++) {
		ASSERT_EQ(i, cpuinfo_get_l1d_cache(i)->processor_start);
		ASSERT_EQ(1, cpuinfo_get_l1d_cache(i)->processor_count);
	}
}

TEST(L2, count) {
	ASSERT_EQ(2, cpuinfo_get_l2_caches_count());
}

TEST(L2, non_null) {
	ASSERT_TRUE(cpuinfo_get_l2_caches());
}

TEST(L2, size) {
	for (uint32_t i = 0; i < cpuinfo_get_l2_caches_count(); i++) {
		switch (i) {
			case 0:
				ASSERT_EQ(2 * 1024 * 1024, cpuinfo_get_l2_cache(i)->size);
				break;
			case 1:
				ASSERT_EQ(1024 * 1024, cpuinfo_get_l2_cache(i)->size);
				break;
		}
	}
}

TEST(L2, associativity) {
	for (uint32_t i = 0; i < cpuinfo_get_l2_caches_count(); i++) {
		ASSERT_EQ(16, cpuinfo_get_l2_cache(i)->associativity);
	}
}

TEST(L2, sets) {
	for (uint32_t i = 0; i < cpuinfo_get_l2_caches_count(); i++) {
		ASSERT_EQ(cpuinfo_get_l2_cache(i)->size,
			cpuinfo_get_l2_cache(i)->sets * cpuinfo_get_l2_cache(i)->line_size * cpuinfo_get_l2_cache(i)->partitions * cpuinfo_get_l2_cache(i)->associativity);
	}
}

TEST(L2, partitions) {
	for (uint32_t i = 0; i < cpuinfo_get_l2_caches_count(); i++) {
		ASSERT_EQ(1, cpuinfo_get_l2_cache(i)->partitions);
	}
}

TEST(L2, line_size) {
	for (uint32_t i = 0; i < cpuinfo_get_l2_caches_count(); i++) {
		ASSERT_EQ(64, cpuinfo_get_l2_cache(i)->line_size);
	}
}

TEST(L2, flags) {
	for (uint32_t i = 0; i < cpuinfo_get_l2_caches_count(); i++) {
		switch (i) {
			case 0:
				ASSERT_EQ(CPUINFO_CACHE_INCLUSIVE, cpuinfo_get_l2_cache(i)->flags);
				break;
			case 1:
				ASSERT_EQ(0,
					cpuinfo_get_l2_cache(i)->flags);
				break;
		}
	}
}

TEST(L2, processors) {
	for (uint32_t i = 0; i < cpuinfo_get_l2_caches_count(); i++) {
		switch (i) {
			case 0:
				ASSERT_EQ(0, cpuinfo_get_l2_cache(i)->processor_start);
				ASSERT_EQ(4, cpuinfo_get_l2_cache(i)->processor_count);
				break;
			case 1:
				ASSERT_EQ(4, cpuinfo_get_l2_cache(i)->processor_start);
				ASSERT_EQ(4, cpuinfo_get_l2_cache(i)->processor_count);
				break;
		}
	}
}

TEST(L3, none) {
	ASSERT_EQ(0, cpuinfo_get_l3_caches_count());
	ASSERT_FALSE(cpuinfo_get_l3_caches());
}

TEST(L4, none) {
	ASSERT_EQ(0, cpuinfo_get_l4_caches_count());
	ASSERT_FALSE(cpuinfo_get_l4_caches());
}

/* Mock data (filesystem snapshot and Android properties) captured on the device. */
#include <huawei-mate-10.h>

int main(int argc, char* argv[]) {
	/* Mock the hardware capability (hwcap) bits before initializing cpuinfo. */
#if CPUINFO_ARCH_ARM
	cpuinfo_set_hwcap(UINT32_C(0x0037B0D6));
	cpuinfo_set_hwcap2(UINT32_C(0x0000001F));
#elif CPUINFO_ARCH_ARM64
	cpuinfo_set_hwcap(UINT32_C(0x000000FF));
#endif
	/* Mock the sysfs/procfs view and Android system properties from huawei-mate-10.h. */
	cpuinfo_mock_filesystem(filesystem);
#ifdef __ANDROID__
	cpuinfo_mock_android_properties(properties);
#endif
	cpuinfo_initialize();
	::testing::InitGoogleTest(&argc, argv);
	return RUN_ALL_TESTS();
}