/*
 * cpuinfo mock test for the Galaxy S9 (US version): a Qualcomm Snapdragon 845
 * with four Cortex-A75-class and four Cortex-A55-class cores in one package.
 * Expected values are checked against the mocked filesystem dump included
 * from galaxy-s9-us.h below.
 */
#include <gtest/gtest.h>

#include <cpuinfo.h>
#include <cpuinfo-mock.h>


TEST(PROCESSORS, count) {
	ASSERT_EQ(8, cpuinfo_get_processors_count());
}

TEST(PROCESSORS, non_null) {
	ASSERT_TRUE(cpuinfo_get_processors());
}

TEST(PROCESSORS, smt_id) {
	for (uint32_t i = 0; i < cpuinfo_get_processors_count(); i++) {
		ASSERT_EQ(0, cpuinfo_get_processor(i)->smt_id);
	}
}

TEST(PROCESSORS, core) {
	for (uint32_t i = 0; i < cpuinfo_get_processors_count(); i++) {
		ASSERT_EQ(cpuinfo_get_core(i), cpuinfo_get_processor(i)->core);
	}
}

TEST(PROCESSORS, cluster) {
	for (uint32_t i = 0; i < cpuinfo_get_processors_count(); i++) {
		switch (i) {
			case 0:
			case 1:
			case 2:
			case 3:
				ASSERT_EQ(cpuinfo_get_cluster(0), cpuinfo_get_processor(i)->cluster);
				break;
			case 4:
			case 5:
			case 6:
			case 7:
				ASSERT_EQ(cpuinfo_get_cluster(1), cpuinfo_get_processor(i)->cluster);
				break;
		}
	}
}

TEST(PROCESSORS, package) {
	for (uint32_t i = 0; i < cpuinfo_get_processors_count(); i++) {
		ASSERT_EQ(cpuinfo_get_package(0), cpuinfo_get_processor(i)->package);
	}
}

/* Logical processors 0-3 (the A75-class cores) map to Linux CPUs 4-7 and vice versa. */
TEST(PROCESSORS, linux_id) {
	for (uint32_t i = 0; i < cpuinfo_get_processors_count(); i++) {
		switch (i) {
			case 0:
			case 1:
			case 2:
			case 3:
				ASSERT_EQ(i + 4, cpuinfo_get_processor(i)->linux_id);
				break;
			case 4:
			case 5:
			case 6:
			case 7:
				ASSERT_EQ(i - 4, cpuinfo_get_processor(i)->linux_id);
				break;
		}
	}
}

TEST(PROCESSORS, l1i) {
	for (uint32_t i = 0; i < cpuinfo_get_processors_count(); i++) {
		ASSERT_EQ(cpuinfo_get_l1i_cache(i), cpuinfo_get_processor(i)->cache.l1i);
	}
}

TEST(PROCESSORS, l1d) {
	for (uint32_t i = 0; i < cpuinfo_get_processors_count(); i++) {
		ASSERT_EQ(cpuinfo_get_l1d_cache(i), cpuinfo_get_processor(i)->cache.l1d);
	}
}

TEST(PROCESSORS, l2) {
	for (uint32_t i = 0; i < cpuinfo_get_processors_count(); i++) {
		ASSERT_EQ(cpuinfo_get_l2_cache(i), cpuinfo_get_processor(i)->cache.l2);
	}
}

TEST(PROCESSORS, l3) {
	for (uint32_t i = 0; i < cpuinfo_get_processors_count(); i++) {
		ASSERT_EQ(cpuinfo_get_l3_cache(0), cpuinfo_get_processor(i)->cache.l3);
	}
}

TEST(PROCESSORS, l4) {
	for (uint32_t i = 0; i < cpuinfo_get_processors_count(); i++) {
		ASSERT_FALSE(cpuinfo_get_processor(i)->cache.l4);
	}
}

TEST(CORES, count) {
	ASSERT_EQ(8, cpuinfo_get_cores_count());
}

TEST(CORES, non_null) {
	ASSERT_TRUE(cpuinfo_get_cores());
}

TEST(CORES, processor_start) {
	for (uint32_t i = 0; i < cpuinfo_get_cores_count(); i++) {
		ASSERT_EQ(i, cpuinfo_get_core(i)->processor_start);
	}
}

TEST(CORES, processor_count) {
	for (uint32_t i = 0; i < cpuinfo_get_cores_count(); i++) {
		ASSERT_EQ(1, cpuinfo_get_core(i)->processor_count);
	}
}

TEST(CORES, core_id) {
	for (uint32_t i = 0; i < cpuinfo_get_cores_count(); i++) {
		ASSERT_EQ(i, cpuinfo_get_core(i)->core_id);
	}
}

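/* Cores 0-3 form the performance (Cortex-A75-class) cluster; cores 4-7 form the efficiency (Cortex-A55-class) cluster. */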
TEST(CORES, cluster) {
	for (uint32_t i = 0; i < cpuinfo_get_cores_count(); i++) {
		switch (i) {
			case 0:
			case 1:
			case 2:
			case 3:
				ASSERT_EQ(cpuinfo_get_cluster(0), cpuinfo_get_core(i)->cluster);
				break;
			case 4:
			case 5:
			case 6:
			case 7:
				ASSERT_EQ(cpuinfo_get_cluster(1), cpuinfo_get_core(i)->cluster);
				break;
		}
	}
}

TEST(CORES, package) {
	for (uint32_t i = 0; i < cpuinfo_get_cores_count(); i++) {
		ASSERT_EQ(cpuinfo_get_package(0), cpuinfo_get_core(i)->package);
	}
}

TEST(CORES, vendor) {
	for (uint32_t i = 0; i < cpuinfo_get_cores_count(); i++) {
		ASSERT_EQ(cpuinfo_vendor_arm, cpuinfo_get_core(i)->vendor);
	}
}

TEST(CORES, uarch) {
	for (uint32_t i = 0; i < cpuinfo_get_cores_count(); i++) {
		switch (i) {
			case 0:
			case 1:
			case 2:
			case 3:
				ASSERT_EQ(cpuinfo_uarch_cortex_a75, cpuinfo_get_core(i)->uarch);
				break;
			case 4:
			case 5:
			case 6:
			case 7:
				ASSERT_EQ(cpuinfo_uarch_cortex_a55, cpuinfo_get_core(i)->uarch);
				break;
		}
	}
}

TEST(CORES, midr) {
	for (uint32_t i = 0; i < cpuinfo_get_cores_count(); i++) {
		switch (i) {
			case 0:
			case 1:
			case 2:
			case 3:
				ASSERT_EQ(UINT32_C(0x516F802D), cpuinfo_get_core(i)->midr);
				break;
			case 4:
			case 5:
			case 6:
			case 7:
				ASSERT_EQ(UINT32_C(0x517F803C), cpuinfo_get_core(i)->midr);
				break;
		}
	}
}

TEST(CORES, DISABLED_frequency) {
	for (uint32_t i = 0; i < cpuinfo_get_cores_count(); i++) {
		switch (i) {
			case 0:
			case 1:
			case 2:
			case 3:
				ASSERT_EQ(UINT64_C(2803200000), cpuinfo_get_core(i)->frequency);
				break;
			case 4:
			case 5:
			case 6:
			case 7:
				ASSERT_EQ(UINT64_C(1766400000), cpuinfo_get_core(i)->frequency);
				break;
		}
	}
}

TEST(CLUSTERS, count) {
	ASSERT_EQ(2, cpuinfo_get_clusters_count());
}

TEST(CLUSTERS, non_null) {
	ASSERT_TRUE(cpuinfo_get_clusters());
}

TEST(CLUSTERS, processor_start) {
	for (uint32_t i = 0; i < cpuinfo_get_clusters_count(); i++) {
		switch (i) {
			case 0:
				ASSERT_EQ(0, cpuinfo_get_cluster(i)->processor_start);
				break;
			case 1:
				ASSERT_EQ(4, cpuinfo_get_cluster(i)->processor_start);
				break;
		}
	}
}

TEST(CLUSTERS, processor_count) {
	for (uint32_t i = 0; i < cpuinfo_get_clusters_count(); i++) {
		ASSERT_EQ(4, cpuinfo_get_cluster(i)->processor_count);
	}
}

TEST(CLUSTERS, core_start) {
	for (uint32_t i = 0; i < cpuinfo_get_clusters_count(); i++) {
		switch (i) {
			case 0:
				ASSERT_EQ(0, cpuinfo_get_cluster(i)->core_start);
				break;
			case 1:
				ASSERT_EQ(4, cpuinfo_get_cluster(i)->core_start);
				break;
		}
	}
}

TEST(CLUSTERS, core_count) {
	for (uint32_t i = 0; i < cpuinfo_get_clusters_count(); i++) {
		ASSERT_EQ(4, cpuinfo_get_cluster(i)->core_count);
	}
}

TEST(CLUSTERS, cluster_id) {
	for (uint32_t i = 0; i < cpuinfo_get_clusters_count(); i++) {
		ASSERT_EQ(i, cpuinfo_get_cluster(i)->cluster_id);
	}
}

TEST(CLUSTERS, package) {
	for (uint32_t i = 0; i < cpuinfo_get_clusters_count(); i++) {
		ASSERT_EQ(cpuinfo_get_package(0), cpuinfo_get_cluster(i)->package);
	}
}

TEST(CLUSTERS, vendor) {
	for (uint32_t i = 0; i < cpuinfo_get_clusters_count(); i++) {
		ASSERT_EQ(cpuinfo_vendor_arm, cpuinfo_get_cluster(i)->vendor);
	}
}

TEST(CLUSTERS, uarch) {
	for (uint32_t i = 0; i < cpuinfo_get_clusters_count(); i++) {
		switch (i) {
			case 0:
				ASSERT_EQ(cpuinfo_uarch_cortex_a75, cpuinfo_get_cluster(i)->uarch);
				break;
			case 1:
				ASSERT_EQ(cpuinfo_uarch_cortex_a55, cpuinfo_get_cluster(i)->uarch);
				break;
		}
	}
}

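/*
 * The MIDR values checked in this file decode as: implementer 0x51 (Qualcomm);
 * parts 0x802 and 0x803 are presumably the Kryo 385 Gold (Cortex-A75-derived)
 * and Kryo 385 Silver (Cortex-A55-derived) cores of the Snapdragon 845.
 */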
TEST(CLUSTERS, midr) {
	for (uint32_t i = 0; i < cpuinfo_get_clusters_count(); i++) {
		switch (i) {
			case 0:
				ASSERT_EQ(UINT32_C(0x516F802D), cpuinfo_get_cluster(i)->midr);
				break;
			case 1:
				ASSERT_EQ(UINT32_C(0x517F803C), cpuinfo_get_cluster(i)->midr);
				break;
		}
	}
}

TEST(CLUSTERS, DISABLED_frequency) {
	for (uint32_t i = 0; i < cpuinfo_get_clusters_count(); i++) {
		switch (i) {
			case 0:
				ASSERT_EQ(UINT64_C(2803200000), cpuinfo_get_cluster(i)->frequency);
				break;
			case 1:
				ASSERT_EQ(UINT64_C(1766400000), cpuinfo_get_cluster(i)->frequency);
				break;
		}
	}
}

TEST(PACKAGES, count) {
	ASSERT_EQ(1, cpuinfo_get_packages_count());
}

TEST(PACKAGES, name) {
	for (uint32_t i = 0; i < cpuinfo_get_packages_count(); i++) {
		ASSERT_EQ("Qualcomm Snapdragon 845",
			std::string(cpuinfo_get_package(i)->name,
				strnlen(cpuinfo_get_package(i)->name, CPUINFO_PACKAGE_NAME_MAX)));
	}
}

TEST(PACKAGES, processor_start) {
	for (uint32_t i = 0; i < cpuinfo_get_packages_count(); i++) {
		ASSERT_EQ(0, cpuinfo_get_package(i)->processor_start);
	}
}

TEST(PACKAGES, processor_count) {
	for (uint32_t i = 0; i < cpuinfo_get_packages_count(); i++) {
		ASSERT_EQ(8, cpuinfo_get_package(i)->processor_count);
	}
}

TEST(PACKAGES, core_start) {
	for (uint32_t i = 0; i < cpuinfo_get_packages_count(); i++) {
		ASSERT_EQ(0, cpuinfo_get_package(i)->core_start);
	}
}

TEST(PACKAGES, core_count) {
	for (uint32_t i = 0; i < cpuinfo_get_packages_count(); i++) {
		ASSERT_EQ(8, cpuinfo_get_package(i)->core_count);
	}
}

TEST(PACKAGES, cluster_start) {
	for (uint32_t i = 0; i < cpuinfo_get_packages_count(); i++) {
		ASSERT_EQ(0, cpuinfo_get_package(i)->cluster_start);
	}
}

TEST(PACKAGES, cluster_count) {
	for (uint32_t i = 0; i < cpuinfo_get_packages_count(); i++) {
		ASSERT_EQ(2, cpuinfo_get_package(i)->cluster_count);
	}
}

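/*
 * ISA feature expectations. Pre-ARMv8 features (Thumb, ARMv5E through ARMv7MP)
 * are reported only in 32-bit builds, while large-system-extension atomics are
 * expected only on AArch64. The ARMv8.1/8.2 extensions checked below (NEON RDM,
 * FP16 arithmetic) and the AES/SHA1/SHA2/PMULL/CRC32 extensions are present;
 * dot product, JSCVT, and FCMA are not.
 */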
TEST(ISA, thumb) {
	#if CPUINFO_ARCH_ARM
		ASSERT_TRUE(cpuinfo_has_arm_thumb());
	#elif CPUINFO_ARCH_ARM64
		ASSERT_FALSE(cpuinfo_has_arm_thumb());
	#endif
}

TEST(ISA, thumb2) {
	#if CPUINFO_ARCH_ARM
		ASSERT_TRUE(cpuinfo_has_arm_thumb2());
	#elif CPUINFO_ARCH_ARM64
		ASSERT_FALSE(cpuinfo_has_arm_thumb2());
	#endif
}

TEST(ISA, armv5e) {
	#if CPUINFO_ARCH_ARM
		ASSERT_TRUE(cpuinfo_has_arm_v5e());
	#elif CPUINFO_ARCH_ARM64
		ASSERT_FALSE(cpuinfo_has_arm_v5e());
	#endif
}

TEST(ISA, armv6) {
	#if CPUINFO_ARCH_ARM
		ASSERT_TRUE(cpuinfo_has_arm_v6());
	#elif CPUINFO_ARCH_ARM64
		ASSERT_FALSE(cpuinfo_has_arm_v6());
	#endif
}

TEST(ISA, armv6k) {
	#if CPUINFO_ARCH_ARM
		ASSERT_TRUE(cpuinfo_has_arm_v6k());
	#elif CPUINFO_ARCH_ARM64
		ASSERT_FALSE(cpuinfo_has_arm_v6k());
	#endif
}

TEST(ISA, armv7) {
	#if CPUINFO_ARCH_ARM
		ASSERT_TRUE(cpuinfo_has_arm_v7());
	#elif CPUINFO_ARCH_ARM64
		ASSERT_FALSE(cpuinfo_has_arm_v7());
	#endif
}

TEST(ISA, armv7mp) {
	#if CPUINFO_ARCH_ARM
		ASSERT_TRUE(cpuinfo_has_arm_v7mp());
	#elif CPUINFO_ARCH_ARM64
		ASSERT_FALSE(cpuinfo_has_arm_v7mp());
	#endif
}

TEST(ISA, idiv) {
	ASSERT_TRUE(cpuinfo_has_arm_idiv());
}

TEST(ISA, vfpv2) {
	ASSERT_FALSE(cpuinfo_has_arm_vfpv2());
}

TEST(ISA, vfpv3) {
	ASSERT_TRUE(cpuinfo_has_arm_vfpv3());
}

TEST(ISA, vfpv3_d32) {
	ASSERT_TRUE(cpuinfo_has_arm_vfpv3_d32());
}

TEST(ISA, vfpv3_fp16) {
	ASSERT_TRUE(cpuinfo_has_arm_vfpv3_fp16());
}

TEST(ISA, vfpv3_fp16_d32) {
	ASSERT_TRUE(cpuinfo_has_arm_vfpv3_fp16_d32());
}

TEST(ISA, vfpv4) {
	ASSERT_TRUE(cpuinfo_has_arm_vfpv4());
}

TEST(ISA, vfpv4_d32) {
	ASSERT_TRUE(cpuinfo_has_arm_vfpv4_d32());
}

TEST(ISA, wmmx) {
	ASSERT_FALSE(cpuinfo_has_arm_wmmx());
}

TEST(ISA, wmmx2) {
	ASSERT_FALSE(cpuinfo_has_arm_wmmx2());
}

TEST(ISA, neon) {
	ASSERT_TRUE(cpuinfo_has_arm_neon());
}

TEST(ISA, neon_fp16) {
	ASSERT_TRUE(cpuinfo_has_arm_neon_fp16());
}

TEST(ISA, neon_fma) {
	ASSERT_TRUE(cpuinfo_has_arm_neon_fma());
}

TEST(ISA, atomics) {
	#if CPUINFO_ARCH_ARM
		ASSERT_FALSE(cpuinfo_has_arm_atomics());
	#elif CPUINFO_ARCH_ARM64
		ASSERT_TRUE(cpuinfo_has_arm_atomics());
	#endif
}

TEST(ISA, neon_rdm) {
	ASSERT_TRUE(cpuinfo_has_arm_neon_rdm());
}

TEST(ISA, fp16_arith) {
	ASSERT_TRUE(cpuinfo_has_arm_fp16_arith());
}

TEST(ISA, neon_fp16_arith) {
	ASSERT_TRUE(cpuinfo_has_arm_neon_fp16_arith());
}

TEST(ISA, neon_dot) {
	ASSERT_FALSE(cpuinfo_has_arm_neon_dot());
}

TEST(ISA, jscvt) {
	ASSERT_FALSE(cpuinfo_has_arm_jscvt());
}

TEST(ISA, fcma) {
	ASSERT_FALSE(cpuinfo_has_arm_fcma());
}

TEST(ISA, aes) {
	ASSERT_TRUE(cpuinfo_has_arm_aes());
}

TEST(ISA, sha1) {
	ASSERT_TRUE(cpuinfo_has_arm_sha1());
}

TEST(ISA, sha2) {
	ASSERT_TRUE(cpuinfo_has_arm_sha2());
}

TEST(ISA, pmull) {
	ASSERT_TRUE(cpuinfo_has_arm_pmull());
}

TEST(ISA, crc32) {
	ASSERT_TRUE(cpuinfo_has_arm_crc32());
}

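/*
 * Cache hierarchy: per-core L1I, L1D, and L2 caches (larger on the A75-class
 * cores than on the A55-class cores) plus a single 2 MB L3 shared by all eight
 * processors; no L4 cache is reported.
 */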
TEST(L1I, count) {
	ASSERT_EQ(8, cpuinfo_get_l1i_caches_count());
}

TEST(L1I, non_null) {
	ASSERT_TRUE(cpuinfo_get_l1i_caches());
}

TEST(L1I, size) {
	for (uint32_t i = 0; i < cpuinfo_get_l1i_caches_count(); i++) {
		switch (i) {
			case 0:
			case 1:
			case 2:
			case 3:
				ASSERT_EQ(64 * 1024, cpuinfo_get_l1i_cache(i)->size);
				break;
			case 4:
			case 5:
			case 6:
			case 7:
				ASSERT_EQ(32 * 1024, cpuinfo_get_l1i_cache(i)->size);
				break;
		}
	}
}

TEST(L1I, associativity) {
	for (uint32_t i = 0; i < cpuinfo_get_l1i_caches_count(); i++) {
		ASSERT_EQ(4, cpuinfo_get_l1i_cache(i)->associativity);
	}
}

TEST(L1I, sets) {
	for (uint32_t i = 0; i < cpuinfo_get_l1i_caches_count(); i++) {
		ASSERT_EQ(cpuinfo_get_l1i_cache(i)->size,
			cpuinfo_get_l1i_cache(i)->sets * cpuinfo_get_l1i_cache(i)->line_size * cpuinfo_get_l1i_cache(i)->partitions * cpuinfo_get_l1i_cache(i)->associativity);
	}
}

TEST(L1I, partitions) {
	for (uint32_t i = 0; i < cpuinfo_get_l1i_caches_count(); i++) {
		ASSERT_EQ(1, cpuinfo_get_l1i_cache(i)->partitions);
	}
}

TEST(L1I, line_size) {
	for (uint32_t i = 0; i < cpuinfo_get_l1i_caches_count(); i++) {
		ASSERT_EQ(64, cpuinfo_get_l1i_cache(i)->line_size);
	}
}

TEST(L1I, flags) {
	for (uint32_t i = 0; i < cpuinfo_get_l1i_caches_count(); i++) {
		ASSERT_EQ(0, cpuinfo_get_l1i_cache(i)->flags);
	}
}

TEST(L1I, processors) {
	for (uint32_t i = 0; i < cpuinfo_get_l1i_caches_count(); i++) {
		ASSERT_EQ(i, cpuinfo_get_l1i_cache(i)->processor_start);
		ASSERT_EQ(1, cpuinfo_get_l1i_cache(i)->processor_count);
	}
}

TEST(L1D, count) {
	ASSERT_EQ(8, cpuinfo_get_l1d_caches_count());
}

TEST(L1D, non_null) {
	ASSERT_TRUE(cpuinfo_get_l1d_caches());
}

TEST(L1D, size) {
	for (uint32_t i = 0; i < cpuinfo_get_l1d_caches_count(); i++) {
		switch (i) {
			case 0:
			case 1:
			case 2:
			case 3:
				ASSERT_EQ(64 * 1024, cpuinfo_get_l1d_cache(i)->size);
				break;
			case 4:
			case 5:
			case 6:
			case 7:
				ASSERT_EQ(32 * 1024, cpuinfo_get_l1d_cache(i)->size);
				break;
		}
	}
}

TEST(L1D, associativity) {
	for (uint32_t i = 0; i < cpuinfo_get_l1d_caches_count(); i++) {
		switch (i) {
			case 0:
			case 1:
			case 2:
			case 3:
				ASSERT_EQ(16, cpuinfo_get_l1d_cache(i)->associativity);
				break;
			case 4:
			case 5:
			case 6:
			case 7:
				ASSERT_EQ(4, cpuinfo_get_l1d_cache(i)->associativity);
				break;
		}
	}
}

TEST(L1D, sets) {
	for (uint32_t i = 0; i < cpuinfo_get_l1d_caches_count(); i++) {
		ASSERT_EQ(cpuinfo_get_l1d_cache(i)->size,
			cpuinfo_get_l1d_cache(i)->sets * cpuinfo_get_l1d_cache(i)->line_size * cpuinfo_get_l1d_cache(i)->partitions * cpuinfo_get_l1d_cache(i)->associativity);
	}
}

TEST(L1D, partitions) {
	for (uint32_t i = 0; i < cpuinfo_get_l1d_caches_count(); i++) {
		ASSERT_EQ(1, cpuinfo_get_l1d_cache(i)->partitions);
	}
}

TEST(L1D, line_size) {
	for (uint32_t i = 0; i < cpuinfo_get_l1d_caches_count(); i++) {
		ASSERT_EQ(64, cpuinfo_get_l1d_cache(i)->line_size);
	}
}

TEST(L1D, flags) {
	for (uint32_t i = 0; i < cpuinfo_get_l1d_caches_count(); i++) {
		ASSERT_EQ(0, cpuinfo_get_l1d_cache(i)->flags);
	}
}

TEST(L1D, processors) {
	for (uint32_t i = 0; i < cpuinfo_get_l1d_caches_count(); i++) {
		ASSERT_EQ(i, cpuinfo_get_l1d_cache(i)->processor_start);
		ASSERT_EQ(1, cpuinfo_get_l1d_cache(i)->processor_count);
	}
}

TEST(L2, count) {
	ASSERT_EQ(8, cpuinfo_get_l2_caches_count());
}

TEST(L2, non_null) {
	ASSERT_TRUE(cpuinfo_get_l2_caches());
}

TEST(L2, size) {
	for (uint32_t i = 0; i < cpuinfo_get_l2_caches_count(); i++) {
		switch (i) {
			case 0:
			case 1:
			case 2:
			case 3:
				ASSERT_EQ(256 * 1024, cpuinfo_get_l2_cache(i)->size);
				break;
			case 4:
			case 5:
			case 6:
			case 7:
				ASSERT_EQ(128 * 1024, cpuinfo_get_l2_cache(i)->size);
				break;
		}
	}
}

TEST(L2, associativity) {
	for (uint32_t i = 0; i < cpuinfo_get_l2_caches_count(); i++) {
		switch (i) {
			case 0:
			case 1:
			case 2:
			case 3:
				ASSERT_EQ(8, cpuinfo_get_l2_cache(i)->associativity);
				break;
			case 4:
			case 5:
			case 6:
			case 7:
				ASSERT_EQ(4, cpuinfo_get_l2_cache(i)->associativity);
				break;
		}
	}
}

TEST(L2, sets) {
	for (uint32_t i = 0; i < cpuinfo_get_l2_caches_count(); i++) {
		ASSERT_EQ(cpuinfo_get_l2_cache(i)->size,
			cpuinfo_get_l2_cache(i)->sets * cpuinfo_get_l2_cache(i)->line_size * cpuinfo_get_l2_cache(i)->partitions * cpuinfo_get_l2_cache(i)->associativity);
	}
}

TEST(L2, partitions) {
	for (uint32_t i = 0; i < cpuinfo_get_l2_caches_count(); i++) {
		ASSERT_EQ(1, cpuinfo_get_l2_cache(i)->partitions);
	}
}

TEST(L2, line_size) {
	for (uint32_t i = 0; i < cpuinfo_get_l2_caches_count(); i++) {
		ASSERT_EQ(64, cpuinfo_get_l2_cache(i)->line_size);
	}
}

TEST(L2, flags) {
	for (uint32_t i = 0; i < cpuinfo_get_l2_caches_count(); i++) {
		ASSERT_EQ(0, cpuinfo_get_l2_cache(i)->flags);
	}
}

TEST(L2, processors) {
	for (uint32_t i = 0; i < cpuinfo_get_l2_caches_count(); i++) {
		ASSERT_EQ(i, cpuinfo_get_l2_cache(i)->processor_start);
		ASSERT_EQ(1, cpuinfo_get_l2_cache(i)->processor_count);
	}
}

TEST(L3, count) {
	ASSERT_EQ(1, cpuinfo_get_l3_caches_count());
}

TEST(L3, non_null) {
	ASSERT_TRUE(cpuinfo_get_l3_caches());
}

TEST(L3, size) {
	for (uint32_t i = 0; i < cpuinfo_get_l3_caches_count(); i++) {
		ASSERT_EQ(2 * 1024 * 1024, cpuinfo_get_l3_cache(i)->size);
	}
}

TEST(L3, associativity) {
	for (uint32_t i = 0; i < cpuinfo_get_l3_caches_count(); i++) {
		ASSERT_EQ(16, cpuinfo_get_l3_cache(i)->associativity);
	}
}

TEST(L3, sets) {
	for (uint32_t i = 0; i < cpuinfo_get_l3_caches_count(); i++) {
		ASSERT_EQ(cpuinfo_get_l3_cache(i)->size,
			cpuinfo_get_l3_cache(i)->sets * cpuinfo_get_l3_cache(i)->line_size * cpuinfo_get_l3_cache(i)->partitions * cpuinfo_get_l3_cache(i)->associativity);
	}
}

TEST(L3, partitions) {
	for (uint32_t i = 0; i < cpuinfo_get_l3_caches_count(); i++) {
		ASSERT_EQ(1, cpuinfo_get_l3_cache(i)->partitions);
	}
}

TEST(L3, line_size) {
	for (uint32_t i = 0; i < cpuinfo_get_l3_caches_count(); i++) {
		ASSERT_EQ(64, cpuinfo_get_l3_cache(i)->line_size);
	}
}

TEST(L3, flags) {
	for (uint32_t i = 0; i < cpuinfo_get_l3_caches_count(); i++) {
		ASSERT_EQ(0, cpuinfo_get_l3_cache(i)->flags);
	}
}

TEST(L3, processors) {
	for (uint32_t i = 0; i < cpuinfo_get_l3_caches_count(); i++) {
		ASSERT_EQ(0, cpuinfo_get_l3_cache(i)->processor_start);
		ASSERT_EQ(8, cpuinfo_get_l3_cache(i)->processor_count);
	}
}

TEST(L4, none) {
	ASSERT_EQ(0, cpuinfo_get_l4_caches_count());
	ASSERT_FALSE(cpuinfo_get_l4_caches());
}

/* Mocked filesystem dump (and Android system properties) for this device. */
#include <galaxy-s9-us.h>

int main(int argc, char* argv[]) {
	/* Mock the kernel HWCAP bits and the filesystem before initializing cpuinfo. */
	#if CPUINFO_ARCH_ARM
		cpuinfo_set_hwcap(UINT32_C(0x0037B0D6));
		cpuinfo_set_hwcap2(UINT32_C(0x0000001F));
	#elif CPUINFO_ARCH_ARM64
		cpuinfo_set_hwcap(UINT32_C(0x000007FF));
	#endif
	cpuinfo_mock_filesystem(filesystem);
	#ifdef __ANDROID__
		cpuinfo_mock_android_properties(properties);
	#endif
	cpuinfo_initialize();
	::testing::InitGoogleTest(&argc, argv);
	return RUN_ALL_TESTS();
}