#include <gtest/gtest.h>

#include <cstring>
#include <string>

#include <cpuinfo.h>
#include <cpuinfo-mock.h>


TEST(PROCESSORS, count) {
	ASSERT_EQ(8, cpuinfo_get_processors_count());
}

TEST(PROCESSORS, non_null) {
	ASSERT_TRUE(cpuinfo_get_processors());
}

TEST(PROCESSORS, smt_id) {
	for (uint32_t i = 0; i < cpuinfo_get_processors_count(); i++) {
		ASSERT_EQ(0, cpuinfo_get_processor(i)->smt_id);
	}
}

TEST(PROCESSORS, core) {
	for (uint32_t i = 0; i < cpuinfo_get_processors_count(); i++) {
		ASSERT_EQ(cpuinfo_get_core(i), cpuinfo_get_processor(i)->core);
	}
}

TEST(PROCESSORS, cluster) {
	for (uint32_t i = 0; i < cpuinfo_get_processors_count(); i++) {
		switch (i) {
			case 0:
			case 1:
			case 2:
			case 3:
				ASSERT_EQ(cpuinfo_get_cluster(0), cpuinfo_get_processor(i)->cluster);
				break;
			case 4:
			case 5:
			case 6:
			case 7:
				ASSERT_EQ(cpuinfo_get_cluster(1), cpuinfo_get_processor(i)->cluster);
				break;
		}
	}
}

TEST(PROCESSORS, package) {
	for (uint32_t i = 0; i < cpuinfo_get_processors_count(); i++) {
		ASSERT_EQ(cpuinfo_get_package(0), cpuinfo_get_processor(i)->package);
	}
}

TEST(PROCESSORS, DISABLED_linux_id) {
	for (uint32_t i = 0; i < cpuinfo_get_processors_count(); i++) {
		switch (i) {
			case 0:
			case 1:
			case 2:
			case 3:
				ASSERT_EQ(i + 4, cpuinfo_get_processor(i)->linux_id);
				break;
			case 4:
			case 5:
			case 6:
			case 7:
				ASSERT_EQ(i - 4, cpuinfo_get_processor(i)->linux_id);
				break;
		}
	}
}

TEST(PROCESSORS, l1i) {
	for (uint32_t i = 0; i < cpuinfo_get_processors_count(); i++) {
		ASSERT_EQ(cpuinfo_get_l1i_cache(i), cpuinfo_get_processor(i)->cache.l1i);
	}
}

TEST(PROCESSORS, l1d) {
	for (uint32_t i = 0; i < cpuinfo_get_processors_count(); i++) {
		ASSERT_EQ(cpuinfo_get_l1d_cache(i), cpuinfo_get_processor(i)->cache.l1d);
	}
}

TEST(PROCESSORS, l2) {
	for (uint32_t i = 0; i < cpuinfo_get_processors_count(); i++) {
		switch (i) {
			case 0:
			case 1:
			case 2:
			case 3:
				ASSERT_EQ(cpuinfo_get_l2_cache(0), cpuinfo_get_processor(i)->cache.l2);
				break;
			case 4:
			case 5:
			case 6:
			case 7:
				ASSERT_EQ(cpuinfo_get_l2_cache(1), cpuinfo_get_processor(i)->cache.l2);
				break;
		}
	}
}

TEST(PROCESSORS, l3) {
	for (uint32_t i = 0; i < cpuinfo_get_processors_count(); i++) {
		ASSERT_FALSE(cpuinfo_get_processor(i)->cache.l3);
	}
}

TEST(PROCESSORS, l4) {
	for (uint32_t i = 0; i < cpuinfo_get_processors_count(); i++) {
		ASSERT_FALSE(cpuinfo_get_processor(i)->cache.l4);
	}
}

TEST(CORES, count) {
	ASSERT_EQ(8, cpuinfo_get_cores_count());
}

TEST(CORES, non_null) {
	ASSERT_TRUE(cpuinfo_get_cores());
}

TEST(CORES, processor_start) {
	for (uint32_t i = 0; i < cpuinfo_get_cores_count(); i++) {
		ASSERT_EQ(i, cpuinfo_get_core(i)->processor_start);
	}
}

TEST(CORES, processor_count) {
	for (uint32_t i = 0; i < cpuinfo_get_cores_count(); i++) {
		ASSERT_EQ(1, cpuinfo_get_core(i)->processor_count);
	}
}

TEST(CORES, core_id) {
	for (uint32_t i = 0; i < cpuinfo_get_cores_count(); i++) {
		ASSERT_EQ(i, cpuinfo_get_core(i)->core_id);
	}
}

TEST(CORES, cluster) {
	for (uint32_t i = 0; i < cpuinfo_get_cores_count(); i++) {
		switch (i) {
			case 0:
			case 1:
			case 2:
			case 3:
				ASSERT_EQ(cpuinfo_get_cluster(0), cpuinfo_get_core(i)->cluster);
				break;
			case 4:
			case 5:
			case 6:
			case 7:
				ASSERT_EQ(cpuinfo_get_cluster(1), cpuinfo_get_core(i)->cluster);
				break;
		}
	}
}
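/*
 * Per-core identification checks. Every core is expected to report the Arm vendor,
 * the Cortex-A53 microarchitecture, and MIDR 0x410FD032, which decodes to
 * implementer 0x41 (Arm), part 0xD03 (Cortex-A53), revision r0p2.
 * The DISABLED_frequency test records the expected 1.69 GHz maximum frequency
 * from the mocked sysfs dump but is not run by default.
 */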
TEST(CORES, package) {
	for (uint32_t i = 0; i < cpuinfo_get_cores_count(); i++) {
		ASSERT_EQ(cpuinfo_get_package(0), cpuinfo_get_core(i)->package);
	}
}

TEST(CORES, vendor) {
	for (uint32_t i = 0; i < cpuinfo_get_cores_count(); i++) {
		ASSERT_EQ(cpuinfo_vendor_arm, cpuinfo_get_core(i)->vendor);
	}
}

TEST(CORES, uarch) {
	for (uint32_t i = 0; i < cpuinfo_get_cores_count(); i++) {
		ASSERT_EQ(cpuinfo_uarch_cortex_a53, cpuinfo_get_core(i)->uarch);
	}
}

TEST(CORES, midr) {
	for (uint32_t i = 0; i < cpuinfo_get_cores_count(); i++) {
		ASSERT_EQ(UINT32_C(0x410FD032), cpuinfo_get_core(i)->midr);
	}
}

TEST(CORES, DISABLED_frequency) {
	for (uint32_t i = 0; i < cpuinfo_get_cores_count(); i++) {
		ASSERT_EQ(UINT64_C(1690000000), cpuinfo_get_core(i)->frequency);
	}
}

TEST(CLUSTERS, count) {
	ASSERT_EQ(2, cpuinfo_get_clusters_count());
}

TEST(CLUSTERS, non_null) {
	ASSERT_TRUE(cpuinfo_get_clusters());
}

TEST(CLUSTERS, processor_start) {
	for (uint32_t i = 0; i < cpuinfo_get_clusters_count(); i++) {
		switch (i) {
			case 0:
				ASSERT_EQ(0, cpuinfo_get_cluster(i)->processor_start);
				break;
			case 1:
				ASSERT_EQ(4, cpuinfo_get_cluster(i)->processor_start);
				break;
		}
	}
}

TEST(CLUSTERS, processor_count) {
	for (uint32_t i = 0; i < cpuinfo_get_clusters_count(); i++) {
		ASSERT_EQ(4, cpuinfo_get_cluster(i)->processor_count);
	}
}

TEST(CLUSTERS, core_start) {
	for (uint32_t i = 0; i < cpuinfo_get_clusters_count(); i++) {
		switch (i) {
			case 0:
				ASSERT_EQ(0, cpuinfo_get_cluster(i)->core_start);
				break;
			case 1:
				ASSERT_EQ(4, cpuinfo_get_cluster(i)->core_start);
				break;
		}
	}
}

TEST(CLUSTERS, core_count) {
	for (uint32_t i = 0; i < cpuinfo_get_clusters_count(); i++) {
		ASSERT_EQ(4, cpuinfo_get_cluster(i)->core_count);
	}
}

TEST(CLUSTERS, cluster_id) {
	for (uint32_t i = 0; i < cpuinfo_get_clusters_count(); i++) {
		ASSERT_EQ(i, cpuinfo_get_cluster(i)->cluster_id);
	}
}

TEST(CLUSTERS, package) {
	for (uint32_t i = 0; i < cpuinfo_get_clusters_count(); i++) {
		ASSERT_EQ(cpuinfo_get_package(0), cpuinfo_get_cluster(i)->package);
	}
}

TEST(CLUSTERS, vendor) {
	for (uint32_t i = 0; i < cpuinfo_get_clusters_count(); i++) {
		ASSERT_EQ(cpuinfo_vendor_arm, cpuinfo_get_cluster(i)->vendor);
	}
}

TEST(CLUSTERS, uarch) {
	for (uint32_t i = 0; i < cpuinfo_get_clusters_count(); i++) {
		ASSERT_EQ(cpuinfo_uarch_cortex_a53, cpuinfo_get_cluster(i)->uarch);
	}
}

TEST(CLUSTERS, midr) {
	for (uint32_t i = 0; i < cpuinfo_get_clusters_count(); i++) {
		ASSERT_EQ(UINT32_C(0x410FD032), cpuinfo_get_cluster(i)->midr);
	}
}

TEST(CLUSTERS, DISABLED_frequency) {
	for (uint32_t i = 0; i < cpuinfo_get_clusters_count(); i++) {
		ASSERT_EQ(UINT64_C(1690000000), cpuinfo_get_cluster(i)->frequency);
	}
}

TEST(PACKAGES, count) {
	ASSERT_EQ(1, cpuinfo_get_packages_count());
}

TEST(PACKAGES, name) {
	for (uint32_t i = 0; i < cpuinfo_get_packages_count(); i++) {
		ASSERT_EQ("MediaTek MT6752",
			std::string(cpuinfo_get_package(i)->name,
				strnlen(cpuinfo_get_package(i)->name, CPUINFO_PACKAGE_NAME_MAX)));
	}
}
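/*
 * Package-level topology: the mock device exposes a single "MediaTek MT6752" package
 * that spans all 8 processors, all 8 cores, and both 4-core clusters.
 */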
TEST(PACKAGES, processor_start) {
	for (uint32_t i = 0; i < cpuinfo_get_packages_count(); i++) {
		ASSERT_EQ(0, cpuinfo_get_package(i)->processor_start);
	}
}

TEST(PACKAGES, processor_count) {
	for (uint32_t i = 0; i < cpuinfo_get_packages_count(); i++) {
		ASSERT_EQ(8, cpuinfo_get_package(i)->processor_count);
	}
}

TEST(PACKAGES, core_start) {
	for (uint32_t i = 0; i < cpuinfo_get_packages_count(); i++) {
		ASSERT_EQ(0, cpuinfo_get_package(i)->core_start);
	}
}

TEST(PACKAGES, core_count) {
	for (uint32_t i = 0; i < cpuinfo_get_packages_count(); i++) {
		ASSERT_EQ(8, cpuinfo_get_package(i)->core_count);
	}
}

TEST(PACKAGES, cluster_start) {
	for (uint32_t i = 0; i < cpuinfo_get_packages_count(); i++) {
		ASSERT_EQ(0, cpuinfo_get_package(i)->cluster_start);
	}
}

TEST(PACKAGES, cluster_count) {
	for (uint32_t i = 0; i < cpuinfo_get_packages_count(); i++) {
		ASSERT_EQ(2, cpuinfo_get_package(i)->cluster_count);
	}
}

TEST(ISA, thumb) {
#if CPUINFO_ARCH_ARM
	ASSERT_TRUE(cpuinfo_has_arm_thumb());
#elif CPUINFO_ARCH_ARM64
	ASSERT_FALSE(cpuinfo_has_arm_thumb());
#endif
}

TEST(ISA, thumb2) {
#if CPUINFO_ARCH_ARM
	ASSERT_TRUE(cpuinfo_has_arm_thumb2());
#elif CPUINFO_ARCH_ARM64
	ASSERT_FALSE(cpuinfo_has_arm_thumb2());
#endif
}

TEST(ISA, armv5e) {
#if CPUINFO_ARCH_ARM
	ASSERT_TRUE(cpuinfo_has_arm_v5e());
#elif CPUINFO_ARCH_ARM64
	ASSERT_FALSE(cpuinfo_has_arm_v5e());
#endif
}

TEST(ISA, armv6) {
#if CPUINFO_ARCH_ARM
	ASSERT_TRUE(cpuinfo_has_arm_v6());
#elif CPUINFO_ARCH_ARM64
	ASSERT_FALSE(cpuinfo_has_arm_v6());
#endif
}

TEST(ISA, armv6k) {
#if CPUINFO_ARCH_ARM
	ASSERT_TRUE(cpuinfo_has_arm_v6k());
#elif CPUINFO_ARCH_ARM64
	ASSERT_FALSE(cpuinfo_has_arm_v6k());
#endif
}

TEST(ISA, armv7) {
#if CPUINFO_ARCH_ARM
	ASSERT_TRUE(cpuinfo_has_arm_v7());
#elif CPUINFO_ARCH_ARM64
	ASSERT_FALSE(cpuinfo_has_arm_v7());
#endif
}

TEST(ISA, armv7mp) {
#if CPUINFO_ARCH_ARM
	ASSERT_TRUE(cpuinfo_has_arm_v7mp());
#elif CPUINFO_ARCH_ARM64
	ASSERT_FALSE(cpuinfo_has_arm_v7mp());
#endif
}

TEST(ISA, idiv) {
	ASSERT_TRUE(cpuinfo_has_arm_idiv());
}

TEST(ISA, vfpv2) {
	ASSERT_FALSE(cpuinfo_has_arm_vfpv2());
}

TEST(ISA, vfpv3) {
	ASSERT_TRUE(cpuinfo_has_arm_vfpv3());
}

TEST(ISA, vfpv3_d32) {
	ASSERT_TRUE(cpuinfo_has_arm_vfpv3_d32());
}

TEST(ISA, vfpv3_fp16) {
	ASSERT_TRUE(cpuinfo_has_arm_vfpv3_fp16());
}

TEST(ISA, vfpv3_fp16_d32) {
	ASSERT_TRUE(cpuinfo_has_arm_vfpv3_fp16_d32());
}

TEST(ISA, vfpv4) {
	ASSERT_TRUE(cpuinfo_has_arm_vfpv4());
}

TEST(ISA, vfpv4_d32) {
	ASSERT_TRUE(cpuinfo_has_arm_vfpv4_d32());
}

TEST(ISA, wmmx) {
	ASSERT_FALSE(cpuinfo_has_arm_wmmx());
}

TEST(ISA, wmmx2) {
	ASSERT_FALSE(cpuinfo_has_arm_wmmx2());
}

TEST(ISA, neon) {
	ASSERT_TRUE(cpuinfo_has_arm_neon());
}

TEST(ISA, neon_fp16) {
	ASSERT_TRUE(cpuinfo_has_arm_neon_fp16());
}

TEST(ISA, neon_fma) {
	ASSERT_TRUE(cpuinfo_has_arm_neon_fma());
}

TEST(ISA, atomics) {
	ASSERT_FALSE(cpuinfo_has_arm_atomics());
}

TEST(ISA, neon_rdm) {
	ASSERT_FALSE(cpuinfo_has_arm_neon_rdm());
}

TEST(ISA, fp16_arith) {
	ASSERT_FALSE(cpuinfo_has_arm_fp16_arith());
}
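/*
 * The remaining ISA checks cover optional ARMv8.1+ extensions, which a Cortex-A53
 * (an ARMv8.0 design) is expected not to report: half-precision arithmetic, dot
 * product, JSCVT, and FCMA all stay false. The crypto and CRC32 extensions are
 * expected only on AArch64 builds, where the mocked HWCAP value in main() appears
 * to advertise them; the 32-bit build mocks HWCAP2 as zero, so they stay off there.
 */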
TEST(ISA, neon_fp16_arith) {
	ASSERT_FALSE(cpuinfo_has_arm_neon_fp16_arith());
}

TEST(ISA, neon_dot) {
	ASSERT_FALSE(cpuinfo_has_arm_neon_dot());
}

TEST(ISA, jscvt) {
	ASSERT_FALSE(cpuinfo_has_arm_jscvt());
}

TEST(ISA, fcma) {
	ASSERT_FALSE(cpuinfo_has_arm_fcma());
}

TEST(ISA, aes) {
#if CPUINFO_ARCH_ARM64
	ASSERT_TRUE(cpuinfo_has_arm_aes());
#elif CPUINFO_ARCH_ARM
	ASSERT_FALSE(cpuinfo_has_arm_aes());
#endif
}

TEST(ISA, sha1) {
#if CPUINFO_ARCH_ARM64
	ASSERT_TRUE(cpuinfo_has_arm_sha1());
#elif CPUINFO_ARCH_ARM
	ASSERT_FALSE(cpuinfo_has_arm_sha1());
#endif
}

TEST(ISA, sha2) {
#if CPUINFO_ARCH_ARM64
	ASSERT_TRUE(cpuinfo_has_arm_sha2());
#elif CPUINFO_ARCH_ARM
	ASSERT_FALSE(cpuinfo_has_arm_sha2());
#endif
}

TEST(ISA, pmull) {
#if CPUINFO_ARCH_ARM64
	ASSERT_TRUE(cpuinfo_has_arm_pmull());
#elif CPUINFO_ARCH_ARM
	ASSERT_FALSE(cpuinfo_has_arm_pmull());
#endif
}

TEST(ISA, crc32) {
#if CPUINFO_ARCH_ARM64
	ASSERT_TRUE(cpuinfo_has_arm_crc32());
#elif CPUINFO_ARCH_ARM
	ASSERT_FALSE(cpuinfo_has_arm_crc32());
#endif
}

TEST(L1I, count) {
	ASSERT_EQ(8, cpuinfo_get_l1i_caches_count());
}

TEST(L1I, non_null) {
	ASSERT_TRUE(cpuinfo_get_l1i_caches());
}

TEST(L1I, size) {
	for (uint32_t i = 0; i < cpuinfo_get_l1i_caches_count(); i++) {
		ASSERT_EQ(16 * 1024, cpuinfo_get_l1i_cache(i)->size);
	}
}

TEST(L1I, associativity) {
	for (uint32_t i = 0; i < cpuinfo_get_l1i_caches_count(); i++) {
		ASSERT_EQ(2, cpuinfo_get_l1i_cache(i)->associativity);
	}
}

TEST(L1I, sets) {
	for (uint32_t i = 0; i < cpuinfo_get_l1i_caches_count(); i++) {
		ASSERT_EQ(cpuinfo_get_l1i_cache(i)->size,
			cpuinfo_get_l1i_cache(i)->sets * cpuinfo_get_l1i_cache(i)->line_size *
			cpuinfo_get_l1i_cache(i)->partitions * cpuinfo_get_l1i_cache(i)->associativity);
	}
}

TEST(L1I, partitions) {
	for (uint32_t i = 0; i < cpuinfo_get_l1i_caches_count(); i++) {
		ASSERT_EQ(1, cpuinfo_get_l1i_cache(i)->partitions);
	}
}

TEST(L1I, line_size) {
	for (uint32_t i = 0; i < cpuinfo_get_l1i_caches_count(); i++) {
		ASSERT_EQ(64, cpuinfo_get_l1i_cache(i)->line_size);
	}
}

TEST(L1I, flags) {
	for (uint32_t i = 0; i < cpuinfo_get_l1i_caches_count(); i++) {
		ASSERT_EQ(0, cpuinfo_get_l1i_cache(i)->flags);
	}
}

TEST(L1I, processors) {
	for (uint32_t i = 0; i < cpuinfo_get_l1i_caches_count(); i++) {
		ASSERT_EQ(i, cpuinfo_get_l1i_cache(i)->processor_start);
		ASSERT_EQ(1, cpuinfo_get_l1i_cache(i)->processor_count);
	}
}

TEST(L1D, count) {
	ASSERT_EQ(8, cpuinfo_get_l1d_caches_count());
}

TEST(L1D, non_null) {
	ASSERT_TRUE(cpuinfo_get_l1d_caches());
}

TEST(L1D, size) {
	for (uint32_t i = 0; i < cpuinfo_get_l1d_caches_count(); i++) {
		ASSERT_EQ(16 * 1024, cpuinfo_get_l1d_cache(i)->size);
	}
}

TEST(L1D, associativity) {
	for (uint32_t i = 0; i < cpuinfo_get_l1d_caches_count(); i++) {
		ASSERT_EQ(4, cpuinfo_get_l1d_cache(i)->associativity);
	}
}
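/*
 * The sets tests here and above check the usual cache geometry identity:
 * size == sets * line_size * partitions * associativity. With the values asserted
 * in this file, that implies 128 sets for each 2-way 16 KB L1I, 64 sets for each
 * 4-way 16 KB L1D, and 256 sets for each 16-way 256 KB L2.
 */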
TEST(L1D, sets) {
	for (uint32_t i = 0; i < cpuinfo_get_l1d_caches_count(); i++) {
		ASSERT_EQ(cpuinfo_get_l1d_cache(i)->size,
			cpuinfo_get_l1d_cache(i)->sets * cpuinfo_get_l1d_cache(i)->line_size *
			cpuinfo_get_l1d_cache(i)->partitions * cpuinfo_get_l1d_cache(i)->associativity);
	}
}

TEST(L1D, partitions) {
	for (uint32_t i = 0; i < cpuinfo_get_l1d_caches_count(); i++) {
		ASSERT_EQ(1, cpuinfo_get_l1d_cache(i)->partitions);
	}
}

TEST(L1D, line_size) {
	for (uint32_t i = 0; i < cpuinfo_get_l1d_caches_count(); i++) {
		ASSERT_EQ(64, cpuinfo_get_l1d_cache(i)->line_size);
	}
}

TEST(L1D, flags) {
	for (uint32_t i = 0; i < cpuinfo_get_l1d_caches_count(); i++) {
		ASSERT_EQ(0, cpuinfo_get_l1d_cache(i)->flags);
	}
}

TEST(L1D, processors) {
	for (uint32_t i = 0; i < cpuinfo_get_l1d_caches_count(); i++) {
		ASSERT_EQ(i, cpuinfo_get_l1d_cache(i)->processor_start);
		ASSERT_EQ(1, cpuinfo_get_l1d_cache(i)->processor_count);
	}
}

TEST(L2, count) {
	ASSERT_EQ(2, cpuinfo_get_l2_caches_count());
}

TEST(L2, non_null) {
	ASSERT_TRUE(cpuinfo_get_l2_caches());
}

TEST(L2, size) {
	for (uint32_t i = 0; i < cpuinfo_get_l2_caches_count(); i++) {
		ASSERT_EQ(256 * 1024, cpuinfo_get_l2_cache(i)->size);
	}
}

TEST(L2, associativity) {
	for (uint32_t i = 0; i < cpuinfo_get_l2_caches_count(); i++) {
		ASSERT_EQ(16, cpuinfo_get_l2_cache(i)->associativity);
	}
}

TEST(L2, sets) {
	for (uint32_t i = 0; i < cpuinfo_get_l2_caches_count(); i++) {
		ASSERT_EQ(cpuinfo_get_l2_cache(i)->size,
			cpuinfo_get_l2_cache(i)->sets * cpuinfo_get_l2_cache(i)->line_size *
			cpuinfo_get_l2_cache(i)->partitions * cpuinfo_get_l2_cache(i)->associativity);
	}
}

TEST(L2, partitions) {
	for (uint32_t i = 0; i < cpuinfo_get_l2_caches_count(); i++) {
		ASSERT_EQ(1, cpuinfo_get_l2_cache(i)->partitions);
	}
}

TEST(L2, line_size) {
	for (uint32_t i = 0; i < cpuinfo_get_l2_caches_count(); i++) {
		ASSERT_EQ(64, cpuinfo_get_l2_cache(i)->line_size);
	}
}

TEST(L2, flags) {
	for (uint32_t i = 0; i < cpuinfo_get_l2_caches_count(); i++) {
		ASSERT_EQ(0, cpuinfo_get_l2_cache(i)->flags);
	}
}

TEST(L2, processors) {
	for (uint32_t i = 0; i < cpuinfo_get_l2_caches_count(); i++) {
		switch (i) {
			case 0:
				ASSERT_EQ(0, cpuinfo_get_l2_cache(i)->processor_start);
				ASSERT_EQ(4, cpuinfo_get_l2_cache(i)->processor_count);
				break;
			case 1:
				ASSERT_EQ(4, cpuinfo_get_l2_cache(i)->processor_start);
				ASSERT_EQ(4, cpuinfo_get_l2_cache(i)->processor_count);
				break;
		}
	}
}

TEST(L3, none) {
	ASSERT_EQ(0, cpuinfo_get_l3_caches_count());
	ASSERT_FALSE(cpuinfo_get_l3_caches());
}

TEST(L4, none) {
	ASSERT_EQ(0, cpuinfo_get_l4_caches_count());
	ASSERT_FALSE(cpuinfo_get_l4_caches());
}

#include <xperia-c4-dual.h>

int main(int argc, char* argv[]) {
#if CPUINFO_ARCH_ARM
	cpuinfo_set_hwcap(UINT32_C(0x0007B0D6));
	cpuinfo_set_hwcap2(UINT32_C(0x00000000));
#elif CPUINFO_ARCH_ARM64
	cpuinfo_set_hwcap(UINT32_C(0x000000FB));
#endif
	cpuinfo_mock_filesystem(filesystem);
#ifdef __ANDROID__
	cpuinfo_mock_android_properties(properties);
#endif
	cpuinfo_initialize();
	::testing::InitGoogleTest(&argc, argv);
	return RUN_ALL_TESTS();
}