/*
   2006-05-21: vex r1619 finally causes the x86->IR front end to state
   exactly the %eflags dataflow surrounding 'cmpb $0, ... ; js ..'
   and so memcheck no longer gives a false positive on this test.

   -----------

   (original comments)
   Assembly derived from the following program compiled with -O2.
   This fools Valgrind, causing it to give a false error.

   #include <stdio.h>

   struct Foo
   {
      int a1 : 1;
      int a2 : 1;
      int a3 : 1;
      int a4 : 1;
      int a5 : 1;
      int a6 : 1;
      int a7 : 1;
      int bleh : 1;
   };

   struct Foo* foo;

   void set()
   {
      foo->bleh = 1;
   }

   void get()
   {
      if ( foo->bleh == 0 )
         printf( "blieb\n" );
   }

   int main()
   {
      foo = malloc(sizeof(struct Foo));
      set();

      get();

      return 0;
   }

*/

#include "tests/asm.h"

	.file	"tronical.c"
#if defined(VGO_linux)
	.version	"01.01"
#endif
gcc2_compiled.:
.text
	.align 4
.globl set
/* set(): foo->bleh = 1.
   Sets only the top bit (0x80) of the first byte at *foo via orb;
   the other seven bitfield bits remain uninitialised malloc memory.
   That partial definedness is what the test below exercises. */
set:
	pushl	%ebp
	movl	foo, %eax
	orb	$128, (%eax)
	movl	%esp, %ebp
	popl	%ebp
	ret
.Lfe1:
.LC0:
	.ascii	"blieb\n"
.text
	.align 4
.globl get
/* get(): if (foo->bleh == 0) printf("blieb\n").
   'cmpb $0, (%eax) ; js' branches purely on the sign flag, i.e. on the
   single byte's top bit -- the one bit set() defined.  Memcheck must
   track %eflags definedness bit-accurately here (see header comment);
   before vex r1619 it reported a false uninitialised-value error. */
get:
	pushl	%ebp
	movl	%esp, %ebp
	subl	$8, %esp
	movl	foo, %eax
	cmpb	$0, (%eax)
	js	.L4
	subl	$12, %esp
	pushl	$.LC0
	call	VG_SYM_ASM(printf)
	addl	$16, %esp
.L4:
	leave
	ret
.Lfe2:
	.align 4
.globl VG_SYM_ASM(main)
/* main(): foo = malloc(4); set(); get(); return 0.
   The 4-byte allocation is left uninitialised apart from the one bit
   written by set(). */
VG_SYM_ASM(main):
	pushl	%ebp
	movl	%esp, %ebp
	subl	$20, %esp
	pushl	$4
	call	VG_SYM_ASM(malloc)
	movl	%eax, foo
	call	set
	call	get
	xorl	%eax, %eax
	leave
	ret
.Lfe3:
	.comm	foo,4,4
	.ident	"GCC: (GNU) 2.96 20000731 (Red Hat Linux 7.1 2.96-98)"