/*

2006-05-21: vex r1619 finally causes the x86->IR front end to state
exactly the %eflags dataflow surrounding 'cmpb $0, ... ;  js ..'
and so memcheck no longer gives a false positive on this test.
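
To see why no error should be reported here: after 'cmpb $0, (%eax)'
the sign flag SF is simply bit 7 of the byte at (%eax), and 'js'
reads SF alone, so the branch depends only on the one bit that set()
initialises.  A minimal C sketch of that definedness argument follows
(hypothetical helper name, not memcheck's actual implementation;
memcheck tracks a V-bit per data bit, where a set V-bit means
"undefined"):

#include <stdio.h>

// V-bits for the tested byte: a 1-bit marks an undefined data bit.
static int js_outcome_is_defined(unsigned char vbits)
{
    // cmpb sets SF from bit 7 of the byte, and js reads only SF.
    return (vbits & 0x80) == 0;
}

int main(void)
{
    printf("%d\n", js_outcome_is_defined(0x7f)); // bits 0..6 undefined: branch still defined
    printf("%d\n", js_outcome_is_defined(0x80)); // bit 7 undefined: memcheck should complain
    return 0;
}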

-----------

(original comments)
Assembly derived from the following program compiled with -O2.
This fools Valgrind, causing it to give a false error.

#include <stdio.h>
#include <stdlib.h>

struct Foo
{
    int a1 : 1;
    int a2 : 1;
    int a3 : 1;
    int a4 : 1;
    int a5 : 1;
    int a6 : 1;
    int a7 : 1;
    int bleh : 1;
};

struct Foo* foo;

void set()
{
    foo->bleh = 1;
}

void get()
{
    if ( foo->bleh == 0 )
        printf( "blieb\n" );
}

int main()
{
    foo = malloc(sizeof(struct Foo));
    set();

    get();

    return 0;
}

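With gcc's bitfield layout on x86, the eight 1-bit fields share a
single byte: a1 is bit 0 and bleh is bit 7.  Hence, in the assembly
below, set()'s 'foo->bleh = 1' becomes 'orb $128, (%eax)', and
get()'s 'foo->bleh == 0' test becomes 'cmpb $0, (%eax)' followed
by 'js'.
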
*/

#include "tests/asm.h"

	.file	"tronical.c"
	.version	"01.01"
gcc2_compiled.:
.text
	.align 4
.globl set
set:
	pushl	%ebp
	movl	foo, %eax
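	/* set bit 7 of the byte at foo: this is 'foo->bleh = 1' */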
	orb	$128, (%eax)
	movl	%esp, %ebp
	popl	%ebp
	ret
.Lfe1:
.LC0:
	.ascii	"blieb\n\0"
.text
	.align 4
.globl get
get:
	pushl	%ebp
	movl	%esp, %ebp
	subl	$8, %esp
	movl	foo, %eax
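	/* Compare the whole byte against zero; bits 0..6 are
	   uninitialised, but the 'js' below reads only SF, which
	   cmpb sets from bit 7 ('bleh'), so the branch is defined. */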
	cmpb	$0, (%eax)
	js	.L4
	subl	$12, %esp
	pushl	$.LC0
	call	VG_SYM_ASM(printf)
	addl	$16, %esp
.L4:
	leave
	ret
.Lfe2:
	.align 4
.globl VG_SYM_ASM(main)
VG_SYM_ASM(main):
	pushl	%ebp
	movl	%esp, %ebp
	subl	$20, %esp
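	/* malloc(4): sizeof(struct Foo); the block is uninitialised */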
	pushl	$4
	call	VG_SYM_ASM(malloc)
	movl	%eax, foo
	call	set
	call	get
	xorl	%eax, %eax
	leave
	ret
.Lfe3:
	.comm	foo,4,4
	.ident	"GCC: (GNU) 2.96 20000731 (Red Hat Linux 7.1 2.96-98)"
