/*--------------------------------------------------------------------*/
/*--- The core dispatch loop, for jumping to a code address.       ---*/
/*---                                         dispatch-x86-linux.S ---*/
/*--------------------------------------------------------------------*/

/*
  This file is part of Valgrind, a dynamic binary instrumentation
  framework.

  Copyright (C) 2000-2013 Julian Seward
     jseward@acm.org

  This program is free software; you can redistribute it and/or
  modify it under the terms of the GNU General Public License as
  published by the Free Software Foundation; either version 2 of the
  License, or (at your option) any later version.

  This program is distributed in the hope that it will be useful, but
  WITHOUT ANY WARRANTY; without even the implied warranty of
  MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the GNU
  General Public License for more details.

  You should have received a copy of the GNU General Public License
  along with this program; if not, write to the Free Software
  Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA
  02111-1307, USA.

  The GNU General Public License is contained in the file COPYING.
*/

#if defined(VGP_x86_linux)

#include "pub_core_basics_asm.h"
#include "pub_core_dispatch_asm.h"
#include "pub_core_transtab_asm.h"
#include "libvex_guest_offsets.h"	/* for OFFSET_x86_EIP */


/*------------------------------------------------------------*/
/*---                                                      ---*/
/*--- The dispatch loop.  VG_(disp_run_translations) is    ---*/
/*--- used to run all translations,                        ---*/
/*--- including no-redir ones.                             ---*/
/*---                                                      ---*/
/*------------------------------------------------------------*/

/*----------------------------------------------------*/
/*--- Entry and preamble (set everything up)       ---*/
/*----------------------------------------------------*/

/* signature:
void VG_(disp_run_translations)( UWord* two_words,
                                 void*  guest_state,
                                 Addr   host_addr );
*/
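/* A minimal caller-side sketch (illustrative only; the variable
   names below are assumptions, not taken from this file):

      UWord two_words[2];
      VG_(disp_run_translations)( two_words,
                                  guest_state_ptr,   // thread's guest state
                                  host_code_addr );  // translation to enter
      // On return, two_words[0] holds a VG_TRC_* code and
      // two_words[1] optionally holds a second word (e.g. a
      // chain-me patch address), as described in the postamble
      // below.
*/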
.text
.globl VG_(disp_run_translations)
.type  VG_(disp_run_translations), @function
VG_(disp_run_translations):
        /* 0(%esp) holds our return address. */
        /* 4(%esp) holds two_words */
        /* 8(%esp) holds guest_state */
        /* 12(%esp) holds host_addr */

        /* The preamble */

        /* Save integer registers, since this is a pseudo-function. */
        pushl   %eax
        pushl   %ebx
        pushl   %ecx
        pushl   %edx
        pushl   %esi
        pushl   %edi
        pushl   %ebp
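        /* Seven 4-byte pushes move %esp down by 28 bytes, hence the
           28+N offsets used to reach the arguments below. */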

        /* 28+4(%esp) holds two_words */
        /* 28+8(%esp) holds guest_state */
        /* 28+12(%esp) holds host_addr */

        /* Get the host CPU in the state expected by generated code. */

        /* set host FPU control word to the default mode expected
           by VEX-generated code.  See comments in libvex.h for
           more info. */
        finit
        pushl   $0x027F
        fldcw   (%esp)
        addl    $4, %esp
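        /* ($0x027F: all x87 exceptions masked, 53-bit precision,
           round-to-nearest.) */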

        /* set host SSE control word to the default mode expected
           by VEX-generated code. */
        cmpl    $0, VG_(machine_x86_have_mxcsr)
        jz      L1
        pushl   $0x1F80
        ldmxcsr (%esp)
        addl    $4, %esp
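        /* ($0x1F80: all SSE exceptions masked, round-to-nearest,
           FTZ/DAZ off.) */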
L1:
        /* set dir flag to known value */
        cld

        /* Set up the guest state pointer */
        movl    28+8(%esp), %ebp
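        /* %ebp now points at the guest state; generated code and the
           continuation points below (e.g. OFFSET_x86_EIP(%ebp)) access
           guest registers relative to it. */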

        /* and jump into the code cache.  Chained translations in
           the code cache run, until for whatever reason, they can't
           continue.  When that happens, the translation in question
           will jump (or call) to one of the continuation points
           VG_(cp_...) below. */
        jmpl    *28+12(%esp)
        /*NOTREACHED*/

/*----------------------------------------------------*/
/*--- Postamble and exit.                          ---*/
/*----------------------------------------------------*/

postamble:
        /* At this point, %eax and %edx contain two
           words to be returned to the caller.  %eax
           holds a TRC value, and %edx optionally may
           hold another word (for CHAIN_ME exits, the
           address of the place to patch.) */
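        /* (These two words are stashed into two_words[0] and
           two_words[1] in remove_frame below, where the C caller
           picks them up.) */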

        /* We're leaving.  Check that nobody messed with %mxcsr
           or %fpucw.  We can't mess with %eax or %edx here as they
           hold the tentative return values, but any others are OK. */
#if !defined(ENABLE_INNER)
        /* This check fails for self-hosting, so skip in that case */
        pushl   $0
        fstcw   (%esp)
        cmpl    $0x027F, (%esp)
        popl    %esi /* get rid of the word without trashing %eflags */
        jnz     invariant_violation
#endif
        cmpl    $0, VG_(machine_x86_have_mxcsr)
        jz      L2
        pushl   $0
        stmxcsr (%esp)
        andl    $0xFFFFFFC0, (%esp)  /* mask out status flags */
        cmpl    $0x1F80, (%esp)
        popl    %esi
        jnz     invariant_violation
L2:     /* otherwise we're OK */
        jmp     remove_frame
invariant_violation:
        movl    $VG_TRC_INVARIANT_FAILED, %eax
        movl    $0, %edx

remove_frame:
        /* Stash return values */
        movl    28+4(%esp), %edi        /* two_words */
        movl    %eax, 0(%edi)
        movl    %edx, 4(%edi)
        /* Restore int regs and return. */
        popl    %ebp
        popl    %edi
        popl    %esi
        popl    %edx
        popl    %ecx
        popl    %ebx
        popl    %eax
        ret

/*----------------------------------------------------*/
/*--- Continuation points                          ---*/
/*----------------------------------------------------*/

/* ------ Chain me to slow entry point ------ */
.global VG_(disp_cp_chain_me_to_slowEP)
VG_(disp_cp_chain_me_to_slowEP):
        /* We got called.  The return address indicates
           where the patching needs to happen.  Collect
           the return address and exit back to C land,
           handing the caller the pair (Chain_me_S, RA) */
        movl    $VG_TRC_CHAIN_ME_TO_SLOW_EP, %eax
        popl    %edx
        /* 5 = movl $VG_(disp_chain_me_to_slowEP), %edx;
           2 = call *%edx */
        subl    $5+2, %edx
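        /* That is: the jump-to-dispatcher sequence emitted by the JIT
           is a 5-byte movl-imm32 into %edx followed by a 2-byte
           call *%edx; backing %edx up by 7 bytes gives the start of
           that sequence, which is where the patcher must write. */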
        jmp     postamble

/* ------ Chain me to fast entry point ------ */
.global VG_(disp_cp_chain_me_to_fastEP)
VG_(disp_cp_chain_me_to_fastEP):
        /* We got called.  The return address indicates
           where the patching needs to happen.  Collect
           the return address and exit back to C land,
           handing the caller the pair (Chain_me_F, RA) */
        movl    $VG_TRC_CHAIN_ME_TO_FAST_EP, %eax
        popl    %edx
        /* 5 = movl $VG_(disp_chain_me_to_fastEP), %edx;
           2 = call *%edx */
        subl    $5+2, %edx
        jmp     postamble

/* ------ Indirect but boring jump ------ */
.global VG_(disp_cp_xindir)
VG_(disp_cp_xindir):
        /* Where are we going? */
        movl    OFFSET_x86_EIP(%ebp), %eax

        /* stats only */
        addl    $1, VG_(stats__n_xindirs_32)

        /* try a fast lookup in the translation cache */
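        /* VG_(tt_fast) is conceptually an array of
           { guest address, host address } word pairs (8 bytes per
           entry on x86), indexed by (guest addr & VG_TT_FAST_MASK);
           the field names here are descriptive rather than taken
           from this file.  If the .guest field matches, we can jump
           straight to the cached .host translation. */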
        movl    %eax, %ebx                      /* next guest addr */
        andl    $VG_TT_FAST_MASK, %ebx          /* entry# */
        movl    0+VG_(tt_fast)(,%ebx,8), %esi   /* .guest */
        movl    4+VG_(tt_fast)(,%ebx,8), %edi   /* .host */
        cmpl    %eax, %esi
        jnz     fast_lookup_failed

        /* Found a match.  Jump to .host. */
        jmp     *%edi
        ud2     /* persuade insn decoders not to speculate past here */

fast_lookup_failed:
        /* stats only */
        addl    $1, VG_(stats__n_xindir_misses_32)

        movl    $VG_TRC_INNER_FASTMISS, %eax
        movl    $0, %edx
        jmp     postamble

/* ------ Assisted jump ------ */
.global VG_(disp_cp_xassisted)
VG_(disp_cp_xassisted):
        /* %ebp contains the TRC */
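        /* (The generated exit code is expected to have loaded the TRC
           into %ebp before jumping here; the guest state pointer is
           no longer needed at this point.) */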
        movl    %ebp, %eax
        movl    $0, %edx
        jmp     postamble

/* ------ Event check failed ------ */
.global VG_(disp_cp_evcheck_fail)
VG_(disp_cp_evcheck_fail):
        movl    $VG_TRC_INNER_COUNTERZERO, %eax
        movl    $0, %edx
        jmp     postamble


.size VG_(disp_run_translations), .-VG_(disp_run_translations)

/* Let the linker know we don't need an executable stack */
.section .note.GNU-stack,"",@progbits

#endif // defined(VGP_x86_linux)

/*--------------------------------------------------------------------*/
/*--- end                                                          ---*/
/*--------------------------------------------------------------------*/