2 /*---------------------------------------------------------------*/
3 /*--- begin guest_x86_defs.h ---*/
4 /*---------------------------------------------------------------*/
/*
   This file is part of Valgrind, a dynamic binary instrumentation
   framework.

   Copyright (C) 2004-2010 OpenWorks LLP

   This program is free software; you can redistribute it and/or
   modify it under the terms of the GNU General Public License as
   published by the Free Software Foundation; either version 2 of the
   License, or (at your option) any later version.

   This program is distributed in the hope that it will be useful, but
   WITHOUT ANY WARRANTY; without even the implied warranty of
   MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the GNU
   General Public License for more details.

   You should have received a copy of the GNU General Public License
   along with this program; if not, write to the Free Software
   Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA
   02110-1301, USA.

   The GNU General Public License is contained in the file COPYING.

   Neither the names of the U.S. Department of Energy nor the
   University of California nor the names of its contributors may be
   used to endorse or promote products derived from this software
   without prior written permission.
*/
36 /* Only to be used within the guest-x86 directory. */
38 #ifndef __VEX_GUEST_X86_DEFS_H
39 #define __VEX_GUEST_X86_DEFS_H
42 /*---------------------------------------------------------*/
43 /*--- x86 to IR conversion ---*/
44 /*---------------------------------------------------------*/
/* Convert one x86 insn to IR.  See the type DisOneInstrFn in
   guest_generic/bb_to_IR.h. */
/* Disassemble exactly one x86 instruction and append the resulting IR
   to 'irbb'.  'resteerOkFn' (with 'callback_opaque' passed through)
   lets the caller veto re-steering of the trace to a new guest
   address.  NOTE(review): the full upstream VEX signature carries
   additional parameters (guest code pointer, delta, guest IP, ABI
   info, ...) that are not visible in this chunk -- confirm against
   guest_generic/bb_to_IR.h before relying on this prototype. */
DisResult disInstr_X86 ( IRSB*        irbb,
                         Bool         (*resteerOkFn) ( void*, Addr64 ),
                         void*        callback_opaque,
                         VexArchInfo* archinfo,
                         Bool         host_bigendian );
62 /* Used by the optimiser to specialise calls to helpers. */
64 IRExpr* guest_x86_spechelper ( HChar* function_name,
67 /* Describes to the optimiser which part of the guest state require
68 precise memory exceptions. This is logically part of the guest
71 Bool guest_x86_state_requires_precise_mem_exns ( Int, Int );
74 VexGuestLayout x86guest_layout;
77 /*---------------------------------------------------------*/
78 /*--- x86 guest helpers ---*/
79 /*---------------------------------------------------------*/
81 /* --- CLEAN HELPERS --- */
83 extern UInt x86g_calculate_eflags_all (
84 UInt cc_op, UInt cc_dep1, UInt cc_dep2, UInt cc_ndep
87 __attribute((regparm(3)))
88 extern UInt x86g_calculate_eflags_c (
89 UInt cc_op, UInt cc_dep1, UInt cc_dep2, UInt cc_ndep
92 extern UInt x86g_calculate_condition (
93 UInt/*X86Condcode*/ cond,
95 UInt cc_dep1, UInt cc_dep2, UInt cc_ndep
98 extern UInt x86g_calculate_FXAM ( UInt tag, ULong dbl );
100 extern ULong x86g_calculate_RCR (
101 UInt arg, UInt rot_amt, UInt eflags_in, UInt sz
103 extern ULong x86g_calculate_RCL (
104 UInt arg, UInt rot_amt, UInt eflags_in, UInt sz
/* BCD adjustment helper for DAA/DAS/AAA/AAS, selected by 'opcode'.
   NOTE(review): exact packing of flags alongside AX in 'AX_and_flags'
   is not visible here -- confirm in guest_x86_helpers.c. */
extern UInt x86g_calculate_daa_das_aaa_aas ( UInt AX_and_flags, UInt opcode );

/* Validate a value about to be loaded into the x87 control word
   (FLDCW); presumably packs an emulation warning together with the
   accepted value -- confirm against the definition. */
extern ULong x86g_check_fldcw ( UInt fpucw );

/* Build an x87 control word from a VEX rounding-mode value. */
extern UInt x86g_create_fpucw ( UInt fpround );

/* As x86g_check_fldcw, but for MXCSR (LDMXCSR). */
extern ULong x86g_check_ldmxcsr ( UInt mxcsr );

/* Build an MXCSR value from a VEX SSE rounding-mode value. */
extern UInt x86g_create_mxcsr ( UInt sseround );
118 /* Translate a guest virtual_addr into a guest linear address by
119 consulting the supplied LDT/GDT structures. Their representation
120 must be as specified in pub/libvex_guest_x86.h. To indicate a
121 translation failure, 1<<32 is returned. On success, the lower 32
122 bits of the returned result indicate the linear address.
125 ULong x86g_use_seg_selector ( HWord ldt, HWord gdt,
126 UInt seg_selector, UInt virtual_addr );
/* MMX PMADDWD: multiply-and-add packed 16-bit words. */
extern ULong x86g_calculate_mmx_pmaddwd  ( ULong, ULong );
/* MMX PSADBW: sum of absolute differences of packed unsigned bytes. */
extern ULong x86g_calculate_mmx_psadbw   ( ULong, ULong );
/* MMX PMOVMSKB: gather the sign bits of 8 bytes into a byte mask. */
extern UInt  x86g_calculate_mmx_pmovmskb ( ULong );
/* SSE PMOVMSKB: as above, over a 128-bit value passed as two
   64-bit halves. */
extern UInt  x86g_calculate_sse_pmovmskb ( ULong w64hi, ULong w64lo );
/* --- DIRTY HELPERS --- */

/* Load an x87 80-bit extended-real from the given guest address.
   NOTE(review): presumed to convert it to a 64-bit double for the
   ULong result -- confirm in guest_x86_helpers.c. */
extern ULong x86g_dirtyhelper_loadF80le  ( UInt );

/* Store counterpart of x86g_dirtyhelper_loadF80le. */
extern void  x86g_dirtyhelper_storeF80le ( UInt, ULong );

/* CPUID emulations for hosts with no SSE, SSE1 and SSE2 capability
   levels respectively; each writes results into the guest state. */
extern void  x86g_dirtyhelper_CPUID_sse0 ( VexGuestX86State* );
extern void  x86g_dirtyhelper_CPUID_sse1 ( VexGuestX86State* );
extern void  x86g_dirtyhelper_CPUID_sse2 ( VexGuestX86State* );

/* FINIT: reinitialise the FPU portion of the guest state. */
extern void  x86g_dirtyhelper_FINIT ( VexGuestX86State* );

/* Write FP/SSE guest state to memory at the given address
   (FXSAVE / FSAVE / FSTENV). */
extern void  x86g_dirtyhelper_FXSAVE ( VexGuestX86State*, HWord );
extern void  x86g_dirtyhelper_FSAVE  ( VexGuestX86State*, HWord );
extern void  x86g_dirtyhelper_FSTENV ( VexGuestX86State*, HWord );

/* RDTSC: read the host timestamp counter. */
extern ULong x86g_dirtyhelper_RDTSC ( void );

/* Port I/O: IN reads 'sz' bytes from 'portno'; OUT writes 'data'. */
extern UInt  x86g_dirtyhelper_IN  ( UInt portno, UInt sz/*1,2 or 4*/ );
extern void  x86g_dirtyhelper_OUT ( UInt portno, UInt data, 
                                    UInt sz/*1,2 or 4*/ );
157 x86g_dirtyhelper_FXRSTOR ( VexGuestX86State*, HWord );
160 x86g_dirtyhelper_FRSTOR ( VexGuestX86State*, HWord );
163 x86g_dirtyhelper_FLDENV ( VexGuestX86State*, HWord );
166 /*---------------------------------------------------------*/
167 /*--- Condition code stuff ---*/
168 /*---------------------------------------------------------*/
/* %EFLAGS condition-flag bit positions. */
#define X86G_CC_SHIFT_O   11
#define X86G_CC_SHIFT_S   7
#define X86G_CC_SHIFT_Z   6
#define X86G_CC_SHIFT_A   4
#define X86G_CC_SHIFT_C   0
#define X86G_CC_SHIFT_P   2

/* %EFLAGS condition-flag masks, derived from the positions above. */
#define X86G_CC_MASK_O    (1 << X86G_CC_SHIFT_O)
#define X86G_CC_MASK_S    (1 << X86G_CC_SHIFT_S)
#define X86G_CC_MASK_Z    (1 << X86G_CC_SHIFT_Z)
#define X86G_CC_MASK_A    (1 << X86G_CC_SHIFT_A)
#define X86G_CC_MASK_C    (1 << X86G_CC_SHIFT_C)
#define X86G_CC_MASK_P    (1 << X86G_CC_SHIFT_P)

/* x87 FPU status-word condition-code (C3..C0) bit positions. */
#define X86G_FC_SHIFT_C3   14
#define X86G_FC_SHIFT_C2   10
#define X86G_FC_SHIFT_C1   9
#define X86G_FC_SHIFT_C0   8

/* x87 FPU condition-code masks, derived from the positions above. */
#define X86G_FC_MASK_C3   (1 << X86G_FC_SHIFT_C3)
#define X86G_FC_MASK_C2   (1 << X86G_FC_SHIFT_C2)
#define X86G_FC_MASK_C1   (1 << X86G_FC_SHIFT_C1)
#define X86G_FC_MASK_C0   (1 << X86G_FC_SHIFT_C0)
197 /* %EFLAGS thunk descriptors. A four-word thunk is used to record
198 details of the most recent flag-setting operation, so the flags can
199 be computed later if needed. It is possible to do this a little
200 more efficiently using a 3-word thunk, but that makes it impossible
201 to describe the flag data dependencies sufficiently accurately for
202 Memcheck. Hence 4 words are used, with minimal loss of efficiency.
206 CC_OP, which describes the operation.
208 CC_DEP1 and CC_DEP2. These are arguments to the operation.
209 We want Memcheck to believe that the resulting flags are
210 data-dependent on both CC_DEP1 and CC_DEP2, hence the
213 CC_NDEP. This is a 3rd argument to the operation which is
214 sometimes needed. We arrange things so that Memcheck does
215 not believe the resulting flags are data-dependent on CC_NDEP
218 To make Memcheck believe that (the definedness of) the encoded
219 flags depends only on (the definedness of) CC_DEP1 and CC_DEP2
222 (1) In the guest state layout info (x86guest_layout), CC_OP and
223 CC_NDEP are marked as always defined.
225 (2) When passing the thunk components to an evaluation function
226 (calculate_condition, calculate_eflags, calculate_eflags_c) the
227 IRCallee's mcx_mask must be set so as to exclude from
228 consideration all passed args except CC_DEP1 and CC_DEP2.
230 Strictly speaking only (2) is necessary for correctness. However,
231 (1) helps efficiency in that since (2) means we never ask about the
232 definedness of CC_OP or CC_NDEP, we may as well not even bother to
233 track their definedness.
235 When building the thunk, it is always necessary to write words into
236 CC_DEP1 and CC_DEP2, even if those args are not used given the
237 CC_OP field (eg, CC_DEP2 is not used if CC_OP is CC_LOGIC1/2/4).
238 This is important because otherwise Memcheck could give false
239 positives as it does not understand the relationship between the
240 CC_OP field and CC_DEP1 and CC_DEP2, and so believes that the
241 definedness of the stored flags always depends on both CC_DEP1 and
244 However, it is only necessary to set CC_NDEP when the CC_OP value
245 requires it, because Memcheck ignores CC_NDEP, and the evaluation
246 functions do understand the CC_OP fields and will only examine
247 CC_NDEP for suitable values of CC_OP.
249 A summary of the field usages is:
251 Operation DEP1 DEP2 NDEP
252 ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
254 add/sub/mul first arg second arg unused
256 adc/sbb first arg (second arg)
257 XOR old_carry old_carry
259 and/or/xor result zero unused
261 inc/dec result zero old_carry
263 shl/shr/sar result subshifted- unused
266 rol/ror result zero old_flags
268 copy old_flags zero unused.
271 Therefore Memcheck will believe the following:
273 * add/sub/mul -- definedness of result flags depends on definedness
276 * adc/sbb -- definedness of result flags depends on definedness of
277 both args and definedness of the old C flag. Because only two
278 DEP fields are available, the old C flag is XOR'd into the second
279 arg so that Memcheck sees the data dependency on it. That means
280 the NDEP field must contain a second copy of the old C flag
281 so that the evaluation functions can correctly recover the second
284 * and/or/xor are straightforward -- definedness of result flags
285 depends on definedness of result value.
287 * inc/dec -- definedness of result flags depends only on
288 definedness of result. This isn't really true -- it also depends
289 on the old C flag. However, we don't want Memcheck to see that,
290 and so the old C flag must be passed in NDEP and not in DEP2.
291 It's inconceivable that a compiler would generate code that puts
292 the C flag in an undefined state, then does an inc/dec, which
293 leaves C unchanged, and then makes a conditional jump/move based
294 on C. So our fiction seems a good approximation.
296 * shl/shr/sar -- straightforward, again, definedness of result
297 flags depends on definedness of result value. The subshifted
298 value (value shifted one less) is also needed, but its
299 definedness is the same as the definedness of the shifted value.
301 * rol/ror -- these only set O and C, and leave A Z C P alone.
302 However it seems prudent (as per inc/dec) to say the definedness
303 of all resulting flags depends on the definedness of the result,
304 hence the old flags must go in as NDEP and not DEP2.
306 * rcl/rcr are too difficult to do in-line, and so are done by a
307 helper function. They are not part of this scheme. The helper
308 function takes the value to be rotated, the rotate amount and the
309 old flags, and returns the new flags and the rotated value.
310 Since the helper's mcx_mask does not have any set bits, Memcheck
311 will lazily propagate undefinedness from any of the 3 args into
   both results (flags and actual value). */
/* Thunk CC_OP values.  NOTE(review): the enclosing 'enum { ... };'
   and the trailing X86G_CC_OP_NUMBER count had been lost, leaving
   bare enumerators; reconstructed per upstream VEX. */
enum {
    X86G_CC_OP_COPY=0, /* DEP1 = current flags, DEP2 = 0, NDEP = unused */
                       /* just copy DEP1 to output */

    X86G_CC_OP_ADDB,   /* 1 */
    X86G_CC_OP_ADDW,   /* 2 DEP1 = argL, DEP2 = argR, NDEP = unused */
    X86G_CC_OP_ADDL,   /* 3 */

    X86G_CC_OP_SUBB,   /* 4 */
    X86G_CC_OP_SUBW,   /* 5 DEP1 = argL, DEP2 = argR, NDEP = unused */
    X86G_CC_OP_SUBL,   /* 6 */

    X86G_CC_OP_ADCB,   /* 7 */
    X86G_CC_OP_ADCW,   /* 8 DEP1 = argL, DEP2 = argR ^ oldCarry, NDEP = oldCarry */
    X86G_CC_OP_ADCL,   /* 9 */

    X86G_CC_OP_SBBB,   /* 10 */
    X86G_CC_OP_SBBW,   /* 11 DEP1 = argL, DEP2 = argR ^ oldCarry, NDEP = oldCarry */
    X86G_CC_OP_SBBL,   /* 12 */

    X86G_CC_OP_LOGICB, /* 13 */
    X86G_CC_OP_LOGICW, /* 14 DEP1 = result, DEP2 = 0, NDEP = unused */
    X86G_CC_OP_LOGICL, /* 15 */

    X86G_CC_OP_INCB,   /* 16 */
    X86G_CC_OP_INCW,   /* 17 DEP1 = result, DEP2 = 0, NDEP = oldCarry (0 or 1) */
    X86G_CC_OP_INCL,   /* 18 */

    X86G_CC_OP_DECB,   /* 19 */
    X86G_CC_OP_DECW,   /* 20 DEP1 = result, DEP2 = 0, NDEP = oldCarry (0 or 1) */
    X86G_CC_OP_DECL,   /* 21 */

    X86G_CC_OP_SHLB,   /* 22 DEP1 = res, DEP2 = res', NDEP = unused */
    X86G_CC_OP_SHLW,   /* 23 where res' is like res but shifted one bit less */
    X86G_CC_OP_SHLL,   /* 24 */

    X86G_CC_OP_SHRB,   /* 25 DEP1 = res, DEP2 = res', NDEP = unused */
    X86G_CC_OP_SHRW,   /* 26 where res' is like res but shifted one bit less */
    X86G_CC_OP_SHRL,   /* 27 */

    X86G_CC_OP_ROLB,   /* 28 */
    X86G_CC_OP_ROLW,   /* 29 DEP1 = res, DEP2 = 0, NDEP = old flags */
    X86G_CC_OP_ROLL,   /* 30 */

    X86G_CC_OP_RORB,   /* 31 */
    X86G_CC_OP_RORW,   /* 32 DEP1 = res, DEP2 = 0, NDEP = old flags */
    X86G_CC_OP_RORL,   /* 33 */

    X86G_CC_OP_UMULB,  /* 34 */
    X86G_CC_OP_UMULW,  /* 35 DEP1 = argL, DEP2 = argR, NDEP = unused */
    X86G_CC_OP_UMULL,  /* 36 */

    X86G_CC_OP_SMULB,  /* 37 */
    X86G_CC_OP_SMULW,  /* 38 DEP1 = argL, DEP2 = argR, NDEP = unused */
    X86G_CC_OP_SMULL,  /* 39 */

    X86G_CC_OP_NUMBER  /* must be last: number of CC_OP values */
};
/* x86 condition codes, as evaluated by x86g_calculate_condition.
   NOTE(review): the enclosing 'typedef enum { ... } X86Condcode;'
   had been lost, leaving bare enumerators; reconstructed per
   upstream VEX. */
typedef
   enum {
      X86CondO      = 0,  /* overflow           */
      X86CondNO     = 1,  /* no overflow        */

      X86CondB      = 2,  /* below              */
      X86CondNB     = 3,  /* not below          */

      X86CondZ      = 4,  /* zero               */
      X86CondNZ     = 5,  /* not zero           */

      X86CondBE     = 6,  /* below or equal     */
      X86CondNBE    = 7,  /* not below or equal */

      X86CondS      = 8,  /* negative           */
      X86CondNS     = 9,  /* not negative       */

      X86CondP      = 10, /* parity even        */
      X86CondNP     = 11, /* not parity even    */

      X86CondL      = 12, /* jump less          */
      X86CondNL     = 13, /* not less           */

      X86CondLE     = 14, /* less or equal      */
      X86CondNLE    = 15, /* not less or equal  */

      X86CondAlways = 16  /* HACK */
   }
   X86Condcode;
403 #endif /* ndef __VEX_GUEST_X86_DEFS_H */
405 /*---------------------------------------------------------------*/
406 /*--- end guest_x86_defs.h ---*/
407 /*---------------------------------------------------------------*/