2 /*---------------------------------------------------------------*/
3 /*--- begin main_main.c ---*/
4 /*---------------------------------------------------------------*/
7 This file is part of Valgrind, a dynamic binary instrumentation
10 Copyright (C) 2004-2010 OpenWorks LLP
13 This program is free software; you can redistribute it and/or
14 modify it under the terms of the GNU General Public License as
15 published by the Free Software Foundation; either version 2 of the
16 License, or (at your option) any later version.
18 This program is distributed in the hope that it will be useful, but
19 WITHOUT ANY WARRANTY; without even the implied warranty of
20 MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
21 General Public License for more details.
23 You should have received a copy of the GNU General Public License
24 along with this program; if not, write to the Free Software
25 Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA
28 The GNU General Public License is contained in the file COPYING.
30 Neither the names of the U.S. Department of Energy nor the
31 University of California nor the names of its contributors may be
32 used to endorse or promote products derived from this software
33 without prior written permission.
37 #include "libvex_emwarn.h"
38 #include "libvex_guest_x86.h"
39 #include "libvex_guest_amd64.h"
40 #include "libvex_guest_arm.h"
41 #include "libvex_guest_ppc32.h"
42 #include "libvex_guest_ppc64.h"
44 #include "main_globals.h"
45 #include "main_util.h"
46 #include "host_generic_regs.h"
49 #include "host_x86_defs.h"
50 #include "host_amd64_defs.h"
51 #include "host_ppc_defs.h"
52 #include "host_arm_defs.h"
54 #include "guest_generic_bb_to_IR.h"
55 #include "guest_x86_defs.h"
56 #include "guest_amd64_defs.h"
57 #include "guest_arm_defs.h"
58 #include "guest_ppc_defs.h"
61 /* This file contains the top level interface to the library. */
63 /* --------- fwds ... --------- */
65 static Bool are_valid_hwcaps ( VexArch arch, UInt hwcaps );
66 static HChar* show_hwcaps ( VexArch arch, UInt hwcaps );
69 /* --------- Initialise the library. --------- */
71 /* Exported to library client. */
73 void LibVEX_default_VexControl ( /*OUT*/ VexControl* vcon )
/* Fill *vcon with the library's default translation-control settings:
   moderate iropt effort (level 2), imprecise memory exceptions, and the
   default unrolling / chasing / insn-count limits that LibVEX_Init later
   range-checks.  NOTE(review): the function's braces and any trailing
   field assignments are elided in this extract; visible code is kept
   byte-identical. */
75 vcon->iropt_verbosity = 0;
76 vcon->iropt_level = 2;
77 vcon->iropt_precise_memory_exns = False;
78 vcon->iropt_unroll_thresh = 120;
79 vcon->guest_max_insns = 60;
80 vcon->guest_chase_thresh = 10;
81 vcon->guest_chase_cond = False;
85 /* Exported to library client. */
/* LibVEX_Init: one-time library initialisation.  Records the client's
   failure-exit and logging callbacks, sanity-checks the supplied
   VexControl against the ranges below, statically verifies the sizes of
   the basic types Vex was built with, then stores the debug level and
   valgrind-support flag and selects TEMP allocation mode.
   NOTE(review): the `void LibVEX_Init (` line and several parameter /
   assertion lines are elided in this extract; visible code is kept
   byte-identical. */
88 /* failure exit function */
89 __attribute__ ((noreturn))
90 void (*failure_exit) ( void ),
91 /* logging output function */
92 void (*log_bytes) ( HChar*, Int nbytes ),
93 /* debug paranoia level */
95 /* Are we supporting valgrind checking? */
96 Bool valgrind_support,
98 /*READONLY*/VexControl* vcon
101 /* First off, do enough minimal setup so that the following
102 assertions can fail in a sane fashion, if need be. */
103 vex_failure_exit = failure_exit;
104 vex_log_bytes = log_bytes;
106 /* Now it's safe to check parameters for sanity. */
107 vassert(!vex_initdone);
108 vassert(failure_exit);
110 vassert(debuglevel >= 0);
/* Range-check every VexControl field the translator later relies on. */
112 vassert(vcon->iropt_verbosity >= 0);
113 vassert(vcon->iropt_level >= 0);
114 vassert(vcon->iropt_level <= 2);
115 vassert(vcon->iropt_unroll_thresh >= 0);
116 vassert(vcon->iropt_unroll_thresh <= 400);
117 vassert(vcon->guest_max_insns >= 1);
118 vassert(vcon->guest_max_insns <= 100);
119 vassert(vcon->guest_chase_thresh >= 0);
120 vassert(vcon->guest_chase_thresh < vcon->guest_max_insns);
121 vassert(vcon->guest_chase_cond == True
122 || vcon->guest_chase_cond == False);
124 /* Check that Vex has been built with sizes of basic types as
125 stated in priv/libvex_basictypes.h. Failure of any of these is
126 a serious configuration error and should be corrected
127 immediately. If any of these assertions fail you can fully
128 expect Vex not to work properly, if at all. */
130 vassert(1 == sizeof(UChar));
131 vassert(1 == sizeof(Char));
132 vassert(2 == sizeof(UShort));
133 vassert(2 == sizeof(Short));
134 vassert(4 == sizeof(UInt));
135 vassert(4 == sizeof(Int));
136 vassert(8 == sizeof(ULong));
137 vassert(8 == sizeof(Long));
138 vassert(4 == sizeof(Float));
139 vassert(8 == sizeof(Double));
140 vassert(1 == sizeof(Bool));
141 vassert(4 == sizeof(Addr32));
142 vassert(8 == sizeof(Addr64));
143 vassert(16 == sizeof(U128));
/* Host pointer/word-size invariants: Vex supports 32- and 64-bit hosts
   only, and HWord must exactly match the native pointer size. */
145 vassert(sizeof(void*) == 4 || sizeof(void*) == 8);
146 vassert(sizeof(void*) == sizeof(int*));
147 vassert(sizeof(void*) == sizeof(HWord));
149 vassert(VEX_HOST_WORDSIZE == sizeof(void*));
150 vassert(VEX_HOST_WORDSIZE == sizeof(HWord));
152 /* Really start up .. */
153 vex_debuglevel = debuglevel;
154 vex_valgrind_support = valgrind_support;
157 vexSetAllocMode ( VexAllocModeTEMP );
161 /* --------- Make a translation. --------- */
163 /* Exported to library client. */
/* LibVEX_Translate: top-level translation pipeline.  Turns one guest
   basic block (described by *vta) into host machine code, via:
   front end (bb_to_IR) -> initial iropt -> optional client
   instrumentation passes -> post-instrumentation cleanup -> tree
   building -> instruction selection (iselSB) -> register allocation
   (doRegisterAllocation) -> per-instruction assembly (*emit) into
   vta->host_bytes.  Returns VexTransOK on success, VexTransAccessFail
   if the guest bytes could not be read, or VexTransOutputFull if the
   output buffer is too small.
   NOTE(review): many lines of this function (case labels, braces,
   several statements) are elided in this extract; the visible code is
   kept byte-identical and comments below only describe what is
   actually shown. */
165 VexTranslateResult LibVEX_Translate ( VexTranslateArgs* vta )
167 /* This the bundle of functions we need to do the back-end stuff
168 (insn selection, reg-alloc, assembly) whilst being insulated
169 from the target instruction set. */
170 HReg* available_real_regs;
171 Int n_available_real_regs;
172 Bool (*isMove) ( HInstr*, HReg*, HReg* );
173 void (*getRegUsage) ( HRegUsage*, HInstr*, Bool );
174 void (*mapRegs) ( HRegRemap*, HInstr*, Bool );
175 void (*genSpill) ( HInstr**, HInstr**, HReg, Int, Bool );
176 void (*genReload) ( HInstr**, HInstr**, HReg, Int, Bool );
177 HInstr* (*directReload) ( HInstr*, HReg, Short );
178 void (*ppInstr) ( HInstr*, Bool );
179 void (*ppReg) ( HReg );
180 HInstrArray* (*iselSB) ( IRSB*, VexArch, VexArchInfo*,
182 Int (*emit) ( UChar*, Int, HInstr*, Bool, void* );
183 IRExpr* (*specHelper) ( HChar*, IRExpr** );
184 Bool (*preciseMemExnsFn) ( Int, Int );
186 DisOneInstrFn disInstrFn;
188 VexGuestLayout* guest_layout;
189 Bool host_is_bigendian = False;
193 Int i, j, k, out_used, guest_sizeB;
194 Int offB_TISTART, offB_TILEN;
195 UChar insn_bytes[32];
196 IRType guest_word_type;
197 IRType host_word_type;
/* Defensive defaults so that a missed switch case below is caught by
   later sanity checks rather than used uninitialised. */
201 available_real_regs = NULL;
202 n_available_real_regs = 0;
214 preciseMemExnsFn = NULL;
216 guest_word_type = Ity_INVALID;
217 host_word_type = Ity_INVALID;
222 vex_traceflags = vta->traceflags;
224 vassert(vex_initdone);
225 vexSetAllocModeTEMP_and_clear();
226 vexAllocSanityCheck();
228 /* First off, check that the guest and host insn sets
/* Host-side dispatch: select the back-end function bundle (register
   allocator hooks, pretty-printers, instruction selector, emitter),
   host endianness and word type, per host architecture.  Each case
   also validates the host hwcaps and the expected dispatcher scheme
   (jump-to-dispatcher on x86/amd64, return-to-dispatcher otherwise).
   The Bool casts adapt each back end's concrete HInstr type to the
   generic function-pointer signatures above.  NOTE(review): the case
   labels themselves are elided in this extract. */
231 switch (vta->arch_host) {
235 getAllocableRegs_X86 ( &n_available_real_regs,
236 &available_real_regs );
237 isMove = (Bool(*)(HInstr*,HReg*,HReg*)) isMove_X86Instr;
238 getRegUsage = (void(*)(HRegUsage*,HInstr*, Bool))
239 getRegUsage_X86Instr;
240 mapRegs = (void(*)(HRegRemap*,HInstr*, Bool)) mapRegs_X86Instr;
241 genSpill = (void(*)(HInstr**,HInstr**,HReg,Int,Bool))
243 genReload = (void(*)(HInstr**,HInstr**,HReg,Int,Bool))
245 directReload = (HInstr*(*)(HInstr*,HReg,Short)) directReload_X86;
246 ppInstr = (void(*)(HInstr*, Bool)) ppX86Instr;
247 ppReg = (void(*)(HReg)) ppHRegX86;
249 emit = (Int(*)(UChar*,Int,HInstr*,Bool,void*)) emit_X86Instr;
250 host_is_bigendian = False;
251 host_word_type = Ity_I32;
252 vassert(are_valid_hwcaps(VexArchX86, vta->archinfo_host.hwcaps));
253 vassert(vta->dispatch != NULL); /* jump-to-dispatcher scheme */
258 getAllocableRegs_AMD64 ( &n_available_real_regs,
259 &available_real_regs );
260 isMove = (Bool(*)(HInstr*,HReg*,HReg*)) isMove_AMD64Instr;
261 getRegUsage = (void(*)(HRegUsage*,HInstr*, Bool))
262 getRegUsage_AMD64Instr;
263 mapRegs = (void(*)(HRegRemap*,HInstr*, Bool)) mapRegs_AMD64Instr;
264 genSpill = (void(*)(HInstr**,HInstr**,HReg,Int,Bool))
266 genReload = (void(*)(HInstr**,HInstr**,HReg,Int,Bool))
268 ppInstr = (void(*)(HInstr*, Bool)) ppAMD64Instr;
269 ppReg = (void(*)(HReg)) ppHRegAMD64;
270 iselSB = iselSB_AMD64;
271 emit = (Int(*)(UChar*,Int,HInstr*,Bool,void*)) emit_AMD64Instr;
272 host_is_bigendian = False;
273 host_word_type = Ity_I64;
274 vassert(are_valid_hwcaps(VexArchAMD64, vta->archinfo_host.hwcaps));
275 vassert(vta->dispatch != NULL); /* jump-to-dispatcher scheme */
/* PPC32/PPC64 share the PPC back end; `mode64` distinguishes them.
   NOTE(review): the mode64 assignment lines are elided here. */
280 getAllocableRegs_PPC ( &n_available_real_regs,
281 &available_real_regs, mode64 );
282 isMove = (Bool(*)(HInstr*,HReg*,HReg*)) isMove_PPCInstr;
283 getRegUsage = (void(*)(HRegUsage*,HInstr*,Bool)) getRegUsage_PPCInstr;
284 mapRegs = (void(*)(HRegRemap*,HInstr*,Bool)) mapRegs_PPCInstr;
285 genSpill = (void(*)(HInstr**,HInstr**,HReg,Int,Bool)) genSpill_PPC;
286 genReload = (void(*)(HInstr**,HInstr**,HReg,Int,Bool)) genReload_PPC;
287 ppInstr = (void(*)(HInstr*,Bool)) ppPPCInstr;
288 ppReg = (void(*)(HReg)) ppHRegPPC;
290 emit = (Int(*)(UChar*,Int,HInstr*,Bool,void*)) emit_PPCInstr;
291 host_is_bigendian = True;
292 host_word_type = Ity_I32;
293 vassert(are_valid_hwcaps(VexArchPPC32, vta->archinfo_host.hwcaps));
294 vassert(vta->dispatch == NULL); /* return-to-dispatcher scheme */
299 getAllocableRegs_PPC ( &n_available_real_regs,
300 &available_real_regs, mode64 );
301 isMove = (Bool(*)(HInstr*,HReg*,HReg*)) isMove_PPCInstr;
302 getRegUsage = (void(*)(HRegUsage*,HInstr*, Bool)) getRegUsage_PPCInstr;
303 mapRegs = (void(*)(HRegRemap*,HInstr*, Bool)) mapRegs_PPCInstr;
304 genSpill = (void(*)(HInstr**,HInstr**,HReg,Int,Bool)) genSpill_PPC;
305 genReload = (void(*)(HInstr**,HInstr**,HReg,Int,Bool)) genReload_PPC;
306 ppInstr = (void(*)(HInstr*, Bool)) ppPPCInstr;
307 ppReg = (void(*)(HReg)) ppHRegPPC;
309 emit = (Int(*)(UChar*,Int,HInstr*,Bool,void*)) emit_PPCInstr;
310 host_is_bigendian = True;
311 host_word_type = Ity_I64;
312 vassert(are_valid_hwcaps(VexArchPPC64, vta->archinfo_host.hwcaps));
313 vassert(vta->dispatch == NULL); /* return-to-dispatcher scheme */
318 getAllocableRegs_ARM ( &n_available_real_regs,
319 &available_real_regs );
320 isMove = (Bool(*)(HInstr*,HReg*,HReg*)) isMove_ARMInstr;
321 getRegUsage = (void(*)(HRegUsage*,HInstr*, Bool)) getRegUsage_ARMInstr;
322 mapRegs = (void(*)(HRegRemap*,HInstr*, Bool)) mapRegs_ARMInstr;
323 genSpill = (void(*)(HInstr**,HInstr**,HReg,Int,Bool)) genSpill_ARM;
324 genReload = (void(*)(HInstr**,HInstr**,HReg,Int,Bool)) genReload_ARM;
325 ppInstr = (void(*)(HInstr*, Bool)) ppARMInstr;
326 ppReg = (void(*)(HReg)) ppHRegARM;
328 emit = (Int(*)(UChar*,Int,HInstr*,Bool,void*)) emit_ARMInstr;
329 host_is_bigendian = False;
330 host_word_type = Ity_I32;
331 vassert(are_valid_hwcaps(VexArchARM, vta->archinfo_host.hwcaps));
332 vassert(vta->dispatch == NULL); /* return-to-dispatcher scheme */
336 vpanic("LibVEX_Translate: unsupported host insn set");
/* Guest-side dispatch: select the front-end disassembler, IR
   simplification helper, guest state layout/size, guest word type and
   the TISTART/TILEN offsets used for self-modifying-code checks.
   Each case also validates guest hwcaps and guest-state invariants
   (16-byte-multiple state size, field widths). */
340 switch (vta->arch_guest) {
343 preciseMemExnsFn = guest_x86_state_requires_precise_mem_exns;
344 disInstrFn = disInstr_X86;
345 specHelper = guest_x86_spechelper;
346 guest_sizeB = sizeof(VexGuestX86State);
347 guest_word_type = Ity_I32;
348 guest_layout = &x86guest_layout;
349 offB_TISTART = offsetof(VexGuestX86State,guest_TISTART);
350 offB_TILEN = offsetof(VexGuestX86State,guest_TILEN);
351 vassert(are_valid_hwcaps(VexArchX86, vta->archinfo_guest.hwcaps));
352 vassert(0 == sizeof(VexGuestX86State) % 16);
353 vassert(sizeof( ((VexGuestX86State*)0)->guest_TISTART) == 4);
354 vassert(sizeof( ((VexGuestX86State*)0)->guest_TILEN ) == 4);
355 vassert(sizeof( ((VexGuestX86State*)0)->guest_NRADDR ) == 4);
359 preciseMemExnsFn = guest_amd64_state_requires_precise_mem_exns;
360 disInstrFn = disInstr_AMD64;
361 specHelper = guest_amd64_spechelper;
362 guest_sizeB = sizeof(VexGuestAMD64State);
363 guest_word_type = Ity_I64;
364 guest_layout = &amd64guest_layout;
365 offB_TISTART = offsetof(VexGuestAMD64State,guest_TISTART);
366 offB_TILEN = offsetof(VexGuestAMD64State,guest_TILEN);
367 vassert(are_valid_hwcaps(VexArchAMD64, vta->archinfo_guest.hwcaps));
368 vassert(0 == sizeof(VexGuestAMD64State) % 16);
369 vassert(sizeof( ((VexGuestAMD64State*)0)->guest_TISTART ) == 8);
370 vassert(sizeof( ((VexGuestAMD64State*)0)->guest_TILEN ) == 8);
371 vassert(sizeof( ((VexGuestAMD64State*)0)->guest_NRADDR ) == 8);
375 preciseMemExnsFn = guest_ppc32_state_requires_precise_mem_exns;
376 disInstrFn = disInstr_PPC;
377 specHelper = guest_ppc32_spechelper;
378 guest_sizeB = sizeof(VexGuestPPC32State);
379 guest_word_type = Ity_I32;
380 guest_layout = &ppc32Guest_layout;
381 offB_TISTART = offsetof(VexGuestPPC32State,guest_TISTART);
382 offB_TILEN = offsetof(VexGuestPPC32State,guest_TILEN);
383 vassert(are_valid_hwcaps(VexArchPPC32, vta->archinfo_guest.hwcaps));
384 vassert(0 == sizeof(VexGuestPPC32State) % 16);
385 vassert(sizeof( ((VexGuestPPC32State*)0)->guest_TISTART ) == 4);
386 vassert(sizeof( ((VexGuestPPC32State*)0)->guest_TILEN ) == 4);
387 vassert(sizeof( ((VexGuestPPC32State*)0)->guest_NRADDR ) == 4);
391 preciseMemExnsFn = guest_ppc64_state_requires_precise_mem_exns;
392 disInstrFn = disInstr_PPC;
393 specHelper = guest_ppc64_spechelper;
394 guest_sizeB = sizeof(VexGuestPPC64State);
395 guest_word_type = Ity_I64;
396 guest_layout = &ppc64Guest_layout;
397 offB_TISTART = offsetof(VexGuestPPC64State,guest_TISTART);
398 offB_TILEN = offsetof(VexGuestPPC64State,guest_TILEN);
399 vassert(are_valid_hwcaps(VexArchPPC64, vta->archinfo_guest.hwcaps));
400 vassert(0 == sizeof(VexGuestPPC64State) % 16);
401 vassert(sizeof( ((VexGuestPPC64State*)0)->guest_TISTART ) == 8);
402 vassert(sizeof( ((VexGuestPPC64State*)0)->guest_TILEN ) == 8);
403 vassert(sizeof( ((VexGuestPPC64State*)0)->guest_NRADDR ) == 8);
404 vassert(sizeof( ((VexGuestPPC64State*)0)->guest_NRADDR_GPR2) == 8);
408 preciseMemExnsFn = guest_arm_state_requires_precise_mem_exns;
409 disInstrFn = disInstr_ARM;
410 specHelper = guest_arm_spechelper;
411 guest_sizeB = sizeof(VexGuestARMState);
412 guest_word_type = Ity_I32;
413 guest_layout = &armGuest_layout;
414 offB_TISTART = offsetof(VexGuestARMState,guest_TISTART);
415 offB_TILEN = offsetof(VexGuestARMState,guest_TILEN);
416 vassert(are_valid_hwcaps(VexArchARM, vta->archinfo_guest.hwcaps));
417 vassert(0 == sizeof(VexGuestARMState) % 16);
418 vassert(sizeof( ((VexGuestARMState*)0)->guest_TISTART) == 4);
419 vassert(sizeof( ((VexGuestARMState*)0)->guest_TILEN ) == 4);
420 vassert(sizeof( ((VexGuestARMState*)0)->guest_NRADDR ) == 4);
424 vpanic("LibVEX_Translate: unsupported guest insn set");
427 /* yet more sanity checks ... */
428 if (vta->arch_guest == vta->arch_host) {
429 /* doesn't necessarily have to be true, but if it isn't it means
430 we are simulating one flavour of an architecture a different
431 flavour of the same architecture, which is pretty strange. */
432 vassert(vta->archinfo_guest.hwcaps == vta->archinfo_host.hwcaps);
435 vexAllocSanityCheck();
437 if (vex_traceflags & VEX_TRACE_FE)
438 vex_printf("\n------------------------"
440 "------------------------\n\n");
/* Front end: disassemble guest bytes into an IRSB.  NOTE(review):
   several arguments of this call are elided in this extract. */
442 irsb = bb_to_IR ( vta->guest_extents,
443 vta->callback_opaque,
446 vta->guest_bytes_addr,
450 &vta->archinfo_guest,
454 vta->preamble_function,
458 vexAllocSanityCheck();
461 /* Access failure. */
462 vexSetAllocModeTEMP_and_clear();
464 return VexTransAccessFail;
/* The front end must report 1..3 extents, the first starting at the
   requested guest address, each of sane length. */
467 vassert(vta->guest_extents->n_used >= 1 && vta->guest_extents->n_used <= 3);
468 vassert(vta->guest_extents->base[0] == vta->guest_bytes_addr);
469 for (i = 0; i < vta->guest_extents->n_used; i++) {
470 vassert(vta->guest_extents->len[i] < 10000); /* sanity */
473 /* If debugging, show the raw guest bytes for this bb. */
474 if (0 || (vex_traceflags & VEX_TRACE_FE)) {
475 if (vta->guest_extents->n_used > 1) {
476 vex_printf("can't show code due to extents > 1\n");
479 UChar* p = (UChar*)vta->guest_bytes;
481 UInt guest_bytes_read = (UInt)vta->guest_extents->len[0];
482 vex_printf("GuestBytes %llx %u ", vta->guest_bytes_addr,
484 for (i = 0; i < guest_bytes_read; i++) {
486 vex_printf(" %02x", b );
/* Rolling shift-xor checksum of the guest bytes, printed for
   trace identification. */
487 sum = (sum << 1) ^ b;
489 vex_printf(" %08x\n\n", sum);
493 /* Sanity check the initial IR. */
494 sanityCheckIRSB( irsb, "initial IR",
495 False/*can be non-flat*/, guest_word_type );
497 vexAllocSanityCheck();
499 /* Clean it up, hopefully a lot. */
500 irsb = do_iropt_BB ( irsb, specHelper, preciseMemExnsFn,
501 vta->guest_bytes_addr );
502 sanityCheckIRSB( irsb, "after initial iropt",
503 True/*must be flat*/, guest_word_type );
505 if (vex_traceflags & VEX_TRACE_OPT1) {
506 vex_printf("\n------------------------"
507 " After pre-instr IR optimisation "
508 "------------------------\n\n");
513 vexAllocSanityCheck();
515 /* Get the thing instrumented. */
516 if (vta->instrument1)
517 irsb = vta->instrument1(vta->callback_opaque,
520 guest_word_type, host_word_type);
521 vexAllocSanityCheck();
523 if (vta->instrument2)
524 irsb = vta->instrument2(vta->callback_opaque,
527 guest_word_type, host_word_type);
529 if (vex_traceflags & VEX_TRACE_INST) {
530 vex_printf("\n------------------------"
531 " After instrumentation "
532 "------------------------\n\n");
537 if (vta->instrument1 || vta->instrument2)
538 sanityCheckIRSB( irsb, "after instrumentation",
539 True/*must be flat*/, guest_word_type );
541 /* Do a post-instrumentation cleanup pass. */
542 if (vta->instrument1 || vta->instrument2) {
543 do_deadcode_BB( irsb );
544 irsb = cprop_BB( irsb );
545 do_deadcode_BB( irsb );
546 sanityCheckIRSB( irsb, "after post-instrumentation cleanup",
547 True/*must be flat*/, guest_word_type );
550 vexAllocSanityCheck();
552 if (vex_traceflags & VEX_TRACE_OPT2) {
553 vex_printf("\n------------------------"
554 " After post-instr IR optimisation "
555 "------------------------\n\n");
560 /* Turn it into virtual-registerised code. Build trees -- this
561 also throws away any dead bindings. */
562 ado_treebuild_BB( irsb );
564 if (vta->finaltidy) {
565 irsb = vta->finaltidy(irsb);
568 vexAllocSanityCheck();
570 if (vex_traceflags & VEX_TRACE_TREES) {
571 vex_printf("\n------------------------"
572 " After tree-building "
573 "------------------------\n\n");
/* Debug escape hatch: report zero output bytes and stop early. */
579 if (0) { *(vta->host_bytes_used) = 0; return VexTransOK; }
582 if (vex_traceflags & VEX_TRACE_VCODE)
583 vex_printf("\n------------------------"
584 " Instruction selection "
585 "------------------------\n");
/* Instruction selection: IR -> virtual-registerised host code. */
587 vcode = iselSB ( irsb, vta->arch_host, &vta->archinfo_host,
588 &vta->abiinfo_both );
590 vexAllocSanityCheck();
592 if (vex_traceflags & VEX_TRACE_VCODE)
595 if (vex_traceflags & VEX_TRACE_VCODE) {
596 for (i = 0; i < vcode->arr_used; i++) {
597 vex_printf("%3d ", i);
598 ppInstr(vcode->arr[i], mode64);
604 /* Register allocate. */
605 rcode = doRegisterAllocation ( vcode, available_real_regs,
606 n_available_real_regs,
607 isMove, getRegUsage, mapRegs,
608 genSpill, genReload, directReload,
610 ppInstr, ppReg, mode64 );
612 vexAllocSanityCheck();
614 if (vex_traceflags & VEX_TRACE_RCODE) {
615 vex_printf("\n------------------------"
616 " Register-allocated code "
617 "------------------------\n\n");
618 for (i = 0; i < rcode->arr_used; i++) {
619 vex_printf("%3d ", i);
620 ppInstr(rcode->arr[i], mode64);
627 if (0) { *(vta->host_bytes_used) = 0; return VexTransOK; }
631 if (vex_traceflags & VEX_TRACE_ASM) {
632 vex_printf("\n------------------------"
634 "------------------------\n\n");
/* Assembly: emit each allocated instruction into insn_bytes (max 32
   bytes per insn), then copy into the caller's output buffer, bailing
   out with VexTransOutputFull if it would overflow. */
637 out_used = 0; /* tracks along the host_bytes array */
638 for (i = 0; i < rcode->arr_used; i++) {
639 if (vex_traceflags & VEX_TRACE_ASM) {
640 ppInstr(rcode->arr[i], mode64);
643 j = (*emit)( insn_bytes, 32, rcode->arr[i], mode64, vta->dispatch );
644 if (vex_traceflags & VEX_TRACE_ASM) {
645 for (k = 0; k < j; k++)
646 if (insn_bytes[k] < 16)
647 vex_printf("0%x ", (UInt)insn_bytes[k]);
649 vex_printf("%x ", (UInt)insn_bytes[k]);
652 if (out_used + j > vta->host_bytes_size) {
653 vexSetAllocModeTEMP_and_clear();
655 return VexTransOutputFull;
657 for (k = 0; k < j; k++) {
658 vta->host_bytes[out_used] = insn_bytes[k];
661 vassert(out_used <= vta->host_bytes_size);
663 *(vta->host_bytes_used) = out_used;
665 vexAllocSanityCheck();
667 vexSetAllocModeTEMP_and_clear();
674 /* --------- Emulation warnings. --------- */
/* Map a VexEmWarn emulation-warning code to a human-readable string;
   panics on an unknown code.  NOTE(review): the switch header and some
   `case` labels (e.g. for the %mxcsr.fz / .daz and PPC FP-exception
   returns) are elided in this extract; visible code kept byte-identical. */
676 HChar* LibVEX_EmWarn_string ( VexEmWarn ew )
681 case EmWarn_X86_x87exns:
682 return "Unmasking x87 FP exceptions";
683 case EmWarn_X86_x87precision:
684 return "Selection of non-80-bit x87 FP precision";
685 case EmWarn_X86_sseExns:
686 return "Unmasking SSE FP exceptions";
688 return "Setting %mxcsr.fz (SSE flush-underflows-to-zero mode)";
690 return "Setting %mxcsr.daz (SSE treat-denormals-as-zero mode)";
691 case EmWarn_X86_acFlag:
692 return "Setting %eflags.ac (setting noted but ignored)";
694 return "Unmasking PPC32/64 FP exceptions";
695 case EmWarn_PPC64_redir_overflow:
696 return "PPC64 function redirection stack overflow";
697 case EmWarn_PPC64_redir_underflow:
698 return "PPC64 function redirection stack underflow";
700 vpanic("LibVEX_EmWarn_string: unknown warning");
704 /* ------------------ Arch/HwCaps stuff. ------------------ */
/* Return a static name string for a VexArch value; "VexArch???" for
   anything unrecognised. */
706 const HChar* LibVEX_ppVexArch ( VexArch arch )
709 case VexArch_INVALID: return "INVALID";
710 case VexArchX86: return "X86";
711 case VexArchAMD64: return "AMD64";
712 case VexArchARM: return "ARM";
713 case VexArchPPC32: return "PPC32";
714 case VexArchPPC64: return "PPC64";
715 default: return "VexArch???";
/* Return a printable description of (arch, hwcaps), or "INVALID" when
   the combination is not one show_hwcaps recognises. */
719 const HChar* LibVEX_ppVexHwCaps ( VexArch arch, UInt hwcaps )
721 HChar* str = show_hwcaps(arch,hwcaps);
722 return str ? str : "INVALID";
726 /* Write default settings info *vai. */
/* Write default settings into *vai.  NOTE(review): the hwcaps default
   assignment is elided in this extract; only the PPC cache-line field
   is visible. */
727 void LibVEX_default_VexArchInfo ( /*OUT*/VexArchInfo* vai )
730 vai->ppc_cache_line_szB = 0;
733 /* Write default settings info *vbi. */
/* Write conservative default ABI settings into *vbi: no redzone, no
   amd64 segment-base assumptions, and all PPC ABI quirks disabled. */
734 void LibVEX_default_VexAbiInfo ( /*OUT*/VexAbiInfo* vbi )
736 vbi->guest_stack_redzone_size = 0;
737 vbi->guest_amd64_assume_fs_is_zero = False;
738 vbi->guest_amd64_assume_gs_is_0x60 = False;
739 vbi->guest_ppc_zap_RZ_at_blr = False;
740 vbi->guest_ppc_zap_RZ_at_bl = NULL;
741 vbi->guest_ppc_sc_continues_at_LR = False;
742 vbi->host_ppc_calls_use_fndescrs = False;
743 vbi->host_ppc32_regalign_int64_args = False;
747 /* Return a string showing the hwcaps in a nice way. The string will
748 be NULL for invalid combinations of flags, so these functions also
749 serve as a way to validate hwcaps values. */
/* Describe an x86 hwcaps word; NULL (on an elided fall-through line)
   for combinations that break the SSE3 > SSE2 > SSE1 monotonic order.
   NOTE(review): the baseline ("x86-sse0"-style) and SSE1 return lines
   plus the trailing `return NULL;` are elided in this extract. */
751 static HChar* show_hwcaps_x86 ( UInt hwcaps )
753 /* Monotonic, SSE3 > SSE2 > SSE1 > baseline. */
756 if (hwcaps == VEX_HWCAPS_X86_SSE1)
758 if (hwcaps == (VEX_HWCAPS_X86_SSE1 | VEX_HWCAPS_X86_SSE2))
759 return "x86-sse1-sse2";
760 if (hwcaps == (VEX_HWCAPS_X86_SSE1
761 | VEX_HWCAPS_X86_SSE2 | VEX_HWCAPS_X86_SSE3))
762 return "x86-sse1-sse2-sse3";
/* Describe an amd64 hwcaps word: SSE2 baseline with independent SSE3
   and CX16 flags.  NOTE(review): the line defining `c` (presumably
   hwcaps masked to these flags) and the trailing `return NULL;` are
   elided in this extract. */
767 static HChar* show_hwcaps_amd64 ( UInt hwcaps )
769 /* SSE3 and CX16 are orthogonal and > baseline, although we really
770 don't expect to come across anything which can do SSE3 but can't
771 do CX16. Still, we can handle that case. */
772 const UInt SSE3 = VEX_HWCAPS_AMD64_SSE3;
773 const UInt CX16 = VEX_HWCAPS_AMD64_CX16;
775 if (c == 0) return "amd64-sse2";
776 if (c == SSE3) return "amd64-sse3";
777 if (c == CX16) return "amd64-sse2-cx16";
778 if (c == (SSE3|CX16)) return "amd64-sse3-cx16";
/* Describe a ppc32 hwcaps word: V (vmx) > F (float) > integer baseline,
   with FX and GX only meaningful once F is present.  NOTE(review): the
   line defining `c` and the trailing `return NULL;` (for invalid
   combinations such as FX/GX without F) are elided in this extract. */
782 static HChar* show_hwcaps_ppc32 ( UInt hwcaps )
784 /* Monotonic with complications. Basically V > F > baseline,
785 but once you have F then you can have FX or GX too. */
786 const UInt F = VEX_HWCAPS_PPC32_F;
787 const UInt V = VEX_HWCAPS_PPC32_V;
788 const UInt FX = VEX_HWCAPS_PPC32_FX;
789 const UInt GX = VEX_HWCAPS_PPC32_GX;
791 if (c == 0) return "ppc32-int";
792 if (c == F) return "ppc32-int-flt";
793 if (c == (F|FX)) return "ppc32-int-flt-FX";
794 if (c == (F|GX)) return "ppc32-int-flt-GX";
795 if (c == (F|FX|GX)) return "ppc32-int-flt-FX-GX";
796 if (c == (F|V)) return "ppc32-int-flt-vmx";
797 if (c == (F|V|FX)) return "ppc32-int-flt-vmx-FX";
798 if (c == (F|V|GX)) return "ppc32-int-flt-vmx-GX";
799 if (c == (F|V|FX|GX)) return "ppc32-int-flt-vmx-FX-GX";
/* Describe a ppc64 hwcaps word: float is baseline (hence "int-flt" at
   c == 0), with optional FX, GX, and V (vmx) flags.  NOTE(review): the
   line defining `c` and the trailing `return NULL;` are elided in this
   extract. */
803 static HChar* show_hwcaps_ppc64 ( UInt hwcaps )
805 /* Monotonic with complications. Basically V > baseline(==F),
806 but once you have F then you can have FX or GX too. */
807 const UInt V = VEX_HWCAPS_PPC64_V;
808 const UInt FX = VEX_HWCAPS_PPC64_FX;
809 const UInt GX = VEX_HWCAPS_PPC64_GX;
811 if (c == 0) return "ppc64-int-flt";
812 if (c == FX) return "ppc64-int-flt-FX";
813 if (c == GX) return "ppc64-int-flt-GX";
814 if (c == (FX|GX)) return "ppc64-int-flt-FX-GX";
815 if (c == V) return "ppc64-int-flt-vmx";
816 if (c == (V|FX)) return "ppc64-int-flt-vmx-FX";
817 if (c == (V|GX)) return "ppc64-int-flt-vmx-GX";
818 if (c == (V|FX|GX)) return "ppc64-int-flt-vmx-FX-GX";
/* Describe an ARM hwcaps word: only the empty (baseline) value is
   recognised.  NOTE(review): the trailing `return NULL;` for nonzero
   hwcaps is elided in this extract. */
822 static HChar* show_hwcaps_arm ( UInt hwcaps )
824 if (hwcaps == 0) return "arm-baseline";
/* Dispatch to the per-architecture hwcaps pretty-printer.  Returns NULL
   for an unknown arch (and, via the helpers, for invalid hwcaps), which
   is what makes this usable as a validity check. */
829 static HChar* show_hwcaps ( VexArch arch, UInt hwcaps )
832 case VexArchX86: return show_hwcaps_x86(hwcaps);
833 case VexArchAMD64: return show_hwcaps_amd64(hwcaps);
834 case VexArchPPC32: return show_hwcaps_ppc32(hwcaps);
835 case VexArchPPC64: return show_hwcaps_ppc64(hwcaps);
836 case VexArchARM: return show_hwcaps_arm(hwcaps);
837 default: return NULL;
/* True iff (arch, hwcaps) is a combination show_hwcaps can describe;
   used by LibVEX_Translate's assertions on guest/host hwcaps. */
841 static Bool are_valid_hwcaps ( VexArch arch, UInt hwcaps )
843 return show_hwcaps(arch,hwcaps) != NULL;
847 /*---------------------------------------------------------------*/
848 /*--- end main_main.c ---*/
849 /*---------------------------------------------------------------*/