/* NOTE(review): "IDE_INT" below is almost certainly a truncated
   "HOST_WIDE_INT" -- this chunk begins mid-declaration of struct
   ix86_frame and every sibling field uses HOST_WIDE_INT.  Confirm
   against the full file before building.  */
IDE_INT reg_save_offset;
  HOST_WIDE_INT stack_realign_allocate;
  HOST_WIDE_INT stack_realign_offset;
  HOST_WIDE_INT sse_reg_save_offset;

  /* When save_regs_using_mov is set, emit prologue using
     move instead of push instructions.  */
  bool save_regs_using_mov;

  /* Assume without checking that:
       EXPENSIVE_P = expensive_function_p (EXPENSIVE_COUNT).  */
  bool expensive_p;
  int expensive_count;
};

/* Machine specific frame tracking during prologue/epilogue generation.
   All values are positive, but since the x86 stack grows downward, are
   subtracted from the CFA to produce a valid address.  */

struct GTY(()) machine_frame_state
{
  /* This pair tracks the currently active CFA as reg+offset.  When reg
     is drap_reg, we don't bother trying to record here the real CFA
     when it might really be a DW_CFA_def_cfa_expression.  */
  rtx cfa_reg;
  HOST_WIDE_INT cfa_offset;

  /* The current offset (canonically from the CFA) of ESP and EBP.
     When stack frame re-alignment is active, these may not be relative
     to the CFA.  However, in all cases they are relative to the
     offsets of the saved registers stored in ix86_frame.  */
  HOST_WIDE_INT sp_offset;
  HOST_WIDE_INT fp_offset;

  /* The size of the red-zone that may be assumed for the purposes of
     eliding register restore notes in the epilogue.  This may be zero
     if no red-zone is in effect, or may be reduced from the real
     red-zone value by a maximum runtime stack re-alignment value.  */
  int red_zone_offset;

  /* Indicate whether each of ESP, EBP or DRAP currently holds a valid
     value within the frame.  If false then the offset above should be
     ignored.  Note that DRAP, if valid, *always* points to the CFA and
     thus has an offset of zero.  */
  BOOL_BITFIELD sp_valid : 1;
  BOOL_BITFIELD fp_valid : 1;
  BOOL_BITFIELD drap_valid : 1;

  /* Indicate whether the local stack frame has been re-aligned.  When
     set, the SP/FP offsets above are relative to the aligned frame and
     not the CFA.  */
  BOOL_BITFIELD realigned : 1;

  /* Indicates whether the stack pointer has been re-aligned.  When
     set, SP/FP continue to be relative to the CFA, but the stack
     pointer should only be used for offsets > sp_realigned_offset,
     while the frame pointer should be used for offsets
     <= sp_realigned_fp_last.  The flags realigned and sp_realigned are
     mutually exclusive.  */
  BOOL_BITFIELD sp_realigned : 1;

  /* When APX_PPX used in prologue, force epilogue to emit popp
     instead of move and leave.  */
  BOOL_BITFIELD apx_ppx_used : 1;

  /* If sp_realigned is set, this is the last valid offset from the CFA
     that can be used for access with the frame pointer.  */
  HOST_WIDE_INT sp_realigned_fp_last;

  /* If sp_realigned is set, this is the offset from the CFA that the
     stack pointer was realigned, and may or may not be equal to
     sp_realigned_fp_last.  Access via the stack pointer is only valid
     for offsets that are greater than this value.  */
  HOST_WIDE_INT sp_realigned_offset;
};

/* Private to winnt.cc.  */
struct seh_frame_state;

/* How the current function was classified by the "interrupt"
   attribute (if at all).  */
enum function_type
{
  TYPE_UNKNOWN = 0,
  TYPE_NORMAL,
  /* The current function is an interrupt service routine with a
     pointer argument as specified by the "interrupt" attribute.  */
  TYPE_INTERRUPT,
  /* The current function is an interrupt service routine with a
     pointer argument and an integer argument as specified by the
     "interrupt" attribute.  */
  TYPE_EXCEPTION
};

/* Which call-saved-register convention applies to the current
   function, as selected by function attributes.  */
enum call_saved_registers_type
{
  TYPE_DEFAULT_CALL_SAVED_REGISTERS = 0,
  /* The current function is a function specified with the "interrupt"
     or "no_caller_saved_registers" attribute.  */
  TYPE_NO_CALLER_SAVED_REGISTERS,
  /* The current function is a function specified with the
     "no_callee_saved_registers" attribute.  */
  TYPE_NO_CALLEE_SAVED_REGISTERS,
  /* The current function is a function specified with the "noreturn"
     attribute.  */
  TYPE_NO_CALLEE_SAVED_REGISTERS_EXCEPT_BP,
};

/* Kind of instruction (if any) queued for emission at the function
   entrance; see insn_queued_at_entrance below.  */
enum queued_insn_type
{
  TYPE_NONE = 0,
  TYPE_ENDBR,
  TYPE_PATCHABLE_AREA
};

/* Per-function machine-specific state, attached to cfun->machine.  */
struct GTY(()) machine_function
{
  struct stack_local_entry *stack_locals;
  int varargs_gpr_size;
  int varargs_fpr_size;
  int optimize_mode_switching[MAX_386_ENTITIES];

  /* Cached initial frame layout for the current function.  */
  struct ix86_frame frame;

  /* For -fsplit-stack support: A stack local which holds a pointer to
     the stack arguments for a function with a variable number of
     arguments.  This is set at the start of the function and is used
     to initialize the overflow_arg_area field of the va_list
     structure.  */
  rtx split_stack_varargs_pointer;

  /* This value is used for amd64 targets and specifies the current abi
     to be used.  MS_ABI means ms abi.  Otherwise SYSV_ABI means sysv
     abi.  */
  ENUM_BITFIELD(calling_abi) call_abi : 8;

  /* Nonzero if the function accesses a previous frame.  */
  BOOL_BITFIELD accesses_prev_frame : 1;

  /* Set by ix86_compute_frame_layout and used by prologue/epilogue
     expander to determine the style used.  */
  BOOL_BITFIELD use_fast_prologue_epilogue : 1;

  /* Nonzero if the current function calls pc thunk and must not use
     the red zone.  */
  BOOL_BITFIELD pc_thunk_call_expanded : 1;

  /* If true, the current function needs the default PIC register, not
     an alternate register (on x86) and must not use the red zone (on
     x86_64), even if it's a leaf function.  We don't want the function
     to be regarded as non-leaf because TLS calls need not affect
     register allocation.  This flag is set when a TLS call instruction
     is expanded within a function, and never reset, even if all such
     instructions are optimized away.  Use the
     ix86_current_function_calls_tls_descriptor macro for a better
     approximation.  */
  BOOL_BITFIELD tls_descriptor_call_expanded_p : 1;

  /* If true, the current function has a STATIC_CHAIN placed on the
     stack below the return address.  */
  BOOL_BITFIELD static_chain_on_stack : 1;

  /* If true, it is safe to not save/restore DRAP register.  */
  BOOL_BITFIELD no_drap_save_restore : 1;

  /* Function type.  */
  ENUM_BITFIELD(function_type) func_type : 2;

  /* How to generate indirect branch.  */
  ENUM_BITFIELD(indirect_branch) indirect_branch_type : 3;

  /* If true, the current function has local indirect jumps, like
     "indirect_jump" or "tablejump".  */
  BOOL_BITFIELD has_local_indirect_jump : 1;

  /* How to generate function return.  */
  ENUM_BITFIELD(indirect_branch) function_return_type : 3;

  /* Call saved registers type.  */
  ENUM_BITFIELD(call_saved_registers_type) call_saved_registers : 2;

  /* If true, there is register available for argument passing.  This
     is used only in ix86_function_ok_for_sibcall by 32-bit to
     determine if there is scratch register available for indirect
     sibcall.  In 64-bit, rax, r10 and r11 are scratch registers which
     aren't used to pass arguments and can be used for indirect
     sibcall.  */
  BOOL_BITFIELD arg_reg_available : 1;

  /* If true, we're out-of-lining reg save/restore for regs clobbered
     by 64-bit ms_abi functions calling a sysv_abi function.  */
  BOOL_BITFIELD call_ms2sysv : 1;

  /* If true, the incoming 16-byte aligned stack has an offset (of 8)
     and needs padding prior to out-of-line stub save/restore area.  */
  BOOL_BITFIELD call_ms2sysv_pad_in : 1;

  /* This is the number of extra registers saved by stub (valid range
     is 0-6).  Each additional register is only saved/restored by the
     stubs if all successive ones are.  (Will always be zero when using
     a hard frame pointer.)  */
  unsigned int call_ms2sysv_extra_regs:3;

  /* Nonzero if the function places outgoing arguments on stack.  */
  BOOL_BITFIELD outgoing_args_on_stack : 1;

  /* If true, ENDBR or patchable area is queued at function
     entrance.  */
  ENUM_BITFIELD(queued_insn_type) insn_queued_at_entrance : 2;

  /* If true, the function label has been emitted.  */
  BOOL_BITFIELD function_label_emitted : 1;

  /* True if the function needs a stack frame.  */
  BOOL_BITFIELD stack_frame_required : 1;

  /* True if we should act silently, rather than raise an error for
     invalid calls.  */
  BOOL_BITFIELD silent_p : 1;

  /* True if red zone is used.  */
  BOOL_BITFIELD red_zone_used : 1;

  /* True if inline asm with redzone clobber has been seen.  */
  BOOL_BITFIELD asm_redzone_clobber_seen : 1;

  /* The largest alignment, in bytes, of stack slot actually used.  */
  unsigned int max_used_stack_alignment;

  /* During prologue/epilogue generation, the current frame state.
     Otherwise, the frame state at the end of the prologue.  */
  struct machine_frame_state fs;

  /* During SEH output, this is non-null.  */
  struct seh_frame_state * GTY((skip(""))) seh;
};

extern GTY(()) tree sysv_va_list_type_node;
extern GTY(()) tree ms_va_list_type_node;

/* NOTE(review): closes a conditional opened before this chunk
   (presumably the header's include guard) -- confirm in full file.  */
#endif

/* Convenience accessors for the current function's machine state.  */
#define ix86_stack_locals (cfun->machine->stack_locals)
#define ix86_varargs_gpr_size (cfun->machine->varargs_gpr_size)
#define ix86_varargs_fpr_size (cfun->machine->varargs_fpr_size)
#define ix86_optimize_mode_switching (cfun->machine->optimize_mode_switching)
#define ix86_pc_thunk_call_expanded (cfun->machine->pc_thunk_call_expanded)
#define ix86_tls_descriptor_calls_expanded_in_cfun \
 (cfun->machine->tls_descriptor_call_expanded_p)
/* Since tls_descriptor_call_expanded is not cleared, even if all TLS
   calls are optimized away, we try to detect cases in which it was
   optimized away.  Since such instructions use (reg REG_SP), we can
   verify whether there's any such instruction live by testing that
   REG_SP is live.  */
#define ix86_current_function_calls_tls_descriptor \
  (ix86_tls_descriptor_calls_expanded_in_cfun && df_regs_ever_live_p (SP_REG))
#define ix86_static_chain_on_stack (cfun->machine->static_chain_on_stack)
#define ix86_red_zone_used (cfun->machine->red_zone_used)

/* Control behavior of x86_file_start.  */
#define X86_FILE_START_VERSION_DIRECTIVE false
#define X86_FILE_START_FLTUSED false

/* Flag to mark data that is in the large address area.  */
#define SYMBOL_FLAG_FAR_ADDR (SYMBOL_FLAG_MACH_DEP << 0)
#define SYMBOL_REF_FAR_ADDR_P(X) \
	((SYMBOL_REF_FLAGS (X) & SYMBOL_FLAG_FAR_ADDR) != 0)

/* Flags to mark dllimport/dllexport.  Used by PE ports, but handy to
   have defined always, to avoid ifdefing.  */
#define SYMBOL_FLAG_DLLIMPORT (SYMBOL_FLAG_MACH_DEP << 1)
#define SYMBOL_REF_DLLIMPORT_P(X) \
	((SYMBOL_REF_FLAGS (X) & SYMBOL_FLAG_DLLIMPORT) != 0)

#define SYMBOL_FLAG_DLLEXPORT (SYMBOL_FLAG_MACH_DEP << 2)
#define SYMBOL_REF_DLLEXPORT_P(X) \
	((SYMBOL_REF_FLAGS (X) & SYMBOL_FLAG_DLLEXPORT) != 0)

#define SYMBOL_FLAG_STUBVAR (SYMBOL_FLAG_MACH_DEP << 4)
#define SYMBOL_REF_STUBVAR_P(X) \
	((SYMBOL_REF_FLAGS (X) & SYMBOL_FLAG_STUBVAR) != 0)

extern void debug_ready_dispatch (void);
extern void debug_dispatch_window (int);

/* The value at zero is only defined for the BMI instructions LZCNT and
   TZCNT, not the BSR/BSF insns in the original isa.  */
#define CTZ_DEFINED_VALUE_AT_ZERO(MODE, VALUE) \
	((VALUE) = GET_MODE_BITSIZE (MODE), TARGET_BMI ? 2 : 0)
#define CLZ_DEFINED_VALUE_AT_ZERO(MODE, VALUE) \
	((VALUE) = GET_MODE_BITSIZE (MODE), TARGET_LZCNT ? 2 : 0)

/* Flags returned by ix86_get_callcvt ().  */
#define IX86_CALLCVT_CDECL 0x1
#define IX86_CALLCVT_STDCALL 0x2
#define IX86_CALLCVT_FASTCALL 0x4
#define IX86_CALLCVT_THISCALL 0x8
#define IX86_CALLCVT_REGPARM 0x10
#define IX86_CALLCVT_SSEREGPARM 0x20

/* Extract just the base calling convention from the flag set.  */
#define IX86_BASE_CALLCVT(FLAGS) \
	((FLAGS) & (IX86_CALLCVT_CDECL | IX86_CALLCVT_STDCALL \
		    | IX86_CALLCVT_FASTCALL | IX86_CALLCVT_THISCALL))

/* Bits of the -mrecip= mask (recip_mask) selecting which reciprocal
   approximations are enabled.  */
#define RECIP_MASK_NONE 0x00
#define RECIP_MASK_DIV 0x01
#define RECIP_MASK_SQRT 0x02
#define RECIP_MASK_VEC_DIV 0x04
#define RECIP_MASK_VEC_SQRT 0x08
#define RECIP_MASK_ALL (RECIP_MASK_DIV | RECIP_MASK_SQRT \
			| RECIP_MASK_VEC_DIV | RECIP_MASK_VEC_SQRT)
#define RECIP_MASK_DEFAULT (RECIP_MASK_VEC_DIV | RECIP_MASK_VEC_SQRT)

#define TARGET_RECIP_DIV ((recip_mask & RECIP_MASK_DIV) != 0)
#define TARGET_RECIP_SQRT ((recip_mask & RECIP_MASK_SQRT) != 0)
#define TARGET_RECIP_VEC_DIV ((recip_mask & RECIP_MASK_VEC_DIV) != 0)
#define TARGET_RECIP_VEC_SQRT ((recip_mask & RECIP_MASK_VEC_SQRT) != 0)

/* Use 128-bit AVX instructions in the auto-vectorizer.  */
#define TARGET_PREFER_AVX128 (prefer_vector_width_type == PVW_AVX128)
/* Use 256-bit AVX instructions in the auto-vectorizer.  */
#define TARGET_PREFER_AVX256 (TARGET_PREFER_AVX128 \
			      || prefer_vector_width_type == PVW_AVX256)

#define TARGET_INDIRECT_BRANCH_REGISTER \
  (ix86_indirect_branch_register \
   || cfun->machine->indirect_branch_type != indirect_branch_keep)

/* HLE memory-model bits OR'ed into atomic builtins' memmodel args.  */
#define IX86_HLE_ACQUIRE (1 << 16)
#define IX86_HLE_RELEASE (1 << 17)

/* For switching between functions with different target attributes.  */
#define SWITCHABLE_TARGET 1

#define TARGET_SUPPORTS_WIDE_INT 1

#if !defined(GENERATOR_FILE) && !defined(IN_LIBGCC2)
extern enum attr_cpu ix86_schedule;

#define NUM_X86_64_MS_CLOBBERED_REGS 12
#endif

/* __builtin_eh_return can't handle stack realignment, so disable
   MMX/SSE in 32-bit libgcc functions that call it.  */
#ifndef __x86_64__
#define LIBGCC2_UNWIND_ATTRIBUTE __attribute__((target ("no-mmx,no-sse")))
#endif

/*
Local variables:
version-control: t
End:
*/