ually needed to represent the constant.  CONST_WIDE_INT_ELT gets one of the
   elements.  0 is the least significant HOST_WIDE_INT.  */
#define CONST_WIDE_INT_VEC(RTX) HWIVEC_CHECK (RTX, CONST_WIDE_INT)
#define CONST_WIDE_INT_NUNITS(RTX) CWI_GET_NUM_ELEM (RTX)
#define CONST_WIDE_INT_ELT(RTX, N) CWI_ELT (RTX, N)

/* For a CONST_POLY_INT, CONST_POLY_INT_COEFFS gives access to the
   individual coefficients, in the form of a trailing_wide_ints structure.  */
#define CONST_POLY_INT_COEFFS(RTX) \
  (RTL_FLAG_CHECK1("CONST_POLY_INT_COEFFS", (RTX), \
		   CONST_POLY_INT)->u.cpi.coeffs)

/* For a CONST_DOUBLE:
#if TARGET_SUPPORTS_WIDE_INT == 0
   For a VOIDmode, there are two integers: CONST_DOUBLE_LOW is the
     low-order word and ..._HIGH the high-order.
#endif
   For a float, there is a REAL_VALUE_TYPE structure, and
     CONST_DOUBLE_REAL_VALUE(r) is a pointer to it.  */
#define CONST_DOUBLE_LOW(r) XCMWINT (r, 0, CONST_DOUBLE, VOIDmode)
#define CONST_DOUBLE_HIGH(r) XCMWINT (r, 1, CONST_DOUBLE, VOIDmode)
#define CONST_DOUBLE_REAL_VALUE(r) \
  ((const struct real_value *) XCNMPRV (r, CONST_DOUBLE, VOIDmode))

/* For a CONST_FIXED, a pointer to the fixed_value structure and the
   high/low words of its data.  */
#define CONST_FIXED_VALUE(r) \
  ((const struct fixed_value *) XCNMPFV (r, CONST_FIXED, VOIDmode))
#define CONST_FIXED_VALUE_HIGH(r) \
  ((HOST_WIDE_INT) (CONST_FIXED_VALUE (r)->data.high))
#define CONST_FIXED_VALUE_LOW(r) \
  ((HOST_WIDE_INT) (CONST_FIXED_VALUE (r)->data.low))

/* For a CONST_VECTOR, return element #n.  */
#define CONST_VECTOR_ELT(RTX, N) const_vector_elt (RTX, N)

/* See rtl.texi for a description of these macros.
 */
#define CONST_VECTOR_NPATTERNS(RTX) \
  (RTL_FLAG_CHECK1 ("CONST_VECTOR_NPATTERNS", (RTX), CONST_VECTOR) \
   ->u2.const_vector.npatterns)

#define CONST_VECTOR_NELTS_PER_PATTERN(RTX) \
  (RTL_FLAG_CHECK1 ("CONST_VECTOR_NELTS_PER_PATTERN", (RTX), CONST_VECTOR) \
   ->u2.const_vector.nelts_per_pattern)

/* True if the vector is a duplicate of a single element (one element
   per pattern).  */
#define CONST_VECTOR_DUPLICATE_P(RTX) \
  (CONST_VECTOR_NELTS_PER_PATTERN (RTX) == 1)

/* True if the vector encodes a stepped sequence (three elements
   per pattern).  */
#define CONST_VECTOR_STEPPED_P(RTX) \
  (CONST_VECTOR_NELTS_PER_PATTERN (RTX) == 3)

#define CONST_VECTOR_ENCODED_ELT(RTX, N) XCVECEXP (RTX, 0, N, CONST_VECTOR)

/* Return the number of elements encoded directly in a CONST_VECTOR.  */

inline unsigned int
const_vector_encoded_nelts (const_rtx x)
{
  return CONST_VECTOR_NPATTERNS (x) * CONST_VECTOR_NELTS_PER_PATTERN (x);
}

/* For a CONST_VECTOR, return the number of elements in a vector.  */
#define CONST_VECTOR_NUNITS(RTX) GET_MODE_NUNITS (GET_MODE (RTX))

/* For a SUBREG rtx, SUBREG_REG extracts the value we want a subreg of.
   SUBREG_BYTE extracts the byte-number.  */

#define SUBREG_REG(RTX) XCEXP (RTX, 0, SUBREG)
#define SUBREG_BYTE(RTX) XCSUBREG (RTX, 1, SUBREG)

/* in rtlanal.cc */
/* Return the right cost to give to an operation
   to make the cost of the corresponding register-to-register instruction
   N times that of a fast register-to-register instruction.  */
#define COSTS_N_INSNS(N) ((N) * 4)

/* Maximum cost of an rtl expression.  This value has the special meaning
   not to use an rtx with this cost under any circumstances.  */
#define MAX_COST INT_MAX

/* Return true if CODE always has VOIDmode.  */

inline bool
always_void_p (enum rtx_code code)
{
  return code == SET;
}

/* A structure to hold all available cost information about an rtl
   expression.  */
struct full_rtx_costs
{
  int speed;
  int size;
};

/* Initialize a full_rtx_costs structure C to the maximum cost.  */
inline void
init_costs_to_max (struct full_rtx_costs *c)
{
  c->speed = MAX_COST;
  c->size = MAX_COST;
}

/* Initialize a full_rtx_costs structure C to zero cost.
*/ inline void init_costs_to_zero (struct full_rtx_costs *c) { c->speed = 0; c->size = 0; } /* Compare two full_rtx_costs structures A and B, returning true if A < B when optimizing for speed. */ inline bool costs_lt_p (struct full_rtx_costs *a, struct full_rtx_costs *b, bool speed) { if (speed) return (a->speed < b->speed || (a->speed == b->speed && a->size < b->size)); else return (a->size < b->size || (a->size == b->size && a->speed < b->speed)); } /* Increase both members of the full_rtx_costs structure C by the cost of N insns. */ inline void costs_add_n_insns (struct full_rtx_costs *c, int n) { c->speed += COSTS_N_INSNS (n); c->size += COSTS_N_INSNS (n); } /* Describes the shape of a subreg: inner_mode == the mode of the SUBREG_REG offset == the SUBREG_BYTE outer_mode == the mode of the SUBREG itself. */ class subreg_shape { public: subreg_shape (machine_mode, poly_uint16, machine_mode); bool operator == (const subreg_shape &) const; bool operator != (const subreg_shape &) const; unsigned HOST_WIDE_INT unique_id () const; machine_mode inner_mode; poly_uint16 offset; machine_mode outer_mode; }; inline subreg_shape::subreg_shape (machine_mode inner_mode_in, poly_uint16 offset_in, machine_mode outer_mode_in) : inner_mode (inner_mode_in), offset (offset_in), outer_mode (outer_mode_in) {} inline bool subreg_shape::operator == (const subreg_shape &other) const { return (inner_mode == other.inner_mode && known_eq (offset, other.offset) && outer_mode == other.outer_mode); } inline bool subreg_shape::operator != (const subreg_shape &other) const { return !operator == (other); } /* Return an integer that uniquely identifies this shape. Structures like rtx_def assume that a mode can fit in an 8-bit bitfield and no current mode is anywhere near being 65536 bytes in size, so the id comfortably fits in an int. 
*/ inline unsigned HOST_WIDE_INT subreg_shape::unique_id () const { { STATIC_ASSERT (MAX_MACHINE_MODE <= 256); } { STATIC_ASSERT (NUM_POLY_INT_COEFFS <= 3); } { STATIC_ASSERT (sizeof (offset.coeffs[0]) <= 2); } int res = (int) inner_mode + ((int) outer_mode << 8); for (int i = 0; i < NUM_POLY_INT_COEFFS; ++i) res += (HOST_WIDE_INT) offset.coeffs[i] << ((1 + i) * 16); return res; } /* Return the shape of a SUBREG rtx. */ inline subreg_shape shape_of_subreg (const_rtx x) { return subreg_shape (GET_MODE (SUBREG_REG (x)), SUBREG_BYTE (x), GET_MODE (x)); } /* Information about an address. This structure is supposed to be able to represent all supported target addresses. Please extend it if it is not yet general enough. */ struct address_info { /* The mode of the value being addressed, or VOIDmode if this is a load-address operation with no known address mode. */ machine_mode mode; /* The address space. */ addr_space_t as; /* True if this is an RTX_AUTOINC address. */ bool autoinc_p; /* A pointer to the top-level address. */ rtx *outer; /* A pointer to the inner address, after all address mutations have been stripped from the top-level address. It can be one of the following: - A {PRE,POST}_{INC,DEC} of *BASE. SEGMENT, INDEX and DISP are null. - A {PRE,POST}_MODIFY of *BASE. In this case either INDEX or DISP points to the step value, depending on whether the step is variable or constant respectively. SEGMENT is null. - A plain sum of the form SEGMENT + BASE + INDEX + DISP, with null fields evaluating to 0. */ rtx *inner; /* Components that make up *INNER. Each one may be null or nonnull. When nonnull, their meanings are as follows: - *SEGMENT is the "segment" of memory to which the address refers. This value is entirely target-specific and is only called a "segment" because that's its most typical use. It contains exactly one UNSPEC, pointed to by SEGMENT_TERM. The contents of *SEGMENT do not need reloading. - *BASE is a variable expression representing a base address. 
It contains exactly one REG, SUBREG or MEM, pointed to by BASE_TERM. - *INDEX is a variable expression representing an index value. It may be a scaled expression, such as a MULT. It has exactly one REG, SUBREG or MEM, pointed to by INDEX_TERM. - *DISP is a constant, possibly mutated. DISP_TERM points to the unmutated RTX_CONST_OBJ. */ rtx *segment; rtx *base; rtx *index; rtx *disp; rtx *segment_term; rtx *base_term; rtx *index_term; rtx *disp_term; /* In a {PRE,POST}_MODIFY address, this points to a second copy of BASE_TERM, otherwise it is null. */ rtx *base_term2; /* ADDRESS if this structure describes an address operand, MEM if it describes a MEM address. */ enum rtx_code addr_outer_code; /* If BASE is nonnull, this is the code of the rtx that contains it. */ enum rtx_code base_outer_code; }; /* This is used to bundle an rtx and a mode together so that the pair can be used with the wi:: routines. If we ever put modes into rtx integer constants, this should go away and then just pass an rtx in. */ typedef std::pair rtx_mode_t; namespace wi { template <> struct int_traits { static const enum precision_type precision_type = VAR_PRECISION; static const bool host_dependent_precision = false; /* This ought to be true, except for the special case that BImode is canonicalized to STORE_FLAG_VALUE, which might be 1. */ static const bool is_sign_extended = false; static unsigned int get_precision (const rtx_mode_t &); static wi::storage_ref decompose (HOST_WIDE_INT *, unsigned int, const rtx_mode_t &); }; } inline unsigned int wi::int_traits ::get_precision (const rtx_mode_t &x) { return GET_MODE_PRECISION (as_a (x.second)); } inline wi::storage_ref wi::int_traits ::decompose (HOST_WIDE_INT *, unsigned int precision, const rtx_mode_t &x) { gcc_checking_assert (precision == get_precision (x)); switch (GET_CODE (x.first)) { case CONST_INT: if (precision < HOST_BITS_PER_WIDE_INT) /* Nonzero BImodes are stored as STORE_FLAG_VALUE, which on many targets is 1 rather than -1. 
*/ gcc_checking_assert (INTVAL (x.first) == sext_hwi (INTVAL (x.first), precision) || (x.second == BImode && INTVAL (x.first) == 1)); return wi::storage_ref (&INTVAL (x.first), 1, precision); case CONST_WIDE_INT: return wi::storage_ref (&CONST_WIDE_INT_ELT (x.first, 0), CONST_WIDE_INT_NUNITS (x.first), precision); #if TARGET_SUPPORTS_WIDE_INT == 0 case CONST_DOUBLE: return wi::storage_ref (&CONST_DOUBLE_LOW (x.first), 2, precision); #endif default: gcc_unreachable (); } } namespace wi { hwi_with_prec shwi (HOST_WIDE_INT, machine_mode mode); wide_int min_value (machine_mode, signop); wide_int max_value (machine_mode, signop); } inline wi::hwi_with_prec wi::shwi (HOST_WIDE_INT val, machine_mode mode) { return shwi (val, GET_MODE_PRECISION (as_a (mode))); } /* Produce the smallest number that is represented in MODE. The precision is taken from MODE and the sign from SGN. */ inline wide_int wi::min_value (machine_mode mode, signop sgn) { return min_value (GET_MODE_PRECISION (as_a (mode)), sgn); } /* Produce the largest number that is represented in MODE. The precision is taken from MODE and the sign from SGN. */ inline wide_int wi::max_value (machine_mode mode, signop sgn) { return max_value (GET_MODE_PRECISION (as_a (mode)), sgn); } namespace wi { typedef poly_int > > rtx_to_poly_wide_ref; rtx_to_poly_wide_ref to_poly_wide (const_rtx, machine_mode); } /* Return the value of a CONST_POLY_INT in its native precision. */ inline wi::rtx_to_poly_wide_ref const_poly_int_value (const_rtx x) { poly_int res; for (unsigned int i = 0; i < NUM_POLY_INT_COEFFS; ++i) res.coeffs[i] = CONST_POLY_INT_COEFFS (x)[i]; return res; } /* Return true if X is a scalar integer or a CONST_POLY_INT. The value can then be extracted using wi::to_poly_wide. */ inline bool poly_int_rtx_p (const_rtx x) { return CONST_SCALAR_INT_P (x) || CONST_POLY_INT_P (x); } /* Access X (which satisfies poly_int_rtx_p) as a poly_wide_int. MODE is the mode of X. 
*/ inline wi::rtx_to_poly_wide_ref wi::to_poly_wide (const_rtx x, machine_mode mode) { if (CONST_POLY_INT_P (x)) return const_poly_int_value (x); return rtx_mode_t (const_cast (x), mode); } /* Return the value of X as a poly_int64. */ inline poly_int64 rtx_to_poly_int64 (const_rtx x) { if (CONST_POLY_INT_P (x)) { poly_int64 res; for (unsigned int i = 0; i < NUM_POLY_INT_COEFFS; ++i) res.coeffs[i] = CONST_POLY_INT_COEFFS (x)[i].to_shwi (); return res; } return INTVAL (x); } /* Return true if arbitrary value X is an integer constant that can be represented as a poly_int64. Store the value in *RES if so, otherwise leave it unmodified. */ inline bool poly_int_rtx_p (const_rtx x, poly_int64_pod *res) { if (CONST_INT_P (x)) { *res = INTVAL (x); return true; } if (CONST_POLY_INT_P (x)) { for (unsigned int i = 0; i < NUM_POLY_INT_COEFFS; ++i) if (!wi::fits_shwi_p (CONST_POLY_INT_COEFFS (x)[i])) return false; for (unsigned int i = 0; i < NUM_POLY_INT_COEFFS; ++i) res->coeffs[i] = CONST_POLY_INT_COEFFS (x)[i].to_shwi (); return true; } return false; } extern void init_rtlanal (void); extern int rtx_cost (rtx, machine_mode, enum rtx_code, int, bool); extern int address_cost (rtx, machine_mode, addr_space_t, bool); extern void get_full_rtx_cost (rtx, machine_mode, enum rtx_code, int, struct full_rtx_costs *); extern bool native_encode_rtx (machine_mode, rtx, vec &, unsigned int, unsigned int); extern rtx native_decode_rtx (machine_mode, const vec &, unsigned int); extern rtx native_decode_vector_rtx (machine_mode, const vec &, unsigned int, unsigned int, unsigned int); extern poly_uint64 subreg_lsb (const_rtx); extern poly_uint64 subreg_size_lsb (poly_uint64, poly_uint64, poly_uint64); extern poly_uint64 subreg_size_offset_from_lsb (poly_uint64, poly_uint64, poly_uint64); extern bool read_modify_subreg_p (const_rtx); /* Given a subreg's OUTER_MODE, INNER_MODE, and SUBREG_BYTE, return the bit offset at which the subreg begins (counting from the least significant bit of the 
operand).  */
inline poly_uint64
subreg_lsb_1 (machine_mode outer_mode, machine_mode inner_mode,
	      poly_uint64 subreg_byte)
{
  return subreg_size_lsb (GET_MODE_SIZE (outer_mode),
			  GET_MODE_SIZE (inner_mode), subreg_byte);
}

/* Return the subreg byte offset for a subreg whose outer mode is
   OUTER_MODE, whose inner mode is INNER_MODE, and where there are
   LSB_SHIFT *bits* between the lsb of the outer value and the lsb of
   the inner value.  This is the inverse of subreg_lsb_1 (which converts
   byte offsets to bit shifts).  */

inline poly_uint64
subreg_offset_from_lsb (machine_mode outer_mode, machine_mode inner_mode,
			poly_uint64 lsb_shift)
{
  return subreg_size_offset_from_lsb (GET_MODE_SIZE (outer_mode),
				      GET_MODE_SIZE (inner_mode), lsb_shift);
}

extern unsigned int subreg_regno_offset (unsigned int, machine_mode,
					 poly_uint64, machine_mode);
extern bool subreg_offset_representable_p (unsigned int, machine_mode,
					   poly_uint64, machine_mode);
extern unsigned int subreg_regno (const_rtx);
extern int simplify_subreg_regno (unsigned int, machine_mode,
				  poly_uint64, machine_mode);
extern int lowpart_subreg_regno (unsigned int, machine_mode, machine_mode);
extern unsigned int subreg_nregs (const_rtx);
extern unsigned int subreg_nregs_with_regno (unsigned int, const_rtx);
extern unsigned HOST_WIDE_INT nonzero_bits (const_rtx, machine_mode);
extern unsigned int num_sign_bit_copies (const_rtx, machine_mode);
extern bool constant_pool_constant_p (rtx);
extern bool truncated_to_mode (machine_mode, const_rtx);
extern int low_bitmask_len (machine_mode, unsigned HOST_WIDE_INT);
extern void split_double (rtx, rtx *, rtx *);
extern rtx *strip_address_mutations (rtx *, enum rtx_code * = 0);
extern void decompose_address (struct address_info *, rtx *, machine_mode,
			       addr_space_t, enum rtx_code);
extern void decompose_lea_address (struct address_info *, rtx *);
extern void decompose_mem_address (struct address_info *, rtx);
extern void update_address (struct address_info *);
extern HOST_WIDE_INT get_index_scale (const struct address_info *);
extern enum rtx_code get_index_code (const struct address_info *);

/* 1 if RTX is a subreg containing a reg that is already known to be
   sign- or zero-extended from the mode of the subreg to the mode of
   the reg.  SUBREG_PROMOTED_UNSIGNED_P gives the signedness of the
   extension.

   When used as a LHS, it means that this extension must be done
   when assigning to SUBREG_REG.  */

#define SUBREG_PROMOTED_VAR_P(RTX) \
  (RTL_FLAG_CHECK1 ("SUBREG_PROMOTED", (RTX), SUBREG)->in_struct)

/* Valid for subregs which are SUBREG_PROMOTED_VAR_P().  In that case
   this gives the necessary extensions:
   0  - signed (SRP_SIGNED)
   1  - normal unsigned (SRP_UNSIGNED)
   2  - value is both sign and unsign extended for mode
	(SRP_SIGNED_AND_UNSIGNED).
   -1 - pointer unsigned, which most often can be handled like unsigned
	extension, except for generating instructions where we need to
	emit special code (ptr_extend insns) on some architectures
	(SRP_POINTER).  */

const int SRP_POINTER = -1;
const int SRP_SIGNED = 0;
const int SRP_UNSIGNED = 1;
const int SRP_SIGNED_AND_UNSIGNED = 2;

/* Sets promoted mode for SUBREG_PROMOTED_VAR_P().  */
#define SUBREG_PROMOTED_SET(RTX, VAL)				\
do {								\
  rtx const _rtx = RTL_FLAG_CHECK1 ("SUBREG_PROMOTED_SET",	\
                                    (RTX), SUBREG);		\
  switch (VAL)							\
  {								\
    case SRP_POINTER:						\
      _rtx->volatil = 0;					\
      _rtx->unchanging = 0;					\
      break;							\
    case SRP_SIGNED:						\
      _rtx->volatil = 0;					\
      _rtx->unchanging = 1;					\
      break;							\
    case SRP_UNSIGNED:						\
      _rtx->volatil = 1;					\
      _rtx->unchanging = 0;					\
      break;							\
    case SRP_SIGNED_AND_UNSIGNED:				\
      _rtx->volatil = 1;					\
      _rtx->unchanging = 1;					\
      break;							\
  }								\
} while (0)

/* Gets the value stored in promoted mode for SUBREG_PROMOTED_VAR_P(),
   including SRP_SIGNED_AND_UNSIGNED if promoted for
   both signed and unsigned.  */
#define SUBREG_PROMOTED_GET(RTX)	\
  (2 * (RTL_FLAG_CHECK1 ("SUBREG_PROMOTED_GET", (RTX), SUBREG)->volatil)\
   + (RTX)->unchanging - 1)

/* Returns sign of promoted mode for SUBREG_PROMOTED_VAR_P().
 */
#define SUBREG_PROMOTED_SIGN(RTX)	\
  ((RTL_FLAG_CHECK1 ("SUBREG_PROMOTED_SIGN", (RTX), SUBREG)->volatil) ? 1\
   : (RTX)->unchanging - 1)

/* Predicate to check if RTX of SUBREG_PROMOTED_VAR_P() is promoted
   for SIGNED type.  */
#define SUBREG_PROMOTED_SIGNED_P(RTX)	\
  (RTL_FLAG_CHECK1 ("SUBREG_PROMOTED_SIGNED_P", (RTX), SUBREG)->unchanging)

/* Predicate to check if RTX of SUBREG_PROMOTED_VAR_P() is promoted
   for UNSIGNED type.  */
#define SUBREG_PROMOTED_UNSIGNED_P(RTX)	\
  (RTL_FLAG_CHECK1 ("SUBREG_PROMOTED_UNSIGNED_P", (RTX), SUBREG)->volatil)

/* Checks if RTX of SUBREG_PROMOTED_VAR_P() is promoted for given SIGN.  */
#define SUBREG_CHECK_PROMOTED_SIGN(RTX, SIGN)	\
  ((SIGN) == SRP_POINTER ? SUBREG_PROMOTED_GET (RTX) == SRP_POINTER \
   : (SIGN) == SRP_SIGNED ? SUBREG_PROMOTED_SIGNED_P (RTX) \
   : SUBREG_PROMOTED_UNSIGNED_P (RTX))

/* True if the REG is the static chain register for some CALL_INSN.  */
#define STATIC_CHAIN_REG_P(RTX)	\
  (RTL_FLAG_CHECK1 ("STATIC_CHAIN_REG_P", (RTX), REG)->jump)

/* True if the subreg was generated by LRA for reload insns.  Such
   subregs are valid only during LRA.  */
#define LRA_SUBREG_P(RTX)	\
  (RTL_FLAG_CHECK1 ("LRA_SUBREG_P", (RTX), SUBREG)->jump)

/* Access various components of an ASM_OPERANDS rtx.
 */
#define ASM_OPERANDS_TEMPLATE(RTX) XCSTR (RTX, 0, ASM_OPERANDS)
#define ASM_OPERANDS_OUTPUT_CONSTRAINT(RTX) XCSTR (RTX, 1, ASM_OPERANDS)
#define ASM_OPERANDS_OUTPUT_IDX(RTX) XCINT (RTX, 2, ASM_OPERANDS)
#define ASM_OPERANDS_INPUT_VEC(RTX) XCVEC (RTX, 3, ASM_OPERANDS)
#define ASM_OPERANDS_INPUT_CONSTRAINT_VEC(RTX) XCVEC (RTX, 4, ASM_OPERANDS)
#define ASM_OPERANDS_INPUT(RTX, N) XCVECEXP (RTX, 3, N, ASM_OPERANDS)
#define ASM_OPERANDS_INPUT_LENGTH(RTX) XCVECLEN (RTX, 3, ASM_OPERANDS)
#define ASM_OPERANDS_INPUT_CONSTRAINT_EXP(RTX, N) \
  XCVECEXP (RTX, 4, N, ASM_OPERANDS)
#define ASM_OPERANDS_INPUT_CONSTRAINT(RTX, N) \
  XSTR (XCVECEXP (RTX, 4, N, ASM_OPERANDS), 0)
#define ASM_OPERANDS_INPUT_MODE(RTX, N)  \
  GET_MODE (XCVECEXP (RTX, 4, N, ASM_OPERANDS))
#define ASM_OPERANDS_LABEL_VEC(RTX) XCVEC (RTX, 5, ASM_OPERANDS)
#define ASM_OPERANDS_LABEL_LENGTH(RTX) XCVECLEN (RTX, 5, ASM_OPERANDS)
#define ASM_OPERANDS_LABEL(RTX, N) XCVECEXP (RTX, 5, N, ASM_OPERANDS)
#define ASM_OPERANDS_SOURCE_LOCATION(RTX) XCUINT (RTX, 6, ASM_OPERANDS)
#define ASM_INPUT_SOURCE_LOCATION(RTX) XCUINT (RTX, 1, ASM_INPUT)

/* 1 if RTX is a mem that is statically allocated in read-only memory.  */
#define MEM_READONLY_P(RTX) \
  (RTL_FLAG_CHECK1 ("MEM_READONLY_P", (RTX), MEM)->unchanging)

/* 1 if RTX is a mem and we should keep the alias set for this mem
   unchanged when we access a component.  Set to 1, for example, when we
   are already in a non-addressable component of an aggregate.  */
#define MEM_KEEP_ALIAS_SET_P(RTX) \
  (RTL_FLAG_CHECK1 ("MEM_KEEP_ALIAS_SET_P", (RTX), MEM)->jump)

/* 1 if RTX is a mem or asm_operand for a volatile reference.  */
#define MEM_VOLATILE_P(RTX) \
  (RTL_FLAG_CHECK3 ("MEM_VOLATILE_P", (RTX), MEM, ASM_OPERANDS, \
		    ASM_INPUT)->volatil)

/* 1 if RTX is a mem that cannot trap.  */
#define MEM_NOTRAP_P(RTX) \
  (RTL_FLAG_CHECK1 ("MEM_NOTRAP_P", (RTX), MEM)->call)

/* The memory attribute block.  We provide access macros for each value
   in the block and provide defaults if none specified.
 */
#define MEM_ATTRS(RTX) X0MEMATTR (RTX, 1)

/* The register attribute block.  We provide access macros for each value
   in the block and provide defaults if none specified.  */
#define REG_ATTRS(RTX) (REG_CHECK (RTX)->attrs)

#ifndef GENERATOR_FILE
/* For a MEM rtx, the alias set.  If 0, this MEM is not in any alias
   set, and may alias anything.  Otherwise, the MEM can only alias
   MEMs in a conflicting alias set.  This value is set in a
   language-dependent manner in the front-end, and should not be
   altered in the back-end.  These set numbers are tested with
   alias_sets_conflict_p.  */
#define MEM_ALIAS_SET(RTX) (get_mem_attrs (RTX)->alias)

/* For a MEM rtx, the decl it is known to refer to, if it is known to
   refer to part of a DECL.  It may also be a COMPONENT_REF.  */
#define MEM_EXPR(RTX) (get_mem_attrs (RTX)->expr)

/* For a MEM rtx, true if its MEM_OFFSET is known.  */
#define MEM_OFFSET_KNOWN_P(RTX) (get_mem_attrs (RTX)->offset_known_p)

/* For a MEM rtx, the offset from the start of MEM_EXPR.  */
#define MEM_OFFSET(RTX) (get_mem_attrs (RTX)->offset)

/* For a MEM rtx, the address space.  */
#define MEM_ADDR_SPACE(RTX) (get_mem_attrs (RTX)->addrspace)

/* For a MEM rtx, true if its MEM_SIZE is known.  */
#define MEM_SIZE_KNOWN_P(RTX) (get_mem_attrs (RTX)->size_known_p)

/* For a MEM rtx, the size in bytes of the MEM.  */
#define MEM_SIZE(RTX) (get_mem_attrs (RTX)->size)

/* For a MEM rtx, the alignment in bits.  We can use the alignment of the
   mode as a default when STRICT_ALIGNMENT, but not if not.  */
#define MEM_ALIGN(RTX) (get_mem_attrs (RTX)->align)
#else
#define MEM_ADDR_SPACE(RTX) ADDR_SPACE_GENERIC
#endif

/* For a REG rtx, the decl it is known to refer to, if it is known to
   refer to part of a DECL.  */
#define REG_EXPR(RTX) (REG_ATTRS (RTX) == 0 ? 0 : REG_ATTRS (RTX)->decl)

/* For a REG rtx, the offset from the start of REG_EXPR, if known, as an
   HOST_WIDE_INT.  */
#define REG_OFFSET(RTX) (REG_ATTRS (RTX) == 0 \
			 ? 0 : REG_ATTRS (RTX)->offset)

/* Copy the attributes that apply to memory locations from RHS to LHS.  */
#define MEM_COPY_ATTRIBUTES(LHS, RHS)				\
  (MEM_VOLATILE_P (LHS) = MEM_VOLATILE_P (RHS),			\
   MEM_NOTRAP_P (LHS) = MEM_NOTRAP_P (RHS),			\
   MEM_READONLY_P (LHS) = MEM_READONLY_P (RHS),			\
   MEM_KEEP_ALIAS_SET_P (LHS) = MEM_KEEP_ALIAS_SET_P (RHS),	\
   MEM_POINTER (LHS) = MEM_POINTER (RHS),			\
   MEM_ATTRS (LHS) = MEM_ATTRS (RHS))

/* 1 if RTX is a label_ref for a nonlocal label.  */
/* Likewise in an expr_list for a REG_LABEL_OPERAND or
   REG_LABEL_TARGET note.  */
#define LABEL_REF_NONLOCAL_P(RTX) \
  (RTL_FLAG_CHECK1 ("LABEL_REF_NONLOCAL_P", (RTX), LABEL_REF)->volatil)

/* 1 if RTX is a code_label that should always be considered to be needed.  */
#define LABEL_PRESERVE_P(RTX) \
  (RTL_FLAG_CHECK2 ("LABEL_PRESERVE_P", (RTX), CODE_LABEL, NOTE)->in_struct)

/* During sched, 1 if RTX is an insn that must be scheduled together
   with the preceding insn.  */
#define SCHED_GROUP_P(RTX) \
  (RTL_FLAG_CHECK4 ("SCHED_GROUP_P", (RTX), DEBUG_INSN, INSN, \
		    JUMP_INSN, CALL_INSN)->in_struct)

/* For a SET rtx, SET_DEST is the place that is set
   and SET_SRC is the value it is set to.  */
#define SET_DEST(RTX) XC2EXP (RTX, 0, SET, CLOBBER)
#define SET_SRC(RTX) XCEXP (RTX, 1, SET)
#define SET_IS_RETURN_P(RTX)						\
  (RTL_FLAG_CHECK1 ("SET_IS_RETURN_P", (RTX), SET)->jump)

/* For a TRAP_IF rtx, TRAP_CONDITION is an expression.  */
#define TRAP_CONDITION(RTX) XCEXP (RTX, 0, TRAP_IF)
#define TRAP_CODE(RTX) XCEXP (RTX, 1, TRAP_IF)

/* For a COND_EXEC rtx, COND_EXEC_TEST is the condition to base
   conditionally executing the code on, COND_EXEC_CODE is the code
   to execute if the condition is true.  */
#define COND_EXEC_TEST(RTX) XCEXP (RTX, 0, COND_EXEC)
#define COND_EXEC_CODE(RTX) XCEXP (RTX, 1, COND_EXEC)

/* 1 if RTX is a symbol_ref that addresses this function's rtl
   constants pool.
 */
#define CONSTANT_POOL_ADDRESS_P(RTX)					\
  (RTL_FLAG_CHECK1 ("CONSTANT_POOL_ADDRESS_P", (RTX), SYMBOL_REF)->unchanging)

/* 1 if RTX is a symbol_ref that addresses a value in the file's
   tree constant pool.  This information is private to varasm.cc.  */
#define TREE_CONSTANT_POOL_ADDRESS_P(RTX)				\
  (RTL_FLAG_CHECK1 ("TREE_CONSTANT_POOL_ADDRESS_P",			\
		    (RTX), SYMBOL_REF)->frame_related)

/* Used if RTX is a symbol_ref, for machine-specific purposes.  */
#define SYMBOL_REF_FLAG(RTX)						\
  (RTL_FLAG_CHECK1 ("SYMBOL_REF_FLAG", (RTX), SYMBOL_REF)->volatil)

/* 1 if RTX is a symbol_ref that has been the library function in
   emit_library_call.  */
#define SYMBOL_REF_USED(RTX)						\
  (RTL_FLAG_CHECK1 ("SYMBOL_REF_USED", (RTX), SYMBOL_REF)->used)

/* 1 if RTX is a symbol_ref for a weak symbol.  */
#define SYMBOL_REF_WEAK(RTX)						\
  (RTL_FLAG_CHECK1 ("SYMBOL_REF_WEAK", (RTX), SYMBOL_REF)->return_val)

/* A pointer attached to the SYMBOL_REF; either SYMBOL_REF_DECL or
   SYMBOL_REF_CONSTANT.  */
#define SYMBOL_REF_DATA(RTX) X0ANY ((RTX), 1)

/* Set RTX's SYMBOL_REF_DECL to DECL.  RTX must not be a constant
   pool symbol.  */
#define SET_SYMBOL_REF_DECL(RTX, DECL) \
  (gcc_assert (!CONSTANT_POOL_ADDRESS_P (RTX)), X0TREE ((RTX), 1) = (DECL))

/* The tree (decl or constant) associated with the symbol, or null.  */
#define SYMBOL_REF_DECL(RTX) \
  (CONSTANT_POOL_ADDRESS_P (RTX) ? NULL : X0TREE ((RTX), 1))

/* Set RTX's SYMBOL_REF_CONSTANT to C.  RTX must be a constant
   pool symbol.  */
#define SET_SYMBOL_REF_CONSTANT(RTX, C) \
  (gcc_assert (CONSTANT_POOL_ADDRESS_P (RTX)), X0CONSTANT ((RTX), 1) = (C))

/* The rtx constant pool entry for a symbol, or null.  */
#define SYMBOL_REF_CONSTANT(RTX) \
  (CONSTANT_POOL_ADDRESS_P (RTX) ? X0CONSTANT ((RTX), 1) : NULL)

/* A set of flags on a symbol_ref that are, in some respects, redundant
   with information derivable from the tree decl associated with this
   symbol.  Except that we build a *lot* of SYMBOL_REFs that aren't
   associated with a decl.  In some cases this is a bug.
But beyond that, it's nice to cache this information to avoid recomputing it.
   Finally, this allows space for the target to store more than one bit of
   information, as with SYMBOL_REF_FLAG.  */
#define SYMBOL_REF_FLAGS(RTX)					\
  (RTL_FLAG_CHECK1 ("SYMBOL_REF_FLAGS", (RTX), SYMBOL_REF)	\
   ->u2.symbol_ref_flags)

/* These flags are common enough to be defined for all targets.  They
   are computed by the default version of targetm.encode_section_info.  */

/* Set if this symbol is a function.  */
#define SYMBOL_FLAG_FUNCTION	(1 << 0)
#define SYMBOL_REF_FUNCTION_P(RTX) \
  ((SYMBOL_REF_FLAGS (RTX) & SYMBOL_FLAG_FUNCTION) != 0)
/* Set if targetm.binds_local_p is true.  */
#define SYMBOL_FLAG_LOCAL	(1 << 1)
#define SYMBOL_REF_LOCAL_P(RTX) \
  ((SYMBOL_REF_FLAGS (RTX) & SYMBOL_FLAG_LOCAL) != 0)
/* Set if targetm.in_small_data_p is true.  */
#define SYMBOL_FLAG_SMALL	(1 << 2)
#define SYMBOL_REF_SMALL_P(RTX) \
  ((SYMBOL_REF_FLAGS (RTX) & SYMBOL_FLAG_SMALL) != 0)
/* The three-bit field at [5:3] is true for TLS variables; use
   SYMBOL_REF_TLS_MODEL to extract the field as an enum tls_model.  */
#define SYMBOL_FLAG_TLS_SHIFT	3
#define SYMBOL_REF_TLS_MODEL(RTX) \
  ((enum tls_model) ((SYMBOL_REF_FLAGS (RTX) >> SYMBOL_FLAG_TLS_SHIFT) & 7))
/* Set if this symbol is not defined in this translation unit.  */
#define SYMBOL_FLAG_EXTERNAL	(1 << 6)
#define SYMBOL_REF_EXTERNAL_P(RTX) \
  ((SYMBOL_REF_FLAGS (RTX) & SYMBOL_FLAG_EXTERNAL) != 0)
/* Set if this symbol has a block_symbol structure associated with it.  */
#define SYMBOL_FLAG_HAS_BLOCK_INFO (1 << 7)
#define SYMBOL_REF_HAS_BLOCK_INFO_P(RTX) \
  ((SYMBOL_REF_FLAGS (RTX) & SYMBOL_FLAG_HAS_BLOCK_INFO) != 0)
/* Set if this symbol is a section anchor.  SYMBOL_REF_ANCHOR_P
   implies SYMBOL_REF_HAS_BLOCK_INFO_P.  */
#define SYMBOL_FLAG_ANCHOR	(1 << 8)
#define SYMBOL_REF_ANCHOR_P(RTX) \
  ((SYMBOL_REF_FLAGS (RTX) & SYMBOL_FLAG_ANCHOR) != 0)

/* Subsequent bits are available for the target to use.
 */
#define SYMBOL_FLAG_MACH_DEP_SHIFT	9
#define SYMBOL_FLAG_MACH_DEP		(1 << SYMBOL_FLAG_MACH_DEP_SHIFT)

/* If SYMBOL_REF_HAS_BLOCK_INFO_P (RTX), this is the object_block
   structure to which the symbol belongs, or NULL if it has not been
   assigned a block.  */
#define SYMBOL_REF_BLOCK(RTX) (BLOCK_SYMBOL_CHECK (RTX)->block)

/* If SYMBOL_REF_HAS_BLOCK_INFO_P (RTX), this is the offset of RTX from
   the first object in SYMBOL_REF_BLOCK (RTX).  The value is negative if
   RTX has not yet been assigned to a block, or it has not been given an
   offset within that block.  */
#define SYMBOL_REF_BLOCK_OFFSET(RTX) (BLOCK_SYMBOL_CHECK (RTX)->offset)

/* True if RTX is flagged to be a scheduling barrier.  */
#define PREFETCH_SCHEDULE_BARRIER_P(RTX)					\
  (RTL_FLAG_CHECK1 ("PREFETCH_SCHEDULE_BARRIER_P", (RTX), PREFETCH)->volatil)

/* Indicate whether the machine has any sort of auto increment addressing.
   If not, we can avoid checking for REG_INC notes.  */

#if (defined (HAVE_PRE_INCREMENT) || defined (HAVE_PRE_DECREMENT) \
     || defined (HAVE_POST_INCREMENT) || defined (HAVE_POST_DECREMENT) \
     || defined (HAVE_PRE_MODIFY_DISP) || defined (HAVE_POST_MODIFY_DISP) \
     || defined (HAVE_PRE_MODIFY_REG) || defined (HAVE_POST_MODIFY_REG))
#define AUTO_INC_DEC 1
#else
#define AUTO_INC_DEC 0
#endif

/* Define a macro to look for REG_INC notes,
   but save time on machines where they never exist.  */

#if AUTO_INC_DEC
#define FIND_REG_INC_NOTE(INSN, REG)			\
  ((REG) != NULL_RTX && REG_P ((REG))			\
   ? find_regno_note ((INSN), REG_INC, REGNO (REG))	\
   : find_reg_note ((INSN), REG_INC, (REG)))
#else
#define FIND_REG_INC_NOTE(INSN, REG) 0
#endif

#ifndef HAVE_PRE_INCREMENT
#define HAVE_PRE_INCREMENT 0
#endif

#ifndef HAVE_PRE_DECREMENT
#define HAVE_PRE_DECREMENT 0
#endif

#ifndef HAVE_POST_INCREMENT
#define HAVE_POST_INCREMENT 0
#endif

#ifndef HAVE_POST_DECREMENT
#define HAVE_POST_DECREMENT 0
#endif

#ifndef HAVE_POST_MODIFY_DISP
#define HAVE_POST_MODIFY_DISP 0
#endif

#ifndef HAVE_POST_MODIFY_REG
#define HAVE_POST_MODIFY_REG 0
#endif

#ifndef HAVE_PRE_MODIFY_DISP
#define HAVE_PRE_MODIFY_DISP 0
#endif

#ifndef HAVE_PRE_MODIFY_REG
#define HAVE_PRE_MODIFY_REG 0
#endif

/* Some architectures do not have complete pre/post increment/decrement
   instruction sets, or only move some modes efficiently.  These macros
   allow us to tune autoincrement generation.  */

#ifndef USE_LOAD_POST_INCREMENT
#define USE_LOAD_POST_INCREMENT(MODE)   HAVE_POST_INCREMENT
#endif

#ifndef USE_LOAD_POST_DECREMENT
#define USE_LOAD_POST_DECREMENT(MODE)   HAVE_POST_DECREMENT
#endif

#ifndef USE_LOAD_PRE_INCREMENT
#define USE_LOAD_PRE_INCREMENT(MODE)    HAVE_PRE_INCREMENT
#endif

#ifndef USE_LOAD_PRE_DECREMENT
#define USE_LOAD_PRE_DECREMENT(MODE)    HAVE_PRE_DECREMENT
#endif

#ifndef USE_STORE_POST_INCREMENT
#define USE_STORE_POST_INCREMENT(MODE)  HAVE_POST_INCREMENT
#endif

#ifndef USE_STORE_POST_DECREMENT
#define USE_STORE_POST_DECREMENT(MODE)  HAVE_POST_DECREMENT
#endif

#ifndef USE_STORE_PRE_INCREMENT
#define USE_STORE_PRE_INCREMENT(MODE)   HAVE_PRE_INCREMENT
#endif

#ifndef USE_STORE_PRE_DECREMENT
#define USE_STORE_PRE_DECREMENT(MODE)   HAVE_PRE_DECREMENT
#endif