} return false; } /* Return true if X is a vector with a duplicated element value, either constant or nonconstant. Store the duplicated element in *ELT if so. */ template inline bool vec_duplicate_p (T x, T *elt) { if (GET_CODE (x) == VEC_DUPLICATE && !VECTOR_MODE_P (GET_MODE (XEXP (x, 0)))) { *elt = XEXP (x, 0); return true; } return const_vec_duplicate_p (x, elt); } /* If X is a vector constant with a duplicated element value, return that element value, otherwise return X. */ template inline T unwrap_const_vec_duplicate (T x) { if (const_vec_duplicate_p (x)) x = CONST_VECTOR_ELT (x, 0); return x; } /* In emit-rtl.cc. */ extern wide_int const_vector_int_elt (const_rtx, unsigned int); extern rtx const_vector_elt (const_rtx, unsigned int); extern bool const_vec_series_p_1 (const_rtx, rtx *, rtx *); /* Return true if X is an integer constant vector that contains a linear series of the form: { B, B + S, B + 2 * S, B + 3 * S, ... } for a nonzero S. Store B and S in *BASE_OUT and *STEP_OUT on sucess. */ inline bool const_vec_series_p (const_rtx x, rtx *base_out, rtx *step_out) { if (GET_CODE (x) == CONST_VECTOR && CONST_VECTOR_NPATTERNS (x) == 1 && !CONST_VECTOR_DUPLICATE_P (x)) return const_vec_series_p_1 (x, base_out, step_out); return false; } /* Return true if X is a vector that contains a linear series of the form: { B, B + S, B + 2 * S, B + 3 * S, ... } where B and S are constant or nonconstant. Store B and S in *BASE_OUT and *STEP_OUT on sucess. */ inline bool vec_series_p (const_rtx x, rtx *base_out, rtx *step_out) { if (GET_CODE (x) == VEC_SERIES) { *base_out = XEXP (x, 0); *step_out = XEXP (x, 1); return true; } return const_vec_series_p (x, base_out, step_out); } /* Return true if CONST_VECTORs X and Y, which are known to have the same mode, also have the same encoding. This means that they are equal whenever their operands are equal. 
*/ inline bool same_vector_encodings_p (const_rtx x, const_rtx y) { /* Don't be fussy about the encoding of constant-length vectors, since XVECEXP (X, 0) and XVECEXP (Y, 0) list all the elements anyway. */ if (poly_uint64 (CONST_VECTOR_NUNITS (x)).is_constant ()) return true; return (CONST_VECTOR_NPATTERNS (x) == CONST_VECTOR_NPATTERNS (y) && (CONST_VECTOR_NELTS_PER_PATTERN (x) == CONST_VECTOR_NELTS_PER_PATTERN (y))); } /* Return the unpromoted (outer) mode of SUBREG_PROMOTED_VAR_P subreg X. */ inline scalar_int_mode subreg_unpromoted_mode (rtx x) { gcc_checking_assert (SUBREG_PROMOTED_VAR_P (x)); return as_a (GET_MODE (x)); } /* Return the promoted (inner) mode of SUBREG_PROMOTED_VAR_P subreg X. */ inline scalar_int_mode subreg_promoted_mode (rtx x) { gcc_checking_assert (SUBREG_PROMOTED_VAR_P (x)); return as_a (GET_MODE (SUBREG_REG (x))); } /* In emit-rtl.cc */ extern rtvec gen_rtvec_v (int, rtx *); extern rtvec gen_rtvec_v (int, rtx_insn **); extern rtx gen_reg_rtx (machine_mode); extern rtx gen_rtx_REG_offset (rtx, machine_mode, unsigned int, poly_int64); extern rtx gen_reg_rtx_offset (rtx, machine_mode, int); extern rtx gen_reg_rtx_and_attrs (rtx); extern rtx_code_label *gen_label_rtx (void); extern rtx gen_lowpart_common (machine_mode, rtx); /* In cse.cc */ extern rtx gen_lowpart_if_possible (machine_mode, rtx); /* In emit-rtl.cc */ extern rtx gen_highpart (machine_mode, rtx); extern rtx gen_highpart_mode (machine_mode, machine_mode, rtx); extern rtx operand_subword (rtx, poly_uint64, int, machine_mode); /* In emit-rtl.cc */ extern rtx operand_subword_force (rtx, poly_uint64, machine_mode); extern int subreg_lowpart_p (const_rtx); extern poly_uint64 subreg_size_lowpart_offset (poly_uint64, poly_uint64); /* Return true if a subreg of mode OUTERMODE would only access part of an inner register with mode INNERMODE. The other bits of the inner register would then be "don't care" on read. 
The behavior for writes depends on REGMODE_NATURAL_SIZE; bits in the same REGMODE_NATURAL_SIZE-d chunk would be clobbered but other bits would be preserved. */ inline bool partial_subreg_p (machine_mode outermode, machine_mode innermode) { /* Modes involved in a subreg must be ordered. In particular, we must always know at compile time whether the subreg is paradoxical. */ poly_int64 outer_prec = GET_MODE_PRECISION (outermode); poly_int64 inner_prec = GET_MODE_PRECISION (innermode); gcc_checking_assert (ordered_p (outer_prec, inner_prec)); return maybe_lt (outer_prec, inner_prec); } /* Likewise return true if X is a subreg that is smaller than the inner register. Use read_modify_subreg_p to test whether writing to such a subreg preserves any part of the inner register. */ inline bool partial_subreg_p (const_rtx x) { if (GET_CODE (x) != SUBREG) return false; return partial_subreg_p (GET_MODE (x), GET_MODE (SUBREG_REG (x))); } /* Return true if a subreg with the given outer and inner modes is paradoxical. */ inline bool paradoxical_subreg_p (machine_mode outermode, machine_mode innermode) { /* Modes involved in a subreg must be ordered. In particular, we must always know at compile time whether the subreg is paradoxical. */ poly_int64 outer_prec = GET_MODE_PRECISION (outermode); poly_int64 inner_prec = GET_MODE_PRECISION (innermode); gcc_checking_assert (ordered_p (outer_prec, inner_prec)); return maybe_gt (outer_prec, inner_prec); } /* Return true if X is a paradoxical subreg, false otherwise. */ inline bool paradoxical_subreg_p (const_rtx x) { if (GET_CODE (x) != SUBREG) return false; return paradoxical_subreg_p (GET_MODE (x), GET_MODE (SUBREG_REG (x))); } /* Return the SUBREG_BYTE for an OUTERMODE lowpart of an INNERMODE value. 
*/ inline poly_uint64 subreg_lowpart_offset (machine_mode outermode, machine_mode innermode) { return subreg_size_lowpart_offset (GET_MODE_SIZE (outermode), GET_MODE_SIZE (innermode)); } /* Given that a subreg has outer mode OUTERMODE and inner mode INNERMODE, return the smaller of the two modes if they are different sizes, otherwise return the outer mode. */ inline machine_mode narrower_subreg_mode (machine_mode outermode, machine_mode innermode) { return paradoxical_subreg_p (outermode, innermode) ? innermode : outermode; } /* Given that a subreg has outer mode OUTERMODE and inner mode INNERMODE, return the mode that is big enough to hold both the outer and inner values. Prefer the outer mode in the event of a tie. */ inline machine_mode wider_subreg_mode (machine_mode outermode, machine_mode innermode) { return partial_subreg_p (outermode, innermode) ? innermode : outermode; } /* Likewise for subreg X. */ inline machine_mode wider_subreg_mode (const_rtx x) { return wider_subreg_mode (GET_MODE (x), GET_MODE (SUBREG_REG (x))); } extern poly_uint64 subreg_size_highpart_offset (poly_uint64, poly_uint64); /* Return the SUBREG_BYTE for an OUTERMODE highpart of an INNERMODE value. 
*/ inline poly_uint64 subreg_highpart_offset (machine_mode outermode, machine_mode innermode) { return subreg_size_highpart_offset (GET_MODE_SIZE (outermode), GET_MODE_SIZE (innermode)); } extern poly_int64 byte_lowpart_offset (machine_mode, machine_mode); extern poly_int64 subreg_memory_offset (machine_mode, machine_mode, poly_uint64); extern poly_int64 subreg_memory_offset (const_rtx); extern rtx make_safe_from (rtx, rtx); extern rtx convert_memory_address_addr_space_1 (scalar_int_mode, rtx, addr_space_t, bool, bool); extern rtx convert_memory_address_addr_space (scalar_int_mode, rtx, addr_space_t); #define convert_memory_address(to_mode,x) \ convert_memory_address_addr_space ((to_mode), (x), ADDR_SPACE_GENERIC) extern const char *get_insn_name (int); extern rtx_insn *get_last_insn_anywhere (void); extern rtx_insn *get_first_nonnote_insn (void); extern rtx_insn *get_last_nonnote_insn (void); extern void start_sequence (void); extern void push_to_sequence (rtx_insn *); extern void push_to_sequence2 (rtx_insn *, rtx_insn *); extern void end_sequence (void); #if TARGET_SUPPORTS_WIDE_INT == 0 extern double_int rtx_to_double_int (const_rtx); #endif extern void cwi_output_hex (FILE *, const_rtx); #if TARGET_SUPPORTS_WIDE_INT == 0 extern rtx immed_double_const (HOST_WIDE_INT, HOST_WIDE_INT, machine_mode); #endif extern rtx immed_wide_int_const (const poly_wide_int_ref &, machine_mode); /* In varasm.cc */ extern rtx force_const_mem (machine_mode, rtx); /* In varasm.cc */ struct function; extern rtx get_pool_constant (const_rtx); extern rtx get_pool_constant_mark (rtx, bool *); extern fixed_size_mode get_pool_mode (const_rtx); extern rtx simplify_subtraction (rtx); extern void decide_function_section (tree); /* In emit-rtl.cc */ extern rtx_insn *emit_insn_before (rtx, rtx_insn *); extern rtx_insn *emit_insn_before_noloc (rtx, rtx_insn *, basic_block); extern rtx_insn *emit_insn_before_setloc (rtx, rtx_insn *, location_t); extern rtx_jump_insn *emit_jump_insn_before (rtx, 
rtx_insn *); extern rtx_jump_insn *emit_jump_insn_before_noloc (rtx, rtx_insn *); extern rtx_jump_insn *emit_jump_insn_before_setloc (rtx, rtx_insn *, location_t); extern rtx_insn *emit_call_insn_before (rtx, rtx_insn *); extern rtx_insn *emit_call_insn_before_noloc (rtx, rtx_insn *); extern rtx_insn *emit_call_insn_before_setloc (rtx, rtx_insn *, location_t); extern rtx_insn *emit_debug_insn_before (rtx, rtx_insn *); extern rtx_insn *emit_debug_insn_before_noloc (rtx, rtx_insn *); extern rtx_insn *emit_debug_insn_before_setloc (rtx, rtx_insn *, location_t); extern rtx_barrier *emit_barrier_before (rtx_insn *); extern rtx_code_label *emit_label_before (rtx_code_label *, rtx_insn *); extern rtx_note *emit_note_before (enum insn_note, rtx_insn *); extern rtx_insn *emit_insn_after (rtx, rtx_insn *); extern rtx_insn *emit_insn_after_noloc (rtx, rtx_insn *, basic_block); extern rtx_insn *emit_insn_after_setloc (rtx, rtx_insn *, location_t); extern rtx_jump_insn *emit_jump_insn_after (rtx, rtx_insn *); extern rtx_jump_insn *emit_jump_insn_after_noloc (rtx, rtx_insn *); extern rtx_jump_insn *emit_jump_insn_after_setloc (rtx, rtx_insn *, location_t); extern rtx_insn *emit_call_insn_after (rtx, rtx_insn *); extern rtx_insn *emit_call_insn_after_noloc (rtx, rtx_insn *); extern rtx_insn *emit_call_insn_after_setloc (rtx, rtx_insn *, location_t); extern rtx_insn *emit_debug_insn_after (rtx, rtx_insn *); extern rtx_insn *emit_debug_insn_after_noloc (rtx, rtx_insn *); extern rtx_insn *emit_debug_insn_after_setloc (rtx, rtx_insn *, location_t); extern rtx_barrier *emit_barrier_after (rtx_insn *); extern rtx_insn *emit_label_after (rtx_insn *, rtx_insn *); extern rtx_note *emit_note_after (enum insn_note, rtx_insn *); extern rtx_insn *emit_insn (rtx); extern rtx_insn *emit_debug_insn (rtx); extern rtx_insn *emit_jump_insn (rtx); extern rtx_insn *emit_call_insn (rtx); extern rtx_code_label *emit_label (rtx); extern rtx_jump_table_data *emit_jump_table_data (rtx); extern rtx_barrier 
*emit_barrier (void); extern rtx_note *emit_note (enum insn_note); extern rtx_note *emit_note_copy (rtx_note *); extern rtx_insn *gen_clobber (rtx); extern rtx_insn *emit_clobber (rtx); extern rtx_insn *gen_use (rtx); extern rtx_insn *emit_use (rtx); extern rtx_insn *make_insn_raw (rtx); extern void add_function_usage_to (rtx, rtx); extern rtx_call_insn *last_call_insn (void); extern rtx_insn *previous_insn (rtx_insn *); extern rtx_insn *next_insn (rtx_insn *); extern rtx_insn *prev_nonnote_insn (rtx_insn *); extern rtx_insn *next_nonnote_insn (rtx_insn *); extern rtx_insn *prev_nondebug_insn (rtx_insn *); extern rtx_insn *next_nondebug_insn (rtx_insn *); extern rtx_insn *prev_nonnote_nondebug_insn (rtx_insn *); extern rtx_insn *prev_nonnote_nondebug_insn_bb (rtx_insn *); extern rtx_insn *next_nonnote_nondebug_insn (rtx_insn *); extern rtx_insn *next_nonnote_nondebug_insn_bb (rtx_insn *); extern rtx_insn *prev_real_insn (rtx_insn *); extern rtx_insn *next_real_insn (rtx_insn *); extern rtx_insn *prev_real_nondebug_insn (rtx_insn *); extern rtx_insn *next_real_nondebug_insn (rtx); extern rtx_insn *prev_active_insn (rtx_insn *); extern rtx_insn *next_active_insn (rtx_insn *); extern int active_insn_p (const rtx_insn *); /* In emit-rtl.cc */ extern int insn_line (const rtx_insn *); extern const char * insn_file (const rtx_insn *); extern tree insn_scope (const rtx_insn *); extern expanded_location insn_location (const rtx_insn *); extern int insn_discriminator (const rtx_insn *); extern location_t prologue_location, epilogue_location; /* In jump.cc */ extern enum rtx_code reverse_condition (enum rtx_code); extern enum rtx_code reverse_condition_maybe_unordered (enum rtx_code); extern enum rtx_code swap_condition (enum rtx_code); extern enum rtx_code unsigned_condition (enum rtx_code); extern enum rtx_code signed_condition (enum rtx_code); extern void mark_jump_label (rtx, rtx_insn *, int); /* Return true if integer comparison operator CODE interprets its operands as 
unsigned. */ inline bool unsigned_condition_p (enum rtx_code code) { return unsigned_condition (code) == code; } /* In jump.cc */ extern rtx_insn *delete_related_insns (rtx); /* In recog.cc */ extern rtx *find_constant_term_loc (rtx *); /* In emit-rtl.cc */ extern rtx_insn *try_split (rtx, rtx_insn *, int); /* In insn-recog.cc (generated by genrecog). */ extern rtx_insn *split_insns (rtx, rtx_insn *); /* In simplify-rtx.cc */ /* A class that records the context in which a simplification is being mode. */ class simplify_context { public: rtx simplify_unary_operation (rtx_code, machine_mode, rtx, machine_mode); rtx simplify_binary_operation (rtx_code, machine_mode, rtx, rtx); rtx simplify_ternary_operation (rtx_code, machine_mode, machine_mode, rtx, rtx, rtx); rtx simplify_relational_operation (rtx_code, machine_mode, machine_mode, rtx, rtx); rtx simplify_subreg (machine_mode, rtx, machine_mode, poly_uint64); rtx lowpart_subreg (machine_mode, rtx, machine_mode); rtx simplify_merge_mask (rtx, rtx, int); rtx simplify_gen_unary (rtx_code, machine_mode, rtx, machine_mode); rtx simplify_gen_binary (rtx_code, machine_mode, rtx, rtx); rtx simplify_gen_ternary (rtx_code, machine_mode, machine_mode, rtx, rtx, rtx); rtx simplify_gen_relational (rtx_code, machine_mode, machine_mode, rtx, rtx); rtx simplify_gen_subreg (machine_mode, rtx, machine_mode, poly_uint64); rtx simplify_gen_vec_select (rtx, unsigned int); /* Tracks the level of MEM nesting for the value being simplified: 0 means the value is not in a MEM, >0 means it is. This is needed because the canonical representation of multiplication is different inside a MEM than outside. */ unsigned int mem_depth = 0; /* Tracks number of simplify_associative_operation calls performed during outermost simplify* call. */ unsigned int assoc_count = 0; /* Limit for the above number, return NULL from simplify_associative_operation after we reach that assoc_count. 
*/ static const unsigned int max_assoc_count = 64; private: rtx simplify_truncation (machine_mode, rtx, machine_mode); rtx simplify_byte_swapping_operation (rtx_code, machine_mode, rtx, rtx); rtx simplify_associative_operation (rtx_code, machine_mode, rtx, rtx); rtx simplify_distributive_operation (rtx_code, machine_mode, rtx, rtx); rtx simplify_logical_relational_operation (rtx_code, machine_mode, rtx, rtx); rtx simplify_binary_operation_series (rtx_code, machine_mode, rtx, rtx); rtx simplify_distribute_over_subregs (rtx_code, machine_mode, rtx, rtx); rtx simplify_shift_const_int (rtx_code, machine_mode, rtx, unsigned int); rtx simplify_plus_minus (rtx_code, machine_mode, rtx, rtx); rtx simplify_cond_clz_ctz (rtx, rtx_code, rtx, rtx); rtx simplify_unary_operation_1 (rtx_code, machine_mode, rtx); rtx simplify_binary_operation_1 (rtx_code, machine_mode, rtx, rtx, rtx, rtx); rtx simplify_ternary_operation_1 (rtx_code, machine_mode, machine_mode, rtx, rtx, rtx); rtx simplify_relational_operation_1 (rtx_code, machine_mode, machine_mode, rtx, rtx); }; inline rtx simplify_unary_operation (rtx_code code, machine_mode mode, rtx op, machine_mode op_mode) { return simplify_context ().simplify_unary_operation (code, mode, op, op_mode); } inline rtx simplify_binary_operation (rtx_code code, machine_mode mode, rtx op0, rtx op1) { return simplify_context ().simplify_binary_operation (code, mode, op0, op1); } inline rtx simplify_ternary_operation (rtx_code code, machine_mode mode, machine_mode op0_mode, rtx op0, rtx op1, rtx op2) { return simplify_context ().simplify_ternary_operation (code, mode, op0_mode, op0, op1, op2); } inline rtx simplify_relational_operation (rtx_code code, machine_mode mode, machine_mode op_mode, rtx op0, rtx op1) { return simplify_context ().simplify_relational_operation (code, mode, op_mode, op0, op1); } inline rtx simplify_subreg (machine_mode outermode, rtx op, machine_mode innermode, poly_uint64 byte) { return simplify_context ().simplify_subreg 
(outermode, op, innermode, byte); } inline rtx simplify_gen_unary (rtx_code code, machine_mode mode, rtx op, machine_mode op_mode) { return simplify_context ().simplify_gen_unary (code, mode, op, op_mode); } inline rtx simplify_gen_binary (rtx_code code, machine_mode mode, rtx op0, rtx op1) { return simplify_context ().simplify_gen_binary (code, mode, op0, op1); } inline rtx simplify_gen_ternary (rtx_code code, machine_mode mode, machine_mode op0_mode, rtx op0, rtx op1, rtx op2) { return simplify_context ().simplify_gen_ternary (code, mode, op0_mode, op0, op1, op2); } inline rtx simplify_gen_relational (rtx_code code, machine_mode mode, machine_mode op_mode, rtx op0, rtx op1) { return simplify_context ().simplify_gen_relational (code, mode, op_mode, op0, op1); } inline rtx simplify_gen_subreg (machine_mode outermode, rtx op, machine_mode innermode, poly_uint64 byte) { return simplify_context ().simplify_gen_subreg (outermode, op, innermode, byte); } inline rtx simplify_gen_vec_select (rtx op, unsigned int index) { return simplify_context ().simplify_gen_vec_select (op, index); } inline rtx lowpart_subreg (machine_mode outermode, rtx op, machine_mode innermode) { return simplify_context ().lowpart_subreg (outermode, op, innermode); } extern rtx simplify_const_unary_operation (enum rtx_code, machine_mode, rtx, machine_mode); extern rtx simplify_const_binary_operation (enum rtx_code, machine_mode, rtx, rtx); extern rtx simplify_const_relational_operation (enum rtx_code, machine_mode, rtx, rtx); extern rtx simplify_replace_fn_rtx (rtx, const_rtx, rtx (*fn) (rtx, const_rtx, void *), void *); extern rtx simplify_replace_rtx (rtx, const_rtx, rtx); extern rtx simplify_rtx (const_rtx); extern rtx avoid_constant_pool_reference (rtx); extern rtx delegitimize_mem_from_attrs (rtx); extern bool mode_signbit_p (machine_mode, const_rtx); extern bool val_signbit_p (machine_mode, unsigned HOST_WIDE_INT); extern bool val_signbit_known_set_p (machine_mode, unsigned HOST_WIDE_INT); 
extern bool val_signbit_known_clear_p (machine_mode, unsigned HOST_WIDE_INT); /* In reginfo.cc */ extern machine_mode choose_hard_reg_mode (unsigned int, unsigned int, const predefined_function_abi *); extern const HARD_REG_SET &simplifiable_subregs (const subreg_shape &); /* In emit-rtl.cc */ extern rtx set_for_reg_notes (rtx); extern rtx set_unique_reg_note (rtx, enum reg_note, rtx); extern rtx set_dst_reg_note (rtx, enum reg_note, rtx, rtx); extern void set_insn_deleted (rtx_insn *); /* Functions in rtlanal.cc */ extern rtx single_set_2 (const rtx_insn *, const_rtx); extern rtx simple_regno_set (rtx, unsigned int); extern bool contains_symbol_ref_p (const_rtx); extern bool contains_symbolic_reference_p (const_rtx); extern bool contains_constant_pool_address_p (const_rtx); extern void add_auto_inc_notes (rtx_insn *, rtx); /* Handle the cheap and common cases inline for performance. */ inline rtx single_set (const rtx_insn *insn) { if (!INSN_P (insn)) return NULL_RTX; if (GET_CODE (PATTERN (insn)) == SET) return PATTERN (insn); /* Defer to the more expensive case. 
*/ return single_set_2 (insn, PATTERN (insn)); } extern scalar_int_mode get_address_mode (rtx mem); extern int rtx_addr_can_trap_p (const_rtx); extern bool nonzero_address_p (const_rtx); extern int rtx_unstable_p (const_rtx); extern bool rtx_varies_p (const_rtx, bool); extern bool rtx_addr_varies_p (const_rtx, bool); extern rtx get_call_rtx_from (const rtx_insn *); extern tree get_call_fndecl (const rtx_insn *); extern HOST_WIDE_INT get_integer_term (const_rtx); extern rtx get_related_value (const_rtx); extern bool offset_within_block_p (const_rtx, HOST_WIDE_INT); extern void split_const (rtx, rtx *, rtx *); extern rtx strip_offset (rtx, poly_int64_pod *); extern poly_int64 get_args_size (const_rtx); extern bool unsigned_reg_p (rtx); extern int reg_mentioned_p (const_rtx, const_rtx); extern int count_occurrences (const_rtx, const_rtx, int); extern int reg_referenced_p (const_rtx, const_rtx); extern int reg_used_between_p (const_rtx, const rtx_insn *, const rtx_insn *); extern int reg_set_between_p (const_rtx, const rtx_insn *, const rtx_insn *); extern int commutative_operand_precedence (rtx); extern bool swap_commutative_operands_p (rtx, rtx); extern int modified_between_p (const_rtx, const rtx_insn *, const rtx_insn *); extern int no_labels_between_p (const rtx_insn *, const rtx_insn *); extern int modified_in_p (const_rtx, const_rtx); extern int reg_set_p (const_rtx, const_rtx); extern int multiple_sets (const_rtx); extern int set_noop_p (const_rtx); extern int noop_move_p (const rtx_insn *); extern bool refers_to_regno_p (unsigned int, unsigned int, const_rtx, rtx *); extern int reg_overlap_mentioned_p (const_rtx, const_rtx); extern const_rtx set_of (const_rtx, const_rtx); extern void record_hard_reg_sets (rtx, const_rtx, void *); extern void record_hard_reg_uses (rtx *, void *); extern void find_all_hard_regs (const_rtx, HARD_REG_SET *); extern void find_all_hard_reg_sets (const rtx_insn *, HARD_REG_SET *, bool); extern void note_pattern_stores (const_rtx, 
void (*) (rtx, const_rtx, void *), void *); extern void note_stores (const rtx_insn *, void (*) (rtx, const_rtx, void *), void *); extern void note_uses (rtx *, void (*) (rtx *, void *), void *); extern int dead_or_set_p (const rtx_insn *, const_rtx); extern int dead_or_set_regno_p (const rtx_insn *, unsigned int); extern rtx find_reg_note (const_rtx, enum reg_note, const_rtx); extern rtx find_regno_note (const_rtx, enum reg_note, unsigned int); extern rtx find_reg_equal_equiv_note (const_rtx); extern rtx find_constant_src (const rtx_insn *); extern int find_reg_fusage (const_rtx, enum rtx_code, const_rtx); extern int find_regno_fusage (const_rtx, enum rtx_code, unsigned int); extern rtx alloc_reg_note (enum reg_note, rtx, rtx); extern void add_reg_note (rtx, enum reg_note, rtx); extern void add_int_reg_note (rtx_insn *, enum reg_note, int); extern void add_args_size_note (rtx_insn *, poly_int64); extern void add_shallow_copy_of_reg_note (rtx_insn *, rtx); extern rtx duplicate_reg_note (rtx); extern void remove_note (rtx_insn *, const_rtx); extern bool remove_reg_equal_equiv_notes (rtx_insn *, bool = false); extern void remove_reg_equal_equiv_notes_for_regno (unsigned int); extern int side_effects_p (const_rtx); extern int volatile_refs_p (const_rtx); extern int volatile_insn_p (const_rtx); extern int may_trap_p_1 (const_rtx, unsigned); extern int may_trap_p (const_rtx); extern int may_trap_or_fault_p (const_rtx); extern bool can_throw_internal (const_rtx); extern bool can_throw_external (const_rtx); extern bool insn_could_throw_p (const_rtx); extern bool insn_nothrow_p (const_rtx); extern bool can_nonlocal_goto (const rtx_insn *); extern void copy_reg_eh_region_note_forward (rtx, rtx_insn *, rtx); extern void copy_reg_eh_region_note_backward (rtx, rtx_insn *, rtx); extern rtx replace_rtx (rtx, rtx, rtx, bool = false); extern void replace_label (rtx *, rtx, rtx, bool); extern void replace_label_in_insn (rtx_insn *, rtx_insn *, rtx_insn *, bool); extern bool 
rtx_referenced_p (const_rtx, const_rtx); extern bool tablejump_p (const rtx_insn *, rtx_insn **, rtx_jump_table_data **); extern rtx tablejump_casesi_pattern (const rtx_insn *insn); extern int computed_jump_p (const rtx_insn *); extern bool tls_referenced_p (const_rtx); extern bool contains_mem_rtx_p (rtx x); extern bool register_asm_p (const_rtx); /* Overload for refers_to_regno_p for checking a single register. */ inline bool refers_to_regno_p (unsigned int regnum, const_rtx x, rtx* loc = NULL) { return refers_to_regno_p (regnum, regnum + 1, x, loc); } /* Callback for for_each_inc_dec, to process the autoinc operation OP within MEM that sets DEST to SRC + SRCOFF, or SRC if SRCOFF is NULL. The callback is passed the same opaque ARG passed to for_each_inc_dec. Return zero to continue looking for other autoinc operations or any other value to interrupt the traversal and return that value to the caller of for_each_inc_dec. */ typedef int (*for_each_inc_dec_fn) (rtx mem, rtx op, rtx dest, rtx src, rtx srcoff, void *arg); extern int for_each_inc_dec (rtx, for_each_inc_dec_fn, void *arg); typedef int (*rtx_equal_p_callback_function) (const_rtx *, const_rtx *, rtx *, rtx *); extern int rtx_equal_p_cb (const_rtx, const_rtx, rtx_equal_p_callback_function); typedef int (*hash_rtx_callback_function) (const_rtx, machine_mode, rtx *, machine_mode *); extern unsigned hash_rtx_cb (const_rtx, machine_mode, int *, int *, bool, hash_rtx_callback_function); extern rtx regno_use_in (unsigned int, rtx); extern int auto_inc_p (const_rtx); extern bool in_insn_list_p (const rtx_insn_list *, const rtx_insn *); extern void remove_node_from_insn_list (const rtx_insn *, rtx_insn_list **); extern int loc_mentioned_in_p (rtx *, const_rtx); extern rtx_insn *find_first_parameter_load (rtx_insn *, rtx_insn *); extern bool keep_with_call_p (const rtx_insn *); extern bool label_is_jump_target_p (const_rtx, const rtx_insn *); extern int pattern_cost (rtx, bool); extern int insn_cost (rtx_insn *, 
bool); extern unsigned seq_cost (const rtx_insn *, bool); /* Given an insn and condition, return a canonical description of the test being made. */ extern rtx canonicalize_condition (rtx_insn *, rtx, int, rtx_insn **, rtx, int, int); /* Given a JUMP_INSN, return a canonical description of the test being made. */ extern rtx get_condition (rtx_insn *, rtx_insn **, int, int); /* Information about a subreg of a hard register. */ struct subreg_info { /* Offset of first hard register involved in the subreg. */ int offset; /* Number of hard registers involved in the subreg. In the case of a paradoxical subreg, this is the number of registers that would be modified by writing to the subreg; some of them may be don't-care when reading from the subreg. */ int nregs; /* Whether this subreg can be represented as a hard reg with the new mode (by adding OFFSET to the original hard register). */ bool representable_p; }; extern void subreg_get_info (unsigned int, machine_mode, poly_uint64, machine_mode, struct subreg_info *); /* lists.cc */ extern void free_EXPR_LIST_list (rtx_expr_list **); extern void free_INSN_LIST_list (rtx_insn_list **); extern void free_EXPR_LIST_node (rtx); extern void free_INSN_LIST_node (rtx); extern rtx_insn_list *alloc_INSN_LIST (rtx, rtx); extern rtx_insn_list *copy_INSN_LIST (rtx_insn_list *); extern rtx_insn_list *concat_INSN_LIST (rtx_insn_list *, rtx_insn_list *); extern rtx_expr_list *alloc_EXPR_LIST (int, rtx, rtx); extern void remove_free_INSN_LIST_elem (rtx_insn *, rtx_insn_list **); extern rtx remove_list_elem (rtx, rtx *); extern rtx_insn *remove_free_INSN_LIST_node (rtx_insn_list **); extern rtx remove_free_EXPR_LIST_node (rtx_expr_list **); /* reginfo.cc */ /* Resize reg info. */ extern bool resize_reg_info (void); /* Free up register info memory. 
*/ extern void free_reg_info (void); extern void init_subregs_of_mode (void); extern void finish_subregs_of_mode (void); extern void reginfo_cc_finalize (void); /* recog.cc */ extern rtx extract_asm_operands (rtx); extern int asm_noperands (const_rtx); extern const char *decode_asm_operands (rtx, rtx *, rtx **, const char **, machine_mode *, location_t *); extern void get_referenced_operands (const char *, bool *, unsigned int); extern enum reg_class reg_preferred_class (int); extern enum reg_class reg_alternate_class (int); extern enum reg_class reg_allocno_class (int); extern void setup_reg_classes (int, enum reg_class, enum reg_class, enum reg_class); extern void split_all_insns (void); extern unsigned int split_all_insns_noflow (void); #define MAX_SAVED_CONST_INT 64 extern GTY(()) rtx const_int_rtx[MAX_SAVED_CONST_INT * 2 + 1]; #define const0_rtx (const_int_rtx[MAX_SAVED_CONST_INT]) #define const1_rtx (const_int_rtx[MAX_SAVED_CONST_INT+1]) #define const2_rtx (const_int_rtx[MAX_SAVED_CONST_INT+2]) #define constm1_rtx (const_int_rtx[MAX_SAVED_CONST_INT-1]) extern GTY(()) rtx const_true_rtx; extern GTY(()) rtx const_tiny_rtx[4][(int) MAX_MACHINE_MODE]; /* Returns a constant 0 rtx in mode MODE. Integer modes are treated the same as VOIDmode. */ #define CONST0_RTX(MODE) (const_tiny_rtx[0][(int) (MODE)]) /* Likewise, for the constants 1 and 2 and -1. */ #define CONST1_RTX(MODE) (const_tiny_rtx[1][(int) (MODE)]) #define CONST2_RTX(MODE) (const_tiny_rtx[2][(int) (MODE)]) #define CONSTM1_RTX(MODE) (const_tiny_rtx[3][(int) (MODE)]) extern GTY(()) rtx pc_rtx; extern GTY(()) rtx ret_rtx; extern GTY(()) rtx simple_return_rtx; extern GTY(()) rtx_insn *invalid_insn_rtx; /* If HARD_FRAME_POINTER_REGNUM is defined, then a special dummy reg is used to represent the frame pointer. This is because the hard frame pointer and the automatic variables are separated by an amount that cannot be determined until after register allocation. 
We can assume that in this case ELIMINABLE_REGS will be defined, one
   action of which will be to eliminate FRAME_POINTER_REGNUM into
   HARD_FRAME_POINTER_REGNUM.  */

/* Targets without a separate hard frame pointer fall back to the (soft)
   frame pointer register.  */
#ifndef HARD_FRAME_POINTER_REGNUM
#define HARD_FRAME_POINTER_REGNUM FRAME_POINTER_REGNUM
#endif

#ifndef HARD_FRAME_POINTER_IS_FRAME_POINTER
#define HARD_FRAME_POINTER_IS_FRAME_POINTER \
  (HARD_FRAME_POINTER_REGNUM == FRAME_POINTER_REGNUM)
#endif

#ifndef HARD_FRAME_POINTER_IS_ARG_POINTER
#define HARD_FRAME_POINTER_IS_ARG_POINTER \
  (HARD_FRAME_POINTER_REGNUM == ARG_POINTER_REGNUM)
#endif

/* Index labels for global_rtl.  */
enum global_rtl_index
{
  GR_STACK_POINTER,
  GR_FRAME_POINTER,
  /* For register elimination to work properly these hard_frame_pointer_rtx,
     frame_pointer_rtx, and arg_pointer_rtx must be the same if they refer to
     the same register.  */
#if FRAME_POINTER_REGNUM == ARG_POINTER_REGNUM
  GR_ARG_POINTER = GR_FRAME_POINTER,
#endif
#if HARD_FRAME_POINTER_IS_FRAME_POINTER
  GR_HARD_FRAME_POINTER = GR_FRAME_POINTER,
#else
  GR_HARD_FRAME_POINTER,
#endif
#if FRAME_POINTER_REGNUM != ARG_POINTER_REGNUM
#if HARD_FRAME_POINTER_IS_ARG_POINTER
  GR_ARG_POINTER = GR_HARD_FRAME_POINTER,
#else
  GR_ARG_POINTER,
#endif
#endif
  GR_VIRTUAL_INCOMING_ARGS,
  GR_VIRTUAL_STACK_ARGS,
  GR_VIRTUAL_STACK_DYNAMIC,
  GR_VIRTUAL_OUTGOING_ARGS,
  GR_VIRTUAL_CFA,
  GR_VIRTUAL_PREFERRED_STACK_BOUNDARY,

  GR_MAX
};

/* Target-dependent globals.  */
struct GTY(()) target_rtl {
  /* All references to the hard registers in global_rtl_index go through
     these unique rtl objects.  On machines where the frame-pointer and
     arg-pointer are the same register, they use the same unique object.

     After register allocation, other rtl objects which used to be
     pseudo-regs may be clobbered to refer to the frame-pointer register.
     But references that were originally to the frame-pointer can be
     distinguished from the others because they contain frame_pointer_rtx.

     When to use frame_pointer_rtx and hard_frame_pointer_rtx is a little
     tricky: until register elimination has taken place hard_frame_pointer_rtx
     should be used if it is being set, and frame_pointer_rtx otherwise.
     After register elimination hard_frame_pointer_rtx should always be used.
     On machines where the two registers are the same (most of them), these
     are the same rtx.  */
  rtx x_global_rtl[GR_MAX];

  /* A unique representation of (REG:Pmode PIC_OFFSET_TABLE_REGNUM).  */
  rtx x_pic_offset_table_rtx;

  /* A unique representation of (REG:Pmode RETURN_ADDRESS_POINTER_REGNUM).
     This is used to implement __builtin_return_address for some machines;
     see for instance the MIPS port.  */
  rtx x_return_address_pointer_rtx;

  /* Commonly used RTL for hard registers.  These objects are not
     necessarily unique, so we allocate them separately from global_rtl.
     They are initialized once per compilation unit, then copied into
     regno_reg_rtx at the beginning of each function.  */
  rtx x_initial_regno_reg_rtx[FIRST_PSEUDO_REGISTER];

  /* A sample (mem:M stack_pointer_rtx) rtx for each mode M.  */
  rtx x_top_of_stack[MAX_MACHINE_MODE];

  /* Static hunks of RTL used by the aliasing code; these are treated as
     persistent to avoid unnecessary RTL allocations.  */
  rtx x_static_reg_base_value[FIRST_PSEUDO_REGISTER];

  /* The default memory attributes for each mode.  */
  class mem_attrs *x_mode_mem_attrs[(int) MAX_MACHINE_MODE];

  /* Track if RTL has been initialized.  */
  bool target_specific_initialized;
};

extern GTY(()) struct target_rtl default_target_rtl;
#if SWITCHABLE_TARGET
extern struct target_rtl *this_target_rtl;
#else
#define this_target_rtl (&default_target_rtl)
#endif

/* Convenience accessors for the current target's target_rtl fields.  */
#define global_rtl \
  (this_target_rtl->x_global_rtl)
#define pic_offset_table_rtx \
  (this_target_rtl->x_pic_offset_table_rtx)
#define return_address_pointer_rtx \
  (this_target_rtl->x_return_address_pointer_rtx)
#define top_of_stack \
  (this_target_rtl->x_top_of_stack)
#define mode_mem_attrs \
  (this_target_rtl->x_mode_mem_attrs)

/* All references to certain hard regs, except those created
   by allocating pseudo regs into them (when that's possible),
   go through these unique rtx objects.  */
#define stack_pointer_rtx	(global_rtl[GR_STACK_POINTER])
#define frame_pointer_rtx	(global_rtl[GR_FRAME_POINTER])
#define hard_frame_pointer_rtx	(global_rtl[GR_HARD_FRAME_POINTER])
#define arg_pointer_rtx		(global_rtl[GR_ARG_POINTER])

#ifndef GENERATOR_FILE
/* Return the attributes of a MEM rtx.  */
inline const class mem_attrs *
get_mem_attrs (const_rtx x)
{
  class mem_attrs *attrs;

  attrs = MEM_ATTRS (x);
  /* A MEM with no attributes of its own uses the mode's defaults.  */
  if (!attrs)
    attrs = mode_mem_attrs[(int) GET_MODE (x)];
  return attrs;
}
#endif

/* Include the RTL generation functions.  */

#ifndef GENERATOR_FILE
#include "genrtl.h"
#undef gen_rtx_ASM_INPUT
#define gen_rtx_ASM_INPUT(MODE, ARG0) \
  gen_rtx_fmt_si (ASM_INPUT, (MODE), (ARG0), 0)
#define gen_rtx_ASM_INPUT_loc(MODE, ARG0, LOC) \
  gen_rtx_fmt_si (ASM_INPUT, (MODE), (ARG0), (LOC))
#endif

/* There are some RTL codes that require special attention; the
   generation functions included above do the raw handling.  If you
   add to this list, modify special_rtx in gengenrtl.cc as well.
*/
extern rtx_expr_list *gen_rtx_EXPR_LIST (machine_mode, rtx, rtx);
extern rtx_insn_list *gen_rtx_INSN_LIST (machine_mode, rtx, rtx);
extern rtx_insn *
gen_rtx_INSN (machine_mode mode, rtx_insn *prev_insn, rtx_insn *next_insn,
	      basic_block bb, rtx pattern, int location, int code,
	      rtx reg_notes);
extern rtx gen_rtx_CONST_INT (machine_mode, HOST_WIDE_INT);
extern rtx gen_rtx_CONST_VECTOR (machine_mode, rtvec);
extern void set_mode_and_regno (rtx, machine_mode, unsigned int);
extern rtx init_raw_REG (rtx, machine_mode, unsigned int);
extern rtx gen_raw_REG (machine_mode, unsigned int);
/* Like gen_raw_REG, but allocate the REG with rtx_alloca, i.e. with
   automatic (stack) lifetime.  */
#define alloca_raw_REG(mode, regno) \
  init_raw_REG (rtx_alloca (REG), (mode), (regno))
extern rtx gen_rtx_REG (machine_mode, unsigned int);
extern rtx gen_rtx_SUBREG (machine_mode, rtx, poly_uint64);
extern rtx gen_rtx_MEM (machine_mode, rtx);
extern rtx gen_rtx_VAR_LOCATION (machine_mode, tree, rtx,
				 enum var_init_status);

#ifdef GENERATOR_FILE
#define PUT_MODE(RTX, MODE) PUT_MODE_RAW (RTX, MODE)
#else
/* Set the mode of X to MODE.  REGs are routed through set_mode_and_regno
   so that the mode and register number are updated consistently.  */
inline void
PUT_MODE (rtx x, machine_mode mode)
{
  if (REG_P (x))
    set_mode_and_regno (x, mode, REGNO (x));
  else
    PUT_MODE_RAW (x, mode);
}
#endif

#define GEN_INT(N)  gen_rtx_CONST_INT (VOIDmode, (N))

/* Virtual registers are used during RTL generation to refer to locations into
   the stack frame when the actual location isn't known until RTL generation
   is complete.  The routine instantiate_virtual_regs replaces these with
   the proper value, which is normally {frame,arg,stack}_pointer_rtx plus
   a constant.  */

#define FIRST_VIRTUAL_REGISTER	(FIRST_PSEUDO_REGISTER)

/* This points to the first word of the incoming arguments passed on the
   stack, either by the caller or by the callee when pretending it was
   passed by the caller.  */
#define virtual_incoming_args_rtx	(global_rtl[GR_VIRTUAL_INCOMING_ARGS])
#define VIRTUAL_INCOMING_ARGS_REGNUM	(FIRST_VIRTUAL_REGISTER)

/* If FRAME_GROWS_DOWNWARD, this points to immediately above the first
   variable on the stack.
Otherwise, it points to the first variable on the stack.  */
#define virtual_stack_vars_rtx		(global_rtl[GR_VIRTUAL_STACK_ARGS])
#define VIRTUAL_STACK_VARS_REGNUM	((FIRST_VIRTUAL_REGISTER) + 1)

/* This points to the location of dynamically-allocated memory on the stack
   immediately after the stack pointer has been adjusted by the amount
   desired.  */
#define virtual_stack_dynamic_rtx	(global_rtl[GR_VIRTUAL_STACK_DYNAMIC])
#define VIRTUAL_STACK_DYNAMIC_REGNUM	((FIRST_VIRTUAL_REGISTER) + 2)

/* This points to the location in the stack at which outgoing arguments should
   be written when the stack is pre-pushed (arguments pushed using push insns
   always use sp).  */
#define virtual_outgoing_args_rtx	(global_rtl[GR_VIRTUAL_OUTGOING_ARGS])
#define VIRTUAL_OUTGOING_ARGS_REGNUM	((FIRST_VIRTUAL_REGISTER) + 3)

/* This points to the Canonical Frame Address of the function.  This should
   correspond to the CFA produced by INCOMING_FRAME_SP_OFFSET, but is
   calculated relative to the arg pointer for simplicity; neither the frame
   pointer nor the stack pointer is necessarily fixed relative to the CFA
   until after reload.  */
#define virtual_cfa_rtx			(global_rtl[GR_VIRTUAL_CFA])
#define VIRTUAL_CFA_REGNUM		((FIRST_VIRTUAL_REGISTER) + 4)

#define LAST_VIRTUAL_POINTER_REGISTER	((FIRST_VIRTUAL_REGISTER) + 4)

/* This is replaced by crtl->preferred_stack_boundary / BITS_PER_UNIT
   when finalized.  */
#define virtual_preferred_stack_boundary_rtx \
  (global_rtl[GR_VIRTUAL_PREFERRED_STACK_BOUNDARY])
#define VIRTUAL_PREFERRED_STACK_BOUNDARY_REGNUM \
  ((FIRST_VIRTUAL_REGISTER) + 5)

#define LAST_VIRTUAL_REGISTER		((FIRST_VIRTUAL_REGISTER) + 5)

/* Nonzero if REGNUM is a pointer into the stack frame.  */
#define REGNO_PTR_FRAME_P(REGNUM)		\
  ((REGNUM) == STACK_POINTER_REGNUM		\
   || (REGNUM) == FRAME_POINTER_REGNUM		\
   || (REGNUM) == HARD_FRAME_POINTER_REGNUM	\
   || (REGNUM) == ARG_POINTER_REGNUM		\
   || ((REGNUM) >= FIRST_VIRTUAL_REGISTER	\
       && (REGNUM) <= LAST_VIRTUAL_POINTER_REGISTER))

/* REGNUM never really appearing in the INSN stream.
*/
#define INVALID_REGNUM			(~(unsigned int) 0)

/* REGNUM for which no debug information can be generated.  */
#define IGNORED_DWARF_REGNUM		(INVALID_REGNUM - 1)

extern rtx output_constant_def (tree, int);
extern rtx lookup_constant_def (tree);

/* Nonzero after end of reload pass.
   Set to 1 or 0 by reload1.cc.  */
extern int reload_completed;

/* Nonzero after thread_prologue_and_epilogue_insns has run.  */
extern int epilogue_completed;

/* Set to 1 while reload_as_needed is operating.
   Required by some machines to handle any generated moves differently.  */
extern int reload_in_progress;

/* Set to 1 while in lra.  */
extern int lra_in_progress;

/* This macro indicates whether you may create a new
   pseudo-register.  */
#define can_create_pseudo_p() (!reload_in_progress && !reload_completed)

#ifdef STACK_REGS
/* Nonzero after end of regstack pass.
   Set to 1 or 0 by reg-stack.cc.  */
extern int regstack_completed;
#endif

/* If this is nonzero, we do not bother generating VOLATILE
   around volatile memory references, and we are willing to
   output indirect addresses.  If cse is to follow, we reject
   indirect addresses so a useful potential cse is generated;
   if it is used only once, instruction combination will
   produce the same indirect address eventually.  */
extern int cse_not_expected;

/* Translates rtx code to tree code, for those codes needed by
   real_arithmetic.  The function returns an int because the caller
   may not know what `enum tree_code' means.
*/
extern int rtx_to_tree_code (enum rtx_code);

/* In cse.cc */
extern int delete_trivially_dead_insns (rtx_insn *, int);
extern int exp_equiv_p (const_rtx, const_rtx, int, bool);
extern unsigned hash_rtx (const_rtx x, machine_mode, int *, int *, bool);

/* In dse.cc */
extern bool check_for_inc_dec (rtx_insn *insn);

/* In jump.cc */
extern int comparison_dominates_p (enum rtx_code, enum rtx_code);
extern bool jump_to_label_p (const rtx_insn *);
extern int condjump_p (const rtx_insn *);
extern int any_condjump_p (const rtx_insn *);
extern int any_uncondjump_p (const rtx_insn *);
extern rtx pc_set (const rtx_insn *);
extern rtx condjump_label (const rtx_insn *);
extern int simplejump_p (const rtx_insn *);
extern int returnjump_p (const rtx_insn *);
extern int eh_returnjump_p (rtx_insn *);
extern int onlyjump_p (const rtx_insn *);
extern int invert_jump_1 (rtx_jump_insn *, rtx);
extern int invert_jump (rtx_jump_insn *, rtx, int);
extern int rtx_renumbered_equal_p (const_rtx, const_rtx);
extern int true_regnum (const_rtx);
extern unsigned int reg_or_subregno (const_rtx);
extern int redirect_jump_1 (rtx_insn *, rtx);
extern void redirect_jump_2 (rtx_jump_insn *, rtx, rtx, int, int);
extern int redirect_jump (rtx_jump_insn *, rtx, int);
extern void rebuild_jump_labels (rtx_insn *);
extern void rebuild_jump_labels_chain (rtx_insn *);
extern rtx reversed_comparison (const_rtx, machine_mode);
extern enum rtx_code reversed_comparison_code (const_rtx, const rtx_insn *);
extern enum rtx_code reversed_comparison_code_parts (enum rtx_code, const_rtx,
						     const_rtx,
						     const rtx_insn *);
extern void delete_for_peephole (rtx_insn *, rtx_insn *);
extern int condjump_in_parallel_p (const rtx_insn *);

/* In emit-rtl.cc.
*/
extern int max_reg_num (void);
extern int max_label_num (void);
extern int get_first_label_num (void);
extern void maybe_set_first_label_num (rtx_code_label *);
extern void delete_insns_since (rtx_insn *);
extern void mark_reg_pointer (rtx, int);
extern void mark_user_reg (rtx);
extern void reset_used_flags (rtx);
extern void set_used_flags (rtx);
extern void reorder_insns (rtx_insn *, rtx_insn *, rtx_insn *);
extern void reorder_insns_nobb (rtx_insn *, rtx_insn *, rtx_insn *);
extern int get_max_insn_count (void);
extern int in_sequence_p (void);
extern void init_emit (void);
extern void init_emit_regs (void);
extern void init_derived_machine_modes (void);
extern void init_emit_once (void);
extern void push_topmost_sequence (void);
extern void pop_topmost_sequence (void);
extern void set_new_first_and_last_insn (rtx_insn *, rtx_insn *);
extern unsigned int unshare_all_rtl (void);
extern void unshare_all_rtl_again (rtx_insn *);
extern void unshare_all_rtl_in_chain (rtx_insn *);
extern void verify_rtl_sharing (void);
extern void add_insn (rtx_insn *);
extern void add_insn_before (rtx_insn *, rtx_insn *, basic_block);
extern void add_insn_after (rtx_insn *, rtx_insn *, basic_block);
extern void remove_insn (rtx_insn *);
extern rtx_insn *emit (rtx, bool = true);
extern void emit_insn_at_entry (rtx);
extern rtx gen_lowpart_SUBREG (machine_mode, rtx);
extern rtx gen_const_mem (machine_mode, rtx);
extern rtx gen_frame_mem (machine_mode, rtx);
extern rtx gen_tmp_stack_mem (machine_mode, rtx);
extern bool validate_subreg (machine_mode, machine_mode,
			     const_rtx, poly_uint64);

/* In combine.cc */
extern unsigned int extended_count (const_rtx, machine_mode, int);
extern rtx remove_death (unsigned int, rtx_insn *);
extern void dump_combine_stats (FILE *);
extern void dump_combine_total_stats (FILE *);
extern rtx make_compound_operation (rtx, enum rtx_code);

/* In sched-rgn.cc.  */
extern void schedule_insns (void);

/* In sched-ebb.cc.
*/
extern void schedule_ebbs (void);

/* In sel-sched-dump.cc.  */
extern void sel_sched_fix_param (const char *param, const char *val);

/* In print-rtl.cc */
extern const char *print_rtx_head;
extern void debug (const rtx_def &ref);
extern void debug (const rtx_def *ptr);
extern void debug_rtx (const_rtx);
extern void debug_rtx_list (const rtx_insn *, int);
extern void debug_rtx_range (const rtx_insn *, const rtx_insn *);
extern const rtx_insn *debug_rtx_find (const rtx_insn *, int);
extern void print_mem_expr (FILE *, const_tree);
extern void print_rtl (FILE *, const_rtx);
extern void print_simple_rtl (FILE *, const_rtx);
extern int print_rtl_single (FILE *, const_rtx);
extern int print_rtl_single_with_indent (FILE *, const_rtx, int);
extern void print_inline_rtx (FILE *, const_rtx, int);

/* In stmt.cc */
extern void expand_null_return (void);
extern void expand_naked_return (void);
extern void emit_jump (rtx);

/* Memory operation built-ins differ by return value.  The mapping of
   the enum values is as follows:
   - RETURN_BEGIN - return destination, e.g. memcpy
   - RETURN_END - return destination + n, e.g. mempcpy
   - RETURN_END_MINUS_ONE - return a pointer to the terminating null
     byte of the string, e.g.
strcpy */
enum memop_ret
{
  RETURN_BEGIN,
  RETURN_END,
  RETURN_END_MINUS_ONE
};

/* In expr.cc */
extern rtx move_by_pieces (rtx, rtx, unsigned HOST_WIDE_INT,
			   unsigned int, memop_ret);
extern poly_int64 find_args_size_adjust (rtx_insn *);
extern poly_int64 fixup_args_size_notes (rtx_insn *, rtx_insn *, poly_int64);

/* In expmed.cc */
extern void init_expmed (void);
extern void expand_inc (rtx, rtx);
extern void expand_dec (rtx, rtx);

/* In lower-subreg.cc */
extern void init_lower_subreg (void);

/* In gcse.cc */
extern bool can_copy_p (machine_mode);
extern bool can_assign_to_reg_without_clobbers_p (rtx, machine_mode);
extern rtx_insn *prepare_copy_insn (rtx, rtx);

/* In cprop.cc */
extern rtx fis_get_condition (rtx_insn *);

/* In ira.cc */
extern HARD_REG_SET eliminable_regset;
extern void mark_elimination (int, int);

/* In reginfo.cc */
extern int reg_classes_intersect_p (reg_class_t, reg_class_t);
extern int reg_class_subset_p (reg_class_t, reg_class_t);
extern void globalize_reg (tree, int);
extern void init_reg_modes_target (void);
extern void init_regs (void);
extern void reinit_regs (void);
extern void init_fake_stack_mems (void);
extern void save_register_info (void);
extern void init_reg_sets (void);
extern void regclass (rtx, int);
extern void reg_scan (rtx_insn *, unsigned int);
extern void fix_register (const char *, int, int);
extern const HARD_REG_SET *valid_mode_changes_for_regno (unsigned int);

/* In reload1.cc */
extern int function_invariant_p (const_rtx);

/* In calls.cc */
enum libcall_type
{
  LCT_NORMAL = 0,
  LCT_CONST = 1,
  LCT_PURE = 2,
  LCT_NORETURN = 3,
  LCT_THROW = 4,
  LCT_RETURNS_TWICE = 5
};

/* Common worker for the emit_library_call* convenience wrappers below.  */
extern rtx emit_library_call_value_1 (int, rtx, rtx, enum libcall_type,
				      machine_mode, int, rtx_mode_t *);

/* Output a library call and discard the returned value.
   FUN is the address of the function, as a SYMBOL_REF rtx, and
   OUTMODE is the mode of the (discarded) return value.
FN_TYPE is LCT_NORMAL for `normal' calls, LCT_CONST for `const' calls, LCT_PURE for `pure' calls, or another LCT_ value for other types of library calls. There are different overloads of this function for different numbers of arguments. In each case the argument value is followed by its mode. */ inline void emit_library_call (rtx fun, libcall_type fn_type, machine_mode outmode) { emit_library_call_value_1 (0, fun, NULL_RTX, fn_type, outmode, 0, NULL); } inline void emit_library_call (rtx fun, libcall_type fn_type, machine_mode outmode, rtx arg1, machine_mode arg1_mode) { rtx_mode_t args[] = { rtx_mode_t (arg1, arg1_mode) }; emit_library_call_value_1 (0, fun, NULL_RTX, fn_type, outmode, 1, args); } inline void emit_library_call (rtx fun, libcall_type fn_type, machine_mode outmode, rtx arg1, machine_mode arg1_mode, rtx arg2, machine_mode arg2_mode) { rtx_mode_t args[] = { rtx_mode_t (arg1, arg1_mode), rtx_mode_t (arg2, arg2_mode) }; emit_library_call_value_1 (0, fun, NULL_RTX, fn_type, outmode, 2, args); } inline void emit_library_call (rtx fun, libcall_type fn_type, machine_mode outmode, rtx arg1, machine_mode arg1_mode, rtx arg2, machine_mode arg2_mode, rtx arg3, machine_mode arg3_mode) { rtx_mode_t args[] = { rtx_mode_t (arg1, arg1_mode), rtx_mode_t (arg2, arg2_mode), rtx_mode_t (arg3, arg3_mode) }; emit_library_call_value_1 (0, fun, NULL_RTX, fn_type, outmode, 3, args); } inline void emit_library_call (rtx fun, libcall_type fn_type, machine_mode outmode, rtx arg1, machine_mode arg1_mode, rtx arg2, machine_mode arg2_mode, rtx arg3, machine_mode arg3_mode, rtx arg4, machine_mode arg4_mode) { rtx_mode_t args[] = { rtx_mode_t (arg1, arg1_mode), rtx_mode_t (arg2, arg2_mode), rtx_mode_t (arg3, arg3_mode), rtx_mode_t (arg4, arg4_mode) }; emit_library_call_value_1 (0, fun, NULL_RTX, fn_type, outmode, 4, args); } /* Like emit_library_call, but return the value produced by the call. Use VALUE to store the result if it is nonnull, otherwise pick a convenient location. 
*/ inline rtx emit_library_call_value (rtx fun, rtx value, libcall_type fn_type, machine_mode outmode) { return emit_library_call_value_1 (1, fun, value, fn_type, outmode, 0, NULL); } inline rtx emit_library_call_value (rtx fun, rtx value, libcall_type fn_type, machine_mode outmode, rtx arg1, machine_mode arg1_mode) { rtx_mode_t args[] = { rtx_mode_t (arg1, arg1_mode) }; return emit_library_call_value_1 (1, fun, value, fn_type, outmode, 1, args); } inline rtx emit_library_call_value (rtx fun, rtx value, libcall_type fn_type, machine_mode outmode, rtx arg1, machine_mode arg1_mode, rtx arg2, machine_mode arg2_mode) { rtx_mode_t args[] = { rtx_mode_t (arg1, arg1_mode), rtx_mode_t (arg2, arg2_mode) }; return emit_library_call_value_1 (1, fun, value, fn_type, outmode, 2, args); } inline rtx emit_library_call_value (rtx fun, rtx value, libcall_type fn_type, machine_mode outmode, rtx arg1, machine_mode arg1_mode, rtx arg2, machine_mode arg2_mode, rtx arg3, machine_mode arg3_mode) { rtx_mode_t args[] = { rtx_mode_t (arg1, arg1_mode), rtx_mode_t (arg2, arg2_mode), rtx_mode_t (arg3, arg3_mode) }; return emit_library_call_value_1 (1, fun, value, fn_type, outmode, 3, args); } inline rtx emit_library_call_value (rtx fun, rtx value, libcall_type fn_type, machine_mode outmode, rtx arg1, machine_mode arg1_mode, rtx arg2, machine_mode arg2_mode, rtx arg3, machine_mode arg3_mode, rtx arg4, machine_mode arg4_mode) { rtx_mode_t args[] = { rtx_mode_t (arg1, arg1_mode), rtx_mode_t (arg2, arg2_mode), rtx_mode_t (arg3, arg3_mode), rtx_mode_t (arg4, arg4_mode) }; return emit_library_call_value_1 (1, fun, value, fn_type, outmode, 4, args); } /* In varasm.cc */ extern void init_varasm_once (void); extern rtx make_debug_expr_from_rtl (const_rtx); /* In read-rtl.cc */ #ifdef GENERATOR_FILE extern bool read_rtx (const char *, vec *); #endif /* In alias.cc */ extern rtx canon_rtx (rtx); extern int true_dependence (const_rtx, machine_mode, const_rtx); extern rtx get_addr (rtx); extern int 
canon_true_dependence (const_rtx, machine_mode, rtx, const_rtx, rtx); extern int read_dependence (const_rtx, const_rtx); extern int anti_dependence (const_rtx, const_rtx); extern int canon_anti_dependence (const_rtx, bool, const_rtx, machine_mode, rtx); extern int output_dependence (const_rtx, const_rtx); extern int canon_output_dependence (const_rtx, bool, const_rtx, machine_mode, rtx); extern int may_alias_p (const_rtx, const_rtx); extern void init_alias_target (void); extern void init_alias_analysis (void); extern void end_alias_analysis (void); extern void vt_equate_reg_base_value (const_rtx, const_rtx); extern bool memory_modified_in_insn_p (const_rtx, const_rtx); extern bool may_be_sp_based_p (rtx); extern rtx gen_hard_reg_clobber (machine_mode, unsigned int); extern rtx get_reg_known_value (unsigned int); extern bool get_reg_known_equiv_p (unsigned int); extern rtx get_reg_base_value (unsigned int); extern rtx extract_mem_from_operand (rtx); #ifdef STACK_REGS extern int stack_regs_mentioned (const_rtx insn); #endif /* In toplev.cc */ extern GTY(()) rtx stack_limit_rtx; /* In var-tracking.cc */ extern unsigned int variable_tracking_main (void); extern void delete_vta_debug_insns (bool); /* In stor-layout.cc. */ extern void get_mode_bounds (scalar_int_mode, int, scalar_int_mode, rtx *, rtx *); /* In loop-iv.cc */ extern rtx canon_condition (rtx); extern void simplify_using_condition (rtx, rtx *, bitmap); /* In final.cc */ extern unsigned int compute_alignments (void); extern void update_alignments (vec &); extern int asm_str_count (const char *templ);