#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "tm.h"
#include "rtl.h"
#include "tree.h"
#include "obstack.h"
#include "regs.h"
#include "hard-reg-set.h"
#include "real.h"
#include "insn-config.h"
#include "conditions.h"
#include "output.h"
#include "insn-attr.h"
#include "flags.h"
#include "reload.h"
#include "function.h"
#include "expr.h"
#include "optabs.h"
#include "toplev.h"
#include "recog.h"
#include "ggc.h"
#include "except.h"
#include "c-pragma.h"
#include "integrate.h"
#include "tm_p.h"
#include "target.h"
#include "target-def.h"
#include "debug.h"
#include "langhooks.h"
#include "../../libcpp/internal.h"
#include "params.h"
typedef struct minipool_node Mnode;
typedef struct minipool_fixup Mfix;
const struct attribute_spec arm_attribute_table[];
static arm_stack_offsets *arm_get_frame_offsets (void);
static void arm_add_gc_roots (void);
static int arm_gen_constant (enum rtx_code, enum machine_mode, rtx,
HOST_WIDE_INT, rtx, rtx, int, int);
static unsigned bit_count (unsigned long);
static int arm_address_register_rtx_p (rtx, int);
static int arm_legitimate_index_p (enum machine_mode, rtx, RTX_CODE, int);
static int thumb_base_register_rtx_p (rtx, enum machine_mode, int);
inline static int thumb_index_register_rtx_p (rtx, int);
static int thumb_far_jump_used_p (void);
static bool thumb_force_lr_save (void);
static int const_ok_for_op (HOST_WIDE_INT, enum rtx_code);
static rtx emit_sfm (int, int);
static int arm_size_return_regs (void);
#ifndef AOF_ASSEMBLER
static bool arm_assemble_integer (rtx, unsigned int, int);
#endif
static const char *fp_const_from_val (REAL_VALUE_TYPE *);
static arm_cc get_arm_condition_code (rtx);
static HOST_WIDE_INT int_log2 (HOST_WIDE_INT);
#ifndef ENABLE_LLVM
static rtx is_jump_table (rtx);
#endif
static const char *output_multi_immediate (rtx *, const char *, const char *,
int, HOST_WIDE_INT);
static const char *shift_op (rtx, HOST_WIDE_INT *);
static struct machine_function *arm_init_machine_status (void);
static int handle_thumb_unexpanded_prologue (FILE *, bool);
static int handle_thumb_unexpanded_epilogue (bool);
static int handle_thumb_exit (FILE *, int, bool);
static int handle_thumb_pushpop (FILE *, unsigned long, int, int *, unsigned long, bool);
#ifndef ENABLE_LLVM
static HOST_WIDE_INT get_jump_table_size (rtx);
static Mnode *move_minipool_fix_forward_ref (Mnode *, Mnode *, HOST_WIDE_INT);
static Mnode *add_minipool_forward_ref (Mfix *);
static Mnode *move_minipool_fix_backward_ref (Mnode *, Mnode *, HOST_WIDE_INT);
static Mnode *add_minipool_backward_ref (Mfix *);
static void assign_minipool_offsets (Mfix *);
static void arm_print_value (FILE *, rtx);
static void dump_minipool (rtx);
static int arm_barrier_cost (rtx);
static Mfix *create_fix_barrier (Mfix *, HOST_WIDE_INT);
static void push_minipool_barrier (rtx, HOST_WIDE_INT);
static void push_minipool_fix (rtx, HOST_WIDE_INT, rtx *, enum machine_mode,
rtx);
#endif
static void arm_reorg (void);
#ifndef ENABLE_LLVM
static bool note_invalid_constants (rtx, HOST_WIDE_INT, int);
#endif
static int current_file_function_operand (rtx);
static unsigned long arm_compute_save_reg0_reg12_mask (void);
static unsigned long arm_compute_save_reg_mask (void);
static unsigned long arm_isr_value (tree);
static unsigned long arm_compute_func_type (void);
static tree arm_handle_fndecl_attribute (tree *, tree, tree, int, bool *);
static tree arm_handle_isr_attribute (tree *, tree, tree, int, bool *);
#if TARGET_DLLIMPORT_DECL_ATTRIBUTES
static tree arm_handle_notshared_attribute (tree *, tree, tree, int, bool *);
#endif
static void arm_output_function_epilogue (FILE *, HOST_WIDE_INT);
static void arm_output_function_prologue (FILE *, HOST_WIDE_INT);
static void thumb_output_function_prologue (FILE *, HOST_WIDE_INT);
static int arm_comp_type_attributes (tree, tree);
static void arm_set_default_type_attributes (tree);
static int arm_adjust_cost (rtx, rtx, rtx, int);
static int count_insns_for_constant (HOST_WIDE_INT, int);
static int arm_get_strip_length (int);
static bool arm_function_ok_for_sibcall (tree, tree);
static void arm_internal_label (FILE *, const char *, unsigned long);
static void arm_output_mi_thunk (FILE *, tree, HOST_WIDE_INT, HOST_WIDE_INT,
tree);
static int arm_rtx_costs_1 (rtx, enum rtx_code, enum rtx_code);
static bool arm_size_rtx_costs (rtx, int, int, int *);
static bool arm_slowmul_rtx_costs (rtx, int, int, int *);
static bool arm_fastmul_rtx_costs (rtx, int, int, int *);
static bool arm_xscale_rtx_costs (rtx, int, int, int *);
static bool arm_9e_rtx_costs (rtx, int, int, int *);
static int arm_address_cost (rtx);
#ifndef ENABLE_LLVM
static bool arm_memory_load_p (rtx);
static bool arm_cirrus_insn_p (rtx);
static void cirrus_reorg (rtx);
#endif
static void arm_init_builtins (void);
static rtx arm_expand_builtin (tree, rtx, rtx, enum machine_mode, int);
static void arm_init_iwmmxt_builtins (void);
static rtx safe_vector_operand (rtx, enum machine_mode);
static rtx arm_expand_binop_builtin (enum insn_code, tree, rtx);
static rtx arm_expand_unop_builtin (enum insn_code, tree, rtx, int);
static rtx arm_expand_builtin (tree, rtx, rtx, enum machine_mode, int);
static void emit_constant_insn (rtx cond, rtx pattern);
static rtx emit_set_insn (rtx, rtx);
static int arm_arg_partial_bytes (CUMULATIVE_ARGS *, enum machine_mode,
tree, bool);
#ifdef OBJECT_FORMAT_ELF
static void arm_elf_asm_constructor (rtx, int);
#endif
#if TARGET_MACHO
static void arm_darwin_encode_section_info (tree, rtx, int);
#elif !defined(ARM_PE)
static void arm_encode_section_info (tree, rtx, int);
#endif
static void arm_file_end (void);
#if TARGET_MACHO
static void arm_darwin_file_start (void);
static void arm_darwin_file_end (void);
#endif
#ifdef AOF_ASSEMBLER
static void aof_globalize_label (FILE *, const char *);
static void aof_dump_imports (FILE *);
static void aof_dump_pic_table (FILE *);
static void aof_file_start (void);
static void aof_file_end (void);
static void aof_asm_init_sections (void);
#endif
static void arm_setup_incoming_varargs (CUMULATIVE_ARGS *, enum machine_mode,
tree, int *, int);
static bool arm_pass_by_reference (CUMULATIVE_ARGS *,
enum machine_mode, tree, bool);
static bool arm_promote_prototypes (tree);
static bool arm_default_short_enums (void);
static bool arm_align_anon_bitfield (void);
static bool arm_return_in_msb (tree);
static bool arm_must_pass_in_stack (enum machine_mode, tree);
#ifdef TARGET_UNWIND_INFO
static void arm_unwind_emit (FILE *, rtx);
static bool arm_output_ttype (rtx);
#endif
static tree arm_cxx_guard_type (void);
static bool arm_cxx_guard_mask_bit (void);
static tree arm_get_cookie_size (tree);
static bool arm_cookie_has_size (void);
static bool arm_cxx_cdtor_returns_this (void);
static bool arm_cxx_key_method_may_be_inline (void);
static void arm_cxx_determine_class_data_visibility (tree);
static bool arm_cxx_class_data_always_comdat (void);
static bool arm_cxx_use_aeabi_atexit (void);
static void arm_init_libfuncs (void);
static bool arm_handle_option (size_t, const char *, int);
static unsigned HOST_WIDE_INT arm_shift_truncation_mask (enum machine_mode);
static bool arm_cannot_copy_insn_p (rtx);
static bool arm_tls_symbol_p (rtx x);
static int symbol_mentioned_with_filter (rtx, int);
static bool arm_cannot_force_const_mem (rtx x);
static rtx arm_builtin_setjmp_frame_value (void);
#if TARGET_MACHO
static bool arm_binds_local_p (tree);
#endif
static tree arm_handle_ms_struct_attribute (tree *, tree, tree, int, bool *);
static tree arm_handle_gcc_struct_attribute (tree *, tree, tree, int, bool *);
static bool arm_ms_bitfield_layout_p (tree);
#if TARGET_DLLIMPORT_DECL_ATTRIBUTES
#undef TARGET_MERGE_DECL_ATTRIBUTES
#define TARGET_MERGE_DECL_ATTRIBUTES merge_dllimport_decl_attributes
#endif
#undef TARGET_ATTRIBUTE_TABLE
#define TARGET_ATTRIBUTE_TABLE arm_attribute_table
#undef TARGET_ASM_FILE_END
#define TARGET_ASM_FILE_END arm_file_end
#if TARGET_MACHO
#undef TARGET_ASM_FILE_START
#define TARGET_ASM_FILE_START arm_darwin_file_start
#undef TARGET_ASM_FILE_END
#define TARGET_ASM_FILE_END arm_darwin_file_end
#endif
#ifdef AOF_ASSEMBLER
#undef TARGET_ASM_BYTE_OP
#define TARGET_ASM_BYTE_OP "\tDCB\t"
#undef TARGET_ASM_ALIGNED_HI_OP
#define TARGET_ASM_ALIGNED_HI_OP "\tDCW\t"
#undef TARGET_ASM_ALIGNED_SI_OP
#define TARGET_ASM_ALIGNED_SI_OP "\tDCD\t"
#undef TARGET_ASM_GLOBALIZE_LABEL
#define TARGET_ASM_GLOBALIZE_LABEL aof_globalize_label
#undef TARGET_ASM_FILE_START
#define TARGET_ASM_FILE_START aof_file_start
#undef TARGET_ASM_FILE_END
#define TARGET_ASM_FILE_END aof_file_end
#else
#undef TARGET_ASM_ALIGNED_SI_OP
#define TARGET_ASM_ALIGNED_SI_OP NULL
#undef TARGET_ASM_INTEGER
#define TARGET_ASM_INTEGER arm_assemble_integer
#endif
#ifdef OBJECT_FORMAT_MACHO
#undef TARGET_ASM_ALIGNED_SI_OP
#define TARGET_ASM_ALIGNED_SI_OP "\t.long\t"
#endif
#undef TARGET_ASM_FUNCTION_PROLOGUE
#define TARGET_ASM_FUNCTION_PROLOGUE arm_output_function_prologue
#undef TARGET_ASM_FUNCTION_EPILOGUE
#define TARGET_ASM_FUNCTION_EPILOGUE arm_output_function_epilogue
#undef TARGET_DEFAULT_TARGET_FLAGS
#define TARGET_DEFAULT_TARGET_FLAGS (TARGET_DEFAULT | MASK_SCHED_PROLOG)
#undef TARGET_HANDLE_OPTION
#define TARGET_HANDLE_OPTION arm_handle_option
#undef TARGET_COMP_TYPE_ATTRIBUTES
#define TARGET_COMP_TYPE_ATTRIBUTES arm_comp_type_attributes
#undef TARGET_SET_DEFAULT_TYPE_ATTRIBUTES
#define TARGET_SET_DEFAULT_TYPE_ATTRIBUTES arm_set_default_type_attributes
#undef TARGET_SCHED_ADJUST_COST
#define TARGET_SCHED_ADJUST_COST arm_adjust_cost
#undef TARGET_ENCODE_SECTION_INFO
#ifdef ARM_PE
#define TARGET_ENCODE_SECTION_INFO arm_pe_encode_section_info
#elif TARGET_MACHO
#define TARGET_ENCODE_SECTION_INFO arm_darwin_encode_section_info
#else
#define TARGET_ENCODE_SECTION_INFO arm_encode_section_info
#endif
#undef TARGET_STRIP_NAME_ENCODING
#define TARGET_STRIP_NAME_ENCODING arm_strip_name_encoding
#undef TARGET_ASM_INTERNAL_LABEL
#define TARGET_ASM_INTERNAL_LABEL arm_internal_label
#undef TARGET_FUNCTION_OK_FOR_SIBCALL
#define TARGET_FUNCTION_OK_FOR_SIBCALL arm_function_ok_for_sibcall
#undef TARGET_ASM_OUTPUT_MI_THUNK
#define TARGET_ASM_OUTPUT_MI_THUNK arm_output_mi_thunk
#undef TARGET_ASM_CAN_OUTPUT_MI_THUNK
#define TARGET_ASM_CAN_OUTPUT_MI_THUNK default_can_output_mi_thunk_no_vcall
#undef TARGET_RTX_COSTS
#define TARGET_RTX_COSTS arm_slowmul_rtx_costs
#undef TARGET_ADDRESS_COST
#define TARGET_ADDRESS_COST arm_address_cost
#undef TARGET_SHIFT_TRUNCATION_MASK
#define TARGET_SHIFT_TRUNCATION_MASK arm_shift_truncation_mask
#undef TARGET_VECTOR_MODE_SUPPORTED_P
#define TARGET_VECTOR_MODE_SUPPORTED_P arm_vector_mode_supported_p
#undef TARGET_MACHINE_DEPENDENT_REORG
#define TARGET_MACHINE_DEPENDENT_REORG arm_reorg
#undef TARGET_INIT_BUILTINS
#define TARGET_INIT_BUILTINS arm_init_builtins
#undef TARGET_EXPAND_BUILTIN
#define TARGET_EXPAND_BUILTIN arm_expand_builtin
#undef TARGET_INIT_LIBFUNCS
#define TARGET_INIT_LIBFUNCS arm_init_libfuncs
#undef TARGET_PROMOTE_FUNCTION_ARGS
#define TARGET_PROMOTE_FUNCTION_ARGS hook_bool_tree_true
#undef TARGET_PROMOTE_FUNCTION_RETURN
#define TARGET_PROMOTE_FUNCTION_RETURN hook_bool_tree_true
#undef TARGET_PROMOTE_PROTOTYPES
#define TARGET_PROMOTE_PROTOTYPES arm_promote_prototypes
#undef TARGET_PASS_BY_REFERENCE
#define TARGET_PASS_BY_REFERENCE arm_pass_by_reference
#undef TARGET_ARG_PARTIAL_BYTES
#define TARGET_ARG_PARTIAL_BYTES arm_arg_partial_bytes
#undef TARGET_SETUP_INCOMING_VARARGS
#define TARGET_SETUP_INCOMING_VARARGS arm_setup_incoming_varargs
#undef TARGET_DEFAULT_SHORT_ENUMS
#define TARGET_DEFAULT_SHORT_ENUMS arm_default_short_enums
#undef TARGET_ALIGN_ANON_BITFIELD
#define TARGET_ALIGN_ANON_BITFIELD arm_align_anon_bitfield
#undef TARGET_NARROW_VOLATILE_BITFIELD
#define TARGET_NARROW_VOLATILE_BITFIELD hook_bool_void_false
#undef TARGET_CXX_GUARD_TYPE
#define TARGET_CXX_GUARD_TYPE arm_cxx_guard_type
#undef TARGET_CXX_GUARD_MASK_BIT
#define TARGET_CXX_GUARD_MASK_BIT arm_cxx_guard_mask_bit
#undef TARGET_CXX_GET_COOKIE_SIZE
#define TARGET_CXX_GET_COOKIE_SIZE arm_get_cookie_size
#undef TARGET_CXX_COOKIE_HAS_SIZE
#define TARGET_CXX_COOKIE_HAS_SIZE arm_cookie_has_size
#undef TARGET_CXX_CDTOR_RETURNS_THIS
#define TARGET_CXX_CDTOR_RETURNS_THIS arm_cxx_cdtor_returns_this
#undef TARGET_CXX_KEY_METHOD_MAY_BE_INLINE
#define TARGET_CXX_KEY_METHOD_MAY_BE_INLINE arm_cxx_key_method_may_be_inline
#undef TARGET_CXX_USE_AEABI_ATEXIT
#define TARGET_CXX_USE_AEABI_ATEXIT arm_cxx_use_aeabi_atexit
#undef TARGET_CXX_DETERMINE_CLASS_DATA_VISIBILITY
#define TARGET_CXX_DETERMINE_CLASS_DATA_VISIBILITY \
arm_cxx_determine_class_data_visibility
#undef TARGET_CXX_CLASS_DATA_ALWAYS_COMDAT
#define TARGET_CXX_CLASS_DATA_ALWAYS_COMDAT arm_cxx_class_data_always_comdat
#undef TARGET_RETURN_IN_MSB
#define TARGET_RETURN_IN_MSB arm_return_in_msb
#undef TARGET_MUST_PASS_IN_STACK
#define TARGET_MUST_PASS_IN_STACK arm_must_pass_in_stack
#ifdef TARGET_UNWIND_INFO
#undef TARGET_UNWIND_EMIT
#define TARGET_UNWIND_EMIT arm_unwind_emit
#undef TARGET_ASM_TTYPE
#define TARGET_ASM_TTYPE arm_output_ttype
#undef TARGET_ARM_EABI_UNWINDER
#define TARGET_ARM_EABI_UNWINDER true
#endif
#undef TARGET_CANNOT_COPY_INSN_P
#define TARGET_CANNOT_COPY_INSN_P arm_cannot_copy_insn_p
#ifdef HAVE_AS_TLS
#undef TARGET_HAVE_TLS
#define TARGET_HAVE_TLS true
#endif
#undef TARGET_CANNOT_FORCE_CONST_MEM
#define TARGET_CANNOT_FORCE_CONST_MEM arm_cannot_force_const_mem
#if TARGET_MACHO
#undef TARGET_BINDS_LOCAL_P
#define TARGET_BINDS_LOCAL_P arm_binds_local_p
#endif
#ifndef ENABLE_LLVM
static HOST_WIDE_INT get_label_pad (rtx, HOST_WIDE_INT);
#endif
#undef TARGET_BUILTIN_SETJMP_FRAME_VALUE
#define TARGET_BUILTIN_SETJMP_FRAME_VALUE arm_builtin_setjmp_frame_value
#undef TARGET_MS_BITFIELD_LAYOUT_P
#define TARGET_MS_BITFIELD_LAYOUT_P arm_ms_bitfield_layout_p
struct gcc_target targetm = TARGET_INITIALIZER;
static struct obstack minipool_obstack;
static char * minipool_startobj;
static int max_insns_skipped = 5;
extern FILE * asm_out_file;
int making_const_table;
rtx arm_compare_op0, arm_compare_op1;
enum processor_type arm_tune = arm_none;
enum arm_fp_model arm_fp_model;
enum fputype arm_fpu_arch;
enum fputype arm_fpu_tune;
enum float_abi_type arm_float_abi;
enum arm_abi_type arm_abi;
enum arm_tp_type target_thread_pointer = TP_AUTO;
int arm_structure_size_boundary = DEFAULT_STRUCTURE_SIZE_BOUNDARY;
rtx thumb_call_via_label[14];
static int thumb_call_reg_needed;
static GTY(()) rtx switch8_libfunc;
static GTY(()) rtx switchu8_libfunc;
static GTY(()) rtx switch16_libfunc;
static GTY(()) rtx switch32_libfunc;
#define FL_CO_PROC (1 << 0)
#define FL_ARCH3M (1 << 1)
#define FL_MODE26 (1 << 2)
#define FL_MODE32 (1 << 3)
#define FL_ARCH4 (1 << 4)
#define FL_ARCH5 (1 << 5)
#define FL_THUMB (1 << 6)
#define FL_LDSCHED (1 << 7)
#define FL_STRONG (1 << 8)
#define FL_ARCH5E (1 << 9)
#define FL_XSCALE (1 << 10)
#define FL_CIRRUS (1 << 11)
#define FL_ARCH6 (1 << 12)
#define FL_VFPV2 (1 << 13)
#define FL_WBUF (1 << 14)
#define FL_ARCH6K (1 << 15)
#define FL_IWMMXT (1 << 29)
#define FL_FOR_ARCH2 0
#define FL_FOR_ARCH3 FL_MODE32
#define FL_FOR_ARCH3M (FL_FOR_ARCH3 | FL_ARCH3M)
#define FL_FOR_ARCH4 (FL_FOR_ARCH3M | FL_ARCH4)
#define FL_FOR_ARCH4T (FL_FOR_ARCH4 | FL_THUMB)
#define FL_FOR_ARCH5 (FL_FOR_ARCH4 | FL_ARCH5)
#define FL_FOR_ARCH5T (FL_FOR_ARCH5 | FL_THUMB)
#define FL_FOR_ARCH5E (FL_FOR_ARCH5 | FL_ARCH5E)
#define FL_FOR_ARCH5TE (FL_FOR_ARCH5E | FL_THUMB)
#define FL_FOR_ARCH5TEJ FL_FOR_ARCH5TE
#define FL_FOR_ARCH6 (FL_FOR_ARCH5TE | FL_ARCH6)
#define FL_FOR_ARCH6J FL_FOR_ARCH6
#define FL_FOR_ARCH6K (FL_FOR_ARCH6 | FL_ARCH6K)
#define FL_FOR_ARCH6Z FL_FOR_ARCH6
#define FL_FOR_ARCH6ZK FL_FOR_ARCH6K
static unsigned long insn_flags = 0;
static unsigned long tune_flags = 0;
int arm_arch3m = 0;
int arm_arch4 = 0;
int arm_arch4t = 0;
int arm_arch5 = 0;
int arm_arch5e = 0;
int arm_arch6 = 0;
int arm_arch6k = 0;
int arm_ld_sched = 0;
int arm_tune_strongarm = 0;
int arm_arch_cirrus = 0;
int arm_arch_iwmmxt = 0;
int arm_arch_xscale = 0;
int arm_tune_xscale = 0;
int arm_tune_wbuf = 0;
int thumb_code = 0;
int arm_cpp_interwork = 0;
enum machine_mode output_memory_reference_mode;
unsigned arm_pic_register = INVALID_REGNUM;
int return_used_this_function;
static int after_arm_reorg = 0;
static int arm_constant_limit = 3;
int arm_ccfsm_state;
enum arm_cond_code arm_current_cc;
rtx arm_target_insn;
int arm_target_label;
/* Assembler mnemonics for the ARM condition-code suffixes, indexed by
   enum arm_cond_code (cf. arm_current_cc above).  */
static const char * const arm_condition_codes[] =
{
"eq", "ne", "cs", "cc", "mi", "pl", "vs", "vc",
"hi", "ls", "ge", "lt", "gt", "le", "al", "nv"
};
#define streq(string1, string2) (strcmp (string1, string2) == 0)
struct processors
{
const char *const name;
enum processor_type core;
const char *arch;
const unsigned long flags;
bool (* rtx_costs) (rtx, int, int, int *);
};
static const struct processors all_cores[] =
{
#define ARM_CORE(NAME, IDENT, ARCH, FLAGS, COSTS) \
{NAME, arm_none, #ARCH, FLAGS | FL_FOR_ARCH##ARCH, arm_##COSTS##_rtx_costs},
#include "arm-cores.def"
#undef ARM_CORE
{NULL, arm_none, NULL, 0, NULL}
};
static const struct processors all_architectures[] =
{
{"armv2", arm2, "2", FL_CO_PROC | FL_MODE26 | FL_FOR_ARCH2, NULL},
{"armv2a", arm2, "2", FL_CO_PROC | FL_MODE26 | FL_FOR_ARCH2, NULL},
{"armv3", arm6, "3", FL_CO_PROC | FL_MODE26 | FL_FOR_ARCH3, NULL},
{"armv3m", arm7m, "3M", FL_CO_PROC | FL_MODE26 | FL_FOR_ARCH3M, NULL},
{"armv4", arm7tdmi, "4", FL_CO_PROC | FL_MODE26 | FL_FOR_ARCH4, NULL},
#if TARGET_MACHO
{"armv4t", arm7tdmi, "4T", FL_CO_PROC | FL_FOR_ARCH4, NULL},
#else
{"armv4t", arm7tdmi, "4T", FL_CO_PROC | FL_FOR_ARCH4T, NULL},
#endif
{"armv5", arm10tdmi, "5", FL_CO_PROC | FL_FOR_ARCH5, NULL},
{"armv5t", arm10tdmi, "5T", FL_CO_PROC | FL_FOR_ARCH5T, NULL},
{"armv5e", arm1026ejs, "5E", FL_CO_PROC | FL_FOR_ARCH5E, NULL},
{"armv5te", arm1026ejs, "5TE", FL_CO_PROC | FL_FOR_ARCH5TE, NULL},
#if TARGET_MACHO
{"armv5tej",arm926ejs, "5TEJ",FL_CO_PROC | FL_FOR_ARCH5TEJ, NULL},
{"xscale", xscale, "5TE", FL_CO_PROC | FL_XSCALE | FL_FOR_ARCH5TE, NULL},
{"armv6", arm1136jfs, "6", FL_CO_PROC | FL_FOR_ARCH6, NULL},
{"armv6j", arm1136js, "6J", FL_CO_PROC | FL_FOR_ARCH6J, NULL},
{"armv6k", arm1136jfs, "6K", FL_CO_PROC | FL_FOR_ARCH6K, NULL},
#else
{"armv6", arm1136js, "6", FL_CO_PROC | FL_FOR_ARCH6, NULL},
{"armv6j", arm1136js, "6J", FL_CO_PROC | FL_FOR_ARCH6J, NULL},
{"armv6k", mpcore, "6K", FL_CO_PROC | FL_FOR_ARCH6K, NULL},
#endif
{"armv6z", arm1176jzs, "6Z", FL_CO_PROC | FL_FOR_ARCH6Z, NULL},
{"armv6zk", arm1176jzs, "6ZK", FL_CO_PROC | FL_FOR_ARCH6ZK, NULL},
{"ep9312", ep9312, "4T", FL_LDSCHED | FL_CIRRUS | FL_FOR_ARCH4, NULL},
{"iwmmxt", iwmmxt, "5TE", FL_LDSCHED | FL_STRONG | FL_FOR_ARCH5TE | FL_XSCALE | FL_IWMMXT , NULL},
{NULL, arm_none, NULL, 0 , NULL}
};
struct arm_cpu_select
{
const char * string;
const char * name;
const struct processors * processors;
};
static struct arm_cpu_select arm_select[] =
{
{ NULL, "-mcpu=", all_cores },
{ NULL, "-march=", all_architectures },
{ NULL, "-mtune=", all_cores }
};
#define ARM_OPT_SET_CPU 0
#define ARM_OPT_SET_ARCH 1
#define ARM_OPT_SET_TUNE 2
char arm_arch_name[] = "__ARM_ARCH_0UNK__";
struct fpu_desc
{
const char * name;
enum fputype fpu;
};
static const struct fpu_desc all_fpus[] =
{
{"fpa", FPUTYPE_FPA},
{"fpe2", FPUTYPE_FPA_EMU2},
{"fpe3", FPUTYPE_FPA_EMU2},
{"maverick", FPUTYPE_MAVERICK},
{"vfp", FPUTYPE_VFP}
};
static const enum fputype fp_model_for_fpu[] =
{
ARM_FP_MODEL_UNKNOWN,
ARM_FP_MODEL_FPA,
ARM_FP_MODEL_FPA,
ARM_FP_MODEL_FPA,
ARM_FP_MODEL_MAVERICK,
ARM_FP_MODEL_VFP
};
struct float_abi
{
const char * name;
enum float_abi_type abi_type;
};
static const struct float_abi all_float_abis[] =
{
{"soft", ARM_FLOAT_ABI_SOFT},
{"softfp", ARM_FLOAT_ABI_SOFTFP},
{"hard", ARM_FLOAT_ABI_HARD}
};
struct abi_name
{
const char *name;
enum arm_abi_type abi_type;
};
static const struct abi_name arm_all_abis[] =
{
{"apcs-gnu", ARM_ABI_APCS},
{"atpcs", ARM_ABI_ATPCS},
{"aapcs", ARM_ABI_AAPCS},
{"iwmmxt", ARM_ABI_IWMMXT},
{"aapcs-linux", ARM_ABI_AAPCS_LINUX}
};
enum tls_reloc {
TLS_GD32,
TLS_LDM32,
TLS_LDO32,
TLS_IE32,
TLS_LE32
};
/* Emit an insn setting X to Y and return the insn that was emitted.  */
inline static rtx
emit_set_insn (rtx x, rtx y)
{
  rtx assignment = gen_rtx_SET (VOIDmode, x, y);

  return emit_insn (assignment);
}
/* Return the number of bits set in VALUE.  (The counter was previously
   an unsigned long silently narrowed by the return; it now matches the
   return type.)  */
static unsigned
bit_count (unsigned long value)
{
  unsigned count = 0;

  while (value)
    {
      count++;
      /* Clear the least-significant set bit.  */
      value &= value - 1;
    }

  return count;
}
/* Return a mask with bits START through END (inclusive) set.
   Previously computed as ((1 << start) - 1) ^ ((1 << (end + 1)) - 1),
   which invokes undefined behavior (shift by the full word width) when
   END is the top bit of unsigned long; that edge is now handled
   explicitly.  */
static unsigned long
inclusive_bitmask (int start, int end)
{
  unsigned long below_start = ((unsigned long) 1 << start) - 1;
  unsigned long through_end;

  if (end + 1 >= (int) (8 * sizeof (unsigned long)))
    /* All bits from 0 through END are set.  */
    through_end = ~(unsigned long) 0;
  else
    through_end = ((unsigned long) 1 << (end + 1)) - 1;

  return below_start ^ through_end;
}
/* Create, once, the SYMBOL_REF for the "__switch8" runtime helper and,
   when emitting PIC or Mach-O dynamic-no-PIC code, validate its stub /
   non-lazy pointer so it can be referenced indirectly.  Compiles to a
   no-op on non-Mach-O targets.  */
void register_switch8_libfunc (void)
{
#if TARGET_MACHO
if (switch8_libfunc == NULL)
switch8_libfunc = gen_rtx_SYMBOL_REF (Pmode,
ggc_alloc_string ("__switch8", sizeof ("__switch8")));
if (flag_pic || MACHO_DYNAMIC_NO_PIC_P)
machopic_validate_stub_or_non_lazy_ptr
(machopic_indirection_name (switch8_libfunc, true));
#endif
}
/* Create, once, the SYMBOL_REF for the "__switchu8" runtime helper and,
   when emitting PIC or Mach-O dynamic-no-PIC code, validate its stub /
   non-lazy pointer.  Compiles to a no-op on non-Mach-O targets.  */
void register_switchu8_libfunc (void)
{
#if TARGET_MACHO
if (switchu8_libfunc == NULL)
switchu8_libfunc = gen_rtx_SYMBOL_REF (Pmode,
ggc_alloc_string ("__switchu8", sizeof ("__switchu8")));
if (flag_pic || MACHO_DYNAMIC_NO_PIC_P)
machopic_validate_stub_or_non_lazy_ptr
(machopic_indirection_name (switchu8_libfunc, true));
#endif
}
/* Create, once, the SYMBOL_REF for the "__switch16" runtime helper and,
   when emitting PIC or Mach-O dynamic-no-PIC code, validate its stub /
   non-lazy pointer.  Compiles to a no-op on non-Mach-O targets.  */
void register_switch16_libfunc (void)
{
#if TARGET_MACHO
if (switch16_libfunc == NULL)
switch16_libfunc = gen_rtx_SYMBOL_REF (Pmode,
ggc_alloc_string ("__switch16", sizeof ("__switch16")));
if (flag_pic || MACHO_DYNAMIC_NO_PIC_P)
machopic_validate_stub_or_non_lazy_ptr
(machopic_indirection_name (switch16_libfunc, true));
#endif
}
/* Create, once, the SYMBOL_REF for the "__switch32" runtime helper and,
   when emitting PIC or Mach-O dynamic-no-PIC code, validate its stub /
   non-lazy pointer.  Compiles to a no-op on non-Mach-O targets.  */
void register_switch32_libfunc (void)
{
#if TARGET_MACHO
if (switch32_libfunc == NULL)
switch32_libfunc = gen_rtx_SYMBOL_REF (Pmode,
ggc_alloc_string ("__switch32", sizeof ("__switch32")));
if (flag_pic || MACHO_DYNAMIC_NO_PIC_P)
machopic_validate_stub_or_non_lazy_ptr
(machopic_indirection_name (switch32_libfunc, true));
#endif
}
/* Implement TARGET_INIT_LIBFUNCS: install target-specific library call
   names.  Two configurations are handled; every other target keeps the
   compiler's default libfuncs.  */
static void
arm_init_libfuncs (void)
{
/* Darwin Thumb with hardware floating point, in PIC or dynamic-no-PIC
   mode: use the *vfp libcall names.  */
if (TARGET_MACHO && TARGET_THUMB && !TARGET_SOFT_FLOAT
&& (flag_pic || MACHO_DYNAMIC_NO_PIC_P))
{
/* Double-precision arithmetic and comparisons.  */
set_optab_libfunc (add_optab, DFmode, "__adddf3vfp");
set_optab_libfunc (sdiv_optab, DFmode, "__divdf3vfp");
set_optab_libfunc (smul_optab, DFmode, "__muldf3vfp");
/* NOTE(review): a NULL name appears to clear the libcall entry --
   confirm against set_optab_libfunc.  */
set_optab_libfunc (neg_optab, DFmode, NULL);
set_optab_libfunc (sub_optab, DFmode, "__subdf3vfp");
set_optab_libfunc (eq_optab, DFmode, "__eqdf2vfp");
set_optab_libfunc (ne_optab, DFmode, "__nedf2vfp");
set_optab_libfunc (lt_optab, DFmode, "__ltdf2vfp");
set_optab_libfunc (le_optab, DFmode, "__ledf2vfp");
set_optab_libfunc (ge_optab, DFmode, "__gedf2vfp");
set_optab_libfunc (gt_optab, DFmode, "__gtdf2vfp");
set_optab_libfunc (unord_optab, DFmode, "__unorddf2vfp");
/* Single-precision arithmetic and comparisons.  */
set_optab_libfunc (add_optab, SFmode, "__addsf3vfp");
set_optab_libfunc (sdiv_optab, SFmode, "__divsf3vfp");
set_optab_libfunc (smul_optab, SFmode, "__mulsf3vfp");
set_optab_libfunc (neg_optab, SFmode, NULL);
set_optab_libfunc (sub_optab, SFmode, "__subsf3vfp");
set_optab_libfunc (eq_optab, SFmode, "__eqsf2vfp");
set_optab_libfunc (ne_optab, SFmode, "__nesf2vfp");
set_optab_libfunc (lt_optab, SFmode, "__ltsf2vfp");
set_optab_libfunc (le_optab, SFmode, "__lesf2vfp");
set_optab_libfunc (ge_optab, SFmode, "__gesf2vfp");
set_optab_libfunc (gt_optab, SFmode, "__gtsf2vfp");
set_optab_libfunc (unord_optab, SFmode, "__unordsf2vfp");
/* Conversions between floating point and integer, and between the
   two floating modes.  */
set_conv_libfunc (sfix_optab, SImode, DFmode, "__fixdfsivfp");
set_conv_libfunc (ufix_optab, SImode, DFmode, "__fixunsdfsivfp");
set_conv_libfunc (sfix_optab, SImode, SFmode, "__fixsfsivfp");
set_conv_libfunc (ufix_optab, SImode, SFmode, "__fixunssfsivfp");
set_conv_libfunc (trunc_optab, SFmode, DFmode, "__truncdfsf2vfp");
set_conv_libfunc (sext_optab, DFmode, SFmode, "__extendsfdf2vfp");
set_conv_libfunc (sfloat_optab, DFmode, SImode, "__floatsidfvfp");
set_conv_libfunc (ufloat_optab, DFmode, SImode, "__floatunssidfvfp");
set_conv_libfunc (sfloat_optab, SFmode, SImode, "__floatsisfvfp");
set_conv_libfunc (ufloat_optab, SFmode, SImode, "__floatunssisfvfp");
return;
}
/* Everything below applies only to BPABI (EABI) targets: install the
   __aeabi_* run-time helper names.  */
if (!TARGET_BPABI)
return;
/* Double-precision floating-point arithmetic and comparisons.  */
set_optab_libfunc (add_optab, DFmode, "__aeabi_dadd");
set_optab_libfunc (sdiv_optab, DFmode, "__aeabi_ddiv");
set_optab_libfunc (smul_optab, DFmode, "__aeabi_dmul");
set_optab_libfunc (neg_optab, DFmode, "__aeabi_dneg");
set_optab_libfunc (sub_optab, DFmode, "__aeabi_dsub");
set_optab_libfunc (eq_optab, DFmode, "__aeabi_dcmpeq");
set_optab_libfunc (ne_optab, DFmode, NULL);
set_optab_libfunc (lt_optab, DFmode, "__aeabi_dcmplt");
set_optab_libfunc (le_optab, DFmode, "__aeabi_dcmple");
set_optab_libfunc (ge_optab, DFmode, "__aeabi_dcmpge");
set_optab_libfunc (gt_optab, DFmode, "__aeabi_dcmpgt");
set_optab_libfunc (unord_optab, DFmode, "__aeabi_dcmpun");
/* Single-precision floating-point arithmetic and comparisons.  */
set_optab_libfunc (add_optab, SFmode, "__aeabi_fadd");
set_optab_libfunc (sdiv_optab, SFmode, "__aeabi_fdiv");
set_optab_libfunc (smul_optab, SFmode, "__aeabi_fmul");
set_optab_libfunc (neg_optab, SFmode, "__aeabi_fneg");
set_optab_libfunc (sub_optab, SFmode, "__aeabi_fsub");
set_optab_libfunc (eq_optab, SFmode, "__aeabi_fcmpeq");
set_optab_libfunc (ne_optab, SFmode, NULL);
set_optab_libfunc (lt_optab, SFmode, "__aeabi_fcmplt");
set_optab_libfunc (le_optab, SFmode, "__aeabi_fcmple");
set_optab_libfunc (ge_optab, SFmode, "__aeabi_fcmpge");
set_optab_libfunc (gt_optab, SFmode, "__aeabi_fcmpgt");
set_optab_libfunc (unord_optab, SFmode, "__aeabi_fcmpun");
/* Floating-point to integer conversions.  */
set_conv_libfunc (sfix_optab, SImode, DFmode, "__aeabi_d2iz");
set_conv_libfunc (ufix_optab, SImode, DFmode, "__aeabi_d2uiz");
set_conv_libfunc (sfix_optab, DImode, DFmode, "__aeabi_d2lz");
set_conv_libfunc (ufix_optab, DImode, DFmode, "__aeabi_d2ulz");
set_conv_libfunc (sfix_optab, SImode, SFmode, "__aeabi_f2iz");
set_conv_libfunc (ufix_optab, SImode, SFmode, "__aeabi_f2uiz");
set_conv_libfunc (sfix_optab, DImode, SFmode, "__aeabi_f2lz");
set_conv_libfunc (ufix_optab, DImode, SFmode, "__aeabi_f2ulz");
/* Conversions between floating types.  */
set_conv_libfunc (trunc_optab, SFmode, DFmode, "__aeabi_d2f");
set_conv_libfunc (sext_optab, DFmode, SFmode, "__aeabi_f2d");
/* Integer to floating-point conversions.  */
set_conv_libfunc (sfloat_optab, DFmode, SImode, "__aeabi_i2d");
set_conv_libfunc (ufloat_optab, DFmode, SImode, "__aeabi_ui2d");
set_conv_libfunc (sfloat_optab, DFmode, DImode, "__aeabi_l2d");
set_conv_libfunc (ufloat_optab, DFmode, DImode, "__aeabi_ul2d");
set_conv_libfunc (sfloat_optab, SFmode, SImode, "__aeabi_i2f");
set_conv_libfunc (ufloat_optab, SFmode, SImode, "__aeabi_ui2f");
set_conv_libfunc (sfloat_optab, SFmode, DImode, "__aeabi_l2f");
set_conv_libfunc (ufloat_optab, SFmode, DImode, "__aeabi_ul2f");
/* Long long (DImode) helper functions.  */
set_optab_libfunc (smul_optab, DImode, "__aeabi_lmul");
set_optab_libfunc (sdivmod_optab, DImode, "__aeabi_ldivmod");
set_optab_libfunc (udivmod_optab, DImode, "__aeabi_uldivmod");
set_optab_libfunc (ashl_optab, DImode, "__aeabi_llsl");
set_optab_libfunc (lshr_optab, DImode, "__aeabi_llsr");
set_optab_libfunc (ashr_optab, DImode, "__aeabi_lasr");
set_optab_libfunc (cmp_optab, DImode, "__aeabi_lcmp");
set_optab_libfunc (ucmp_optab, DImode, "__aeabi_ulcmp");
/* Integer division and modulus.  The plain division optabs are pointed
   at the AEABI divmod helpers; the modulus optabs are set to NULL.  */
set_optab_libfunc (sdivmod_optab, SImode, "__aeabi_idivmod");
set_optab_libfunc (udivmod_optab, SImode, "__aeabi_uidivmod");
set_optab_libfunc (sdiv_optab, DImode, "__aeabi_ldivmod");
set_optab_libfunc (udiv_optab, DImode, "__aeabi_uldivmod");
set_optab_libfunc (sdiv_optab, SImode, "__aeabi_idiv");
set_optab_libfunc (udiv_optab, SImode, "__aeabi_uidiv");
set_optab_libfunc (smod_optab, DImode, NULL);
set_optab_libfunc (umod_optab, DImode, NULL);
set_optab_libfunc (smod_optab, SImode, NULL);
set_optab_libfunc (umod_optab, SImode, NULL);
}
/* Implement TARGET_HANDLE_OPTION.  Record the argument of the
   -mcpu=/-march=/-mtune= selectors in arm_select, and translate the
   float-ABI shorthand options into target_float_abi_name.  Every
   option, recognized here or not, is reported as handled.  */
static bool
arm_handle_option (size_t code, const char *arg, int value ATTRIBUTE_UNUSED)
{
  if (code == OPT_mcpu_)
    arm_select[0].string = arg;
  else if (code == OPT_march_)
    arm_select[1].string = arg;
  else if (code == OPT_mtune_)
    arm_select[2].string = arg;
  else if (code == OPT_mhard_float)
    target_float_abi_name = "hard";
  else if (code == OPT_msoft_float)
    target_float_abi_name = "soft";

  return true;
}
void
arm_override_options (void)
{
unsigned i;
enum processor_type target_arch_cpu = arm_none;
for (i = ARRAY_SIZE (arm_select); i--;)
{
struct arm_cpu_select * ptr = arm_select + i;
if (ptr->string != NULL && ptr->string[0] != '\0')
{
const struct processors * sel;
for (sel = ptr->processors; sel->name != NULL; sel++)
if (streq (ptr->string, sel->name))
{
if (i != ARM_OPT_SET_TUNE)
sprintf (arm_arch_name, "__ARM_ARCH_%s__", sel->arch);
if (
i == ARM_OPT_SET_CPU
|| i == ARM_OPT_SET_TUNE)
arm_tune = (enum processor_type) (sel - ptr->processors);
if (i == ARM_OPT_SET_ARCH)
target_arch_cpu = sel->core;
if (i != ARM_OPT_SET_TUNE)
{
#if !TARGET_MACHO
if (insn_flags != 0 && (insn_flags ^ sel->flags))
warning (0, "switch -mcpu=%s conflicts with -march= switch",
ptr->string);
#else
#endif
insn_flags = sel->flags;
}
break;
}
if (sel->name == NULL)
error ("bad value (%s) for %s switch", ptr->string, ptr->name);
}
}
if (arm_tune == arm_none)
arm_tune = target_arch_cpu;
if (insn_flags == 0)
{
const struct processors * sel;
unsigned int sought;
enum processor_type cpu;
cpu = TARGET_CPU_DEFAULT;
if (cpu == arm_none)
{
#ifdef SUBTARGET_CPU_DEFAULT
cpu = SUBTARGET_CPU_DEFAULT;
#endif
if (cpu == arm_none)
cpu = arm6;
}
sel = &all_cores[cpu];
insn_flags = sel->flags;
sought = 0;
if (TARGET_INTERWORK || TARGET_THUMB)
{
sought |= (FL_THUMB | FL_MODE32);
insn_flags &= ~FL_MODE26;
}
if (sought != 0 && ((sought & insn_flags) != sought))
{
for (sel = all_cores; sel->name != NULL; sel++)
if ((sel->flags & sought) == (sought | insn_flags))
break;
if (sel->name == NULL)
{
unsigned current_bit_count = 0;
const struct processors * best_fit = NULL;
for (sel = all_cores; sel->name != NULL; sel++)
if ((sel->flags & sought) == sought)
{
unsigned count;
count = bit_count (sel->flags & insn_flags);
if (count >= current_bit_count)
{
best_fit = sel;
current_bit_count = count;
}
}
gcc_assert (best_fit);
sel = best_fit;
}
insn_flags = sel->flags;
}
sprintf (arm_arch_name, "__ARM_ARCH_%s__", sel->arch);
if (arm_tune == arm_none)
arm_tune = (enum processor_type) (sel - all_cores);
}
gcc_assert (arm_tune != arm_none);
tune_flags = all_cores[(int)arm_tune].flags;
if (optimize_size)
targetm.rtx_costs = arm_size_rtx_costs;
else
targetm.rtx_costs = all_cores[(int)arm_tune].rtx_costs;
if (TARGET_INTERWORK && !(insn_flags & FL_THUMB))
{
if (! TARGET_MACHO)
warning (0, "target CPU does not support interworking" );
interwork_option = 0;
}
if (TARGET_THUMB && !(insn_flags & FL_THUMB))
{
warning (0, "target CPU does not support THUMB instructions");
target_flags &= ~MASK_THUMB;
}
if (TARGET_APCS_FRAME && TARGET_THUMB)
{
target_flags &= ~MASK_APCS_FRAME;
}
if (TARGET_THUMB && TARGET_CALLEE_INTERWORKING)
interwork_option = 1;
if ((TARGET_TPCS_FRAME || TARGET_TPCS_LEAF_FRAME) && TARGET_ARM)
warning (0, "enabling backtrace support is only meaningful when compiling for the Thumb");
if (TARGET_ARM && TARGET_CALLEE_INTERWORKING)
warning (0, "enabling callee interworking support is only meaningful when compiling for the Thumb");
if (TARGET_ARM && TARGET_CALLER_INTERWORKING)
warning (0, "enabling caller interworking support is only meaningful when compiling for the Thumb");
if (TARGET_APCS_STACK && !TARGET_APCS_FRAME)
{
warning (0, "-mapcs-stack-check incompatible with -mno-apcs-frame");
target_flags |= MASK_APCS_FRAME;
}
if (TARGET_POKE_FUNCTION_NAME)
target_flags |= MASK_APCS_FRAME;
if (TARGET_APCS_REENT && flag_pic)
error ("-fpic and -mapcs-reent are incompatible");
if (TARGET_APCS_REENT)
warning (0, "APCS reentrant code not supported. Ignored");
if (TARGET_ARM
&& write_symbols != NO_DEBUG
&& !TARGET_APCS_FRAME
&& (TARGET_DEFAULT & MASK_APCS_FRAME))
warning (0, "-g with -mno-apcs-frame may not give sensible debugging");
if (flag_pic && TARGET_SINGLE_PIC_BASE && !TARGET_MACHO)
arm_pic_register = TARGET_APCS_STACK ? 9 : 10;
if (TARGET_APCS_FLOAT)
warning (0, "passing floating point arguments in fp regs not yet supported");
arm_arch3m = (insn_flags & FL_ARCH3M) != 0;
arm_arch4 = (insn_flags & FL_ARCH4) != 0;
arm_arch4t = arm_arch4 & ((insn_flags & FL_THUMB) != 0);
arm_arch5 = (insn_flags & FL_ARCH5) != 0;
arm_arch5e = (insn_flags & FL_ARCH5E) != 0;
arm_arch6 = (insn_flags & FL_ARCH6) != 0;
arm_arch6k = (insn_flags & FL_ARCH6K) != 0;
arm_arch_xscale = (insn_flags & FL_XSCALE) != 0;
arm_arch_cirrus = (insn_flags & FL_CIRRUS) != 0;
arm_ld_sched = (tune_flags & FL_LDSCHED) != 0;
arm_tune_strongarm = (tune_flags & FL_STRONG) != 0;
thumb_code = (TARGET_ARM == 0);
arm_tune_wbuf = (tune_flags & FL_WBUF) != 0;
arm_tune_xscale = (tune_flags & FL_XSCALE) != 0;
arm_arch_iwmmxt = (insn_flags & FL_IWMMXT) != 0;
if (interwork_option == -1)
interwork_option = arm_arch5 ? 1 : 0;
if (TARGET_INTERWORK)
arm_cpp_interwork = 1;
if (target_abi_name)
{
for (i = 0; i < ARRAY_SIZE (arm_all_abis); i++)
{
if (streq (arm_all_abis[i].name, target_abi_name))
{
arm_abi = arm_all_abis[i].abi_type;
break;
}
}
if (i == ARRAY_SIZE (arm_all_abis))
error ("invalid ABI option: -mabi=%s", target_abi_name);
}
else
arm_abi = ARM_DEFAULT_ABI;
if (TARGET_IWMMXT && !ARM_DOUBLEWORD_ALIGN)
error ("iwmmxt requires an AAPCS compatible ABI for proper operation");
if (TARGET_IWMMXT_ABI && !TARGET_IWMMXT)
error ("iwmmxt abi requires an iwmmxt capable cpu");
arm_fp_model = ARM_FP_MODEL_UNKNOWN;
if (target_fpu_name == NULL && target_fpe_name != NULL)
{
if (streq (target_fpe_name, "2"))
target_fpu_name = "fpe2";
else if (streq (target_fpe_name, "3"))
target_fpu_name = "fpe3";
else
error ("invalid floating point emulation option: -mfpe=%s",
target_fpe_name);
}
if (target_fpu_name != NULL)
{
for (i = 0; i < ARRAY_SIZE (all_fpus); i++)
{
if (streq (all_fpus[i].name, target_fpu_name))
{
arm_fpu_arch = all_fpus[i].fpu;
arm_fpu_tune = arm_fpu_arch;
arm_fp_model = fp_model_for_fpu[arm_fpu_arch];
break;
}
}
if (arm_fp_model == ARM_FP_MODEL_UNKNOWN)
error ("invalid floating point option: -mfpu=%s", target_fpu_name);
}
else
{
#ifdef FPUTYPE_DEFAULT
arm_fpu_arch = FPUTYPE_DEFAULT;
arm_fpu_tune = FPUTYPE_DEFAULT;
#else
if (arm_arch_cirrus)
arm_fpu_arch = FPUTYPE_MAVERICK;
else
arm_fpu_arch = FPUTYPE_FPA_EMU2;
#endif
if (tune_flags & FL_CO_PROC && arm_fpu_arch == FPUTYPE_FPA_EMU2)
arm_fpu_tune = FPUTYPE_FPA;
else
arm_fpu_tune = arm_fpu_arch;
arm_fp_model = fp_model_for_fpu[arm_fpu_arch];
gcc_assert (arm_fp_model != ARM_FP_MODEL_UNKNOWN);
}
if (target_float_abi_name != NULL)
{
for (i = 0; i < ARRAY_SIZE (all_float_abis); i++)
{
if (streq (all_float_abis[i].name, target_float_abi_name))
{
arm_float_abi = all_float_abis[i].abi_type;
break;
}
}
if (i == ARRAY_SIZE (all_float_abis))
error ("invalid floating point abi: -mfloat-abi=%s",
target_float_abi_name);
}
else
arm_float_abi = TARGET_DEFAULT_FLOAT_ABI;
if (arm_float_abi == ARM_FLOAT_ABI_HARD && TARGET_VFP)
sorry ("-mfloat-abi=hard and VFP");
if (TARGET_IWMMXT && !TARGET_SOFT_FLOAT)
sorry ("iWMMXt and hardware floating point");
if (TARGET_SOFT_FLOAT)
arm_fpu_arch = FPUTYPE_NONE;
if ((TARGET_SOFT_FLOAT
|| arm_fpu_tune == FPUTYPE_FPA_EMU2
|| arm_fpu_tune == FPUTYPE_FPA_EMU3)
&& (tune_flags & FL_MODE32) == 0)
flag_schedule_insns = flag_schedule_insns_after_reload = 0;
if (target_thread_switch)
{
if (strcmp (target_thread_switch, "soft") == 0)
target_thread_pointer = TP_SOFT;
else if (strcmp (target_thread_switch, "auto") == 0)
target_thread_pointer = TP_AUTO;
else if (strcmp (target_thread_switch, "cp15") == 0)
target_thread_pointer = TP_CP15;
else
error ("invalid thread pointer option: -mtp=%s", target_thread_switch);
}
if (target_thread_pointer == TP_AUTO)
{
if (arm_arch6k && !TARGET_THUMB)
target_thread_pointer = TP_CP15;
else
target_thread_pointer = TP_SOFT;
}
if (TARGET_HARD_TP && TARGET_THUMB)
error ("can not use -mtp=cp15 with -mthumb");
if (TARGET_AAPCS_BASED)
arm_structure_size_boundary = 8;
if (structure_size_string != NULL)
{
int size = strtol (structure_size_string, NULL, 0);
if (size == 8 || size == 32
|| (ARM_DOUBLEWORD_ALIGN && size == 64))
arm_structure_size_boundary = size;
else
warning (0, "structure size boundary can only be set to %s",
ARM_DOUBLEWORD_ALIGN ? "8, 32 or 64": "8 or 32");
}
if (arm_pic_register_string != NULL)
{
int pic_register = decode_reg_name (arm_pic_register_string);
if (!flag_pic)
warning (0, "-mpic-register= is useless without -fpic");
else if (pic_register < 0 || call_used_regs[pic_register]
|| pic_register == HARD_FRAME_POINTER_REGNUM
|| pic_register == STACK_POINTER_REGNUM
|| pic_register >= PC_REGNUM)
error ("unable to use '%s' for PIC register", arm_pic_register_string);
else
arm_pic_register = pic_register;
}
if (TARGET_THUMB && flag_schedule_insns)
{
flag_schedule_insns = 0;
}
if (optimize_size)
{
arm_constant_limit = 1;
max_insns_skipped = 6;
}
else
{
if (arm_ld_sched)
arm_constant_limit = 1;
if (arm_tune_xscale)
arm_constant_limit = 2;
if (arm_tune_strongarm)
max_insns_skipped = 3;
}
#ifdef SUBTARGET_OVERRIDE_OPTIONS
SUBTARGET_OVERRIDE_OPTIONS;
#endif
#ifdef SUBSUBTARGET_OVERRIDE_OPTIONS
SUBSUBTARGET_OVERRIDE_OPTIONS;
#endif
arm_add_gc_roots ();
}
/* One-time initialization of GC-related state: set up the obstack used
   by the minipool (constant pool) machinery and remember its base so it
   can be freed back to empty between functions.  */
static void
arm_add_gc_roots (void)
{
gcc_obstack_init(&minipool_obstack);
/* Zero-length allocation just records the obstack's current base.  */
minipool_startobj = (char *) obstack_alloc (&minipool_obstack, 0);
}
typedef struct
{
const char *const arg;
const unsigned long return_value;
}
isr_attribute_arg;
/* Table mapping the argument of an "isr"/"interrupt" attribute to the
   ARM_FT_* function type it denotes.  Matching (in arm_isr_value) is
   case-sensitive, so both spellings of each name are listed.  The table
   is terminated by a NULL entry.

   Fix: the { "ABORT", ... } / { "abort", ... } pair was listed twice;
   the second pair was unreachable (first match wins) and has been
   removed.  Behavior is unchanged.  */
static const isr_attribute_arg isr_attribute_args [] =
{
  { "IRQ",   ARM_FT_ISR },
  { "irq",   ARM_FT_ISR },
  { "FIQ",   ARM_FT_FIQ },
  { "fiq",   ARM_FT_FIQ },
  { "ABORT", ARM_FT_ISR },
  { "abort", ARM_FT_ISR },
  { "UNDEF", ARM_FT_EXCEPTION },
  { "undef", ARM_FT_EXCEPTION },
  { "SWI",   ARM_FT_EXCEPTION },
  { "swi",   ARM_FT_EXCEPTION },
  { NULL,    ARM_FT_NORMAL }
};
/* Decode ARGUMENT, the argument list of an "isr"/"interrupt" attribute,
   into the ARM_FT_* function type it names.  Returns ARM_FT_ISR when no
   argument is given, and ARM_FT_UNKNOWN when the argument is not a
   recognized string constant.  */
static unsigned long
arm_isr_value (tree argument)
{
  const isr_attribute_arg *entry;
  const char *name;

  /* No argument supplied: default interrupt handler type.  */
  if (argument == NULL_TREE)
    return ARM_FT_ISR;

  /* The argument, when present, must be a string constant.  */
  if (TREE_VALUE (argument) == NULL_TREE
      || TREE_CODE (TREE_VALUE (argument)) != STRING_CST)
    return ARM_FT_UNKNOWN;

  name = TREE_STRING_POINTER (TREE_VALUE (argument));

  /* Look the string up in the (NULL-terminated) mapping table.  */
  for (entry = isr_attribute_args; entry->arg != NULL; entry++)
    if (streq (name, entry->arg))
      return entry->return_value;

  return ARM_FT_UNKNOWN;
}
/* Compute the ARM_FT_* classification of the current function from its
   decl and attributes: volatile (noreturn), nested, naked, and/or an
   interrupt type derived from the "isr"/"interrupt" attribute.  */
static unsigned long
arm_compute_func_type (void)
{
unsigned long type = ARM_FT_UNKNOWN;
tree a;
tree attr;
gcc_assert (TREE_CODE (current_function_decl) == FUNCTION_DECL);
/* Treat noreturn functions as "volatile" only when optimizing and when
   no unwind information is needed (otherwise the epilogue matters).  */
if (optimize > 0
&& (TREE_NOTHROW (current_function_decl)
|| !(flag_unwind_tables
|| (flag_exceptions && !USING_SJLJ_EXCEPTIONS)))
&& TREE_THIS_VOLATILE (current_function_decl))
type |= ARM_FT_VOLATILE;
if (cfun->static_chain_decl != NULL)
type |= ARM_FT_NESTED;
attr = DECL_ATTRIBUTES (current_function_decl);
a = lookup_attribute ("naked", attr);
if (a != NULL_TREE)
type |= ARM_FT_NAKED;
/* "isr" and "interrupt" are synonyms; either selects the ISR type.  */
a = lookup_attribute ("isr", attr);
if (a == NULL_TREE)
a = lookup_attribute ("interrupt", attr);
if (a == NULL_TREE)
/* Without arch5 interworking support, interworking functions need a
   special (bx-based) return sequence.  */
type |= (TARGET_INTERWORK && !arm_arch5) ? ARM_FT_INTERWORKED : ARM_FT_NORMAL;
else
type |= arm_isr_value (TREE_VALUE (a));
return type;
}
/* Return the ARM_FT_* type of the current function, computing and
   caching it in cfun->machine->func_type on first use.  */
unsigned long
arm_current_func_type (void)
{
if (ARM_FUNC_TYPE (cfun->machine->func_type) == ARM_FT_UNKNOWN)
cfun->machine->func_type = arm_compute_func_type ();
return cfun->machine->func_type;
}
/* If SIBLING is an indirect sibcall through a register, return that
   register's number and set *IS_VALUE to whether the call produces a
   value; otherwise return -1.  Two RTL shapes are recognized:
   (parallel [(call (mem (reg)) ...) ...])            -- no return value
   (parallel [(set x (call (mem (reg)) ...)) ...])    -- with return value  */
static int
indirect_sibreturn_reg (rtx sibling, bool *is_value)
{
/* Shape 1: plain call, address is a register.  */
if (GET_CODE (sibling) == CALL_INSN
&& GET_CODE (PATTERN (sibling)) == PARALLEL
&& GET_CODE (XVECEXP (PATTERN (sibling), 0, 0)) == CALL
&& GET_CODE (XEXP (XVECEXP (PATTERN (sibling), 0, 0), 0)) == MEM
&& GET_CODE (XEXP (XEXP (XVECEXP (PATTERN (sibling), 0, 0), 0), 0)) == REG)
{
*is_value = 0;
return REGNO (XEXP (XEXP (XVECEXP (PATTERN (sibling), 0, 0), 0), 0));
}
/* Shape 2: call whose result is SET into a destination.  */
if (GET_CODE (sibling) == CALL_INSN
&& GET_CODE (PATTERN (sibling)) == PARALLEL
&& GET_CODE (XVECEXP (PATTERN (sibling), 0, 0)) == SET
&& GET_CODE (XEXP (XVECEXP (PATTERN (sibling), 0, 0), 1)) == CALL
&& GET_CODE (XEXP (XEXP (XVECEXP (PATTERN (sibling), 0, 0), 1), 0)) == MEM
&& GET_CODE (XEXP (XEXP (XEXP (XVECEXP (PATTERN (sibling), 0, 0), 1), 0), 0)) == REG)
{
*is_value = 1;
return REGNO (XEXP (XEXP (XEXP (XVECEXP (PATTERN (sibling), 0, 0), 1), 0), 0));
}
/* Not an indirect call through a register.  */
return -1;
}
/* If SIBLING is an indirect sibcall whose target address is loaded from
   memory of the form (mem (reg)) or (mem (plus (reg) (const_int))),
   store the base register in *REG, set *IS_VALUE to whether the call
   returns a value, and return the constant offset (const0_rtx for the
   plain-register form).  Return NULL_RTX if the pattern doesn't match.  */
static rtx
indirect_sibreturn_mem (rtx sibling, rtx* reg, bool *is_value)
{
rtx mem = NULL_RTX;
/* Shape 1: plain call whose address operand is itself a MEM.  */
if (GET_CODE (sibling) == CALL_INSN
&& GET_CODE (PATTERN (sibling)) == PARALLEL
&& GET_CODE (XVECEXP (PATTERN (sibling), 0, 0)) == CALL
&& GET_CODE (XEXP (XVECEXP (PATTERN (sibling), 0, 0), 0)) == MEM
&& GET_CODE (XEXP (XEXP (XVECEXP (PATTERN (sibling), 0, 0), 0), 0)) == MEM)
{
*is_value = 0;
mem = XEXP (XEXP (XVECEXP (PATTERN (sibling), 0, 0), 0), 0);
}
/* Shape 2: value-returning (SET ... (call ...)) variant.  */
else if (GET_CODE (sibling) == CALL_INSN
&& GET_CODE (PATTERN (sibling)) == PARALLEL
&& GET_CODE (XVECEXP (PATTERN (sibling), 0, 0)) == SET
&& GET_CODE (XEXP (XVECEXP (PATTERN (sibling), 0, 0), 1)) == CALL
&& GET_CODE (XEXP (XEXP (XVECEXP (PATTERN (sibling), 0, 0), 1), 0)) == MEM
&& GET_CODE (XEXP (XEXP (XEXP (XVECEXP (PATTERN (sibling), 0, 0), 1), 0), 0)) == MEM)
{
*is_value = 1;
mem = XEXP (XEXP (XEXP (XVECEXP (PATTERN (sibling), 0, 0), 1), 0), 0);
}
/* Decompose the address: base + constant offset ...  */
if (mem
&& GET_CODE (XEXP (mem, 0)) == PLUS
&& GET_CODE (XEXP (XEXP (mem, 0), 0)) == REG
&& GET_CODE (XEXP (XEXP (mem, 0), 1)) == CONST_INT)
{
*reg = XEXP (XEXP (mem, 0), 0);
return XEXP (XEXP (mem, 0), 1);
}
/* ... or a bare base register (offset 0).  */
else if (mem && GET_CODE (XEXP (mem, 0)) == REG)
{
*reg = XEXP (mem, 0);
return const0_rtx;
}
return NULL_RTX;
}
/* Return 1 if it is possible to return using a single instruction.
   If SIBLING is non-null, this is a test for a return before a sibling
   call; SIBLING is the call insn, so we can examine its register usage.
   ISCOND is nonzero when the return would be conditional.  */
int
use_return_insn (int iscond, rtx sibling)
{
int regno;
unsigned int func_type;
unsigned long saved_int_regs;
unsigned HOST_WIDE_INT stack_adjust;
arm_stack_offsets *offsets;
/* Never use a return instruction before reload has run.  */
if (!reload_completed)
return 0;
func_type = arm_current_func_type ();
/* Naked functions and volatile (noreturn) functions need special
   consideration.  */
if (func_type & (ARM_FT_VOLATILE | ARM_FT_NAKED))
return 0;
/* So do interrupt functions that use the frame pointer.  */
if (IS_INTERRUPT (func_type) && frame_pointer_needed)
return 0;
offsets = arm_get_frame_offsets ();
stack_adjust = offsets->outgoing_args - offsets->saved_regs;
/* Reject variadic functions, functions using __builtin_eh_return or
   alloca, and any function needing a stack adjustment on return.  */
if (current_function_pretend_args_size
|| cfun->machine->uses_anonymous_args
|| current_function_calls_eh_return
|| current_function_calls_alloca
|| !(stack_adjust == 0))
return 0;
saved_int_regs = arm_compute_save_reg_mask ();
/* NOTE(review): given the `!(stack_adjust == 0)` test above,
   stack_adjust is always 0 here, so this branch looks unreachable.
   Upstream GCC additionally allowed stack_adjust == 4 when the stack
   pointer is saved on the stack (pre-incrementing load, "ldmib") --
   confirm whether this simplification was intentional.  */
if (stack_adjust == 4 && !arm_arch5)
{
/* r3 would be clobbered by the pre-increment scratch use.  */
if (!call_used_regs[3])
return 0;
/* ... and must not be live as part of a large return value.  */
if (arm_size_return_regs () >= (4 * UNITS_PER_WORD))
return 0;
if (sibling)
{
gcc_assert (GET_CODE (sibling) == CALL_INSN);
if (find_regno_fusage (sibling, USE, 3))
return 0;
/* Nor may r3 hold the address of an indirect sibcall.  */
{
bool ignored;
int regno = indirect_sibreturn_reg (sibling, &ignored);
if (regno == 3)
return 0;
}
}
/* Can't use a single return if r0-r2 need restoring (the ldmib trick
   needs a scratch among the low registers).  */
if (saved_int_regs & 0x7)
return 0;
}
/* On pre-arch5 interworking targets a restored LR can't simply be
   moved to PC; a bx sequence is needed instead.  */
if (TARGET_INTERWORK && !arm_arch5 && saved_int_regs != 0)
return 0;
/* On StrongARM, conditional returns are expensive unless trivial.  */
if (iscond && arm_tune_strongarm)
{
if (saved_int_regs != 0 && saved_int_regs != (1 << LR_REGNUM))
return 0;
if (flag_pic
&& arm_pic_register != INVALID_REGNUM
&& regs_ever_live[PIC_OFFSET_TABLE_REGNUM])
return 0;
}
/* If any registers are saved, LR must be saved too (the single return
   insn restores PC from the same load-multiple).  */
if (saved_int_regs && !(saved_int_regs & (1 << LR_REGNUM)))
return 0;
/* A sibcall's target register must survive the epilogue: only r0-r3
   and ip (r12) are safe, and an unrecognized call shape is rejected.  */
if (sibling)
{
bool ignored;
int regno = indirect_sibreturn_reg (sibling, &ignored);
if (regno > 3 && regno != 12)
return 0;
if (regno == -1)
return 0;
}
/* Any call-saved FPA, VFP or iWMMXt register needing restoration
   prevents a single-instruction return.  */
if (TARGET_HARD_FLOAT && TARGET_FPA)
for (regno = FIRST_FPA_REGNUM; regno <= LAST_FPA_REGNUM; regno++)
if (regs_ever_live[regno] && !call_used_regs[regno])
return 0;
if (TARGET_HARD_FLOAT && TARGET_VFP)
for (regno = FIRST_VFP_REGNUM; regno <= LAST_VFP_REGNUM; regno++)
if (regs_ever_live[regno] && !call_used_regs[regno])
return 0;
if (TARGET_REALLY_IWMMXT)
for (regno = FIRST_IWMMXT_REGNUM; regno <= LAST_IWMMXT_REGNUM; regno++)
if (regs_ever_live[regno] && ! call_used_regs [regno])
return 0;
/* Saved registers above the hard frame pointer would be restored at
   the wrong address by the single load-multiple.  */
if (saved_int_regs & (1 << LR_REGNUM))
for (regno = ARM_HARD_FRAME_POINTER_REGNUM + 1; regno <= 11; regno++)
if (saved_int_regs & (1 << regno))
return 0;
return 1;
}
/* Return TRUE if I can be used as an immediate operand of an ARM
   data-processing instruction, i.e. an 8-bit value rotated right by an
   even amount within 32 bits.  Values wider than 32 bits are accepted
   only when the upper bits are a sign-extension of bit 31.  */
int
const_ok_for_arm (HOST_WIDE_INT i)
{
  int lowbit;
  unsigned HOST_WIDE_INT high = i & ~(unsigned HOST_WIDE_INT) 0xffffffff;

  /* Bits above the low 32 must be all zero or all one.  */
  if (high != 0
      && high != ((~(unsigned HOST_WIDE_INT) 0)
		  & ~(unsigned HOST_WIDE_INT) 0xffffffff))
    return FALSE;

  i &= (unsigned HOST_WIDE_INT) 0xffffffff;

  /* Fast path: fits directly in 8 bits (includes zero).  */
  if ((i & ~(unsigned HOST_WIDE_INT) 0xff) == 0)
    return TRUE;

  /* Index of the lowest set bit, rounded down to an even position,
     since the ARM rotation amount is always even.  */
  lowbit = (ffs ((int) i) - 1) & ~1;

  /* All set bits within one byte starting at that even position.  */
  if ((i & ~(((unsigned HOST_WIDE_INT) 0xff) << lowbit)) == 0)
    return TRUE;

  /* Otherwise the byte may wrap around the top of the word; only
     rotations leaving the low bits at positions 0, 2 or 4 qualify.  */
  if (lowbit <= 4
      && ((i & ~0xc000003f) == 0
	  || (i & ~0xf000000f) == 0
	  || (i & ~0xfc000003) == 0))
    return TRUE;

  return FALSE;
}
/* Return nonzero if I is a valid constant operand for operation CODE,
   either directly or after the standard ARM transformation for that
   opcode (ADD<->SUB negation, AND->BIC inversion).  */
static int
const_ok_for_op (HOST_WIDE_INT i, enum rtx_code code)
{
  /* Usable as-is?  */
  if (const_ok_for_arm (i))
    return 1;

  /* PLUS can become a subtract of the negated constant.  */
  if (code == PLUS)
    return const_ok_for_arm (ARM_SIGN_EXTEND (-i));

  /* AND can become BIC with the inverted constant.  */
  if (code == AND)
    return const_ok_for_arm (ARM_SIGN_EXTEND (~i));

  /* MINUS, XOR and IOR have no such single-instruction rewrite.  */
  if (code == MINUS || code == XOR || code == IOR)
    return 0;

  gcc_unreachable ();
}
/* Emit insns computing (CODE SOURCE VAL) into TARGET in mode MODE, and
   return the number of insns emitted.  When loading the constant into a
   register first would be cheaper than synthesizing it inline (beyond
   arm_constant_limit insns), do that instead so the constant can later
   be pushed to a minipool.  INSN, if non-null, supplies a COND_EXEC
   condition to predicate the emitted insns with.  */
int
arm_split_constant (enum rtx_code code, enum machine_mode mode, rtx insn,
HOST_WIDE_INT val, rtx target, rtx source, int subtargets)
{
rtx cond;
if (insn && GET_CODE (PATTERN (insn)) == COND_EXEC)
cond = COND_EXEC_TEST (PATTERN (insn));
else
cond = NULL_RTX;
if (subtargets || code == SET
|| (GET_CODE (target) == REG && GET_CODE (source) == REG
&& REGNO (target) != REGNO (source)))
{
/* After arm_reorg the constant pools are fixed, and conditional
   insns can't be split, so only count-and-compare here.  */
if (!after_arm_reorg
&& !cond
&& (arm_gen_constant (code, mode, NULL_RTX, val, target, source,
1, 0)
> arm_constant_limit + (code != SET)))
{
if (code == SET)
{
/* A plain SET of the constant; reorg may pool it later.  */
emit_set_insn (target, GEN_INT (val));
return 1;
}
else
{
/* Load the constant into a temporary, then apply CODE.  */
rtx temp = subtargets ? gen_reg_rtx (mode) : target;
emit_set_insn (temp, GEN_INT (val));
/* MINUS is not commutative: constant is the first operand.  */
if (code == MINUS)
emit_set_insn (target, gen_rtx_MINUS (mode, temp, source));
else
emit_set_insn (target,
gen_rtx_fmt_ee (code, mode, source, temp));
return 2;
}
}
}
/* Synthesize the constant inline.  */
return arm_gen_constant (code, mode, cond, val, target, source, subtargets,
1);
}
/* Count how many instructions arm_gen_constant's final loop would need
   to build REMAINDER, scanning 8-bit (even-rotated) chunks starting at
   bit position I.  This must mirror the emission loop at the end of
   arm_gen_constant exactly.  */
static int
count_insns_for_constant (HOST_WIDE_INT remainder, int i)
{
HOST_WIDE_INT temp1;
int num_insns = 0;
do
{
int end;
/* Wrap the bit index around the 32-bit word.  */
if (i <= 0)
i += 32;
/* A set bit in this 2-bit window starts an 8-bit chunk.  */
if (remainder & (3 << (i - 2)))
{
end = i - 8;
if (end < 0)
end += 32;
/* Extract the chunk, handling wrap-around past bit 31.  */
temp1 = remainder & ((0x0ff << end)
| ((i < end) ? (0xff >> (32 - end)) : 0));
remainder &= ~temp1;
num_insns++;
i -= 6;
}
i -= 2;
} while (remainder);
return num_insns;
}
/* Emit PATTERN, predicated by COND_EXEC condition COND when COND is
   non-null (COND is copied so the caller's rtx is not shared).  */
static void
emit_constant_insn (rtx cond, rtx pattern)
{
if (cond)
pattern = gen_rtx_COND_EXEC (VOIDmode, copy_rtx (cond), pattern);
emit_insn (pattern);
}
/* Synthesize the 32-bit constant VAL as the second operand of
   (CODE SOURCE VAL) into TARGET, using only instructions whose
   immediates satisfy const_ok_for_arm.  Returns the number of insns
   needed.  When GENERATE is zero, only counts; no rtl is emitted.
   COND, if non-null, predicates every emitted insn.  SUBTARGETS allows
   fresh pseudos for intermediate values.  */
static int
arm_gen_constant (enum rtx_code code, enum machine_mode mode, rtx cond,
HOST_WIDE_INT val, rtx target, rtx source, int subtargets,
int generate)
{
int can_invert = 0;
int can_negate = 0;
int can_negate_initial = 0;
int can_shift = 0;
int i;
int num_bits_set = 0;
int set_sign_bit_copies = 0;
int clear_sign_bit_copies = 0;
int clear_zero_bit_copies = 0;
int set_zero_bit_copies = 0;
int insns = 0;
unsigned HOST_WIDE_INT temp1, temp2;
/* Only the low 32 bits of VAL matter on ARM.  */
unsigned HOST_WIDE_INT remainder = val & 0xffffffff;
/* Phase 1: per-opcode trivial cases, and record which algebraic
   rewrites (negate/invert/shift) are legal for CODE.  */
switch (code)
{
case SET:
can_invert = 1;
can_shift = 1;
can_negate = 1;
break;
case PLUS:
can_negate = 1;
can_negate_initial = 1;
break;
case IOR:
/* x | ~0 == ~0: just load the all-ones constant.  */
if (remainder == 0xffffffff)
{
if (generate)
emit_constant_insn (cond,
gen_rtx_SET (VOIDmode, target,
GEN_INT (ARM_SIGN_EXTEND (val))));
return 1;
}
/* x | 0 == x: a plain copy (or nothing at all).  */
if (remainder == 0)
{
if (reload_completed && rtx_equal_p (target, source))
return 0;
if (generate)
emit_constant_insn (cond,
gen_rtx_SET (VOIDmode, target, source));
return 1;
}
break;
case AND:
/* x & 0 == 0.  */
if (remainder == 0)
{
if (generate)
emit_constant_insn (cond,
gen_rtx_SET (VOIDmode, target, const0_rtx));
return 1;
}
/* x & ~0 == x.  */
if (remainder == 0xffffffff)
{
if (reload_completed && rtx_equal_p (target, source))
return 0;
if (generate)
emit_constant_insn (cond,
gen_rtx_SET (VOIDmode, target, source));
return 1;
}
can_invert = 1;
break;
case XOR:
/* x ^ 0 == x.  */
if (remainder == 0)
{
if (reload_completed && rtx_equal_p (target, source))
return 0;
if (generate)
emit_constant_insn (cond,
gen_rtx_SET (VOIDmode, target, source));
return 1;
}
/* Callers only pass XOR with 0 or ~0; x ^ ~0 == ~x (MVN).  */
gcc_assert (remainder == 0xffffffff);
if (generate)
emit_constant_insn (cond,
gen_rtx_SET (VOIDmode, target,
gen_rtx_NOT (mode, source)));
return 1;
case MINUS:
/* 0 - x == -x (RSB #0).  */
if (remainder == 0)
{
if (generate)
emit_constant_insn (cond,
gen_rtx_SET (VOIDmode, target,
gen_rtx_NEG (mode, source)));
return 1;
}
/* A valid immediate can be subtracted from directly (RSB).  */
if (const_ok_for_arm (val))
{
if (generate)
emit_constant_insn (cond,
gen_rtx_SET (VOIDmode, target,
gen_rtx_MINUS (mode, GEN_INT (val),
source)));
return 1;
}
can_negate = 1;
break;
default:
gcc_unreachable ();
}
/* Single-instruction case: VAL (possibly negated or inverted) is a
   legal immediate.  */
if (const_ok_for_arm (val)
|| (can_negate_initial && const_ok_for_arm (-val))
|| (can_invert && const_ok_for_arm (~val)))
{
if (generate)
emit_constant_insn (cond,
gen_rtx_SET (VOIDmode, target,
(source
? gen_rtx_fmt_ee (code, mode, source,
GEN_INT (val))
: GEN_INT (val))));
return 1;
}
/* Count runs of identical bits at both ends of the 32-bit value;
   these enable the shift-based tricks below.  */
for (i = 31; i >= 0; i--)
{
if ((remainder & (1 << i)) == 0)
clear_sign_bit_copies++;
else
break;
}
for (i = 31; i >= 0; i--)
{
if ((remainder & (1 << i)) != 0)
set_sign_bit_copies++;
else
break;
}
for (i = 0; i <= 31; i++)
{
if ((remainder & (1 << i)) == 0)
clear_zero_bit_copies++;
else
break;
}
for (i = 0; i <= 31; i++)
{
if ((remainder & (1 << i)) != 0)
set_zero_bit_copies++;
else
break;
}
/* Phase 2: per-opcode two/three-instruction shift and invert tricks.  */
switch (code)
{
case SET:
/* Leading ones: build the shifted-left value, then ASR to smear the
   sign bit back across the top.  */
if (set_sign_bit_copies > 1)
{
if (const_ok_for_arm
(temp1 = ARM_SIGN_EXTEND (remainder
<< (set_sign_bit_copies - 1))))
{
if (generate)
{
rtx new_src = subtargets ? gen_reg_rtx (mode) : target;
emit_constant_insn (cond,
gen_rtx_SET (VOIDmode, new_src,
GEN_INT (temp1)));
emit_constant_insn (cond,
gen_ashrsi3 (target, new_src,
GEN_INT (set_sign_bit_copies - 1)));
}
return 2;
}
/* Same idea with the complemented value (MVN + ASR).  */
temp1 |= (1 << (set_sign_bit_copies - 1)) - 1;
if (const_ok_for_arm (~temp1))
{
if (generate)
{
rtx new_src = subtargets ? gen_reg_rtx (mode) : target;
emit_constant_insn (cond,
gen_rtx_SET (VOIDmode, new_src,
GEN_INT (temp1)));
emit_constant_insn (cond,
gen_ashrsi3 (target, new_src,
GEN_INT (set_sign_bit_copies - 1)));
}
return 2;
}
}
/* Try a nearby round number plus a small correction (MOV + ADD).  */
if (clear_sign_bit_copies + clear_zero_bit_copies <= 16)
{
int topshift = clear_sign_bit_copies & ~1;
temp1 = ARM_SIGN_EXTEND ((remainder + (0x00800000 >> topshift))
& (0xff000000 >> topshift));
if (temp1 == 0 && topshift != 0)
temp1 = 0x80000000 >> (topshift - 1);
temp2 = ARM_SIGN_EXTEND (temp1 - remainder);
if (const_ok_for_arm (temp2))
{
if (generate)
{
rtx new_src = subtargets ? gen_reg_rtx (mode) : target;
emit_constant_insn (cond,
gen_rtx_SET (VOIDmode, new_src,
GEN_INT (temp1)));
emit_constant_insn (cond,
gen_addsi3 (target, new_src,
GEN_INT (-temp2)));
}
return 2;
}
}
/* Try values whose halves repeat: build one half, OR in a shifted
   copy of itself.  */
if (val & 0xffff0000)
{
temp1 = remainder & 0xffff0000;
temp2 = remainder & 0x0000ffff;
for (i = 9; i < 24; i++)
{
if ((((temp2 | (temp2 << i)) & 0xffffffff) == remainder)
&& !const_ok_for_arm (temp2))
{
rtx new_src = (subtargets
? (generate ? gen_reg_rtx (mode) : NULL_RTX)
: target);
insns = arm_gen_constant (code, mode, cond, temp2, new_src,
source, subtargets, generate);
source = new_src;
if (generate)
emit_constant_insn
(cond,
gen_rtx_SET
(VOIDmode, target,
gen_rtx_IOR (mode,
gen_rtx_ASHIFT (mode, source,
GEN_INT (i)),
source)));
return insns + 1;
}
}
/* Likewise with a right-shifted copy of the high part.  */
for (i = 17; i < 24; i++)
{
if (((temp1 | (temp1 >> i)) == remainder)
&& !const_ok_for_arm (temp1))
{
rtx new_src = (subtargets
? (generate ? gen_reg_rtx (mode) : NULL_RTX)
: target);
insns = arm_gen_constant (code, mode, cond, temp1, new_src,
source, subtargets, generate);
source = new_src;
if (generate)
emit_constant_insn
(cond,
gen_rtx_SET (VOIDmode, target,
gen_rtx_IOR
(mode,
gen_rtx_LSHIFTRT (mode, source,
GEN_INT (i)),
source)));
return insns + 1;
}
}
}
break;
case IOR:
case XOR:
/* If ~VAL is a legal immediate, do the operation on the inverted
   constant via a temporary (only safe when TARGET != SOURCE).  */
if (subtargets
|| (reload_completed && !reg_mentioned_p (target, source)))
{
if (const_ok_for_arm (ARM_SIGN_EXTEND (~val)))
{
if (generate)
{
rtx sub = subtargets ? gen_reg_rtx (mode) : target;
emit_constant_insn (cond,
gen_rtx_SET (VOIDmode, sub,
GEN_INT (val)));
emit_constant_insn (cond,
gen_rtx_SET (VOIDmode, target,
gen_rtx_fmt_ee (code, mode,
source, sub)));
}
return 2;
}
}
if (code == XOR)
break;
/* IOR with a run of leading ones: MVN-shift up then MVN-shift down
   fills the top bits with ones while keeping the rest of SOURCE.  */
if (set_sign_bit_copies > 8
&& (val & (-1 << (32 - set_sign_bit_copies))) == val)
{
if (generate)
{
rtx sub = subtargets ? gen_reg_rtx (mode) : target;
rtx shift = GEN_INT (set_sign_bit_copies);
emit_constant_insn
(cond,
gen_rtx_SET (VOIDmode, sub,
gen_rtx_NOT (mode,
gen_rtx_ASHIFT (mode,
source,
shift))));
emit_constant_insn
(cond,
gen_rtx_SET (VOIDmode, target,
gen_rtx_NOT (mode,
gen_rtx_LSHIFTRT (mode, sub,
shift))));
}
return 2;
}
/* Mirror image: a run of trailing ones.  */
if (set_zero_bit_copies > 8
&& (remainder & ((1 << set_zero_bit_copies) - 1)) == remainder)
{
if (generate)
{
rtx sub = subtargets ? gen_reg_rtx (mode) : target;
rtx shift = GEN_INT (set_zero_bit_copies);
emit_constant_insn
(cond,
gen_rtx_SET (VOIDmode, sub,
gen_rtx_NOT (mode,
gen_rtx_LSHIFTRT (mode,
source,
shift))));
emit_constant_insn
(cond,
gen_rtx_SET (VOIDmode, target,
gen_rtx_NOT (mode,
gen_rtx_ASHIFT (mode, sub,
shift))));
}
return 2;
}
/* x | C == ~(~x & ~C): three insns when ~C is a legal immediate.  */
if (const_ok_for_arm (temp1 = ARM_SIGN_EXTEND (~val)))
{
if (generate)
{
rtx sub = subtargets ? gen_reg_rtx (mode) : target;
emit_constant_insn (cond,
gen_rtx_SET (VOIDmode, sub,
gen_rtx_NOT (mode, source)));
source = sub;
if (subtargets)
sub = gen_reg_rtx (mode);
emit_constant_insn (cond,
gen_rtx_SET (VOIDmode, sub,
gen_rtx_AND (mode, source,
GEN_INT (temp1))));
emit_constant_insn (cond,
gen_rtx_SET (VOIDmode, target,
gen_rtx_NOT (mode, sub)));
}
return 3;
}
break;
case AND:
/* AND masking off high bits: shift left then logical-shift right.  */
if (clear_sign_bit_copies >= 16 && clear_sign_bit_copies < 24)
{
HOST_WIDE_INT shift_mask = ((0xffffffff
<< (32 - clear_sign_bit_copies))
& 0xffffffff);
/* First apply any remaining mask bits recursively.  */
if ((remainder | shift_mask) != 0xffffffff)
{
if (generate)
{
rtx new_src = subtargets ? gen_reg_rtx (mode) : target;
insns = arm_gen_constant (AND, mode, cond,
remainder | shift_mask,
new_src, source, subtargets, 1);
source = new_src;
}
else
{
rtx targ = subtargets ? NULL_RTX : target;
insns = arm_gen_constant (AND, mode, cond,
remainder | shift_mask,
targ, source, subtargets, 0);
}
}
if (generate)
{
rtx new_src = subtargets ? gen_reg_rtx (mode) : target;
rtx shift = GEN_INT (clear_sign_bit_copies);
emit_insn (gen_ashlsi3 (new_src, source, shift));
emit_insn (gen_lshrsi3 (target, new_src, shift));
}
return insns + 2;
}
/* AND masking off low bits: shift right then shift left.  */
if (clear_zero_bit_copies >= 16 && clear_zero_bit_copies < 24)
{
HOST_WIDE_INT shift_mask = (1 << clear_zero_bit_copies) - 1;
if ((remainder | shift_mask) != 0xffffffff)
{
if (generate)
{
rtx new_src = subtargets ? gen_reg_rtx (mode) : target;
insns = arm_gen_constant (AND, mode, cond,
remainder | shift_mask,
new_src, source, subtargets, 1);
source = new_src;
}
else
{
rtx targ = subtargets ? NULL_RTX : target;
insns = arm_gen_constant (AND, mode, cond,
remainder | shift_mask,
targ, source, subtargets, 0);
}
}
if (generate)
{
rtx new_src = subtargets ? gen_reg_rtx (mode) : target;
rtx shift = GEN_INT (clear_zero_bit_copies);
emit_insn (gen_lshrsi3 (new_src, source, shift));
emit_insn (gen_ashlsi3 (target, new_src, shift));
}
return insns + 2;
}
break;
default:
break;
}
/* Phase 3: general case -- build the value 8 bits at a time.  If the
   inverted or negated form has fewer set bits, work with that instead
   (AND becomes BIC, PLUS becomes SUB).  */
for (i = 0; i < 32; i++)
if (remainder & (1 << i))
num_bits_set++;
if (code == AND || (can_invert && num_bits_set > 16))
remainder = (~remainder) & 0xffffffff;
else if (code == PLUS && num_bits_set > 16)
remainder = (-remainder) & 0xffffffff;
else
{
can_invert = 0;
can_negate = 0;
}
{
/* Choose the chunk starting position that minimizes the insn count:
   scan for the longest even-aligned run of zero bits.  */
int best_start = 0;
int best_consecutive_zeros = 0;
for (i = 0; i < 32; i += 2)
{
int consecutive_zeros = 0;
if (!(remainder & (3 << i)))
{
while ((i < 32) && !(remainder & (3 << i)))
{
consecutive_zeros += 2;
i += 2;
}
if (consecutive_zeros > best_consecutive_zeros)
{
best_consecutive_zeros = consecutive_zeros;
best_start = i - consecutive_zeros;
}
i -= 2;
}
}
/* Prefer starting at bit 0 when it is no worse: constants built
   low-to-high schedule better on some cores.  */
if (best_start != 0
&& ((((unsigned HOST_WIDE_INT) 1) << best_start) < remainder)
&& (count_insns_for_constant (remainder, 0) <=
count_insns_for_constant (remainder, best_start)))
best_start = 0;
/* Emission loop: peel off one 8-bit (even-rotated) chunk per insn.
   Keep in sync with count_insns_for_constant above.  */
i = best_start;
do
{
int end;
if (i <= 0)
i += 32;
if (remainder & (3 << (i - 2)))
{
end = i - 8;
if (end < 0)
end += 32;
temp1 = remainder & ((0x0ff << end)
| ((i < end) ? (0xff >> (32 - end)) : 0));
remainder &= ~temp1;
if (generate)
{
rtx new_src, temp1_rtx;
if (code == SET || code == MINUS)
{
new_src = (subtargets ? gen_reg_rtx (mode) : target);
if (can_invert && code != MINUS)
temp1 = ~temp1;
}
else
{
/* Intermediate results go in fresh pseudos; the final
   chunk writes TARGET.  */
if (remainder && subtargets)
new_src = gen_reg_rtx (mode);
else
new_src = target;
if (can_invert)
temp1 = ~temp1;
else if (can_negate)
temp1 = -temp1;
}
temp1 = trunc_int_for_mode (temp1, mode);
temp1_rtx = GEN_INT (temp1);
if (code == SET)
;
else if (code == MINUS)
temp1_rtx = gen_rtx_MINUS (mode, temp1_rtx, source);
else
temp1_rtx = gen_rtx_fmt_ee (code, mode, source, temp1_rtx);
emit_constant_insn (cond,
gen_rtx_SET (VOIDmode, new_src,
temp1_rtx));
source = new_src;
}
/* After the first chunk, a SET continues as PLUS (building up
   the value) and MINUS likewise degenerates to PLUS.  */
if (code == SET)
{
can_invert = 0;
code = PLUS;
}
else if (code == MINUS)
code = PLUS;
insns++;
i -= 6;
}
i -= 2;
}
while (remainder);
}
return insns;
}
/* Canonicalize a comparison of CODE against the constant *OP1 so that
   the constant becomes a legal ARM immediate where possible, adjusting
   the comparison code to compensate (e.g. x > C becomes x >= C+1).
   Returns the (possibly new) comparison code; *OP1 may be rewritten.
   Overflow at the extremes of MODE's range is guarded explicitly.  */
enum rtx_code
arm_canonicalize_comparison (enum rtx_code code, enum machine_mode mode,
rtx * op1)
{
unsigned HOST_WIDE_INT i = INTVAL (*op1);
unsigned HOST_WIDE_INT maxval;
/* Largest signed value representable in MODE.  */
maxval = (((unsigned HOST_WIDE_INT) 1) << (GET_MODE_BITSIZE(mode) - 1)) - 1;
switch (code)
{
case EQ:
case NE:
return code;
case GT:
case LE:
/* x > C  <=>  x >= C+1 (unless C is already the signed maximum).  */
if (i != maxval
&& (const_ok_for_arm (i + 1) || const_ok_for_arm (-(i + 1))))
{
*op1 = GEN_INT (i + 1);
return code == GT ? GE : LT;
}
break;
case GE:
case LT:
/* x >= C  <=>  x > C-1 (unless C is the signed minimum).  */
if (i != ~maxval
&& (const_ok_for_arm (i - 1) || const_ok_for_arm (-(i - 1))))
{
*op1 = GEN_INT (i - 1);
return code == GE ? GT : LE;
}
break;
case GTU:
case LEU:
/* Unsigned variants; guard against wrap past the unsigned max.  */
if (i != ~((unsigned HOST_WIDE_INT) 0)
&& (const_ok_for_arm (i + 1) || const_ok_for_arm (-(i + 1))))
{
*op1 = GEN_INT (i + 1);
return code == GTU ? GEU : LTU;
}
break;
case GEU:
case LTU:
if (i != 0
&& (const_ok_for_arm (i - 1) || const_ok_for_arm (-(i - 1))))
{
*op1 = GEN_INT (i - 1);
return code == GEU ? GTU : LEU;
}
break;
default:
gcc_unreachable ();
}
return code;
}
rtx
arm_function_value(tree type, tree func ATTRIBUTE_UNUSED)
{
enum machine_mode mode;
int unsignedp ATTRIBUTE_UNUSED;
rtx r ATTRIBUTE_UNUSED;
mode = TYPE_MODE (type);
if (INTEGRAL_TYPE_P (type))
PROMOTE_FUNCTION_MODE (mode, unsignedp, type);
if (arm_return_in_msb (type))
{
HOST_WIDE_INT size = int_size_in_bytes (type);
if (size % UNITS_PER_WORD != 0)
{
size += UNITS_PER_WORD - size % UNITS_PER_WORD;
mode = mode_for_size (size * BITS_PER_UNIT, MODE_INT, 0);
}
}
return LIBCALL_VALUE(mode);
}
/* Return the number of bytes __builtin_apply_args could push as a
   function result: the four core argument registers, plus any
   coprocessor result registers enabled for the current target.  */
int
arm_apply_result_size (void)
{
  /* r0-r3 are always possible result registers.  */
  int bytes = 16;

  /* Thumb mode has no coprocessor result registers to account for.  */
  if (!TARGET_ARM)
    return bytes;

  if (TARGET_HARD_FLOAT_ABI && TARGET_FPA)
    bytes += 12;
  if (TARGET_HARD_FLOAT_ABI && TARGET_MAVERICK)
    bytes += 8;
  if (TARGET_IWMMXT_ABI)
    bytes += 8;

  return bytes;
}
/* Decide whether a value of TYPE must be returned in memory rather
   than in registers.  AAPCS-derived ABIs use a simple one-word rule;
   the legacy APCS ABI additionally returns certain small integer-only
   structures in registers.  Returns nonzero for return-in-memory.  */
int
arm_return_in_memory (tree type)
{
HOST_WIDE_INT size;
/* Scalars (and, outside AAPCS, complex values) go in registers.  */
if (!AGGREGATE_TYPE_P (type) &&
(TREE_CODE (type) != VECTOR_TYPE) &&
!(TARGET_AAPCS_BASED && TREE_CODE (type) == COMPLEX_TYPE))
return 0;
size = int_size_in_bytes (type);
if (arm_abi != ARM_ABI_APCS)
{
/* ATPCS and later: in memory iff variable-sized or over one word
   (size < 0 means not a compile-time constant).  */
return (size < 0 || size > UNITS_PER_WORD);
}
/* APCS: vectors up to four words are returned in registers.  */
if (TREE_CODE (type) == VECTOR_TYPE)
return (size < 0 || size > (4 * UNITS_PER_WORD));
#ifndef ARM_WINCE
if (size < 0 || size > UNITS_PER_WORD)
return 1;
if (TREE_CODE (type) == RECORD_TYPE)
{
tree field;
/* Find the first actual field.  */
for (field = TYPE_FIELDS (type);
field && TREE_CODE (field) != FIELD_DECL;
field = TREE_CHAIN (field))
continue;
/* An empty record fits in a register.  */
if (field == NULL)
return 0;
/* The first field must itself be register-returnable and not a
   float (APCS floats live in FP registers).  */
if (FLOAT_TYPE_P (TREE_TYPE (field)))
return 1;
if (RETURN_IN_MEMORY (TREE_TYPE (field)))
return 1;
/* Any further non-bitfield field forces memory return.  */
for (field = TREE_CHAIN (field);
field;
field = TREE_CHAIN (field))
{
if (TREE_CODE (field) != FIELD_DECL)
continue;
if (!DECL_BIT_FIELD_TYPE (field))
return 1;
}
return 0;
}
if (TREE_CODE (type) == UNION_TYPE)
{
tree field;
/* A union fits in a register only if every member does.  */
for (field = TYPE_FIELDS (type);
field;
field = TREE_CHAIN (field))
{
if (TREE_CODE (field) != FIELD_DECL)
continue;
if (FLOAT_TYPE_P (TREE_TYPE (field)))
return 1;
if (RETURN_IN_MEMORY (TREE_TYPE (field)))
return 1;
}
return 0;
}
#endif
/* Everything else uses memory.  */
return 1;
}
/* Return nonzero when the words of a double-precision value are stored
   most-significant-word first for the selected FP model.  */
int
arm_float_words_big_endian (void)
{
  /* Maverick (Cirrus) always uses little-endian word order.  */
  if (TARGET_MAVERICK)
    return 0;

  /* FPA always uses big-endian word order.  */
  if (TARGET_FPA)
    return 1;

  /* VFP follows the memory endianness of the target.  */
  if (TARGET_VFP)
    return TARGET_BIG_END ? 1 : 0;

  /* Software floating point defaults to big-endian word order.  */
  return 1;
}
/* Initialize PCUM for scanning the argument list of a call to a
   function of type FNTYPE (NULL for a libcall named LIBNAME), setting
   the initial call cookie from long/short-call attributes and the
   -mlong-calls default, and pre-counting named args for iWMMXt.  */
void
arm_init_cumulative_args (CUMULATIVE_ARGS *pcum, tree fntype,
rtx libname ATTRIBUTE_UNUSED,
tree fndecl)
{
pcum->nregs = 0;
pcum->iwmmxt_nregs = 0;
pcum->can_split = true;
/* Call cookie: -mlong-calls default, overridden by attributes; a
   non-public function is always reachable with a short call.  */
pcum->call_cookie = CALL_NORMAL;
if (TARGET_LONG_CALLS)
pcum->call_cookie = CALL_LONG;
if (fntype)
{
if (lookup_attribute ("short_call", TYPE_ATTRIBUTES (fntype)))
pcum->call_cookie = CALL_SHORT;
else if (lookup_attribute ("long_call", TYPE_ATTRIBUTES (fntype)))
pcum->call_cookie = CALL_LONG;
else if (fndecl && ! TREE_PUBLIC (fndecl))
pcum->call_cookie = CALL_SHORT;
}
pcum->named_count = 0;
pcum->nargs = 0;
/* iWMMXt vector args go in WMMX registers only when named; count the
   declared parameters so arm_function_arg can tell named from
   variadic.  An unprototyped list counts as "all named".  */
if (TARGET_REALLY_IWMMXT && fntype)
{
tree fn_arg;
for (fn_arg = TYPE_ARG_TYPES (fntype);
fn_arg;
fn_arg = TREE_CHAIN (fn_arg))
pcum->named_count += 1;
if (! pcum->named_count)
pcum->named_count = INT_MAX;
}
}
/* Return true if an argument of mode MODE (or type TYPE, when given)
   requires doubleword alignment in the parameter area.  */
bool
arm_needs_doubleword_align (enum machine_mode mode, tree type)
{
  if (GET_MODE_ALIGNMENT (mode) > PARM_BOUNDARY)
    return true;
  return type && TYPE_ALIGN (type) > PARM_BOUNDARY;
}
/* Determine where to put an argument to a function: the register in
   which to pass it, or NULL_RTX to pass on the stack.  PCUM summarizes
   the arguments so far; MODE/TYPE describe this argument and NAMED is
   nonzero for a named (non-variadic) parameter.  */
rtx
arm_function_arg (CUMULATIVE_ARGS *pcum, enum machine_mode mode,
tree type, int named)
{
int nregs;
/* Named iWMMXt vector arguments go in WMMX registers wr0-wr9.  The
   "+ 1" skips the last named slot, reserved per the iWMMXt ABI.  */
if (TARGET_IWMMXT_ABI
&& arm_vector_mode_supported_p (mode)
&& pcum->named_count > pcum->nargs + 1)
{
if (pcum->iwmmxt_nregs <= 9)
return gen_rtx_REG (mode, pcum->iwmmxt_nregs + FIRST_IWMMXT_REGNUM);
else
{
/* Out of WMMX registers: on the stack, and never split.  */
pcum->can_split = false;
return NULL_RTX;
}
}
/* Skip a register to satisfy doubleword alignment when needed.  */
if (pcum->nregs & 1
&& ARM_DOUBLEWORD_ALIGN
&& arm_needs_doubleword_align (mode, type))
pcum->nregs++;
/* VOIDmode marks the end of arguments; return the call cookie.  */
if (mode == VOIDmode)
return GEN_INT (pcum->call_cookie);
/* A splittable argument only needs one free register to start in.  */
if (pcum->can_split)
nregs = 1;
else
nregs = ARM_NUM_REGS2 (mode, type);
if (!named || pcum->nregs + nregs > NUM_ARG_REGS)
return NULL_RTX;
return gen_rtx_REG (mode, pcum->nregs);
}
/* Return the number of bytes of an argument that are passed in
   registers when the argument straddles the register/stack boundary;
   zero when it fits entirely in one or the other.  */
static int
arm_arg_partial_bytes (CUMULATIVE_ARGS *pcum, enum machine_mode mode,
tree type, bool named ATTRIBUTE_UNUSED)
{
  int used = pcum->nregs;

  /* iWMMXt vector arguments are never split.  */
  if (arm_vector_mode_supported_p (mode))
    return 0;

  /* Unsplittable arguments go wholly to one side.  */
  if (!pcum->can_split)
    return 0;

  /* No argument registers left: everything on the stack.  */
  if (used >= NUM_ARG_REGS)
    return 0;

  /* Fits entirely in the remaining registers.  */
  if (used + ARM_NUM_REGS2 (mode, type) <= NUM_ARG_REGS)
    return 0;

  /* Straddles: the leading part fills the remaining registers.  */
  return (NUM_ARG_REGS - used) * UNITS_PER_WORD;
}
/* Variable-sized types are passed by reference; everything else is
   passed by value.  */
static bool
arm_pass_by_reference (CUMULATIVE_ARGS *cum ATTRIBUTE_UNUSED,
enum machine_mode mode ATTRIBUTE_UNUSED,
tree type, bool named ATTRIBUTE_UNUSED)
{
  if (type == NULL)
    return false;
  /* By reference exactly when the size is not a compile-time constant.  */
  return TREE_CODE (TYPE_SIZE (type)) != INTEGER_CST;
}
/* State of the "#pragma long_calls" machinery: OFF (no pragma in
   effect), LONG (force long calls) or SHORT (force short calls).  */
typedef enum
{
OFF,
LONG,
SHORT
} arm_pragma_enum;
/* Current pragma state, consulted by arm_set_default_type_attributes.  */
static arm_pragma_enum arm_pragma_long_calls = OFF;
/* Handle "#pragma long_calls": subsequent functions get long calls.  */
void
arm_pr_long_calls (struct cpp_reader * pfile ATTRIBUTE_UNUSED)
{
arm_pragma_long_calls = LONG;
}
/* Handle "#pragma no_long_calls": subsequent functions get short calls.  */
void
arm_pr_no_long_calls (struct cpp_reader * pfile ATTRIBUTE_UNUSED)
{
arm_pragma_long_calls = SHORT;
}
/* Handle "#pragma long_calls_off": revert to the default call type.  */
void
arm_pr_long_calls_off (struct cpp_reader * pfile ATTRIBUTE_UNUSED)
{
arm_pragma_long_calls = OFF;
}
/* Table of machine attributes.  Fields per struct attribute_spec:
   { name, min args, max args, decl required, type required,
     function-type required, handler }.  Terminated by a NULL entry.  */
const struct attribute_spec arm_attribute_table[] =
{
/* Call-type attributes apply to function types, not decls.  */
{ "long_call", 0, 0, false, true, true, NULL },
{ "short_call", 0, 0, false, true, true, NULL },
/* Interrupt handler attributes; "interrupt" is a synonym of "isr".  */
{ "isr", 0, 1, false, false, false, arm_handle_isr_attribute },
{ "interrupt", 0, 1, false, false, false, arm_handle_isr_attribute },
{ "naked", 0, 0, true, false, false, arm_handle_fndecl_attribute },
#ifdef ARM_PE
/* ARM/PE gives its own dll attribute handling.  */
{ "dllimport", 0, 0, true, false, false, NULL },
{ "dllexport", 0, 0, true, false, false, NULL },
{ "interfacearm", 0, 0, true, false, false, arm_handle_fndecl_attribute },
#elif TARGET_DLLIMPORT_DECL_ATTRIBUTES
{ "dllimport", 0, 0, false, false, false, handle_dll_attribute },
{ "dllexport", 0, 0, false, false, false, handle_dll_attribute },
{ "notshared", 0, 0, false, true, false, arm_handle_notshared_attribute },
#endif
/* Structure layout attributes (MS vs. GCC bitfield layout).  */
{ "ms_struct", 0, 0, false, false, false, arm_handle_ms_struct_attribute },
{ "gcc_struct", 0, 0, false, false, false, arm_handle_gcc_struct_attribute },
#ifdef SUBTARGET_ATTRIBUTE_TABLE
SUBTARGET_ATTRIBUTE_TABLE,
#endif
{ NULL, 0, 0, false, false, false, NULL }
};
/* Handler for function-decl-only attributes ("naked", "interfacearm"):
   warn and discard the attribute when applied to a non-function.
   Always returns NULL_TREE (no replacement attribute list).  */
static tree
arm_handle_fndecl_attribute (tree *node, tree name, tree args ATTRIBUTE_UNUSED,
int flags ATTRIBUTE_UNUSED, bool *no_add_attrs)
{
if (TREE_CODE (*node) != FUNCTION_DECL)
{
warning (OPT_Wattributes, "%qs attribute only applies to functions",
IDENTIFIER_POINTER (name));
*no_add_attrs = true;
}
return NULL_TREE;
}
/* Handler for the "isr"/"interrupt" attribute.  Validates the argument
   (via arm_isr_value), attaches the attribute to function types (also
   through one level of function pointer), and defers or warns in the
   remaining cases.  Returns a deferred attribute list or NULL_TREE.  */
static tree
arm_handle_isr_attribute (tree *node, tree name, tree args, int flags,
bool *no_add_attrs)
{
if (DECL_P (*node))
{
if (TREE_CODE (*node) != FUNCTION_DECL)
{
warning (OPT_Wattributes, "%qs attribute only applies to functions",
IDENTIFIER_POINTER (name));
*no_add_attrs = true;
}
/* FIXME(upstream style): a valid FUNCTION_DECL is accepted as-is;
   the attribute migrates to the type via decl_attributes.  */
}
else
{
if (TREE_CODE (*node) == FUNCTION_TYPE
|| TREE_CODE (*node) == METHOD_TYPE)
{
/* Reject unrecognized argument strings.  */
if (arm_isr_value (args) == ARM_FT_UNKNOWN)
{
warning (OPT_Wattributes, "%qs attribute ignored",
IDENTIFIER_POINTER (name));
*no_add_attrs = true;
}
}
else if (TREE_CODE (*node) == POINTER_TYPE
&& (TREE_CODE (TREE_TYPE (*node)) == FUNCTION_TYPE
|| TREE_CODE (TREE_TYPE (*node)) == METHOD_TYPE)
&& arm_isr_value (args) != ARM_FT_UNKNOWN)
{
/* Pointer to function: attach the attribute to the pointee
   type on a fresh type variant.  */
*node = build_variant_type_copy (*node);
TREE_TYPE (*node) = build_type_attribute_variant
(TREE_TYPE (*node),
tree_cons (name, args, TYPE_ATTRIBUTES (TREE_TYPE (*node))));
*no_add_attrs = true;
}
else
{
/* A declaration/function/array may follow that this attribute
   can apply to: defer by returning it for reconsideration.  */
if (flags & ((int) ATTR_FLAG_DECL_NEXT
| (int) ATTR_FLAG_FUNCTION_NEXT
| (int) ATTR_FLAG_ARRAY_NEXT))
{
*no_add_attrs = true;
return tree_cons (name, args, NULL_TREE);
}
else
{
warning (OPT_Wattributes, "%qs attribute ignored",
IDENTIFIER_POINTER (name));
}
}
}
return NULL_TREE;
}
#if TARGET_DLLIMPORT_DECL_ATTRIBUTES
/* Handler for the "notshared" attribute: mark the type's decl as
   hidden-visibility so it is not exported from a shared object.  The
   attribute itself is still attached (*no_add_attrs = false).  */
static tree
arm_handle_notshared_attribute (tree *node,
tree name ATTRIBUTE_UNUSED,
tree args ATTRIBUTE_UNUSED,
int flags ATTRIBUTE_UNUSED,
bool *no_add_attrs)
{
tree decl = TYPE_NAME (*node);
if (decl)
{
DECL_VISIBILITY (decl) = VISIBILITY_HIDDEN;
DECL_VISIBILITY_SPECIFIED (decl) = 1;
*no_add_attrs = false;
}
return NULL_TREE;
}
#endif
/* Return 0 if the attributes on TYPE1 and TYPE2 are incompatible,
   1 if they are compatible.  Checks the long_call/short_call pair for
   agreement, then the isr/interrupt attribute.

   Fix: when probing TYPE2 for "interrupt", the result was assigned to
   L1 instead of L2, clobbering TYPE1's answer and making the final
   comparison meaningless for "interrupt"-attributed types.  */
static int
arm_comp_type_attributes (tree type1, tree type2)
{
  int l1, l2, s1, s2;

  /* Only function types carry these attributes.  */
  if (TREE_CODE (type1) != FUNCTION_TYPE)
    return 1;

  /* Check for mismatched call-type attributes.  */
  l1 = lookup_attribute ("long_call", TYPE_ATTRIBUTES (type1)) != NULL;
  l2 = lookup_attribute ("long_call", TYPE_ATTRIBUTES (type2)) != NULL;
  s1 = lookup_attribute ("short_call", TYPE_ATTRIBUTES (type1)) != NULL;
  s2 = lookup_attribute ("short_call", TYPE_ATTRIBUTES (type2)) != NULL;

  /* Only bother to check if an attribute is present at all.  */
  if (l1 | l2 | s1 | s2)
    {
      /* If one type has an attribute, the other must have it too.  */
      if ((l1 != l2) || (s1 != s2))
	return 0;

      /* Disallow mixed long_call/short_call attributes.  */
      if ((l1 & s2) || (l2 & s1))
	return 0;
    }

  /* Check for a mismatched ISR attribute ("interrupt" is a synonym).  */
  l1 = lookup_attribute ("isr", TYPE_ATTRIBUTES (type1)) != NULL;
  if (! l1)
    l1 = lookup_attribute ("interrupt", TYPE_ATTRIBUTES (type1)) != NULL;
  l2 = lookup_attribute ("isr", TYPE_ATTRIBUTES (type2)) != NULL;
  if (! l2)
    l2 = lookup_attribute ("interrupt", TYPE_ATTRIBUTES (type2)) != NULL;
  if (l1 != l2)
    return 0;

  return 1;
}
/* Record a short/long call attribute on DECL's assembler name: on
   Mach-O targets via SYMBOL_REF_FLAGS, otherwise by prefixing the
   symbol string with the FLAG character.  Weak symbols never get the
   short-call marking (they may be overridden at link time).  */
void
arm_encode_call_attribute (tree decl, int flag)
{
#if TARGET_MACHO
rtx sym_ref = XEXP (DECL_RTL (decl), 0);
/* Default-visibility weak symbols may bind elsewhere: don't mark.  */
if (DECL_WEAK (decl)
&& DECL_VISIBILITY (decl) == VISIBILITY_DEFAULT
&& flag == SYMBOL_SHORT_CALL)
return;
SYMBOL_REF_FLAGS (sym_ref) |= flag;
#else
const char * str = XSTR (XEXP (DECL_RTL (decl), 0), 0);
int len = strlen (str);
char * newstr;
/* A weak function may be overridden: never mark it short-call.  */
if (DECL_WEAK (decl) && flag == SHORT_CALL_FLAG_CHAR)
return;
/* Build "<flag><name>" and intern it in GC-managed storage.  */
newstr = alloca (len + 2);
newstr[0] = flag;
strcpy (newstr + 1, str);
newstr = (char *) ggc_alloc_string (newstr, len + 1);
XSTR (XEXP (DECL_RTL (decl), 0), 0) = newstr;
#endif
}
/* Assign default attributes to a newly defined TYPE: apply the current
   "#pragma long_calls" state to function types, and the ms_struct layout
   attribute to record/union types when MS bitfield layout is in force.  */
static void
arm_set_default_type_attributes (tree type)
{
/* Add __attribute__ ((long_call)) or ((short_call)) to function types.  */
if (TREE_CODE (type) == FUNCTION_TYPE || TREE_CODE (type) == METHOD_TYPE)
{
tree type_attr_list, attr_name;
type_attr_list = TYPE_ATTRIBUTES (type);
if (arm_pragma_long_calls == LONG)
attr_name = get_identifier ("long_call");
else if (arm_pragma_long_calls == SHORT)
attr_name = get_identifier ("short_call");
else
return;
type_attr_list = tree_cons (attr_name, NULL_TREE, type_attr_list);
TYPE_ATTRIBUTES (type) = type_attr_list;
}
#if TARGET_MACHO
/* On Darwin, ms_struct layout may also be requested per-translation-unit
   via darwin_ms_struct.  */
else if ((TARGET_MS_BITFIELD_LAYOUT || darwin_ms_struct)
&& (TREE_CODE (type) == RECORD_TYPE
|| TREE_CODE (type) == UNION_TYPE))
#else
else if (TARGET_MS_BITFIELD_LAYOUT
&& (TREE_CODE (type) == RECORD_TYPE
|| TREE_CODE (type) == UNION_TYPE))
#endif
{
TYPE_ATTRIBUTES (type) = tree_cons (get_identifier ("ms_struct"),
NULL_TREE,
TYPE_ATTRIBUTES (type));
}
}
/* Return nonzero if SYM_REF is known to resolve within the current
   compilation unit: either the symbol is explicitly marked short-call,
   or it is the current function itself (and not weak, so it cannot be
   overridden by another definition at link time).  */
static int
current_file_function_operand (rtx sym_ref)
{
#if TARGET_MACHO
if (SYMBOL_SHORT_CALL_ATTR_P (sym_ref))
#else
if (ENCODED_SHORT_CALL_ATTR_P (XSTR (sym_ref, 0)))
#endif
return 1;
/* The current function is always defined within the current compilation
   unit, unless it is weak and may be replaced.  */
if (sym_ref == XEXP (DECL_RTL (current_function_decl), 0)
&& !DECL_WEAK (current_function_decl))
return 1;
return 0;
}
/* Return nonzero if a 32-bit "long-call" should be generated for the call
   described by SYM_REF and CALL_COOKIE.  CALL_SYMBOL is nonzero when
   SYM_REF is already the bare symbol rather than a call-address MEM.  */
int
arm_is_longcall_p (rtx sym_ref, int call_cookie, int call_symbol)
{
if (!call_symbol)
{
/* Strip the MEM wrapper off a call address.  */
if (GET_CODE (sym_ref) != MEM)
return 0;
sym_ref = XEXP (sym_ref, 0);
}
if (GET_CODE (sym_ref) != SYMBOL_REF)
return 0;
/* An explicit short_call request always wins.  */
if (call_cookie & CALL_SHORT)
return 0;
if (TARGET_LONG_CALLS)
{
/* With -ffunction-sections or an explicit section, even local symbols
   may end up out of branch range, so force the long form.  */
if (flag_function_sections
|| DECL_SECTION_NAME (current_function_decl))
return 1;
}
/* Symbols known to resolve in this compilation unit stay short.  */
if (current_file_function_operand (sym_ref))
return 0;
#if TARGET_MACHO
return (call_cookie & CALL_LONG)
|| SYMBOL_LONG_CALL_ATTR_P (sym_ref)
|| TARGET_LONG_CALLS;
#else
return (call_cookie & CALL_LONG)
|| ENCODED_LONG_CALL_ATTR_P (XSTR (sym_ref, 0))
|| TARGET_LONG_CALLS;
#endif
}
/* Return true if it is OK to turn the call described by DECL into a
   sibling (tail) call.  */
static bool
arm_function_ok_for_sibcall (tree decl, tree exp ATTRIBUTE_UNUSED)
{
  int cookie;

  /* Something earlier in compilation may already have vetoed sibcalls
     for this function.  */
  if (cfun->machine->sibcall_blocked)
    return false;

  /* Sibling calls are not implemented for Thumb.  */
  if (TARGET_THUMB)
    return false;

  /* With no declaration there are no attributes to inspect; allow it.  */
  if (decl == NULL)
    return true;

  /* Work out the effective call cookie from the target default and any
     explicit short_call/long_call attribute on the callee's type.  */
  cookie = TARGET_LONG_CALLS ? CALL_LONG : CALL_NORMAL;
  if (lookup_attribute ("short_call",
			TYPE_ATTRIBUTES (TREE_TYPE (decl))) != NULL_TREE)
    cookie = CALL_SHORT;
  else if (lookup_attribute ("long_call",
			     TYPE_ATTRIBUTES (TREE_TYPE (decl))) != NULL_TREE)
    cookie = CALL_LONG;

  /* A long call to an as-yet-unemitted (or PIC) target cannot be turned
     into a direct branch.  */
  if (cookie == CALL_LONG && (flag_pic || !TREE_ASM_WRITTEN (decl)))
    return false;

  /* An externally visible, not-yet-emitted function may need an
     interworking-capable call sequence.  */
  if (TREE_PUBLIC (decl) && !TREE_ASM_WRITTEN (decl) && TARGET_INTERWORK)
    {
      if (TARGET_MACHO)
	return false;
      if (!arm_arch5)
	return false;
    }

  /* A weak public symbol may be redefined elsewhere in a different mode.  */
  if (arm_cpp_interwork && TREE_PUBLIC (decl) && DECL_WEAK (decl))
    return false;

  /* Interrupt handlers use a special return sequence.  */
  if (IS_INTERRUPT (arm_current_func_type ()))
    return false;

  /* Everything else is safe.  */
  return true;
}
/* Return nonzero if X is a legitimate immediate operand when compiling
   PIC code.  Bare symbols, and CONST (PLUS (SYMBOL_REF ...)) sums, need
   to go through the PIC register and are therefore rejected.  */
int
legitimate_pic_operand_p (rtx x)
{
  bool symbolic
    = (GET_CODE (x) == SYMBOL_REF
       || (GET_CODE (x) == CONST
	   && GET_CODE (XEXP (x, 0)) == PLUS
	   && GET_CODE (XEXP (XEXP (x, 0), 0)) == SYMBOL_REF));

  return symbolic ? 0 : 1;
}
/* Convert ORIG (a symbolic address) into a form usable under PIC,
   loading the result into REG (a fresh pseudo is allocated when REG is
   zero).  MODE is the mode of the value being addressed.  Returns the
   register (or expression) holding the legitimized address.  */
rtx
legitimize_pic_address (rtx orig, enum machine_mode mode, rtx reg)
{
if (GET_CODE (orig) == SYMBOL_REF
|| GET_CODE (orig) == LABEL_REF)
{
#ifndef AOF_ASSEMBLER
rtx pic_ref, address;
#endif
rtx norig, l1 = NULL_RTX;
rtx insn;
int subregs = 0;
/* avoid_ind: true when the symbol is locally defined and no Mach-O
   indirection load is required.  */
bool avoid_ind = true;
/* First use of the PIC base in this function: pick (or allocate) the
   PIC register and, for a pseudo, emit its initialization at the
   function entry.  */
if (!TARGET_MACHO && !current_function_uses_pic_offset_table)
{
gcc_assert (!no_new_pseudos);
if (arm_pic_register != INVALID_REGNUM)
{
cfun->machine->pic_reg = gen_rtx_REG (Pmode, arm_pic_register);
/* NOTE(review): ir_type() appears to gate RTL-only bookkeeping
   (LLVM hybrid build) -- confirm.  */
if (!ir_type())
current_function_uses_pic_offset_table = 1;
}
else
{
rtx seq;
cfun->machine->pic_reg = gen_reg_rtx (Pmode);
if (!ir_type())
{
current_function_uses_pic_offset_table = 1;
/* Materialize the PIC base load as a sequence placed right after
   the function entry.  */
start_sequence ();
arm_load_pic_register (0UL);
seq = get_insns ();
end_sequence ();
emit_insn_after (seq, entry_of_function ());
}
}
}
if (reg == 0)
{
gcc_assert (!no_new_pseudos);
reg = gen_reg_rtx (Pmode);
subregs = 1;
}
#ifdef AOF_ASSEMBLER
/* The AOF assembler can generate relocations for these directly, and
   understands that the PIC register has to be added into the offset.  */
insn = emit_insn (gen_pic_load_addr_based (reg, orig));
#else
if (subregs)
address = gen_reg_rtx (Pmode);
else
address = reg;
/* norig is the (possibly indirected / label-relative) form of ORIG
   actually loaded below; it stays equal to ORIG off Mach-O.  */
norig = orig;
#if TARGET_MACHO
if (TARGET_MACHO)
{
if (GET_CODE (orig) == SYMBOL_REF
|| GET_CODE (orig) == LABEL_REF)
{
rtx x, ptr_ref = orig;
l1 = gen_label_rtx ();
if (GET_CODE (orig) == SYMBOL_REF)
{
bool defined = machopic_data_defined_p (orig);
/* A locally defined symbol under -mdynamic-no-pic needs no
   transformation at all.  */
if (defined && MACHO_DYNAMIC_NO_PIC_P)
return orig;
if (! defined)
{
/* Undefined symbol: go through the Mach-O non-lazy pointer.  */
avoid_ind = false;
ptr_ref = gen_rtx_SYMBOL_REF (Pmode,
machopic_indirection_name (orig, false));
SET_SYMBOL_REF_DECL (ptr_ref, SYMBOL_REF_DECL (orig));
SYMBOL_REF_FLAGS (ptr_ref) |= MACHO_SYMBOL_FLAG_DEFINED;
}
}
else
{
if (MACHO_DYNAMIC_NO_PIC_P)
return orig;
}
if (! MACHO_DYNAMIC_NO_PIC_P)
{
/* Form (symbol - (L1 + pc-offset)); the 8/4 constant matches the
   ARM/Thumb pc-read offset.  */
x = plus_constant (gen_rtx_LABEL_REF (Pmode, l1), TARGET_ARM ? 8 : 4);
ptr_ref = gen_rtx_CONST (Pmode, gen_rtx_MINUS (Pmode, ptr_ref, x));
}
norig = ptr_ref;
}
}
#endif
if (TARGET_MACHO && ! MACHO_DYNAMIC_NO_PIC_P)
{
if (GET_CODE (orig) == SYMBOL_REF
|| GET_CODE (orig) == LABEL_REF)
{
/* Load the pc-relative constant and add in the pc.  */
if (TARGET_ARM)
{
emit_insn (gen_pic_load_addr_arm (address, norig, l1));
emit_insn (gen_pic_add_dot_plus_eight (address, l1, address));
}
else
{
emit_insn (gen_pic_load_addr_thumb (address, norig, l1));
emit_insn (gen_pic_add_dot_plus_four (address, l1, address));
}
}
else
abort ();
}
else
{
if (TARGET_ARM)
emit_insn (gen_pic_load_addr_arm (address, norig, l1));
else
emit_insn (gen_pic_load_addr_thumb (address, norig, l1));
}
/* A local symbol with GOT relocations is addressed PIC-base-relative
   directly; otherwise the address must be loaded through the GOT (or
   the Mach-O indirection slot).  */
if ((GET_CODE (orig) == LABEL_REF
|| (GET_CODE (orig) == SYMBOL_REF &&
SYMBOL_REF_LOCAL_P (orig)))
&& NEED_GOT_RELOC)
pic_ref = gen_rtx_PLUS (Pmode, cfun->machine->pic_reg, address);
else
{
if (! TARGET_MACHO)
pic_ref = gen_const_mem (Pmode,
gen_rtx_PLUS (Pmode, cfun->machine->pic_reg,
address));
else if (avoid_ind)
pic_ref = address;
else
pic_ref = gen_const_mem (Pmode, address);
}
insn = emit_move_insn (reg, pic_ref);
#endif
/* Put a REG_EQUAL note on this insn, so that it can be optimized by
   loop and CSE.  */
REG_NOTES (insn) = gen_rtx_EXPR_LIST (REG_EQUAL, orig,
REG_NOTES (insn));
return reg;
}
else if (GET_CODE (orig) == CONST)
{
rtx base, offset;
/* Already in the (pic_reg + offset) form we want.  */
if (GET_CODE (XEXP (orig, 0)) == PLUS
&& XEXP (XEXP (orig, 0), 0) == cfun->machine->pic_reg)
return orig;
/* TLS references are handled elsewhere; leave them alone.  */
if (GET_CODE (XEXP (orig, 0)) == UNSPEC
&& XINT (XEXP (orig, 0), 1) == UNSPEC_TLS)
return orig;
if (reg == 0)
{
gcc_assert (!no_new_pseudos);
reg = gen_reg_rtx (Pmode);
}
gcc_assert (GET_CODE (XEXP (orig, 0)) == PLUS);
/* Legitimize both halves of the sum, reusing REG where possible.  */
base = legitimize_pic_address (XEXP (XEXP (orig, 0), 0), Pmode, reg);
offset = legitimize_pic_address (XEXP (XEXP (orig, 0), 1), Pmode,
base == reg ? 0 : reg);
#if 0
if (GET_CODE (offset) == CONST_INT)
{
if (!arm_legitimate_index_p (mode, offset, SET, 0))
{
gcc_assert (!no_new_pseudos);
offset = force_reg (Pmode, offset);
}
if (GET_CODE (offset) == CONST_INT)
return plus_constant (base, INTVAL (offset));
}
#endif
/* Multi-word values cannot use a PLUS address directly; add into REG.  */
if (GET_MODE_SIZE (mode) > 4
&& (GET_MODE_CLASS (mode) == MODE_INT
|| TARGET_SOFT_FLOAT))
{
emit_insn (gen_addsi3 (reg, base, offset));
return reg;
}
return gen_rtx_PLUS (Pmode, base, offset);
}
return orig;
}
/* Find a register that can be clobbered as scratch in the function
   prologue.  PUSHED_REGS_MASK is the set of registers saved on entry.
   Aborts (gcc_unreachable) if no candidate exists.  */
static int
thumb_find_work_register (unsigned long pushed_regs_mask)
{
int reg;
/* Prefer an argument register that is never live in this function.  */
for (reg = LAST_ARG_REGNUM; reg >= 0; reg --)
if (!regs_ever_live[reg])
return reg;
/* With anonymous args and a pretend-args area, the last argument
   register has already been spilled and can be reused.  */
if (cfun->machine->uses_anonymous_args
&& current_function_pretend_args_size > 0)
return LAST_ARG_REGNUM;
/* If fewer than four registers carry incoming arguments, the last
   argument register is free.  */
if (! cfun->machine->uses_anonymous_args
&& current_function_args_size >= 0
&& current_function_args_size <= (LAST_ARG_REGNUM * UNITS_PER_WORD)
&& cfun->args_info.nregs < 4)
return LAST_ARG_REGNUM;
/* Otherwise use a callee-saved low register that was pushed on entry
   (its value can be restored from the stack).  */
for (reg = LAST_LO_REGNUM; reg > LAST_ARG_REGNUM; reg --)
if (pushed_regs_mask & (1 << reg))
return reg;
/* Callers are expected to guarantee a candidate exists.  */
gcc_unreachable ();
}
/* Counter used to generate unique local labels for PIC/TLS address
   loads (wrapped in UNSPEC_PIC_LABEL); GC-rooted via GTY.  */
static GTY(()) int pic_labelno;
/* Emit the code to initialize the PIC register (the GOT base) at the
   start of the function.  SAVED_REGS is the prologue's saved-register
   mask, used on Thumb to find a scratch low register.  */
void
arm_load_pic_register (unsigned long saved_regs ATTRIBUTE_UNUSED)
{
#ifndef AOF_ASSEMBLER
rtx l1, labelno, pic_tmp, pic_tmp2, pic_rtx;
rtx global_offset_table;
if (current_function_uses_pic_offset_table == 0 || TARGET_SINGLE_PIC_BASE)
return;
gcc_assert (flag_pic);
/* Build the constant (_GLOBAL_OFFSET_TABLE_ [+ pc] - (L1 + 8/4)):
   the pc-read offset is 8 on ARM, 4 on Thumb.  */
labelno = GEN_INT (pic_labelno++);
l1 = gen_rtx_UNSPEC (Pmode, gen_rtvec (1, labelno), UNSPEC_PIC_LABEL);
l1 = gen_rtx_CONST (VOIDmode, l1);
global_offset_table = gen_rtx_SYMBOL_REF (Pmode, "_GLOBAL_OFFSET_TABLE_");
pic_tmp = plus_constant (l1, TARGET_ARM ? 8 : 4);
if (GOT_PCREL)
pic_tmp2 = gen_rtx_CONST (VOIDmode,
gen_rtx_PLUS (Pmode, global_offset_table, pc_rtx))

;
else
pic_tmp2 = gen_rtx_CONST (VOIDmode, global_offset_table);
pic_rtx = gen_rtx_CONST (Pmode, gen_rtx_MINUS (Pmode, pic_tmp2, pic_tmp));
if (TARGET_ARM)
{
emit_insn (gen_pic_load_addr_arm (cfun->machine->pic_reg, pic_rtx, l1));
emit_insn (gen_pic_add_dot_plus_eight (cfun->machine->pic_reg, l1,
cfun->machine->pic_reg));
}
else
{
/* A fixed PIC register above the low registers cannot be loaded
   directly on Thumb; stage the value through a scratch low reg.  */
if (arm_pic_register != INVALID_REGNUM
&& REGNO (cfun->machine->pic_reg) > LAST_LO_REGNUM)
{
pic_tmp = gen_rtx_REG (SImode,
thumb_find_work_register (saved_regs));
emit_insn (gen_pic_load_addr_thumb (pic_tmp, pic_rtx, l1));
emit_insn (gen_movsi (pic_offset_table_rtx, pic_tmp));
}
else
emit_insn (gen_pic_load_addr_thumb (cfun->machine->pic_reg, pic_rtx, l1));
emit_insn (gen_pic_add_dot_plus_four (cfun->machine->pic_reg, l1,
cfun->machine->pic_reg));
}
/* Keep the PIC register live so the initialization is not deleted.  */
emit_insn (gen_rtx_USE (VOIDmode, cfun->machine->pic_reg));
#endif
}
/* Return nonzero if X (a REG) may serve as an ARM-state base register.
   In strict mode only registers acceptable to ARM_REGNO_OK_FOR_BASE_P
   qualify; otherwise any core register, pseudo, or eliminable pointer
   register is allowed.  */
static int
arm_address_register_rtx_p (rtx x, int strict_p)
{
  int regno;

  if (GET_CODE (x) != REG)
    return 0;

  regno = REGNO (x);

  if (strict_p)
    return ARM_REGNO_OK_FOR_BASE_P (regno);

  /* Non-strict: the frame/arg pointers are acceptable even though they
     will later be eliminated.  */
  if (regno == FRAME_POINTER_REGNUM || regno == ARG_POINTER_REGNUM)
    return 1;

  return regno <= LAST_ARM_REGNUM || regno >= FIRST_PSEUDO_REGISTER;
}
/* Return TRUE for constants of the form (symbol - label), i.e.
   pc-relative constant pool references.  */
static int
pcrel_constant_p (rtx x)
{
  if (GET_CODE (x) != MINUS)
    return FALSE;

  return symbol_mentioned_p (XEXP (x, 0))
	 && label_mentioned_p (XEXP (x, 1));
}
/* Return nonzero if X is a valid ARM-state address for a value of mode
   MODE.  OUTER is the enclosing rtx code (used to refine index ranges),
   STRICT_P selects strict register checking.  */
int
arm_legitimate_address_p (enum machine_mode mode, rtx x, RTX_CODE outer,
int strict_p)
{
bool use_ldrd;
enum rtx_code code = GET_CODE (x);
if (arm_address_register_rtx_p (x, strict_p))
return 1;
/* LDRD/STRD handle 8-byte values and widen the set of valid
   auto-modify forms.  */
use_ldrd = (TARGET_LDRD
&& (mode == DImode
|| (mode == DFmode && (TARGET_SOFT_FLOAT || TARGET_VFP))));
/* Simple auto-increment/decrement addressing.  */
if (code == POST_INC || code == PRE_DEC
|| ((code == PRE_INC || code == POST_DEC)
&& (use_ldrd || GET_MODE_SIZE (mode) <= 4)))
return arm_address_register_rtx_p (XEXP (x, 0), strict_p);
/* (pre/post)_modify: base must be a register and the modification a
   (plus base addend) of the same base.  */
else if ((code == POST_MODIFY || code == PRE_MODIFY)
&& arm_address_register_rtx_p (XEXP (x, 0), strict_p)
&& GET_CODE (XEXP (x, 1)) == PLUS
&& rtx_equal_p (XEXP (XEXP (x, 1), 0), XEXP (x, 0)))
{
rtx addend = XEXP (XEXP (x, 1), 1);
/* LDRD has no register post-indexed form.  */
if (use_ldrd
&& GET_CODE (x) == POST_MODIFY
&& GET_CODE (addend) == REG)
return 0;
return ((use_ldrd || GET_MODE_SIZE (mode) <= 4)
&& arm_legitimate_index_p (mode, addend, outer, strict_p));
}
/* After reload constants split into minipools will be pc-relative
   label references; accept them.  */
else if (reload_completed
&& (code == LABEL_REF
|| (code == CONST
&& GET_CODE (XEXP (x, 0)) == PLUS
&& GET_CODE (XEXP (XEXP (x, 0), 0)) == LABEL_REF
&& GET_CODE (XEXP (XEXP (x, 0), 1)) == CONST_INT)))
return 1;
/* 128-bit values only take a plain register address.  */
else if (mode == TImode)
return 0;
/* base + index, in either operand order.  */
else if (code == PLUS)
{
rtx xop0 = XEXP (x, 0);
rtx xop1 = XEXP (x, 1);
return ((arm_address_register_rtx_p (xop0, strict_p)
&& arm_legitimate_index_p (mode, xop1, outer, strict_p))
|| (arm_address_register_rtx_p (xop1, strict_p)
&& arm_legitimate_index_p (mode, xop0, outer, strict_p)));
}
#if 0
else if (GET_CODE (x) == MINUS)
{
rtx xop0 = XEXP (x, 0);
rtx xop1 = XEXP (x, 1);
return (arm_address_register_rtx_p (xop0, strict_p)
&& arm_legitimate_index_p (mode, xop1, outer, strict_p));
}
#endif
/* Constant pool references are acceptable unless the entry itself
   mentions a symbol under PIC (and is not already pc-relative).  */
else if (GET_MODE_CLASS (mode) != MODE_FLOAT
&& code == SYMBOL_REF
&& CONSTANT_POOL_ADDRESS_P (x)
&& ! (flag_pic
&& symbol_mentioned_p (get_pool_constant (x))
&& ! pcrel_constant_p (get_pool_constant (x))))
return 1;
return 0;
}
/* Return nonzero if INDEX is a valid ARM-state address-offset for a
   value of mode MODE.  OUTER is the enclosing code (SIGN_EXTEND allows
   a wider range for QImode); STRICT_P selects strict register checks.  */
static int
arm_legitimate_index_p (enum machine_mode mode, rtx index, RTX_CODE outer,
int strict_p)
{
HOST_WIDE_INT range;
enum rtx_code code = GET_CODE (index);
/* Coprocessor (FPA/Maverick/VFP) loads: 10-bit signed, word-aligned
   immediate offsets only.  */
if (TARGET_HARD_FLOAT
&& (TARGET_FPA || TARGET_MAVERICK || TARGET_VFP)
&& (GET_MODE_CLASS (mode) == MODE_FLOAT
|| (TARGET_MAVERICK && mode == DImode)))
return (code == CONST_INT && INTVAL (index) < 1024
&& INTVAL (index) > -1024
&& (INTVAL (index) & 3) == 0);
/* iWMMXt register modes use the same constrained immediate form,
   except that DImode with LDRD falls through to the core handling.  */
if (TARGET_REALLY_IWMMXT && VALID_IWMMXT_REG_MODE (mode))
{
if (!TARGET_LDRD || mode != DImode)
return (code == CONST_INT
&& INTVAL (index) < 1024
&& INTVAL (index) > -1024
&& (INTVAL (index) & 3) == 0);
}
/* Register offsets are fine for word-or-smaller accesses.  */
if (arm_address_register_rtx_p (index, strict_p)
&& (GET_MODE_SIZE (mode) <= 4))
return 1;
/* 8-byte values: LDRD takes an 8-bit immediate or a register; the
   two-LDR fallback needs both words within the 12-bit range.  */
if (mode == DImode || mode == DFmode)
{
if (code == CONST_INT)
{
HOST_WIDE_INT val = INTVAL (index);
if (TARGET_LDRD)
return val > -256 && val < 256;
else
return val > -4096 && val < 4092;
}
return TARGET_LDRD && arm_address_register_rtx_p (index, strict_p);
}
/* Scaled-register offsets (reg * 2^n, reg shifted by 1..31) -- not
   available for the ARMv4 halfword/signed-byte load forms.  */
if (GET_MODE_SIZE (mode) <= 4
&& ! (arm_arch4
&& (mode == HImode
|| (mode == QImode && outer == SIGN_EXTEND))))
{
if (code == MULT)
{
rtx xiop0 = XEXP (index, 0);
rtx xiop1 = XEXP (index, 1);
return ((arm_address_register_rtx_p (xiop0, strict_p)
&& power_of_two_operand (xiop1, SImode))
|| (arm_address_register_rtx_p (xiop1, strict_p)
&& power_of_two_operand (xiop0, SImode)));
}
else if (code == LSHIFTRT || code == ASHIFTRT
|| code == ASHIFT || code == ROTATERT)
{
rtx op = XEXP (index, 1);
return (arm_address_register_rtx_p (XEXP (index, 0), strict_p)
&& GET_CODE (op) == CONST_INT
&& INTVAL (op) > 0
&& INTVAL (op) <= 31);
}
}
/* Immediate offsets: the ARMv4 LDRH/LDRSB forms have an 8-bit range,
   everything else 12 bits.  */
if (arm_arch4)
{
if (mode == HImode || (outer == SIGN_EXTEND && mode == QImode))
range = 256;
else
range = 4096;
}
else
range = (mode == HImode) ? 4095 : 4096;
return (code == CONST_INT
&& INTVAL (index) < range
&& INTVAL (index) > -range);
}
/* Return nonzero if X (a REG) is a valid Thumb-state base register for
   an access of mode MODE.  STRICT_P selects strict register checking.  */
static int
thumb_base_register_rtx_p (rtx x, enum machine_mode mode, int strict_p)
{
int regno;
if (GET_CODE (x) != REG)
return 0;
regno = REGNO (x);
if (strict_p)
return THUMB_REGNO_MODE_OK_FOR_BASE_P (regno, mode);
/* Non-strict: low registers always; SP and the eliminable pointer
   registers only for word-or-larger accesses (Thumb SP-relative
   addressing handles words only).  */
return (regno <= LAST_LO_REGNUM
|| regno > LAST_VIRTUAL_REGISTER
|| regno == FRAME_POINTER_REGNUM
|| (GET_MODE_SIZE (mode) >= 4
&& (regno == STACK_POINTER_REGNUM
|| regno >= FIRST_PSEUDO_REGISTER
|| x == hard_frame_pointer_rtx
|| x == arg_pointer_rtx)));
}
/* Return nonzero if X is a valid Thumb index register.  QImode is the
   most restrictive base-register case, so the base check suffices.  */
inline static int
thumb_index_register_rtx_p (rtx x, int strict_p)
{
return thumb_base_register_rtx_p (x, QImode, strict_p);
}
/* Return nonzero if X is a valid Thumb-state address for a value of
   mode MODE.  STRICT_P selects strict register checking.  */
int
thumb_legitimate_address_p (enum machine_mode mode, rtx x, int strict_p)
{
/* Before reload, reject frame/arg-pointer-relative addresses for
   sub-word accesses: after elimination they may become SP-relative,
   and Thumb SP-relative addressing only handles words.  */
if (GET_MODE_SIZE (mode) < 4
&& !(reload_in_progress || reload_completed)
&& (reg_mentioned_p (frame_pointer_rtx, x)
|| reg_mentioned_p (arg_pointer_rtx, x)
|| reg_mentioned_p (virtual_incoming_args_rtx, x)
|| reg_mentioned_p (virtual_outgoing_args_rtx, x)
|| reg_mentioned_p (virtual_stack_dynamic_rtx, x)
|| reg_mentioned_p (virtual_stack_vars_rtx, x)))
return 0;
/* A plain base register.  */
else if (thumb_base_register_rtx_p (x, mode, strict_p))
return 1;
/* pc-relative constant pool loads (non-PIC, word-or-larger).  */
else if (GET_MODE_SIZE (mode) >= 4 && CONSTANT_P (x)
&& GET_CODE (x) == SYMBOL_REF
&& CONSTANT_POOL_ADDRESS_P (x) && !flag_pic)
return 1;
/* After reload, minipool entries appear as label(+offset) refs.  */
else if (GET_MODE_SIZE (mode) >= 4 && reload_completed
&& (GET_CODE (x) == LABEL_REF
|| (GET_CODE (x) == CONST
&& GET_CODE (XEXP (x, 0)) == PLUS
&& GET_CODE (XEXP (XEXP (x, 0), 0)) == LABEL_REF
&& GET_CODE (XEXP (XEXP (x, 0), 1)) == CONST_INT)))
return 1;
/* Post-increment (word accesses only).  */
else if (GET_CODE (x) == POST_INC && GET_MODE_SIZE (mode) >= 4
&& thumb_index_register_rtx_p (XEXP (x, 0), strict_p))
return 1;
else if (GET_CODE (x) == PLUS)
{
/* base + index register (not via the soon-to-be-eliminated frame
   pointer, and only for word-or-smaller accesses).  */
if (GET_MODE_SIZE (mode) <= 4
&& XEXP (x, 0) != frame_pointer_rtx
&& XEXP (x, 1) != frame_pointer_rtx
&& thumb_index_register_rtx_p (XEXP (x, 0), strict_p)
&& thumb_index_register_rtx_p (XEXP (x, 1), strict_p))
return 1;
/* base + small immediate (range depends on MODE).  */
else if ((thumb_index_register_rtx_p (XEXP (x, 0), strict_p)
|| XEXP (x, 0) == arg_pointer_rtx)
&& GET_CODE (XEXP (x, 1)) == CONST_INT
&& thumb_legitimate_offset_p (mode, INTVAL (XEXP (x, 1))))
return 1;
/* SP + word-aligned offset within the 1KB SP-relative range.  */
else if (GET_CODE (XEXP (x, 0)) == REG
&& REGNO (XEXP (x, 0)) == STACK_POINTER_REGNUM
&& GET_MODE_SIZE (mode) >= 4
&& GET_CODE (XEXP (x, 1)) == CONST_INT
&& INTVAL (XEXP (x, 1)) >= 0
&& INTVAL (XEXP (x, 1)) + GET_MODE_SIZE (mode) <= 1024
&& (INTVAL (XEXP (x, 1)) & 3) == 0)
return 1;
/* FP + word-aligned offset (FP will be eliminated to SP).  */
else if (GET_CODE (XEXP (x, 0)) == REG
&& REGNO (XEXP (x, 0)) == FRAME_POINTER_REGNUM
&& GET_MODE_SIZE (mode) >= 4
&& GET_CODE (XEXP (x, 1)) == CONST_INT
&& (INTVAL (XEXP (x, 1)) & 3) == 0)
return 1;
}
/* Constant pool references, as in the ARM-state routine.  */
else if (GET_MODE_CLASS (mode) != MODE_FLOAT
&& GET_MODE_SIZE (mode) == 4
&& GET_CODE (x) == SYMBOL_REF
&& CONSTANT_POOL_ADDRESS_P (x)
&& ! (flag_pic
&& symbol_mentioned_p (get_pool_constant (x))
&& ! pcrel_constant_p (get_pool_constant (x))))
return 1;
return 0;
}
/* Return nonzero if VAL is an in-range, suitably aligned immediate
   offset for a Thumb load/store of mode MODE: 5-bit scaled offsets of
   32 bytes, 64 halfwords, or 128 (minus the access size) word-aligned
   bytes respectively.  */
int
thumb_legitimate_offset_p (enum machine_mode mode, HOST_WIDE_INT val)
{
  HOST_WIDE_INT size = GET_MODE_SIZE (mode);

  /* Negative offsets are never encodable.  */
  if (val < 0)
    return 0;

  if (size == 1)
    return val < 32;

  if (size == 2)
    return val < 64 && (val & 1) == 0;

  /* Word or larger: whole access must fit below 128 bytes.  */
  return (val + size) <= 128 && (val & 3) == 0;
}
static GTY(()) rtx tls_get_addr_libfunc;
static rtx
get_tls_get_addr (void)
{
if (!tls_get_addr_libfunc)
tls_get_addr_libfunc = init_one_libfunc ("__tls_get_addr");
return tls_get_addr_libfunc;
}
/* Emit insns that read the thread pointer into TARGET (allocating a
   fresh SImode pseudo when TARGET is null) and return the register.  */
static rtx
arm_load_tp (rtx target)
{
  if (target == NULL_RTX)
    target = gen_reg_rtx (SImode);

  if (TARGET_HARD_TP)
    {
      /* Direct hardware thread-pointer read.  */
      emit_insn (gen_load_tp_hard (target));
    }
  else
    {
      /* Soft TP: the helper returns the thread pointer in r0.  */
      emit_insn (gen_load_tp_soft ());
      emit_move_insn (target, gen_rtx_REG (SImode, 0));
    }

  return target;
}
/* Move the TLS constant expression X, wrapped in a CONST, into REG
   (allocating a fresh SImode pseudo when REG is null); return REG.  */
static rtx
load_tls_operand (rtx x, rtx reg)
{
  if (reg == NULL_RTX)
    reg = gen_reg_rtx (SImode);

  emit_move_insn (reg, gen_rtx_CONST (SImode, x));

  return reg;
}
/* Build (as a detached insn sequence) a call to __tls_get_addr for the
   TLS symbol X with relocation kind RELOC (TLS_GD32/TLS_LDM32).  REG
   may name the register to build the argument in.  The call's result
   rtx is stored in *VALUEP; the sequence is returned.  */
static rtx
arm_call_tls_get_addr (rtx x, rtx reg, rtx *valuep, int reloc)
{
rtx insns, label, labelno, sum;
start_sequence ();
/* Unique pc-relative anchor label for this address load.  */
labelno = GEN_INT (pic_labelno++);
label = gen_rtx_UNSPEC (Pmode, gen_rtvec (1, labelno), UNSPEC_PIC_LABEL);
label = gen_rtx_CONST (VOIDmode, label);
/* The TLS unspec carries: symbol, reloc kind, anchor label, and the
   pc-read offset (8 for ARM, 4 for Thumb).  */
sum = gen_rtx_UNSPEC (Pmode,
gen_rtvec (4, x, GEN_INT (reloc), label,
GEN_INT (TARGET_ARM ? 8 : 4)),
UNSPEC_TLS);
reg = load_tls_operand (sum, reg);
/* Add in the pc to complete the GOT-relative address.  */
if (TARGET_ARM)
emit_insn (gen_pic_add_dot_plus_eight (reg, reg, labelno));
else
emit_insn (gen_pic_add_dot_plus_four (reg, reg, labelno));
*valuep = emit_library_call_value (get_tls_get_addr (), NULL_RTX, LCT_PURE,
Pmode, 1, reg, Pmode);
insns = get_insns ();
end_sequence ();
return insns;
}
/* Legitimize a reference to the thread-local symbol X according to its
   TLS access model, emitting whatever insns are required.  REG may name
   a register to use.  Returns an rtx for the legitimized address.  */
rtx
legitimize_tls_address (rtx x, rtx reg)
{
rtx dest, tp, label, labelno, sum, insns, ret, eqv, addend;
unsigned int model = SYMBOL_REF_TLS_MODEL (x);
switch (model)
{
case TLS_MODEL_GLOBAL_DYNAMIC:
/* Full __tls_get_addr call per symbol.  */
insns = arm_call_tls_get_addr (x, reg, &ret, TLS_GD32);
dest = gen_reg_rtx (Pmode);
emit_libcall_block (insns, dest, ret, x);
return dest;
case TLS_MODEL_LOCAL_DYNAMIC:
insns = arm_call_tls_get_addr (x, reg, &ret, TLS_LDM32);
/* Attach a unique REG_EQUAL, so that the RTL optimizers can CSE the
   module-base call across symbols of the same module.  */
eqv = gen_rtx_UNSPEC (Pmode, gen_rtvec (1, const1_rtx),
UNSPEC_TLS);
dest = gen_reg_rtx (Pmode);
emit_libcall_block (insns, dest, ret, eqv);
/* Add the module-relative (LDO32) offset of this symbol.  */
addend = gen_rtx_UNSPEC (Pmode, gen_rtvec (2, x, GEN_INT (TLS_LDO32)),
UNSPEC_TLS);
addend = force_reg (SImode, gen_rtx_CONST (SImode, addend));
return gen_rtx_PLUS (Pmode, dest, addend);
case TLS_MODEL_INITIAL_EXEC:
/* pc-relative load of the GOT slot holding the TP offset.  */
labelno = GEN_INT (pic_labelno++);
label = gen_rtx_UNSPEC (Pmode, gen_rtvec (1, labelno), UNSPEC_PIC_LABEL);
label = gen_rtx_CONST (VOIDmode, label);
sum = gen_rtx_UNSPEC (Pmode,
gen_rtvec (4, x, GEN_INT (TLS_IE32), label,
GEN_INT (TARGET_ARM ? 8 : 4)),
UNSPEC_TLS);
reg = load_tls_operand (sum, reg);
if (TARGET_ARM)
emit_insn (gen_tls_load_dot_plus_eight (reg, reg, labelno));
else
{
emit_insn (gen_pic_add_dot_plus_four (reg, reg, labelno));
emit_move_insn (reg, gen_const_mem (SImode, reg));
}
tp = arm_load_tp (NULL_RTX);
return gen_rtx_PLUS (Pmode, tp, reg);
case TLS_MODEL_LOCAL_EXEC:
/* Thread pointer plus a link-time (LE32) constant offset.  */
tp = arm_load_tp (NULL_RTX);
reg = gen_rtx_UNSPEC (Pmode,
gen_rtvec (2, x, GEN_INT (TLS_LE32)),
UNSPEC_TLS);
reg = force_reg (SImode, gen_rtx_CONST (SImode, reg));
return gen_rtx_PLUS (Pmode, tp, reg);
default:
abort ();
}
}
/* Try machine-dependent ways of modifying an illegitimate ARM-state
   address X (originally ORIG_X, for an access of mode MODE) into a
   legitimate one.  Returns the (possibly unchanged) new address.  */
rtx
arm_legitimize_address (rtx x, rtx orig_x, enum machine_mode mode)
{
if (arm_tls_symbol_p (x))
return legitimize_tls_address (x, NULL_RTX);
if (flag_pic)
{
/* PIC symbolic addresses are handled by the PIC legitimizer.  */
rtx new_x = legitimize_pic_address (orig_x, mode, NULL_RTX);
if (new_x != orig_x)
x = new_x;
}
else if (GET_CODE (x) == PLUS)
{
rtx xop0 = XEXP (x, 0);
rtx xop1 = XEXP (x, 1);
if (CONSTANT_P (xop0) && !symbol_mentioned_p (xop0))
xop0 = force_reg (SImode, xop0);
if (CONSTANT_P (xop1) && !symbol_mentioned_p (xop1)
&& GET_CODE (xop1) != CONST_INT)
xop1 = force_reg (SImode, xop1);
if (GET_CODE (xop1) == CONST_INT)
{
HOST_WIDE_INT n, low_n;
rtx base_reg, val;
/* ((reg + fp-offset) + const): fold the inner frame offset with
   the constant so elimination produces a simpler address.  */
if (GET_CODE (xop0) == PLUS)
{
rtx xop00 = XEXP (xop0, 0);
rtx xop01 = XEXP (xop0, 1);
if (xop01 == virtual_stack_vars_rtx)
{
base_reg = gen_reg_rtx (SImode);
val = force_operand (gen_rtx_PLUS (SImode, xop01, xop1),
NULL_RTX);
emit_move_insn (base_reg, val);
x = gen_rtx_PLUS (SImode, xop00, base_reg);
return x;
}
}
/* Split the constant into a part encodable as an addressing-mode
   offset (low_n) and a remainder added into a base register.  The
   split width depends on the addressing mode MODE gets.  */
n = INTVAL (xop1);
if (mode == DImode
|| ((TARGET_SOFT_FLOAT || TARGET_VFP) && mode == DFmode))
{
low_n = n & 0x0f;
n &= ~0x0f;
if (low_n > 4)
{
n += 16;
low_n -= 16;
}
}
else if ((mode == HImode || mode == QImode) && arm_arch4)
{
/* ARMv4 halfword/byte forms take an 8-bit offset.  */
low_n = n >= 0 ? (n & 0xff) : -((-n) & 0xff);
n -= low_n;
}
else
{
/* General 12-bit offset (none at all for TImode).  */
low_n = ((mode) == TImode ? 0
: n >= 0 ? (n & 0xfff) : -((-n) & 0xfff));
n -= low_n;
}
if (n != 0)
{
base_reg = gen_reg_rtx (SImode);
val = force_operand (plus_constant (xop0, n), NULL_RTX);
emit_move_insn (base_reg, val);
x = plus_constant (base_reg, low_n);
}
else if (xop0 != XEXP (x, 0) || xop1 != XEXP (x, 1))
x = gen_rtx_PLUS (SImode, xop0, xop1);
}
else if (xop0 != XEXP (x, 0) || xop1 != XEXP (x, 1))
x = gen_rtx_PLUS (SImode, xop0, xop1);
}
else if (GET_CODE (x) == MINUS)
{
rtx xop0 = XEXP (x, 0);
rtx xop1 = XEXP (x, 1);
if (CONSTANT_P (xop0))
xop0 = force_reg (SImode, xop0);
if (CONSTANT_P (xop1) && ! symbol_mentioned_p (xop1))
xop1 = force_reg (SImode, xop1);
if (xop0 != XEXP (x, 0) || xop1 != XEXP (x, 1))
x = gen_rtx_MINUS (SImode, xop0, xop1);
}
else if (GET_CODE (x) == CONST_INT && optimize > 0)
{
/* A raw constant address: split it into base + offset so the base
   can be shared between neighboring accesses.  Bias the base when
   its popcount would need too many constant-build insns.  */
unsigned int bits;
HOST_WIDE_INT mask, base, index;
rtx base_reg;
bits = (mode == SImode) ? 12 : 8;
mask = (1 << bits) - 1;
base = INTVAL (x) & ~mask;
index = INTVAL (x) & mask;
if (bit_count (base & 0xffffffff) > (32 - bits)/2)
{
base |= mask;
index -= mask;
}
base_reg = force_reg (SImode, GEN_INT (base));
x = plus_constant (base_reg, index);
}
return x;
}
/* Try machine-dependent ways of modifying an illegitimate Thumb-state
   address X (originally ORIG_X, for an access of mode MODE) into a
   legitimate one.  Returns the (possibly unchanged) new address.  */
rtx
thumb_legitimize_address (rtx x, rtx orig_x, enum machine_mode mode)
{
if (arm_tls_symbol_p (x))
return legitimize_tls_address (x, NULL_RTX);
/* base + out-of-range constant: rewrite so the residual offset fits
   the 5-bit scaled immediate field.  */
if (GET_CODE (x) == PLUS
&& GET_CODE (XEXP (x, 1)) == CONST_INT
&& (INTVAL (XEXP (x, 1)) >= 32 * GET_MODE_SIZE (mode)
|| INTVAL (XEXP (x, 1)) < 0))
{
rtx xop0 = XEXP (x, 0);
rtx xop1 = XEXP (x, 1);
HOST_WIDE_INT offset = INTVAL (xop1);
/* When optimizing for size, prefer folding most of the offset into
   the base (one add) and keeping a maximal encodable remainder.  */
if (optimize_size && offset >= 0
&& offset < 256 + 31 * GET_MODE_SIZE (mode))
{
HOST_WIDE_INT delta;
if (offset >= 256)
delta = offset - (256 - GET_MODE_SIZE (mode));
else if (offset < 32 * GET_MODE_SIZE (mode) + 8)
delta = 31 * GET_MODE_SIZE (mode);
else
delta = offset & (~31 * GET_MODE_SIZE (mode));
xop0 = force_operand (plus_constant (xop0, offset - delta),
NULL_RTX);
x = plus_constant (xop0, delta);
}
else if (offset < 0 && offset > -256)
/* Small negative offsets are best handled by a SUB into a
   register, which force_operand will produce.  */
x = force_operand (x, NULL_RTX);
else
{
/* Otherwise fall back to base + index-register form.  */
xop1 = force_reg (SImode, xop1);
x = gen_rtx_PLUS (SImode, xop0, xop1);
}
}
else if (GET_CODE (x) == PLUS
&& s_register_operand (XEXP (x, 1), SImode)
&& !s_register_operand (XEXP (x, 0), SImode))
{
/* Force the non-register operand into a register.  */
rtx xop0 = force_operand (XEXP (x, 0), NULL_RTX);
x = gen_rtx_PLUS (SImode, xop0, XEXP (x, 1));
}
if (flag_pic)
{
rtx new_x = legitimize_pic_address (orig_x, mode, NULL_RTX);
if (new_x != orig_x)
x = new_x;
}
return x;
}
/* Attempt to fix up an address for reload on Thumb by pushing a reload
   of the whole PLUS into a base register.  Returns the replacement rtx,
   or NULL if no fixup applies.  OPNUM/TYPE describe the reload slot.  */
rtx
thumb_legitimize_reload_address (rtx *x_p,
enum machine_mode mode,
int opnum, int type,
int ind_levels ATTRIBUTE_UNUSED)
{
rtx x = *x_p;
/* SP + out-of-range offset for a sub-word access: reload the whole
   sum into a base register.  */
if (GET_CODE (x) == PLUS
&& GET_MODE_SIZE (mode) < 4
&& REG_P (XEXP (x, 0))
&& XEXP (x, 0) == stack_pointer_rtx
&& GET_CODE (XEXP (x, 1)) == CONST_INT
&& !thumb_legitimate_offset_p (mode, INTVAL (XEXP (x, 1))))
{
rtx orig_x = x;
x = copy_rtx (x);
push_reload (orig_x, NULL_RTX, x_p, NULL, MODE_BASE_REG_CLASS (mode),
Pmode, VOIDmode, 0, 0, opnum, type);
return x;
}
/* reg + reg where neither register can serve as the base: reload the
   whole sum.  */
if (GET_CODE (x) == PLUS
&& REG_P (XEXP (x, 0))
&& REG_P (XEXP (x, 1))
&& !REG_MODE_OK_FOR_REG_BASE_P (XEXP (x, 0), mode)
&& !REG_MODE_OK_FOR_REG_BASE_P (XEXP (x, 1), mode))
{
rtx orig_x = x;
x = copy_rtx (x);
push_reload (orig_x, NULL_RTX, x_p, NULL, MODE_BASE_REG_CLASS (mode),
Pmode, VOIDmode, 0, 0, opnum, type);
return x;
}
return NULL;
}
/* Return true if X is a SYMBOL_REF for a thread-local symbol (and the
   target supports TLS at all).  */
static bool
arm_tls_symbol_p (rtx x)
{
  return (TARGET_HAVE_TLS
	  && GET_CODE (x) == SYMBOL_REF
	  && SYMBOL_REF_TLS_MODEL (x) != 0);
}
/* for_each_rtx callback: return 1 on a TLS SYMBOL_REF, -1 (skip the
   subtree) on an already-wrapped UNSPEC_TLS, 0 otherwise.  */
static int
arm_tls_operand_p_1 (rtx *x, void *data ATTRIBUTE_UNUSED)
{
  switch (GET_CODE (*x))
    {
    case SYMBOL_REF:
      return SYMBOL_REF_TLS_MODEL (*x) != 0;

    case UNSPEC:
      /* ARM TLS unspecs contain a symbol reference by design; do not
	 descend into them.  */
      if (XINT (*x, 1) == UNSPEC_TLS)
	return -1;
      return 0;

    default:
      return 0;
    }
}
/* Return true if X contains a (non-UNSPEC-wrapped) reference to a
   thread-local symbol anywhere in its subexpressions.  */
bool
arm_tls_referenced_p (rtx x)
{
  if (!TARGET_HAVE_TLS)
    return false;

  return for_each_rtx (&x, arm_tls_operand_p_1, NULL) != 0;
}
/* Return true if constant X must not be placed in the constant pool:
   TLS references need runtime legitimization, and non-indirect operands
   cannot be pooled.  */
static bool
arm_cannot_force_const_mem (rtx x)
{
  if (!LEGITIMATE_INDIRECT_OPERAND_P (x))
    return true;

  return arm_tls_referenced_p (x);
}
/* Nonzero if X is a REG or a SUBREG of a REG.  */
#define REG_OR_SUBREG_REG(X) \
(GET_CODE (X) == REG \
|| (GET_CODE (X) == SUBREG && GET_CODE (SUBREG_REG (X)) == REG))
/* The underlying REG of X (X itself, or the SUBREG's inner reg).  Only
   valid when REG_OR_SUBREG_REG (X) holds.  */
#define REG_OR_SUBREG_RTX(X) \
(GET_CODE (X) == REG ? (X) : SUBREG_REG (X))
#ifndef COSTS_N_INSNS
/* Fallback cost scale when rtl.h does not provide one.  */
#define COSTS_N_INSNS(N) ((N) * 4 - 2)
#endif
/* Worst-case cost estimates for Thumb RTX expression X with code CODE,
   appearing inside an expression with code OUTER.  Based on
   COSTS_N_INSNS, with ad-hoc additions for memory traffic.  */
static inline int
thumb_rtx_costs (rtx x, enum rtx_code code, enum rtx_code outer)
{
  enum machine_mode mode = GET_MODE (x);

  switch (code)
    {
    case ASHIFT:
    case ASHIFTRT:
    case LSHIFTRT:
    case ROTATERT:
    case PLUS:
    case MINUS:
    case COMPARE:
    case NEG:
    case NOT:
      return COSTS_N_INSNS (1);

    case MULT:
      if (GET_CODE (XEXP (x, 1)) == CONST_INT)
	{
	  int cycles = 0;
	  unsigned HOST_WIDE_INT i = INTVAL (XEXP (x, 1));

	  /* Early-terminating multiply: roughly one cycle per two
	     significant bits of the constant.  */
	  while (i)
	    {
	      i >>= 2;
	      cycles++;
	    }
	  return COSTS_N_INSNS (2) + cycles;
	}
      return COSTS_N_INSNS (1) + 16;

    case SET:
      /* One insn plus a 4-unit penalty for each memory operand.
	 BUG FIX: the destination comparison was unparenthesized, so
	 "+ GET_CODE (dest) == MEM" associated as
	 "((src == MEM) + GET_CODE (dest)) == MEM" and the intended
	 store penalty was never applied.  */
      return (COSTS_N_INSNS (1)
	      + 4 * ((GET_CODE (SET_SRC (x)) == MEM)
		     + (GET_CODE (SET_DEST (x)) == MEM)));

    case CONST_INT:
      if (outer == SET)
	{
	  /* Small constants load in one insn.  */
	  if ((unsigned HOST_WIDE_INT) INTVAL (x) < 256)
	    return 0;
	  /* Shifted small constants take a move plus a shift.  */
	  if (thumb_shiftable_const (INTVAL (x)))
	    return COSTS_N_INSNS (2);
	  return COSTS_N_INSNS (3);
	}
      else if ((outer == PLUS || outer == COMPARE)
	       && INTVAL (x) < 256 && INTVAL (x) > -256)
	return 0;
      else if (outer == AND
	       && INTVAL (x) < 256 && INTVAL (x) >= -256)
	return COSTS_N_INSNS (1);
      else if (outer == ASHIFT || outer == ASHIFTRT
	       || outer == LSHIFTRT)
	return 0;
      return COSTS_N_INSNS (2);

    case CONST:
    case CONST_DOUBLE:
    case LABEL_REF:
    case SYMBOL_REF:
      return COSTS_N_INSNS (3);

    case UDIV:
    case UMOD:
    case DIV:
    case MOD:
      /* No hardware divide; library call.  */
      return 100;

    case TRUNCATE:
      return 99;

    case AND:
    case XOR:
    case IOR:
      /* XXX guess.  */
      return 8;

    case MEM:
      /* XXX another guess: base cost plus per-word and constant-pool
	 penalties.  */
      return (10 + 4 * ((GET_MODE_SIZE (mode) - 1) / UNITS_PER_WORD)
	      + ((GET_CODE (x) == SYMBOL_REF && CONSTANT_POOL_ADDRESS_P (x))
		 ? 4 : 0));

    case IF_THEN_ELSE:
      /* XXX a guess; branches are expensive.  */
      if (GET_CODE (XEXP (x, 1)) == PC || GET_CODE (XEXP (x, 2)) == PC)
	return 14;
      return 2;

    case ZERO_EXTEND:
      /* XXX still guessing.  */
      switch (GET_MODE (XEXP (x, 0)))
	{
	case QImode:
	  return (1 + (mode == DImode ? 4 : 0)
		  + (GET_CODE (XEXP (x, 0)) == MEM ? 10 : 0));
	case HImode:
	  return (4 + (mode == DImode ? 4 : 0)
		  + (GET_CODE (XEXP (x, 0)) == MEM ? 10 : 0));
	case SImode:
	  return (1 + (GET_CODE (XEXP (x, 0)) == MEM ? 10 : 0));
	default:
	  return 99;
	}

    default:
      return 99;
    }
}
/* Size-oriented cost estimates for Thumb RTX expression X with code
   CODE inside an expression with code OUTER.  Like thumb_rtx_costs but
   every ALU operation (including MULT) is charged one insn.  */
static inline int
thumb_size_rtx_costs (rtx x, enum rtx_code code, enum rtx_code outer)
{
  enum machine_mode mode = GET_MODE (x);

  switch (code)
    {
    case ASHIFT:
    case ASHIFTRT:
    case LSHIFTRT:
    case ROTATERT:
    case PLUS:
    case MINUS:
    case COMPARE:
    case NEG:
    case NOT:
    case AND:
    case XOR:
    case IOR:
    case MULT:
      return COSTS_N_INSNS (1);

    case SET:
      /* One insn plus a 4-unit penalty for each memory operand.
	 BUG FIX: the destination comparison was unparenthesized, so
	 "+ GET_CODE (dest) == MEM" associated as
	 "((src == MEM) + GET_CODE (dest)) == MEM" and the intended
	 store penalty was never applied.  */
      return (COSTS_N_INSNS (1)
	      + 4 * ((GET_CODE (SET_SRC (x)) == MEM)
		     + (GET_CODE (SET_DEST (x)) == MEM)));

    case CONST_INT:
      if (outer == SET)
	{
	  if ((unsigned HOST_WIDE_INT) INTVAL (x) < 256)
	    return 0;
	  if (thumb_shiftable_const (INTVAL (x)))
	    return COSTS_N_INSNS (2);
	  return COSTS_N_INSNS (3);
	}
      else if ((outer == PLUS || outer == COMPARE)
	       && INTVAL (x) < 256 && INTVAL (x) > -256)
	return 0;
      else if (outer == AND
	       && INTVAL (x) < 256 && INTVAL (x) >= -256)
	return COSTS_N_INSNS (1);
      else if (outer == ASHIFT || outer == ASHIFTRT
	       || outer == LSHIFTRT)
	return 0;
      return COSTS_N_INSNS (2);

    case CONST:
    case CONST_DOUBLE:
    case LABEL_REF:
    case SYMBOL_REF:
      return COSTS_N_INSNS (3);

    case UDIV:
    case UMOD:
    case DIV:
    case MOD:
      /* No hardware divide; library call.  */
      return 100;

    case TRUNCATE:
      return 99;

    case MEM:
      /* XXX a guess: base cost plus per-word and constant-pool
	 penalties.  */
      return (10 + 4 * ((GET_MODE_SIZE (mode) - 1) / UNITS_PER_WORD)
	      + ((GET_CODE (x) == SYMBOL_REF && CONSTANT_POOL_ADDRESS_P (x))
		 ? 4 : 0));

    case IF_THEN_ELSE:
      /* XXX a guess; branches are expensive.  */
      if (GET_CODE (XEXP (x, 1)) == PC || GET_CODE (XEXP (x, 2)) == PC)
	return 14;
      return 2;

    case ZERO_EXTEND:
      /* XXX still guessing.  */
      switch (GET_MODE (XEXP (x, 0)))
	{
	case QImode:
	  return (1 + (mode == DImode ? 4 : 0)
		  + (GET_CODE (XEXP (x, 0)) == MEM ? 10 : 0));
	case HImode:
	  return (4 + (mode == DImode ? 4 : 0)
		  + (GET_CODE (XEXP (x, 0)) == MEM ? 10 : 0));
	case SImode:
	  return (1 + (GET_CODE (XEXP (x, 0)) == MEM ? 10 : 0));
	default:
	  return 99;
	}

    default:
      return 99;
    }
}
/* Worst-case cost estimates for ARM-state RTX expression X with code
   CODE inside an expression with code OUTER.  Several cases fall
   through deliberately; each such point is annotated below.  */
static inline int
arm_rtx_costs_1 (rtx x, enum rtx_code code, enum rtx_code outer)
{
enum machine_mode mode = GET_MODE (x);
enum rtx_code subcode;
int extra_cost;
switch (code)
{
case MEM:
return (10 + 4 * ((GET_MODE_SIZE (mode) - 1) / UNITS_PER_WORD)
+ (GET_CODE (x) == SYMBOL_REF
&& CONSTANT_POOL_ADDRESS_P (x) ? 4 : 0));
case DIV:
case MOD:
case UDIV:
case UMOD:
/* Library call; cheap only when optimizing for size.  */
return optimize_size ? COSTS_N_INSNS (2) : 100;
case ROTATE:
if (mode == SImode && GET_CODE (XEXP (x, 1)) == REG)
return 4;
/* Fall through.  */
case ROTATERT:
if (mode != SImode)
return 8;
/* Fall through.  */
case ASHIFT: case LSHIFTRT: case ASHIFTRT:
if (mode == DImode)
return (8 + (GET_CODE (XEXP (x, 1)) == CONST_INT ? 0 : 8)
+ ((GET_CODE (XEXP (x, 0)) == REG
|| (GET_CODE (XEXP (x, 0)) == SUBREG
&& GET_CODE (SUBREG_REG (XEXP (x, 0))) == REG))
? 0 : 8));
return (1 + ((GET_CODE (XEXP (x, 0)) == REG
|| (GET_CODE (XEXP (x, 0)) == SUBREG
&& GET_CODE (SUBREG_REG (XEXP (x, 0))) == REG))
? 0 : 4)
+ ((GET_CODE (XEXP (x, 1)) == REG
|| (GET_CODE (XEXP (x, 1)) == SUBREG
&& GET_CODE (SUBREG_REG (XEXP (x, 1))) == REG)
|| (GET_CODE (XEXP (x, 1)) == CONST_INT))
? 0 : 4));
case MINUS:
if (mode == DImode)
return (4 + (REG_OR_SUBREG_REG (XEXP (x, 1)) ? 0 : 8)
+ ((REG_OR_SUBREG_REG (XEXP (x, 0))
|| (GET_CODE (XEXP (x, 0)) == CONST_INT
&& const_ok_for_arm (INTVAL (XEXP (x, 0)))))
? 0 : 8));
if (GET_MODE_CLASS (mode) == MODE_FLOAT)
return (2 + ((REG_OR_SUBREG_REG (XEXP (x, 1))
|| (GET_CODE (XEXP (x, 1)) == CONST_DOUBLE
&& arm_const_double_rtx (XEXP (x, 1))))
? 0 : 8)
+ ((REG_OR_SUBREG_REG (XEXP (x, 0))
|| (GET_CODE (XEXP (x, 0)) == CONST_DOUBLE
&& arm_const_double_rtx (XEXP (x, 0))))
? 0 : 8));
/* (const - reg) and (shift/scaled-mult - reg) forms map onto RSB or
   a single subtract with shifted operand.  */
if (((GET_CODE (XEXP (x, 0)) == CONST_INT
&& const_ok_for_arm (INTVAL (XEXP (x, 0)))
&& REG_OR_SUBREG_REG (XEXP (x, 1))))
|| (((subcode = GET_CODE (XEXP (x, 1))) == ASHIFT
|| subcode == ASHIFTRT || subcode == LSHIFTRT
|| subcode == ROTATE || subcode == ROTATERT
|| (subcode == MULT
&& GET_CODE (XEXP (XEXP (x, 1), 1)) == CONST_INT
&& ((INTVAL (XEXP (XEXP (x, 1), 1)) &
(INTVAL (XEXP (XEXP (x, 1), 1)) - 1)) == 0)))
&& REG_OR_SUBREG_REG (XEXP (XEXP (x, 1), 0))
&& (REG_OR_SUBREG_REG (XEXP (XEXP (x, 1), 1))
|| GET_CODE (XEXP (XEXP (x, 1), 1)) == CONST_INT)
&& REG_OR_SUBREG_REG (XEXP (x, 0))))
return 1;
/* Fall through to PLUS for the remaining cases.  */
case PLUS:
if (GET_CODE (XEXP (x, 0)) == MULT)
{
extra_cost = rtx_cost (XEXP (x, 0), code);
if (!REG_OR_SUBREG_REG (XEXP (x, 1)))
extra_cost += 4 * ARM_NUM_REGS (mode);
return extra_cost;
}
if (GET_MODE_CLASS (mode) == MODE_FLOAT)
return (2 + (REG_OR_SUBREG_REG (XEXP (x, 0)) ? 0 : 8)
+ ((REG_OR_SUBREG_REG (XEXP (x, 1))
|| (GET_CODE (XEXP (x, 1)) == CONST_DOUBLE
&& arm_const_double_rtx (XEXP (x, 1))))
? 0 : 8));
/* Fall through to the logical-op handling.  */
case AND: case XOR: case IOR:
extra_cost = 0;
/* Normally the frame registers will be spilt into reg+const during
   reload, so it is a bad idea to combine them with other instructions,
   since then they might not be moved outside of loops.  */
if ((REG_OR_SUBREG_REG (XEXP (x, 0))
&& ARM_FRAME_RTX (REG_OR_SUBREG_RTX (XEXP (x, 0)))
&& GET_CODE (XEXP (x, 1)) != CONST_INT)
|| (REG_OR_SUBREG_REG (XEXP (x, 0))
&& ARM_FRAME_RTX (REG_OR_SUBREG_RTX (XEXP (x, 0)))))
extra_cost = 4;
if (mode == DImode)
return (4 + extra_cost + (REG_OR_SUBREG_REG (XEXP (x, 0)) ? 0 : 8)
+ ((REG_OR_SUBREG_REG (XEXP (x, 1))
|| (GET_CODE (XEXP (x, 1)) == CONST_INT
&& const_ok_for_op (INTVAL (XEXP (x, 1)), code)))
? 0 : 8));
if (REG_OR_SUBREG_REG (XEXP (x, 0)))
return (1 + (GET_CODE (XEXP (x, 1)) == CONST_INT ? 0 : extra_cost)
+ ((REG_OR_SUBREG_REG (XEXP (x, 1))
|| (GET_CODE (XEXP (x, 1)) == CONST_INT
&& const_ok_for_op (INTVAL (XEXP (x, 1)), code)))
? 0 : 4));
else if (REG_OR_SUBREG_REG (XEXP (x, 1)))
return (1 + extra_cost
+ ((((subcode = GET_CODE (XEXP (x, 0))) == ASHIFT
|| subcode == LSHIFTRT || subcode == ASHIFTRT
|| subcode == ROTATE || subcode == ROTATERT
|| (subcode == MULT
&& GET_CODE (XEXP (XEXP (x, 0), 1)) == CONST_INT
&& ((INTVAL (XEXP (XEXP (x, 0), 1)) &
(INTVAL (XEXP (XEXP (x, 0), 1)) - 1)) == 0)))
&& (REG_OR_SUBREG_REG (XEXP (XEXP (x, 0), 0)))
&& ((REG_OR_SUBREG_REG (XEXP (XEXP (x, 0), 1)))
|| GET_CODE (XEXP (XEXP (x, 0), 1)) == CONST_INT))
? 1 : 4));
return 8;
case MULT:
/* MULT is handled by the caller before reaching here.  */
gcc_unreachable ();
case TRUNCATE:
/* Truncation of a widening multiply high-part (SMULL/UMULL).  */
if (arm_arch3m && mode == SImode
&& GET_CODE (XEXP (x, 0)) == LSHIFTRT
&& GET_CODE (XEXP (XEXP (x, 0), 0)) == MULT
&& (GET_CODE (XEXP (XEXP (XEXP (x, 0), 0), 0))
== GET_CODE (XEXP (XEXP (XEXP (x, 0), 0), 1)))
&& (GET_CODE (XEXP (XEXP (XEXP (x, 0), 0), 0)) == ZERO_EXTEND
|| GET_CODE (XEXP (XEXP (XEXP (x, 0), 0), 0)) == SIGN_EXTEND))
return 8;
return 99;
case NEG:
if (GET_MODE_CLASS (mode) == MODE_FLOAT)
return 4 + (REG_OR_SUBREG_REG (XEXP (x, 0)) ? 0 : 6);
/* Fall through: integer NEG costed like NOT.  */
case NOT:
if (mode == DImode)
return 4 + (REG_OR_SUBREG_REG (XEXP (x, 0)) ? 0 : 4);
return 1 + (REG_OR_SUBREG_REG (XEXP (x, 0)) ? 0 : 4);
case IF_THEN_ELSE:
if (GET_CODE (XEXP (x, 1)) == PC || GET_CODE (XEXP (x, 2)) == PC)
return 14;
return 2;
case COMPARE:
return 1;
case ABS:
return 4 + (mode == DImode ? 4 : 0);
case SIGN_EXTEND:
if (GET_MODE (XEXP (x, 0)) == QImode)
return (4 + (mode == DImode ? 4 : 0)
+ (GET_CODE (XEXP (x, 0)) == MEM ? 10 : 0));
/* Fall through: HImode/SImode cases match ZERO_EXTEND.  */
case ZERO_EXTEND:
switch (GET_MODE (XEXP (x, 0)))
{
case QImode:
return (1 + (mode == DImode ? 4 : 0)
+ (GET_CODE (XEXP (x, 0)) == MEM ? 10 : 0));
case HImode:
return (4 + (mode == DImode ? 4 : 0)
+ (GET_CODE (XEXP (x, 0)) == MEM ? 10 : 0));
case SImode:
return (1 + (GET_CODE (XEXP (x, 0)) == MEM ? 10 : 0));
case V8QImode:
case V4HImode:
case V2SImode:
case V4QImode:
case V2HImode:
return 1;
default:
gcc_unreachable ();
}
gcc_unreachable ();
case CONST_INT:
/* Negative return values tell the caller the constant is free in
   this context (encodable directly, or via the inverted/negated
   instruction form).  */
if (const_ok_for_arm (INTVAL (x)))
return outer == SET ? 2 : -1;
else if (outer == AND
&& const_ok_for_arm (~INTVAL (x)))
return -1;
else if ((outer == COMPARE
|| outer == PLUS || outer == MINUS)
&& const_ok_for_arm (-INTVAL (x)))
return -1;
else
return 5;
case CONST:
case LABEL_REF:
case SYMBOL_REF:
return 6;
case CONST_DOUBLE:
if (arm_const_double_rtx (x))
return outer == SET ? 2 : -1;
else if ((outer == COMPARE || outer == PLUS)
&& neg_const_double_rtx_ok_for_fpa (x))
return -1;
return 7;
default:
return 99;
}
}
/* Cost estimation used when optimizing for size (-Os).  Estimate the
   cost of rtx X, whose top-level code is CODE, appearing in context
   OUTER_CODE.  The cost (in COSTS_N_INSNS units) is stored in *TOTAL.
   Returns true when *TOTAL is the complete cost (operands must not be
   costed separately), false when the caller should add operand costs.  */
static bool
arm_size_rtx_costs (rtx x, int code, int outer_code, int *total)
{
  enum machine_mode mode = GET_MODE (x);

  if (TARGET_THUMB)
    {
      /* Thumb has its own size-oriented cost table.  */
      *total = thumb_size_rtx_costs (x, code, outer_code);
      return true;
    }

  switch (code)
    {
    case MEM:
      /* One insn per word moved; a bare register address needs no
	 address arithmetic.  */
      if (REG_P (XEXP (x, 0)))
	*total = COSTS_N_INSNS (1);
      else
	*total = COSTS_N_INSNS (ARM_NUM_REGS (mode));
      return true;

    case DIV:
    case MOD:
    case UDIV:
    case UMOD:
      /* No hardware divide: assume a library call.  */
      *total = COSTS_N_INSNS (2);
      return false;

    case ROTATE:
      if (mode == SImode && GET_CODE (XEXP (x, 1)) == REG)
	{
	  /* Rotate by register needs an extra insn for the count.  */
	  *total = COSTS_N_INSNS (2) + rtx_cost (XEXP (x, 0), code);
	  return true;
	}
      /* Fall through */
    case ROTATERT:
    case ASHIFT:
    case LSHIFTRT:
    case ASHIFTRT:
      if (mode == DImode && GET_CODE (XEXP (x, 1)) == CONST_INT)
	{
	  *total = COSTS_N_INSNS (3) + rtx_cost (XEXP (x, 0), code);
	  return true;
	}
      else if (mode == SImode)
	{
	  *total = COSTS_N_INSNS (1) + rtx_cost (XEXP (x, 0), code);
	  /* Slightly disparage register shifts.  */
	  if (GET_CODE (XEXP (x, 1)) != CONST_INT)
	    *total += 1 + rtx_cost (XEXP (x, 1), code);
	  return true;
	}

      /* Anything else (e.g. a DImode shift by register) goes through a
	 library call.  */
      *total = COSTS_N_INSNS (2);
      return false;

    case MINUS:
      if (TARGET_HARD_FLOAT && GET_MODE_CLASS (mode) == MODE_FLOAT)
	{
	  *total = COSTS_N_INSNS (1);
	  return false;
	}

      if (mode == SImode)
	{
	  enum rtx_code subcode0 = GET_CODE (XEXP (x, 0));
	  enum rtx_code subcode1 = GET_CODE (XEXP (x, 1));

	  if (subcode0 == ROTATE || subcode0 == ROTATERT || subcode0 == ASHIFT
	      || subcode0 == LSHIFTRT || subcode0 == ASHIFTRT
	      || subcode1 == ROTATE || subcode1 == ROTATERT
	      || subcode1 == ASHIFT || subcode1 == LSHIFTRT
	      || subcode1 == ASHIFTRT)
	    {
	      /* The shift folds into the subtract; it's just the cost
		 of the operands.  */
	      *total = 0;
	      return false;
	    }

	  *total = COSTS_N_INSNS (1);
	  return false;
	}

      *total = COSTS_N_INSNS (ARM_NUM_REGS (mode));
      return false;

    case PLUS:
      if (TARGET_HARD_FLOAT && GET_MODE_CLASS (mode) == MODE_FLOAT)
	{
	  *total = COSTS_N_INSNS (1);
	  return false;
	}

      /* Fall through */
    case AND: case XOR: case IOR:
      if (mode == SImode)
	{
	  enum rtx_code subcode = GET_CODE (XEXP (x, 0));

	  if (subcode == ROTATE || subcode == ROTATERT || subcode == ASHIFT
	      || subcode == LSHIFTRT || subcode == ASHIFTRT
	      || (code == AND && subcode == NOT))
	    {
	      /* The shift (or NOT, for AND) folds into the ALU insn.  */
	      *total = 0;
	      return false;
	    }
	}

      *total = COSTS_N_INSNS (ARM_NUM_REGS (mode));
      return false;

    case MULT:
      if (mode == DImode)
	{
	  /* Matched pairs of extends map to a single widening multiply
	     (SMULL/UMULL); cost the un-extended operands directly.  */
	  if (((GET_CODE (XEXP (x, 0)) == SIGN_EXTEND
		&& GET_CODE (XEXP (x, 1)) == SIGN_EXTEND)
	       || (GET_CODE (XEXP (x, 0)) == ZERO_EXTEND
		   && GET_CODE (XEXP (x, 1)) == ZERO_EXTEND))
	      && GET_MODE (XEXP (XEXP (x, 0), 0)) == SImode
	      && GET_MODE (XEXP (XEXP (x, 1), 0)) == SImode)
	    {
	      *total = COSTS_N_INSNS (1)
		       + rtx_cost (XEXP (XEXP (x, 0), 0), MULT)
		       + rtx_cost (XEXP (XEXP (x, 1), 0), MULT);
	      return true;
	    }

	  *total = COSTS_N_INSNS (3);
	  return false;
	}

      *total = COSTS_N_INSNS (ARM_NUM_REGS (mode));
      return false;

    case NEG:
      if (TARGET_HARD_FLOAT && GET_MODE_CLASS (mode) == MODE_FLOAT)
	{
	  *total = COSTS_N_INSNS (1);
	  /* Must return here: falling into the NOT case below would
	     overwrite the FP cost just computed.  */
	  return false;
	}

      /* Fall through */
    case NOT:
      if (mode == DImode)
	return 4 + (REG_OR_SUBREG_REG (XEXP (x, 0)) ? 0 : 4);
      return 1 + (REG_OR_SUBREG_REG (XEXP (x, 0)) ? 0 : 4);

    case IF_THEN_ELSE:
      *total = 0;
      return false;

    case COMPARE:
      if (cc_register (XEXP (x, 0), VOIDmode))
	*total = 0;
      else
	*total = COSTS_N_INSNS (1);
      return false;

    case ABS:
      if (TARGET_HARD_FLOAT && GET_MODE_CLASS (mode) == MODE_FLOAT)
	*total = COSTS_N_INSNS (1);
      else
	*total = COSTS_N_INSNS (1 + ARM_NUM_REGS (mode));
      return false;

    case SIGN_EXTEND:
      *total = 0;
      if (GET_MODE_SIZE (GET_MODE (XEXP (x, 0))) < 4)
	{
	  /* ARMv4+ can sign-extend as part of the load itself.  */
	  if (!(arm_arch4 && MEM_P (XEXP (x, 0))))
	    *total += COSTS_N_INSNS (arm_arch6 ? 1 : 2);
	}
      if (mode == DImode)
	*total += COSTS_N_INSNS (1);
      return false;

    case ZERO_EXTEND:
      *total = 0;
      if (!(arm_arch4 && MEM_P (XEXP (x, 0))))
	{
	  switch (GET_MODE (XEXP (x, 0)))
	    {
	    case QImode:
	      *total += COSTS_N_INSNS (1);
	      break;

	    case HImode:
	      *total += COSTS_N_INSNS (arm_arch6 ? 1 : 2);
	      break;

	    case SImode:
	      break;

	    default:
	      *total += COSTS_N_INSNS (2);
	    }
	}

      if (mode == DImode)
	*total += COSTS_N_INSNS (1);

      return false;

    case CONST_INT:
      if (const_ok_for_arm (INTVAL (x)))
	/* Free as an operand; one insn if it must be SET by itself.  */
	*total = COSTS_N_INSNS (outer_code == SET ? 1 : 0);
      else if (const_ok_for_arm (~INTVAL (x)))
	/* The inverted constant can fold into an AND (BIC form).  */
	*total = COSTS_N_INSNS (outer_code == AND ? 0 : 1);
      else if (const_ok_for_arm (-INTVAL (x)))
	{
	  /* The negated constant folds into CMN/ADD/SUB forms.  */
	  if (outer_code == COMPARE || outer_code == PLUS
	      || outer_code == MINUS)
	    *total = 0;
	  else
	    *total = COSTS_N_INSNS (1);
	}
      else
	*total = COSTS_N_INSNS (2);
      return true;

    case CONST:
    case LABEL_REF:
    case SYMBOL_REF:
      *total = COSTS_N_INSNS (2);
      return true;

    case CONST_DOUBLE:
      *total = COSTS_N_INSNS (4);
      return true;

    default:
      if (mode != VOIDmode)
	*total = COSTS_N_INSNS (ARM_NUM_REGS (mode));
      else
	*total = COSTS_N_INSNS (4);
      return false;
    }
}
/* RTX costs for cores with a slow (iterative) multiplier.  Only MULT is
   special-cased; everything else defers to arm_rtx_costs_1.  Stores the
   cost in *TOTAL and always returns true (operands already counted).  */
static bool
arm_slowmul_rtx_costs (rtx x, int code, int outer_code, int *total)
{
  enum machine_mode mode = GET_MODE (x);

  if (TARGET_THUMB)
    {
      *total = thumb_rtx_costs (x, code, outer_code);
      return true;
    }

  if (code != MULT)
    {
      *total = arm_rtx_costs_1 (x, code, outer_code);
      return true;
    }

  /* FP and 64-bit multiplies go through library calls.  */
  if (GET_MODE_CLASS (mode) == MODE_FLOAT || mode == DImode)
    {
      *total = 30;
      return true;
    }

  if (GET_CODE (XEXP (x, 1)) == CONST_INT)
    {
      /* The iterative multiplier retires 2 bits (one Booth step) per
	 cycle; charge 2 per step until the constant is exhausted.  */
      unsigned HOST_WIDE_INT bits
	= INTVAL (XEXP (x, 1)) & (unsigned HOST_WIDE_INT) 0xffffffff;
      int cycles = const_ok_for_arm (bits) ? 4 : 8;
      int step;

      for (step = 0; bits != 0 && step < 32; step += 2)
	{
	  bits >>= 2;
	  cycles += 2;
	}

      *total = cycles;
      return true;
    }

  /* Multiply by a non-constant: worst case, plus a penalty for each
     operand that is not already in a register.  */
  *total = 30 + (REG_OR_SUBREG_REG (XEXP (x, 0)) ? 0 : 4)
	      + (REG_OR_SUBREG_REG (XEXP (x, 1)) ? 0 : 4);
  return true;
}
/* RTX costs for cores with a fast multiplier (8 bits retired per Booth
   step).  Only MULT is special-cased; everything else defers to
   arm_rtx_costs_1.  Result in *TOTAL; always returns true.  */
static bool
arm_fastmul_rtx_costs (rtx x, int code, int outer_code, int *total)
{
  enum machine_mode mode = GET_MODE (x);

  if (TARGET_THUMB)
    {
      *total = thumb_rtx_costs (x, code, outer_code);
      return true;
    }

  if (code != MULT)
    {
      *total = arm_rtx_costs_1 (x, code, outer_code);
      return true;
    }

  /* A DImode product of two identically-extended operands is a single
     widening multiply (SMULL/UMULL).  */
  if (mode == DImode
      && GET_CODE (XEXP (x, 0)) == GET_CODE (XEXP (x, 1))
      && (GET_CODE (XEXP (x, 0)) == ZERO_EXTEND
	  || GET_CODE (XEXP (x, 0)) == SIGN_EXTEND))
    {
      *total = 8;
      return true;
    }

  /* Other FP or 64-bit multiplies need a library call.  */
  if (GET_MODE_CLASS (mode) == MODE_FLOAT || mode == DImode)
    {
      *total = 30;
      return true;
    }

  if (GET_CODE (XEXP (x, 1)) == CONST_INT)
    {
      /* Charge 2 per 8-bit Booth step actually needed for the
	 constant multiplier.  */
      unsigned HOST_WIDE_INT bits
	= INTVAL (XEXP (x, 1)) & (unsigned HOST_WIDE_INT) 0xffffffff;
      int cycles = const_ok_for_arm (bits) ? 4 : 8;
      int step;

      for (step = 0; bits != 0 && step < 32; step += 8)
	{
	  bits >>= 8;
	  cycles += 2;
	}

      *total = cycles;
      return true;
    }

  /* Non-constant multiplier: base cost plus a penalty for operands not
     already in registers.  */
  *total = 8 + (REG_OR_SUBREG_REG (XEXP (x, 0)) ? 0 : 4)
	     + (REG_OR_SUBREG_REG (XEXP (x, 1)) ? 0 : 4);
  return true;
}
/* RTX costs tuned for XScale cores.  MULT and COMPARE are special-cased;
   everything else defers to arm_rtx_costs_1.  Result in *TOTAL; always
   returns true (operands are already accounted for).  */
static bool
arm_xscale_rtx_costs (rtx x, int code, int outer_code, int *total)
{
  enum machine_mode mode = GET_MODE (x);

  if (TARGET_THUMB)
    {
      *total = thumb_rtx_costs (x, code, outer_code);
      return true;
    }

  switch (code)
    {
    case MULT:
      /* A DImode product of two identically-extended operands maps to a
	 single widening multiply (SMULL/UMULL).  */
      if (mode == DImode
	  && (GET_CODE (XEXP (x, 0)) == GET_CODE (XEXP (x, 1)))
	  && (GET_CODE (XEXP (x, 0)) == ZERO_EXTEND
	      || GET_CODE (XEXP (x, 0)) == SIGN_EXTEND))
	{
	  *total = 8;
	  return true;
	}

      /* Other FP or 64-bit multiplies need a library call.  */
      if (GET_MODE_CLASS (mode) == MODE_FLOAT
	  || mode == DImode)
	{
	  *total = 30;
	  return true;
	}

      if (GET_CODE (XEXP (x, 1)) == CONST_INT)
	{
	  /* Base cost of 2, plus 1 if the constant is not directly
	     encodable, plus 1 or 2 more depending on how many
	     significant high bits the constant has (wider constants
	     take more multiplier passes).  */
	  unsigned HOST_WIDE_INT i = (INTVAL (XEXP (x, 1))
				      & (unsigned HOST_WIDE_INT) 0xffffffff);
	  int cost, const_ok = const_ok_for_arm (i);
	  unsigned HOST_WIDE_INT masked_const;

	  cost = 2;
	  if (! const_ok)
	    cost += 1;

	  /* Constant fits in (sign-extended) 16 bits?  Then no extra.  */
	  masked_const = i & 0xffff8000;
	  if (masked_const != 0 && masked_const != 0xffff8000)
	    {
	      /* Fits in (sign-extended) 28 bits: +1, else +2.  */
	      masked_const = i & 0xf8000000;
	      if (masked_const == 0 || masked_const == 0xf8000000)
		cost += 1;
	      else
		cost += 2;
	    }
	  *total = cost;
	  return true;
	}

      /* Non-constant multiplier: base cost plus a penalty per operand
	 that is not already in a register.  */
      *total = 8 + (REG_OR_SUBREG_REG (XEXP (x, 0)) ? 0 : 4)
		 + (REG_OR_SUBREG_REG (XEXP (x, 1)) ? 0 : 4);
      return true;

    case COMPARE:
      /* XScale charges extra for comparing the result of a multiply.  */
      if (GET_CODE (XEXP (x, 0)) == MULT)
	*total = 4 + rtx_cost (XEXP (x, 0), code);
      else
	*total = arm_rtx_costs_1 (x, code, outer_code);
      return true;

    default:
      *total = arm_rtx_costs_1 (x, code, outer_code);
      return true;
    }
}
/* RTX costs for ARM9E-family cores.  Only MULT is special-cased; all
   other codes defer to the generic cost routines.  Result in *TOTAL;
   always returns true.  */
static bool
arm_9e_rtx_costs (rtx x, int code, int outer_code, int *total)
{
  enum machine_mode mode = GET_MODE (x);
  int base_cost;
  int nonreg_penalty;

  if (TARGET_THUMB)
    {
      *total = (code == MULT
		? COSTS_N_INSNS (3)
		: thumb_rtx_costs (x, code, outer_code));
      return true;
    }

  if (code != MULT)
    {
      *total = arm_rtx_costs_1 (x, code, outer_code);
      return true;
    }

  /* A DImode product of two identically-extended operands is a single
     widening multiply.  */
  if (mode == DImode
      && GET_CODE (XEXP (x, 0)) == GET_CODE (XEXP (x, 1))
      && (GET_CODE (XEXP (x, 0)) == ZERO_EXTEND
	  || GET_CODE (XEXP (x, 0)) == SIGN_EXTEND))
    {
      *total = 3;
      return true;
    }

  /* FP multiplies go through a library call.  */
  if (GET_MODE_CLASS (mode) == MODE_FLOAT)
    {
      *total = 30;
      return true;
    }

  /* 64-bit multiplies are pricier than 32-bit ones, and operands that
     are not already in registers cost extra.  */
  if (mode == DImode)
    {
      base_cost = 7;
      nonreg_penalty = 8;
    }
  else
    {
      base_cost = 2;
      nonreg_penalty = 4;
    }

  *total = base_cost
	   + (REG_OR_SUBREG_REG (XEXP (x, 0)) ? 0 : nonreg_penalty)
	   + (REG_OR_SUBREG_REG (XEXP (x, 1)) ? 0 : nonreg_penalty);
  return true;
}
/* Relative cost of address X in ARM state.  Auto-modify addresses are
   free, indirect/symbolic addresses are dearest, and reg+reg/reg+const
   forms fall in between.  */
static inline int
arm_arm_address_cost (rtx x)
{
  switch (GET_CODE (x))
    {
    case PRE_INC:
    case PRE_DEC:
    case POST_INC:
    case POST_DEC:
      /* Auto-increment/decrement comes for free.  */
      return 0;

    case MEM:
    case LABEL_REF:
    case SYMBOL_REF:
      /* Requires an extra load or a literal-pool reference.  */
      return 10;

    case PLUS:
    case MINUS:
      if (GET_CODE (XEXP (x, 0)) == CONST_INT)
	return 2;
      if (ARITHMETIC_P (XEXP (x, 0)) || ARITHMETIC_P (XEXP (x, 1)))
	return 3;
      return 4;

    default:
      return 6;
    }
}
/* Relative cost of address X in Thumb state: a bare register, or a
   register plus a constant offset, is cheap; anything else costs 2.  */
static inline int
arm_thumb_address_cost (rtx x)
{
  if (GET_CODE (x) == REG)
    return 1;

  if (GET_CODE (x) == PLUS
      && GET_CODE (XEXP (x, 0)) == REG
      && GET_CODE (XEXP (x, 1)) == CONST_INT)
    return 1;

  return 2;
}
/* Implement TARGET_ADDRESS_COST: dispatch to the ARM- or Thumb-state
   address cost function.  */
static int
arm_address_cost (rtx x)
{
  if (TARGET_ARM)
    return arm_arm_address_cost (x);

  return arm_thumb_address_cost (x);
}
/* Implement TARGET_SCHED_ADJUST_COST: return the scheduler's adjusted
   latency COST for the dependence LINK between producer DEP and
   consumer INSN.  */
static int
arm_adjust_cost (rtx insn, rtx link, rtx dep, int cost)
{
#ifdef ENABLE_LLVM
  /* The LLVM build does not run the RTL scheduler; the self-assignments
     silence unused-parameter warnings.  Claim unit latency.  */
  insn = insn;
  link = link;
  dep = dep;
  cost = cost;
  return 1;
#else
  rtx i_pat, d_pat;

  /* XScale: an insn whose shifter operand (operand number SHIFT_OPNUM)
     is written by DEP (an ALU-shift insn) pays an extra cycle.  */
  if (arm_tune_xscale
      && REG_NOTE_KIND (link) == 0
      && recog_memoized (insn) >= 0
      && recog_memoized (dep) >= 0)
    {
      int shift_opnum = get_attr_shift (insn);
      enum attr_type attr_type = get_attr_type (dep);

      if (shift_opnum != 0
	  && (attr_type == TYPE_ALU_SHIFT || attr_type == TYPE_ALU_SHIFT_REG))
	{
	  rtx shifted_operand;
	  int opno;

	  /* Get the shifted operand of the consumer.  */
	  extract_insn (insn);
	  shifted_operand = recog_data.operand[shift_opnum];

	  /* Does DEP write anything overlapping the shifted operand?
	     (extract_insn reuses global recog_data, so INSN's data is
	     gone after this point.)  */
	  extract_insn (dep);
	  preprocess_constraints ();
	  for (opno = 0; opno < recog_data.n_operands; opno++)
	    {
	      /* Pure inputs of DEP cannot feed the shifted operand.  */
	      if (recog_data.operand_type[opno] == OP_IN)
		continue;

	      if (reg_overlap_mentioned_p (recog_data.operand[opno],
					   shifted_operand))
		return 2;
	    }
	}
    }

  /* Anti- and output-dependencies impose no latency.  */
  if (REG_NOTE_KIND (link) == REG_DEP_ANTI
      || REG_NOTE_KIND (link) == REG_DEP_OUTPUT)
    return 0;

  /* A true dependence feeding a call only needs unit latency.  */
  if (REG_NOTE_KIND (link) == 0
      && GET_CODE (insn) == CALL_INSN)
    return 1;

  /* Heuristic: a load (INSN) depending on a store (DEP) gets unit
     latency when the load address is a constant-pool reference or is
     stack/frame relative.  */
  if ((i_pat = single_set (insn)) != NULL
      && GET_CODE (SET_SRC (i_pat)) == MEM
      && (d_pat = single_set (dep)) != NULL
      && GET_CODE (SET_DEST (d_pat)) == MEM)
    {
      rtx src_mem = XEXP (SET_SRC (i_pat), 0);

      if ((GET_CODE (src_mem) == SYMBOL_REF && CONSTANT_POOL_ADDRESS_P (src_mem))
	  || reg_mentioned_p (stack_pointer_rtx, src_mem)
	  || reg_mentioned_p (frame_pointer_rtx, src_mem)
	  || reg_mentioned_p (hard_frame_pointer_rtx, src_mem))
	return 1;
    }

  return cost;
#endif
}
/* Nonzero once init_fp_table has run; also the number of valid entries
   in values_fp[] (1 when TARGET_VFP, otherwise 8).  */
static int fp_consts_inited = 0;

/* Decimal spellings of the FP constants recognized by
   arm_const_double_rtx / neg_const_double_rtx_ok_for_fpa.  */
static const char * const strings_fp[8] =
{
  "0", "1", "2", "3",
  "4", "5", "0.5", "10"
};

/* strings_fp[] parsed into REAL_VALUE_TYPE form by init_fp_table.  */
static REAL_VALUE_TYPE values_fp[8];
/* Lazily fill values_fp[] from strings_fp[].  VFP targets use only the
   first entry ("0"); other FP units use all eight.  Records the number
   of valid entries in fp_consts_inited (which also marks the table as
   initialized).  */
static void
init_fp_table (void)
{
  int i;
  REAL_VALUE_TYPE r;

  if (TARGET_VFP)
    fp_consts_inited = 1;
  else
    fp_consts_inited = 8;

  for (i = 0; i < fp_consts_inited; i++)
    {
      r = REAL_VALUE_ATOF (strings_fp[i], DFmode);
      values_fp[i] = r;
    }
}
/* Return 1 if the CONST_DOUBLE X equals one of the FP constants that
   the FP instructions accept directly (see values_fp); minus zero is
   explicitly rejected.  */
int
arm_const_double_rtx (rtx x)
{
  REAL_VALUE_TYPE value;
  int idx;

  if (!fp_consts_inited)
    init_fp_table ();

  REAL_VALUE_FROM_CONST_DOUBLE (value, x);

  /* -0.0 compares equal to 0.0 but is not encodable.  */
  if (REAL_VALUE_MINUS_ZERO (value))
    return 0;

  for (idx = 0; idx < fp_consts_inited; idx++)
    if (REAL_VALUES_EQUAL (value, values_fp[idx]))
      return 1;

  return 0;
}
/* Return 1 if the negation of CONST_DOUBLE X is one of the FPA-valid
   FP constants (so the negated instruction form can be used); minus
   zero is rejected after negation.  */
int
neg_const_double_rtx_ok_for_fpa (rtx x)
{
  REAL_VALUE_TYPE r;
  int i;

  if (!fp_consts_inited)
    init_fp_table ();

  REAL_VALUE_FROM_CONST_DOUBLE (r, x);
  r = REAL_VALUE_NEGATE (r);
  if (REAL_VALUE_MINUS_ZERO (r))
    return 0;

  /* NOTE(review): this scans all 8 table entries, but init_fp_table
     fills only fp_consts_inited of them (1 on VFP targets), so entries
     1..7 may be unparsed there.  Presumably harmless because this
     predicate is FPA-specific -- confirm.  */
  for (i = 0; i < 8; i++)
    if (REAL_VALUES_EQUAL (r, values_fp[i]))
      return 1;

  return 0;
}
/* Return 1 if OP is a MEM whose address is a plain register or a
   register plus constant offset -- the forms the Cirrus coprocessor
   load/store instructions accept.  */
int
cirrus_memory_offset (rtx op)
{
  /* Before reload, reject addresses built on eliminable or virtual
     registers: their final offsets are not yet known.  */
  if (! (reload_in_progress || reload_completed)
      && (  reg_mentioned_p (frame_pointer_rtx, op)
	 || reg_mentioned_p (arg_pointer_rtx, op)
	 || reg_mentioned_p (virtual_incoming_args_rtx, op)
	 || reg_mentioned_p (virtual_outgoing_args_rtx, op)
	 || reg_mentioned_p (virtual_stack_dynamic_rtx, op)
	 || reg_mentioned_p (virtual_stack_vars_rtx, op)))
    return 0;

  if (GET_CODE (op) == MEM)
    {
      rtx ind;

      ind = XEXP (op, 0);

      /* Match: (mem (reg)).  */
      if (GET_CODE (ind) == REG)
	return 1;

      /* Match: (mem (plus (reg) (const_int))).  */
      if (GET_CODE (ind) == PLUS
	  && GET_CODE (XEXP (ind, 0)) == REG
	  && REG_MODE_OK_FOR_BASE_P (XEXP (ind, 0), VOIDmode)
	  && GET_CODE (XEXP (ind, 1)) == CONST_INT)
	return 1;
    }

  return 0;
}
/* Return TRUE if OP is a memory operand acceptable to the coprocessor
   load/store (LDC/STC-style) instructions.  WB permits the writeback
   (auto-modify) addressing forms.  */
int
arm_coproc_mem_operand (rtx op, bool wb)
{
  rtx ind;

  /* Before reload, reject addresses built on eliminable or virtual
     registers: their final offsets are not yet known.  */
  if (! (reload_in_progress || reload_completed)
      && (  reg_mentioned_p (frame_pointer_rtx, op)
	 || reg_mentioned_p (arg_pointer_rtx, op)
	 || reg_mentioned_p (virtual_incoming_args_rtx, op)
	 || reg_mentioned_p (virtual_outgoing_args_rtx, op)
	 || reg_mentioned_p (virtual_stack_dynamic_rtx, op)
	 || reg_mentioned_p (virtual_stack_vars_rtx, op)))
    return FALSE;

  if (GET_CODE (op) != MEM)
    return FALSE;

  ind = XEXP (op, 0);

  /* After reload, literal-pool references (a label, possibly plus a
     constant offset) are acceptable.  */
  if (reload_completed
      && (GET_CODE (ind) == LABEL_REF
	  || (GET_CODE (ind) == CONST
	      && GET_CODE (XEXP (ind, 0)) == PLUS
	      && GET_CODE (XEXP (XEXP (ind, 0), 0)) == LABEL_REF
	      && GET_CODE (XEXP (XEXP (ind, 0), 1)) == CONST_INT)))
    return TRUE;

  /* Match: (mem (reg)).  */
  if (GET_CODE (ind) == REG)
    return arm_address_register_rtx_p (ind, 0);

  /* Simple auto-modify forms, only when writeback is allowed.  */
  if (wb
      && (GET_CODE (ind) == PRE_INC
	  || GET_CODE (ind) == POST_INC
	  || GET_CODE (ind) == PRE_DEC
	  || GET_CODE (ind) == POST_DEC))
    return arm_address_register_rtx_p (XEXP (ind, 0), 0);

  /* (pre|post)_modify by reg+const: validate the PLUS below as if it
     were a plain offset address.  */
  if (wb
      && (GET_CODE (ind) == POST_MODIFY || GET_CODE (ind) == PRE_MODIFY)
      && arm_address_register_rtx_p (XEXP (ind, 0), 0)
      && GET_CODE (XEXP (ind, 1)) == PLUS
      && rtx_equal_p (XEXP (XEXP (ind, 1), 0), XEXP (ind, 0)))
    ind = XEXP (ind, 1);

  /* Match: (plus (reg) (const)) with a word-aligned offset inside the
     coprocessor's 10-bit (scaled) range.  */
  if (GET_CODE (ind) == PLUS
      && GET_CODE (XEXP (ind, 0)) == REG
      && REG_MODE_OK_FOR_BASE_P (XEXP (ind, 0), VOIDmode)
      && GET_CODE (XEXP (ind, 1)) == CONST_INT
      && INTVAL (XEXP (ind, 1)) > -1024
      && INTVAL (XEXP (ind, 1)) < 1024
      && (INTVAL (XEXP (ind, 1)) & 3) == 0)
    return TRUE;

  return FALSE;
}
/* Return nonzero if X is a register that may be eliminated or replaced
   during register allocation: the frame pointer, the argument pointer,
   or any virtual register.  */
int
arm_eliminable_register (rtx x)
{
  unsigned int regno;

  if (!REG_P (x))
    return 0;

  regno = REGNO (x);
  if (regno == FRAME_POINTER_REGNUM || regno == ARG_POINTER_REGNUM)
    return 1;

  return (regno >= FIRST_VIRTUAL_REGISTER
	  && regno <= LAST_VIRTUAL_REGISTER);
}
/* Return the secondary reload class needed to move X (of MODE) into a
   coprocessor register.  WB allows writeback addressing.  Values that
   are already in core registers, or addressable by the coprocessor
   load/store forms, need no intermediate class.  */
enum reg_class
coproc_secondary_reload_class (enum machine_mode mode, rtx x, bool wb)
{
  if (arm_coproc_mem_operand (x, wb))
    return NO_REGS;

  if (s_register_operand (x, mode))
    return NO_REGS;

  /* Everything else must be staged through a core register.  */
  return GENERAL_REGS;
}
/* Implement TARGET_RETURN_IN_MSB: on big-endian AAPCS targets,
   aggregate and complex return values sit in the most significant end
   of the return register.  */
static bool
arm_return_in_msb (tree valtype)
{
  if (!TARGET_AAPCS_BASED || !BYTES_BIG_ENDIAN)
    return false;

  return (AGGREGATE_TYPE_P (valtype)
	  || TREE_CODE (valtype) == COMPLEX_TYPE);
}
#ifndef ENABLE_LLVM
/* Return TRUE if INSN is a SET of a general core register from memory,
   from a symbol, or from something note_invalid_constants flags (i.e.
   it will become a constant-pool load).  */
static bool
arm_memory_load_p (rtx insn)
{
  rtx pat, dest, src;

  if (insn == NULL_RTX || GET_CODE (insn) != INSN)
    return false;

  pat = PATTERN (insn);
  if (GET_CODE (pat) != SET)
    return false;

  /* Look through a SUBREG on the destination.  */
  dest = REG_OR_SUBREG_RTX (XEXP (pat, 0));
  src = XEXP (pat, 1);

  if (GET_CODE (dest) != REG
      || REGNO_REG_CLASS (REGNO (dest)) != GENERAL_REGS)
    return false;

  return (GET_CODE (src) == MEM
	  || GET_CODE (src) == SYMBOL_REF
	  || note_invalid_constants (insn, -1, false));
}
/* Return nonzero if INSN is a genuine instruction (not a USE/CLOBBER)
   whose "cirrus" attribute is anything other than CIRRUS_NOT, i.e. it
   executes on the Cirrus coprocessor.  */
static bool
arm_cirrus_insn_p (rtx insn)
{
  if (!insn
      || GET_CODE (insn) != INSN
      || GET_CODE (PATTERN (insn)) == USE
      || GET_CODE (PATTERN (insn)) == CLOBBER)
    return 0;

  return get_attr_cirrus (insn) != CIRRUS_NOT;
}
/* Machine-reorg helper for Cirrus coprocessor targets: insert NOPs
   after FIRST where a following Cirrus instruction would otherwise be
   too close (presumably a hardware-hazard workaround -- the exact
   erratum is not documented here).  */
static void
cirrus_reorg (rtx first)
{
  enum attr_cirrus attr;
  rtx body = PATTERN (first);
  rtx t;
  int nops;

  /* A non-return jump needs a NOP for each of the next (up to two)
     instructions that execute on the coprocessor.  */
  if (GET_CODE (first) == JUMP_INSN && GET_CODE (body) != RETURN)
    {
      nops = 0;
      t = next_nonnote_insn (first);

      if (arm_cirrus_insn_p (t))
	++ nops;

      if (arm_cirrus_insn_p (next_nonnote_insn (t)))
	++ nops;

      while (nops --)
	emit_insn_after (gen_nop (), first);

      return;
    }

  /* For a PARALLEL, classify by its first element.  */
  if (GET_CODE (body) == PARALLEL && XVECLEN (body, 0) > 0)
    body = XVECEXP (body, 0, 0);

  if (GET_CODE (body) == SET)
    {
      rtx lhs = XEXP (body, 0), rhs = XEXP (body, 1);

      /* A double-precision Cirrus op followed directly by another
	 Cirrus insn needs one separating NOP.  */
      if (get_attr_cirrus (first) == CIRRUS_DOUBLE)
	{
	  if (arm_cirrus_insn_p (next_nonnote_insn (first)))
	    emit_insn_after (gen_nop (), first);

	  return;
	}
      else if (arm_memory_load_p (first))
	{
	  unsigned int arm_regno;

	  /* Find the core register involved in the load.  */
	  if (GET_CODE (lhs) == REG)
	    arm_regno = REGNO (lhs);
	  else
	    {
	      gcc_assert (GET_CODE (rhs) == REG);
	      arm_regno = REGNO (rhs);
	    }

	  /* If the next insn is a Cirrus move that reads the register
	     just loaded, separate them with a NOP.  */
	  first = next_nonnote_insn (first);

	  if (! arm_cirrus_insn_p (first))
	    return;

	  body = PATTERN (first);

	  if (GET_CODE (body) == PARALLEL && XVECLEN (body, 0))
	    body = XVECEXP (body, 0, 0);

	  /* Look through a FLOAT conversion wrapper.  */
	  if (GET_CODE (body) == FLOAT)
	    body = XEXP (body, 0);

	  if (get_attr_cirrus (first) == CIRRUS_MOVE
	      && GET_CODE (XEXP (body, 1)) == REG
	      && arm_regno == REGNO (XEXP (body, 1)))
	    emit_insn_after (gen_nop (), first);

	  return;
	}
    }

  /* Only real instructions can carry the cirrus attribute.  */
  if (!first
      || GET_CODE (first) != INSN
      || GET_CODE (PATTERN (first)) == USE
      || GET_CODE (PATTERN (first)) == CLOBBER)
    return;

  attr = get_attr_cirrus (first);

  /* A Cirrus compare needs a NOP for each of the next (up to two)
     coprocessor instructions, like the jump case above.  */
  if (attr == CIRRUS_COMPARE)
    {
      nops = 0;
      t = next_nonnote_insn (first);

      if (arm_cirrus_insn_p (t))
	++ nops;

      if (arm_cirrus_insn_p (next_nonnote_insn (t)))
	++ nops;

      while (nops --)
	emit_insn_after (gen_nop (), first);

      return;
    }
}
#endif
/* Return nonzero if X (or any sub-rtx of X) contains a SYMBOL_REF;
   TLS unspecs are excluded (see symbol_mentioned_with_filter).  */
int
symbol_mentioned_p (rtx x)
{
  return symbol_mentioned_with_filter (x, 0);
}
/* As symbol_mentioned_p, but on Mach-O targets symbols known to be
   defined locally are not counted.  */
int
non_local_symbol_mentioned_p (rtx x)
{
  return symbol_mentioned_with_filter (x, 1);
}
/* Recursive worker for symbol_mentioned_p / non_local_symbol_mentioned_p.
   Walk every sub-rtx of X and return 1 if a SYMBOL_REF is found.
   UNSPEC_TLS subtrees are not searched.  When FILTER_LOCAL is nonzero,
   Mach-O symbols with locally-defined data do not count.  */
static int
symbol_mentioned_with_filter (rtx x, int filter_local)
{
  const char * fmt;
  int i;

  if (GET_CODE (x) == SYMBOL_REF)
    {
#if TARGET_MACHO
      if (filter_local && machopic_data_defined_p (x))
	return 0;
      else
#endif
	return 1;
    }

  /* The TLS-related unspecs wrap their symbol; do not report it.  */
  if (GET_CODE (x) == UNSPEC && XINT (x, 1) == UNSPEC_TLS)
    return 0;

  fmt = GET_RTX_FORMAT (GET_CODE (x));

  for (i = GET_RTX_LENGTH (GET_CODE (x)) - 1; i >= 0; i--)
    {
      if (fmt[i] == 'E')
	{
	  /* A vector operand: recurse into each element.  */
	  int j;

	  for (j = XVECLEN (x, i) - 1; j >= 0; j--)
	    if (symbol_mentioned_with_filter (XVECEXP (x, i, j),
					      filter_local))
	      return 1;
	}
      else if (fmt[i] == 'e'
	       && symbol_mentioned_with_filter (XEXP (x, i),
						filter_local))
	return 1;
    }

  return 0;
}
/* Return nonzero if X (or any sub-rtx of X) contains a LABEL_REF.
   UNSPEC_TLS subtrees are not searched.  */
int
label_mentioned_p (rtx x)
{
  const char *format;
  int i, j;

  if (GET_CODE (x) == LABEL_REF)
    return 1;

  /* TLS unspecs never wrap a label; skip them entirely.  */
  if (GET_CODE (x) == UNSPEC && XINT (x, 1) == UNSPEC_TLS)
    return 0;

  format = GET_RTX_FORMAT (GET_CODE (x));
  for (i = GET_RTX_LENGTH (GET_CODE (x)) - 1; i >= 0; i--)
    {
      switch (format[i])
	{
	case 'E':
	  /* Vector operand: check every element.  */
	  for (j = XVECLEN (x, i) - 1; j >= 0; j--)
	    if (label_mentioned_p (XVECEXP (x, i, j)))
	      return 1;
	  break;

	case 'e':
	  if (label_mentioned_p (XEXP (x, i)))
	    return 1;
	  break;
	}
    }

  return 0;
}
/* Return 1 if X is an UNSPEC_TLS reference, looking through one level
   of CONST wrapping; 0 otherwise.  */
int
tls_mentioned_p (rtx x)
{
  if (GET_CODE (x) == CONST)
    return tls_mentioned_p (XEXP (x, 0));

  if (GET_CODE (x) == UNSPEC && XINT (x, 1) == UNSPEC_TLS)
    return 1;

  return 0;
}
/* Implement TARGET_CANNOT_COPY_INSN_P: instructions that compute a PIC
   base (UNSPEC_PIC_BASE) must not be duplicated, whether the unspec is
   the source itself or hidden inside a MEM address on either side of
   the SET.  */
static bool
arm_cannot_copy_insn_p (rtx insn)
{
  rtx pat = PATTERN (insn);

  if (GET_CODE (pat) == SET)
    {
      rtx rhs = SET_SRC (pat);
      rtx lhs = SET_DEST (pat);

      if (GET_CODE (rhs) == UNSPEC
	  && XINT (rhs, 1) == UNSPEC_PIC_BASE)
	return TRUE;

      if (GET_CODE (rhs) == MEM
	  && GET_CODE (XEXP (rhs, 0)) == UNSPEC
	  && XINT (XEXP (rhs, 0), 1) == UNSPEC_PIC_BASE)
	return TRUE;

      if (GET_CODE (lhs) == MEM
	  && GET_CODE (XEXP (lhs, 0)) == UNSPEC
	  && XINT (XEXP (lhs, 0), 1) == UNSPEC_PIC_BASE)
	return TRUE;
    }

  /* Also check the first SET inside a PARALLEL.  */
  if (GET_CODE (pat) == PARALLEL
      && GET_CODE (XVECEXP (pat, 0, 0)) == SET)
    {
      rtx rhs = SET_SRC (XVECEXP (pat, 0, 0));

      if (GET_CODE (rhs) == UNSPEC
	  && XINT (rhs, 1) == UNSPEC_PIC_BASE)
	return TRUE;

      if (GET_CODE (rhs) == MEM
	  && GET_CODE (XEXP (rhs, 0)) == UNSPEC
	  && XINT (XEXP (rhs, 0), 1) == UNSPEC_PIC_BASE)
	return TRUE;
    }

  return FALSE;
}
/* Map the min/max rtx X to the comparison code that selects its first
   operand: SMAX -> GE, SMIN -> LE, UMAX -> GEU, UMIN -> LEU.  */
enum rtx_code
minmax_code (rtx x)
{
  enum rtx_code op = GET_CODE (x);

  if (op == SMAX)
    return GE;
  if (op == SMIN)
    return LE;
  if (op == UMIN)
    return LEU;
  if (op == UMAX)
    return GEU;

  gcc_unreachable ();
}
/* Return nonzero if the MEMs A and B reference adjacent words off the
   same base register (offsets differing by exactly 4), so that the two
   accesses can be combined into a multiple-register transfer.  */
int
adjacent_mem_locations (rtx a, rtx b)
{
  /* The order of the accesses is not preserved by the combination, so
     volatile references must never be merged.  */
  if (volatile_refs_p (a) || volatile_refs_p (b))
    return 0;

  if ((GET_CODE (XEXP (a, 0)) == REG
       || (GET_CODE (XEXP (a, 0)) == PLUS
	   && GET_CODE (XEXP (XEXP (a, 0), 1)) == CONST_INT))
      && (GET_CODE (XEXP (b, 0)) == REG
	  || (GET_CODE (XEXP (b, 0)) == PLUS
	      && GET_CODE (XEXP (XEXP (b, 0), 1)) == CONST_INT)))
    {
      HOST_WIDE_INT val0 = 0, val1 = 0;
      rtx reg0, reg1;
      int val_diff;

      /* Split each address into base register and constant offset
	 (a bare register counts as offset 0).  */
      if (GET_CODE (XEXP (a, 0)) == PLUS)
	{
	  reg0 = XEXP (XEXP (a, 0), 0);
	  val0 = INTVAL (XEXP (XEXP (a, 0), 1));
	}
      else
	reg0 = XEXP (a, 0);

      if (GET_CODE (XEXP (b, 0)) == PLUS)
	{
	  reg1 = XEXP (XEXP (b, 0), 0);
	  val1 = INTVAL (XEXP (XEXP (b, 0), 1));
	}
      else
	reg1 = XEXP (b, 0);

      /* Don't accept any offset that will require multiple instructions
	 to handle.  NB: const_ok_for_op takes the constant first and
	 the operator code second (see its prototype); the previous code
	 passed them the other way round, silently testing the integer
	 value of the PLUS enum instead of the offset.  */
      if (!const_ok_for_op (val0, PLUS) || !const_ok_for_op (val1, PLUS))
	return 0;

      /* Offsets from an eliminable base register can change during
	 elimination, so adjacency cannot be relied upon.  */
      if (arm_eliminable_register (reg0))
	return 0;

      val_diff = val1 - val0;

      if (arm_ld_sched)
	{
	  /* With load scheduling, only merge when optimizing for size
	     and the pair starts at a tiny offset.  */
	  return (optimize_size && (REGNO (reg0) == REGNO (reg1))
		  && (val0 == 0 || val1 == 0 || val0 == 4 || val1 == 4)
		  && (val_diff == 4 || val_diff == -4));
	}

      return ((REGNO (reg0) == REGNO (reg1))
	      && (val_diff == 4 || val_diff == -4));
    }

  return 0;
}
/* Decide whether the NOPS (2..4) loads described by OPERANDS --
   destinations in OPERANDS[0..NOPS-1], memory refs in
   OPERANDS[NOPS..2*NOPS-1] -- can be combined into one load-multiple.
   On success, when BASE is non-null, fills REGS[] with the register
   numbers in memory order, *BASE with the base register, and
   *LOAD_OFFSET with the lowest offset.  The return value selects the
   addressing variant (see emit_ldm_seq): 1 = ldmia, 2 = ldmib,
   3 = ldmda, 4 = ldmdb, 5 = adjust base first then ldmia; 0 = cannot
   (or should not) combine.  */
int
load_multiple_sequence (rtx *operands, int nops, int *regs, int *base,
			HOST_WIDE_INT *load_offset)
{
  int unsorted_regs[4];
  HOST_WIDE_INT unsorted_offsets[4];
  int order[4];
  int base_reg = -1;
  int i;

  /* Can only handle 2, 3 or 4 insns at present.  */
  gcc_assert (nops >= 2 && nops <= 4);

  for (i = 0; i < nops; i++)
    {
      rtx reg;
      rtx offset;

      /* Convert a subreg of a mem into the mem itself.  */
      if (GET_CODE (operands[nops + i]) == SUBREG)
	operands[nops + i] = alter_subreg (operands + (nops + i));

      gcc_assert (GET_CODE (operands[nops + i]) == MEM);

      /* Loads must be merge-safe; a volatile access must stay put.  */
      if (MEM_VOLATILE_P (operands[nops + i]))
	return 0;

      offset = const0_rtx;

      /* Accept (mem (reg)), possibly through a SUBREG, or
	 (mem (plus (reg) (const_int))); REG and OFFSET are set as a
	 side effect of the comparisons.  */
      if ((GET_CODE (reg = XEXP (operands[nops + i], 0)) == REG
	   || (GET_CODE (reg) == SUBREG
	       && GET_CODE (reg = SUBREG_REG (reg)) == REG))
	  || (GET_CODE (XEXP (operands[nops + i], 0)) == PLUS
	      && ((GET_CODE (reg = XEXP (XEXP (operands[nops + i], 0), 0))
		   == REG)
		  || (GET_CODE (reg) == SUBREG
		      && GET_CODE (reg = SUBREG_REG (reg)) == REG))
	      && (GET_CODE (offset = XEXP (XEXP (operands[nops + i], 0), 1))
		  == CONST_INT)))
	{
	  if (i == 0)
	    {
	      base_reg = REGNO (reg);
	      unsorted_regs[0] = (GET_CODE (operands[i]) == REG
				  ? REGNO (operands[i])
				  : REGNO (SUBREG_REG (operands[i])));
	      order[0] = 0;
	    }
	  else
	    {
	      /* All accesses must share the same base register.  */
	      if (base_reg != (int) REGNO (reg))
		return 0;

	      unsorted_regs[i] = (GET_CODE (operands[i]) == REG
				  ? REGNO (operands[i])
				  : REGNO (SUBREG_REG (operands[i])));
	      if (unsorted_regs[i] < unsorted_regs[order[0]])
		order[0] = i;
	    }

	  /* LDM cannot load PC, and overwriting the base before the
	     last load would corrupt the remaining addresses.  */
	  if (unsorted_regs[i] < 0 || unsorted_regs[i] > 14
	      || (i != nops - 1 && unsorted_regs[i] == base_reg))
	    return 0;

	  unsorted_offsets[i] = INTVAL (offset);
	}
      else
	/* Not a suitable memory address.  */
	return 0;
    }

  /* Selection-sort the remaining positions: LDM requires ascending
     register numbers matched to ascending, word-consecutive offsets.  */
  for (i = 1; i < nops; i++)
    {
      int j;

      order[i] = order[i - 1];
      for (j = 0; j < nops; j++)
	if (unsorted_regs[j] > unsorted_regs[order[i - 1]]
	    && (order[i] == order[i - 1]
		|| unsorted_regs[j] < unsorted_regs[order[i]]))
	  order[i] = j;

      /* Duplicate register numbers cannot be combined.  */
      if (order[i] == order[i - 1])
	return 0;

      /* Offsets must step by exactly one word.  */
      if (unsorted_offsets[order[i]] != unsorted_offsets[order[i - 1]] + 4)
	return 0;
    }

  if (base)
    {
      *base = base_reg;

      for (i = 0; i < nops; i++)
	regs[i] = unsorted_regs[order[i]];

      *load_offset = unsorted_offsets[order[0]];
    }

  if (unsorted_offsets[order[0]] == 0)
    return 1; /* ldmia */

  if (unsorted_offsets[order[0]] == 4)
    return 2; /* ldmib */

  if (unsorted_offsets[order[nops - 1]] == 0)
    return 3; /* ldmda */

  if (unsorted_offsets[order[nops - 1]] == -4)
    return 4; /* ldmdb */

  /* On cores with load scheduling, a 2-register ldm is not considered
     worthwhile when it also needs a base adjustment.  */
  if (nops == 2 && arm_ld_sched)
    return 0;

  /* Variant 5 needs an add/sub of the base first; only worthwhile if
     the adjustment is a single-instruction constant.  */
  return (const_ok_for_arm (unsorted_offsets[order[0]])
	  || const_ok_for_arm (-unsorted_offsets[order[0]])) ? 5 : 0;
}
/* Peephole output routine: emit a single ldm for the NOPS loads in
   OPERANDS, using load_multiple_sequence to choose the addressing
   variant.  Variant 5 first emits an add/sub that moves the base into
   the first destination register.  Returns "" (output is done here).  */
const char *
emit_ldm_seq (rtx *operands, int nops)
{
  int regs[4];
  int base_reg;
  HOST_WIDE_INT offset;
  char buf[100];
  int i;

  switch (load_multiple_sequence (operands, nops, regs, &base_reg, &offset))
    {
    case 1:
      strcpy (buf, "ldm%?ia\t");
      break;

    case 2:
      strcpy (buf, "ldm%?ib\t");
      break;

    case 3:
      strcpy (buf, "ldm%?da\t");
      break;

    case 4:
      strcpy (buf, "ldm%?db\t");
      break;

    case 5:
      /* Adjust the base into regs[0], then ldmia from there.  */
      if (offset >= 0)
	sprintf (buf, "add%%?\t%s%s, %s%s, #%ld", REGISTER_PREFIX,
		 reg_names[regs[0]], REGISTER_PREFIX, reg_names[base_reg],
		 (long) offset);
      else
	sprintf (buf, "sub%%?\t%s%s, %s%s, #%ld", REGISTER_PREFIX,
		 reg_names[regs[0]], REGISTER_PREFIX, reg_names[base_reg],
		 (long) -offset);
      output_asm_insn (buf, operands);
      base_reg = regs[0];
      strcpy (buf, "ldm%?ia\t");
      break;

    default:
      gcc_unreachable ();
    }

  /* Append "base, {r1, r2, ...}".  */
  sprintf (buf + strlen (buf), "%s%s, {%s%s", REGISTER_PREFIX,
	   reg_names[base_reg], REGISTER_PREFIX, reg_names[regs[0]]);

  for (i = 1; i < nops; i++)
    sprintf (buf + strlen (buf), ", %s%s", REGISTER_PREFIX,
	     reg_names[regs[i]]);

  strcat (buf, "}\t%@ phole ldm");

  output_asm_insn (buf, operands);
  return "";
}
/* Store-side analogue of load_multiple_sequence: decide whether the
   NOPS (2..4) stores described by OPERANDS can become one stm.  On
   success, when BASE is non-null, fills REGS[], *BASE and *LOAD_OFFSET
   as for loads.  Returns 1 = stmia, 2 = stmib, 3 = stmda, 4 = stmdb,
   or 0 when the stores cannot be combined.  */
int
store_multiple_sequence (rtx *operands, int nops, int *regs, int *base,
			 HOST_WIDE_INT * load_offset)
{
  int unsorted_regs[4];
  HOST_WIDE_INT unsorted_offsets[4];
  int order[4];
  int base_reg = -1;
  int i;

  /* Can only handle 2, 3 or 4 insns at present.  */
  gcc_assert (nops >= 2 && nops <= 4);

  for (i = 0; i < nops; i++)
    {
      rtx reg;
      rtx offset;

      /* Convert a subreg of a mem into the mem itself.  */
      if (GET_CODE (operands[nops + i]) == SUBREG)
	operands[nops + i] = alter_subreg (operands + (nops + i));

      gcc_assert (GET_CODE (operands[nops + i]) == MEM);

      /* Volatile accesses must keep their order; don't merge.  */
      if (MEM_VOLATILE_P (operands[nops + i]))
	return 0;

      offset = const0_rtx;

      /* Accept (mem (reg)), possibly through a SUBREG, or
	 (mem (plus (reg) (const_int))); REG and OFFSET are set as a
	 side effect of the comparisons.  */
      if ((GET_CODE (reg = XEXP (operands[nops + i], 0)) == REG
	   || (GET_CODE (reg) == SUBREG
	       && GET_CODE (reg = SUBREG_REG (reg)) == REG))
	  || (GET_CODE (XEXP (operands[nops + i], 0)) == PLUS
	      && ((GET_CODE (reg = XEXP (XEXP (operands[nops + i], 0), 0))
		   == REG)
		  || (GET_CODE (reg) == SUBREG
		      && GET_CODE (reg = SUBREG_REG (reg)) == REG))
	      && (GET_CODE (offset = XEXP (XEXP (operands[nops + i], 0), 1))
		  == CONST_INT)))
	{
	  if (i == 0)
	    {
	      base_reg = REGNO (reg);
	      unsorted_regs[0] = (GET_CODE (operands[i]) == REG
				  ? REGNO (operands[i])
				  : REGNO (SUBREG_REG (operands[i])));
	      order[0] = 0;
	    }
	  else
	    {
	      /* All accesses must share the same base register.  */
	      if (base_reg != (int) REGNO (reg))
		return 0;

	      unsorted_regs[i] = (GET_CODE (operands[i]) == REG
				  ? REGNO (operands[i])
				  : REGNO (SUBREG_REG (operands[i])));
	      if (unsorted_regs[i] < unsorted_regs[order[0]])
		order[0] = i;
	    }

	  /* STM cannot store PC.  */
	  if (unsorted_regs[i] < 0 || unsorted_regs[i] > 14)
	    return 0;

	  unsorted_offsets[i] = INTVAL (offset);
	}
      else
	/* Not a suitable memory address.  */
	return 0;
    }

  /* Selection-sort the remaining positions: STM requires ascending
     register numbers matched to ascending, word-consecutive offsets.  */
  for (i = 1; i < nops; i++)
    {
      int j;

      order[i] = order[i - 1];
      for (j = 0; j < nops; j++)
	if (unsorted_regs[j] > unsorted_regs[order[i - 1]]
	    && (order[i] == order[i - 1]
		|| unsorted_regs[j] < unsorted_regs[order[i]]))
	  order[i] = j;

      /* Duplicate register numbers cannot be combined.  */
      if (order[i] == order[i - 1])
	return 0;

      /* Offsets must step by exactly one word.  */
      if (unsorted_offsets[order[i]] != unsorted_offsets[order[i - 1]] + 4)
	return 0;
    }

  if (base)
    {
      *base = base_reg;

      for (i = 0; i < nops; i++)
	regs[i] = unsorted_regs[order[i]];

      *load_offset = unsorted_offsets[order[0]];
    }

  if (unsorted_offsets[order[0]] == 0)
    return 1; /* stmia */

  if (unsorted_offsets[order[0]] == 4)
    return 2; /* stmib */

  if (unsorted_offsets[order[nops - 1]] == 0)
    return 3; /* stmda */

  if (unsorted_offsets[order[nops - 1]] == -4)
    return 4; /* stmdb */

  return 0;
}
/* Peephole output routine: emit a single stm for the NOPS stores in
   OPERANDS, using store_multiple_sequence to choose the addressing
   variant.  Returns "" (output is done here).  */
const char *
emit_stm_seq (rtx *operands, int nops)
{
  int regs[4];
  int base_reg;
  HOST_WIDE_INT offset;
  char buf[100];
  int i;

  switch (store_multiple_sequence (operands, nops, regs, &base_reg, &offset))
    {
    case 1:
      strcpy (buf, "stm%?ia\t");
      break;

    case 2:
      strcpy (buf, "stm%?ib\t");
      break;

    case 3:
      strcpy (buf, "stm%?da\t");
      break;

    case 4:
      strcpy (buf, "stm%?db\t");
      break;

    default:
      gcc_unreachable ();
    }

  /* Append "base, {r1, r2, ...}".  */
  sprintf (buf + strlen (buf), "%s%s, {%s%s", REGISTER_PREFIX,
	   reg_names[base_reg], REGISTER_PREFIX, reg_names[regs[0]]);

  for (i = 1; i < nops; i++)
    sprintf (buf + strlen (buf), ", %s%s", REGISTER_PREFIX,
	     reg_names[regs[i]]);

  strcat (buf, "}\t%@ phole stm");

  output_asm_insn (buf, operands);
  return "";
}
/* Build RTL that loads COUNT consecutive words starting at register
   BASE_REGNO from memory at FROM (direction UP; WRITE_BACK updates
   FROM past the transfer).  BASEMEM supplies the memory attributes and
   *OFFSETP tracks the running offset from it, updated on writeback.
   Returns either an insn sequence (XScale discrete-load case) or a
   PARALLEL suitable for a load-multiple pattern.  */
rtx
arm_gen_load_multiple (int base_regno, int count, rtx from, int up,
		       int write_back, rtx basemem, HOST_WIDE_INT *offsetp)
{
  HOST_WIDE_INT offset = *offsetp;
  int i = 0, j;
  rtx result;
  int sign = up ? 1 : -1;
  rtx mem, addr;

  /* On XScale, when not optimizing for size, emit 1-2 word transfers as
     discrete loads rather than an ldm.  */
  if (arm_tune_xscale && count <= 2 && ! optimize_size)
    {
      rtx seq;

      start_sequence ();

      for (i = 0; i < count; i++)
	{
	  addr = plus_constant (from, i * 4 * sign);
	  mem = adjust_automodify_address (basemem, SImode, addr, offset);
	  emit_move_insn (gen_rtx_REG (SImode, base_regno + i), mem);
	  offset += 4 * sign;
	}

      if (write_back)
	{
	  emit_move_insn (from, plus_constant (from, count * 4 * sign));
	  *offsetp = offset;
	}

      seq = get_insns ();
      end_sequence ();

      return seq;
    }

  result = gen_rtx_PARALLEL (VOIDmode,
			     rtvec_alloc (count + (write_back ? 1 : 0)));
  if (write_back)
    {
      /* Element 0 of the PARALLEL is the base-register update.  */
      XVECEXP (result, 0, 0)
	= gen_rtx_SET (VOIDmode, from, plus_constant (from, count * 4 * sign));
      i = 1;
      count++;
    }

  for (j = 0; i < count; i++, j++)
    {
      addr = plus_constant (from, j * 4 * sign);
      mem = adjust_automodify_address_nv (basemem, SImode, addr, offset);
      XVECEXP (result, 0, i)
	= gen_rtx_SET (VOIDmode, gen_rtx_REG (SImode, base_regno + j), mem);
      offset += 4 * sign;
    }

  if (write_back)
    *offsetp = offset;

  return result;
}
/* Store-side analogue of arm_gen_load_multiple: build RTL that stores
   COUNT consecutive registers starting at BASE_REGNO to memory at TO.
   See arm_gen_load_multiple for the meaning of UP, WRITE_BACK, BASEMEM
   and OFFSETP.  */
rtx
arm_gen_store_multiple (int base_regno, int count, rtx to, int up,
			int write_back, rtx basemem, HOST_WIDE_INT *offsetp)
{
  HOST_WIDE_INT offset = *offsetp;
  int i = 0, j;
  rtx result;
  int sign = up ? 1 : -1;
  rtx mem, addr;

  /* On XScale, when not optimizing for size, emit 1-2 word transfers as
     discrete stores rather than an stm.  */
  if (arm_tune_xscale && count <= 2 && ! optimize_size)
    {
      rtx seq;

      start_sequence ();

      for (i = 0; i < count; i++)
	{
	  addr = plus_constant (to, i * 4 * sign);
	  mem = adjust_automodify_address (basemem, SImode, addr, offset);
	  emit_move_insn (mem, gen_rtx_REG (SImode, base_regno + i));
	  offset += 4 * sign;
	}

      if (write_back)
	{
	  emit_move_insn (to, plus_constant (to, count * 4 * sign));
	  *offsetp = offset;
	}

      seq = get_insns ();
      end_sequence ();

      return seq;
    }

  result = gen_rtx_PARALLEL (VOIDmode,
			     rtvec_alloc (count + (write_back ? 1 : 0)));
  if (write_back)
    {
      /* Element 0 of the PARALLEL is the base-register update.  */
      XVECEXP (result, 0, 0)
	= gen_rtx_SET (VOIDmode, to,
		       plus_constant (to, count * 4 * sign));
      i = 1;
      count++;
    }

  for (j = 0; i < count; i++, j++)
    {
      addr = plus_constant (to, j * 4 * sign);
      mem = adjust_automodify_address_nv (basemem, SImode, addr, offset);
      XVECEXP (result, 0, i)
	= gen_rtx_SET (VOIDmode, mem, gen_rtx_REG (SImode, base_regno + j));
      offset += 4 * sign;
    }

  if (write_back)
    *offsetp = offset;

  return result;
}
/* Expand a block copy (movmemqi).  operands[0]/[1] are the destination
   and source MEMs, operands[2] the byte count, operands[3] the
   alignment.  Only handles constant counts up to 64 bytes with
   word alignment; bulk words go through load/store-multiples using
   r0-r3 as scratch, and 1-3 trailing bytes are stored from
   PART_BYTES_REG.  Returns 1 on success, 0 to fall back to the generic
   expander.  */
int
arm_gen_movmemqi (rtx *operands)
{
  HOST_WIDE_INT in_words_to_go, out_words_to_go, last_bytes;
  HOST_WIDE_INT srcoffset, dstoffset;
  int i;
  rtx src, dst, srcbase, dstbase;
  rtx part_bytes_reg = NULL;
  rtx mem;

  if (GET_CODE (operands[2]) != CONST_INT
      || GET_CODE (operands[3]) != CONST_INT
      || INTVAL (operands[2]) > 64
      || INTVAL (operands[3]) & 3)
    return 0;

  /* When optimizing for size, only expand the handful of sizes that
     produce compact code; let the library handle the rest.  */
  if (optimize_size
      && INTVAL (operands[2]) != 1
      && INTVAL (operands[2]) != 2
      && INTVAL (operands[2]) != 4
      && INTVAL (operands[2]) != 8
      && INTVAL (operands[2]) != 12
      && INTVAL (operands[2]) != 16)
    return 0;

  dstbase = operands[0];
  srcbase = operands[1];

  dst = copy_to_mode_reg (SImode, XEXP (dstbase, 0));
  src = copy_to_mode_reg (SImode, XEXP (srcbase, 0));

  /* Words to load vs. whole words to store; LAST_BYTES is the 0-3 byte
     remainder.  */
  in_words_to_go = ARM_NUM_INTS (INTVAL (operands[2]));
  out_words_to_go = INTVAL (operands[2]) / 4;
  last_bytes = INTVAL (operands[2]) & 3;
  dstoffset = srcoffset = 0;

  /* The partial word holding the trailing bytes ends up in the scratch
     register the final load targets.  */
  if (out_words_to_go != in_words_to_go && ((in_words_to_go - 1) & 3) != 0)
    part_bytes_reg = gen_rtx_REG (SImode, (in_words_to_go - 1) & 3);

  /* Copy up to four words per iteration through r0-r3.  */
  for (i = 0; in_words_to_go >= 2; i+=4)
    {
      if (in_words_to_go > 4)
	emit_insn (arm_gen_load_multiple (0, 4, src, TRUE, TRUE,
					  srcbase, &srcoffset));
      else
	emit_insn (arm_gen_load_multiple (0, in_words_to_go, src, TRUE,
					  FALSE, srcbase, &srcoffset));

      if (out_words_to_go)
	{
	  if (out_words_to_go > 4)
	    emit_insn (arm_gen_store_multiple (0, 4, dst, TRUE, TRUE,
					       dstbase, &dstoffset));
	  else if (out_words_to_go != 1)
	    emit_insn (arm_gen_store_multiple (0, out_words_to_go,
					       dst, TRUE,
					       (last_bytes == 0
						? FALSE : TRUE),
					       dstbase, &dstoffset));
	  else
	    {
	      /* Single word: a plain store, bumping DST manually when
		 trailing bytes remain.  */
	      mem = adjust_automodify_address (dstbase, SImode, dst, dstoffset);
	      emit_move_insn (mem, gen_rtx_REG (SImode, 0));
	      if (last_bytes != 0)
		{
		  emit_insn (gen_addsi3 (dst, dst, GEN_INT (4)));
		  dstoffset += 4;
		}
	    }
	}

      in_words_to_go -= in_words_to_go < 4 ? in_words_to_go : 4;
      out_words_to_go -= out_words_to_go < 4 ? out_words_to_go : 4;
    }

  /* One full word left to store (the loop exits with at most one).  */
  if (out_words_to_go)
    {
      rtx sreg;

      mem = adjust_automodify_address (srcbase, SImode, src, srcoffset);
      sreg = copy_to_reg (mem);
      mem = adjust_automodify_address (dstbase, SImode, dst, dstoffset);
      emit_move_insn (mem, sreg);
      in_words_to_go--;

      gcc_assert (!in_words_to_go);	/* Sanity check.  */
    }

  /* Load the partial word that supplies the trailing bytes.  */
  if (in_words_to_go)
    {
      gcc_assert (in_words_to_go > 0);

      mem = adjust_automodify_address (srcbase, SImode, src, srcoffset);
      part_bytes_reg = copy_to_mode_reg (SImode, mem);
    }

  gcc_assert (!last_bytes || part_bytes_reg);

  if (BYTES_BIG_ENDIAN && last_bytes)
    {
      rtx tmp = gen_reg_rtx (SImode);

      /* Big-endian: the wanted bytes are at the top of the word, so
	 shift them down and store back-to-front.  */
      emit_insn (gen_lshrsi3 (tmp, part_bytes_reg,
			      GEN_INT (8 * (4 - last_bytes))));
      part_bytes_reg = tmp;

      while (last_bytes)
	{
	  mem = adjust_automodify_address (dstbase, QImode,
					   plus_constant (dst, last_bytes - 1),
					   dstoffset + last_bytes - 1);
	  emit_move_insn (mem, gen_lowpart (QImode, part_bytes_reg));

	  if (--last_bytes)
	    {
	      tmp = gen_reg_rtx (SImode);
	      emit_insn (gen_lshrsi3 (tmp, part_bytes_reg, GEN_INT (8)));
	      part_bytes_reg = tmp;
	    }
	}
    }
  else
    {
      /* Little-endian: store a halfword then (possibly) a byte, from
	 the low end of PART_BYTES_REG upwards.  */
      if (last_bytes > 1)
	{
	  mem = adjust_automodify_address (dstbase, HImode, dst, dstoffset);
	  emit_move_insn (mem, gen_lowpart (HImode, part_bytes_reg));
	  last_bytes -= 2;
	  if (last_bytes)
	    {
	      rtx tmp = gen_reg_rtx (SImode);
	      emit_insn (gen_addsi3 (dst, dst, const2_rtx));
	      emit_insn (gen_lshrsi3 (tmp, part_bytes_reg, GEN_INT (16)));
	      part_bytes_reg = tmp;
	      dstoffset += 2;
	    }
	}

      if (last_bytes)
	{
	  mem = adjust_automodify_address (dstbase, QImode, dst, dstoffset);
	  emit_move_insn (mem, gen_lowpart (QImode, part_bytes_reg));
	}
    }

  return 1;
}
/* Choose the CC mode for a pair of comparisons X and Y combined with
   AND or IOR (COND_OR is one of the DOM_CC_* codes) when one
   comparison dominates the other, allowing both to be evaluated from
   a single flag-setting sequence.  Returns CCmode when no dominance
   relationship can be exploited.  */
enum machine_mode
arm_select_dominance_cc_mode (rtx x, rtx y, HOST_WIDE_INT cond_or)
{
  enum rtx_code cond1, cond2;
  int swapped = 0;

  /* Both sub-comparisons must individually select plain CCmode;
     anything fancier cannot be combined.  */
  if ((arm_select_cc_mode (cond1 = GET_CODE (x), XEXP (x, 0), XEXP (x, 1))
       != CCmode)
      || (arm_select_cc_mode (cond2 = GET_CODE (y), XEXP (y, 0), XEXP (y, 1))
	  != CCmode))
    return CCmode;

  /* If X is negated (DOM_CC_NX_OR_Y), reverse its condition before
     looking for a dominance relation.  */
  if (cond_or == DOM_CC_NX_OR_Y)
    cond1 = reverse_condition (cond1);

  /* If neither condition dominates the other (trying both orders),
     give up; SWAPPED records that the second ordering succeeded.  */
  if (cond1 != cond2
      && !comparison_dominates_p (cond1, cond2)
      && (swapped = 1, !comparison_dominates_p (cond2, cond1)))
    return CCmode;

  if (swapped)
    {
      enum rtx_code temp = cond1;
      cond1 = cond2;
      cond2 = temp;
    }

  /* Map the dominating condition COND1 / dominated condition COND2
     pair onto one of the CC_D* modes.  */
  switch (cond1)
    {
    case EQ:
      if (cond_or == DOM_CC_X_AND_Y)
	return CC_DEQmode;

      switch (cond2)
	{
	case EQ: return CC_DEQmode;
	case LE: return CC_DLEmode;
	case LEU: return CC_DLEUmode;
	case GE: return CC_DGEmode;
	case GEU: return CC_DGEUmode;
	default: gcc_unreachable ();
	}

    case LT:
      if (cond_or == DOM_CC_X_AND_Y)
	return CC_DLTmode;

      switch (cond2)
	{
	case LT:
	  return CC_DLTmode;
	case LE:
	  return CC_DLEmode;
	case NE:
	  return CC_DNEmode;
	default:
	  gcc_unreachable ();
	}

    case GT:
      if (cond_or == DOM_CC_X_AND_Y)
	return CC_DGTmode;

      switch (cond2)
	{
	case GT:
	  return CC_DGTmode;
	case GE:
	  return CC_DGEmode;
	case NE:
	  return CC_DNEmode;
	default:
	  gcc_unreachable ();
	}

    case LTU:
      if (cond_or == DOM_CC_X_AND_Y)
	return CC_DLTUmode;

      switch (cond2)
	{
	case LTU:
	  return CC_DLTUmode;
	case LEU:
	  return CC_DLEUmode;
	case NE:
	  return CC_DNEmode;
	default:
	  gcc_unreachable ();
	}

    case GTU:
      if (cond_or == DOM_CC_X_AND_Y)
	return CC_DGTUmode;

      switch (cond2)
	{
	case GTU:
	  return CC_DGTUmode;
	case GEU:
	  return CC_DGEUmode;
	case NE:
	  return CC_DNEmode;
	default:
	  gcc_unreachable ();
	}

    /* The remaining cases only occur when both comparisons are
       identical.  */
    case NE:
      gcc_assert (cond1 == cond2);
      return CC_DNEmode;

    case LE:
      gcc_assert (cond1 == cond2);
      return CC_DLEmode;

    case GE:
      gcc_assert (cond1 == cond2);
      return CC_DGEmode;

    case LEU:
      gcc_assert (cond1 == cond2);
      return CC_DLEUmode;

    case GEU:
      gcc_assert (cond1 == cond2);
      return CC_DGEUmode;

    default:
      gcc_unreachable ();
    }
}
/* Select the CC mode used for comparing X against Y with operator OP.
   Special CC modes record which flags an ARM comparison actually
   leaves in a meaningful state.  */
enum machine_mode
arm_select_cc_mode (enum rtx_code op, rtx x, rtx y)
{
  /* Floating-point comparisons.  */
  if (GET_MODE_CLASS (GET_MODE (x)) == MODE_FLOAT)
    {
      switch (op)
	{
	case EQ:
	case NE:
	case UNORDERED:
	case ORDERED:
	case UNLT:
	case UNLE:
	case UNGT:
	case UNGE:
	case UNEQ:
	case LTGT:
	  return CCFPmode;

	case LT:
	case LE:
	case GT:
	case GE:
	  /* Maverick (Cirrus) hard float uses the non-trapping
	     compare for ordered comparisons too.  */
	  if (TARGET_HARD_FLOAT && TARGET_MAVERICK)
	    return CCFPmode;
	  return CCFPEmode;

	default:
	  gcc_unreachable ();
	}
    }

  /* A shift compared against a plain register must be emitted with
     the operands swapped.  */
  if (GET_MODE (y) == SImode && GET_CODE (y) == REG
      && (GET_CODE (x) == ASHIFT || GET_CODE (x) == ASHIFTRT
	  || GET_CODE (x) == LSHIFTRT || GET_CODE (x) == ROTATE
	  || GET_CODE (x) == ROTATERT))
    return CC_SWPmode;

  /* (neg x) ==/!= reg: only the Z flag is meaningful.  */
  if (GET_MODE (y) == SImode && REG_P (y)
      && GET_CODE (x) == NEG
      && (op == EQ || op == NE))
    return CC_Zmode;

  /* A QImode memory value shifted up by 24 bits compared against a
     constant with an equality-style test: only Z matters.  */
  if (GET_MODE (x) == SImode
      && GET_CODE (x) == ASHIFT
      && GET_CODE (XEXP (x, 1)) == CONST_INT && INTVAL (XEXP (x, 1)) == 24
      && GET_CODE (XEXP (x, 0)) == SUBREG
      && GET_CODE (SUBREG_REG (XEXP (x, 0))) == MEM
      && GET_MODE (SUBREG_REG (XEXP (x, 0))) == QImode
      && (op == EQ || op == NE
	  || op == GEU || op == GTU || op == LTU || op == LEU)
      && GET_CODE (y) == CONST_INT)
    return CC_Zmode;

  /* Combined (conditional / AND / IOR of two comparisons): defer to
     the dominance machinery.  */
  if (GET_CODE (x) == IF_THEN_ELSE
      && (XEXP (x, 2) == const0_rtx
	  || XEXP (x, 2) == const1_rtx)
      && COMPARISON_P (XEXP (x, 0))
      && COMPARISON_P (XEXP (x, 1)))
    return arm_select_dominance_cc_mode (XEXP (x, 0), XEXP (x, 1),
					 INTVAL (XEXP (x, 2)));

  if (GET_CODE (x) == AND
      && COMPARISON_P (XEXP (x, 0))
      && COMPARISON_P (XEXP (x, 1)))
    return arm_select_dominance_cc_mode (XEXP (x, 0), XEXP (x, 1),
					 DOM_CC_X_AND_Y);

  if (GET_CODE (x) == IOR
      && COMPARISON_P (XEXP (x, 0))
      && COMPARISON_P (XEXP (x, 1)))
    return arm_select_dominance_cc_mode (XEXP (x, 0), XEXP (x, 1),
					 DOM_CC_X_OR_Y);

  /* Thumb single-bit test: only the N flag is usable.  */
  if (TARGET_THUMB
      && GET_MODE (x) == SImode
      && (op == EQ || op == NE)
      && GET_CODE (x) == ZERO_EXTRACT
      && XEXP (x, 1) == const1_rtx)
    return CC_Nmode;

  /* Flag-setting data operations against zero leave V in an undefined
     state: CC_NOOVmode records that overflow must not be consulted.  */
  if (GET_MODE (x) == SImode
      && y == const0_rtx
      && (op == EQ || op == NE || op == LT || op == GE)
      && (GET_CODE (x) == PLUS || GET_CODE (x) == MINUS
	  || GET_CODE (x) == AND || GET_CODE (x) == IOR
	  || GET_CODE (x) == XOR || GET_CODE (x) == MULT
	  || GET_CODE (x) == NOT || GET_CODE (x) == NEG
	  || GET_CODE (x) == LSHIFTRT
	  || GET_CODE (x) == ASHIFT || GET_CODE (x) == ASHIFTRT
	  || GET_CODE (x) == ROTATERT
	  || (TARGET_ARM && GET_CODE (x) == ZERO_EXTRACT)))
    return CC_NOOVmode;

  if (GET_MODE (x) == QImode && (op == EQ || op == NE))
    return CC_Zmode;

  /* Unsigned comparison of an addition against one of its operands:
     only the carry flag is needed.  */
  if (GET_MODE (x) == SImode && (op == LTU || op == GEU)
      && GET_CODE (x) == PLUS
      && (rtx_equal_p (XEXP (x, 0), y) || rtx_equal_p (XEXP (x, 1), y)))
    return CC_Cmode;

  return CCmode;
}
/* Emit a comparison of X and Y under operator CODE and return the
   condition-code register holding the result.  */
rtx
arm_gen_compare_reg (enum rtx_code code, rtx x, rtx y)
{
  enum machine_mode cc_mode;
  rtx cc_reg;

  cc_mode = SELECT_CC_MODE (code, x, y);
  cc_reg = gen_rtx_REG (cc_mode, CC_REGNUM);
  emit_set_insn (cc_reg, gen_rtx_COMPARE (cc_mode, x, y));

  return cc_reg;
}
/* Emit a sequence computing the return-address mask into a fresh
   pseudo register, and return that pseudo.  */
rtx
arm_gen_return_addr_mask (void)
{
  rtx mask_reg = gen_reg_rtx (Pmode);

  emit_insn (gen_return_addr_mask (mask_reg));

  return mask_reg;
}
/* Reload helper: load the HImode memory OPERANDS[1] into OPERANDS[0]
   as two QImode loads merged with a shift/IOR, using the DImode
   scratch register pair OPERANDS[2].  Used where the target cannot
   perform the halfword load directly.  */
void
arm_reload_in_hi (rtx *operands)
{
  rtx ref = operands[1];
  rtx base, scratch;
  HOST_WIDE_INT offset = 0;

  /* Strip a SUBREG wrapper, remembering its byte offset.  */
  if (GET_CODE (ref) == SUBREG)
    {
      offset = SUBREG_BYTE (ref);
      ref = SUBREG_REG (ref);
    }

  if (GET_CODE (ref) == REG)
    {
      /* A pseudo that did not get a hard register: use its recorded
	 equivalent memory or address.  */
      if (reg_equiv_mem[REGNO (ref)])
	{
	  ref = reg_equiv_mem[REGNO (ref)];
	  base = find_replacement (&XEXP (ref, 0));
	}
      else
	base = reg_equiv_address[REGNO (ref)];
    }
  else
    base = find_replacement (&XEXP (ref, 0));

  /* An address form that the QImode loads cannot encode: compute it
     into the second half of the scratch pair first.  */
  if (GET_CODE (base) == MINUS
      || (GET_CODE (base) == PLUS && GET_CODE (XEXP (base, 1)) != CONST_INT))
    {
      rtx base_plus = gen_rtx_REG (SImode, REGNO (operands[2]) + 1);

      emit_set_insn (base_plus, base);
      base = base_plus;
    }
  else if (GET_CODE (base) == PLUS)
    {
      /* Split a reg+const address into a high part added into the
	 base register and a low part that fits the load offset.  */
      HOST_WIDE_INT hi, lo;

      offset += INTVAL (XEXP (base, 1));
      base = XEXP (base, 0);

      lo = (offset >= 0
	    ? (offset & 0xfff)
	    : -((-offset) & 0xfff));

      /* Avoid lo == 4095: offset + 1 below would then overflow the
	 12-bit offset field.  */
      if (lo == 4095)
	lo &= 0x7ff;

      /* Sign-extend (offset - lo) to the host's width.  */
      hi = ((((offset - lo) & (HOST_WIDE_INT) 0xffffffff)
	     ^ (HOST_WIDE_INT) 0x80000000)
	    - (HOST_WIDE_INT) 0x80000000);

      gcc_assert (hi + lo == offset);

      if (hi != 0)
	{
	  rtx base_plus = gen_rtx_REG (SImode, REGNO (operands[2]) + 1);

	  emit_insn (gen_addsi3 (base_plus, base, GEN_INT (hi)));
	  base = base_plus;
	  offset = lo;
	}
    }

  /* If the scratch aliases the destination, use the other half of the
     scratch pair instead.  */
  if (REGNO (operands[2]) == REGNO (operands[0]))
    scratch = gen_rtx_REG (SImode, REGNO (operands[2]) + 1);
  else
    scratch = gen_rtx_REG (SImode, REGNO (operands[2]));

  /* Load both bytes, then merge them with the shift direction chosen
     by endianness.  */
  emit_insn (gen_zero_extendqisi2 (scratch,
				   gen_rtx_MEM (QImode,
						plus_constant (base,
							       offset))));
  emit_insn (gen_zero_extendqisi2 (gen_rtx_SUBREG (SImode, operands[0], 0),
				   gen_rtx_MEM (QImode,
						plus_constant (base,
							       offset + 1))));
  if (!BYTES_BIG_ENDIAN)
    emit_set_insn (gen_rtx_SUBREG (SImode, operands[0], 0),
		   gen_rtx_IOR (SImode,
				gen_rtx_ASHIFT
				(SImode,
				 gen_rtx_SUBREG (SImode, operands[0], 0),
				 GEN_INT (8)),
				scratch));
  else
    emit_set_insn (gen_rtx_SUBREG (SImode, operands[0], 0),
		   gen_rtx_IOR (SImode,
				gen_rtx_ASHIFT (SImode, scratch,
						GEN_INT (8)),
				gen_rtx_SUBREG (SImode, operands[0], 0)));
}
/* Reload helper: store the HImode value OPERANDS[1] to memory
   OPERANDS[0] as two QImode stores, using the DImode scratch pair
   OPERANDS[2].  Mirror of arm_reload_in_hi.  */
void
arm_reload_out_hi (rtx *operands)
{
  rtx ref = operands[0];
  rtx outval = operands[1];
  rtx base, scratch;
  HOST_WIDE_INT offset = 0;

  /* Strip a SUBREG wrapper, remembering its byte offset.  */
  if (GET_CODE (ref) == SUBREG)
    {
      offset = SUBREG_BYTE (ref);
      ref = SUBREG_REG (ref);
    }

  if (GET_CODE (ref) == REG)
    {
      /* A pseudo without a hard register: use its equivalent memory
	 location or address.  */
      if (reg_equiv_mem[REGNO (ref)])
	{
	  ref = reg_equiv_mem[REGNO (ref)];
	  base = find_replacement (&XEXP (ref, 0));
	}
      else
	base = reg_equiv_address[REGNO (ref)];
    }
  else
    base = find_replacement (&XEXP (ref, 0));

  scratch = gen_rtx_REG (SImode, REGNO (operands[2]));

  /* A complex address: compute it into the second half of the scratch
     pair first, taking care never to clobber OUTVAL.  */
  if (GET_CODE (base) == MINUS
      || (GET_CODE (base) == PLUS && GET_CODE (XEXP (base, 1)) != CONST_INT))
    {
      rtx base_plus = gen_rtx_REG (SImode, REGNO (operands[2]) + 1);

      if (reg_overlap_mentioned_p (base_plus, outval))
	{
	  /* Prefer swapping the two scratch halves; failing that,
	     move OUTVAL into the scratch before it is clobbered.  */
	  if (!reg_overlap_mentioned_p (scratch, outval))
	    {
	      rtx tmp = scratch;
	      scratch = base_plus;
	      base_plus = tmp;
	    }
	  else
	    {
	      rtx scratch_hi = gen_rtx_REG (HImode, REGNO (operands[2]));

	      emit_insn (gen_movhi (scratch_hi, outval));
	      outval = scratch_hi;
	    }
	}

      emit_set_insn (base_plus, base);
      base = base_plus;
    }
  else if (GET_CODE (base) == PLUS)
    {
      /* Split a reg+const address into high/low parts, as in
	 arm_reload_in_hi.  */
      HOST_WIDE_INT hi, lo;

      offset += INTVAL (XEXP (base, 1));
      base = XEXP (base, 0);

      lo = (offset >= 0
	    ? (offset & 0xfff)
	    : -((-offset) & 0xfff));

      /* Keep offset + 1 encodable in the 12-bit field.  */
      if (lo == 4095)
	lo &= 0x7ff;

      /* Sign-extend (offset - lo) to the host's width.  */
      hi = ((((offset - lo) & (HOST_WIDE_INT) 0xffffffff)
	     ^ (HOST_WIDE_INT) 0x80000000)
	    - (HOST_WIDE_INT) 0x80000000);

      gcc_assert (hi + lo == offset);

      if (hi != 0)
	{
	  rtx base_plus = gen_rtx_REG (SImode, REGNO (operands[2]) + 1);

	  if (reg_overlap_mentioned_p (base_plus, outval))
	    {
	      /* Same overlap-avoidance dance as above.  */
	      if (!reg_overlap_mentioned_p (scratch, outval))
		{
		  rtx tmp = scratch;
		  scratch = base_plus;
		  base_plus = tmp;
		}
	      else
		{
		  rtx scratch_hi = gen_rtx_REG (HImode, REGNO (operands[2]));

		  emit_insn (gen_movhi (scratch_hi, outval));
		  outval = scratch_hi;
		}
	    }

	  emit_insn (gen_addsi3 (base_plus, base, GEN_INT (hi)));
	  base = base_plus;
	  offset = lo;
	}
    }

  /* Store the two bytes in endian order; the high byte is produced by
     shifting OUTVAL right through the scratch register.  */
  if (BYTES_BIG_ENDIAN)
    {
      emit_insn (gen_movqi (gen_rtx_MEM (QImode,
					 plus_constant (base, offset + 1)),
			    gen_lowpart (QImode, outval)));
      emit_insn (gen_lshrsi3 (scratch,
			      gen_rtx_SUBREG (SImode, outval, 0),
			      GEN_INT (8)));
      emit_insn (gen_movqi (gen_rtx_MEM (QImode, plus_constant (base, offset)),
			    gen_lowpart (QImode, scratch)));
    }
  else
    {
      emit_insn (gen_movqi (gen_rtx_MEM (QImode, plus_constant (base, offset)),
			    gen_lowpart (QImode, outval)));
      emit_insn (gen_lshrsi3 (scratch,
			      gen_rtx_SUBREG (SImode, outval, 0),
			      GEN_INT (8)));
      emit_insn (gen_movqi (gen_rtx_MEM (QImode,
					 plus_constant (base, offset + 1)),
			    gen_lowpart (QImode, scratch)));
    }
}
/* Decide whether an argument of mode MODE and type TYPE must be
   passed entirely on the stack.  AAPCS-based targets ignore the
   padding direction when making this decision.  */
static bool
arm_must_pass_in_stack (enum machine_mode mode, tree type)
{
  return (TARGET_AAPCS_BASED
	  ? must_pass_in_stack_var_size (mode, type)
	  : must_pass_in_stack_var_size_or_pad (mode, type));
}
/* Return true when an argument of mode MODE / type TYPE is padded
   upward in its slot.  Non-AAPCS targets use the default rule; under
   AAPCS everything pads upward except big-endian integral types.  */
bool
arm_pad_arg_upward (enum machine_mode mode, tree type)
{
  if (!TARGET_AAPCS_BASED)
    return DEFAULT_FUNCTION_ARG_PADDING (mode, type) == upward;

  return !(type && BYTES_BIG_ENDIAN && INTEGRAL_TYPE_P (type));
}
/* Return true if a value of mode MODE / type TYPE is padded upward
   when it occupies part of a register.  On AAPCS big-endian targets,
   small aggregates and complex values sit in the least-significant
   end (pad upward); everything else follows endianness.

   Fix: TYPE may be NULL when only a mode is available (e.g. for
   libcall values) — the sibling arm_pad_arg_upward already guards
   with "type &&"; without the guard the tree accessors below would
   dereference NULL.  */
bool
arm_pad_reg_upward (enum machine_mode mode ATTRIBUTE_UNUSED,
		    tree type, int first ATTRIBUTE_UNUSED)
{
  if (TARGET_AAPCS_BASED
      && BYTES_BIG_ENDIAN
      && type != NULL_TREE
      && (AGGREGATE_TYPE_P (type) || TREE_CODE (type) == COMPLEX_TYPE)
      && int_size_in_bytes (type) <= 4)
    return true;

  /* Otherwise, use the default padding direction.  */
  return !BYTES_BIG_ENDIAN;
}
/* Print a human-readable rendering of the rtl value X to the dump
   file F; used only for debug dumps.  */
static void
arm_print_value (FILE *f, rtx x)
{
  switch (GET_CODE (x))
    {
    case CONST_INT:
      fprintf (f, HOST_WIDE_INT_PRINT_HEX, INTVAL (x));
      return;

    case CONST_DOUBLE:
      fprintf (f, "<0x%lx,0x%lx>", (long)XWINT (x, 2), (long)XWINT (x, 3));
      return;

    case CONST_VECTOR:
      {
	int i;

	/* Comma-separated element list wrapped in angle brackets.  */
	fprintf (f, "<");
	for (i = 0; i < CONST_VECTOR_NUNITS (x); i++)
	  {
	    fprintf (f, HOST_WIDE_INT_PRINT_HEX, INTVAL (CONST_VECTOR_ELT (x, i)));
	    if (i < (CONST_VECTOR_NUNITS (x) - 1))
	      fputc (',', f);
	  }
	fprintf (f, ">");
      }
      return;

    case CONST_STRING:
      fprintf (f, "\"%s\"", XSTR (x, 0));
      return;

    case SYMBOL_REF:
      fprintf (f, "`%s'", XSTR (x, 0));
      return;

    case LABEL_REF:
      fprintf (f, "L%d", INSN_UID (XEXP (x, 0)));
      return;

    case CONST:
      /* Recurse on the wrapped expression.  */
      arm_print_value (f, XEXP (x, 0));
      return;

    case PLUS:
      arm_print_value (f, XEXP (x, 0));
      fprintf (f, "+");
      arm_print_value (f, XEXP (x, 1));
      return;

    case PC:
      fprintf (f, "pc");
      return;

    default:
      fprintf (f, "????");
      return;
    }
}
/* One entry in the per-function minipool (literal pool).  */
struct minipool_node
{
  /* Doubly linked chain of pool entries.  */
  Mnode * next;
  Mnode * prev;
  /* Latest / earliest insn address at which this entry may be placed
     and still be in range of every fix that references it.  */
  HOST_WIDE_INT max_address;
  HOST_WIDE_INT min_address;
  /* Number of fixes referencing this entry.  */
  int refcount;
  /* Byte offset of the entry from the start of the pool.  */
  HOST_WIDE_INT offset;
  /* The constant being pooled, and its mode.  */
  rtx value;
  enum machine_mode mode;
  /* Bytes this entry occupies in the pool.  */
  int fix_size;
};
/* A single insn that needs one of its operands placed in a
   minipool.  */
struct minipool_fixup
{
  /* Singly linked list of pending fixups.  */
  Mfix * next;
  /* The insn requiring the constant, and its estimated address.  */
  rtx insn;
  HOST_WIDE_INT address;
  /* Location inside the insn pattern of the constant reference.  */
  rtx * loc;
  /* Mode and pool size of the referenced constant.  */
  enum machine_mode mode;
  int fix_size;
  rtx value;
  /* Pool entry eventually assigned to satisfy this fix.  */
  Mnode * minipool;
  /* Forward / backward reach (in bytes) of the referencing insn.  */
  HOST_WIDE_INT forwards;
  HOST_WIDE_INT backwards;
};

/* Pool entries occupy at least one word.  */
#define MINIPOOL_FIX_SIZE(mode) \
(GET_MODE_SIZE ((mode)) >= 4 ? GET_MODE_SIZE ((mode)) : 4)
/* Adjust *LENGTH, the byte length of INSN, for the cases the generic
   length attribute cannot express: pooled string constants, dispatch
   tables, the unexpanded Thumb epilogue marker, and several Thumb
   move/extend patterns whose size depends on the form of their memory
   operand.  */
void
arm_adjust_insn_length (rtx insn, int *length)
{
  rtx body = PATTERN (insn);

  /* A pooled string is emitted verbatim; round its size up to a word
     boundary.  */
  if (GET_CODE (body) == UNSPEC_VOLATILE
      && (int) XEXP (body, 1) == VUNSPEC_POOL_STRING)
    {
      int len = TREE_STRING_LENGTH (SYMBOL_REF_DECL
				    (XVECEXP (body, 0, 0)));
      len = (len + 3) & ~3;
      *length = len;
    }

  /* A dispatch table: element size times (count + 1), rounded up to
     the insn size; 4-byte Thumb entries may need 2 alignment bytes.  */
  if (GET_CODE (body) == ADDR_DIFF_VEC)
    {
      int len = (XVECLEN (body, 1) + 1) * GET_MODE_SIZE (GET_MODE (body));
      int insn_size = (TARGET_THUMB) ? 2 : 4;

      if (TARGET_THUMB
	  && GET_MODE_SIZE (GET_MODE (body)) == 4)
	len += 2;

      len = ((len + insn_size - 1) / insn_size) * insn_size;
      *length = len;
    }

  /* The unexpanded Thumb epilogue marker: ask the epilogue emitter
     for the eventual size.  */
  if (TARGET_THUMB
      && GET_CODE (body) == UNSPEC_VOLATILE
      && (int) XEXP (body, 1) == VUNSPEC_EPILOGUE)
    {
      *length = handle_thumb_unexpanded_epilogue (false);
    }

  /* Thumb zero-extend HI->SI: 4 bytes for an SP-relative memory
     operand, else 2.  */
  if (INSN_CODE (insn) == CODE_FOR_adjustable_thumb_zero_extendhisi2
      || INSN_CODE (insn) == CODE_FOR_adjustable_thumb_zero_extendhisi2_v6)
    {
      rtx mem = XEXP (XEXP (body, 1), 0);

      if (GET_CODE (mem) == REG || GET_CODE (mem) == SUBREG)
	*length = 2;
      else
	{
	  gcc_assert (GET_CODE (mem) == MEM);

	  mem = XEXP (mem, 0);

	  if (GET_CODE (mem) == CONST)
	    mem = XEXP (mem, 0);

	  if (GET_CODE (mem) == PLUS
	      && GET_CODE (XEXP (mem, 0)) == REG
	      && REGNO (XEXP (mem, 0)) == SP_REGNUM)
	    *length = 4;
	  else
	    *length = 2;
	}
    }

  /* Thumb sign-extend HI->SI: label-relative and reg+reg addresses
     are short; other forms need 4 bytes.  */
  if (INSN_CODE (insn) == CODE_FOR_thumb_extendhisi2
      || INSN_CODE (insn) == CODE_FOR_adjustable_thumb_extendhisi2_insn_v6)
    {
      rtx mem = XEXP (XEXP (XVECEXP (body, 0, 0), 1), 0);

      if (GET_CODE (mem) == REG || GET_CODE (mem) == SUBREG)
	*length = 2;
      else
	{
	  gcc_assert (GET_CODE (mem) == MEM);

	  mem = XEXP (mem, 0);

	  if (GET_CODE (mem) == CONST)
	    mem = XEXP (mem, 0);

	  *length = 4;

	  if (GET_CODE (mem) == LABEL_REF)
	    *length = 2;

	  if (GET_CODE (mem) == PLUS)
	    {
	      if (GET_CODE (XEXP (mem, 0)) == LABEL_REF
		  && GET_CODE (XEXP (mem, 1)) == CONST_INT)
		*length = 2;

	      if (GET_CODE (XEXP (mem, 1)) == REG)
		*length = 2;
	    }
	}
    }

  /* Thumb sign-extend QI->SI (pre-v6): a destination that aliases the
     base register needs the 6-byte sequence.  */
  if (INSN_CODE (insn) == CODE_FOR_adjustable_thumb_extendqisi2)
    {
      rtx mem = XEXP (XEXP (body, 1), 0);

      if (GET_CODE (mem) == REG || GET_CODE (mem) == SUBREG)
	*length = 2;
      else
	{
	  gcc_assert (GET_CODE (mem) == MEM);

	  mem = XEXP (mem, 0);

	  if (GET_CODE (mem) == CONST)
	    mem = XEXP (mem, 0);

	  if (GET_CODE (mem) == LABEL_REF)
	    *length = 2;
	  else if (GET_CODE (mem) == PLUS
		   && GET_CODE (XEXP (mem, 0)) == LABEL_REF)
	    *length = 2;
	  else if (GET_CODE (mem) == PLUS)
	    {
	      if (GET_CODE (XEXP (mem, 0)) == REG)
		{
		  if (GET_CODE (XEXP (mem, 1)) == REG)
		    *length = 2;
		  else if (REGNO (XEXP (mem, 0)) == REGNO (XEXP (body, 0)))
		    *length = 6;
		  else
		    *length = 4;
		}
	      else
		{
		  gcc_assert (GET_CODE (XEXP (mem, 1)) == REG);

		  if (REGNO (XEXP (mem, 1)) == REGNO (XEXP (body, 0)))
		    *length = 6;
		  else
		    *length = 4;
		}
	    }
	  else if (GET_CODE (mem) == REG && REGNO (XEXP (body, 0)) == REGNO (mem))
	    *length = 6;
	  else
	    *length = 4;
	}
    }

  /* Thumb sign-extend QI->SI (v6): the sxtb instruction caps the
     longer cases at 4 bytes.  */
  if (INSN_CODE (insn) == CODE_FOR_adjustable_thumb_extendqisi2_v6)
    {
      rtx mem = XEXP (XEXP (body, 1), 0);

      if (GET_CODE (mem) == REG || GET_CODE (mem) == SUBREG)
	*length = 2;
      else
	{
	  gcc_assert (GET_CODE (mem) == MEM);

	  mem = XEXP (mem, 0);

	  if (GET_CODE (mem) == CONST)
	    mem = XEXP (mem, 0);

	  if (GET_CODE (mem) == LABEL_REF)
	    *length = 2;
	  else if (GET_CODE (mem) == PLUS
		   && GET_CODE (XEXP (mem, 0)) == LABEL_REF)
	    *length = 2;
	  else if (GET_CODE (mem) == PLUS)
	    {
	      if (GET_CODE (XEXP (mem, 0)) == REG)
		{
		  if (GET_CODE (XEXP (mem, 1)) == REG)
		    *length = 2;
		  else if (REGNO (XEXP (mem, 0)) == REGNO (XEXP (body, 0)))
		    *length = 4;
		  else
		    *length = 4;
		}
	      else
		{
		  gcc_assert (GET_CODE (XEXP (mem, 1)) == REG);

		  if (REGNO (XEXP (mem, 1)) == REGNO (XEXP (body, 0)))
		    *length = 4;
		  else
		    *length = 4;
		}
	    }
	  else if (GET_CODE (mem) == REG && REGNO (XEXP (body, 0)) == REGNO (mem))
	    *length = 4;
	  else
	    *length = 4;
	}
    }

  /* Thumb HImode move: SP-relative loads need 4 bytes.  */
  if (INSN_CODE (insn) == CODE_FOR_adjustable_thumb_movhi_insn)
    {
      rtx mem = XEXP (body, 1);

      if (GET_CODE (mem) != MEM)
	*length = 2;
      else if (GET_CODE (XEXP (mem, 0)) == PLUS
	       && GET_CODE (XEXP (XEXP (mem, 0), 0)) == REG
	       && REGNO (XEXP (XEXP (mem, 0), 0)) == SP_REGNUM)
	*length = 4;
      else
	*length = 2;
    }

  /* Thumb DImode move: size depends on operand forms.  */
  if (INSN_CODE (insn) == CODE_FOR_adjustable_thumb_movdi_insn)
    {
      rtx op0 = XEXP (body, 0);
      rtx op1 = XEXP (body, 1);

      /* Auto-increment forms map to ldmia/stmia (2 bytes).  */
      if (GET_CODE (op0) == MEM &&
	  (GET_CODE (XEXP (op0, 0)) == PRE_INC
	   || GET_CODE (XEXP (op0, 0)) == POST_INC))
	*length = 2;
      else if (GET_CODE (op1) == MEM &&
	       (GET_CODE (XEXP (op1, 0)) == PRE_INC
		|| GET_CODE (XEXP (op1, 0)) == POST_INC))
	*length = 2;
      else if (GET_CODE (op1) == CONST_INT
	       && !const_ok_for_arm (INTVAL (op1))
	       && INTVAL (op1) >= -4095
	       && INTVAL (op1) <= 4095
	       && thumb_low_register_operand (op0, GET_MODE (op0)))
	*length = 6;
      else if (GET_CODE (op1) != MEM)
	*length = 4;
      else
	{
	  rtx addr = XEXP (op1, 0);

	  if (GET_CODE (addr) == REG)
	    *length = 4;
	  else if (GET_CODE (addr) == CONST)
	    *length = 4;
	  else if (GET_CODE (addr) == PLUS)
	    {
	      rtx base = XEXP (addr, 0);
	      rtx offset = XEXP (addr, 1);

	      if (CONSTANT_P (base))
		{
		  /* Canonicalize: register in BASE, constant in
		     OFFSET.  */
		  rtx temp = base;
		  base = offset;
		  offset = temp;
		}

	      if (GET_CODE (offset) == REG)
		*length = 6;
	      else
		*length = 4;
	    }
	  else if (GET_CODE (addr) == LABEL_REF)
	    *length = 4;
	  else
	    abort ();
	}
    }
}
#ifndef ENABLE_LLVM
/* Head and tail of the current minipool entry list, and the label
   marking where the pool will be emitted.  */
static Mnode * minipool_vector_head;
static Mnode * minipool_vector_tail;
static rtx minipool_vector_label;
/* Extra padding that 8-byte-aligned pool entries may require.  */
static int minipool_pad;
/* Pending fixup list and the barrier after which the next pool is
   dumped.  */
Mfix * minipool_fix_head;
Mfix * minipool_fix_tail;
Mfix * minipool_barrier;
/* If INSN is a tablejump, return the dispatch-table insn that follows
   it; otherwise return NULL_RTX.  */
static rtx
is_jump_table (rtx insn)
{
  rtx table;

  if (GET_CODE (insn) != JUMP_INSN || JUMP_LABEL (insn) == NULL)
    return NULL_RTX;

  /* The table must be the very next real insn after both the jump and
     its label.  */
  table = next_real_insn (JUMP_LABEL (insn));
  if (table != next_real_insn (insn) || table == NULL)
    return NULL_RTX;

  if (GET_CODE (table) != JUMP_INSN)
    return NULL_RTX;

  if (GET_CODE (PATTERN (table)) != ADDR_VEC
      && GET_CODE (PATTERN (table)) != ADDR_DIFF_VEC)
    return NULL_RTX;

  return table;
}
/* Default: jump tables live in the read-only data section unless the
   target overrides this.  */
#ifndef JUMP_TABLES_IN_TEXT_SECTION
#define JUMP_TABLES_IN_TEXT_SECTION 0
#endif
/* Return the number of bytes the dispatch-table insn INSN occupies in
   the instruction stream; zero when tables are emitted into a
   separate read-only data section.  */
static HOST_WIDE_INT
get_jump_table_size (rtx insn)
{
  rtx body;
  int vec_operand;

  /* Tables in a separate section contribute nothing to in-stream
     code size.  */
  if (!JUMP_TABLES_IN_TEXT_SECTION && readonly_data_section != text_section)
    return 0;

  body = PATTERN (insn);
  /* ADDR_DIFF_VEC keeps its entries in operand 1, ADDR_VEC in 0.  */
  vec_operand = (GET_CODE (body) == ADDR_DIFF_VEC) ? 1 : 0;

  return GET_MODE_SIZE (GET_MODE (body)) * XVECLEN (body, vec_operand);
}
/* Reused pool entry MP must now also be reachable no later than
   MAX_ADDRESS.  Move it forward ahead of MAX_MP (if given), tighten
   its max_address, and propagate the tighter bound to earlier
   entries.  Returns the new insertion limit for later fixes.  */
static Mnode *
move_minipool_fix_forward_ref (Mnode *mp, Mnode *max_mp,
			       HOST_WIDE_INT max_address)
{
  /* The code below assumes these are different.  */
  gcc_assert (mp != max_mp);

  if (max_mp == NULL)
    {
      if (max_address < mp->max_address)
	mp->max_address = max_address;
    }
  else
    {
      if (max_address > max_mp->max_address - mp->fix_size)
	mp->max_address = max_mp->max_address - mp->fix_size;
      else
	mp->max_address = max_address;

      /* Unlink MP from its current position ...  */
      mp->prev->next = mp->next;
      if (mp->next != NULL)
	mp->next->prev = mp->prev;
      else
	minipool_vector_tail = mp->prev;

      /* ... and re-insert it immediately before MAX_MP.  */
      mp->next = max_mp;
      mp->prev = max_mp->prev;
      max_mp->prev = mp;
      if (mp->prev != NULL)
	mp->prev->next = mp;
      else
	minipool_vector_head = mp;
    }

  max_mp = mp;

  /* Entries before MP must still fit ahead of it; tighten their
     bounds as needed.  */
  while (mp->prev != NULL
	 && mp->prev->max_address > mp->max_address - mp->prev->fix_size)
    {
      mp->prev->max_address = mp->max_address - mp->prev->fix_size;
      mp = mp->prev;
    }

  return max_mp;
}
/* Add the constant required by FIX to a minipool placed after it
   (forward search).  Returns the pool node, or NULL when no forward
   placement can satisfy the fix.  */
static Mnode *
add_minipool_forward_ref (Mfix *fix)
{
  /* If set, MAX_MP is the first pool entry whose placement constraint
     is tighter than the one being added.  */
  Mnode * max_mp = NULL;
  HOST_WIDE_INT max_address = fix->address + fix->forwards - minipool_pad;
  Mnode * mp;

  /* If the pool's first entry must already be emitted before this
     insn ends, a forward pool cannot help.  */
  if (minipool_vector_head &&
      (fix->address + get_attr_length (fix->insn)
       >= minipool_vector_head->max_address - fix->fix_size))
    return NULL;

  for (mp = minipool_vector_head; mp != NULL; mp = mp->next)
    {
      /* Reuse an existing identical entry when possible.  */
      if (GET_CODE (fix->value) == GET_CODE (mp->value)
	  && fix->mode == mp->mode
	  && (GET_CODE (fix->value) != CODE_LABEL
	      || (CODE_LABEL_NUMBER (fix->value)
		  == CODE_LABEL_NUMBER (mp->value)))
	  && rtx_equal_p (fix->value, mp->value))
	{
	  mp->refcount++;
	  return move_minipool_fix_forward_ref (mp, max_mp, max_address);
	}

      /* Note the first entry constrained tighter than this fix.  */
      if (max_mp == NULL
	  && mp->max_address > max_address)
	max_mp = mp;

      /* Keep 8-byte entries grouped at the front of a
	 doubleword-aligned pool.  */
      if (ARM_DOUBLEWORD_ALIGN
	  && max_mp == NULL
	  && fix->fix_size == 8
	  && mp->fix_size != 8)
	{
	  max_mp = mp;
	  max_address = mp->max_address;
	}
    }

  /* No reusable entry: create a fresh node.  */
  mp = XNEW (Mnode);
  mp->fix_size = fix->fix_size;
  mp->mode = fix->mode;
  mp->value = fix->value;
  mp->refcount = 1;
  /* Not yet required by any backwards reference.  */
  mp->min_address = -65536;

  if (max_mp == NULL)
    {
      /* Append at the tail of the pool.  */
      mp->max_address = max_address;
      mp->next = NULL;
      mp->prev = minipool_vector_tail;

      if (mp->prev == NULL)
	{
	  minipool_vector_head = mp;
	  minipool_vector_label = gen_label_rtx ();
	}
      else
	mp->prev->next = mp;

      minipool_vector_tail = mp;
    }
  else
    {
      /* Insert before MAX_MP, inheriting the tighter constraint.  */
      if (max_address > max_mp->max_address - mp->fix_size)
	mp->max_address = max_mp->max_address - mp->fix_size;
      else
	mp->max_address = max_address;

      mp->next = max_mp;
      mp->prev = max_mp->prev;
      max_mp->prev = mp;
      if (mp->prev != NULL)
	mp->prev->next = mp;
      else
	minipool_vector_head = mp;
    }

  max_mp = mp;

  /* Propagate the tightened constraint backwards through the list.  */
  while (mp->prev != NULL
	 && mp->prev->max_address > mp->max_address - mp->prev->fix_size)
    {
      mp->prev->max_address = mp->max_address - mp->prev->fix_size;
      mp = mp->prev;
    }

  return max_mp;
}
/* Reused pool entry MP is now also referenced backwards with earliest
   legal address MIN_ADDRESS.  Relink MP after MIN_MP (when given),
   raise its min_address, then recompute all offsets and propagate
   min_address forward.  Returns the new MIN_MP for later fixes.  */
static Mnode *
move_minipool_fix_backward_ref (Mnode *mp, Mnode *min_mp,
				HOST_WIDE_INT min_address)
{
  HOST_WIDE_INT offset;

  /* The code below assumes these are different.  */
  gcc_assert (mp != min_mp);

  if (min_mp == NULL)
    {
      if (min_address > mp->min_address)
	mp->min_address = min_address;
    }
  else
    {
      /* Unlink MP ...  */
      mp->min_address = min_address;
      mp->next->prev = mp->prev;
      if (mp->prev != NULL)
	mp->prev->next = mp->next;
      else
	minipool_vector_head = mp->next;

      /* ... and relink it directly after MIN_MP.  */
      mp->prev = min_mp;
      mp->next = min_mp->next;
      min_mp->next = mp;
      if (mp->next != NULL)
	mp->next->prev = mp;
      else
	minipool_vector_tail = mp;
    }

  min_mp = mp;

  /* Rebuild offsets (dead entries take no space) and push the
     min_address bound forward through the pool.  */
  offset = 0;
  for (mp = minipool_vector_head; mp != NULL; mp = mp->next)
    {
      mp->offset = offset;
      if (mp->refcount > 0)
	offset += mp->fix_size;

      if (mp->next && mp->next->min_address < mp->min_address + mp->fix_size)
	mp->next->min_address = mp->min_address + mp->fix_size;
    }

  return min_mp;
}
/* Add the constant required by FIX to a minipool placed before it
   (backward search from the pool tail).  Returns the pool node, or
   NULL if no backward placement works.  */
static Mnode *
add_minipool_backward_ref (Mfix *fix)
{
  /* If set, MIN_MP is the last pool entry whose constraint is looser
     than the one being added; the new entry goes after it.  */
  Mnode *min_mp = NULL;
  HOST_WIDE_INT min_address = fix->address - fix->backwards;
  Mnode *mp;

  /* The pool sits just after minipool_barrier: the fix must reach
     past the barrier, and the pool must not already be full.  */
  if (min_address >= minipool_barrier->address
      || (minipool_vector_tail->min_address + fix->fix_size
	  >= minipool_barrier->address))
    return NULL;

  for (mp = minipool_vector_tail; mp != NULL; mp = mp->prev)
    {
      /* Reuse an identical entry, provided the reuse keeps it within
	 range of its existing forward references.  */
      if (GET_CODE (fix->value) == GET_CODE (mp->value)
	  && fix->mode == mp->mode
	  && (GET_CODE (fix->value) != CODE_LABEL
	      || (CODE_LABEL_NUMBER (fix->value)
		  == CODE_LABEL_NUMBER (mp->value)))
	  && rtx_equal_p (fix->value, mp->value)
	  && (mp->max_address
	      > (minipool_barrier->address
		 + minipool_vector_tail->offset
		 + minipool_vector_tail->fix_size)))
	{
	  mp->refcount++;
	  return move_minipool_fix_backward_ref (mp, min_mp, min_address);
	}

      if (min_mp != NULL)
	/* Entries past the chosen insertion point shift down by the
	   new entry's size.  */
	mp->min_address += fix->fix_size;
      else
	{
	  if (mp->min_address < min_address)
	    {
	      /* An 8-byte entry cannot be dropped amongst smaller
		 entries in a doubleword-aligned pool.  */
	      if (ARM_DOUBLEWORD_ALIGN
		  && fix->fix_size == 8 && mp->fix_size != 8)
		return NULL;
	      else
		min_mp = mp;
	    }
	  else if (mp->max_address
		   < minipool_barrier->address + mp->offset + fix->fix_size)
	    {
	      /* Inserting before MP would push it out of range of its
		 forward references, so insert after it instead.  */
	      min_mp = mp;
	      min_address = mp->min_address + fix->fix_size;
	    }
	  else if (ARM_DOUBLEWORD_ALIGN
		   && min_mp == NULL
		   && fix->fix_size == 8
		   && mp->fix_size < 8)
	    {
	      /* Keep 8-byte entries grouped after smaller ones.  */
	      min_mp = mp;
	      min_address = mp->min_address + fix->fix_size;
	    }
	}
    }

  /* No reuse: create a new entry.  */
  mp = XNEW (Mnode);
  mp->fix_size = fix->fix_size;
  mp->mode = fix->mode;
  mp->value = fix->value;
  mp->refcount = 1;
  /* Not yet constrained by any forwards reference.  */
  mp->max_address = minipool_barrier->address + 65536;
  mp->min_address = min_address;

  if (min_mp == NULL)
    {
      /* Insert at the head of the pool.  */
      mp->prev = NULL;
      mp->next = minipool_vector_head;

      if (mp->next == NULL)
	{
	  minipool_vector_tail = mp;
	  minipool_vector_label = gen_label_rtx ();
	}
      else
	mp->next->prev = mp;

      minipool_vector_head = mp;
    }
  else
    {
      /* Insert directly after MIN_MP.  */
      mp->next = min_mp->next;
      mp->prev = min_mp;
      min_mp->next = mp;

      if (mp->next != NULL)
	mp->next->prev = mp;
      else
	minipool_vector_tail = mp;
    }

  min_mp = mp;

  /* Walk forward from the insertion point, fixing offsets and the
     min_address chain.  */
  if (mp->prev)
    mp = mp->prev;
  else
    mp->offset = 0;

  while (mp->next != NULL)
    {
      if (mp->next->min_address < mp->min_address + mp->fix_size)
	mp->next->min_address = mp->min_address + mp->fix_size;

      if (mp->refcount)
	mp->next->offset = mp->offset + mp->fix_size;
      else
	mp->next->offset = mp->offset;

      mp = mp->next;
    }

  return min_mp;
}
/* Record BARRIER as the barrier after which the pool is dumped and
   assign byte offsets in order to every pool node; entries with no
   remaining references occupy no space.  */
static void
assign_minipool_offsets (Mfix *barrier)
{
  Mnode *node;
  HOST_WIDE_INT running_offset = 0;

  minipool_barrier = barrier;

  for (node = minipool_vector_head; node != NULL; node = node->next)
    {
      node->offset = running_offset;

      if (node->refcount > 0)
	running_offset += node->fix_size;
    }
}
/* Emit the accumulated minipool (literal pool) after insn SCAN,
   freeing the pool nodes as they are emitted.  The pool is aligned to
   8 bytes when any live entry is 8 bytes wide under
   ARM_DOUBLEWORD_ALIGN, otherwise to 4 bytes.

   Fix: the dump fprintf calls used %ld conversion specifiers with
   arguments cast to unsigned long — a format/argument type mismatch
   (flagged by -Wformat).  The casts are now (long), which also prints
   negative min_address values (e.g. the -65536 sentinel) correctly.  */
static void
dump_minipool (rtx scan)
{
  Mnode * mp;
  Mnode * nmp;
  int align64 = 0;

  /* Any live 8-byte entry forces 8-byte pool alignment.  */
  if (ARM_DOUBLEWORD_ALIGN)
    for (mp = minipool_vector_head; mp != NULL; mp = mp->next)
      if (mp->refcount > 0 && mp->fix_size == 8)
	{
	  align64 = 1;
	  break;
	}

  if (dump_file)
    fprintf (dump_file,
	     ";; Emitting minipool after insn %u; address %ld; align %d (bytes)\n",
	     INSN_UID (scan), (long) minipool_barrier->address,
	     align64 ? 8 : 4);

  /* Label and align the pool, then emit the pool's own label.  */
  scan = emit_label_after (gen_label_rtx (), scan);
  scan = emit_insn_after (align64 ? gen_align_8 () : gen_align_4 (), scan);
  scan = emit_label_after (minipool_vector_label, scan);

  for (mp = minipool_vector_head; mp != NULL; mp = nmp)
    {
      if (mp->refcount > 0)
	{
	  if (dump_file)
	    {
	      fprintf (dump_file,
		       ";; Offset %u, min %ld, max %ld ",
		       (unsigned) mp->offset, (long) mp->min_address,
		       (long) mp->max_address);
	      arm_print_value (dump_file, mp->value);
	      fputc ('\n', dump_file);
	    }

	  /* Emit the entry with the consttable pattern matching its
	     size.  */
	  switch (mp->fix_size)
	    {
#ifdef HAVE_consttable_1
	    case 1:
	      scan = emit_insn_after (gen_consttable_1 (mp->value), scan);
	      break;
#endif
#ifdef HAVE_consttable_2
	    case 2:
	      scan = emit_insn_after (gen_consttable_2 (mp->value), scan);
	      break;
#endif
#ifdef HAVE_consttable_4
	    case 4:
	      scan = emit_insn_after (gen_consttable_4 (mp->value), scan);
	      break;
#endif
#ifdef HAVE_consttable_8
	    case 8:
	      scan = emit_insn_after (gen_consttable_8 (mp->value), scan);
	      break;
#endif
	    default:
	      gcc_unreachable ();
	    }
	}

      nmp = mp->next;
      free (mp);
    }

  minipool_vector_head = minipool_vector_tail = NULL;
  scan = emit_insn_after (gen_consttable_end (), scan);
  scan = emit_barrier_after (scan);
}
/* Heuristic cost of breaking the instruction stream (to emit a
   minipool barrier) immediately after INSN; lower is better.  A spot
   followed by a CODE_LABEL is cheaper.  */
static int
arm_barrier_cost (rtx insn)
{
  int cost = 50;
  rtx following = next_nonnote_insn (insn);

  if (following != NULL && GET_CODE (following) == CODE_LABEL)
    cost -= 20;

  switch (GET_CODE (insn))
    {
    case CODE_LABEL:
      /* Placement after a label ignores the discount above.  */
      return 50;

    case INSN:
    case CALL_INSN:
      return cost;

    case JUMP_INSN:
      /* After an unconditional jump is the ideal spot.  */
      return cost - 10;

    default:
      return cost + 10;
    }
}
/* FIX cannot be satisfied by an existing barrier; scan forward from
   its insn (at most to MAX_ADDRESS) for the cheapest point to break
   the stream, emit a jump around a new barrier there, and record the
   barrier as a new fixup entry.  */
static Mfix *
create_fix_barrier (Mfix *fix, HOST_WIDE_INT max_address)
{
  HOST_WIDE_INT count = 0;
  rtx barrier;
  rtx from = fix->insn;
  rtx selected = NULL;
  int selected_cost;
  HOST_WIDE_INT selected_address;
  Mfix * new_fix;
  HOST_WIDE_INT max_count = max_address - fix->address;
  rtx label = gen_label_rtx ();

  selected_cost = arm_barrier_cost (from);
  selected_address = fix->address;

  while (from && count < max_count)
    {
      rtx tmp;
      int new_cost;

      /* This code shouldn't have been called if there was a natural
	 barrier within range.  */
      gcc_assert (GET_CODE (from) != BARRIER);

      /* Accumulate this insn's length, plus any alignment padding a
	 label demands.  */
      count += get_attr_length (from);
      if (LABEL_P (from))
	count += get_label_pad (from, fix->address + count);

      /* A jump table counts as one unit; the spot just after it is a
	 candidate.  */
      tmp = is_jump_table (from);
      if (tmp != NULL)
	{
	  count += get_jump_table_size (tmp);
	  new_cost = arm_barrier_cost (from);

	  if (count < max_count
	      && (!selected || new_cost <= selected_cost))
	    {
	      selected = tmp;
	      selected_cost = new_cost;
	      selected_address = fix->address + count;
	    }

	  /* Continue scanning after the table.  */
	  from = NEXT_INSN (tmp);
	  continue;
	}

      new_cost = arm_barrier_cost (from);

      if (count < max_count
	  && (!selected || new_cost <= selected_cost))
	{
	  selected = from;
	  selected_cost = new_cost;
	  selected_address = fix->address + count;
	}

      from = NEXT_INSN (from);
    }

  /* Make sure a place to insert the jump was found.  */
  gcc_assert (selected);

  /* Branch around the new barrier so execution falls through.  */
  from = emit_jump_insn_after (gen_jump (label), selected);
  JUMP_LABEL (from) = label;
  barrier = emit_barrier_after (from);
  emit_label_after (label, barrier);

  /* Record the new barrier in the fixup list, right after FIX.  */
  new_fix = (Mfix *) obstack_alloc (&minipool_obstack, sizeof (* new_fix));
  new_fix->insn = barrier;
  new_fix->address = selected_address;
  new_fix->next = fix->next;
  fix->next = new_fix;

  return new_fix;
}
/* Append a barrier entry for INSN, located at ADDRESS, to the fixup
   list.  */
static void
push_minipool_barrier (rtx insn, HOST_WIDE_INT address)
{
  Mfix * entry = (Mfix *) obstack_alloc (&minipool_obstack, sizeof (* entry));

  entry->insn = insn;
  entry->address = address;
  entry->next = NULL;

  /* Link onto the tail of the list (or start a new list).  */
  if (minipool_fix_head == NULL)
    minipool_fix_head = entry;
  else
    minipool_fix_tail->next = entry;

  minipool_fix_tail = entry;
}
/* Record that INSN (at estimated ADDRESS), whose operand at *LOC is
   the constant VALUE of mode MODE, requires a minipool entry.  */
static void
push_minipool_fix (rtx insn, HOST_WIDE_INT address, rtx *loc,
		   enum machine_mode mode, rtx value)
{
  Mfix * fix = (Mfix *) obstack_alloc (&minipool_obstack, sizeof (* fix));

#ifdef AOF_ASSEMBLER
  /* For the AOF assembler, PIC symbol references go through the
     based-area table.  */
  if (flag_pic && GET_CODE (value) == SYMBOL_REF)
    value = aof_pic_entry (value);
#endif

  fix->insn = insn;
  fix->address = address;
  fix->loc = loc;
  fix->mode = mode;
  fix->fix_size = MINIPOOL_FIX_SIZE (mode);
  fix->value = value;
  /* Reach of the referencing insn, from its pool_range attributes.  */
  fix->forwards = get_attr_pool_range (insn);
  fix->backwards = get_attr_neg_pool_range (insn);
  fix->minipool = NULL;

  /* An insn with no range defined is not expecting to be reworked by
     this code; better to stop now than to emit duff assembly.  */
  gcc_assert (fix->forwards || fix->backwards);

  /* An 8-byte entry may force up to 4 bytes of pool alignment
     padding.  */
  if (ARM_DOUBLEWORD_ALIGN && fix->fix_size == 8)
    minipool_pad = 4;

  if (dump_file)
    {
      fprintf (dump_file,
	       ";; %smode fixup for i%d; addr %lu, range (%ld,%ld): ",
	       GET_MODE_NAME (mode),
	       INSN_UID (insn), (unsigned long) address,
	       -1 * (long)fix->backwards, (long)fix->forwards);
      arm_print_value (dump_file, fix->value);
      fprintf (dump_file, "\n");
    }

  /* Append to the fixup chain.  */
  fix->next = NULL;

  if (minipool_fix_head != NULL)
    minipool_fix_tail->next = fix;
  else
    minipool_fix_head = fix;

  minipool_fix_tail = fix;
}
#endif
/* Return the cost, in instructions, of materializing the 64-bit
   constant VAL inline as two 32-bit immediate sequences.  */
int
arm_const_double_inline_cost (rtx val)
{
  rtx lo_half, hi_half;
  enum machine_mode mode = GET_MODE (val);
  int cost;

  /* CONST_INT/CONST_DOUBLE have VOIDmode; treat them as DImode.  */
  if (mode == VOIDmode)
    mode = DImode;

  gcc_assert (GET_MODE_SIZE (mode) == 8);

  lo_half = gen_lowpart (SImode, val);
  hi_half = gen_highpart_mode (SImode, mode, val);

  gcc_assert (GET_CODE (lo_half) == CONST_INT);
  gcc_assert (GET_CODE (hi_half) == CONST_INT);

  cost = arm_gen_constant (SET, SImode, NULL_RTX, INTVAL (lo_half),
			   NULL_RTX, NULL_RTX, 0, 0);
  cost += arm_gen_constant (SET, SImode, NULL_RTX, INTVAL (hi_half),
			    NULL_RTX, NULL_RTX, 0, 0);

  return cost;
}
/* Return true when the 64-bit constant VAL can be used directly in
   ARM mode, i.e. both 32-bit halves are valid immediates.  */
bool
const64_ok_for_arm_immediate (rtx val)
{
  rtx lo_half, hi_half;
  enum machine_mode mode;

  if (!TARGET_ARM)
    return false;

  /* CONST_INT/CONST_DOUBLE have VOIDmode; treat them as DImode.  */
  mode = GET_MODE (val);
  if (mode == VOIDmode)
    mode = DImode;

  gcc_assert (GET_MODE_SIZE (mode) == 8);

  lo_half = gen_lowpart (SImode, val);
  hi_half = gen_highpart_mode (SImode, mode, val);

  gcc_assert (GET_CODE (lo_half) == CONST_INT);
  gcc_assert (GET_CODE (hi_half) == CONST_INT);

  if (!const_ok_for_arm (INTVAL (lo_half)))
    return false;

  return const_ok_for_arm (INTVAL (hi_half));
}
/* Return true when the 64-bit constant VAL — or its negation — can be
   added using two 32-bit immediate operations in ARM mode.  */
bool
const64_ok_for_arm_add (rtx val)
{
  rtx lo_half, hi_half, lo_neg_half, hi_neg_half, negated;
  enum machine_mode mode;

  if (!TARGET_ARM)
    return false;

  /* CONST_INT/CONST_DOUBLE have VOIDmode; treat them as DImode.  */
  mode = GET_MODE (val);
  if (mode == VOIDmode)
    mode = DImode;

  gcc_assert (GET_MODE_SIZE (mode) == 8);

  lo_half = gen_lowpart (SImode, val);
  hi_half = gen_highpart_mode (SImode, mode, val);
  negated = negate_rtx (mode, val);
  lo_neg_half = gen_lowpart (SImode, negated);
  hi_neg_half = gen_highpart_mode (SImode, mode, negated);

  gcc_assert (GET_CODE (lo_half) == CONST_INT);
  gcc_assert (GET_CODE (hi_half) == CONST_INT);

  if (const_ok_for_arm (INTVAL (lo_half))
      && const_ok_for_arm (INTVAL (hi_half)))
    return true;

  return (const_ok_for_arm (INTVAL (lo_neg_half))
	  && const_ok_for_arm (INTVAL (hi_neg_half)));
}
/* Return true if the 64-bit constant VAL should be synthesized as two
   separate 32-bit moves rather than loaded from the constant pool.  */
bool
arm_const_double_by_parts (rtx val)
{
  enum machine_mode mode = GET_MODE (val);
  rtx half;

  /* When optimizing for size, or when loads are scheduled, splitting
     is always preferred.  */
  if (optimize_size || arm_ld_sched)
    return true;

  if (mode == VOIDmode)
    mode = DImode;

  /* Splitting also wins when either half (or its complement) is a
     valid immediate.  */
  half = gen_highpart_mode (SImode, mode, val);
  gcc_assert (GET_CODE (half) == CONST_INT);

  if (const_ok_for_arm (INTVAL (half)) || const_ok_for_arm (~INTVAL (half)))
    return true;

  half = gen_lowpart (SImode, val);
  gcc_assert (GET_CODE (half) == CONST_INT);

  if (const_ok_for_arm (INTVAL (half)) || const_ok_for_arm (~INTVAL (half)))
    return true;

  return false;
}
#ifndef ENABLE_LLVM
/* Scan INSN (at estimated ADDRESS) for constant operands that need to
   be placed in a minipool.  When DO_PUSHES is nonzero, record the
   required fixups; otherwise just detect them.  Returns true if any
   such operand was found.  */
static bool
note_invalid_constants (rtx insn, HOST_WIDE_INT address, int do_pushes)
{
  bool result = false;
  int opno;

  extract_insn (insn);

  if (!constrain_operands (1))
    fatal_insn_not_found (insn);

  if (recog_data.n_alternatives == 0)
    return false;

  /* Fill in recog_op_alt with information about this insn's
     constraints.  */
  preprocess_constraints ();

  for (opno = 0; opno < recog_data.n_operands; opno++)
    {
      /* Things that need fixing can only occur in inputs.  */
      if (recog_data.operand_type[opno] != OP_IN)
	continue;

      /* If this alternative is a memory reference, then any mention
	 of constants in this alternative is really to fool reload
	 into allowing us to accept one there.  Fix them up now so the
	 right code is output.  */
      if (recog_op_alt[opno][which_alternative].memory_ok)
	{
	  rtx op = recog_data.operand[opno];

	  if (CONSTANT_P (op))
	    {
	      if (do_pushes)
		push_minipool_fix (insn, address, recog_data.operand_loc[opno],
				   recog_data.operand_mode[opno], op);
	      result = true;
	    }
	  else if (GET_CODE (op) == MEM
		   && GET_CODE (XEXP (op, 0)) == SYMBOL_REF
		   && CONSTANT_POOL_ADDRESS_P (XEXP (op, 0)))
	    {
	      if (do_pushes)
		{
		  rtx cop = avoid_constant_pool_reference (op);

		  /* Casting something to a mode narrower than a word
		     can make avoid_constant_pool_reference return the
		     pool reference itself; in that case, pull the
		     constant out of the pool directly.  */
		  if (op == cop)
		    cop = get_pool_constant (XEXP (op, 0));

		  push_minipool_fix (insn, address,
				     recog_data.operand_loc[opno],
				     recog_data.operand_mode[opno], cop);
		}

	      result = true;
	    }
	}
    }

  return result;
}
/* Return the number of alignment-padding bytes inserted before label
   INSN when it falls at ADDRESS, honouring the label's alignment and
   maximum skip.  */
static HOST_WIDE_INT get_label_pad (rtx insn, HOST_WIDE_INT address)
{
  int align_log, skip_limit;
  unsigned HOST_WIDE_INT align_mask;
  int padding;

  gcc_assert (LABEL_P (insn));

  align_log = LABEL_ALIGN_LOG (insn);
  skip_limit = LABEL_MAX_SKIP (insn);
  align_mask = ((unsigned int) 1 << align_log) - 1;

  /* Already aligned — no padding.  */
  if ((address & align_mask) == 0)
    return 0;

  padding = ((address | align_mask) + 1) - address;

  /* The assembler refuses to pad more than the maximum skip, so no
     padding is emitted in that case.  */
  return (skip_limit && padding > skip_limit) ? 0 : padding;
}
#endif
/* Gcc puts the pool in the wrong place for ARM, since we can only
   load addresses a limited distance around the pc.  We do some
   special munging to move the constant pool values to the correct
   point in the code.  */
static void
arm_reorg (void)
{
#ifndef ENABLE_LLVM
  rtx insn;
  HOST_WIDE_INT address = 0;
  Mfix * fix;

  minipool_fix_head = minipool_fix_tail = NULL;

  /* Thumb prologues are emitted late, so account for their size here.  */
  if (TARGET_THUMB)
    address = count_thumb_unexpanded_prologue ();
  /* The first insn must always be a note, or the code below won't
     scan it properly.  */
  insn = get_insns ();
  gcc_assert (GET_CODE (insn) == NOTE);
  minipool_pad = 0;

  /* Scan all the insns and record the operands that will need fixing.  */
  for (insn = next_nonnote_insn (insn); insn; insn = next_nonnote_insn (insn))
    {
      if (TARGET_CIRRUS_FIX_INVALID_INSNS
	  && (arm_cirrus_insn_p (insn)
	      || GET_CODE (insn) == JUMP_INSN
	      || arm_memory_load_p (insn)))
	cirrus_reorg (insn);

      if (GET_CODE (insn) == BARRIER)
	push_minipool_barrier (insn, address);
      else if (LABEL_P (insn))
	/* Labels may carry alignment padding.  */
	address += get_label_pad (insn, address);
      else if (INSN_P (insn))
	{
	  rtx table;

	  note_invalid_constants (insn, address, true);
	  address += get_attr_length (insn);

	  /* If the insn is a vector jump, add the size of the table
	     and skip the table.  */
	  if ((table = is_jump_table (insn)) != NULL)
	    {
	      address += get_jump_table_size (table);
	      insn = table;
	    }
	}
    }

  fix = minipool_fix_head;

  /* Now scan the fixups and perform the required changes.  */
  while (fix)
    {
      Mfix * ftmp;
      Mfix * fdel;
      Mfix * last_added_fix;
      Mfix * last_barrier = NULL;
      Mfix * this_fix;

      /* Skip any further barriers before the next fix.  */
      while (fix && GET_CODE (fix->insn) == BARRIER)
	fix = fix->next;

      /* No more fixes.  */
      if (fix == NULL)
	break;

      last_added_fix = NULL;

      /* Walk forward, adding fixes to the current pool while they
	 remain in range, noting the last usable barrier.  */
      for (ftmp = fix; ftmp; ftmp = ftmp->next)
	{
	  if (GET_CODE (ftmp->insn) == BARRIER)
	    {
	      if (ftmp->address >= minipool_vector_head->max_address)
		break;

	      last_barrier = ftmp;
	    }
	  else if ((ftmp->minipool = add_minipool_forward_ref (ftmp)) == NULL)
	    break;

	  last_added_fix = ftmp;
	}

      /* If we found a barrier, drop back to that; any fixes that we
	 could have reached but come after the barrier will now go in
	 the next mini-pool.  */
      if (last_barrier != NULL)
	{
	  /* Release the entries added past the barrier; they will be
	     handled in a later pool.  */
	  for (fdel = last_barrier->next;
	       fdel && fdel != ftmp;
	       fdel = fdel->next)
	    {
	      fdel->minipool->refcount--;
	      fdel->minipool = NULL;
	    }

	  ftmp = last_barrier;
	}
      else
	{
	  /* ftmp is the first fix that we can't fit into this pool
	     and there are no natural barriers that we could use.
	     Insert a new barrier in the code somewhere between the
	     previous fix and this one.  */
	  HOST_WIDE_INT max_address;

	  gcc_assert (ftmp);

	  max_address = minipool_vector_head->max_address;
	  /* The pool must be placed before any fix that is in range
	     but could not be added to this pool.  */
	  if (ftmp->address < max_address)
	    max_address = ftmp->address + 1;

	  last_barrier = create_fix_barrier (last_added_fix, max_address);
	}

      assign_minipool_offsets (last_barrier);

      /* Try to pick up fixes that can reach this pool backwards.  */
      while (ftmp)
	{
	  if (GET_CODE (ftmp->insn) != BARRIER
	      && ((ftmp->minipool = add_minipool_backward_ref (ftmp))
		  == NULL))
	    break;

	  ftmp = ftmp->next;
	}

      /* Scan over the fixes we have identified for this pool, fixing
	 them up so that the operands address into the pool.  */
      for (this_fix = fix; this_fix && ftmp != this_fix;
	   this_fix = this_fix->next)
	if (GET_CODE (this_fix->insn) != BARRIER)
	  {
	    rtx addr
	      = plus_constant (gen_rtx_LABEL_REF (VOIDmode,
						  minipool_vector_label),
			       this_fix->minipool->offset);
	    *this_fix->loc = gen_rtx_MEM (this_fix->mode, addr);
	  }

      dump_minipool (last_barrier->insn);
      fix = ftmp;
    }

  /* From now on we must synthesize any constants that we can't handle
     directly.  */
  after_arm_reorg = 1;

  /* Free the minipool memory.  */
  obstack_free (&minipool_obstack, minipool_startobj);
#endif
}
/* Return the printable name of the FPA constant X; X must equal one
   of the eight values in values_fp.  */
const char *
fp_immediate_constant (rtx x)
{
  REAL_VALUE_TYPE value;
  int idx;

  if (!fp_consts_inited)
    init_fp_table ();

  REAL_VALUE_FROM_CONST_DOUBLE (value, x);

  for (idx = 0; idx < 8; idx++)
    {
      if (REAL_VALUES_EQUAL (value, values_fp[idx]))
	return strings_fp[idx];
    }

  /* Predicates guarantee X is one of the table entries.  */
  gcc_unreachable ();
}
/* As fp_immediate_constant, but the value is passed directly as a
   REAL_VALUE_TYPE; *R must match one of the eight FPA constants.  */
static const char *
fp_const_from_val (REAL_VALUE_TYPE *r)
{
  int idx;

  if (!fp_consts_inited)
    init_fp_table ();

  for (idx = 0; idx < 8; idx++)
    {
      if (REAL_VALUES_EQUAL (*r, values_fp[idx]))
	return strings_fp[idx];
    }

  /* Callers only pass values present in the table.  */
  gcc_unreachable ();
}
/* Output a load/store-multiple instruction to STREAM.  INSTR is an
   asm_fprintf format consuming REG; MASK selects which of r0..r15
   appear in the register list.  */
static void
print_multi_reg (FILE *stream, const char *instr, unsigned reg,
		 unsigned long mask)
{
  unsigned regno;
  bool need_comma = FALSE;

  fputc ('\t', stream);
  asm_fprintf (stream, instr, reg);
  fputs (", {", stream);

  for (regno = 0; regno <= LAST_ARM_REGNUM; regno++)
    {
      if ((mask & (1 << regno)) == 0)
	continue;

      if (need_comma)
	fprintf (stream, ", ");
      asm_fprintf (stream, "%r", regno);
      need_comma = TRUE;
    }

  fprintf (stream, "}\n");
}
/* Output an FLDMX instruction to STREAM, loading COUNT double VFP
   registers starting at d-register REG from the address in BASE
   (post-increment, full-descending).  */
static void
arm_output_fldmx (FILE * stream, unsigned int base, int reg, int count)
{
  int regno;

  /* Workaround ARM10 VFPr1 bug: widen an exactly-two-register
     transfer to three on pre-v6 cores (mirrors vfp_emit_fstmx).  */
  if (count == 2 && !arm_arch6)
    {
      if (reg == 15)
	reg--;
      count++;
    }

  fputc ('\t', stream);
  asm_fprintf (stream, "fldmfdx\t%r!, {", base);

  for (regno = reg; regno < reg + count; regno++)
    {
      if (regno != reg)
	fputs (", ", stream);
      asm_fprintf (stream, "d%d", regno);
    }

  fputs ("}\n", stream);
}
/* Output the assembly for an FSTMX store-multiple.  operands[1] is the
   first DFmode register; operands[2] is the PARALLEL whose length
   gives the number of registers to store.  */
const char *
vfp_output_fstmx (rtx * operands)
{
  char buff[100];
  int len;
  int first_dreg;
  int n;

  strcpy (buff, "fstmfdx\t%m0!, {%P1");
  len = strlen (buff);

  gcc_assert (GET_CODE (operands[1]) == REG);
  first_dreg = (REGNO (operands[1]) - FIRST_VFP_REGNUM) / 2;

  /* Append the remaining d-registers to the list.  */
  for (n = 1; n < XVECLEN (operands[2], 0); n++)
    len += sprintf (buff + len, ", d%d", first_dreg + n);
  strcpy (buff + len, "}");

  output_asm_insn (buff, operands);
  return "";
}
/* Emit RTL to save a block of VFP register pairs to the stack,
   starting at BASE_REG, COUNT pairs in all.  Returns the number of
   bytes pushed.  */
static int
vfp_emit_fstmx (int base_reg, int count)
{
  rtx par;
  rtx dwarf;
  rtx tmp, reg;
  int i;

  /* Workaround ARM10 VFPr1 bug: avoid an exactly-two-register
     transfer by widening the block to three registers.  */
  if (count == 2 && !arm_arch6)
    {
      if (base_reg == LAST_VFP_REGNUM - 3)
	base_reg -= 2;
      count++;
    }

  par = gen_rtx_PARALLEL (VOIDmode, rtvec_alloc (count));
  dwarf = gen_rtx_SEQUENCE (VOIDmode, rtvec_alloc (count + 1));

  reg = gen_rtx_REG (DFmode, base_reg);
  base_reg += 2;

  /* Element 0 of the PARALLEL: the pre-decrement multi-register push,
     expressed as an UNSPEC so it matches the push_mult patterns.  */
  XVECEXP (par, 0, 0)
    = gen_rtx_SET (VOIDmode,
		   gen_frame_mem (BLKmode,
				  gen_rtx_PRE_DEC (BLKmode,
						   stack_pointer_rtx)),
		   gen_rtx_UNSPEC (BLKmode,
				   gen_rtvec (1, reg),
				   UNSPEC_PUSH_MULT));

  /* Dwarf element 0: the stack adjustment.  FSTMX transfers
     COUNT * 8 bytes of data plus one extra word (hence the + 4).  */
  tmp = gen_rtx_SET (VOIDmode, stack_pointer_rtx,
		     plus_constant (stack_pointer_rtx, -(count * 8 + 4)));
  RTX_FRAME_RELATED_P (tmp) = 1;
  XVECEXP (dwarf, 0, 0) = tmp;

  /* Dwarf element 1: the first register, stored at the new sp.  */
  tmp = gen_rtx_SET (VOIDmode,
		     gen_frame_mem (DFmode, stack_pointer_rtx),
		     reg);
  RTX_FRAME_RELATED_P (tmp) = 1;
  XVECEXP (dwarf, 0, 1) = tmp;

  /* Remaining registers: USEs in the PARALLEL, and save-slot SETs in
     the dwarf sequence at increasing offsets from sp.  */
  for (i = 1; i < count; i++)
    {
      reg = gen_rtx_REG (DFmode, base_reg);
      base_reg += 2;
      XVECEXP (par, 0, i) = gen_rtx_USE (VOIDmode, reg);

      tmp = gen_rtx_SET (VOIDmode,
			 gen_frame_mem (DFmode,
					plus_constant (stack_pointer_rtx,
						       i * 8)),
			 reg);
      RTX_FRAME_RELATED_P (tmp) = 1;
      XVECEXP (dwarf, 0, i + 1) = tmp;
    }

  par = emit_insn (par);
  /* Attach the dwarf description of the saves for the unwinder.  */
  REG_NOTES (par) = gen_rtx_EXPR_LIST (REG_FRAME_RELATED_EXPR, dwarf,
				       REG_NOTES (par));
  RTX_FRAME_RELATED_P (par) = 1;

  return count * 8 + 4;
}
/* Output a 'call' insn through the register in operands[0].
   This path is only used pre-arch5 (the assert); presumably arch5
   register calls use BLX patterns instead -- confirm against the md
   file.  */
const char *
output_call (rtx *operands)
{
  gcc_assert (!arm_arch5);

  /* lr will be clobbered by the return-address setup, so if the
     target is in lr, copy it to ip first.  */
  if (REGNO (operands[0]) == LR_REGNUM)
    {
      operands[0] = gen_rtx_REG (SImode, IP_REGNUM);
      output_asm_insn ("mov%?\t%0, %|lr", operands);
    }

  /* Set the return address...  */
  output_asm_insn ("mov%?\t%|lr, %|pc", operands);

  /* ...and branch, via bx when interworking is possible.  */
  output_asm_insn ((TARGET_INTERWORK || arm_arch4t)
		   ? "bx%?\t%0"
		   : "mov%?\t%|pc, %0",
		   operands);

  return "";
}
/* Output a 'call' insn whose target is a reference in memory.  */
const char *
output_call_mem (rtx *operands)
{
  /* Interworking without blx: bounce the target through ip so the
     call can use bx.  */
  if (TARGET_INTERWORK && !arm_arch5)
    {
      output_asm_insn ("ldr%?\t%|ip, %0", operands);
      output_asm_insn ("mov%?\t%|lr, %|pc", operands);
      output_asm_insn ("bx%?\t%|ip", operands);
    }
  else if (regno_use_in (LR_REGNUM, operands[0]))
    {
      /* The address uses lr, so load the target into ip before lr is
	 overwritten with the return address.  */
      output_asm_insn ("ldr%?\t%|ip, %0", operands);
      if (arm_arch5)
	output_asm_insn ("blx%?\t%|ip", operands);
      else
	{
	  output_asm_insn ("mov%?\t%|lr, %|pc", operands);
	  if (arm_arch4t)
	    output_asm_insn ("bx%?\t%|ip", operands);
	  else
	    output_asm_insn ("mov%?\t%|pc, %|ip", operands);
	}
    }
  else
    {
      /* Simple case: set the return address, then load straight into
	 the pc.  */
      output_asm_insn ("mov%?\t%|lr, %|pc", operands);
      output_asm_insn ("ldr%?\t%|pc, %0", operands);
    }

  return "";
}
/* Output a move of a long double FPA value from three ARM core
   registers to an FPA register, bounced through the stack.  */
const char *
output_mov_long_double_fpa_from_arm (rtx *operands)
{
  int base = REGNO (operands[1]);
  rtx regs[3];

  /* ip is reserved; the three consecutive registers must not reach it.  */
  gcc_assert (base != IP_REGNUM);

  regs[0] = gen_rtx_REG (SImode, base);
  regs[1] = gen_rtx_REG (SImode, base + 1);
  regs[2] = gen_rtx_REG (SImode, base + 2);

  /* Push the three words, then pop them as one extended FPA load.  */
  output_asm_insn ("stm%?fd\t%|sp!, {%0, %1, %2}", regs);
  output_asm_insn ("ldf%?e\t%0, [%|sp], #12", operands);

  return "";
}
/* Output a move of a long double FPA value from an FPA register to
   three ARM core registers, bounced through the stack.  */
const char *
output_mov_long_double_arm_from_fpa (rtx *operands)
{
  int base = REGNO (operands[0]);
  rtx regs[3];

  /* ip is reserved; the three consecutive registers must not reach it.  */
  gcc_assert (base != IP_REGNUM);

  regs[0] = gen_rtx_REG (SImode, base);
  regs[1] = gen_rtx_REG (SImode, base + 1);
  regs[2] = gen_rtx_REG (SImode, base + 2);

  /* Store the extended value, then pop it into the three core regs.  */
  output_asm_insn ("stf%?e\t%1, [%|sp, #-12]!", operands);
  output_asm_insn ("ldm%?fd\t%|sp!, {%0, %1, %2}", regs);

  return "";
}
/* Output a register-to-register move of a three-word long double,
   copying in whichever direction keeps overlapping ranges safe.  */
const char *
output_mov_long_double_arm_from_arm (rtx *operands)
{
  int dst = REGNO (operands[0]);
  int src = REGNO (operands[1]);
  rtx pair[2];
  int step, idx, n;

  /* Copy upwards when the destination is below the source, downwards
     otherwise, so an overlapping word is never overwritten before it
     has been read.  */
  if (dst < src)
    {
      step = 1;
      idx = 0;
    }
  else
    {
      step = -1;
      idx = 2;
    }

  for (n = 0; n < 3; n++, idx += step)
    {
      pair[0] = gen_rtx_REG (SImode, dst + idx);
      pair[1] = gen_rtx_REG (SImode, src + idx);
      output_asm_insn ("mov%?\t%0, %1", pair);
    }

  return "";
}
/* Output a move of a double FPA value from two ARM core registers to
   an FPA register, bounced through the stack.  */
const char *
output_mov_double_fpa_from_arm (rtx *operands)
{
  int base = REGNO (operands[1]);
  rtx regs[2];

  gcc_assert (base != IP_REGNUM);

  regs[0] = gen_rtx_REG (SImode, base);
  regs[1] = gen_rtx_REG (SImode, base + 1);

  /* Push both words, then pop them as one double FPA load.  */
  output_asm_insn ("stm%?fd\t%|sp!, {%0, %1}", regs);
  output_asm_insn ("ldf%?d\t%0, [%|sp], #8", operands);

  return "";
}
/* Output a move of a double FPA value from an FPA register to two ARM
   core registers, bounced through the stack.  */
const char *
output_mov_double_arm_from_fpa (rtx *operands)
{
  int base = REGNO (operands[0]);
  rtx regs[2];

  gcc_assert (base != IP_REGNUM);

  regs[0] = gen_rtx_REG (SImode, base);
  regs[1] = gen_rtx_REG (SImode, base + 1);

  /* Store the double, then pop it into the two core registers.  */
  output_asm_insn ("stf%?d\t%1, [%|sp, #-8]!", operands);
  output_asm_insn ("ldm%?fd\t%|sp!, {%0, %1}", regs);

  return "";
}
/* Output the assembly for a double-word (DImode/DFmode) move between
   a register pair and memory.  Exactly one operand is a REG pair and
   the other a MEM; all the ARM addressing modes the predicates allow
   are handled here.

   Bug fix versus the previous revision: the PRE_MODIFY/POST_MODIFY
   *store* path with an out-of-strd-range offset emitted "ldr" where
   it must emit "str", loading over the source registers instead of
   writing them to memory (later GCC releases carry the same fix).  */
const char *
output_move_double (rtx *operands)
{
  enum rtx_code code0 = GET_CODE (operands[0]);
  enum rtx_code code1 = GET_CODE (operands[1]);
  rtx otherops[3];

  if (code0 == REG)
    {
      /* Load: the constraints only allow a memory source here.  */
      int reg0 = REGNO (operands[0]);

      otherops[0] = gen_rtx_REG (SImode, 1 + reg0);

      gcc_assert (code1 == MEM);

      switch (GET_CODE (XEXP (operands[1], 0)))
	{
	case REG:
	  output_asm_insn ("ldm%?ia\t%m1, %M0", operands);
	  break;

	case PRE_INC:
	  gcc_assert (TARGET_LDRD);
	  output_asm_insn ("ldr%?d\t%0, [%m1, #8]!", operands);
	  break;

	case PRE_DEC:
	  output_asm_insn ("ldm%?db\t%m1!, %M0", operands);
	  break;

	case POST_INC:
	  output_asm_insn ("ldm%?ia\t%m1!, %M0", operands);
	  break;

	case POST_DEC:
	  gcc_assert (TARGET_LDRD);
	  output_asm_insn ("ldr%?d\t%0, [%m1], #-8", operands);
	  break;

	case PRE_MODIFY:
	case POST_MODIFY:
	  otherops[0] = operands[0];
	  otherops[1] = XEXP (XEXP (XEXP (operands[1], 0), 1), 0);
	  otherops[2] = XEXP (XEXP (XEXP (operands[1], 0), 1), 1);

	  if (GET_CODE (XEXP (operands[1], 0)) == PRE_MODIFY)
	    {
	      if (reg_overlap_mentioned_p (otherops[0], otherops[2]))
		{
		  /* The destination overlaps the index register, so
		     perform the address update first, then a plain
		     base-register ldrd.  */
		  output_asm_insn ("add%?\t%1, %1, %2", otherops);
		  output_asm_insn ("ldr%?d\t%0, [%1] @split", otherops);
		}
	      else
		{
		  /* Offsets beyond +/-255 don't fit ldrd (they can
		     arise for iWMMXt); split into a pair of ldr.  */
		  if (GET_CODE (otherops[2]) == CONST_INT
		      && (INTVAL(otherops[2]) <= -256
			  || INTVAL(otherops[2]) >= 256))
		    {
		      output_asm_insn ("ldr%?\t%0, [%1, %2]!", otherops);
		      otherops[0] = gen_rtx_REG (SImode, 1 + reg0);
		      output_asm_insn ("ldr%?\t%0, [%1, #4]", otherops);
		    }
		  else
		    output_asm_insn ("ldr%?d\t%0, [%1, %2]!", otherops);
		}
	    }
	  else
	    {
	      /* POST_MODIFY: same large-offset split as above, but the
		 high word is fetched before the base is updated.  */
	      if (GET_CODE (otherops[2]) == CONST_INT
		  && (INTVAL(otherops[2]) <= -256
		      || INTVAL(otherops[2]) >= 256))
		{
		  otherops[0] = gen_rtx_REG (SImode, 1 + reg0);
		  output_asm_insn ("ldr%?\t%0, [%1, #4]", otherops);
		  otherops[0] = operands[0];
		  output_asm_insn ("ldr%?\t%0, [%1], %2", otherops);
		}
	      else
		output_asm_insn ("ldr%?d\t%0, [%1], %2", otherops);
	    }
	  break;

	case LABEL_REF:
	case CONST:
	  /* Materialize the address, then load through it.  */
	  output_asm_insn ("adr%?\t%0, %1", operands);
	  output_asm_insn ("ldm%?ia\t%0, %M0", operands);
	  break;

	default:
	  if (arm_add_operand (XEXP (XEXP (operands[1], 0), 1),
			       GET_MODE (XEXP (XEXP (operands[1], 0), 1))))
	    {
	      otherops[0] = operands[0];
	      otherops[1] = XEXP (XEXP (operands[1], 0), 0);
	      otherops[2] = XEXP (XEXP (operands[1], 0), 1);

	      if (GET_CODE (XEXP (operands[1], 0)) == PLUS)
		{
		  if (GET_CODE (otherops[2]) == CONST_INT)
		    {
		      /* Small offsets map directly onto ldm
			 addressing modes.  */
		      switch ((int) INTVAL (otherops[2]))
			{
			case -8:
			  output_asm_insn ("ldm%?db\t%1, %M0", otherops);
			  return "";
			case -4:
			  output_asm_insn ("ldm%?da\t%1, %M0", otherops);
			  return "";
			case 4:
			  output_asm_insn ("ldm%?ib\t%1, %M0", otherops);
			  return "";
			}
		    }
		  if (TARGET_LDRD
		      && (GET_CODE (otherops[2]) == REG
			  || (GET_CODE (otherops[2]) == CONST_INT
			      && INTVAL (otherops[2]) > -256
			      && INTVAL (otherops[2]) < 256)))
		    {
		      if (reg_overlap_mentioned_p (otherops[0],
						   otherops[2]))
			{
			  /* Swap base and index registers over to
			     avoid the conflict.  */
			  otherops[1] = XEXP (XEXP (operands[1], 0), 1);
			  otherops[2] = XEXP (XEXP (operands[1], 0), 0);
			}
		      /* If both still conflict, compute the address
			 first, then ldrd from it.  */
		      if (reg_overlap_mentioned_p (otherops[0], otherops[2]))
			{
			  output_asm_insn ("add%?\t%1, %1, %2", otherops);
			  output_asm_insn ("ldr%?d\t%0, [%1]",
					   otherops);
			}
		      else
			output_asm_insn ("ldr%?d\t%0, [%1, %2]", otherops);
		      return "";
		    }

		  /* Fall back: form the address in the destination
		     register, then load-multiple through it.  */
		  if (GET_CODE (otherops[2]) == CONST_INT)
		    {
		      if (!(const_ok_for_arm (INTVAL (otherops[2]))))
			output_asm_insn ("sub%?\t%0, %1, #%n2", otherops);
		      else
			output_asm_insn ("add%?\t%0, %1, %2", otherops);
		    }
		  else
		    output_asm_insn ("add%?\t%0, %1, %2", otherops);
		}
	      else
		output_asm_insn ("sub%?\t%0, %1, %2", otherops);

	      return "ldm%?ia\t%0, %M0";
	    }
	  else
	    {
	      /* Split into two word loads, ordered so an address that
		 mentions the destination register isn't clobbered
		 before its last use.  */
	      otherops[1] = adjust_address (operands[1], SImode, 4);
	      if (reg_mentioned_p (operands[0], operands[1]))
		{
		  output_asm_insn ("ldr%?\t%0, %1", otherops);
		  output_asm_insn ("ldr%?\t%0, %1", operands);
		}
	      else
		{
		  output_asm_insn ("ldr%?\t%0, %1", operands);
		  output_asm_insn ("ldr%?\t%0, %1", otherops);
		}
	    }
	}
    }
  else
    {
      /* Store: the constraints only allow a register source here.  */
      gcc_assert (code0 == MEM && code1 == REG);
      gcc_assert (REGNO (operands[1]) != IP_REGNUM);

      switch (GET_CODE (XEXP (operands[0], 0)))
	{
	case REG:
	  output_asm_insn ("stm%?ia\t%m0, %M1", operands);
	  break;

	case PRE_INC:
	  gcc_assert (TARGET_LDRD);
	  output_asm_insn ("str%?d\t%1, [%m0, #8]!", operands);
	  break;

	case PRE_DEC:
	  output_asm_insn ("stm%?db\t%m0!, %M1", operands);
	  break;

	case POST_INC:
	  output_asm_insn ("stm%?ia\t%m0!, %M1", operands);
	  break;

	case POST_DEC:
	  gcc_assert (TARGET_LDRD);
	  output_asm_insn ("str%?d\t%1, [%m0], #-8", operands);
	  break;

	case PRE_MODIFY:
	case POST_MODIFY:
	  otherops[0] = operands[1];
	  otherops[1] = XEXP (XEXP (XEXP (operands[0], 0), 1), 0);
	  otherops[2] = XEXP (XEXP (XEXP (operands[0], 0), 1), 1);

	  /* Offsets beyond +/-255 don't fit strd; split into a pair
	     of str.  (These were wrongly emitted as ldr before, which
	     overwrote the source registers instead of storing them.)  */
	  if (GET_CODE (otherops[2]) == CONST_INT
	      && (INTVAL(otherops[2]) <= -256
		  || INTVAL(otherops[2]) >= 256))
	    {
	      rtx reg1;
	      reg1 = gen_rtx_REG (SImode, 1 + REGNO (operands[1]));
	      if (GET_CODE (XEXP (operands[0], 0)) == PRE_MODIFY)
		{
		  output_asm_insn ("str%?\t%0, [%1, %2]!", otherops);
		  otherops[0] = reg1;
		  output_asm_insn ("str%?\t%0, [%1, #4]", otherops);
		}
	      else
		{
		  otherops[0] = reg1;
		  output_asm_insn ("str%?\t%0, [%1, #4]", otherops);
		  otherops[0] = operands[1];
		  output_asm_insn ("str%?\t%0, [%1], %2", otherops);
		}
	    }
	  else if (GET_CODE (XEXP (operands[0], 0)) == PRE_MODIFY)
	    output_asm_insn ("str%?d\t%0, [%1, %2]!", otherops);
	  else
	    output_asm_insn ("str%?d\t%0, [%1], %2", otherops);
	  break;

	case PLUS:
	  otherops[2] = XEXP (XEXP (operands[0], 0), 1);
	  if (GET_CODE (otherops[2]) == CONST_INT)
	    {
	      /* Small offsets map directly onto stm addressing modes.  */
	      switch ((int) INTVAL (XEXP (XEXP (operands[0], 0), 1)))
		{
		case -8:
		  output_asm_insn ("stm%?db\t%m0, %M1", operands);
		  return "";

		case -4:
		  output_asm_insn ("stm%?da\t%m0, %M1", operands);
		  return "";

		case 4:
		  output_asm_insn ("stm%?ib\t%m0, %M1", operands);
		  return "";
		}
	    }
	  if (TARGET_LDRD
	      && (GET_CODE (otherops[2]) == REG
		  || (GET_CODE (otherops[2]) == CONST_INT
		      && INTVAL (otherops[2]) > -256
		      && INTVAL (otherops[2]) < 256)))
	    {
	      otherops[0] = operands[1];
	      otherops[1] = XEXP (XEXP (operands[0], 0), 0);
	      output_asm_insn ("str%?d\t%0, [%1, %2]", otherops);
	      return "";
	    }
	  /* Fall through */

	default:
	  /* Split into two word stores.  */
	  otherops[0] = adjust_address (operands[0], SImode, 4);
	  otherops[1] = gen_rtx_REG (SImode, 1 + REGNO (operands[1]));
	  output_asm_insn ("str%?\t%1, %0", operands);
	  output_asm_insn ("str%?\t%1, %0", otherops);
	}
    }

  return "";
}
/* Output an add/sub of the immediate operands[2] to operands[1],
   placing the result in operands[0].  The constant may need to be
   split across several instructions.  Outputs nothing for a
   zero-valued add onto the same register.  */
const char *
output_add_immediate (rtx *operands)
{
  HOST_WIDE_INT amount = INTVAL (operands[2]);

  /* Adding zero in place is a no-op.  */
  if (amount == 0 && REGNO (operands[0]) == REGNO (operands[1]))
    return "";

  if (amount < 0)
    output_multi_immediate (operands,
			    "sub%?\t%0, %1, %2", "sub%?\t%0, %0, %2", 2,
			    -amount);
  else
    output_multi_immediate (operands,
			    "add%?\t%0, %1, %2", "add%?\t%0, %0, %2", 2,
			    amount);

  return "";
}
/* Output a multiple immediate operation.
   INSTR1 is the output pattern to use for the first constant.
   INSTR2 is the output pattern to use for subsequent constants.
   IMMED_OP is the index of the constant slot in OPERANDS.
   N is the constant value.  */
static const char *
output_multi_immediate (rtx *operands, const char *instr1, const char *instr2,
			int immed_op, HOST_WIDE_INT n)
{
#if HOST_BITS_PER_WIDE_INT > 32
  /* Only the low 32 bits are meaningful on ARM.  */
  n &= 0xffffffff;
#endif

  if (n == 0)
    {
      /* Quick and easy output.  */
      operands[immed_op] = const0_rtx;
      output_asm_insn (instr1, operands);
    }
  else
    {
      int i;
      const char * instr = instr1;

      /* Emit the constant one 8-bit chunk at a time; an ARM immediate
	 is an 8-bit value rotated by an even amount, so chunks are
	 aligned to even bit positions.  */
      for (i = 0; i < 32; i += 2)
	{
	  if (n & (3 << i))
	    {
	      operands[immed_op] = GEN_INT (n & (255 << i));
	      output_asm_insn (instr, operands);
	      /* Subsequent chunks accumulate into the destination.  */
	      instr = instr2;
	      /* Skip the rest of the chunk just emitted (the loop
		 increment supplies the remaining 2).  */
	      i += 6;
	    }
	}
    }

  return "";
}
/* Return the assembler mnemonic for the arithmetic operation OP.
   SHIFT_FIRST_ARG matters only for subtraction: when nonzero the
   operands are reversed, so rsb is used instead of sub.  */
const char *
arithmetic_instr (rtx op, int shift_first_arg)
{
  switch (GET_CODE (op))
    {
    case PLUS:
      return "add";

    case MINUS:
      return shift_first_arg ? "rsb" : "sub";

    case IOR:
      return "orr";

    case XOR:
      return "eor";

    case AND:
      return "and";

    default:
      /* Predicates only accept the codes above.  */
      gcc_unreachable ();
    }
}
/* Return the assembler mnemonic for the shift performed by OP, and
   set *AMOUNTP to the shift count (-1 when the count is held in a
   register).  Rotates and power-of-two multiplies are rewritten in
   terms of the shifts the hardware provides.  Returns NULL when the
   shift reduces to no operation at all.  */
static const char *
shift_op (rtx op, HOST_WIDE_INT *amountp)
{
  const char * mnem;
  enum rtx_code code = GET_CODE (op);

  /* The count is operand 1: a register or a constant.  */
  switch (GET_CODE (XEXP (op, 1)))
    {
    case REG:
    case SUBREG:
      *amountp = -1;
      break;

    case CONST_INT:
      *amountp = INTVAL (XEXP (op, 1));
      break;

    default:
      gcc_unreachable ();
    }

  switch (code)
    {
    case ASHIFT:
      mnem = "asl";
      break;

    case ASHIFTRT:
      mnem = "asr";
      break;

    case LSHIFTRT:
      mnem = "lsr";
      break;

    case ROTATE:
      /* A left rotate by N is a right rotate by 32 - N; this rewrite
	 needs a constant count.  */
      gcc_assert (*amountp != -1);
      *amountp = 32 - *amountp;
      /* Fall through to share the ror mnemonic.  */

    case ROTATERT:
      mnem = "ror";
      break;

    case MULT:
      /* Multiplication by a power of two is emitted as a left shift
	 by its log2; the count must be constant.  */
      gcc_assert (*amountp != -1);
      *amountp = int_log2 (*amountp);
      return "asl";

    default:
      gcc_unreachable ();
    }

  if (*amountp != -1)
    {
      /* Rotation counts are taken modulo 32.  */
      if (code == ROTATERT)
	*amountp &= 31;
      else if (*amountp != (*amountp & 31))
	{
	  /* Counts of 32 or more: a left shift clears the register, so
	     use lsr #32 which has the same effect.  */
	  if (code == ASHIFT)
	    mnem = "lsr";
	  *amountp = 32;
	}

      /* A zero count means no shift is needed at all.  */
      if (*amountp == 0)
	return NULL;
    }

  return mnem;
}
/* Return the index of the lowest set bit of POWER (its base-2
   logarithm when POWER is a power of two).  POWER must have a bit set
   within the low 32 bits.  */
static HOST_WIDE_INT
int_log2 (HOST_WIDE_INT power)
{
  HOST_WIDE_INT n;

  for (n = 0; (power & ((HOST_WIDE_INT) 1 << n)) == 0; n++)
    gcc_assert (n <= 31);

  return n;
}
/* Longest run of printed characters in one .ascii directive.  */
#define MAX_ASCII_LEN 51

/* Output LEN bytes at P as one or more .ascii directives on STREAM,
   starting a new directive whenever the current one exceeds
   MAX_ASCII_LEN printed characters, and escaping quotes, backslashes
   and non-printable bytes.  */
void
output_ascii_pseudo_op (FILE *stream, const unsigned char *p, int len)
{
  int pos;
  int chars_in_line = 0;

  fputs ("\t.ascii\t\"", stream);

  for (pos = 0; pos < len; pos++)
    {
      int ch = p[pos];

      /* Break overly long directives.  */
      if (chars_in_line >= MAX_ASCII_LEN)
	{
	  fputs ("\"\n\t.ascii\t\"", stream);
	  chars_in_line = 0;
	}

      if (!ISPRINT (ch))
	{
	  /* Octal escape for non-printable bytes.  */
	  fprintf (stream, "\\%03o", ch);
	  chars_in_line += 4;
	  continue;
	}

      /* Quote and backslash need a leading escape.  */
      if (ch == '\\' || ch == '\"')
	{
	  putc ('\\', stream);
	  chars_in_line++;
	}
      putc (ch, stream);
      chars_in_line++;
    }

  fputs ("\"\n", stream);
}
/* Compute the register save mask for registers 0 through 12
   inclusive.  This code is used by arm_compute_save_reg_mask.  */
static unsigned long
arm_compute_save_reg0_reg12_mask (void)
{
  unsigned long func_type = arm_current_func_type ();
  unsigned long save_reg_mask = 0;
  unsigned int reg;

  if (IS_INTERRUPT (func_type))
    {
      unsigned int max_reg;
      /* Interrupt functions must not corrupt any registers,
	 even call clobbered ones.  If this is a leaf function
	 we can just examine the registers used by the RTL, but
	 otherwise we have to assume that whatever function is
	 called might clobber anything, and so we have to save
	 all the call-clobbered registers as well.  */
      if (ARM_FUNC_TYPE (func_type) == ARM_FT_FIQ)
	/* FIQ handlers have registers r8 - r12 banked, so
	   we only need to check r0 - r7, Normal ISRs only
	   bank r14 and r15, so we must check up to r12.
	   r13 is the stack pointer which is always preserved,
	   so we do not need to consider it here.  */
	max_reg = 7;
      else
	max_reg = 12;

      for (reg = 0; reg <= max_reg; reg++)
	if (regs_ever_live[reg]
	    || (! current_function_is_leaf && call_used_regs [reg]))
	  save_reg_mask |= (1 << reg);

      /* Also save the pic base register if necessary.  */
      if (flag_pic
	  && !TARGET_SINGLE_PIC_BASE
	  && arm_pic_register != INVALID_REGNUM
	  && current_function_uses_pic_offset_table)
	save_reg_mask |= 1 << PIC_OFFSET_TABLE_REGNUM;
    }
  else
    {
      /* In the normal case we only need to save those registers
	 which are call saved and which are used by this function.  */
      for (reg = 0; reg <= 11; reg++)
	if (regs_ever_live[reg] && ! call_used_regs [reg])
	  save_reg_mask |= (1 << reg);

      /* Handle the frame pointer as a special case.  */
      if (frame_pointer_needed)
	save_reg_mask |= 1 << HARD_FRAME_POINTER_REGNUM;

      /* Save the pic base register when it is used (or merely live).  */
      if (flag_pic
	  && !TARGET_SINGLE_PIC_BASE
	  && arm_pic_register != INVALID_REGNUM
	  && (regs_ever_live[PIC_OFFSET_TABLE_REGNUM]
	      || current_function_uses_pic_offset_table))
	save_reg_mask |= 1 << PIC_OFFSET_TABLE_REGNUM;
    }

  /* Save registers so the exception handler can modify them.  */
  if (current_function_calls_eh_return)
    {
      unsigned int i;

      for (i = 0; ; i++)
	{
	  reg = EH_RETURN_DATA_REGNO (i);
	  if (reg == INVALID_REGNUM)
	    break;
	  save_reg_mask |= 1 << reg;
	}
    }

  return save_reg_mask;
}
/* Compute a bit mask of which registers need to be
   saved on the stack for the current function.  */
static unsigned long
arm_compute_save_reg_mask (void)
{
  unsigned int save_reg_mask = 0;
  unsigned long func_type = arm_current_func_type ();

  if (IS_NAKED (func_type))
    /* The prologue and epilogue are supplied by the programmer.  */
    return 0;

  /* Volatile functions do not return, so there
     is no need to save any other registers.  */
  if (!IS_VOLATILE (func_type))
    save_reg_mask |= arm_compute_save_reg0_reg12_mask ();

  /* Decide if we need to save the link register.  Save it also when
     we are already pushing other registers and optimizing for size,
     since popping it straight into the PC saves the separate return
     instruction (not possible for fancy return sequences such as
     eh_return).  */
  if (regs_ever_live [LR_REGNUM]
      || (save_reg_mask
	  && optimize_size
	  && ARM_FUNC_TYPE (func_type) == ARM_FT_NORMAL
	  && !current_function_calls_eh_return))
    save_reg_mask |= 1 << LR_REGNUM;

  /* The lr save may have been found unnecessary later on.  */
  if (cfun->machine->lr_save_eliminated)
    save_reg_mask &= ~ (1 << LR_REGNUM);

  /* A stack frame also requires lr and the frame pointer.  */
  if (frame_pointer_needed)
    save_reg_mask |= (1 << LR_REGNUM | 1 << HARD_FRAME_POINTER_REGNUM);

  if (TARGET_REALLY_IWMMXT
      && (!IS_VOLATILE (func_type))
      && ((bit_count (save_reg_mask)
	   + ARM_NUM_INTS (current_function_pretend_args_size)) % 2) != 0)
    {
      unsigned int reg;

      /* The total number of saved registers plus pushed argument
	 words must be even, keeping the save area 64-bit aligned for
	 iWMMXt.  Find a spare register to push as padding.  */
      for (reg = 4; reg <= 12; reg++)
	if ((save_reg_mask & (1 << reg)) == 0)
	  break;

      if (reg <= 12)
	save_reg_mask |= (1 << reg);
      else
	{
	  /* No spare register: push r3 and give up on sibcalls (r3
	     may carry an argument in a sibcall).  */
	  cfun->machine->sibcall_blocked = 1;
	  save_reg_mask |= (1 << 3);
	}
    }

  return save_reg_mask;
}
/* Compute the bit mask of registers which must be saved on the stack
   of the current Thumb function.  */
static unsigned long
thumb_compute_save_reg_mask (void)
{
  unsigned long mask;
  unsigned reg;

  mask = 0;
  for (reg = 0; reg < 12; reg ++)
    if (regs_ever_live[reg] && !call_used_regs[reg])
      mask |= 1 << reg;

  if (frame_pointer_needed)
    mask |= 1 << THUMB_HARD_FRAME_POINTER_REGNUM;

  /* Save the pic base register when it is in use.  */
  if (flag_pic
      && !TARGET_SINGLE_PIC_BASE
      && arm_pic_register != INVALID_REGNUM
      && current_function_uses_pic_offset_table)
    mask |= 1 << PIC_OFFSET_TABLE_REGNUM;

  /* See if we might need r11 for calls to _interwork_r11_call_via_rN().  */
  if (!frame_pointer_needed && CALLER_INTERWORKING_SLOT_SIZE > 0)
    mask |= 1 << ARM_HARD_FRAME_POINTER_REGNUM;

  /* LR will also be pushed if any lo regs are pushed.  */
  if (mask & 0xff || thumb_force_lr_save ())
    mask |= (1 << LR_REGNUM);

  /* Make sure we have a low work register if we need one: we will
     need one if we are going to push a high register or emit
     backtrace information, but are not currently pushing any low
     register.  */
  if ((mask & 0xff) == 0
      && ((mask & 0x0f00) || TARGET_BACKTRACE))
    {
      reg = thumb_find_work_register (1 << (LAST_LO_REGNUM - 1));
      if (! call_used_regs[reg])
	mask |= 1 << reg;
    }

  /* Large frames addressed via the frame pointer need a spare low
     register; reserve one unless r4-r6 (0x70) already provide one.
     NOTE(review): presumably a scratch for frame addressing in the
     epilogue -- confirm against the Thumb epilogue code.  */
  if (frame_pointer_needed && ((mask & 0x70) == 0)
      && (ROUND_UP_WORD (get_frame_size ())
	  + current_function_outgoing_args_size) >= 512)
    mask |= 1 << (LAST_LO_REGNUM - 1);

  return mask;
}
static int
arm_get_vfp_saved_size (void)
{
unsigned int regno;
int count;
int saved;
saved = 0;
if (TARGET_HARD_FLOAT && TARGET_VFP)
{
count = 0;
for (regno = FIRST_VFP_REGNUM;
regno < LAST_VFP_REGNUM;
regno += 2)
{
if ((!regs_ever_live[regno] || call_used_regs[regno])
&& (!regs_ever_live[regno + 1] || call_used_regs[regno + 1]))
{
if (count > 0)
{
if (count == 2 && !arm_arch6)
count++;
saved += count * 8 + 4;
}
count = 0;
}
else
count++;
}
if (count > 0)
{
if (count == 2 && !arm_arch6)
count++;
saved += count * 8 + 4;
}
}
return saved;
}
/* Output a return instruction.  OPERAND is the condition-code operand
   for a conditional return; REALLY_RETURN is false when only the
   register restores should be emitted; REVERSE means use the inverted
   condition.  */
const char *
output_return_instruction (rtx operand, int really_return, int reverse)
{
  char conditional[10];
  char instr[100];
  unsigned reg;
  unsigned long live_regs_mask;
  unsigned long func_type;
  arm_stack_offsets *offsets;

  func_type = arm_current_func_type ();

  /* Naked functions: the user supplies the epilogue.  */
  if (IS_NAKED (func_type))
    return "";

  if (IS_VOLATILE (func_type) && TARGET_ABORT_NORETURN)
    {
      /* If this function was declared non-returning, and we have
	 found a tail call, then we have to trust that the called
	 function won't return.  */
      if (really_return)
	{
	  rtx ops[2];

	  /* Otherwise, trap an attempted return by aborting.  */
	  ops[0] = operand;
	  ops[1] = gen_rtx_SYMBOL_REF (Pmode, NEED_PLT_RELOC ? "abort(PLT)"
				       : "abort");
	  assemble_external_libcall (ops[1]);
	  output_asm_insn (reverse ? "bl%D0\t%a1" : "bl%d0\t%a1", ops);
	}

      return "";
    }

  gcc_assert (!current_function_calls_alloca || really_return);

  /* Construct the conditional suffix ("%?%d0" or "%?%D0").  */
  sprintf (conditional, "%%?%%%c0", reverse ? 'D' : 'd');

  return_used_this_function = 1;

  live_regs_mask = arm_compute_save_reg_mask ();

  if (live_regs_mask)
    {
      const char * return_reg;

      /* If we do not have any special requirements for function exit
	 (e.g. interworking) then we can load the return address
	 directly into the PC.  Otherwise we must load it into LR.  */
      if (really_return
	  && (! TARGET_INTERWORK || arm_arch5))
	return_reg = reg_names[PC_REGNUM];
      else
	return_reg = reg_names[LR_REGNUM];

      if ((live_regs_mask & (1 << IP_REGNUM)) == (1 << IP_REGNUM))
	{
	  /* There are three reasons IP may have been saved: 1) a
	     stack frame was created, in which case the saved slot
	     holds the old stack pointer and is restored into SP;
	     2) an interrupt routine corrupted IP; or 3) iWMMXt stack
	     alignment padding.  Only case 1 needs the SP rewrite.  */
	  if (frame_pointer_needed)
	    {
	      live_regs_mask &= ~ (1 << IP_REGNUM);
	      live_regs_mask |= (1 << SP_REGNUM);
	    }
	  else
	    gcc_assert (IS_INTERRUPT (func_type) || TARGET_REALLY_IWMMXT);
	}

      /* See if the mask contains exactly one register; if so a single
	 ldr may be used instead of ldm.  */
      for (reg = 0; reg <= LAST_ARM_REGNUM; reg++)
	if (live_regs_mask == (1U << reg))
	  break;

      if (reg <= LAST_ARM_REGNUM
	  && (reg != LR_REGNUM
	      || ! really_return
	      || ! IS_INTERRUPT (func_type)))
	{
	  /* Single-register restore (popping lr goes straight into
	     the chosen return register).  */
	  sprintf (instr, "ldr%s\t%%|%s, [%%|sp], #4", conditional,
		   (reg == LR_REGNUM) ? return_reg : reg_names[reg]);
	}
      else
	{
	  char *p;
	  int first = 1;

	  /* Generate the load multiple instruction to restore the
	     registers.  */
	  if (live_regs_mask & (1 << SP_REGNUM))
	    {
	      unsigned HOST_WIDE_INT stack_adjust;

	      offsets = arm_get_frame_offsets ();
	      stack_adjust = offsets->outgoing_args - offsets->saved_regs;
	      gcc_assert (stack_adjust == 0 || stack_adjust == 4);

	      if (stack_adjust && arm_arch5)
		sprintf (instr, "ldm%sib\t%%|sp, {", conditional);
	      else
		{
		  /* If we can't use ldmib (SA110 bug),
		     then try to pop r3 instead.  */
		  if (stack_adjust)
		    live_regs_mask |= 1 << 3;
		  sprintf (instr, "ldm%sfd\t%%|sp, {", conditional);
		}
	    }
	  else
	    sprintf (instr, "ldm%sfd\t%%|sp!, {", conditional);

	  p = instr + strlen (instr);

	  /* Append each live register up to sp to the register list.  */
	  for (reg = 0; reg <= SP_REGNUM; reg++)
	    if (live_regs_mask & (1 << reg))
	      {
		int l = strlen (reg_names[reg]);

		if (first)
		  first = 0;
		else
		  {
		    memcpy (p, ", ", 2);
		    p += 2;
		  }

		memcpy (p, "%|", 2);
		memcpy (p + 2, reg_names[reg], l);
		p += l + 2;
	      }

	  if (live_regs_mask & (1 << LR_REGNUM))
	    {
	      /* lr is popped into the return register; '^' restores
		 the SPSR when leaving an interrupt handler.  */
	      sprintf (p, "%s%%|%s}", first ? "" : ", ", return_reg);
	      if (IS_INTERRUPT (func_type))
		strcat (p, "^");
	    }
	  else
	    strcpy (p, "}");
	}

      output_asm_insn (instr, & operand);

      /* See if we need to generate an extra instruction to
	 perform the actual function return.  */
      if (really_return
	  && func_type != ARM_FT_INTERWORKED
	  && (live_regs_mask & (1 << LR_REGNUM)) != 0)
	{
	  /* The return has already been handled
	     by loading the LR into the PC.  */
	  really_return = 0;
	}
    }

  if (really_return)
    {
      switch ((int) ARM_FUNC_TYPE (func_type))
	{
	case ARM_FT_ISR:
	case ARM_FT_FIQ:
	  /* Interrupt return: lr points 4 bytes past the return
	     address; the 's' suffix also restores the SPSR.  */
	  sprintf (instr, "sub%ss\t%%|pc, %%|lr, #4", conditional);
	  break;

	case ARM_FT_INTERWORKED:
	  sprintf (instr, "bx%s\t%%|lr", conditional);
	  break;

	case ARM_FT_EXCEPTION:
	  sprintf (instr, "mov%ss\t%%|pc, %%|lr", conditional);
	  break;

	default:
	  /* Use bx if it's available.  */
	  if (arm_arch5 || arm_arch4t)
	    sprintf (instr, "bx%s\t%%|lr", conditional);
	  else
	    sprintf (instr, "mov%s\t%%|pc, %%|lr", conditional);
	  break;
	}

      output_asm_insn (instr, & operand);
    }

  return "";
}
/* Write the function name into the code section directly preceding
   the function prologue: the NUL-terminated name padded to a word
   boundary, followed by a marker word of 0xff000000 plus the padded
   length, so the name can be found relative to the function start.  */
void
arm_poke_function_name (FILE *stream, const char *name)
{
  unsigned long raw_len;
  unsigned long padded_len;
  rtx marker;

  raw_len = strlen (name) + 1;   /* Include the terminating NUL.  */
  padded_len = ROUND_UP_WORD (raw_len);

  ASM_OUTPUT_ASCII (stream, name, raw_len);
  ASM_OUTPUT_ALIGN (stream, 2);

  marker = GEN_INT ((unsigned HOST_WIDE_INT) 0xff000000 + padded_len);
  assemble_aligned_integer (UNITS_PER_WORD, marker);
}
/* Emit the start-of-prologue assembler output for the current
   function.  For ARM this is purely informational comments (the real
   prologue is emitted as RTL); Thumb functions are handled by
   thumb_output_function_prologue.  */
static void
arm_output_function_prologue (FILE *f, HOST_WIDE_INT frame_size)
{
  unsigned long func_type;

  if (!TARGET_ARM)
    {
      thumb_output_function_prologue (f, frame_size);
      return;
    }

  /* Sanity check: the conditional-execution state machine must be idle
     at function boundaries.  */
  gcc_assert (!arm_ccfsm_state && !arm_target_insn);

  func_type = arm_current_func_type ();

  switch ((int) ARM_FUNC_TYPE (func_type))
    {
    default:
    case ARM_FT_NORMAL:
      break;
    case ARM_FT_INTERWORKED:
      asm_fprintf (f, "\t%@ Function supports interworking.\n");
      break;
    case ARM_FT_ISR:
      asm_fprintf (f, "\t%@ Interrupt Service Routine.\n");
      break;
    case ARM_FT_FIQ:
      asm_fprintf (f, "\t%@ Fast Interrupt Service Routine.\n");
      break;
    case ARM_FT_EXCEPTION:
      asm_fprintf (f, "\t%@ ARM Exception Handler.\n");
      break;
    }

  if (IS_NAKED (func_type))
    asm_fprintf (f, "\t%@ Naked Function: prologue and epilogue provided by programmer.\n");

  if (IS_VOLATILE (func_type))
    asm_fprintf (f, "\t%@ Volatile: function does not return.\n");

  if (IS_NESTED (func_type))
    asm_fprintf (f, "\t%@ Nested: function declared inside another function.\n");

  /* Summarize the frame layout as assembler comments.  */
  asm_fprintf (f, "\t%@ args = %d, pretend = %d, frame = %wd\n",
	       current_function_args_size,
	       current_function_pretend_args_size, frame_size);

  asm_fprintf (f, "\t%@ frame_needed = %d, uses_anonymous_args = %d\n",
	       frame_pointer_needed,
	       cfun->machine->uses_anonymous_args);

  if (cfun->machine->lr_save_eliminated)
    asm_fprintf (f, "\t%@ link register save eliminated.\n");

  if (current_function_calls_eh_return)
    asm_fprintf (f, "\t@ Calls __builtin_eh_return.\n");

#ifdef AOF_ASSEMBLER
  if (flag_pic)
    asm_fprintf (f, "\tmov\t%r, %r\n", IP_REGNUM, PIC_OFFSET_TABLE_REGNUM);
#endif

  /* Reset for the epilogue's use_return_insn check.  */
  return_used_this_function = 0;
}
const char *
arm_output_epilogue (rtx sibling)
{
int reg;
unsigned long saved_regs_mask;
unsigned long func_type;
int floats_offset = 0;
rtx operands[3];
FILE * f = asm_out_file;
unsigned int lrm_count = 0;
int really_return = (sibling == NULL);
int start_reg;
arm_stack_offsets *offsets;
if (use_return_insn (FALSE, sibling) && return_used_this_function)
return "";
func_type = arm_current_func_type ();
if (IS_NAKED (func_type))
return "";
if (IS_VOLATILE (func_type) && TARGET_ABORT_NORETURN)
{
rtx op;
op = gen_rtx_SYMBOL_REF (Pmode, NEED_PLT_RELOC ? "abort(PLT)" : "abort");
assemble_external_libcall (op);
output_asm_insn ("bl\t%a0", &op);
return "";
}
gcc_assert (!current_function_calls_eh_return || really_return);
offsets = arm_get_frame_offsets ();
saved_regs_mask = arm_compute_save_reg_mask ();
if (TARGET_IWMMXT)
lrm_count = bit_count (saved_regs_mask);
floats_offset = offsets->saved_args;
for (reg = 0; reg <= LAST_ARM_REGNUM; reg++)
if (saved_regs_mask & (1 << reg))
floats_offset += 4;
if (frame_pointer_needed)
{
int vfp_offset = offsets->frame;
unsigned long regs_above_fp =
inclusive_bitmask (ARM_HARD_FRAME_POINTER_REGNUM + 1, 11);
if (arm_fpu_arch == FPUTYPE_FPA_EMU2)
{
for (reg = LAST_FPA_REGNUM; reg >= FIRST_FPA_REGNUM; reg--)
if (regs_ever_live[reg] && !call_used_regs[reg])
{
floats_offset += 12;
asm_fprintf (f, "\tldfe\t%r, [%r, #-%d]\n",
reg, FP_REGNUM, floats_offset - vfp_offset);
}
}
else
{
start_reg = LAST_FPA_REGNUM;
for (reg = LAST_FPA_REGNUM; reg >= FIRST_FPA_REGNUM; reg--)
{
if (regs_ever_live[reg] && !call_used_regs[reg])
{
floats_offset += 12;
if (start_reg - reg == 3)
{
asm_fprintf (f, "\tlfm\t%r, 4, [%r, #-%d]\n",
reg, FP_REGNUM, floats_offset - vfp_offset);
start_reg = reg - 1;
}
}
else
{
if (reg != start_reg)
asm_fprintf (f, "\tlfm\t%r, %d, [%r, #-%d]\n",
reg + 1, start_reg - reg,
FP_REGNUM, floats_offset - vfp_offset);
start_reg = reg - 1;
}
}
if (reg != start_reg)
asm_fprintf (f, "\tlfm\t%r, %d, [%r, #-%d]\n",
reg + 1, start_reg - reg,
FP_REGNUM, floats_offset - vfp_offset);
}
if (TARGET_HARD_FLOAT && TARGET_VFP)
{
int saved_size;
saved_size = arm_get_vfp_saved_size ();
if (saved_size > 0)
{
floats_offset += saved_size;
asm_fprintf (f, "\tsub\t%r, %r, #%d\n", IP_REGNUM,
FP_REGNUM, floats_offset - vfp_offset);
}
start_reg = FIRST_VFP_REGNUM;
for (reg = FIRST_VFP_REGNUM; reg < LAST_VFP_REGNUM; reg += 2)
{
if ((!regs_ever_live[reg] || call_used_regs[reg])
&& (!regs_ever_live[reg + 1] || call_used_regs[reg + 1]))
{
if (start_reg != reg)
arm_output_fldmx (f, IP_REGNUM,
(start_reg - FIRST_VFP_REGNUM) / 2,
(reg - start_reg) / 2);
start_reg = reg + 2;
}
}
if (start_reg != reg)
arm_output_fldmx (f, IP_REGNUM,
(start_reg - FIRST_VFP_REGNUM) / 2,
(reg - start_reg) / 2);
}
if (TARGET_IWMMXT)
{
lrm_count += (lrm_count % 2 ? 2 : 1);
for (reg = LAST_IWMMXT_REGNUM; reg >= FIRST_IWMMXT_REGNUM; reg--)
if (regs_ever_live[reg] && !call_used_regs[reg])
{
asm_fprintf (f, "\twldrd\t%r, [%r, #-%d]\n",
reg, FP_REGNUM, lrm_count * 4);
lrm_count += 2;
}
}
if (sibling)
{
bool is_value;
int regno = indirect_sibreturn_reg (sibling, &is_value);
if (regno > 3 && regno != 12)
{
if (is_value)
XEXP (XEXP (XEXP (XVECEXP (PATTERN (sibling), 0, 0), 1), 0), 0)
= gen_rtx_REG (SImode, IP_REGNUM);
else
XEXP (XEXP (XVECEXP (PATTERN (sibling), 0, 0), 0), 0)
= gen_rtx_REG (SImode, IP_REGNUM);
asm_fprintf (f, "\tmov\t%r, %r\n", IP_REGNUM, regno);
}
if (regno == -1)
{
rtx stack_reg, offset;
offset = indirect_sibreturn_mem (sibling, &stack_reg, &is_value);
if (offset)
{
if (is_value)
XEXP (XEXP (XEXP (XVECEXP (PATTERN (sibling), 0, 0), 1), 0), 0)
= gen_rtx_REG (SImode, IP_REGNUM);
else
XEXP (XEXP (XVECEXP (PATTERN (sibling), 0, 0), 0), 0)
= gen_rtx_REG (SImode, IP_REGNUM);
asm_fprintf (f, "\tldr\t%r, [%r, #%wd]\n", IP_REGNUM,
REGNO (stack_reg), INTVAL (offset));
}
}
}
if ((offsets->outgoing_args - offsets->saved_args
!= (signed) bit_count (saved_regs_mask) * 4)
|| ! current_function_sp_is_unchanging)
asm_fprintf (f, "\tsub\t%r, %r, #%d\n", SP_REGNUM, FP_REGNUM,
(bit_count (saved_regs_mask) - 2) * 4);
if (ARM_FUNC_TYPE (func_type) == ARM_FT_NORMAL
&& really_return
&& current_function_pretend_args_size == 0
&& saved_regs_mask & (1 << LR_REGNUM)
&& !current_function_calls_eh_return)
{
saved_regs_mask &= ~ (1 << LR_REGNUM);
saved_regs_mask |= (1 << PC_REGNUM);
}
gcc_assert (! (saved_regs_mask & (1 << SP_REGNUM)));
if (saved_regs_mask & regs_above_fp)
{
print_multi_reg (f, "ldmfd\t%r!", SP_REGNUM,
saved_regs_mask & regs_above_fp);
print_multi_reg (f, "ldmfd\t%r!", SP_REGNUM,
saved_regs_mask & ~regs_above_fp);
}
else
print_multi_reg (f, "ldmfd\t%r!", SP_REGNUM, saved_regs_mask);
if (current_function_pretend_args_size)
{
operands[0] = operands[1] = stack_pointer_rtx;
operands[2] = GEN_INT (current_function_pretend_args_size);
output_add_immediate (operands);
}
if (IS_INTERRUPT (func_type))
print_multi_reg (f, "ldmfd\t%r!", SP_REGNUM, 1 << IP_REGNUM);
}
else
{
int ip_ok = 1;
if (sibling)
{
bool is_value;
int regno = indirect_sibreturn_reg (sibling, &is_value);
if (regno > 3 && regno != 12)
{
ip_ok = 0;
if (is_value)
XEXP (XEXP (XEXP (XVECEXP (PATTERN (sibling), 0, 0), 1), 0), 0)
= gen_rtx_REG (SImode, IP_REGNUM);
else
XEXP (XEXP (XVECEXP (PATTERN (sibling), 0, 0), 0), 0)
= gen_rtx_REG (SImode, IP_REGNUM);
asm_fprintf (f, "\tmov\t%r, %r\n", IP_REGNUM, regno);
}
if (regno == -1)
{
rtx stack_reg, offset;
offset = indirect_sibreturn_mem (sibling, &stack_reg, &is_value);
if (offset)
{
ip_ok = 0;
if (is_value)
XEXP (XEXP (XEXP (XVECEXP (PATTERN (sibling), 0, 0), 1), 0), 0)
= gen_rtx_REG (SImode, IP_REGNUM);
else
XEXP (XEXP (XVECEXP (PATTERN (sibling), 0, 0), 0), 0)
= gen_rtx_REG (SImode, IP_REGNUM);
asm_fprintf (f, "\tldr\t%r, [%r, #%wd]\n", IP_REGNUM,
REGNO (stack_reg), INTVAL (offset));
}
}
}
if (offsets->outgoing_args != offsets->saved_regs)
{
int delta = offsets->outgoing_args - offsets->saved_regs;
int maxpopsize;
tree rettype = TREE_TYPE (TREE_TYPE (current_function_decl));
if (TYPE_MODE (rettype) == VOIDmode)
maxpopsize = 20;
else if (TYPE_MODE (rettype) == DFmode
|| TYPE_MODE (rettype) == DImode)
maxpopsize = 12;
else
maxpopsize = 16;
if (saved_regs_mask
& ((1<<4) | (1<<5) | (1<<6) | (1<<7) | (1<<8) | (1<<9) | (1<<10) | (1<<11)))
ip_ok = 0;
if (!ip_ok)
maxpopsize -= 4;
if (optimize_size
&& delta <= maxpopsize && delta % 4 == 0
&& !TARGET_IWMMXT
&& really_return
&& TARGET_SOFT_FLOAT
&& arm_fpu_arch == FPUTYPE_NONE
&& !flag_pic
&& !frame_pointer_needed)
{
int reg = ip_ok ? 12 : 3;
while (delta)
{
saved_regs_mask |= (1 << reg);
reg = (reg == 12) ? 3 : reg - 1;
delta -= 4;
}
}
else
{
operands[0] = operands[1] = stack_pointer_rtx;
operands[2] = GEN_INT (offsets->outgoing_args - offsets->saved_regs);
output_add_immediate (operands);
}
}
if (arm_fpu_arch == FPUTYPE_FPA_EMU2)
{
for (reg = FIRST_FPA_REGNUM; reg <= LAST_FPA_REGNUM; reg++)
if (regs_ever_live[reg] && !call_used_regs[reg])
asm_fprintf (f, "\tldfe\t%r, [%r], #12\n",
reg, SP_REGNUM);
}
else
{
start_reg = FIRST_FPA_REGNUM;
for (reg = FIRST_FPA_REGNUM; reg <= LAST_FPA_REGNUM; reg++)
{
if (regs_ever_live[reg] && !call_used_regs[reg])
{
if (reg - start_reg == 3)
{
asm_fprintf (f, "\tlfmfd\t%r, 4, [%r]!\n",
start_reg, SP_REGNUM);
start_reg = reg + 1;
}
}
else
{
if (reg != start_reg)
asm_fprintf (f, "\tlfmfd\t%r, %d, [%r]!\n",
start_reg, reg - start_reg,
SP_REGNUM);
start_reg = reg + 1;
}
}
if (reg != start_reg)
asm_fprintf (f, "\tlfmfd\t%r, %d, [%r]!\n",
start_reg, reg - start_reg, SP_REGNUM);
}
if (TARGET_HARD_FLOAT && TARGET_VFP)
{
start_reg = FIRST_VFP_REGNUM;
for (reg = FIRST_VFP_REGNUM; reg < LAST_VFP_REGNUM; reg += 2)
{
if ((!regs_ever_live[reg] || call_used_regs[reg])
&& (!regs_ever_live[reg + 1] || call_used_regs[reg + 1]))
{
if (start_reg != reg)
arm_output_fldmx (f, SP_REGNUM,
(start_reg - FIRST_VFP_REGNUM) / 2,
(reg - start_reg) / 2);
start_reg = reg + 2;
}
}
if (start_reg != reg)
arm_output_fldmx (f, SP_REGNUM,
(start_reg - FIRST_VFP_REGNUM) / 2,
(reg - start_reg) / 2);
}
if (TARGET_IWMMXT)
for (reg = FIRST_IWMMXT_REGNUM; reg <= LAST_IWMMXT_REGNUM; reg++)
if (regs_ever_live[reg] && !call_used_regs[reg])
asm_fprintf (f, "\twldrd\t%r, [%r], #8\n", reg, SP_REGNUM);
if (ARM_FUNC_TYPE (func_type) == ARM_FT_NORMAL
&& really_return
&& current_function_pretend_args_size == 0
&& saved_regs_mask & (1 << LR_REGNUM)
&& !current_function_calls_eh_return)
{
saved_regs_mask &= ~ (1 << LR_REGNUM);
saved_regs_mask |= (1 << PC_REGNUM);
}
if (saved_regs_mask == (1 << LR_REGNUM))
{
asm_fprintf (f, "\tldr\t%r, [%r], #4\n", LR_REGNUM, SP_REGNUM);
}
else if (saved_regs_mask)
{
if (saved_regs_mask & (1 << SP_REGNUM))
print_multi_reg (f, "ldmfd\t%r", SP_REGNUM, saved_regs_mask);
else
print_multi_reg (f, "ldmfd\t%r!", SP_REGNUM, saved_regs_mask);
}
if (current_function_pretend_args_size)
{
operands[0] = operands[1] = stack_pointer_rtx;
operands[2] = GEN_INT (current_function_pretend_args_size);
output_add_immediate (operands);
}
}
if (!really_return || saved_regs_mask & (1 << PC_REGNUM))
return "";
if (current_function_calls_eh_return)
asm_fprintf (f, "\tadd\t%r, %r, %r\n", SP_REGNUM, SP_REGNUM,
ARM_EH_STACKADJ_REGNUM);
switch ((int) ARM_FUNC_TYPE (func_type))
{
case ARM_FT_ISR:
case ARM_FT_FIQ:
asm_fprintf (f, "\tsubs\t%r, %r, #4\n", PC_REGNUM, LR_REGNUM);
break;
case ARM_FT_EXCEPTION:
asm_fprintf (f, "\tmovs\t%r, %r\n", PC_REGNUM, LR_REGNUM);
break;
case ARM_FT_INTERWORKED:
asm_fprintf (f, "\tbx\t%r\n", LR_REGNUM);
break;
default:
if (arm_arch5 || arm_arch4t)
asm_fprintf (f, "\tbx\t%r\n", LR_REGNUM);
else
asm_fprintf (f, "\tmov\t%r, %r\n", PC_REGNUM, LR_REGNUM);
break;
}
return "";
}
/* Target hook: finish off the assembly output after a function body.
   For Thumb, emit the "bx <reg>" call-via-register trampolines that
   were recorded in cfun->machine->call_via[] while the body was being
   output, then reset the per-function return-insn bookkeeping.  For
   ARM, sanity-check that the frame layout the epilogue relied on is
   consistent, and clear the post-machine-reorg flag.  */
static void
arm_output_function_epilogue (FILE *file ATTRIBUTE_UNUSED,
HOST_WIDE_INT frame_size ATTRIBUTE_UNUSED)
{
arm_stack_offsets *offsets;
if (TARGET_THUMB)
{
int regno;
/* Emit one "bx <regno>" stub per register that had an indirect call
   routed through it; the label was allocated when the call was
   output.  LR itself never needs a stub.  */
for (regno = 0; regno < LR_REGNUM; regno++)
{
rtx label = cfun->machine->call_via[regno];
if (label != NULL)
{
switch_to_section (function_section (current_function_decl));
targetm.asm_out.internal_label (asm_out_file, "L",
CODE_LABEL_NUMBER (label));
asm_fprintf (asm_out_file, "\tbx\t%r\n", regno);
}
}
return_used_this_function = 0;
}
else
{
offsets = arm_get_frame_offsets ();
/* If a return insn was usable, the register-save area must end
   exactly where outgoing args begin (no extra SP adjustment),
   unless a frame pointer restores SP for us.  */
gcc_assert (!use_return_insn (FALSE, NULL)
|| !return_used_this_function
|| offsets->saved_regs == offsets->outgoing_args
|| frame_pointer_needed);
after_arm_reorg = 0;
}
#if TARGET_MACHO
/* Mach-O does not allow a label at the very end of an object; if the
   function ends in a (possibly deleted) label, pad with a nop.  */
{
rtx insn = get_last_insn ();
while (insn
&& NOTE_P (insn)
&& NOTE_LINE_NUMBER (insn) != NOTE_INSN_DELETED_LABEL)
insn = PREV_INSN (insn);
if (insn
&& (LABEL_P (insn)
|| (NOTE_P (insn)
&& NOTE_LINE_NUMBER (insn) == NOTE_INSN_DELETED_LABEL)))
fputs ("\tnop\n", file);
}
#endif
}
/* Emit a single insn that pushes the core registers in MASK (a bitmask
   over r0..r15) onto the stack, as one "stmfd sp!, {...}".  The insn is
   a PARALLEL whose first element is a SET of a PRE_DEC memory from an
   UNSPEC_PUSH_MULT, followed by a USE for each further register.  A
   REG_FRAME_RELATED_EXPR note is attached holding a SEQUENCE that
   describes, for the DWARF unwinder, the SP adjustment plus one store
   per pushed register (PC, if pushed, is not described).  Returns the
   emitted insn.  */
static rtx
emit_multi_reg_push (unsigned long mask)
{
int num_regs = 0;
int num_dwarf_regs;
int i, j;
rtx par;
rtx dwarf;
int dwarf_par_index;
rtx tmp, reg;
/* Count the registers being pushed.  */
for (i = 0; i <= LAST_ARM_REGNUM; i++)
if (mask & (1 << i))
num_regs++;
gcc_assert (num_regs && num_regs <= 16);
/* A pushed PC gets no DWARF store entry.  */
num_dwarf_regs = num_regs;
if (mask & (1 << PC_REGNUM))
num_dwarf_regs--;
/* Slot 0 of the dwarf SEQUENCE is reserved for the SP adjustment,
   filled in at the end; register stores start at index 1.  */
par = gen_rtx_PARALLEL (VOIDmode, rtvec_alloc (num_regs));
dwarf = gen_rtx_SEQUENCE (VOIDmode, rtvec_alloc (num_dwarf_regs + 1));
dwarf_par_index = 1;
/* The lowest-numbered register in MASK supplies element 0 of the
   PARALLEL (the SET carrying the UNSPEC) and, unless it is PC, the
   store at the final SP (offset 0).  */
for (i = 0; i <= LAST_ARM_REGNUM; i++)
{
if (mask & (1 << i))
{
reg = gen_rtx_REG (SImode, i);
XVECEXP (par, 0, 0)
= gen_rtx_SET (VOIDmode,
gen_frame_mem (BLKmode,
gen_rtx_PRE_DEC (BLKmode,
stack_pointer_rtx)),
gen_rtx_UNSPEC (BLKmode,
gen_rtvec (1, reg),
UNSPEC_PUSH_MULT));
if (i != PC_REGNUM)
{
tmp = gen_rtx_SET (VOIDmode,
gen_frame_mem (SImode, stack_pointer_rtx),
reg);
RTX_FRAME_RELATED_P (tmp) = 1;
XVECEXP (dwarf, 0, dwarf_par_index) = tmp;
dwarf_par_index++;
}
break;
}
}
/* Remaining registers become USEs in the PARALLEL; each (except PC)
   also gets a dwarf store at increasing 4-byte offsets.  I continues
   from where the first loop broke out.  */
for (j = 1, i++; j < num_regs; i++)
{
if (mask & (1 << i))
{
reg = gen_rtx_REG (SImode, i);
XVECEXP (par, 0, j) = gen_rtx_USE (VOIDmode, reg);
if (i != PC_REGNUM)
{
tmp
= gen_rtx_SET (VOIDmode,
gen_frame_mem (SImode,
plus_constant (stack_pointer_rtx,
4 * j)),
reg);
RTX_FRAME_RELATED_P (tmp) = 1;
XVECEXP (dwarf, 0, dwarf_par_index++) = tmp;
}
j++;
}
}
par = emit_insn (par);
/* Slot 0: the total stack-pointer decrement for the unwinder.  */
tmp = gen_rtx_SET (VOIDmode,
stack_pointer_rtx,
plus_constant (stack_pointer_rtx, -4 * num_regs));
RTX_FRAME_RELATED_P (tmp) = 1;
XVECEXP (dwarf, 0, 0) = tmp;
REG_NOTES (par) = gen_rtx_EXPR_LIST (REG_FRAME_RELATED_EXPR, dwarf,
REG_NOTES (par));
return par;
}
static int
arm_size_return_regs (void)
{
enum machine_mode mode;
if (current_function_return_rtx != 0)
mode = GET_MODE (current_function_return_rtx);
else
mode = DECL_MODE (DECL_RESULT (current_function_decl));
return GET_MODE_SIZE (mode);
}
/* Emit an insn that will be recognized as an FPA "sfm" multiple store
   of COUNT XFmode registers starting at BASE_REG, pushed below the
   stack pointer.  Mirrors emit_multi_reg_push: the PARALLEL carries
   one SET (PRE_DEC memory from an UNSPEC_PUSH_MULT) plus USEs, and a
   REG_FRAME_RELATED_EXPR SEQUENCE note describes the SP adjustment
   and the individual 12-byte stores for the DWARF unwinder.  Returns
   the emitted insn.  */
static rtx
emit_sfm (int base_reg, int count)
{
rtx par;
rtx dwarf;
rtx tmp, reg;
int i;
/* Slot 0 of the dwarf SEQUENCE holds the SP adjustment (filled at
   the end); stores occupy slots 1..count.  */
par = gen_rtx_PARALLEL (VOIDmode, rtvec_alloc (count));
dwarf = gen_rtx_SEQUENCE (VOIDmode, rtvec_alloc (count + 1));
reg = gen_rtx_REG (XFmode, base_reg++);
XVECEXP (par, 0, 0)
= gen_rtx_SET (VOIDmode,
gen_frame_mem (BLKmode,
gen_rtx_PRE_DEC (BLKmode,
stack_pointer_rtx)),
gen_rtx_UNSPEC (BLKmode,
gen_rtvec (1, reg),
UNSPEC_PUSH_MULT));
/* First register lands at the final SP (offset 0).  */
tmp = gen_rtx_SET (VOIDmode,
gen_frame_mem (XFmode, stack_pointer_rtx), reg);
RTX_FRAME_RELATED_P (tmp) = 1;
XVECEXP (dwarf, 0, 1) = tmp;
/* Remaining registers at 12-byte (XFmode) increments.  */
for (i = 1; i < count; i++)
{
reg = gen_rtx_REG (XFmode, base_reg++);
XVECEXP (par, 0, i) = gen_rtx_USE (VOIDmode, reg);
tmp = gen_rtx_SET (VOIDmode,
gen_frame_mem (XFmode,
plus_constant (stack_pointer_rtx,
i * 12)),
reg);
RTX_FRAME_RELATED_P (tmp) = 1;
XVECEXP (dwarf, 0, i + 1) = tmp;
}
/* Total stack decrement: 12 bytes per register.  */
tmp = gen_rtx_SET (VOIDmode,
stack_pointer_rtx,
plus_constant (stack_pointer_rtx, -12 * count));
RTX_FRAME_RELATED_P (tmp) = 1;
XVECEXP (dwarf, 0, 0) = tmp;
par = emit_insn (par);
REG_NOTES (par) = gen_rtx_EXPR_LIST (REG_FRAME_RELATED_EXPR, dwarf,
REG_NOTES (par));
return par;
}
static bool
thumb_force_lr_save (void)
{
return !cfun->machine->lr_save_eliminated
&& (!leaf_function_p ()
|| thumb_far_jump_used_p ()
|| regs_ever_live [LR_REGNUM]);
}
/* Compute (and cache in cfun->machine->stack_offsets) the layout of
   the current function's stack frame.  On return the structure holds,
   as byte offsets from the incoming stack pointer:
     saved_args    -- size of the pretend (stack-passed anonymous) args,
     frame         -- position of the hard frame pointer area,
     saved_regs    -- end of the register save area,
     soft_frame    -- base of local variables (soft frame pointer),
     locals_base   -- end of local variables,
     outgoing_args -- stack pointer after the prologue.
   Once reload has completed the cached values are returned unchanged,
   since the layout is then frozen.  */
static arm_stack_offsets *
arm_get_frame_offsets (void)
{
struct arm_stack_offsets *offsets;
unsigned long func_type;
int leaf;
int saved;
HOST_WIDE_INT frame_size;
offsets = &cfun->machine->stack_offsets;
if (reload_completed)
return offsets;
frame_size = ROUND_UP_WORD (get_frame_size ());
leaf = leaf_function_p ();
offsets->saved_args = current_function_pretend_args_size;
/* The frame pointer save (fp + lr) takes 8 bytes when needed.  */
offsets->frame = offsets->saved_args + (frame_pointer_needed ? 8 : 0);
if (TARGET_ARM)
{
unsigned int regno;
/* Core registers: 4 bytes each.  */
saved = bit_count (arm_compute_save_reg_mask ()) * 4;
/* iWMMXt registers are 8 bytes each.  */
if (TARGET_REALLY_IWMMXT)
{
for (regno = FIRST_IWMMXT_REGNUM;
regno <= LAST_IWMMXT_REGNUM;
regno++)
if (regs_ever_live [regno] && ! call_used_regs [regno])
saved += 8;
}
func_type = arm_current_func_type ();
/* Volatile (noreturn) functions skip saving float registers.  */
if (! IS_VOLATILE (func_type))
{
/* FPA registers are saved as 12-byte extended values.  */
for (regno = FIRST_FPA_REGNUM; regno <= LAST_FPA_REGNUM; regno++)
if (regs_ever_live[regno] && ! call_used_regs[regno])
saved += 12;
if (TARGET_HARD_FLOAT && TARGET_VFP)
saved += arm_get_vfp_saved_size ();
}
}
else
{
saved = bit_count (thumb_compute_save_reg_mask ()) * 4;
/* The APCS backtrace structure occupies 16 bytes.  */
if (TARGET_BACKTRACE)
saved += 16;
if (current_function_has_nonlocal_label && arm_arch6)
saved += 64;
}
offsets->saved_regs = offsets->saved_args + saved;
offsets->soft_frame = offsets->saved_regs + CALLER_INTERWORKING_SLOT_SIZE;
/* A leaf function with no locals needs no further layout.  */
if (leaf && frame_size == 0)
{
offsets->outgoing_args = offsets->soft_frame;
offsets->locals_base = offsets->soft_frame;
return offsets;
}
/* Keep the soft frame 8-byte aligned when the ABI demands it.  */
if (ARM_DOUBLEWORD_ALIGN
&& (offsets->soft_frame & 7))
offsets->soft_frame += 4;
offsets->locals_base = offsets->soft_frame + frame_size;
offsets->outgoing_args = (offsets->locals_base
+ current_function_outgoing_args_size);
/* The outgoing-argument area (hence SP at calls) must also be
   8-byte aligned.  */
if (ARM_DOUBLEWORD_ALIGN)
{
if (offsets->outgoing_args & 7)
offsets->outgoing_args += 4;
gcc_assert (!(offsets->outgoing_args & 7));
}
return offsets;
}
/* Implement INITIAL_ELIMINATION_OFFSET: return the byte offset between
   register FROM and register TO at the start of the function, based on
   the frame layout computed by arm_get_frame_offsets.  Only the
   argument pointer and the (soft) frame pointer may be eliminated.  */
HOST_WIDE_INT
arm_compute_initial_elimination_offset (unsigned int from, unsigned int to)
{
  arm_stack_offsets *layout = arm_get_frame_offsets ();

  if (from == ARG_POINTER_REGNUM)
    {
      if (to == FRAME_POINTER_REGNUM)
	/* Distance down to the soft frame pointer.  */
	return layout->soft_frame - layout->saved_args;

      if (to == HARD_FRAME_POINTER_REGNUM)
	/* The hard frame pointer points 4 bytes into the fp/lr save.  */
	return layout->frame - (layout->saved_args + 4);

      if (to == STACK_POINTER_REGNUM)
	return layout->outgoing_args - (layout->saved_args + 4);

      gcc_unreachable ();
    }

  if (from == FRAME_POINTER_REGNUM)
    {
      if (to == HARD_FRAME_POINTER_REGNUM)
	return layout->frame - layout->soft_frame;

      if (to == STACK_POINTER_REGNUM)
	return layout->outgoing_args - layout->soft_frame;

      gcc_unreachable ();
    }

  /* No other eliminations are defined for this target.  */
  gcc_unreachable ();
}
/* Generate the RTL prologue for the current ARM-state function: push
   pretend args, save core / iWMMXt / FPA / VFP registers, establish
   the frame pointer, and allocate the local/outgoing-args area.  All
   frame-building insns are marked RTX_FRAME_RELATED_P so the DWARF
   unwinder can describe them.  Naked functions get no prologue.  */
void
arm_expand_prologue (void)
{
int reg;
rtx amount;
rtx insn;
rtx ip_rtx;
unsigned long live_regs_mask;
unsigned long func_type;
int saved_regs = 0;
unsigned HOST_WIDE_INT args_to_push;
arm_stack_offsets *offsets;
func_type = arm_current_func_type ();
/* Naked functions manage their own prologue.  */
if (IS_NAKED (func_type))
return;
args_to_push = current_function_pretend_args_size;
live_regs_mask = arm_compute_save_reg_mask ();
ip_rtx = gen_rtx_REG (SImode, IP_REGNUM);
if (frame_pointer_needed)
{
/* Interrupt handlers must preserve IP before it is clobbered
   below; nested functions are handled elsewhere (empty branch).  */
if (IS_INTERRUPT (func_type))
{
insn = emit_multi_reg_push (1 << IP_REGNUM);
}
else if (IS_NESTED (func_type))
{
}
}
if (args_to_push)
{
/* Push the last ARGS_TO_PUSH/4 argument registers (r0-r3) for
   varargs, otherwise just drop the stack pointer.  */
if (cfun->machine->uses_anonymous_args)
insn = emit_multi_reg_push
((0xf0 >> (args_to_push / 4)) & 0xf);
else
insn = emit_insn
(gen_addsi3 (stack_pointer_rtx, stack_pointer_rtx,
GEN_INT (- args_to_push)));
RTX_FRAME_RELATED_P (insn) = 1;
}
/* ISR/FIQ return with "subs pc, lr, #4"; pre-bias the saved LR so a
   plain pop of PC also returns correctly.  */
if ((func_type == ARM_FT_ISR || func_type == ARM_FT_FIQ)
&& (live_regs_mask & (1 << LR_REGNUM)) != 0
&& ! frame_pointer_needed)
{
rtx lr = gen_rtx_REG (SImode, LR_REGNUM);
emit_set_insn (lr, plus_constant (lr, -4));
}
offsets = arm_get_frame_offsets ();
if (live_regs_mask)
{
saved_regs += bit_count (live_regs_mask) * 4;
/* Size optimization: fold a small stack adjustment into the push
   by storing scratch registers (ip/r3..) into the slack space.  */
if (optimize_size
&& !flag_pic
&& !frame_pointer_needed
&& arm_fpu_arch == FPUTYPE_NONE
&& TARGET_SOFT_FLOAT
&& !TARGET_IWMMXT)
{
int ip_ok = 1;
int delta = offsets->outgoing_args - offsets->saved_args - saved_regs;
if (delta < 0)
abort();
/* If any callee-saved high register is pushed, IP's slot would
   land in the middle of the list, so avoid using it.  */
if (live_regs_mask
& ((1<<4) | (1<<5) | (1<<6) | (1<<7) | (1<<8) | (1<<9) | (1<<10) | (1<<11)))
ip_ok = 0;
if (delta <= (ip_ok ? 20 : 16) && delta % 4 == 0)
{
int reg = (ip_ok ? 12 : 3);
while (delta)
{
delta -= 4;
live_regs_mask |= (1<<reg);
reg = (reg == 12) ? 3 : reg - 1;
saved_regs += 4;
}
}
}
if (frame_pointer_needed)
{
/* Push registers at or below the frame pointer first, point FP
   into that block, then push the remaining high registers.  */
unsigned long regs_above_fp =
inclusive_bitmask (ARM_HARD_FRAME_POINTER_REGNUM + 1, 11);
unsigned long initial_push_regs = live_regs_mask
& ~regs_above_fp;
unsigned long second_push_regs = live_regs_mask
& regs_above_fp;
insn = emit_multi_reg_push (initial_push_regs);
RTX_FRAME_RELATED_P (insn) = 1;
insn = emit_insn (
gen_addsi3 (hard_frame_pointer_rtx, stack_pointer_rtx,
GEN_INT ((bit_count (initial_push_regs) - 2)
* 4)));
RTX_FRAME_RELATED_P (insn) = 1;
/* Keep the frame pointer live for the whole function.  */
emit_insn (gen_rtx_USE (VOIDmode, hard_frame_pointer_rtx));
if (second_push_regs)
{
insn = emit_multi_reg_push (second_push_regs);
RTX_FRAME_RELATED_P (insn) = 1;
}
}
else
{
insn = emit_multi_reg_push (live_regs_mask);
RTX_FRAME_RELATED_P (insn) = 1;
}
}
/* iWMMXt registers: 8 bytes each, pushed one at a time.  */
if (TARGET_IWMMXT)
for (reg = LAST_IWMMXT_REGNUM; reg >= FIRST_IWMMXT_REGNUM; reg--)
if (regs_ever_live[reg] && ! call_used_regs [reg])
{
insn = gen_rtx_PRE_DEC (V2SImode, stack_pointer_rtx);
insn = gen_frame_mem (V2SImode, insn);
insn = emit_set_insn (insn, gen_rtx_REG (V2SImode, reg));
RTX_FRAME_RELATED_P (insn) = 1;
saved_regs += 8;
}
if (! IS_VOLATILE (func_type))
{
int start_reg;
/* FPA registers: either individual 12-byte stores (EMU2) or
   grouped "sfm" stores of up to four consecutive registers.  */
if (arm_fpu_arch == FPUTYPE_FPA_EMU2)
{
for (reg = LAST_FPA_REGNUM; reg >= FIRST_FPA_REGNUM; reg--)
if (regs_ever_live[reg] && !call_used_regs[reg])
{
insn = gen_rtx_PRE_DEC (XFmode, stack_pointer_rtx);
insn = gen_frame_mem (XFmode, insn);
insn = emit_set_insn (insn, gen_rtx_REG (XFmode, reg));
RTX_FRAME_RELATED_P (insn) = 1;
saved_regs += 12;
}
}
else
{
start_reg = LAST_FPA_REGNUM;
for (reg = LAST_FPA_REGNUM; reg >= FIRST_FPA_REGNUM; reg--)
{
if (regs_ever_live[reg] && !call_used_regs[reg])
{
/* Flush a full group of four.  */
if (start_reg - reg == 3)
{
insn = emit_sfm (reg, 4);
RTX_FRAME_RELATED_P (insn) = 1;
saved_regs += 48;
start_reg = reg - 1;
}
}
else
{
/* Gap: flush whatever run we had accumulated.  */
if (start_reg != reg)
{
insn = emit_sfm (reg + 1, start_reg - reg);
RTX_FRAME_RELATED_P (insn) = 1;
saved_regs += (start_reg - reg) * 12;
}
start_reg = reg - 1;
}
}
if (start_reg != reg)
{
insn = emit_sfm (reg + 1, start_reg - reg);
saved_regs += (start_reg - reg) * 12;
RTX_FRAME_RELATED_P (insn) = 1;
}
}
/* VFP registers: stored in pairs with fstmx runs.  */
if (TARGET_HARD_FLOAT && TARGET_VFP)
{
start_reg = FIRST_VFP_REGNUM;
for (reg = FIRST_VFP_REGNUM; reg < LAST_VFP_REGNUM; reg += 2)
{
if ((!regs_ever_live[reg] || call_used_regs[reg])
&& (!regs_ever_live[reg + 1] || call_used_regs[reg + 1]))
{
if (start_reg != reg)
saved_regs += vfp_emit_fstmx (start_reg,
(reg - start_reg) / 2);
start_reg = reg + 2;
}
}
if (start_reg != reg)
saved_regs += vfp_emit_fstmx (start_reg,
(reg - start_reg) / 2);
}
}
/* Allocate the locals / outgoing-argument area; gen_addsi3 may split
   a large constant into several insns, so mark them all.  */
if (offsets->outgoing_args != offsets->saved_args + saved_regs)
{
rtx last = get_last_insn ();
amount = GEN_INT (offsets->saved_args + saved_regs
- offsets->outgoing_args);
insn = emit_insn (gen_addsi3 (stack_pointer_rtx, stack_pointer_rtx,
amount));
do
{
last = last ? NEXT_INSN (last) : get_insns ();
RTX_FRAME_RELATED_P (last) = 1;
}
while (last != insn);
/* Prevent the scheduler from moving stores past the SP move.  */
if (frame_pointer_needed)
insn = emit_insn (gen_stack_tie (stack_pointer_rtx,
hard_frame_pointer_rtx));
}
if (flag_pic && arm_pic_register != INVALID_REGNUM)
arm_load_pic_register (0UL);
/* Keep the prologue together when profiling, when scheduling across
   it is disabled, or when unwind info must be precise.  */
if (current_function_profile || !TARGET_SCHED_PROLOG
|| (ARM_EABI_UNWIND_TABLES && flag_non_call_exceptions))
emit_insn (gen_blockage ());
/* If LR is not pushed, keep its incoming value alive so a sibcall
   epilogue can still return through it.  */
if ((live_regs_mask & (1 << LR_REGNUM)) == 0)
{
emit_insn (gen_prologue_use (gen_rtx_REG (SImode, LR_REGNUM)));
cfun->machine->lr_save_eliminated = 1;
}
}
/* Implement PRINT_OPERAND: output operand X to STREAM according to the
   single-letter modifier CODE (0 means no modifier).  Fixes the
   misspelled diagnostic "instruction never exectued" and renames the
   inner condition-code variable so it no longer shadows CODE.  */
void
arm_print_operand (FILE *stream, rtx x, int code)
{
  switch (code)
    {
    case '.':		/* Local label prefix, if the target has one.  */
#ifdef LOCAL_LABEL_PREFIX
      fputs (LOCAL_LABEL_PREFIX, stream);
#endif
      return;

    case '@':		/* Assembly comment introducer.  */
      fputs (ASM_COMMENT_START, stream);
      return;

    case '_':		/* User label prefix.  */
      fputs (user_label_prefix, stream);
      return;

    case '|':		/* Register prefix.  */
      fputs (REGISTER_PREFIX, stream);
      return;

    case '?':		/* Condition code of the current ccfsm block, or of
			   the insn's own predicate.  */
      if (arm_ccfsm_state == 3 || arm_ccfsm_state == 4)
	{
	  if (TARGET_THUMB)
	    {
	      output_operand_lossage ("predicated Thumb instruction");
	      break;
	    }
	  if (current_insn_predicate != NULL)
	    {
	      output_operand_lossage
		("predicated instruction in conditional sequence");
	      break;
	    }
	  fputs (arm_condition_codes[arm_current_cc], stream);
	}
      else if (current_insn_predicate)
	{
	  enum arm_cond_code cond;

	  if (TARGET_THUMB)
	    {
	      output_operand_lossage ("predicated Thumb instruction");
	      break;
	    }
	  cond = get_arm_condition_code (current_insn_predicate);
	  fputs (arm_condition_codes[cond], stream);
	}
      return;

    case 'N':		/* Negated floating-point constant.  */
      {
	REAL_VALUE_TYPE r;
	REAL_VALUE_FROM_CONST_DOUBLE (r, x);
	r = REAL_VALUE_NEGATE (r);
	fprintf (stream, "%s", fp_const_from_val (&r));
      }
      return;

    case 'B':		/* Bitwise complement of an integer operand.  */
      if (GET_CODE (x) == CONST_INT)
	{
	  HOST_WIDE_INT val;
	  val = ARM_SIGN_EXTEND (~INTVAL (x));
	  fprintf (stream, HOST_WIDE_INT_PRINT_DEC, val);
	}
      else
	{
	  putc ('~', stream);
	  output_addr_const (stream, x);
	}
      return;

    case 'i':		/* Arithmetic mnemonic, shifted-operand form.  */
      fprintf (stream, "%s", arithmetic_instr (x, 1));
      return;

    case 's':		/* Shift amount, truncated to 0..63.  */
      if (GET_CODE (x) == CONST_INT)
	{
	  fprintf (stream, HOST_WIDE_INT_PRINT_DEC, INTVAL (x) & 0x3f);
	  return;
	}
      arm_print_operand (stream, x, 0);
      return;

    case 'I':		/* Arithmetic mnemonic, plain form.  */
      fprintf (stream, "%s", arithmetic_instr (x, 0));
      return;

    case 'S':		/* Full shift-operand: ", <op> #<amount>" or
			   ", <op> <reg>".  */
      {
	HOST_WIDE_INT val;
	const char *shift;

	if (!shift_operator (x, SImode))
	  {
	    output_operand_lossage ("invalid shift operand");
	    break;
	  }
	shift = shift_op (x, &val);
	if (shift)
	  {
	    fprintf (stream, ", %s ", shift);
	    if (val == -1)
	      arm_print_operand (stream, XEXP (x, 1), 0);
	    else
	      fprintf (stream, "#" HOST_WIDE_INT_PRINT_DEC, val);
	  }
      }
      return;

    case 'Q':		/* Least-significant word of a DImode register.  */
      if (GET_CODE (x) != REG || REGNO (x) > LAST_ARM_REGNUM)
	{
	  output_operand_lossage ("invalid operand for code '%c'", code);
	  return;
	}
      asm_fprintf (stream, "%r", REGNO (x) + (WORDS_BIG_ENDIAN ? 1 : 0));
      return;

    case 'R':		/* Most-significant word of a DImode register.  */
      if (GET_CODE (x) != REG || REGNO (x) > LAST_ARM_REGNUM)
	{
	  output_operand_lossage ("invalid operand for code '%c'", code);
	  return;
	}
      asm_fprintf (stream, "%r", REGNO (x) + (WORDS_BIG_ENDIAN ? 0 : 1));
      return;

    case 'H':		/* The register one above X.  */
      if (GET_CODE (x) != REG || REGNO (x) > LAST_ARM_REGNUM)
	{
	  output_operand_lossage ("invalid operand for code '%c'", code);
	  return;
	}
      asm_fprintf (stream, "%r", REGNO (x) + 1);
      return;

    case 'm':		/* Base register of a memory operand, also handling
			   pre/post modify addresses.  */
      asm_fprintf (stream, "%r",
		   GET_CODE (XEXP (x, 0)) == REG
		   ? REGNO (XEXP (x, 0)) : REGNO (XEXP (XEXP (x, 0), 0)));
      return;

    case 'M':		/* Register range covering the operand's mode.  */
      asm_fprintf (stream, "{%r-%r}",
		   REGNO (x),
		   REGNO (x) + ARM_NUM_REGS (GET_MODE (x)) - 1);
      return;

    case 'd':		/* Condition code of a comparison.  */
      if (x == const_true_rtx)
	return;
      if (!COMPARISON_P (x))
	{
	  output_operand_lossage ("invalid operand for code '%c'", code);
	  return;
	}
      fputs (arm_condition_codes[get_arm_condition_code (x)],
	     stream);
      return;

    case 'D':		/* Inverse condition code of a comparison.  */
      if (x == const_true_rtx)
	{
	  /* Fixed typo: was "exectued".  */
	  output_operand_lossage ("instruction never executed");
	  return;
	}
      if (!COMPARISON_P (x))
	{
	  output_operand_lossage ("invalid operand for code '%c'", code);
	  return;
	}
      fputs (arm_condition_codes[ARM_INVERSE_CONDITION_CODE
				 (get_arm_condition_code (x))],
	     stream);
      return;

    /* Cirrus registers, with an instruction-selected access size.  */
    case 'W':
    case 'X':
    case 'Y':
    case 'Z':
      gcc_assert (GET_CODE (x) == REG
		  && REGNO_REG_CLASS (REGNO (x)) == CIRRUS_REGS);
      fprintf (stream, "mv%s%s",
	       code == 'W' ? "f"
	       : code == 'X' ? "d"
	       : code == 'Y' ? "fx" : "dx", reg_names[REGNO (x)] + 2);
      return;

    case 'V':		/* Cirrus register, access size from the mode.  */
      {
	int mode = GET_MODE (x);

	if (GET_CODE (x) != REG || REGNO_REG_CLASS (REGNO (x)) != CIRRUS_REGS)
	  {
	    output_operand_lossage ("invalid operand for code '%c'", code);
	    return;
	  }
	fprintf (stream, "mv%s%s",
		 mode == DFmode ? "d"
		 : mode == SImode ? "fx"
		 : mode == DImode ? "dx"
		 : "f", reg_names[REGNO (x)] + 2);
	return;
      }

    case 'U':		/* iWMMXt general-purpose register number.  */
      if (GET_CODE (x) != REG
	  || REGNO (x) < FIRST_IWMMXT_GR_REGNUM
	  || REGNO (x) > LAST_IWMMXT_GR_REGNUM)
	{
	  output_operand_lossage ("invalid operand for code '%c'", code);
	  return;
	}
      else
	fprintf (stream, "%d", REGNO (x) - FIRST_IWMMXT_GR_REGNUM);
      return;

    case 'w':		/* iWMMXt control register name for 0..15.  */
      if (GET_CODE (x) != CONST_INT
	  || INTVAL (x) < 0
	  || INTVAL (x) >= 16)
	{
	  output_operand_lossage ("invalid operand for code '%c'", code);
	  return;
	}
      else
	{
	  static const char * wc_reg_names [16] =
	    {
	      "wCID", "wCon", "wCSSF", "wCASF",
	      "wC4", "wC5", "wC6", "wC7",
	      "wCGR0", "wCGR1", "wCGR2", "wCGR3",
	      "wC12", "wC13", "wC14", "wC15"
	    };
	  fprintf (stream, "%s", wc_reg_names [INTVAL (x)]);
	}
      return;

    case 'P':		/* Double-precision VFP register name (dN).  */
      {
	int mode = GET_MODE (x);
	int num;

	if (mode != DImode && mode != DFmode)
	  {
	    output_operand_lossage ("invalid operand for code '%c'", code);
	    return;
	  }
	if (GET_CODE (x) != REG
	    || !IS_VFP_REGNUM (REGNO (x)))
	  {
	    output_operand_lossage ("invalid operand for code '%c'", code);
	    return;
	  }
	num = REGNO(x) - FIRST_VFP_REGNUM;
	/* Double registers must start on an even single register.  */
	if (num & 1)
	  {
	    output_operand_lossage ("invalid operand for code '%c'", code);
	    return;
	  }
	fprintf (stream, "d%d", num >> 1);
      }
      return;

    default:		/* No modifier: print the operand directly.  */
      if (x == 0)
	{
	  output_operand_lossage ("missing operand");
	  return;
	}
      switch (GET_CODE (x))
	{
	case REG:
	  asm_fprintf (stream, "%r", REGNO (x));
	  break;

	case MEM:
	  output_memory_reference_mode = GET_MODE (x);
	  output_address (XEXP (x, 0));
	  break;

	case CONST_DOUBLE:
	  fprintf (stream, "#%s", fp_immediate_constant (x));
	  break;

	default:
	  gcc_assert (GET_CODE (x) != NEG);
	  fputc ('#', stream);
	  output_addr_const (stream, x);
	  break;
	}
    }
}
#ifndef AOF_ASSEMBLER
/* Target hook: output an integer constant X of SIZE bytes, with
   ALIGNED_P indicating natural alignment.  Handles PIC word-sized
   symbols (appending "(GOTOFF)" / "(GOT)" relocation annotations for
   constant-pool entries) and iWMMXt vector constants element by
   element; everything else falls back to the default hook.  Returns
   true when output was produced here.  */
static bool
arm_assemble_integer (rtx x, unsigned int size, int aligned_p)
{
/* Mach-O objects are always assumed aligned.  */
if (TARGET_MACHO)
aligned_p = 1;
if (size == UNITS_PER_WORD && aligned_p)
{
fputs ("\t" DOT_WORD "\t", asm_out_file);
output_addr_const (asm_out_file, x);
/* Constant-table entries for PIC need GOT-relative relocations:
   GOTOFF for locally-bound symbols and labels, GOT otherwise.  */
if (NEED_GOT_RELOC && flag_pic && making_const_table &&
(GET_CODE (x) == SYMBOL_REF || GET_CODE (x) == LABEL_REF))
{
if (GET_CODE (x) == SYMBOL_REF
&& (CONSTANT_POOL_ADDRESS_P (x)
|| SYMBOL_REF_LOCAL_P (x)))
fputs ("(GOTOFF)", asm_out_file);
else if (GET_CODE (x) == LABEL_REF)
fputs ("(GOTOFF)", asm_out_file);
else
fputs ("(GOT)", asm_out_file);
}
fputc ('\n', asm_out_file);
return true;
}
if (arm_vector_mode_supported_p (GET_MODE (x)))
{
int i, units;
gcc_assert (GET_CODE (x) == CONST_VECTOR);
units = CONST_VECTOR_NUNITS (x);
/* Element size in bytes for each supported vector mode.  */
switch (GET_MODE (x))
{
case V2SImode: size = 4; break;
case V4HImode: size = 2; break;
case V8QImode: size = 1; break;
default:
gcc_unreachable ();
}
/* Emit each element; only the first carries full alignment.  */
for (i = 0; i < units; i++)
{
rtx elt;
elt = CONST_VECTOR_ELT (x, i);
assemble_integer
(elt, size, i == 0 ? BIGGEST_ALIGNMENT : size * BITS_PER_UNIT, 1);
}
return true;
}
return default_assemble_integer (x, size, aligned_p);
}
#ifdef OBJECT_FORMAT_ELF
/* Output a constructor-table entry for SYMBOL.  AAPCS (EABI) objects
   must use an R_ARM_TARGET1 relocation, spelled "(target1)", in the
   .ctors section; other ABIs use the generic named-section path.  */
static void
arm_elf_asm_constructor (rtx symbol, int priority ATTRIBUTE_UNUSED)
{
  if (TARGET_AAPCS_BASED)
    {
      switch_to_section (ctors_section);
      assemble_align (POINTER_SIZE);
      fputs ("\t.word\t", asm_out_file);
      output_addr_const (asm_out_file, symbol);
      fputs ("(target1)\n", asm_out_file);
    }
  else
    default_named_section_asm_out_constructor (symbol, priority);
}
#endif
#endif
/* Map COMPARISON (a comparison rtx) to the ARM condition code used to
   output it, taking the mode of the compared value into account.  The
   CC_D* "dominance" modes encode a combined comparison and only allow
   EQ/NE on top; CC_SWPmode means the operands were swapped, so the
   condition is mirrored; CC_Cmode exposes only the carry flag, and
   CC_NOOVmode/CC_Nmode/CC_Zmode expose subsets of the flags.  */
static enum arm_cond_code
get_arm_condition_code (rtx comparison)
{
enum machine_mode mode = GET_MODE (XEXP (comparison, 0));
int code;
enum rtx_code comp_code = GET_CODE (comparison);
if (GET_MODE_CLASS (mode) != MODE_CC)
mode = SELECT_CC_MODE (comp_code, XEXP (comparison, 0),
XEXP (comparison, 1));
switch (mode)
{
/* Dominance modes: the base condition is baked into the mode; an
   outer EQ inverts it, an outer NE keeps it.  */
case CC_DNEmode: code = ARM_NE; goto dominance;
case CC_DEQmode: code = ARM_EQ; goto dominance;
case CC_DGEmode: code = ARM_GE; goto dominance;
case CC_DGTmode: code = ARM_GT; goto dominance;
case CC_DLEmode: code = ARM_LE; goto dominance;
case CC_DLTmode: code = ARM_LT; goto dominance;
case CC_DGEUmode: code = ARM_CS; goto dominance;
case CC_DGTUmode: code = ARM_HI; goto dominance;
case CC_DLEUmode: code = ARM_LS; goto dominance;
case CC_DLTUmode: code = ARM_CC;
dominance:
gcc_assert (comp_code == EQ || comp_code == NE);
if (comp_code == EQ)
return ARM_INVERSE_CONDITION_CODE (code);
return code;
/* Overflow flag is not valid: use sign-based codes for GE/LT.  */
case CC_NOOVmode:
switch (comp_code)
{
case NE: return ARM_NE;
case EQ: return ARM_EQ;
case GE: return ARM_PL;
case LT: return ARM_MI;
default: gcc_unreachable ();
}
case CC_Zmode:
switch (comp_code)
{
case NE: return ARM_NE;
case EQ: return ARM_EQ;
default: gcc_unreachable ();
}
case CC_Nmode:
switch (comp_code)
{
case NE: return ARM_MI;
case EQ: return ARM_PL;
default: gcc_unreachable ();
}
/* Floating-point compares; unordered variants map onto the codes
   that treat "unordered" the desired way.  */
case CCFPEmode:
case CCFPmode:
switch (comp_code)
{
case GE: return ARM_GE;
case GT: return ARM_GT;
case LE: return ARM_LS;
case LT: return ARM_MI;
case NE: return ARM_NE;
case EQ: return ARM_EQ;
case ORDERED: return ARM_VC;
case UNORDERED: return ARM_VS;
case UNLT: return ARM_LT;
case UNLE: return ARM_LE;
case UNGT: return ARM_HI;
case UNGE: return ARM_PL;
/* UNEQ and LTGT do not have a representable condition.  */
case UNEQ:
case LTGT:
default: gcc_unreachable ();
}
/* Operands were swapped when the flags were set: mirror each
   ordering condition.  */
case CC_SWPmode:
switch (comp_code)
{
case NE: return ARM_NE;
case EQ: return ARM_EQ;
case GE: return ARM_LE;
case GT: return ARM_LT;
case LE: return ARM_GE;
case LT: return ARM_GT;
case GEU: return ARM_LS;
case GTU: return ARM_CC;
case LEU: return ARM_CS;
case LTU: return ARM_HI;
default: gcc_unreachable ();
}
/* Only the carry flag is meaningful.  */
case CC_Cmode:
switch (comp_code)
{
case LTU: return ARM_CS;
case GEU: return ARM_CC;
default: gcc_unreachable ();
}
case CCmode:
switch (comp_code)
{
case NE: return ARM_NE;
case EQ: return ARM_EQ;
case GE: return ARM_GE;
case GT: return ARM_GT;
case LE: return ARM_LE;
case LT: return ARM_LT;
case GEU: return ARM_CS;
case GTU: return ARM_HI;
case LEU: return ARM_LS;
case LTU: return ARM_CC;
default: gcc_unreachable ();
}
default: gcc_unreachable ();
}
}
/* Final-pass scanner implementing the ccfsm (condition-code finite
   state machine): when a short conditional branch skips only a few
   insns, suppress the branch and instead conditionalize the skipped
   insns at output time.  States (in arm_ccfsm_state):
     0 = normal, 1/2 = just seen a suitable branch and the following
     insns are being made conditional, 3/4 = currently inside such a
     conditional block (4 additionally means the branch itself has
     been deleted).  */
void
arm_final_prescan_insn (rtx insn)
{
#ifdef ENABLE_LLVM
insn = insn;
#else
rtx body = PATTERN (insn);
int reverse = 0;
int jump_clobbers = 0;
int seeking_return = 0;
rtx start_insn = insn;
/* State 4: skipping insns until the target of the deleted branch.  */
if (arm_ccfsm_state == 4)
{
if (insn == arm_target_insn)
{
arm_target_insn = NULL;
arm_ccfsm_state = 0;
}
return;
}
/* State 3: inside a conditional block; see whether a trailing
   unconditional jump (or return) can be folded in with the reversed
   condition.  */
if (arm_ccfsm_state == 3)
{
if (simplejump_p (insn))
{
start_insn = next_nonnote_insn (start_insn);
if (GET_CODE (start_insn) == BARRIER)
{
start_insn = next_nonnote_insn (start_insn);
}
if (GET_CODE (start_insn) == CODE_LABEL
&& CODE_LABEL_NUMBER (start_insn) == arm_target_label
&& LABEL_NUSES (start_insn) == 1)
reverse = TRUE;
else
return;
}
else if (GET_CODE (body) == RETURN)
{
start_insn = next_nonnote_insn (start_insn);
if (GET_CODE (start_insn) == BARRIER)
start_insn = next_nonnote_insn (start_insn);
if (GET_CODE (start_insn) == CODE_LABEL
&& CODE_LABEL_NUMBER (start_insn) == arm_target_label
&& LABEL_NUSES (start_insn) == 1)
{
reverse = TRUE;
seeking_return = 1;
}
else
return;
}
else
return;
}
gcc_assert (!arm_ccfsm_state || reverse);
if (GET_CODE (insn) != JUMP_INSN)
return;
/* Strip an outer PARALLEL (e.g. a jump with clobbers) down to the
   branch SET itself.  */
if (GET_CODE (body) == PARALLEL && XVECLEN (body, 0) > 0)
body = XVECEXP (body, 0, 0);
if (reverse
|| (GET_CODE (body) == SET && GET_CODE (SET_DEST (body)) == PC
&& GET_CODE (SET_SRC (body)) == IF_THEN_ELSE))
{
int insns_skipped;
int fail = FALSE, succeed = FALSE;
int then_not_else = TRUE;
rtx this_insn = start_insn, label = 0;
/* NOTE(review): upstream comments say conditionalizing is unsafe
   for CONDS_JUMP_CLOB insns, hence the early return.  */
if (get_attr_conds (insn) == CONDS_JUMP_CLOB)
{
jump_clobbers = 1;
return;
}
/* Work out the branch target label (or that a return is sought)
   and whether the THEN or the ELSE arm is the taken path.  */
if (reverse)
{
if (!seeking_return)
label = XEXP (SET_SRC (body), 0);
}
else if (GET_CODE (XEXP (SET_SRC (body), 1)) == LABEL_REF)
label = XEXP (XEXP (SET_SRC (body), 1), 0);
else if (GET_CODE (XEXP (SET_SRC (body), 2)) == LABEL_REF)
{
label = XEXP (XEXP (SET_SRC (body), 2), 0);
then_not_else = FALSE;
}
else if (GET_CODE (XEXP (SET_SRC (body), 1)) == RETURN)
seeking_return = 1;
else if (GET_CODE (XEXP (SET_SRC (body), 2)) == RETURN)
{
seeking_return = 1;
then_not_else = FALSE;
}
else
gcc_unreachable ();
/* Scan forward up to max_insns_skipped insns, checking each can be
   conditionalized; succeed if we reach the target first.  */
for (insns_skipped = 0;
!fail && !succeed && insns_skipped++ < max_insns_skipped;)
{
rtx scanbody;
this_insn = next_nonnote_insn (this_insn);
if (!this_insn)
break;
switch (GET_CODE (this_insn))
{
case CODE_LABEL:
/* Only the target label may appear inside the block.  */
if (this_insn == label)
{
if (jump_clobbers)
{
arm_ccfsm_state = 2;
this_insn = next_nonnote_insn (this_insn);
}
else
arm_ccfsm_state = 1;
succeed = TRUE;
}
else
fail = TRUE;
break;
case BARRIER:
/* Barriers may precede the target label.  */
this_insn = next_nonnote_insn (this_insn);
if (this_insn && this_insn == label)
{
if (jump_clobbers)
{
arm_ccfsm_state = 2;
this_insn = next_nonnote_insn (this_insn);
}
else
arm_ccfsm_state = 1;
succeed = TRUE;
}
else
fail = TRUE;
break;
case CALL_INSN:
/* On v5 a conditional call corrupts interworking state, so
   give up; otherwise allow a call immediately before the
   target.  */
if (arm_arch5)
{
fail = TRUE;
break;
}
this_insn = next_nonnote_insn (this_insn);
if (this_insn && GET_CODE (this_insn) == BARRIER)
this_insn = next_nonnote_insn (this_insn);
if (this_insn && this_insn == label
&& insns_skipped < max_insns_skipped)
{
if (jump_clobbers)
{
arm_ccfsm_state = 2;
this_insn = next_nonnote_insn (this_insn);
}
else
arm_ccfsm_state = 1;
succeed = TRUE;
}
else
fail = TRUE;
break;
case JUMP_INSN:
scanbody = PATTERN (this_insn);
if (GET_CODE (scanbody) == SET
&& GET_CODE (SET_DEST (scanbody)) == PC)
{
/* An unconditional jump straight to the target can be
   absorbed; a further conditional jump cannot.  */
if (GET_CODE (SET_SRC (scanbody)) == LABEL_REF
&& XEXP (SET_SRC (scanbody), 0) == label && !reverse)
{
arm_ccfsm_state = 2;
succeed = TRUE;
}
else if (GET_CODE (SET_SRC (scanbody)) == IF_THEN_ELSE)
fail = TRUE;
}
else if (GET_CODE (scanbody) == RETURN
&& !use_return_insn (TRUE, NULL)
&& !optimize_size)
fail = TRUE;
else if (GET_CODE (scanbody) == RETURN
&& seeking_return)
{
arm_ccfsm_state = 2;
succeed = TRUE;
}
else if (GET_CODE (scanbody) == PARALLEL)
{
switch (get_attr_conds (this_insn))
{
case CONDS_NOCOND:
break;
default:
fail = TRUE;
break;
}
}
else
fail = TRUE;
break;
case INSN:
/* Ordinary insns must not set or use the condition codes
   and (for Maverick) must not be Cirrus coprocessor insns.  */
scanbody = PATTERN (this_insn);
if (!(GET_CODE (scanbody) == SET
|| GET_CODE (scanbody) == PARALLEL)
|| get_attr_conds (this_insn) != CONDS_NOCOND)
fail = TRUE;
if (GET_CODE (scanbody) != USE
&& GET_CODE (scanbody) != CLOBBER
&& get_attr_cirrus (this_insn) != CIRRUS_NOT)
fail = TRUE;
break;
default:
break;
}
}
if (succeed)
{
if ((!seeking_return) && (arm_ccfsm_state == 1 || reverse))
arm_target_label = CODE_LABEL_NUMBER (label);
else
{
gcc_assert (seeking_return || arm_ccfsm_state == 2);
/* Skip trailing USE insns (e.g. of the return value).  */
while (this_insn && GET_CODE (PATTERN (this_insn)) == USE)
{
this_insn = next_nonnote_insn (this_insn);
gcc_assert (!this_insn
|| (GET_CODE (this_insn) != BARRIER
&& GET_CODE (this_insn) != CODE_LABEL));
}
if (!this_insn)
{
/* Nothing to conditionalize after all: re-recognize the
   branch and bail out of the state machine.  */
recog (PATTERN (insn), insn, NULL);
arm_ccfsm_state = 0;
arm_target_insn = NULL;
return;
}
arm_target_insn = this_insn;
}
/* Extract the controlling condition; for clobbering jumps it is
   buried one level deeper and may need double inversion.  */
if (jump_clobbers)
{
gcc_assert (!reverse);
arm_current_cc =
get_arm_condition_code (XEXP (XEXP (XEXP (SET_SRC (body),
0), 0), 1));
if (GET_CODE (XEXP (XEXP (SET_SRC (body), 0), 0)) == AND)
arm_current_cc = ARM_INVERSE_CONDITION_CODE (arm_current_cc);
if (GET_CODE (XEXP (SET_SRC (body), 0)) == NE)
arm_current_cc = ARM_INVERSE_CONDITION_CODE (arm_current_cc);
}
else
{
if (!reverse)
arm_current_cc = get_arm_condition_code (XEXP (SET_SRC (body),
0));
}
if (reverse || then_not_else)
arm_current_cc = ARM_INVERSE_CONDITION_CODE (arm_current_cc);
}
/* Re-recognize the insn: stripping the condition may have changed
   which alternative matches.  */
recog (PATTERN (insn), insn, NULL);
}
#endif
}
/* Implement HARD_REGNO_MODE_OK: nonzero if a value of mode MODE may be
   held in hard register REGNO.  */
int
arm_hard_regno_mode_ok (unsigned int regno, enum machine_mode mode)
{
  /* Condition-code values live only in the CC registers.  */
  if (GET_MODE_CLASS (mode) == MODE_CC)
    {
      if (regno == CC_REGNUM)
	return TRUE;
      return TARGET_HARD_FLOAT && TARGET_VFP && regno == VFPCC_REGNUM;
    }

  /* Thumb: multi-word values must stay in the low registers.  */
  if (TARGET_THUMB)
    return ARM_NUM_REGS (mode) < 2 || regno < LAST_LO_REGNUM;

  /* Maverick (Cirrus) registers hold floats and DImode.  */
  if (TARGET_HARD_FLOAT && TARGET_MAVERICK
      && IS_CIRRUS_REGNUM (regno))
    return GET_MODE_CLASS (mode) == MODE_FLOAT || mode == DImode;

  /* VFP: SFmode/SImode anywhere; DFmode only in even register pairs.  */
  if (TARGET_HARD_FLOAT && TARGET_VFP
      && IS_VFP_REGNUM (regno))
    {
      if (mode == SFmode || mode == SImode)
	return TRUE;
      return mode == DFmode && ((regno - FIRST_VFP_REGNUM) & 1) == 0;
    }

  if (TARGET_REALLY_IWMMXT)
    {
      if (IS_IWMMXT_GR_REGNUM (regno))
	return mode == SImode;
      if (IS_IWMMXT_REGNUM (regno))
	return VALID_IWMMXT_REG_MODE (mode);
    }

  /* Core registers: anything fits, except that ldrd/strd need an even
     base register for values wider than a word.  */
  if (regno <= LAST_ARM_REGNUM)
    {
      if (TARGET_LDRD && GET_MODE_SIZE (mode) > 4 && (regno & 1) != 0)
	return FALSE;
      return TRUE;
    }

  /* The fake frame/arg pointers only hold integers.  */
  if (regno == FRAME_POINTER_REGNUM
      || regno == ARG_POINTER_REGNUM)
    return GET_MODE_CLASS (mode) == MODE_INT;

  /* Everything left is an FPA register: floats only, with hard FPA.  */
  return (TARGET_HARD_FLOAT && TARGET_FPA
	  && GET_MODE_CLASS (mode) == MODE_FLOAT
	  && regno >= FIRST_FPA_REGNUM
	  && regno <= LAST_FPA_REGNUM);
}
/* Implement REGNO_REG_CLASS: the register class that hard register
   REGNO belongs to.  */
int
arm_regno_class (int regno)
{
  if (TARGET_THUMB)
    {
      if (regno == STACK_POINTER_REGNUM)
	return STACK_REG;
      else if (regno == CC_REGNUM)
	return CC_REG;
      else
	return regno < 8 ? LO_REGS : HI_REGS;
    }

  /* Core registers and the fake frame/arg pointers.  */
  if (regno <= LAST_ARM_REGNUM
      || regno == FRAME_POINTER_REGNUM
      || regno == ARG_POINTER_REGNUM)
    return GENERAL_REGS;

  /* Condition-code registers are not allocatable.  */
  if (regno == CC_REGNUM || regno == VFPCC_REGNUM)
    return NO_REGS;

  if (IS_CIRRUS_REGNUM (regno))
    return CIRRUS_REGS;
  if (IS_VFP_REGNUM (regno))
    return VFP_REGS;
  if (IS_IWMMXT_REGNUM (regno))
    return IWMMXT_REGS;
  if (IS_IWMMXT_GR_REGNUM (regno))
    return IWMMXT_GR_REGS;

  return FPA_REGS;
}
/* Return the offset to use when describing the stack argument at ADDR
   to the debugger.  VALUE is the default offset already computed by
   the caller; a nonzero VALUE is returned unchanged.  When the default
   would be wrong (address based on a register other than the frame or
   stack pointer), scan the insn stream for the "reg = hfp + const"
   set-up insn and recover the constant; warn and fall back to 8 when
   no such insn exists.  */
int
arm_debugger_arg_offset (int value, rtx addr)
{
rtx insn;
if (frame_pointer_needed)
{
/* SP-relative addresses get the local debug offset directly.  */
if ((GET_CODE (addr) == REG) && (REGNO (addr) == SP_REGNUM))
return arm_local_debug_offset (addr);
if (GET_CODE (addr) == PLUS
&& GET_CODE (XEXP (addr, 0)) == REG
&& REGNO (XEXP (addr, 0)) == SP_REGNUM)
return arm_local_debug_offset (addr);
}
/* A precomputed offset is trusted as-is.  */
if (value != 0)
return value;
/* Only a plain register base can need fixing up.  */
if (GET_CODE (addr) != REG)
return 0;
if (REGNO (addr) == (unsigned) HARD_FRAME_POINTER_REGNUM)
return 0;
if ((TARGET_THUMB || !frame_pointer_needed)
&& REGNO (addr) == SP_REGNUM)
return 0;
/* Find the insn that materialized ADDR as hfp + constant.
   NOTE(review): the REGNO on the SET destination is read without
   first checking it is a REG — presumably every SET scanned here has
   a register destination; confirm before relying on this.  */
for (insn = get_insns (); insn; insn = NEXT_INSN (insn))
{
if ( GET_CODE (insn) == INSN
&& GET_CODE (PATTERN (insn)) == SET
&& REGNO (XEXP (PATTERN (insn), 0)) == REGNO (addr)
&& GET_CODE (XEXP (PATTERN (insn), 1)) == PLUS
&& GET_CODE (XEXP (XEXP (PATTERN (insn), 1), 0)) == REG
&& REGNO (XEXP (XEXP (PATTERN (insn), 1), 0)) == (unsigned) HARD_FRAME_POINTER_REGNUM
&& GET_CODE (XEXP (XEXP (PATTERN (insn), 1), 1)) == CONST_INT
)
{
value = INTVAL (XEXP (XEXP (PATTERN (insn), 1), 1));
break;
}
}
if (value == 0)
{
debug_rtx (addr);
warning (0, "unable to compute real location of stacked parameter");
/* Fall back to a plausible default.  */
value = 8;
}
return value;
}
/* Register the machine-specific builtin NAME with function type TYPE and
   function code CODE, but only when the feature bit MASK is present in
   the insn_flags of the selected CPU.  */
#define def_mbuiltin(MASK, NAME, TYPE, CODE)			\
  do								\
    {								\
      if ((MASK) & insn_flags)					\
	lang_hooks.builtin_function ((NAME), (TYPE), (CODE),	\
				     BUILT_IN_MD, NULL, NULL_TREE); \
    }								\
  while (0)
/* Description of a machine-specific builtin: which feature flag enables
   it, which insn expands it, and how it is named and numbered.  */
struct builtin_description
{
  const unsigned int     mask;		/* FL_* feature bit required.  */
  const enum insn_code   icode;		/* Insn used to expand the builtin.  */
  const char * const     name;		/* Source-level name, or NULL.  */
  const enum arm_builtins code;		/* ARM_BUILTIN_* function code.  */
  const enum rtx_code    comparison;	/* Unused for iWMMXt builtins.  */
  const unsigned int     flag;		/* Unused for iWMMXt builtins.  */
};
/* Table of every two-argument iWMMXt builtin.  Entries created with
   IWMMXT_BUILTIN2 have a NULL name: they are expanded specially by
   arm_expand_builtin rather than registered by name.  */
static const struct builtin_description bdesc_2arg[] =
{
#define IWMMXT_BUILTIN(code, string, builtin) \
  { FL_IWMMXT, CODE_FOR_##code, "__builtin_arm_" string, \
    ARM_BUILTIN_##builtin, 0, 0 },

  IWMMXT_BUILTIN (addv8qi3, "waddb", WADDB)
  IWMMXT_BUILTIN (addv4hi3, "waddh", WADDH)
  IWMMXT_BUILTIN (addv2si3, "waddw", WADDW)
  IWMMXT_BUILTIN (subv8qi3, "wsubb", WSUBB)
  IWMMXT_BUILTIN (subv4hi3, "wsubh", WSUBH)
  IWMMXT_BUILTIN (subv2si3, "wsubw", WSUBW)
  IWMMXT_BUILTIN (ssaddv8qi3, "waddbss", WADDSSB)
  IWMMXT_BUILTIN (ssaddv4hi3, "waddhss", WADDSSH)
  IWMMXT_BUILTIN (ssaddv2si3, "waddwss", WADDSSW)
  IWMMXT_BUILTIN (sssubv8qi3, "wsubbss", WSUBSSB)
  IWMMXT_BUILTIN (sssubv4hi3, "wsubhss", WSUBSSH)
  IWMMXT_BUILTIN (sssubv2si3, "wsubwss", WSUBSSW)
  IWMMXT_BUILTIN (usaddv8qi3, "waddbus", WADDUSB)
  IWMMXT_BUILTIN (usaddv4hi3, "waddhus", WADDUSH)
  IWMMXT_BUILTIN (usaddv2si3, "waddwus", WADDUSW)
  IWMMXT_BUILTIN (ussubv8qi3, "wsubbus", WSUBUSB)
  IWMMXT_BUILTIN (ussubv4hi3, "wsubhus", WSUBUSH)
  IWMMXT_BUILTIN (ussubv2si3, "wsubwus", WSUBUSW)
  IWMMXT_BUILTIN (mulv4hi3, "wmulul", WMULUL)
  IWMMXT_BUILTIN (smulv4hi3_highpart, "wmulsm", WMULSM)
  IWMMXT_BUILTIN (umulv4hi3_highpart, "wmulum", WMULUM)
  IWMMXT_BUILTIN (eqv8qi3, "wcmpeqb", WCMPEQB)
  IWMMXT_BUILTIN (eqv4hi3, "wcmpeqh", WCMPEQH)
  IWMMXT_BUILTIN (eqv2si3, "wcmpeqw", WCMPEQW)
  IWMMXT_BUILTIN (gtuv8qi3, "wcmpgtub", WCMPGTUB)
  IWMMXT_BUILTIN (gtuv4hi3, "wcmpgtuh", WCMPGTUH)
  IWMMXT_BUILTIN (gtuv2si3, "wcmpgtuw", WCMPGTUW)
  IWMMXT_BUILTIN (gtv8qi3, "wcmpgtsb", WCMPGTSB)
  IWMMXT_BUILTIN (gtv4hi3, "wcmpgtsh", WCMPGTSH)
  IWMMXT_BUILTIN (gtv2si3, "wcmpgtsw", WCMPGTSW)
  IWMMXT_BUILTIN (umaxv8qi3, "wmaxub", WMAXUB)
  IWMMXT_BUILTIN (smaxv8qi3, "wmaxsb", WMAXSB)
  IWMMXT_BUILTIN (umaxv4hi3, "wmaxuh", WMAXUH)
  IWMMXT_BUILTIN (smaxv4hi3, "wmaxsh", WMAXSH)
  IWMMXT_BUILTIN (umaxv2si3, "wmaxuw", WMAXUW)
  IWMMXT_BUILTIN (smaxv2si3, "wmaxsw", WMAXSW)
  IWMMXT_BUILTIN (uminv8qi3, "wminub", WMINUB)
  IWMMXT_BUILTIN (sminv8qi3, "wminsb", WMINSB)
  IWMMXT_BUILTIN (uminv4hi3, "wminuh", WMINUH)
  IWMMXT_BUILTIN (sminv4hi3, "wminsh", WMINSH)
  IWMMXT_BUILTIN (uminv2si3, "wminuw", WMINUW)
  IWMMXT_BUILTIN (sminv2si3, "wminsw", WMINSW)
  IWMMXT_BUILTIN (iwmmxt_anddi3, "wand", WAND)
  IWMMXT_BUILTIN (iwmmxt_nanddi3, "wandn", WANDN)
  IWMMXT_BUILTIN (iwmmxt_iordi3, "wor", WOR)
  IWMMXT_BUILTIN (iwmmxt_xordi3, "wxor", WXOR)
  IWMMXT_BUILTIN (iwmmxt_uavgv8qi3, "wavg2b", WAVG2B)
  IWMMXT_BUILTIN (iwmmxt_uavgv4hi3, "wavg2h", WAVG2H)
  IWMMXT_BUILTIN (iwmmxt_uavgrndv8qi3, "wavg2br", WAVG2BR)
  IWMMXT_BUILTIN (iwmmxt_uavgrndv4hi3, "wavg2hr", WAVG2HR)
  IWMMXT_BUILTIN (iwmmxt_wunpckilb, "wunpckilb", WUNPCKILB)
  IWMMXT_BUILTIN (iwmmxt_wunpckilh, "wunpckilh", WUNPCKILH)
  IWMMXT_BUILTIN (iwmmxt_wunpckilw, "wunpckilw", WUNPCKILW)
  IWMMXT_BUILTIN (iwmmxt_wunpckihb, "wunpckihb", WUNPCKIHB)
  IWMMXT_BUILTIN (iwmmxt_wunpckihh, "wunpckihh", WUNPCKIHH)
  IWMMXT_BUILTIN (iwmmxt_wunpckihw, "wunpckihw", WUNPCKIHW)
  IWMMXT_BUILTIN (iwmmxt_wmadds, "wmadds", WMADDS)
  IWMMXT_BUILTIN (iwmmxt_wmaddu, "wmaddu", WMADDU)

/* Anonymous variant: expanded specially, so no user-visible name.  */
#define IWMMXT_BUILTIN2(code, builtin) \
  { FL_IWMMXT, CODE_FOR_##code, NULL, ARM_BUILTIN_##builtin, 0, 0 },

  IWMMXT_BUILTIN2 (iwmmxt_wpackhss, WPACKHSS)
  IWMMXT_BUILTIN2 (iwmmxt_wpackwss, WPACKWSS)
  IWMMXT_BUILTIN2 (iwmmxt_wpackdss, WPACKDSS)
  IWMMXT_BUILTIN2 (iwmmxt_wpackhus, WPACKHUS)
  IWMMXT_BUILTIN2 (iwmmxt_wpackwus, WPACKWUS)
  IWMMXT_BUILTIN2 (iwmmxt_wpackdus, WPACKDUS)
  IWMMXT_BUILTIN2 (ashlv4hi3_di, WSLLH)
  IWMMXT_BUILTIN2 (ashlv4hi3, WSLLHI)
  IWMMXT_BUILTIN2 (ashlv2si3_di, WSLLW)
  IWMMXT_BUILTIN2 (ashlv2si3, WSLLWI)
  IWMMXT_BUILTIN2 (ashldi3_di, WSLLD)
  IWMMXT_BUILTIN2 (ashldi3_iwmmxt, WSLLDI)
  IWMMXT_BUILTIN2 (lshrv4hi3_di, WSRLH)
  IWMMXT_BUILTIN2 (lshrv4hi3, WSRLHI)
  IWMMXT_BUILTIN2 (lshrv2si3_di, WSRLW)
  IWMMXT_BUILTIN2 (lshrv2si3, WSRLWI)
  IWMMXT_BUILTIN2 (lshrdi3_di, WSRLD)
  IWMMXT_BUILTIN2 (lshrdi3_iwmmxt, WSRLDI)
  IWMMXT_BUILTIN2 (ashrv4hi3_di, WSRAH)
  IWMMXT_BUILTIN2 (ashrv4hi3, WSRAHI)
  IWMMXT_BUILTIN2 (ashrv2si3_di, WSRAW)
  IWMMXT_BUILTIN2 (ashrv2si3, WSRAWI)
  IWMMXT_BUILTIN2 (ashrdi3_di, WSRAD)
  IWMMXT_BUILTIN2 (ashrdi3_iwmmxt, WSRADI)
  IWMMXT_BUILTIN2 (rorv4hi3_di, WRORH)
  IWMMXT_BUILTIN2 (rorv4hi3, WRORHI)
  IWMMXT_BUILTIN2 (rorv2si3_di, WRORW)
  IWMMXT_BUILTIN2 (rorv2si3, WRORWI)
  IWMMXT_BUILTIN2 (rordi3_di, WRORD)
  IWMMXT_BUILTIN2 (rordi3, WRORDI)
  IWMMXT_BUILTIN2 (iwmmxt_wmacuz, WMACUZ)
  IWMMXT_BUILTIN2 (iwmmxt_wmacsz, WMACSZ)
};
/* Table of every one-argument iWMMXt builtin.  */
static const struct builtin_description bdesc_1arg[] =
{
  IWMMXT_BUILTIN (iwmmxt_tmovmskb, "tmovmskb", TMOVMSKB)
  IWMMXT_BUILTIN (iwmmxt_tmovmskh, "tmovmskh", TMOVMSKH)
  IWMMXT_BUILTIN (iwmmxt_tmovmskw, "tmovmskw", TMOVMSKW)
  IWMMXT_BUILTIN (iwmmxt_waccb, "waccb", WACCB)
  IWMMXT_BUILTIN (iwmmxt_wacch, "wacch", WACCH)
  IWMMXT_BUILTIN (iwmmxt_waccw, "waccw", WACCW)
  IWMMXT_BUILTIN (iwmmxt_wunpckehub, "wunpckehub", WUNPCKEHUB)
  IWMMXT_BUILTIN (iwmmxt_wunpckehuh, "wunpckehuh", WUNPCKEHUH)
  IWMMXT_BUILTIN (iwmmxt_wunpckehuw, "wunpckehuw", WUNPCKEHUW)
  IWMMXT_BUILTIN (iwmmxt_wunpckehsb, "wunpckehsb", WUNPCKEHSB)
  IWMMXT_BUILTIN (iwmmxt_wunpckehsh, "wunpckehsh", WUNPCKEHSH)
  IWMMXT_BUILTIN (iwmmxt_wunpckehsw, "wunpckehsw", WUNPCKEHSW)
  IWMMXT_BUILTIN (iwmmxt_wunpckelub, "wunpckelub", WUNPCKELUB)
  IWMMXT_BUILTIN (iwmmxt_wunpckeluh, "wunpckeluh", WUNPCKELUH)
  IWMMXT_BUILTIN (iwmmxt_wunpckeluw, "wunpckeluw", WUNPCKELUW)
  IWMMXT_BUILTIN (iwmmxt_wunpckelsb, "wunpckelsb", WUNPCKELSB)
  IWMMXT_BUILTIN (iwmmxt_wunpckelsh, "wunpckelsh", WUNPCKELSH)
  IWMMXT_BUILTIN (iwmmxt_wunpckelsw, "wunpckelsw", WUNPCKELSW)
};
/* Set up all the iWMMXt builtin functions: first the vector types and
   the function types the builtins use, then the table-driven
   two-argument builtins from bdesc_2arg, and finally the builtins
   whose signatures are too irregular for the table.  */
static void
arm_init_iwmmxt_builtins (void)
{
  const struct builtin_description * d;
  size_t i;
  tree endlink = void_list_node;

  /* Vector types corresponding to the 64-bit iWMMXt registers.  */
  tree V2SI_type_node = build_vector_type_for_mode (intSI_type_node, V2SImode);
  tree V4HI_type_node = build_vector_type_for_mode (intHI_type_node, V4HImode);
  tree V8QI_type_node = build_vector_type_for_mode (intQI_type_node, V8QImode);

  /* Shared function-type nodes, named after their signatures.  */
  tree int_ftype_int
    = build_function_type (integer_type_node,
			   tree_cons (NULL_TREE, integer_type_node, endlink));
  tree v8qi_ftype_v8qi_v8qi_int
    = build_function_type (V8QI_type_node,
			   tree_cons (NULL_TREE, V8QI_type_node,
				      tree_cons (NULL_TREE, V8QI_type_node,
						 tree_cons (NULL_TREE,
							    integer_type_node,
							    endlink))));
  tree v4hi_ftype_v4hi_int
    = build_function_type (V4HI_type_node,
			   tree_cons (NULL_TREE, V4HI_type_node,
				      tree_cons (NULL_TREE, integer_type_node,
						 endlink)));
  tree v2si_ftype_v2si_int
    = build_function_type (V2SI_type_node,
			   tree_cons (NULL_TREE, V2SI_type_node,
				      tree_cons (NULL_TREE, integer_type_node,
						 endlink)));
  tree v2si_ftype_di_di
    = build_function_type (V2SI_type_node,
			   tree_cons (NULL_TREE, long_long_integer_type_node,
				      tree_cons (NULL_TREE, long_long_integer_type_node,
						 endlink)));
  tree di_ftype_di_int
    = build_function_type (long_long_integer_type_node,
			   tree_cons (NULL_TREE, long_long_integer_type_node,
				      tree_cons (NULL_TREE, integer_type_node,
						 endlink)));
  tree di_ftype_di_int_int
    = build_function_type (long_long_integer_type_node,
			   tree_cons (NULL_TREE, long_long_integer_type_node,
				      tree_cons (NULL_TREE, integer_type_node,
						 tree_cons (NULL_TREE,
							    integer_type_node,
							    endlink))));
  tree int_ftype_v8qi
    = build_function_type (integer_type_node,
			   tree_cons (NULL_TREE, V8QI_type_node,
				      endlink));
  tree int_ftype_v4hi
    = build_function_type (integer_type_node,
			   tree_cons (NULL_TREE, V4HI_type_node,
				      endlink));
  tree int_ftype_v2si
    = build_function_type (integer_type_node,
			   tree_cons (NULL_TREE, V2SI_type_node,
				      endlink));
  tree int_ftype_v8qi_int
    = build_function_type (integer_type_node,
			   tree_cons (NULL_TREE, V8QI_type_node,
				      tree_cons (NULL_TREE, integer_type_node,
						 endlink)));
  tree int_ftype_v4hi_int
    = build_function_type (integer_type_node,
			   tree_cons (NULL_TREE, V4HI_type_node,
				      tree_cons (NULL_TREE, integer_type_node,
						 endlink)));
  tree int_ftype_v2si_int
    = build_function_type (integer_type_node,
			   tree_cons (NULL_TREE, V2SI_type_node,
				      tree_cons (NULL_TREE, integer_type_node,
						 endlink)));
  tree v8qi_ftype_v8qi_int_int
    = build_function_type (V8QI_type_node,
			   tree_cons (NULL_TREE, V8QI_type_node,
				      tree_cons (NULL_TREE, integer_type_node,
						 tree_cons (NULL_TREE,
							    integer_type_node,
							    endlink))));
  tree v4hi_ftype_v4hi_int_int
    = build_function_type (V4HI_type_node,
			   tree_cons (NULL_TREE, V4HI_type_node,
				      tree_cons (NULL_TREE, integer_type_node,
						 tree_cons (NULL_TREE,
							    integer_type_node,
							    endlink))));
  tree v2si_ftype_v2si_int_int
    = build_function_type (V2SI_type_node,
			   tree_cons (NULL_TREE, V2SI_type_node,
				      tree_cons (NULL_TREE, integer_type_node,
						 tree_cons (NULL_TREE,
							    integer_type_node,
							    endlink))));
  tree v8qi_ftype_v4hi_v4hi
    = build_function_type (V8QI_type_node,
			   tree_cons (NULL_TREE, V4HI_type_node,
				      tree_cons (NULL_TREE, V4HI_type_node,
						 endlink)));
  tree v4hi_ftype_v2si_v2si
    = build_function_type (V4HI_type_node,
			   tree_cons (NULL_TREE, V2SI_type_node,
				      tree_cons (NULL_TREE, V2SI_type_node,
						 endlink)));
  tree v2si_ftype_v4hi_v4hi
    = build_function_type (V2SI_type_node,
			   tree_cons (NULL_TREE, V4HI_type_node,
				      tree_cons (NULL_TREE, V4HI_type_node,
						 endlink)));
  tree v2si_ftype_v8qi_v8qi
    = build_function_type (V2SI_type_node,
			   tree_cons (NULL_TREE, V8QI_type_node,
				      tree_cons (NULL_TREE, V8QI_type_node,
						 endlink)));
  tree v4hi_ftype_v4hi_di
    = build_function_type (V4HI_type_node,
			   tree_cons (NULL_TREE, V4HI_type_node,
				      tree_cons (NULL_TREE,
						 long_long_integer_type_node,
						 endlink)));
  tree v2si_ftype_v2si_di
    = build_function_type (V2SI_type_node,
			   tree_cons (NULL_TREE, V2SI_type_node,
				      tree_cons (NULL_TREE,
						 long_long_integer_type_node,
						 endlink)));
  tree void_ftype_int_int
    = build_function_type (void_type_node,
			   tree_cons (NULL_TREE, integer_type_node,
				      tree_cons (NULL_TREE, integer_type_node,
						 endlink)));
  tree di_ftype_void
    = build_function_type (long_long_unsigned_type_node, endlink);
  tree di_ftype_v8qi
    = build_function_type (long_long_integer_type_node,
			   tree_cons (NULL_TREE, V8QI_type_node,
				      endlink));
  tree di_ftype_v4hi
    = build_function_type (long_long_integer_type_node,
			   tree_cons (NULL_TREE, V4HI_type_node,
				      endlink));
  tree di_ftype_v2si
    = build_function_type (long_long_integer_type_node,
			   tree_cons (NULL_TREE, V2SI_type_node,
				      endlink));
  tree v2si_ftype_v4hi
    = build_function_type (V2SI_type_node,
			   tree_cons (NULL_TREE, V4HI_type_node,
				      endlink));
  tree v4hi_ftype_v8qi
    = build_function_type (V4HI_type_node,
			   tree_cons (NULL_TREE, V8QI_type_node,
				      endlink));
  tree di_ftype_di_v4hi_v4hi
    = build_function_type (long_long_unsigned_type_node,
			   tree_cons (NULL_TREE,
				      long_long_unsigned_type_node,
				      tree_cons (NULL_TREE, V4HI_type_node,
						 tree_cons (NULL_TREE,
							    V4HI_type_node,
							    endlink))));
  tree di_ftype_v4hi_v4hi
    = build_function_type (long_long_unsigned_type_node,
			   tree_cons (NULL_TREE, V4HI_type_node,
				      tree_cons (NULL_TREE, V4HI_type_node,
						 endlink)));
  tree v8qi_ftype_v8qi_v8qi
    = build_function_type (V8QI_type_node,
			   tree_cons (NULL_TREE, V8QI_type_node,
				      tree_cons (NULL_TREE, V8QI_type_node,
						 endlink)));
  tree v4hi_ftype_v4hi_v4hi
    = build_function_type (V4HI_type_node,
			   tree_cons (NULL_TREE, V4HI_type_node,
				      tree_cons (NULL_TREE, V4HI_type_node,
						 endlink)));
  tree v2si_ftype_v2si_v2si
    = build_function_type (V2SI_type_node,
			   tree_cons (NULL_TREE, V2SI_type_node,
				      tree_cons (NULL_TREE, V2SI_type_node,
						 endlink)));
  tree di_ftype_di_di
    = build_function_type (long_long_unsigned_type_node,
			   tree_cons (NULL_TREE, long_long_unsigned_type_node,
				      tree_cons (NULL_TREE,
						 long_long_unsigned_type_node,
						 endlink)));

  /* Register the table-driven two-argument builtins, choosing the
     function type from the mode of the insn's first input operand.  */
  for (i = 0, d = bdesc_2arg; i < ARRAY_SIZE (bdesc_2arg); i++, d++)
    {
      enum machine_mode mode;
      tree type;

      /* Unnamed entries are expanded specially; skip them here.  */
      if (d->name == 0)
	continue;

      mode = insn_data[d->icode].operand[1].mode;

      switch (mode)
	{
	case V8QImode:
	  type = v8qi_ftype_v8qi_v8qi;
	  break;
	case V4HImode:
	  type = v4hi_ftype_v4hi_v4hi;
	  break;
	case V2SImode:
	  type = v2si_ftype_v2si_v2si;
	  break;
	case DImode:
	  type = di_ftype_di_di;
	  break;

	default:
	  gcc_unreachable ();
	}

      def_mbuiltin (d->mask, d->name, type, d->code);
    }

  /* Builtins whose signatures do not fit the two-argument table.  */
  def_mbuiltin (FL_IWMMXT, "__builtin_arm_wzero", di_ftype_void, ARM_BUILTIN_WZERO);
  def_mbuiltin (FL_IWMMXT, "__builtin_arm_setwcx", void_ftype_int_int, ARM_BUILTIN_SETWCX);
  def_mbuiltin (FL_IWMMXT, "__builtin_arm_getwcx", int_ftype_int, ARM_BUILTIN_GETWCX);

  def_mbuiltin (FL_IWMMXT, "__builtin_arm_wsllh", v4hi_ftype_v4hi_di, ARM_BUILTIN_WSLLH);
  def_mbuiltin (FL_IWMMXT, "__builtin_arm_wsllw", v2si_ftype_v2si_di, ARM_BUILTIN_WSLLW);
  def_mbuiltin (FL_IWMMXT, "__builtin_arm_wslld", di_ftype_di_di, ARM_BUILTIN_WSLLD);
  def_mbuiltin (FL_IWMMXT, "__builtin_arm_wsllhi", v4hi_ftype_v4hi_int, ARM_BUILTIN_WSLLHI);
  def_mbuiltin (FL_IWMMXT, "__builtin_arm_wsllwi", v2si_ftype_v2si_int, ARM_BUILTIN_WSLLWI);
  def_mbuiltin (FL_IWMMXT, "__builtin_arm_wslldi", di_ftype_di_int, ARM_BUILTIN_WSLLDI);

  def_mbuiltin (FL_IWMMXT, "__builtin_arm_wsrlh", v4hi_ftype_v4hi_di, ARM_BUILTIN_WSRLH);
  def_mbuiltin (FL_IWMMXT, "__builtin_arm_wsrlw", v2si_ftype_v2si_di, ARM_BUILTIN_WSRLW);
  def_mbuiltin (FL_IWMMXT, "__builtin_arm_wsrld", di_ftype_di_di, ARM_BUILTIN_WSRLD);
  def_mbuiltin (FL_IWMMXT, "__builtin_arm_wsrlhi", v4hi_ftype_v4hi_int, ARM_BUILTIN_WSRLHI);
  def_mbuiltin (FL_IWMMXT, "__builtin_arm_wsrlwi", v2si_ftype_v2si_int, ARM_BUILTIN_WSRLWI);
  def_mbuiltin (FL_IWMMXT, "__builtin_arm_wsrldi", di_ftype_di_int, ARM_BUILTIN_WSRLDI);

  def_mbuiltin (FL_IWMMXT, "__builtin_arm_wsrah", v4hi_ftype_v4hi_di, ARM_BUILTIN_WSRAH);
  def_mbuiltin (FL_IWMMXT, "__builtin_arm_wsraw", v2si_ftype_v2si_di, ARM_BUILTIN_WSRAW);
  def_mbuiltin (FL_IWMMXT, "__builtin_arm_wsrad", di_ftype_di_di, ARM_BUILTIN_WSRAD);
  def_mbuiltin (FL_IWMMXT, "__builtin_arm_wsrahi", v4hi_ftype_v4hi_int, ARM_BUILTIN_WSRAHI);
  def_mbuiltin (FL_IWMMXT, "__builtin_arm_wsrawi", v2si_ftype_v2si_int, ARM_BUILTIN_WSRAWI);
  def_mbuiltin (FL_IWMMXT, "__builtin_arm_wsradi", di_ftype_di_int, ARM_BUILTIN_WSRADI);

  def_mbuiltin (FL_IWMMXT, "__builtin_arm_wrorh", v4hi_ftype_v4hi_di, ARM_BUILTIN_WRORH);
  def_mbuiltin (FL_IWMMXT, "__builtin_arm_wrorw", v2si_ftype_v2si_di, ARM_BUILTIN_WRORW);
  def_mbuiltin (FL_IWMMXT, "__builtin_arm_wrord", di_ftype_di_di, ARM_BUILTIN_WRORD);
  def_mbuiltin (FL_IWMMXT, "__builtin_arm_wrorhi", v4hi_ftype_v4hi_int, ARM_BUILTIN_WRORHI);
  def_mbuiltin (FL_IWMMXT, "__builtin_arm_wrorwi", v2si_ftype_v2si_int, ARM_BUILTIN_WRORWI);
  def_mbuiltin (FL_IWMMXT, "__builtin_arm_wrordi", di_ftype_di_int, ARM_BUILTIN_WRORDI);

  def_mbuiltin (FL_IWMMXT, "__builtin_arm_wshufh", v4hi_ftype_v4hi_int, ARM_BUILTIN_WSHUFH);

  def_mbuiltin (FL_IWMMXT, "__builtin_arm_wsadb", v2si_ftype_v8qi_v8qi, ARM_BUILTIN_WSADB);
  def_mbuiltin (FL_IWMMXT, "__builtin_arm_wsadh", v2si_ftype_v4hi_v4hi, ARM_BUILTIN_WSADH);
  def_mbuiltin (FL_IWMMXT, "__builtin_arm_wsadbz", v2si_ftype_v8qi_v8qi, ARM_BUILTIN_WSADBZ);
  def_mbuiltin (FL_IWMMXT, "__builtin_arm_wsadhz", v2si_ftype_v4hi_v4hi, ARM_BUILTIN_WSADHZ);

  def_mbuiltin (FL_IWMMXT, "__builtin_arm_textrmsb", int_ftype_v8qi_int, ARM_BUILTIN_TEXTRMSB);
  def_mbuiltin (FL_IWMMXT, "__builtin_arm_textrmsh", int_ftype_v4hi_int, ARM_BUILTIN_TEXTRMSH);
  def_mbuiltin (FL_IWMMXT, "__builtin_arm_textrmsw", int_ftype_v2si_int, ARM_BUILTIN_TEXTRMSW);
  def_mbuiltin (FL_IWMMXT, "__builtin_arm_textrmub", int_ftype_v8qi_int, ARM_BUILTIN_TEXTRMUB);
  def_mbuiltin (FL_IWMMXT, "__builtin_arm_textrmuh", int_ftype_v4hi_int, ARM_BUILTIN_TEXTRMUH);
  def_mbuiltin (FL_IWMMXT, "__builtin_arm_textrmuw", int_ftype_v2si_int, ARM_BUILTIN_TEXTRMUW);

  def_mbuiltin (FL_IWMMXT, "__builtin_arm_tinsrb", v8qi_ftype_v8qi_int_int, ARM_BUILTIN_TINSRB);
  def_mbuiltin (FL_IWMMXT, "__builtin_arm_tinsrh", v4hi_ftype_v4hi_int_int, ARM_BUILTIN_TINSRH);
  def_mbuiltin (FL_IWMMXT, "__builtin_arm_tinsrw", v2si_ftype_v2si_int_int, ARM_BUILTIN_TINSRW);

  def_mbuiltin (FL_IWMMXT, "__builtin_arm_waccb", di_ftype_v8qi, ARM_BUILTIN_WACCB);
  def_mbuiltin (FL_IWMMXT, "__builtin_arm_wacch", di_ftype_v4hi, ARM_BUILTIN_WACCH);
  def_mbuiltin (FL_IWMMXT, "__builtin_arm_waccw", di_ftype_v2si, ARM_BUILTIN_WACCW);

  def_mbuiltin (FL_IWMMXT, "__builtin_arm_tmovmskb", int_ftype_v8qi, ARM_BUILTIN_TMOVMSKB);
  def_mbuiltin (FL_IWMMXT, "__builtin_arm_tmovmskh", int_ftype_v4hi, ARM_BUILTIN_TMOVMSKH);
  def_mbuiltin (FL_IWMMXT, "__builtin_arm_tmovmskw", int_ftype_v2si, ARM_BUILTIN_TMOVMSKW);

  def_mbuiltin (FL_IWMMXT, "__builtin_arm_wpackhss", v8qi_ftype_v4hi_v4hi, ARM_BUILTIN_WPACKHSS);
  def_mbuiltin (FL_IWMMXT, "__builtin_arm_wpackhus", v8qi_ftype_v4hi_v4hi, ARM_BUILTIN_WPACKHUS);
  def_mbuiltin (FL_IWMMXT, "__builtin_arm_wpackwus", v4hi_ftype_v2si_v2si, ARM_BUILTIN_WPACKWUS);
  def_mbuiltin (FL_IWMMXT, "__builtin_arm_wpackwss", v4hi_ftype_v2si_v2si, ARM_BUILTIN_WPACKWSS);
  def_mbuiltin (FL_IWMMXT, "__builtin_arm_wpackdus", v2si_ftype_di_di, ARM_BUILTIN_WPACKDUS);
  def_mbuiltin (FL_IWMMXT, "__builtin_arm_wpackdss", v2si_ftype_di_di, ARM_BUILTIN_WPACKDSS);

  def_mbuiltin (FL_IWMMXT, "__builtin_arm_wunpckehub", v4hi_ftype_v8qi, ARM_BUILTIN_WUNPCKEHUB);
  def_mbuiltin (FL_IWMMXT, "__builtin_arm_wunpckehuh", v2si_ftype_v4hi, ARM_BUILTIN_WUNPCKEHUH);
  def_mbuiltin (FL_IWMMXT, "__builtin_arm_wunpckehuw", di_ftype_v2si, ARM_BUILTIN_WUNPCKEHUW);
  def_mbuiltin (FL_IWMMXT, "__builtin_arm_wunpckehsb", v4hi_ftype_v8qi, ARM_BUILTIN_WUNPCKEHSB);
  def_mbuiltin (FL_IWMMXT, "__builtin_arm_wunpckehsh", v2si_ftype_v4hi, ARM_BUILTIN_WUNPCKEHSH);
  def_mbuiltin (FL_IWMMXT, "__builtin_arm_wunpckehsw", di_ftype_v2si, ARM_BUILTIN_WUNPCKEHSW);
  def_mbuiltin (FL_IWMMXT, "__builtin_arm_wunpckelub", v4hi_ftype_v8qi, ARM_BUILTIN_WUNPCKELUB);
  def_mbuiltin (FL_IWMMXT, "__builtin_arm_wunpckeluh", v2si_ftype_v4hi, ARM_BUILTIN_WUNPCKELUH);
  def_mbuiltin (FL_IWMMXT, "__builtin_arm_wunpckeluw", di_ftype_v2si, ARM_BUILTIN_WUNPCKELUW);
  def_mbuiltin (FL_IWMMXT, "__builtin_arm_wunpckelsb", v4hi_ftype_v8qi, ARM_BUILTIN_WUNPCKELSB);
  def_mbuiltin (FL_IWMMXT, "__builtin_arm_wunpckelsh", v2si_ftype_v4hi, ARM_BUILTIN_WUNPCKELSH);
  def_mbuiltin (FL_IWMMXT, "__builtin_arm_wunpckelsw", di_ftype_v2si, ARM_BUILTIN_WUNPCKELSW);

  def_mbuiltin (FL_IWMMXT, "__builtin_arm_wmacs", di_ftype_di_v4hi_v4hi, ARM_BUILTIN_WMACS);
  def_mbuiltin (FL_IWMMXT, "__builtin_arm_wmacsz", di_ftype_v4hi_v4hi, ARM_BUILTIN_WMACSZ);
  def_mbuiltin (FL_IWMMXT, "__builtin_arm_wmacu", di_ftype_di_v4hi_v4hi, ARM_BUILTIN_WMACU);
  def_mbuiltin (FL_IWMMXT, "__builtin_arm_wmacuz", di_ftype_v4hi_v4hi, ARM_BUILTIN_WMACUZ);

  def_mbuiltin (FL_IWMMXT, "__builtin_arm_walign", v8qi_ftype_v8qi_v8qi_int, ARM_BUILTIN_WALIGN);

  def_mbuiltin (FL_IWMMXT, "__builtin_arm_tmia", di_ftype_di_int_int, ARM_BUILTIN_TMIA);
  def_mbuiltin (FL_IWMMXT, "__builtin_arm_tmiaph", di_ftype_di_int_int, ARM_BUILTIN_TMIAPH);
  def_mbuiltin (FL_IWMMXT, "__builtin_arm_tmiabb", di_ftype_di_int_int, ARM_BUILTIN_TMIABB);
  def_mbuiltin (FL_IWMMXT, "__builtin_arm_tmiabt", di_ftype_di_int_int, ARM_BUILTIN_TMIABT);
  def_mbuiltin (FL_IWMMXT, "__builtin_arm_tmiatb", di_ftype_di_int_int, ARM_BUILTIN_TMIATB);
  def_mbuiltin (FL_IWMMXT, "__builtin_arm_tmiatt", di_ftype_di_int_int, ARM_BUILTIN_TMIATT);
}
/* Register the __builtin_thread_pointer builtin, which returns the TLS
   thread pointer.  The builtin is given the "const" and "nothrow"
   attributes so calls to it can be CSEd.  */
static void
arm_init_tls_builtins (void)
{
  tree ftype;
  tree nothrow = tree_cons (get_identifier ("nothrow"), NULL, NULL);
  tree const_nothrow = tree_cons (get_identifier ("const"), NULL, nothrow);

  ftype = build_function_type (ptr_type_node, void_list_node);
  lang_hooks.builtin_function ("__builtin_thread_pointer", ftype,
			       ARM_BUILTIN_THREAD_POINTER, BUILT_IN_MD,
			       NULL, const_nothrow);
}
/* Register all ARM machine-specific builtin functions.  */
static void
arm_init_builtins (void)
{
  /* The TLS thread-pointer builtin is always available.  */
  arm_init_tls_builtins ();

  /* iWMMXt builtins only exist when an iWMMXt unit is actually present.  */
  if (TARGET_REALLY_IWMMXT)
    arm_init_iwmmxt_builtins ();

#ifdef SUBTARGET_INIT_BUILTINS
  SUBTARGET_INIT_BUILTINS;
#endif
}
/* Make sure a vector builtin operand is usable: any operand other than
   the canonical zero constant passes through unchanged; const0_rtx is
   replaced by a fresh register of MODE cleared with the iWMMXt clear
   insn (through a DImode subreg when MODE is not DImode).  */
static rtx
safe_vector_operand (rtx x, enum machine_mode mode)
{
  rtx reg;

  if (x != const0_rtx)
    return x;

  reg = gen_reg_rtx (mode);
  emit_insn (gen_iwmmxt_clrdi (mode == DImode
			       ? reg
			       : gen_rtx_SUBREG (DImode, reg, 0)));
  return reg;
}
/* Expand a two-operand iWMMXt builtin by generating insn ICODE applied
   to the operands in ARGLIST.  The result goes into TARGET when it is
   suitable, otherwise into a fresh pseudo.  Returns the result rtx, or
   0 when the insn pattern could not be generated.  */
static rtx
arm_expand_binop_builtin (enum insn_code icode,
			  tree arglist, rtx target)
{
  tree arg0 = TREE_VALUE (arglist);
  tree arg1 = TREE_VALUE (TREE_CHAIN (arglist));
  rtx xop0 = expand_normal (arg0);
  rtx xop1 = expand_normal (arg1);
  enum machine_mode tmode = insn_data[icode].operand[0].mode;
  enum machine_mode mode0 = insn_data[icode].operand[1].mode;
  enum machine_mode mode1 = insn_data[icode].operand[2].mode;
  rtx pat;

  /* Constant-zero vector operands must be forced into registers.  */
  if (VECTOR_MODE_P (mode0))
    xop0 = safe_vector_operand (xop0, mode0);
  if (VECTOR_MODE_P (mode1))
    xop1 = safe_vector_operand (xop1, mode1);

  /* Find somewhere acceptable to put the result.  */
  if (target == 0
      || GET_MODE (target) != tmode
      || !(*insn_data[icode].operand[0].predicate) (target, tmode))
    target = gen_reg_rtx (tmode);

  gcc_assert (GET_MODE (xop0) == mode0 && GET_MODE (xop1) == mode1);

  /* Copy operands that fail the insn's predicates into registers.  */
  if (!(*insn_data[icode].operand[1].predicate) (xop0, mode0))
    xop0 = copy_to_mode_reg (mode0, xop0);
  if (!(*insn_data[icode].operand[2].predicate) (xop1, mode1))
    xop1 = copy_to_mode_reg (mode1, xop1);

  pat = GEN_FCN (icode) (target, xop0, xop1);
  if (pat == 0)
    return 0;

  emit_insn (pat);
  return target;
}
/* Expand a one-operand iWMMXt builtin by generating insn ICODE applied
   to the operand in ARGLIST.  When DO_LOAD is nonzero the argument is
   treated as a pointer and the operand becomes a MEM for what it points
   to.  Returns the result in TARGET or a fresh pseudo, or 0 when the
   insn pattern could not be generated.  */
static rtx
arm_expand_unop_builtin (enum insn_code icode,
			 tree arglist, rtx target, int do_load)
{
  tree arg0 = TREE_VALUE (arglist);
  rtx xop0 = expand_normal (arg0);
  enum machine_mode tmode = insn_data[icode].operand[0].mode;
  enum machine_mode mode0 = insn_data[icode].operand[1].mode;
  rtx pat;

  /* Find somewhere acceptable to put the result.  */
  if (target == 0
      || GET_MODE (target) != tmode
      || !(*insn_data[icode].operand[0].predicate) (target, tmode))
    target = gen_reg_rtx (tmode);

  if (do_load)
    xop0 = gen_rtx_MEM (mode0, copy_to_mode_reg (Pmode, xop0));
  else
    {
      /* Constant-zero vector operands must be forced into registers.  */
      if (VECTOR_MODE_P (mode0))
	xop0 = safe_vector_operand (xop0, mode0);

      if (!(*insn_data[icode].operand[1].predicate) (xop0, mode0))
	xop0 = copy_to_mode_reg (mode0, xop0);
    }

  pat = GEN_FCN (icode) (target, xop0);
  if (pat == 0)
    return 0;

  emit_insn (pat);
  return target;
}
/* Expand an expression EXP that calls an ARM machine-specific builtin,
   placing the result (if any) in TARGET when it is suitable, otherwise
   in a fresh register.  SUBTARGET, MODE and IGNORE are unused.  Returns
   the rtx holding the result, 0 when the insn pattern could not be
   generated, or NULL_RTX for a function code this target does not
   handle.  */
static rtx
arm_expand_builtin (tree exp,
		    rtx target,
		    rtx subtarget ATTRIBUTE_UNUSED,
		    enum machine_mode mode ATTRIBUTE_UNUSED,
		    int ignore ATTRIBUTE_UNUSED)
{
  const struct builtin_description * d;
  enum insn_code icode;
  tree fndecl = TREE_OPERAND (TREE_OPERAND (exp, 0), 0);
  tree arglist = TREE_OPERAND (exp, 1);
  tree arg0;
  tree arg1;
  tree arg2;
  rtx op0;
  rtx op1;
  rtx op2;
  rtx pat;
  int fcode = DECL_FUNCTION_CODE (fndecl);
  size_t i;
  enum machine_mode tmode;
  enum machine_mode mode0;
  enum machine_mode mode1;
  enum machine_mode mode2;

  switch (fcode)
    {
    /* Element extraction: the second operand selects an element and
       must be an immediate.  */
    case ARM_BUILTIN_TEXTRMSB:
    case ARM_BUILTIN_TEXTRMUB:
    case ARM_BUILTIN_TEXTRMSH:
    case ARM_BUILTIN_TEXTRMUH:
    case ARM_BUILTIN_TEXTRMSW:
    case ARM_BUILTIN_TEXTRMUW:
      icode = (fcode == ARM_BUILTIN_TEXTRMSB ? CODE_FOR_iwmmxt_textrmsb
	       : fcode == ARM_BUILTIN_TEXTRMUB ? CODE_FOR_iwmmxt_textrmub
	       : fcode == ARM_BUILTIN_TEXTRMSH ? CODE_FOR_iwmmxt_textrmsh
	       : fcode == ARM_BUILTIN_TEXTRMUH ? CODE_FOR_iwmmxt_textrmuh
	       : CODE_FOR_iwmmxt_textrmw);

      arg0 = TREE_VALUE (arglist);
      arg1 = TREE_VALUE (TREE_CHAIN (arglist));
      op0 = expand_normal (arg0);
      op1 = expand_normal (arg1);
      tmode = insn_data[icode].operand[0].mode;
      mode0 = insn_data[icode].operand[1].mode;
      mode1 = insn_data[icode].operand[2].mode;

      if (! (*insn_data[icode].operand[1].predicate) (op0, mode0))
	op0 = copy_to_mode_reg (mode0, op0);
      if (! (*insn_data[icode].operand[2].predicate) (op1, mode1))
	{
	  /* @@@ better error message */
	  error ("selector must be an immediate");
	  return gen_reg_rtx (tmode);
	}
      if (target == 0
	  || GET_MODE (target) != tmode
	  || ! (*insn_data[icode].operand[0].predicate) (target, tmode))
	target = gen_reg_rtx (tmode);
      pat = GEN_FCN (icode) (target, op0, op1);
      if (! pat)
	return 0;
      emit_insn (pat);
      return target;

    /* Element insertion: the third operand selects the element and
       must be an immediate.  */
    case ARM_BUILTIN_TINSRB:
    case ARM_BUILTIN_TINSRH:
    case ARM_BUILTIN_TINSRW:
      icode = (fcode == ARM_BUILTIN_TINSRB ? CODE_FOR_iwmmxt_tinsrb
	       : fcode == ARM_BUILTIN_TINSRH ? CODE_FOR_iwmmxt_tinsrh
	       : CODE_FOR_iwmmxt_tinsrw);
      arg0 = TREE_VALUE (arglist);
      arg1 = TREE_VALUE (TREE_CHAIN (arglist));
      arg2 = TREE_VALUE (TREE_CHAIN (TREE_CHAIN (arglist)));
      op0 = expand_normal (arg0);
      op1 = expand_normal (arg1);
      op2 = expand_normal (arg2);
      tmode = insn_data[icode].operand[0].mode;
      mode0 = insn_data[icode].operand[1].mode;
      mode1 = insn_data[icode].operand[2].mode;
      mode2 = insn_data[icode].operand[3].mode;

      if (! (*insn_data[icode].operand[1].predicate) (op0, mode0))
	op0 = copy_to_mode_reg (mode0, op0);
      if (! (*insn_data[icode].operand[2].predicate) (op1, mode1))
	op1 = copy_to_mode_reg (mode1, op1);
      if (! (*insn_data[icode].operand[3].predicate) (op2, mode2))
	{
	  /* @@@ better error message */
	  error ("selector must be an immediate");
	  return const0_rtx;
	}
      if (target == 0
	  || GET_MODE (target) != tmode
	  || ! (*insn_data[icode].operand[0].predicate) (target, tmode))
	target = gen_reg_rtx (tmode);
      pat = GEN_FCN (icode) (target, op0, op1, op2);
      if (! pat)
	return 0;
      emit_insn (pat);
      return target;

    /* Write to an iWMMXt control register; note the tmcr pattern takes
       the register-number operand first.  */
    case ARM_BUILTIN_SETWCX:
      arg0 = TREE_VALUE (arglist);
      arg1 = TREE_VALUE (TREE_CHAIN (arglist));
      op0 = force_reg (SImode, expand_normal (arg0));
      op1 = expand_normal (arg1);
      emit_insn (gen_iwmmxt_tmcr (op1, op0));
      return 0;

    /* Read an iWMMXt control register.  */
    case ARM_BUILTIN_GETWCX:
      arg0 = TREE_VALUE (arglist);
      op0 = expand_normal (arg0);
      target = gen_reg_rtx (SImode);
      emit_insn (gen_iwmmxt_tmrc (target, op0));
      return target;

    /* Half-word shuffle; the mask operand must be an immediate.  */
    case ARM_BUILTIN_WSHUFH:
      icode = CODE_FOR_iwmmxt_wshufh;
      arg0 = TREE_VALUE (arglist);
      arg1 = TREE_VALUE (TREE_CHAIN (arglist));
      op0 = expand_normal (arg0);
      op1 = expand_normal (arg1);
      tmode = insn_data[icode].operand[0].mode;
      mode1 = insn_data[icode].operand[1].mode;
      mode2 = insn_data[icode].operand[2].mode;

      if (! (*insn_data[icode].operand[1].predicate) (op0, mode1))
	op0 = copy_to_mode_reg (mode1, op0);
      if (! (*insn_data[icode].operand[2].predicate) (op1, mode2))
	{
	  /* @@@ better error message */
	  error ("mask must be an immediate");
	  return const0_rtx;
	}
      if (target == 0
	  || GET_MODE (target) != tmode
	  || ! (*insn_data[icode].operand[0].predicate) (target, tmode))
	target = gen_reg_rtx (tmode);
      pat = GEN_FCN (icode) (target, op0, op1);
      if (! pat)
	return 0;
      emit_insn (pat);
      return target;

    /* Sum-of-absolute-differences variants are plain binops.  */
    case ARM_BUILTIN_WSADB:
      return arm_expand_binop_builtin (CODE_FOR_iwmmxt_wsadb, arglist, target);
    case ARM_BUILTIN_WSADH:
      return arm_expand_binop_builtin (CODE_FOR_iwmmxt_wsadh, arglist, target);
    case ARM_BUILTIN_WSADBZ:
      return arm_expand_binop_builtin (CODE_FOR_iwmmxt_wsadbz, arglist, target);
    case ARM_BUILTIN_WSADHZ:
      return arm_expand_binop_builtin (CODE_FOR_iwmmxt_wsadhz, arglist, target);

    /* Three-operand multiply-accumulate and alignment builtins.  */
    case ARM_BUILTIN_WMACS:
    case ARM_BUILTIN_WMACU:
    case ARM_BUILTIN_WALIGN:
    case ARM_BUILTIN_TMIA:
    case ARM_BUILTIN_TMIAPH:
    case ARM_BUILTIN_TMIATT:
    case ARM_BUILTIN_TMIATB:
    case ARM_BUILTIN_TMIABT:
    case ARM_BUILTIN_TMIABB:
      icode = (fcode == ARM_BUILTIN_WMACS ? CODE_FOR_iwmmxt_wmacs
	       : fcode == ARM_BUILTIN_WMACU ? CODE_FOR_iwmmxt_wmacu
	       : fcode == ARM_BUILTIN_TMIA ? CODE_FOR_iwmmxt_tmia
	       : fcode == ARM_BUILTIN_TMIAPH ? CODE_FOR_iwmmxt_tmiaph
	       : fcode == ARM_BUILTIN_TMIABB ? CODE_FOR_iwmmxt_tmiabb
	       : fcode == ARM_BUILTIN_TMIABT ? CODE_FOR_iwmmxt_tmiabt
	       : fcode == ARM_BUILTIN_TMIATB ? CODE_FOR_iwmmxt_tmiatb
	       : fcode == ARM_BUILTIN_TMIATT ? CODE_FOR_iwmmxt_tmiatt
	       : CODE_FOR_iwmmxt_walign);
      arg0 = TREE_VALUE (arglist);
      arg1 = TREE_VALUE (TREE_CHAIN (arglist));
      arg2 = TREE_VALUE (TREE_CHAIN (TREE_CHAIN (arglist)));
      op0 = expand_normal (arg0);
      op1 = expand_normal (arg1);
      op2 = expand_normal (arg2);
      tmode = insn_data[icode].operand[0].mode;
      mode0 = insn_data[icode].operand[1].mode;
      mode1 = insn_data[icode].operand[2].mode;
      mode2 = insn_data[icode].operand[3].mode;

      if (! (*insn_data[icode].operand[1].predicate) (op0, mode0))
	op0 = copy_to_mode_reg (mode0, op0);
      if (! (*insn_data[icode].operand[2].predicate) (op1, mode1))
	op1 = copy_to_mode_reg (mode1, op1);
      if (! (*insn_data[icode].operand[3].predicate) (op2, mode2))
	op2 = copy_to_mode_reg (mode2, op2);
      if (target == 0
	  || GET_MODE (target) != tmode
	  || ! (*insn_data[icode].operand[0].predicate) (target, tmode))
	target = gen_reg_rtx (tmode);
      pat = GEN_FCN (icode) (target, op0, op1, op2);
      if (! pat)
	return 0;
      emit_insn (pat);
      return target;

    /* Produce a zeroed 64-bit register.  */
    case ARM_BUILTIN_WZERO:
      target = gen_reg_rtx (DImode);
      emit_insn (gen_iwmmxt_clrdi (target));
      return target;

    case ARM_BUILTIN_THREAD_POINTER:
      return arm_load_tp (target);

    default:
      break;
    }

  /* Anything else should be a table-driven one- or two-operand builtin.  */
  for (i = 0, d = bdesc_2arg; i < ARRAY_SIZE (bdesc_2arg); i++, d++)
    if (d->code == (const enum arm_builtins) fcode)
      return arm_expand_binop_builtin (d->icode, arglist, target);

  for (i = 0, d = bdesc_1arg; i < ARRAY_SIZE (bdesc_1arg); i++, d++)
    if (d->code == (const enum arm_builtins) fcode)
      return arm_expand_unop_builtin (d->icode, arglist, target, 0);

  /* @@@ Should really do something sensible here.  */
  return NULL_RTX;
}
/* Return the index (0-31) of the lowest set bit in MASK.  MASK must be
   nonzero.  The mask itself is shifted down rather than building a
   "1 << bit" probe: the original probe left-shifted a signed 1, which
   is undefined behavior once bit reaches 31.  */
inline static int
number_of_first_bit_set (unsigned mask)
{
  int bit = 0;

  while ((mask & 1u) == 0)
    {
      mask >>= 1;
      bit++;
    }

  return bit;
}
/* Emit a Thumb push (PUSH nonzero) or pop (PUSH zero) of the registers
   in MASK to stream F, or merely size the code when EMIT is false.
   REAL_REGS lists the registers as they should appear in unwind/debug
   info (they may differ from MASK when values are shuffled through
   scratch registers).  *CFA_OFFSET is updated with the change to the
   canonical frame address when frame info is emitted.  Returns the
   size in bytes of the code emitted (or that would be emitted).  */
static int
handle_thumb_pushpop (FILE *f, unsigned long mask, int push, int *cfa_offset,
		      unsigned long real_regs, bool emit)
{
  int regno;
  int lo_mask = mask & 0xFF;
  int pushed_words = 0;
  int bytes = 0;

  gcc_assert (mask);

  if (lo_mask == 0 && !push && (mask & (1 << PC_REGNUM)))
    {
      /* Special case: popping only the PC.  Defer to the exit
	 sequence rather than emitting "pop {pc}" here.  */
      return handle_thumb_exit (f, -1, emit);
    }

  if (ARM_EABI_UNWIND_TABLES && push && emit)
    {
      /* Describe the saved registers for the EABI unwinder.  */
      fprintf (f, "\t.save\t{");
      for (regno = 0; regno < 15; regno++)
	{
	  if (real_regs & (1 << regno))
	    {
	      /* Any lower register already listed needs a separator.  */
	      if (real_regs & ((1 << regno) -1))
		fprintf (f, ", ");
	      asm_fprintf (f, "%r", regno);
	    }
	}
      fprintf (f, "}\n");
    }

  /* The push/pop insn itself is 2 bytes.  */
  bytes += 2;
  if (emit)
    fprintf (f, "\t%s\t{", push ? "push" : "pop");

  /* Emit the low registers first.  */
  for (regno = 0; regno <= LAST_LO_REGNUM; regno++, lo_mask >>= 1)
    {
      if (lo_mask & 1)
	{
	  if (emit)
	    {
	      asm_fprintf (f, "%r", regno);
	      /* More low registers to come?  Then a separator.  */
	      if ((lo_mask & ~1) != 0)
		fprintf (f, ", ");
	    }
	  pushed_words++;
	}
    }

  if (push && (mask & (1 << LR_REGNUM)))
    {
      /* Catch pushing the LR.  */
      if (emit)
	{
	  if (mask & 0xFF)
	    fprintf (f, ", ");
	  asm_fprintf (f, "%r", LR_REGNUM);
	}
      pushed_words++;
    }
  else if (!push && (mask & (1 << PC_REGNUM)))
    {
      /* Catch popping the PC.  */
      if ((TARGET_INTERWORK && !arm_arch5)
	  || TARGET_BACKTRACE
	  || current_function_calls_eh_return)
	{
	  /* The PC cannot simply be popped here; close the list and
	     let the exit sequence pop it into a scratch and BX.  */
	  if (emit)
	    fprintf (f, "}\n");
	  bytes += handle_thumb_exit (f, -1, emit);
	  return bytes;
	}
      else if (emit)
	{
	  if (mask & 0xFF)
	    fprintf (f, ", ");
	  asm_fprintf (f, "%r", PC_REGNUM);
	}
    }

  if (emit)
    fprintf (f, "}\n");

  if (emit && push && pushed_words && dwarf2out_do_frame ())
    {
      /* Record the CFA adjustment and each register save for dwarf2.  */
      char *l = dwarf2out_cfi_label ();
      int pushed_mask = real_regs;

      *cfa_offset += pushed_words * 4;
      dwarf2out_def_cfa (l, SP_REGNUM, *cfa_offset);

      pushed_words = 0;
      pushed_mask = real_regs;
      for (regno = 0; regno <= 14; regno++, pushed_mask >>= 1)
	{
	  if (pushed_mask & 1)
	    dwarf2out_reg_save (l, regno, 4 * pushed_words++ - *cfa_offset);
	}
    }

  return bytes;
}
static int
handle_thumb_exit (FILE *f, int reg_containing_return_addr, bool emit)
{
unsigned regs_available_for_popping;
unsigned regs_to_pop;
int pops_needed;
unsigned available;
unsigned required;
int mode;
int size;
int restore_a4 = FALSE;
int bytes = 0;
regs_to_pop = 0;
pops_needed = 0;
if (reg_containing_return_addr == -1)
{
regs_to_pop |= 1 << LR_REGNUM;
++pops_needed;
}
if (TARGET_BACKTRACE)
{
regs_to_pop |= (1 << ARM_HARD_FRAME_POINTER_REGNUM) | (1 << SP_REGNUM);
pops_needed += 2;
}
if (pops_needed == 0)
{
if (current_function_calls_eh_return)
{
bytes += 2;
if (emit)
asm_fprintf (f, "\tadd\t%r, %r\n", SP_REGNUM, ARM_EH_STACKADJ_REGNUM);
}
bytes += 2;
if (emit)
asm_fprintf (f, "\tbx\t%r\n", reg_containing_return_addr);
return bytes;
}
else if ((!TARGET_INTERWORK || arm_arch5)
&& !TARGET_BACKTRACE
&& !is_called_in_ARM_mode (current_function_decl)
&& !current_function_calls_eh_return)
{
bytes += 2;
if (emit)
asm_fprintf (f, "\tpop\t{%r}\n", PC_REGNUM);
return bytes;
}
regs_available_for_popping = 0;
if (current_function_calls_eh_return)
size = 12;
else
{
if (current_function_return_rtx != 0)
mode = GET_MODE (current_function_return_rtx);
else
mode = DECL_MODE (DECL_RESULT (current_function_decl));
size = GET_MODE_SIZE (mode);
if (size == 0)
{
if (mode == VOIDmode)
regs_available_for_popping =
(1 << ARG_REGISTER (1))
| (1 << ARG_REGISTER (2))
| (1 << ARG_REGISTER (3));
else
regs_available_for_popping =
(1 << ARG_REGISTER (2))
| (1 << ARG_REGISTER (3));
}
else if (size <= 4)
regs_available_for_popping =
(1 << ARG_REGISTER (2))
| (1 << ARG_REGISTER (3));
else if (size <= 8)
regs_available_for_popping =
(1 << ARG_REGISTER (3));
}
for (available = regs_available_for_popping,
required = regs_to_pop;
required != 0 && available != 0;
available &= ~(available & - available),
required &= ~(required & - required))
-- pops_needed;
if (available > 0)
regs_available_for_popping &= ~available;
else if (pops_needed)
{
if (regs_available_for_popping == 0
&& reg_containing_return_addr == LAST_ARG_REGNUM)
{
bytes += 2;
if (emit)
asm_fprintf (f, "\tmov\t%r, %r\n", LR_REGNUM, LAST_ARG_REGNUM);
reg_containing_return_addr = LR_REGNUM;
}
else if (size > 12)
{
restore_a4 = TRUE;
bytes += 2;
if (emit)
asm_fprintf (f, "\tmov\t%r, %r\n",IP_REGNUM, LAST_ARG_REGNUM);
}
if (reg_containing_return_addr != LAST_ARG_REGNUM)
{
regs_available_for_popping |= 1 << LAST_ARG_REGNUM;
--pops_needed;
}
}
bytes += handle_thumb_pushpop (f, regs_available_for_popping, FALSE, NULL,
regs_available_for_popping, emit);
if (reg_containing_return_addr == -1)
{
regs_to_pop &= ~(1 << LR_REGNUM);
reg_containing_return_addr =
number_of_first_bit_set (regs_available_for_popping);
regs_available_for_popping &= ~(1 << reg_containing_return_addr);
}
if (regs_available_for_popping)
{
int frame_pointer;
frame_pointer = number_of_first_bit_set (regs_available_for_popping);
bytes += 2;
if (emit)
asm_fprintf (f, "\tmov\t%r, %r\n",
ARM_HARD_FRAME_POINTER_REGNUM, frame_pointer);
regs_available_for_popping &= ~(1 << frame_pointer);
regs_to_pop &= ~(1 << ARM_HARD_FRAME_POINTER_REGNUM);
if (regs_available_for_popping)
{
int stack_pointer;
stack_pointer = number_of_first_bit_set (regs_available_for_popping);
bytes += 2;
if (emit)
asm_fprintf (f, "\tmov\t%r, %r\n", SP_REGNUM, stack_pointer);
}
else
{
regs_available_for_popping |= (1 << frame_pointer);
}
}
if (regs_available_for_popping == 0 && pops_needed > 0)
{
regs_available_for_popping |= 1 << reg_containing_return_addr;
bytes += 2;
if (emit)
asm_fprintf (f, "\tmov\t%r, %r\n", LR_REGNUM,
reg_containing_return_addr);
reg_containing_return_addr = LR_REGNUM;
}
if (pops_needed > 0)
{
int popped_into;
int move_to;
bytes += handle_thumb_pushpop (f, regs_available_for_popping, FALSE, NULL,
regs_available_for_popping, emit);
popped_into = number_of_first_bit_set (regs_available_for_popping);
move_to = number_of_first_bit_set (regs_to_pop);
bytes += 2;
if (emit)
asm_fprintf (f, "\tmov\t%r, %r\n", move_to, popped_into);
regs_to_pop &= ~(1 << move_to);
--pops_needed;
}
if (pops_needed > 0)
{
int popped_into;
bytes += handle_thumb_pushpop (f, regs_available_for_popping, FALSE, NULL,
regs_available_for_popping, emit);
popped_into = number_of_first_bit_set (regs_available_for_popping);
bytes += 2;
if (emit)
asm_fprintf (f, "\tmov\t%r, %r\n", SP_REGNUM, popped_into);
}
if (restore_a4)
{
if (reg_containing_return_addr != LR_REGNUM)
{
bytes += 2;
if (emit)
asm_fprintf (f, "\tmov\t%r, %r\n", LR_REGNUM, LAST_ARG_REGNUM);
reg_containing_return_addr = LR_REGNUM;
}
bytes += 2;
if (emit)
asm_fprintf (f, "\tmov\t%r, %r\n", LAST_ARG_REGNUM, IP_REGNUM);
}
if (current_function_calls_eh_return)
{
bytes += 2;
if (emit)
asm_fprintf (f, "\tadd\t%r, %r\n", SP_REGNUM, ARM_EH_STACKADJ_REGNUM);
}
bytes += 2;
if (emit)
asm_fprintf (f, "\tbx\t%r\n", reg_containing_return_addr);
return bytes;
}
void
thumb_final_prescan_insn (rtx insn)
{
  /* When asm-name printing is requested, annotate each insn in the
     output with its computed address so the listing can be
     cross-referenced against the insn stream.  */
  if (!flag_print_asm_name)
    return;

  asm_fprintf (asm_out_file, "%@ 0x%04x\n",
	       INSN_ADDRESSES (INSN_UID (insn)));
}
/* Return nonzero if VAL is an 8-bit constant shifted left by 0..24
   bits, i.e. something a Thumb mov+lsl pair can synthesize.  */
int
thumb_shiftable_const (unsigned HOST_WIDE_INT val)
{
  int shift;

  if (val == 0)
    return 0;

  for (shift = 0; shift <= 24; shift++)
    {
      unsigned HOST_WIDE_INT window
	= (unsigned HOST_WIDE_INT) 0xff << shift;

      /* VAL fits entirely inside one byte-wide window.  */
      if ((val & window) == val)
	return 1;
    }

  return 0;
}
/* Return 1 if this function contains (or, before reload, might contain)
   a jump whose target is out of range of a normal Thumb branch, as
   reported by the insn "far_jump" attribute.  The result is cached in
   cfun->machine once a far jump has been found.  */
static int
thumb_far_jump_used_p (void)
{
#ifdef ENABLE_LLVM
  /* No RTL insn stream exists in the LLVM path, so there is nothing
     to scan.  */
  return 0;
#else
  rtx insn;
  /* An earlier scan already found a far jump; reuse the answer.  */
  if (cfun->machine->far_jump_used)
    return 1;
  /* Before reload the answer from get_attr_far_jump is not reliable.
     Track arg-pointer liveness and conservatively answer "no" while it
     has never been seen live.  */
  if (!(ARM_DOUBLEWORD_ALIGN || reload_completed))
    {
      if (regs_ever_live [ARG_POINTER_REGNUM])
	cfun->machine->arg_pointer_live = 1;
      else if (!cfun->machine->arg_pointer_live)
	return 0;
    }
  /* Scan every jump insn — skipping jump tables, which have no
     far_jump attribute — for one marked as out of range.  */
  for (insn = get_insns (); insn; insn = NEXT_INSN (insn))
    {
      if (GET_CODE (insn) == JUMP_INSN
	  && GET_CODE (PATTERN (insn)) != ADDR_VEC
	  && GET_CODE (PATTERN (insn)) != ADDR_DIFF_VEC
	  && get_attr_far_jump (insn) == FAR_JUMP_YES
	  )
	{
	  /* Cache the positive result for subsequent queries.  */
	  cfun->machine->far_jump_used = 1;
	  return 1;
	}
    }
  return 0;
#endif
}
/* Return TRUE if FUNC must be entered in ARM (not Thumb) mode, i.e.
   callers may branch to it without an interworking mode switch.  */
int
is_called_in_ARM_mode (tree func)
{
  gcc_assert (TREE_CODE (func) == FUNCTION_DECL);
  /* With callee interworking every externally visible function is
     given an ARM-mode entry sequence.  */
  if (TARGET_CALLEE_INTERWORKING && TREE_PUBLIC (func))
    return TRUE;
#ifdef ARM_PE
  /* On PE targets an explicit "interfacearm" attribute requests an
     ARM-mode entry point.  */
  return lookup_attribute ("interfacearm", DECL_ATTRIBUTES (func)) != NULL_TREE;
#else
  return FALSE;
#endif
}
/* Produce the textual (not RTL-expanded) part of the Thumb epilogue:
   restore high registers through low scratch registers, pop the saved
   low registers, discard pushed pretend args, and exit.  Assembly is
   written to asm_out_file only when EMIT is true; the return value is
   the size of the sequence in bytes either way, so the same routine
   serves both emission and size estimation.  */
static int
handle_thumb_unexpanded_epilogue (bool emit)
{
  int regno;
  unsigned long live_regs_mask = 0;
  int high_regs_pushed = 0;
  int had_to_push_lr;
  int size;
  int bytes = 0;
  /* A return insn already handled the epilogue.  */
  if (return_used_this_function)
    return bytes;
  /* Naked functions get no compiler-generated epilogue.  */
  if (IS_NAKED (arm_current_func_type ()))
    return bytes;
  if (current_function_has_nonlocal_label && arm_arch6)
    {
      bytes += 4;
      if (emit)
	asm_fprintf (asm_out_file, "\tblx ___restore_vfp_d8_d15_regs\n");
    }
  live_regs_mask = thumb_compute_save_reg_mask ();
  high_regs_pushed = bit_count (live_regs_mask & 0x0f00);
  /* The size of the value being returned decides which low argument
     registers are free to use as pop targets below.  */
  size = arm_size_return_regs ();
  if (high_regs_pushed)
    {
      /* High registers cannot be popped directly; pop into low
	 registers then mov them up.  */
      unsigned long mask = live_regs_mask & 0xff;
      int next_hi_reg;
      /* r3 is free unless the return value needs it; likewise r2.  */
      if (size <= 12)
	mask |= 1 << 3;
      if (size <= 8)
	mask |= 1 << 2;
      if (mask == 0)
	/* No low register is available as a staging area.  */
	internal_error
	  ("no low registers available for popping high registers");
      /* Find the first saved high register (r8..r12).  */
      for (next_hi_reg = 8; next_hi_reg < 13; next_hi_reg++)
	if (live_regs_mask & (1 << next_hi_reg))
	  break;
      while (high_regs_pushed)
	{
	  /* Trim MASK to at most the number of high regs remaining.  */
	  for (regno = 0; regno <= LAST_LO_REGNUM; regno++)
	    {
	      if (mask & (1 << regno))
		high_regs_pushed--;
	      if (high_regs_pushed == 0)
		break;
	    }
	  mask &= (2 << regno) - 1;
	  /* Pop the next batch into the low registers...  */
	  bytes += handle_thumb_pushpop (asm_out_file, mask, 0, NULL, mask, emit);
	  /* ...then move each value up to its high register.  */
	  for (regno = 0; regno <= LAST_LO_REGNUM; regno++)
	    {
	      if (mask & (1 << regno))
		{
		  bytes += 2;
		  if (emit)
		    asm_fprintf (asm_out_file, "\tmov\t%r, %r\n", next_hi_reg,
				 regno);
		  for (next_hi_reg++; next_hi_reg < 13; next_hi_reg++)
		    if (live_regs_mask & (1 << next_hi_reg))
		      break;
		}
	    }
	}
      live_regs_mask &= ~0x0f00;
    }
  had_to_push_lr = (live_regs_mask & (1 << LR_REGNUM)) != 0;
  live_regs_mask &= 0xff;
  if (current_function_pretend_args_size == 0 || TARGET_BACKTRACE)
    {
      /* No pushed pretend args (or the backtrace struct already
	 accounts for them): pop everything, with the saved LR popped
	 straight into the PC to return.  */
      if (had_to_push_lr)
	live_regs_mask |= 1 << PC_REGNUM;
      if (live_regs_mask)
	bytes += handle_thumb_pushpop (asm_out_file, live_regs_mask, FALSE, NULL,
				       live_regs_mask, emit);
      /* LR was never pushed, so it still holds the return address.  */
      if (!had_to_push_lr)
	bytes += handle_thumb_exit (asm_out_file, LR_REGNUM, emit);
    }
  else
    {
      /* Pop everything except the return address first.  */
      if (live_regs_mask)
	bytes += handle_thumb_pushpop (asm_out_file, live_regs_mask, FALSE, NULL,
				       live_regs_mask, emit);
      if (had_to_push_lr)
	{
	  if (size > 12)
	    {
	      /* The return value occupies the low regs: park r3 in IP
		 while we use it for the popped return address.  */
	      bytes += 2;
	      if (emit)
		asm_fprintf (asm_out_file, "\tmov\t%r, %r\n", IP_REGNUM,
			     LAST_ARG_REGNUM);
	    }
	  /* Pop the saved return address into r3.  */
	  bytes += handle_thumb_pushpop (asm_out_file, 1 << LAST_ARG_REGNUM, 0, NULL,
					 1 << LAST_ARG_REGNUM, emit);
	  if (size > 12)
	    {
	      bytes += 4;
	      if (emit)
		{
		  /* Move the return address to LR and restore r3.  */
		  asm_fprintf (asm_out_file, "\tmov\t%r, %r\n", LR_REGNUM,
			       LAST_ARG_REGNUM);
		  asm_fprintf (asm_out_file, "\tmov\t%r, %r\n", LAST_ARG_REGNUM,
			       IP_REGNUM);
		}
	      regno = LR_REGNUM;
	    }
	  else
	    regno = LAST_ARG_REGNUM;
	}
      else
	regno = LR_REGNUM;
      /* Discard the pushed pretend arguments.  */
      bytes += 2;
      if (emit)
	asm_fprintf (asm_out_file, "\tadd\t%r, %r, #%d\n",
		     SP_REGNUM, SP_REGNUM,
		     current_function_pretend_args_size);
      /* Return via whichever register now holds the return address.  */
      bytes += handle_thumb_exit (asm_out_file, regno, emit);
    }
  return bytes;
}
/* output_asm hook wrapper: emit the unexpanded Thumb epilogue text.
   The byte count returned by the worker is irrelevant here.  */
const char * thumb_unexpanded_epilogue (void)
{
  handle_thumb_unexpanded_epilogue (true);

  return "";
}
/* Allocate the per-function machine-specific state, zero-initialized
   by ggc_alloc_cleared so every field starts out 0/NULL.  */
static struct machine_function *
arm_init_machine_status (void)
{
  struct machine_function *mach
    = (machine_function *) ggc_alloc_cleared (sizeof (machine_function));

#if ARM_FT_UNKNOWN != 0
  /* Only needed when "unknown function type" is not encoded as zero.  */
  mach->func_type = ARM_FT_UNKNOWN;
#endif

  return mach;
}
/* Return an rtx for the return address of frame COUNT levels up.
   For the current frame (COUNT == 0) that is the incoming LR value;
   for outer frames it was saved at FRAME + 4.  */
rtx
arm_return_addr (int count, rtx frame)
{
  if (count == 0)
    return get_hard_reg_initial_val (Pmode, LR_REGNUM);

  return gen_rtx_MEM (Pmode, plus_constant (frame, 4));
}
void
arm_init_expanders (void)
{
init_machine_status = arm_init_machine_status;
if (cfun)
mark_reg_pointer (arg_pointer_rtx, PARM_BOUNDARY);
}
/* Return the offset to add when eliminating register FROM in favor of
   register TO, derived from the frame layout offsets.  Unsupported
   combinations abort.  */
HOST_WIDE_INT
thumb_compute_initial_elimination_offset (unsigned int from, unsigned int to)
{
  arm_stack_offsets *off = arm_get_frame_offsets ();

  if (from == ARG_POINTER_REGNUM)
    {
      if (to == STACK_POINTER_REGNUM)
	return off->outgoing_args - off->saved_args;
      if (to == FRAME_POINTER_REGNUM)
	return off->soft_frame - off->saved_args;
      if (to == HARD_FRAME_POINTER_REGNUM)
	return off->frame - off->saved_args;
      gcc_unreachable ();
    }

  if (from == FRAME_POINTER_REGNUM)
    {
      if (to == STACK_POINTER_REGNUM)
	return off->outgoing_args - off->soft_frame;
      if (to == HARD_FRAME_POINTER_REGNUM)
	return off->frame - off->soft_frame;
      gcc_unreachable ();
    }

  gcc_unreachable ();
}
/* Expand the RTL part of the Thumb prologue: set up the frame pointer,
   load the PIC register, and perform the main stack adjustment with
   REG_FRAME_RELATED_EXPR notes for the unwinder.  */
void
thumb_expand_prologue (void)
{
  rtx insn, dwarf;
  HOST_WIDE_INT amount;
  arm_stack_offsets *offsets;
  unsigned long func_type;
  int regno;
  unsigned long live_regs_mask;
  func_type = arm_current_func_type ();
  /* Naked functions don't have prologues.  */
  if (IS_NAKED (func_type))
    return;
  if (IS_INTERRUPT (func_type))
    {
      error ("interrupt Service Routines cannot be coded in Thumb mode");
      return;
    }
  live_regs_mask = thumb_compute_save_reg_mask ();
  /* Load the PIC register before the frame pointer is established.  */
  if (flag_pic && arm_pic_register != INVALID_REGNUM)
    arm_load_pic_register (live_regs_mask);
  offsets = arm_get_frame_offsets ();
  if (frame_pointer_needed)
    {
      /* hard FP = SP + (saved_regs - frame); frame-related for dwarf.  */
      insn = emit_insn (gen_addsi3 (hard_frame_pointer_rtx,
				    stack_pointer_rtx,
				    GEN_INT (offsets->saved_regs
					     - offsets->frame)));
      RTX_FRAME_RELATED_P (insn) = 1;
    }
  else if (CALLER_INTERWORKING_SLOT_SIZE > 0)
    {
      emit_move_insn (gen_rtx_REG (Pmode, ARM_HARD_FRAME_POINTER_REGNUM),
		      stack_pointer_rtx);
    }
  /* Space for locals/outgoing args beyond the register saves.  */
  amount = offsets->outgoing_args - offsets->saved_regs;
  if (amount)
    {
      if (amount < 512)
	{
	  /* Small enough for an immediate SP adjustment.  */
	  insn = emit_insn (gen_addsi3 (stack_pointer_rtx, stack_pointer_rtx,
					GEN_INT (- amount)));
	  RTX_FRAME_RELATED_P (insn) = 1;
	}
      else
	{
	  rtx reg;
	  /* The decrement does not fit an immediate: find a low register,
	     already saved in the prologue, to hold it.  Skip the hard
	     frame pointer if it is in use.  */
	  for (regno = LAST_ARG_REGNUM + 1; regno <= LAST_LO_REGNUM; regno++)
	    if (live_regs_mask & (1 << regno)
		&& !(frame_pointer_needed
		     && (regno == THUMB_HARD_FRAME_POINTER_REGNUM)))
	      break;
	  if (regno > LAST_LO_REGNUM)
	    {
	      /* No saved low register available: borrow one, preserving
		 its value in the high scratch register IP.  */
	      rtx spare = gen_rtx_REG (SImode, IP_REGNUM);
	      reg = gen_rtx_REG (SImode, LAST_LO_REGNUM - 1);
	      emit_insn (gen_movsi (spare, reg));
	      /* USE keeps dataflow from deleting the save.  */
	      emit_insn (gen_prologue_use (spare));
	      emit_insn (gen_movsi (reg, GEN_INT (- amount)));
	      insn = emit_insn (gen_addsi3 (stack_pointer_rtx,
					    stack_pointer_rtx, reg));
	      RTX_FRAME_RELATED_P (insn) = 1;
	      /* Tell the unwinder the net effect: SP = SP - amount.  */
	      dwarf = gen_rtx_SET (VOIDmode, stack_pointer_rtx,
				   plus_constant (stack_pointer_rtx,
						  -amount));
	      RTX_FRAME_RELATED_P (dwarf) = 1;
	      REG_NOTES (insn)
		= gen_rtx_EXPR_LIST (REG_FRAME_RELATED_EXPR, dwarf,
				     REG_NOTES (insn));
	      /* Restore the borrowed register and keep the restore live.  */
	      emit_insn (gen_movsi (reg, spare));
	      emit_insn (gen_prologue_use (reg));
	    }
	  else
	    {
	      reg = gen_rtx_REG (SImode, regno);
	      emit_insn (gen_movsi (reg, GEN_INT (- amount)));
	      insn = emit_insn (gen_addsi3 (stack_pointer_rtx,
					    stack_pointer_rtx, reg));
	      RTX_FRAME_RELATED_P (insn) = 1;
	      /* As above: record the SP adjustment for unwinding.  */
	      dwarf = gen_rtx_SET (VOIDmode, stack_pointer_rtx,
				   plus_constant (stack_pointer_rtx,
						  -amount));
	      RTX_FRAME_RELATED_P (dwarf) = 1;
	      REG_NOTES (insn)
		= gen_rtx_EXPR_LIST (REG_FRAME_RELATED_EXPR, dwarf,
				     REG_NOTES (insn));
	    }
	}
    }
  /* Stop the scheduler moving insns across the prologue boundary.  */
  if (current_function_profile || !TARGET_SCHED_PROLOG
      || (ARM_EABI_UNWIND_TABLES && flag_non_call_exceptions))
    emit_insn (gen_blockage ());
  cfun->machine->lr_save_eliminated = !thumb_force_lr_save ();
  if (live_regs_mask & 0xff)
    cfun->machine->lr_save_eliminated = 0;
  /* If LR stays live with the return address, keep later passes from
     reusing it.  */
  if (cfun->machine->lr_save_eliminated)
    emit_insn (gen_prologue_use (gen_rtx_REG (SImode, LR_REGNUM)));
}
/* Expand the RTL part of the Thumb epilogue: undo the prologue's stack
   adjustment (or recover SP from the frame pointer) and emit the
   USE/CLOBBER markers that keep dataflow analysis honest.  */
void
thumb_expand_epilogue (void)
{
  HOST_WIDE_INT amount;
  arm_stack_offsets *offsets;
  int regno;
  /* Naked functions don't have epilogues.  */
  if (IS_NAKED (arm_current_func_type ()))
    return;
  offsets = arm_get_frame_offsets ();
  amount = offsets->outgoing_args - offsets->saved_regs;
  if (! current_function_sp_is_unchanging)
    {
      /* SP cannot be trusted (e.g. after alloca): rebuild it from the
	 hard frame pointer instead of adding AMOUNT back.  */
      int fp_offset = offsets->frame - offsets->saved_regs;
      if (fp_offset)
	{
	  /* Stage FP + fp_offset through r3; LAST_ARG_REGNUM is free
	     here according to this code's register use.  */
	  rtx reg = gen_rtx_REG (SImode, LAST_ARG_REGNUM);
	  emit_insn (gen_movsi (reg, hard_frame_pointer_rtx));
	  emit_insn (gen_addsi3 (reg, reg, GEN_INT (fp_offset)));
	  emit_insn (gen_movsi (stack_pointer_rtx, reg));
	}
      else
	{
	  emit_insn (gen_movsi (stack_pointer_rtx,
				hard_frame_pointer_rtx));
	}
    }
  else if (amount)
    {
      if (amount < 512)
	emit_insn (gen_addsi3 (stack_pointer_rtx, stack_pointer_rtx,
			       GEN_INT (amount)));
      else
	{
	  /* Too large for an immediate: go through a register.  */
	  rtx reg = gen_rtx_REG (SImode, LAST_ARG_REGNUM);
	  emit_insn (gen_movsi (reg, GEN_INT (amount)));
	  emit_insn (gen_addsi3 (stack_pointer_rtx, stack_pointer_rtx, reg));
	}
    }
  /* Keep the stack adjustment from being deleted as dead.  */
  emit_insn (gen_prologue_use (stack_pointer_rtx));
  if (current_function_profile || !TARGET_SCHED_PROLOG)
    emit_insn (gen_blockage ());
  /* Clobber each callee-saved register that the (textual) epilogue
     will restore, so register lifetimes end here.  */
  for (regno = 0; regno < 13; regno++)
    if (regs_ever_live[regno] && !call_used_regs[regno])
      emit_insn (gen_rtx_CLOBBER (VOIDmode, gen_rtx_REG (SImode, regno)));
  /* If LR was never stored, mark it used so it survives to the exit.  */
  if (! regs_ever_live[LR_REGNUM])
    emit_insn (gen_rtx_USE (VOIDmode, gen_rtx_REG (SImode, LR_REGNUM)));
}
/* Produce the textual (not RTL-expanded) part of the Thumb prologue:
   the ARM->Thumb entry shim, pretend-argument pushes, the TARGET_BACKTRACE
   stack backtrace structure, and the low/high register saves.  Assembly
   goes to F only when EMIT is true; the size of the sequence in bytes is
   returned either way so the same code serves emission and measurement.  */
static int
handle_thumb_unexpanded_prologue (FILE *f, bool emit)
{
  unsigned long live_regs_mask = 0;
  unsigned long l_mask;
  unsigned high_regs_pushed = 0;
  int cfa_offset = 0;
  int regno;
  int bytes = 0;
  /* Naked functions get no compiler-generated prologue.  */
  if (IS_NAKED (arm_current_func_type ()))
    return bytes;
  if (is_called_in_ARM_mode (current_function_decl))
    {
      const char * name;
      gcc_assert (GET_CODE (DECL_RTL (current_function_decl)) == MEM);
      gcc_assert (GET_CODE (XEXP (DECL_RTL (current_function_decl), 0))
		  == SYMBOL_REF);
      bytes += 8;
      if (emit)
	{
	  name = XSTR (XEXP (DECL_RTL (current_function_decl), 0), 0);
	  /* ARM-mode entry stub: set bit 0 of the continuation address
	     and bx to it, switching the CPU into Thumb state.  */
	  asm_fprintf (f, "\torr\t%r, %r, #1\n", IP_REGNUM, PC_REGNUM);
	  asm_fprintf (f, "\tbx\t%r\n", IP_REGNUM);
	  /* Label the real (Thumb) start of the function.  */
#define STUB_NAME ".real_start_of"
	  fprintf (f, "\t.code\t16\n");
#ifdef ARM_PE
	  if (arm_dllexport_name_p (name))
	    name = arm_strip_name_encoding (name);
#endif
	  asm_fprintf (f, "\t.globl %s%U%s\n", STUB_NAME, name);
	  if (TARGET_MACHO)
	    asm_fprintf (f, "\t.thumb_func %s%U%s\n", STUB_NAME, name);
	  else
	    fprintf (f, "\t.thumb_func\n");
	  asm_fprintf (f, "%s%U%s:\n", STUB_NAME, name);
	}
    }
  if (current_function_pretend_args_size)
    {
      /* EABI unwind directive for the pretend-args adjustment.  */
      if (emit && ARM_EABI_UNWIND_TABLES)
	fprintf (f, "\t.pad #%d\n",
		 current_function_pretend_args_size);
      if (emit)
	{
	  if (cfun->machine->uses_anonymous_args)
	    {
	      /* Varargs: push the unnamed argument registers.  */
	      int num_pushes;
	      fprintf (f, "\tpush\t{");
	      num_pushes = ARM_NUM_INTS (current_function_pretend_args_size);
	      for (regno = LAST_ARG_REGNUM + 1 - num_pushes;
		   regno <= LAST_ARG_REGNUM;
		   regno++)
		asm_fprintf (f, "%r%s", regno,
			     regno == LAST_ARG_REGNUM ? "" : ", ");
	      fprintf (f, "}\n");
	    }
	  else
	    /* Otherwise just reserve the space.  */
	    asm_fprintf (f, "\tsub\t%r, %r, #%d\n",
			 SP_REGNUM, SP_REGNUM,
			 current_function_pretend_args_size);
	}
      /* Record the CFA change for dwarf2 frame output.  */
      if (emit && dwarf2out_do_frame ())
	{
	  char *l = dwarf2out_cfi_label ();
	  cfa_offset = cfa_offset + current_function_pretend_args_size;
	  dwarf2out_def_cfa (l, SP_REGNUM, cfa_offset);
	}
    }
  live_regs_mask = thumb_compute_save_reg_mask ();
  /* Registers the Thumb push insn can save directly (r0-r7 and LR).  */
  l_mask = live_regs_mask & 0x40ff;
  /* High registers (r8-r11) need staging through low registers.  */
  high_regs_pushed = bit_count (live_regs_mask & 0x0f00);
  if (TARGET_BACKTRACE)
    {
      unsigned offset;
      unsigned work_register;
      /* Build the APCS stack backtrace structure: 16 bytes holding
	 the saved SP, FP, LR and a PC value, all staged through a
	 work register.  */
      work_register = thumb_find_work_register (live_regs_mask);
      if (emit && ARM_EABI_UNWIND_TABLES)
	asm_fprintf (f, "\t.pad #16\n");
      bytes += 2;
      if (emit)
	asm_fprintf
	  (f, "\tsub\t%r, %r, #16\t%@ Create stack backtrace structure\n",
	   SP_REGNUM, SP_REGNUM);
      if (emit && dwarf2out_do_frame ())
	{
	  char *l = dwarf2out_cfi_label ();
	  cfa_offset = cfa_offset + 16;
	  dwarf2out_def_cfa (l, SP_REGNUM, cfa_offset);
	}
      if (l_mask)
	{
	  bytes += handle_thumb_pushpop (f, l_mask, 1, &cfa_offset, l_mask, emit);
	  offset = bit_count (l_mask) * UNITS_PER_WORD;
	}
      else
	offset = 0;
      bytes += 4;
      if (emit)
	{
	  /* Store the pre-push, pre-reserve stack pointer value.  */
	  asm_fprintf (f, "\tadd\t%r, %r, #%d\n", work_register, SP_REGNUM,
		       offset + 16 + current_function_pretend_args_size);
	  asm_fprintf (f, "\tstr\t%r, [%r, #%d]\n", work_register, SP_REGNUM,
		       offset + 4);
	}
      bytes += 8;
      if (emit)
	{
	  /* The two orders below keep the "mov rX, PC" at a fixed
	     distance from the start of this sequence; only the
	     interleaving with the FP store differs.  */
	  if (l_mask)
	    {
	      asm_fprintf (f, "\tmov\t%r, %r\n", work_register, PC_REGNUM);
	      asm_fprintf (f, "\tstr\t%r, [%r, #%d]\n", work_register, SP_REGNUM,
			   offset + 12);
	      asm_fprintf (f, "\tmov\t%r, %r\n", work_register,
			   ARM_HARD_FRAME_POINTER_REGNUM);
	      asm_fprintf (f, "\tstr\t%r, [%r, #%d]\n", work_register, SP_REGNUM,
			   offset);
	    }
	  else
	    {
	      asm_fprintf (f, "\tmov\t%r, %r\n", work_register,
			   ARM_HARD_FRAME_POINTER_REGNUM);
	      asm_fprintf (f, "\tstr\t%r, [%r, #%d]\n", work_register, SP_REGNUM,
			   offset);
	      asm_fprintf (f, "\tmov\t%r, %r\n", work_register, PC_REGNUM);
	      asm_fprintf (f, "\tstr\t%r, [%r, #%d]\n", work_register, SP_REGNUM,
			   offset + 12);
	    }
	}
      bytes += 8;
      if (emit)
	{
	  /* Save LR, then point FP at the new backtrace structure.  */
	  asm_fprintf (f, "\tmov\t%r, %r\n", work_register, LR_REGNUM);
	  asm_fprintf (f, "\tstr\t%r, [%r, #%d]\n", work_register, SP_REGNUM,
		       offset + 8);
	  asm_fprintf (f, "\tadd\t%r, %r, #%d\n", work_register, SP_REGNUM,
		       offset + 12);
	  asm_fprintf (f, "\tmov\t%r, %r\t\t%@ Backtrace structure created\n",
		       ARM_HARD_FRAME_POINTER_REGNUM, work_register);
	}
    }
  /* If only LR would be pushed and high registers follow, delay the
     push so it can be merged with the first high-register push.  */
  else if ((l_mask & 0xff) != 0
	   || (high_regs_pushed == 0 && l_mask))
    bytes += handle_thumb_pushpop (f, l_mask, 1, &cfa_offset, l_mask, emit);
  if (high_regs_pushed)
    {
      unsigned pushable_regs;
      unsigned next_hi_reg;
      /* Find the highest live high register to save first.  */
      for (next_hi_reg = 12; next_hi_reg > LAST_LO_REGNUM; next_hi_reg--)
	if (live_regs_mask & (1 << next_hi_reg))
	  break;
      /* Low registers already saved are reusable as staging space.  */
      pushable_regs = l_mask & 0x7f;
      if (pushable_regs == 0)
	pushable_regs = 1 << thumb_find_work_register (live_regs_mask);
      while (high_regs_pushed > 0)
	{
	  unsigned long real_regs_mask = 0;
	  /* Copy high registers down into the staging low registers,
	     highest register into highest staging slot first.  */
	  for (regno = LAST_LO_REGNUM; regno >= 0; regno --)
	    {
	      if (pushable_regs & (1 << regno))
		{
		  bytes += 2;
		  if (emit)
		    asm_fprintf (f, "\tmov\t%r, %r\n", regno, next_hi_reg);
		  high_regs_pushed --;
		  real_regs_mask |= (1 << next_hi_reg);
		  if (high_regs_pushed)
		    {
		      for (next_hi_reg --; next_hi_reg > LAST_LO_REGNUM;
			   next_hi_reg --)
			if (live_regs_mask & (1 << next_hi_reg))
			  break;
		    }
		  else
		    {
		      /* Done: drop the now-unused lower staging regs.  */
		      pushable_regs &= ~((1 << regno) - 1);
		      break;
		    }
		}
	    }
	  /* A delayed LR push (see above) is merged into this push.  */
	  if (l_mask == (1 << LR_REGNUM))
	    {
	      bytes += handle_thumb_pushpop
		(f, pushable_regs | (1 << LR_REGNUM),
		 1, &cfa_offset,
		 real_regs_mask | (1 << LR_REGNUM), emit);
	      l_mask = 0;
	    }
	  else
	    bytes += handle_thumb_pushpop (f, pushable_regs, 1, &cfa_offset, real_regs_mask, emit);
	}
    }
  if (current_function_has_nonlocal_label && arm_arch6)
    {
      bytes += 4;
      if (emit)
	asm_fprintf (f, "\tblx ___save_vfp_d8_d15_regs\n");
    }
  return bytes;
}
/* TARGET_ASM_FUNCTION_PROLOGUE hook for Thumb: emit the textual
   prologue; the returned byte count is not needed here.  */
static void
thumb_output_function_prologue (FILE *f, HOST_WIDE_INT size ATTRIBUTE_UNUSED)
{
  handle_thumb_unexpanded_prologue (f, true);
}
/* Dry run of the unexpanded prologue: measure its size in bytes
   without writing any assembly.  */
int count_thumb_unexpanded_prologue (void)
{
  return handle_thumb_unexpanded_prologue (NULL, false);
}
/* Emit the two ldr insns loading a DImode value from the memory in
   operands[1] into the register pair starting at operands[0].  When the
   destination's low register would clobber the address register, the
   high word is loaded first.  */
const char *
thumb_load_double_from_address (rtx *operands)
{
  rtx addr, base, offset, arg1, arg2;
  int hi_word_first = 0;

  gcc_assert (GET_CODE (operands[0]) == REG);
  gcc_assert (GET_CODE (operands[1]) == MEM);

  addr = XEXP (operands[1], 0);

  switch (GET_CODE (addr))
    {
    case REG:
      /* Load high first if the low destination is the address reg.  */
      hi_word_first = (REGNO (operands[0]) == REGNO (addr));
      break;

    case CONST:
      /* A constant address cannot be clobbered.  */
      hi_word_first = 0;
      break;

    case PLUS:
      arg1 = XEXP (addr, 0);
      arg2 = XEXP (addr, 1);

      if (CONSTANT_P (arg1))
	base = arg2, offset = arg1;
      else
	base = arg1, offset = arg2;

      gcc_assert (GET_CODE (base) == REG);

      /* Register-plus-register addresses are not handled here.  */
      gcc_assert (GET_CODE (offset) != REG);

      hi_word_first = (REGNO (operands[0]) == REGNO (base));
      break;

    case LABEL_REF:
      /* With a PC-relative address, load the high word first.  */
      hi_word_first = 1;
      break;

    default:
      gcc_unreachable ();
    }

  /* operands[2] addresses the high word, four bytes further on.  */
  operands[2] = adjust_address (operands[1], SImode, 4);

  if (hi_word_first)
    {
      output_asm_insn ("ldr\t%H0, %2", operands);
      output_asm_insn ("ldr\t%0, %1", operands);
    }
  else
    {
      output_asm_insn ("ldr\t%0, %1", operands);
      output_asm_insn ("ldr\t%H0, %2", operands);
    }

  return "";
}
/* Emit an ldmia/stmia pair copying N (2 or 3) words.  The scratch
   registers live in operands[4..3+N] and must appear in ascending
   register-number order in the register lists, so sort them first.  */
const char *
thumb_output_move_mem_multiple (int n, rtx *operands)
{
  int i, j;
  rtx tmp;

  if (n != 2 && n != 3)
    gcc_unreachable ();

  /* Sort operands[4..3+N] by ascending REGNO (tiny selection sort;
     N is at most 3).  */
  for (i = 4; i < n + 3; i++)
    for (j = i + 1; j <= n + 3; j++)
      if (REGNO (operands[i]) > REGNO (operands[j]))
	{
	  tmp = operands[i];
	  operands[i] = operands[j];
	  operands[j] = tmp;
	}

  if (n == 2)
    {
      output_asm_insn ("ldmia\t%1!, {%4, %5}", operands);
      output_asm_insn ("stmia\t%0!, {%4, %5}", operands);
    }
  else
    {
      output_asm_insn ("ldmia\t%1!, {%4, %5, %6}", operands);
      output_asm_insn ("stmia\t%0!, {%4, %5, %6}", operands);
    }

  return "";
}
/* Output a "bl" to the call-via-REG stub for REG, creating the stub's
   label on first use.  Stubs for code in the text section are shared
   per file; otherwise they are tracked per function.  */
const char *
thumb_call_via_reg (rtx reg)
{
  int regno = REGNO (reg);
  rtx *labelp;

  gcc_assert (regno < LR_REGNUM);

  if (in_section == text_section)
    {
      /* Request emission of the shared stubs at end of file.  */
      thumb_call_reg_needed = 1;

      if (thumb_call_via_label[regno] == NULL)
	thumb_call_via_label[regno] = gen_label_rtx ();
      labelp = &thumb_call_via_label[regno];
    }
  else
    {
      if (cfun->machine->call_via[regno] == NULL)
	cfun->machine->call_via[regno] = gen_label_rtx ();
      labelp = &cfun->machine->call_via[regno];
    }

  output_asm_insn ("bl\t%a0", labelp);
  return "";
}
/* Expand a memory block copy of INTVAL (operands[2]) bytes from
   operands[1] to operands[0]: 12- and 8-byte ldm/stm chunks first
   (those patterns post-update the pointer registers), then word,
   halfword and byte moves at explicit offsets for the remainder.  */
void
thumb_expand_movmemqi (rtx *operands)
{
  rtx dst = copy_to_mode_reg (SImode, XEXP (operands[0], 0));
  rtx src = copy_to_mode_reg (SImode, XEXP (operands[1], 0));
  HOST_WIDE_INT remaining = INTVAL (operands[2]);
  HOST_WIDE_INT delta = 0;

  /* Bulk copies; DST/SRC advance as part of the patterns.  */
  while (remaining >= 12)
    {
      emit_insn (gen_movmem12b (dst, src, dst, src));
      remaining -= 12;
    }

  if (remaining >= 8)
    {
      emit_insn (gen_movmem8b (dst, src, dst, src));
      remaining -= 8;
    }

  if (remaining >= 4)
    {
      rtx word = gen_reg_rtx (SImode);

      emit_insn (gen_movsi (word, gen_rtx_MEM (SImode, src)));
      emit_insn (gen_movsi (gen_rtx_MEM (SImode, dst), word));
      remaining -= 4;
      delta += 4;
    }

  if (remaining >= 2)
    {
      rtx half = gen_reg_rtx (HImode);

      emit_insn (gen_movhi (half, gen_rtx_MEM (HImode,
					       plus_constant (src, delta))));
      emit_insn (gen_movhi (gen_rtx_MEM (HImode, plus_constant (dst, delta)),
			    half));
      remaining -= 2;
      delta += 2;
    }

  if (remaining != 0)
    {
      rtx byte = gen_reg_rtx (QImode);

      emit_insn (gen_movqi (byte, gen_rtx_MEM (QImode,
					       plus_constant (src, delta))));
      emit_insn (gen_movqi (gen_rtx_MEM (QImode, plus_constant (dst, delta)),
			    byte));
    }
}
/* Reload helper: emit the HImode store through the clobber pattern,
   operands[2] being the scratch register.  */
void
thumb_reload_out_hi (rtx *operands)
{
  emit_insn (gen_thumb_movhi_clobber (operands[0], operands[1],
				      operands[2]));
}
/* Reload helper for HImode input reloads.  This case is never expected
   to arise, so reaching it is a compiler bug.  */
void
thumb_reload_in_hi (rtx *operands ATTRIBUTE_UNUSED)
{
  gcc_unreachable ();
}
/* Return how many characters the name-encoding prefix character C
   occupies at the start of a symbol name, or 0 if C is not an encoding
   character.  The case labels come from the target's
   ARM_NAME_ENCODING_LENGTHS macro.  */
static int
arm_get_strip_length (int c)
{
  switch (c)
    {
    ARM_NAME_ENCODING_LENGTHS
      default: return 0;
    }
}
/* Return NAME with any ARM name-encoding prefix characters removed.  */
const char *
arm_strip_name_encoding (const char *name)
{
  for (;;)
    {
      int skip = arm_get_strip_length (*name);

      if (skip == 0)
	return name;
      name += skip;
    }
}
/* Output a reference to NAME on STREAM, stripping encoding prefixes.
   A '*' among the stripped characters means "emit verbatim", without
   the user-label prefix.  */
void
arm_asm_output_labelref (FILE *stream, const char *name)
{
  int verbatim = 0;
  int skip;

  while ((skip = arm_get_strip_length (*name)) != 0)
    {
      if (*name == '*')
	verbatim = 1;
      name += skip;
    }

  if (verbatim)
    fputs (name, stream);
  else
    asm_fprintf (stream, "%U%s", name);
}
static void
arm_file_end (void)
{
int regno;
if (! thumb_call_reg_needed)
return;
switch_to_section (text_section);
asm_fprintf (asm_out_file, "\t.code 16\n");
ASM_OUTPUT_ALIGN (asm_out_file, 1);
for (regno = 0; regno < LR_REGNUM; regno++)
{
rtx label = thumb_call_via_label[regno];
if (label != 0)
{
targetm.asm_out.internal_label (asm_out_file, "L",
CODE_LABEL_NUMBER (label));
asm_fprintf (asm_out_file, "\tbx\t%r\n", regno);
}
}
}
#if TARGET_MACHO
/* Darwin start-of-file hook: generic preamble plus Mach-O setup.  */
static void
arm_darwin_file_start (void)
{
  default_file_start();
  darwin_file_start();
}
/* Darwin end-of-file hook: Mach-O cleanup, then the common ARM
   end-of-file work (Thumb call-via stubs).  */
static void
arm_darwin_file_end (void)
{
  darwin_file_end ();
  arm_file_end ();
}
#endif
/* Symbol anchoring the AOF PIC address-constant pool; set on first use
   to a SYMBOL_REF for "x$adcons" (see aof_pic_entry below).  */
rtx aof_pic_label;
#ifdef AOF_ASSEMBLER
/* One entry in the AOF PIC constant chain; each entry corresponds to a
   4-byte slot in the x$adcons pool, its offset given by its position
   in the list.  */
struct pic_chain
{
  struct pic_chain * next;
  const char * symname;
};
/* Head of the chain; dumped by aof_dump_pic_table.  */
static struct pic_chain * aof_pic_chain = NULL;
/* Return an rtx addressing the PIC pool slot for symbol X, appending a
   new slot if X has not been seen before.  Symbol names are compared
   by pointer, matching how they are stored.  */
rtx
aof_pic_entry (rtx x)
{
  struct pic_chain **link;
  int offset = 0;

  if (aof_pic_label == NULL_RTX)
    aof_pic_label = gen_rtx_SYMBOL_REF (Pmode, "x$adcons");

  /* Walk the chain; each entry accounts for one 4-byte slot.  */
  link = &aof_pic_chain;
  while (*link)
    {
      if ((*link)->symname == XSTR (x, 0))
	return plus_constant (aof_pic_label, offset);
      offset += 4;
      link = &(*link)->next;
    }

  /* Not found: append a new entry at the current offset.  */
  *link = (struct pic_chain *) xmalloc (sizeof (struct pic_chain));
  (*link)->next = NULL;
  (*link)->symname = XSTR (x, 0);

  return plus_constant (aof_pic_label, offset);
}
/* Write the accumulated PIC address-constant pool to F as a BASED
   AREA of DCD entries, one per chained symbol.  */
void
aof_dump_pic_table (FILE *f)
{
  struct pic_chain *entry;

  if (aof_pic_chain == NULL)
    return;

  asm_fprintf (f, "\tAREA |%r$$adcons|, BASED %r\n",
	       PIC_OFFSET_TABLE_REGNUM,
	       PIC_OFFSET_TABLE_REGNUM);
  fputs ("|x$adcons|\n", f);

  for (entry = aof_pic_chain; entry != NULL; entry = entry->next)
    {
      fputs ("\tDCD\t", f);
      assemble_name (f, entry->symname);
      fputs ("\n", f);
    }
}
/* Number of code AREAs emitted so far; each gets a unique name.  */
int arm_text_section_count = 1;
/* Unnamed-section callback: emit the AREA directive for a new code
   section, marked PIC/REENTRANT when compiling PIC.  */
static void
aof_output_text_section_asm_op (const void *data ATTRIBUTE_UNUSED)
{
  fprintf (asm_out_file, "\tAREA |C$$code%d|, CODE, READONLY",
	   arm_text_section_count++);
  if (flag_pic)
    fprintf (asm_out_file, ", PIC, REENTRANT");
  fprintf (asm_out_file, "\n");
}
/* Number of data AREAs emitted so far; each gets a unique name.  */
static int arm_data_section_count = 1;
/* Unnamed-section callback: emit the AREA directive for a new data
   section.  */
static void
aof_output_data_section_asm_op (const void *data ATTRIBUTE_UNUSED)
{
  fprintf (asm_out_file, "\tAREA |C$$data%d|, DATA\n",
	   arm_data_section_count++);
}
/* Create the AOF text and data sections.  Read-only data shares the
   text section.  */
static void
aof_asm_init_sections (void)
{
  text_section = get_unnamed_section (SECTION_CODE,
				      aof_output_text_section_asm_op, NULL);
  data_section = get_unnamed_section (SECTION_WRITE,
				      aof_output_data_section_asm_op, NULL);
  readonly_data_section = text_section;
}
/* Switch output to a fresh zero-initialized (NOINIT) data AREA.
   Clearing in_section forces the next section change to re-emit its
   directive.  */
void
zero_init_section (void)
{
  static int zero_init_count = 1;
  fprintf (asm_out_file, "\tAREA |C$$zidata%d|,NOINIT\n", zero_init_count++);
  in_section = NULL;
}
/* Node in the list of symbols needing an IMPORT directive; NAME points
   at an interned string, so entries are compared by pointer.  */
struct import
{
  struct import * next;
  const char * name;
};
/* Head of the pending-import list, consumed by aof_dump_imports.  */
static struct import * imports_list = NULL;
void
aof_add_import (const char *name)
{
struct import * new;
for (new = imports_list; new; new = new->next)
if (new->name == name)
return;
new = (struct import *) xmalloc (sizeof (struct import));
new->next = imports_list;
imports_list = new;
new->name = name;
}
/* Remove NAME from the pending-import list, if present.  NAME is an
   interned string, so pointer equality identifies the entry.  */
void
aof_delete_import (const char *name)
{
  struct import ** old;

  for (old = &imports_list; *old; old = & (*old)->next)
    {
      if ((*old)->name == name)
	{
	  struct import *dead = *old;

	  *old = dead->next;
	  /* The node was xmalloc'ed in aof_add_import and was leaked
	     here previously; release it.  The name string is interned
	     and not owned by the node, so it is left alone.  */
	  free (dead);
	  return;
	}
    }
}
/* Nonzero once a global symbol named "main" has been emitted (see
   aof_globalize_label); triggers the __main import below.  */
int arm_main_function = 0;
/* Emit IMPORT directives for __main (if main was defined) and for
   every queued symbol, consuming the list as it goes.  */
static void
aof_dump_imports (FILE *f)
{
  if (arm_main_function)
    {
      switch_to_section (text_section);
      fputs ("\tIMPORT __main\n", f);
      fputs ("\tDCD __main\n", f);
    }
  while (imports_list)
    {
      fprintf (f, "\tIMPORT\t");
      assemble_name (f, imports_list->name);
      fputc ('\n', f);
      imports_list = imports_list->next;
    }
}
/* Globalize-label hook: defer to the default, additionally noting
   when "main" becomes global so __main gets imported at end of file.  */
static void
aof_globalize_label (FILE *stream, const char *name)
{
  default_globalize_label (stream, name);

  if (strcmp (name, "main") == 0)
    arm_main_function = 1;
}
/* Start-of-file hook for AOF: define the standard register-name
   aliases (RN for core registers and their APCS names, FN for FPA
   registers), then switch to the text section.  */
static void
aof_file_start (void)
{
  static const struct { const char *name; const char *dir; int num; }
  reg_defs[] =
    {
      { "__r0", "RN", 0 },  { "__a1", "RN", 0 },  { "__a2", "RN", 1 },
      { "__a3", "RN", 2 },  { "__a4", "RN", 3 },  { "__v1", "RN", 4 },
      { "__v2", "RN", 5 },  { "__v3", "RN", 6 },  { "__v4", "RN", 7 },
      { "__v5", "RN", 8 },  { "__v6", "RN", 9 },  { "__sl", "RN", 10 },
      { "__fp", "RN", 11 }, { "__ip", "RN", 12 }, { "__sp", "RN", 13 },
      { "__lr", "RN", 14 }, { "__pc", "RN", 15 }, { "__f0", "FN", 0 },
      { "__f1", "FN", 1 },  { "__f2", "FN", 2 },  { "__f3", "FN", 3 },
      { "__f4", "FN", 4 },  { "__f5", "FN", 5 },  { "__f6", "FN", 6 },
      { "__f7", "FN", 7 }
    };
  size_t i;

  for (i = 0; i < sizeof reg_defs / sizeof reg_defs[0]; i++)
    fprintf (asm_out_file, "%s\t%s\t%d\n",
	     reg_defs[i].name, reg_defs[i].dir, reg_defs[i].num);

  switch_to_section (text_section);
}
/* End-of-file hook for AOF: flush the PIC pool, emit the Thumb
   call-via stubs, dump pending imports, and close with END.  */
static void
aof_file_end (void)
{
  if (flag_pic)
    aof_dump_pic_table (asm_out_file);
  arm_file_end ();
  aof_dump_imports (asm_out_file);
  fputs ("\tEND\n", asm_out_file);
}
#endif
#if !defined(ARM_PE) && !TARGET_MACHO
/* Encode section/call information into a declaration's SYMBOL_REF.  */
static void
arm_encode_section_info (tree decl, rtx rtl, int first)
{
#ifndef AOF_ASSEMBLER
  /* Mark constants so they can be placed in (read-only) text; not done
     for AOF, where the directive set differs.  */
  if (optimize > 0 && TREE_CONSTANT (decl))
    SYMBOL_REF_FLAG (XEXP (rtl, 0)) = 1;
#endif
  /* Weak functions get a long-call marker (their definition may be
     replaced at link time); non-public ones get a short-call marker.  */
  if (first && DECL_P (decl))
    {
      if (TREE_CODE (decl) == FUNCTION_DECL && DECL_WEAK (decl))
	arm_encode_call_attribute (decl, LONG_CALL_FLAG_CHAR);
      else if (! TREE_PUBLIC (decl))
	arm_encode_call_attribute (decl, SHORT_CALL_FLAG_CHAR);
    }
  default_encode_section_info (decl, rtl, first);
}
#endif
#if TARGET_MACHO
/* Darwin variant of the section-info hook: run the Mach-O encoding
   first, then apply the ARM call-attribute marking.  */
static void
arm_darwin_encode_section_info (tree decl, rtx rtl, int first)
{
  darwin_encode_section_info (decl, rtl, first);
  /* Mark constants for placement in read-only sections.  */
  if (optimize > 0 && TREE_CONSTANT (decl))
    SYMBOL_REF_FLAG (XEXP (rtl, 0)) = 1;
  /* Default-visibility weak functions may be replaced at link time,
     so force long calls; non-public functions can use short calls.  */
  if (DECL_P (decl))
    {
      if (TREE_CODE (decl) == FUNCTION_DECL
	  && DECL_WEAK (decl)
	  && DECL_VISIBILITY (decl) == VISIBILITY_DEFAULT)
	arm_encode_call_attribute (decl, SYMBOL_LONG_CALL);
      else if (! TREE_PUBLIC (decl))
	arm_encode_call_attribute (decl, SYMBOL_SHORT_CALL);
    }
}
#endif
/* Internal-label hook: emitting the "L" label that the conditional
   execution state machine was tracking invalidates its state, since
   control can now enter from elsewhere.  */
static void
arm_internal_label (FILE *stream, const char *prefix, unsigned long labelno)
{
  if (arm_ccfsm_state == 3
      && (unsigned) arm_target_label == labelno
      && strcmp (prefix, "L") == 0)
    {
      arm_ccfsm_state = 0;
      arm_target_insn = NULL;
    }

  default_internal_label (stream, prefix, labelno);
}
/* Output the assembly for a C++ mi thunk: add DELTA to the "this"
   argument and tail-jump to FUNCTION.  Thumb and long-call thunks load
   the target address from a literal word emitted after the code and
   branch via r12.  */
static void
arm_output_mi_thunk (FILE *file, tree thunk ATTRIBUTE_UNUSED,
		     HOST_WIDE_INT delta,
		     HOST_WIDE_INT vcall_offset ATTRIBUTE_UNUSED,
		     tree function)
{
  static int thunk_label = 0;
  char label[256];
  char labelpc[256];
  int mi_delta = delta;
  const char *const mi_op = mi_delta < 0 ? "sub" : "add";
  int shift = 0;
  /* "this" is in r1 rather than r0 when the return value goes through
     a hidden aggregate-return pointer.  */
  int this_regno = (aggregate_value_p (TREE_TYPE (TREE_TYPE (function)), function)
		    ? 1 : 0);
  rtx function_rtx = XEXP (DECL_RTL (function), 0);
  const char *function_name;
  bool is_longcall = arm_is_longcall_p (function_rtx,
					SYMBOL_REF_FLAGS (function_rtx),
					1);
  bool is_indirected = false;
#if TARGET_MACHO
  /* Darwin PIC: reference the target through its indirection pointer
     when it is not defined in this translation unit.  */
  if (TARGET_MACHO
      && MACHOPIC_INDIRECT
      && (! machopic_data_defined_p (function_rtx)))
    {
      function_name = machopic_indirection_name (function_rtx, !is_longcall);
      is_indirected = true;
    }
  else
#endif
    function_name = XSTR (function_rtx, 0);
  /* Work with |delta|; MI_OP already records the sign.  */
  if (mi_delta < 0)
    mi_delta = - mi_delta;
  if (TARGET_THUMB || is_longcall)
    {
      /* Load the branch target from the literal word emitted below.  */
      int labelno = thunk_label++;
      ASM_GENERATE_INTERNAL_LABEL (label, "LTHUMBFUNC", labelno);
      fputs ("\tldr\tr12, ", file);
      assemble_name (file, label);
      fputc ('\n', file);
      if (flag_pic)
	{
	  /* PIC: the literal is target-relative to LTHUNKPC; add the
	     pc here to form the absolute address.  */
	  ASM_GENERATE_INTERNAL_LABEL (labelpc, "LTHUNKPC", labelno);
	  assemble_name (file, labelpc);
	  fputs (":\n", file);
	  fputs ("\tadd\tr12, pc, r12\n", file);
	}
      if (is_indirected)
	fputs ("\tldr\tr12, [r12]\n", file);
    }
  /* Apply DELTA to "this" in 8-bit immediate chunks, stepping over
     zero 2-bit groups to keep each chunk a valid ARM immediate.  */
  while (mi_delta != 0)
    {
      if ((mi_delta & (3 << shift)) == 0)
	shift += 2;
      else
	{
	  asm_fprintf (file, "\t%s\t%r, %r, #%d\n",
		       mi_op, this_regno, this_regno,
		       mi_delta & (0xff << shift));
	  mi_delta &= ~(0xff << shift);
	  shift += 8;
	}
    }
  if (TARGET_THUMB || is_longcall)
    {
      /* Branch via r12, then emit the literal word that was loaded.  */
      fprintf (file, "\tbx\tr12\n");
      ASM_OUTPUT_ALIGN (file, 2);
      assemble_name (file, label);
      fputs (":\n", file);
      if (flag_pic)
	{
	  /* PC-relative literal: target + bias - LTHUNKPC.  The bias
	     is -8 (ARM pc-read offset) or -7 (Thumb, with bit 0 set
	     for the mode switch).  */
	  rtx tem = gen_rtx_SYMBOL_REF (Pmode, function_name);
	  if (TARGET_MACHO && (TARGET_ARM || is_indirected))
	    tem = gen_rtx_PLUS (GET_MODE (tem), tem, GEN_INT (-8));
	  else
	    tem = gen_rtx_PLUS (GET_MODE (tem), tem, GEN_INT (-7));
	  tem = gen_rtx_MINUS (GET_MODE (tem),
			       tem,
			       gen_rtx_SYMBOL_REF (Pmode,
						   ggc_strdup (labelpc)));
	  assemble_integer (tem, 4, BITS_PER_WORD, 1);
	}
      else
	/* Non-PIC: the literal is simply the target's address.  */
	assemble_integer (gen_rtx_SYMBOL_REF (Pmode, function_name),
			  4, BITS_PER_WORD, 1);
    }
  else
    {
      /* ARM mode, short call: a direct branch suffices.  */
      fputs ("\tb\t", file);
      assemble_name (file, function_name);
      if (NEED_PLT_RELOC)
	fputs ("(PLT)", file);
      fputc ('\n', file);
    }
}
/* Write the CONST_VECTOR X to FILE as a single hex literal, elements
   emitted from the highest index down so the most significant element
   comes first.  Returns 1.  */
int
arm_emit_vector_const (FILE *file, rtx x)
{
  int i;
  const char * pattern;

  gcc_assert (GET_CODE (x) == CONST_VECTOR);

  /* Field width per element: 32, 16 or 8 bits.  */
  switch (GET_MODE (x))
    {
    case V2SImode: pattern = "%08x"; break;
    case V4HImode: pattern = "%04x"; break;
    case V8QImode: pattern = "%02x"; break;
    default:       gcc_unreachable ();
    }

  fprintf (file, "0x");
  for (i = CONST_VECTOR_NUNITS (x); i--;)
    {
      rtx element = CONST_VECTOR_ELT (x, i);

      /* INTVAL is a HOST_WIDE_INT, but the "%x" conversions expect an
	 unsigned int; passing it unconverted is undefined behavior on
	 hosts where HOST_WIDE_INT is wider than int.  The cast keeps
	 the value's low 32 bits, which is all these modes can hold.  */
      fprintf (file, pattern, (unsigned int) INTVAL (element));
    }
  return 1;
}
/* Output the assembly to load an iWMMXt GR register.  When the address
   is a reg+const with |offset| < 1024 (or not of that form at all) a
   single wldrw handles it; otherwise stage the load through the base
   integer register, preserving it on the stack.  */
const char *
arm_output_load_gr (rtx *operands)
{
  rtx reg;
  rtx offset;
  rtx wcgr;
  rtx sum;
  /* In-range or non reg+const addresses: one instruction suffices.  */
  if (GET_CODE (operands [1]) != MEM
      || GET_CODE (sum = XEXP (operands [1], 0)) != PLUS
      || GET_CODE (reg = XEXP (sum, 0)) != REG
      || GET_CODE (offset = XEXP (sum, 1)) != CONST_INT
      || ((INTVAL (offset) < 1024) && (INTVAL (offset) > -1024)))
    return "wldrw%?\t%0, %1";
  /* Offset out of wldrw range: save the base reg, load the value into
     it with ldr, transfer to the wcgr register, restore the base.
     NOTE: output_asm_insn is handed &reg as a one-element operand
     array for the save/restore insns.  */
  output_asm_insn ("str%?\t%0, [sp, #-4]!\t@ Start of GR load expansion", & reg);
  wcgr = operands[0];
  operands[0] = reg;
  output_asm_insn ("ldr%?\t%0, %1", operands);
  /* Move the loaded value into the destination GR register.  */
  operands[0] = wcgr;
  operands[1] = reg;
  output_asm_insn ("tmcr%?\t%0, %1", operands);
  output_asm_insn ("ldr%?\t%0, [sp], #4\t@ End of GR load expansion", & reg);
  return "";
}
/* TARGET_SETUP_INCOMING_VARARGS hook: have the caller-side "pretend"
   area cover every argument register not consumed by the named
   parameters, so the unnamed arguments can be spilled there.  */
static void
arm_setup_incoming_varargs (CUMULATIVE_ARGS *cum,
			    enum machine_mode mode ATTRIBUTE_UNUSED,
			    tree type ATTRIBUTE_UNUSED,
			    int *pretend_size,
			    int second_time ATTRIBUTE_UNUSED)
{
  int free_regs;

  cfun->machine->uses_anonymous_args = 1;

  free_regs = NUM_ARG_REGS - cum->nregs;
  if (free_regs > 0)
    *pretend_size = free_regs * UNITS_PER_WORD;
}
/* Scheduling helper: return nonzero when the register written by
   PRODUCER is NOT used in the destination address of the CONSUMER
   store, i.e. there is no early address dependency.  */
int
arm_no_early_store_addr_dep (rtx producer, rtx consumer)
{
  rtx result = PATTERN (producer);
  rtx dest = PATTERN (consumer);

  /* Peel any conditional-execution wrapper and, for a PARALLEL, look
     only at the first element; then take the SET destination.  */
  if (GET_CODE (result) == COND_EXEC)
    result = COND_EXEC_CODE (result);
  if (GET_CODE (result) == PARALLEL)
    result = XVECEXP (result, 0, 0);
  result = XEXP (result, 0);

  if (GET_CODE (dest) == COND_EXEC)
    dest = COND_EXEC_CODE (dest);
  if (GET_CODE (dest) == PARALLEL)
    dest = XVECEXP (dest, 0, 0);
  dest = XEXP (dest, 0);

  return !reg_overlap_mentioned_p (result, dest);
}
/* Scheduling helper: return nonzero when PRODUCER's result is NOT
   needed early by CONSUMER, an ALU operation with a possibly-shifted
   first operand.  When that operand is shifted, only the shift itself
   needs the value early; a bare REG means no shift, so the whole
   operation is checked instead.  */
int
arm_no_early_alu_shift_dep (rtx producer, rtx consumer)
{
  rtx result = PATTERN (producer);
  rtx alu_op = PATTERN (consumer);
  rtx shifted;

  /* Peel conditional execution / PARALLEL wrappers as usual.  */
  if (GET_CODE (result) == COND_EXEC)
    result = COND_EXEC_CODE (result);
  if (GET_CODE (result) == PARALLEL)
    result = XVECEXP (result, 0, 0);
  result = XEXP (result, 0);

  if (GET_CODE (alu_op) == COND_EXEC)
    alu_op = COND_EXEC_CODE (alu_op);
  if (GET_CODE (alu_op) == PARALLEL)
    alu_op = XVECEXP (alu_op, 0, 0);
  alu_op = XEXP (alu_op, 1);

  shifted = XEXP (alu_op, 0);
  if (GET_CODE (shifted) == REG)
    shifted = alu_op;

  return !reg_overlap_mentioned_p (result, shifted);
}
/* Scheduling helper: like arm_no_early_alu_shift_dep, but only the
   VALUE being shifted matters — descend one level into the shift
   expression (unless the operand is already a bare REG) before
   checking for overlap with PRODUCER's result.  */
int
arm_no_early_alu_shift_value_dep (rtx producer, rtx consumer)
{
  rtx result = PATTERN (producer);
  rtx alu_op = PATTERN (consumer);
  rtx shift_value;

  /* Peel conditional execution / PARALLEL wrappers as usual.  */
  if (GET_CODE (result) == COND_EXEC)
    result = COND_EXEC_CODE (result);
  if (GET_CODE (result) == PARALLEL)
    result = XVECEXP (result, 0, 0);
  result = XEXP (result, 0);

  if (GET_CODE (alu_op) == COND_EXEC)
    alu_op = COND_EXEC_CODE (alu_op);
  if (GET_CODE (alu_op) == PARALLEL)
    alu_op = XVECEXP (alu_op, 0, 0);
  alu_op = XEXP (alu_op, 1);

  shift_value = XEXP (alu_op, 0);
  if (GET_CODE (shift_value) != REG)
    shift_value = XEXP (shift_value, 0);

  return !reg_overlap_mentioned_p (result, shift_value);
}
/* Scheduling helper: return nonzero when CONSUMER is a
   multiply-accumulate (its source is a PLUS) whose accumulator
   operand does NOT need PRODUCER's result early.  */
int
arm_no_early_mul_dep (rtx producer, rtx consumer)
{
  rtx result = PATTERN (producer);
  rtx mac_src = PATTERN (consumer);

  /* Peel conditional execution / PARALLEL wrappers as usual.  */
  if (GET_CODE (result) == COND_EXEC)
    result = COND_EXEC_CODE (result);
  if (GET_CODE (result) == PARALLEL)
    result = XVECEXP (result, 0, 0);
  result = XEXP (result, 0);

  if (GET_CODE (mac_src) == COND_EXEC)
    mac_src = COND_EXEC_CODE (mac_src);
  if (GET_CODE (mac_src) == PARALLEL)
    mac_src = XVECEXP (mac_src, 0, 0);
  mac_src = XEXP (mac_src, 1);

  return (GET_CODE (mac_src) == PLUS
	  && !reg_overlap_mentioned_p (result, XEXP (mac_src, 0)));
}
/* Return TRUE if arguments should be promoted when a prototype is in
   scope; AAPCS-based ABIs do not promote.  */
static bool
arm_promote_prototypes (tree t ATTRIBUTE_UNUSED)
{
  return !TARGET_AAPCS_BASED;
}
/* Enums default to the smallest usable size on AAPCS-based targets,
   except for the AAPCS-Linux ABI.  */
static bool
arm_default_short_enums (void)
{
  return TARGET_AAPCS_BASED && arm_abi != ARM_ABI_AAPCS_LINUX;
}
/* Anonymous bitfields influence structure alignment under AAPCS.  */
static bool
arm_align_anon_bitfield (void)
{
  return TARGET_AAPCS_BASED;
}
/* Type of the C++ one-time-construction guard variable: a plain int
   under AAPCS, the generic ABI's 64-bit integer otherwise.  */
static tree
arm_cxx_guard_type (void)
{
  return TARGET_AAPCS_BASED ? integer_type_node : long_long_integer_type_node;
}
/* Under AAPCS only the least-significant bit of the guard variable is
   tested and set.  */
static bool
arm_cxx_guard_mask_bit (void)
{
  return TARGET_AAPCS_BASED;
}
/* Return the size of the operator-new[] array cookie for TYPE.
   AAPCS mandates a fixed 8-byte cookie; other ABIs fall back to the
   language-independent default.  */
static tree
arm_get_cookie_size (tree type)
{
  if (TARGET_AAPCS_BASED)
    return build_int_cst (sizetype, 8);

  return default_cxx_get_cookie_size (type);
}
/* AAPCS array cookies record the element size.  */
static bool
arm_cookie_has_size (void)
{
  return TARGET_AAPCS_BASED;
}
/* Under AAPCS, constructors and destructors return "this".  */
static bool
arm_cxx_cdtor_returns_this (void)
{
  return TARGET_AAPCS_BASED;
}
/* A class key method may be declared inline, except on AAPCS-based
   targets.  */
static bool
arm_cxx_key_method_may_be_inline (void)
{
  return !TARGET_AAPCS_BASED;
}
/* Set the ELF visibility of DECL, a piece of C++ class data (vtable,
   RTTI, etc).  Only AAPCS-based targets override the default: COMDAT
   class data is hidden unless dynamic vague-linkage is supported.  */
static void
arm_cxx_determine_class_data_visibility (tree decl)
{
  if (!TARGET_AAPCS_BASED)
    return;

  if (!TARGET_ARM_DYNAMIC_VAGUE_LINKAGE_P && DECL_COMDAT (decl))
    DECL_VISIBILITY (decl) = VISIBILITY_HIDDEN;
  else
    DECL_VISIBILITY (decl) = VISIBILITY_DEFAULT;
  /* Mark the visibility explicit so later passes do not revisit it.  */
  DECL_VISIBILITY_SPECIFIED (decl) = 1;
}
/* Return TRUE if C++ class data should always be emitted COMDAT.
   Never on Mach-O; otherwise only on non-AAPCS targets.  (When
   TARGET_MACHO is a nonzero constant the second return is dead
   code.)  */
static bool
arm_cxx_class_data_always_comdat (void)
{
#if TARGET_MACHO
  return false;
#endif
  return !TARGET_AAPCS_BASED;
}
/* AAPCS targets register static destructors via __aeabi_atexit.  */
static bool
arm_cxx_use_aeabi_atexit (void)
{
  return TARGET_AAPCS_BASED;
}
/* Store SOURCE into the stack slot holding the function's saved
   return address, or straight into LR if LR was never saved.
   SCRATCH may be clobbered to build large stack offsets.  */
void
arm_set_return_address (rtx source, rtx scratch)
{
  arm_stack_offsets *offsets;
  HOST_WIDE_INT delta;
  rtx addr;
  unsigned long saved_regs;

  saved_regs = arm_compute_save_reg_mask ();

  if ((saved_regs & (1 << LR_REGNUM)) == 0)
    /* LR was not pushed; it is still live in its register.  */
    emit_move_insn (gen_rtx_REG (Pmode, LR_REGNUM), source);
  else
    {
      if (frame_pointer_needed)
	/* The saved LR sits one word above the frame pointer.  */
	addr = plus_constant(hard_frame_pointer_rtx, 4);
      else
	{
	  offsets = arm_get_frame_offsets ();
	  delta = offsets->outgoing_args - (offsets->frame - 4);

	  /* Offsets of 4096 and above do not fit the load/store
	     immediate field; add the high bits into SCRATCH first.  */
	  if (delta >= 4096)
	    {
	      emit_insn (gen_addsi3 (scratch, stack_pointer_rtx,
				     GEN_INT (delta & ~4095)));
	      addr = scratch;
	      delta &= 4095;
	    }
	  else
	    addr = stack_pointer_rtx;

	  addr = plus_constant (addr, delta);
	}

      emit_move_insn (gen_frame_mem (Pmode, addr), source);
    }
}
/* Thumb counterpart of arm_set_return_address: store SOURCE into the
   stack slot holding the saved return address, or into LR itself if
   LR was not saved.  SCRATCH may be clobbered for large offsets.  */
void
thumb_set_return_address (rtx source, rtx scratch)
{
  arm_stack_offsets *offsets;
  HOST_WIDE_INT delta;
  int reg;
  rtx addr;
  unsigned long mask;

  /* Keep SOURCE live up to the store below.  */
  emit_insn (gen_rtx_USE (VOIDmode, source));

  mask = thumb_compute_save_reg_mask ();
  if (mask & (1 << LR_REGNUM))
    {
      offsets = arm_get_frame_offsets ();

      if (frame_pointer_needed)
	{
	  /* Saved LR is one word above the frame pointer.  */
	  delta = 4;
	  reg = THUMB_HARD_FRAME_POINTER_REGNUM;
	}
      else
	{
	  delta = offsets->outgoing_args - (offsets->saved_args + 4);
	  reg = SP_REGNUM;
	}
      /* The backtrace structure occupies 16 bytes below the saves.  */
      if (TARGET_BACKTRACE)
	delta -= 16;

      addr = gen_rtx_REG (SImode, reg);
      /* Thumb load/store offset encodings are limited (the checks
	 below mirror those limits); otherwise compute the address in
	 SCRATCH.  Note the scratch path is only reachable for the
	 SP-relative case, so adding stack_pointer_rtx is correct.  */
      if ((reg != SP_REGNUM && delta >= 128)
	  || delta >= 1024)
	{
	  emit_insn (gen_movsi (scratch, GEN_INT (delta)));
	  emit_insn (gen_addsi3 (scratch, scratch, stack_pointer_rtx));
	  addr = scratch;
	}
      else
	addr = plus_constant (addr, delta);

      emit_move_insn (gen_frame_mem (Pmode, addr), source);
    }
  else
    emit_move_insn (gen_rtx_REG (Pmode, LR_REGNUM), source);
}
/* Return TRUE for the vector modes the backend supports: the three
   64-bit iWMMXt vector modes.  */
bool
arm_vector_mode_supported_p (enum machine_mode mode)
{
  return mode == V2SImode || mode == V4HImode || mode == V8QImode;
}
/* Shift counts are truncated to the low eight bits for SImode shifts
   only; other modes make no truncation guarantee (mask of 0).  */
static unsigned HOST_WIDE_INT
arm_shift_truncation_mask (enum machine_mode mode)
{
  if (mode != SImode)
    return 0;

  return 255;
}
/* Map hard register REGNO to its debug (DWARF) register number.
   Core registers map directly; FPA, iWMMXt and VFP registers occupy
   dedicated numbering ranges (FPA at 16 for legacy ABIs, 96 for
   AAPCS).  */
unsigned int
arm_dbx_register_number (unsigned int regno)
{
  if (regno < 16)
    return regno;

  if (IS_FPA_REGNUM (regno))
    return (TARGET_AAPCS_BASED ? 96 : 16) + regno - FIRST_FPA_REGNUM;

  if (IS_VFP_REGNUM (regno))
    return 256 + regno - FIRST_VFP_REGNUM;

  if (IS_IWMMXT_GR_REGNUM (regno))
    return 104 + regno - FIRST_IWMMXT_GR_REGNUM;

  if (IS_IWMMXT_REGNUM (regno))
    return 112 + regno - FIRST_IWMMXT_REGNUM;

  /* Any other register has no debug encoding.  */
  gcc_unreachable ();
}
#ifdef TARGET_UNWIND_INFO
/* Emit EABI unwind directives for a store-multiple pattern P: element
   0 of the PARALLEL adjusts SP, the remaining elements store the
   saved registers below the new SP.  */
static void
arm_unwind_emit_stm (FILE * asm_out_file, rtx p)
{
  int i;
  HOST_WIDE_INT offset;
  HOST_WIDE_INT nregs;
  int reg_size;
  unsigned reg;
  unsigned lastreg;
  rtx e;

  /* First element must be the SP adjustment.  */
  e = XVECEXP (p, 0, 0);
  if (GET_CODE (e) != SET
      || GET_CODE (XEXP (e, 0)) != REG
      || REGNO (XEXP (e, 0)) != SP_REGNUM
      || GET_CODE (XEXP (e, 1)) != PLUS)
    abort ();

  offset = -INTVAL (XEXP (XEXP (e, 1), 1));
  nregs = XVECLEN (p, 0) - 1;

  /* Classify the register bank by the first saved register.  */
  reg = REGNO (XEXP (XVECEXP (p, 0, 1), 1));
  if (reg < 16)
    {
      /* Core registers.  Four extra bytes of adjustment (e.g. a PC
	 push that is never restored) are emitted as padding.  */
      if (nregs * 4 == offset - 4)
	{
	  fprintf (asm_out_file, "\t.pad #4\n");
	  offset -= 4;
	}
      reg_size = 4;
    }
  else if (IS_VFP_REGNUM (reg))
    {
      /* VFP D registers; one word of the adjustment is not register
	 data (presumably the FSTMX format word — TODO confirm).  */
      offset -= 4;
      reg_size = 8;
    }
  else if (reg >= FIRST_FPA_REGNUM && reg <= LAST_FPA_REGNUM)
    {
      /* FPA registers are described by a single .save directive.  */
      asm_fprintf (asm_out_file, "\t.save %r, %wd\n", reg, nregs);
      return;
    }
  else
    abort ();

  /* The SP adjustment must now exactly cover the saved registers.  */
  if (offset != nregs * reg_size)
    abort ();

  fprintf (asm_out_file, "\t.save {");

  offset = 0;
  lastreg = 0;
  for (i = 1; i <= nregs; i++)
    {
      e = XVECEXP (p, 0, i);
      if (GET_CODE (e) != SET
	  || GET_CODE (XEXP (e, 0)) != MEM
	  || GET_CODE (XEXP (e, 1)) != REG)
	abort ();
      /* NOTE(review): lastreg is never advanced inside the loop, so
	 this ordering check is vacuous as written.  */
      reg = REGNO (XEXP (e, 1));
      if (reg < lastreg)
	abort ();
      if (i != 1)
	fprintf (asm_out_file, ", ");
      if (IS_VFP_REGNUM (reg))
	asm_fprintf (asm_out_file, "d%d", (reg - FIRST_VFP_REGNUM) / 2);
      else
	asm_fprintf (asm_out_file, "%r", reg);

#ifdef ENABLE_CHECKING
      /* Verify the stores target consecutive SP-relative slots.  */
      e = XEXP (XEXP (e, 0), 0);
      if (GET_CODE (e) == PLUS)
	{
	  offset += reg_size;
	  if (GET_CODE (XEXP (e, 0)) != REG
	      || REGNO (XEXP (e, 0)) != SP_REGNUM
	      || GET_CODE (XEXP (e, 1)) != CONST_INT
	      || offset != INTVAL (XEXP (e, 1)))
	    abort ();
	}
      else if (i != 1
	       || GET_CODE (e) != REG
	       || REGNO (e) != SP_REGNUM)
	abort ();
#endif
    }

  fprintf (asm_out_file, "}\n");
}
/* Emit EABI unwind directives for a single SET pattern P: a register
   push, a stack-pointer adjustment, a frame-pointer setup, or a copy
   of SP into another register.  */
static void
arm_unwind_emit_set (FILE * asm_out_file, rtx p)
{
  rtx e0;
  rtx e1;

  e0 = XEXP (p, 0);
  e1 = XEXP (p, 1);
  switch (GET_CODE (e0))
    {
    case MEM:
      /* Push of a single register: must be a pre-decrement of SP.  */
      if (GET_CODE (XEXP (e0, 0)) != PRE_DEC
	  || GET_CODE (XEXP (XEXP (e0, 0), 0)) != REG
	  || REGNO (XEXP (XEXP (e0, 0), 0)) != SP_REGNUM)
	abort ();

      asm_fprintf (asm_out_file, "\t.save ");
      if (IS_VFP_REGNUM (REGNO (e1)))
	asm_fprintf(asm_out_file, "{d%d}\n",
		    (REGNO (e1) - FIRST_VFP_REGNUM) / 2);
      else
	asm_fprintf(asm_out_file, "{%r}\n", REGNO (e1));
      break;

    case REG:
      if (REGNO (e0) == SP_REGNUM)
	{
	  /* SP adjustment: sp = sp + const (const is negative for an
	     allocation, hence the negation for .pad).  */
	  if (GET_CODE (e1) != PLUS
	      || GET_CODE (XEXP (e1, 0)) != REG
	      || REGNO (XEXP (e1, 0)) != SP_REGNUM
	      || GET_CODE (XEXP (e1, 1)) != CONST_INT)
	    abort ();

	  asm_fprintf (asm_out_file, "\t.pad #%wd\n",
		       -INTVAL (XEXP (e1, 1)));
	}
      else if (REGNO (e0) == HARD_FRAME_POINTER_REGNUM)
	{
	  HOST_WIDE_INT offset;
	  unsigned reg;

	  if (GET_CODE (e1) == PLUS)
	    {
	      /* fp = reg + const.  NOTE(review): OFFSET is assigned
		 but the directive re-extracts INTVAL directly, so the
		 local is effectively unused.  */
	      if (GET_CODE (XEXP (e1, 0)) != REG
		  || GET_CODE (XEXP (e1, 1)) != CONST_INT)
		abort ();
	      reg = REGNO (XEXP (e1, 0));
	      offset = INTVAL (XEXP (e1, 1));
	      asm_fprintf (asm_out_file, "\t.setfp %r, %r, #%wd\n",
			   HARD_FRAME_POINTER_REGNUM, reg,
			   INTVAL (XEXP (e1, 1)));
	    }
	  else if (GET_CODE (e1) == REG)
	    {
	      reg = REGNO (e1);
	      asm_fprintf (asm_out_file, "\t.setfp %r, %r\n",
			   HARD_FRAME_POINTER_REGNUM, reg);
	    }
	  else
	    abort ();
	}
      else if (GET_CODE (e1) == REG && REGNO (e1) == SP_REGNUM)
	{
	  /* Plain copy of SP into another register.  */
	  asm_fprintf (asm_out_file, "\t.movsp %r\n", REGNO (e0));
	}
      else if (GET_CODE (e1) == PLUS
	       && GET_CODE (XEXP (e1, 0)) == REG
	       && REGNO (XEXP (e1, 0)) == SP_REGNUM
	       && GET_CODE (XEXP (e1, 1)) == CONST_INT)
	{
	  /* Register set to SP plus an offset.  */
	  asm_fprintf (asm_out_file, "\t.movsp %r, #%d\n",
		       REGNO (e0), (int)INTVAL(XEXP (e1, 1)));
	}
      else
	abort ();
      break;

    default:
      abort ();
    }
}
/* Hook: emit EABI unwind directives for INSN, dispatching on the
   shape of its frame-related pattern.  */
static void
arm_unwind_emit (FILE * asm_out_file, rtx insn)
{
  rtx pat;

  if (!ARM_EABI_UNWIND_TABLES)
    return;

  /* Only actual frame-related instructions need annotating.  */
  if (GET_CODE (insn) == NOTE || !RTX_FRAME_RELATED_P (insn))
    return;

  /* Prefer an explicit REG_FRAME_RELATED_EXPR note over the raw
     pattern, when the insn carries one.  */
  pat = find_reg_note (insn, REG_FRAME_RELATED_EXPR, NULL_RTX);
  pat = pat ? XEXP (pat, 0) : PATTERN (insn);

  switch (GET_CODE (pat))
    {
    case SET:
      arm_unwind_emit_set (asm_out_file, pat);
      break;

    case SEQUENCE:
      /* Store multiple.  */
      arm_unwind_emit_stm (asm_out_file, pat);
      break;

    default:
      abort();
    }
}
/* Output X, an exception-handling type-table entry, for the EABI
   unwinder.  Symbolic entries get a (TARGET2) relocation; plain
   integers do not.  Always returns TRUE (entry handled).  */
static bool
arm_output_ttype (rtx x)
{
  fputs ("\t.word\t", asm_out_file);
  output_addr_const (asm_out_file, x);

  if (GET_CODE (x) != CONST_INT)
    fputs ("(TARGET2)", asm_out_file);
  fputc ('\n', asm_out_file);

  return TRUE;
}
#endif
/* Emit the per-function EABI unwind bracket: .fnstart before the
   prologue (PROLOGUE true), .fnend after the epilogue.  No-op when
   unwind tables are not wanted.  */
void
arm_output_fn_unwind (FILE * f, bool prologue)
{
  if (!ARM_EABI_UNWIND_TABLES)
    return;

  fputs (prologue ? "\t.fnstart\n" : "\t.fnend\n", f);
}
/* Output to FP the value operand of the UNSPEC_TLS rtx X, decorated
   with the assembler relocation selected by its reloc operand.  The
   GD/LDM/IE forms additionally append a PC-relative bias built from
   the unspec's label operands.  Always returns TRUE.  */
static bool
arm_emit_tls_decoration (FILE *fp, rtx x)
{
  enum tls_reloc reloc;
  rtx val;

  val = XVECEXP (x, 0, 0);
  reloc = INTVAL (XVECEXP (x, 0, 1));

  output_addr_const (fp, val);

  switch (reloc)
    {
    case TLS_GD32:
      fputs ("(tlsgd)", fp);
      break;
    case TLS_LDM32:
      fputs ("(tlsldm)", fp);
      break;
    case TLS_LDO32:
      fputs ("(tlsldo)", fp);
      break;
    case TLS_IE32:
      fputs ("(gottpoff)", fp);
      break;
    case TLS_LE32:
      fputs ("(tpoff)", fp);
      break;
    default:
      gcc_unreachable ();
    }

  /* GOT-relative relocations need the PC-relative adjustment.  */
  switch (reloc)
    {
    case TLS_GD32:
    case TLS_LDM32:
    case TLS_IE32:
      fputs (" + (. - ", fp);
      output_addr_const (fp, XVECEXP (x, 0, 2));
      fputs (" - ", fp);
      output_addr_const (fp, XVECEXP (x, 0, 3));
      fputc (')', fp);
      break;
    default:
      break;
    }

  return TRUE;
}
/* Output to FP target-specific constant rtxs that the generic
   address-constant printer does not understand: TLS unspecs, PIC
   label unspecs, and constant vectors.  Returns TRUE if X was
   handled.  */
bool
arm_output_addr_const_extra (FILE *fp, rtx x)
{
  if (GET_CODE (x) == UNSPEC && XINT (x, 1) == UNSPEC_TLS)
    return arm_emit_tls_decoration (fp, x);
  else if (GET_CODE (x) == UNSPEC && XINT (x, 1) == UNSPEC_PIC_LABEL)
    {
      char label[256];
      int labelno = INTVAL (XVECEXP (x, 0, 0));

      /* Reconstruct the internal LPIC label from its number.  */
      ASM_GENERATE_INTERNAL_LABEL (label, "LPIC", labelno);
      assemble_name_raw (fp, label);

      return TRUE;
    }
  else if (GET_CODE (x) == CONST_VECTOR)
    return arm_emit_vector_const (fp, x);

  return FALSE;
}
#if TARGET_MACHO
/* Mach-O: decide whether DECL binds locally.  Vtables under
   -fapple-kext are treated as if in a shared library (second
   argument), making them non-local/overridable.  */
static bool
arm_binds_local_p (tree decl)
{
  return default_binds_local_p_1 (decl,
	  flag_apple_kext && lang_hooks.vtable_p (decl));
}
#endif
static rtx
arm_builtin_setjmp_frame_value (void)
{
static rtx arm_hard_frame_pointer_rtx;
if (! arm_hard_frame_pointer_rtx)
arm_hard_frame_pointer_rtx =
gen_rtx_REG (Pmode, ARM_HARD_FRAME_POINTER_REGNUM);
return arm_hard_frame_pointer_rtx;
}
#if TARGET_MACHO
/* Mach-O hook invoked when the optimization level changes; ARM has
   no per-level state to reset.  */
void
reset_optimization_options (int level ATTRIBUTE_UNUSED, int size ATTRIBUTE_UNUSED)
{
}
#endif
#ifdef OBJECT_FORMAT_MACHO
/* Emit a Mach-O symbol stub named STUB for symbol SYMB.  With full
   PIC (flag_pic == 2) the lazy pointer is addressed PC-relative via
   an add-to-pc sequence; otherwise its absolute address is used.  */
void
machopic_output_stub (FILE *file, const char *symb, const char *stub)
{
  unsigned int length;
  char *symbol_name, *lazy_ptr_name, *slp_label_name;
  static int label = 0;

  /* Strip any target-specific name encoding first.  */
  symb = (*targetm.strip_name_encoding) (symb);
  length = strlen (symb);

  symbol_name = alloca (length + 32);
  GEN_SYMBOL_NAME_FOR_SYMBOL (symbol_name, symb, length);

  lazy_ptr_name = alloca (length + 32);
  GEN_LAZY_PTR_NAME_FOR_SYMBOL (lazy_ptr_name, symb, length);

  slp_label_name = alloca (length + 32);
  GEN_SUFFIXED_NAME_FOR_SYMBOL (slp_label_name, symb, length, "$slp");

  if (flag_pic == 2)
    switch_to_section (darwin_sections[machopic_picsymbol_stub4_section]);
  else
    switch_to_section (darwin_sections[machopic_symbol_stub4_section]);

  fprintf (file, "\t.align 2\n");

  /* NOTE(review): ".code 32" switches the assembler to ARM state for
     the stub when compiling Thumb — presumably intentional (the stub
     body is ARM code); confirm.  */
  if (TARGET_THUMB)
    fprintf (file, "\t.code 32\n");

  fprintf (file, "%s:\n", stub);
  fprintf (file, "\t.indirect_symbol %s\n", symbol_name);
  fprintf (file, "\tldr\tip, %s\n", slp_label_name);

  label++;

  if (flag_pic == 2)
    fprintf (file, "L%d$scv:\tadd\tip, pc, ip\n", label);

  fprintf (file, "\tldr\tpc, [ip, #0]\n");

  if (flag_pic == 2)
    /* "+ 8" accounts for the PC read-ahead at the add above.  */
    fprintf (file, "%s:\n\t.long\t%s - (L%d$scv + 8)\n",
	     slp_label_name, lazy_ptr_name, label);
  else
    fprintf (file, "%s:\n\t.long\t%s\n",
	     slp_label_name, lazy_ptr_name);

  switch_to_section (darwin_sections[machopic_lazy_symbol_ptr_section]);
  fprintf (file, "%s:\n", lazy_ptr_name);
  fprintf (file, "\t.indirect_symbol\t%s\n", symbol_name);
  fprintf (file, "\t.long\tdyld_stub_binding_helper\n");
}
#endif
/* CW-style inline assembly: conservatively assume any opcode may
   touch memory.  */
extern bool iasm_memory_clobber (const char *);

bool iasm_memory_clobber (const char *ARG_UNUSED (opcode))
{
  return true;
}
/* Set target-dependent defaults for the given optimization LEVEL.
   The Mach-O settings below unconditionally disable several passes.  */
void
optimization_options (int level, int size ATTRIBUTE_UNUSED)
{
#if TARGET_MACHO
  flag_strict_aliasing = 0;
  flag_trapping_math = 0;
  flag_local_alloc = 0;
#ifdef INSN_SCHEDULING
  /* NOTE(review): this disables pre-reload scheduling at -O2 and
     above — confirm that is the intent.  */
  if (level > 1)
    flag_schedule_insns = 0;
#endif
  flag_errno_math = 0;
#endif
#ifdef SUBTARGET_OPTIMIZATION_OPTIONS
  SUBTARGET_OPTIMIZATION_OPTIONS;
#endif
}
/* Return the debug-info offset for local variable location VAR (a
   REG or REG+CONST rtx).  SP-relative slots are rebased when a frame
   pointer is in use; anything else returns its literal offset (or 0
   for unrecognized forms).  */
HOST_WIDE_INT
arm_local_debug_offset (rtx var)
{
  int offset;
  int reg;

  /* Decompose VAR into base register and constant offset.  */
  if (GET_CODE (var) == PLUS)
    {
      reg = REGNO (XEXP (var, 0));
      offset = INTVAL (XEXP (var, 1));
    }
  else if (GET_CODE (var) == REG)
    {
      reg = REGNO (var);
      offset = 0;
    }
  else
    return 0;

  /* With a frame pointer established, SP-relative offsets must be
     adjusted by the frame-to-outgoing-args distance.  */
  if (frame_pointer_needed && reg == SP_REGNUM)
    {
      arm_stack_offsets *offsets = arm_get_frame_offsets();

      return offset + (offsets->frame - offsets->outgoing_args);
    }

  return offset;
}
/* Return the log2 alignment required for LABEL.  An alignment unspec
   immediately following the label requests an explicit 4- or 8-byte
   alignment; otherwise use the global label alignment.  */
int arm_label_align (rtx label)
{
  rtx insn = NEXT_INSN (label);

  if (insn
      && GET_CODE (insn) == INSN
      && GET_CODE (PATTERN (insn)) == UNSPEC_VOLATILE)
    {
      /* Use XINT, not XEXP: the volatile-unspec number is the integer
	 operand 1 of the UNSPEC_VOLATILE ("Ei" format).  The old code
	 cast the rtx pointer returned by XEXP to int, which could
	 never equal a VUNSPEC_* code.  */
      if (XINT (PATTERN (insn), 1) == VUNSPEC_ALIGN)
	return 2;
      if (XINT (PATTERN (insn), 1) == VUNSPEC_ALIGN8)
	return 3;
    }

  return align_labels_log;
}
/* If-conversion hook: try to merge the compound conditions *P_TRUE
   and *P_FALSE (each an AND or IOR of two comparisons against the
   condition-code register) into single conditions, using lookup
   tables indexed by the two comparison codes.  A table entry of 0 or
   1 means the pair cannot be expressed as one condition; the
   corresponding output is set to 0 to reject the conversion.  */
void
arm_ifcvt_modify_multiple_tests (ce_if_block_t *ce_info ATTRIBUTE_UNUSED,
				 basic_block bb ATTRIBUTE_UNUSED,
				 rtx *p_true,
				 rtx *p_false)
{
  /* Tables indexed by [code1 - NE][code2 - NE] giving the comparison
     equivalent to (code1 AND code2) / (code1 OR code2).  */
  static RTX_CODE and_codes[10][10] =
    { { NE, 0, GT, GT, LT, LT, GTU, GTU, LTU, LTU },
      { 0, EQ, EQ, 0, EQ, 0, EQ, 0, EQ, 0 },
      { GT, EQ, GE, GT, EQ, 0, 0, 0, 0, 0 },
      { GT, 0, GT, GT, 0, 0, 0, 0, 0, 0 },
      { LT, EQ, EQ, 0, LE, LT, 0, 0, 0, 0 },
      { LT, 0, 0, 0, LT, LT, 0, 0, 0, 0 },
      { GTU, EQ, 0, 0, 0, 0, GEU, GTU, EQ, 0 },
      { GTU, 0, 0, 0, 0, 0, GTU, GTU, 0, 0 },
      { LTU, EQ, 0, 0, 0, 0, EQ, 0, LEU, LTU },
      { LTU, 0, 0, 0, 0, 0, 0, 0, LTU, LTU } };

  static RTX_CODE or_codes[10][10] =
    { { NE, 1, 1, NE, 1, NE, 1, NE, 1, NE },
      { 1, EQ, GE, GE, LE, LE, GEU, GEU, LEU, LEU },
      { 1, GE, GE, GE, 1, 1, 0, 0, 0, 0 },
      { NE, GE, GE, GT, 1, NE, 0, 0, 0, 0 },
      { 1, LE, 1, 1, LE, LE, 0, 0, 0, 0 },
      { NE, LE, 1, NE, LE, LT, 0, 0, 0, 0 },
      { 1, GEU, 0, 0, 0, 0, GEU, GEU, 1, 1 },
      { NE, GEU, 0, 0, 0, 0, GEU, GTU, 1, NE },
      { 1, LEU, 0, 0, 0, 0, 1, 1, LEU, LEU },
      { NE, LEU, 0, 0, 0, 0, 1, NE, LEU, LTU } };

  rtx true_lhs = XEXP (*p_true, 0);
  rtx false_lhs = XEXP (*p_false, 0);
  rtx true_rhs = XEXP (*p_true, 1);
  rtx false_rhs = XEXP (*p_false, 1);
  int true_and_p, false_and_p;
  RTX_CODE merged_code;

  if (!TARGET_ARM)
    return;

  if (GET_CODE (*p_true) == AND)
    true_and_p = true;
  else if (GET_CODE (*p_true) == IOR)
    true_and_p = false;
  else
    return;

  if (GET_CODE (*p_false) == AND)
    false_and_p = true;
  else if (GET_CODE (*p_false) == IOR)
    false_and_p = false;
  else
    return;

  /* All four comparisons must be against the condition-code register.
     The previous code checked TRUE_LHS four times (copy-paste bug),
     leaving the other three operands unchecked.  */
  if (!cc_register (XEXP (true_lhs, 0), CCmode)
      || !cc_register (XEXP (true_rhs, 0), CCmode)
      || !cc_register (XEXP (false_lhs, 0), CCmode)
      || !cc_register (XEXP (false_rhs, 0), CCmode))
    return;

  /* And all must compare against zero.  */
  if (XEXP (true_lhs, 1) != const0_rtx
      || XEXP (true_rhs, 1) != const0_rtx
      || XEXP (false_lhs, 1) != const0_rtx
      || XEXP (false_rhs, 1) != const0_rtx)
    return;

  /* Codes outside NE..LTU have no table entry.  */
  if (GET_CODE (true_lhs) < NE || GET_CODE (true_lhs) > LTU
      || GET_CODE (true_rhs) < NE || GET_CODE (true_rhs) > LTU)
    *p_true = 0;
  else
    {
      if (true_and_p)
	merged_code = and_codes [GET_CODE (true_lhs) - NE][GET_CODE (true_rhs) - NE];
      else
	merged_code = or_codes [GET_CODE (true_lhs) - NE][GET_CODE (true_rhs) - NE];

      if (merged_code == 0 || merged_code == 1)
	*p_true = 0;
      else
	*p_true = gen_rtx_fmt_ee (merged_code, VOIDmode, gen_rtx_REG (CCmode, CC_REGNUM), const0_rtx);
    }

  if (GET_CODE (false_lhs) < NE || GET_CODE (false_lhs) > LTU
      || GET_CODE (false_rhs) < NE || GET_CODE (false_rhs) > LTU)
    *p_false = 0;
  else
    {
      if (false_and_p)
	merged_code = and_codes [GET_CODE (false_lhs) - NE][GET_CODE (false_rhs) - NE];
      else
	merged_code = or_codes [GET_CODE (false_lhs) - NE][GET_CODE (false_rhs) - NE];

      if (merged_code == 0 || merged_code == 1)
	*p_false = 0;
      else
	*p_false = gen_rtx_fmt_ee (merged_code, VOIDmode, gen_rtx_REG (CCmode, CC_REGNUM), const0_rtx);
    }
}
/* Handle the "ms_struct" attribute: only valid on struct and union
   types, and rejected when the type already carries "gcc_struct".  */
static tree
arm_handle_ms_struct_attribute (tree *node, tree name,
				tree args ATTRIBUTE_UNUSED,
				int flags ATTRIBUTE_UNUSED, bool *no_add_attrs)
{
  tree *type = NULL;

  /* For a TYPE_DECL the attribute applies to the declared type.  */
  if (DECL_P (*node))
    {
      if (TREE_CODE (*node) == TYPE_DECL)
	type = &TREE_TYPE (*node);
    }
  else
    type = node;

  if (!(type && (TREE_CODE (*type) == RECORD_TYPE
		 || TREE_CODE (*type) == UNION_TYPE)))
    {
      warning (OPT_Wattributes, "%qs attribute ignored",
	       IDENTIFIER_POINTER (name));
      *no_add_attrs = true;
    }
  else if (lookup_attribute ("gcc_struct", TYPE_ATTRIBUTES (*type)))
    {
      warning (OPT_Wattributes, "%qs incompatible attribute ignored",
	       IDENTIFIER_POINTER (name));
      *no_add_attrs = true;
    }

  return NULL_TREE;
}
/* Handle the "gcc_struct" attribute: only valid on struct and union
   types.  An existing "ms_struct" attribute is removed so the default
   GCC layout takes effect.  */
static tree
arm_handle_gcc_struct_attribute (tree *node, tree name,
				 tree args ATTRIBUTE_UNUSED,
				 int flags ATTRIBUTE_UNUSED, bool *no_add_attrs)
{
  tree *type = NULL;

  /* For a TYPE_DECL the attribute applies to the declared type.  */
  if (DECL_P (*node))
    {
      if (TREE_CODE (*node) == TYPE_DECL)
	type = &TREE_TYPE (*node);
    }
  else
    type = node;

  if (!(type && (TREE_CODE (*type) == RECORD_TYPE
		 || TREE_CODE (*type) == UNION_TYPE)))
    {
      warning (OPT_Wattributes, "%qs attribute ignored",
	       IDENTIFIER_POINTER (name));
      *no_add_attrs = true;
    }
  else if (lookup_attribute ("ms_struct", TYPE_ATTRIBUTES (*type)))
    {
      /* remove_attribute returns the pruned list; the previous code
	 discarded the result, so an "ms_struct" at the head of the
	 list was never actually removed.  Store it back.  */
      TYPE_ATTRIBUTES (*type) = remove_attribute ("ms_struct",
						  TYPE_ATTRIBUTES (*type));
      *no_add_attrs = true;
    }

  return NULL_TREE;
}
/* Return TRUE if RECORD_TYPE should use the Microsoft bitfield
   layout, i.e. it carries the "ms_struct" attribute.  */
static bool
arm_ms_bitfield_layout_p (tree record_type)
{
  if (lookup_attribute ("ms_struct", TYPE_ATTRIBUTES (record_type)))
    return true;

  return false;
}
/* Return the alignment in bits required for FIELD under ms_struct
   layout: aggregates use their own type alignment; scalars use the
   mode size when it exceeds BIGGEST_ALIGNMENT, otherwise the declared
   type alignment.  */
int
arm_field_ms_struct_align (tree field)
{
  tree type = TREE_TYPE (field);
  int desired_align;

  if (TREE_CODE (type) == RECORD_TYPE || TREE_CODE (type) == UNION_TYPE)
    desired_align = TYPE_ALIGN (type);
  else
    {
      enum machine_mode mode;

      /* For arrays, the alignment comes from the element type.  */
      mode = TYPE_MODE (TREE_CODE (type) == ARRAY_TYPE
			? get_inner_array_type (type) : type);
      desired_align = GET_MODE_BITSIZE (mode) > BIGGEST_ALIGNMENT ?
	GET_MODE_BITSIZE (mode) : TYPE_ALIGN (type);
      gcc_assert (desired_align <= BIGGEST_MS_STRUCT_ALIGNMENT);
    }
  return desired_align;
}
/* Return the minimum alignment in bits for function entry points:
   32 for ARM, 16 for Thumb, raised to 32 for thunks or functions
   flagged as needing 4-byte alignment, and never below the
   -falign-loops setting.  */
int
arm_function_boundary (void)
{
  int min_align = TARGET_ARM ? 32 : 16;

  if (cfun
      && (current_function_is_thunk || cfun->needs_4byte_alignment))
    min_align = MAX (min_align, 32);

  min_align = MAX (min_align, align_loops * BITS_PER_UNIT);

  return min_align;
}
#include "gt-arm.h"