Diffstat (limited to 'gcc/config/arm/arm.md')
-rw-r--r--  gcc/config/arm/arm.md  |  125
1 file changed, 59 insertions(+), 66 deletions(-)
diff --git a/gcc/config/arm/arm.md b/gcc/config/arm/arm.md
index 2d5359e532f..f78e1477eab 100644
--- a/gcc/config/arm/arm.md
+++ b/gcc/config/arm/arm.md
@@ -4448,62 +4448,78 @@
; ARMv6+ unaligned load/store instructions (used for packed structure accesses).
(define_insn "unaligned_loadsi"
- [(set (match_operand:SI 0 "s_register_operand" "=l,r")
- (unspec:SI [(match_operand:SI 1 "memory_operand" "Uw,m")]
+ [(set (match_operand:SI 0 "s_register_operand" "=l,l,r")
+ (unspec:SI [(match_operand:SI 1 "memory_operand" "m,Uw,m")]
UNSPEC_UNALIGNED_LOAD))]
"unaligned_access"
- "ldr%?\t%0, %1\t@ unaligned"
- [(set_attr "arch" "t2,any")
- (set_attr "length" "2,4")
- (set_attr "predicable" "yes")
- (set_attr "predicable_short_it" "yes,no")
+ "@
+ ldr\t%0, %1\t@ unaligned
+ ldr%?\t%0, %1\t@ unaligned
+ ldr%?\t%0, %1\t@ unaligned"
+ [(set_attr "arch" "t1,t2,32")
+ (set_attr "length" "2,2,4")
+ (set_attr "predicable" "no,yes,yes")
+ (set_attr "predicable_short_it" "no,yes,no")
(set_attr "type" "load_4")])
+;; The 16-bit Thumb1 variant of ldrsh requires two registers in the
+;; address (there's no immediate format). That's tricky to support
+;; here and we don't really need this pattern for that case, so only
+;; enable for 32-bit ISAs.
(define_insn "unaligned_loadhis"
[(set (match_operand:SI 0 "s_register_operand" "=r")
(sign_extend:SI
(unspec:HI [(match_operand:HI 1 "memory_operand" "Uh")]
UNSPEC_UNALIGNED_LOAD)))]
- "unaligned_access"
+ "unaligned_access && TARGET_32BIT"
"ldrsh%?\t%0, %1\t@ unaligned"
[(set_attr "predicable" "yes")
(set_attr "type" "load_byte")])
(define_insn "unaligned_loadhiu"
- [(set (match_operand:SI 0 "s_register_operand" "=l,r")
+ [(set (match_operand:SI 0 "s_register_operand" "=l,l,r")
(zero_extend:SI
- (unspec:HI [(match_operand:HI 1 "memory_operand" "Uw,m")]
+ (unspec:HI [(match_operand:HI 1 "memory_operand" "m,Uw,m")]
UNSPEC_UNALIGNED_LOAD)))]
"unaligned_access"
- "ldrh%?\t%0, %1\t@ unaligned"
- [(set_attr "arch" "t2,any")
- (set_attr "length" "2,4")
- (set_attr "predicable" "yes")
- (set_attr "predicable_short_it" "yes,no")
+ "@
+ ldrh\t%0, %1\t@ unaligned
+ ldrh%?\t%0, %1\t@ unaligned
+ ldrh%?\t%0, %1\t@ unaligned"
+ [(set_attr "arch" "t1,t2,32")
+ (set_attr "length" "2,2,4")
+ (set_attr "predicable" "no,yes,yes")
+ (set_attr "predicable_short_it" "no,yes,no")
(set_attr "type" "load_byte")])
(define_insn "unaligned_storesi"
- [(set (match_operand:SI 0 "memory_operand" "=Uw,m")
- (unspec:SI [(match_operand:SI 1 "s_register_operand" "l,r")]
+ [(set (match_operand:SI 0 "memory_operand" "=m,Uw,m")
+ (unspec:SI [(match_operand:SI 1 "s_register_operand" "l,l,r")]
UNSPEC_UNALIGNED_STORE))]
"unaligned_access"
- "str%?\t%1, %0\t@ unaligned"
- [(set_attr "arch" "t2,any")
- (set_attr "length" "2,4")
- (set_attr "predicable" "yes")
- (set_attr "predicable_short_it" "yes,no")
+ "@
+ str\t%1, %0\t@ unaligned
+ str%?\t%1, %0\t@ unaligned
+ str%?\t%1, %0\t@ unaligned"
+ [(set_attr "arch" "t1,t2,32")
+ (set_attr "length" "2,2,4")
+ (set_attr "predicable" "no,yes,yes")
+ (set_attr "predicable_short_it" "no,yes,no")
(set_attr "type" "store_4")])
(define_insn "unaligned_storehi"
- [(set (match_operand:HI 0 "memory_operand" "=Uw,m")
- (unspec:HI [(match_operand:HI 1 "s_register_operand" "l,r")]
+ [(set (match_operand:HI 0 "memory_operand" "=m,Uw,m")
+ (unspec:HI [(match_operand:HI 1 "s_register_operand" "l,l,r")]
UNSPEC_UNALIGNED_STORE))]
"unaligned_access"
- "strh%?\t%1, %0\t@ unaligned"
- [(set_attr "arch" "t2,any")
- (set_attr "length" "2,4")
- (set_attr "predicable" "yes")
- (set_attr "predicable_short_it" "yes,no")
+ "@
+ strh\t%1, %0\t@ unaligned
+ strh%?\t%1, %0\t@ unaligned
+ strh%?\t%1, %0\t@ unaligned"
+ [(set_attr "arch" "t1,t2,32")
+ (set_attr "length" "2,2,4")
+ (set_attr "predicable" "no,yes,yes")
+ (set_attr "predicable_short_it" "no,yes,no")
(set_attr "type" "store_4")])
@@ -5976,52 +5992,29 @@
}
}
- if (ARM_OFFSETS_MUST_BE_WITHIN_SECTIONS_P)
+ split_const (operands[1], &base, &offset);
+ if (INTVAL (offset) != 0
+ && targetm.cannot_force_const_mem (SImode, operands[1]))
{
- split_const (operands[1], &base, &offset);
- if (GET_CODE (base) == SYMBOL_REF
- && !offset_within_block_p (base, INTVAL (offset)))
- {
- tmp = can_create_pseudo_p () ? gen_reg_rtx (SImode) : operands[0];
- emit_move_insn (tmp, base);
- emit_insn (gen_addsi3 (operands[0], tmp, offset));
- DONE;
- }
+ tmp = can_create_pseudo_p () ? gen_reg_rtx (SImode) : operands[0];
+ emit_move_insn (tmp, base);
+ emit_insn (gen_addsi3 (operands[0], tmp, offset));
+ DONE;
}
+ tmp = can_create_pseudo_p () ? NULL_RTX : operands[0];
+
/* Recognize the case where operand[1] is a reference to thread-local
- data and load its address to a register. */
+ data and load its address to a register. Offsets have been split off
+ already. */
if (arm_tls_referenced_p (operands[1]))
- {
- rtx tmp = operands[1];
- rtx addend = NULL;
-
- if (GET_CODE (tmp) == CONST && GET_CODE (XEXP (tmp, 0)) == PLUS)
- {
- addend = XEXP (XEXP (tmp, 0), 1);
- tmp = XEXP (XEXP (tmp, 0), 0);
- }
-
- gcc_assert (GET_CODE (tmp) == SYMBOL_REF);
- gcc_assert (SYMBOL_REF_TLS_MODEL (tmp) != 0);
-
- tmp = legitimize_tls_address (tmp,
- !can_create_pseudo_p () ? operands[0] : 0);
- if (addend)
- {
- tmp = gen_rtx_PLUS (SImode, tmp, addend);
- tmp = force_operand (tmp, operands[0]);
- }
- operands[1] = tmp;
- }
+ operands[1] = legitimize_tls_address (operands[1], tmp);
else if (flag_pic
&& (CONSTANT_P (operands[1])
|| symbol_mentioned_p (operands[1])
|| label_mentioned_p (operands[1])))
- operands[1] = legitimize_pic_address (operands[1], SImode,
- (!can_create_pseudo_p ()
- ? operands[0]
- : 0));
+ operands[1] =
+ legitimize_pic_address (operands[1], SImode, tmp);
}
"
)
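
To make the second hunk concrete, here is a minimal C sketch (names invented) of the cases the simplified movsi expander now handles: a thread-local symbol is passed straight to legitimize_tls_address, since any constant offset has already been peeled off by the split_const / cannot_force_const_mem branch, and with -fpic a non-TLS symbol takes the legitimize_pic_address path with the same scratch register.

/* Illustrative only: moving the address of a __thread variable into a
   register leaves a TLS SYMBOL_REF in operands[1], which the rewritten
   expander now hands directly to legitimize_tls_address.  An address
   with a constant offset (e.g. an element of a __thread array) would
   instead hit the earlier split_const branch, so no addend handling is
   needed at the TLS step.  */
__thread int tls_counter;

int *
tls_addr (void)
{
  return &tls_counter;
}

/* With -fpic, the address of an ordinary global is not a legitimate
   constant either; it goes through legitimize_pic_address, receiving
   the same 'tmp' scratch (operands[0] when no pseudo can be created).  */
extern int shared_state;

int *
shared_addr (void)
{
  return &shared_state;
}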