diff options
author | Thomas Preud'homme <thomas.preudhomme@arm.com> | 2016-09-22 15:19:25 +0000 |
---|---|---|
committer | Thomas Preud'homme <thomas.preudhomme@arm.com> | 2016-09-22 15:19:25 +0000 |
commit | f0e46394ccca89845e710e96d7f3529a840f6f94 (patch) | |
tree | a06af0a328d6a5ab1ee5839bd9c148a570ddf1de | |
parent | aa049f252573e958ff010970bc573b419a91ed1a (diff) |
2016-09-22 Thomas Preud'homme <thomas.preudhomme@arm.com>
gcc/
* config/arm/arm.c (arm_expand_compare_and_swap): Add new bdst local
variable. Add the new parameter to the insn generator. Set that
parameter to be CC flag for 32-bit targets, bval otherwise. Set the
return value from the negation of that parameter for Thumb-1, keeping
the logic unchanged otherwise except for using bdst as the destination
register of the compare_and_swap insn.
(arm_split_compare_and_swap): Add an explanation to the function
comment about how the value is returned. Rename scratch variable to
neg_bval. Adapt initialization of variables holding operands to the
new operand numbers. Use return register to hold result of store
exclusive for Thumb-1, scratch register otherwise. Construct the
appropriate cbranch for Thumb-1 targets, keeping the logic unchanged
for 32-bit targets. Guard Z flag setting to restrict to 32-bit targets.
Use gen_cbranchsi4 rather than hand-written conditional branch to loop
for strongly ordered compare_and_swap.
* config/arm/predicates.md (cc_register_operand): New predicate.
* config/arm/sync.md (atomic_compare_and_swap<mode>_1): Use a
match_operand with the new predicate to accept either the CC flag or a
destination register for the boolean return value, restricting it to
CC flag only via constraint. Adapt operand numbers accordingly.
git-svn-id: https://gcc.gnu.org/svn/gcc/branches/ARM/embedded-5-branch@240368 138bc75d-0d04-0410-961f-82ee72b054a4
-rw-r--r-- | gcc/ChangeLog.arm | 23 | ||||
-rw-r--r-- | gcc/config/arm/arm.c | 89 | ||||
-rw-r--r-- | gcc/config/arm/predicates.md | 6 | ||||
-rw-r--r-- | gcc/config/arm/sync.md | 40 |
4 files changed, 108 insertions, 50 deletions
diff --git a/gcc/ChangeLog.arm b/gcc/ChangeLog.arm index baf5d077200..0b28b500d1d 100644 --- a/gcc/ChangeLog.arm +++ b/gcc/ChangeLog.arm @@ -1,5 +1,28 @@ 2016-09-22 Thomas Preud'homme <thomas.preudhomme@arm.com> + * config/arm/arm.c (arm_expand_compare_and_swap): Add new bdst local + variable. Add the new parameter to the insn generator. Set that + parameter to be CC flag for 32-bit targets, bval otherwise. Set the + return value from the negation of that parameter for Thumb-1, keeping + the logic unchanged otherwise except for using bdst as the destination + register of the compare_and_swap insn. + (arm_split_compare_and_swap): Add explanation about how is the value + returned to the function comment. Rename scratch variable to + neg_bval. Adapt initialization of variables holding operands to the + new operand numbers. Use return register to hold result of store + exclusive for Thumb-1, scratch register otherwise. Construct the + appropriate cbranch for Thumb-1 targets, keeping the logic unchanged + for 32-bit targets. Guard Z flag setting to restrict to 32bit targets. + Use gen_cbranchsi4 rather than hand-written conditional branch to loop + for strongly ordered compare_and_swap. + * config/arm/predicates.md (cc_register_operand): New predicate. + * config/arm/sync.md (atomic_compare_and_swap<mode>_1): Use a + match_operand with the new predicate to accept either the CC flag or a + destination register for the boolean return value, restricting it to + CC flag only via constraint. Adapt operand numbers accordingly. + +2016-09-22 Thomas Preud'homme <thomas.preudhomme@arm.com> + * config/arm/constraints.md (Q constraint): Document its use for Thumb-1. 
(Pf constraint): New constraint for relaxed, consume or relaxed memory diff --git a/gcc/config/arm/arm.c b/gcc/config/arm/arm.c index 0457946b059..18d057a34a0 100644 --- a/gcc/config/arm/arm.c +++ b/gcc/config/arm/arm.c @@ -28392,9 +28392,9 @@ emit_unlikely_jump (rtx insn) void arm_expand_compare_and_swap (rtx operands[]) { - rtx bval, rval, mem, oldval, newval, is_weak, mod_s, mod_f, x; + rtx bval, bdst, rval, mem, oldval, newval, is_weak, mod_s, mod_f, x; machine_mode mode; - rtx (*gen) (rtx, rtx, rtx, rtx, rtx, rtx, rtx); + rtx (*gen) (rtx, rtx, rtx, rtx, rtx, rtx, rtx, rtx); bval = operands[0]; rval = operands[1]; @@ -28451,43 +28451,54 @@ arm_expand_compare_and_swap (rtx operands[]) gcc_unreachable (); } - emit_insn (gen (rval, mem, oldval, newval, is_weak, mod_s, mod_f)); + bdst = TARGET_THUMB1 ? bval : gen_rtx_REG (CCmode, CC_REGNUM); + emit_insn (gen (bdst, rval, mem, oldval, newval, is_weak, mod_s, mod_f)); if (mode == QImode || mode == HImode) emit_move_insn (operands[1], gen_lowpart (mode, rval)); /* In all cases, we arrange for success to be signaled by Z set. This arrangement allows for the boolean result to be used directly - in a subsequent branch, post optimization. */ - x = gen_rtx_REG (CCmode, CC_REGNUM); - x = gen_rtx_EQ (SImode, x, const0_rtx); - emit_insn (gen_rtx_SET (VOIDmode, bval, x)); + in a subsequent branch, post optimization. For Thumb-1 targets, the + boolean negation of the result is also stored in bval because Thumb-1 + backend lacks dependency tracking for CC flag due to flag-setting not + being represented at RTL level. */ + if (TARGET_THUMB1) + gen_cstoresi_eq0_thumb1 (bval, bdst); + else + { + x = gen_rtx_EQ (SImode, bdst, const0_rtx); + emit_insn (gen_rtx_SET (VOIDmode, bval, x)); + } } /* Split a compare and swap pattern. It is IMPLEMENTATION DEFINED whether another memory store between the load-exclusive and store-exclusive can reset the monitor from Exclusive to Open state. 
This means we must wait until after reload to split the pattern, lest we get a register spill in - the middle of the atomic sequence. */ + the middle of the atomic sequence. Success of the compare and swap is + indicated by the Z flag set for 32bit targets and by neg_bval being zero + for Thumb-1 targets (ie. negation of the boolean value returned by + atomic_compare_and_swapmode standard pattern in operand 0). */ void arm_split_compare_and_swap (rtx operands[]) { - rtx rval, mem, oldval, newval, scratch; + rtx rval, mem, oldval, newval, neg_bval; machine_mode mode; enum memmodel mod_s, mod_f; bool is_weak; rtx_code_label *label1, *label2; rtx x, cond; - rval = operands[0]; - mem = operands[1]; - oldval = operands[2]; - newval = operands[3]; - is_weak = (operands[4] != const0_rtx); - mod_s = memmodel_from_int (INTVAL (operands[5])); - mod_f = memmodel_from_int (INTVAL (operands[6])); - scratch = operands[7]; + rval = operands[1]; + mem = operands[2]; + oldval = operands[3]; + newval = operands[4]; + is_weak = (operands[5] != const0_rtx); + mod_s = memmodel_from_int (INTVAL (operands[6])); + mod_f = memmodel_from_int (INTVAL (operands[7])); + neg_bval = TARGET_THUMB1 ? operands[0] : operands[8]; mode = GET_MODE (mem); bool is_armv8_sync = arm_arch8 && is_mm_sync (mod_s); @@ -28519,26 +28530,44 @@ arm_split_compare_and_swap (rtx operands[]) arm_emit_load_exclusive (mode, rval, mem, use_acquire); - cond = arm_gen_compare_reg (NE, rval, oldval, scratch); - x = gen_rtx_NE (VOIDmode, cond, const0_rtx); - x = gen_rtx_IF_THEN_ELSE (VOIDmode, x, - gen_rtx_LABEL_REF (Pmode, label2), pc_rtx); - emit_unlikely_jump (gen_rtx_SET (VOIDmode, pc_rtx, x)); + /* Z is set to 0 for 32bit targets (resp. rval set to 1) if oldval != rval, + as required to communicate with arm_expand_compare_and_swap. 
*/ + if (TARGET_32BIT) + { + cond = arm_gen_compare_reg (NE, rval, oldval, neg_bval); + x = gen_rtx_NE (VOIDmode, cond, const0_rtx); + x = gen_rtx_IF_THEN_ELSE (VOIDmode, x, + gen_rtx_LABEL_REF (Pmode, label2), pc_rtx); + emit_unlikely_jump (gen_rtx_SET (VOIDmode, pc_rtx, x)); + } + else + { + emit_move_insn (neg_bval, const1_rtx); + cond = gen_rtx_NE (VOIDmode, rval, oldval); + if (thumb1_cmpneg_operand (oldval, SImode)) + emit_unlikely_jump (gen_cbranchsi4_scratch (neg_bval, rval, oldval, + label2, cond)); + else + emit_unlikely_jump (gen_cbranchsi4_insn (cond, rval, oldval, label2)); + } - arm_emit_store_exclusive (mode, scratch, mem, newval, use_release); + arm_emit_store_exclusive (mode, neg_bval, mem, newval, use_release); /* Weak or strong, we want EQ to be true for success, so that we match the flags that we got from the compare above. */ - cond = gen_rtx_REG (CCmode, CC_REGNUM); - x = gen_rtx_COMPARE (CCmode, scratch, const0_rtx); - emit_insn (gen_rtx_SET (VOIDmode, cond, x)); + if (TARGET_32BIT) + { + cond = gen_rtx_REG (CCmode, CC_REGNUM); + x = gen_rtx_COMPARE (CCmode, neg_bval, const0_rtx); + emit_insn (gen_rtx_SET (VOIDmode, cond, x)); + } if (!is_weak) { - x = gen_rtx_NE (VOIDmode, cond, const0_rtx); - x = gen_rtx_IF_THEN_ELSE (VOIDmode, x, - gen_rtx_LABEL_REF (Pmode, label1), pc_rtx); - emit_unlikely_jump (gen_rtx_SET (VOIDmode, pc_rtx, x)); + /* Z is set to boolean value of !neg_bval, as required to communicate + with arm_expand_compare_and_swap. 
*/ + x = gen_rtx_NE (VOIDmode, neg_bval, const0_rtx); + emit_unlikely_jump (gen_cbranchsi4 (x, neg_bval, const0_rtx, label1)); } if (!is_mm_relaxed (mod_f)) diff --git a/gcc/config/arm/predicates.md b/gcc/config/arm/predicates.md index 48e4ba86e7b..806d14b517c 100644 --- a/gcc/config/arm/predicates.md +++ b/gcc/config/arm/predicates.md @@ -391,6 +391,12 @@ || mode == CC_DGTUmode)); }) +;; Any register, including CC +(define_predicate "cc_register_operand" + (and (match_code "reg") + (ior (match_operand 0 "s_register_operand") + (match_operand 0 "cc_register")))) + (define_special_predicate "arm_extendqisi_mem_op" (and (match_operand 0 "memory_operand") (match_test "TARGET_ARM ? arm_legitimate_address_outer_p (mode, diff --git a/gcc/config/arm/sync.md b/gcc/config/arm/sync.md index e2ab50441f5..019b8425dc6 100644 --- a/gcc/config/arm/sync.md +++ b/gcc/config/arm/sync.md @@ -183,20 +183,20 @@ }) (define_insn_and_split "atomic_compare_and_swap<mode>_1" - [(set (reg:CC_Z CC_REGNUM) ;; bool out + [(set (match_operand 0 "cc_register_operand" "=&c") ;; bool out (unspec_volatile:CC_Z [(const_int 0)] VUNSPEC_ATOMIC_CAS)) - (set (match_operand:SI 0 "s_register_operand" "=&r") ;; val out + (set (match_operand:SI 1 "s_register_operand" "=&r") ;; val out (zero_extend:SI - (match_operand:NARROW 1 "mem_noofs_operand" "+Ua"))) ;; memory - (set (match_dup 1) + (match_operand:NARROW 2 "mem_noofs_operand" "+Ua"))) ;; memory + (set (match_dup 2) (unspec_volatile:NARROW - [(match_operand:SI 2 "arm_add_operand" "rIL") ;; expected - (match_operand:NARROW 3 "s_register_operand" "r") ;; desired - (match_operand:SI 4 "const_int_operand") ;; is_weak - (match_operand:SI 5 "const_int_operand") ;; mod_s - (match_operand:SI 6 "const_int_operand")] ;; mod_f + [(match_operand:SI 3 "arm_add_operand" "rIL") ;; expected + (match_operand:NARROW 4 "s_register_operand" "r") ;; desired + (match_operand:SI 5 "const_int_operand") ;; is_weak + (match_operand:SI 6 "const_int_operand") ;; mod_s + 
(match_operand:SI 7 "const_int_operand")] ;; mod_f VUNSPEC_ATOMIC_CAS)) - (clobber (match_scratch:SI 7 "=&r"))] + (clobber (match_scratch:SI 8 "=&r"))] "<sync_predtab>" "#" "&& reload_completed" @@ -212,19 +212,19 @@ [(SI "rIL") (DI "rDi")]) (define_insn_and_split "atomic_compare_and_swap<mode>_1" - [(set (reg:CC_Z CC_REGNUM) ;; bool out + [(set (match_operand 0 "cc_register_operand" "=&c") ;; bool out (unspec_volatile:CC_Z [(const_int 0)] VUNSPEC_ATOMIC_CAS)) - (set (match_operand:SIDI 0 "s_register_operand" "=&r") ;; val out - (match_operand:SIDI 1 "mem_noofs_operand" "+Ua")) ;; memory - (set (match_dup 1) + (set (match_operand:SIDI 1 "s_register_operand" "=&r") ;; val out + (match_operand:SIDI 2 "mem_noofs_operand" "+Ua")) ;; memory + (set (match_dup 2) (unspec_volatile:SIDI - [(match_operand:SIDI 2 "<cas_cmp_operand>" "<cas_cmp_str>") ;; expect - (match_operand:SIDI 3 "s_register_operand" "r") ;; desired - (match_operand:SI 4 "const_int_operand") ;; is_weak - (match_operand:SI 5 "const_int_operand") ;; mod_s - (match_operand:SI 6 "const_int_operand")] ;; mod_f + [(match_operand:SIDI 3 "<cas_cmp_operand>" "<cas_cmp_str>") ;; expect + (match_operand:SIDI 4 "s_register_operand" "r") ;; desired + (match_operand:SI 5 "const_int_operand") ;; is_weak + (match_operand:SI 6 "const_int_operand") ;; mod_s + (match_operand:SI 7 "const_int_operand")] ;; mod_f VUNSPEC_ATOMIC_CAS)) - (clobber (match_scratch:SI 7 "=&r"))] + (clobber (match_scratch:SI 8 "=&r"))] "<sync_predtab>" "#" "&& reload_completed" |