author     Richard Henderson <rth@redhat.com>   2011-11-03 05:01:43 +0000
committer  Richard Henderson <rth@redhat.com>   2011-11-03 05:01:43 +0000
commit     9946677784de2dcd3783732b98fbbb75c3ddb931 (patch)
tree       4fe97462f5beebdb6ecd3d5fd800e9a246d6ccd3
parent     5a63bec8ec0d1ee3ddb00781a2adf596871ddeae (diff)
Fix mem_signal_fence and its fallbacks.
	* builtins.c (HAVE_mem_thread_fence, gen_mem_thread_fence,
	HAVE_mem_signal_fence, gen_mem_signal_fence): Default.
	(expand_builtin_mem_thread_fence): Tidy.
	(expand_builtin_mem_signal_fence): Fallback to asm memory barrier.

git-svn-id: https://gcc.gnu.org/svn/gcc/branches/cxx-mem-model@180817 138bc75d-0d04-0410-961f-82ee72b054a4
-rw-r--r--  gcc/ChangeLog.mm   7
-rw-r--r--  gcc/builtins.c    50
2 files changed, 44 insertions, 13 deletions
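
For context, a minimal C sketch (not part of the patch) of the two source-level
fences whose expansion this change reworks; the function name is illustrative.

/* Illustrative only -- not part of this commit.  */
void
fence_examples (void)
{
  /* Fences against other threads.  On a target without a
     mem_thread_fence pattern this now falls back to a full
     __sync_synchronize barrier, except for the relaxed model,
     which emits nothing.  */
  __atomic_thread_fence (__ATOMIC_SEQ_CST);
  __atomic_thread_fence (__ATOMIC_RELAXED);

  /* Fences only against a signal handler running in the same
     thread.  Without a mem_signal_fence pattern this now emits a
     compiler-level memory barrier rather than a hardware fence.  */
  __atomic_signal_fence (__ATOMIC_SEQ_CST);
}
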
diff --git a/gcc/ChangeLog.mm b/gcc/ChangeLog.mm
index 24137ab66ea..351b91fe468 100644
--- a/gcc/ChangeLog.mm
+++ b/gcc/ChangeLog.mm
@@ -1,5 +1,12 @@
2011-11-02 Richard Henderson <rth@redhat.com>
+ * builtins.c (HAVE_mem_thread_fence, gen_mem_thread_fence,
+ HAVE_mem_signal_fence, gen_mem_signal_fence): Default.
+ (expand_builtin_mem_thread_fence): Tidy.
+ (expand_builtin_mem_signal_fence): Fallback to asm memory barrier.
+
+2011-11-02 Richard Henderson <rth@redhat.com>
+
* config/i386/i386.md (UNSPEC_MOVA): New.
* config/i386/sync.md (ATOMIC): New mode iterator.
(atomic_load<ATOMIC>, atomic_store<ATOMIC>): New.
diff --git a/gcc/builtins.c b/gcc/builtins.c
index 7d23469e053..39c0afbf5b1 100644
--- a/gcc/builtins.c
+++ b/gcc/builtins.c
@@ -5593,16 +5593,18 @@ expand_builtin_atomic_is_lock_free (tree exp)
/* This routine will either emit the mem_thread_fence pattern or issue a
sync_synchronize to generate a fence for memory model MEMMODEL. */
+#ifndef HAVE_mem_thread_fence
+# define HAVE_mem_thread_fence 0
+# define gen_mem_thread_fence(x) (gcc_unreachable (), NULL_RTX)
+#endif
+
void
expand_builtin_mem_thread_fence (enum memmodel model)
{
- if (model == MEMMODEL_RELAXED)
- return;
-#ifdef HAVE_mem_thread_fence
- emit_insn (gen_mem_thread_fence (GEN_INT (model)));
-#else
- expand_builtin_sync_synchronize ();
-#endif
+ if (HAVE_mem_thread_fence)
+ emit_insn (gen_mem_thread_fence (GEN_INT (model)));
+ else if (model != MEMMODEL_RELAXED)
+ expand_builtin_sync_synchronize ();
}
/* Expand the __atomic_thread_fence intrinsic:
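
The HAVE_*/gen_* defaulting above turns the old #ifdef into an ordinary if on a
compile-time constant, so both arms stay visible to the compiler.  A standalone
sketch of the same idiom, using made-up names rather than real target macros:

/* Sketch of the defaulting idiom; the widget names are hypothetical.  */
#include <stdio.h>
#include <stdlib.h>

#ifndef HAVE_widget_insn
# define HAVE_widget_insn 0
/* Stub so the call below still parses; it must never be reached.  */
# define gen_widget_insn(x) (abort (), 0)
#endif

static void
expand_widget (int arg)
{
  if (HAVE_widget_insn)
    /* Dead when HAVE_widget_insn is 0, but still type-checked,
       unlike code hidden behind #ifdef.  */
    printf ("pattern: %d\n", gen_widget_insn (arg));
  else
    printf ("fallback: %d\n", arg);
}

int
main (void)
{
  expand_widget (42);
  return 0;
}
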
@@ -5621,15 +5623,37 @@ expand_builtin_atomic_thread_fence (tree exp)
/* This routine will either emit the mem_signal_fence pattern or issue a
sync_synchronize to generate a fence for memory model MEMMODEL. */
+#ifndef HAVE_mem_signal_fence
+# define HAVE_mem_signal_fence 0
+# define gen_mem_signal_fence(x) (gcc_unreachable (), NULL_RTX)
+#endif
+
static void
expand_builtin_mem_signal_fence (enum memmodel model)
{
-#ifdef HAVE_mem_signal_fence
- emit_insn (gen_mem_signal_fence (memmodel));
-#else
- if (model != MEMMODEL_RELAXED)
- expand_builtin_sync_synchronize ();
-#endif
+ if (HAVE_mem_signal_fence)
+ emit_insn (gen_mem_signal_fence (GEN_INT (model)));
+ else if (model != MEMMODEL_RELAXED)
+ {
+ rtx asm_op, clob;
+
+ /* By default targets are coherent between a thread and the signal
+ handler running on the same thread. Thus this really becomes a
+ compiler barrier, in that stores must not be sunk past
+ (or raised above) a given point. */
+
+ /* Generate asm volatile("" : : : "memory") as the memory barrier. */
+ asm_op = gen_rtx_ASM_OPERANDS (VOIDmode, empty_string, empty_string, 0,
+ rtvec_alloc (0), rtvec_alloc (0),
+ rtvec_alloc (0), UNKNOWN_LOCATION);
+ MEM_VOLATILE_P (asm_op) = 1;
+
+ clob = gen_rtx_SCRATCH (VOIDmode);
+ clob = gen_rtx_MEM (BLKmode, clob);
+ clob = gen_rtx_CLOBBER (VOIDmode, clob);
+
+ emit_insn (gen_rtx_PARALLEL (VOIDmode, gen_rtvec (2, asm_op, clob)));
+ }
}
/* Expand the __atomic_signal_fence intrinsic:
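
The RTL assembled in the new expand_builtin_mem_signal_fence fallback corresponds
to the familiar GNU C compiler barrier.  A hedged sketch of that source-level
equivalent and a typical single-thread/signal-handler pairing follows; the flag,
handler, and signal choice are illustrative, not taken from the patch.

/* Illustrative source-level equivalent of the fallback's RTL: an
   empty volatile asm clobbering "memory", i.e. a compiler barrier.  */
#include <signal.h>

#define COMPILER_BARRIER() __asm__ __volatile__ ("" : : : "memory")

static int pending_data;              /* hypothetical shared state */
static volatile sig_atomic_t ready;   /* hypothetical flag */

static void
on_signal (int sig)
{
  (void) sig;
  if (ready)
    {
      COMPILER_BARRIER ();            /* or __atomic_signal_fence */
      /* pending_data may be read here.  */
    }
}

int
main (void)
{
  signal (SIGUSR1, on_signal);

  pending_data = 123;
  /* Keep the store above from being sunk past the flag update; no
     hardware fence is needed because the reader is a signal handler
     running in the same thread.  */
  COMPILER_BARRIER ();
  ready = 1;

  raise (SIGUSR1);
  return 0;
}
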