Update barriers for x86/x86-64

x86 and x86-64 follow the TSO memory model: loads are not reordered
with other loads, and stores are not reordered with other stores.
A compiler barrier is therefore sufficient for read_barrier() and
write_barrier() on both architectures; the lfence/sfence instructions
previously used on x86-64 are only required for weakly-ordered
operations (e.g. non-temporal stores or WC memory), which do not
apply here. Drop them and unify the two architectures under one
compiler-barrier definition.
Signed-off-by: Jens Axboe <axboe@kernel.dk>
diff --git a/src/barrier.h b/src/barrier.h
index 2076c1c..ef00f67 100644
--- a/src/barrier.h
+++ b/src/barrier.h
@@ -1,12 +1,9 @@
#ifndef LIBURING_BARRIER_H
#define LIBURING_BARRIER_H
-#if defined(__x86_64)
-#define read_barrier() __asm__ __volatile__("lfence":::"memory")
-#define write_barrier() __asm__ __volatile__("sfence":::"memory")
-#elif defined(__i386__)
-#define read_barrier() __asm__ __volatile__("": : :"memory")
-#define write_barrier() __asm__ __volatile__("": : :"memory")
+#if defined(__x86_64) || defined(__i386__)
+#define read_barrier() __asm__ __volatile__("":::"memory")
+#define write_barrier() __asm__ __volatile__("":::"memory")
#else
/*
* Add arch appropriate definitions. Be safe and use full barriers for