Date:      Wed, 29 Jun 2005 10:01:40 GMT
From:      John Baldwin <jhb@FreeBSD.org>
To:        Perforce Change Reviews <perforce@freebsd.org>
Subject:   PERFORCE change 79134 for review
Message-ID:  <200506291001.j5TA1eZC074883@repoman.freebsd.org>

http://perforce.freebsd.org/chv.cgi?CH=79134

Change 79134 by jhb@jhb_zion on 2005/06/29 10:01:22

	- Consolidate the KLD_MODULE case and the case where the compiler
	  cannot do inline asm, since both produce the same function
	  prototypes (see the sketch below).
	- Fix the comment in atomic_readandclear_long() on amd64.
	- Remove externs from prototypes.
	- Define atomic_cmpset_{acq,rel}_long on amd64.
	- Change the i386 code to use separate functions for long operations
	  instead of macros so that we get type checking on the arguments.
	  This includes making atomic_cmpset_long() an inline function
	  rather than a macro that would not have compiled without warnings
	  if it were actually used (a short usage sketch follows the diffs).
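	
	  A simplified sketch of the consolidated structure (illustration
	  only -- the real header generates these prototypes with the
	  ATOMIC_ASM()/ATOMIC_STORE_LOAD() macros shown in the diffs below,
	  always takes the SMP lock prefix here, and the i386 version tests
	  only __GNUCLIKE_ASM):
	
	  #include <sys/types.h>
	
	  #if defined(KLD_MODULE) || \
	      !(defined(__GNUCLIKE_ASM) && defined(__CC_SUPPORTS___INLINE))
	  /* Modules and non-GNU-like compilers see plain prototypes. */
	  void	atomic_add_int(volatile u_int *p, u_int v);
	  int	atomic_cmpset_int(volatile u_int *dst, u_int exp, u_int src);
	  #else
	  /* Everyone else gets the inline asm definitions. */
	  static __inline void
	  atomic_add_int(volatile u_int *p, u_int v)
	  {
	  	__asm __volatile("lock; addl %1,%0" : "+m" (*p) : "ir" (v));
	  }
	  #endif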

Affected files ...

.. //depot/projects/smpng/sys/amd64/include/atomic.h#18 edit
.. //depot/projects/smpng/sys/i386/include/atomic.h#36 edit

Differences ...

==== //depot/projects/smpng/sys/amd64/include/atomic.h#18 (text+ko) ====

@@ -67,7 +67,7 @@
  * Kernel modules call real functions which are built into the kernel.
  * This allows kernel modules to be portable between UP and SMP systems.
  */
-#if defined(KLD_MODULE)
+#if defined(KLD_MODULE) || !(defined(__GNUCLIKE_ASM) && defined(__CC_SUPPORTS___INLINE))
 #define ATOMIC_ASM(NAME, TYPE, OP, CONS, V)			\
 void atomic_##NAME##_##TYPE(volatile u_##TYPE *p, u_##TYPE v)
 
@@ -78,10 +78,8 @@
 u_##TYPE	atomic_load_acq_##TYPE(volatile u_##TYPE *p);	\
 void		atomic_store_rel_##TYPE(volatile u_##TYPE *p, u_##TYPE v)
 
-#else /* !KLD_MODULE */
+#else /* !KLD_MODULE && __GNUCLIKE_ASM && __CC_SUPPORTS___INLINE */
 
-#if defined(__GNUCLIKE_ASM) && defined(__CC_SUPPORTS___INLINE)
-
 /*
  * For userland, assume the SMP case and use lock prefixes so that
  * the binaries will run on both types of systems.
@@ -106,13 +104,6 @@
 }							\
 struct __hack
 
-#else /* !(__GNUCLIKE_ASM && __CC_SUPPORTS___INLINE) */
-
-#define ATOMIC_ASM(NAME, TYPE, OP, CONS, V)				\
-extern void atomic_##NAME##_##TYPE(volatile u_##TYPE *p, u_##TYPE v)
-
-#endif /* __GNUCLIKE_ASM && __CC_SUPPORTS___INLINE */
-
 /*
  * Atomic compare and set, used by the mutex functions
  *
@@ -121,8 +112,6 @@
  * Returns 0 on failure, non-zero on success
  */
 
-#if defined(__GNUCLIKE_ASM) && defined(__CC_SUPPORTS___INLINE)
-
 static __inline int
 atomic_cmpset_int(volatile u_int *dst, u_int exp, u_int src)
 {
@@ -190,18 +179,7 @@
 }							\
 struct __hack
 
-#else /* !(__GNUCLIKE_ASM && __CC_SUPPORTS___INLINE) */
-
-extern int atomic_cmpset_int(volatile u_int *, u_int, u_int);
-extern int atomic_cmpset_long(volatile u_long *, u_long, u_long);
-
-#define ATOMIC_STORE_LOAD(TYPE, LOP, SOP)				\
-extern u_##TYPE atomic_load_acq_##TYPE(volatile u_##TYPE *p);		\
-extern void atomic_store_rel_##TYPE(volatile u_##TYPE *p, u_##TYPE v)
-
-#endif /* __GNUCLIKE_ASM && __CC_SUPPORTS___INLINE */
-
-#endif /* KLD_MODULE */
+#endif /* KLD_MODULE || !(__GNUCLIKE_ASM && __CC_SUPPORTS___INLINE) */
 
 ATOMIC_ASM(set,	     char,  "orb %b1,%0",  "iq",  v);
 ATOMIC_ASM(clear,    char,  "andb %b1,%0", "iq", ~v);
@@ -259,7 +237,7 @@
 	__asm __volatile (
 	"	xorq	%0,%0 ;		"
 	"	xchgq	%1,%0 ;		"
-	"# atomic_readandclear_int"
+	"# atomic_readandclear_long"
 	: "=&r" (result)		/* 0 (result) */
 	: "m" (*addr));			/* 1 (addr) */
 
@@ -268,8 +246,8 @@
 
 #else /* !(__GNUCLIKE_ASM && __CC_SUPPORTS___INLINE) */
 
-extern u_long	atomic_readandclear_long(volatile u_long *);
-extern u_int	atomic_readandclear_int(volatile u_int *);
+u_int	atomic_readandclear_int(volatile u_int *);
+u_long	atomic_readandclear_long(volatile u_long *);
 
 #endif /* __GNUCLIKE_ASM && __CC_SUPPORTS___INLINE */
 
@@ -311,6 +289,8 @@
 #define	atomic_add_rel_long		atomic_add_long
 #define	atomic_subtract_acq_long	atomic_subtract_long
 #define	atomic_subtract_rel_long	atomic_subtract_long
+#define	atomic_cmpset_acq_long		atomic_cmpset_long
+#define	atomic_cmpset_rel_long		atomic_cmpset_long
 
 #define atomic_cmpset_acq_ptr		atomic_cmpset_ptr
 #define atomic_cmpset_rel_ptr		atomic_cmpset_ptr

==== //depot/projects/smpng/sys/i386/include/atomic.h#36 (text+ko) ====

@@ -67,7 +67,7 @@
  * Kernel modules call real functions which are built into the kernel.
  * This allows kernel modules to be portable between UP and SMP systems.
  */
-#if defined(KLD_MODULE)
+#if defined(KLD_MODULE) || !defined(__GNUCLIKE_ASM)
 #define ATOMIC_ASM(NAME, TYPE, OP, CONS, V)			\
 void atomic_##NAME##_##TYPE(volatile u_##TYPE *p, u_##TYPE v)
 
@@ -79,8 +79,6 @@
 
 #else /* !KLD_MODULE */
 
-#ifdef __GNUCLIKE_ASM
-
 /*
  * For userland, assume the SMP case and use lock prefixes so that
  * the binaries will run on both types of systems.
@@ -105,13 +103,6 @@
 }							\
 struct __hack
 
-#else /* !__GNUCLIKE_ASM */
-
-#define ATOMIC_ASM(NAME, TYPE, OP, CONS, V)				\
-extern void atomic_##NAME##_##TYPE(volatile u_##TYPE *p, u_##TYPE v)
-
-#endif /* __GNUCLIKE_ASM */
-
 /*
  * Atomic compare and set, used by the mutex functions
  *
@@ -120,8 +111,6 @@
  * Returns 0 on failure, non-zero on success
  */
 
-#ifdef __GNUCLIKE_ASM
-
 #if defined(CPU_DISABLE_CMPXCHG)
 
 static __inline int
@@ -226,18 +215,8 @@
 
 #endif	/* !defined(SMP) */
 
-#else /* !__GNUCLIKE_ASM */
-
-extern int atomic_cmpset_int(volatile u_int *, u_int, u_int);
-
-#define ATOMIC_STORE_LOAD(TYPE, LOP, SOP)				\
-extern u_##TYPE atomic_load_acq_##TYPE(volatile u_##TYPE *p);		\
-extern void atomic_store_rel_##TYPE(volatile u_##TYPE *p, u_##TYPE v)
-
-#endif /* __GNUCLIKE_ASM */
+#endif /* KLD_MODULE || !__GNUCLIKE_ASM */
 
-#endif /* KLD_MODULE */
-
 ATOMIC_ASM(set,	     char,  "orb %b1,%0",  "iq",  v);
 ATOMIC_ASM(clear,    char,  "andb %b1,%0", "iq", ~v);
 ATOMIC_ASM(add,	     char,  "addb %b1,%0", "iq",  v);
@@ -253,15 +232,29 @@
 ATOMIC_ASM(add,	     int,   "addl %1,%0",  "ir",  v);
 ATOMIC_ASM(subtract, int,   "subl %1,%0",  "ir",  v);
 
+ATOMIC_ASM(set,	     long,  "orl %1,%0",   "ir",  v);
+ATOMIC_ASM(clear,    long,  "andl %1,%0",  "ir", ~v);
+ATOMIC_ASM(add,	     long,  "addl %1,%0",  "ir",  v);
+ATOMIC_ASM(subtract, long,  "subl %1,%0",  "ir",  v);
+
 ATOMIC_STORE_LOAD(char,	"cmpxchgb %b0,%1", "xchgb %b1,%0");
 ATOMIC_STORE_LOAD(short,"cmpxchgw %w0,%1", "xchgw %w1,%0");
 ATOMIC_STORE_LOAD(int,	"cmpxchgl %0,%1",  "xchgl %1,%0");
+ATOMIC_STORE_LOAD(long,	"cmpxchgl %0,%1",  "xchgl %1,%0");
 
 #undef ATOMIC_ASM
 #undef ATOMIC_STORE_LOAD
 
 #if !defined(WANT_FUNCTIONS)
 
+static __inline int
+atomic_cmpset_long(volatile u_long *dst, u_long exp, u_long src)
+{
+
+	return (atomic_cmpset_int((volatile u_int *)dst, (u_int)exp,
+	    (u_int)src));
+}
+
 /* Read the current value and store a zero in the destination. */
 #ifdef __GNUCLIKE_ASM
 
@@ -280,9 +273,25 @@
 	return (result);
 }
 
+static __inline u_long
+atomic_readandclear_long(volatile u_long *addr)
+{
+	u_long result;
+
+	__asm __volatile (
+	"	xorl	%0,%0 ;		"
+	"	xchgl	%1,%0 ;		"
+	"# atomic_readandclear_long"
+	: "=&r" (result)		/* 0 (result) */
+	: "m" (*addr));			/* 1 (addr) */
+
+	return (result);
+}
+
 #else /* !__GNUCLIKE_ASM */
 
-extern u_int	atomic_readandclear_int(volatile u_int *);
+u_int	atomic_readandclear_int(volatile u_int *);
+u_long	atomic_readandclear_long(volatile u_long *);
 
 #endif /* __GNUCLIKE_ASM */
 
@@ -324,8 +333,8 @@
 #define	atomic_add_rel_long		atomic_add_long
 #define	atomic_subtract_acq_long	atomic_subtract_long
 #define	atomic_subtract_rel_long	atomic_subtract_long
-#define	atomic_cmpset_acq_long		atomic_cmpset_acq_int
-#define	atomic_cmpset_rel_long		atomic_cmpset_rel_int
+#define	atomic_cmpset_acq_long		atomic_cmpset_long
+#define	atomic_cmpset_rel_long		atomic_cmpset_long
 
 #define atomic_cmpset_acq_ptr		atomic_cmpset_ptr
 #define atomic_cmpset_rel_ptr		atomic_cmpset_ptr
@@ -382,16 +391,6 @@
 #define	atomic_cmpset_rel_32	atomic_cmpset_rel_int
 #define	atomic_readandclear_32	atomic_readandclear_int
 
-/* Operations on longs. */
-#define	atomic_set_long(p, v)		atomic_set_int((volatile u_int *)(p), (v))
-#define	atomic_clear_long(p, v)		atomic_clear_int((volatile u_int *)(p), (v))
-#define	atomic_add_long(p, v)		atomic_add_int((volatile u_int *)(p), (v))
-#define	atomic_subtract_long(p, v)	atomic_subtract_int((volatile u_int *)(p), (v))
-#define	atomic_cmpset_long(d, e, s)	atomic_cmpset_int((volatile u_int *)(d), (e), (s))
-#define	atomic_load_acq_long(p)		atomic_load_acq_int((volatile u_int *)(p))
-#define	atomic_store_rel_long(p, v)	atomic_store_rel_int((volatile u_int *)(p), (v))
-#define	atomic_readandclear_long(p)	atomic_readandclear_int((volatile u_int *)(p))
-
 /* Operations on pointers. */
 static __inline int
 atomic_cmpset_ptr(volatile void *dst, void *exp, void *src)
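
As a usage note, the header's own comment describes atomic_cmpset as
"used by the mutex functions", returning 0 on failure and non-zero on
success.  A minimal retry-loop sketch using the new i386
atomic_cmpset_long() inline (the loop and the helper name are
illustrative only, not part of the change):

	#include <sys/types.h>
	#include <machine/atomic.h>

	/* Atomically add v to *p by looping on compare-and-set. */
	static __inline void
	example_add_long(volatile u_long *p, u_long v)
	{
		u_long old;

		do {
			old = *p;	/* snapshot the current value */
		} while (atomic_cmpset_long(p, old, old + v) == 0);
	}

Because atomic_cmpset_long() is now a real inline taking a volatile
u_long *, passing a pointer of the wrong type here draws a compiler
diagnostic instead of being silently cast away, which is the type
checking gain described above.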


