/* atomic_arch.h — powerpc64 ll/sc atomic primitives */

#define a_ll a_ll
/* Load-linked: lwarx (load word and reserve indexed) loads *p and places a
 * reservation on its location for a matching stwcx. in a_sc. The "m"(*p)
 * input operand tells the compiler the asm reads *p, so earlier stores to
 * *p cannot be reordered past or eliminated around the load. */
static inline int a_ll(volatile int *p)
{
	int v;
	__asm__ __volatile__ ("lwarx %0, 0, %2" : "=r"(v) : "m"(*p), "r"(p));
	return v;
}
   8  
#define a_sc a_sc
/* Store-conditional: stwcx. stores v to *p only if the reservation taken by
 * the matching a_ll is still held, setting CR0[EQ] on success. mfcr copies
 * the whole condition register into r; the mask below isolates that success
 * bit. Returns nonzero iff the store succeeded. */
static inline int a_sc(volatile int *p, int v)
{
	int r;
	__asm__ __volatile__ (
		"stwcx. %2, 0, %3 ; mfcr %0"
		: "=r"(r), "=m"(*p) : "r"(v), "r"(p) : "memory", "cc");
	return r & 0x20000000; /* "bit 2" of "cr0" (backwards bit order) */
}
  18  
#define a_ll_p a_ll_p
/* Pointer-sized load-linked: ldarx is the 64-bit doubleword form of lwarx,
 * so this assumes 8-byte pointers (powerpc64). The cast through
 * (void *volatile *) gives the "m" operand the object actually read. */
static inline void *a_ll_p(volatile void *p)
{
	void *v;
	__asm__ __volatile__ ("ldarx %0, 0, %2" : "=r"(v) : "m"(*(void *volatile *)p), "r"(p));
	return v;
}
  26  
#define a_sc_p a_sc_p
/* Pointer-sized store-conditional: stdcx. is the 64-bit doubleword form of
 * stwcx. and pairs with a_ll_p's ldarx reservation. As in a_sc, mfcr pulls
 * the condition register into r and the mask extracts CR0[EQ].
 * Returns nonzero iff the store succeeded. */
static inline int a_sc_p(volatile void *p, void *v)
{
	int r;
	__asm__ __volatile__ (
		"stdcx. %2, 0, %3 ; mfcr %0"
		: "=r"(r), "=m"(*(void *volatile *)p) : "r"(v), "r"(p) : "memory", "cc");
	return r & 0x20000000; /* "bit 2" of "cr0" (backwards bit order) */
}
  36  
  37  #define a_barrier a_barrier
  38  static inline void a_barrier()
  39  {
  40  	__asm__ __volatile__ ("sync" : : : "memory");
  41  }
  42  
  43  #define a_pre_llsc a_barrier
  44  
  45  #define a_post_llsc a_post_llsc
  46  static inline void a_post_llsc()
  47  {
  48  	__asm__ __volatile__ ("isync" : : : "memory");
  49  }
  50  
  51  #define a_crash a_crash
  52  static inline void a_crash()
  53  {
  54  	__asm__ __volatile__ (".long 0");
  55  }
  56  
#define a_clz_64 a_clz_64
/* Count leading zero bits of a 64-bit value via cntlzd (count leading zeros
 * doubleword); per the Power ISA the result is 0..64, with 64 for x==0, so
 * it always fits in the int return. x doubles as the output operand. */
static inline int a_clz_64(uint64_t x)
{
	__asm__ ("cntlzd %0, %1" : "=r"(x) : "r"(x));
	return x;
}
  63