24 __attribute__( ( always_inline ) ) static inline
void __set_BASEPRI_nb(uint32_t basePri)
26 __ASM
volatile (
"\tMSR basepri, %0\n" : :
"r" (basePri) );
29 __attribute__( ( always_inline ) ) static inline
void __set_BASEPRI_MAX_nb(uint32_t basePri)
31 __ASM
volatile (
"\tMSR basepri_max, %0\n" : :
"r" (basePri) );
34 __attribute__( ( always_inline ) ) static inline
void __set_BASEPRI_MAX(uint32_t basePri)
36 __ASM
volatile (
"\tMSR basepri_max, %0\n" : :
"r" (basePri) :
"memory" );
// __cleanup__ handler used by ATOMIC_BLOCK: restores the saved BASEPRI
// value when the variable goes out of scope, with a memory barrier so
// writes inside the critical section complete before interrupts unmask.
// NOTE(review): body was missing in the mangled source; reconstructed as
// a call to the CMSIS barrier variant __set_BASEPRI — confirm upstream.
static inline void __basepriRestoreMem(uint8_t *val)
{
    __set_BASEPRI(*val);
}
// Raise BASEPRI to 'prio' (with memory barrier) and return 1 so it can
// drive the one-iteration for-loop in ATOMIC_BLOCK. The missing
// 'return 1;' in the mangled source was UB: the return value is used as
// the loop condition.
static inline uint8_t __basepriSetMemRetVal(uint8_t prio)
{
    __set_BASEPRI_MAX(prio);
    return 1;
}
// __cleanup__ handler used by ATOMIC_BLOCK_NB: restores the saved
// BASEPRI value on scope exit, without a memory barrier (the "_nb"
// variant — caller is responsible for any ordering requirements).
static inline void __basepriRestore(uint8_t *val)
{
    __set_BASEPRI_nb(*val);
}
// Raise BASEPRI to 'prio' (no memory barrier) and return 1 to drive the
// one-iteration for-loop in ATOMIC_BLOCK_NB. The missing 'return 1;' in
// the mangled source was UB: the return value is used as the loop
// condition.
static inline uint8_t __basepriSetRetVal(uint8_t prio)
{
    __set_BASEPRI_MAX_nb(prio);
    return 1;
}
// Run the attached statement/block with interrupts at priority >= 'prio'
// masked. Implemented as a one-iteration for-loop: __basepri_save holds
// the previous BASEPRI and its __cleanup__ attribute restores it on ANY
// exit path (including break/return/goto). This variant includes full
// memory barriers on entry and exit.
#define ATOMIC_BLOCK(prio) for ( uint8_t __basepri_save __attribute__((__cleanup__(__basepriRestoreMem))) = __get_BASEPRI(), \
                                 __ToDo = __basepriSetMemRetVal(prio); __ToDo ; __ToDo = 0 )
// Like ATOMIC_BLOCK, but without compiler memory barriers ("NB" = no
// barrier): the compiler may reorder memory accesses into/out of the
// block. Use only when such reordering is provably harmless.
#define ATOMIC_BLOCK_NB(prio) for ( uint8_t __basepri_save __attribute__((__cleanup__(__basepriRestore))) = __get_BASEPRI(), \
                                    __ToDo = __basepriSetRetVal(prio); __ToDo ; __ToDo = 0 )
#warning "Please verify that ATOMIC_BARRIER works as intended"

// __UNIQL(x) expands to an identifier unique within this translation
// unit by pasting the current line number onto 'x'. The two-level
// concat is required so __LINE__ is expanded before token pasting.
#ifndef __UNIQL
# define __UNIQL_CONCAT2(x,y) x ## y
# define __UNIQL_CONCAT(x,y) __UNIQL_CONCAT2(x,y)
# define __UNIQL(x) __UNIQL_CONCAT(x,__LINE__)
#endif
// Compiler barrier scoped to a single variable: tells the compiler that
// 'data' is accessed ("start") here and again ("end") when the current
// scope exits, so its value cannot be cached in a register across the
// block. Uses a GCC nested function as the __cleanup__ handler (GCC
// extension; not supported by clang). The closing '}' of the nested
// function was lost in the mangled source and is restored here.
#define ATOMIC_BARRIER(data) \
    __extension__ void __UNIQL(__barrierEnd)(typeof(data) **__d) { \
        __asm__ volatile ("\t# barier(" #data ") end\n" : : "m" (**__d)); \
    } \
    typeof(data) __attribute__((__cleanup__(__UNIQL(__barrierEnd)))) *__UNIQL(__barrier) = &data; \
    __asm__ volatile ("\t# barier (" #data ") start\n" : "=m" (*__UNIQL(__barrier)))
// Atomic read-modify-write helpers built on the GCC __sync builtins
// (full barrier semantics). Each returns the value *ptr held BEFORE the
// operation.
#define ATOMIC_OR(ptr, val) __sync_fetch_and_or(ptr, val)
#define ATOMIC_AND(ptr, val) __sync_fetch_and_and(ptr, val)
/* Doxygen cross-reference residue from extraction: duplicates the
 * __set_BASEPRI_nb declaration defined above (atomic.h:24). */