24 #ifndef __CORE_CMINSTR_H
25 #define __CORE_CMINSTR_H
/* ARM Compiler (armcc) section: CMSIS barrier macros map directly onto
   compiler intrinsics.  Requires ARM Compiler Toolchain V4.0.677 or later. */
34 #if defined ( __CC_ARM )
37 #if (__ARMCC_VERSION < 400677)
38 #error "Please use ARM Compiler Toolchain V4.0.677 or later!"
/* Instruction Synchronization Barrier: flushes the pipeline so that all
   following instructions are re-fetched.  0xF = full-system scope. */
78 #define __ISB() __isb(0xF)
/* Data Synchronization Barrier: completes when all outstanding memory
   accesses have finished. */
86 #define __DSB() __dsb(0xF)
/* Data Memory Barrier: orders memory accesses before/after the barrier. */
94 #define __DMB() __dmb(0xF)
114 #ifndef __NO_EMBEDDED_ASM
129 #ifndef __NO_EMBEDDED_ASM
/* __REVSH: reverse the byte order of the low halfword and sign-extend the
   result to 32 bits (per the ARM REVSH instruction suggested by the name).
   Implemented as armcc embedded assembly (__ASM), placed in a dedicated
   ".revsh_text" linker section.
   NOTE(review): the function body and closing lines are not visible in
   this excerpt — confirm against the full file. */
130 __attribute__((section(
".revsh_text"))) __STATIC_INLINE __ASM
int32_t __REVSH(
int32_t value)
/* The following intrinsics exist only on Cortex-M3 and above. */
149 #if (__CORTEX_M >= 0x03)
/* Reverse the bit order of a 32-bit value (RBIT instruction). */
158 #define __RBIT __rbit
/* Exclusive loads: wrap the armcc __ldrex intrinsic; the casts select the
   byte / halfword / word access width. */
168 #define __LDREXB(ptr) ((uint8_t ) __ldrex(ptr))
178 #define __LDREXH(ptr) ((uint16_t) __ldrex(ptr))
188 #define __LDREXW(ptr) ((uint32_t ) __ldrex(ptr))
/* Exclusive stores: wrap the armcc __strex intrinsic; the access width is
   presumably derived from the pointer's type — verify against the armcc
   intrinsic documentation. */
200 #define __STREXB(value, ptr) __strex(value, ptr)
212 #define __STREXH(value, ptr) __strex(value, ptr)
224 #define __STREXW(value, ptr) __strex(value, ptr)
/* Remove the exclusive-access monitor lock created by LDREX* (CLREX). */
232 #define __CLREX __clrex
/* Signed / unsigned saturation to a selectable bit position (SSAT/USAT). */
243 #define __SSAT __ssat
254 #define __USAT __usat
/* IAR compiler: intrinsics are provided by cmsis_iar.h shipped with IAR. */
270 #elif defined ( __ICCARM__ )
273 #include <cmsis_iar.h>
/* TI TMS470 (CCS) compiler: intrinsics provided by cmsis_ccs.h. */
276 #elif defined ( __TMS470__ )
279 #include <cmsis_ccs.h>
/* GNU GCC: intrinsics are implemented below with inline assembly. */
282 #elif defined ( __GNUC__ )
/* __NOP: emit a single NOP (no-operation) instruction.
   NOTE(review): the enclosing function braces are not visible in this
   excerpt. */
289 __attribute__( ( always_inline ) ) __STATIC_INLINE
void __NOP(
void)
291 __ASM
volatile (
"nop");
/* __WFI: execute the Wait-For-Interrupt instruction (enters sleep until an
   interrupt/wakeup condition occurs).
   NOTE(review): enclosing braces not visible in this excerpt. */
300 __attribute__( ( always_inline ) ) __STATIC_INLINE
void __WFI(
void)
302 __ASM
volatile (
"wfi");
/* __WFE: execute the Wait-For-Event instruction.
   NOTE(review): enclosing braces not visible in this excerpt. */
311 __attribute__( ( always_inline ) ) __STATIC_INLINE
void __WFE(
void)
313 __ASM
volatile (
"wfe");
/* __SEV: execute the Send-Event instruction (signals an event to all CPUs).
   NOTE(review): enclosing braces not visible in this excerpt. */
321 __attribute__( ( always_inline ) ) __STATIC_INLINE
void __SEV(
void)
323 __ASM
volatile (
"sev");
/* __ISB: Instruction Synchronization Barrier — flushes the pipeline so all
   following instructions are re-fetched.
   NOTE(review): enclosing braces not visible in this excerpt. */
333 __attribute__( ( always_inline ) ) __STATIC_INLINE
void __ISB(
void)
335 __ASM
volatile (
"isb");
/* __DSB: Data Synchronization Barrier — completes when all outstanding
   memory accesses have finished.
   NOTE(review): enclosing braces not visible in this excerpt. */
344 __attribute__( ( always_inline ) ) __STATIC_INLINE
void __DSB(
void)
346 __ASM
volatile (
"dsb");
/* __DMB: Data Memory Barrier — orders memory accesses that appear before
   and after the barrier.
   NOTE(review): enclosing braces not visible in this excerpt. */
355 __attribute__( ( always_inline ) ) __STATIC_INLINE
void __DMB(
void)
357 __ASM
volatile (
"dmb");
/* Inline-asm core of __REV: reverse the byte order of a 32-bit word
   (REV instruction); writes `result` from `value`.
   NOTE(review): the function signature, the declaration of `result`, and
   the return statement precede/follow this excerpt and are not visible. */
372 __ASM
volatile (
"rev %0, %1" :
"=r" (result) :
"r" (value) );
/* __REV16: reverse the byte order within each halfword of a 32-bit value
   (REV16 instruction).
   NOTE(review): braces, the `result` declaration, and the return statement
   are not visible in this excerpt. */
384 __attribute__( ( always_inline ) ) __STATIC_INLINE
uint32_t __REV16(
uint32_t value)
388 __ASM
volatile (
"rev16 %0, %1" :
"=r" (result) :
"r" (value) );
/* __REVSH: reverse the byte order of the low halfword and sign-extend to
   32 bits (REVSH instruction).  Signed in/out, matching the armcc variant.
   NOTE(review): braces, the `result` declaration, and the return statement
   are not visible in this excerpt. */
400 __attribute__( ( always_inline ) ) __STATIC_INLINE
int32_t __REVSH(
int32_t value)
404 __ASM
volatile (
"revsh %0, %1" :
"=r" (result) :
"r" (value) );
/* Inline-asm core of __ROR: rotate op1 right by op2 bits (ROR instruction).
   "+r" makes op1 a read-modify-write operand, so the result lands back in
   op1.  NOTE(review): the function signature and return statement are not
   visible in this excerpt. */
420 __ASM
volatile (
"ror %0, %0, %1" :
"+r" (op1) :
"r" (op2) );
/* The following instructions exist only on Cortex-M3 and above. */
425 #if (__CORTEX_M >= 0x03)
/* __RBIT: reverse the bit order of a 32-bit value (RBIT instruction).
   NOTE(review): braces, the `result` declaration, and the return statement
   are not visible in this excerpt. */
434 __attribute__( ( always_inline ) ) __STATIC_INLINE
uint32_t __RBIT(
uint32_t value)
438 __ASM
volatile (
"rbit %0, %1" :
"=r" (result) :
"r" (value) );
/* __LDREXB: exclusive load of one byte from *addr (LDREXB), marking the
   address for a following STREXB.
   NOTE(review): braces, the `result` declaration, and the return statement
   are not visible in this excerpt. */
450 __attribute__( ( always_inline ) ) __STATIC_INLINE
uint8_t __LDREXB(volatile
uint8_t *addr)
454 __ASM
volatile (
"ldrexb %0, [%1]" :
"=r" (result) :
"r" (addr) );
/* __LDREXH: exclusive load of one halfword from *addr (LDREXH).
   NOTE(review): braces, the `result` declaration, and the return statement
   are not visible in this excerpt. */
466 __attribute__( ( always_inline ) ) __STATIC_INLINE
uint16_t __LDREXH(volatile
uint16_t *addr)
470 __ASM
volatile (
"ldrexh %0, [%1]" :
"=r" (result) :
"r" (addr) );
/* __LDREXW: exclusive load of one word from *addr (LDREX).
   NOTE(review): braces, the `result` declaration, and the return statement
   are not visible in this excerpt. */
482 __attribute__( ( always_inline ) ) __STATIC_INLINE
uint32_t __LDREXW(volatile
uint32_t *addr)
486 __ASM
volatile (
"ldrex %0, [%1]" :
"=r" (result) :
"r" (addr) );
/* __STREXB: exclusive store of one byte to *addr (STREXB).  Per the ARM
   STREX instruction family, `result` is 0 when the store succeeded and 1
   when it failed.  "=&r" is an early-clobber so the status register is not
   allocated on top of addr/value.
   NOTE(review): braces, the `result` declaration, and the return statement
   are not visible in this excerpt. */
500 __attribute__( ( always_inline ) ) __STATIC_INLINE
uint32_t __STREXB(
uint8_t value, volatile
uint8_t *addr)
504 __ASM
volatile (
"strexb %0, %2, [%1]" :
"=&r" (result) :
"r" (addr),
"r" (
value) );
/* Inline-asm core of __STREXH: exclusive store of one halfword (STREXH);
   `result` receives the 0 = success / 1 = failure status.
   NOTE(review): the function signature and return statement are not
   visible in this excerpt. */
522 __ASM
volatile (
"strexh %0, %2, [%1]" :
"=&r" (result) :
"r" (addr),
"r" (
value) );
/* Inline-asm core of __STREXW: exclusive store of one word (STREX);
   `result` receives the 0 = success / 1 = failure status.
   NOTE(review): the function signature and return statement are not
   visible in this excerpt. */
540 __ASM
volatile (
"strex %0, %2, [%1]" :
"=&r" (result) :
"r" (addr),
"r" (
value) );
/* __CLREX: clear the local exclusive-access monitor (CLREX), forcing the
   next STREX to fail.
   NOTE(review): enclosing braces are not visible in this excerpt. */
550 __attribute__( ( always_inline ) ) __STATIC_INLINE
void __CLREX(
void)
552 __ASM
volatile (
"clrex");
/* __SSAT(ARG1, ARG2): saturate signed value ARG1 into ARG2 bits using the
   SSAT instruction.  Implemented as a macro because ARG2 must be an
   assemble-time constant ("I" constraint).  ARG1 is evaluated exactly once
   (copied into __ARG1).
   NOTE(review): the GCC statement-expression wrapper lines are not visible
   in this excerpt. */
564 #define __SSAT(ARG1,ARG2) \
566 uint32_t __RES, __ARG1 = (ARG1); \
567 __ASM ("ssat %0, %1, %2" : "=r" (__RES) : "I" (ARG2), "r" (__ARG1) ); \
/* __USAT(ARG1, ARG2): saturate value ARG1 into ARG2 bits as unsigned using
   the USAT instruction; mirrors __SSAT above.  ARG2 must be an
   assemble-time constant ("I" constraint); ARG1 is evaluated exactly once.
   NOTE(review): the GCC statement-expression wrapper lines are not visible
   in this excerpt. */
580 #define __USAT(ARG1,ARG2) \
582 uint32_t __RES, __ARG1 = (ARG1); \
583 __ASM ("usat %0, %1, %2" : "=r" (__RES) : "I" (ARG2), "r" (__ARG1) ); \
/* __CLZ: count the number of leading zero bits in a 32-bit value (CLZ
   instruction); returns 0..32 narrowed into a uint8_t.
   NOTE(review): braces, the `result` declaration, and the return statement
   are not visible in this excerpt. */
595 __attribute__( ( always_inline ) ) __STATIC_INLINE
uint8_t __CLZ(
uint32_t value)
599 __ASM
volatile (
"clz %0, %1" :
"=r" (result) :
"r" (value) );
608 #elif defined ( __TASKING__ )