/* Suppress GCC warnings triggered by the inline-assembly wrappers below. */
#if defined ( __GNUC__ )
#pragma GCC diagnostic push
#pragma GCC diagnostic ignored "-Wsign-conversion"
#pragma GCC diagnostic ignored "-Wconversion"
#pragma GCC diagnostic ignored "-Wunused-parameter"
#endif

/* Enable IRQ interrupts by clearing the I-bit in the CPSR. */
__attribute__( ( always_inline ) ) __STATIC_INLINE void __enable_irq(void)
{
  __ASM volatile ("cpsie i" : : : "memory");
}

/* Disable IRQ interrupts by setting the I-bit in the CPSR. */
__attribute__( ( always_inline ) ) __STATIC_INLINE void __disable_irq(void)
{
  __ASM volatile ("cpsid i" : : : "memory");
}
/* Return the content of the CONTROL register. */
__attribute__( ( always_inline ) ) __STATIC_INLINE uint32_t __get_CONTROL(void)
{
  uint32_t result;

  __ASM volatile ("MRS %0, control" : "=r" (result) );
  return(result);
}

/* Write the given value to the CONTROL register. */
__attribute__( ( always_inline ) ) __STATIC_INLINE void __set_CONTROL(uint32_t control)
{
  __ASM volatile ("MSR control, %0" : : "r" (control) : "memory");
}

/* Return the content of the IPSR register (active exception number). */
__attribute__( ( always_inline ) ) __STATIC_INLINE uint32_t __get_IPSR(void)
{
  uint32_t result;

  __ASM volatile ("MRS %0, ipsr" : "=r" (result) );
  return(result);
}

/* Return the content of the APSR register (condition flags). */
__attribute__( ( always_inline ) ) __STATIC_INLINE uint32_t __get_APSR(void)
{
  uint32_t result;

  __ASM volatile ("MRS %0, apsr" : "=r" (result) );
  return(result);
}

/* Return the content of the combined xPSR register. */
__attribute__( ( always_inline ) ) __STATIC_INLINE uint32_t __get_xPSR(void)
{
  uint32_t result;

  __ASM volatile ("MRS %0, xpsr" : "=r" (result) );
  return(result);
}
/* Return the current value of the Process Stack Pointer (PSP). */
__attribute__( ( always_inline ) ) __STATIC_INLINE uint32_t __get_PSP(void)
{
  register uint32_t result;

  __ASM volatile ("MRS %0, psp\n" : "=r" (result) );
  return(result);
}

/* Set the Process Stack Pointer (PSP) to the given top-of-stack address. */
__attribute__( ( always_inline ) ) __STATIC_INLINE void __set_PSP(uint32_t topOfProcStack)
{
  __ASM volatile ("MSR psp, %0\n" : : "r" (topOfProcStack) : "sp");
}

/* Return the current value of the Main Stack Pointer (MSP). */
__attribute__( ( always_inline ) ) __STATIC_INLINE uint32_t __get_MSP(void)
{
  register uint32_t result;

  __ASM volatile ("MRS %0, msp\n" : "=r" (result) );
  return(result);
}

/* Set the Main Stack Pointer (MSP) to the given top-of-stack address. */
__attribute__( ( always_inline ) ) __STATIC_INLINE void __set_MSP(uint32_t topOfMainStack)
{
  __ASM volatile ("MSR msp, %0\n" : : "r" (topOfMainStack) : "sp");
}
/* Return the current state of the PRIMASK interrupt mask bit. */
__attribute__( ( always_inline ) ) __STATIC_INLINE uint32_t __get_PRIMASK(void)
{
  uint32_t result;

  __ASM volatile ("MRS %0, primask" : "=r" (result) );
  return(result);
}

/* Set PRIMASK: a non-zero value masks all configurable-priority interrupts. */
__attribute__( ( always_inline ) ) __STATIC_INLINE void __set_PRIMASK(uint32_t priMask)
{
  __ASM volatile ("MSR primask, %0" : : "r" (priMask) : "memory");
}
#if (__CORTEX_M >= 0x03U)

/* Enable fault-class interrupts by clearing the F-bit (FAULTMASK). */
__attribute__( ( always_inline ) ) __STATIC_INLINE void __enable_fault_irq(void)
{
  __ASM volatile ("cpsie f" : : : "memory");
}

/* Disable fault-class interrupts by setting the F-bit (FAULTMASK). */
__attribute__( ( always_inline ) ) __STATIC_INLINE void __disable_fault_irq(void)
{
  __ASM volatile ("cpsid f" : : : "memory");
}

/* Return the current BASEPRI priority mask. */
__attribute__( ( always_inline ) ) __STATIC_INLINE uint32_t __get_BASEPRI(void)
{
  uint32_t result;

  __ASM volatile ("MRS %0, basepri" : "=r" (result) );
  return(result);
}

/* Set BASEPRI: interrupts with priority value >= BASEPRI are masked. */
__attribute__( ( always_inline ) ) __STATIC_INLINE void __set_BASEPRI(uint32_t value)
{
  __ASM volatile ("MSR basepri, %0" : : "r" (value) : "memory");
}

/* Write BASEPRI only if it raises the mask (conditional write). */
__attribute__( ( always_inline ) ) __STATIC_INLINE void __set_BASEPRI_MAX(uint32_t value)
{
  __ASM volatile ("MSR basepri_max, %0" : : "r" (value) : "memory");
}

/* Return the current FAULTMASK value. */
__attribute__( ( always_inline ) ) __STATIC_INLINE uint32_t __get_FAULTMASK(void)
{
  uint32_t result;

  __ASM volatile ("MRS %0, faultmask" : "=r" (result) );
  return(result);
}

/* Set FAULTMASK: a non-zero value masks all exceptions except NMI. */
__attribute__( ( always_inline ) ) __STATIC_INLINE void __set_FAULTMASK(uint32_t faultMask)
{
  __ASM volatile ("MSR faultmask, %0" : : "r" (faultMask) : "memory");
}
#endif /* (__CORTEX_M >= 0x03U) */


#if (__CORTEX_M == 0x04U) || (__CORTEX_M == 0x07U)

/* Return the FPSCR (floating-point status/control register); 0 when no FPU is used. */
__attribute__( ( always_inline ) ) __STATIC_INLINE uint32_t __get_FPSCR(void)
{
#if (__FPU_PRESENT == 1U) && (__FPU_USED == 1U)
  uint32_t result;

  __ASM volatile ("VMRS %0, fpscr" : "=r" (result) );
  return(result);
#else
  return(0U);
#endif
}

/* Write the FPSCR; a no-op when no FPU is used. */
__attribute__( ( always_inline ) ) __STATIC_INLINE void __set_FPSCR(uint32_t fpscr)
{
#if (__FPU_PRESENT == 1U) && (__FPU_USED == 1U)
  __ASM volatile ("VMSR fpscr, %0" : : "r" (fpscr) : "vfpcc");
#endif
}

#endif /* (__CORTEX_M == 0x04U) || (__CORTEX_M == 0x07U) */
/* On Thumb-1 (no Thumb-2), inline-assembly operands must live in low registers. */
#if defined (__thumb__) && !defined (__thumb2__)
#define __CMSIS_GCC_OUT_REG(r) "=l" (r)
#define __CMSIS_GCC_USE_REG(r) "l" (r)
#else
#define __CMSIS_GCC_OUT_REG(r) "=r" (r)
#define __CMSIS_GCC_USE_REG(r) "r" (r)
#endif

/* No Operation. */
__attribute__((always_inline)) __STATIC_INLINE void __NOP(void)
{
  __ASM volatile ("nop");
}

/* Wait For Interrupt: suspend execution until an interrupt occurs. */
__attribute__((always_inline)) __STATIC_INLINE void __WFI(void)
{
  __ASM volatile ("wfi");
}

/* Wait For Event: suspend execution until an event occurs. */
__attribute__((always_inline)) __STATIC_INLINE void __WFE(void)
{
  __ASM volatile ("wfe");
}

/* Send Event: signal an event to all processors in the system. */
__attribute__((always_inline)) __STATIC_INLINE void __SEV(void)
{
  __ASM volatile ("sev");
}
/* Instruction Synchronization Barrier: flush the pipeline. */
__attribute__((always_inline)) __STATIC_INLINE void __ISB(void)
{
  __ASM volatile ("isb 0xF":::"memory");
}

/* Data Synchronization Barrier: complete all outstanding memory accesses. */
__attribute__((always_inline)) __STATIC_INLINE void __DSB(void)
{
  __ASM volatile ("dsb 0xF":::"memory");
}

/* Data Memory Barrier: order memory accesses before and after the barrier. */
__attribute__((always_inline)) __STATIC_INLINE void __DMB(void)
{
  __ASM volatile ("dmb 0xF":::"memory");
}
/* Reverse the byte order of a 32-bit value. */
__attribute__((always_inline)) __STATIC_INLINE uint32_t __REV(uint32_t value)
{
#if (__GNUC__ > 4) || (__GNUC__ == 4 && __GNUC_MINOR__ >= 5)
  return __builtin_bswap32(value);
#else
  uint32_t result;

  __ASM volatile ("rev %0, %1" : __CMSIS_GCC_OUT_REG (result) : __CMSIS_GCC_USE_REG (value) );
  return(result);
#endif
}

/* Reverse the byte order within each 16-bit halfword. */
__attribute__((always_inline)) __STATIC_INLINE uint32_t __REV16(uint32_t value)
{
  uint32_t result;

  __ASM volatile ("rev16 %0, %1" : __CMSIS_GCC_OUT_REG (result) : __CMSIS_GCC_USE_REG (value) );
  return(result);
}

/* Reverse the byte order of the low 16 bits and sign-extend the result. */
__attribute__((always_inline)) __STATIC_INLINE int32_t __REVSH(int32_t value)
{
#if (__GNUC__ > 4) || (__GNUC__ == 4 && __GNUC_MINOR__ >= 8)
  return (short)__builtin_bswap16(value);
#else
  int32_t result;

  __ASM volatile ("revsh %0, %1" : __CMSIS_GCC_OUT_REG (result) : __CMSIS_GCC_USE_REG (value) );
  return(result);
#endif
}

/* Rotate a 32-bit value right by op2 bits. */
__attribute__((always_inline)) __STATIC_INLINE uint32_t __ROR(uint32_t op1, uint32_t op2)
{
  return (op1 >> op2) | (op1 << (32U - op2));
}
/* Insert a breakpoint instruction with the given immediate. */
#define __BKPT(value) __ASM volatile ("bkpt "#value)

/* Reverse the bit order of a 32-bit value (software fallback on cores without RBIT). */
__attribute__((always_inline)) __STATIC_INLINE uint32_t __RBIT(uint32_t value)
{
  uint32_t result;

#if (__CORTEX_M >= 0x03U) || (__CORTEX_SC >= 300U)
  __ASM volatile ("rbit %0, %1" : "=r" (result) : "r" (value) );
#else
  int32_t s = 4 * 8 - 1;               /* extra shift needed at the end */

  result = value;                      /* result accumulates the reversed bits */
  for (value >>= 1U; value; value >>= 1U)
  {
    result <<= 1U;
    result |= value & 1U;
    s--;
  }
  result <<= s;                        /* shift when the highest bits of value are zero */
#endif
  return(result);
}
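
/* Illustrative sketch, not part of the original header: typical uses of the
   data-reordering intrinsics above, e.g. network byte order and CRC bit
   reflection. The helper names are hypothetical. */
static inline uint32_t be32_to_host(uint32_t wire)
{
  return __REV(wire);                  /* 0xAABBCCDD -> 0xDDCCBBAA */
}

static inline uint32_t reflect32(uint32_t x)
{
  return __RBIT(x);                    /* bit 0 <-> bit 31, etc.   */
}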
/* Count leading zeros. */
#define __CLZ __builtin_clz

#if (__CORTEX_M >= 0x03U) || (__CORTEX_SC >= 300U)

/* Exclusive load of a byte: marks the address for exclusive access. */
__attribute__((always_inline)) __STATIC_INLINE uint8_t __LDREXB(volatile uint8_t *addr)
{
  uint32_t result;

#if (__GNUC__ > 4) || (__GNUC__ == 4 && __GNUC_MINOR__ >= 8)
  __ASM volatile ("ldrexb %0, %1" : "=r" (result) : "Q" (*addr) );
#else
  /* Before GCC 4.8, "Q" expands to [rx, #0], which the assembler rejects,
     so fall back to a less efficient operand form. */
  __ASM volatile ("ldrexb %0, [%1]" : "=r" (result) : "r" (addr) : "memory" );
#endif
  return ((uint8_t) result);
}

/* Exclusive load of a halfword. */
__attribute__((always_inline)) __STATIC_INLINE uint16_t __LDREXH(volatile uint16_t *addr)
{
  uint32_t result;

#if (__GNUC__ > 4) || (__GNUC__ == 4 && __GNUC_MINOR__ >= 8)
  __ASM volatile ("ldrexh %0, %1" : "=r" (result) : "Q" (*addr) );
#else
  __ASM volatile ("ldrexh %0, [%1]" : "=r" (result) : "r" (addr) : "memory" );
#endif
  return ((uint16_t) result);
}

/* Exclusive load of a word. */
__attribute__((always_inline)) __STATIC_INLINE uint32_t __LDREXW(volatile uint32_t *addr)
{
  uint32_t result;

  __ASM volatile ("ldrex %0, %1" : "=r" (result) : "Q" (*addr) );
  return(result);
}
/* Exclusive store of a byte; returns 0 on success, 1 if the reservation was lost. */
__attribute__((always_inline)) __STATIC_INLINE uint32_t __STREXB(uint8_t value, volatile uint8_t *addr)
{
  uint32_t result;

  __ASM volatile ("strexb %0, %2, %1" : "=&r" (result), "=Q" (*addr) : "r" ((uint32_t)value) );
  return(result);
}

/* Exclusive store of a halfword; returns 0 on success, 1 on failure. */
__attribute__((always_inline)) __STATIC_INLINE uint32_t __STREXH(uint16_t value, volatile uint16_t *addr)
{
  uint32_t result;

  __ASM volatile ("strexh %0, %2, %1" : "=&r" (result), "=Q" (*addr) : "r" ((uint32_t)value) );
  return(result);
}

/* Exclusive store of a word; returns 0 on success, 1 on failure. */
__attribute__((always_inline)) __STATIC_INLINE uint32_t __STREXW(uint32_t value, volatile uint32_t *addr)
{
  uint32_t result;

  __ASM volatile ("strex %0, %2, %1" : "=&r" (result), "=Q" (*addr) : "r" (value) );
  return(result);
}

/* Clear the exclusive-access reservation. */
__attribute__((always_inline)) __STATIC_INLINE void __CLREX(void)
{
  __ASM volatile ("clrex" ::: "memory");
}
/* Saturate a signed value to the given bit width (1..32). */
#define __SSAT(ARG1,ARG2) \
({                          \
  uint32_t __RES, __ARG1 = (ARG1); \
  __ASM ("ssat %0, %1, %2" : "=r" (__RES) : "I" (ARG2), "r" (__ARG1) ); \
  __RES; \
 })

/* Saturate a value to an unsigned range of the given bit width (0..31). */
#define __USAT(ARG1,ARG2) \
({                          \
  uint32_t __RES, __ARG1 = (ARG1); \
  __ASM ("usat %0, %1, %2" : "=r" (__RES) : "I" (ARG2), "r" (__ARG1) ); \
  __RES; \
 })

/* Rotate right by one bit, shifting the carry flag into bit 31. */
__attribute__((always_inline)) __STATIC_INLINE uint32_t __RRX(uint32_t value)
{
  uint32_t result;

  __ASM volatile ("rrx %0, %1" : __CMSIS_GCC_OUT_REG (result) : __CMSIS_GCC_USE_REG (value) );
  return(result);
}
/* Unprivileged load of a byte (uses unprivileged access permissions). */
__attribute__((always_inline)) __STATIC_INLINE uint8_t __LDRBT(volatile uint8_t *addr)
{
  uint32_t result;

#if (__GNUC__ > 4) || (__GNUC__ == 4 && __GNUC_MINOR__ >= 8)
  __ASM volatile ("ldrbt %0, %1" : "=r" (result) : "Q" (*addr) );
#else
  __ASM volatile ("ldrbt %0, [%1]" : "=r" (result) : "r" (addr) : "memory" );
#endif
  return ((uint8_t) result);
}

/* Unprivileged load of a halfword. */
__attribute__((always_inline)) __STATIC_INLINE uint16_t __LDRHT(volatile uint16_t *addr)
{
  uint32_t result;

#if (__GNUC__ > 4) || (__GNUC__ == 4 && __GNUC_MINOR__ >= 8)
  __ASM volatile ("ldrht %0, %1" : "=r" (result) : "Q" (*addr) );
#else
  __ASM volatile ("ldrht %0, [%1]" : "=r" (result) : "r" (addr) : "memory" );
#endif
  return ((uint16_t) result);
}

/* Unprivileged load of a word. */
__attribute__((always_inline)) __STATIC_INLINE uint32_t __LDRT(volatile uint32_t *addr)
{
  uint32_t result;

  __ASM volatile ("ldrt %0, %1" : "=r" (result) : "Q" (*addr) );
  return(result);
}
/* Unprivileged store of a byte. */
__attribute__((always_inline)) __STATIC_INLINE void __STRBT(uint8_t value, volatile uint8_t *addr)
{
  __ASM volatile ("strbt %1, %0" : "=Q" (*addr) : "r" ((uint32_t)value) );
}

/* Unprivileged store of a halfword. */
__attribute__((always_inline)) __STATIC_INLINE void __STRHT(uint16_t value, volatile uint16_t *addr)
{
  __ASM volatile ("strht %1, %0" : "=Q" (*addr) : "r" ((uint32_t)value) );
}

/* Unprivileged store of a word. */
__attribute__((always_inline)) __STATIC_INLINE void __STRT(uint32_t value, volatile uint32_t *addr)
{
  __ASM volatile ("strt %1, %0" : "=Q" (*addr) : "r" (value) );
}
#endif /* (__CORTEX_M >= 0x03U) || (__CORTEX_SC >= 300U) */


#if (__CORTEX_M >= 0x04U) /* SIMD/DSP extension: Cortex-M4 and Cortex-M7 only */

/* ---- Byte-wise SIMD add/subtract on four 8-bit lanes ----
   s*  signed (sets APSR.GE)   q*  signed saturating    sh* signed halving
   u*  unsigned (sets GE)      uq* unsigned saturating  uh* unsigned halving */

__attribute__( ( always_inline ) ) __STATIC_INLINE uint32_t __SADD8(uint32_t op1, uint32_t op2)
{
  uint32_t result;
  __ASM volatile ("sadd8 %0, %1, %2" : "=r" (result) : "r" (op1), "r" (op2) );
  return(result);
}

__attribute__( ( always_inline ) ) __STATIC_INLINE uint32_t __QADD8(uint32_t op1, uint32_t op2)
{
  uint32_t result;
  __ASM volatile ("qadd8 %0, %1, %2" : "=r" (result) : "r" (op1), "r" (op2) );
  return(result);
}

__attribute__( ( always_inline ) ) __STATIC_INLINE uint32_t __SHADD8(uint32_t op1, uint32_t op2)
{
  uint32_t result;
  __ASM volatile ("shadd8 %0, %1, %2" : "=r" (result) : "r" (op1), "r" (op2) );
  return(result);
}

__attribute__( ( always_inline ) ) __STATIC_INLINE uint32_t __UADD8(uint32_t op1, uint32_t op2)
{
  uint32_t result;
  __ASM volatile ("uadd8 %0, %1, %2" : "=r" (result) : "r" (op1), "r" (op2) );
  return(result);
}

__attribute__( ( always_inline ) ) __STATIC_INLINE uint32_t __UQADD8(uint32_t op1, uint32_t op2)
{
  uint32_t result;
  __ASM volatile ("uqadd8 %0, %1, %2" : "=r" (result) : "r" (op1), "r" (op2) );
  return(result);
}

__attribute__( ( always_inline ) ) __STATIC_INLINE uint32_t __UHADD8(uint32_t op1, uint32_t op2)
{
  uint32_t result;
  __ASM volatile ("uhadd8 %0, %1, %2" : "=r" (result) : "r" (op1), "r" (op2) );
  return(result);
}

__attribute__( ( always_inline ) ) __STATIC_INLINE uint32_t __SSUB8(uint32_t op1, uint32_t op2)
{
  uint32_t result;
  __ASM volatile ("ssub8 %0, %1, %2" : "=r" (result) : "r" (op1), "r" (op2) );
  return(result);
}

__attribute__( ( always_inline ) ) __STATIC_INLINE uint32_t __QSUB8(uint32_t op1, uint32_t op2)
{
  uint32_t result;
  __ASM volatile ("qsub8 %0, %1, %2" : "=r" (result) : "r" (op1), "r" (op2) );
  return(result);
}

__attribute__( ( always_inline ) ) __STATIC_INLINE uint32_t __SHSUB8(uint32_t op1, uint32_t op2)
{
  uint32_t result;
  __ASM volatile ("shsub8 %0, %1, %2" : "=r" (result) : "r" (op1), "r" (op2) );
  return(result);
}

__attribute__( ( always_inline ) ) __STATIC_INLINE uint32_t __USUB8(uint32_t op1, uint32_t op2)
{
  uint32_t result;
  __ASM volatile ("usub8 %0, %1, %2" : "=r" (result) : "r" (op1), "r" (op2) );
  return(result);
}

__attribute__( ( always_inline ) ) __STATIC_INLINE uint32_t __UQSUB8(uint32_t op1, uint32_t op2)
{
  uint32_t result;
  __ASM volatile ("uqsub8 %0, %1, %2" : "=r" (result) : "r" (op1), "r" (op2) );
  return(result);
}

__attribute__( ( always_inline ) ) __STATIC_INLINE uint32_t __UHSUB8(uint32_t op1, uint32_t op2)
{
  uint32_t result;
  __ASM volatile ("uhsub8 %0, %1, %2" : "=r" (result) : "r" (op1), "r" (op2) );
  return(result);
}
/* ---- Halfword-wise SIMD add/subtract on two 16-bit lanes (same prefixes) ---- */

__attribute__( ( always_inline ) ) __STATIC_INLINE uint32_t __SADD16(uint32_t op1, uint32_t op2)
{
  uint32_t result;
  __ASM volatile ("sadd16 %0, %1, %2" : "=r" (result) : "r" (op1), "r" (op2) );
  return(result);
}

__attribute__( ( always_inline ) ) __STATIC_INLINE uint32_t __QADD16(uint32_t op1, uint32_t op2)
{
  uint32_t result;
  __ASM volatile ("qadd16 %0, %1, %2" : "=r" (result) : "r" (op1), "r" (op2) );
  return(result);
}

__attribute__( ( always_inline ) ) __STATIC_INLINE uint32_t __SHADD16(uint32_t op1, uint32_t op2)
{
  uint32_t result;
  __ASM volatile ("shadd16 %0, %1, %2" : "=r" (result) : "r" (op1), "r" (op2) );
  return(result);
}

__attribute__( ( always_inline ) ) __STATIC_INLINE uint32_t __UADD16(uint32_t op1, uint32_t op2)
{
  uint32_t result;
  __ASM volatile ("uadd16 %0, %1, %2" : "=r" (result) : "r" (op1), "r" (op2) );
  return(result);
}

__attribute__( ( always_inline ) ) __STATIC_INLINE uint32_t __UQADD16(uint32_t op1, uint32_t op2)
{
  uint32_t result;
  __ASM volatile ("uqadd16 %0, %1, %2" : "=r" (result) : "r" (op1), "r" (op2) );
  return(result);
}

__attribute__( ( always_inline ) ) __STATIC_INLINE uint32_t __UHADD16(uint32_t op1, uint32_t op2)
{
  uint32_t result;
  __ASM volatile ("uhadd16 %0, %1, %2" : "=r" (result) : "r" (op1), "r" (op2) );
  return(result);
}

__attribute__( ( always_inline ) ) __STATIC_INLINE uint32_t __SSUB16(uint32_t op1, uint32_t op2)
{
  uint32_t result;
  __ASM volatile ("ssub16 %0, %1, %2" : "=r" (result) : "r" (op1), "r" (op2) );
  return(result);
}

__attribute__( ( always_inline ) ) __STATIC_INLINE uint32_t __QSUB16(uint32_t op1, uint32_t op2)
{
  uint32_t result;
  __ASM volatile ("qsub16 %0, %1, %2" : "=r" (result) : "r" (op1), "r" (op2) );
  return(result);
}

__attribute__( ( always_inline ) ) __STATIC_INLINE uint32_t __SHSUB16(uint32_t op1, uint32_t op2)
{
  uint32_t result;
  __ASM volatile ("shsub16 %0, %1, %2" : "=r" (result) : "r" (op1), "r" (op2) );
  return(result);
}

__attribute__( ( always_inline ) ) __STATIC_INLINE uint32_t __USUB16(uint32_t op1, uint32_t op2)
{
  uint32_t result;
  __ASM volatile ("usub16 %0, %1, %2" : "=r" (result) : "r" (op1), "r" (op2) );
  return(result);
}

__attribute__( ( always_inline ) ) __STATIC_INLINE uint32_t __UQSUB16(uint32_t op1, uint32_t op2)
{
  uint32_t result;
  __ASM volatile ("uqsub16 %0, %1, %2" : "=r" (result) : "r" (op1), "r" (op2) );
  return(result);
}

__attribute__( ( always_inline ) ) __STATIC_INLINE uint32_t __UHSUB16(uint32_t op1, uint32_t op2)
{
  uint32_t result;
  __ASM volatile ("uhsub16 %0, %1, %2" : "=r" (result) : "r" (op1), "r" (op2) );
  return(result);
}
/* ---- Exchange, then add/subtract on 16-bit lanes ----
   *ASX: result.hi = op1.hi + op2.lo, result.lo = op1.lo - op2.hi
   *SAX: result.hi = op1.hi - op2.lo, result.lo = op1.lo + op2.hi
   Prefixes as above (s/q/sh/u/uq/uh). */

__attribute__( ( always_inline ) ) __STATIC_INLINE uint32_t __SASX(uint32_t op1, uint32_t op2)
{
  uint32_t result;
  __ASM volatile ("sasx %0, %1, %2" : "=r" (result) : "r" (op1), "r" (op2) );
  return(result);
}

__attribute__( ( always_inline ) ) __STATIC_INLINE uint32_t __QASX(uint32_t op1, uint32_t op2)
{
  uint32_t result;
  __ASM volatile ("qasx %0, %1, %2" : "=r" (result) : "r" (op1), "r" (op2) );
  return(result);
}

__attribute__( ( always_inline ) ) __STATIC_INLINE uint32_t __SHASX(uint32_t op1, uint32_t op2)
{
  uint32_t result;
  __ASM volatile ("shasx %0, %1, %2" : "=r" (result) : "r" (op1), "r" (op2) );
  return(result);
}

__attribute__( ( always_inline ) ) __STATIC_INLINE uint32_t __UASX(uint32_t op1, uint32_t op2)
{
  uint32_t result;
  __ASM volatile ("uasx %0, %1, %2" : "=r" (result) : "r" (op1), "r" (op2) );
  return(result);
}

__attribute__( ( always_inline ) ) __STATIC_INLINE uint32_t __UQASX(uint32_t op1, uint32_t op2)
{
  uint32_t result;
  __ASM volatile ("uqasx %0, %1, %2" : "=r" (result) : "r" (op1), "r" (op2) );
  return(result);
}

__attribute__( ( always_inline ) ) __STATIC_INLINE uint32_t __UHASX(uint32_t op1, uint32_t op2)
{
  uint32_t result;
  __ASM volatile ("uhasx %0, %1, %2" : "=r" (result) : "r" (op1), "r" (op2) );
  return(result);
}

__attribute__( ( always_inline ) ) __STATIC_INLINE uint32_t __SSAX(uint32_t op1, uint32_t op2)
{
  uint32_t result;
  __ASM volatile ("ssax %0, %1, %2" : "=r" (result) : "r" (op1), "r" (op2) );
  return(result);
}

__attribute__( ( always_inline ) ) __STATIC_INLINE uint32_t __QSAX(uint32_t op1, uint32_t op2)
{
  uint32_t result;
  __ASM volatile ("qsax %0, %1, %2" : "=r" (result) : "r" (op1), "r" (op2) );
  return(result);
}

__attribute__( ( always_inline ) ) __STATIC_INLINE uint32_t __SHSAX(uint32_t op1, uint32_t op2)
{
  uint32_t result;
  __ASM volatile ("shsax %0, %1, %2" : "=r" (result) : "r" (op1), "r" (op2) );
  return(result);
}

__attribute__( ( always_inline ) ) __STATIC_INLINE uint32_t __USAX(uint32_t op1, uint32_t op2)
{
  uint32_t result;
  __ASM volatile ("usax %0, %1, %2" : "=r" (result) : "r" (op1), "r" (op2) );
  return(result);
}

__attribute__( ( always_inline ) ) __STATIC_INLINE uint32_t __UQSAX(uint32_t op1, uint32_t op2)
{
  uint32_t result;
  __ASM volatile ("uqsax %0, %1, %2" : "=r" (result) : "r" (op1), "r" (op2) );
  return(result);
}

__attribute__( ( always_inline ) ) __STATIC_INLINE uint32_t __UHSAX(uint32_t op1, uint32_t op2)
{
  uint32_t result;
  __ASM volatile ("uhsax %0, %1, %2" : "=r" (result) : "r" (op1), "r" (op2) );
  return(result);
}
/* Sum of absolute differences of four byte lanes. */
__attribute__( ( always_inline ) ) __STATIC_INLINE uint32_t __USAD8(uint32_t op1, uint32_t op2)
{
  uint32_t result;
  __ASM volatile ("usad8 %0, %1, %2" : "=r" (result) : "r" (op1), "r" (op2) );
  return(result);
}

/* Sum of absolute differences of four byte lanes, accumulated into op3. */
__attribute__( ( always_inline ) ) __STATIC_INLINE uint32_t __USADA8(uint32_t op1, uint32_t op2, uint32_t op3)
{
  uint32_t result;
  __ASM volatile ("usada8 %0, %1, %2, %3" : "=r" (result) : "r" (op1), "r" (op2), "r" (op3) );
  return(result);
}
/* Saturate both halfwords of a value to the given signed bit width. */
#define __SSAT16(ARG1,ARG2) \
({                          \
  int32_t __RES, __ARG1 = (ARG1); \
  __ASM ("ssat16 %0, %1, %2" : "=r" (__RES) : "I" (ARG2), "r" (__ARG1) ); \
  __RES; \
 })

/* Saturate both halfwords of a value to the given unsigned bit width. */
#define __USAT16(ARG1,ARG2) \
({                          \
  uint32_t __RES, __ARG1 = (ARG1); \
  __ASM ("usat16 %0, %1, %2" : "=r" (__RES) : "I" (ARG2), "r" (__ARG1) ); \
  __RES; \
 })

/* Zero-extend bytes 0 and 2 into two 16-bit lanes. */
__attribute__( ( always_inline ) ) __STATIC_INLINE uint32_t __UXTB16(uint32_t op1)
{
  uint32_t result;
  __ASM volatile ("uxtb16 %0, %1" : "=r" (result) : "r" (op1));
  return(result);
}

/* Zero-extend bytes 0 and 2 of op2 and add them to the halfwords of op1. */
__attribute__( ( always_inline ) ) __STATIC_INLINE uint32_t __UXTAB16(uint32_t op1, uint32_t op2)
{
  uint32_t result;
  __ASM volatile ("uxtab16 %0, %1, %2" : "=r" (result) : "r" (op1), "r" (op2) );
  return(result);
}

/* Sign-extend bytes 0 and 2 into two 16-bit lanes. */
__attribute__( ( always_inline ) ) __STATIC_INLINE uint32_t __SXTB16(uint32_t op1)
{
  uint32_t result;
  __ASM volatile ("sxtb16 %0, %1" : "=r" (result) : "r" (op1));
  return(result);
}

/* Sign-extend bytes 0 and 2 of op2 and add them to the halfwords of op1. */
__attribute__( ( always_inline ) ) __STATIC_INLINE uint32_t __SXTAB16(uint32_t op1, uint32_t op2)
{
  uint32_t result;
  __ASM volatile ("sxtab16 %0, %1, %2" : "=r" (result) : "r" (op1), "r" (op2) );
  return(result);
}
/* Dual signed 16x16 multiply, products added: op1.lo*op2.lo + op1.hi*op2.hi. */
__attribute__( ( always_inline ) ) __STATIC_INLINE uint32_t __SMUAD (uint32_t op1, uint32_t op2)
{
  uint32_t result;
  __ASM volatile ("smuad %0, %1, %2" : "=r" (result) : "r" (op1), "r" (op2) );
  return(result);
}

/* As __SMUAD, with the halfwords of op2 exchanged first. */
__attribute__( ( always_inline ) ) __STATIC_INLINE uint32_t __SMUADX (uint32_t op1, uint32_t op2)
{
  uint32_t result;
  __ASM volatile ("smuadx %0, %1, %2" : "=r" (result) : "r" (op1), "r" (op2) );
  return(result);
}

/* Dual multiply-accumulate: op3 + op1.lo*op2.lo + op1.hi*op2.hi. */
__attribute__( ( always_inline ) ) __STATIC_INLINE uint32_t __SMLAD (uint32_t op1, uint32_t op2, uint32_t op3)
{
  uint32_t result;
  __ASM volatile ("smlad %0, %1, %2, %3" : "=r" (result) : "r" (op1), "r" (op2), "r" (op3) );
  return(result);
}

/* As __SMLAD, with the halfwords of op2 exchanged first. */
__attribute__( ( always_inline ) ) __STATIC_INLINE uint32_t __SMLADX (uint32_t op1, uint32_t op2, uint32_t op3)
{
  uint32_t result;
  __ASM volatile ("smladx %0, %1, %2, %3" : "=r" (result) : "r" (op1), "r" (op2), "r" (op3) );
  return(result);
}
/* Dual 16x16 multiply with products added into a 64-bit accumulator. */
__attribute__( ( always_inline ) ) __STATIC_INLINE uint64_t __SMLALD (uint32_t op1, uint32_t op2, uint64_t acc)
{
  union llreg_u{
    uint32_t w32[2];
    uint64_t w64;
  } llr;
  llr.w64 = acc;

#ifndef __ARMEB__   /* Little endian */
  __ASM volatile ("smlald %0, %1, %2, %3" : "=r" (llr.w32[0]), "=r" (llr.w32[1]): "r" (op1), "r" (op2) , "0" (llr.w32[0]), "1" (llr.w32[1]) );
#else               /* Big endian */
  __ASM volatile ("smlald %0, %1, %2, %3" : "=r" (llr.w32[1]), "=r" (llr.w32[0]): "r" (op1), "r" (op2) , "0" (llr.w32[1]), "1" (llr.w32[0]) );
#endif

  return(llr.w64);
}

/* As __SMLALD, with the halfwords of op2 exchanged first. */
__attribute__( ( always_inline ) ) __STATIC_INLINE uint64_t __SMLALDX (uint32_t op1, uint32_t op2, uint64_t acc)
{
  union llreg_u{
    uint32_t w32[2];
    uint64_t w64;
  } llr;
  llr.w64 = acc;

#ifndef __ARMEB__   /* Little endian */
  __ASM volatile ("smlaldx %0, %1, %2, %3" : "=r" (llr.w32[0]), "=r" (llr.w32[1]): "r" (op1), "r" (op2) , "0" (llr.w32[0]), "1" (llr.w32[1]) );
#else               /* Big endian */
  __ASM volatile ("smlaldx %0, %1, %2, %3" : "=r" (llr.w32[1]), "=r" (llr.w32[0]): "r" (op1), "r" (op2) , "0" (llr.w32[1]), "1" (llr.w32[0]) );
#endif

  return(llr.w64);
}
/* Dual signed 16x16 multiply, products subtracted: op1.lo*op2.lo - op1.hi*op2.hi. */
__attribute__( ( always_inline ) ) __STATIC_INLINE uint32_t __SMUSD (uint32_t op1, uint32_t op2)
{
  uint32_t result;
  __ASM volatile ("smusd %0, %1, %2" : "=r" (result) : "r" (op1), "r" (op2) );
  return(result);
}

/* As __SMUSD, with the halfwords of op2 exchanged first. */
__attribute__( ( always_inline ) ) __STATIC_INLINE uint32_t __SMUSDX (uint32_t op1, uint32_t op2)
{
  uint32_t result;
  __ASM volatile ("smusdx %0, %1, %2" : "=r" (result) : "r" (op1), "r" (op2) );
  return(result);
}

/* Dual multiply-subtract-accumulate: op3 + (op1.lo*op2.lo - op1.hi*op2.hi). */
__attribute__( ( always_inline ) ) __STATIC_INLINE uint32_t __SMLSD (uint32_t op1, uint32_t op2, uint32_t op3)
{
  uint32_t result;
  __ASM volatile ("smlsd %0, %1, %2, %3" : "=r" (result) : "r" (op1), "r" (op2), "r" (op3) );
  return(result);
}

/* As __SMLSD, with the halfwords of op2 exchanged first. */
__attribute__( ( always_inline ) ) __STATIC_INLINE uint32_t __SMLSDX (uint32_t op1, uint32_t op2, uint32_t op3)
{
  uint32_t result;
  __ASM volatile ("smlsdx %0, %1, %2, %3" : "=r" (result) : "r" (op1), "r" (op2), "r" (op3) );
  return(result);
}
/* Dual 16x16 multiply with the product difference added into a 64-bit accumulator. */
__attribute__( ( always_inline ) ) __STATIC_INLINE uint64_t __SMLSLD (uint32_t op1, uint32_t op2, uint64_t acc)
{
  union llreg_u{
    uint32_t w32[2];
    uint64_t w64;
  } llr;
  llr.w64 = acc;

#ifndef __ARMEB__   /* Little endian */
  __ASM volatile ("smlsld %0, %1, %2, %3" : "=r" (llr.w32[0]), "=r" (llr.w32[1]): "r" (op1), "r" (op2) , "0" (llr.w32[0]), "1" (llr.w32[1]) );
#else               /* Big endian */
  __ASM volatile ("smlsld %0, %1, %2, %3" : "=r" (llr.w32[1]), "=r" (llr.w32[0]): "r" (op1), "r" (op2) , "0" (llr.w32[1]), "1" (llr.w32[0]) );
#endif

  return(llr.w64);
}

/* As __SMLSLD, with the halfwords of op2 exchanged first. */
__attribute__( ( always_inline ) ) __STATIC_INLINE uint64_t __SMLSLDX (uint32_t op1, uint32_t op2, uint64_t acc)
{
  union llreg_u{
    uint32_t w32[2];
    uint64_t w64;
  } llr;
  llr.w64 = acc;

#ifndef __ARMEB__   /* Little endian */
  __ASM volatile ("smlsldx %0, %1, %2, %3" : "=r" (llr.w32[0]), "=r" (llr.w32[1]): "r" (op1), "r" (op2) , "0" (llr.w32[0]), "1" (llr.w32[1]) );
#else               /* Big endian */
  __ASM volatile ("smlsldx %0, %1, %2, %3" : "=r" (llr.w32[1]), "=r" (llr.w32[0]): "r" (op1), "r" (op2) , "0" (llr.w32[1]), "1" (llr.w32[0]) );
#endif

  return(llr.w64);
}
/* Select each byte from op1 or op2 according to the APSR.GE flags. */
__attribute__( ( always_inline ) ) __STATIC_INLINE uint32_t __SEL (uint32_t op1, uint32_t op2)
{
  uint32_t result;
  __ASM volatile ("sel %0, %1, %2" : "=r" (result) : "r" (op1), "r" (op2) );
  return(result);
}

/* Saturating signed 32-bit addition. */
__attribute__( ( always_inline ) ) __STATIC_INLINE int32_t __QADD( int32_t op1, int32_t op2)
{
  int32_t result;
  __ASM volatile ("qadd %0, %1, %2" : "=r" (result) : "r" (op1), "r" (op2) );
  return(result);
}

/* Saturating signed 32-bit subtraction. */
__attribute__( ( always_inline ) ) __STATIC_INLINE int32_t __QSUB( int32_t op1, int32_t op2)
{
  int32_t result;
  __ASM volatile ("qsub %0, %1, %2" : "=r" (result) : "r" (op1), "r" (op2) );
  return(result);
}
/* Pack halfwords: bottom half of __ARG1 with top half of (__ARG2 << ARG3). */
#define __PKHBT(ARG1,ARG2,ARG3) \
({                          \
  uint32_t __RES, __ARG1 = (ARG1), __ARG2 = (ARG2); \
  __ASM ("pkhbt %0, %1, %2, lsl %3" : "=r" (__RES) : "r" (__ARG1), "r" (__ARG2), "I" (ARG3) ); \
  __RES; \
 })

/* Pack halfwords: top half of __ARG1 with bottom half of (__ARG2 >> ARG3). */
#define __PKHTB(ARG1,ARG2,ARG3) \
({                          \
  uint32_t __RES, __ARG1 = (ARG1), __ARG2 = (ARG2); \
  if (ARG3 == 0) \
    __ASM ("pkhtb %0, %1, %2" : "=r" (__RES) : "r" (__ARG1), "r" (__ARG2) ); \
  else \
    __ASM ("pkhtb %0, %1, %2, asr %3" : "=r" (__RES) : "r" (__ARG1), "r" (__ARG2), "I" (ARG3) ); \
  __RES; \
 })

/* Most-significant-word multiply-accumulate: op3 + (((int64_t)op1 * op2) >> 32). */
__attribute__( ( always_inline ) ) __STATIC_INLINE uint32_t __SMMLA (int32_t op1, int32_t op2, int32_t op3)
{
  int32_t result;
  __ASM volatile ("smmla %0, %1, %2, %3" : "=r" (result): "r" (op1), "r" (op2), "r" (op3) );
  return(result);
}

#endif /* (__CORTEX_M >= 0x04U) */
#if defined ( __GNUC__ )
#pragma GCC diagnostic pop
#endif