35 #ifndef __CMSIS_ARMCC_V6_H 36 #define __CMSIS_ARMCC_V6_H 50 __attribute__((always_inline)) __STATIC_INLINE
void __enable_irq(
void)
52 __ASM
volatile (
"cpsie i" : : :
"memory");
63 __ASM
volatile (
"cpsid i" : : :
"memory");
72 __attribute__((always_inline)) __STATIC_INLINE uint32_t
__get_CONTROL(
void)
76 __ASM
volatile (
"MRS %0, control" :
"=r" (result) );
81 #if (__ARM_FEATURE_CMSE == 3U) 87 __attribute__((always_inline)) __STATIC_INLINE uint32_t __TZ_get_CONTROL_NS(
void)
91 __ASM
volatile (
"MRS %0, control_ns" :
"=r" (result) );
102 __attribute__((always_inline)) __STATIC_INLINE
void __set_CONTROL(uint32_t control)
104 __ASM
volatile (
"MSR control, %0" : :
"r" (control) :
"memory");
108 #if (__ARM_FEATURE_CMSE == 3U) 114 __attribute__((always_inline)) __STATIC_INLINE
void __TZ_set_CONTROL_NS(uint32_t control)
116 __ASM
volatile (
"MSR control_ns, %0" : :
"r" (control) :
"memory");
126 __attribute__((always_inline)) __STATIC_INLINE uint32_t
__get_IPSR(
void)
130 __ASM
volatile (
"MRS %0, ipsr" :
"=r" (result) );
135 #if (__ARM_FEATURE_CMSE == 3U) 141 __attribute__((always_inline)) __STATIC_INLINE uint32_t __TZ_get_IPSR_NS(
void)
145 __ASM
volatile (
"MRS %0, ipsr_ns" :
"=r" (result) );
156 __attribute__((always_inline)) __STATIC_INLINE uint32_t
__get_APSR(
void)
160 __ASM
volatile (
"MRS %0, apsr" :
"=r" (result) );
165 #if (__ARM_FEATURE_CMSE == 3U) 171 __attribute__((always_inline)) __STATIC_INLINE uint32_t __TZ_get_APSR_NS(
void)
175 __ASM
volatile (
"MRS %0, apsr_ns" :
"=r" (result) );
186 __attribute__((always_inline)) __STATIC_INLINE uint32_t
__get_xPSR(
void)
190 __ASM
volatile (
"MRS %0, xpsr" :
"=r" (result) );
195 #if (__ARM_FEATURE_CMSE == 3U) 201 __attribute__((always_inline)) __STATIC_INLINE uint32_t __TZ_get_xPSR_NS(
void)
205 __ASM
volatile (
"MRS %0, xpsr_ns" :
"=r" (result) );
216 __attribute__((always_inline)) __STATIC_INLINE uint32_t
__get_PSP(
void)
218 register uint32_t result;
220 __ASM
volatile (
"MRS %0, psp" :
"=r" (result) );
225 #if (__ARM_FEATURE_CMSE == 3U) 231 __attribute__((always_inline)) __STATIC_INLINE uint32_t __TZ_get_PSP_NS(
void)
233 register uint32_t result;
235 __ASM
volatile (
"MRS %0, psp_ns" :
"=r" (result) );
246 __attribute__((always_inline)) __STATIC_INLINE
void __set_PSP(uint32_t topOfProcStack)
248 __ASM
volatile (
"MSR psp, %0" : :
"r" (topOfProcStack) :
"sp");
252 #if (__ARM_FEATURE_CMSE == 3U) 258 __attribute__((always_inline)) __STATIC_INLINE
void __TZ_set_PSP_NS(uint32_t topOfProcStack)
260 __ASM
volatile (
"MSR psp_ns, %0" : :
"r" (topOfProcStack) :
"sp");
270 __attribute__((always_inline)) __STATIC_INLINE uint32_t
__get_MSP(
void)
272 register uint32_t result;
274 __ASM
volatile (
"MRS %0, msp" :
"=r" (result) );
279 #if (__ARM_FEATURE_CMSE == 3U) 285 __attribute__((always_inline)) __STATIC_INLINE uint32_t __TZ_get_MSP_NS(
void)
287 register uint32_t result;
289 __ASM
volatile (
"MRS %0, msp_ns" :
"=r" (result) );
300 __attribute__((always_inline)) __STATIC_INLINE
void __set_MSP(uint32_t topOfMainStack)
302 __ASM
volatile (
"MSR msp, %0" : :
"r" (topOfMainStack) :
"sp");
306 #if (__ARM_FEATURE_CMSE == 3U) 312 __attribute__((always_inline)) __STATIC_INLINE
void __TZ_set_MSP_NS(uint32_t topOfMainStack)
314 __ASM
volatile (
"MSR msp_ns, %0" : :
"r" (topOfMainStack) :
"sp");
324 __attribute__((always_inline)) __STATIC_INLINE uint32_t
__get_PRIMASK(
void)
328 __ASM
volatile (
"MRS %0, primask" :
"=r" (result) );
333 #if (__ARM_FEATURE_CMSE == 3U) 339 __attribute__((always_inline)) __STATIC_INLINE uint32_t __TZ_get_PRIMASK_NS(
void)
343 __ASM
volatile (
"MRS %0, primask_ns" :
"=r" (result) );
354 __attribute__((always_inline)) __STATIC_INLINE
void __set_PRIMASK(uint32_t priMask)
356 __ASM
volatile (
"MSR primask, %0" : :
"r" (priMask) :
"memory");
360 #if (__ARM_FEATURE_CMSE == 3U) 366 __attribute__((always_inline)) __STATIC_INLINE
void __TZ_set_PRIMASK_NS(uint32_t priMask)
368 __ASM
volatile (
"MSR primask_ns, %0" : :
"r" (priMask) :
"memory");
#if ((__ARM_ARCH_7M__ == 1U) || (__ARM_ARCH_7EM__ == 1U) || (__ARM_ARCH_8M__ == 1U))

/**
  \brief   Enable FIQ
  \details Enables FIQ interrupts by clearing the F-bit in the CPSR.
           Can only be executed in Privileged modes.
 */
__attribute__((always_inline)) __STATIC_INLINE void __enable_fault_irq(void)
{
  __ASM volatile ("cpsie f" : : : "memory");
}


/**
  \brief   Disable FIQ
  \details Disables FIQ interrupts by setting the F-bit in the CPSR.
           Can only be executed in Privileged modes.
 */
__attribute__((always_inline)) __STATIC_INLINE void __disable_fault_irq(void)
{
  __ASM volatile ("cpsid f" : : : "memory");
}


/**
  \brief   Get Base Priority
  \return  Base Priority register value
 */
__attribute__((always_inline)) __STATIC_INLINE uint32_t __get_BASEPRI(void)
{
  uint32_t result;

  __ASM volatile ("MRS %0, basepri" : "=r" (result) );
  return(result);
}


#if (__ARM_FEATURE_CMSE == 3U)
/**
  \brief   Get Base Priority (non-secure)
  \return  non-secure Base Priority register value
 */
__attribute__((always_inline)) __STATIC_INLINE uint32_t __TZ_get_BASEPRI_NS(void)
{
  uint32_t result;

  __ASM volatile ("MRS %0, basepri_ns" : "=r" (result) );
  return(result);
}
#endif


/**
  \brief   Set Base Priority
  \param [in]    value  Base Priority value to set
 */
__attribute__((always_inline)) __STATIC_INLINE void __set_BASEPRI(uint32_t value)
{
  __ASM volatile ("MSR basepri, %0" : : "r" (value) : "memory");
}


#if (__ARM_FEATURE_CMSE == 3U)
/**
  \brief   Set Base Priority (non-secure)
  \param [in]    value  non-secure Base Priority value to set
 */
__attribute__((always_inline)) __STATIC_INLINE void __TZ_set_BASEPRI_NS(uint32_t value)
{
  __ASM volatile ("MSR basepri_ns, %0" : : "r" (value) : "memory");
}
#endif


/**
  \brief   Set Base Priority with condition
  \details Only changes BASEPRI if the new value raises the priority
           (BASEPRI_MAX write semantics).
  \param [in]    value  Base Priority value to set
 */
__attribute__((always_inline)) __STATIC_INLINE void __set_BASEPRI_MAX(uint32_t value)
{
  __ASM volatile ("MSR basepri_max, %0" : : "r" (value) : "memory");
}


#if (__ARM_FEATURE_CMSE == 3U)
/**
  \brief   Set Base Priority with condition (non-secure)
  \param [in]    value  non-secure Base Priority value to set
 */
__attribute__((always_inline)) __STATIC_INLINE void __TZ_set_BASEPRI_MAX_NS(uint32_t value)
{
  __ASM volatile ("MSR basepri_max_ns, %0" : : "r" (value) : "memory");
}
#endif


/**
  \brief   Get Fault Mask
  \return  Fault Mask register value
 */
__attribute__((always_inline)) __STATIC_INLINE uint32_t __get_FAULTMASK(void)
{
  uint32_t result;

  __ASM volatile ("MRS %0, faultmask" : "=r" (result) );
  return(result);
}


#if (__ARM_FEATURE_CMSE == 3U)
/**
  \brief   Get Fault Mask (non-secure)
  \return  non-secure Fault Mask register value
 */
__attribute__((always_inline)) __STATIC_INLINE uint32_t __TZ_get_FAULTMASK_NS(void)
{
  uint32_t result;

  __ASM volatile ("MRS %0, faultmask_ns" : "=r" (result) );
  return(result);
}
#endif


/**
  \brief   Set Fault Mask
  \param [in]    faultMask  Fault Mask value to set
 */
__attribute__((always_inline)) __STATIC_INLINE void __set_FAULTMASK(uint32_t faultMask)
{
  __ASM volatile ("MSR faultmask, %0" : : "r" (faultMask) : "memory");
}


#if (__ARM_FEATURE_CMSE == 3U)
/**
  \brief   Set Fault Mask (non-secure)
  \param [in]    faultMask  non-secure Fault Mask value to set
 */
__attribute__((always_inline)) __STATIC_INLINE void __TZ_set_FAULTMASK_NS(uint32_t faultMask)
{
  __ASM volatile ("MSR faultmask_ns, %0" : : "r" (faultMask) : "memory");
}
#endif

#endif /* (__ARM_ARCH_7M__ == 1U) || (__ARM_ARCH_7EM__ == 1U) || (__ARM_ARCH_8M__ == 1U) */
#if (__ARM_ARCH_8M__ == 1U)

/**
  \brief   Get Process Stack Pointer Limit
  \return  PSPLIM Register value
 */
__attribute__((always_inline)) __STATIC_INLINE uint32_t __get_PSPLIM(void)
{
  register uint32_t result;

  __ASM volatile ("MRS %0, psplim" : "=r" (result) );
  return(result);
}


#if (__ARM_FEATURE_CMSE == 3U) && (__ARM_ARCH_PROFILE == 'M')
/**
  \brief   Get Process Stack Pointer Limit (non-secure)
  \return  non-secure PSPLIM Register value
 */
__attribute__((always_inline)) __STATIC_INLINE uint32_t __TZ_get_PSPLIM_NS(void)
{
  register uint32_t result;

  __ASM volatile ("MRS %0, psplim_ns" : "=r" (result) );
  return(result);
}
#endif


/**
  \brief   Set Process Stack Pointer Limit
  \param [in]    ProcStackPtrLimit  Process Stack Pointer Limit value to set
 */
__attribute__((always_inline)) __STATIC_INLINE void __set_PSPLIM(uint32_t ProcStackPtrLimit)
{
  __ASM volatile ("MSR psplim, %0" : : "r" (ProcStackPtrLimit));
}


#if (__ARM_FEATURE_CMSE == 3U) && (__ARM_ARCH_PROFILE == 'M')
/**
  \brief   Set Process Stack Pointer Limit (non-secure)
  \param [in]    ProcStackPtrLimit  non-secure PSPLIM value to set
 */
__attribute__((always_inline)) __STATIC_INLINE void __TZ_set_PSPLIM_NS(uint32_t ProcStackPtrLimit)
{
  __ASM volatile ("MSR psplim_ns, %0\n" : : "r" (ProcStackPtrLimit));
}
#endif


/**
  \brief   Get Main Stack Pointer Limit
  \return  MSPLIM Register value
 */
__attribute__((always_inline)) __STATIC_INLINE uint32_t __get_MSPLIM(void)
{
  register uint32_t result;

  __ASM volatile ("MRS %0, msplim" : "=r" (result) );
  return(result);
}


#if (__ARM_FEATURE_CMSE == 3U) && (__ARM_ARCH_PROFILE == 'M')
/**
  \brief   Get Main Stack Pointer Limit (non-secure)
  \return  non-secure MSPLIM Register value
 */
__attribute__((always_inline)) __STATIC_INLINE uint32_t __TZ_get_MSPLIM_NS(void)
{
  register uint32_t result;

  __ASM volatile ("MRS %0, msplim_ns" : "=r" (result) );
  return(result);
}
#endif


/**
  \brief   Set Main Stack Pointer Limit
  \param [in]    MainStackPtrLimit  Main Stack Pointer Limit value to set
 */
__attribute__((always_inline)) __STATIC_INLINE void __set_MSPLIM(uint32_t MainStackPtrLimit)
{
  __ASM volatile ("MSR msplim, %0" : : "r" (MainStackPtrLimit));
}


#if (__ARM_FEATURE_CMSE == 3U) && (__ARM_ARCH_PROFILE == 'M')
/**
  \brief   Set Main Stack Pointer Limit (non-secure)
  \param [in]    MainStackPtrLimit  non-secure MSPLIM value to set
 */
__attribute__((always_inline)) __STATIC_INLINE void __TZ_set_MSPLIM_NS(uint32_t MainStackPtrLimit)
{
  __ASM volatile ("MSR msplim_ns, %0" : : "r" (MainStackPtrLimit));
}
#endif

#endif /* (__ARM_ARCH_8M__ == 1U) */
#if ((__ARM_ARCH_7EM__ == 1U) || (__ARM_ARCH_8M__ == 1U))

/* NOTE(review): the garbled original also references
   "#define __get_FPSCR __builtin_arm_get_fpscr" and
   "#define __set_FPSCR __builtin_arm_set_fpscr"; the conditional wiring that
   selects between the builtins and the functions below is not visible here --
   confirm against the upstream CMSIS header. */

/**
  \brief   Get FPSCR
  \return  Floating Point Status/Control register value
 */
__attribute__((always_inline)) __STATIC_INLINE uint32_t __get_FPSCR(void)
{
#if (__FPU_PRESENT == 1U) && (__FPU_USED == 1U)
  uint32_t result;

  __ASM volatile ("VMRS %0, fpscr" : "=r" (result) );
  return(result);
#else
  return(0U);
#endif
}


#if (__ARM_FEATURE_CMSE == 3U)
/**
  \brief   Get FPSCR (non-secure)
  \return  non-secure Floating Point Status/Control register value
 */
__attribute__((always_inline)) __STATIC_INLINE uint32_t __TZ_get_FPSCR_NS(void)
{
#if (__FPU_PRESENT == 1U) && (__FPU_USED == 1U)
  uint32_t result;

  __ASM volatile ("VMRS %0, fpscr_ns" : "=r" (result) );
  return(result);
#else
  return(0U);
#endif
}
#endif


/**
  \brief   Set FPSCR
  \param [in]    fpscr  Floating Point Status/Control value to set
 */
__attribute__((always_inline)) __STATIC_INLINE void __set_FPSCR(uint32_t fpscr)
{
#if (__FPU_PRESENT == 1U) && (__FPU_USED == 1U)
  __ASM volatile ("VMSR fpscr, %0" : : "r" (fpscr) : "vfpcc");
#else
  (void)fpscr;
#endif
}


#if (__ARM_FEATURE_CMSE == 3U)
/**
  \brief   Set FPSCR (non-secure)
  \param [in]    fpscr  non-secure Floating Point Status/Control value to set
 */
__attribute__((always_inline)) __STATIC_INLINE void __TZ_set_FPSCR_NS(uint32_t fpscr)
{
#if (__FPU_PRESENT == 1U) && (__FPU_USED == 1U)
  __ASM volatile ("VMSR fpscr_ns, %0" : : "r" (fpscr) : "vfpcc");
#else
  (void)fpscr;
#endif
}
#endif

#endif /* (__ARM_ARCH_7EM__ == 1U) || (__ARM_ARCH_8M__ == 1U) */
742 #if defined (__thumb__) && !defined (__thumb2__) 743 #define __CMSIS_GCC_OUT_REG(r) "=l" (r) 744 #define __CMSIS_GCC_USE_REG(r) "l" (r) 746 #define __CMSIS_GCC_OUT_REG(r) "=r" (r) 747 #define __CMSIS_GCC_USE_REG(r) "r" (r) 754 #define __NOP __builtin_arm_nop 760 #define __WFI __builtin_arm_wfi 768 #define __WFE __builtin_arm_wfe 775 #define __SEV __builtin_arm_sev 784 #define __ISB() __builtin_arm_isb(0xF); 791 #define __DSB() __builtin_arm_dsb(0xF); 799 #define __DMB() __builtin_arm_dmb(0xF); 808 #define __REV __builtin_bswap32 817 #define __REV16 __builtin_bswap16 819 __attribute__((always_inline)) __STATIC_INLINE uint32_t
__REV16(uint32_t value)
823 __ASM
volatile (
"rev16 %0, %1" : __CMSIS_GCC_OUT_REG (result) : __CMSIS_GCC_USE_REG (value) );
836 __attribute__((always_inline)) __STATIC_INLINE int32_t
__REVSH(int32_t value)
840 __ASM
volatile (
"revsh %0, %1" : __CMSIS_GCC_OUT_REG (result) : __CMSIS_GCC_USE_REG (value) );
/**
  \brief   Rotate Right in unsigned value (32 bit)
  \details Rotate Right (immediate) provides the value of the contents of a
           register rotated by a variable number of bits.
  \param [in]    op1  Value to rotate
  \param [in]    op2  Number of bits to rotate (any value; reduced modulo 32)
  \return        Rotated value
  \note    The original expression `(op1 >> op2) | (op1 << (32U - op2))`
           invokes undefined behavior (shift by 32) when `op2 == 0` or a
           multiple of 32; the modulo reduction and early return fix that
           while preserving the result for all other inputs.
 */
__attribute__((always_inline)) __STATIC_INLINE uint32_t __ROR(uint32_t op1, uint32_t op2)
{
  op2 %= 32U;
  if (op2 == 0U)
  {
    return op1;
  }
  return (op1 >> op2) | (op1 << (32U - op2));
}
865 #define __BKPT(value) __ASM volatile ("bkpt "#value) 875 __attribute__((always_inline)) __STATIC_INLINE uint32_t
__RBIT(uint32_t value)
879 #if ((__ARM_ARCH_7M__ == 1U) || (__ARM_ARCH_7EM__ == 1U) || (__ARM_ARCH_8M__ == 1U)) 880 __ASM
volatile (
"rbit %0, %1" :
"=r" (result) :
"r" (value) );
882 int32_t s = 4 * 8 - 1;
885 for (value >>= 1U; value; value >>= 1U)
888 result |= value & 1U;
903 #define __CLZ __builtin_clz 906 #if ((__ARM_ARCH_7M__ == 1U) || (__ARM_ARCH_7EM__ == 1U) || (__ARM_ARCH_8M__ == 1U)) 914 #define __LDREXB (uint8_t)__builtin_arm_ldrex 923 #define __LDREXH (uint16_t)__builtin_arm_ldrex 932 #define __LDREXW (uint32_t)__builtin_arm_ldrex 943 #define __STREXB (uint32_t)__builtin_arm_strex 954 #define __STREXH (uint32_t)__builtin_arm_strex 965 #define __STREXW (uint32_t)__builtin_arm_strex 972 #define __CLREX __builtin_arm_clrex 983 #define __SSAT(ARG1,ARG2) \ 985 int32_t __RES, __ARG1 = (ARG1); \ 986 __ASM ("ssat %0, %1, %2" : "=r" (__RES) : "I" (ARG2), "r" (__ARG1) ); \ 998 #define __USAT __builtin_arm_usat 1000 #define __USAT(ARG1,ARG2) \ 1002 uint32_t __RES, __ARG1 = (ARG1); \ 1003 __ASM ("usat %0, %1, %2" : "=r" (__RES) : "I" (ARG2), "r" (__ARG1) ); \ 1016 __attribute__((always_inline)) __STATIC_INLINE uint32_t __RRX(uint32_t value)
1020 __ASM
volatile (
"rrx %0, %1" : __CMSIS_GCC_OUT_REG (result) : __CMSIS_GCC_USE_REG (value) );
1031 __attribute__((always_inline)) __STATIC_INLINE uint8_t __LDRBT(
volatile uint8_t *ptr)
1035 __ASM
volatile (
"ldrbt %0, %1" :
"=r" (result) :
"Q" (*ptr) );
1036 return ((uint8_t) result);
1046 __attribute__((always_inline)) __STATIC_INLINE uint16_t __LDRHT(
volatile uint16_t *ptr)
1050 __ASM
volatile (
"ldrht %0, %1" :
"=r" (result) :
"Q" (*ptr) );
1051 return ((uint16_t) result);
1061 __attribute__((always_inline)) __STATIC_INLINE uint32_t __LDRT(
volatile uint32_t *ptr)
1065 __ASM
volatile (
"ldrt %0, %1" :
"=r" (result) :
"Q" (*ptr) );
1076 __attribute__((always_inline)) __STATIC_INLINE
void __STRBT(uint8_t value,
volatile uint8_t *ptr)
1078 __ASM
volatile (
"strbt %1, %0" :
"=Q" (*ptr) :
"r" ((uint32_t)value) );
1088 __attribute__((always_inline)) __STATIC_INLINE
void __STRHT(uint16_t value,
volatile uint16_t *ptr)
1090 __ASM
volatile (
"strht %1, %0" :
"=Q" (*ptr) :
"r" ((uint32_t)value) );
1100 __attribute__((always_inline)) __STATIC_INLINE
void __STRT(uint32_t value,
volatile uint32_t *ptr)
1102 __ASM
volatile (
"strt %1, %0" :
"=Q" (*ptr) :
"r" (value) );
#if (__ARM_ARCH_8M__ == 1U)

/**
  \brief   Load-Acquire (8 bit)
  \param [in]    ptr  Pointer to data
  \return        value of type uint8_t at (*ptr)
 */
__attribute__((always_inline)) __STATIC_INLINE uint8_t __LDAB(volatile uint8_t *ptr)
{
  uint32_t result;

  __ASM volatile ("ldab %0, %1" : "=r" (result) : "Q" (*ptr) );
  return ((uint8_t) result);
}


/**
  \brief   Load-Acquire (16 bit)
  \param [in]    ptr  Pointer to data
  \return        value of type uint16_t at (*ptr)
 */
__attribute__((always_inline)) __STATIC_INLINE uint16_t __LDAH(volatile uint16_t *ptr)
{
  uint32_t result;

  __ASM volatile ("ldah %0, %1" : "=r" (result) : "Q" (*ptr) );
  return ((uint16_t) result);
}


/**
  \brief   Load-Acquire (32 bit)
  \param [in]    ptr  Pointer to data
  \return        value of type uint32_t at (*ptr)
 */
__attribute__((always_inline)) __STATIC_INLINE uint32_t __LDA(volatile uint32_t *ptr)
{
  uint32_t result;

  __ASM volatile ("lda %0, %1" : "=r" (result) : "Q" (*ptr) );
  return(result);
}


/**
  \brief   Store-Release (8 bit)
  \param [in]  value  Value to store
  \param [in]    ptr  Pointer to location
 */
__attribute__((always_inline)) __STATIC_INLINE void __STLB(uint8_t value, volatile uint8_t *ptr)
{
  __ASM volatile ("stlb %1, %0" : "=Q" (*ptr) : "r" ((uint32_t)value) );
}


/**
  \brief   Store-Release (16 bit)
  \param [in]  value  Value to store
  \param [in]    ptr  Pointer to location
 */
__attribute__((always_inline)) __STATIC_INLINE void __STLH(uint16_t value, volatile uint16_t *ptr)
{
  __ASM volatile ("stlh %1, %0" : "=Q" (*ptr) : "r" ((uint32_t)value) );
}


/**
  \brief   Store-Release (32 bit)
  \param [in]  value  Value to store
  \param [in]    ptr  Pointer to location
 */
__attribute__((always_inline)) __STATIC_INLINE void __STL(uint32_t value, volatile uint32_t *ptr)
{
  __ASM volatile ("stl %1, %0" : "=Q" (*ptr) : "r" ((uint32_t)value) );
}

#endif /* (__ARM_ARCH_8M__ == 1U) */
#if (__ARM_ARCH_8M__ == 1U)

/* Load-acquire/store-release exclusive intrinsics map to compiler builtins. */
#define __LDAEXB        (uint8_t)__builtin_arm_ldaex    /* Load-Acquire Exclusive (8 bit)   */
#define __LDAEXH        (uint16_t)__builtin_arm_ldaex   /* Load-Acquire Exclusive (16 bit)  */
#define __LDAEX         (uint32_t)__builtin_arm_ldaex   /* Load-Acquire Exclusive (32 bit)  */
#define __STLEXB        (uint32_t)__builtin_arm_stlex   /* Store-Release Exclusive (8 bit)  */
#define __STLEXH        (uint32_t)__builtin_arm_stlex   /* Store-Release Exclusive (16 bit) */
#define __STLEX         (uint32_t)__builtin_arm_stlex   /* Store-Release Exclusive (32 bit) */

#endif /* (__ARM_ARCH_8M__ == 1U) */


/* ###################  Compiler specific Intrinsics  ########################### */
/* SIMD intrinsics: each wraps a single DSP-extension instruction; quad 8-bit
   (signed/saturating/halving, unsigned/saturating/halving) add and subtract. */

#if (__ARM_FEATURE_DSP == 1U)

__attribute__((always_inline)) __STATIC_INLINE uint32_t __SADD8(uint32_t op1, uint32_t op2)
{
  uint32_t result;

  __ASM volatile ("sadd8 %0, %1, %2" : "=r" (result) : "r" (op1), "r" (op2) );
  return(result);
}

__attribute__((always_inline)) __STATIC_INLINE uint32_t __QADD8(uint32_t op1, uint32_t op2)
{
  uint32_t result;

  __ASM volatile ("qadd8 %0, %1, %2" : "=r" (result) : "r" (op1), "r" (op2) );
  return(result);
}

__attribute__((always_inline)) __STATIC_INLINE uint32_t __SHADD8(uint32_t op1, uint32_t op2)
{
  uint32_t result;

  __ASM volatile ("shadd8 %0, %1, %2" : "=r" (result) : "r" (op1), "r" (op2) );
  return(result);
}

__attribute__((always_inline)) __STATIC_INLINE uint32_t __UADD8(uint32_t op1, uint32_t op2)
{
  uint32_t result;

  __ASM volatile ("uadd8 %0, %1, %2" : "=r" (result) : "r" (op1), "r" (op2) );
  return(result);
}

__attribute__((always_inline)) __STATIC_INLINE uint32_t __UQADD8(uint32_t op1, uint32_t op2)
{
  uint32_t result;

  __ASM volatile ("uqadd8 %0, %1, %2" : "=r" (result) : "r" (op1), "r" (op2) );
  return(result);
}

__attribute__((always_inline)) __STATIC_INLINE uint32_t __UHADD8(uint32_t op1, uint32_t op2)
{
  uint32_t result;

  __ASM volatile ("uhadd8 %0, %1, %2" : "=r" (result) : "r" (op1), "r" (op2) );
  return(result);
}

__attribute__((always_inline)) __STATIC_INLINE uint32_t __SSUB8(uint32_t op1, uint32_t op2)
{
  uint32_t result;

  __ASM volatile ("ssub8 %0, %1, %2" : "=r" (result) : "r" (op1), "r" (op2) );
  return(result);
}

__attribute__((always_inline)) __STATIC_INLINE uint32_t __QSUB8(uint32_t op1, uint32_t op2)
{
  uint32_t result;

  __ASM volatile ("qsub8 %0, %1, %2" : "=r" (result) : "r" (op1), "r" (op2) );
  return(result);
}

__attribute__((always_inline)) __STATIC_INLINE uint32_t __SHSUB8(uint32_t op1, uint32_t op2)
{
  uint32_t result;

  __ASM volatile ("shsub8 %0, %1, %2" : "=r" (result) : "r" (op1), "r" (op2) );
  return(result);
}

__attribute__((always_inline)) __STATIC_INLINE uint32_t __USUB8(uint32_t op1, uint32_t op2)
{
  uint32_t result;

  __ASM volatile ("usub8 %0, %1, %2" : "=r" (result) : "r" (op1), "r" (op2) );
  return(result);
}

__attribute__((always_inline)) __STATIC_INLINE uint32_t __UQSUB8(uint32_t op1, uint32_t op2)
{
  uint32_t result;

  __ASM volatile ("uqsub8 %0, %1, %2" : "=r" (result) : "r" (op1), "r" (op2) );
  return(result);
}

__attribute__((always_inline)) __STATIC_INLINE uint32_t __UHSUB8(uint32_t op1, uint32_t op2)
{
  uint32_t result;

  __ASM volatile ("uhsub8 %0, %1, %2" : "=r" (result) : "r" (op1), "r" (op2) );
  return(result);
}

#endif /* (__ARM_FEATURE_DSP == 1U) */
#if (__ARM_FEATURE_DSP == 1U)

/* Dual 16-bit SIMD add/subtract (signed/saturating/halving,
   unsigned/saturating/halving); each wraps one DSP instruction. */

__attribute__((always_inline)) __STATIC_INLINE uint32_t __SADD16(uint32_t op1, uint32_t op2)
{
  uint32_t result;

  __ASM volatile ("sadd16 %0, %1, %2" : "=r" (result) : "r" (op1), "r" (op2) );
  return(result);
}

__attribute__((always_inline)) __STATIC_INLINE uint32_t __QADD16(uint32_t op1, uint32_t op2)
{
  uint32_t result;

  __ASM volatile ("qadd16 %0, %1, %2" : "=r" (result) : "r" (op1), "r" (op2) );
  return(result);
}

__attribute__((always_inline)) __STATIC_INLINE uint32_t __SHADD16(uint32_t op1, uint32_t op2)
{
  uint32_t result;

  __ASM volatile ("shadd16 %0, %1, %2" : "=r" (result) : "r" (op1), "r" (op2) );
  return(result);
}

__attribute__((always_inline)) __STATIC_INLINE uint32_t __UADD16(uint32_t op1, uint32_t op2)
{
  uint32_t result;

  __ASM volatile ("uadd16 %0, %1, %2" : "=r" (result) : "r" (op1), "r" (op2) );
  return(result);
}

__attribute__((always_inline)) __STATIC_INLINE uint32_t __UQADD16(uint32_t op1, uint32_t op2)
{
  uint32_t result;

  __ASM volatile ("uqadd16 %0, %1, %2" : "=r" (result) : "r" (op1), "r" (op2) );
  return(result);
}

__attribute__((always_inline)) __STATIC_INLINE uint32_t __UHADD16(uint32_t op1, uint32_t op2)
{
  uint32_t result;

  __ASM volatile ("uhadd16 %0, %1, %2" : "=r" (result) : "r" (op1), "r" (op2) );
  return(result);
}

__attribute__((always_inline)) __STATIC_INLINE uint32_t __SSUB16(uint32_t op1, uint32_t op2)
{
  uint32_t result;

  __ASM volatile ("ssub16 %0, %1, %2" : "=r" (result) : "r" (op1), "r" (op2) );
  return(result);
}

__attribute__((always_inline)) __STATIC_INLINE uint32_t __QSUB16(uint32_t op1, uint32_t op2)
{
  uint32_t result;

  __ASM volatile ("qsub16 %0, %1, %2" : "=r" (result) : "r" (op1), "r" (op2) );
  return(result);
}

__attribute__((always_inline)) __STATIC_INLINE uint32_t __SHSUB16(uint32_t op1, uint32_t op2)
{
  uint32_t result;

  __ASM volatile ("shsub16 %0, %1, %2" : "=r" (result) : "r" (op1), "r" (op2) );
  return(result);
}

__attribute__((always_inline)) __STATIC_INLINE uint32_t __USUB16(uint32_t op1, uint32_t op2)
{
  uint32_t result;

  __ASM volatile ("usub16 %0, %1, %2" : "=r" (result) : "r" (op1), "r" (op2) );
  return(result);
}

__attribute__((always_inline)) __STATIC_INLINE uint32_t __UQSUB16(uint32_t op1, uint32_t op2)
{
  uint32_t result;

  __ASM volatile ("uqsub16 %0, %1, %2" : "=r" (result) : "r" (op1), "r" (op2) );
  return(result);
}

__attribute__((always_inline)) __STATIC_INLINE uint32_t __UHSUB16(uint32_t op1, uint32_t op2)
{
  uint32_t result;

  __ASM volatile ("uhsub16 %0, %1, %2" : "=r" (result) : "r" (op1), "r" (op2) );
  return(result);
}

#endif /* (__ARM_FEATURE_DSP == 1U) */
#if (__ARM_FEATURE_DSP == 1U)

/* Dual 16-bit add-and-subtract with exchange (ASX) and
   subtract-and-add with exchange (SAX) variants. */

__attribute__((always_inline)) __STATIC_INLINE uint32_t __SASX(uint32_t op1, uint32_t op2)
{
  uint32_t result;

  __ASM volatile ("sasx %0, %1, %2" : "=r" (result) : "r" (op1), "r" (op2) );
  return(result);
}

__attribute__((always_inline)) __STATIC_INLINE uint32_t __QASX(uint32_t op1, uint32_t op2)
{
  uint32_t result;

  __ASM volatile ("qasx %0, %1, %2" : "=r" (result) : "r" (op1), "r" (op2) );
  return(result);
}

__attribute__((always_inline)) __STATIC_INLINE uint32_t __SHASX(uint32_t op1, uint32_t op2)
{
  uint32_t result;

  __ASM volatile ("shasx %0, %1, %2" : "=r" (result) : "r" (op1), "r" (op2) );
  return(result);
}

__attribute__((always_inline)) __STATIC_INLINE uint32_t __UASX(uint32_t op1, uint32_t op2)
{
  uint32_t result;

  __ASM volatile ("uasx %0, %1, %2" : "=r" (result) : "r" (op1), "r" (op2) );
  return(result);
}

__attribute__((always_inline)) __STATIC_INLINE uint32_t __UQASX(uint32_t op1, uint32_t op2)
{
  uint32_t result;

  __ASM volatile ("uqasx %0, %1, %2" : "=r" (result) : "r" (op1), "r" (op2) );
  return(result);
}

__attribute__((always_inline)) __STATIC_INLINE uint32_t __UHASX(uint32_t op1, uint32_t op2)
{
  uint32_t result;

  __ASM volatile ("uhasx %0, %1, %2" : "=r" (result) : "r" (op1), "r" (op2) );
  return(result);
}

__attribute__((always_inline)) __STATIC_INLINE uint32_t __SSAX(uint32_t op1, uint32_t op2)
{
  uint32_t result;

  __ASM volatile ("ssax %0, %1, %2" : "=r" (result) : "r" (op1), "r" (op2) );
  return(result);
}

__attribute__((always_inline)) __STATIC_INLINE uint32_t __QSAX(uint32_t op1, uint32_t op2)
{
  uint32_t result;

  __ASM volatile ("qsax %0, %1, %2" : "=r" (result) : "r" (op1), "r" (op2) );
  return(result);
}

__attribute__((always_inline)) __STATIC_INLINE uint32_t __SHSAX(uint32_t op1, uint32_t op2)
{
  uint32_t result;

  __ASM volatile ("shsax %0, %1, %2" : "=r" (result) : "r" (op1), "r" (op2) );
  return(result);
}

__attribute__((always_inline)) __STATIC_INLINE uint32_t __USAX(uint32_t op1, uint32_t op2)
{
  uint32_t result;

  __ASM volatile ("usax %0, %1, %2" : "=r" (result) : "r" (op1), "r" (op2) );
  return(result);
}

__attribute__((always_inline)) __STATIC_INLINE uint32_t __UQSAX(uint32_t op1, uint32_t op2)
{
  uint32_t result;

  __ASM volatile ("uqsax %0, %1, %2" : "=r" (result) : "r" (op1), "r" (op2) );
  return(result);
}

__attribute__((always_inline)) __STATIC_INLINE uint32_t __UHSAX(uint32_t op1, uint32_t op2)
{
  uint32_t result;

  __ASM volatile ("uhsax %0, %1, %2" : "=r" (result) : "r" (op1), "r" (op2) );
  return(result);
}

#endif /* (__ARM_FEATURE_DSP == 1U) */
#if (__ARM_FEATURE_DSP == 1U)

/**
  \brief   Unsigned Sum of Absolute Differences (quad 8-bit)
 */
__attribute__((always_inline)) __STATIC_INLINE uint32_t __USAD8(uint32_t op1, uint32_t op2)
{
  uint32_t result;

  __ASM volatile ("usad8 %0, %1, %2" : "=r" (result) : "r" (op1), "r" (op2) );
  return(result);
}

/**
  \brief   Unsigned Sum of Absolute Differences with Accumulate (quad 8-bit)
 */
__attribute__((always_inline)) __STATIC_INLINE uint32_t __USADA8(uint32_t op1, uint32_t op2, uint32_t op3)
{
  uint32_t result;

  __ASM volatile ("usada8 %0, %1, %2, %3" : "=r" (result) : "r" (op1), "r" (op2), "r" (op3) );
  return(result);
}

#endif /* (__ARM_FEATURE_DSP == 1U) */
#if (__ARM_FEATURE_DSP == 1U)

/**
  \brief   Dual 16-bit signed saturate
 */
#define __SSAT16(ARG1,ARG2) \
({                          \
  uint32_t __RES, __ARG1 = (ARG1); \
  __ASM ("ssat16 %0, %1, %2" : "=r" (__RES) :  "I" (ARG2), "r" (__ARG1) ); \
  __RES; \
 })

/**
  \brief   Dual 16-bit unsigned saturate
 */
#define __USAT16(ARG1,ARG2) \
({                          \
  uint32_t __RES, __ARG1 = (ARG1); \
  __ASM ("usat16 %0, %1, %2" : "=r" (__RES) :  "I" (ARG2), "r" (__ARG1) ); \
  __RES; \
 })

/** \brief  Dual extract 8-bits and zero-extend to 16-bits */
__attribute__((always_inline)) __STATIC_INLINE uint32_t __UXTB16(uint32_t op1)
{
  uint32_t result;

  __ASM volatile ("uxtb16 %0, %1" : "=r" (result) : "r" (op1));
  return(result);
}

/** \brief  Dual extract 8-bits, zero-extend and add to 16-bit halfwords */
__attribute__((always_inline)) __STATIC_INLINE uint32_t __UXTAB16(uint32_t op1, uint32_t op2)
{
  uint32_t result;

  __ASM volatile ("uxtab16 %0, %1, %2" : "=r" (result) : "r" (op1), "r" (op2) );
  return(result);
}

/** \brief  Dual extract 8-bits and sign-extend to 16-bits */
__attribute__((always_inline)) __STATIC_INLINE uint32_t __SXTB16(uint32_t op1)
{
  uint32_t result;

  __ASM volatile ("sxtb16 %0, %1" : "=r" (result) : "r" (op1));
  return(result);
}

/** \brief  Dual extract 8-bits, sign-extend and add to 16-bit halfwords */
__attribute__((always_inline)) __STATIC_INLINE uint32_t __SXTAB16(uint32_t op1, uint32_t op2)
{
  uint32_t result;

  __ASM volatile ("sxtab16 %0, %1, %2" : "=r" (result) : "r" (op1), "r" (op2) );
  return(result);
}

#endif /* (__ARM_FEATURE_DSP == 1U) */
#if (__ARM_FEATURE_DSP == 1U)

/** \brief  Dual 16-bit signed multiply with addition of products */
__attribute__((always_inline)) __STATIC_INLINE uint32_t __SMUAD  (uint32_t op1, uint32_t op2)
{
  uint32_t result;

  __ASM volatile ("smuad %0, %1, %2" : "=r" (result) : "r" (op1), "r" (op2) );
  return(result);
}

/** \brief  Dual 16-bit signed multiply (exchanged) with addition of products */
__attribute__((always_inline)) __STATIC_INLINE uint32_t __SMUADX (uint32_t op1, uint32_t op2)
{
  uint32_t result;

  __ASM volatile ("smuadx %0, %1, %2" : "=r" (result) : "r" (op1), "r" (op2) );
  return(result);
}

/** \brief  Dual 16-bit signed multiply-add of products with 32-bit accumulate */
__attribute__((always_inline)) __STATIC_INLINE uint32_t __SMLAD (uint32_t op1, uint32_t op2, uint32_t op3)
{
  uint32_t result;

  __ASM volatile ("smlad %0, %1, %2, %3" : "=r" (result) : "r" (op1), "r" (op2), "r" (op3) );
  return(result);
}

/** \brief  Dual 16-bit signed multiply-add (exchanged) with 32-bit accumulate */
__attribute__((always_inline)) __STATIC_INLINE uint32_t __SMLADX (uint32_t op1, uint32_t op2, uint32_t op3)
{
  uint32_t result;

  __ASM volatile ("smladx %0, %1, %2, %3" : "=r" (result) : "r" (op1), "r" (op2), "r" (op3) );
  return(result);
}

#endif /* (__ARM_FEATURE_DSP == 1U) */
#if (__ARM_FEATURE_DSP == 1U)

/** \brief  Dual 16-bit signed multiply-add of products with 64-bit accumulate */
__attribute__((always_inline)) __STATIC_INLINE uint64_t __SMLALD (uint32_t op1, uint32_t op2, uint64_t acc)
{
  union llreg_u{
    uint32_t w32[2];
    uint64_t w64;
  } llr;
  llr.w64 = acc;

#ifndef __ARMEB__   /* Little endian */
  __ASM volatile ("smlald %0, %1, %2, %3" : "=r" (llr.w32[0]), "=r" (llr.w32[1]): "r" (op1), "r" (op2) , "0" (llr.w32[0]), "1" (llr.w32[1]) );
#else               /* Big endian */
  __ASM volatile ("smlald %0, %1, %2, %3" : "=r" (llr.w32[1]), "=r" (llr.w32[0]): "r" (op1), "r" (op2) , "0" (llr.w32[1]), "1" (llr.w32[0]) );
#endif

  return(llr.w64);
}

/** \brief  Dual 16-bit signed multiply-add (exchanged) with 64-bit accumulate */
__attribute__((always_inline)) __STATIC_INLINE uint64_t __SMLALDX (uint32_t op1, uint32_t op2, uint64_t acc)
{
  union llreg_u{
    uint32_t w32[2];
    uint64_t w64;
  } llr;
  llr.w64 = acc;

#ifndef __ARMEB__   /* Little endian */
  __ASM volatile ("smlaldx %0, %1, %2, %3" : "=r" (llr.w32[0]), "=r" (llr.w32[1]): "r" (op1), "r" (op2) , "0" (llr.w32[0]), "1" (llr.w32[1]) );
#else               /* Big endian */
  __ASM volatile ("smlaldx %0, %1, %2, %3" : "=r" (llr.w32[1]), "=r" (llr.w32[0]): "r" (op1), "r" (op2) , "0" (llr.w32[1]), "1" (llr.w32[0]) );
#endif

  return(llr.w64);
}

#endif /* (__ARM_FEATURE_DSP == 1U) */
#if (__ARM_FEATURE_DSP == 1U)

/** \brief  Dual 16-bit signed multiply with subtraction of products */
__attribute__((always_inline)) __STATIC_INLINE uint32_t __SMUSD  (uint32_t op1, uint32_t op2)
{
  uint32_t result;

  __ASM volatile ("smusd %0, %1, %2" : "=r" (result) : "r" (op1), "r" (op2) );
  return(result);
}

/** \brief  Dual 16-bit signed multiply (exchanged) with subtraction of products */
__attribute__((always_inline)) __STATIC_INLINE uint32_t __SMUSDX (uint32_t op1, uint32_t op2)
{
  uint32_t result;

  __ASM volatile ("smusdx %0, %1, %2" : "=r" (result) : "r" (op1), "r" (op2) );
  return(result);
}

/** \brief  Dual 16-bit signed multiply-subtract of products with 32-bit accumulate */
__attribute__((always_inline)) __STATIC_INLINE uint32_t __SMLSD (uint32_t op1, uint32_t op2, uint32_t op3)
{
  uint32_t result;

  __ASM volatile ("smlsd %0, %1, %2, %3" : "=r" (result) : "r" (op1), "r" (op2), "r" (op3) );
  return(result);
}

/** \brief  Dual 16-bit signed multiply-subtract (exchanged) with 32-bit accumulate */
__attribute__((always_inline)) __STATIC_INLINE uint32_t __SMLSDX (uint32_t op1, uint32_t op2, uint32_t op3)
{
  uint32_t result;

  __ASM volatile ("smlsdx %0, %1, %2, %3" : "=r" (result) : "r" (op1), "r" (op2), "r" (op3) );
  return(result);
}

#endif /* (__ARM_FEATURE_DSP == 1U) */
#if (__ARM_FEATURE_DSP == 1U)

/** \brief  Dual 16-bit signed multiply-subtract of products with 64-bit accumulate */
__attribute__((always_inline)) __STATIC_INLINE uint64_t __SMLSLD (uint32_t op1, uint32_t op2, uint64_t acc)
{
  union llreg_u{
    uint32_t w32[2];
    uint64_t w64;
  } llr;
  llr.w64 = acc;

#ifndef __ARMEB__   /* Little endian */
  __ASM volatile ("smlsld %0, %1, %2, %3" : "=r" (llr.w32[0]), "=r" (llr.w32[1]): "r" (op1), "r" (op2) , "0" (llr.w32[0]), "1" (llr.w32[1]) );
#else               /* Big endian */
  __ASM volatile ("smlsld %0, %1, %2, %3" : "=r" (llr.w32[1]), "=r" (llr.w32[0]): "r" (op1), "r" (op2) , "0" (llr.w32[1]), "1" (llr.w32[0]) );
#endif

  return(llr.w64);
}

/** \brief  Dual 16-bit signed multiply-subtract (exchanged) with 64-bit accumulate */
__attribute__((always_inline)) __STATIC_INLINE uint64_t __SMLSLDX (uint32_t op1, uint32_t op2, uint64_t acc)
{
  union llreg_u{
    uint32_t w32[2];
    uint64_t w64;
  } llr;
  llr.w64 = acc;

#ifndef __ARMEB__   /* Little endian */
  __ASM volatile ("smlsldx %0, %1, %2, %3" : "=r" (llr.w32[0]), "=r" (llr.w32[1]): "r" (op1), "r" (op2) , "0" (llr.w32[0]), "1" (llr.w32[1]) );
#else               /* Big endian */
  __ASM volatile ("smlsldx %0, %1, %2, %3" : "=r" (llr.w32[1]), "=r" (llr.w32[0]): "r" (op1), "r" (op2) , "0" (llr.w32[1]), "1" (llr.w32[0]) );
#endif

  return(llr.w64);
}

#endif /* (__ARM_FEATURE_DSP == 1U) */
#if (__ARM_FEATURE_DSP == 1U)

/** \brief  Select bytes from each operand according to APSR.GE flags */
__attribute__((always_inline)) __STATIC_INLINE uint32_t __SEL  (uint32_t op1, uint32_t op2)
{
  uint32_t result;

  __ASM volatile ("sel %0, %1, %2" : "=r" (result) : "r" (op1), "r" (op2) );
  return(result);
}

/** \brief  Saturating 32-bit add */
__attribute__((always_inline)) __STATIC_INLINE int32_t __QADD( int32_t op1,  int32_t op2)
{
  int32_t result;

  __ASM volatile ("qadd %0, %1, %2" : "=r" (result) : "r" (op1), "r" (op2) );
  return(result);
}

/** \brief  Saturating 32-bit subtract */
__attribute__((always_inline)) __STATIC_INLINE int32_t __QSUB( int32_t op1,  int32_t op2)
{
  int32_t result;

  __ASM volatile ("qsub %0, %1, %2" : "=r" (result) : "r" (op1), "r" (op2) );
  return(result);
}

#endif /* (__ARM_FEATURE_DSP == 1U) */
#if (__ARM_FEATURE_DSP == 1U)

/**
  \brief   Pack halfwords: bottom of ARG1 with bottom of (ARG2 << ARG3)
 */
#define __PKHBT(ARG1,ARG2,ARG3) \
({                          \
  uint32_t __RES, __ARG1 = (ARG1), __ARG2 = (ARG2); \
  __ASM ("pkhbt %0, %1, %2, lsl %3" : "=r" (__RES) :  "r" (__ARG1), "r" (__ARG2), "I" (ARG3)  ); \
  __RES; \
 })

/**
  \brief   Pack halfwords: top of ARG1 with bottom of (ARG2 >> ARG3)
  \note    ASR by 0 is not encodable, hence the ARG3 == 0 special case.
 */
#define __PKHTB(ARG1,ARG2,ARG3) \
({                          \
  uint32_t __RES, __ARG1 = (ARG1), __ARG2 = (ARG2); \
  if (ARG3 == 0) \
    __ASM ("pkhtb %0, %1, %2" : "=r" (__RES) :  "r" (__ARG1), "r" (__ARG2)  ); \
  else \
    __ASM ("pkhtb %0, %1, %2, asr %3" : "=r" (__RES) :  "r" (__ARG1), "r" (__ARG2), "I" (ARG3)  ); \
  __RES; \
 })

/**
  \brief   32-bit signed multiply with 32-bit truncated accumulate
           (most-significant-word multiply-accumulate)
 */
__attribute__((always_inline)) __STATIC_INLINE uint32_t __SMMLA (int32_t op1, int32_t op2, int32_t op3)
{
  int32_t result;

  __ASM volatile ("smmla %0, %1, %2, %3" : "=r" (result): "r" (op1), "r" (op2), "r" (op3) );
  return(result);
}

#endif /* (__ARM_FEATURE_DSP == 1U) */
__STATIC_INLINE void __set_PRIMASK(uint32_t priMask)
Set Priority Mask.
__STATIC_INLINE __ASM int32_t __REVSH(int32_t value)
Reverse byte order in signed short value.
__STATIC_INLINE uint32_t __get_xPSR(void)
Get xPSR Register.
#define __REV16
Reverse byte order (16 bit)
__STATIC_INLINE uint32_t __get_MSP(void)
Get Main Stack Pointer.
__STATIC_INLINE uint32_t __get_PRIMASK(void)
Get Priority Mask.
__STATIC_INLINE void __enable_irq(void)
Enable IRQ Interrupts.
__STATIC_INLINE uint32_t __ROR(uint32_t op1, uint32_t op2)
Rotate Right in unsigned value (32 bit)
__STATIC_INLINE void __set_PSP(uint32_t topOfProcStack)
Set Process Stack Pointer.
__STATIC_INLINE uint32_t __get_APSR(void)
Get APSR Register.
__STATIC_INLINE uint32_t __get_CONTROL(void)
Get Control Register.
__STATIC_INLINE uint32_t __get_IPSR(void)
Get IPSR Register.
__STATIC_INLINE void __disable_irq(void)
Disable IRQ Interrupts.
__STATIC_INLINE void __set_CONTROL(uint32_t control)
Set Control Register.
__STATIC_INLINE uint32_t __RBIT(uint32_t value)
Reverse bit order of value.
__STATIC_INLINE void __set_MSP(uint32_t topOfMainStack)
Set Main Stack Pointer.
__STATIC_INLINE uint32_t __get_PSP(void)
Get Process Stack Pointer.