27 #ifndef __CMSIS_ARMCLANG_H 28 #define __CMSIS_ARMCLANG_H 30 #pragma clang system_header 32 #ifndef __ARM_COMPAT_H 33 #include <arm_compat.h> 41 #define __INLINE __inline 43 #ifndef __STATIC_INLINE 44 #define __STATIC_INLINE static __inline 46 #ifndef __STATIC_FORCEINLINE 47 #define __STATIC_FORCEINLINE __attribute__((always_inline)) static __inline 50 #define __NO_RETURN __attribute__((__noreturn__)) 53 #define __USED __attribute__((used)) 56 #define __WEAK __attribute__((weak)) 59 #define __PACKED __attribute__((packed, aligned(1))) 61 #ifndef __PACKED_STRUCT 62 #define __PACKED_STRUCT struct __attribute__((packed, aligned(1))) 64 #ifndef __PACKED_UNION 65 #define __PACKED_UNION union __attribute__((packed, aligned(1))) 67 #ifndef __UNALIGNED_UINT32 68 #pragma clang diagnostic push 69 #pragma clang diagnostic ignored "-Wpacked" 71 struct __attribute__((packed)) T_UINT32 { uint32_t v; };
72 #pragma clang diagnostic pop 73 #define __UNALIGNED_UINT32(x) (((struct T_UINT32 *)(x))->v) 75 #ifndef __UNALIGNED_UINT16_WRITE 76 #pragma clang diagnostic push 77 #pragma clang diagnostic ignored "-Wpacked" 79 __PACKED_STRUCT T_UINT16_WRITE { uint16_t v; };
80 #pragma clang diagnostic pop 81 #define __UNALIGNED_UINT16_WRITE(addr, val) (void)((((struct T_UINT16_WRITE *)(void *)(addr))->v) = (val)) 83 #ifndef __UNALIGNED_UINT16_READ 84 #pragma clang diagnostic push 85 #pragma clang diagnostic ignored "-Wpacked" 87 __PACKED_STRUCT T_UINT16_READ { uint16_t v; };
88 #pragma clang diagnostic pop 89 #define __UNALIGNED_UINT16_READ(addr) (((const struct T_UINT16_READ *)(const void *)(addr))->v) 91 #ifndef __UNALIGNED_UINT32_WRITE 92 #pragma clang diagnostic push 93 #pragma clang diagnostic ignored "-Wpacked" 95 __PACKED_STRUCT T_UINT32_WRITE { uint32_t v; };
96 #pragma clang diagnostic pop 97 #define __UNALIGNED_UINT32_WRITE(addr, val) (void)((((struct T_UINT32_WRITE *)(void *)(addr))->v) = (val)) 99 #ifndef __UNALIGNED_UINT32_READ 100 #pragma clang diagnostic push 101 #pragma clang diagnostic ignored "-Wpacked" 103 __PACKED_STRUCT T_UINT32_READ { uint32_t v; };
104 #pragma clang diagnostic pop 105 #define __UNALIGNED_UINT32_READ(addr) (((const struct T_UINT32_READ *)(const void *)(addr))->v) 108 #define __ALIGNED(x) __attribute__((aligned(x))) 111 #define __RESTRICT __restrict 146 __ASM
volatile (
"MRS %0, control" :
"=r" (result) );
151 #if (defined (__ARM_FEATURE_CMSE ) && (__ARM_FEATURE_CMSE == 3)) 157 __STATIC_FORCEINLINE uint32_t __TZ_get_CONTROL_NS(
void)
161 __ASM
volatile (
"MRS %0, control_ns" :
"=r" (result) );
174 __ASM
volatile (
"MSR control, %0" : :
"r" (control) :
"memory");
178 #if (defined (__ARM_FEATURE_CMSE ) && (__ARM_FEATURE_CMSE == 3)) 184 __STATIC_FORCEINLINE
void __TZ_set_CONTROL_NS(uint32_t control)
186 __ASM
volatile (
"MSR control_ns, %0" : :
"r" (control) :
"memory");
196 __STATIC_FORCEINLINE uint32_t
__get_IPSR(
void)
200 __ASM
volatile (
"MRS %0, ipsr" :
"=r" (result) );
210 __STATIC_FORCEINLINE uint32_t
__get_APSR(
void)
214 __ASM
volatile (
"MRS %0, apsr" :
"=r" (result) );
224 __STATIC_FORCEINLINE uint32_t
__get_xPSR(
void)
228 __ASM
volatile (
"MRS %0, xpsr" :
"=r" (result) );
238 __STATIC_FORCEINLINE uint32_t
__get_PSP(
void)
242 __ASM
volatile (
"MRS %0, psp" :
"=r" (result) );
247 #if (defined (__ARM_FEATURE_CMSE ) && (__ARM_FEATURE_CMSE == 3)) 253 __STATIC_FORCEINLINE uint32_t __TZ_get_PSP_NS(
void)
257 __ASM
volatile (
"MRS %0, psp_ns" :
"=r" (result) );
268 __STATIC_FORCEINLINE
void __set_PSP(uint32_t topOfProcStack)
270 __ASM
volatile (
"MSR psp, %0" : :
"r" (topOfProcStack) : );
274 #if (defined (__ARM_FEATURE_CMSE ) && (__ARM_FEATURE_CMSE == 3)) 280 __STATIC_FORCEINLINE
void __TZ_set_PSP_NS(uint32_t topOfProcStack)
282 __ASM
volatile (
"MSR psp_ns, %0" : :
"r" (topOfProcStack) : );
292 __STATIC_FORCEINLINE uint32_t
__get_MSP(
void)
296 __ASM
volatile (
"MRS %0, msp" :
"=r" (result) );
301 #if (defined (__ARM_FEATURE_CMSE ) && (__ARM_FEATURE_CMSE == 3)) 307 __STATIC_FORCEINLINE uint32_t __TZ_get_MSP_NS(
void)
311 __ASM
volatile (
"MRS %0, msp_ns" :
"=r" (result) );
322 __STATIC_FORCEINLINE
void __set_MSP(uint32_t topOfMainStack)
324 __ASM
volatile (
"MSR msp, %0" : :
"r" (topOfMainStack) : );
328 #if (defined (__ARM_FEATURE_CMSE ) && (__ARM_FEATURE_CMSE == 3)) 334 __STATIC_FORCEINLINE
void __TZ_set_MSP_NS(uint32_t topOfMainStack)
336 __ASM
volatile (
"MSR msp_ns, %0" : :
"r" (topOfMainStack) : );
341 #if (defined (__ARM_FEATURE_CMSE ) && (__ARM_FEATURE_CMSE == 3)) 347 __STATIC_FORCEINLINE uint32_t __TZ_get_SP_NS(
void)
351 __ASM
volatile (
"MRS %0, sp_ns" :
"=r" (result) );
361 __STATIC_FORCEINLINE
void __TZ_set_SP_NS(uint32_t topOfStack)
363 __ASM
volatile (
"MSR sp_ns, %0" : :
"r" (topOfStack) : );
377 __ASM
volatile (
"MRS %0, primask" :
"=r" (result) );
382 #if (defined (__ARM_FEATURE_CMSE ) && (__ARM_FEATURE_CMSE == 3)) 388 __STATIC_FORCEINLINE uint32_t __TZ_get_PRIMASK_NS(
void)
392 __ASM
volatile (
"MRS %0, primask_ns" :
"=r" (result) );
405 __ASM
volatile (
"MSR primask, %0" : :
"r" (priMask) :
"memory");
409 #if (defined (__ARM_FEATURE_CMSE ) && (__ARM_FEATURE_CMSE == 3)) 415 __STATIC_FORCEINLINE
void __TZ_set_PRIMASK_NS(uint32_t priMask)
417 __ASM
volatile (
"MSR primask_ns, %0" : :
"r" (priMask) :
"memory");
422 #if ((defined (__ARM_ARCH_7M__ ) && (__ARM_ARCH_7M__ == 1)) || \ 423 (defined (__ARM_ARCH_7EM__ ) && (__ARM_ARCH_7EM__ == 1)) || \ 424 (defined (__ARM_ARCH_8M_MAIN__ ) && (__ARM_ARCH_8M_MAIN__ == 1)) ) 430 #define __enable_fault_irq __enable_fiq 438 #define __disable_fault_irq __disable_fiq 446 __STATIC_FORCEINLINE uint32_t __get_BASEPRI(
void)
450 __ASM
volatile (
"MRS %0, basepri" :
"=r" (result) );
455 #if (defined (__ARM_FEATURE_CMSE ) && (__ARM_FEATURE_CMSE == 3)) 461 __STATIC_FORCEINLINE uint32_t __TZ_get_BASEPRI_NS(
void)
465 __ASM
volatile (
"MRS %0, basepri_ns" :
"=r" (result) );
476 __STATIC_FORCEINLINE
void __set_BASEPRI(uint32_t basePri)
478 __ASM
volatile (
"MSR basepri, %0" : :
"r" (basePri) :
"memory");
482 #if (defined (__ARM_FEATURE_CMSE ) && (__ARM_FEATURE_CMSE == 3)) 488 __STATIC_FORCEINLINE
void __TZ_set_BASEPRI_NS(uint32_t basePri)
490 __ASM
volatile (
"MSR basepri_ns, %0" : :
"r" (basePri) :
"memory");
501 __STATIC_FORCEINLINE
void __set_BASEPRI_MAX(uint32_t basePri)
503 __ASM
volatile (
"MSR basepri_max, %0" : :
"r" (basePri) :
"memory");
512 __STATIC_FORCEINLINE uint32_t __get_FAULTMASK(
void)
516 __ASM
volatile (
"MRS %0, faultmask" :
"=r" (result) );
521 #if (defined (__ARM_FEATURE_CMSE ) && (__ARM_FEATURE_CMSE == 3)) 527 __STATIC_FORCEINLINE uint32_t __TZ_get_FAULTMASK_NS(
void)
531 __ASM
volatile (
"MRS %0, faultmask_ns" :
"=r" (result) );
542 __STATIC_FORCEINLINE
void __set_FAULTMASK(uint32_t faultMask)
544 __ASM
volatile (
"MSR faultmask, %0" : :
"r" (faultMask) :
"memory");
548 #if (defined (__ARM_FEATURE_CMSE ) && (__ARM_FEATURE_CMSE == 3)) 554 __STATIC_FORCEINLINE
void __TZ_set_FAULTMASK_NS(uint32_t faultMask)
556 __ASM
volatile (
"MSR faultmask_ns, %0" : :
"r" (faultMask) :
"memory");
565 #if ((defined (__ARM_ARCH_8M_MAIN__ ) && (__ARM_ARCH_8M_MAIN__ == 1)) || \ 566 (defined (__ARM_ARCH_8M_BASE__ ) && (__ARM_ARCH_8M_BASE__ == 1)) ) 577 __STATIC_FORCEINLINE uint32_t __get_PSPLIM(
void)
579 #if (!(defined (__ARM_ARCH_8M_MAIN__ ) && (__ARM_ARCH_8M_MAIN__ == 1)) && \ 580 (!defined (__ARM_FEATURE_CMSE) || (__ARM_FEATURE_CMSE < 3))) 585 __ASM
volatile (
"MRS %0, psplim" :
"=r" (result) );
590 #if (defined (__ARM_FEATURE_CMSE) && (__ARM_FEATURE_CMSE == 3)) 600 __STATIC_FORCEINLINE uint32_t __TZ_get_PSPLIM_NS(
void)
602 #if (!(defined (__ARM_ARCH_8M_MAIN__ ) && (__ARM_ARCH_8M_MAIN__ == 1))) 607 __ASM
volatile (
"MRS %0, psplim_ns" :
"=r" (result) );
623 __STATIC_FORCEINLINE
void __set_PSPLIM(uint32_t ProcStackPtrLimit)
625 #if (!(defined (__ARM_ARCH_8M_MAIN__ ) && (__ARM_ARCH_8M_MAIN__ == 1)) && \ 626 (!defined (__ARM_FEATURE_CMSE) || (__ARM_FEATURE_CMSE < 3))) 628 (void)ProcStackPtrLimit;
630 __ASM
volatile (
"MSR psplim, %0" : :
"r" (ProcStackPtrLimit));
635 #if (defined (__ARM_FEATURE_CMSE ) && (__ARM_FEATURE_CMSE == 3)) 645 __STATIC_FORCEINLINE
void __TZ_set_PSPLIM_NS(uint32_t ProcStackPtrLimit)
647 #if (!(defined (__ARM_ARCH_8M_MAIN__ ) && (__ARM_ARCH_8M_MAIN__ == 1))) 649 (void)ProcStackPtrLimit;
651 __ASM
volatile (
"MSR psplim_ns, %0\n" : :
"r" (ProcStackPtrLimit));
665 __STATIC_FORCEINLINE uint32_t __get_MSPLIM(
void)
667 #if (!(defined (__ARM_ARCH_8M_MAIN__ ) && (__ARM_ARCH_8M_MAIN__ == 1)) && \ 668 (!defined (__ARM_FEATURE_CMSE) || (__ARM_FEATURE_CMSE < 3))) 673 __ASM
volatile (
"MRS %0, msplim" :
"=r" (result) );
679 #if (defined (__ARM_FEATURE_CMSE ) && (__ARM_FEATURE_CMSE == 3)) 688 __STATIC_FORCEINLINE uint32_t __TZ_get_MSPLIM_NS(
void)
690 #if (!(defined (__ARM_ARCH_8M_MAIN__ ) && (__ARM_ARCH_8M_MAIN__ == 1))) 695 __ASM
volatile (
"MRS %0, msplim_ns" :
"=r" (result) );
710 __STATIC_FORCEINLINE
void __set_MSPLIM(uint32_t MainStackPtrLimit)
712 #if (!(defined (__ARM_ARCH_8M_MAIN__ ) && (__ARM_ARCH_8M_MAIN__ == 1)) && \ 713 (!defined (__ARM_FEATURE_CMSE) || (__ARM_FEATURE_CMSE < 3))) 715 (void)MainStackPtrLimit;
717 __ASM
volatile (
"MSR msplim, %0" : :
"r" (MainStackPtrLimit));
722 #if (defined (__ARM_FEATURE_CMSE ) && (__ARM_FEATURE_CMSE == 3)) 731 __STATIC_FORCEINLINE
void __TZ_set_MSPLIM_NS(uint32_t MainStackPtrLimit)
733 #if (!(defined (__ARM_ARCH_8M_MAIN__ ) && (__ARM_ARCH_8M_MAIN__ == 1))) 735 (void)MainStackPtrLimit;
737 __ASM
volatile (
"MSR msplim_ns, %0" : :
"r" (MainStackPtrLimit));
750 #if ((defined (__FPU_PRESENT) && (__FPU_PRESENT == 1U)) && \ 751 (defined (__FPU_USED ) && (__FPU_USED == 1U)) ) 752 #define __get_FPSCR (uint32_t)__builtin_arm_get_fpscr 754 #define __get_FPSCR() ((uint32_t)0U) 762 #if ((defined (__FPU_PRESENT) && (__FPU_PRESENT == 1U)) && \ 763 (defined (__FPU_USED ) && (__FPU_USED == 1U)) ) 764 #define __set_FPSCR __builtin_arm_set_fpscr 766 #define __set_FPSCR(x) ((void)(x)) 782 #if defined (__thumb__) && !defined (__thumb2__) 783 #define __CMSIS_GCC_OUT_REG(r) "=l" (r) 784 #define __CMSIS_GCC_USE_REG(r) "l" (r) 786 #define __CMSIS_GCC_OUT_REG(r) "=r" (r) 787 #define __CMSIS_GCC_USE_REG(r) "r" (r) 794 #define __NOP __builtin_arm_nop 800 #define __WFI __builtin_arm_wfi 808 #define __WFE __builtin_arm_wfe 815 #define __SEV __builtin_arm_sev 824 #define __ISB() __builtin_arm_isb(0xF); 831 #define __DSB() __builtin_arm_dsb(0xF); 839 #define __DMB() __builtin_arm_dmb(0xF); 848 #define __REV(value) __builtin_bswap32(value) 857 #define __REV16(value) __ROR(__REV(value), 16) 866 #define __REVSH(value) (int16_t)__builtin_bswap16(value) 876 __STATIC_FORCEINLINE uint32_t
__ROR(uint32_t op1, uint32_t op2)
{
  /* Rotate right: reduce shift count first — without this, op2 == 0 (or any
     multiple of 32) makes (op1 << (32U - op2)) a shift by 32, which is
     undefined behavior in C. */
  op2 %= 32U;
  if (op2 == 0U)
  {
    return op1;
  }
  return (op1 >> op2) | (op1 << (32U - op2));
}
894 #define __BKPT(value) __ASM volatile ("bkpt "#value) 903 #define __RBIT __builtin_arm_rbit 911 #define __CLZ (uint8_t)__builtin_clz 914 #if ((defined (__ARM_ARCH_7M__ ) && (__ARM_ARCH_7M__ == 1)) || \ 915 (defined (__ARM_ARCH_7EM__ ) && (__ARM_ARCH_7EM__ == 1)) || \ 916 (defined (__ARM_ARCH_8M_MAIN__ ) && (__ARM_ARCH_8M_MAIN__ == 1)) || \ 917 (defined (__ARM_ARCH_8M_BASE__ ) && (__ARM_ARCH_8M_BASE__ == 1)) ) 924 #define __LDREXB (uint8_t)__builtin_arm_ldrex 933 #define __LDREXH (uint16_t)__builtin_arm_ldrex 942 #define __LDREXW (uint32_t)__builtin_arm_ldrex 953 #define __STREXB (uint32_t)__builtin_arm_strex 964 #define __STREXH (uint32_t)__builtin_arm_strex 975 #define __STREXW (uint32_t)__builtin_arm_strex 982 #define __CLREX __builtin_arm_clrex 990 #if ((defined (__ARM_ARCH_7M__ ) && (__ARM_ARCH_7M__ == 1)) || \ 991 (defined (__ARM_ARCH_7EM__ ) && (__ARM_ARCH_7EM__ == 1)) || \ 992 (defined (__ARM_ARCH_8M_MAIN__ ) && (__ARM_ARCH_8M_MAIN__ == 1)) ) 1001 #define __SSAT __builtin_arm_ssat 1011 #define __USAT __builtin_arm_usat 1021 __STATIC_FORCEINLINE uint32_t __RRX(uint32_t value)
1025 __ASM
volatile (
"rrx %0, %1" : __CMSIS_GCC_OUT_REG (result) : __CMSIS_GCC_USE_REG (value) );
1036 __STATIC_FORCEINLINE uint8_t __LDRBT(
volatile uint8_t *ptr)
1040 __ASM
volatile (
"ldrbt %0, %1" :
"=r" (result) :
"Q" (*ptr) );
1041 return ((uint8_t) result);
1051 __STATIC_FORCEINLINE uint16_t __LDRHT(
volatile uint16_t *ptr)
1055 __ASM
volatile (
"ldrht %0, %1" :
"=r" (result) :
"Q" (*ptr) );
1056 return ((uint16_t) result);
1066 __STATIC_FORCEINLINE uint32_t __LDRT(
volatile uint32_t *ptr)
1070 __ASM
volatile (
"ldrt %0, %1" :
"=r" (result) :
"Q" (*ptr) );
1081 __STATIC_FORCEINLINE
void __STRBT(uint8_t value,
volatile uint8_t *ptr)
1083 __ASM
volatile (
"strbt %1, %0" :
"=Q" (*ptr) :
"r" ((uint32_t)value) );
1093 __STATIC_FORCEINLINE
void __STRHT(uint16_t value,
volatile uint16_t *ptr)
1095 __ASM
volatile (
"strht %1, %0" :
"=Q" (*ptr) :
"r" ((uint32_t)value) );
1105 __STATIC_FORCEINLINE
void __STRT(uint32_t value,
volatile uint32_t *ptr)
1107 __ASM
volatile (
"strt %1, %0" :
"=Q" (*ptr) :
"r" (value) );
1121 __STATIC_FORCEINLINE int32_t
__SSAT(int32_t val, uint32_t sat)
1123 if ((sat >= 1U) && (sat <= 32U))
1125 const int32_t max = (int32_t)((1U << (sat - 1U)) - 1U);
1126 const int32_t min = -1 - max ;
1146 __STATIC_FORCEINLINE uint32_t
__USAT(int32_t val, uint32_t sat)
1150 const uint32_t max = ((1U << sat) - 1U);
1151 if (val > (int32_t)max)
1160 return (uint32_t)val;
1168 #if ((defined (__ARM_ARCH_8M_MAIN__ ) && (__ARM_ARCH_8M_MAIN__ == 1)) || \ 1169 (defined (__ARM_ARCH_8M_BASE__ ) && (__ARM_ARCH_8M_BASE__ == 1)) ) 1176 __STATIC_FORCEINLINE uint8_t __LDAB(
volatile uint8_t *ptr)
1180 __ASM
volatile (
"ldab %0, %1" :
"=r" (result) :
"Q" (*ptr) );
1181 return ((uint8_t) result);
1191 __STATIC_FORCEINLINE uint16_t __LDAH(
volatile uint16_t *ptr)
1195 __ASM
volatile (
"ldah %0, %1" :
"=r" (result) :
"Q" (*ptr) );
1196 return ((uint16_t) result);
1206 __STATIC_FORCEINLINE uint32_t __LDA(
volatile uint32_t *ptr)
1210 __ASM
volatile (
"lda %0, %1" :
"=r" (result) :
"Q" (*ptr) );
1221 __STATIC_FORCEINLINE
void __STLB(uint8_t value,
volatile uint8_t *ptr)
1223 __ASM
volatile (
"stlb %1, %0" :
"=Q" (*ptr) :
"r" ((uint32_t)value) );
1233 __STATIC_FORCEINLINE
void __STLH(uint16_t value,
volatile uint16_t *ptr)
1235 __ASM
volatile (
"stlh %1, %0" :
"=Q" (*ptr) :
"r" ((uint32_t)value) );
1245 __STATIC_FORCEINLINE
void __STL(uint32_t value,
volatile uint32_t *ptr)
1247 __ASM
volatile (
"stl %1, %0" :
"=Q" (*ptr) :
"r" ((uint32_t)value) );
1257 #define __LDAEXB (uint8_t)__builtin_arm_ldaex 1266 #define __LDAEXH (uint16_t)__builtin_arm_ldaex 1275 #define __LDAEX (uint32_t)__builtin_arm_ldaex 1286 #define __STLEXB (uint32_t)__builtin_arm_stlex 1297 #define __STLEXH (uint32_t)__builtin_arm_stlex 1308 #define __STLEX (uint32_t)__builtin_arm_stlex 1322 #if (defined (__ARM_FEATURE_DSP) && (__ARM_FEATURE_DSP == 1)) 1324 __STATIC_FORCEINLINE uint32_t __SADD8(uint32_t op1, uint32_t op2)
1328 __ASM
volatile (
"sadd8 %0, %1, %2" :
"=r" (result) :
"r" (op1),
"r" (op2) );
1332 __STATIC_FORCEINLINE uint32_t __QADD8(uint32_t op1, uint32_t op2)
1336 __ASM
volatile (
"qadd8 %0, %1, %2" :
"=r" (result) :
"r" (op1),
"r" (op2) );
1340 __STATIC_FORCEINLINE uint32_t __SHADD8(uint32_t op1, uint32_t op2)
1344 __ASM
volatile (
"shadd8 %0, %1, %2" :
"=r" (result) :
"r" (op1),
"r" (op2) );
1348 __STATIC_FORCEINLINE uint32_t __UADD8(uint32_t op1, uint32_t op2)
1352 __ASM
volatile (
"uadd8 %0, %1, %2" :
"=r" (result) :
"r" (op1),
"r" (op2) );
1356 __STATIC_FORCEINLINE uint32_t __UQADD8(uint32_t op1, uint32_t op2)
1360 __ASM
volatile (
"uqadd8 %0, %1, %2" :
"=r" (result) :
"r" (op1),
"r" (op2) );
1364 __STATIC_FORCEINLINE uint32_t __UHADD8(uint32_t op1, uint32_t op2)
1368 __ASM
volatile (
"uhadd8 %0, %1, %2" :
"=r" (result) :
"r" (op1),
"r" (op2) );
1373 __STATIC_FORCEINLINE uint32_t __SSUB8(uint32_t op1, uint32_t op2)
1377 __ASM
volatile (
"ssub8 %0, %1, %2" :
"=r" (result) :
"r" (op1),
"r" (op2) );
1381 __STATIC_FORCEINLINE uint32_t __QSUB8(uint32_t op1, uint32_t op2)
1385 __ASM
volatile (
"qsub8 %0, %1, %2" :
"=r" (result) :
"r" (op1),
"r" (op2) );
1389 __STATIC_FORCEINLINE uint32_t __SHSUB8(uint32_t op1, uint32_t op2)
1393 __ASM
volatile (
"shsub8 %0, %1, %2" :
"=r" (result) :
"r" (op1),
"r" (op2) );
1397 __STATIC_FORCEINLINE uint32_t __USUB8(uint32_t op1, uint32_t op2)
1401 __ASM
volatile (
"usub8 %0, %1, %2" :
"=r" (result) :
"r" (op1),
"r" (op2) );
1405 __STATIC_FORCEINLINE uint32_t __UQSUB8(uint32_t op1, uint32_t op2)
1409 __ASM
volatile (
"uqsub8 %0, %1, %2" :
"=r" (result) :
"r" (op1),
"r" (op2) );
1413 __STATIC_FORCEINLINE uint32_t __UHSUB8(uint32_t op1, uint32_t op2)
1417 __ASM
volatile (
"uhsub8 %0, %1, %2" :
"=r" (result) :
"r" (op1),
"r" (op2) );
1422 __STATIC_FORCEINLINE uint32_t __SADD16(uint32_t op1, uint32_t op2)
1426 __ASM
volatile (
"sadd16 %0, %1, %2" :
"=r" (result) :
"r" (op1),
"r" (op2) );
1430 __STATIC_FORCEINLINE uint32_t __QADD16(uint32_t op1, uint32_t op2)
1434 __ASM
volatile (
"qadd16 %0, %1, %2" :
"=r" (result) :
"r" (op1),
"r" (op2) );
1438 __STATIC_FORCEINLINE uint32_t __SHADD16(uint32_t op1, uint32_t op2)
1442 __ASM
volatile (
"shadd16 %0, %1, %2" :
"=r" (result) :
"r" (op1),
"r" (op2) );
1446 __STATIC_FORCEINLINE uint32_t __UADD16(uint32_t op1, uint32_t op2)
1450 __ASM
volatile (
"uadd16 %0, %1, %2" :
"=r" (result) :
"r" (op1),
"r" (op2) );
1454 __STATIC_FORCEINLINE uint32_t __UQADD16(uint32_t op1, uint32_t op2)
1458 __ASM
volatile (
"uqadd16 %0, %1, %2" :
"=r" (result) :
"r" (op1),
"r" (op2) );
1462 __STATIC_FORCEINLINE uint32_t __UHADD16(uint32_t op1, uint32_t op2)
1466 __ASM
volatile (
"uhadd16 %0, %1, %2" :
"=r" (result) :
"r" (op1),
"r" (op2) );
1470 __STATIC_FORCEINLINE uint32_t __SSUB16(uint32_t op1, uint32_t op2)
1474 __ASM
volatile (
"ssub16 %0, %1, %2" :
"=r" (result) :
"r" (op1),
"r" (op2) );
1478 __STATIC_FORCEINLINE uint32_t __QSUB16(uint32_t op1, uint32_t op2)
1482 __ASM
volatile (
"qsub16 %0, %1, %2" :
"=r" (result) :
"r" (op1),
"r" (op2) );
1486 __STATIC_FORCEINLINE uint32_t __SHSUB16(uint32_t op1, uint32_t op2)
1490 __ASM
volatile (
"shsub16 %0, %1, %2" :
"=r" (result) :
"r" (op1),
"r" (op2) );
1494 __STATIC_FORCEINLINE uint32_t __USUB16(uint32_t op1, uint32_t op2)
1498 __ASM
volatile (
"usub16 %0, %1, %2" :
"=r" (result) :
"r" (op1),
"r" (op2) );
1502 __STATIC_FORCEINLINE uint32_t __UQSUB16(uint32_t op1, uint32_t op2)
1506 __ASM
volatile (
"uqsub16 %0, %1, %2" :
"=r" (result) :
"r" (op1),
"r" (op2) );
1510 __STATIC_FORCEINLINE uint32_t __UHSUB16(uint32_t op1, uint32_t op2)
1514 __ASM
volatile (
"uhsub16 %0, %1, %2" :
"=r" (result) :
"r" (op1),
"r" (op2) );
1518 __STATIC_FORCEINLINE uint32_t __SASX(uint32_t op1, uint32_t op2)
1522 __ASM
volatile (
"sasx %0, %1, %2" :
"=r" (result) :
"r" (op1),
"r" (op2) );
1526 __STATIC_FORCEINLINE uint32_t __QASX(uint32_t op1, uint32_t op2)
1530 __ASM
volatile (
"qasx %0, %1, %2" :
"=r" (result) :
"r" (op1),
"r" (op2) );
1534 __STATIC_FORCEINLINE uint32_t __SHASX(uint32_t op1, uint32_t op2)
1538 __ASM
volatile (
"shasx %0, %1, %2" :
"=r" (result) :
"r" (op1),
"r" (op2) );
1542 __STATIC_FORCEINLINE uint32_t __UASX(uint32_t op1, uint32_t op2)
1546 __ASM
volatile (
"uasx %0, %1, %2" :
"=r" (result) :
"r" (op1),
"r" (op2) );
1550 __STATIC_FORCEINLINE uint32_t __UQASX(uint32_t op1, uint32_t op2)
1554 __ASM
volatile (
"uqasx %0, %1, %2" :
"=r" (result) :
"r" (op1),
"r" (op2) );
1558 __STATIC_FORCEINLINE uint32_t __UHASX(uint32_t op1, uint32_t op2)
1562 __ASM
volatile (
"uhasx %0, %1, %2" :
"=r" (result) :
"r" (op1),
"r" (op2) );
1566 __STATIC_FORCEINLINE uint32_t __SSAX(uint32_t op1, uint32_t op2)
1570 __ASM
volatile (
"ssax %0, %1, %2" :
"=r" (result) :
"r" (op1),
"r" (op2) );
1574 __STATIC_FORCEINLINE uint32_t __QSAX(uint32_t op1, uint32_t op2)
1578 __ASM
volatile (
"qsax %0, %1, %2" :
"=r" (result) :
"r" (op1),
"r" (op2) );
1582 __STATIC_FORCEINLINE uint32_t __SHSAX(uint32_t op1, uint32_t op2)
1586 __ASM
volatile (
"shsax %0, %1, %2" :
"=r" (result) :
"r" (op1),
"r" (op2) );
1590 __STATIC_FORCEINLINE uint32_t __USAX(uint32_t op1, uint32_t op2)
1594 __ASM
volatile (
"usax %0, %1, %2" :
"=r" (result) :
"r" (op1),
"r" (op2) );
1598 __STATIC_FORCEINLINE uint32_t __UQSAX(uint32_t op1, uint32_t op2)
1602 __ASM
volatile (
"uqsax %0, %1, %2" :
"=r" (result) :
"r" (op1),
"r" (op2) );
1606 __STATIC_FORCEINLINE uint32_t __UHSAX(uint32_t op1, uint32_t op2)
1610 __ASM
volatile (
"uhsax %0, %1, %2" :
"=r" (result) :
"r" (op1),
"r" (op2) );
1614 __STATIC_FORCEINLINE uint32_t __USAD8(uint32_t op1, uint32_t op2)
1618 __ASM
volatile (
"usad8 %0, %1, %2" :
"=r" (result) :
"r" (op1),
"r" (op2) );
1622 __STATIC_FORCEINLINE uint32_t __USADA8(uint32_t op1, uint32_t op2, uint32_t op3)
1626 __ASM
volatile (
"usada8 %0, %1, %2, %3" :
"=r" (result) :
"r" (op1),
"r" (op2),
"r" (op3) );
1630 #define __SSAT16(ARG1,ARG2) \ 1632 int32_t __RES, __ARG1 = (ARG1); \ 1633 __ASM ("ssat16 %0, %1, %2" : "=r" (__RES) : "I" (ARG2), "r" (__ARG1) ); \ 1637 #define __USAT16(ARG1,ARG2) \ 1639 uint32_t __RES, __ARG1 = (ARG1); \ 1640 __ASM ("usat16 %0, %1, %2" : "=r" (__RES) : "I" (ARG2), "r" (__ARG1) ); \ 1644 __STATIC_FORCEINLINE uint32_t __UXTB16(uint32_t op1)
1648 __ASM
volatile (
"uxtb16 %0, %1" :
"=r" (result) :
"r" (op1));
1652 __STATIC_FORCEINLINE uint32_t __UXTAB16(uint32_t op1, uint32_t op2)
1656 __ASM
volatile (
"uxtab16 %0, %1, %2" :
"=r" (result) :
"r" (op1),
"r" (op2) );
1660 __STATIC_FORCEINLINE uint32_t __SXTB16(uint32_t op1)
1664 __ASM
volatile (
"sxtb16 %0, %1" :
"=r" (result) :
"r" (op1));
1668 __STATIC_FORCEINLINE uint32_t __SXTAB16(uint32_t op1, uint32_t op2)
1672 __ASM
volatile (
"sxtab16 %0, %1, %2" :
"=r" (result) :
"r" (op1),
"r" (op2) );
1676 __STATIC_FORCEINLINE uint32_t __SMUAD (uint32_t op1, uint32_t op2)
1680 __ASM
volatile (
"smuad %0, %1, %2" :
"=r" (result) :
"r" (op1),
"r" (op2) );
1684 __STATIC_FORCEINLINE uint32_t __SMUADX (uint32_t op1, uint32_t op2)
1688 __ASM
volatile (
"smuadx %0, %1, %2" :
"=r" (result) :
"r" (op1),
"r" (op2) );
1692 __STATIC_FORCEINLINE uint32_t __SMLAD (uint32_t op1, uint32_t op2, uint32_t op3)
1696 __ASM
volatile (
"smlad %0, %1, %2, %3" :
"=r" (result) :
"r" (op1),
"r" (op2),
"r" (op3) );
1700 __STATIC_FORCEINLINE uint32_t __SMLADX (uint32_t op1, uint32_t op2, uint32_t op3)
1704 __ASM
volatile (
"smladx %0, %1, %2, %3" :
"=r" (result) :
"r" (op1),
"r" (op2),
"r" (op3) );
1708 __STATIC_FORCEINLINE uint64_t __SMLALD (uint32_t op1, uint32_t op2, uint64_t acc)
1717 __ASM
volatile (
"smlald %0, %1, %2, %3" :
"=r" (llr.w32[0]),
"=r" (llr.w32[1]):
"r" (op1),
"r" (op2) ,
"0" (llr.w32[0]),
"1" (llr.w32[1]) );
1719 __ASM
volatile (
"smlald %0, %1, %2, %3" :
"=r" (llr.w32[1]),
"=r" (llr.w32[0]):
"r" (op1),
"r" (op2) ,
"0" (llr.w32[1]),
"1" (llr.w32[0]) );
1725 __STATIC_FORCEINLINE uint64_t __SMLALDX (uint32_t op1, uint32_t op2, uint64_t acc)
1734 __ASM
volatile (
"smlaldx %0, %1, %2, %3" :
"=r" (llr.w32[0]),
"=r" (llr.w32[1]):
"r" (op1),
"r" (op2) ,
"0" (llr.w32[0]),
"1" (llr.w32[1]) );
1736 __ASM
volatile (
"smlaldx %0, %1, %2, %3" :
"=r" (llr.w32[1]),
"=r" (llr.w32[0]):
"r" (op1),
"r" (op2) ,
"0" (llr.w32[1]),
"1" (llr.w32[0]) );
1742 __STATIC_FORCEINLINE uint32_t __SMUSD (uint32_t op1, uint32_t op2)
1746 __ASM
volatile (
"smusd %0, %1, %2" :
"=r" (result) :
"r" (op1),
"r" (op2) );
1750 __STATIC_FORCEINLINE uint32_t __SMUSDX (uint32_t op1, uint32_t op2)
1754 __ASM
volatile (
"smusdx %0, %1, %2" :
"=r" (result) :
"r" (op1),
"r" (op2) );
1758 __STATIC_FORCEINLINE uint32_t __SMLSD (uint32_t op1, uint32_t op2, uint32_t op3)
1762 __ASM
volatile (
"smlsd %0, %1, %2, %3" :
"=r" (result) :
"r" (op1),
"r" (op2),
"r" (op3) );
1766 __STATIC_FORCEINLINE uint32_t __SMLSDX (uint32_t op1, uint32_t op2, uint32_t op3)
1770 __ASM
volatile (
"smlsdx %0, %1, %2, %3" :
"=r" (result) :
"r" (op1),
"r" (op2),
"r" (op3) );
1774 __STATIC_FORCEINLINE uint64_t __SMLSLD (uint32_t op1, uint32_t op2, uint64_t acc)
1783 __ASM
volatile (
"smlsld %0, %1, %2, %3" :
"=r" (llr.w32[0]),
"=r" (llr.w32[1]):
"r" (op1),
"r" (op2) ,
"0" (llr.w32[0]),
"1" (llr.w32[1]) );
1785 __ASM
volatile (
"smlsld %0, %1, %2, %3" :
"=r" (llr.w32[1]),
"=r" (llr.w32[0]):
"r" (op1),
"r" (op2) ,
"0" (llr.w32[1]),
"1" (llr.w32[0]) );
1791 __STATIC_FORCEINLINE uint64_t __SMLSLDX (uint32_t op1, uint32_t op2, uint64_t acc)
1800 __ASM
volatile (
"smlsldx %0, %1, %2, %3" :
"=r" (llr.w32[0]),
"=r" (llr.w32[1]):
"r" (op1),
"r" (op2) ,
"0" (llr.w32[0]),
"1" (llr.w32[1]) );
1802 __ASM
volatile (
"smlsldx %0, %1, %2, %3" :
"=r" (llr.w32[1]),
"=r" (llr.w32[0]):
"r" (op1),
"r" (op2) ,
"0" (llr.w32[1]),
"1" (llr.w32[0]) );
1808 __STATIC_FORCEINLINE uint32_t __SEL (uint32_t op1, uint32_t op2)
1812 __ASM
volatile (
"sel %0, %1, %2" :
"=r" (result) :
"r" (op1),
"r" (op2) );
1816 __STATIC_FORCEINLINE int32_t __QADD( int32_t op1, int32_t op2)
1820 __ASM
volatile (
"qadd %0, %1, %2" :
"=r" (result) :
"r" (op1),
"r" (op2) );
1824 __STATIC_FORCEINLINE int32_t __QSUB( int32_t op1, int32_t op2)
1828 __ASM
volatile (
"qsub %0, %1, %2" :
"=r" (result) :
"r" (op1),
"r" (op2) );
1833 #define __PKHBT(ARG1,ARG2,ARG3) \ 1835 uint32_t __RES, __ARG1 = (ARG1), __ARG2 = (ARG2); \ 1836 __ASM ("pkhbt %0, %1, %2, lsl %3" : "=r" (__RES) : "r" (__ARG1), "r" (__ARG2), "I" (ARG3) ); \ 1840 #define __PKHTB(ARG1,ARG2,ARG3) \ 1842 uint32_t __RES, __ARG1 = (ARG1), __ARG2 = (ARG2); \ 1844 __ASM ("pkhtb %0, %1, %2" : "=r" (__RES) : "r" (__ARG1), "r" (__ARG2) ); \ 1846 __ASM ("pkhtb %0, %1, %2, asr %3" : "=r" (__RES) : "r" (__ARG1), "r" (__ARG2), "I" (ARG3) ); \ 1851 #define __PKHBT(ARG1,ARG2,ARG3) ( ((((uint32_t)(ARG1)) ) & 0x0000FFFFUL) | \ 1852 ((((uint32_t)(ARG2)) << (ARG3)) & 0xFFFF0000UL) ) 1854 #define __PKHTB(ARG1,ARG2,ARG3) ( ((((uint32_t)(ARG1)) ) & 0xFFFF0000UL) | \ 1855 ((((uint32_t)(ARG2)) >> (ARG3)) & 0x0000FFFFUL) ) 1857 __STATIC_FORCEINLINE int32_t __SMMLA (int32_t op1, int32_t op2, int32_t op3)
1861 __ASM
volatile (
"smmla %0, %1, %2, %3" :
"=r" (result):
"r" (op1),
"r" (op2),
"r" (op3) );
__STATIC_INLINE uint32_t __get_xPSR(void)
Get xPSR Register.
Definition: cmsis_armcc.h:178
__STATIC_FORCEINLINE int32_t __SSAT(int32_t val, uint32_t sat)
Signed Saturate.
Definition: cmsis_armclang.h:1121
__STATIC_INLINE uint32_t __get_PSP(void)
Get Process Stack Pointer.
Definition: cmsis_armcc.h:190
__STATIC_INLINE uint32_t __get_CONTROL(void)
Get Control Register.
Definition: cmsis_armcc.h:130
__STATIC_INLINE uint32_t __get_MSP(void)
Get Main Stack Pointer.
Definition: cmsis_armcc.h:214
__STATIC_INLINE uint32_t __get_IPSR(void)
Get IPSR Register.
Definition: cmsis_armcc.h:154
__STATIC_INLINE uint32_t __get_APSR(void)
Get APSR Register.
Definition: cmsis_armcc.h:166
__STATIC_FORCEINLINE uint32_t __USAT(int32_t val, uint32_t sat)
Unsigned Saturate.
Definition: cmsis_armclang.h:1146
__STATIC_INLINE uint32_t __get_PRIMASK(void)
Get Priority Mask.
Definition: cmsis_armcc.h:238
__STATIC_INLINE void __set_PRIMASK(uint32_t priMask)
Set Priority Mask.
Definition: cmsis_armcc.h:250
__STATIC_INLINE void __set_MSP(uint32_t topOfMainStack)
Set Main Stack Pointer.
Definition: cmsis_armcc.h:226
__STATIC_FORCEINLINE uint32_t __ROR(uint32_t op1, uint32_t op2)
Rotate Right in unsigned value (32 bit)
Definition: cmsis_armclang.h:876
__STATIC_INLINE void __set_PSP(uint32_t topOfProcStack)
Set Process Stack Pointer.
Definition: cmsis_armcc.h:202
__STATIC_INLINE void __set_CONTROL(uint32_t control)
Set Control Register.
Definition: cmsis_armcc.h:142