#pragma GCC diagnostic push
#pragma GCC diagnostic ignored "-Wsign-conversion"
#pragma GCC diagnostic ignored "-Wconversion"
#pragma GCC diagnostic ignored "-Wunused-parameter"

/* Fallback for compilers without __has_builtin */
#ifndef __has_builtin
  #define __has_builtin(x)              (0)
#endif

/* CMSIS compiler-specific defines */
#ifndef   __ASM
  #define __ASM                         __asm
#endif
#ifndef   __INLINE
  #define __INLINE                      inline
#endif
#ifndef   __STATIC_INLINE
  #define __STATIC_INLINE               static inline
#endif
#ifndef   __STATIC_FORCEINLINE
  #define __STATIC_FORCEINLINE          __attribute__((always_inline)) static inline
#endif
#ifndef   __NO_RETURN
  #define __NO_RETURN                   __attribute__((__noreturn__))
#endif
#ifndef   __USED
  #define __USED                        __attribute__((used))
#endif
#ifndef   __WEAK
  #define __WEAK                        __attribute__((weak))
#endif
#ifndef   __PACKED
  #define __PACKED                      __attribute__((packed, aligned(1)))
#endif
#ifndef   __PACKED_STRUCT
  #define __PACKED_STRUCT               struct __attribute__((packed, aligned(1)))
#endif
#ifndef   __PACKED_UNION
  #define __PACKED_UNION                union __attribute__((packed, aligned(1)))
#endif

#ifndef   __UNALIGNED_UINT32            /* deprecated */
  #pragma GCC diagnostic push
  #pragma GCC diagnostic ignored "-Wpacked"
  #pragma GCC diagnostic ignored "-Wattributes"
  struct __attribute__((packed)) T_UINT32 { uint32_t v; };
  #pragma GCC diagnostic pop
  #define __UNALIGNED_UINT32(x)         (((struct T_UINT32 *)(x))->v)
#endif
#ifndef   __UNALIGNED_UINT16_WRITE
  #pragma GCC diagnostic push
  #pragma GCC diagnostic ignored "-Wpacked"
  #pragma GCC diagnostic ignored "-Wattributes"
  __PACKED_STRUCT T_UINT16_WRITE { uint16_t v; };
  #pragma GCC diagnostic pop
  #define __UNALIGNED_UINT16_WRITE(addr, val)    (void)((((struct T_UINT16_WRITE *)(void *)(addr))->v) = (val))
#endif
#ifndef   __UNALIGNED_UINT16_READ
  #pragma GCC diagnostic push
  #pragma GCC diagnostic ignored "-Wpacked"
  #pragma GCC diagnostic ignored "-Wattributes"
  __PACKED_STRUCT T_UINT16_READ { uint16_t v; };
  #pragma GCC diagnostic pop
  #define __UNALIGNED_UINT16_READ(addr)          (((const struct T_UINT16_READ *)(const void *)(addr))->v)
#endif
#ifndef   __UNALIGNED_UINT32_WRITE
  #pragma GCC diagnostic push
  #pragma GCC diagnostic ignored "-Wpacked"
  #pragma GCC diagnostic ignored "-Wattributes"
  __PACKED_STRUCT T_UINT32_WRITE { uint32_t v; };
  #pragma GCC diagnostic pop
  #define __UNALIGNED_UINT32_WRITE(addr, val)    (void)((((struct T_UINT32_WRITE *)(void *)(addr))->v) = (val))
#endif
#ifndef   __UNALIGNED_UINT32_READ
  #pragma GCC diagnostic push
  #pragma GCC diagnostic ignored "-Wpacked"
  #pragma GCC diagnostic ignored "-Wattributes"
  __PACKED_STRUCT T_UINT32_READ { uint32_t v; };
  #pragma GCC diagnostic pop
  #define __UNALIGNED_UINT32_READ(addr)          (((const struct T_UINT32_READ *)(const void *)(addr))->v)
#endif
#ifndef   __ALIGNED
  #define __ALIGNED(x)                  __attribute__((aligned(x)))
#endif
#ifndef   __RESTRICT
  #define __RESTRICT                    __restrict
#endif


/* ###########################  Core Function Access  ########################### */

/* Enable IRQ Interrupts (clears PRIMASK) */
__STATIC_FORCEINLINE void __enable_irq(void)
{
  __ASM volatile ("cpsie i" : : : "memory");
}
/* Disable IRQ Interrupts (sets PRIMASK) */
__STATIC_FORCEINLINE void __disable_irq(void)
{
  __ASM volatile ("cpsid i" : : : "memory");
}
/* Get Control Register */
__STATIC_FORCEINLINE uint32_t __get_CONTROL(void)
{
  uint32_t result;

  __ASM volatile ("MRS %0, control" : "=r" (result) );
  return(result);
}
#if (defined (__ARM_FEATURE_CMSE) && (__ARM_FEATURE_CMSE == 3))
/* Get Control Register (non-secure) */
__STATIC_FORCEINLINE uint32_t __TZ_get_CONTROL_NS(void)
{
  uint32_t result;

  __ASM volatile ("MRS %0, control_ns" : "=r" (result) );
  return(result);
}
#endif
/* Set Control Register */
__STATIC_FORCEINLINE void __set_CONTROL(uint32_t control)
{
  __ASM volatile ("MSR control, %0" : : "r" (control) : "memory");
}
#if (defined (__ARM_FEATURE_CMSE) && (__ARM_FEATURE_CMSE == 3))
/* Set Control Register (non-secure) */
__STATIC_FORCEINLINE void __TZ_set_CONTROL_NS(uint32_t control)
{
  __ASM volatile ("MSR control_ns, %0" : : "r" (control) : "memory");
}
#endif
/* Get IPSR Register */
__STATIC_FORCEINLINE uint32_t __get_IPSR(void)
{
  uint32_t result;

  __ASM volatile ("MRS %0, ipsr" : "=r" (result) );
  return(result);
}
/* Get APSR Register */
__STATIC_FORCEINLINE uint32_t __get_APSR(void)
{
  uint32_t result;

  __ASM volatile ("MRS %0, apsr" : "=r" (result) );
  return(result);
}
/* Get xPSR Register */
__STATIC_FORCEINLINE uint32_t __get_xPSR(void)
{
  uint32_t result;

  __ASM volatile ("MRS %0, xpsr" : "=r" (result) );
  return(result);
}
/* Get Process Stack Pointer */
__STATIC_FORCEINLINE uint32_t __get_PSP(void)
{
  uint32_t result;

  __ASM volatile ("MRS %0, psp" : "=r" (result) );
  return(result);
}
#if (defined (__ARM_FEATURE_CMSE) && (__ARM_FEATURE_CMSE == 3))
/* Get Process Stack Pointer (non-secure) */
__STATIC_FORCEINLINE uint32_t __TZ_get_PSP_NS(void)
{
  uint32_t result;

  __ASM volatile ("MRS %0, psp_ns" : "=r" (result) );
  return(result);
}
#endif
/* Set Process Stack Pointer */
__STATIC_FORCEINLINE void __set_PSP(uint32_t topOfProcStack)
{
  __ASM volatile ("MSR psp, %0" : : "r" (topOfProcStack) : );
}
#if (defined (__ARM_FEATURE_CMSE) && (__ARM_FEATURE_CMSE == 3))
/* Set Process Stack Pointer (non-secure) */
__STATIC_FORCEINLINE void __TZ_set_PSP_NS(uint32_t topOfProcStack)
{
  __ASM volatile ("MSR psp_ns, %0" : : "r" (topOfProcStack) : );
}
#endif
/* Get Main Stack Pointer */
__STATIC_FORCEINLINE uint32_t __get_MSP(void)
{
  uint32_t result;

  __ASM volatile ("MRS %0, msp" : "=r" (result) );
  return(result);
}
#if (defined (__ARM_FEATURE_CMSE) && (__ARM_FEATURE_CMSE == 3))
/* Get Main Stack Pointer (non-secure) */
__STATIC_FORCEINLINE uint32_t __TZ_get_MSP_NS(void)
{
  uint32_t result;

  __ASM volatile ("MRS %0, msp_ns" : "=r" (result) );
  return(result);
}
#endif
/* Set Main Stack Pointer */
__STATIC_FORCEINLINE void __set_MSP(uint32_t topOfMainStack)
{
  __ASM volatile ("MSR msp, %0" : : "r" (topOfMainStack) : );
}
#if (defined (__ARM_FEATURE_CMSE) && (__ARM_FEATURE_CMSE == 3))
/* Set Main Stack Pointer (non-secure) */
__STATIC_FORCEINLINE void __TZ_set_MSP_NS(uint32_t topOfMainStack)
{
  __ASM volatile ("MSR msp_ns, %0" : : "r" (topOfMainStack) : );
}
#endif
#if (defined (__ARM_FEATURE_CMSE) && (__ARM_FEATURE_CMSE == 3))
/* Get Stack Pointer (non-secure) */
__STATIC_FORCEINLINE uint32_t __TZ_get_SP_NS(void)
{
  uint32_t result;

  __ASM volatile ("MRS %0, sp_ns" : "=r" (result) );
  return(result);
}
/* Set Stack Pointer (non-secure) */
__STATIC_FORCEINLINE void __TZ_set_SP_NS(uint32_t topOfStack)
{
  __ASM volatile ("MSR sp_ns, %0" : : "r" (topOfStack) : );
}
#endif
/* Get Priority Mask */
__STATIC_FORCEINLINE uint32_t __get_PRIMASK(void)
{
  uint32_t result;

  __ASM volatile ("MRS %0, primask" : "=r" (result) :: "memory");
  return(result);
}
#if (defined (__ARM_FEATURE_CMSE) && (__ARM_FEATURE_CMSE == 3))
/* Get Priority Mask (non-secure) */
__STATIC_FORCEINLINE uint32_t __TZ_get_PRIMASK_NS(void)
{
  uint32_t result;

  __ASM volatile ("MRS %0, primask_ns" : "=r" (result) :: "memory");
  return(result);
}
#endif
/* Set Priority Mask */
__STATIC_FORCEINLINE void __set_PRIMASK(uint32_t priMask)
{
  __ASM volatile ("MSR primask, %0" : : "r" (priMask) : "memory");
}
#if (defined (__ARM_FEATURE_CMSE) && (__ARM_FEATURE_CMSE == 3))
/* Set Priority Mask (non-secure) */
__STATIC_FORCEINLINE void __TZ_set_PRIMASK_NS(uint32_t priMask)
{
  __ASM volatile ("MSR primask_ns, %0" : : "r" (priMask) : "memory");
}
#endif
#if ((defined (__ARM_ARCH_7M__      ) && (__ARM_ARCH_7M__      == 1)) || \
     (defined (__ARM_ARCH_7EM__     ) && (__ARM_ARCH_7EM__     == 1)) || \
     (defined (__ARM_ARCH_8M_MAIN__ ) && (__ARM_ARCH_8M_MAIN__ == 1))    )

/* Enable Fault IRQ (clears FAULTMASK) */
__STATIC_FORCEINLINE void __enable_fault_irq(void)
{
  __ASM volatile ("cpsie f" : : : "memory");
}
/* Disable Fault IRQ (sets FAULTMASK) */
__STATIC_FORCEINLINE void __disable_fault_irq(void)
{
  __ASM volatile ("cpsid f" : : : "memory");
}
/* Get Base Priority */
__STATIC_FORCEINLINE uint32_t __get_BASEPRI(void)
{
  uint32_t result;

  __ASM volatile ("MRS %0, basepri" : "=r" (result) );
  return(result);
}
#if (defined (__ARM_FEATURE_CMSE) && (__ARM_FEATURE_CMSE == 3))
/* Get Base Priority (non-secure) */
__STATIC_FORCEINLINE uint32_t __TZ_get_BASEPRI_NS(void)
{
  uint32_t result;

  __ASM volatile ("MRS %0, basepri_ns" : "=r" (result) );
  return(result);
}
#endif
/* Set Base Priority */
__STATIC_FORCEINLINE void __set_BASEPRI(uint32_t basePri)
{
  __ASM volatile ("MSR basepri, %0" : : "r" (basePri) : "memory");
}
#if (defined (__ARM_FEATURE_CMSE) && (__ARM_FEATURE_CMSE == 3))
/* Set Base Priority (non-secure) */
__STATIC_FORCEINLINE void __TZ_set_BASEPRI_NS(uint32_t basePri)
{
  __ASM volatile ("MSR basepri_ns, %0" : : "r" (basePri) : "memory");
}
#endif
/* Set Base Priority with condition (basepri_max: only written if it raises the mask) */
__STATIC_FORCEINLINE void __set_BASEPRI_MAX(uint32_t basePri)
{
  __ASM volatile ("MSR basepri_max, %0" : : "r" (basePri) : "memory");
}
/* Get Fault Mask */
__STATIC_FORCEINLINE uint32_t __get_FAULTMASK(void)
{
  uint32_t result;

  __ASM volatile ("MRS %0, faultmask" : "=r" (result) );
  return(result);
}
#if (defined (__ARM_FEATURE_CMSE) && (__ARM_FEATURE_CMSE == 3))
/* Get Fault Mask (non-secure) */
__STATIC_FORCEINLINE uint32_t __TZ_get_FAULTMASK_NS(void)
{
  uint32_t result;

  __ASM volatile ("MRS %0, faultmask_ns" : "=r" (result) );
  return(result);
}
#endif
/* Set Fault Mask */
__STATIC_FORCEINLINE void __set_FAULTMASK(uint32_t faultMask)
{
  __ASM volatile ("MSR faultmask, %0" : : "r" (faultMask) : "memory");
}
#if (defined (__ARM_FEATURE_CMSE) && (__ARM_FEATURE_CMSE == 3))
/* Set Fault Mask (non-secure) */
__STATIC_FORCEINLINE void __TZ_set_FAULTMASK_NS(uint32_t faultMask)
{
  __ASM volatile ("MSR faultmask_ns, %0" : : "r" (faultMask) : "memory");
}
#endif

#endif /* (__ARM_ARCH_7M__ == 1) || (__ARM_ARCH_7EM__ == 1) || (__ARM_ARCH_8M_MAIN__ == 1) */
#if ((defined (__ARM_ARCH_8M_MAIN__ ) && (__ARM_ARCH_8M_MAIN__ == 1)) || \
     (defined (__ARM_ARCH_8M_BASE__ ) && (__ARM_ARCH_8M_BASE__ == 1))    )

/* Get Process Stack Pointer Limit */
__STATIC_FORCEINLINE uint32_t __get_PSPLIM(void)
{
#if (!(defined (__ARM_ARCH_8M_MAIN__ ) && (__ARM_ARCH_8M_MAIN__ == 1)) && \
    (!defined (__ARM_FEATURE_CMSE) || (__ARM_FEATURE_CMSE < 3)))
  /* without main extensions, the non-secure PSPLIM is RAZ/WI */
  return 0U;
#else
  uint32_t result;

  __ASM volatile ("MRS %0, psplim" : "=r" (result) );
  return result;
#endif
}
#if (defined (__ARM_FEATURE_CMSE) && (__ARM_FEATURE_CMSE == 3))
/* Get Process Stack Pointer Limit (non-secure) */
__STATIC_FORCEINLINE uint32_t __TZ_get_PSPLIM_NS(void)
{
#if (!(defined (__ARM_ARCH_8M_MAIN__ ) && (__ARM_ARCH_8M_MAIN__ == 1)))
  /* without main extensions, the non-secure PSPLIM is RAZ/WI */
  return 0U;
#else
  uint32_t result;

  __ASM volatile ("MRS %0, psplim_ns" : "=r" (result) );
  return result;
#endif
}
#endif
/* Set Process Stack Pointer Limit */
__STATIC_FORCEINLINE void __set_PSPLIM(uint32_t ProcStackPtrLimit)
{
#if (!(defined (__ARM_ARCH_8M_MAIN__ ) && (__ARM_ARCH_8M_MAIN__ == 1)) && \
    (!defined (__ARM_FEATURE_CMSE) || (__ARM_FEATURE_CMSE < 3)))
  /* without main extensions, the non-secure PSPLIM is RAZ/WI */
  (void)ProcStackPtrLimit;
#else
  __ASM volatile ("MSR psplim, %0" : : "r" (ProcStackPtrLimit));
#endif
}
#if (defined (__ARM_FEATURE_CMSE) && (__ARM_FEATURE_CMSE == 3))
/* Set Process Stack Pointer Limit (non-secure) */
__STATIC_FORCEINLINE void __TZ_set_PSPLIM_NS(uint32_t ProcStackPtrLimit)
{
#if (!(defined (__ARM_ARCH_8M_MAIN__ ) && (__ARM_ARCH_8M_MAIN__ == 1)))
  /* without main extensions, the non-secure PSPLIM is RAZ/WI */
  (void)ProcStackPtrLimit;
#else
  __ASM volatile ("MSR psplim_ns, %0\n" : : "r" (ProcStackPtrLimit));
#endif
}
#endif
/* Get Main Stack Pointer Limit */
__STATIC_FORCEINLINE uint32_t __get_MSPLIM(void)
{
#if (!(defined (__ARM_ARCH_8M_MAIN__ ) && (__ARM_ARCH_8M_MAIN__ == 1)) && \
    (!defined (__ARM_FEATURE_CMSE) || (__ARM_FEATURE_CMSE < 3)))
  /* without main extensions, the non-secure MSPLIM is RAZ/WI */
  return 0U;
#else
  uint32_t result;

  __ASM volatile ("MRS %0, msplim" : "=r" (result) );
  return result;
#endif
}
#if (defined (__ARM_FEATURE_CMSE) && (__ARM_FEATURE_CMSE == 3))
/* Get Main Stack Pointer Limit (non-secure) */
__STATIC_FORCEINLINE uint32_t __TZ_get_MSPLIM_NS(void)
{
#if (!(defined (__ARM_ARCH_8M_MAIN__ ) && (__ARM_ARCH_8M_MAIN__ == 1)))
  /* without main extensions, the non-secure MSPLIM is RAZ/WI */
  return 0U;
#else
  uint32_t result;

  __ASM volatile ("MRS %0, msplim_ns" : "=r" (result) );
  return result;
#endif
}
#endif
/* Set Main Stack Pointer Limit */
__STATIC_FORCEINLINE void __set_MSPLIM(uint32_t MainStackPtrLimit)
{
#if (!(defined (__ARM_ARCH_8M_MAIN__ ) && (__ARM_ARCH_8M_MAIN__ == 1)) && \
    (!defined (__ARM_FEATURE_CMSE) || (__ARM_FEATURE_CMSE < 3)))
  /* without main extensions, the non-secure MSPLIM is RAZ/WI */
  (void)MainStackPtrLimit;
#else
  __ASM volatile ("MSR msplim, %0" : : "r" (MainStackPtrLimit));
#endif
}
#if (defined (__ARM_FEATURE_CMSE) && (__ARM_FEATURE_CMSE == 3))
/* Set Main Stack Pointer Limit (non-secure) */
__STATIC_FORCEINLINE void __TZ_set_MSPLIM_NS(uint32_t MainStackPtrLimit)
{
#if (!(defined (__ARM_ARCH_8M_MAIN__ ) && (__ARM_ARCH_8M_MAIN__ == 1)))
  /* without main extensions, the non-secure MSPLIM is RAZ/WI */
  (void)MainStackPtrLimit;
#else
  __ASM volatile ("MSR msplim_ns, %0" : : "r" (MainStackPtrLimit));
#endif
}
#endif

#endif /* (__ARM_ARCH_8M_MAIN__ == 1) || (__ARM_ARCH_8M_BASE__ == 1) */
/* Get FPSCR (Floating Point Status/Control register; reads as zero without an FPU) */
__STATIC_FORCEINLINE uint32_t __get_FPSCR(void)
{
#if ((defined (__FPU_PRESENT) && (__FPU_PRESENT == 1U)) && \
     (defined (__FPU_USED   ) && (__FPU_USED    == 1U))     )
#if __has_builtin(__builtin_arm_get_fpscr)
  return __builtin_arm_get_fpscr();
#else
  uint32_t result;

  __ASM volatile ("VMRS %0, fpscr" : "=r" (result) );
  return(result);
#endif
#else
  return(0U);
#endif
}
/* Set FPSCR */
__STATIC_FORCEINLINE void __set_FPSCR(uint32_t fpscr)
{
#if ((defined (__FPU_PRESENT) && (__FPU_PRESENT == 1U)) && \
     (defined (__FPU_USED   ) && (__FPU_USED    == 1U))     )
#if __has_builtin(__builtin_arm_set_fpscr)
  __builtin_arm_set_fpscr(fpscr);
#else
  __ASM volatile ("VMSR fpscr, %0" : : "r" (fpscr) : "vfpcc", "memory");
#endif
#else
  (void)fpscr;
#endif
}
/* ##########################  Core Instruction Access  ######################### */

#if defined (__thumb__) && !defined (__thumb2__)
  #define __CMSIS_GCC_OUT_REG(r) "=l" (r)
  #define __CMSIS_GCC_RW_REG(r)  "+l" (r)
  #define __CMSIS_GCC_USE_REG(r) "l" (r)
#else
  #define __CMSIS_GCC_OUT_REG(r) "=r" (r)
  #define __CMSIS_GCC_RW_REG(r)  "+r" (r)
  #define __CMSIS_GCC_USE_REG(r) "r" (r)
#endif

/* No Operation */
#define __NOP()                             __ASM volatile ("nop")

/* Wait For Interrupt */
#define __WFI()                             __ASM volatile ("wfi")

/* Wait For Event */
#define __WFE()                             __ASM volatile ("wfe")

/* Send Event */
#define __SEV()                             __ASM volatile ("sev")

/* Instruction Synchronization Barrier */
__STATIC_FORCEINLINE void __ISB(void)
{
  __ASM volatile ("isb 0xF":::"memory");
}
/* Data Synchronization Barrier */
__STATIC_FORCEINLINE void __DSB(void)
{
  __ASM volatile ("dsb 0xF":::"memory");
}
/* Data Memory Barrier */
__STATIC_FORCEINLINE void __DMB(void)
{
  __ASM volatile ("dmb 0xF":::"memory");
}
/* Reverse byte order (32 bit) */
__STATIC_FORCEINLINE uint32_t __REV(uint32_t value)
{
#if (__GNUC__ > 4) || (__GNUC__ == 4 && __GNUC_MINOR__ >= 5)
  return __builtin_bswap32(value);
#else
  uint32_t result;

  __ASM volatile ("rev %0, %1" : __CMSIS_GCC_OUT_REG (result) : __CMSIS_GCC_USE_REG (value) );
  return result;
#endif
}
/* Reverse byte order (16 bit), independently in both half-words */
__STATIC_FORCEINLINE uint32_t __REV16(uint32_t value)
{
  uint32_t result;

  __ASM volatile ("rev16 %0, %1" : __CMSIS_GCC_OUT_REG (result) : __CMSIS_GCC_USE_REG (value) );
  return result;
}
/* Reverse byte order (signed 16 bit) */
__STATIC_FORCEINLINE int16_t __REVSH(int16_t value)
{
#if (__GNUC__ > 4) || (__GNUC__ == 4 && __GNUC_MINOR__ >= 8)
  return (int16_t)__builtin_bswap16(value);
#else
  int16_t result;

  __ASM volatile ("revsh %0, %1" : __CMSIS_GCC_OUT_REG (result) : __CMSIS_GCC_USE_REG (value) );
  return result;
#endif
}
/* Rotate Right in unsigned value (32 bit) */
__STATIC_FORCEINLINE uint32_t __ROR(uint32_t op1, uint32_t op2)
{
  op2 %= 32U;
  if (op2 == 0U)
  {
    return op1;
  }
  return (op1 >> op2) | (op1 << (32U - op2));
}
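/* Worked examples (illustrative): __REV converts between little- and
   big-endian words (e.g. host to network byte order), while __ROR(x, 16)
   swaps the two half-words.

     uint32_t be  = __REV(0x12345678U);       // -> 0x78563412
     uint32_t swp = __ROR(0x12345678U, 16U);  // -> 0x56781234
*/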
/* Breakpoint */
#define __BKPT(value)                       __ASM volatile ("bkpt "#value)

/* Reverse bit order of value */
__STATIC_FORCEINLINE uint32_t __RBIT(uint32_t value)
{
  uint32_t result;

#if ((defined (__ARM_ARCH_7M__      ) && (__ARM_ARCH_7M__      == 1)) || \
     (defined (__ARM_ARCH_7EM__     ) && (__ARM_ARCH_7EM__     == 1)) || \
     (defined (__ARM_ARCH_8M_MAIN__ ) && (__ARM_ARCH_8M_MAIN__ == 1))    )
  __ASM volatile ("rbit %0, %1" : "=r" (result) : "r" (value) );
#else
  uint32_t s = (4U * 8U) - 1U;       /* extra shift needed at the end */

  result = value;                    /* result will hold the reversed bits */
  for (value >>= 1U; value != 0U; value >>= 1U)
  {
    result <<= 1U;
    result |= value & 1U;
    s--;
  }
  result <<= s;                      /* shift when value's highest bits are zero */
#endif
  return result;
}
/* Count leading zeros (note: __builtin_clz(0) is undefined, so avoid __CLZ(0)) */
#define __CLZ             (uint8_t)__builtin_clz

#if ((defined (__ARM_ARCH_7M__      ) && (__ARM_ARCH_7M__      == 1)) || \
     (defined (__ARM_ARCH_7EM__     ) && (__ARM_ARCH_7EM__     == 1)) || \
     (defined (__ARM_ARCH_8M_MAIN__ ) && (__ARM_ARCH_8M_MAIN__ == 1)) || \
     (defined (__ARM_ARCH_8M_BASE__ ) && (__ARM_ARCH_8M_BASE__ == 1))    )

/* LDR Exclusive (8 bit) */
__STATIC_FORCEINLINE uint8_t __LDREXB(volatile uint8_t *addr)
{
  uint32_t result;

#if (__GNUC__ > 4) || (__GNUC__ == 4 && __GNUC_MINOR__ >= 8)
  __ASM volatile ("ldrexb %0, %1" : "=r" (result) : "Q" (*addr) );
#else
  /* Prior to GCC 4.8, "Q" expands to [rx, #0], which the assembler rejects,
     so fall back to this less efficient pattern. */
  __ASM volatile ("ldrexb %0, [%1]" : "=r" (result) : "r" (addr) : "memory" );
#endif
  return ((uint8_t) result);
}
/* LDR Exclusive (16 bit) */
__STATIC_FORCEINLINE uint16_t __LDREXH(volatile uint16_t *addr)
{
  uint32_t result;

#if (__GNUC__ > 4) || (__GNUC__ == 4 && __GNUC_MINOR__ >= 8)
  __ASM volatile ("ldrexh %0, %1" : "=r" (result) : "Q" (*addr) );
#else
  /* See __LDREXB for the pre-GCC 4.8 fallback rationale. */
  __ASM volatile ("ldrexh %0, [%1]" : "=r" (result) : "r" (addr) : "memory" );
#endif
  return ((uint16_t) result);
}
/* LDR Exclusive (32 bit) */
__STATIC_FORCEINLINE uint32_t __LDREXW(volatile uint32_t *addr)
{
  uint32_t result;

  __ASM volatile ("ldrex %0, %1" : "=r" (result) : "Q" (*addr) );
  return(result);
}
/* STR Exclusive (8 bit); returns 0 on success, 1 on failure */
__STATIC_FORCEINLINE uint32_t __STREXB(uint8_t value, volatile uint8_t *addr)
{
  uint32_t result;

  __ASM volatile ("strexb %0, %2, %1" : "=&r" (result), "=Q" (*addr) : "r" ((uint32_t)value) );
  return(result);
}
/* STR Exclusive (16 bit); returns 0 on success, 1 on failure */
__STATIC_FORCEINLINE uint32_t __STREXH(uint16_t value, volatile uint16_t *addr)
{
  uint32_t result;

  __ASM volatile ("strexh %0, %2, %1" : "=&r" (result), "=Q" (*addr) : "r" ((uint32_t)value) );
  return(result);
}
/* STR Exclusive (32 bit); returns 0 on success, 1 on failure */
__STATIC_FORCEINLINE uint32_t __STREXW(uint32_t value, volatile uint32_t *addr)
{
  uint32_t result;

  __ASM volatile ("strex %0, %2, %1" : "=&r" (result), "=Q" (*addr) : "r" (value) );
  return(result);
}
/* Remove the exclusive lock created by LDREX */
__STATIC_FORCEINLINE void __CLREX(void)
{
  __ASM volatile ("clrex" ::: "memory");
}

#endif /* (__ARM_ARCH_7M__ == 1) || (__ARM_ARCH_7EM__ == 1) || (__ARM_ARCH_8M_MAIN__ == 1) || (__ARM_ARCH_8M_BASE__ == 1) */
#if ((defined (__ARM_ARCH_7M__      ) && (__ARM_ARCH_7M__      == 1)) || \
     (defined (__ARM_ARCH_7EM__     ) && (__ARM_ARCH_7EM__     == 1)) || \
     (defined (__ARM_ARCH_8M_MAIN__ ) && (__ARM_ARCH_8M_MAIN__ == 1))    )

/* Signed Saturate (hardware SSAT; ARG2 must be a compile-time constant 1..32) */
#define __SSAT(ARG1,ARG2) \
({                          \
  int32_t __RES, __ARG1 = (ARG1); \
  __ASM ("ssat %0, %1, %2" : "=r" (__RES) :  "I" (ARG2), "r" (__ARG1) ); \
  __RES; \
 })

/* Unsigned Saturate (hardware USAT; ARG2 must be a compile-time constant 0..31) */
#define __USAT(ARG1,ARG2) \
({                          \
  uint32_t __RES, __ARG1 = (ARG1); \
  __ASM ("usat %0, %1, %2" : "=r" (__RES) :  "I" (ARG2), "r" (__ARG1) ); \
  __RES; \
 })

/* Rotate Right with Extend (32 bit) */
__STATIC_FORCEINLINE uint32_t __RRX(uint32_t value)
{
  uint32_t result;

  __ASM volatile ("rrx %0, %1" : __CMSIS_GCC_OUT_REG (result) : __CMSIS_GCC_USE_REG (value) );
  return(result);
}
/* LDRT Unprivileged (8 bit) */
__STATIC_FORCEINLINE uint8_t __LDRBT(volatile uint8_t *ptr)
{
  uint32_t result;

#if (__GNUC__ > 4) || (__GNUC__ == 4 && __GNUC_MINOR__ >= 8)
  __ASM volatile ("ldrbt %0, %1" : "=r" (result) : "Q" (*ptr) );
#else
  /* See __LDREXB for the pre-GCC 4.8 fallback rationale. */
  __ASM volatile ("ldrbt %0, [%1]" : "=r" (result) : "r" (ptr) : "memory" );
#endif
  return ((uint8_t) result);
}
/* LDRT Unprivileged (16 bit) */
__STATIC_FORCEINLINE uint16_t __LDRHT(volatile uint16_t *ptr)
{
  uint32_t result;

#if (__GNUC__ > 4) || (__GNUC__ == 4 && __GNUC_MINOR__ >= 8)
  __ASM volatile ("ldrht %0, %1" : "=r" (result) : "Q" (*ptr) );
#else
  /* See __LDREXB for the pre-GCC 4.8 fallback rationale. */
  __ASM volatile ("ldrht %0, [%1]" : "=r" (result) : "r" (ptr) : "memory" );
#endif
  return ((uint16_t) result);
}
/* LDRT Unprivileged (32 bit) */
__STATIC_FORCEINLINE uint32_t __LDRT(volatile uint32_t *ptr)
{
  uint32_t result;

  __ASM volatile ("ldrt %0, %1" : "=r" (result) : "Q" (*ptr) );
  return(result);
}
/* STRT Unprivileged (8 bit) */
__STATIC_FORCEINLINE void __STRBT(uint8_t value, volatile uint8_t *ptr)
{
  __ASM volatile ("strbt %1, %0" : "=Q" (*ptr) : "r" ((uint32_t)value) );
}
/* STRT Unprivileged (16 bit) */
__STATIC_FORCEINLINE void __STRHT(uint16_t value, volatile uint16_t *ptr)
{
  __ASM volatile ("strht %1, %0" : "=Q" (*ptr) : "r" ((uint32_t)value) );
}
/* STRT Unprivileged (32 bit) */
__STATIC_FORCEINLINE void __STRT(uint32_t value, volatile uint32_t *ptr)
{
  __ASM volatile ("strt %1, %0" : "=Q" (*ptr) : "r" (value) );
}
#else /* no hardware saturation instructions (e.g. Armv6-M / Armv8-M base line) */

/* Signed Saturate */
__STATIC_FORCEINLINE int32_t __SSAT(int32_t val, uint32_t sat)
{
  if ((sat >= 1U) && (sat <= 32U))
  {
    const int32_t max = (int32_t)((1U << (sat - 1U)) - 1U);
    const int32_t min = -1 - max ;
    if (val > max)
    {
      return max;
    }
    else if (val < min)
    {
      return min;
    }
  }
  return val;
}

/* Unsigned Saturate */
__STATIC_FORCEINLINE uint32_t __USAT(int32_t val, uint32_t sat)
{
  if (sat <= 31U)
  {
    const uint32_t max = ((1U << sat) - 1U);
    if (val > (int32_t)max)
    {
      return max;
    }
    else if (val < 0)
    {
      return 0U;
    }
  }
  return (uint32_t)val;
}

#endif /* (__ARM_ARCH_7M__ == 1) || (__ARM_ARCH_7EM__ == 1) || (__ARM_ARCH_8M_MAIN__ == 1) */
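/* Worked example (illustrative): __SSAT(x, 16) clamps to the int16_t range
   [-32768, 32767], the usual final step of a Q15 computation.

     int32_t acc = 40000;                      // exceeds int16_t range
     int16_t out = (int16_t)__SSAT(acc, 16U);  // -> 32767
*/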
#if ((defined (__ARM_ARCH_8M_MAIN__ ) && (__ARM_ARCH_8M_MAIN__ == 1)) || \
     (defined (__ARM_ARCH_8M_BASE__ ) && (__ARM_ARCH_8M_BASE__ == 1))    )

/* Load-Acquire (8 bit) */
__STATIC_FORCEINLINE uint8_t __LDAB(volatile uint8_t *ptr)
{
  uint32_t result;

  __ASM volatile ("ldab %0, %1" : "=r" (result) : "Q" (*ptr) );
  return ((uint8_t) result);
}
/* Load-Acquire (16 bit) */
__STATIC_FORCEINLINE uint16_t __LDAH(volatile uint16_t *ptr)
{
  uint32_t result;

  __ASM volatile ("ldah %0, %1" : "=r" (result) : "Q" (*ptr) );
  return ((uint16_t) result);
}
/* Load-Acquire (32 bit) */
__STATIC_FORCEINLINE uint32_t __LDA(volatile uint32_t *ptr)
{
  uint32_t result;

  __ASM volatile ("lda %0, %1" : "=r" (result) : "Q" (*ptr) );
  return(result);
}
/* Store-Release (8 bit) */
__STATIC_FORCEINLINE void __STLB(uint8_t value, volatile uint8_t *ptr)
{
  __ASM volatile ("stlb %1, %0" : "=Q" (*ptr) : "r" ((uint32_t)value) );
}
/* Store-Release (16 bit) */
__STATIC_FORCEINLINE void __STLH(uint16_t value, volatile uint16_t *ptr)
{
  __ASM volatile ("stlh %1, %0" : "=Q" (*ptr) : "r" ((uint32_t)value) );
}
/* Store-Release (32 bit) */
__STATIC_FORCEINLINE void __STL(uint32_t value, volatile uint32_t *ptr)
{
  __ASM volatile ("stl %1, %0" : "=Q" (*ptr) : "r" ((uint32_t)value) );
}
/* Load-Acquire Exclusive (8 bit) */
__STATIC_FORCEINLINE uint8_t __LDAEXB(volatile uint8_t *ptr)
{
  uint32_t result;

  __ASM volatile ("ldaexb %0, %1" : "=r" (result) : "Q" (*ptr) );
  return ((uint8_t) result);
}
/* Load-Acquire Exclusive (16 bit) */
__STATIC_FORCEINLINE uint16_t __LDAEXH(volatile uint16_t *ptr)
{
  uint32_t result;

  __ASM volatile ("ldaexh %0, %1" : "=r" (result) : "Q" (*ptr) );
  return ((uint16_t) result);
}
/* Load-Acquire Exclusive (32 bit) */
__STATIC_FORCEINLINE uint32_t __LDAEX(volatile uint32_t *ptr)
{
  uint32_t result;

  __ASM volatile ("ldaex %0, %1" : "=r" (result) : "Q" (*ptr) );
  return(result);
}
/* Store-Release Exclusive (8 bit); returns 0 on success, 1 on failure */
__STATIC_FORCEINLINE uint32_t __STLEXB(uint8_t value, volatile uint8_t *ptr)
{
  uint32_t result;

  __ASM volatile ("stlexb %0, %2, %1" : "=&r" (result), "=Q" (*ptr) : "r" ((uint32_t)value) );
  return(result);
}
/* Store-Release Exclusive (16 bit); returns 0 on success, 1 on failure */
__STATIC_FORCEINLINE uint32_t __STLEXH(uint16_t value, volatile uint16_t *ptr)
{
  uint32_t result;

  __ASM volatile ("stlexh %0, %2, %1" : "=&r" (result), "=Q" (*ptr) : "r" ((uint32_t)value) );
  return(result);
}
/* Store-Release Exclusive (32 bit); returns 0 on success, 1 on failure */
__STATIC_FORCEINLINE uint32_t __STLEX(uint32_t value, volatile uint32_t *ptr)
{
  uint32_t result;

  __ASM volatile ("stlex %0, %2, %1" : "=&r" (result), "=Q" (*ptr) : "r" ((uint32_t)value) );
  return(result);
}

#endif /* (__ARM_ARCH_8M_MAIN__ == 1) || (__ARM_ARCH_8M_BASE__ == 1) */
/* ###################  Compiler specific Intrinsics (SIMD)  #################### */

#if (defined (__ARM_FEATURE_DSP) && (__ARM_FEATURE_DSP == 1))

/* Packed byte add/subtract: plain (GE-setting), Q saturating, SH/UH halving */
__STATIC_FORCEINLINE uint32_t __SADD8(uint32_t op1, uint32_t op2)
{
  uint32_t result;

  __ASM volatile ("sadd8 %0, %1, %2" : "=r" (result) : "r" (op1), "r" (op2) );
  return(result);
}

__STATIC_FORCEINLINE uint32_t __QADD8(uint32_t op1, uint32_t op2)
{
  uint32_t result;

  __ASM volatile ("qadd8 %0, %1, %2" : "=r" (result) : "r" (op1), "r" (op2) );
  return(result);
}

__STATIC_FORCEINLINE uint32_t __SHADD8(uint32_t op1, uint32_t op2)
{
  uint32_t result;

  __ASM volatile ("shadd8 %0, %1, %2" : "=r" (result) : "r" (op1), "r" (op2) );
  return(result);
}

__STATIC_FORCEINLINE uint32_t __UADD8(uint32_t op1, uint32_t op2)
{
  uint32_t result;

  __ASM volatile ("uadd8 %0, %1, %2" : "=r" (result) : "r" (op1), "r" (op2) );
  return(result);
}

__STATIC_FORCEINLINE uint32_t __UQADD8(uint32_t op1, uint32_t op2)
{
  uint32_t result;

  __ASM volatile ("uqadd8 %0, %1, %2" : "=r" (result) : "r" (op1), "r" (op2) );
  return(result);
}

__STATIC_FORCEINLINE uint32_t __UHADD8(uint32_t op1, uint32_t op2)
{
  uint32_t result;

  __ASM volatile ("uhadd8 %0, %1, %2" : "=r" (result) : "r" (op1), "r" (op2) );
  return(result);
}

__STATIC_FORCEINLINE uint32_t __SSUB8(uint32_t op1, uint32_t op2)
{
  uint32_t result;

  __ASM volatile ("ssub8 %0, %1, %2" : "=r" (result) : "r" (op1), "r" (op2) );
  return(result);
}

__STATIC_FORCEINLINE uint32_t __QSUB8(uint32_t op1, uint32_t op2)
{
  uint32_t result;

  __ASM volatile ("qsub8 %0, %1, %2" : "=r" (result) : "r" (op1), "r" (op2) );
  return(result);
}

__STATIC_FORCEINLINE uint32_t __SHSUB8(uint32_t op1, uint32_t op2)
{
  uint32_t result;

  __ASM volatile ("shsub8 %0, %1, %2" : "=r" (result) : "r" (op1), "r" (op2) );
  return(result);
}

__STATIC_FORCEINLINE uint32_t __USUB8(uint32_t op1, uint32_t op2)
{
  uint32_t result;

  __ASM volatile ("usub8 %0, %1, %2" : "=r" (result) : "r" (op1), "r" (op2) );
  return(result);
}

__STATIC_FORCEINLINE uint32_t __UQSUB8(uint32_t op1, uint32_t op2)
{
  uint32_t result;

  __ASM volatile ("uqsub8 %0, %1, %2" : "=r" (result) : "r" (op1), "r" (op2) );
  return(result);
}

__STATIC_FORCEINLINE uint32_t __UHSUB8(uint32_t op1, uint32_t op2)
{
  uint32_t result;

  __ASM volatile ("uhsub8 %0, %1, %2" : "=r" (result) : "r" (op1), "r" (op2) );
  return(result);
}
/* Packed half-word add/subtract: plain (GE-setting), Q saturating, SH/UH halving */
__STATIC_FORCEINLINE uint32_t __SADD16(uint32_t op1, uint32_t op2)
{
  uint32_t result;

  __ASM volatile ("sadd16 %0, %1, %2" : "=r" (result) : "r" (op1), "r" (op2) );
  return(result);
}

__STATIC_FORCEINLINE uint32_t __QADD16(uint32_t op1, uint32_t op2)
{
  uint32_t result;

  __ASM volatile ("qadd16 %0, %1, %2" : "=r" (result) : "r" (op1), "r" (op2) );
  return(result);
}

__STATIC_FORCEINLINE uint32_t __SHADD16(uint32_t op1, uint32_t op2)
{
  uint32_t result;

  __ASM volatile ("shadd16 %0, %1, %2" : "=r" (result) : "r" (op1), "r" (op2) );
  return(result);
}

__STATIC_FORCEINLINE uint32_t __UADD16(uint32_t op1, uint32_t op2)
{
  uint32_t result;

  __ASM volatile ("uadd16 %0, %1, %2" : "=r" (result) : "r" (op1), "r" (op2) );
  return(result);
}

__STATIC_FORCEINLINE uint32_t __UQADD16(uint32_t op1, uint32_t op2)
{
  uint32_t result;

  __ASM volatile ("uqadd16 %0, %1, %2" : "=r" (result) : "r" (op1), "r" (op2) );
  return(result);
}

__STATIC_FORCEINLINE uint32_t __UHADD16(uint32_t op1, uint32_t op2)
{
  uint32_t result;

  __ASM volatile ("uhadd16 %0, %1, %2" : "=r" (result) : "r" (op1), "r" (op2) );
  return(result);
}

__STATIC_FORCEINLINE uint32_t __SSUB16(uint32_t op1, uint32_t op2)
{
  uint32_t result;

  __ASM volatile ("ssub16 %0, %1, %2" : "=r" (result) : "r" (op1), "r" (op2) );
  return(result);
}

__STATIC_FORCEINLINE uint32_t __QSUB16(uint32_t op1, uint32_t op2)
{
  uint32_t result;

  __ASM volatile ("qsub16 %0, %1, %2" : "=r" (result) : "r" (op1), "r" (op2) );
  return(result);
}

__STATIC_FORCEINLINE uint32_t __SHSUB16(uint32_t op1, uint32_t op2)
{
  uint32_t result;

  __ASM volatile ("shsub16 %0, %1, %2" : "=r" (result) : "r" (op1), "r" (op2) );
  return(result);
}

__STATIC_FORCEINLINE uint32_t __USUB16(uint32_t op1, uint32_t op2)
{
  uint32_t result;

  __ASM volatile ("usub16 %0, %1, %2" : "=r" (result) : "r" (op1), "r" (op2) );
  return(result);
}

__STATIC_FORCEINLINE uint32_t __UQSUB16(uint32_t op1, uint32_t op2)
{
  uint32_t result;

  __ASM volatile ("uqsub16 %0, %1, %2" : "=r" (result) : "r" (op1), "r" (op2) );
  return(result);
}

__STATIC_FORCEINLINE uint32_t __UHSUB16(uint32_t op1, uint32_t op2)
{
  uint32_t result;

  __ASM volatile ("uhsub16 %0, %1, %2" : "=r" (result) : "r" (op1), "r" (op2) );
  return(result);
}
/* Packed half-word add-and-subtract with exchange (ASX) and subtract-and-add (SAX) */
__STATIC_FORCEINLINE uint32_t __SASX(uint32_t op1, uint32_t op2)
{
  uint32_t result;

  __ASM volatile ("sasx %0, %1, %2" : "=r" (result) : "r" (op1), "r" (op2) );
  return(result);
}

__STATIC_FORCEINLINE uint32_t __QASX(uint32_t op1, uint32_t op2)
{
  uint32_t result;

  __ASM volatile ("qasx %0, %1, %2" : "=r" (result) : "r" (op1), "r" (op2) );
  return(result);
}

__STATIC_FORCEINLINE uint32_t __SHASX(uint32_t op1, uint32_t op2)
{
  uint32_t result;

  __ASM volatile ("shasx %0, %1, %2" : "=r" (result) : "r" (op1), "r" (op2) );
  return(result);
}

__STATIC_FORCEINLINE uint32_t __UASX(uint32_t op1, uint32_t op2)
{
  uint32_t result;

  __ASM volatile ("uasx %0, %1, %2" : "=r" (result) : "r" (op1), "r" (op2) );
  return(result);
}

__STATIC_FORCEINLINE uint32_t __UQASX(uint32_t op1, uint32_t op2)
{
  uint32_t result;

  __ASM volatile ("uqasx %0, %1, %2" : "=r" (result) : "r" (op1), "r" (op2) );
  return(result);
}

__STATIC_FORCEINLINE uint32_t __UHASX(uint32_t op1, uint32_t op2)
{
  uint32_t result;

  __ASM volatile ("uhasx %0, %1, %2" : "=r" (result) : "r" (op1), "r" (op2) );
  return(result);
}

__STATIC_FORCEINLINE uint32_t __SSAX(uint32_t op1, uint32_t op2)
{
  uint32_t result;

  __ASM volatile ("ssax %0, %1, %2" : "=r" (result) : "r" (op1), "r" (op2) );
  return(result);
}

__STATIC_FORCEINLINE uint32_t __QSAX(uint32_t op1, uint32_t op2)
{
  uint32_t result;

  __ASM volatile ("qsax %0, %1, %2" : "=r" (result) : "r" (op1), "r" (op2) );
  return(result);
}

__STATIC_FORCEINLINE uint32_t __SHSAX(uint32_t op1, uint32_t op2)
{
  uint32_t result;

  __ASM volatile ("shsax %0, %1, %2" : "=r" (result) : "r" (op1), "r" (op2) );
  return(result);
}

__STATIC_FORCEINLINE uint32_t __USAX(uint32_t op1, uint32_t op2)
{
  uint32_t result;

  __ASM volatile ("usax %0, %1, %2" : "=r" (result) : "r" (op1), "r" (op2) );
  return(result);
}

__STATIC_FORCEINLINE uint32_t __UQSAX(uint32_t op1, uint32_t op2)
{
  uint32_t result;

  __ASM volatile ("uqsax %0, %1, %2" : "=r" (result) : "r" (op1), "r" (op2) );
  return(result);
}

__STATIC_FORCEINLINE uint32_t __UHSAX(uint32_t op1, uint32_t op2)
{
  uint32_t result;

  __ASM volatile ("uhsax %0, %1, %2" : "=r" (result) : "r" (op1), "r" (op2) );
  return(result);
}
/* Unsigned sum of absolute byte differences */
__STATIC_FORCEINLINE uint32_t __USAD8(uint32_t op1, uint32_t op2)
{
  uint32_t result;

  __ASM volatile ("usad8 %0, %1, %2" : "=r" (result) : "r" (op1), "r" (op2) );
  return(result);
}

/* Unsigned sum of absolute byte differences with accumulate */
__STATIC_FORCEINLINE uint32_t __USADA8(uint32_t op1, uint32_t op2, uint32_t op3)
{
  uint32_t result;

  __ASM volatile ("usada8 %0, %1, %2, %3" : "=r" (result) : "r" (op1), "r" (op2), "r" (op3) );
  return(result);
}

/* Dual 16-bit saturate (ARG2 must be a compile-time constant) */
#define __SSAT16(ARG1,ARG2) \
({                          \
  int32_t __RES, __ARG1 = (ARG1); \
  __ASM ("ssat16 %0, %1, %2" : "=r" (__RES) :  "I" (ARG2), "r" (__ARG1) ); \
  __RES; \
 })

#define __USAT16(ARG1,ARG2) \
({                          \
  uint32_t __RES, __ARG1 = (ARG1); \
  __ASM ("usat16 %0, %1, %2" : "=r" (__RES) :  "I" (ARG2), "r" (__ARG1) ); \
  __RES; \
 })

/* Dual zero-extend of bytes 0 and 2 to half-words */
__STATIC_FORCEINLINE uint32_t __UXTB16(uint32_t op1)
{
  uint32_t result;

  __ASM volatile ("uxtb16 %0, %1" : "=r" (result) : "r" (op1));
  return(result);
}
/* Dual zero-extend with add */
__STATIC_FORCEINLINE uint32_t __UXTAB16(uint32_t op1, uint32_t op2)
{
  uint32_t result;

  __ASM volatile ("uxtab16 %0, %1, %2" : "=r" (result) : "r" (op1), "r" (op2) );
  return(result);
}

/* Dual sign-extend of bytes 0 and 2 to half-words */
__STATIC_FORCEINLINE uint32_t __SXTB16(uint32_t op1)
{
  uint32_t result;

  __ASM volatile ("sxtb16 %0, %1" : "=r" (result) : "r" (op1));
  return(result);
}

/* Dual sign-extend with add */
__STATIC_FORCEINLINE uint32_t __SXTAB16(uint32_t op1, uint32_t op2)
{
  uint32_t result;

  __ASM volatile ("sxtab16 %0, %1, %2" : "=r" (result) : "r" (op1), "r" (op2) );
  return(result);
}
/* Dual 16-bit signed multiply with add (X variants exchange the half-words of op2) */
__STATIC_FORCEINLINE uint32_t __SMUAD(uint32_t op1, uint32_t op2)
{
  uint32_t result;

  __ASM volatile ("smuad %0, %1, %2" : "=r" (result) : "r" (op1), "r" (op2) );
  return(result);
}

__STATIC_FORCEINLINE uint32_t __SMUADX(uint32_t op1, uint32_t op2)
{
  uint32_t result;

  __ASM volatile ("smuadx %0, %1, %2" : "=r" (result) : "r" (op1), "r" (op2) );
  return(result);
}

__STATIC_FORCEINLINE uint32_t __SMLAD(uint32_t op1, uint32_t op2, uint32_t op3)
{
  uint32_t result;

  __ASM volatile ("smlad %0, %1, %2, %3" : "=r" (result) : "r" (op1), "r" (op2), "r" (op3) );
  return(result);
}

__STATIC_FORCEINLINE uint32_t __SMLADX(uint32_t op1, uint32_t op2, uint32_t op3)
{
  uint32_t result;

  __ASM volatile ("smladx %0, %1, %2, %3" : "=r" (result) : "r" (op1), "r" (op2), "r" (op3) );
  return(result);
}
/* Dual 16-bit signed multiply with 64-bit accumulate */
__STATIC_FORCEINLINE uint64_t __SMLALD(uint32_t op1, uint32_t op2, uint64_t acc)
{
  union llreg_u{
    uint32_t w32[2];
    uint64_t w64;
  } llr;
  llr.w64 = acc;

#ifndef __ARMEB__   /* Little endian */
  __ASM volatile ("smlald %0, %1, %2, %3" : "=r" (llr.w32[0]), "=r" (llr.w32[1]): "r" (op1), "r" (op2) , "0" (llr.w32[0]), "1" (llr.w32[1]) );
#else               /* Big endian */
  __ASM volatile ("smlald %0, %1, %2, %3" : "=r" (llr.w32[1]), "=r" (llr.w32[0]): "r" (op1), "r" (op2) , "0" (llr.w32[1]), "1" (llr.w32[0]) );
#endif

  return(llr.w64);
}

/* Dual 16-bit signed multiply with 64-bit accumulate, exchanged half-words */
__STATIC_FORCEINLINE uint64_t __SMLALDX(uint32_t op1, uint32_t op2, uint64_t acc)
{
  union llreg_u{
    uint32_t w32[2];
    uint64_t w64;
  } llr;
  llr.w64 = acc;

#ifndef __ARMEB__   /* Little endian */
  __ASM volatile ("smlaldx %0, %1, %2, %3" : "=r" (llr.w32[0]), "=r" (llr.w32[1]): "r" (op1), "r" (op2) , "0" (llr.w32[0]), "1" (llr.w32[1]) );
#else               /* Big endian */
  __ASM volatile ("smlaldx %0, %1, %2, %3" : "=r" (llr.w32[1]), "=r" (llr.w32[0]): "r" (op1), "r" (op2) , "0" (llr.w32[1]), "1" (llr.w32[0]) );
#endif

  return(llr.w64);
}
1960 __ASM
volatile (
"smusd %0, %1, %2" :
"=r" (result) :
"r" (op1),
"r" (op2) );
1964 __STATIC_FORCEINLINE uint32_t __SMUSDX (uint32_t op1, uint32_t op2)
1968 __ASM
volatile (
"smusdx %0, %1, %2" :
"=r" (result) :
"r" (op1),
"r" (op2) );
1972 __STATIC_FORCEINLINE uint32_t __SMLSD (uint32_t op1, uint32_t op2, uint32_t op3)
1976 __ASM
volatile (
"smlsd %0, %1, %2, %3" :
"=r" (result) :
"r" (op1),
"r" (op2),
"r" (op3) );
1980 __STATIC_FORCEINLINE uint32_t __SMLSDX (uint32_t op1, uint32_t op2, uint32_t op3)
1984 __ASM
volatile (
"smlsdx %0, %1, %2, %3" :
"=r" (result) :
"r" (op1),
"r" (op2),
"r" (op3) );
/* Dual 16-bit signed multiply-subtract with 64-bit accumulate */
__STATIC_FORCEINLINE uint64_t __SMLSLD(uint32_t op1, uint32_t op2, uint64_t acc)
{
  union llreg_u{
    uint32_t w32[2];
    uint64_t w64;
  } llr;
  llr.w64 = acc;

#ifndef __ARMEB__   /* Little endian */
  __ASM volatile ("smlsld %0, %1, %2, %3" : "=r" (llr.w32[0]), "=r" (llr.w32[1]): "r" (op1), "r" (op2) , "0" (llr.w32[0]), "1" (llr.w32[1]) );
#else               /* Big endian */
  __ASM volatile ("smlsld %0, %1, %2, %3" : "=r" (llr.w32[1]), "=r" (llr.w32[0]): "r" (op1), "r" (op2) , "0" (llr.w32[1]), "1" (llr.w32[0]) );
#endif

  return(llr.w64);
}

/* Dual 16-bit signed multiply-subtract with 64-bit accumulate, exchanged half-words */
__STATIC_FORCEINLINE uint64_t __SMLSLDX(uint32_t op1, uint32_t op2, uint64_t acc)
{
  union llreg_u{
    uint32_t w32[2];
    uint64_t w64;
  } llr;
  llr.w64 = acc;

#ifndef __ARMEB__   /* Little endian */
  __ASM volatile ("smlsldx %0, %1, %2, %3" : "=r" (llr.w32[0]), "=r" (llr.w32[1]): "r" (op1), "r" (op2) , "0" (llr.w32[0]), "1" (llr.w32[1]) );
#else               /* Big endian */
  __ASM volatile ("smlsldx %0, %1, %2, %3" : "=r" (llr.w32[1]), "=r" (llr.w32[0]): "r" (op1), "r" (op2) , "0" (llr.w32[1]), "1" (llr.w32[0]) );
#endif

  return(llr.w64);
}
/* Select bytes from op1 or op2 based on the APSR GE flags */
__STATIC_FORCEINLINE uint32_t __SEL(uint32_t op1, uint32_t op2)
{
  uint32_t result;

  __ASM volatile ("sel %0, %1, %2" : "=r" (result) : "r" (op1), "r" (op2) );
  return(result);
}

/* Saturating add (32 bit) */
__STATIC_FORCEINLINE int32_t __QADD(int32_t op1, int32_t op2)
{
  int32_t result;

  __ASM volatile ("qadd %0, %1, %2" : "=r" (result) : "r" (op1), "r" (op2) );
  return(result);
}

/* Saturating subtract (32 bit) */
__STATIC_FORCEINLINE int32_t __QSUB(int32_t op1, int32_t op2)
{
  int32_t result;

  __ASM volatile ("qsub %0, %1, %2" : "=r" (result) : "r" (op1), "r" (op2) );
  return(result);
}
2075 __ASM
volatile (
"smmla %0, %1, %2, %3" :
"=r" (result):
"r" (op1),
"r" (op2),
"r" (op3) );
2083 #pragma GCC diagnostic pop __STATIC_INLINE uint32_t __get_xPSR(void)