/*
 * NOTE(review): this file is a line-mangled extraction of CMSIS-Core's
 * cmsis_iccarm.h (compiler abstraction for IAR ICCARM).  The embedded
 * numbers (26, 27, 30, ...) are the ORIGINAL source line numbers; many
 * original lines (blank lines, braces, #else/#endif) were dropped by the
 * extraction, so the text below will not compile as-is.
 * TODO: replace this file with the pristine upstream cmsis_iccarm.h.
 *
 * This section: include guard, "#error unless compiled by ICCARM" guard,
 * the __IAR_FT forced-inline intrinsic marker, __ALIGNED(x) (needs IAR
 * >= 7.08 for __attribute__ support, otherwise ignored with a #warning),
 * and detection of the Cortex-M architecture macros __ARM_ARCH_6M__ /
 * 7M__ / 7EM__ / 8M_BASE__ / 8M_MAIN__ from IAR's predefined
 * __ARM8M_*_LINE__ / __ARM_ARCH_PROFILE / __CORE__ macros.
 *
 * NOTE(review): lines 87/89/91 compare against __CORE (no trailing
 * underscores) while lines 81-85 use __CORE__ -- looks like a latent
 * typo (an undefined __CORE evaluates to 0 in #elif); confirm against
 * upstream before relying on the 8M detection path.
 */
26 #ifndef __CMSIS_ICCARM_H__ 27 #define __CMSIS_ICCARM_H__ 30 #error This file should only be compiled by ICCARM 33 #pragma system_include 35 #define __IAR_FT _Pragma("inline=forced") __intrinsic 37 #if (__VER__ >= 8000000) 45 #define __ALIGNED(x) __attribute__((aligned(x))) 46 #elif (__VER__ >= 7080000) 48 #define __ALIGNED(x) __attribute__((aligned(x))) 50 #warning No compiler specific solution for __ALIGNED.__ALIGNED is ignored. 58 #if __ARM_ARCH_6M__ || __ARM_ARCH_7M__ || __ARM_ARCH_7EM__ || __ARM_ARCH_8M_BASE__ || __ARM_ARCH_8M_MAIN__ 61 #if defined(__ARM8M_MAINLINE__) || defined(__ARM8EM_MAINLINE__) 62 #define __ARM_ARCH_8M_MAIN__ 1 63 #elif defined(__ARM8M_BASELINE__) 64 #define __ARM_ARCH_8M_BASE__ 1 65 #elif defined(__ARM_ARCH_PROFILE) && __ARM_ARCH_PROFILE == 'M' 67 #define __ARM_ARCH_6M__ 1 70 #define __ARM_ARCH_7EM__ 1 72 #define __ARM_ARCH_7M__ 1 79 #if !defined(__ARM_ARCH_6M__) && !defined(__ARM_ARCH_7M__) && !defined(__ARM_ARCH_7EM__) && \ 80 !defined(__ARM_ARCH_8M_BASE__) && !defined(__ARM_ARCH_8M_MAIN__) 81 #if defined(__ARM6M__) && (__CORE__ == __ARM6M__) 82 #define __ARM_ARCH_6M__ 1 83 #elif defined(__ARM7M__) && (__CORE__ == __ARM7M__) 84 #define __ARM_ARCH_7M__ 1 85 #elif defined(__ARM7EM__) && (__CORE__ == __ARM7EM__) 86 #define __ARM_ARCH_7EM__ 1 87 #elif defined(__ARM8M_BASELINE__) && (__CORE == __ARM8M_BASELINE__) 88 #define __ARM_ARCH_8M_BASE__ 1 89 #elif defined(__ARM8M_MAINLINE__) && (__CORE == __ARM8M_MAINLINE__) 90 #define __ARM_ARCH_8M_MAIN__ 1 91 #elif defined(__ARM8EM_MAINLINE__) && (__CORE == __ARM8EM_MAINLINE__) 92 #define __ARM_ARCH_8M_MAIN__ 1 94 #error "Unknown target." 
/*
 * Language-abstraction macros and unaligned-access helpers (orig lines
 * 100-299; interleaved #else/#endif lines were dropped by the extraction).
 *
 * - __IAR_M0_FAMILY: 1 on v6-M / v8-M baseline cores (no CLZ/SSAT etc.).
 * - __NO_RETURN / __PACKED / __PACKED_STRUCT / __PACKED_UNION / __USED /
 *   __WEAK: map CMSIS names onto __attribute__ (newer IAR) or IAR pragmas
 *   and __packed keyword (older IAR) -- the two #define variants visible
 *   for each name were originally on the two arms of a version #if.
 * - __iar_uint16/32_read/write: forced-inline unaligned accessors that
 *   read/write through a `__packed` pointer under "#pragma language=
 *   save/extended/restore", exposed as __UNALIGNED_UINT16/32_READ/WRITE.
 *   NOTE(review): the write helpers take `void const *ptr` yet store
 *   through it (const cast away), and end with a stray double semicolon
 *   `val;;` -- matches known upstream quirks, but verify against upstream.
 * - From orig line 242 on: when __ICCARM_INTRINSICS_VERSION__ == 2, core
 *   register access is delegated to __iar_builtin_rsr/wsr from
 *   "iccarm_builtin.h" (__get_APSR .. __get_PSPLIM below); FPSCR access
 *   degrades to a 0-stub when no FPU is present/used.
 */
100 #if defined(__ARM_ARCH_6M__) && __ARM_ARCH_6M__==1 101 #define __IAR_M0_FAMILY 1 102 #elif defined(__ARM_ARCH_8M_BASE__) && __ARM_ARCH_8M_BASE__==1 103 #define __IAR_M0_FAMILY 1 105 #define __IAR_M0_FAMILY 0 114 #define __INLINE inline 119 #define __NO_RETURN __attribute__((__noreturn__)) 121 #define __NO_RETURN _Pragma("object_attribute=__noreturn") 127 #define __PACKED __attribute__((packed, aligned(1))) 130 #define __PACKED __packed 134 #ifndef __PACKED_STRUCT 136 #define __PACKED_STRUCT struct __attribute__((packed, aligned(1))) 139 #define __PACKED_STRUCT __packed struct 143 #ifndef __PACKED_UNION 145 #define __PACKED_UNION union __attribute__((packed, aligned(1))) 148 #define __PACKED_UNION __packed union 153 #define __RESTRICT __restrict 156 #ifndef __STATIC_INLINE 157 #define __STATIC_INLINE static inline 160 #ifndef __FORCEINLINE 161 #define __FORCEINLINE _Pragma("inline=forced") 164 #ifndef __STATIC_FORCEINLINE 165 #define __STATIC_FORCEINLINE __FORCEINLINE __STATIC_INLINE 168 #ifndef __UNALIGNED_UINT16_READ 169 #pragma language=save 170 #pragma language=extended 171 __IAR_FT uint16_t __iar_uint16_read(
void const *ptr)
173 return *(__packed uint16_t*)(ptr);
175 #pragma language=restore 176 #define __UNALIGNED_UINT16_READ(PTR) __iar_uint16_read(PTR) 180 #ifndef __UNALIGNED_UINT16_WRITE 181 #pragma language=save 182 #pragma language=extended 183 __IAR_FT
void __iar_uint16_write(
void const *ptr, uint16_t val)
185 *(__packed uint16_t*)(ptr) = val;;
187 #pragma language=restore 188 #define __UNALIGNED_UINT16_WRITE(PTR,VAL) __iar_uint16_write(PTR,VAL) 191 #ifndef __UNALIGNED_UINT32_READ 192 #pragma language=save 193 #pragma language=extended 194 __IAR_FT uint32_t __iar_uint32_read(
void const *ptr)
196 return *(__packed uint32_t*)(ptr);
198 #pragma language=restore 199 #define __UNALIGNED_UINT32_READ(PTR) __iar_uint32_read(PTR) 202 #ifndef __UNALIGNED_UINT32_WRITE 203 #pragma language=save 204 #pragma language=extended 205 __IAR_FT
void __iar_uint32_write(
void const *ptr, uint32_t val)
207 *(__packed uint32_t*)(ptr) = val;;
209 #pragma language=restore 210 #define __UNALIGNED_UINT32_WRITE(PTR,VAL) __iar_uint32_write(PTR,VAL) 213 #ifndef __UNALIGNED_UINT32 214 #pragma language=save 215 #pragma language=extended 217 #pragma language=restore 218 #define __UNALIGNED_UINT32(PTR) (((struct __iar_u32 *)(PTR))->v) 223 #define __USED __attribute__((used)) 225 #define __USED _Pragma("__root") 231 #define __WEAK __attribute__((weak)) 233 #define __WEAK _Pragma("__weak") 238 #ifndef __ICCARM_INTRINSICS_VERSION__ 239 #define __ICCARM_INTRINSICS_VERSION__ 0 242 #if __ICCARM_INTRINSICS_VERSION__ == 2 260 #include "iccarm_builtin.h" 262 #define __disable_fault_irq __iar_builtin_disable_fiq 263 #define __disable_irq __iar_builtin_disable_interrupt 264 #define __enable_fault_irq __iar_builtin_enable_fiq 265 #define __enable_irq __iar_builtin_enable_interrupt 266 #define __arm_rsr __iar_builtin_rsr 267 #define __arm_wsr __iar_builtin_wsr 270 #define __get_APSR() (__arm_rsr("APSR")) 271 #define __get_BASEPRI() (__arm_rsr("BASEPRI")) 272 #define __get_CONTROL() (__arm_rsr("CONTROL")) 273 #define __get_FAULTMASK() (__arm_rsr("FAULTMASK")) 275 #if ((defined (__FPU_PRESENT) && (__FPU_PRESENT == 1U)) && \ 276 (defined (__FPU_USED ) && (__FPU_USED == 1U)) ) 277 #define __get_FPSCR() (__arm_rsr("FPSCR")) 278 #define __set_FPSCR(VALUE) (__arm_wsr("FPSCR", (VALUE))) 280 #define __get_FPSCR() ( 0 ) 281 #define __set_FPSCR(VALUE) ((void)VALUE) 284 #define __get_IPSR() (__arm_rsr("IPSR")) 285 #define __get_MSP() (__arm_rsr("MSP")) 286 #if (!(defined (__ARM_ARCH_8M_MAIN__ ) && (__ARM_ARCH_8M_MAIN__ == 1)) && \ 287 (!defined (__ARM_FEATURE_CMSE) || (__ARM_FEATURE_CMSE < 3))) 289 #define __get_MSPLIM() (0U) 291 #define __get_MSPLIM() (__arm_rsr("MSPLIM")) 293 #define __get_PRIMASK() (__arm_rsr("PRIMASK")) 294 #define __get_PSP() (__arm_rsr("PSP")) 296 #if (!(defined (__ARM_ARCH_8M_MAIN__ ) && (__ARM_ARCH_8M_MAIN__ == 1)) && \ 297 (!defined (__ARM_FEATURE_CMSE) || (__ARM_FEATURE_CMSE < 3))) 299 #define __get_PSPLIM() 
/*
 * Continuation of the __ICCARM_INTRINSICS_VERSION__ == 2 path (orig lines
 * 299-439): __set_* register writers, stack-limit accessors that degrade
 * to (0U)/(void) stubs on non-v8-M-mainline targets without CMSE >= 3,
 * the TrustZone non-secure (*_NS) register accessors, and the mapping of
 * CMSIS core-instruction names (__NOP, __CLZ, __DMB, __LDREX*, __STREX*,
 * __REV*, SIMD/DSP ops, ...) directly onto __iar_builtin_* intrinsics.
 * __REVSH is the one wrapper function here, narrowing the builtin's
 * result back to int16_t.  Dropped #else/#endif lines from the extraction
 * make the conditional structure look flat -- see upstream for nesting.
 */
(0U) 301 #define __get_PSPLIM() (__arm_rsr("PSPLIM")) 304 #define __get_xPSR() (__arm_rsr("xPSR")) 306 #define __set_BASEPRI(VALUE) (__arm_wsr("BASEPRI", (VALUE))) 307 #define __set_BASEPRI_MAX(VALUE) (__arm_wsr("BASEPRI_MAX", (VALUE))) 308 #define __set_CONTROL(VALUE) (__arm_wsr("CONTROL", (VALUE))) 309 #define __set_FAULTMASK(VALUE) (__arm_wsr("FAULTMASK", (VALUE))) 310 #define __set_MSP(VALUE) (__arm_wsr("MSP", (VALUE))) 312 #if (!(defined (__ARM_ARCH_8M_MAIN__ ) && (__ARM_ARCH_8M_MAIN__ == 1)) && \ 313 (!defined (__ARM_FEATURE_CMSE) || (__ARM_FEATURE_CMSE < 3))) 315 #define __set_MSPLIM(VALUE) ((void)(VALUE)) 317 #define __set_MSPLIM(VALUE) (__arm_wsr("MSPLIM", (VALUE))) 319 #define __set_PRIMASK(VALUE) (__arm_wsr("PRIMASK", (VALUE))) 320 #define __set_PSP(VALUE) (__arm_wsr("PSP", (VALUE))) 321 #if (!(defined (__ARM_ARCH_8M_MAIN__ ) && (__ARM_ARCH_8M_MAIN__ == 1)) && \ 322 (!defined (__ARM_FEATURE_CMSE) || (__ARM_FEATURE_CMSE < 3))) 324 #define __set_PSPLIM(VALUE) ((void)(VALUE)) 326 #define __set_PSPLIM(VALUE) (__arm_wsr("PSPLIM", (VALUE))) 329 #define __TZ_get_CONTROL_NS() (__arm_rsr("CONTROL_NS")) 330 #define __TZ_set_CONTROL_NS(VALUE) (__arm_wsr("CONTROL_NS", (VALUE))) 331 #define __TZ_get_PSP_NS() (__arm_rsr("PSP_NS")) 332 #define __TZ_set_PSP_NS(VALUE) (__arm_wsr("PSP_NS", (VALUE))) 333 #define __TZ_get_MSP_NS() (__arm_rsr("MSP_NS")) 334 #define __TZ_set_MSP_NS(VALUE) (__arm_wsr("MSP_NS", (VALUE))) 335 #define __TZ_get_SP_NS() (__arm_rsr("SP_NS")) 336 #define __TZ_set_SP_NS(VALUE) (__arm_wsr("SP_NS", (VALUE))) 337 #define __TZ_get_PRIMASK_NS() (__arm_rsr("PRIMASK_NS")) 338 #define __TZ_set_PRIMASK_NS(VALUE) (__arm_wsr("PRIMASK_NS", (VALUE))) 339 #define __TZ_get_BASEPRI_NS() (__arm_rsr("BASEPRI_NS")) 340 #define __TZ_set_BASEPRI_NS(VALUE) (__arm_wsr("BASEPRI_NS", (VALUE))) 341 #define __TZ_get_FAULTMASK_NS() (__arm_rsr("FAULTMASK_NS")) 342 #define __TZ_set_FAULTMASK_NS(VALUE)(__arm_wsr("FAULTMASK_NS", (VALUE))) 344 #if (!(defined (__ARM_ARCH_8M_MAIN__ ) 
&& (__ARM_ARCH_8M_MAIN__ == 1)) && \ 345 (!defined (__ARM_FEATURE_CMSE) || (__ARM_FEATURE_CMSE < 3))) 347 #define __TZ_get_PSPLIM_NS() (0U) 348 #define __TZ_set_PSPLIM_NS(VALUE) ((void)(VALUE)) 350 #define __TZ_get_PSPLIM_NS() (__arm_rsr("PSPLIM_NS")) 351 #define __TZ_set_PSPLIM_NS(VALUE) (__arm_wsr("PSPLIM_NS", (VALUE))) 354 #define __TZ_get_MSPLIM_NS() (__arm_rsr("MSPLIM_NS")) 355 #define __TZ_set_MSPLIM_NS(VALUE) (__arm_wsr("MSPLIM_NS", (VALUE))) 357 #define __NOP __iar_builtin_no_operation 359 #define __CLZ __iar_builtin_CLZ 360 #define __CLREX __iar_builtin_CLREX 362 #define __DMB __iar_builtin_DMB 363 #define __DSB __iar_builtin_DSB 364 #define __ISB __iar_builtin_ISB 366 #define __LDREXB __iar_builtin_LDREXB 367 #define __LDREXH __iar_builtin_LDREXH 368 #define __LDREXW __iar_builtin_LDREX 370 #define __RBIT __iar_builtin_RBIT 371 #define __REV __iar_builtin_REV 372 #define __REV16 __iar_builtin_REV16 374 __IAR_FT int16_t
__REVSH(int16_t val)
376 return (int16_t) __iar_builtin_REVSH(val);
379 #define __ROR __iar_builtin_ROR 380 #define __RRX __iar_builtin_RRX 382 #define __SEV __iar_builtin_SEV 385 #define __SSAT __iar_builtin_SSAT 388 #define __STREXB __iar_builtin_STREXB 389 #define __STREXH __iar_builtin_STREXH 390 #define __STREXW __iar_builtin_STREX 393 #define __USAT __iar_builtin_USAT 396 #define __WFE __iar_builtin_WFE 397 #define __WFI __iar_builtin_WFI 400 #define __SADD8 __iar_builtin_SADD8 401 #define __QADD8 __iar_builtin_QADD8 402 #define __SHADD8 __iar_builtin_SHADD8 403 #define __UADD8 __iar_builtin_UADD8 404 #define __UQADD8 __iar_builtin_UQADD8 405 #define __UHADD8 __iar_builtin_UHADD8 406 #define __SSUB8 __iar_builtin_SSUB8 407 #define __QSUB8 __iar_builtin_QSUB8 408 #define __SHSUB8 __iar_builtin_SHSUB8 409 #define __USUB8 __iar_builtin_USUB8 410 #define __UQSUB8 __iar_builtin_UQSUB8 411 #define __UHSUB8 __iar_builtin_UHSUB8 412 #define __SADD16 __iar_builtin_SADD16 413 #define __QADD16 __iar_builtin_QADD16 414 #define __SHADD16 __iar_builtin_SHADD16 415 #define __UADD16 __iar_builtin_UADD16 416 #define __UQADD16 __iar_builtin_UQADD16 417 #define __UHADD16 __iar_builtin_UHADD16 418 #define __SSUB16 __iar_builtin_SSUB16 419 #define __QSUB16 __iar_builtin_QSUB16 420 #define __SHSUB16 __iar_builtin_SHSUB16 421 #define __USUB16 __iar_builtin_USUB16 422 #define __UQSUB16 __iar_builtin_UQSUB16 423 #define __UHSUB16 __iar_builtin_UHSUB16 424 #define __SASX __iar_builtin_SASX 425 #define __QASX __iar_builtin_QASX 426 #define __SHASX __iar_builtin_SHASX 427 #define __UASX __iar_builtin_UASX 428 #define __UQASX __iar_builtin_UQASX 429 #define __UHASX __iar_builtin_UHASX 430 #define __SSAX __iar_builtin_SSAX 431 #define __QSAX __iar_builtin_QSAX 432 #define __SHSAX __iar_builtin_SHSAX 433 #define __USAX __iar_builtin_USAX 434 #define __UQSAX __iar_builtin_UQSAX 435 #define __UHSAX __iar_builtin_UHSAX 436 #define __USAD8 __iar_builtin_USAD8 437 #define __USADA8 __iar_builtin_USADA8 438 #define __SSAT16 __iar_builtin_SSAT16 439 #define 
/*
 * End of the SIMD/DSP builtin mappings, then the legacy intrinsics path
 * (__ICCARM_INTRINSICS_VERSION__ != 2, orig lines 439-527): names that
 * IAR's classic <intrinsics.h> would clash with are first renamed to
 * __cmsis_iar_*_not_active, <intrinsics.h> is included (with an #error
 * if it was already pulled in), and software fallbacks are provided:
 *  - __CLZ: count leading zeros by scanning from a 0x80000000 mask
 *    (returns 32 for input 0);
 *  - __RBIT: bit reversal via a shift loop;
 *  - __get_APSR: inline-asm "MRS %0,APSR" into `res`.
 * NOTE(review): the loop/return bodies of __CLZ and __RBIT (orig lines
 * 504-526) were dropped by the extraction -- restore from upstream.
 */
__USAT16 __iar_builtin_USAT16 440 #define __UXTB16 __iar_builtin_UXTB16 441 #define __UXTAB16 __iar_builtin_UXTAB16 442 #define __SXTB16 __iar_builtin_SXTB16 443 #define __SXTAB16 __iar_builtin_SXTAB16 444 #define __SMUAD __iar_builtin_SMUAD 445 #define __SMUADX __iar_builtin_SMUADX 446 #define __SMMLA __iar_builtin_SMMLA 447 #define __SMLAD __iar_builtin_SMLAD 448 #define __SMLADX __iar_builtin_SMLADX 449 #define __SMLALD __iar_builtin_SMLALD 450 #define __SMLALDX __iar_builtin_SMLALDX 451 #define __SMUSD __iar_builtin_SMUSD 452 #define __SMUSDX __iar_builtin_SMUSDX 453 #define __SMLSD __iar_builtin_SMLSD 454 #define __SMLSDX __iar_builtin_SMLSDX 455 #define __SMLSLD __iar_builtin_SMLSLD 456 #define __SMLSLDX __iar_builtin_SMLSLDX 457 #define __SEL __iar_builtin_SEL 458 #define __QADD __iar_builtin_QADD 459 #define __QSUB __iar_builtin_QSUB 460 #define __PKHBT __iar_builtin_PKHBT 461 #define __PKHTB __iar_builtin_PKHTB 468 #define __CLZ __cmsis_iar_clz_not_active 469 #define __SSAT __cmsis_iar_ssat_not_active 470 #define __USAT __cmsis_iar_usat_not_active 471 #define __RBIT __cmsis_iar_rbit_not_active 472 #define __get_APSR __cmsis_iar_get_APSR_not_active 476 #if (!((defined (__FPU_PRESENT) && (__FPU_PRESENT == 1U)) && \ 477 (defined (__FPU_USED ) && (__FPU_USED == 1U)) )) 478 #define __get_FPSCR __cmsis_iar_get_FPSR_not_active 479 #define __set_FPSCR __cmsis_iar_set_FPSR_not_active 482 #ifdef __INTRINSICS_INCLUDED 483 #error intrinsics.h is already included previously! 486 #include <intrinsics.h> 496 __STATIC_INLINE uint8_t
__CLZ(uint32_t data)
498 if (data == 0U) {
return 32U; }
501 uint32_t mask = 0x80000000U;
503 while ((data & mask) == 0U)
511 __STATIC_INLINE uint32_t
__RBIT(uint32_t v)
515 for (v >>= 1U; v; v >>= 1U)
527 __asm(
"MRS %0,APSR" :
"=r" (res));
/*
 * Legacy-path continuation (orig lines 533-588): FPSCR stubs when no FPU
 * is present/used; diag_suppress Pe940/Pe177 (functions with asm-only
 * bodies / unused entities) which are re-enabled at the end of the file;
 * simple renames (__enable_irq, __disable_irq, __NOP, __get_xPSR); then
 * on non-v6-M cores:
 *  - __LDREXW/__STREXW: wrap IAR's __LDREX/__STREX, casting the pointer
 *    to `unsigned long *` as those intrinsics expect;
 *  - __RRX: inline-asm "RRX" (rotate right with extend), clobbers flags;
 *  - __set_BASEPRI_MAX: inline-asm "MSR BASEPRI_MAX,%0";
 *  - __ROR: software rotate-right.
 *    NOTE(review): `(op1 << (sizeof(op1)*8 - op2))` shifts by 32 when
 *    op2 == 0 (UB in C); upstream later guarded this case -- confirm.
 */
533 #if (!((defined (__FPU_PRESENT) && (__FPU_PRESENT == 1U)) && \ 534 (defined (__FPU_USED ) && (__FPU_USED == 1U)) )) 537 #define __get_FPSCR() (0) 538 #define __set_FPSCR(VALUE) ((void)VALUE) 541 #pragma diag_suppress=Pe940 542 #pragma diag_suppress=Pe177 544 #define __enable_irq __enable_interrupt 545 #define __disable_irq __disable_interrupt 546 #define __NOP __no_operation 548 #define __get_xPSR __get_PSR 550 #if (!defined(__ARM_ARCH_6M__) || __ARM_ARCH_6M__==0) 552 __IAR_FT uint32_t __LDREXW(uint32_t
volatile *ptr)
554 return __LDREX((
unsigned long *)ptr);
557 __IAR_FT uint32_t __STREXW(uint32_t value, uint32_t
volatile *ptr)
559 return __STREX(value, (
unsigned long *)ptr);
565 #if (__CORTEX_M >= 0x03) 567 __IAR_FT uint32_t __RRX(uint32_t value)
570 __ASM(
"RRX %0, %1" :
"=r"(result) :
"r" (value) :
"cc");
574 __IAR_FT
void __set_BASEPRI_MAX(uint32_t value)
576 __asm
volatile(
"MSR BASEPRI_MAX,%0"::
"r" (value));
580 #define __enable_fault_irq __enable_fiq 581 #define __disable_fault_irq __disable_fiq 586 __IAR_FT uint32_t
__ROR(uint32_t op1, uint32_t op2)
588 return (op1 >> op2) | (op1 << ((
sizeof(op1)*8)-op2));
/*
 * v8-M (mainline or baseline) section, legacy-intrinsics path (orig
 * lines 591-758): forced-inline accessors for the stack-limit registers
 * (MSPLIM/PSPLIM) and the TrustZone non-secure banked registers
 * (CONTROL_NS, PSP_NS, MSP_NS, SP_NS, PRIMASK_NS, BASEPRI_NS,
 * FAULTMASK_NS, PSPLIM_NS, MSPLIM_NS).  Each getter is an MRS into a
 * local `res`; each setter is an MSR from `value`.  The #if blocks in
 * front of the MSPLIM/PSPLIM bodies substitute a 0U / no-op when the
 * target lacks the registers (not v8-M mainline and CMSE < 3).
 * Function braces, `uint32_t res;` declarations, returns, and the
 * matching #else/#endif lines were dropped by the extraction.
 */
591 #if ((defined (__ARM_ARCH_8M_MAIN__ ) && (__ARM_ARCH_8M_MAIN__ == 1)) || \ 592 (defined (__ARM_ARCH_8M_BASE__ ) && (__ARM_ARCH_8M_BASE__ == 1)) ) 594 __IAR_FT uint32_t __get_MSPLIM(
void)
597 #if (!(defined (__ARM_ARCH_8M_MAIN__ ) && (__ARM_ARCH_8M_MAIN__ == 1)) && \ 598 (!defined (__ARM_FEATURE_CMSE ) || (__ARM_FEATURE_CMSE < 3))) 602 __asm
volatile(
"MRS %0,MSPLIM" :
"=r" (res));
607 __IAR_FT
void __set_MSPLIM(uint32_t value)
609 #if (!(defined (__ARM_ARCH_8M_MAIN__ ) && (__ARM_ARCH_8M_MAIN__ == 1)) && \ 610 (!defined (__ARM_FEATURE_CMSE ) || (__ARM_FEATURE_CMSE < 3))) 614 __asm
volatile(
"MSR MSPLIM,%0" ::
"r" (value));
618 __IAR_FT uint32_t __get_PSPLIM(
void)
621 #if (!(defined (__ARM_ARCH_8M_MAIN__ ) && (__ARM_ARCH_8M_MAIN__ == 1)) && \ 622 (!defined (__ARM_FEATURE_CMSE ) || (__ARM_FEATURE_CMSE < 3))) 626 __asm
volatile(
"MRS %0,PSPLIM" :
"=r" (res));
631 __IAR_FT
void __set_PSPLIM(uint32_t value)
633 #if (!(defined (__ARM_ARCH_8M_MAIN__ ) && (__ARM_ARCH_8M_MAIN__ == 1)) && \ 634 (!defined (__ARM_FEATURE_CMSE ) || (__ARM_FEATURE_CMSE < 3))) 638 __asm
volatile(
"MSR PSPLIM,%0" ::
"r" (value));
642 __IAR_FT uint32_t __TZ_get_CONTROL_NS(
void)
645 __asm
volatile(
"MRS %0,CONTROL_NS" :
"=r" (res));
649 __IAR_FT
void __TZ_set_CONTROL_NS(uint32_t value)
651 __asm
volatile(
"MSR CONTROL_NS,%0" ::
"r" (value));
654 __IAR_FT uint32_t __TZ_get_PSP_NS(
void)
657 __asm
volatile(
"MRS %0,PSP_NS" :
"=r" (res));
661 __IAR_FT
void __TZ_set_PSP_NS(uint32_t value)
663 __asm
volatile(
"MSR PSP_NS,%0" ::
"r" (value));
666 __IAR_FT uint32_t __TZ_get_MSP_NS(
void)
669 __asm
volatile(
"MRS %0,MSP_NS" :
"=r" (res));
673 __IAR_FT
void __TZ_set_MSP_NS(uint32_t value)
675 __asm
volatile(
"MSR MSP_NS,%0" ::
"r" (value));
678 __IAR_FT uint32_t __TZ_get_SP_NS(
void)
681 __asm
volatile(
"MRS %0,SP_NS" :
"=r" (res));
684 __IAR_FT
void __TZ_set_SP_NS(uint32_t value)
686 __asm
volatile(
"MSR SP_NS,%0" ::
"r" (value));
689 __IAR_FT uint32_t __TZ_get_PRIMASK_NS(
void)
692 __asm
volatile(
"MRS %0,PRIMASK_NS" :
"=r" (res));
696 __IAR_FT
void __TZ_set_PRIMASK_NS(uint32_t value)
698 __asm
volatile(
"MSR PRIMASK_NS,%0" ::
"r" (value));
701 __IAR_FT uint32_t __TZ_get_BASEPRI_NS(
void)
704 __asm
volatile(
"MRS %0,BASEPRI_NS" :
"=r" (res));
708 __IAR_FT
void __TZ_set_BASEPRI_NS(uint32_t value)
710 __asm
volatile(
"MSR BASEPRI_NS,%0" ::
"r" (value));
713 __IAR_FT uint32_t __TZ_get_FAULTMASK_NS(
void)
716 __asm
volatile(
"MRS %0,FAULTMASK_NS" :
"=r" (res));
720 __IAR_FT
void __TZ_set_FAULTMASK_NS(uint32_t value)
722 __asm
volatile(
"MSR FAULTMASK_NS,%0" ::
"r" (value));
725 __IAR_FT uint32_t __TZ_get_PSPLIM_NS(
void)
728 #if (!(defined (__ARM_ARCH_8M_MAIN__ ) && (__ARM_ARCH_8M_MAIN__ == 1)) && \ 729 (!defined (__ARM_FEATURE_CMSE ) || (__ARM_FEATURE_CMSE < 3))) 733 __asm
volatile(
"MRS %0,PSPLIM_NS" :
"=r" (res));
738 __IAR_FT
void __TZ_set_PSPLIM_NS(uint32_t value)
740 #if (!(defined (__ARM_ARCH_8M_MAIN__ ) && (__ARM_ARCH_8M_MAIN__ == 1)) && \ 741 (!defined (__ARM_FEATURE_CMSE ) || (__ARM_FEATURE_CMSE < 3))) 745 __asm
volatile(
"MSR PSPLIM_NS,%0" ::
"r" (value));
749 __IAR_FT uint32_t __TZ_get_MSPLIM_NS(
void)
752 __asm
volatile(
"MRS %0,MSPLIM_NS" :
"=r" (res));
756 __IAR_FT
void __TZ_set_MSPLIM_NS(uint32_t value)
758 __asm
volatile(
"MSR MSPLIM_NS,%0" ::
"r" (value));
/*
 * Software saturation fallbacks (orig lines 765-800, used when the CPU
 * lacks the SSAT/USAT instructions -- the __IAR_M0_FAMILY case):
 *  - __BKPT(value): inline-asm breakpoint with an immediate operand;
 *  - __SSAT(val, sat): clamp a signed value to [-2^(sat-1), 2^(sat-1)-1]
 *    for sat in 1..32; `max`/`min` are the saturation bounds;
 *  - __USAT(val, sat): clamp to [0, 2^sat - 1], returning uint32_t.
 * NOTE(review): the actual clamping branches and returns (orig lines
 * 774-799) were dropped by the extraction -- restore from upstream.
 */
765 #define __BKPT(value) __asm volatile ("BKPT %0" : : "i"(value)) 768 __STATIC_INLINE int32_t
__SSAT(int32_t val, uint32_t sat)
770 if ((sat >= 1U) && (sat <= 32U))
772 const int32_t max = (int32_t)((1U << (sat - 1U)) - 1U);
773 const int32_t min = -1 - max ;
786 __STATIC_INLINE uint32_t
__USAT(int32_t val, uint32_t sat)
790 const uint32_t max = ((1U << sat) - 1U);
791 if (val > (int32_t)max)
800 return (uint32_t)val;
/*
 * Unprivileged load/store intrinsics, Cortex-M3 and above (orig lines
 * 804-839): __LDRBT/__LDRHT/__LDRT read a byte/halfword/word through the
 * LDR*T instructions (which use unprivileged access permissions), and
 * __STRBT/__STRHT/__STRT are the matching stores.  All use inline asm
 * with a "memory" clobber; loads place the result in a local `res`
 * (declaration and braces dropped by the extraction) and narrow it to
 * the access width on return.
 */
804 #if (__CORTEX_M >= 0x03) 806 __IAR_FT uint8_t __LDRBT(
volatile uint8_t *addr)
809 __ASM(
"LDRBT %0, [%1]" :
"=r" (res) :
"r" (addr) :
"memory");
810 return ((uint8_t)res);
813 __IAR_FT uint16_t __LDRHT(
volatile uint16_t *addr)
816 __ASM(
"LDRHT %0, [%1]" :
"=r" (res) :
"r" (addr) :
"memory");
817 return ((uint16_t)res);
820 __IAR_FT uint32_t __LDRT(
volatile uint32_t *addr)
823 __ASM(
"LDRT %0, [%1]" :
"=r" (res) :
"r" (addr) :
"memory");
827 __IAR_FT
void __STRBT(uint8_t value,
volatile uint8_t *addr)
829 __ASM(
"STRBT %1, [%0]" : :
"r" (addr),
"r" ((uint32_t)value) :
"memory");
832 __IAR_FT
void __STRHT(uint16_t value,
volatile uint16_t *addr)
834 __ASM(
"STRHT %1, [%0]" : :
"r" (addr),
"r" ((uint32_t)value) :
"memory");
837 __IAR_FT
void __STRT(uint32_t value,
volatile uint32_t *addr)
839 __ASM(
"STRT %1, [%0]" : :
"r" (addr),
"r" (value) :
"memory");
/*
 * Armv8-M load-acquire / store-release intrinsics (orig lines 844-922,
 * mainline or baseline only): __LDAB/__LDAH/__LDA perform acquire loads,
 * __STLB/__STLH/__STL release stores, and __LDAEXB/__LDAEXH/__LDAEX /
 * __STLEXB/__STLEXH/__STLEX the exclusive variants (the STLEX* forms
 * return the success flag in `res`: 0 = stored, 1 = failed -- per the
 * instruction definition; confirm against the Armv8-M ARM).  All use
 * inline asm with a "memory" clobber; local `res` declarations, braces
 * and returns were dropped by the extraction.
 * Orig lines 929-933 are the file epilogue: #undef __IAR_M0_FAMILY and
 * restoration of the Pe940/Pe177 diagnostics suppressed earlier.
 * NOTE(review): everything on the last line after "diag_default=Pe177"
 * is Doxygen tooltip text fused in by the scrape, not header content.
 */
844 #if ((defined (__ARM_ARCH_8M_MAIN__ ) && (__ARM_ARCH_8M_MAIN__ == 1)) || \ 845 (defined (__ARM_ARCH_8M_BASE__ ) && (__ARM_ARCH_8M_BASE__ == 1)) ) 848 __IAR_FT uint8_t __LDAB(
volatile uint8_t *ptr)
851 __ASM
volatile (
"LDAB %0, [%1]" :
"=r" (res) :
"r" (ptr) :
"memory");
852 return ((uint8_t)res);
855 __IAR_FT uint16_t __LDAH(
volatile uint16_t *ptr)
858 __ASM
volatile (
"LDAH %0, [%1]" :
"=r" (res) :
"r" (ptr) :
"memory");
859 return ((uint16_t)res);
862 __IAR_FT uint32_t __LDA(
volatile uint32_t *ptr)
865 __ASM
volatile (
"LDA %0, [%1]" :
"=r" (res) :
"r" (ptr) :
"memory");
869 __IAR_FT
void __STLB(uint8_t value,
volatile uint8_t *ptr)
871 __ASM
volatile (
"STLB %1, [%0]" ::
"r" (ptr),
"r" (value) :
"memory");
874 __IAR_FT
void __STLH(uint16_t value,
volatile uint16_t *ptr)
876 __ASM
volatile (
"STLH %1, [%0]" ::
"r" (ptr),
"r" (value) :
"memory");
879 __IAR_FT
void __STL(uint32_t value,
volatile uint32_t *ptr)
881 __ASM
volatile (
"STL %1, [%0]" ::
"r" (ptr),
"r" (value) :
"memory");
884 __IAR_FT uint8_t __LDAEXB(
volatile uint8_t *ptr)
887 __ASM
volatile (
"LDAEXB %0, [%1]" :
"=r" (res) :
"r" (ptr) :
"memory");
888 return ((uint8_t)res);
891 __IAR_FT uint16_t __LDAEXH(
volatile uint16_t *ptr)
894 __ASM
volatile (
"LDAEXH %0, [%1]" :
"=r" (res) :
"r" (ptr) :
"memory");
895 return ((uint16_t)res);
898 __IAR_FT uint32_t __LDAEX(
volatile uint32_t *ptr)
901 __ASM
volatile (
"LDAEX %0, [%1]" :
"=r" (res) :
"r" (ptr) :
"memory");
905 __IAR_FT uint32_t __STLEXB(uint8_t value,
volatile uint8_t *ptr)
908 __ASM
volatile (
"STLEXB %0, %2, [%1]" :
"=r" (res) :
"r" (ptr),
"r" (value) :
"memory");
912 __IAR_FT uint32_t __STLEXH(uint16_t value,
volatile uint16_t *ptr)
915 __ASM
volatile (
"STLEXH %0, %2, [%1]" :
"=r" (res) :
"r" (ptr),
"r" (value) :
"memory");
919 __IAR_FT uint32_t __STLEX(uint32_t value,
volatile uint32_t *ptr)
922 __ASM
volatile (
"STLEX %0, %2, [%1]" :
"=r" (res) :
"r" (ptr),
"r" (value) :
"memory");
929 #undef __IAR_M0_FAMILY 932 #pragma diag_default=Pe940 933 #pragma diag_default=Pe177 __STATIC_FORCEINLINE int32_t __SSAT(int32_t val, uint32_t sat)
Signed Saturate.
Definition: cmsis_armclang.h:1121
__STATIC_FORCEINLINE uint32_t __RBIT(uint32_t value)
Reverse bit order of value.
Definition: cmsis_gcc.h:981
__STATIC_INLINE uint32_t __get_APSR(void)
Get APSR Register.
Definition: cmsis_armcc.h:166
__STATIC_FORCEINLINE uint32_t __USAT(int32_t val, uint32_t sat)
Unsigned Saturate.
Definition: cmsis_armclang.h:1146
__STATIC_FORCEINLINE uint32_t __ROR(uint32_t op1, uint32_t op2)
Rotate Right in unsigned value (32 bit)
Definition: cmsis_armclang.h:876
__STATIC_FORCEINLINE int16_t __REVSH(int16_t value)
Reverse byte order (16 bit)
Definition: cmsis_gcc.h:934
#define __CLZ
Count leading zeros.
Definition: cmsis_armcc.h:540
Definition: cmsis_iccarm.h:216