/* CMSIS compiler abstraction for Arm Compiler 6 (armclang).
   NOTE(review): this view is a lossy extraction — each line carries the
   original file's line number fused to its start, and intermediate lines
   (several #endif / struct definitions) are not visible here. */
27#ifndef __CMSIS_ARMCLANG_H
28#define __CMSIS_ARMCLANG_H
30#pragma clang system_header
/* Compiler-specific keyword/attribute mappings used by CMSIS-Core. */
37 #define __INLINE __inline
39#ifndef __STATIC_INLINE
40 #define __STATIC_INLINE static __inline
42#ifndef __STATIC_FORCEINLINE
43 #define __STATIC_FORCEINLINE __attribute__((always_inline)) static __inline
46 #define __NO_RETURN __attribute__((__noreturn__))
49 #define __USED __attribute__((used))
52 #define __WEAK __attribute__((weak))
/* aligned(1) permits placement at any byte address, not just natural alignment. */
55 #define __PACKED __attribute__((packed, aligned(1)))
57#ifndef __PACKED_STRUCT
58 #define __PACKED_STRUCT struct __attribute__((packed, aligned(1)))
61 #define __PACKED_UNION union __attribute__((packed, aligned(1)))
63#ifndef __UNALIGNED_UINT32
64 #pragma clang diagnostic push
65 #pragma clang diagnostic ignored "-Wpacked"
/* NOTE(review): the packed `struct T_UINT32 { uint32_t v; };` definition sits
   on a line not visible in this view (between the pragmas). */
68 #pragma clang diagnostic pop
/* Read a 32-bit value from a possibly unaligned address (deprecated API). */
69 #define __UNALIGNED_UINT32(x) (((struct T_UINT32 *)(x))->v)
/* Unaligned 16-/32-bit read/write helpers.  Each uses a packed one-member
   struct so the compiler emits byte-safe (or unaligned-capable) accesses
   instead of assuming natural alignment. */
71#ifndef __UNALIGNED_UINT16_WRITE
72 #pragma clang diagnostic push
73 #pragma clang diagnostic ignored "-Wpacked"
75 __PACKED_STRUCT T_UINT16_WRITE { uint16_t v; };
76 #pragma clang diagnostic pop
/* Write a 16-bit value to a possibly unaligned address. */
77 #define __UNALIGNED_UINT16_WRITE(addr, val) (void)((((struct T_UINT16_WRITE *)(void *)(addr))->v) = (val))
79#ifndef __UNALIGNED_UINT16_READ
80 #pragma clang diagnostic push
81 #pragma clang diagnostic ignored "-Wpacked"
83 __PACKED_STRUCT T_UINT16_READ { uint16_t v; };
84 #pragma clang diagnostic pop
/* Read a 16-bit value from a possibly unaligned address. */
85 #define __UNALIGNED_UINT16_READ(addr) (((const struct T_UINT16_READ *)(const void *)(addr))->v)
87#ifndef __UNALIGNED_UINT32_WRITE
88 #pragma clang diagnostic push
89 #pragma clang diagnostic ignored "-Wpacked"
91 __PACKED_STRUCT T_UINT32_WRITE { uint32_t v; };
92 #pragma clang diagnostic pop
/* Write a 32-bit value to a possibly unaligned address. */
93 #define __UNALIGNED_UINT32_WRITE(addr, val) (void)((((struct T_UINT32_WRITE *)(void *)(addr))->v) = (val))
95#ifndef __UNALIGNED_UINT32_READ
96 #pragma clang diagnostic push
97 #pragma clang diagnostic ignored "-Wpacked"
99 __PACKED_STRUCT T_UINT32_READ { uint32_t v; };
100 #pragma clang diagnostic pop
/* Read a 32-bit value from a possibly unaligned address. */
101 #define __UNALIGNED_UINT32_READ(addr) (((const struct T_UINT32_READ *)(const void *)(addr))->v)
/* Miscellaneous attributes, linker-symbol mappings for the Arm scatter-file
   startup model, and TrustZone stack-seal configuration. */
104 #define __ALIGNED(x) __attribute__((aligned(x)))
107 #define __RESTRICT __restrict
109#ifndef __COMPILER_BARRIER
/* Compiler-only barrier: prevents reordering of memory accesses across this
   point; emits no instruction. */
110 #define __COMPILER_BARRIER() __ASM volatile("":::"memory")
/* Place object in uninitialized (NOLOAD) memory. */
113 #define __NO_INIT __attribute__ ((section (".bss.noinit")))
116 #define __ALIAS(x) __attribute__ ((alias(x)))
121#ifndef __PROGRAM_START
/* armlink entry point that performs scatter-loading before calling main(). */
122#define __PROGRAM_START __main
/* Linker-generated symbols for initial stack pointer and stack limit. */
126#define __INITIAL_SP Image$$ARM_LIB_STACK$$ZI$$Limit
130#define __STACK_LIMIT Image$$ARM_LIB_STACK$$ZI$$Base
133#ifndef __VECTOR_TABLE
134#define __VECTOR_TABLE __Vectors
137#ifndef __VECTOR_TABLE_ATTRIBUTE
138#define __VECTOR_TABLE_ATTRIBUTE __attribute__((used, section("RESET")))
/* TrustZone (Armv8-M Security Extension) stack sealing, see Arm note on
   secure stack sealing: an 8-byte seal value placed at the secure stack top. */
141#if defined (__ARM_FEATURE_CMSE) && (__ARM_FEATURE_CMSE == 3U)
143#define __STACK_SEAL Image$$STACKSEAL$$ZI$$Base
146#ifndef __TZ_STACK_SEAL_SIZE
147#define __TZ_STACK_SEAL_SIZE 8U
150#ifndef __TZ_STACK_SEAL_VALUE
151#define __TZ_STACK_SEAL_VALUE 0xFEF5EDA5FEF5EDA5ULL
/* Write the 64-bit TrustZone stack-seal value at the given secure stack top.
   stackTop must be 8-byte aligned for the 64-bit store.
   NOTE(review): the function's closing brace is on a line not visible in
   this extracted view. */
155__STATIC_FORCEINLINE
void __TZ_set_STACKSEAL_S (uint32_t* stackTop) {
156 *((uint64_t *)stackTop) = __TZ_STACK_SEAL_VALUE;
/* Inline-asm register constraints: Thumb-1 can only address low registers
   (constraint "l"); everything else may use any core register ("r"). */
170#if defined (__thumb__) && !defined (__thumb2__)
171#define __CMSIS_GCC_OUT_REG(r) "=l" (r)
172#define __CMSIS_GCC_USE_REG(r) "l" (r)
174#define __CMSIS_GCC_OUT_REG(r) "=r" (r)
175#define __CMSIS_GCC_USE_REG(r) "r" (r)
/* CPU hint and barrier instructions via Clang's ARM builtins.
   The 0xF option selects the full-system (SY) barrier domain. */
182#define __NOP __builtin_arm_nop
188#define __WFI __builtin_arm_wfi
196#define __WFE __builtin_arm_wfe
203#define __SEV __builtin_arm_sev
212#define __ISB() __builtin_arm_isb(0xF)
219#define __DSB() __builtin_arm_dsb(0xF)
227#define __DMB() __builtin_arm_dmb(0xF)
/* Byte-reverse helpers: full word, halfword pairs, and signed halfword. */
236#define __REV(value) __builtin_bswap32(value)
245#define __REV16(value) __ROR(__REV(value), 16)
254#define __REVSH(value) (int16_t)__builtin_bswap16(value)
#ifndef __STATIC_FORCEINLINE
#define __STATIC_FORCEINLINE __attribute__((always_inline)) static __inline
#endif
/**
  \brief   Rotate Right in unsigned value (32 bit)
  \details Rotate Right (immediate) provides the value of the contents of a
           register rotated by a variable number of bits.
  \param [in]    op1  Value to rotate
  \param [in]    op2  Number of bits to rotate (any value; reduced modulo 32)
  \return               Rotated value
 */
__STATIC_FORCEINLINE uint32_t __ROR(uint32_t op1, uint32_t op2)
{
  /* Reduce the rotate count modulo the word width first: without this,
     op2 == 0 (or any multiple of 32) would evaluate `op1 << 32U`, which is
     undefined behaviour in C.  A rotate by 0 is the identity. */
  op2 %= 32U;
  if (op2 == 0U)
  {
    return op1;
  }
  return (op1 >> op2) | (op1 << (32U - op2));
}
/* Breakpoint with an 8-bit immediate, and bit-reversal via Clang builtin. */
282#define __BKPT(value) __ASM volatile ("bkpt "#value)
291#define __RBIT __builtin_arm_rbit
#ifndef __STATIC_FORCEINLINE
#define __STATIC_FORCEINLINE __attribute__((always_inline)) static __inline
#endif
/**
  \brief   Count leading zeros
  \details Counts the number of leading zero bits of a 32-bit value.
  \param [in]  value  Value to count the leading zeros of
  \return             Number of leading zeros (32 for an input of 0)
 */
__STATIC_FORCEINLINE uint8_t __CLZ(uint32_t value)
{
  /* Even though the ARM CLZ instruction is defined to return 32 for an input
     of 0, __builtin_clz(0) is formally undefined behaviour per the GCC/Clang
     builtin specification, so the zero case must be handled explicitly. */
  if (value == 0U)
  {
    return 32U;
  }
  return (uint8_t)__builtin_clz(value);
}
/* Exclusive-access (LDREX/STREX) intrinsics — available on M-profile
   architectures with the exclusives: v7-M, v7E-M, v8-M mainline/baseline. */
318#if ((defined (__ARM_ARCH_7M__ ) && (__ARM_ARCH_7M__ == 1)) || \
319 (defined (__ARM_ARCH_7EM__ ) && (__ARM_ARCH_7EM__ == 1)) || \
320 (defined (__ARM_ARCH_8M_MAIN__ ) && (__ARM_ARCH_8M_MAIN__ == 1)) || \
321 (defined (__ARM_ARCH_8M_BASE__ ) && (__ARM_ARCH_8M_BASE__ == 1)) )
/* The cast selects the access width; the builtin infers the instruction
   (ldrexb/ldrexh/ldrex) from the pointee type of its argument. */
328#define __LDREXB (uint8_t)__builtin_arm_ldrex
337#define __LDREXH (uint16_t)__builtin_arm_ldrex
346#define __LDREXW (uint32_t)__builtin_arm_ldrex
/* STREX returns 0 on success, 1 if the exclusive monitor was lost. */
357#define __STREXB (uint32_t)__builtin_arm_strex
368#define __STREXH (uint32_t)__builtin_arm_strex
379#define __STREXW (uint32_t)__builtin_arm_strex
386#define __CLREX __builtin_arm_clrex
/* Saturating-arithmetic builtins — mainline profiles only (SSAT/USAT
   instructions are not in v8-M baseline). */
394#if ((defined (__ARM_ARCH_7M__ ) && (__ARM_ARCH_7M__ == 1)) || \
395 (defined (__ARM_ARCH_7EM__ ) && (__ARM_ARCH_7EM__ == 1)) || \
396 (defined (__ARM_ARCH_8M_MAIN__ ) && (__ARM_ARCH_8M_MAIN__ == 1)) )
405#define __SSAT __builtin_arm_ssat
415#define __USAT __builtin_arm_usat
/* Rotate right with extend: shifts right one bit, moving carry into bit 31.
   NOTE(review): the `uint32_t result;` declaration, braces, and return
   statement are on lines not visible in this extracted view. */
425__STATIC_FORCEINLINE uint32_t __RRX(uint32_t value)
429 __ASM
volatile (
"rrx %0, %1" : __CMSIS_GCC_OUT_REG (result) : __CMSIS_GCC_USE_REG (value) );
/* LDRBT: unprivileged byte load — accesses memory with user-mode privilege
   even when executed in a privileged mode.
   NOTE(review): the `uint32_t result;` declaration and braces are on lines
   not visible in this extracted view. */
440__STATIC_FORCEINLINE uint8_t __LDRBT(
volatile uint8_t *ptr)
444 __ASM
volatile (
"ldrbt %0, %1" :
"=r" (result) :
"Q" (*ptr) );
445 return ((uint8_t) result);
/* LDRHT: unprivileged halfword load (see __LDRBT). */
455__STATIC_FORCEINLINE uint16_t __LDRHT(
volatile uint16_t *ptr)
459 __ASM
volatile (
"ldrht %0, %1" :
"=r" (result) :
"Q" (*ptr) );
460 return ((uint16_t) result);
/* LDRT: unprivileged word load (see __LDRBT).
   NOTE(review): the return statement is on a line not visible here. */
470__STATIC_FORCEINLINE uint32_t __LDRT(
volatile uint32_t *ptr)
474 __ASM
volatile (
"ldrt %0, %1" :
"=r" (result) :
"Q" (*ptr) );
/* STRBT: unprivileged byte store — writes with user-mode privilege. */
485__STATIC_FORCEINLINE
void __STRBT(uint8_t value,
volatile uint8_t *ptr)
487 __ASM
volatile (
"strbt %1, %0" :
"=Q" (*ptr) :
"r" ((uint32_t)value) );
/* STRHT: unprivileged halfword store (see __STRBT). */
497__STATIC_FORCEINLINE
void __STRHT(uint16_t value,
volatile uint16_t *ptr)
499 __ASM
volatile (
"strht %1, %0" :
"=Q" (*ptr) :
"r" ((uint32_t)value) );
/* STRT: unprivileged word store (see __STRBT). */
509__STATIC_FORCEINLINE
void __STRT(uint32_t value,
volatile uint32_t *ptr)
511 __ASM
volatile (
"strt %1, %0" :
"=Q" (*ptr) :
"r" (value) );
#ifndef __STATIC_FORCEINLINE
#define __STATIC_FORCEINLINE __attribute__((always_inline)) static __inline
#endif
/**
  \brief   Signed Saturate (software fallback for cores without the SSAT instruction)
  \details Saturates a signed value to the range representable in `sat` bits,
           i.e. [-2^(sat-1), 2^(sat-1)-1].
  \param [in]  val  Value to be saturated
  \param [in]  sat  Bit position to saturate to (1..32); out-of-range `sat`
                    leaves the value unchanged
  \return           Saturated value
 */
__STATIC_FORCEINLINE int32_t __SSAT(int32_t val, uint32_t sat)
{
  if ((sat >= 1U) && (sat <= 32U))
  {
    /* For sat == 32 the shift is (1U << 31), still well-defined unsigned. */
    const int32_t max = (int32_t)((1U << (sat - 1U)) - 1U);
    const int32_t min = -1 - max ;
    if (val > max)
    {
      return max;
    }
    else if (val < min)
    {
      return min;
    }
  }
  return val;
}
#ifndef __STATIC_FORCEINLINE
#define __STATIC_FORCEINLINE __attribute__((always_inline)) static __inline
#endif
/**
  \brief   Unsigned Saturate (software fallback for cores without the USAT instruction)
  \details Saturates a signed value to the unsigned range representable in
           `sat` bits, i.e. [0, 2^sat - 1].
  \param [in]  val  Value to be saturated
  \param [in]  sat  Bit width to saturate to (0..31); larger `sat` returns
                    the value unchanged (cast to unsigned)
  \return           Saturated unsigned value
 */
__STATIC_FORCEINLINE uint32_t __USAT(int32_t val, uint32_t sat)
{
  /* Guard is required: for sat >= 32 the expression (1U << sat) would be
     undefined behaviour, and no clamping is needed anyway. */
  if (sat <= 31U)
  {
    const uint32_t max = ((1U << sat) - 1U);
    if (val > (int32_t)max)
    {
      return max;
    }
    else if (val < 0)
    {
      return 0U;
    }
  }
  return (uint32_t)val;
}
/* Armv8-M load-acquire / store-release intrinsics.  Each asm body clobbers
   "memory" so the compiler does not reorder accesses across the barrier
   semantics of the instruction.
   NOTE(review): function braces and `uint32_t result;` declarations sit on
   lines not visible in this extracted view. */
572#if ((defined (__ARM_ARCH_8M_MAIN__ ) && (__ARM_ARCH_8M_MAIN__ == 1)) || \
573 (defined (__ARM_ARCH_8M_BASE__ ) && (__ARM_ARCH_8M_BASE__ == 1)) )
/* Load-acquire byte. */
580__STATIC_FORCEINLINE uint8_t __LDAB(
volatile uint8_t *ptr)
584 __ASM
volatile (
"ldab %0, %1" :
"=r" (result) :
"Q" (*ptr) :
"memory" );
585 return ((uint8_t) result);
/* Load-acquire halfword. */
595__STATIC_FORCEINLINE uint16_t __LDAH(
volatile uint16_t *ptr)
599 __ASM
volatile (
"ldah %0, %1" :
"=r" (result) :
"Q" (*ptr) :
"memory" );
600 return ((uint16_t) result);
/* Load-acquire word. */
610__STATIC_FORCEINLINE uint32_t __LDA(
volatile uint32_t *ptr)
614 __ASM
volatile (
"lda %0, %1" :
"=r" (result) :
"Q" (*ptr) :
"memory" );
/* Store-release byte. */
625__STATIC_FORCEINLINE
void __STLB(uint8_t value,
volatile uint8_t *ptr)
627 __ASM
volatile (
"stlb %1, %0" :
"=Q" (*ptr) :
"r" ((uint32_t)value) :
"memory" );
/* Store-release halfword. */
637__STATIC_FORCEINLINE
void __STLH(uint16_t value,
volatile uint16_t *ptr)
639 __ASM
volatile (
"stlh %1, %0" :
"=Q" (*ptr) :
"r" ((uint32_t)value) :
"memory" );
/* Store-release word. */
649__STATIC_FORCEINLINE
void __STL(uint32_t value,
volatile uint32_t *ptr)
651 __ASM
volatile (
"stl %1, %0" :
"=Q" (*ptr) :
"r" ((uint32_t)value) :
"memory" );
/* Load-acquire / store-release exclusives via Clang builtins; the cast
   selects the access width from the pointee type. */
661#define __LDAEXB (uint8_t)__builtin_arm_ldaex
670#define __LDAEXH (uint16_t)__builtin_arm_ldaex
679#define __LDAEX (uint32_t)__builtin_arm_ldaex
690#define __STLEXB (uint32_t)__builtin_arm_stlex
701#define __STLEXH (uint32_t)__builtin_arm_stlex
712#define __STLEX (uint32_t)__builtin_arm_stlex
/* Core interrupt control and special-register accessors.
   NOTE(review): the enclosing function headers (__enable_irq, __disable_irq,
   __get_CONTROL, __set_CONTROL, __get_IPSR, __get_APSR, __get_xPSR), their
   braces, and `uint32_t result;` declarations are on lines not visible in
   this extracted view — only the asm bodies survived extraction. */
/* __enable_irq body: CPSIE i — clear PRIMASK, enabling configurable IRQs. */
731#ifndef __ARM_COMPAT_H
734 __ASM
volatile (
"cpsie i" : : :
"memory");
/* __disable_irq body: CPSID i — set PRIMASK, masking configurable IRQs. */
744#ifndef __ARM_COMPAT_H
747 __ASM
volatile (
"cpsid i" : : :
"memory");
/* __get_CONTROL body: read the CONTROL special register. */
761 __ASM
volatile (
"MRS %0, control" :
"=r" (result) );
/* Non-secure CONTROL read, available when compiled for the Secure state. */
766#if (defined (__ARM_FEATURE_CMSE ) && (__ARM_FEATURE_CMSE == 3))
772__STATIC_FORCEINLINE uint32_t __TZ_get_CONTROL_NS(
void)
776 __ASM
volatile (
"MRS %0, control_ns" :
"=r" (result) );
/* __set_CONTROL body: write CONTROL; "memory" clobber because this can
   switch the active stack pointer. */
789 __ASM
volatile (
"MSR control, %0" : :
"r" (control) :
"memory");
794#if (defined (__ARM_FEATURE_CMSE ) && (__ARM_FEATURE_CMSE == 3))
800__STATIC_FORCEINLINE
void __TZ_set_CONTROL_NS(uint32_t control)
802 __ASM
volatile (
"MSR control_ns, %0" : :
"r" (control) :
"memory");
/* __get_IPSR body: current exception number. */
817 __ASM
volatile (
"MRS %0, ipsr" :
"=r" (result) );
/* __get_APSR body: application program status register (flags). */
831 __ASM
volatile (
"MRS %0, apsr" :
"=r" (result) );
/* __get_xPSR body: combined program status register. */
845 __ASM
volatile (
"MRS %0, xpsr" :
"=r" (result) );
/* Process/Main stack pointer accessors, plus their Non-secure aliases
   (the *_NS forms are only available from the Secure state, CMSE == 3).
   NOTE(review): braces, `uint32_t result;` declarations, and return
   statements are on lines not visible in this extracted view. */
/* Read the Process Stack Pointer. */
855__STATIC_FORCEINLINE uint32_t
__get_PSP(
void)
859 __ASM
volatile (
"MRS %0, psp" :
"=r" (result) );
864#if (defined (__ARM_FEATURE_CMSE ) && (__ARM_FEATURE_CMSE == 3))
870__STATIC_FORCEINLINE uint32_t __TZ_get_PSP_NS(
void)
874 __ASM
volatile (
"MRS %0, psp_ns" :
"=r" (result) );
/* Write the Process Stack Pointer. */
885__STATIC_FORCEINLINE
void __set_PSP(uint32_t topOfProcStack)
887 __ASM
volatile (
"MSR psp, %0" : :
"r" (topOfProcStack) : );
891#if (defined (__ARM_FEATURE_CMSE ) && (__ARM_FEATURE_CMSE == 3))
897__STATIC_FORCEINLINE
void __TZ_set_PSP_NS(uint32_t topOfProcStack)
899 __ASM
volatile (
"MSR psp_ns, %0" : :
"r" (topOfProcStack) : );
/* Read the Main Stack Pointer. */
909__STATIC_FORCEINLINE uint32_t
__get_MSP(
void)
913 __ASM
volatile (
"MRS %0, msp" :
"=r" (result) );
918#if (defined (__ARM_FEATURE_CMSE ) && (__ARM_FEATURE_CMSE == 3))
924__STATIC_FORCEINLINE uint32_t __TZ_get_MSP_NS(
void)
928 __ASM
volatile (
"MRS %0, msp_ns" :
"=r" (result) );
/* Write the Main Stack Pointer. */
939__STATIC_FORCEINLINE
void __set_MSP(uint32_t topOfMainStack)
941 __ASM
volatile (
"MSR msp, %0" : :
"r" (topOfMainStack) : );
945#if (defined (__ARM_FEATURE_CMSE ) && (__ARM_FEATURE_CMSE == 3))
951__STATIC_FORCEINLINE
void __TZ_set_MSP_NS(uint32_t topOfMainStack)
953 __ASM
volatile (
"MSR msp_ns, %0" : :
"r" (topOfMainStack) : );
/* Non-secure current SP (sp_ns) accessors — Secure state only. */
958#if (defined (__ARM_FEATURE_CMSE ) && (__ARM_FEATURE_CMSE == 3))
964__STATIC_FORCEINLINE uint32_t __TZ_get_SP_NS(
void)
968 __ASM
volatile (
"MRS %0, sp_ns" :
"=r" (result) );
978__STATIC_FORCEINLINE
void __TZ_set_SP_NS(uint32_t topOfStack)
980 __ASM
volatile (
"MSR sp_ns, %0" : :
"r" (topOfStack) : );
/* PRIMASK accessors (interrupt priority mask), plus Non-secure aliases.
   NOTE(review): the __get_PRIMASK / __set_PRIMASK function headers, braces,
   and `result` declarations are on lines not visible in this view. */
/* __get_PRIMASK body. */
994 __ASM
volatile (
"MRS %0, primask" :
"=r" (result) );
999#if (defined (__ARM_FEATURE_CMSE ) && (__ARM_FEATURE_CMSE == 3))
1005__STATIC_FORCEINLINE uint32_t __TZ_get_PRIMASK_NS(
void)
1009 __ASM
volatile (
"MRS %0, primask_ns" :
"=r" (result) );
/* __set_PRIMASK body: "memory" clobber orders accesses around masking. */
1022 __ASM
volatile (
"MSR primask, %0" : :
"r" (priMask) :
"memory");
1026#if (defined (__ARM_FEATURE_CMSE ) && (__ARM_FEATURE_CMSE == 3))
1032__STATIC_FORCEINLINE
void __TZ_set_PRIMASK_NS(uint32_t priMask)
1034 __ASM
volatile (
"MSR primask_ns, %0" : :
"r" (priMask) :
"memory");
/* FAULTMASK-gated IRQ control and BASEPRI accessors — mainline profiles only
   (these registers do not exist on v6-M / v8-M baseline).
   NOTE(review): braces and `result` declarations are on lines not visible
   in this extracted view. */
1039#if ((defined (__ARM_ARCH_7M__ ) && (__ARM_ARCH_7M__ == 1)) || \
1040 (defined (__ARM_ARCH_7EM__ ) && (__ARM_ARCH_7EM__ == 1)) || \
1041 (defined (__ARM_ARCH_8M_MAIN__ ) && (__ARM_ARCH_8M_MAIN__ == 1)) )
/* CPSIE f — clear FAULTMASK, enabling fault exceptions. */
1047__STATIC_FORCEINLINE
void __enable_fault_irq(
void)
1049 __ASM
volatile (
"cpsie f" : : :
"memory");
/* CPSID f — set FAULTMASK, masking all exceptions except NMI. */
1058__STATIC_FORCEINLINE
void __disable_fault_irq(
void)
1060 __ASM
volatile (
"cpsid f" : : :
"memory");
/* Read BASEPRI (base priority mask). */
1069__STATIC_FORCEINLINE uint32_t __get_BASEPRI(
void)
1073 __ASM
volatile (
"MRS %0, basepri" :
"=r" (result) );
1078#if (defined (__ARM_FEATURE_CMSE ) && (__ARM_FEATURE_CMSE == 3))
1084__STATIC_FORCEINLINE uint32_t __TZ_get_BASEPRI_NS(
void)
1088 __ASM
volatile (
"MRS %0, basepri_ns" :
"=r" (result) );
/* Write BASEPRI. */
1099__STATIC_FORCEINLINE
void __set_BASEPRI(uint32_t basePri)
1101 __ASM
volatile (
"MSR basepri, %0" : :
"r" (basePri) :
"memory");
1105#if (defined (__ARM_FEATURE_CMSE ) && (__ARM_FEATURE_CMSE == 3))
1111__STATIC_FORCEINLINE
void __TZ_set_BASEPRI_NS(uint32_t basePri)
1113 __ASM
volatile (
"MSR basepri_ns, %0" : :
"r" (basePri) :
"memory");
/* basepri_max: only raises (never lowers) the masking priority level. */
1124__STATIC_FORCEINLINE
void __set_BASEPRI_MAX(uint32_t basePri)
1126 __ASM
volatile (
"MSR basepri_max, %0" : :
"r" (basePri) :
"memory");
/* FAULTMASK accessors, plus Non-secure aliases (Secure state only).
   NOTE(review): braces and `result` declarations not visible in this view. */
1135__STATIC_FORCEINLINE uint32_t __get_FAULTMASK(
void)
1139 __ASM
volatile (
"MRS %0, faultmask" :
"=r" (result) );
1144#if (defined (__ARM_FEATURE_CMSE ) && (__ARM_FEATURE_CMSE == 3))
1150__STATIC_FORCEINLINE uint32_t __TZ_get_FAULTMASK_NS(
void)
1154 __ASM
volatile (
"MRS %0, faultmask_ns" :
"=r" (result) );
1165__STATIC_FORCEINLINE
void __set_FAULTMASK(uint32_t faultMask)
1167 __ASM
volatile (
"MSR faultmask, %0" : :
"r" (faultMask) :
"memory");
1171#if (defined (__ARM_FEATURE_CMSE ) && (__ARM_FEATURE_CMSE == 3))
1177__STATIC_FORCEINLINE
void __TZ_set_FAULTMASK_NS(uint32_t faultMask)
1179 __ASM
volatile (
"MSR faultmask_ns, %0" : :
"r" (faultMask) :
"memory");
/* Stack-limit register (PSPLIM/MSPLIM) accessors — Armv8-M only.
   On v8-M baseline the Non-secure limit registers are RAZ/WI when the
   Security Extension is absent; hence the arch/CMSE preprocessor guards
   that stub out the access (hidden lines return 0 / ignore the argument).
   NOTE(review): braces, `result` declarations, returns, and several #else /
   #endif lines are not visible in this extracted view. */
1188#if ((defined (__ARM_ARCH_8M_MAIN__ ) && (__ARM_ARCH_8M_MAIN__ == 1)) || \
1189 (defined (__ARM_ARCH_8M_BASE__ ) && (__ARM_ARCH_8M_BASE__ == 1)) )
/* Read the Process Stack Pointer Limit. */
1200__STATIC_FORCEINLINE uint32_t __get_PSPLIM(
void)
1202#if (!(defined (__ARM_ARCH_8M_MAIN__ ) && (__ARM_ARCH_8M_MAIN__ == 1)) && \
1203 (!defined (__ARM_FEATURE_CMSE) || (__ARM_FEATURE_CMSE < 3)))
1208 __ASM
volatile (
"MRS %0, psplim" :
"=r" (result) );
1213#if (defined (__ARM_FEATURE_CMSE) && (__ARM_FEATURE_CMSE == 3))
1223__STATIC_FORCEINLINE uint32_t __TZ_get_PSPLIM_NS(
void)
1225#if (!(defined (__ARM_ARCH_8M_MAIN__ ) && (__ARM_ARCH_8M_MAIN__ == 1)))
1230 __ASM
volatile (
"MRS %0, psplim_ns" :
"=r" (result) );
/* Write the Process Stack Pointer Limit. */
1246__STATIC_FORCEINLINE
void __set_PSPLIM(uint32_t ProcStackPtrLimit)
1248#if (!(defined (__ARM_ARCH_8M_MAIN__ ) && (__ARM_ARCH_8M_MAIN__ == 1)) && \
1249 (!defined (__ARM_FEATURE_CMSE) || (__ARM_FEATURE_CMSE < 3)))
1251 (void)ProcStackPtrLimit;
1253 __ASM
volatile (
"MSR psplim, %0" : :
"r" (ProcStackPtrLimit));
1258#if (defined (__ARM_FEATURE_CMSE ) && (__ARM_FEATURE_CMSE == 3))
1268__STATIC_FORCEINLINE
void __TZ_set_PSPLIM_NS(uint32_t ProcStackPtrLimit)
1270#if (!(defined (__ARM_ARCH_8M_MAIN__ ) && (__ARM_ARCH_8M_MAIN__ == 1)))
1272 (void)ProcStackPtrLimit;
1274 __ASM
volatile (
"MSR psplim_ns, %0\n" : :
"r" (ProcStackPtrLimit));
/* Read the Main Stack Pointer Limit. */
1288__STATIC_FORCEINLINE uint32_t __get_MSPLIM(
void)
1290#if (!(defined (__ARM_ARCH_8M_MAIN__ ) && (__ARM_ARCH_8M_MAIN__ == 1)) && \
1291 (!defined (__ARM_FEATURE_CMSE) || (__ARM_FEATURE_CMSE < 3)))
1296 __ASM
volatile (
"MRS %0, msplim" :
"=r" (result) );
1302#if (defined (__ARM_FEATURE_CMSE ) && (__ARM_FEATURE_CMSE == 3))
1311__STATIC_FORCEINLINE uint32_t __TZ_get_MSPLIM_NS(
void)
1313#if (!(defined (__ARM_ARCH_8M_MAIN__ ) && (__ARM_ARCH_8M_MAIN__ == 1)))
1318 __ASM
volatile (
"MRS %0, msplim_ns" :
"=r" (result) );
/* Write the Main Stack Pointer Limit. */
1333__STATIC_FORCEINLINE
void __set_MSPLIM(uint32_t MainStackPtrLimit)
1335#if (!(defined (__ARM_ARCH_8M_MAIN__ ) && (__ARM_ARCH_8M_MAIN__ == 1)) && \
1336 (!defined (__ARM_FEATURE_CMSE) || (__ARM_FEATURE_CMSE < 3)))
1338 (void)MainStackPtrLimit;
1340 __ASM
volatile (
"MSR msplim, %0" : :
"r" (MainStackPtrLimit));
1345#if (defined (__ARM_FEATURE_CMSE ) && (__ARM_FEATURE_CMSE == 3))
1354__STATIC_FORCEINLINE
void __TZ_set_MSPLIM_NS(uint32_t MainStackPtrLimit)
1356#if (!(defined (__ARM_ARCH_8M_MAIN__ ) && (__ARM_ARCH_8M_MAIN__ == 1)))
1358 (void)MainStackPtrLimit;
1360 __ASM
volatile (
"MSR msplim_ns, %0" : :
"r" (MainStackPtrLimit));
/* FPSCR accessors: map to the VFP builtins when an FPU is present and in
   use; otherwise compile to no-ops so callers need no conditionals. */
1373#if ((defined (__FPU_PRESENT) && (__FPU_PRESENT == 1U)) && \
1374 (defined (__FPU_USED ) && (__FPU_USED == 1U)) )
1375#define __get_FPSCR (uint32_t)__builtin_arm_get_fpscr
1377#define __get_FPSCR() ((uint32_t)0U)
1385#if ((defined (__FPU_PRESENT) && (__FPU_PRESENT == 1U)) && \
1386 (defined (__FPU_USED ) && (__FPU_USED == 1U)) )
1387#define __set_FPSCR __builtin_arm_set_fpscr
1389#define __set_FPSCR(x) ((void)(x))
/* DSP (SIMD) intrinsics — only when the core has the DSP extension.
   Naming convention: S=signed, U=unsigned, Q/UQ=saturating, SH/UH=halving;
   the 8-suffix forms operate on four packed bytes per word.
   NOTE(review): braces, `uint32_t result;` declarations, and `return result;`
   lines are not visible in this extracted view. */
1402#if (defined (__ARM_FEATURE_DSP) && (__ARM_FEATURE_DSP == 1))
1404__STATIC_FORCEINLINE uint32_t __SADD8(uint32_t op1, uint32_t op2)
1408 __ASM
volatile (
"sadd8 %0, %1, %2" :
"=r" (result) :
"r" (op1),
"r" (op2) );
1412__STATIC_FORCEINLINE uint32_t __QADD8(uint32_t op1, uint32_t op2)
1416 __ASM
volatile (
"qadd8 %0, %1, %2" :
"=r" (result) :
"r" (op1),
"r" (op2) );
1420__STATIC_FORCEINLINE uint32_t __SHADD8(uint32_t op1, uint32_t op2)
1424 __ASM
volatile (
"shadd8 %0, %1, %2" :
"=r" (result) :
"r" (op1),
"r" (op2) );
1428__STATIC_FORCEINLINE uint32_t __UADD8(uint32_t op1, uint32_t op2)
1432 __ASM
volatile (
"uadd8 %0, %1, %2" :
"=r" (result) :
"r" (op1),
"r" (op2) );
1436__STATIC_FORCEINLINE uint32_t __UQADD8(uint32_t op1, uint32_t op2)
1440 __ASM
volatile (
"uqadd8 %0, %1, %2" :
"=r" (result) :
"r" (op1),
"r" (op2) );
1444__STATIC_FORCEINLINE uint32_t __UHADD8(uint32_t op1, uint32_t op2)
1448 __ASM
volatile (
"uhadd8 %0, %1, %2" :
"=r" (result) :
"r" (op1),
"r" (op2) );
1453__STATIC_FORCEINLINE uint32_t __SSUB8(uint32_t op1, uint32_t op2)
1457 __ASM
volatile (
"ssub8 %0, %1, %2" :
"=r" (result) :
"r" (op1),
"r" (op2) );
1461__STATIC_FORCEINLINE uint32_t __QSUB8(uint32_t op1, uint32_t op2)
1465 __ASM
volatile (
"qsub8 %0, %1, %2" :
"=r" (result) :
"r" (op1),
"r" (op2) );
1469__STATIC_FORCEINLINE uint32_t __SHSUB8(uint32_t op1, uint32_t op2)
1473 __ASM
volatile (
"shsub8 %0, %1, %2" :
"=r" (result) :
"r" (op1),
"r" (op2) );
1477__STATIC_FORCEINLINE uint32_t __USUB8(uint32_t op1, uint32_t op2)
1481 __ASM
volatile (
"usub8 %0, %1, %2" :
"=r" (result) :
"r" (op1),
"r" (op2) );
1485__STATIC_FORCEINLINE uint32_t __UQSUB8(uint32_t op1, uint32_t op2)
1489 __ASM
volatile (
"uqsub8 %0, %1, %2" :
"=r" (result) :
"r" (op1),
"r" (op2) );
1493__STATIC_FORCEINLINE uint32_t __UHSUB8(uint32_t op1, uint32_t op2)
1497 __ASM
volatile (
"uhsub8 %0, %1, %2" :
"=r" (result) :
"r" (op1),
"r" (op2) );
/* SIMD halfword add/subtract intrinsics: two packed 16-bit lanes per word.
   Same S/Q/SH/U/UQ/UH naming as the byte forms above.
   NOTE(review): braces, `uint32_t result;` declarations, and returns are on
   lines not visible in this extracted view. */
1502__STATIC_FORCEINLINE uint32_t __SADD16(uint32_t op1, uint32_t op2)
1506 __ASM
volatile (
"sadd16 %0, %1, %2" :
"=r" (result) :
"r" (op1),
"r" (op2) );
1510__STATIC_FORCEINLINE uint32_t __QADD16(uint32_t op1, uint32_t op2)
1514 __ASM
volatile (
"qadd16 %0, %1, %2" :
"=r" (result) :
"r" (op1),
"r" (op2) );
1518__STATIC_FORCEINLINE uint32_t __SHADD16(uint32_t op1, uint32_t op2)
1522 __ASM
volatile (
"shadd16 %0, %1, %2" :
"=r" (result) :
"r" (op1),
"r" (op2) );
1526__STATIC_FORCEINLINE uint32_t __UADD16(uint32_t op1, uint32_t op2)
1530 __ASM
volatile (
"uadd16 %0, %1, %2" :
"=r" (result) :
"r" (op1),
"r" (op2) );
1534__STATIC_FORCEINLINE uint32_t __UQADD16(uint32_t op1, uint32_t op2)
1538 __ASM
volatile (
"uqadd16 %0, %1, %2" :
"=r" (result) :
"r" (op1),
"r" (op2) );
1542__STATIC_FORCEINLINE uint32_t __UHADD16(uint32_t op1, uint32_t op2)
1546 __ASM
volatile (
"uhadd16 %0, %1, %2" :
"=r" (result) :
"r" (op1),
"r" (op2) );
1550__STATIC_FORCEINLINE uint32_t __SSUB16(uint32_t op1, uint32_t op2)
1554 __ASM
volatile (
"ssub16 %0, %1, %2" :
"=r" (result) :
"r" (op1),
"r" (op2) );
1558__STATIC_FORCEINLINE uint32_t __QSUB16(uint32_t op1, uint32_t op2)
1562 __ASM
volatile (
"qsub16 %0, %1, %2" :
"=r" (result) :
"r" (op1),
"r" (op2) );
1566__STATIC_FORCEINLINE uint32_t __SHSUB16(uint32_t op1, uint32_t op2)
1570 __ASM
volatile (
"shsub16 %0, %1, %2" :
"=r" (result) :
"r" (op1),
"r" (op2) );
1574__STATIC_FORCEINLINE uint32_t __USUB16(uint32_t op1, uint32_t op2)
1578 __ASM
volatile (
"usub16 %0, %1, %2" :
"=r" (result) :
"r" (op1),
"r" (op2) );
1582__STATIC_FORCEINLINE uint32_t __UQSUB16(uint32_t op1, uint32_t op2)
1586 __ASM
volatile (
"uqsub16 %0, %1, %2" :
"=r" (result) :
"r" (op1),
"r" (op2) );
1590__STATIC_FORCEINLINE uint32_t __UHSUB16(uint32_t op1, uint32_t op2)
1594 __ASM
volatile (
"uhsub16 %0, %1, %2" :
"=r" (result) :
"r" (op1),
"r" (op2) );
/* SIMD exchange add/subtract intrinsics: ASX adds high halfwords and
   subtracts low halfwords after exchanging op2's lanes; SAX is the mirror.
   NOTE(review): braces, `uint32_t result;` declarations, and returns are on
   lines not visible in this extracted view. */
1598__STATIC_FORCEINLINE uint32_t __SASX(uint32_t op1, uint32_t op2)
1602 __ASM
volatile (
"sasx %0, %1, %2" :
"=r" (result) :
"r" (op1),
"r" (op2) );
1606__STATIC_FORCEINLINE uint32_t __QASX(uint32_t op1, uint32_t op2)
1610 __ASM
volatile (
"qasx %0, %1, %2" :
"=r" (result) :
"r" (op1),
"r" (op2) );
1614__STATIC_FORCEINLINE uint32_t __SHASX(uint32_t op1, uint32_t op2)
1618 __ASM
volatile (
"shasx %0, %1, %2" :
"=r" (result) :
"r" (op1),
"r" (op2) );
1622__STATIC_FORCEINLINE uint32_t __UASX(uint32_t op1, uint32_t op2)
1626 __ASM
volatile (
"uasx %0, %1, %2" :
"=r" (result) :
"r" (op1),
"r" (op2) );
1630__STATIC_FORCEINLINE uint32_t __UQASX(uint32_t op1, uint32_t op2)
1634 __ASM
volatile (
"uqasx %0, %1, %2" :
"=r" (result) :
"r" (op1),
"r" (op2) );
1638__STATIC_FORCEINLINE uint32_t __UHASX(uint32_t op1, uint32_t op2)
1642 __ASM
volatile (
"uhasx %0, %1, %2" :
"=r" (result) :
"r" (op1),
"r" (op2) );
1646__STATIC_FORCEINLINE uint32_t __SSAX(uint32_t op1, uint32_t op2)
1650 __ASM
volatile (
"ssax %0, %1, %2" :
"=r" (result) :
"r" (op1),
"r" (op2) );
1654__STATIC_FORCEINLINE uint32_t __QSAX(uint32_t op1, uint32_t op2)
1658 __ASM
volatile (
"qsax %0, %1, %2" :
"=r" (result) :
"r" (op1),
"r" (op2) );
1662__STATIC_FORCEINLINE uint32_t __SHSAX(uint32_t op1, uint32_t op2)
1666 __ASM
volatile (
"shsax %0, %1, %2" :
"=r" (result) :
"r" (op1),
"r" (op2) );
1670__STATIC_FORCEINLINE uint32_t __USAX(uint32_t op1, uint32_t op2)
1674 __ASM
volatile (
"usax %0, %1, %2" :
"=r" (result) :
"r" (op1),
"r" (op2) );
1678__STATIC_FORCEINLINE uint32_t __UQSAX(uint32_t op1, uint32_t op2)
1682 __ASM
volatile (
"uqsax %0, %1, %2" :
"=r" (result) :
"r" (op1),
"r" (op2) );
1686__STATIC_FORCEINLINE uint32_t __UHSAX(uint32_t op1, uint32_t op2)
1690 __ASM
volatile (
"uhsax %0, %1, %2" :
"=r" (result) :
"r" (op1),
"r" (op2) );
/* Sum-of-absolute-differences and packed 16-bit saturate intrinsics.
   SSAT16/USAT16 are macros because the saturate position must be an
   assemble-time immediate ("I" constraint).
   NOTE(review): braces, `result` declarations, returns, and the macros'
   do/while wrappers are on lines not visible in this extracted view. */
1694__STATIC_FORCEINLINE uint32_t __USAD8(uint32_t op1, uint32_t op2)
1698 __ASM
volatile (
"usad8 %0, %1, %2" :
"=r" (result) :
"r" (op1),
"r" (op2) );
/* USADA8: sum of absolute differences accumulated into op3. */
1702__STATIC_FORCEINLINE uint32_t __USADA8(uint32_t op1, uint32_t op2, uint32_t op3)
1706 __ASM
volatile (
"usada8 %0, %1, %2, %3" :
"=r" (result) :
"r" (op1),
"r" (op2),
"r" (op3) );
1710#define __SSAT16(ARG1,ARG2) \
1712 int32_t __RES, __ARG1 = (ARG1); \
1713 __ASM ("ssat16 %0, %1, %2" : "=r" (__RES) : "I" (ARG2), "r" (__ARG1) ); \
1717#define __USAT16(ARG1,ARG2) \
1719 uint32_t __RES, __ARG1 = (ARG1); \
1720 __ASM ("usat16 %0, %1, %2" : "=r" (__RES) : "I" (ARG2), "r" (__ARG1) ); \
/* Dual byte-to-halfword extend intrinsics: UXTB16/SXTB16 extract bytes 0 and
   2 and zero-/sign-extend them into two halfword lanes; the *AB16 forms add
   the result to op1 lane-wise.
   NOTE(review): braces, `result` declarations, and returns not visible. */
1724__STATIC_FORCEINLINE uint32_t __UXTB16(uint32_t op1)
1728 __ASM
volatile (
"uxtb16 %0, %1" :
"=r" (result) :
"r" (op1));
1732__STATIC_FORCEINLINE uint32_t __UXTAB16(uint32_t op1, uint32_t op2)
1736 __ASM
volatile (
"uxtab16 %0, %1, %2" :
"=r" (result) :
"r" (op1),
"r" (op2) );
1740__STATIC_FORCEINLINE uint32_t __SXTB16(uint32_t op1)
1744 __ASM
volatile (
"sxtb16 %0, %1" :
"=r" (result) :
"r" (op1));
1748__STATIC_FORCEINLINE uint32_t __SXTAB16(uint32_t op1, uint32_t op2)
1752 __ASM
volatile (
"sxtab16 %0, %1, %2" :
"=r" (result) :
"r" (op1),
"r" (op2) );
/* Dual 16x16 multiply-add intrinsics: SMUAD multiplies corresponding
   halfword lanes and adds the products; the X suffix exchanges op2's lanes
   first; the SMLAD forms accumulate into op3.
   NOTE(review): braces, `result` declarations, and returns not visible. */
1756__STATIC_FORCEINLINE uint32_t __SMUAD (uint32_t op1, uint32_t op2)
1760 __ASM
volatile (
"smuad %0, %1, %2" :
"=r" (result) :
"r" (op1),
"r" (op2) );
1764__STATIC_FORCEINLINE uint32_t __SMUADX (uint32_t op1, uint32_t op2)
1768 __ASM
volatile (
"smuadx %0, %1, %2" :
"=r" (result) :
"r" (op1),
"r" (op2) );
1772__STATIC_FORCEINLINE uint32_t __SMLAD (uint32_t op1, uint32_t op2, uint32_t op3)
1776 __ASM
volatile (
"smlad %0, %1, %2, %3" :
"=r" (result) :
"r" (op1),
"r" (op2),
"r" (op3) );
1780__STATIC_FORCEINLINE uint32_t __SMLADX (uint32_t op1, uint32_t op2, uint32_t op3)
1784 __ASM
volatile (
"smladx %0, %1, %2, %3" :
"=r" (result) :
"r" (op1),
"r" (op2),
"r" (op3) );
/* Dual 16x16 multiply with 64-bit accumulate.  Each function carries two asm
   variants selected by a (hidden) endianness #if: the w32[0]/w32[1] operand
   order differs between little- and big-endian register pairing of the
   64-bit accumulator union `llr`.
   NOTE(review): the union declaration, acc unpacking/repacking, braces, and
   the endianness #if/#else/#endif lines are not visible in this view. */
1788__STATIC_FORCEINLINE uint64_t __SMLALD (uint32_t op1, uint32_t op2, uint64_t acc)
/* Little-endian register pairing. */
1797 __ASM
volatile (
"smlald %0, %1, %2, %3" :
"=r" (llr.w32[0]),
"=r" (llr.w32[1]):
"r" (op1),
"r" (op2) ,
"0" (llr.w32[0]),
"1" (llr.w32[1]) );
/* Big-endian register pairing. */
1799 __ASM
volatile (
"smlald %0, %1, %2, %3" :
"=r" (llr.w32[1]),
"=r" (llr.w32[0]):
"r" (op1),
"r" (op2) ,
"0" (llr.w32[1]),
"1" (llr.w32[0]) );
/* SMLALDX: as SMLALD, with op2's halfword lanes exchanged. */
1805__STATIC_FORCEINLINE uint64_t __SMLALDX (uint32_t op1, uint32_t op2, uint64_t acc)
1814 __ASM
volatile (
"smlaldx %0, %1, %2, %3" :
"=r" (llr.w32[0]),
"=r" (llr.w32[1]):
"r" (op1),
"r" (op2) ,
"0" (llr.w32[0]),
"1" (llr.w32[1]) );
1816 __ASM
volatile (
"smlaldx %0, %1, %2, %3" :
"=r" (llr.w32[1]),
"=r" (llr.w32[0]):
"r" (op1),
"r" (op2) ,
"0" (llr.w32[1]),
"1" (llr.w32[0]) );
/* Dual 16x16 multiply-subtract intrinsics: SMUSD subtracts the high-lane
   product from the low-lane product; X exchanges op2's lanes; SMLSD forms
   accumulate into op3.
   NOTE(review): braces, `result` declarations, and returns not visible. */
1822__STATIC_FORCEINLINE uint32_t __SMUSD (uint32_t op1, uint32_t op2)
1826 __ASM
volatile (
"smusd %0, %1, %2" :
"=r" (result) :
"r" (op1),
"r" (op2) );
1830__STATIC_FORCEINLINE uint32_t __SMUSDX (uint32_t op1, uint32_t op2)
1834 __ASM
volatile (
"smusdx %0, %1, %2" :
"=r" (result) :
"r" (op1),
"r" (op2) );
1838__STATIC_FORCEINLINE uint32_t __SMLSD (uint32_t op1, uint32_t op2, uint32_t op3)
1842 __ASM
volatile (
"smlsd %0, %1, %2, %3" :
"=r" (result) :
"r" (op1),
"r" (op2),
"r" (op3) );
1846__STATIC_FORCEINLINE uint32_t __SMLSDX (uint32_t op1, uint32_t op2, uint32_t op3)
1850 __ASM
volatile (
"smlsdx %0, %1, %2, %3" :
"=r" (result) :
"r" (op1),
"r" (op2),
"r" (op3) );
/* Dual 16x16 multiply-subtract with 64-bit accumulate; same dual-variant
   endianness structure as __SMLALD above.
   NOTE(review): the `llr` union, acc handling, braces, and endianness
   #if/#else/#endif lines are not visible in this extracted view. */
1854__STATIC_FORCEINLINE uint64_t __SMLSLD (uint32_t op1, uint32_t op2, uint64_t acc)
/* Little-endian register pairing. */
1863 __ASM
volatile (
"smlsld %0, %1, %2, %3" :
"=r" (llr.w32[0]),
"=r" (llr.w32[1]):
"r" (op1),
"r" (op2) ,
"0" (llr.w32[0]),
"1" (llr.w32[1]) );
/* Big-endian register pairing. */
1865 __ASM
volatile (
"smlsld %0, %1, %2, %3" :
"=r" (llr.w32[1]),
"=r" (llr.w32[0]):
"r" (op1),
"r" (op2) ,
"0" (llr.w32[1]),
"1" (llr.w32[0]) );
/* SMLSLDX: as SMLSLD, with op2's halfword lanes exchanged. */
1871__STATIC_FORCEINLINE uint64_t __SMLSLDX (uint32_t op1, uint32_t op2, uint64_t acc)
1880 __ASM
volatile (
"smlsldx %0, %1, %2, %3" :
"=r" (llr.w32[0]),
"=r" (llr.w32[1]):
"r" (op1),
"r" (op2) ,
"0" (llr.w32[0]),
"1" (llr.w32[1]) );
1882 __ASM
volatile (
"smlsldx %0, %1, %2, %3" :
"=r" (llr.w32[1]),
"=r" (llr.w32[0]):
"r" (op1),
"r" (op2) ,
"0" (llr.w32[1]),
"1" (llr.w32[0]) );
/* Remaining DSP intrinsics: SEL (GE-flag byte select), 32-bit saturating
   add/subtract, pack-halfword macros, and SMMLA.
   NOTE(review): braces, `result` declarations, and returns not visible. */
1888__STATIC_FORCEINLINE uint32_t __SEL (uint32_t op1, uint32_t op2)
1892 __ASM
volatile (
"sel %0, %1, %2" :
"=r" (result) :
"r" (op1),
"r" (op2) );
/* QADD/QSUB: 32-bit saturating signed add/subtract. */
1896__STATIC_FORCEINLINE int32_t __QADD( int32_t op1, int32_t op2)
1900 __ASM
volatile (
"qadd %0, %1, %2" :
"=r" (result) :
"r" (op1),
"r" (op2) );
1904__STATIC_FORCEINLINE int32_t __QSUB( int32_t op1, int32_t op2)
1908 __ASM
volatile (
"qsub %0, %1, %2" :
"=r" (result) :
"r" (op1),
"r" (op2) );
/* Pack halfwords: PKHBT takes ARG1's bottom and shifted ARG2's top half;
   PKHTB takes ARG1's top and shifted ARG2's bottom half. */
1912#define __PKHBT(ARG1,ARG2,ARG3) ( ((((uint32_t)(ARG1)) ) & 0x0000FFFFUL) | \
1913 ((((uint32_t)(ARG2)) << (ARG3)) & 0xFFFF0000UL) )
1915#define __PKHTB(ARG1,ARG2,ARG3) ( ((((uint32_t)(ARG1)) ) & 0xFFFF0000UL) | \
1916 ((((uint32_t)(ARG2)) >> (ARG3)) & 0x0000FFFFUL) )
/* SXTB16 applied to a rotated input (maps to the instruction's ROR option). */
1918#define __SXTB16_RORn(ARG1, ARG2) __SXTB16(__ROR(ARG1, ARG2))
1920#define __SXTAB16_RORn(ARG1, ARG2, ARG3) __SXTAB16(ARG1, __ROR(ARG2, ARG3))
/* SMMLA: 32x32 multiply keeping the most-significant 32 bits, plus op3. */
1922__STATIC_FORCEINLINE int32_t __SMMLA (int32_t op1, int32_t op2, int32_t op3)
1926 __ASM
volatile (
"smmla %0, %1, %2, %3" :
"=r" (result):
"r" (op1),
"r" (op2),
"r" (op3) );
__STATIC_FORCEINLINE int32_t __SSAT(int32_t val, uint32_t sat)
Signed Saturate.
#define __CLZ
Count leading zeros.
__STATIC_FORCEINLINE uint32_t __USAT(int32_t val, uint32_t sat)
Unsigned Saturate.
#define __ROR
Rotate Right in unsigned value (32 bit)
#define __ISB()
Instruction Synchronization Barrier.
__STATIC_INLINE void __set_CONTROL(uint32_t control)
Set Control Register.
__STATIC_INLINE void __set_MSP(uint32_t topOfMainStack)
Set Main Stack Pointer.
__STATIC_INLINE void __set_PSP(uint32_t topOfProcStack)
Set Process Stack Pointer.
__STATIC_INLINE uint32_t __get_PRIMASK(void)
Get Priority Mask.
#define __enable_irq
Enable IRQ Interrupts.
__STATIC_INLINE uint32_t __get_MSP(void)
Get Main Stack Pointer.
__STATIC_INLINE uint32_t __get_CONTROL(void)
Get Control Register.
__STATIC_INLINE uint32_t __get_PSP(void)
Get Process Stack Pointer.
#define __disable_irq
Disable IRQ Interrupts.
__STATIC_INLINE uint32_t __get_APSR(void)
Get APSR Register.
#define __get_xPSR
Get xPSR Register.
__STATIC_INLINE uint32_t __get_IPSR(void)
Get IPSR Register.
__STATIC_INLINE void __set_PRIMASK(uint32_t priMask)
Set Priority Mask.
void __attribute__((noreturn)) (*rom_reset_usb_boot_fn)(uint32_t gpio_activity_pin_mask, uint32_t disable_interface_mask)
Reboot the device into BOOTSEL mode.