/* CMSIS-Core GCC compiler abstraction (partial extraction of cmsis_gcc.h).
 * Suppress sign/width-conversion and unused-parameter warnings for the
 * inline-asm intrinsic helpers that follow; popped again at end of file. */
29#pragma GCC diagnostic push
30#pragma GCC diagnostic ignored "-Wsign-conversion"
31#pragma GCC diagnostic ignored "-Wconversion"
32#pragma GCC diagnostic ignored "-Wunused-parameter"
/* Fallback when the compiler does not provide __has_builtin:
 * treat every builtin as unavailable. */
36 #define __has_builtin(x) (0)
44 #define __INLINE inline
46#ifndef __STATIC_INLINE
47 #define __STATIC_INLINE static inline
49#ifndef __STATIC_FORCEINLINE
50 #define __STATIC_FORCEINLINE __attribute__((always_inline)) static inline
53 #define __NO_RETURN __attribute__((__noreturn__))
56 #define __USED __attribute__((used))
59 #define __WEAK __attribute__((weak))
62 #define __PACKED __attribute__((packed, aligned(1)))
64#ifndef __PACKED_STRUCT
65 #define __PACKED_STRUCT struct __attribute__((packed, aligned(1)))
68 #define __PACKED_UNION union __attribute__((packed, aligned(1)))
70#ifndef __UNALIGNED_UINT32
71 #pragma GCC diagnostic push
72 #pragma GCC diagnostic ignored "-Wpacked"
73 #pragma GCC diagnostic ignored "-Wattributes"
75 #pragma GCC diagnostic pop
76 #define __UNALIGNED_UINT32(x) (((struct T_UINT32 *)(x))->v)
78#ifndef __UNALIGNED_UINT16_WRITE
79 #pragma GCC diagnostic push
80 #pragma GCC diagnostic ignored "-Wpacked"
81 #pragma GCC diagnostic ignored "-Wattributes"
82 __PACKED_STRUCT T_UINT16_WRITE { uint16_t v; };
83 #pragma GCC diagnostic pop
84 #define __UNALIGNED_UINT16_WRITE(addr, val) (void)((((struct T_UINT16_WRITE *)(void *)(addr))->v) = (val))
86#ifndef __UNALIGNED_UINT16_READ
87 #pragma GCC diagnostic push
88 #pragma GCC diagnostic ignored "-Wpacked"
89 #pragma GCC diagnostic ignored "-Wattributes"
90 __PACKED_STRUCT T_UINT16_READ { uint16_t v; };
91 #pragma GCC diagnostic pop
92 #define __UNALIGNED_UINT16_READ(addr) (((const struct T_UINT16_READ *)(const void *)(addr))->v)
94#ifndef __UNALIGNED_UINT32_WRITE
95 #pragma GCC diagnostic push
96 #pragma GCC diagnostic ignored "-Wpacked"
97 #pragma GCC diagnostic ignored "-Wattributes"
98 __PACKED_STRUCT T_UINT32_WRITE { uint32_t v; };
99 #pragma GCC diagnostic pop
100 #define __UNALIGNED_UINT32_WRITE(addr, val) (void)((((struct T_UINT32_WRITE *)(void *)(addr))->v) = (val))
102#ifndef __UNALIGNED_UINT32_READ
103 #pragma GCC diagnostic push
104 #pragma GCC diagnostic ignored "-Wpacked"
105 #pragma GCC diagnostic ignored "-Wattributes"
106 __PACKED_STRUCT T_UINT32_READ { uint32_t v; };
107 #pragma GCC diagnostic pop
108 #define __UNALIGNED_UINT32_READ(addr) (((const struct T_UINT32_READ *)(const void *)(addr))->v)
111 #define __ALIGNED(x) __attribute__((aligned(x)))
114 #define __RESTRICT __restrict
116#ifndef __COMPILER_BARRIER
117 #define __COMPILER_BARRIER() __ASM volatile("":::"memory")
120 #define __NO_INIT __attribute__ ((section (".bss.noinit")))
123 #define __ALIAS(x) __attribute__ ((alias(x)))
128#ifndef __PROGRAM_START
139 extern void _start(
void) __NO_RETURN;
141 typedef struct __copy_table {
147 typedef struct __zero_table {
152 extern const __copy_table_t __copy_table_start__;
153 extern const __copy_table_t __copy_table_end__;
154 extern const __zero_table_t __zero_table_start__;
155 extern const __zero_table_t __zero_table_end__;
157 for (__copy_table_t
const* pTable = &__copy_table_start__; pTable < &__copy_table_end__; ++pTable) {
158 for(uint32_t i=0u; i<pTable->wlen; ++i) {
159 pTable->dest[i] = pTable->src[i];
163 for (__zero_table_t
const* pTable = &__zero_table_start__; pTable < &__zero_table_end__; ++pTable) {
164 for(uint32_t i=0u; i<pTable->wlen; ++i) {
165 pTable->dest[i] = 0u;
172#define __PROGRAM_START __cmsis_start
176#define __INITIAL_SP __StackTop
180#define __STACK_LIMIT __StackLimit
183#ifndef __VECTOR_TABLE
184#define __VECTOR_TABLE __Vectors
187#ifndef __VECTOR_TABLE_ATTRIBUTE
188#define __VECTOR_TABLE_ATTRIBUTE __attribute__((used, section(".vectors")))
191#if defined (__ARM_FEATURE_CMSE) && (__ARM_FEATURE_CMSE == 3U)
193#define __STACK_SEAL __StackSeal
196#ifndef __TZ_STACK_SEAL_SIZE
197#define __TZ_STACK_SEAL_SIZE 8U
200#ifndef __TZ_STACK_SEAL_VALUE
201#define __TZ_STACK_SEAL_VALUE 0xFEF5EDA5FEF5EDA5ULL
/* Seal the Secure stack (ARMv8-M TrustZone): store the 64-bit
 * __TZ_STACK_SEAL_VALUE at stackTop so a stack underflow into the seal
 * is detectable.
 * NOTE(review): the uint64_t store assumes stackTop is 8-byte aligned —
 * TODO confirm the linker guarantees this for __StackSeal. Closing brace
 * is not visible in this extraction. */
205__STATIC_FORCEINLINE
void __TZ_set_STACKSEAL_S (uint32_t* stackTop) {
206 *((uint64_t *)stackTop) = __TZ_STACK_SEAL_VALUE;
220#if defined (__thumb__) && !defined (__thumb2__)
221#define __CMSIS_GCC_OUT_REG(r) "=l" (r)
222#define __CMSIS_GCC_RW_REG(r) "+l" (r)
223#define __CMSIS_GCC_USE_REG(r) "l" (r)
225#define __CMSIS_GCC_OUT_REG(r) "=r" (r)
226#define __CMSIS_GCC_RW_REG(r) "+r" (r)
227#define __CMSIS_GCC_USE_REG(r) "r" (r)
234#define __NOP() __ASM volatile ("nop")
240#define __WFI() __ASM volatile ("wfi":::"memory")
248#define __WFE() __ASM volatile ("wfe":::"memory")
255#define __SEV() __ASM volatile ("sev")
264__STATIC_FORCEINLINE
void __ISB(
void)
266 __ASM
volatile (
"isb 0xF":::
"memory");
275__STATIC_FORCEINLINE
void __DSB(
void)
277 __ASM
volatile (
"dsb 0xF":::
"memory");
286__STATIC_FORCEINLINE
void __DMB(
void)
288 __ASM
volatile (
"dmb 0xF":::
"memory");
298__STATIC_FORCEINLINE uint32_t
__REV(uint32_t value)
300#if (__GNUC__ > 4) || (__GNUC__ == 4 && __GNUC_MINOR__ >= 5)
301 return __builtin_bswap32(value);
305 __ASM (
"rev %0, %1" : __CMSIS_GCC_OUT_REG (result) : __CMSIS_GCC_USE_REG (value) );
317__STATIC_FORCEINLINE uint32_t
__REV16(uint32_t value)
321 __ASM (
"rev16 %0, %1" : __CMSIS_GCC_OUT_REG (result) : __CMSIS_GCC_USE_REG (value) );
332__STATIC_FORCEINLINE int16_t
__REVSH(int16_t value)
334#if (__GNUC__ > 4) || (__GNUC__ == 4 && __GNUC_MINOR__ >= 8)
335 return (int16_t)__builtin_bswap16(value);
339 __ASM (
"revsh %0, %1" : __CMSIS_GCC_OUT_REG (result) : __CMSIS_GCC_USE_REG (value) );
/* Rotate op1 right by op2 bit positions (32-bit rotate).
 * NOTE(review): as visible here, op2 == 0 yields `op1 << 32U`, which is
 * undefined behavior in C. Upstream CMSIS guards this with
 * `op2 %= 32U; if (op2 == 0U) { return op1; }` on original lines 353-358,
 * which are elided from this extraction — confirm they are present in the
 * real file. */
352__STATIC_FORCEINLINE uint32_t
__ROR(uint32_t op1, uint32_t op2)
359 return (op1 >> op2) | (op1 << (32U - op2));
370#define __BKPT(value) __ASM volatile ("bkpt "#value)
379__STATIC_FORCEINLINE uint32_t
__RBIT(uint32_t value)
383#if ((defined (__ARM_ARCH_7M__ ) && (__ARM_ARCH_7M__ == 1)) || \
384 (defined (__ARM_ARCH_7EM__ ) && (__ARM_ARCH_7EM__ == 1)) || \
385 (defined (__ARM_ARCH_8M_MAIN__ ) && (__ARM_ARCH_8M_MAIN__ == 1)) )
386 __ASM (
"rbit %0, %1" :
"=r" (result) :
"r" (value) );
388 uint32_t s = (4U * 8U) - 1U;
391 for (value >>= 1U; value != 0U; value >>= 1U)
394 result |= value & 1U;
409__STATIC_FORCEINLINE uint8_t
__CLZ(uint32_t value)
424 return __builtin_clz(value);
428#if ((defined (__ARM_ARCH_7M__ ) && (__ARM_ARCH_7M__ == 1)) || \
429 (defined (__ARM_ARCH_7EM__ ) && (__ARM_ARCH_7EM__ == 1)) || \
430 (defined (__ARM_ARCH_8M_MAIN__ ) && (__ARM_ARCH_8M_MAIN__ == 1)) || \
431 (defined (__ARM_ARCH_8M_BASE__ ) && (__ARM_ARCH_8M_BASE__ == 1)) )
438__STATIC_FORCEINLINE uint8_t __LDREXB(
volatile uint8_t *addr)
442#if (__GNUC__ > 4) || (__GNUC__ == 4 && __GNUC_MINOR__ >= 8)
443 __ASM
volatile (
"ldrexb %0, %1" :
"=r" (result) :
"Q" (*addr) );
448 __ASM
volatile (
"ldrexb %0, [%1]" :
"=r" (result) :
"r" (addr) :
"memory" );
450 return ((uint8_t) result);
460__STATIC_FORCEINLINE uint16_t __LDREXH(
volatile uint16_t *addr)
464#if (__GNUC__ > 4) || (__GNUC__ == 4 && __GNUC_MINOR__ >= 8)
465 __ASM
volatile (
"ldrexh %0, %1" :
"=r" (result) :
"Q" (*addr) );
470 __ASM
volatile (
"ldrexh %0, [%1]" :
"=r" (result) :
"r" (addr) :
"memory" );
472 return ((uint16_t) result);
482__STATIC_FORCEINLINE uint32_t __LDREXW(
volatile uint32_t *addr)
486 __ASM
volatile (
"ldrex %0, %1" :
"=r" (result) :
"Q" (*addr) );
499__STATIC_FORCEINLINE uint32_t __STREXB(uint8_t value,
volatile uint8_t *addr)
503 __ASM
volatile (
"strexb %0, %2, %1" :
"=&r" (result),
"=Q" (*addr) :
"r" ((uint32_t)value) );
516__STATIC_FORCEINLINE uint32_t __STREXH(uint16_t value,
volatile uint16_t *addr)
520 __ASM
volatile (
"strexh %0, %2, %1" :
"=&r" (result),
"=Q" (*addr) :
"r" ((uint32_t)value) );
533__STATIC_FORCEINLINE uint32_t __STREXW(uint32_t value,
volatile uint32_t *addr)
537 __ASM
volatile (
"strex %0, %2, %1" :
"=&r" (result),
"=Q" (*addr) :
"r" (value) );
546__STATIC_FORCEINLINE
void __CLREX(
void)
548 __ASM
volatile (
"clrex" :::
"memory");
557#if ((defined (__ARM_ARCH_7M__ ) && (__ARM_ARCH_7M__ == 1)) || \
558 (defined (__ARM_ARCH_7EM__ ) && (__ARM_ARCH_7EM__ == 1)) || \
559 (defined (__ARM_ARCH_8M_MAIN__ ) && (__ARM_ARCH_8M_MAIN__ == 1)) )
567#define __SSAT(ARG1, ARG2) \
570 int32_t __RES, __ARG1 = (ARG1); \
571 __ASM volatile ("ssat %0, %1, %2" : "=r" (__RES) : "I" (ARG2), "r" (__ARG1) : "cc" ); \
583#define __USAT(ARG1, ARG2) \
586 uint32_t __RES, __ARG1 = (ARG1); \
587 __ASM volatile ("usat %0, %1, %2" : "=r" (__RES) : "I" (ARG2), "r" (__ARG1) : "cc" ); \
599__STATIC_FORCEINLINE uint32_t __RRX(uint32_t value)
603 __ASM
volatile (
"rrx %0, %1" : __CMSIS_GCC_OUT_REG (result) : __CMSIS_GCC_USE_REG (value) );
614__STATIC_FORCEINLINE uint8_t __LDRBT(
volatile uint8_t *ptr)
618#if (__GNUC__ > 4) || (__GNUC__ == 4 && __GNUC_MINOR__ >= 8)
619 __ASM
volatile (
"ldrbt %0, %1" :
"=r" (result) :
"Q" (*ptr) );
624 __ASM
volatile (
"ldrbt %0, [%1]" :
"=r" (result) :
"r" (ptr) :
"memory" );
626 return ((uint8_t) result);
636__STATIC_FORCEINLINE uint16_t __LDRHT(
volatile uint16_t *ptr)
640#if (__GNUC__ > 4) || (__GNUC__ == 4 && __GNUC_MINOR__ >= 8)
641 __ASM
volatile (
"ldrht %0, %1" :
"=r" (result) :
"Q" (*ptr) );
646 __ASM
volatile (
"ldrht %0, [%1]" :
"=r" (result) :
"r" (ptr) :
"memory" );
648 return ((uint16_t) result);
658__STATIC_FORCEINLINE uint32_t __LDRT(
volatile uint32_t *ptr)
662 __ASM
volatile (
"ldrt %0, %1" :
"=r" (result) :
"Q" (*ptr) );
673__STATIC_FORCEINLINE
void __STRBT(uint8_t value,
volatile uint8_t *ptr)
675 __ASM
volatile (
"strbt %1, %0" :
"=Q" (*ptr) :
"r" ((uint32_t)value) );
685__STATIC_FORCEINLINE
void __STRHT(uint16_t value,
volatile uint16_t *ptr)
687 __ASM
volatile (
"strht %1, %0" :
"=Q" (*ptr) :
"r" ((uint32_t)value) );
697__STATIC_FORCEINLINE
void __STRT(uint32_t value,
volatile uint32_t *ptr)
699 __ASM
volatile (
"strt %1, %0" :
"=Q" (*ptr) :
"r" (value) );
/* Signed saturate `val` to a `sat`-bit signed range (software fallback,
 * used when the SSAT instruction is unavailable).
 * For sat in [1,32]: max = 2^(sat-1) - 1, min = -2^(sat-1)
 * (min is written as -1 - max to avoid overflowing INT32_MIN negation).
 * NOTE(review): the clamping branches and the final `return val;`
 * (original lines ~719-733) are elided from this extraction. */
713__STATIC_FORCEINLINE int32_t
__SSAT(int32_t val, uint32_t sat)
715 if ((sat >= 1U) && (sat <= 32U))
717 const int32_t max = (int32_t)((1U << (sat - 1U)) - 1U);
718 const int32_t min = -1 - max ;
/* Unsigned saturate `val` to a `sat`-bit unsigned range [0, 2^sat - 1]
 * (software fallback, used when the USAT instruction is unavailable).
 * NOTE(review): `1U << sat` is undefined behavior for sat >= 32; upstream
 * CMSIS wraps this body in `if (sat <= 31U)` (original line ~741), which is
 * elided from this extraction — confirm the guard exists in the real file.
 * The negative-clamp branch (return 0 when val < 0) is also elided. */
738__STATIC_FORCEINLINE uint32_t
__USAT(int32_t val, uint32_t sat)
742 const uint32_t max = ((1U << sat) - 1U);
743 if (val > (int32_t)max)
752 return (uint32_t)val;
760#if ((defined (__ARM_ARCH_8M_MAIN__ ) && (__ARM_ARCH_8M_MAIN__ == 1)) || \
761 (defined (__ARM_ARCH_8M_BASE__ ) && (__ARM_ARCH_8M_BASE__ == 1)) )
768__STATIC_FORCEINLINE uint8_t __LDAB(
volatile uint8_t *ptr)
772 __ASM
volatile (
"ldab %0, %1" :
"=r" (result) :
"Q" (*ptr) :
"memory" );
773 return ((uint8_t) result);
783__STATIC_FORCEINLINE uint16_t __LDAH(
volatile uint16_t *ptr)
787 __ASM
volatile (
"ldah %0, %1" :
"=r" (result) :
"Q" (*ptr) :
"memory" );
788 return ((uint16_t) result);
798__STATIC_FORCEINLINE uint32_t __LDA(
volatile uint32_t *ptr)
802 __ASM
volatile (
"lda %0, %1" :
"=r" (result) :
"Q" (*ptr) :
"memory" );
813__STATIC_FORCEINLINE
void __STLB(uint8_t value,
volatile uint8_t *ptr)
815 __ASM
volatile (
"stlb %1, %0" :
"=Q" (*ptr) :
"r" ((uint32_t)value) :
"memory" );
825__STATIC_FORCEINLINE
void __STLH(uint16_t value,
volatile uint16_t *ptr)
827 __ASM
volatile (
"stlh %1, %0" :
"=Q" (*ptr) :
"r" ((uint32_t)value) :
"memory" );
837__STATIC_FORCEINLINE
void __STL(uint32_t value,
volatile uint32_t *ptr)
839 __ASM
volatile (
"stl %1, %0" :
"=Q" (*ptr) :
"r" ((uint32_t)value) :
"memory" );
849__STATIC_FORCEINLINE uint8_t __LDAEXB(
volatile uint8_t *ptr)
853 __ASM
volatile (
"ldaexb %0, %1" :
"=r" (result) :
"Q" (*ptr) :
"memory" );
854 return ((uint8_t) result);
864__STATIC_FORCEINLINE uint16_t __LDAEXH(
volatile uint16_t *ptr)
868 __ASM
volatile (
"ldaexh %0, %1" :
"=r" (result) :
"Q" (*ptr) :
"memory" );
869 return ((uint16_t) result);
879__STATIC_FORCEINLINE uint32_t __LDAEX(
volatile uint32_t *ptr)
883 __ASM
volatile (
"ldaex %0, %1" :
"=r" (result) :
"Q" (*ptr) :
"memory" );
896__STATIC_FORCEINLINE uint32_t __STLEXB(uint8_t value,
volatile uint8_t *ptr)
900 __ASM
volatile (
"stlexb %0, %2, %1" :
"=&r" (result),
"=Q" (*ptr) :
"r" ((uint32_t)value) :
"memory" );
913__STATIC_FORCEINLINE uint32_t __STLEXH(uint16_t value,
volatile uint16_t *ptr)
917 __ASM
volatile (
"stlexh %0, %2, %1" :
"=&r" (result),
"=Q" (*ptr) :
"r" ((uint32_t)value) :
"memory" );
930__STATIC_FORCEINLINE uint32_t __STLEX(uint32_t value,
volatile uint32_t *ptr)
934 __ASM
volatile (
"stlex %0, %2, %1" :
"=&r" (result),
"=Q" (*ptr) :
"r" ((uint32_t)value) :
"memory" );
957 __ASM
volatile (
"cpsie i" : : :
"memory");
968 __ASM
volatile (
"cpsid i" : : :
"memory");
981 __ASM
volatile (
"MRS %0, control" :
"=r" (result) );
986#if (defined (__ARM_FEATURE_CMSE ) && (__ARM_FEATURE_CMSE == 3))
992__STATIC_FORCEINLINE uint32_t __TZ_get_CONTROL_NS(
void)
996 __ASM
volatile (
"MRS %0, control_ns" :
"=r" (result) );
1009 __ASM
volatile (
"MSR control, %0" : :
"r" (control) :
"memory");
1014#if (defined (__ARM_FEATURE_CMSE ) && (__ARM_FEATURE_CMSE == 3))
1020__STATIC_FORCEINLINE
void __TZ_set_CONTROL_NS(uint32_t control)
1022 __ASM
volatile (
"MSR control_ns, %0" : :
"r" (control) :
"memory");
1033__STATIC_FORCEINLINE uint32_t
__get_IPSR(
void)
1037 __ASM
volatile (
"MRS %0, ipsr" :
"=r" (result) );
1047__STATIC_FORCEINLINE uint32_t
__get_APSR(
void)
1051 __ASM
volatile (
"MRS %0, apsr" :
"=r" (result) );
1061__STATIC_FORCEINLINE uint32_t
__get_xPSR(
void)
1065 __ASM
volatile (
"MRS %0, xpsr" :
"=r" (result) );
1075__STATIC_FORCEINLINE uint32_t
__get_PSP(
void)
1079 __ASM
volatile (
"MRS %0, psp" :
"=r" (result) );
1084#if (defined (__ARM_FEATURE_CMSE ) && (__ARM_FEATURE_CMSE == 3))
1090__STATIC_FORCEINLINE uint32_t __TZ_get_PSP_NS(
void)
1094 __ASM
volatile (
"MRS %0, psp_ns" :
"=r" (result) );
1105__STATIC_FORCEINLINE
void __set_PSP(uint32_t topOfProcStack)
1107 __ASM
volatile (
"MSR psp, %0" : :
"r" (topOfProcStack) : );
1111#if (defined (__ARM_FEATURE_CMSE ) && (__ARM_FEATURE_CMSE == 3))
1117__STATIC_FORCEINLINE
void __TZ_set_PSP_NS(uint32_t topOfProcStack)
1119 __ASM
volatile (
"MSR psp_ns, %0" : :
"r" (topOfProcStack) : );
1129__STATIC_FORCEINLINE uint32_t
__get_MSP(
void)
1133 __ASM
volatile (
"MRS %0, msp" :
"=r" (result) );
1138#if (defined (__ARM_FEATURE_CMSE ) && (__ARM_FEATURE_CMSE == 3))
1144__STATIC_FORCEINLINE uint32_t __TZ_get_MSP_NS(
void)
1148 __ASM
volatile (
"MRS %0, msp_ns" :
"=r" (result) );
1159__STATIC_FORCEINLINE
void __set_MSP(uint32_t topOfMainStack)
1161 __ASM
volatile (
"MSR msp, %0" : :
"r" (topOfMainStack) : );
1165#if (defined (__ARM_FEATURE_CMSE ) && (__ARM_FEATURE_CMSE == 3))
1171__STATIC_FORCEINLINE
void __TZ_set_MSP_NS(uint32_t topOfMainStack)
1173 __ASM
volatile (
"MSR msp_ns, %0" : :
"r" (topOfMainStack) : );
1178#if (defined (__ARM_FEATURE_CMSE ) && (__ARM_FEATURE_CMSE == 3))
1184__STATIC_FORCEINLINE uint32_t __TZ_get_SP_NS(
void)
1188 __ASM
volatile (
"MRS %0, sp_ns" :
"=r" (result) );
1198__STATIC_FORCEINLINE
void __TZ_set_SP_NS(uint32_t topOfStack)
1200 __ASM
volatile (
"MSR sp_ns, %0" : :
"r" (topOfStack) : );
1214 __ASM
volatile (
"MRS %0, primask" :
"=r" (result) );
1219#if (defined (__ARM_FEATURE_CMSE ) && (__ARM_FEATURE_CMSE == 3))
1225__STATIC_FORCEINLINE uint32_t __TZ_get_PRIMASK_NS(
void)
1229 __ASM
volatile (
"MRS %0, primask_ns" :
"=r" (result) );
1242 __ASM
volatile (
"MSR primask, %0" : :
"r" (priMask) :
"memory");
1246#if (defined (__ARM_FEATURE_CMSE ) && (__ARM_FEATURE_CMSE == 3))
1252__STATIC_FORCEINLINE
void __TZ_set_PRIMASK_NS(uint32_t priMask)
1254 __ASM
volatile (
"MSR primask_ns, %0" : :
"r" (priMask) :
"memory");
1259#if ((defined (__ARM_ARCH_7M__ ) && (__ARM_ARCH_7M__ == 1)) || \
1260 (defined (__ARM_ARCH_7EM__ ) && (__ARM_ARCH_7EM__ == 1)) || \
1261 (defined (__ARM_ARCH_8M_MAIN__ ) && (__ARM_ARCH_8M_MAIN__ == 1)) )
1267__STATIC_FORCEINLINE
void __enable_fault_irq(
void)
1269 __ASM
volatile (
"cpsie f" : : :
"memory");
1278__STATIC_FORCEINLINE
void __disable_fault_irq(
void)
1280 __ASM
volatile (
"cpsid f" : : :
"memory");
1289__STATIC_FORCEINLINE uint32_t __get_BASEPRI(
void)
1293 __ASM
volatile (
"MRS %0, basepri" :
"=r" (result) );
1298#if (defined (__ARM_FEATURE_CMSE ) && (__ARM_FEATURE_CMSE == 3))
1304__STATIC_FORCEINLINE uint32_t __TZ_get_BASEPRI_NS(
void)
1308 __ASM
volatile (
"MRS %0, basepri_ns" :
"=r" (result) );
1319__STATIC_FORCEINLINE
void __set_BASEPRI(uint32_t basePri)
1321 __ASM
volatile (
"MSR basepri, %0" : :
"r" (basePri) :
"memory");
1325#if (defined (__ARM_FEATURE_CMSE ) && (__ARM_FEATURE_CMSE == 3))
1331__STATIC_FORCEINLINE
void __TZ_set_BASEPRI_NS(uint32_t basePri)
1333 __ASM
volatile (
"MSR basepri_ns, %0" : :
"r" (basePri) :
"memory");
1344__STATIC_FORCEINLINE
void __set_BASEPRI_MAX(uint32_t basePri)
1346 __ASM
volatile (
"MSR basepri_max, %0" : :
"r" (basePri) :
"memory");
1355__STATIC_FORCEINLINE uint32_t __get_FAULTMASK(
void)
1359 __ASM
volatile (
"MRS %0, faultmask" :
"=r" (result) );
1364#if (defined (__ARM_FEATURE_CMSE ) && (__ARM_FEATURE_CMSE == 3))
1370__STATIC_FORCEINLINE uint32_t __TZ_get_FAULTMASK_NS(
void)
1374 __ASM
volatile (
"MRS %0, faultmask_ns" :
"=r" (result) );
1385__STATIC_FORCEINLINE
void __set_FAULTMASK(uint32_t faultMask)
1387 __ASM
volatile (
"MSR faultmask, %0" : :
"r" (faultMask) :
"memory");
1391#if (defined (__ARM_FEATURE_CMSE ) && (__ARM_FEATURE_CMSE == 3))
1397__STATIC_FORCEINLINE
void __TZ_set_FAULTMASK_NS(uint32_t faultMask)
1399 __ASM
volatile (
"MSR faultmask_ns, %0" : :
"r" (faultMask) :
"memory");
1408#if ((defined (__ARM_ARCH_8M_MAIN__ ) && (__ARM_ARCH_8M_MAIN__ == 1)) || \
1409 (defined (__ARM_ARCH_8M_BASE__ ) && (__ARM_ARCH_8M_BASE__ == 1)) )
1420__STATIC_FORCEINLINE uint32_t __get_PSPLIM(
void)
1422#if (!(defined (__ARM_ARCH_8M_MAIN__ ) && (__ARM_ARCH_8M_MAIN__ == 1)) && \
1423 (!defined (__ARM_FEATURE_CMSE) || (__ARM_FEATURE_CMSE < 3)))
1428 __ASM
volatile (
"MRS %0, psplim" :
"=r" (result) );
1433#if (defined (__ARM_FEATURE_CMSE) && (__ARM_FEATURE_CMSE == 3))
1442__STATIC_FORCEINLINE uint32_t __TZ_get_PSPLIM_NS(
void)
1444#if (!(defined (__ARM_ARCH_8M_MAIN__ ) && (__ARM_ARCH_8M_MAIN__ == 1)))
1449 __ASM
volatile (
"MRS %0, psplim_ns" :
"=r" (result) );
1465__STATIC_FORCEINLINE
void __set_PSPLIM(uint32_t ProcStackPtrLimit)
1467#if (!(defined (__ARM_ARCH_8M_MAIN__ ) && (__ARM_ARCH_8M_MAIN__ == 1)) && \
1468 (!defined (__ARM_FEATURE_CMSE) || (__ARM_FEATURE_CMSE < 3)))
1470 (void)ProcStackPtrLimit;
1472 __ASM
volatile (
"MSR psplim, %0" : :
"r" (ProcStackPtrLimit));
1477#if (defined (__ARM_FEATURE_CMSE ) && (__ARM_FEATURE_CMSE == 3))
1486__STATIC_FORCEINLINE
void __TZ_set_PSPLIM_NS(uint32_t ProcStackPtrLimit)
1488#if (!(defined (__ARM_ARCH_8M_MAIN__ ) && (__ARM_ARCH_8M_MAIN__ == 1)))
1490 (void)ProcStackPtrLimit;
1492 __ASM
volatile (
"MSR psplim_ns, %0\n" : :
"r" (ProcStackPtrLimit));
1507__STATIC_FORCEINLINE uint32_t __get_MSPLIM(
void)
1509#if (!(defined (__ARM_ARCH_8M_MAIN__ ) && (__ARM_ARCH_8M_MAIN__ == 1)) && \
1510 (!defined (__ARM_FEATURE_CMSE) || (__ARM_FEATURE_CMSE < 3)))
1515 __ASM
volatile (
"MRS %0, msplim" :
"=r" (result) );
1521#if (defined (__ARM_FEATURE_CMSE ) && (__ARM_FEATURE_CMSE == 3))
1530__STATIC_FORCEINLINE uint32_t __TZ_get_MSPLIM_NS(
void)
1532#if (!(defined (__ARM_ARCH_8M_MAIN__ ) && (__ARM_ARCH_8M_MAIN__ == 1)))
1537 __ASM
volatile (
"MRS %0, msplim_ns" :
"=r" (result) );
1553__STATIC_FORCEINLINE
void __set_MSPLIM(uint32_t MainStackPtrLimit)
1555#if (!(defined (__ARM_ARCH_8M_MAIN__ ) && (__ARM_ARCH_8M_MAIN__ == 1)) && \
1556 (!defined (__ARM_FEATURE_CMSE) || (__ARM_FEATURE_CMSE < 3)))
1558 (void)MainStackPtrLimit;
1560 __ASM
volatile (
"MSR msplim, %0" : :
"r" (MainStackPtrLimit));
1565#if (defined (__ARM_FEATURE_CMSE ) && (__ARM_FEATURE_CMSE == 3))
1574__STATIC_FORCEINLINE
void __TZ_set_MSPLIM_NS(uint32_t MainStackPtrLimit)
1576#if (!(defined (__ARM_ARCH_8M_MAIN__ ) && (__ARM_ARCH_8M_MAIN__ == 1)))
1578 (void)MainStackPtrLimit;
1580 __ASM
volatile (
"MSR msplim_ns, %0" : :
"r" (MainStackPtrLimit));
1596#if ((defined (__FPU_PRESENT) && (__FPU_PRESENT == 1U)) && \
1597 (defined (__FPU_USED ) && (__FPU_USED == 1U)) )
1598#if __has_builtin(__builtin_arm_get_fpscr)
1602 return __builtin_arm_get_fpscr();
1606 __ASM
volatile (
"VMRS %0, fpscr" :
"=r" (result) );
1620__STATIC_FORCEINLINE
void __set_FPSCR(uint32_t fpscr)
1622#if ((defined (__FPU_PRESENT) && (__FPU_PRESENT == 1U)) && \
1623 (defined (__FPU_USED ) && (__FPU_USED == 1U)) )
1624#if __has_builtin(__builtin_arm_set_fpscr)
1628 __builtin_arm_set_fpscr(fpscr);
1630 __ASM
volatile (
"VMSR fpscr, %0" : :
"r" (fpscr) :
"vfpcc",
"memory");
1647#if (defined (__ARM_FEATURE_DSP) && (__ARM_FEATURE_DSP == 1))
1649__STATIC_FORCEINLINE uint32_t __SADD8(uint32_t op1, uint32_t op2)
1653 __ASM
volatile (
"sadd8 %0, %1, %2" :
"=r" (result) :
"r" (op1),
"r" (op2) );
1657__STATIC_FORCEINLINE uint32_t __QADD8(uint32_t op1, uint32_t op2)
1661 __ASM (
"qadd8 %0, %1, %2" :
"=r" (result) :
"r" (op1),
"r" (op2) );
1665__STATIC_FORCEINLINE uint32_t __SHADD8(uint32_t op1, uint32_t op2)
1669 __ASM (
"shadd8 %0, %1, %2" :
"=r" (result) :
"r" (op1),
"r" (op2) );
1673__STATIC_FORCEINLINE uint32_t __UADD8(uint32_t op1, uint32_t op2)
1677 __ASM
volatile (
"uadd8 %0, %1, %2" :
"=r" (result) :
"r" (op1),
"r" (op2) );
1681__STATIC_FORCEINLINE uint32_t __UQADD8(uint32_t op1, uint32_t op2)
1685 __ASM (
"uqadd8 %0, %1, %2" :
"=r" (result) :
"r" (op1),
"r" (op2) );
1689__STATIC_FORCEINLINE uint32_t __UHADD8(uint32_t op1, uint32_t op2)
1693 __ASM (
"uhadd8 %0, %1, %2" :
"=r" (result) :
"r" (op1),
"r" (op2) );
1698__STATIC_FORCEINLINE uint32_t __SSUB8(uint32_t op1, uint32_t op2)
1702 __ASM
volatile (
"ssub8 %0, %1, %2" :
"=r" (result) :
"r" (op1),
"r" (op2) );
1706__STATIC_FORCEINLINE uint32_t __QSUB8(uint32_t op1, uint32_t op2)
1710 __ASM (
"qsub8 %0, %1, %2" :
"=r" (result) :
"r" (op1),
"r" (op2) );
1714__STATIC_FORCEINLINE uint32_t __SHSUB8(uint32_t op1, uint32_t op2)
1718 __ASM (
"shsub8 %0, %1, %2" :
"=r" (result) :
"r" (op1),
"r" (op2) );
1722__STATIC_FORCEINLINE uint32_t __USUB8(uint32_t op1, uint32_t op2)
1726 __ASM
volatile (
"usub8 %0, %1, %2" :
"=r" (result) :
"r" (op1),
"r" (op2) );
1730__STATIC_FORCEINLINE uint32_t __UQSUB8(uint32_t op1, uint32_t op2)
1734 __ASM (
"uqsub8 %0, %1, %2" :
"=r" (result) :
"r" (op1),
"r" (op2) );
1738__STATIC_FORCEINLINE uint32_t __UHSUB8(uint32_t op1, uint32_t op2)
1742 __ASM (
"uhsub8 %0, %1, %2" :
"=r" (result) :
"r" (op1),
"r" (op2) );
1747__STATIC_FORCEINLINE uint32_t __SADD16(uint32_t op1, uint32_t op2)
1751 __ASM
volatile (
"sadd16 %0, %1, %2" :
"=r" (result) :
"r" (op1),
"r" (op2) );
1755__STATIC_FORCEINLINE uint32_t __QADD16(uint32_t op1, uint32_t op2)
1759 __ASM (
"qadd16 %0, %1, %2" :
"=r" (result) :
"r" (op1),
"r" (op2) );
1763__STATIC_FORCEINLINE uint32_t __SHADD16(uint32_t op1, uint32_t op2)
1767 __ASM (
"shadd16 %0, %1, %2" :
"=r" (result) :
"r" (op1),
"r" (op2) );
1771__STATIC_FORCEINLINE uint32_t __UADD16(uint32_t op1, uint32_t op2)
1775 __ASM
volatile (
"uadd16 %0, %1, %2" :
"=r" (result) :
"r" (op1),
"r" (op2) );
1779__STATIC_FORCEINLINE uint32_t __UQADD16(uint32_t op1, uint32_t op2)
1783 __ASM (
"uqadd16 %0, %1, %2" :
"=r" (result) :
"r" (op1),
"r" (op2) );
1787__STATIC_FORCEINLINE uint32_t __UHADD16(uint32_t op1, uint32_t op2)
1791 __ASM (
"uhadd16 %0, %1, %2" :
"=r" (result) :
"r" (op1),
"r" (op2) );
1795__STATIC_FORCEINLINE uint32_t __SSUB16(uint32_t op1, uint32_t op2)
1799 __ASM
volatile (
"ssub16 %0, %1, %2" :
"=r" (result) :
"r" (op1),
"r" (op2) );
1803__STATIC_FORCEINLINE uint32_t __QSUB16(uint32_t op1, uint32_t op2)
1807 __ASM (
"qsub16 %0, %1, %2" :
"=r" (result) :
"r" (op1),
"r" (op2) );
1811__STATIC_FORCEINLINE uint32_t __SHSUB16(uint32_t op1, uint32_t op2)
1815 __ASM (
"shsub16 %0, %1, %2" :
"=r" (result) :
"r" (op1),
"r" (op2) );
1819__STATIC_FORCEINLINE uint32_t __USUB16(uint32_t op1, uint32_t op2)
1823 __ASM
volatile (
"usub16 %0, %1, %2" :
"=r" (result) :
"r" (op1),
"r" (op2) );
1827__STATIC_FORCEINLINE uint32_t __UQSUB16(uint32_t op1, uint32_t op2)
1831 __ASM (
"uqsub16 %0, %1, %2" :
"=r" (result) :
"r" (op1),
"r" (op2) );
1835__STATIC_FORCEINLINE uint32_t __UHSUB16(uint32_t op1, uint32_t op2)
1839 __ASM (
"uhsub16 %0, %1, %2" :
"=r" (result) :
"r" (op1),
"r" (op2) );
1843__STATIC_FORCEINLINE uint32_t __SASX(uint32_t op1, uint32_t op2)
1847 __ASM
volatile (
"sasx %0, %1, %2" :
"=r" (result) :
"r" (op1),
"r" (op2) );
1851__STATIC_FORCEINLINE uint32_t __QASX(uint32_t op1, uint32_t op2)
1855 __ASM (
"qasx %0, %1, %2" :
"=r" (result) :
"r" (op1),
"r" (op2) );
1859__STATIC_FORCEINLINE uint32_t __SHASX(uint32_t op1, uint32_t op2)
1863 __ASM (
"shasx %0, %1, %2" :
"=r" (result) :
"r" (op1),
"r" (op2) );
1867__STATIC_FORCEINLINE uint32_t __UASX(uint32_t op1, uint32_t op2)
1871 __ASM
volatile (
"uasx %0, %1, %2" :
"=r" (result) :
"r" (op1),
"r" (op2) );
1875__STATIC_FORCEINLINE uint32_t __UQASX(uint32_t op1, uint32_t op2)
1879 __ASM (
"uqasx %0, %1, %2" :
"=r" (result) :
"r" (op1),
"r" (op2) );
1883__STATIC_FORCEINLINE uint32_t __UHASX(uint32_t op1, uint32_t op2)
1887 __ASM (
"uhasx %0, %1, %2" :
"=r" (result) :
"r" (op1),
"r" (op2) );
1891__STATIC_FORCEINLINE uint32_t __SSAX(uint32_t op1, uint32_t op2)
1895 __ASM
volatile (
"ssax %0, %1, %2" :
"=r" (result) :
"r" (op1),
"r" (op2) );
1899__STATIC_FORCEINLINE uint32_t __QSAX(uint32_t op1, uint32_t op2)
1903 __ASM (
"qsax %0, %1, %2" :
"=r" (result) :
"r" (op1),
"r" (op2) );
1907__STATIC_FORCEINLINE uint32_t __SHSAX(uint32_t op1, uint32_t op2)
1911 __ASM (
"shsax %0, %1, %2" :
"=r" (result) :
"r" (op1),
"r" (op2) );
1915__STATIC_FORCEINLINE uint32_t __USAX(uint32_t op1, uint32_t op2)
1919 __ASM
volatile (
"usax %0, %1, %2" :
"=r" (result) :
"r" (op1),
"r" (op2) );
1923__STATIC_FORCEINLINE uint32_t __UQSAX(uint32_t op1, uint32_t op2)
1927 __ASM (
"uqsax %0, %1, %2" :
"=r" (result) :
"r" (op1),
"r" (op2) );
1931__STATIC_FORCEINLINE uint32_t __UHSAX(uint32_t op1, uint32_t op2)
1935 __ASM (
"uhsax %0, %1, %2" :
"=r" (result) :
"r" (op1),
"r" (op2) );
1939__STATIC_FORCEINLINE uint32_t __USAD8(uint32_t op1, uint32_t op2)
1943 __ASM (
"usad8 %0, %1, %2" :
"=r" (result) :
"r" (op1),
"r" (op2) );
1947__STATIC_FORCEINLINE uint32_t __USADA8(uint32_t op1, uint32_t op2, uint32_t op3)
1951 __ASM (
"usada8 %0, %1, %2, %3" :
"=r" (result) :
"r" (op1),
"r" (op2),
"r" (op3) );
1955#define __SSAT16(ARG1, ARG2) \
1958 int32_t __RES, __ARG1 = (ARG1); \
1959 __ASM volatile ("ssat16 %0, %1, %2" : "=r" (__RES) : "I" (ARG2), "r" (__ARG1) : "cc" ); \
1963#define __USAT16(ARG1, ARG2) \
1966 uint32_t __RES, __ARG1 = (ARG1); \
1967 __ASM volatile ("usat16 %0, %1, %2" : "=r" (__RES) : "I" (ARG2), "r" (__ARG1) : "cc" ); \
1971__STATIC_FORCEINLINE uint32_t __UXTB16(uint32_t op1)
1975 __ASM (
"uxtb16 %0, %1" :
"=r" (result) :
"r" (op1));
1979__STATIC_FORCEINLINE uint32_t __UXTAB16(uint32_t op1, uint32_t op2)
1983 __ASM (
"uxtab16 %0, %1, %2" :
"=r" (result) :
"r" (op1),
"r" (op2) );
1987__STATIC_FORCEINLINE uint32_t __SXTB16(uint32_t op1)
1991 __ASM (
"sxtb16 %0, %1" :
"=r" (result) :
"r" (op1));
1995__STATIC_FORCEINLINE uint32_t __SXTB16_RORn(uint32_t op1, uint32_t rotate)
1998 if (__builtin_constant_p(rotate) && ((rotate == 8U) || (rotate == 16U) || (rotate == 24U))) {
1999 __ASM
volatile (
"sxtb16 %0, %1, ROR %2" :
"=r" (result) :
"r" (op1),
"i" (rotate) );
2001 result = __SXTB16(
__ROR(op1, rotate)) ;
2006__STATIC_FORCEINLINE uint32_t __SXTAB16(uint32_t op1, uint32_t op2)
2010 __ASM (
"sxtab16 %0, %1, %2" :
"=r" (result) :
"r" (op1),
"r" (op2) );
2014__STATIC_FORCEINLINE uint32_t __SXTAB16_RORn(uint32_t op1, uint32_t op2, uint32_t rotate)
2017 if (__builtin_constant_p(rotate) && ((rotate == 8U) || (rotate == 16U) || (rotate == 24U))) {
2018 __ASM
volatile (
"sxtab16 %0, %1, %2, ROR %3" :
"=r" (result) :
"r" (op1) ,
"r" (op2) ,
"i" (rotate));
2020 result = __SXTAB16(op1,
__ROR(op2, rotate));
2026__STATIC_FORCEINLINE uint32_t __SMUAD (uint32_t op1, uint32_t op2)
2030 __ASM
volatile (
"smuad %0, %1, %2" :
"=r" (result) :
"r" (op1),
"r" (op2) );
2034__STATIC_FORCEINLINE uint32_t __SMUADX (uint32_t op1, uint32_t op2)
2038 __ASM
volatile (
"smuadx %0, %1, %2" :
"=r" (result) :
"r" (op1),
"r" (op2) );
2042__STATIC_FORCEINLINE uint32_t __SMLAD (uint32_t op1, uint32_t op2, uint32_t op3)
2046 __ASM
volatile (
"smlad %0, %1, %2, %3" :
"=r" (result) :
"r" (op1),
"r" (op2),
"r" (op3) );
2050__STATIC_FORCEINLINE uint32_t __SMLADX (uint32_t op1, uint32_t op2, uint32_t op3)
2054 __ASM
volatile (
"smladx %0, %1, %2, %3" :
"=r" (result) :
"r" (op1),
"r" (op2),
"r" (op3) );
2058__STATIC_FORCEINLINE uint64_t __SMLALD (uint32_t op1, uint32_t op2, uint64_t acc)
2067 __ASM
volatile (
"smlald %0, %1, %2, %3" :
"=r" (llr.w32[0]),
"=r" (llr.w32[1]):
"r" (op1),
"r" (op2) ,
"0" (llr.w32[0]),
"1" (llr.w32[1]) );
2069 __ASM
volatile (
"smlald %0, %1, %2, %3" :
"=r" (llr.w32[1]),
"=r" (llr.w32[0]):
"r" (op1),
"r" (op2) ,
"0" (llr.w32[1]),
"1" (llr.w32[0]) );
2075__STATIC_FORCEINLINE uint64_t __SMLALDX (uint32_t op1, uint32_t op2, uint64_t acc)
2084 __ASM
volatile (
"smlaldx %0, %1, %2, %3" :
"=r" (llr.w32[0]),
"=r" (llr.w32[1]):
"r" (op1),
"r" (op2) ,
"0" (llr.w32[0]),
"1" (llr.w32[1]) );
2086 __ASM
volatile (
"smlaldx %0, %1, %2, %3" :
"=r" (llr.w32[1]),
"=r" (llr.w32[0]):
"r" (op1),
"r" (op2) ,
"0" (llr.w32[1]),
"1" (llr.w32[0]) );
2092__STATIC_FORCEINLINE uint32_t __SMUSD (uint32_t op1, uint32_t op2)
2096 __ASM
volatile (
"smusd %0, %1, %2" :
"=r" (result) :
"r" (op1),
"r" (op2) );
2100__STATIC_FORCEINLINE uint32_t __SMUSDX (uint32_t op1, uint32_t op2)
2104 __ASM
volatile (
"smusdx %0, %1, %2" :
"=r" (result) :
"r" (op1),
"r" (op2) );
2108__STATIC_FORCEINLINE uint32_t __SMLSD (uint32_t op1, uint32_t op2, uint32_t op3)
2112 __ASM
volatile (
"smlsd %0, %1, %2, %3" :
"=r" (result) :
"r" (op1),
"r" (op2),
"r" (op3) );
2116__STATIC_FORCEINLINE uint32_t __SMLSDX (uint32_t op1, uint32_t op2, uint32_t op3)
2120 __ASM
volatile (
"smlsdx %0, %1, %2, %3" :
"=r" (result) :
"r" (op1),
"r" (op2),
"r" (op3) );
2124__STATIC_FORCEINLINE uint64_t __SMLSLD (uint32_t op1, uint32_t op2, uint64_t acc)
2133 __ASM
volatile (
"smlsld %0, %1, %2, %3" :
"=r" (llr.w32[0]),
"=r" (llr.w32[1]):
"r" (op1),
"r" (op2) ,
"0" (llr.w32[0]),
"1" (llr.w32[1]) );
2135 __ASM
volatile (
"smlsld %0, %1, %2, %3" :
"=r" (llr.w32[1]),
"=r" (llr.w32[0]):
"r" (op1),
"r" (op2) ,
"0" (llr.w32[1]),
"1" (llr.w32[0]) );
2141__STATIC_FORCEINLINE uint64_t __SMLSLDX (uint32_t op1, uint32_t op2, uint64_t acc)
2150 __ASM
volatile (
"smlsldx %0, %1, %2, %3" :
"=r" (llr.w32[0]),
"=r" (llr.w32[1]):
"r" (op1),
"r" (op2) ,
"0" (llr.w32[0]),
"1" (llr.w32[1]) );
2152 __ASM
volatile (
"smlsldx %0, %1, %2, %3" :
"=r" (llr.w32[1]),
"=r" (llr.w32[0]):
"r" (op1),
"r" (op2) ,
"0" (llr.w32[1]),
"1" (llr.w32[0]) );
2158__STATIC_FORCEINLINE uint32_t __SEL (uint32_t op1, uint32_t op2)
2162 __ASM
volatile (
"sel %0, %1, %2" :
"=r" (result) :
"r" (op1),
"r" (op2) );
2166__STATIC_FORCEINLINE int32_t __QADD( int32_t op1, int32_t op2)
2170 __ASM
volatile (
"qadd %0, %1, %2" :
"=r" (result) :
"r" (op1),
"r" (op2) );
2174__STATIC_FORCEINLINE int32_t __QSUB( int32_t op1, int32_t op2)
2178 __ASM
volatile (
"qsub %0, %1, %2" :
"=r" (result) :
"r" (op1),
"r" (op2) );
2183#define __PKHBT(ARG1,ARG2,ARG3) \
2186 uint32_t __RES, __ARG1 = (ARG1), __ARG2 = (ARG2); \
2187 __ASM ("pkhbt %0, %1, %2, lsl %3" : "=r" (__RES) : "r" (__ARG1), "r" (__ARG2), "I" (ARG3) ); \
2191#define __PKHTB(ARG1,ARG2,ARG3) \
2194 uint32_t __RES, __ARG1 = (ARG1), __ARG2 = (ARG2); \
2196 __ASM ("pkhtb %0, %1, %2" : "=r" (__RES) : "r" (__ARG1), "r" (__ARG2) ); \
2198 __ASM ("pkhtb %0, %1, %2, asr %3" : "=r" (__RES) : "r" (__ARG1), "r" (__ARG2), "I" (ARG3) ); \
2203__STATIC_FORCEINLINE int32_t __SMMLA (int32_t op1, int32_t op2, int32_t op3)
2207 __ASM (
"smmla %0, %1, %2, %3" :
"=r" (result):
"r" (op1),
"r" (op2),
"r" (op3) );
2215#pragma GCC diagnostic pop
__STATIC_FORCEINLINE __NO_RETURN void __cmsis_start(void)
Initializes data and bss sections.
#define __REV
Reverse byte order (32 bit)
__STATIC_FORCEINLINE int32_t __SSAT(int32_t val, uint32_t sat)
Signed Saturate.
#define __CLZ
Count leading zeros.
__STATIC_FORCEINLINE uint32_t __USAT(int32_t val, uint32_t sat)
Unsigned Saturate.
__STATIC_FORCEINLINE void __DSB(void)
Data Synchronization Barrier.
#define __ROR
Rotate Right in unsigned value (32 bit)
__STATIC_FORCEINLINE void __DMB(void)
Data Memory Barrier.
#define __RBIT
Reverse bit order of value.
#define __REV16(value)
Reverse byte order (16 bit)
__STATIC_FORCEINLINE void __ISB(void)
Instruction Synchronization Barrier.
#define __REVSH(value)
Reverse byte order (16 bit) with sign extension.
__STATIC_INLINE void __set_CONTROL(uint32_t control)
Set Control Register.
__STATIC_INLINE void __set_MSP(uint32_t topOfMainStack)
Set Main Stack Pointer.
__STATIC_INLINE void __set_PSP(uint32_t topOfProcStack)
Set Process Stack Pointer.
#define __get_FPSCR()
Get FPSCR.
__STATIC_INLINE uint32_t __get_PRIMASK(void)
Get Priority Mask.
#define __enable_irq
Enable IRQ Interrupts.
__STATIC_INLINE uint32_t __get_MSP(void)
Get Main Stack Pointer.
__STATIC_INLINE uint32_t __get_CONTROL(void)
Get Control Register.
__STATIC_INLINE uint32_t __get_PSP(void)
Get Process Stack Pointer.
#define __disable_irq
Disable IRQ Interrupts.
#define __set_FPSCR(fpscr)
Set FPSCR.
__STATIC_INLINE uint32_t __get_APSR(void)
Get APSR Register.
#define __get_xPSR
Get xPSR Register.
__STATIC_INLINE uint32_t __get_IPSR(void)
Get IPSR Register.
__STATIC_INLINE void __set_PRIMASK(uint32_t priMask)
Set Priority Mask.
void __attribute__((noreturn))(*rom_reset_usb_boot_fn)(uint32_t
Reboot the device into BOOTSEL mode.