YAHAL
Yet Another Hardware Abstraction Library
Loading...
Searching...
No Matches
cmsis_armclang_ltm.h
Go to the documentation of this file.
1/**************************************************************************/
7/*
8 * Copyright (c) 2018-2023 Arm Limited. All rights reserved.
9 *
10 * SPDX-License-Identifier: Apache-2.0
11 *
12 * Licensed under the Apache License, Version 2.0 (the License); you may
13 * not use this file except in compliance with the License.
14 * You may obtain a copy of the License at
15 *
16 * www.apache.org/licenses/LICENSE-2.0
17 *
18 * Unless required by applicable law or agreed to in writing, software
19 * distributed under the License is distributed on an AS IS BASIS, WITHOUT
20 * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
21 * See the License for the specific language governing permissions and
22 * limitations under the License.
23 */
24
25/*lint -esym(9058, IRQn)*/ /* disable MISRA 2012 Rule 2.4 for IRQn */
26
27#ifndef __CMSIS_ARMCLANG_H
28#define __CMSIS_ARMCLANG_H
29
30#pragma clang system_header /* treat file as system include file */
31
32/* CMSIS compiler specific defines */
33#ifndef __ASM
34 #define __ASM __asm
35#endif
36#ifndef __INLINE
37 #define __INLINE __inline
38#endif
39#ifndef __STATIC_INLINE
40 #define __STATIC_INLINE static __inline
41#endif
42#ifndef __STATIC_FORCEINLINE
43 #define __STATIC_FORCEINLINE __attribute__((always_inline)) static __inline
44#endif
45#ifndef __NO_RETURN
46 #define __NO_RETURN __attribute__((__noreturn__))
47#endif
48#ifndef __USED
49 #define __USED __attribute__((used))
50#endif
51#ifndef __WEAK
52 #define __WEAK __attribute__((weak))
53#endif
54#ifndef __PACKED
55 #define __PACKED __attribute__((packed, aligned(1)))
56#endif
57#ifndef __PACKED_STRUCT
58 #define __PACKED_STRUCT struct __attribute__((packed, aligned(1)))
59#endif
60#ifndef __PACKED_UNION
61 #define __PACKED_UNION union __attribute__((packed, aligned(1)))
62#endif
63#ifndef __UNALIGNED_UINT32 /* deprecated */
64 #pragma clang diagnostic push
65 #pragma clang diagnostic ignored "-Wpacked"
66/*lint -esym(9058, T_UINT32)*/ /* disable MISRA 2012 Rule 2.4 for T_UINT32 */
67 struct __attribute__((packed)) T_UINT32 { uint32_t v; };
68 #pragma clang diagnostic pop
69 #define __UNALIGNED_UINT32(x) (((struct T_UINT32 *)(x))->v)
70#endif
71#ifndef __UNALIGNED_UINT16_WRITE
72 #pragma clang diagnostic push
73 #pragma clang diagnostic ignored "-Wpacked"
74/*lint -esym(9058, T_UINT16_WRITE)*/ /* disable MISRA 2012 Rule 2.4 for T_UINT16_WRITE */
75 __PACKED_STRUCT T_UINT16_WRITE { uint16_t v; };
76 #pragma clang diagnostic pop
77 #define __UNALIGNED_UINT16_WRITE(addr, val) (void)((((struct T_UINT16_WRITE *)(void *)(addr))->v) = (val))
78#endif
79#ifndef __UNALIGNED_UINT16_READ
80 #pragma clang diagnostic push
81 #pragma clang diagnostic ignored "-Wpacked"
82/*lint -esym(9058, T_UINT16_READ)*/ /* disable MISRA 2012 Rule 2.4 for T_UINT16_READ */
83 __PACKED_STRUCT T_UINT16_READ { uint16_t v; };
84 #pragma clang diagnostic pop
85 #define __UNALIGNED_UINT16_READ(addr) (((const struct T_UINT16_READ *)(const void *)(addr))->v)
86#endif
87#ifndef __UNALIGNED_UINT32_WRITE
88 #pragma clang diagnostic push
89 #pragma clang diagnostic ignored "-Wpacked"
90/*lint -esym(9058, T_UINT32_WRITE)*/ /* disable MISRA 2012 Rule 2.4 for T_UINT32_WRITE */
91 __PACKED_STRUCT T_UINT32_WRITE { uint32_t v; };
92 #pragma clang diagnostic pop
93 #define __UNALIGNED_UINT32_WRITE(addr, val) (void)((((struct T_UINT32_WRITE *)(void *)(addr))->v) = (val))
94#endif
95#ifndef __UNALIGNED_UINT32_READ
96 #pragma clang diagnostic push
97 #pragma clang diagnostic ignored "-Wpacked"
98/*lint -esym(9058, T_UINT32_READ)*/ /* disable MISRA 2012 Rule 2.4 for T_UINT32_READ */
99 __PACKED_STRUCT T_UINT32_READ { uint32_t v; };
100 #pragma clang diagnostic pop
101 #define __UNALIGNED_UINT32_READ(addr) (((const struct T_UINT32_READ *)(const void *)(addr))->v)
102#endif
103#ifndef __ALIGNED
104 #define __ALIGNED(x) __attribute__((aligned(x)))
105#endif
106#ifndef __RESTRICT
107 #define __RESTRICT __restrict
108#endif
109#ifndef __COMPILER_BARRIER
110 #define __COMPILER_BARRIER() __ASM volatile("":::"memory")
111#endif
112#ifndef __NO_INIT
113 #define __NO_INIT __attribute__ ((section (".bss.noinit")))
114#endif
115#ifndef __ALIAS
116 #define __ALIAS(x) __attribute__ ((alias(x)))
117#endif
118
119/* ######################### Startup and Lowlevel Init ######################## */
120
121#ifndef __PROGRAM_START
122#define __PROGRAM_START __main
123#endif
124
125#ifndef __INITIAL_SP
126#define __INITIAL_SP Image$$ARM_LIB_STACK$$ZI$$Limit
127#endif
128
129#ifndef __STACK_LIMIT
130#define __STACK_LIMIT Image$$ARM_LIB_STACK$$ZI$$Base
131#endif
132
133#ifndef __VECTOR_TABLE
134#define __VECTOR_TABLE __Vectors
135#endif
136
137#ifndef __VECTOR_TABLE_ATTRIBUTE
138#define __VECTOR_TABLE_ATTRIBUTE __attribute__((used, section("RESET")))
139#endif
140
141#if defined (__ARM_FEATURE_CMSE) && (__ARM_FEATURE_CMSE == 3U)
142#ifndef __STACK_SEAL
143#define __STACK_SEAL Image$$STACKSEAL$$ZI$$Base
144#endif
145
146#ifndef __TZ_STACK_SEAL_SIZE
147#define __TZ_STACK_SEAL_SIZE 8U
148#endif
149
150#ifndef __TZ_STACK_SEAL_VALUE
151#define __TZ_STACK_SEAL_VALUE 0xFEF5EDA5FEF5EDA5ULL
152#endif
153
154
/* Seal the Secure stack (ARMv8-M Security Extension): writes the 64-bit
   stack-seal signature __TZ_STACK_SEAL_VALUE at the location given by
   stackTop, marking the top of the Secure process/main stack.
   NOTE(review): the uint32_t* -> uint64_t* cast assumes stackTop is 8-byte
   aligned and that the caller reserved __TZ_STACK_SEAL_SIZE (8) bytes there
   — confirm against the startup code that allocates the seal region. */
155__STATIC_FORCEINLINE void __TZ_set_STACKSEAL_S (uint32_t* stackTop) {
156 *((uint64_t *)stackTop) = __TZ_STACK_SEAL_VALUE;
157}
158#endif
159
160
161/* ########################## Core Instruction Access ######################### */
167/* Define macros for porting to both thumb1 and thumb2.
168 * For thumb1, use low register (r0-r7), specified by constraint "l"
169 * Otherwise, use general registers, specified by constraint "r" */
170#if defined (__thumb__) && !defined (__thumb2__)
171#define __CMSIS_GCC_OUT_REG(r) "=l" (r)
172#define __CMSIS_GCC_USE_REG(r) "l" (r)
173#else
174#define __CMSIS_GCC_OUT_REG(r) "=r" (r)
175#define __CMSIS_GCC_USE_REG(r) "r" (r)
176#endif
177
182#define __NOP __builtin_arm_nop
183
188#define __WFI __builtin_arm_wfi
189
190
196#define __WFE __builtin_arm_wfe
197
198
203#define __SEV __builtin_arm_sev
204
205
212#define __ISB() __builtin_arm_isb(0xF)
213
219#define __DSB() __builtin_arm_dsb(0xF)
220
221
227#define __DMB() __builtin_arm_dmb(0xF)
228
229
236#define __REV(value) __builtin_bswap32(value)
237
238
245#define __REV16(value) __ROR(__REV(value), 16)
246
247
254#define __REVSH(value) (int16_t)__builtin_bswap16(value)
255
256
264__STATIC_FORCEINLINE uint32_t __ROR(uint32_t op1, uint32_t op2)
265{
266 op2 %= 32U;
267 if (op2 == 0U)
268 {
269 return op1;
270 }
271 return (op1 >> op2) | (op1 << (32U - op2));
272}
273
274
282#define __BKPT(value) __ASM volatile ("bkpt "#value)
283
284
291#define __RBIT __builtin_arm_rbit
292
299__STATIC_FORCEINLINE uint8_t __CLZ(uint32_t value)
300{
301 /* Even though __builtin_clz produces a CLZ instruction on ARM, formally
302 __builtin_clz(0) is undefined behaviour, so handle this case specially.
303 This guarantees ARM-compatible results if happening to compile on a non-ARM
304 target, and ensures the compiler doesn't decide to activate any
305 optimisations using the logic "value was passed to __builtin_clz, so it
306 is non-zero".
307 ARM Compiler 6.10 and possibly earlier will optimise this test away, leaving a
308 single CLZ instruction.
309 */
310 if (value == 0U)
311 {
312 return 32U;
313 }
314 return __builtin_clz(value);
315}
316
317
318#if ((defined (__ARM_ARCH_7M__ ) && (__ARM_ARCH_7M__ == 1)) || \
319 (defined (__ARM_ARCH_7EM__ ) && (__ARM_ARCH_7EM__ == 1)) || \
320 (defined (__ARM_ARCH_8M_MAIN__ ) && (__ARM_ARCH_8M_MAIN__ == 1)) || \
321 (defined (__ARM_ARCH_8M_BASE__ ) && (__ARM_ARCH_8M_BASE__ == 1)) )
328#define __LDREXB (uint8_t)__builtin_arm_ldrex
329
330
337#define __LDREXH (uint16_t)__builtin_arm_ldrex
338
339
346#define __LDREXW (uint32_t)__builtin_arm_ldrex
347
348
357#define __STREXB (uint32_t)__builtin_arm_strex
358
359
368#define __STREXH (uint32_t)__builtin_arm_strex
369
370
379#define __STREXW (uint32_t)__builtin_arm_strex
380
381
386#define __CLREX __builtin_arm_clrex
387
388#endif /* ((defined (__ARM_ARCH_7M__ ) && (__ARM_ARCH_7M__ == 1)) || \
389 (defined (__ARM_ARCH_7EM__ ) && (__ARM_ARCH_7EM__ == 1)) || \
390 (defined (__ARM_ARCH_8M_MAIN__ ) && (__ARM_ARCH_8M_MAIN__ == 1)) || \
391 (defined (__ARM_ARCH_8M_BASE__ ) && (__ARM_ARCH_8M_BASE__ == 1)) ) */
392
393
394#if ((defined (__ARM_ARCH_7M__ ) && (__ARM_ARCH_7M__ == 1)) || \
395 (defined (__ARM_ARCH_7EM__ ) && (__ARM_ARCH_7EM__ == 1)) || \
396 (defined (__ARM_ARCH_8M_MAIN__ ) && (__ARM_ARCH_8M_MAIN__ == 1)) )
397
405#define __SSAT __builtin_arm_ssat
406
407
415#define __USAT __builtin_arm_usat
416
417
/* Rotate Right with Extend: emits the RRX instruction, shifting value right
   by one bit with the carry flag rotated into bit[31] of the result.
   Register constraints come from __CMSIS_GCC_OUT_REG/__CMSIS_GCC_USE_REG so
   Thumb-1 builds are restricted to low registers. */
425__STATIC_FORCEINLINE uint32_t __RRX(uint32_t value)
426{
427 uint32_t result;
428
429 __ASM volatile ("rrx %0, %1" : __CMSIS_GCC_OUT_REG (result) : __CMSIS_GCC_USE_REG (value) );
430 return(result);
431}
432
433
/* ---- Unprivileged (user-mode) load/store intrinsics ----
   Each wrapper emits the corresponding *T instruction, which performs the
   memory access with unprivileged permissions even when executed in a
   privileged mode.  The "Q" constraint forces a single base-register
   addressing mode, as required by these instructions. */

/* Load one byte with unprivileged access (LDRBT). */
440__STATIC_FORCEINLINE uint8_t __LDRBT(volatile uint8_t *ptr)
441{
442 uint32_t result;
443
444 __ASM volatile ("ldrbt %0, %1" : "=r" (result) : "Q" (*ptr) );
445 return ((uint8_t) result); /* Add explicit type cast here */
446}
447
448
/* Load one halfword with unprivileged access (LDRHT). */
455__STATIC_FORCEINLINE uint16_t __LDRHT(volatile uint16_t *ptr)
456{
457 uint32_t result;
458
459 __ASM volatile ("ldrht %0, %1" : "=r" (result) : "Q" (*ptr) );
460 return ((uint16_t) result); /* Add explicit type cast here */
461}
462
463
/* Load one word with unprivileged access (LDRT). */
470__STATIC_FORCEINLINE uint32_t __LDRT(volatile uint32_t *ptr)
471{
472 uint32_t result;
473
474 __ASM volatile ("ldrt %0, %1" : "=r" (result) : "Q" (*ptr) );
475 return(result);
476}
477
478
/* Store one byte with unprivileged access (STRBT). */
485__STATIC_FORCEINLINE void __STRBT(uint8_t value, volatile uint8_t *ptr)
486{
487 __ASM volatile ("strbt %1, %0" : "=Q" (*ptr) : "r" ((uint32_t)value) );
488}
489
490
/* Store one halfword with unprivileged access (STRHT). */
497__STATIC_FORCEINLINE void __STRHT(uint16_t value, volatile uint16_t *ptr)
498{
499 __ASM volatile ("strht %1, %0" : "=Q" (*ptr) : "r" ((uint32_t)value) );
500}
501
502
/* Store one word with unprivileged access (STRT). */
509__STATIC_FORCEINLINE void __STRT(uint32_t value, volatile uint32_t *ptr)
510{
511 __ASM volatile ("strt %1, %0" : "=Q" (*ptr) : "r" (value) );
512}
513
514#else /* ((defined (__ARM_ARCH_7M__ ) && (__ARM_ARCH_7M__ == 1)) || \
515 (defined (__ARM_ARCH_7EM__ ) && (__ARM_ARCH_7EM__ == 1)) || \
516 (defined (__ARM_ARCH_8M_MAIN__ ) && (__ARM_ARCH_8M_MAIN__ == 1)) ) */
517
525__STATIC_FORCEINLINE int32_t __SSAT(int32_t val, uint32_t sat)
526{
527 if ((sat >= 1U) && (sat <= 32U))
528 {
529 const int32_t max = (int32_t)((1U << (sat - 1U)) - 1U);
530 const int32_t min = -1 - max ;
531 if (val > max)
532 {
533 return max;
534 }
535 else if (val < min)
536 {
537 return min;
538 }
539 }
540 return val;
541}
542
550__STATIC_FORCEINLINE uint32_t __USAT(int32_t val, uint32_t sat)
551{
552 if (sat <= 31U)
553 {
554 const uint32_t max = ((1U << sat) - 1U);
555 if (val > (int32_t)max)
556 {
557 return max;
558 }
559 else if (val < 0)
560 {
561 return 0U;
562 }
563 }
564 return (uint32_t)val;
565}
566
567#endif /* ((defined (__ARM_ARCH_7M__ ) && (__ARM_ARCH_7M__ == 1)) || \
568 (defined (__ARM_ARCH_7EM__ ) && (__ARM_ARCH_7EM__ == 1)) || \
569 (defined (__ARM_ARCH_8M_MAIN__ ) && (__ARM_ARCH_8M_MAIN__ == 1)) ) */
570
571
572#if ((defined (__ARM_ARCH_8M_MAIN__ ) && (__ARM_ARCH_8M_MAIN__ == 1)) || \
573 (defined (__ARM_ARCH_8M_BASE__ ) && (__ARM_ARCH_8M_BASE__ == 1)) )
/* ---- Load-Acquire / Store-Release intrinsics (ARMv8-M) ----
   LDA*/ /* reads have acquire semantics, STL* writes have release semantics.
   The "memory" clobber stops the compiler reordering other memory accesses
   across these operations. */

/* Load-Acquire one byte (LDAB). */
580__STATIC_FORCEINLINE uint8_t __LDAB(volatile uint8_t *ptr)
581{
582 uint32_t result;
583
584 __ASM volatile ("ldab %0, %1" : "=r" (result) : "Q" (*ptr) : "memory" );
585 return ((uint8_t) result);
586}
587
588
/* Load-Acquire one halfword (LDAH). */
595__STATIC_FORCEINLINE uint16_t __LDAH(volatile uint16_t *ptr)
596{
597 uint32_t result;
598
599 __ASM volatile ("ldah %0, %1" : "=r" (result) : "Q" (*ptr) : "memory" );
600 return ((uint16_t) result);
601}
602
603
/* Load-Acquire one word (LDA). */
610__STATIC_FORCEINLINE uint32_t __LDA(volatile uint32_t *ptr)
611{
612 uint32_t result;
613
614 __ASM volatile ("lda %0, %1" : "=r" (result) : "Q" (*ptr) : "memory" );
615 return(result);
616}
617
618
/* Store-Release one byte (STLB). */
625__STATIC_FORCEINLINE void __STLB(uint8_t value, volatile uint8_t *ptr)
626{
627 __ASM volatile ("stlb %1, %0" : "=Q" (*ptr) : "r" ((uint32_t)value) : "memory" );
628}
629
630
/* Store-Release one halfword (STLH). */
637__STATIC_FORCEINLINE void __STLH(uint16_t value, volatile uint16_t *ptr)
638{
639 __ASM volatile ("stlh %1, %0" : "=Q" (*ptr) : "r" ((uint32_t)value) : "memory" );
640}
641
642
/* Store-Release one word (STL). */
649__STATIC_FORCEINLINE void __STL(uint32_t value, volatile uint32_t *ptr)
650{
651 __ASM volatile ("stl %1, %0" : "=Q" (*ptr) : "r" ((uint32_t)value) : "memory" );
652}
653
654
661#define __LDAEXB (uint8_t)__builtin_arm_ldaex
662
663
670#define __LDAEXH (uint16_t)__builtin_arm_ldaex
671
672
679#define __LDAEX (uint32_t)__builtin_arm_ldaex
680
681
690#define __STLEXB (uint32_t)__builtin_arm_stlex
691
692
701#define __STLEXH (uint32_t)__builtin_arm_stlex
702
703
712#define __STLEX (uint32_t)__builtin_arm_stlex
713
714#endif /* ((defined (__ARM_ARCH_8M_MAIN__ ) && (__ARM_ARCH_8M_MAIN__ == 1)) || \
715 (defined (__ARM_ARCH_8M_BASE__ ) && (__ARM_ARCH_8M_BASE__ == 1)) ) */
716 /* end of group CMSIS_Core_InstructionInterface */
718
719
720/* ########################### Core Function Access ########################### */
/* Global interrupt enable/disable via PRIMASK (CPSIE i / CPSID i).
   Guarded by __ARM_COMPAT_H because armclang's arm_compat.h already
   declares __enable_irq/__disable_irq; the guard avoids a redefinition.
   The "memory" clobber makes each a compiler barrier so memory accesses
   are not moved across the interrupt-state change. */
731#ifndef __ARM_COMPAT_H
/* Enable IRQ interrupts: clears PRIMASK. Only callable in privileged modes. */
732__STATIC_FORCEINLINE void __enable_irq(void)
733{
734 __ASM volatile ("cpsie i" : : : "memory");
735}
736#endif
737
738
744#ifndef __ARM_COMPAT_H
/* Disable IRQ interrupts: sets PRIMASK. Only callable in privileged modes. */
745__STATIC_FORCEINLINE void __disable_irq(void)
746{
747 __ASM volatile ("cpsid i" : : : "memory");
748}
749#endif
750
751
/* Read the CONTROL special register (MRS). */
757__STATIC_FORCEINLINE uint32_t __get_CONTROL(void)
758{
759 uint32_t result;
760
761 __ASM volatile ("MRS %0, control" : "=r" (result) );
762 return(result);
763}
764
765
/* Read the Non-Secure CONTROL register from Secure state (TrustZone only). */
766#if (defined (__ARM_FEATURE_CMSE ) && (__ARM_FEATURE_CMSE == 3))
772__STATIC_FORCEINLINE uint32_t __TZ_get_CONTROL_NS(void)
773{
774 uint32_t result;
775
776 __ASM volatile ("MRS %0, control_ns" : "=r" (result) );
777 return(result);
778}
779#endif
780
781
/* Write the CONTROL register (MSR).  The following ISB ensures the new
   configuration (e.g. stack-pointer or privilege change) takes effect
   before subsequent instructions execute. */
787__STATIC_FORCEINLINE void __set_CONTROL(uint32_t control)
788{
789 __ASM volatile ("MSR control, %0" : : "r" (control) : "memory");
790 __ISB();
791}
792
793
/* Write the Non-Secure CONTROL register from Secure state (TrustZone only). */
794#if (defined (__ARM_FEATURE_CMSE ) && (__ARM_FEATURE_CMSE == 3))
800__STATIC_FORCEINLINE void __TZ_set_CONTROL_NS(uint32_t control)
801{
802 __ASM volatile ("MSR control_ns, %0" : : "r" (control) : "memory");
803 __ISB();
804}
805#endif
806
807
813__STATIC_FORCEINLINE uint32_t __get_IPSR(void)
814{
815 uint32_t result;
816
817 __ASM volatile ("MRS %0, ipsr" : "=r" (result) );
818 return(result);
819}
820
821
827__STATIC_FORCEINLINE uint32_t __get_APSR(void)
828{
829 uint32_t result;
830
831 __ASM volatile ("MRS %0, apsr" : "=r" (result) );
832 return(result);
833}
834
835
841__STATIC_FORCEINLINE uint32_t __get_xPSR(void)
842{
843 uint32_t result;
844
845 __ASM volatile ("MRS %0, xpsr" : "=r" (result) );
846 return(result);
847}
848
849
855__STATIC_FORCEINLINE uint32_t __get_PSP(void)
856{
857 uint32_t result;
858
859 __ASM volatile ("MRS %0, psp" : "=r" (result) );
860 return(result);
861}
862
863
864#if (defined (__ARM_FEATURE_CMSE ) && (__ARM_FEATURE_CMSE == 3))
870__STATIC_FORCEINLINE uint32_t __TZ_get_PSP_NS(void)
871{
872 uint32_t result;
873
874 __ASM volatile ("MRS %0, psp_ns" : "=r" (result) );
875 return(result);
876}
877#endif
878
879
885__STATIC_FORCEINLINE void __set_PSP(uint32_t topOfProcStack)
886{
887 __ASM volatile ("MSR psp, %0" : : "r" (topOfProcStack) : );
888}
889
890
891#if (defined (__ARM_FEATURE_CMSE ) && (__ARM_FEATURE_CMSE == 3))
897__STATIC_FORCEINLINE void __TZ_set_PSP_NS(uint32_t topOfProcStack)
898{
899 __ASM volatile ("MSR psp_ns, %0" : : "r" (topOfProcStack) : );
900}
901#endif
902
903
909__STATIC_FORCEINLINE uint32_t __get_MSP(void)
910{
911 uint32_t result;
912
913 __ASM volatile ("MRS %0, msp" : "=r" (result) );
914 return(result);
915}
916
917
918#if (defined (__ARM_FEATURE_CMSE ) && (__ARM_FEATURE_CMSE == 3))
924__STATIC_FORCEINLINE uint32_t __TZ_get_MSP_NS(void)
925{
926 uint32_t result;
927
928 __ASM volatile ("MRS %0, msp_ns" : "=r" (result) );
929 return(result);
930}
931#endif
932
933
939__STATIC_FORCEINLINE void __set_MSP(uint32_t topOfMainStack)
940{
941 __ASM volatile ("MSR msp, %0" : : "r" (topOfMainStack) : );
942}
943
944
945#if (defined (__ARM_FEATURE_CMSE ) && (__ARM_FEATURE_CMSE == 3))
951__STATIC_FORCEINLINE void __TZ_set_MSP_NS(uint32_t topOfMainStack)
952{
953 __ASM volatile ("MSR msp_ns, %0" : : "r" (topOfMainStack) : );
954}
955#endif
956
957
958#if (defined (__ARM_FEATURE_CMSE ) && (__ARM_FEATURE_CMSE == 3))
964__STATIC_FORCEINLINE uint32_t __TZ_get_SP_NS(void)
965{
966 uint32_t result;
967
968 __ASM volatile ("MRS %0, sp_ns" : "=r" (result) );
969 return(result);
970}
971
972
978__STATIC_FORCEINLINE void __TZ_set_SP_NS(uint32_t topOfStack)
979{
980 __ASM volatile ("MSR sp_ns, %0" : : "r" (topOfStack) : );
981}
982#endif
983
984
990__STATIC_FORCEINLINE uint32_t __get_PRIMASK(void)
991{
992 uint32_t result;
993
994 __ASM volatile ("MRS %0, primask" : "=r" (result) );
995 return(result);
996}
997
998
999#if (defined (__ARM_FEATURE_CMSE ) && (__ARM_FEATURE_CMSE == 3))
1005__STATIC_FORCEINLINE uint32_t __TZ_get_PRIMASK_NS(void)
1006{
1007 uint32_t result;
1008
1009 __ASM volatile ("MRS %0, primask_ns" : "=r" (result) );
1010 return(result);
1011}
1012#endif
1013
1014
1020__STATIC_FORCEINLINE void __set_PRIMASK(uint32_t priMask)
1021{
1022 __ASM volatile ("MSR primask, %0" : : "r" (priMask) : "memory");
1023}
1024
1025
1026#if (defined (__ARM_FEATURE_CMSE ) && (__ARM_FEATURE_CMSE == 3))
1032__STATIC_FORCEINLINE void __TZ_set_PRIMASK_NS(uint32_t priMask)
1033{
1034 __ASM volatile ("MSR primask_ns, %0" : : "r" (priMask) : "memory");
1035}
1036#endif
1037
1038
1039#if ((defined (__ARM_ARCH_7M__ ) && (__ARM_ARCH_7M__ == 1)) || \
1040 (defined (__ARM_ARCH_7EM__ ) && (__ARM_ARCH_7EM__ == 1)) || \
1041 (defined (__ARM_ARCH_8M_MAIN__ ) && (__ARM_ARCH_8M_MAIN__ == 1)) )
1047__STATIC_FORCEINLINE void __enable_fault_irq(void)
1048{
1049 __ASM volatile ("cpsie f" : : : "memory");
1050}
1051
1052
1058__STATIC_FORCEINLINE void __disable_fault_irq(void)
1059{
1060 __ASM volatile ("cpsid f" : : : "memory");
1061}
1062
1063
1069__STATIC_FORCEINLINE uint32_t __get_BASEPRI(void)
1070{
1071 uint32_t result;
1072
1073 __ASM volatile ("MRS %0, basepri" : "=r" (result) );
1074 return(result);
1075}
1076
1077
1078#if (defined (__ARM_FEATURE_CMSE ) && (__ARM_FEATURE_CMSE == 3))
1084__STATIC_FORCEINLINE uint32_t __TZ_get_BASEPRI_NS(void)
1085{
1086 uint32_t result;
1087
1088 __ASM volatile ("MRS %0, basepri_ns" : "=r" (result) );
1089 return(result);
1090}
1091#endif
1092
1093
1099__STATIC_FORCEINLINE void __set_BASEPRI(uint32_t basePri)
1100{
1101 __ASM volatile ("MSR basepri, %0" : : "r" (basePri) : "memory");
1102}
1103
1104
1105#if (defined (__ARM_FEATURE_CMSE ) && (__ARM_FEATURE_CMSE == 3))
1111__STATIC_FORCEINLINE void __TZ_set_BASEPRI_NS(uint32_t basePri)
1112{
1113 __ASM volatile ("MSR basepri_ns, %0" : : "r" (basePri) : "memory");
1114}
1115#endif
1116
1117
1124__STATIC_FORCEINLINE void __set_BASEPRI_MAX(uint32_t basePri)
1125{
1126 __ASM volatile ("MSR basepri_max, %0" : : "r" (basePri) : "memory");
1127}
1128
1129
1135__STATIC_FORCEINLINE uint32_t __get_FAULTMASK(void)
1136{
1137 uint32_t result;
1138
1139 __ASM volatile ("MRS %0, faultmask" : "=r" (result) );
1140 return(result);
1141}
1142
1143
1144#if (defined (__ARM_FEATURE_CMSE ) && (__ARM_FEATURE_CMSE == 3))
1150__STATIC_FORCEINLINE uint32_t __TZ_get_FAULTMASK_NS(void)
1151{
1152 uint32_t result;
1153
1154 __ASM volatile ("MRS %0, faultmask_ns" : "=r" (result) );
1155 return(result);
1156}
1157#endif
1158
1159
1165__STATIC_FORCEINLINE void __set_FAULTMASK(uint32_t faultMask)
1166{
1167 __ASM volatile ("MSR faultmask, %0" : : "r" (faultMask) : "memory");
1168}
1169
1170
1171#if (defined (__ARM_FEATURE_CMSE ) && (__ARM_FEATURE_CMSE == 3))
1177__STATIC_FORCEINLINE void __TZ_set_FAULTMASK_NS(uint32_t faultMask)
1178{
1179 __ASM volatile ("MSR faultmask_ns, %0" : : "r" (faultMask) : "memory");
1180}
1181#endif
1182
1183#endif /* ((defined (__ARM_ARCH_7M__ ) && (__ARM_ARCH_7M__ == 1)) || \
1184 (defined (__ARM_ARCH_7EM__ ) && (__ARM_ARCH_7EM__ == 1)) || \
1185 (defined (__ARM_ARCH_8M_MAIN__ ) && (__ARM_ARCH_8M_MAIN__ == 1)) ) */
1186
1187
1188#if ((defined (__ARM_ARCH_8M_MAIN__ ) && (__ARM_ARCH_8M_MAIN__ == 1)) || \
1189 (defined (__ARM_ARCH_8M_BASE__ ) && (__ARM_ARCH_8M_BASE__ == 1)) )
1190
/* Read the Process Stack Pointer Limit (PSPLIM).
   On ARMv8-M Baseline without the Main Extension and without Secure state,
   PSPLIM is RAZ/WI (reads as zero, writes ignored) and some compilers
   reject the MRS, so 0 is returned directly in that configuration. */
1200__STATIC_FORCEINLINE uint32_t __get_PSPLIM(void)
1201{
1202#if (!(defined (__ARM_ARCH_8M_MAIN__ ) && (__ARM_ARCH_8M_MAIN__ == 1)) && \
1203 (!defined (__ARM_FEATURE_CMSE) || (__ARM_FEATURE_CMSE < 3)))
1204 // without main extensions, the non-secure PSPLIM is RAZ/WI
1205 return 0U;
1206#else
1207 uint32_t result;
1208 __ASM volatile ("MRS %0, psplim" : "=r" (result) );
1209 return result;
1210#endif
1211}
1212
1213#if (defined (__ARM_FEATURE_CMSE) && (__ARM_FEATURE_CMSE == 3))
1223__STATIC_FORCEINLINE uint32_t __TZ_get_PSPLIM_NS(void)
1224{
1225#if (!(defined (__ARM_ARCH_8M_MAIN__ ) && (__ARM_ARCH_8M_MAIN__ == 1)))
1226 // without main extensions, the non-secure PSPLIM is RAZ/WI
1227 return 0U;
1228#else
1229 uint32_t result;
1230 __ASM volatile ("MRS %0, psplim_ns" : "=r" (result) );
1231 return result;
1232#endif
1233}
1234#endif
1235
1236
/* Write the Process Stack Pointer Limit (PSPLIM).
   On ARMv8-M Baseline without the Main Extension and without Secure state,
   PSPLIM is RAZ/WI, so the write is intentionally dropped there ((void)
   cast silences the unused-parameter warning). */
1246__STATIC_FORCEINLINE void __set_PSPLIM(uint32_t ProcStackPtrLimit)
1247{
1248#if (!(defined (__ARM_ARCH_8M_MAIN__ ) && (__ARM_ARCH_8M_MAIN__ == 1)) && \
1249 (!defined (__ARM_FEATURE_CMSE) || (__ARM_FEATURE_CMSE < 3)))
1250 // without main extensions, the non-secure PSPLIM is RAZ/WI
1251 (void)ProcStackPtrLimit;
1252#else
1253 __ASM volatile ("MSR psplim, %0" : : "r" (ProcStackPtrLimit));
1254#endif
1255}
1256
1257
1258#if (defined (__ARM_FEATURE_CMSE ) && (__ARM_FEATURE_CMSE == 3))
1268__STATIC_FORCEINLINE void __TZ_set_PSPLIM_NS(uint32_t ProcStackPtrLimit)
1269{
1270#if (!(defined (__ARM_ARCH_8M_MAIN__ ) && (__ARM_ARCH_8M_MAIN__ == 1)))
1271 // without main extensions, the non-secure PSPLIM is RAZ/WI
1272 (void)ProcStackPtrLimit;
1273#else
1274 __ASM volatile ("MSR psplim_ns, %0\n" : : "r" (ProcStackPtrLimit));
1275#endif
1276}
1277#endif
1278
1279
1288__STATIC_FORCEINLINE uint32_t __get_MSPLIM(void)
1289{
1290#if (!(defined (__ARM_ARCH_8M_MAIN__ ) && (__ARM_ARCH_8M_MAIN__ == 1)) && \
1291 (!defined (__ARM_FEATURE_CMSE) || (__ARM_FEATURE_CMSE < 3)))
1292 // without main extensions, the non-secure MSPLIM is RAZ/WI
1293 return 0U;
1294#else
1295 uint32_t result;
1296 __ASM volatile ("MRS %0, msplim" : "=r" (result) );
1297 return result;
1298#endif
1299}
1300
1301
1302#if (defined (__ARM_FEATURE_CMSE ) && (__ARM_FEATURE_CMSE == 3))
1311__STATIC_FORCEINLINE uint32_t __TZ_get_MSPLIM_NS(void)
1312{
1313#if (!(defined (__ARM_ARCH_8M_MAIN__ ) && (__ARM_ARCH_8M_MAIN__ == 1)))
1314 // without main extensions, the non-secure MSPLIM is RAZ/WI
1315 return 0U;
1316#else
1317 uint32_t result;
1318 __ASM volatile ("MRS %0, msplim_ns" : "=r" (result) );
1319 return result;
1320#endif
1321}
1322#endif
1323
1324
1333__STATIC_FORCEINLINE void __set_MSPLIM(uint32_t MainStackPtrLimit)
1334{
1335#if (!(defined (__ARM_ARCH_8M_MAIN__ ) && (__ARM_ARCH_8M_MAIN__ == 1)) && \
1336 (!defined (__ARM_FEATURE_CMSE) || (__ARM_FEATURE_CMSE < 3)))
1337 // without main extensions, the non-secure MSPLIM is RAZ/WI
1338 (void)MainStackPtrLimit;
1339#else
1340 __ASM volatile ("MSR msplim, %0" : : "r" (MainStackPtrLimit));
1341#endif
1342}
1343
1344
1345#if (defined (__ARM_FEATURE_CMSE ) && (__ARM_FEATURE_CMSE == 3))
1354__STATIC_FORCEINLINE void __TZ_set_MSPLIM_NS(uint32_t MainStackPtrLimit)
1355{
1356#if (!(defined (__ARM_ARCH_8M_MAIN__ ) && (__ARM_ARCH_8M_MAIN__ == 1)))
1357 // without main extensions, the non-secure MSPLIM is RAZ/WI
1358 (void)MainStackPtrLimit;
1359#else
1360 __ASM volatile ("MSR msplim_ns, %0" : : "r" (MainStackPtrLimit));
1361#endif
1362}
1363#endif
1364
1365#endif /* ((defined (__ARM_ARCH_8M_MAIN__ ) && (__ARM_ARCH_8M_MAIN__ == 1)) || \
1366 (defined (__ARM_ARCH_8M_BASE__ ) && (__ARM_ARCH_8M_BASE__ == 1)) ) */
1367
1373#if ((defined (__FPU_PRESENT) && (__FPU_PRESENT == 1U)) && \
1374 (defined (__FPU_USED ) && (__FPU_USED == 1U)) )
1375#define __get_FPSCR (uint32_t)__builtin_arm_get_fpscr
1376#else
1377#define __get_FPSCR() ((uint32_t)0U)
1378#endif
1379
1385#if ((defined (__FPU_PRESENT) && (__FPU_PRESENT == 1U)) && \
1386 (defined (__FPU_USED ) && (__FPU_USED == 1U)) )
1387#define __set_FPSCR __builtin_arm_set_fpscr
1388#else
1389#define __set_FPSCR(x) ((void)(x))
1390#endif
1391
1392
1396/* ################### Compiler specific Intrinsics ########################### */
1402#if (defined (__ARM_FEATURE_DSP) && (__ARM_FEATURE_DSP == 1))
1403
1404__STATIC_FORCEINLINE uint32_t __SADD8(uint32_t op1, uint32_t op2)
1405{
1406 uint32_t result;
1407
1408 __ASM volatile ("sadd8 %0, %1, %2" : "=r" (result) : "r" (op1), "r" (op2) );
1409 return(result);
1410}
1411
1412__STATIC_FORCEINLINE uint32_t __QADD8(uint32_t op1, uint32_t op2)
1413{
1414 uint32_t result;
1415
1416 __ASM volatile ("qadd8 %0, %1, %2" : "=r" (result) : "r" (op1), "r" (op2) );
1417 return(result);
1418}
1419
1420__STATIC_FORCEINLINE uint32_t __SHADD8(uint32_t op1, uint32_t op2)
1421{
1422 uint32_t result;
1423
1424 __ASM volatile ("shadd8 %0, %1, %2" : "=r" (result) : "r" (op1), "r" (op2) );
1425 return(result);
1426}
1427
1428__STATIC_FORCEINLINE uint32_t __UADD8(uint32_t op1, uint32_t op2)
1429{
1430 uint32_t result;
1431
1432 __ASM volatile ("uadd8 %0, %1, %2" : "=r" (result) : "r" (op1), "r" (op2) );
1433 return(result);
1434}
1435
1436__STATIC_FORCEINLINE uint32_t __UQADD8(uint32_t op1, uint32_t op2)
1437{
1438 uint32_t result;
1439
1440 __ASM volatile ("uqadd8 %0, %1, %2" : "=r" (result) : "r" (op1), "r" (op2) );
1441 return(result);
1442}
1443
1444__STATIC_FORCEINLINE uint32_t __UHADD8(uint32_t op1, uint32_t op2)
1445{
1446 uint32_t result;
1447
1448 __ASM volatile ("uhadd8 %0, %1, %2" : "=r" (result) : "r" (op1), "r" (op2) );
1449 return(result);
1450}
1451
1452
1453__STATIC_FORCEINLINE uint32_t __SSUB8(uint32_t op1, uint32_t op2)
1454{
1455 uint32_t result;
1456
1457 __ASM volatile ("ssub8 %0, %1, %2" : "=r" (result) : "r" (op1), "r" (op2) );
1458 return(result);
1459}
1460
1461__STATIC_FORCEINLINE uint32_t __QSUB8(uint32_t op1, uint32_t op2)
1462{
1463 uint32_t result;
1464
1465 __ASM volatile ("qsub8 %0, %1, %2" : "=r" (result) : "r" (op1), "r" (op2) );
1466 return(result);
1467}
1468
1469__STATIC_FORCEINLINE uint32_t __SHSUB8(uint32_t op1, uint32_t op2)
1470{
1471 uint32_t result;
1472
1473 __ASM volatile ("shsub8 %0, %1, %2" : "=r" (result) : "r" (op1), "r" (op2) );
1474 return(result);
1475}
1476
1477__STATIC_FORCEINLINE uint32_t __USUB8(uint32_t op1, uint32_t op2)
1478{
1479 uint32_t result;
1480
1481 __ASM volatile ("usub8 %0, %1, %2" : "=r" (result) : "r" (op1), "r" (op2) );
1482 return(result);
1483}
1484
1485__STATIC_FORCEINLINE uint32_t __UQSUB8(uint32_t op1, uint32_t op2)
1486{
1487 uint32_t result;
1488
1489 __ASM volatile ("uqsub8 %0, %1, %2" : "=r" (result) : "r" (op1), "r" (op2) );
1490 return(result);
1491}
1492
1493__STATIC_FORCEINLINE uint32_t __UHSUB8(uint32_t op1, uint32_t op2)
1494{
1495 uint32_t result;
1496
1497 __ASM volatile ("uhsub8 %0, %1, %2" : "=r" (result) : "r" (op1), "r" (op2) );
1498 return(result);
1499}
1500
1501
1502__STATIC_FORCEINLINE uint32_t __SADD16(uint32_t op1, uint32_t op2)
1503{
1504 uint32_t result;
1505
1506 __ASM volatile ("sadd16 %0, %1, %2" : "=r" (result) : "r" (op1), "r" (op2) );
1507 return(result);
1508}
1509
1510__STATIC_FORCEINLINE uint32_t __QADD16(uint32_t op1, uint32_t op2)
1511{
1512 uint32_t result;
1513
1514 __ASM volatile ("qadd16 %0, %1, %2" : "=r" (result) : "r" (op1), "r" (op2) );
1515 return(result);
1516}
1517
1518__STATIC_FORCEINLINE uint32_t __SHADD16(uint32_t op1, uint32_t op2)
1519{
1520 uint32_t result;
1521
1522 __ASM volatile ("shadd16 %0, %1, %2" : "=r" (result) : "r" (op1), "r" (op2) );
1523 return(result);
1524}
1525
1526__STATIC_FORCEINLINE uint32_t __UADD16(uint32_t op1, uint32_t op2)
1527{
1528 uint32_t result;
1529
1530 __ASM volatile ("uadd16 %0, %1, %2" : "=r" (result) : "r" (op1), "r" (op2) );
1531 return(result);
1532}
1533
1534__STATIC_FORCEINLINE uint32_t __UQADD16(uint32_t op1, uint32_t op2)
1535{
1536 uint32_t result;
1537
1538 __ASM volatile ("uqadd16 %0, %1, %2" : "=r" (result) : "r" (op1), "r" (op2) );
1539 return(result);
1540}
1541
1542__STATIC_FORCEINLINE uint32_t __UHADD16(uint32_t op1, uint32_t op2)
1543{
1544 uint32_t result;
1545
1546 __ASM volatile ("uhadd16 %0, %1, %2" : "=r" (result) : "r" (op1), "r" (op2) );
1547 return(result);
1548}
1549
1550__STATIC_FORCEINLINE uint32_t __SSUB16(uint32_t op1, uint32_t op2)
1551{
1552 uint32_t result;
1553
1554 __ASM volatile ("ssub16 %0, %1, %2" : "=r" (result) : "r" (op1), "r" (op2) );
1555 return(result);
1556}
1557
1558__STATIC_FORCEINLINE uint32_t __QSUB16(uint32_t op1, uint32_t op2)
1559{
1560 uint32_t result;
1561
1562 __ASM volatile ("qsub16 %0, %1, %2" : "=r" (result) : "r" (op1), "r" (op2) );
1563 return(result);
1564}
1565
1566__STATIC_FORCEINLINE uint32_t __SHSUB16(uint32_t op1, uint32_t op2)
1567{
1568 uint32_t result;
1569
1570 __ASM volatile ("shsub16 %0, %1, %2" : "=r" (result) : "r" (op1), "r" (op2) );
1571 return(result);
1572}
1573
1574__STATIC_FORCEINLINE uint32_t __USUB16(uint32_t op1, uint32_t op2)
1575{
1576 uint32_t result;
1577
1578 __ASM volatile ("usub16 %0, %1, %2" : "=r" (result) : "r" (op1), "r" (op2) );
1579 return(result);
1580}
1581
1582__STATIC_FORCEINLINE uint32_t __UQSUB16(uint32_t op1, uint32_t op2)
1583{
1584 uint32_t result;
1585
1586 __ASM volatile ("uqsub16 %0, %1, %2" : "=r" (result) : "r" (op1), "r" (op2) );
1587 return(result);
1588}
1589
1590__STATIC_FORCEINLINE uint32_t __UHSUB16(uint32_t op1, uint32_t op2)
1591{
1592 uint32_t result;
1593
1594 __ASM volatile ("uhsub16 %0, %1, %2" : "=r" (result) : "r" (op1), "r" (op2) );
1595 return(result);
1596}
1597
1598__STATIC_FORCEINLINE uint32_t __SASX(uint32_t op1, uint32_t op2)
1599{
1600 uint32_t result;
1601
1602 __ASM volatile ("sasx %0, %1, %2" : "=r" (result) : "r" (op1), "r" (op2) );
1603 return(result);
1604}
1605
1606__STATIC_FORCEINLINE uint32_t __QASX(uint32_t op1, uint32_t op2)
1607{
1608 uint32_t result;
1609
1610 __ASM volatile ("qasx %0, %1, %2" : "=r" (result) : "r" (op1), "r" (op2) );
1611 return(result);
1612}
1613
1614__STATIC_FORCEINLINE uint32_t __SHASX(uint32_t op1, uint32_t op2)
1615{
1616 uint32_t result;
1617
1618 __ASM volatile ("shasx %0, %1, %2" : "=r" (result) : "r" (op1), "r" (op2) );
1619 return(result);
1620}
1621
1622__STATIC_FORCEINLINE uint32_t __UASX(uint32_t op1, uint32_t op2)
1623{
1624 uint32_t result;
1625
1626 __ASM volatile ("uasx %0, %1, %2" : "=r" (result) : "r" (op1), "r" (op2) );
1627 return(result);
1628}
1629
1630__STATIC_FORCEINLINE uint32_t __UQASX(uint32_t op1, uint32_t op2)
1631{
1632 uint32_t result;
1633
1634 __ASM volatile ("uqasx %0, %1, %2" : "=r" (result) : "r" (op1), "r" (op2) );
1635 return(result);
1636}
1637
1638__STATIC_FORCEINLINE uint32_t __UHASX(uint32_t op1, uint32_t op2)
1639{
1640 uint32_t result;
1641
1642 __ASM volatile ("uhasx %0, %1, %2" : "=r" (result) : "r" (op1), "r" (op2) );
1643 return(result);
1644}
1645
1646__STATIC_FORCEINLINE uint32_t __SSAX(uint32_t op1, uint32_t op2)
1647{
1648 uint32_t result;
1649
1650 __ASM volatile ("ssax %0, %1, %2" : "=r" (result) : "r" (op1), "r" (op2) );
1651 return(result);
1652}
1653
1654__STATIC_FORCEINLINE uint32_t __QSAX(uint32_t op1, uint32_t op2)
1655{
1656 uint32_t result;
1657
1658 __ASM volatile ("qsax %0, %1, %2" : "=r" (result) : "r" (op1), "r" (op2) );
1659 return(result);
1660}
1661
1662__STATIC_FORCEINLINE uint32_t __SHSAX(uint32_t op1, uint32_t op2)
1663{
1664 uint32_t result;
1665
1666 __ASM volatile ("shsax %0, %1, %2" : "=r" (result) : "r" (op1), "r" (op2) );
1667 return(result);
1668}
1669
1670__STATIC_FORCEINLINE uint32_t __USAX(uint32_t op1, uint32_t op2)
1671{
1672 uint32_t result;
1673
1674 __ASM volatile ("usax %0, %1, %2" : "=r" (result) : "r" (op1), "r" (op2) );
1675 return(result);
1676}
1677
/**
  \brief   Dual 16-bit unsigned saturating subtract/add with exchange (UQSAX).
  \details hi = usat16(op1.hi - op2.lo); lo = usat16(op1.lo + op2.hi).
  \param [in] op1  first packed two-halfword operand
  \param [in] op2  second packed two-halfword operand (halfwords exchanged)
  \return          packed saturated halfword results
 */
__STATIC_FORCEINLINE uint32_t __UQSAX(uint32_t op1, uint32_t op2)
{
  uint32_t result;

  __ASM volatile ("uqsax %0, %1, %2" : "=r" (result) : "r" (op1), "r" (op2) );
  return(result);
}
1685
/**
  \brief   Dual 16-bit unsigned halving subtract/add with exchange (UHSAX).
  \details hi = (op1.hi - op2.lo) >> 1; lo = (op1.lo + op2.hi) >> 1 (unsigned).
  \param [in] op1  first packed two-halfword operand
  \param [in] op2  second packed two-halfword operand (halfwords exchanged)
  \return          packed halved halfword results
 */
__STATIC_FORCEINLINE uint32_t __UHSAX(uint32_t op1, uint32_t op2)
{
  uint32_t result;

  __ASM volatile ("uhsax %0, %1, %2" : "=r" (result) : "r" (op1), "r" (op2) );
  return(result);
}
1693
/**
  \brief   Unsigned sum of absolute differences of four bytes (USAD8).
  \details result = |op1.b0-op2.b0| + |op1.b1-op2.b1| + |op1.b2-op2.b2| + |op1.b3-op2.b3|.
  \param [in] op1  first packed four-byte operand
  \param [in] op2  second packed four-byte operand
  \return          sum of the four absolute byte differences
 */
__STATIC_FORCEINLINE uint32_t __USAD8(uint32_t op1, uint32_t op2)
{
  uint32_t result;

  __ASM volatile ("usad8 %0, %1, %2" : "=r" (result) : "r" (op1), "r" (op2) );
  return(result);
}
1701
/**
  \brief   Unsigned sum of absolute byte differences with accumulate (USADA8).
  \details result = op3 + |op1.b0-op2.b0| + |op1.b1-op2.b1| + |op1.b2-op2.b2| + |op1.b3-op2.b3|.
  \param [in] op1  first packed four-byte operand
  \param [in] op2  second packed four-byte operand
  \param [in] op3  accumulation value
  \return          accumulated sum of absolute byte differences
 */
__STATIC_FORCEINLINE uint32_t __USADA8(uint32_t op1, uint32_t op2, uint32_t op3)
{
  uint32_t result;

  __ASM volatile ("usada8 %0, %1, %2, %3" : "=r" (result) : "r" (op1), "r" (op2), "r" (op3) );
  return(result);
}
1709
/**
  \brief   Dual 16-bit signed saturate (SSAT16).
  \details Saturates each signed halfword of ARG1 to the range
           -2^(ARG2-1) .. +2^(ARG2-1)-1. Implemented as a macro because
           ARG2 must be an assemble-time immediate ("I" constraint), 1..16.
 */
#define __SSAT16(ARG1,ARG2) \
({                          \
  int32_t __RES, __ARG1 = (ARG1); \
  __ASM ("ssat16 %0, %1, %2" : "=r" (__RES) : "I" (ARG2), "r" (__ARG1) ); \
  __RES; \
 })
1716
/**
  \brief   Dual 16-bit unsigned saturate (USAT16).
  \details Saturates each signed halfword of ARG1 to the range
           0 .. 2^ARG2 - 1. Implemented as a macro because ARG2 must be
           an assemble-time immediate ("I" constraint), 0..15.
 */
#define __USAT16(ARG1,ARG2) \
({                          \
  uint32_t __RES, __ARG1 = (ARG1); \
  __ASM ("usat16 %0, %1, %2" : "=r" (__RES) : "I" (ARG2), "r" (__ARG1) ); \
  __RES; \
 })
1723
/**
  \brief   Dual zero-extend bytes to halfwords (UXTB16).
  \details Zero-extends bytes 0 and 2 of op1 into the low and high
           16-bit fields of the result.
  \param [in] op1  packed four-byte operand
  \return          two zero-extended halfwords
 */
__STATIC_FORCEINLINE uint32_t __UXTB16(uint32_t op1)
{
  uint32_t result;

  __ASM volatile ("uxtb16 %0, %1" : "=r" (result) : "r" (op1));
  return(result);
}
1731
/**
  \brief   Dual zero-extend bytes and add to halfwords (UXTAB16).
  \details Zero-extends bytes 0 and 2 of op2 and adds them to the
           corresponding halfwords of op1.
  \param [in] op1  packed two-halfword addend
  \param [in] op2  packed four-byte operand to extend
  \return          packed halfword sums
 */
__STATIC_FORCEINLINE uint32_t __UXTAB16(uint32_t op1, uint32_t op2)
{
  uint32_t result;

  __ASM volatile ("uxtab16 %0, %1, %2" : "=r" (result) : "r" (op1), "r" (op2) );
  return(result);
}
1739
/**
  \brief   Dual sign-extend bytes to halfwords (SXTB16).
  \details Sign-extends bytes 0 and 2 of op1 into the low and high
           16-bit fields of the result.
  \param [in] op1  packed four-byte operand
  \return          two sign-extended halfwords
 */
__STATIC_FORCEINLINE uint32_t __SXTB16(uint32_t op1)
{
  uint32_t result;

  __ASM volatile ("sxtb16 %0, %1" : "=r" (result) : "r" (op1));
  return(result);
}
1747
/**
  \brief   Dual sign-extend bytes and add to halfwords (SXTAB16).
  \details Sign-extends bytes 0 and 2 of op2 and adds them to the
           corresponding halfwords of op1.
  \param [in] op1  packed two-halfword addend
  \param [in] op2  packed four-byte operand to extend
  \return          packed halfword sums
 */
__STATIC_FORCEINLINE uint32_t __SXTAB16(uint32_t op1, uint32_t op2)
{
  uint32_t result;

  __ASM volatile ("sxtab16 %0, %1, %2" : "=r" (result) : "r" (op1), "r" (op2) );
  return(result);
}
1755
/**
  \brief   Dual signed 16x16 multiply with addition of products (SMUAD).
  \details result = op1.lo*op2.lo + op1.hi*op2.hi (signed halfwords);
           the Q flag is set if the addition overflows.
  \param [in] op1  first packed two-halfword operand
  \param [in] op2  second packed two-halfword operand
  \return          sum of the two products
 */
__STATIC_FORCEINLINE uint32_t __SMUAD  (uint32_t op1, uint32_t op2)
{
  uint32_t result;

  __ASM volatile ("smuad %0, %1, %2" : "=r" (result) : "r" (op1), "r" (op2) );
  return(result);
}
1763
/**
  \brief   Dual signed 16x16 multiply, exchanged, with addition (SMUADX).
  \details result = op1.lo*op2.hi + op1.hi*op2.lo (signed halfwords);
           the Q flag is set if the addition overflows.
  \param [in] op1  first packed two-halfword operand
  \param [in] op2  second packed two-halfword operand (halfwords exchanged)
  \return          sum of the two products
 */
__STATIC_FORCEINLINE uint32_t __SMUADX (uint32_t op1, uint32_t op2)
{
  uint32_t result;

  __ASM volatile ("smuadx %0, %1, %2" : "=r" (result) : "r" (op1), "r" (op2) );
  return(result);
}
1771
/**
  \brief   Dual signed multiply-accumulate (SMLAD).
  \details result = op3 + op1.lo*op2.lo + op1.hi*op2.hi;
           the Q flag is set on accumulator overflow.
  \param [in] op1  first packed two-halfword operand
  \param [in] op2  second packed two-halfword operand
  \param [in] op3  accumulation value
  \return          accumulated sum of products
 */
__STATIC_FORCEINLINE uint32_t __SMLAD (uint32_t op1, uint32_t op2, uint32_t op3)
{
  uint32_t result;

  __ASM volatile ("smlad %0, %1, %2, %3" : "=r" (result) : "r" (op1), "r" (op2), "r" (op3) );
  return(result);
}
1779
/**
  \brief   Dual signed multiply-accumulate, exchanged (SMLADX).
  \details result = op3 + op1.lo*op2.hi + op1.hi*op2.lo;
           the Q flag is set on accumulator overflow.
  \param [in] op1  first packed two-halfword operand
  \param [in] op2  second packed two-halfword operand (halfwords exchanged)
  \param [in] op3  accumulation value
  \return          accumulated sum of products
 */
__STATIC_FORCEINLINE uint32_t __SMLADX (uint32_t op1, uint32_t op2, uint32_t op3)
{
  uint32_t result;

  __ASM volatile ("smladx %0, %1, %2, %3" : "=r" (result) : "r" (op1), "r" (op2), "r" (op3) );
  return(result);
}
1787
/**
  \brief   Dual signed multiply with 64-bit accumulate (SMLALD).
  \details result = acc + op1.lo*op2.lo + op1.hi*op2.hi, with a 64-bit
           accumulator held in a register pair.
  \param [in] op1  first packed two-halfword operand
  \param [in] op2  second packed two-halfword operand
  \param [in] acc  64-bit accumulation value
  \return          accumulated 64-bit result
 */
__STATIC_FORCEINLINE uint64_t __SMLALD (uint32_t op1, uint32_t op2, uint64_t acc)
{
  /* Split the 64-bit accumulator into the two 32-bit register halves the
     instruction needs; word order in w32[] depends on endianness. */
  union llreg_u{
    uint32_t w32[2];
    uint64_t w64;
  } llr;
  llr.w64 = acc;

#ifndef __ARMEB__          /* Little endian: w32[0] is RdLo */
  /* "0"/"1" tie the accumulator inputs to the outputs (read-modify-write). */
  __ASM volatile ("smlald %0, %1, %2, %3" : "=r" (llr.w32[0]), "=r" (llr.w32[1]): "r" (op1), "r" (op2) , "0" (llr.w32[0]), "1" (llr.w32[1]) );
#else                      /* Big endian: w32[1] is RdLo */
  __ASM volatile ("smlald %0, %1, %2, %3" : "=r" (llr.w32[1]), "=r" (llr.w32[0]): "r" (op1), "r" (op2) , "0" (llr.w32[1]), "1" (llr.w32[0]) );
#endif

  return(llr.w64);
}
1804
/**
  \brief   Dual signed multiply with 64-bit accumulate, exchanged (SMLALDX).
  \details result = acc + op1.lo*op2.hi + op1.hi*op2.lo, with a 64-bit
           accumulator held in a register pair.
  \param [in] op1  first packed two-halfword operand
  \param [in] op2  second packed two-halfword operand (halfwords exchanged)
  \param [in] acc  64-bit accumulation value
  \return          accumulated 64-bit result
 */
__STATIC_FORCEINLINE uint64_t __SMLALDX (uint32_t op1, uint32_t op2, uint64_t acc)
{
  /* 64-bit accumulator split into register halves; order is endian-dependent. */
  union llreg_u{
    uint32_t w32[2];
    uint64_t w64;
  } llr;
  llr.w64 = acc;

#ifndef __ARMEB__          /* Little endian: w32[0] is RdLo */
  /* "0"/"1" tie the accumulator inputs to the outputs (read-modify-write). */
  __ASM volatile ("smlaldx %0, %1, %2, %3" : "=r" (llr.w32[0]), "=r" (llr.w32[1]): "r" (op1), "r" (op2) , "0" (llr.w32[0]), "1" (llr.w32[1]) );
#else                      /* Big endian: w32[1] is RdLo */
  __ASM volatile ("smlaldx %0, %1, %2, %3" : "=r" (llr.w32[1]), "=r" (llr.w32[0]): "r" (op1), "r" (op2) , "0" (llr.w32[1]), "1" (llr.w32[0]) );
#endif

  return(llr.w64);
}
1821
/**
  \brief   Dual signed 16x16 multiply with subtraction of products (SMUSD).
  \details result = op1.lo*op2.lo - op1.hi*op2.hi (signed halfwords).
  \param [in] op1  first packed two-halfword operand
  \param [in] op2  second packed two-halfword operand
  \return          difference of the two products
 */
__STATIC_FORCEINLINE uint32_t __SMUSD  (uint32_t op1, uint32_t op2)
{
  uint32_t result;

  __ASM volatile ("smusd %0, %1, %2" : "=r" (result) : "r" (op1), "r" (op2) );
  return(result);
}
1829
/**
  \brief   Dual signed 16x16 multiply, exchanged, with subtraction (SMUSDX).
  \details result = op1.lo*op2.hi - op1.hi*op2.lo (signed halfwords).
  \param [in] op1  first packed two-halfword operand
  \param [in] op2  second packed two-halfword operand (halfwords exchanged)
  \return          difference of the two products
 */
__STATIC_FORCEINLINE uint32_t __SMUSDX (uint32_t op1, uint32_t op2)
{
  uint32_t result;

  __ASM volatile ("smusdx %0, %1, %2" : "=r" (result) : "r" (op1), "r" (op2) );
  return(result);
}
1837
/**
  \brief   Dual signed multiply-subtract with accumulate (SMLSD).
  \details result = op3 + op1.lo*op2.lo - op1.hi*op2.hi;
           the Q flag is set on accumulator overflow.
  \param [in] op1  first packed two-halfword operand
  \param [in] op2  second packed two-halfword operand
  \param [in] op3  accumulation value
  \return          accumulated difference of products
 */
__STATIC_FORCEINLINE uint32_t __SMLSD (uint32_t op1, uint32_t op2, uint32_t op3)
{
  uint32_t result;

  __ASM volatile ("smlsd %0, %1, %2, %3" : "=r" (result) : "r" (op1), "r" (op2), "r" (op3) );
  return(result);
}
1845
/**
  \brief   Dual signed multiply-subtract with accumulate, exchanged (SMLSDX).
  \details result = op3 + op1.lo*op2.hi - op1.hi*op2.lo;
           the Q flag is set on accumulator overflow.
  \param [in] op1  first packed two-halfword operand
  \param [in] op2  second packed two-halfword operand (halfwords exchanged)
  \param [in] op3  accumulation value
  \return          accumulated difference of products
 */
__STATIC_FORCEINLINE uint32_t __SMLSDX (uint32_t op1, uint32_t op2, uint32_t op3)
{
  uint32_t result;

  __ASM volatile ("smlsdx %0, %1, %2, %3" : "=r" (result) : "r" (op1), "r" (op2), "r" (op3) );
  return(result);
}
1853
/**
  \brief   Dual signed multiply-subtract with 64-bit accumulate (SMLSLD).
  \details result = acc + op1.lo*op2.lo - op1.hi*op2.hi, with a 64-bit
           accumulator held in a register pair.
  \param [in] op1  first packed two-halfword operand
  \param [in] op2  second packed two-halfword operand
  \param [in] acc  64-bit accumulation value
  \return          accumulated 64-bit result
 */
__STATIC_FORCEINLINE uint64_t __SMLSLD (uint32_t op1, uint32_t op2, uint64_t acc)
{
  /* 64-bit accumulator split into register halves; order is endian-dependent. */
  union llreg_u{
    uint32_t w32[2];
    uint64_t w64;
  } llr;
  llr.w64 = acc;

#ifndef __ARMEB__          /* Little endian: w32[0] is RdLo */
  /* "0"/"1" tie the accumulator inputs to the outputs (read-modify-write). */
  __ASM volatile ("smlsld %0, %1, %2, %3" : "=r" (llr.w32[0]), "=r" (llr.w32[1]): "r" (op1), "r" (op2) , "0" (llr.w32[0]), "1" (llr.w32[1]) );
#else                      /* Big endian: w32[1] is RdLo */
  __ASM volatile ("smlsld %0, %1, %2, %3" : "=r" (llr.w32[1]), "=r" (llr.w32[0]): "r" (op1), "r" (op2) , "0" (llr.w32[1]), "1" (llr.w32[0]) );
#endif

  return(llr.w64);
}
1870
/**
  \brief   Dual signed multiply-subtract with 64-bit accumulate, exchanged (SMLSLDX).
  \details result = acc + op1.lo*op2.hi - op1.hi*op2.lo, with a 64-bit
           accumulator held in a register pair.
  \param [in] op1  first packed two-halfword operand
  \param [in] op2  second packed two-halfword operand (halfwords exchanged)
  \param [in] acc  64-bit accumulation value
  \return          accumulated 64-bit result
 */
__STATIC_FORCEINLINE uint64_t __SMLSLDX (uint32_t op1, uint32_t op2, uint64_t acc)
{
  /* 64-bit accumulator split into register halves; order is endian-dependent. */
  union llreg_u{
    uint32_t w32[2];
    uint64_t w64;
  } llr;
  llr.w64 = acc;

#ifndef __ARMEB__          /* Little endian: w32[0] is RdLo */
  /* "0"/"1" tie the accumulator inputs to the outputs (read-modify-write). */
  __ASM volatile ("smlsldx %0, %1, %2, %3" : "=r" (llr.w32[0]), "=r" (llr.w32[1]): "r" (op1), "r" (op2) , "0" (llr.w32[0]), "1" (llr.w32[1]) );
#else                      /* Big endian: w32[1] is RdLo */
  __ASM volatile ("smlsldx %0, %1, %2, %3" : "=r" (llr.w32[1]), "=r" (llr.w32[0]): "r" (op1), "r" (op2) , "0" (llr.w32[1]), "1" (llr.w32[0]) );
#endif

  return(llr.w64);
}
1887
/**
  \brief   Select bytes (SEL).
  \details Each result byte is taken from op1 or op2 according to the
           APSR.GE flags set by a preceding SIMD add/subtract (e.g. __UASX).
  \param [in] op1  bytes selected where the GE flag is set
  \param [in] op2  bytes selected where the GE flag is clear
  \return          byte-wise selection of op1/op2
 */
__STATIC_FORCEINLINE uint32_t __SEL  (uint32_t op1, uint32_t op2)
{
  uint32_t result;

  __ASM volatile ("sel %0, %1, %2" : "=r" (result) : "r" (op1), "r" (op2) );
  return(result);
}
1895
/**
  \brief   32-bit signed saturating add (QADD).
  \details result = sat32(op1 + op2); the Q flag is set on saturation.
  \param [in] op1  first operand
  \param [in] op2  second operand
  \return          saturated sum
 */
__STATIC_FORCEINLINE int32_t __QADD( int32_t op1,  int32_t op2)
{
  int32_t result;

  __ASM volatile ("qadd %0, %1, %2" : "=r" (result) : "r" (op1), "r" (op2) );
  return(result);
}
1903
/**
  \brief   32-bit signed saturating subtract (QSUB).
  \details result = sat32(op1 - op2); the Q flag is set on saturation.
  \param [in] op1  first operand
  \param [in] op2  second operand
  \return          saturated difference
 */
__STATIC_FORCEINLINE int32_t __QSUB( int32_t op1,  int32_t op2)
{
  int32_t result;

  __ASM volatile ("qsub %0, %1, %2" : "=r" (result) : "r" (op1), "r" (op2) );
  return(result);
}
1911
/**
  \brief   Pack halfword, bottom-top (PKHBT equivalent in plain C).
  \details result[15:0] = ARG1[15:0]; result[31:16] = (ARG2 << ARG3)[31:16].
           ARG3 must be in 0..31 (shift by >= 32 is undefined behavior).
 */
#define __PKHBT(ARG1,ARG2,ARG3)          ( ((((uint32_t)(ARG1))          ) & 0x0000FFFFUL) |  \
                                           ((((uint32_t)(ARG2)) << (ARG3)) & 0xFFFF0000UL)  )
1914
/**
  \brief   Pack halfword, top-bottom (PKHTB equivalent in plain C).
  \details result[31:16] = ARG1[31:16]; result[15:0] = (ARG2 >> ARG3)[15:0].
           ARG3 must be in 0..31 (shift by >= 32 is undefined behavior).
  NOTE(review): the shift is logical (unsigned), whereas the hardware PKHTB
  uses an arithmetic shift; for ARG3 > 16 with a negative ARG2 the low bits
  differ — this matches the upstream CMSIS definition, so left as-is.
 */
#define __PKHTB(ARG1,ARG2,ARG3)          ( ((((uint32_t)(ARG1))          ) & 0xFFFF0000UL) |  \
                                           ((((uint32_t)(ARG2)) >> (ARG3)) & 0x0000FFFFUL)  )
1917
/** \brief SXTB16 applied to ARG1 rotated right by ARG2 bits (e.g. ARG2 == 8
    sign-extends bytes 1 and 3 instead of bytes 0 and 2). */
#define __SXTB16_RORn(ARG1, ARG2)        __SXTB16(__ROR(ARG1, ARG2))
1919
/** \brief SXTAB16 with ARG2 rotated right by ARG3 bits before the dual
    sign-extend-and-add to the halfwords of ARG1. */
#define __SXTAB16_RORn(ARG1, ARG2, ARG3) __SXTAB16(ARG1, __ROR(ARG2, ARG3))
1921
/**
  \brief   Signed most-significant-word multiply accumulate (SMMLA).
  \details result = op3 + (int32_t)(((int64_t)op1 * op2) >> 32);
           the 64-bit product is truncated, not rounded.
  \param [in] op1  first multiplicand
  \param [in] op2  second multiplicand
  \param [in] op3  accumulation value
  \return          accumulated high word of the product
 */
__STATIC_FORCEINLINE int32_t __SMMLA (int32_t op1, int32_t op2, int32_t op3)
{
  int32_t result;

  __ASM volatile ("smmla %0, %1, %2, %3" : "=r" (result): "r" (op1), "r" (op2), "r" (op3) );
  return(result);
}
1929
1930#endif /* (__ARM_FEATURE_DSP == 1) */
1934#endif /* __CMSIS_ARMCLANG_H */
__STATIC_FORCEINLINE int32_t __SSAT(int32_t val, uint32_t sat)
Signed Saturate.
#define __CLZ
Count leading zeros.
__STATIC_FORCEINLINE uint32_t __USAT(int32_t val, uint32_t sat)
Unsigned Saturate.
#define __ROR
Rotate Right in unsigned value (32 bit)
#define __ISB()
Instruction Synchronization Barrier.
__STATIC_INLINE void __set_CONTROL(uint32_t control)
Set Control Register.
__STATIC_INLINE void __set_MSP(uint32_t topOfMainStack)
Set Main Stack Pointer.
__STATIC_INLINE void __set_PSP(uint32_t topOfProcStack)
Set Process Stack Pointer.
__STATIC_INLINE uint32_t __get_PRIMASK(void)
Get Priority Mask.
#define __enable_irq
Enable IRQ Interrupts.
__STATIC_INLINE uint32_t __get_MSP(void)
Get Main Stack Pointer.
__STATIC_INLINE uint32_t __get_CONTROL(void)
Get Control Register.
__STATIC_INLINE uint32_t __get_PSP(void)
Get Process Stack Pointer.
#define __disable_irq
Disable IRQ Interrupts.
__STATIC_INLINE uint32_t __get_APSR(void)
Get APSR Register.
#define __get_xPSR
Get xPSR Register.
__STATIC_INLINE uint32_t __get_IPSR(void)
Get IPSR Register.
__STATIC_INLINE void __set_PRIMASK(uint32_t priMask)
Set Priority Mask.
void __attribute__((noreturn))(*rom_reset_usb_boot_fn)(uint32_t
Reboot the device into BOOTSEL mode.
Definition bootrom.h:66