/*
 * Copyright (c) 2023-2024 Arm Limited. All rights reserved.
 *
 * SPDX-License-Identifier: Apache-2.0
 *
 * Licensed under the Apache License, Version 2.0 (the License); you may
 * not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an AS IS BASIS, WITHOUT
 * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
/*
 * CMSIS-Core(A) Compiler LLVM/Clang Header File
 */
#ifndef __CMSIS_CLANG_A_H
#define __CMSIS_CLANG_A_H

#pragma clang system_header   /* treat file as system include file */
#ifndef __CMSIS_CLANG_H
  #error "This file must not be included directly"
#endif
/**
  \brief   STRT Unprivileged (8 bit)
  \details Executes an Unprivileged STRBT instruction for 8 bit values.
  \param [in]  value  Value to store
  \param [in]    ptr  Pointer to location
 */
__STATIC_FORCEINLINE void __STRBT(uint8_t value, volatile uint8_t *ptr)
{
  __ASM volatile ("strbt %1, %0, #0" : "=Q" (*ptr) : "r" ((uint32_t)value) );
}
/**
  \brief   STRT Unprivileged (16 bit)
  \details Executes an Unprivileged STRHT instruction for 16 bit values.
  \param [in]  value  Value to store
  \param [in]    ptr  Pointer to location
 */
__STATIC_FORCEINLINE void __STRHT(uint16_t value, volatile uint16_t *ptr)
{
  __ASM volatile ("strht %1, %0, #0" : "=Q" (*ptr) : "r" ((uint32_t)value) );
}
/**
  \brief   STRT Unprivileged (32 bit)
  \details Executes an Unprivileged STRT instruction for 32 bit values.
  \param [in]  value  Value to store
  \param [in]    ptr  Pointer to location
 */
__STATIC_FORCEINLINE void __STRT(uint32_t value, volatile uint32_t *ptr)
{
  __ASM volatile ("strt %1, %0, #0" : "=Q" (*ptr) : "r" (value) );
}
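/*
  Illustrative sketch (not part of CMSIS-Core): a privileged service handler
  can use the unprivileged store intrinsics to write results into a
  caller-supplied buffer, so the access is checked against the caller's
  user-mode MMU permissions instead of the handler's. The function and
  parameter names below are hypothetical.
*/
__STATIC_FORCEINLINE void svc_write_result(volatile uint32_t *user_word,
                                           volatile uint8_t *user_byte,
                                           uint32_t word, uint8_t byte)
{
  __STRT(word, user_word);   // 32-bit store performed with USR privileges
  __STRBT(byte, user_byte);  // 8-bit store performed with USR privileges
}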
/* ###################  Compiler specific Intrinsics  ########################### */
/** \defgroup CMSIS_SIMD_intrinsics CMSIS SIMD Intrinsics
  Access to dedicated SIMD instructions
  @{
 */
#if (defined (__ARM_FEATURE_DSP) && (__ARM_FEATURE_DSP == 1))
  #define __SADD8     __builtin_arm_sadd8
  #define __QADD8     __builtin_arm_qadd8
  #define __SHADD8    __builtin_arm_shadd8
  #define __UADD8     __builtin_arm_uadd8
  #define __UQADD8    __builtin_arm_uqadd8
  #define __UHADD8    __builtin_arm_uhadd8
  #define __SSUB8     __builtin_arm_ssub8
  #define __QSUB8     __builtin_arm_qsub8
  #define __SHSUB8    __builtin_arm_shsub8
  #define __USUB8     __builtin_arm_usub8
  #define __UQSUB8    __builtin_arm_uqsub8
  #define __UHSUB8    __builtin_arm_uhsub8
  #define __SADD16    __builtin_arm_sadd16
  #define __QADD16    __builtin_arm_qadd16
  #define __SHADD16   __builtin_arm_shadd16
  #define __UADD16    __builtin_arm_uadd16
  #define __UQADD16   __builtin_arm_uqadd16
  #define __UHADD16   __builtin_arm_uhadd16
  #define __SSUB16    __builtin_arm_ssub16
  #define __QSUB16    __builtin_arm_qsub16
  #define __SHSUB16   __builtin_arm_shsub16
  #define __USUB16    __builtin_arm_usub16
  #define __UQSUB16   __builtin_arm_uqsub16
  #define __UHSUB16   __builtin_arm_uhsub16
  #define __SASX      __builtin_arm_sasx
  #define __QASX      __builtin_arm_qasx
  #define __SHASX     __builtin_arm_shasx
  #define __UASX      __builtin_arm_uasx
  #define __UQASX     __builtin_arm_uqasx
  #define __UHASX     __builtin_arm_uhasx
  #define __SSAX      __builtin_arm_ssax
  #define __QSAX      __builtin_arm_qsax
  #define __SHSAX     __builtin_arm_shsax
  #define __USAX      __builtin_arm_usax
  #define __UQSAX     __builtin_arm_uqsax
  #define __UHSAX     __builtin_arm_uhsax
  #define __USAD8     __builtin_arm_usad8
  #define __USADA8    __builtin_arm_usada8
  #define __SSAT16    __builtin_arm_ssat16
  #define __USAT16    __builtin_arm_usat16
  #define __UXTB16    __builtin_arm_uxtb16
  #define __UXTAB16   __builtin_arm_uxtab16
  #define __SXTB16    __builtin_arm_sxtb16
  #define __SXTAB16   __builtin_arm_sxtab16
  #define __SMUAD     __builtin_arm_smuad
  #define __SMUADX    __builtin_arm_smuadx
  #define __SMLAD     __builtin_arm_smlad
  #define __SMLADX    __builtin_arm_smladx
  #define __SMLALD    __builtin_arm_smlald
  #define __SMLALDX   __builtin_arm_smlaldx
  #define __SMUSD     __builtin_arm_smusd
  #define __SMUSDX    __builtin_arm_smusdx
  #define __SMLSD     __builtin_arm_smlsd
  #define __SMLSDX    __builtin_arm_smlsdx
  #define __SMLSLD    __builtin_arm_smlsld
  #define __SMLSLDX   __builtin_arm_smlsldx
  #define __SEL       __builtin_arm_sel
  #define __QADD      __builtin_arm_qadd
  #define __QSUB      __builtin_arm_qsub
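/*
  Illustrative sketch (not part of CMSIS-Core): the byte-lane intrinsics treat
  a uint32_t as four packed uint8_t lanes. Here __UQADD8 brightens four pixels
  at once; each lane saturates at 0xFF instead of wrapping. The helper name
  brighten4 is hypothetical.
*/
__STATIC_FORCEINLINE uint32_t brighten4(uint32_t four_pixels, uint8_t gain)
{
  const uint32_t g = (uint32_t)gain;
  const uint32_t gain4 = g | (g << 8) | (g << 16) | (g << 24); // replicate gain into all four lanes
  return __UQADD8(four_pixels, gain4);                         // per-lane unsigned saturating add
}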
  #define __PKHBT(ARG1,ARG2,ARG3) \
  __extension__ \
  ({ \
    uint32_t __RES, __ARG1 = (ARG1), __ARG2 = (ARG2); \
    __ASM ("pkhbt %0, %1, %2, lsl %3" : "=r" (__RES) : "r" (__ARG1), "r" (__ARG2), "I" (ARG3) ); \
    __RES; \
   })
  #define __PKHTB(ARG1,ARG2,ARG3) \
  __extension__ \
  ({ \
    uint32_t __RES, __ARG1 = (ARG1), __ARG2 = (ARG2); \
    if (ARG3 == 0) \
      __ASM ("pkhtb %0, %1, %2" : "=r" (__RES) : "r" (__ARG1), "r" (__ARG2) ); \
    else \
      __ASM ("pkhtb %0, %1, %2, asr %3" : "=r" (__RES) : "r" (__ARG1), "r" (__ARG2), "I" (ARG3) ); \
    __RES; \
   })
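/*
  Illustrative sketch (not part of CMSIS-Core): __PKHBT keeps the bottom
  halfword of its first operand and inserts the second operand shifted left
  by the constant third operand. The shift must be a compile-time constant
  because it is encoded into the instruction ("I" constraint). The helper
  name pack_halfwords is hypothetical.
*/
__STATIC_FORCEINLINE uint32_t pack_halfwords(uint32_t lo16, uint32_t hi16)
{
  // result[15:0] = lo16[15:0], result[31:16] = hi16[15:0]
  return __PKHBT(lo16, hi16, 16);
}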
__STATIC_FORCEINLINE uint32_t __SXTB16_RORn(uint32_t op1, uint32_t rotate)
{
  uint32_t result;
  if (__builtin_constant_p(rotate) && ((rotate == 8U) || (rotate == 16U) || (rotate == 24U)))
  {
    __ASM volatile("sxtb16 %0, %1, ROR %2" : "=r"(result) : "r"(op1), "i"(rotate));
  }
  else
  {
    result = __SXTB16(__ROR(op1, rotate));
  }
  return result;
}
__STATIC_FORCEINLINE uint32_t __SXTAB16_RORn(uint32_t op1, uint32_t op2, uint32_t rotate)
{
  uint32_t result;
  if (__builtin_constant_p(rotate) && ((rotate == 8U) || (rotate == 16U) || (rotate == 24U)))
  {
    __ASM volatile("sxtab16 %0, %1, %2, ROR %3" : "=r"(result) : "r"(op1), "r"(op2), "i"(rotate));
  }
  else
  {
    result = __SXTAB16(op1, __ROR(op2, rotate));
  }
  return result;
}
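/*
  Illustrative sketch (not part of CMSIS-Core): __SXTB16 sign-extends bytes 0
  and 2 of a word into two 16-bit lanes, and __SXTB16_RORn with rotate == 8
  does the same for bytes 1 and 3. Together they unpack four interleaved int8
  values into two 2x16-bit vectors, ready for the dual-MAC intrinsics such as
  __SMLAD. The helper name unpack_s8x4 is hypothetical.
*/
__STATIC_FORCEINLINE void unpack_s8x4(uint32_t packed, uint32_t *even16x2, uint32_t *odd16x2)
{
  *even16x2 = __SXTB16(packed);           // lanes from bytes 0 and 2
  *odd16x2  = __SXTB16_RORn(packed, 8U);  // lanes from bytes 1 and 3
}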
__STATIC_FORCEINLINE int32_t __SMMLA (int32_t op1, int32_t op2, int32_t op3)
{
  int32_t result;
  __ASM volatile ("smmla %0, %1, %2, %3" : "=r" (result) : "r" (op1), "r" (op2), "r" (op3) );
  return (result);
}
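/*
  Illustrative sketch (not part of CMSIS-Core): SMMLA computes
  op3 + (((int64_t)op1 * op2) >> 32), a 32x32-bit multiply keeping only the
  high word, which is exactly a Q31 fixed-point multiply-accumulate. The
  helper name q31_mac is hypothetical.
*/
__STATIC_FORCEINLINE int32_t q31_mac(int32_t acc, int32_t a, int32_t b)
{
  return __SMMLA(a, b, acc); // acc + high 32 bits of a*b
}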
#endif /* (defined (__ARM_FEATURE_DSP) && (__ARM_FEATURE_DSP == 1)) */
/** @} end of group CMSIS_SIMD_intrinsics */
/* ###########################  Core Function Access  ########################### */
/** \ingroup  CMSIS_Core_FunctionInterface
    \defgroup CMSIS_Core_RegAccFunctions CMSIS Core Register Access Functions
  @{
 */
/** \brief  Get CPSR Register
    \return CPSR Register value
 */
__STATIC_FORCEINLINE uint32_t __get_CPSR(void)
{
  uint32_t result;
  __ASM volatile("MRS %0, cpsr" : "=r" (result) );
  return (result);
}
/** \brief  Set CPSR Register
    \param [in]  cpsr  CPSR value to set
 */
__STATIC_FORCEINLINE void __set_CPSR(uint32_t cpsr)
{
  __ASM volatile ("MSR cpsr, %0" : : "r" (cpsr) : "cc", "memory");
}
/** \brief  Get Mode
    \return Processor Mode
 */
__STATIC_FORCEINLINE uint32_t __get_mode(void)
{
  return (__get_CPSR() & 0x1FU);
}
/** \brief  Set Mode
    \param [in]  mode  Mode value to set
 */
__STATIC_FORCEINLINE void __set_mode(uint32_t mode)
{
  __ASM volatile("MSR cpsr_c, %0" : : "r" (mode) : "memory");
}
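/*
  Illustrative sketch (not part of CMSIS-Core): the 5-bit mode field returned
  by __get_mode() uses the architectural ARMv7-A encodings (USR = 0x10,
  SVC = 0x13, SYS = 0x1F, ...). USR is the only unprivileged mode, so a simple
  privilege check looks like the hypothetical helper below.
*/
__STATIC_FORCEINLINE uint32_t in_privileged_mode(void)
{
  return (__get_mode() != 0x10U) ? 1U : 0U; // 0x10 = USR mode
}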
/** \brief  Get Stack Pointer
    \return Stack Pointer value
 */
__STATIC_FORCEINLINE uint32_t __get_SP(void)
{
  uint32_t result;
  __ASM volatile("MOV %0, sp" : "=r" (result) : : "memory");
  return (result);
}
/** \brief  Set Stack Pointer
    \param [in]  stack  Stack Pointer value to set
 */
__STATIC_FORCEINLINE void __set_SP(uint32_t stack)
{
  __ASM volatile("MOV sp, %0" : : "r" (stack) : "memory");
}
/** \brief  Get USR/SYS Stack Pointer
    \return USR/SYS Stack Pointer value
 */
__STATIC_FORCEINLINE uint32_t __get_SP_usr(void)
{
  uint32_t cpsr;
  uint32_t result;
  __ASM volatile(
    "MRS     %0, cpsr   \n"
    "CPS     #0x1F      \n" // no effect in USR mode
    "MOV     %1, sp     \n"
    "MSR     cpsr_c, %0 \n" // no effect in USR mode
    "ISB" : "=r"(cpsr), "=r"(result) : : "memory"
  );
  return result;
}
/** \brief  Set USR/SYS Stack Pointer
    \param [in]  topOfProcStack  USR/SYS Stack Pointer value to set
 */
__STATIC_FORCEINLINE void __set_SP_usr(uint32_t topOfProcStack)
{
  uint32_t cpsr;
  __ASM volatile(
    "MRS     %0, cpsr   \n"
    "CPS     #0x1F      \n" // no effect in USR mode
    "MOV     sp, %1     \n"
    "MSR     cpsr_c, %0 \n" // no effect in USR mode
    "ISB" : "=r"(cpsr) : "r" (topOfProcStack) : "memory"
  );
}
/** \brief  Get FPEXC
    \return Floating Point Exception Control register value
 */
__STATIC_FORCEINLINE uint32_t __get_FPEXC(void)
{
#if (__FPU_PRESENT == 1)
  uint32_t result;
  __ASM volatile("VMRS %0, fpexc" : "=r" (result) : : "memory");
  return (result);
#else
  return (0U);
#endif
}
/** \brief  Set FPEXC
    \param [in]  fpexc  Floating Point Exception Control value to set
 */
__STATIC_FORCEINLINE void __set_FPEXC(uint32_t fpexc)
{
#if (__FPU_PRESENT == 1)
  __ASM volatile ("VMSR fpexc, %0" : : "r" (fpexc) : "memory");
#endif
}
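/*
  Illustrative sketch (not part of CMSIS-Core): FPEXC bit 30 (EN) gates the
  floating-point unit, so reading it back answers "is the FPU on?". The
  helper name fpu_is_enabled is hypothetical.
*/
__STATIC_FORCEINLINE uint32_t fpu_is_enabled(void)
{
  return ((__get_FPEXC() & 0x40000000U) != 0U) ? 1U : 0U; // FPEXC.EN, bit 30
}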
/** @} end of CMSIS_Core_RegAccFunctions */

/*
 * Include common core functions to access Coprocessor 15 registers
 */
#define __get_CP(cp, op1, Rt, CRn, CRm, op2) __ASM volatile("MRC p" # cp ", " # op1 ", %0, c" # CRn ", c" # CRm ", " # op2 : "=r" (Rt) : : "memory" )
#define __set_CP(cp, op1, Rt, CRn, CRm, op2) __ASM volatile("MCR p" # cp ", " # op1 ", %0, c" # CRn ", c" # CRm ", " # op2 : : "r" (Rt) : "memory" )
#define __get_CP64(cp, op1, Rt, CRm)         __ASM volatile("MRRC p" # cp ", " # op1 ", %Q0, %R0, c" # CRm : "=r" (Rt) : : "memory" )
#define __set_CP64(cp, op1, Rt, CRm)         __ASM volatile("MCRR p" # cp ", " # op1 ", %Q0, %R0, c" # CRm : : "r" (Rt) : "memory" )
#include "cmsis_cp15.h"
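/*
  Illustrative sketch (not part of CMSIS-Core): each accessor macro expands to
  a single MRC/MCR (or MRRC/MCRR) instruction built from its stringised
  arguments. For example, MPIDR lives at CP15, op1 = 0, CRn = c0, CRm = c0,
  op2 = 5, and its Aff0 field gives the core number. The helper name
  current_core_id is hypothetical.
*/
__STATIC_FORCEINLINE uint32_t current_core_id(void)
{
  uint32_t mpidr;
  __get_CP(15, 0, mpidr, 0, 0, 5); // MRC p15, 0, mpidr, c0, c0, 5
  return (mpidr & 0xFFU);          // Aff0: core number within the cluster
}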
/** \brief  Enable Floating Point Unit

  Critical section, called from undef handler, so systick is disabled
 */
__STATIC_INLINE void __FPU_Enable(void)
{
  // Permit access to VFP/NEON registers by modifying CPACR
  const uint32_t cpacr = __get_CPACR();
  __set_CPACR(cpacr | 0x00F00000ul);
  __ISB();

  // Enable VFP/NEON
  const uint32_t fpexc = __get_FPEXC();
  __set_FPEXC(fpexc | 0x40000000ul);

  __ASM volatile(
    // Initialise VFP/NEON registers to 0
    "        MOV     R2,#0             \n"

    // Initialise D16 registers to 0
    "        VMOV    D0, R2,R2         \n"
    "        VMOV    D1, R2,R2         \n"
    "        VMOV    D2, R2,R2         \n"
    "        VMOV    D3, R2,R2         \n"
    "        VMOV    D4, R2,R2         \n"
    "        VMOV    D5, R2,R2         \n"
    "        VMOV    D6, R2,R2         \n"
    "        VMOV    D7, R2,R2         \n"
    "        VMOV    D8, R2,R2         \n"
    "        VMOV    D9, R2,R2         \n"
    "        VMOV    D10,R2,R2         \n"
    "        VMOV    D11,R2,R2         \n"
    "        VMOV    D12,R2,R2         \n"
    "        VMOV    D13,R2,R2         \n"
    "        VMOV    D14,R2,R2         \n"
    "        VMOV    D15,R2,R2         \n"

#if (defined(__ARM_NEON) && (__ARM_NEON == 1))
    // Initialise D32 registers to 0
    "        VMOV    D16,R2,R2         \n"
    "        VMOV    D17,R2,R2         \n"
    "        VMOV    D18,R2,R2         \n"
    "        VMOV    D19,R2,R2         \n"
    "        VMOV    D20,R2,R2         \n"
    "        VMOV    D21,R2,R2         \n"
    "        VMOV    D22,R2,R2         \n"
    "        VMOV    D23,R2,R2         \n"
    "        VMOV    D24,R2,R2         \n"
    "        VMOV    D25,R2,R2         \n"
    "        VMOV    D26,R2,R2         \n"
    "        VMOV    D27,R2,R2         \n"
    "        VMOV    D28,R2,R2         \n"
    "        VMOV    D29,R2,R2         \n"
    "        VMOV    D30,R2,R2         \n"
    "        VMOV    D31,R2,R2         \n"
#endif
    : : : "cc", "r2"
  );

  // Initialise FPSCR to a known state
  const uint32_t fpscr = __get_FPSCR();
  __set_FPSCR(fpscr & 0x00086060ul);
}
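/*
  Illustrative sketch (not part of CMSIS-Core): with lazy FPU activation, the
  Undefined Instruction handler calls __FPU_Enable() the first time a VFP/NEON
  instruction traps, then returns to retry the faulting instruction. The
  handler name is hypothetical; real handler entry/exit is written in assembly.
*/
__STATIC_INLINE void undef_handler_enable_fpu(void)
{
  if ((__get_FPEXC() & 0x40000000U) == 0U) // FPU not enabled yet?
  {
    __FPU_Enable(); // grant access, enable, and zero-initialise the FPU
  }
}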
/*@} end of group CMSIS_Core_intrinsics */

#pragma clang diagnostic pop

#endif /* __CMSIS_CLANG_A_H */