/*
 * Copyright (c) 2009-2024 Arm Limited. All rights reserved.
 *
 * SPDX-License-Identifier: Apache-2.0
 *
 * Licensed under the Apache License, Version 2.0 (the License); you may
 * not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an AS IS BASIS, WITHOUT
 * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
/*
 * CMSIS-Core(A) Compiler ARMClang (Arm Compiler 6) Header File
 */
#ifndef __CMSIS_ARMCLANG_A_H
#define __CMSIS_ARMCLANG_A_H

#pragma clang system_header   /* treat file as system include file */

#ifndef __CMSIS_ARMCLANG_H
  #error "This file must not be included directly"
#endif
/**
  \brief   STRBT Unprivileged (8 bit)
  \details Executes an unprivileged STRBT instruction for 8 bit values.
  \param [in]  value  Value to store
  \param [in]  ptr    Pointer to location
 */
__STATIC_FORCEINLINE void __STRBT(uint8_t value, volatile uint8_t *ptr)
{
  __ASM volatile ("strbt %1, %0, #0" : "=Q" (*ptr) : "r" ((uint32_t)value) );
}
/**
  \brief   STRHT Unprivileged (16 bit)
  \details Executes an unprivileged STRHT instruction for 16 bit values.
  \param [in]  value  Value to store
  \param [in]  ptr    Pointer to location
 */
__STATIC_FORCEINLINE void __STRHT(uint16_t value, volatile uint16_t *ptr)
{
  __ASM volatile ("strht %1, %0, #0" : "=Q" (*ptr) : "r" ((uint32_t)value) );
}
/**
  \brief   STRT Unprivileged (32 bit)
  \details Executes an unprivileged STRT instruction for 32 bit values.
  \param [in]  value  Value to store
  \param [in]  ptr    Pointer to location
 */
__STATIC_FORCEINLINE void __STRT(uint32_t value, volatile uint32_t *ptr)
{
  __ASM volatile ("strt %1, %0, #0" : "=Q" (*ptr) : "r" (value) );
}
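/*
  Usage sketch (illustrative, not part of the CMSIS-Core API): from a
  privileged handler, the unprivileged store intrinsics write through the
  pointer with USR-mode access rights, so the MMU/MPU faults the store if
  user code could not write that location itself. The names below are
  hypothetical.

      void svc_write_word(volatile uint32_t *user_ptr, uint32_t value)
      {
        __STRT(value, user_ptr);  // aborts if USR mode may not write *user_ptr
      }
 */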
/* ###################  Compiler specific Intrinsics  ########################### */
/** \defgroup CMSIS_SIMD_intrinsics CMSIS SIMD Intrinsics
  Access to dedicated SIMD instructions
  @{
 */
#if (defined (__ARM_FEATURE_DSP) && (__ARM_FEATURE_DSP == 1))

#define __SADD8              __builtin_arm_sadd8
#define __QADD8              __builtin_arm_qadd8
#define __SHADD8             __builtin_arm_shadd8
#define __UADD8              __builtin_arm_uadd8
#define __UQADD8             __builtin_arm_uqadd8
#define __UHADD8             __builtin_arm_uhadd8
#define __SSUB8              __builtin_arm_ssub8
#define __QSUB8              __builtin_arm_qsub8
#define __SHSUB8             __builtin_arm_shsub8
#define __USUB8              __builtin_arm_usub8
#define __UQSUB8             __builtin_arm_uqsub8
#define __UHSUB8             __builtin_arm_uhsub8
#define __SADD16             __builtin_arm_sadd16
#define __QADD16             __builtin_arm_qadd16
#define __SHADD16            __builtin_arm_shadd16
#define __UADD16             __builtin_arm_uadd16
#define __UQADD16            __builtin_arm_uqadd16
#define __UHADD16            __builtin_arm_uhadd16
#define __SSUB16             __builtin_arm_ssub16
#define __QSUB16             __builtin_arm_qsub16
#define __SHSUB16            __builtin_arm_shsub16
#define __USUB16             __builtin_arm_usub16
#define __UQSUB16            __builtin_arm_uqsub16
#define __UHSUB16            __builtin_arm_uhsub16
#define __SASX               __builtin_arm_sasx
#define __QASX               __builtin_arm_qasx
#define __SHASX              __builtin_arm_shasx
#define __UASX               __builtin_arm_uasx
#define __UQASX              __builtin_arm_uqasx
#define __UHASX              __builtin_arm_uhasx
#define __SSAX               __builtin_arm_ssax
#define __QSAX               __builtin_arm_qsax
#define __SHSAX              __builtin_arm_shsax
#define __USAX               __builtin_arm_usax
#define __UQSAX              __builtin_arm_uqsax
#define __UHSAX              __builtin_arm_uhsax
#define __USAD8              __builtin_arm_usad8
#define __USADA8             __builtin_arm_usada8
#define __SSAT16             __builtin_arm_ssat16
#define __USAT16             __builtin_arm_usat16
#define __UXTB16             __builtin_arm_uxtb16
#define __UXTAB16            __builtin_arm_uxtab16
#define __SXTB16             __builtin_arm_sxtb16
#define __SXTAB16            __builtin_arm_sxtab16
#define __SMUAD              __builtin_arm_smuad
#define __SMUADX             __builtin_arm_smuadx
#define __SMLAD              __builtin_arm_smlad
#define __SMLADX             __builtin_arm_smladx
#define __SMLALD             __builtin_arm_smlald
#define __SMLALDX            __builtin_arm_smlaldx
#define __SMUSD              __builtin_arm_smusd
#define __SMUSDX             __builtin_arm_smusdx
#define __SMLSD              __builtin_arm_smlsd
#define __SMLSDX             __builtin_arm_smlsdx
#define __SMLSLD             __builtin_arm_smlsld
#define __SMLSLDX            __builtin_arm_smlsldx
#define __SEL                __builtin_arm_sel
#define __QADD               __builtin_arm_qadd
#define __QSUB               __builtin_arm_qsub
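/*
  Usage sketch (illustrative only, assuming DSP support is present): the SIMD
  intrinsics operate on four 8-bit or two 16-bit lanes packed into a uint32_t.
  The helper names below are hypothetical.

      // Lane-wise byte add: each result byte is (a_i + b_i) modulo 256.
      static inline uint32_t add4_u8(uint32_t a, uint32_t b)
      {
        return __UADD8(a, b);
      }

      // Dual 16x16 multiply with accumulation of both products:
      // lo(a)*lo(b) + hi(a)*hi(b)
      static inline uint32_t dot2_s16(uint32_t a, uint32_t b)
      {
        return __SMUAD(a, b);
      }
 */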
#define __PKHBT(ARG1,ARG2,ARG3) \
__extension__ \
({                          \
  uint32_t __RES, __ARG1 = (ARG1), __ARG2 = (ARG2); \
  __ASM ("pkhbt %0, %1, %2, lsl %3" : "=r" (__RES) : "r" (__ARG1), "r" (__ARG2), "I" (ARG3) ); \
  __RES; \
 })
#define __PKHTB(ARG1,ARG2,ARG3) \
__extension__ \
({                          \
  uint32_t __RES, __ARG1 = (ARG1), __ARG2 = (ARG2); \
  if (ARG3 == 0) \
    __ASM ("pkhtb %0, %1, %2" : "=r" (__RES) : "r" (__ARG1), "r" (__ARG2) ); \
  else \
    __ASM ("pkhtb %0, %1, %2, asr %3" : "=r" (__RES) : "r" (__ARG1), "r" (__ARG2), "I" (ARG3) ); \
  __RES; \
 })
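/*
  Usage sketch (illustrative only): __PKHBT/__PKHTB pack two halfwords into
  one word; the shift amount must be a compile-time constant because it is
  encoded in the instruction (hence the "I" constraint).

      uint32_t lo     = 0x00001234U;
      uint32_t hi     = 0x00005678U;
      uint32_t packed = __PKHBT(lo, hi, 16);  // packed == 0x56781234
 */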
__STATIC_FORCEINLINE uint32_t __SXTB16_RORn(uint32_t op1, uint32_t rotate)
{
  uint32_t result;
  if (__builtin_constant_p(rotate) && ((rotate == 8U) || (rotate == 16U) || (rotate == 24U)))
  {
    __ASM volatile("sxtb16 %0, %1, ROR %2" : "=r"(result) : "r"(op1), "i"(rotate));
  }
  else
  {
    result = __SXTB16(__ROR(op1, rotate));
  }
  return result;
}
__STATIC_FORCEINLINE uint32_t __SXTAB16_RORn(uint32_t op1, uint32_t op2, uint32_t rotate)
{
  uint32_t result;
  if (__builtin_constant_p(rotate) && ((rotate == 8U) || (rotate == 16U) || (rotate == 24U)))
  {
    __ASM volatile("sxtab16 %0, %1, %2, ROR %3" : "=r"(result) : "r"(op1), "r"(op2), "i"(rotate));
  }
  else
  {
    result = __SXTAB16(op1, __ROR(op2, rotate));
  }
  return result;
}
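/*
  Usage sketch (illustrative only): the _RORn variants sign-extend bytes 0
  and 2 of the rotated operand, which is convenient for unpacking interleaved
  int8 data into two int16 lanes.

      uint32_t x  = 0x04030201U;          // bytes, LSB first: 01 02 03 04
      uint32_t lo = __SXTB16(x);          // lanes: sext(0x01), sext(0x03)
      uint32_t hi = __SXTB16_RORn(x, 8);  // lanes: sext(0x02), sext(0x04)
 */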
__STATIC_FORCEINLINE int32_t __SMMLA (int32_t op1, int32_t op2, int32_t op3)
{
  int32_t result;
  __ASM volatile ("smmla %0, %1, %2, %3" : "=r" (result) : "r" (op1), "r" (op2), "r" (op3) );
  return result;
}
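/*
  Usage sketch (illustrative only): SMMLA adds op3 to the most significant
  32 bits of the 64-bit product op1*op2, i.e. a Q31 multiply-accumulate
  without forming the full 64-bit result in C:

      acc = __SMMLA(a_q31, b_q31, acc);   // acc += (int64_t)a*b >> 32
 */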
#endif /* (defined (__ARM_FEATURE_DSP) && (__ARM_FEATURE_DSP == 1)) */
/** @} end of group CMSIS_SIMD_intrinsics */
/* ###########################  Core Function Access  ########################### */
/** \ingroup  CMSIS_Core_FunctionInterface
    \defgroup CMSIS_Core_RegAccFunctions CMSIS Core Register Access Functions
  @{
 */
/** \brief  Get CPSR Register
    \return CPSR Register value
 */
__STATIC_FORCEINLINE uint32_t __get_CPSR(void)
{
  uint32_t result;
  __ASM volatile("MRS %0, cpsr" : "=r" (result) );
  return result;
}
/** \brief  Set CPSR Register
    \param [in] cpsr  CPSR value to set
 */
__STATIC_FORCEINLINE void __set_CPSR(uint32_t cpsr)
{
  __ASM volatile ("MSR cpsr, %0" : : "r" (cpsr) : "cc", "memory");
}
/** \brief  Get Mode
    \return Processor Mode
 */
__STATIC_FORCEINLINE uint32_t __get_mode(void)
{
  return (__get_CPSR() & 0x1FU);
}
/** \brief  Set Mode
    \param [in] mode  Mode value to set
 */
__STATIC_FORCEINLINE void __set_mode(uint32_t mode)
{
  __ASM volatile("MSR cpsr_c, %0" : : "r" (mode) : "memory");
}
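/*
  Usage sketch (illustrative only): the low five CPSR bits returned by
  __get_mode() encode the AArch32 processor mode, e.g. 0x10 (USR),
  0x13 (SVC), 0x1F (SYS).

      if (__get_mode() == 0x13U)
      {
        // running in Supervisor mode
      }
 */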
/** \brief  Get Stack Pointer
    \return Stack Pointer value
 */
__STATIC_FORCEINLINE uint32_t __get_SP(void)
{
  uint32_t result;
  __ASM volatile("MOV %0, sp" : "=r" (result) : : "memory");
  return result;
}
/** \brief  Set Stack Pointer
    \param [in] stack  Stack Pointer value to set
 */
__STATIC_FORCEINLINE void __set_SP(uint32_t stack)
{
  __ASM volatile("MOV sp, %0" : : "r" (stack) : "memory");
}
/** \brief  Get USR/SYS Stack Pointer
    \return USR/SYS Stack Pointer value
 */
__STATIC_FORCEINLINE uint32_t __get_SP_usr(void)
{
  uint32_t cpsr;
  uint32_t result;
  __ASM volatile(
    "MRS     %0, cpsr   \n"
    "CPS     #0x1F      \n" // no effect in USR mode
    "MOV     %1, sp     \n"
    "MSR     cpsr_c, %0 \n" // no effect in USR mode
    "ISB" : "=r"(cpsr), "=r"(result) : : "memory"
  );
  return result;
}
/** \brief  Set USR/SYS Stack Pointer
    \param [in] topOfProcStack  USR/SYS Stack Pointer value to set
 */
__STATIC_FORCEINLINE void __set_SP_usr(uint32_t topOfProcStack)
{
  uint32_t cpsr;
  __ASM volatile(
    "MRS     %0, cpsr   \n"
    "CPS     #0x1F      \n" // no effect in USR mode
    "MOV     sp, %1     \n"
    "MSR     cpsr_c, %0 \n" // no effect in USR mode
    "ISB" : "=r"(cpsr) : "r" (topOfProcStack) : "memory"
  );
}
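/*
  Usage note (illustrative only): both USR/SYS stack-pointer accessors switch
  briefly to SYS mode (CPS #0x1F) to reach the banked SP, then restore the
  original mode, so they must be called from a privileged mode. A typical use
  is an RTOS port installing a task stack; the symbol below is hypothetical.

      extern uint32_t task_stack_top[];
      __set_SP_usr((uint32_t)task_stack_top);
 */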
/** \brief  Get FPEXC
    \return Floating Point Exception Control register value
 */
__STATIC_FORCEINLINE uint32_t __get_FPEXC(void)
{
#if (__FPU_PRESENT == 1)
  uint32_t result;
  __ASM volatile("VMRS %0, fpexc" : "=r" (result) : : "memory");
  return result;
#else
  return 0U;
#endif
}
/** \brief  Set FPEXC
    \param [in] fpexc  Floating Point Exception Control value to set
 */
__STATIC_FORCEINLINE void __set_FPEXC(uint32_t fpexc)
{
#if (__FPU_PRESENT == 1)
  __ASM volatile ("VMSR fpexc, %0" : : "r" (fpexc) : "memory");
#endif
}
/** @} end of CMSIS_Core_RegAccFunctions */
/*
 * Include common core functions to access Coprocessor 15 registers
 */

#define __get_CP(cp, op1, Rt, CRn, CRm, op2) __ASM volatile("MRC p" # cp ", " # op1 ", %0, c" # CRn ", c" # CRm ", " # op2 : "=r" (Rt) : : "memory" )
#define __set_CP(cp, op1, Rt, CRn, CRm, op2) __ASM volatile("MCR p" # cp ", " # op1 ", %0, c" # CRn ", c" # CRm ", " # op2 : : "r" (Rt) : "memory" )
#define __get_CP64(cp, op1, Rt, CRm)         __ASM volatile("MRRC p" # cp ", " # op1 ", %Q0, %R0, c" # CRm : "=r" (Rt) : : "memory" )
#define __set_CP64(cp, op1, Rt, CRm)         __ASM volatile("MCRR p" # cp ", " # op1 ", %Q0, %R0, c" # CRm : : "r" (Rt) : "memory" )

#include "cmsis_cp15.h"
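/*
  Usage sketch (illustrative only): the accessor macros stringize their
  arguments into MRC/MCR instructions. For example, the MPIDR register
  (CP15 op1=0, c0, c0, op2=5) can be read as:

      uint32_t mpidr;
      __get_CP(15, 0, mpidr, 0, 0, 5);   // MRC p15, 0, <Rt>, c0, c0, 5
 */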
/** \brief  Enable Floating Point Unit

  Critical section, called from undef handler, so systick is disabled
 */
__STATIC_INLINE void __FPU_Enable(void)
{
  __ASM volatile(
    // Permit access to VFP/NEON, registers by modifying CPACR
    "        MRC     p15,0,R1,c1,c0,2  \n"
    "        ORR     R1,R1,#0x00F00000 \n"
    "        MCR     p15,0,R1,c1,c0,2  \n"

    // Ensure that subsequent instructions occur in the context of VFP/NEON access permitted
    "        ISB                       \n"

    // Enable VFP/NEON
    "        VMRS    R1,FPEXC          \n"
    "        ORR     R1,R1,#0x40000000 \n"
    "        VMSR    FPEXC,R1          \n"

    // Initialise VFP/NEON registers to 0
    "        MOV     R2,#0             \n"

    // Initialise D16 registers to 0
    "        VMOV    D0, R2,R2         \n"
    "        VMOV    D1, R2,R2         \n"
    "        VMOV    D2, R2,R2         \n"
    "        VMOV    D3, R2,R2         \n"
    "        VMOV    D4, R2,R2         \n"
    "        VMOV    D5, R2,R2         \n"
    "        VMOV    D6, R2,R2         \n"
    "        VMOV    D7, R2,R2         \n"
    "        VMOV    D8, R2,R2         \n"
    "        VMOV    D9, R2,R2         \n"
    "        VMOV    D10,R2,R2         \n"
    "        VMOV    D11,R2,R2         \n"
    "        VMOV    D12,R2,R2         \n"
    "        VMOV    D13,R2,R2         \n"
    "        VMOV    D14,R2,R2         \n"
    "        VMOV    D15,R2,R2         \n"

#if (defined(__ARM_NEON) && (__ARM_NEON == 1))
    // Initialise D32 registers to 0
    "        VMOV    D16,R2,R2         \n"
    "        VMOV    D17,R2,R2         \n"
    "        VMOV    D18,R2,R2         \n"
    "        VMOV    D19,R2,R2         \n"
    "        VMOV    D20,R2,R2         \n"
    "        VMOV    D21,R2,R2         \n"
    "        VMOV    D22,R2,R2         \n"
    "        VMOV    D23,R2,R2         \n"
    "        VMOV    D24,R2,R2         \n"
    "        VMOV    D25,R2,R2         \n"
    "        VMOV    D26,R2,R2         \n"
    "        VMOV    D27,R2,R2         \n"
    "        VMOV    D28,R2,R2         \n"
    "        VMOV    D29,R2,R2         \n"
    "        VMOV    D30,R2,R2         \n"
    "        VMOV    D31,R2,R2         \n"
#endif

    // Initialise FPSCR to a known state
    "        VMRS    R1,FPSCR          \n"
    "        LDR     R2,=0x00086060    \n" // Mask off all bits that do not have to be preserved. Non-preserved bits can/should be zero.
    "        AND     R1,R1,R2          \n"
    "        VMSR    FPSCR,R1          \n"
    : : : "cc", "r1", "r2"
  );
}
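/*
  Usage note (illustrative only): __FPU_Enable() is typically called once
  from startup code or an Undefined Instruction handler before the first
  VFP/NEON instruction executes. It writes CPACR and FPEXC, so it assumes a
  privileged mode.
 */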
#endif /* __CMSIS_ARMCLANG_A_H */