/*
 * Copyright (c) 2023 Arm Limited. All rights reserved.
 *
 * SPDX-License-Identifier: Apache-2.0
 *
 * Licensed under the Apache License, Version 2.0 (the License); you may
 * not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an AS IS BASIS, WITHOUT
 * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

/*
 * CMSIS-Core(A) Compiler LLVM/Clang Header File
 */
#ifndef __CMSIS_CLANG_A_H
#define __CMSIS_CLANG_A_H

#pragma clang system_header   /* treat file as system include file */
#if (__ARM_ACLE >= 200)
  #include <arm_acle.h>
#else
  #error Compiler must support ACLE V2.0
#endif /* (__ARM_ACLE >= 200) */
/* CMSIS compiler specific defines */
#ifndef   __ASM
  #define __ASM                                  __asm
#endif
#ifndef   __INLINE
  #define __INLINE                               inline
#endif
#ifndef   __FORCEINLINE
  #define __FORCEINLINE                          __attribute__((always_inline))
#endif
#ifndef   __STATIC_INLINE
  #define __STATIC_INLINE                        static inline
#endif
#ifndef   __STATIC_FORCEINLINE
  #define __STATIC_FORCEINLINE                   __attribute__((always_inline)) static inline
#endif
#ifndef   __NO_RETURN
  #define __NO_RETURN                            __attribute__((__noreturn__))
#endif
#ifndef   CMSIS_DEPRECATED
  #define CMSIS_DEPRECATED                       __attribute__((deprecated))
#endif
#ifndef   __USED
  #define __USED                                 __attribute__((used))
#endif
#ifndef   __WEAK
  #define __WEAK                                 __attribute__((weak))
#endif
#ifndef   __PACKED
  #define __PACKED                               __attribute__((packed, aligned(1)))
#endif
#ifndef   __PACKED_STRUCT
  #define __PACKED_STRUCT                        struct __attribute__((packed, aligned(1)))
#endif
#ifndef   __UNALIGNED_UINT16_WRITE
  #pragma clang diagnostic push
  #pragma clang diagnostic ignored "-Wpacked"
  __PACKED_STRUCT T_UINT16_WRITE { uint16_t v; };
  #pragma clang diagnostic pop
  #define __UNALIGNED_UINT16_WRITE(addr, val)    (void)((((struct T_UINT16_WRITE *)(void *)(addr))->v) = (val))
#endif
#ifndef   __UNALIGNED_UINT16_READ
  #pragma clang diagnostic push
  #pragma clang diagnostic ignored "-Wpacked"
  __PACKED_STRUCT T_UINT16_READ { uint16_t v; };
  #pragma clang diagnostic pop
  #define __UNALIGNED_UINT16_READ(addr)          (((const struct T_UINT16_READ *)(const void *)(addr))->v)
#endif
#ifndef   __UNALIGNED_UINT32_WRITE
  #pragma clang diagnostic push
  #pragma clang diagnostic ignored "-Wpacked"
  __PACKED_STRUCT T_UINT32_WRITE { uint32_t v; };
  #pragma clang diagnostic pop
  #define __UNALIGNED_UINT32_WRITE(addr, val)    (void)((((struct T_UINT32_WRITE *)(void *)(addr))->v) = (val))
#endif
#ifndef   __UNALIGNED_UINT32_READ
  #pragma clang diagnostic push
  #pragma clang diagnostic ignored "-Wpacked"
  __PACKED_STRUCT T_UINT32_READ { uint32_t v; };
  #pragma clang diagnostic pop
  #define __UNALIGNED_UINT32_READ(addr)          (((const struct T_UINT32_READ *)(const void *)(addr))->v)
#endif
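/* Usage sketch (illustrative only, not part of the original header): the
   unaligned-access macros allow halfword/word access at byte granularity
   without risking an alignment trap. The buffer name below is hypothetical.

     uint8_t frame[8];
     __UNALIGNED_UINT16_WRITE(&frame[1], 0xABCDU);           // odd offset is fine
     uint16_t field = __UNALIGNED_UINT16_READ(&frame[1]);    // reads back 0xABCD
     __UNALIGNED_UINT32_WRITE(&frame[3], 0x12345678U);
*/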
#ifndef   __ALIGNED
  #define __ALIGNED(x)                           __attribute__((aligned(x)))
#endif
#ifndef   __RESTRICT
  #define __RESTRICT                             __restrict
#endif
#ifndef   __COMPILER_BARRIER
  #define __COMPILER_BARRIER()                   __ASM volatile("":::"memory")
#endif
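/* Usage sketch (illustrative only): __COMPILER_BARRIER() stops the compiler
   from reordering memory accesses across it without emitting a hardware
   barrier instruction. The variable names are hypothetical.

     buffer[0] = payload;      // data write stays before the flag write
     __COMPILER_BARRIER();     // no compiler reordering across this point
     data_ready = 1U;          // publish flag
*/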
/* ##########################  Core Instruction Access  ######################### */
/**
  \brief   No Operation
  \details No Operation does nothing. This instruction can be used for code alignment purposes.
 */
#define __NOP()                             __ASM volatile ("nop")
/**
  \brief   Wait For Interrupt
  \details Wait For Interrupt is a hint instruction that suspends execution until one of a number of events occurs.
 */
#define __WFI()                             __ASM volatile ("wfi":::"memory")
/**
  \brief   Wait For Event
  \details Wait For Event is a hint instruction that permits the processor to enter
           a low-power state until one of a number of events occurs.
 */
#define __WFE()                             __ASM volatile ("wfe":::"memory")
/**
  \brief   Send Event
  \details Send Event is a hint instruction. It causes an event to be signaled to the CPU.
 */
#define __SEV()                             __ASM volatile ("sev")
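/* Usage sketch (illustrative only): one core can wait for an event with
   __WFE() while another signals it with __SEV(). The flag name is
   hypothetical.

     while (lock_taken != 0U) {
       __WFE();                // sleep until an event (e.g. __SEV) arrives
     }
     // ... on the releasing core:
     __SEV();                  // wake any cores waiting in WFE
*/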
/**
  \brief   Instruction Synchronization Barrier
  \details Instruction Synchronization Barrier flushes the pipeline in the processor,
           so that all instructions following the ISB are fetched from cache or memory,
           after the instruction has been completed.
 */
__STATIC_FORCEINLINE void __ISB(void)
{
  __ASM volatile ("isb 0xF":::"memory");
}
/**
  \brief   Data Synchronization Barrier
  \details Acts as a special kind of Data Memory Barrier.
           It completes when all explicit memory accesses before this instruction complete.
 */
__STATIC_FORCEINLINE void __DSB(void)
{
  __ASM volatile ("dsb 0xF":::"memory");
}
/**
  \brief   Data Memory Barrier
  \details Ensures the apparent order of the explicit memory operations before
           and after the instruction, without ensuring their completion.
 */
__STATIC_FORCEINLINE void __DMB(void)
{
  __ASM volatile ("dmb 0xF":::"memory");
}
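/* Usage sketch (illustrative only): a producer uses __DMB() so the data
   write is observed by other masters before the flag write. Names are
   hypothetical.

     shared_buffer[0] = value;   // produce data
     __DMB();                    // order data write before flag write
     shared_flag = 1U;           // publish
*/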
/**
  \brief   Reverse byte order (32 bit)
  \details Reverses the byte order in unsigned integer value. For example, 0x12345678 becomes 0x78563412.
  \param [in]    value  Value to reverse
  \return               Reversed value
 */
__STATIC_FORCEINLINE uint32_t __REV(uint32_t value)
{
#if (__GNUC__ > 4) || (__GNUC__ == 4 && __GNUC_MINOR__ >= 5)
  return __builtin_bswap32(value);
#else
  uint32_t result;

  __ASM ("rev %0, %1" : "=r" (result) : "r" (value) );
  return result;
#endif
}
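/* Usage sketch (illustrative only): __REV converts between little- and
   big-endian 32-bit representations, e.g. for network byte order.

     uint32_t host    = 0x12345678U;
     uint32_t swapped = __REV(host);   // 0x78563412
*/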
/**
  \brief   Reverse byte order (16 bit)
  \details Reverses the byte order within each halfword of a word. For example, 0x12345678 becomes 0x34127856.
  \param [in]    value  Value to reverse
  \return               Reversed value
 */
__STATIC_FORCEINLINE uint32_t __REV16(uint32_t value)
{
  uint32_t result;

  __ASM ("rev16 %0, %1" : "=r" (result) : "r" (value));
  return result;
}
/**
  \brief   Reverse byte order (16 bit)
  \details Reverses the byte order in a 16-bit value and returns the signed 16-bit result. For example, 0x0080 becomes 0x8000.
  \param [in]    value  Value to reverse
  \return               Reversed value
 */
__STATIC_FORCEINLINE int16_t __REVSH(int16_t value)
{
#if (__GNUC__ > 4) || (__GNUC__ == 4 && __GNUC_MINOR__ >= 8)
  return (int16_t)__builtin_bswap16(value);
#else
  int16_t result;

  __ASM ("revsh %0, %1" : "=r" (result) : "r" (value) );
  return result;
#endif
}
/**
  \brief   Rotate Right in unsigned value (32 bit)
  \details Rotate Right (immediate) provides the value of the contents of a register rotated by a variable number of bits.
  \param [in]    op1  Value to rotate
  \param [in]    op2  Number of Bits to rotate
  \return             Rotated value
 */
__STATIC_FORCEINLINE uint32_t __ROR(uint32_t op1, uint32_t op2)
{
  op2 %= 32U;
  if (op2 == 0U)
  {
    return op1;
  }
  return (op1 >> op2) | (op1 << (32U - op2));
}
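/* Usage sketch (illustrative only):

     uint32_t x = __ROR(0x000000FFU, 8U);   // 0xFF000000: low byte rotated
                                            // around into the top byte
*/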
/**
  \brief   Breakpoint
  \details Causes the processor to enter Debug state.
           Debug tools can use this to investigate system state when the instruction at a particular address is reached.
  \param [in]    value  is ignored by the processor.
                 If required, a debugger can use it to store additional information about the breakpoint.
 */
#define __BKPT(value)   __ASM volatile ("bkpt "#value)
/**
  \brief   Reverse bit order of value
  \details Reverses the bit order of the given value.
  \param [in]    value  Value to reverse
  \return               Reversed value
 */
__STATIC_FORCEINLINE uint32_t __RBIT(uint32_t value)
{
  uint32_t result;

  __ASM ("rbit %0, %1" : "=r" (result) : "r" (value) );
  return result;
}
/**
  \brief   Count leading zeros
  \details Counts the number of leading zeros of a data value.
  \param [in]  value  Value to count the leading zeros
  \return             number of leading zeros in value
 */
__STATIC_FORCEINLINE uint8_t __CLZ(uint32_t value)
{
  /* Even though __builtin_clz produces a CLZ instruction on ARM, formally
     __builtin_clz(0) is undefined behaviour, so handle this case specially.
     This guarantees ARM-compatible results if happening to compile on a non-ARM
     target, and ensures the compiler doesn't decide to activate any
     optimisations using the logic "value was passed to __builtin_clz, so it
     is non-zero".
   */
  if (value == 0U)
  {
    return 32U;
  }
  return __builtin_clz(value);
}
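/* Usage sketch (illustrative only): a common use of __CLZ is computing the
   integer log2 / highest set bit of a non-zero value.

     uint32_t v           = 0x00001000U;
     uint32_t highest_bit = 31U - __CLZ(v);   // 12
     uint32_t zeros       = __CLZ(0U);        // 32, per the zero check above
*/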
/**
  \brief   LDR Exclusive (8 bit)
  \details Executes an exclusive LDR instruction for an 8-bit value.
  \param [in]    addr  Pointer to data
  \return              value of type uint8_t at (*addr)
 */
__STATIC_FORCEINLINE uint8_t __LDREXB(volatile uint8_t *addr)
{
  uint32_t result;

  __ASM volatile ("ldrexb %0, %1" : "=r" (result) : "Q" (*addr) );
  return ((uint8_t) result);    /* Add explicit type cast here */
}
/**
  \brief   LDR Exclusive (16 bit)
  \details Executes an exclusive LDR instruction for 16-bit values.
  \param [in]    addr  Pointer to data
  \return              value of type uint16_t at (*addr)
 */
__STATIC_FORCEINLINE uint16_t __LDREXH(volatile uint16_t *addr)
{
  uint32_t result;

  __ASM volatile ("ldrexh %0, %1" : "=r" (result) : "Q" (*addr) );
  return ((uint16_t) result);    /* Add explicit type cast here */
}
/**
  \brief   LDR Exclusive (32 bit)
  \details Executes an exclusive LDR instruction for 32-bit values.
  \param [in]    addr  Pointer to data
  \return              value of type uint32_t at (*addr)
 */
__STATIC_FORCEINLINE uint32_t __LDREXW(volatile uint32_t *addr)
{
  uint32_t result;

  __ASM volatile ("ldrex %0, %1" : "=r" (result) : "Q" (*addr) );
  return result;
}
/**
  \brief   STR Exclusive (8 bit)
  \details Executes an exclusive STR instruction for 8-bit values.
  \param [in]  value  Value to store
  \param [in]  addr   Pointer to location
  \return          0  Function succeeded
  \return          1  Function failed
 */
__STATIC_FORCEINLINE uint32_t __STREXB(uint8_t value, volatile uint8_t *addr)
{
  uint32_t result;

  __ASM volatile ("strexb %0, %2, %1" : "=&r" (result), "=Q" (*addr) : "r" ((uint32_t)value) );
  return result;
}
/**
  \brief   STR Exclusive (16 bit)
  \details Executes an exclusive STR instruction for 16-bit values.
  \param [in]  value  Value to store
  \param [in]  addr   Pointer to location
  \return          0  Function succeeded
  \return          1  Function failed
 */
__STATIC_FORCEINLINE uint32_t __STREXH(uint16_t value, volatile uint16_t *addr)
{
  uint32_t result;

  __ASM volatile ("strexh %0, %2, %1" : "=&r" (result), "=Q" (*addr) : "r" ((uint32_t)value) );
  return result;
}
/**
  \brief   STR Exclusive (32 bit)
  \details Executes an exclusive STR instruction for 32-bit values.
  \param [in]  value  Value to store
  \param [in]  addr   Pointer to location
  \return          0  Function succeeded
  \return          1  Function failed
 */
__STATIC_FORCEINLINE uint32_t __STREXW(uint32_t value, volatile uint32_t *addr)
{
  uint32_t result;

  __ASM volatile ("strex %0, %2, %1" : "=&r" (result), "=Q" (*addr) : "r" (value) );
  return result;
}
/**
  \brief   Remove the exclusive lock
  \details Removes the exclusive lock which is created by LDREX.
 */
__STATIC_FORCEINLINE void __CLREX(void)
{
  __ASM volatile ("clrex" ::: "memory");
}
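/* Usage sketch (illustrative only, assuming the memory is configured so the
   exclusive monitor covers the address): an atomic increment built from the
   exclusives above. The helper name is hypothetical.

     static uint32_t atomic_inc(volatile uint32_t *counter)
     {
       uint32_t val;
       do {
         val = __LDREXW(counter);                     // open exclusive access
       } while (__STREXW(val + 1U, counter) != 0U);   // retry if access lost
       return val;
     }
*/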
/**
  \brief   Signed Saturate
  \details Saturates a signed value.
  \param [in]  ARG1  Value to be saturated
  \param [in]  ARG2  Bit position to saturate to (1..32)
  \return            Saturated value
 */
#define __SSAT(ARG1, ARG2) \
__extension__ \
({                          \
  int32_t __RES, __ARG1 = (ARG1); \
  __ASM volatile ("ssat %0, %1, %2" : "=r" (__RES) : "I" (ARG2), "r" (__ARG1) : "cc" ); \
  __RES; \
 })
/**
  \brief   Unsigned Saturate
  \details Saturates an unsigned value.
  \param [in]  ARG1  Value to be saturated
  \param [in]  ARG2  Bit position to saturate to (0..31)
  \return            Saturated value
 */
#define __USAT(ARG1, ARG2) \
__extension__ \
({                          \
  uint32_t __RES, __ARG1 = (ARG1); \
  __ASM volatile ("usat %0, %1, %2" : "=r" (__RES) : "I" (ARG2), "r" (__ARG1) : "cc" ); \
  __RES; \
 })
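/* Usage sketch (illustrative only): clamping a 32-bit intermediate result to
   a narrower range. The bit position must be a compile-time constant.

     int32_t  s = __SSAT(40000, 16);   //  32767: clamped to signed 16-bit
     uint32_t u = __USAT(-5, 8);       //      0: clamped to unsigned 8-bit
*/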
/**
  \brief   Rotate Right with Extend (32 bit)
  \details Moves each bit of a bitstring right by one bit.
           The carry input is shifted in at the left end of the bitstring.
  \param [in]    value  Value to rotate
  \return               Rotated value
 */
__STATIC_FORCEINLINE uint32_t __RRX(uint32_t value)
{
  uint32_t result;

  __ASM volatile ("rrx %0, %1" : "=r" (result) : "r" (value));
  return result;
}
/**
  \brief   LDRT Unprivileged (8 bit)
  \details Executes an unprivileged LDRT instruction for an 8-bit value.
  \param [in]    ptr  Pointer to data
  \return             value of type uint8_t at (*ptr)
 */
__STATIC_FORCEINLINE uint8_t __LDRBT(volatile uint8_t *ptr)
{
  uint32_t result;

  __ASM volatile ("ldrbt %0, %1" : "=r" (result) : "Q" (*ptr) );
  return ((uint8_t)result);    /* Add explicit type cast here */
}
/**
  \brief   LDRT Unprivileged (16 bit)
  \details Executes an unprivileged LDRT instruction for 16-bit values.
  \param [in]    ptr  Pointer to data
  \return             value of type uint16_t at (*ptr)
 */
__STATIC_FORCEINLINE uint16_t __LDRHT(volatile uint16_t *ptr)
{
  uint32_t result;

  __ASM volatile ("ldrht %0, %1" : "=r" (result) : "Q" (*ptr) );
  return ((uint16_t)result);    /* Add explicit type cast here */
}
/**
  \brief   LDRT Unprivileged (32 bit)
  \details Executes an unprivileged LDRT instruction for 32-bit values.
  \param [in]    ptr  Pointer to data
  \return             value of type uint32_t at (*ptr)
 */
__STATIC_FORCEINLINE uint32_t __LDRT(volatile uint32_t *ptr)
{
  uint32_t result;

  __ASM volatile ("ldrt %0, %1" : "=r" (result) : "Q" (*ptr) );
  return result;
}
/**
  \brief   STRT Unprivileged (8 bit)
  \details Executes an unprivileged STRT instruction for 8-bit values.
  \param [in]  value  Value to store
  \param [in]  ptr    Pointer to location
 */
__STATIC_FORCEINLINE void __STRBT(uint8_t value, volatile uint8_t *ptr)
{
  __ASM volatile ("strbt %1, %0, #0" : "=Q" (*ptr) : "r" ((uint32_t)value) );
}
/**
  \brief   STRT Unprivileged (16 bit)
  \details Executes an unprivileged STRT instruction for 16-bit values.
  \param [in]  value  Value to store
  \param [in]  ptr    Pointer to location
 */
__STATIC_FORCEINLINE void __STRHT(uint16_t value, volatile uint16_t *ptr)
{
  __ASM volatile ("strht %1, %0, #0" : "=Q" (*ptr) : "r" ((uint32_t)value) );
}
/**
  \brief   STRT Unprivileged (32 bit)
  \details Executes an unprivileged STRT instruction for 32-bit values.
  \param [in]  value  Value to store
  \param [in]  ptr    Pointer to location
 */
__STATIC_FORCEINLINE void __STRT(uint32_t value, volatile uint32_t *ptr)
{
  __ASM volatile ("strt %1, %0, #0" : "=Q" (*ptr) : "r" (value) );
}
/* ###########################  Core Function Access  ########################### */
/** \ingroup  CMSIS_Core_FunctionInterface
    \defgroup CMSIS_Core_RegAccFunctions CMSIS Core Register Access Functions
  @{
 */

/**
  \brief   Enable IRQ Interrupts
  \details Enables IRQ interrupts by clearing the I-bit in the CPSR.
           Can only be executed in Privileged modes.
 */
__STATIC_FORCEINLINE void __enable_irq(void)
{
  __ASM volatile ("cpsie i" : : : "memory");
}
/**
  \brief   Disable IRQ Interrupts
  \details Disables IRQ interrupts by setting the I-bit in the CPSR.
           Can only be executed in Privileged modes.
 */
__STATIC_FORCEINLINE void __disable_irq(void)
{
  __ASM volatile ("cpsid i" : : : "memory");
}
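/* Usage sketch (illustrative only): a simple critical section. Note this
   variant unconditionally re-enables IRQs; real code usually saves and
   restores the CPSR instead. The variable name is hypothetical.

     __disable_irq();
     shared_state++;            // update shared state without preemption
     __enable_irq();
*/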
/**
  \brief   Enable FIQ Interrupts
  \details Enables FIQ interrupts by clearing the F-bit in the CPSR.
           Can only be executed in Privileged modes.
 */
__STATIC_FORCEINLINE void __enable_fault_irq(void)
{
  __ASM volatile ("cpsie f" : : : "memory");
}
/**
  \brief   Disable FIQ Interrupts
  \details Disables FIQ interrupts by setting the F-bit in the CPSR.
           Can only be executed in Privileged modes.
 */
__STATIC_FORCEINLINE void __disable_fault_irq(void)
{
  __ASM volatile ("cpsid f" : : : "memory");
}
/**
  \brief   Get FPSCR
  \details Returns the current value of the Floating Point Status/Control register.
  \return  Floating Point Status/Control register value
 */
__STATIC_FORCEINLINE uint32_t __get_FPSCR(void)
{
#if ((defined (__FPU_PRESENT) && (__FPU_PRESENT == 1U)) && \
     (defined (__FPU_USED   ) && (__FPU_USED    == 1U))     )
  return __builtin_arm_get_fpscr();
#else
  return (0U);
#endif
}
/**
  \brief   Set FPSCR
  \details Assigns the given value to the Floating Point Status/Control register.
  \param [in]  fpscr  Floating Point Status/Control value to set
 */
__STATIC_FORCEINLINE void __set_FPSCR(uint32_t fpscr)
{
#if ((defined (__FPU_PRESENT) && (__FPU_PRESENT == 1U)) && \
     (defined (__FPU_USED   ) && (__FPU_USED    == 1U))     )
  __builtin_arm_set_fpscr(fpscr);
#else
  (void)fpscr;
#endif
}

/*@} end of CMSIS_Core_RegAccFunctions */
/* ###################  Compiler specific Intrinsics  ########################### */
/** \defgroup CMSIS_SIMD_intrinsics CMSIS SIMD Intrinsics
  Access to dedicated SIMD instructions
  @{
*/

#if (defined (__ARM_FEATURE_DSP) && (__ARM_FEATURE_DSP == 1))
#define __SADD8        __builtin_arm_sadd8
#define __QADD8        __builtin_arm_qadd8
#define __SHADD8       __builtin_arm_shadd8
#define __UADD8        __builtin_arm_uadd8
#define __UQADD8       __builtin_arm_uqadd8
#define __UHADD8       __builtin_arm_uhadd8
#define __SSUB8        __builtin_arm_ssub8
#define __QSUB8        __builtin_arm_qsub8
#define __SHSUB8       __builtin_arm_shsub8
#define __USUB8        __builtin_arm_usub8
#define __UQSUB8       __builtin_arm_uqsub8
#define __UHSUB8       __builtin_arm_uhsub8
#define __SADD16       __builtin_arm_sadd16
#define __QADD16       __builtin_arm_qadd16
#define __SHADD16      __builtin_arm_shadd16
#define __UADD16       __builtin_arm_uadd16
#define __UQADD16      __builtin_arm_uqadd16
#define __UHADD16      __builtin_arm_uhadd16
#define __SSUB16       __builtin_arm_ssub16
#define __QSUB16       __builtin_arm_qsub16
#define __SHSUB16      __builtin_arm_shsub16
#define __USUB16       __builtin_arm_usub16
#define __UQSUB16      __builtin_arm_uqsub16
#define __UHSUB16      __builtin_arm_uhsub16
#define __SASX         __builtin_arm_sasx
#define __QASX         __builtin_arm_qasx
#define __SHASX        __builtin_arm_shasx
#define __UASX         __builtin_arm_uasx
#define __UQASX        __builtin_arm_uqasx
#define __UHASX        __builtin_arm_uhasx
#define __SSAX         __builtin_arm_ssax
#define __QSAX         __builtin_arm_qsax
#define __SHSAX        __builtin_arm_shsax
#define __USAX         __builtin_arm_usax
#define __UQSAX        __builtin_arm_uqsax
#define __UHSAX        __builtin_arm_uhsax
#define __USAD8        __builtin_arm_usad8
#define __USADA8       __builtin_arm_usada8
#define __SSAT16       __builtin_arm_ssat16
#define __USAT16       __builtin_arm_usat16
#define __UXTB16       __builtin_arm_uxtb16
#define __UXTAB16      __builtin_arm_uxtab16
#define __SXTB16       __builtin_arm_sxtb16
#define __SXTAB16      __builtin_arm_sxtab16
#define __SMUAD        __builtin_arm_smuad
#define __SMUADX       __builtin_arm_smuadx
#define __SMLAD        __builtin_arm_smlad
#define __SMLADX       __builtin_arm_smladx
#define __SMLALD       __builtin_arm_smlald
#define __SMLALDX      __builtin_arm_smlaldx
#define __SMUSD        __builtin_arm_smusd
#define __SMUSDX       __builtin_arm_smusdx
#define __SMLSD        __builtin_arm_smlsd
#define __SMLSDX       __builtin_arm_smlsdx
#define __SMLSLD       __builtin_arm_smlsld
#define __SMLSLDX      __builtin_arm_smlsldx
#define __SEL          __builtin_arm_sel
#define __QADD         __builtin_arm_qadd
#define __QSUB         __builtin_arm_qsub
#define __PKHBT(ARG1,ARG2,ARG3) \
__extension__ \
({                          \
  uint32_t __RES, __ARG1 = (ARG1), __ARG2 = (ARG2); \
  __ASM ("pkhbt %0, %1, %2, lsl %3" : "=r" (__RES) : "r" (__ARG1), "r" (__ARG2), "I" (ARG3) ); \
  __RES; \
 })
#define __PKHTB(ARG1,ARG2,ARG3) \
__extension__ \
({                          \
  uint32_t __RES, __ARG1 = (ARG1), __ARG2 = (ARG2); \
  if (ARG3 == 0) \
    __ASM ("pkhtb %0, %1, %2" : "=r" (__RES) : "r" (__ARG1), "r" (__ARG2) ); \
  else \
    __ASM ("pkhtb %0, %1, %2, asr %3" : "=r" (__RES) : "r" (__ARG1), "r" (__ARG2), "I" (ARG3) ); \
  __RES; \
 })
__STATIC_FORCEINLINE uint32_t __SXTB16_RORn(uint32_t op1, uint32_t rotate)
{
  uint32_t result;
  if (__builtin_constant_p(rotate) && ((rotate == 8U) || (rotate == 16U) || (rotate == 24U)))
  {
    __ASM volatile("sxtb16 %0, %1, ROR %2" : "=r"(result) : "r"(op1), "i"(rotate));
  }
  else
  {
    result = __SXTB16(__ROR(op1, rotate));
  }
  return result;
}
__STATIC_FORCEINLINE uint32_t __SXTAB16_RORn(uint32_t op1, uint32_t op2, uint32_t rotate)
{
  uint32_t result;
  if (__builtin_constant_p(rotate) && ((rotate == 8U) || (rotate == 16U) || (rotate == 24U)))
  {
    __ASM volatile("sxtab16 %0, %1, %2, ROR %3" : "=r"(result) : "r"(op1), "r"(op2), "i"(rotate));
  }
  else
  {
    result = __SXTAB16(op1, __ROR(op2, rotate));
  }
  return result;
}
__STATIC_FORCEINLINE int32_t __SMMLA (int32_t op1, int32_t op2, int32_t op3)
{
  int32_t result;

  __ASM volatile ("smmla %0, %1, %2, %3" : "=r" (result): "r" (op1), "r" (op2), "r" (op3) );
  return result;
}
#endif /* (__ARM_FEATURE_DSP == 1) */
/** @} end of group CMSIS_SIMD_intrinsics */
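/* Usage sketch (illustrative only, requires __ARM_FEATURE_DSP): the SIMD
   intrinsics operate lane-wise on four packed bytes or two packed halfwords
   per 32-bit word.

     uint32_t a   = 0x01020304U;
     uint32_t b   = 0x10203040U;
     uint32_t sum = __UADD8(a, b);   // 0x11223344: four byte-wise adds
*/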
/** \defgroup CMSIS_Core_intrinsics CMSIS Core Intrinsics
  Access to dedicated core instructions and registers
  @{
*/

/** \brief  Get CPSR Register
    \return CPSR Register value
 */
__STATIC_FORCEINLINE uint32_t __get_CPSR(void)
{
  uint32_t result;

  __ASM volatile("MRS %0, cpsr" : "=r" (result) );
  return result;
}
/** \brief  Set CPSR Register
    \param [in]  cpsr  CPSR value to set
 */
__STATIC_FORCEINLINE void __set_CPSR(uint32_t cpsr)
{
  __ASM volatile ("MSR cpsr, %0" : : "r" (cpsr) : "cc", "memory");
}
/** \brief  Get Mode
    \return Processor Mode
 */
__STATIC_FORCEINLINE uint32_t __get_mode(void)
{
  return (__get_CPSR() & 0x1FU);
}
/** \brief  Set Mode
    \param [in]  mode  Mode value to set
 */
__STATIC_FORCEINLINE void __set_mode(uint32_t mode)
{
  __ASM volatile("MSR cpsr_c, %0" : : "r" (mode) : "memory");
}
/** \brief  Get Stack Pointer
    \return Stack Pointer value
 */
__STATIC_FORCEINLINE uint32_t __get_SP(void)
{
  uint32_t result;

  __ASM volatile("MOV %0, sp" : "=r" (result) : : "memory");
  return result;
}
/** \brief  Set Stack Pointer
    \param [in]  stack  Stack Pointer value to set
 */
__STATIC_FORCEINLINE void __set_SP(uint32_t stack)
{
  __ASM volatile("MOV sp, %0" : : "r" (stack) : "memory");
}
/** \brief  Get USR/SYS Stack Pointer
    \return USR/SYS Stack Pointer value
 */
__STATIC_FORCEINLINE uint32_t __get_SP_usr(void)
{
  uint32_t cpsr = __get_CPSR();
  uint32_t result;

  __ASM volatile(
    "CPS     #0x1F  \n"   // switch to SYS mode (no effect in USR mode)
    "MOV     %0, sp " : "=r"(result) : : "memory"
   );
  __set_CPSR(cpsr);
  __ISB();
  return result;
}
/** \brief  Set USR/SYS Stack Pointer
    \param [in]  topOfProcStack  USR/SYS Stack Pointer value to set
 */
__STATIC_FORCEINLINE void __set_SP_usr(uint32_t topOfProcStack)
{
  uint32_t cpsr = __get_CPSR();

  __ASM volatile(
    "CPS     #0x1F  \n"   // switch to SYS mode (no effect in USR mode)
    "MOV     sp, %0 " : : "r" (topOfProcStack) : "memory"
   );
  __set_CPSR(cpsr);
  __ISB();
}
/** \brief  Get FPEXC
    \return Floating Point Exception Control register value
 */
__STATIC_FORCEINLINE uint32_t __get_FPEXC(void)
{
#if (__FPU_PRESENT == 1)
  uint32_t result;

  __ASM volatile("VMRS %0, fpexc" : "=r" (result) : : "memory");
  return result;
#else
  return (0U);
#endif
}
/** \brief  Set FPEXC
    \param [in]  fpexc  Floating Point Exception Control value to set
 */
__STATIC_FORCEINLINE void __set_FPEXC(uint32_t fpexc)
{
#if (__FPU_PRESENT == 1)
  __ASM volatile ("VMSR fpexc, %0" : : "r" (fpexc) : "memory");
#endif
}
/*
 * Include common core functions to access Coprocessor 15 registers
 */

#define __get_CP(cp, op1, Rt, CRn, CRm, op2) __ASM volatile("MRC p" # cp ", " # op1 ", %0, c" # CRn ", c" # CRm ", " # op2 : "=r" (Rt) : : "memory" )
#define __set_CP(cp, op1, Rt, CRn, CRm, op2) __ASM volatile("MCR p" # cp ", " # op1 ", %0, c" # CRn ", c" # CRm ", " # op2 : : "r" (Rt) : "memory" )
#define __get_CP64(cp, op1, Rt, CRm)         __ASM volatile("MRRC p" # cp ", " # op1 ", %Q0, %R0, c" # CRm : "=r" (Rt) : : "memory" )
#define __set_CP64(cp, op1, Rt, CRm)         __ASM volatile("MCRR p" # cp ", " # op1 ", %Q0, %R0, c" # CRm : : "r" (Rt) : "memory" )
#include "cmsis_cp15.h"
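/* Usage sketch (illustrative only): reading the Multiprocessor Affinity
   Register (MPIDR, CP15 c0) with the accessor macro above.

     uint32_t mpidr;
     __get_CP(15, 0, mpidr, 0, 0, 5);   // MRC p15, 0, <Rt>, c0, c0, 5
     uint32_t core_id = mpidr & 0x3U;   // affinity level 0
*/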
/** \brief  Enable Floating Point Unit

  Critical section, called from undef handler, so systick is disabled
 */
__STATIC_INLINE void __FPU_Enable(void)
{
  // Permit access to VFP/NEON registers by modifying CPACR
  const uint32_t cpacr = __get_CPACR();
  __set_CPACR(cpacr | 0x00F00000ul);
  __ISB();

  // Enable VFP/NEON
  const uint32_t fpexc = __get_FPEXC();
  __set_FPEXC(fpexc | 0x40000000ul);

  __ASM volatile(
    // Initialise VFP/NEON registers to 0
    "        MOV     R2,#0             \n"

    // Initialise D16 registers to 0
    "        VMOV    D0, R2,R2         \n"
    "        VMOV    D1, R2,R2         \n"
    "        VMOV    D2, R2,R2         \n"
    "        VMOV    D3, R2,R2         \n"
    "        VMOV    D4, R2,R2         \n"
    "        VMOV    D5, R2,R2         \n"
    "        VMOV    D6, R2,R2         \n"
    "        VMOV    D7, R2,R2         \n"
    "        VMOV    D8, R2,R2         \n"
    "        VMOV    D9, R2,R2         \n"
    "        VMOV    D10,R2,R2         \n"
    "        VMOV    D11,R2,R2         \n"
    "        VMOV    D12,R2,R2         \n"
    "        VMOV    D13,R2,R2         \n"
    "        VMOV    D14,R2,R2         \n"
    "        VMOV    D15,R2,R2         \n"

#if (defined(__ARM_NEON) && (__ARM_NEON == 1))
    // Initialise D32 registers to 0
    "        VMOV    D16,R2,R2         \n"
    "        VMOV    D17,R2,R2         \n"
    "        VMOV    D18,R2,R2         \n"
    "        VMOV    D19,R2,R2         \n"
    "        VMOV    D20,R2,R2         \n"
    "        VMOV    D21,R2,R2         \n"
    "        VMOV    D22,R2,R2         \n"
    "        VMOV    D23,R2,R2         \n"
    "        VMOV    D24,R2,R2         \n"
    "        VMOV    D25,R2,R2         \n"
    "        VMOV    D26,R2,R2         \n"
    "        VMOV    D27,R2,R2         \n"
    "        VMOV    D28,R2,R2         \n"
    "        VMOV    D29,R2,R2         \n"
    "        VMOV    D30,R2,R2         \n"
    "        VMOV    D31,R2,R2         \n"
#endif
    : : : "cc", "r2"
  );

  // Initialise FPSCR to a known state
  const uint32_t fpscr = __get_FPSCR();
  __set_FPSCR(fpscr & 0x00086060ul);
}
/*@} end of group CMSIS_Core_intrinsics */

#pragma clang diagnostic pop

#endif /* __CMSIS_CLANG_A_H */