/*
 * Copyright (c) 2009-2023 Arm Limited. All rights reserved.
 *
 * SPDX-License-Identifier: Apache-2.0
 *
 * Licensed under the Apache License, Version 2.0 (the License); you may
 * not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an AS IS BASIS, WITHOUT
 * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

/*
 * CMSIS-Core(A) Compiler GCC Header File
 */
#ifndef __CMSIS_GCC_A_H
#define __CMSIS_GCC_A_H

/* ignore some GCC warnings */
#pragma GCC diagnostic push
#pragma GCC diagnostic ignored "-Wsign-conversion"
#pragma GCC diagnostic ignored "-Wconversion"
#pragma GCC diagnostic ignored "-Wunused-parameter"
/* Fallback for __has_builtin */
#ifndef __has_builtin
  #define __has_builtin(x) (0)
#endif
/* CMSIS compiler specific defines */
#ifndef   __ASM
  #define __ASM                                  __asm
#endif
#ifndef   __INLINE
  #define __INLINE                               inline
#endif
#ifndef   __FORCEINLINE
  #define __FORCEINLINE                          __attribute__((always_inline))
#endif
#ifndef   __STATIC_INLINE
  #define __STATIC_INLINE                        static inline
#endif
#ifndef   __STATIC_FORCEINLINE
  #define __STATIC_FORCEINLINE                   __attribute__((always_inline)) static inline
#endif
#ifndef   __NO_RETURN
  #define __NO_RETURN                            __attribute__((__noreturn__))
#endif
#ifndef   CMSIS_DEPRECATED
  #define CMSIS_DEPRECATED                       __attribute__((deprecated))
#endif
#ifndef   __USED
  #define __USED                                 __attribute__((used))
#endif
#ifndef   __WEAK
  #define __WEAK                                 __attribute__((weak))
#endif
#ifndef   __PACKED
  #define __PACKED                               __attribute__((packed, aligned(1)))
#endif
#ifndef   __PACKED_STRUCT
  #define __PACKED_STRUCT                        struct __attribute__((packed, aligned(1)))
#endif
#ifndef   __UNALIGNED_UINT16_WRITE
  #pragma GCC diagnostic push
  #pragma GCC diagnostic ignored "-Wpacked"
  #pragma GCC diagnostic ignored "-Wattributes"
  __PACKED_STRUCT T_UINT16_WRITE { uint16_t v; };
  #pragma GCC diagnostic pop
  #define __UNALIGNED_UINT16_WRITE(addr, val)    (void)((((struct T_UINT16_WRITE *)(void *)(addr))->v) = (val))
#endif
#ifndef   __UNALIGNED_UINT16_READ
  #pragma GCC diagnostic push
  #pragma GCC diagnostic ignored "-Wpacked"
  #pragma GCC diagnostic ignored "-Wattributes"
  __PACKED_STRUCT T_UINT16_READ { uint16_t v; };
  #pragma GCC diagnostic pop
  #define __UNALIGNED_UINT16_READ(addr)          (((const struct T_UINT16_READ *)(const void *)(addr))->v)
#endif
#ifndef   __UNALIGNED_UINT32_WRITE
  #pragma GCC diagnostic push
  #pragma GCC diagnostic ignored "-Wpacked"
  #pragma GCC diagnostic ignored "-Wattributes"
  __PACKED_STRUCT T_UINT32_WRITE { uint32_t v; };
  #pragma GCC diagnostic pop
  #define __UNALIGNED_UINT32_WRITE(addr, val)    (void)((((struct T_UINT32_WRITE *)(void *)(addr))->v) = (val))
#endif
#ifndef   __UNALIGNED_UINT32_READ
  #pragma GCC diagnostic push
  #pragma GCC diagnostic ignored "-Wpacked"
  #pragma GCC diagnostic ignored "-Wattributes"
  __PACKED_STRUCT T_UINT32_READ { uint32_t v; };
  #pragma GCC diagnostic pop
  #define __UNALIGNED_UINT32_READ(addr)          (((const struct T_UINT32_READ *)(const void *)(addr))->v)
#endif
#ifndef   __ALIGNED
  #define __ALIGNED(x)                           __attribute__((aligned(x)))
#endif
#ifndef   __RESTRICT
  #define __RESTRICT                             __restrict
#endif
#ifndef   __COMPILER_BARRIER
  #define __COMPILER_BARRIER()                   __ASM volatile("":::"memory")
#endif
/* ##########################  Core Instruction Access  ######################### */

/**
  \brief   No Operation
  \details No Operation does nothing. This instruction can be used for code alignment purposes.
 */
#define __NOP()                             __ASM volatile ("nop")
/**
  \brief   Wait For Interrupt
  \details Wait For Interrupt is a hint instruction that suspends execution until one of a number of events occurs.
 */
#define __WFI()                             __ASM volatile ("wfi":::"memory")


/**
  \brief   Wait For Event
  \details Wait For Event is a hint instruction that permits the processor to enter
           a low-power state until one of a number of events occurs.
 */
#define __WFE()                             __ASM volatile ("wfe":::"memory")


/**
  \brief   Send Event
  \details Send Event is a hint instruction. It causes an event to be signaled to the CPU.
 */
#define __SEV()                             __ASM volatile ("sev")
/**
  \brief   Instruction Synchronization Barrier
  \details Instruction Synchronization Barrier flushes the pipeline in the processor,
           so that all instructions following the ISB are fetched from cache or memory,
           after the instruction has been completed.
 */
__STATIC_FORCEINLINE void __ISB(void)
{
  __ASM volatile ("isb 0xF":::"memory");
}


/**
  \brief   Data Synchronization Barrier
  \details Acts as a special kind of Data Memory Barrier.
           It completes when all explicit memory accesses before this instruction complete.
 */
__STATIC_FORCEINLINE void __DSB(void)
{
  __ASM volatile ("dsb 0xF":::"memory");
}


/**
  \brief   Data Memory Barrier
  \details Ensures the apparent order of the explicit memory operations before
           and after the instruction, without ensuring their completion.
 */
__STATIC_FORCEINLINE void __DMB(void)
{
  __ASM volatile ("dmb 0xF":::"memory");
}
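/*
  Usage sketch (illustrative only, not part of the CMSIS API): a typical
  producer pattern where __DMB() orders the payload store before the flag
  store as seen by other observers; `data` and `flag` are hypothetical
  shared variables.

    data = 42U;   // payload
    __DMB();      // ensure the payload is observable before the flag
    flag = 1U;    // signal the consumer
*/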
/**
  \brief   Reverse byte order (32 bit)
  \details Reverses the byte order in unsigned integer value. For example, 0x12345678 becomes 0x78563412.
  \param [in]    value  Value to reverse
  \return               Reversed value
 */
__STATIC_FORCEINLINE uint32_t __REV(uint32_t value)
{
#if (__GNUC__ > 4) || (__GNUC__ == 4 && __GNUC_MINOR__ >= 5)
  return __builtin_bswap32(value);
#else
  uint32_t result;

  __ASM ("rev %0, %1" : "=r" (result) : "r" (value) );
  return result;
#endif
}


/**
  \brief   Reverse byte order (16 bit)
  \details Reverses the byte order within each halfword of a word. For example, 0x12345678 becomes 0x34127856.
  \param [in]    value  Value to reverse
  \return               Reversed value
 */
__STATIC_FORCEINLINE uint32_t __REV16(uint32_t value)
{
  uint32_t result;

  __ASM ("rev16 %0, %1" : "=r" (result) : "r" (value));
  return result;
}


/**
  \brief   Reverse byte order (16 bit)
  \details Reverses the byte order in a 16-bit value and returns the signed 16-bit result. For example, 0x0080 becomes 0x8000.
  \param [in]    value  Value to reverse
  \return               Reversed value
 */
__STATIC_FORCEINLINE int16_t __REVSH(int16_t value)
{
#if (__GNUC__ > 4) || (__GNUC__ == 4 && __GNUC_MINOR__ >= 8)
  return (int16_t)__builtin_bswap16(value);
#else
  int16_t result;

  __ASM ("revsh %0, %1" : "=r" (result) : "r" (value) );
  return result;
#endif
}
/**
  \brief   Rotate Right in unsigned value (32 bit)
  \details Rotate Right provides the value of the contents of a register rotated by a variable number of bits.
  \param [in]    op1  Value to rotate
  \param [in]    op2  Number of Bits to rotate
  \return             Rotated value
 */
__STATIC_FORCEINLINE uint32_t __ROR(uint32_t op1, uint32_t op2)
{
  op2 %= 32U;
  if (op2 == 0U)
  {
    return op1;
  }
  return (op1 >> op2) | (op1 << (32U - op2));
}
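/*
  Worked example (illustrative only): rotating right by 8 moves the least
  significant byte to the top; rotations are taken modulo 32.

    uint32_t r = __ROR(0x12345678U, 8U);    // r == 0x78123456U
    uint32_t s = __ROR(0x12345678U, 32U);   // s == 0x12345678U (unchanged)
*/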
/**
  \brief   Breakpoint
  \details Causes the processor to enter Debug state.
           Debug tools can use this to investigate system state when the instruction at a particular address is reached.
  \param [in]    value  is ignored by the processor.
                 If required, a debugger can use it to store additional information about the breakpoint.
 */
#define __BKPT(value)   __ASM volatile ("bkpt "#value)


/**
  \brief   Reverse bit order of value
  \details Reverses the bit order of the given value.
  \param [in]    value  Value to reverse
  \return               Reversed value
 */
__STATIC_FORCEINLINE uint32_t __RBIT(uint32_t value)
{
  uint32_t result;

  __ASM ("rbit %0, %1" : "=r" (result) : "r" (value) );
  return result;
}
/**
  \brief   Count leading zeros
  \details Counts the number of leading zeros of a data value.
  \param [in]  value  Value to count the leading zeros
  \return             number of leading zeros in value
 */
__STATIC_FORCEINLINE uint8_t __CLZ(uint32_t value)
{
  /* Even though __builtin_clz produces a CLZ instruction on ARM, formally
     __builtin_clz(0) is undefined behaviour, so handle this case specially.
     This guarantees ARM-compatible results if happening to compile on a non-ARM
     target, and ensures the compiler doesn't decide to activate any
     optimisations using the logic "value was passed to __builtin_clz, so it
     is non-zero".
     ARM GCC 7.3 and possibly earlier will optimise this test away, leaving a
     single CLZ instruction.
   */
  if (value == 0U)
  {
    return 32U;
  }
  return __builtin_clz(value);
}
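/*
  Worked example (illustrative only): __CLZ() can be used to find the index of
  the most significant set bit, with the zero input handled explicitly.

    uint8_t a = __CLZ(0x00010000U);   // a == 15, highest set bit is bit 16
    uint8_t b = __CLZ(0U);            // b == 32, defined by the guard above
*/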
/**
  \brief   LDR Exclusive (8 bit)
  \details Executes an exclusive LDR instruction for 8 bit values.
  \param [in]    addr  Pointer to data
  \return              value of type uint8_t at (*addr)
 */
__STATIC_FORCEINLINE uint8_t __LDREXB(volatile uint8_t *addr)
{
  uint32_t result;

#if (__GNUC__ > 4) || (__GNUC__ == 4 && __GNUC_MINOR__ >= 8)
  __ASM volatile ("ldrexb %0, %1" : "=r" (result) : "Q" (*addr) );
#else
  /* Prior to GCC 4.8, "Q" will be expanded to [rx, #0] which is not
     accepted by assembler. So has to use following less efficient pattern.
  */
  __ASM volatile ("ldrexb %0, [%1]" : "=r" (result) : "r" (addr) : "memory" );
#endif
  return ((uint8_t) result);    /* Add explicit type cast here */
}
/**
  \brief   LDR Exclusive (16 bit)
  \details Executes an exclusive LDR instruction for 16 bit values.
  \param [in]    addr  Pointer to data
  \return              value of type uint16_t at (*addr)
 */
__STATIC_FORCEINLINE uint16_t __LDREXH(volatile uint16_t *addr)
{
  uint32_t result;

#if (__GNUC__ > 4) || (__GNUC__ == 4 && __GNUC_MINOR__ >= 8)
  __ASM volatile ("ldrexh %0, %1" : "=r" (result) : "Q" (*addr) );
#else
  /* Prior to GCC 4.8, "Q" will be expanded to [rx, #0] which is not
     accepted by assembler. So has to use following less efficient pattern.
  */
  __ASM volatile ("ldrexh %0, [%1]" : "=r" (result) : "r" (addr) : "memory" );
#endif
  return ((uint16_t) result);    /* Add explicit type cast here */
}
/**
  \brief   LDR Exclusive (32 bit)
  \details Executes an exclusive LDR instruction for 32 bit values.
  \param [in]    addr  Pointer to data
  \return              value of type uint32_t at (*addr)
 */
__STATIC_FORCEINLINE uint32_t __LDREXW(volatile uint32_t *addr)
{
  uint32_t result;

  __ASM volatile ("ldrex %0, %1" : "=r" (result) : "Q" (*addr) );
  return result;
}
/**
  \brief   STR Exclusive (8 bit)
  \details Executes an exclusive STR instruction for 8 bit values.
  \param [in]  value  Value to store
  \param [in]  addr   Pointer to location
  \return          0  Function succeeded
  \return          1  Function failed
 */
__STATIC_FORCEINLINE uint32_t __STREXB(uint8_t value, volatile uint8_t *addr)
{
  uint32_t result;

  __ASM volatile ("strexb %0, %2, %1" : "=&r" (result), "=Q" (*addr) : "r" ((uint32_t)value) );
  return result;
}
/**
  \brief   STR Exclusive (16 bit)
  \details Executes an exclusive STR instruction for 16 bit values.
  \param [in]  value  Value to store
  \param [in]  addr   Pointer to location
  \return          0  Function succeeded
  \return          1  Function failed
 */
__STATIC_FORCEINLINE uint32_t __STREXH(uint16_t value, volatile uint16_t *addr)
{
  uint32_t result;

  __ASM volatile ("strexh %0, %2, %1" : "=&r" (result), "=Q" (*addr) : "r" ((uint32_t)value) );
  return result;
}
/**
  \brief   STR Exclusive (32 bit)
  \details Executes an exclusive STR instruction for 32 bit values.
  \param [in]  value  Value to store
  \param [in]  addr   Pointer to location
  \return          0  Function succeeded
  \return          1  Function failed
 */
__STATIC_FORCEINLINE uint32_t __STREXW(uint32_t value, volatile uint32_t *addr)
{
  uint32_t result;

  __ASM volatile ("strex %0, %2, %1" : "=&r" (result), "=Q" (*addr) : "r" (value) );
  return result;
}
/**
  \brief   Remove the exclusive lock
  \details Removes the exclusive lock which is created by LDREX.
 */
__STATIC_FORCEINLINE void __CLREX(void)
{
  __ASM volatile ("clrex" ::: "memory");
}
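/*
  Usage sketch (illustrative only, not part of the CMSIS API): a lock-free
  increment built from the exclusive-access intrinsics above. The store
  returns 1 if exclusivity was lost, so the sequence is retried; `counter`
  is a hypothetical shared variable.

    volatile uint32_t counter;
    uint32_t old;
    do {
      old = __LDREXW(&counter);                    // open exclusive monitor
    } while (__STREXW(old + 1U, &counter) != 0U);  // retry on lost exclusivity
*/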
/**
  \brief   Signed Saturate
  \details Saturates a signed value.
  \param [in]  ARG1  Value to be saturated
  \param [in]  ARG2  Bit position to saturate to (1..32)
  \return            Saturated value
 */
#define __SSAT(ARG1, ARG2) \
__extension__ \
({                          \
  int32_t __RES, __ARG1 = (ARG1); \
  __ASM volatile ("ssat %0, %1, %2" : "=r" (__RES) :  "I" (ARG2), "r" (__ARG1) : "cc" ); \
  __RES; \
 })
/**
  \brief   Unsigned Saturate
  \details Saturates an unsigned value.
  \param [in]  ARG1  Value to be saturated
  \param [in]  ARG2  Bit position to saturate to (0..31)
  \return            Saturated value
 */
#define __USAT(ARG1, ARG2) \
__extension__ \
({                          \
  uint32_t __RES, __ARG1 = (ARG1); \
  __ASM volatile ("usat %0, %1, %2" : "=r" (__RES) :  "I" (ARG2), "r" (__ARG1) : "cc" ); \
  __RES; \
 })
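/*
  Worked example (illustrative only): saturating to 8 bits clamps the input to
  the representable range of the target width.

    int32_t  s = __SSAT(300, 8);    // s == 127   (signed 8-bit maximum)
    int32_t  t = __SSAT(-300, 8);   // t == -128  (signed 8-bit minimum)
    uint32_t u = __USAT(-5, 8);     // u == 0     (unsigned saturation floor)
*/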
/**
  \brief   Rotate Right with Extend (32 bit)
  \details Moves each bit of a bitstring right by one bit.
           The carry input is shifted in at the left end of the bitstring.
  \param [in]    value  Value to rotate
  \return               Rotated value
 */
__STATIC_FORCEINLINE uint32_t __RRX(uint32_t value)
{
  uint32_t result;

  __ASM volatile ("rrx %0, %1" : "=r" (result) : "r" (value));
  return result;
}
/**
  \brief   LDRT Unprivileged (8 bit)
  \details Executes an unprivileged LDRT instruction for 8 bit values.
  \param [in]    ptr  Pointer to data
  \return             value of type uint8_t at (*ptr)
 */
__STATIC_FORCEINLINE uint8_t __LDRBT(volatile uint8_t *ptr)
{
  uint32_t result;

  __ASM volatile ("ldrbt %0, %1" : "=r" (result) : "Q" (*ptr) );
  return ((uint8_t)result);    /* Add explicit type cast here */
}
/**
  \brief   LDRT Unprivileged (16 bit)
  \details Executes an unprivileged LDRT instruction for 16 bit values.
  \param [in]    ptr  Pointer to data
  \return             value of type uint16_t at (*ptr)
 */
__STATIC_FORCEINLINE uint16_t __LDRHT(volatile uint16_t *ptr)
{
  uint32_t result;

  __ASM volatile ("ldrht %0, %1" : "=r" (result) : "Q" (*ptr) );
  return ((uint16_t)result);    /* Add explicit type cast here */
}
/**
  \brief   LDRT Unprivileged (32 bit)
  \details Executes an unprivileged LDRT instruction for 32 bit values.
  \param [in]    ptr  Pointer to data
  \return             value of type uint32_t at (*ptr)
 */
__STATIC_FORCEINLINE uint32_t __LDRT(volatile uint32_t *ptr)
{
  uint32_t result;

  __ASM volatile ("ldrt %0, %1" : "=r" (result) : "Q" (*ptr) );
  return result;
}
/**
  \brief   STRT Unprivileged (8 bit)
  \details Executes an unprivileged STRT instruction for 8 bit values.
  \param [in]  value  Value to store
  \param [in]  ptr    Pointer to location
 */
__STATIC_FORCEINLINE void __STRBT(uint8_t value, volatile uint8_t *ptr)
{
  __ASM volatile ("strbt %1, %0" : "=Q" (*ptr) : "r" ((uint32_t)value) );
}
/**
  \brief   STRT Unprivileged (16 bit)
  \details Executes an unprivileged STRT instruction for 16 bit values.
  \param [in]  value  Value to store
  \param [in]  ptr    Pointer to location
 */
__STATIC_FORCEINLINE void __STRHT(uint16_t value, volatile uint16_t *ptr)
{
  __ASM volatile ("strht %1, %0" : "=Q" (*ptr) : "r" ((uint32_t)value) );
}
/**
  \brief   STRT Unprivileged (32 bit)
  \details Executes an unprivileged STRT instruction for 32 bit values.
  \param [in]  value  Value to store
  \param [in]  ptr    Pointer to location
 */
__STATIC_FORCEINLINE void __STRT(uint32_t value, volatile uint32_t *ptr)
{
  __ASM volatile ("strt %1, %0" : "=Q" (*ptr) : "r" (value) );
}
/* ###########################  Core Function Access  ########################### */
/** \ingroup  CMSIS_Core_FunctionInterface
    \defgroup CMSIS_Core_RegAccFunctions CMSIS Core Register Access Functions
  @{
 */
/**
  \brief   Enable IRQ Interrupts
  \details Enables IRQ interrupts by clearing the I-bit in the CPSR.
           Can only be executed in Privileged modes.
 */
__STATIC_FORCEINLINE void __enable_irq(void)
{
  __ASM volatile ("cpsie i" : : : "memory");
}


/**
  \brief   Disable IRQ Interrupts
  \details Disables IRQ interrupts by setting the I-bit in the CPSR.
           Can only be executed in Privileged modes.
 */
__STATIC_FORCEINLINE void __disable_irq(void)
{
  __ASM volatile ("cpsid i" : : : "memory");
}
/**
  \brief   Enable FIQ Interrupts
  \details Enables FIQ interrupts by clearing the F-bit in the CPSR.
           Can only be executed in Privileged modes.
 */
__STATIC_FORCEINLINE void __enable_fault_irq(void)
{
  __ASM volatile ("cpsie f" : : : "memory");
}


/**
  \brief   Disable FIQ Interrupts
  \details Disables FIQ interrupts by setting the F-bit in the CPSR.
           Can only be executed in Privileged modes.
 */
__STATIC_FORCEINLINE void __disable_fault_irq(void)
{
  __ASM volatile ("cpsid f" : : : "memory");
}
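/*
  Usage sketch (illustrative only, not part of the CMSIS API): a simple
  critical section. Note this unconditionally re-enables IRQs; production code
  would normally save and restore the previous CPSR state instead.

    __disable_irq();
    // ... code that must not be interrupted ...
    __enable_irq();
*/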
/**
  \brief   Get FPSCR
  \details Returns the current value of the Floating Point Status/Control register.
  \return  Floating Point Status/Control register value
 */
__STATIC_FORCEINLINE uint32_t __get_FPSCR(void)
{
#if __has_builtin(__builtin_arm_get_fpscr)
  return __builtin_arm_get_fpscr();
#else
  uint32_t result;

  __ASM volatile ("VMRS %0, fpscr" : "=r" (result) );
  return result;
#endif
}


/**
  \brief   Set FPSCR
  \details Assigns the given value to the Floating Point Status/Control register.
  \param [in]    fpscr  Floating Point Status/Control value to set
 */
__STATIC_FORCEINLINE void __set_FPSCR(uint32_t fpscr)
{
#if __has_builtin(__builtin_arm_set_fpscr)
  __builtin_arm_set_fpscr(fpscr);
#else
  __ASM volatile ("VMSR fpscr, %0" : : "r" (fpscr) : "vfpcc", "memory");
#endif
}
/*@} end of CMSIS_Core_RegAccFunctions */


/* ###################  Compiler specific Intrinsics  ########################### */
/** \defgroup CMSIS_SIMD_intrinsics CMSIS SIMD Intrinsics
  Access to dedicated SIMD instructions
  @{
*/

#if (defined (__ARM_FEATURE_DSP) && (__ARM_FEATURE_DSP == 1))
#define __SADD8                 __builtin_arm_sadd8
#define __QADD8                 __builtin_arm_qadd8
#define __SHADD8                __builtin_arm_shadd8
#define __UADD8                 __builtin_arm_uadd8
#define __UQADD8                __builtin_arm_uqadd8
#define __UHADD8                __builtin_arm_uhadd8
#define __SSUB8                 __builtin_arm_ssub8
#define __QSUB8                 __builtin_arm_qsub8
#define __SHSUB8                __builtin_arm_shsub8
#define __USUB8                 __builtin_arm_usub8
#define __UQSUB8                __builtin_arm_uqsub8
#define __UHSUB8                __builtin_arm_uhsub8
#define __SADD16                __builtin_arm_sadd16
#define __QADD16                __builtin_arm_qadd16
#define __SHADD16               __builtin_arm_shadd16
#define __UADD16                __builtin_arm_uadd16
#define __UQADD16               __builtin_arm_uqadd16
#define __UHADD16               __builtin_arm_uhadd16
#define __SSUB16                __builtin_arm_ssub16
#define __QSUB16                __builtin_arm_qsub16
#define __SHSUB16               __builtin_arm_shsub16
#define __USUB16                __builtin_arm_usub16
#define __UQSUB16               __builtin_arm_uqsub16
#define __UHSUB16               __builtin_arm_uhsub16
#define __SASX                  __builtin_arm_sasx
#define __QASX                  __builtin_arm_qasx
#define __SHASX                 __builtin_arm_shasx
#define __UASX                  __builtin_arm_uasx
#define __UQASX                 __builtin_arm_uqasx
#define __UHASX                 __builtin_arm_uhasx
#define __SSAX                  __builtin_arm_ssax
#define __QSAX                  __builtin_arm_qsax
#define __SHSAX                 __builtin_arm_shsax
#define __USAX                  __builtin_arm_usax
#define __UQSAX                 __builtin_arm_uqsax
#define __UHSAX                 __builtin_arm_uhsax
#define __USAD8                 __builtin_arm_usad8
#define __USADA8                __builtin_arm_usada8
#define __SSAT16                __builtin_arm_ssat16
#define __USAT16                __builtin_arm_usat16
#define __UXTB16                __builtin_arm_uxtb16
#define __UXTAB16               __builtin_arm_uxtab16
#define __SXTB16                __builtin_arm_sxtb16
#define __SXTAB16               __builtin_arm_sxtab16
#define __SMUAD                 __builtin_arm_smuad
#define __SMUADX                __builtin_arm_smuadx
#define __SMLAD                 __builtin_arm_smlad
#define __SMLADX                __builtin_arm_smladx
#define __SMLALD                __builtin_arm_smlald
#define __SMLALDX               __builtin_arm_smlaldx
#define __SMUSD                 __builtin_arm_smusd
#define __SMUSDX                __builtin_arm_smusdx
#define __SMLSD                 __builtin_arm_smlsd
#define __SMLSDX                __builtin_arm_smlsdx
#define __SMLSLD                __builtin_arm_smlsld
#define __SMLSLDX               __builtin_arm_smlsldx
#define __SEL                   __builtin_arm_sel
#define __QADD                  __builtin_arm_qadd
#define __QSUB                  __builtin_arm_qsub
#define __PKHBT(ARG1,ARG2,ARG3) \
__extension__ \
({                          \
  uint32_t __RES, __ARG1 = (ARG1), __ARG2 = (ARG2); \
  __ASM ("pkhbt %0, %1, %2, lsl %3" : "=r" (__RES) :  "r" (__ARG1), "r" (__ARG2), "I" (ARG3)  ); \
  __RES; \
 })

#define __PKHTB(ARG1,ARG2,ARG3) \
__extension__ \
({                          \
  uint32_t __RES, __ARG1 = (ARG1), __ARG2 = (ARG2); \
  if (ARG3 == 0) \
    __ASM ("pkhtb %0, %1, %2" : "=r" (__RES) :  "r" (__ARG1), "r" (__ARG2)  ); \
  else \
    __ASM ("pkhtb %0, %1, %2, asr %3" : "=r" (__RES) :  "r" (__ARG1), "r" (__ARG2), "I" (ARG3)  ); \
  __RES; \
 })
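/*
  Worked example (illustrative only): __PKHBT packs the bottom halfword of the
  first operand with the top halfword of the (shifted) second operand, and
  __PKHTB does the converse.

    uint32_t p = __PKHBT(0xAAAA1111U, 0x2222BBBBU, 0);   // p == 0x22221111U
    uint32_t q = __PKHTB(0xAAAA1111U, 0x2222BBBBU, 0);   // q == 0xAAAABBBBU
*/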
__STATIC_FORCEINLINE uint32_t __SXTB16_RORn(uint32_t op1, uint32_t rotate)
{
  uint32_t result;

  if (__builtin_constant_p(rotate) && ((rotate == 8U) || (rotate == 16U) || (rotate == 24U)))
  {
    __ASM volatile("sxtb16 %0, %1, ROR %2" : "=r"(result) : "r"(op1), "i"(rotate));
  }
  else
  {
    result = __SXTB16(__ROR(op1, rotate));
  }
  return result;
}

__STATIC_FORCEINLINE uint32_t __SXTAB16_RORn(uint32_t op1, uint32_t op2, uint32_t rotate)
{
  uint32_t result;

  if (__builtin_constant_p(rotate) && ((rotate == 8U) || (rotate == 16U) || (rotate == 24U)))
  {
    __ASM volatile("sxtab16 %0, %1, %2, ROR %3" : "=r"(result) : "r"(op1), "r"(op2), "i"(rotate));
  }
  else
  {
    result = __SXTAB16(op1, __ROR(op2, rotate));
  }
  return result;
}
__STATIC_FORCEINLINE int32_t __SMMLA (int32_t op1, int32_t op2, int32_t op3)
{
  int32_t result;

  __ASM volatile ("smmla %0, %1, %2, %3" : "=r" (result) : "r" (op1), "r" (op2), "r" (op3) );
  return result;
}
#endif /* (__ARM_FEATURE_DSP == 1) */
/** @} end of group CMSIS_SIMD_intrinsics */


/** \defgroup CMSIS_Core_intrinsics CMSIS Core Intrinsics
  Access to dedicated instructions
  @{
*/
/** \brief  Get CPSR Register
    \return CPSR Register value
 */
__STATIC_FORCEINLINE uint32_t __get_CPSR(void)
{
  uint32_t result;

  __ASM volatile("MRS %0, cpsr" : "=r" (result) );
  return result;
}

/** \brief  Set CPSR Register
    \param [in]    cpsr  CPSR value to set
 */
__STATIC_FORCEINLINE void __set_CPSR(uint32_t cpsr)
{
  __ASM volatile ("MSR cpsr, %0" : : "r" (cpsr) : "cc", "memory");
}
/** \brief  Get Mode
    \return Processor Mode
 */
__STATIC_FORCEINLINE uint32_t __get_mode(void)
{
  return (__get_CPSR() & 0x1FU);
}

/** \brief  Set Mode
    \param [in]    mode  Mode value to set
 */
__STATIC_FORCEINLINE void __set_mode(uint32_t mode)
{
  __ASM volatile("MSR cpsr_c, %0" : : "r" (mode) : "memory");
}
/** \brief  Get Stack Pointer
    \return Stack Pointer value
 */
__STATIC_FORCEINLINE uint32_t __get_SP(void)
{
  uint32_t result;

  __ASM volatile("MOV %0, sp" : "=r" (result) : : "memory");
  return result;
}

/** \brief  Set Stack Pointer
    \param [in]    stack  Stack Pointer value to set
 */
__STATIC_FORCEINLINE void __set_SP(uint32_t stack)
{
  __ASM volatile("MOV sp, %0" : : "r" (stack) : "memory");
}
/** \brief  Get USR/SYS Stack Pointer
    \return USR/SYS Stack Pointer value
 */
__STATIC_FORCEINLINE uint32_t __get_SP_usr(void)
{
  uint32_t cpsr = __get_CPSR();
  uint32_t result;

  __ASM volatile(
    "CPS     #0x1F  \n"
    "MOV     %0, sp   " : "=r"(result) : : "memory"
   );

  __set_CPSR(cpsr);
  __ISB();
  return result;
}

/** \brief  Set USR/SYS Stack Pointer
    \param [in]    topOfProcStack  USR/SYS Stack Pointer value to set
 */
__STATIC_FORCEINLINE void __set_SP_usr(uint32_t topOfProcStack)
{
  uint32_t cpsr = __get_CPSR();

  __ASM volatile(
    "CPS     #0x1F  \n"
    "MOV     sp, %0   " : : "r" (topOfProcStack) : "memory"
   );

  __set_CPSR(cpsr);
  __ISB();
}
/** \brief  Get FPEXC
    \return Floating Point Exception Control register value
 */
__STATIC_FORCEINLINE uint32_t __get_FPEXC(void)
{
#if (__FPU_PRESENT == 1)
  uint32_t result;

  __ASM volatile("VMRS %0, fpexc" : "=r" (result) : : "memory");
  return result;
#else
  return (0);
#endif
}

/** \brief  Set FPEXC
    \param [in]    fpexc  Floating Point Exception Control value to set
 */
__STATIC_FORCEINLINE void __set_FPEXC(uint32_t fpexc)
{
#if (__FPU_PRESENT == 1)
  __ASM volatile ("VMSR fpexc, %0" : : "r" (fpexc) : "memory");
#endif
}
/*
 * Include common core functions to access Coprocessor 15 registers
 */

#define __get_CP(cp, op1, Rt, CRn, CRm, op2)   __ASM volatile("MRC p" # cp ", " # op1 ", %0, c" # CRn ", c" # CRm ", " # op2 : "=r" (Rt) : : "memory" )
#define __set_CP(cp, op1, Rt, CRn, CRm, op2)   __ASM volatile("MCR p" # cp ", " # op1 ", %0, c" # CRn ", c" # CRm ", " # op2 : : "r" (Rt) : "memory" )
#define __get_CP64(cp, op1, Rt, CRm)           __ASM volatile("MRRC p" # cp ", " # op1 ", %Q0, %R0, c" # CRm : "=r" (Rt) : : "memory" )
#define __set_CP64(cp, op1, Rt, CRm)           __ASM volatile("MCRR p" # cp ", " # op1 ", %Q0, %R0, c" # CRm : : "r" (Rt) : "memory" )
#include "cmsis_cp15.h"
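/*
  Usage sketch (illustrative only): reading a CP15 register through the
  accessor macros above, here the Multiprocessor Affinity Register
  (MPIDR, encoding MRC p15, 0, <Rt>, c0, c0, 5).

    uint32_t mpidr;
    __get_CP(15, 0, mpidr, 0, 0, 5);
    uint32_t core_id = mpidr & 0x3U;   // CPU ID field on many Cortex-A parts
*/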
/** \brief  Enable Floating Point Unit

  Critical section, called from undef handler, so systick is disabled
 */
__STATIC_INLINE void __FPU_Enable(void)
{
  // Permit access to VFP/NEON registers by modifying CPACR
  const uint32_t cpacr = __get_CPACR();
  __set_CPACR(cpacr | 0x00F00000ul);
  __ISB();

  // Enable VFP/NEON
  const uint32_t fpexc = __get_FPEXC();
  __set_FPEXC(fpexc | 0x40000000ul);

  __ASM volatile(
    // Initialise VFP/NEON registers to 0
    "        MOV     R2,#0             \n"

    // Initialise D16 registers to 0
    "        VMOV    D0, R2,R2         \n"
    "        VMOV    D1, R2,R2         \n"
    "        VMOV    D2, R2,R2         \n"
    "        VMOV    D3, R2,R2         \n"
    "        VMOV    D4, R2,R2         \n"
    "        VMOV    D5, R2,R2         \n"
    "        VMOV    D6, R2,R2         \n"
    "        VMOV    D7, R2,R2         \n"
    "        VMOV    D8, R2,R2         \n"
    "        VMOV    D9, R2,R2         \n"
    "        VMOV    D10,R2,R2         \n"
    "        VMOV    D11,R2,R2         \n"
    "        VMOV    D12,R2,R2         \n"
    "        VMOV    D13,R2,R2         \n"
    "        VMOV    D14,R2,R2         \n"
    "        VMOV    D15,R2,R2         \n"

#if (defined(__ARM_NEON) && (__ARM_NEON == 1))
    // Initialise D32 registers to 0
    "        VMOV    D16,R2,R2         \n"
    "        VMOV    D17,R2,R2         \n"
    "        VMOV    D18,R2,R2         \n"
    "        VMOV    D19,R2,R2         \n"
    "        VMOV    D20,R2,R2         \n"
    "        VMOV    D21,R2,R2         \n"
    "        VMOV    D22,R2,R2         \n"
    "        VMOV    D23,R2,R2         \n"
    "        VMOV    D24,R2,R2         \n"
    "        VMOV    D25,R2,R2         \n"
    "        VMOV    D26,R2,R2         \n"
    "        VMOV    D27,R2,R2         \n"
    "        VMOV    D28,R2,R2         \n"
    "        VMOV    D29,R2,R2         \n"
    "        VMOV    D30,R2,R2         \n"
    "        VMOV    D31,R2,R2         \n"
#endif
    : : : "cc", "r2"
  );

  // Initialise FPSCR to a known state
  const uint32_t fpscr = __get_FPSCR();
  __set_FPSCR(fpscr & 0x00086060ul);
}
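/*
  Usage sketch (illustrative only, not part of the CMSIS API): __FPU_Enable()
  is typically called once during early system initialisation (or from an
  Undefined Instruction handler), before any floating-point code runs.
  `SystemInit` below is a hypothetical startup hook.

    void SystemInit(void)
    {
    #if (__FPU_PRESENT == 1)
      __FPU_Enable();
    #endif
      // ... remaining system setup ...
    }
*/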
/*@} end of group CMSIS_Core_intrinsics */

#pragma GCC diagnostic pop

#endif /* __CMSIS_GCC_A_H */