/**************************************************************************//**
 * @brief    CMSIS compiler GCC header file
 ******************************************************************************/
/*
 * Copyright (c) 2009-2023 Arm Limited. All rights reserved.
 *
 * SPDX-License-Identifier: Apache-2.0
 *
 * Licensed under the Apache License, Version 2.0 (the License); you may
 * not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an AS IS BASIS, WITHOUT
 * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

#ifndef __CMSIS_GCC_R_H
#define __CMSIS_GCC_R_H

#ifndef __CMSIS_GCC_H
  #error "This file must not be included directly"
#endif

/* ignore some GCC warnings */
#pragma GCC diagnostic push
#pragma GCC diagnostic ignored "-Wsign-conversion"
#pragma GCC diagnostic ignored "-Wconversion"
#pragma GCC diagnostic ignored "-Wunused-parameter"

/** \defgroup CMSIS_Core_intrinsics CMSIS Core Intrinsics
  Access to dedicated SIMD instructions
  @{
*/

/** \brief  Get CPSR Register
    \return               CPSR Register value
 */
__STATIC_FORCEINLINE uint32_t __get_CPSR(void)
{
  uint32_t result;
  __ASM volatile("MRS %0, cpsr" : "=r" (result) );
  return (result);
}

/** \brief  Set CPSR Register
    \param [in]    cpsr  CPSR value to set
 */
__STATIC_FORCEINLINE void __set_CPSR(uint32_t cpsr)
{
  __ASM volatile ("MSR cpsr, %0" : : "r" (cpsr) : "cc", "memory");
}

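/* Usage sketch (illustrative, not part of the CMSIS API): on Armv7-R/A the
   CPSR I bit (bit 7) masks IRQs. __example_irqs_masked is a hypothetical
   helper shown only to demonstrate __get_CPSR(). */
__STATIC_FORCEINLINE uint32_t __example_irqs_masked(void)
{
  return ((__get_CPSR() >> 7) & 1U);   /* 1U = IRQs masked, 0U = IRQs enabled */
}
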
/** \brief  Get Mode
    \return                Processor Mode
 */
__STATIC_FORCEINLINE uint32_t __get_mode(void)
{
  return (__get_CPSR() & 0x1FU);
}

/** \brief  Set Mode
    \param [in]    mode  Mode value to set
 */
__STATIC_FORCEINLINE void __set_mode(uint32_t mode)
{
  __ASM volatile("MSR cpsr_c, %0" : : "r" (mode) : "memory");
}

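/* Usage sketch (illustrative, not part of the CMSIS API): the lower five CPSR
   bits encode the processor mode, e.g. 0x10 = User, 0x13 = Supervisor,
   0x1F = System. __example_is_privileged is a hypothetical helper
   demonstrating __get_mode(); User (0x10) is the only unprivileged mode. */
__STATIC_FORCEINLINE uint32_t __example_is_privileged(void)
{
  return (__get_mode() != 0x10U) ? 1U : 0U;
}
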
/** \brief  Get Stack Pointer
    \return Stack Pointer value
 */
__STATIC_FORCEINLINE uint32_t __get_SP(void)
{
  uint32_t result;
  __ASM volatile("MOV %0, sp" : "=r" (result) : : "memory");
  return result;
}

/** \brief  Set Stack Pointer
    \param [in]    stack  Stack Pointer value to set
 */
__STATIC_FORCEINLINE void __set_SP(uint32_t stack)
{
  __ASM volatile("MOV sp, %0" : : "r" (stack) : "memory");
}

/** \brief  Get USR/SYS Stack Pointer
    \return USR/SYS Stack Pointer value
 */
__STATIC_FORCEINLINE uint32_t __get_SP_usr(void)
{
  uint32_t cpsr = __get_CPSR();
  uint32_t result;
  __ASM volatile(
    "CPS     #0x1F  \n"   /* switch to System mode to access the banked USR/SYS SP */
    "MOV     %0, sp   " : "=r"(result) : : "memory"
   );
  __set_CPSR(cpsr);       /* restore the original mode */
  __ISB();
  return result;
}

/** \brief  Set USR/SYS Stack Pointer
    \param [in]    topOfProcStack  USR/SYS Stack Pointer value to set
 */
__STATIC_FORCEINLINE void __set_SP_usr(uint32_t topOfProcStack)
{
  uint32_t cpsr = __get_CPSR();
  __ASM volatile(
    "CPS     #0x1F  \n"   /* switch to System mode to access the banked USR/SYS SP */
    "MOV     sp, %0   " : : "r" (topOfProcStack) : "memory"
   );
  __set_CPSR(cpsr);       /* restore the original mode */
  __ISB();
}

/** \brief  Get FPEXC
    \return               Floating Point Exception Control register value
 */
__STATIC_FORCEINLINE uint32_t __get_FPEXC(void)
{
#if (__FPU_PRESENT == 1)
  uint32_t result;
  __ASM volatile("VMRS %0, fpexc" : "=r" (result) : : "memory");
  return (result);
#else
  return (0);
#endif
}

/** \brief  Set FPEXC
    \param [in]    fpexc  Floating Point Exception Control value to set
 */
__STATIC_FORCEINLINE void __set_FPEXC(uint32_t fpexc)
{
#if (__FPU_PRESENT == 1)
  __ASM volatile ("VMSR fpexc, %0" : : "r" (fpexc) : "memory");
#endif
}

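/* Usage sketch (illustrative, not part of the CMSIS API): FPEXC.EN (bit 30)
   is the global enable for the floating-point unit. __example_enable_fpu is a
   hypothetical helper demonstrating the FPEXC accessors; full FPU bring-up
   also requires enabling CP10/CP11 access in CPACR, which is handled
   elsewhere. */
__STATIC_FORCEINLINE void __example_enable_fpu(void)
{
  __set_FPEXC(__get_FPEXC() | 0x40000000U);   /* set FPEXC.EN */
}
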
/*
 * Include common core functions to access Coprocessor 15 registers
 */

#define __get_CP(cp, op1, Rt, CRn, CRm, op2) __ASM volatile("MRC p" # cp ", " # op1 ", %0, c" # CRn ", c" # CRm ", " # op2 : "=r" (Rt) : : "memory" )
#define __set_CP(cp, op1, Rt, CRn, CRm, op2) __ASM volatile("MCR p" # cp ", " # op1 ", %0, c" # CRn ", c" # CRm ", " # op2 : : "r" (Rt) : "memory" )
#define __get_CP64(cp, op1, Rt, CRm)         __ASM volatile("MRRC p" # cp ", " # op1 ", %Q0, %R0, c" # CRm : "=r" (Rt) : : "memory" )
#define __set_CP64(cp, op1, Rt, CRm)         __ASM volatile("MCRR p" # cp ", " # op1 ", %Q0, %R0, c" # CRm : : "r" (Rt) : "memory" )

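/* Usage sketch (illustrative, not part of the CMSIS API): read the Main ID
   Register (MIDR), i.e. CP15 c0, c0 with opc1 = 0 and opc2 = 0, through the
   __get_CP() macro. __example_read_midr is a hypothetical helper. */
__STATIC_FORCEINLINE uint32_t __example_read_midr(void)
{
  uint32_t midr;
  __get_CP(15, 0, midr, 0, 0, 0);   /* expands to: MRC p15, 0, %0, c0, c0, 0 */
  return midr;
}
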
/*@} end of group CMSIS_Core_intrinsics */

#pragma GCC diagnostic pop

#endif /* __CMSIS_GCC_R_H */