/**************************************************************************//**
 * @brief CMSIS compiler specific macros, functions, instructions
 ******************************************************************************/
 * Copyright (c) 2009-2019 Arm Limited. All rights reserved.
 * SPDX-License-Identifier: Apache-2.0
 * Licensed under the Apache License, Version 2.0 (the License); you may
 * not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 * www.apache.org/licenses/LICENSE-2.0
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an AS IS BASIS, WITHOUT
 * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.

#ifndef __CMSIS_ARMCC_H
#define __CMSIS_ARMCC_H
#if defined(__ARMCC_VERSION) && (__ARMCC_VERSION < 400677)
#error "Please use Arm Compiler Toolchain V4.0.677 or later!"

/* CMSIS compiler control architecture macros */
#if (defined (__TARGET_ARCH_7_A ) && (__TARGET_ARCH_7_A == 1))
#define __ARM_ARCH_7A__ 1
/* CMSIS compiler specific defines */
#define __INLINE __inline
#define __FORCEINLINE __forceinline
#ifndef __STATIC_INLINE
#define __STATIC_INLINE static __inline
#ifndef __STATIC_FORCEINLINE
#define __STATIC_FORCEINLINE static __forceinline
#define __NO_RETURN __declspec(noreturn)
#ifndef CMSIS_DEPRECATED
#define CMSIS_DEPRECATED __attribute__((deprecated))
#define __USED __attribute__((used))
#define __WEAK __attribute__((weak))
#define __PACKED __attribute__((packed))
#ifndef __PACKED_STRUCT
#define __PACKED_STRUCT __packed struct
#ifndef __UNALIGNED_UINT16_WRITE
#define __UNALIGNED_UINT16_WRITE(addr, val) ((*((__packed uint16_t *)(addr))) = (val))
#ifndef __UNALIGNED_UINT16_READ
#define __UNALIGNED_UINT16_READ(addr) (*((const __packed uint16_t *)(addr)))
#ifndef __UNALIGNED_UINT32_WRITE
#define __UNALIGNED_UINT32_WRITE(addr, val) ((*((__packed uint32_t *)(addr))) = (val))
#ifndef __UNALIGNED_UINT32_READ
#define __UNALIGNED_UINT32_READ(addr) (*((const __packed uint32_t *)(addr)))
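/*
   Usage sketch (illustrative only; 'frame', 'id' and the offsets below are hypothetical):
   the __UNALIGNED_* macros let code access halfwords/words at addresses that are not
   naturally aligned, e.g. fields packed into a byte stream.

     uint8_t  frame[8];
     uint32_t id;
     id = __UNALIGNED_UINT32_READ(&frame[1]);         read a word at an odd byte offset
     __UNALIGNED_UINT16_WRITE(&frame[5], 0x1234U);    store a halfword at byte offset 5
*/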
#define __ALIGNED(x) __attribute__((aligned(x)))
#define __PACKED __attribute__((packed))
#ifndef __COMPILER_BARRIER
#define __COMPILER_BARRIER() __memory_changed()
/* ########################## Core Instruction Access ######################### */
  \brief Wait For Interrupt
  \brief Wait For Event

  \brief Instruction Synchronization Barrier
#define __ISB() __isb(0xF)

  \brief Data Synchronization Barrier
#define __DSB() __dsb(0xF)

  \brief Data Memory Barrier
#define __DMB() __dmb(0xF)
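/*
   Usage sketch (illustrative; 'buffer' and 'buffer_ready' are hypothetical shared variables):
   __DMB() orders the data write before the flag write as seen by other observers, while
   __DSB() followed by __ISB() is the usual pairing after system register or MMU changes.

     buffer[0] = data;       produce the data
     __DMB();                make the data visible before the flag
     buffer_ready = 1U;      publish
*/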
  \brief Reverse byte order (32 bit)
  \details Reverses the byte order in an unsigned integer value. For example, 0x12345678 becomes 0x78563412.
  \param [in] value Value to reverse
  \return Reversed value
  \brief Reverse byte order (16 bit)
  \details Reverses the byte order within each halfword of a word. For example, 0x12345678 becomes 0x34127856.
  \param [in] value Value to reverse
  \return Reversed value
#ifndef __NO_EMBEDDED_ASM
__attribute__((section(".rev16_text"))) __STATIC_INLINE __ASM uint32_t __REV16(uint32_t value)
  \brief Reverse byte order (16 bit)
  \details Reverses the byte order in a 16-bit value and returns the signed 16-bit result. For example, 0x0080 becomes 0x8000.
  \param [in] value Value to reverse
  \return Reversed value
#ifndef __NO_EMBEDDED_ASM
__attribute__((section(".revsh_text"))) __STATIC_INLINE __ASM int16_t __REVSH(int16_t value)
  \brief Rotate Right in unsigned value (32 bit)
  \param [in] op1 Value to rotate
  \param [in] op2 Number of Bits to rotate
  \return Rotated value

  \param [in] value is ignored by the processor.
              If required, a debugger can use it to store additional information about the breakpoint.
#define __BKPT(value) __breakpoint(value)

  \brief Reverse bit order of value
  \param [in] value Value to reverse
  \return Reversed value
#define __RBIT __rbit

  \brief Count leading zeros
  \param [in] value Value to count the leading zeros
  \return number of leading zeros in value
  \brief LDR Exclusive (8 bit)
  \details Executes an exclusive LDR instruction for 8 bit values.
  \param [in] ptr Pointer to data
  \return value of type uint8_t at (*ptr)
#if defined(__ARMCC_VERSION) && (__ARMCC_VERSION < 5060020)
#define __LDREXB(ptr) ((uint8_t ) __ldrex(ptr))
#else
#define __LDREXB(ptr) _Pragma("push") _Pragma("diag_suppress 3731") ((uint8_t ) __ldrex(ptr)) _Pragma("pop")
  \brief LDR Exclusive (16 bit)
  \details Executes an exclusive LDR instruction for 16 bit values.
  \param [in] ptr Pointer to data
  \return value of type uint16_t at (*ptr)
#if defined(__ARMCC_VERSION) && (__ARMCC_VERSION < 5060020)
#define __LDREXH(ptr) ((uint16_t) __ldrex(ptr))
#else
#define __LDREXH(ptr) _Pragma("push") _Pragma("diag_suppress 3731") ((uint16_t) __ldrex(ptr)) _Pragma("pop")
  \brief LDR Exclusive (32 bit)
  \details Executes an exclusive LDR instruction for 32 bit values.
  \param [in] ptr Pointer to data
  \return value of type uint32_t at (*ptr)
#if defined(__ARMCC_VERSION) && (__ARMCC_VERSION < 5060020)
#define __LDREXW(ptr) ((uint32_t ) __ldrex(ptr))
#else
#define __LDREXW(ptr) _Pragma("push") _Pragma("diag_suppress 3731") ((uint32_t ) __ldrex(ptr)) _Pragma("pop")
  \brief STR Exclusive (8 bit)
  \details Executes an exclusive STR instruction for 8 bit values.
  \param [in] value Value to store
  \param [in] ptr Pointer to location
  \return 0 Function succeeded
  \return 1 Function failed
#if defined(__ARMCC_VERSION) && (__ARMCC_VERSION < 5060020)
#define __STREXB(value, ptr) __strex(value, ptr)
#else
#define __STREXB(value, ptr) _Pragma("push") _Pragma("diag_suppress 3731") __strex(value, ptr) _Pragma("pop")
  \brief STR Exclusive (16 bit)
  \details Executes an exclusive STR instruction for 16 bit values.
  \param [in] value Value to store
  \param [in] ptr Pointer to location
  \return 0 Function succeeded
  \return 1 Function failed
#if defined(__ARMCC_VERSION) && (__ARMCC_VERSION < 5060020)
#define __STREXH(value, ptr) __strex(value, ptr)
#else
#define __STREXH(value, ptr) _Pragma("push") _Pragma("diag_suppress 3731") __strex(value, ptr) _Pragma("pop")
  \brief STR Exclusive (32 bit)
  \details Executes an exclusive STR instruction for 32 bit values.
  \param [in] value Value to store
  \param [in] ptr Pointer to location
  \return 0 Function succeeded
  \return 1 Function failed
#if defined(__ARMCC_VERSION) && (__ARMCC_VERSION < 5060020)
#define __STREXW(value, ptr) __strex(value, ptr)
#else
#define __STREXW(value, ptr) _Pragma("push") _Pragma("diag_suppress 3731") __strex(value, ptr) _Pragma("pop")
  \brief Remove the exclusive lock
  \details Removes the exclusive lock which is created by LDREX.
#define __CLREX __clrex
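/*
   Usage sketch (illustrative; atomic_add_u32 is a hypothetical helper, not part of CMSIS):
   LDREX/STREX are normally used in a retry loop. __STREXW returns 0 only if the exclusive
   monitor was still held; otherwise the read-modify-write is repeated. __CLREX can be used,
   for example by a context switch, to abandon a pending exclusive access.

     static __inline uint32_t atomic_add_u32(volatile uint32_t *addr, uint32_t inc)
     {
       uint32_t val;
       do {
         val = __LDREXW(addr) + inc;
       } while (__STREXW(val, addr) != 0U);
       return val;
     }
*/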
  \brief Signed Saturate
  \details Saturates a signed value.
  \param [in] value Value to be saturated
  \param [in] sat Bit position to saturate to (1..32)
  \return Saturated value
#define __SSAT __ssat

  \brief Unsigned Saturate
  \details Saturates an unsigned value.
  \param [in] value Value to be saturated
  \param [in] sat Bit position to saturate to (0..31)
  \return Saturated value
#define __USAT __usat
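/*
   Worked example (illustrative): saturating to 8 bits.
     __SSAT( 300, 8) ->  127    signed 8-bit range is -128..127
     __SSAT(-300, 8) -> -128
     __USAT( 300, 8) ->  255    unsigned 8-bit range is 0..255
     __USAT(-300, 8) ->    0
*/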
/* ########################### Core Function Access ########################### */
  \brief Get FPSCR (Floating Point Status/Control)
  \return Floating Point Status/Control register value
__STATIC_INLINE uint32_t __get_FPSCR(void)
#if ((defined (__FPU_PRESENT) && (__FPU_PRESENT == 1U)) && \
     (defined (__FPU_USED   ) && (__FPU_USED    == 1U))     )
  register uint32_t __regfpscr __ASM("fpscr");

  \brief Set FPSCR (Floating Point Status/Control)
  \param [in] fpscr Floating Point Status/Control value to set
__STATIC_INLINE void __set_FPSCR(uint32_t fpscr)
#if ((defined (__FPU_PRESENT) && (__FPU_PRESENT == 1U)) && \
     (defined (__FPU_USED   ) && (__FPU_USED    == 1U))     )
  register uint32_t __regfpscr __ASM("fpscr");
  __regfpscr = (fpscr);
/** \brief Get CPSR (Current Program Status Register)
  \return CPSR Register value
__STATIC_INLINE uint32_t __get_CPSR(void)
  register uint32_t __regCPSR __ASM("cpsr");

/** \brief Set CPSR (Current Program Status Register)
  \param [in] cpsr CPSR value to set
__STATIC_INLINE void __set_CPSR(uint32_t cpsr)
  register uint32_t __regCPSR __ASM("cpsr");

  \return Processor Mode
__STATIC_INLINE uint32_t __get_mode(void)
  return (__get_CPSR() & 0x1FU);

  \param [in] mode Mode value to set
__STATIC_INLINE __ASM void __set_mode(uint32_t mode)
/** \brief Get Stack Pointer
  \return Stack Pointer
__STATIC_INLINE __ASM uint32_t __get_SP(void)

/** \brief Set Stack Pointer
  \param [in] stack Stack Pointer value to set
__STATIC_INLINE __ASM void __set_SP(uint32_t stack)
/** \brief Get USR/SYS Stack Pointer
  \return USR/SYS Stack Pointer
__STATIC_INLINE __ASM uint32_t __get_SP_usr(void)
  CPS #0x1F  ;no effect in USR mode
  MSR CPSR_c, R1  ;no effect in USR mode

/** \brief Set USR/SYS Stack Pointer
  \param [in] topOfProcStack USR/SYS Stack Pointer value to set
__STATIC_INLINE __ASM void __set_SP_usr(uint32_t topOfProcStack)
  CPS #0x1F  ;no effect in USR mode
  MSR CPSR_c, R1  ;no effect in USR mode
/** \brief Get FPEXC (Floating Point Exception Control Register)
  \return Floating Point Exception Control Register value
__STATIC_INLINE uint32_t __get_FPEXC(void)
#if (__FPU_PRESENT == 1)
  register uint32_t __regfpexc __ASM("fpexc");

/** \brief Set FPEXC (Floating Point Exception Control Register)
  \param [in] fpexc Floating Point Exception Control value to set
__STATIC_INLINE void __set_FPEXC(uint32_t fpexc)
#if (__FPU_PRESENT == 1)
  register uint32_t __regfpexc __ASM("fpexc");
  __regfpexc = (fpexc);
 * Include common core functions to access Coprocessor 15 registers

#define __get_CP(cp, op1, Rt, CRn, CRm, op2) do { register volatile uint32_t tmp __ASM("cp" # cp ":" # op1 ":c" # CRn ":c" # CRm ":" # op2); (Rt) = tmp; } while(0)
#define __set_CP(cp, op1, Rt, CRn, CRm, op2) do { register volatile uint32_t tmp __ASM("cp" # cp ":" # op1 ":c" # CRn ":c" # CRm ":" # op2); tmp = (Rt); } while(0)
#define __get_CP64(cp, op1, Rt, CRm) \
  do { \
    uint32_t ltmp, htmp; \
    __ASM volatile("MRRC p" # cp ", " # op1 ", ltmp, htmp, c" # CRm); \
    (Rt) = ((((uint64_t)htmp) << 32U) | ((uint64_t)ltmp)); \
  } while(0)
#define __set_CP64(cp, op1, Rt, CRm) \
  do { \
    const uint64_t tmp = (Rt); \
    const uint32_t ltmp = (uint32_t)(tmp); \
    const uint32_t htmp = (uint32_t)(tmp >> 32U); \
    __ASM volatile("MCRR p" # cp ", " # op1 ", ltmp, htmp, c" # CRm); \
  } while(0)

#include "cmsis_cp15.h"
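/*
   Usage sketch (illustrative): reading CP15 registers through the generic accessor macros.
   The encodings shown are the architectural CP15 encodings for SCTLR and CNTVCT; in practice
   cmsis_cp15.h builds named accessors such as __get_SCTLR() on top of these macros.

     uint32_t sctlr;
     uint64_t cntvct;
     __get_CP(15, 0, sctlr, 1, 0, 0);     MRC p15, 0, Rt, c1, c0, 0  (SCTLR)
     __get_CP64(15, 1, cntvct, 14);       MRRC p15, 1, lo, hi, c14   (CNTVCT)
*/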
/** \brief Enable Floating Point Unit
  Critical section, called from undef handler, so SysTick is disabled
__STATIC_INLINE __ASM void __FPU_Enable(void)
  //Permit access to VFP/NEON registers by modifying CPACR
  ORR R1,R1,#0x00F00000
  //Ensure that subsequent instructions occur in the context of VFP/NEON access permitted
  ORR R1,R1,#0x40000000
  //Initialise VFP/NEON registers to 0
  //Initialise D16 registers to 0
  IF {TARGET_FEATURE_EXTENSION_REGISTER_COUNT} == 32
  //Initialise D32 registers to 0
  //Initialise FPSCR to a known state
  LDR R2,=0x00086060  //Mask off all bits that do not have to be preserved. Non-preserved bits can/should be zero.

#endif /* __CMSIS_ARMCC_H */