/**************************************************************************//**
 * @brief    CMSIS compiler specific macros, functions, instructions
 ******************************************************************************/
/*
 * Copyright (c) 2009-2017 ARM Limited. All rights reserved.
 *
 * SPDX-License-Identifier: Apache-2.0
 *
 * Licensed under the Apache License, Version 2.0 (the License); you may
 * not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an AS IS BASIS, WITHOUT
 * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

#ifndef __CMSIS_ARMCC_H
#define __CMSIS_ARMCC_H
#if defined(__ARMCC_VERSION) && (__ARMCC_VERSION < 400677)
  #error "Please use ARM Compiler Toolchain V4.0.677 or later!"
#endif
/* CMSIS compiler control architecture macros */
#if (defined (__TARGET_ARCH_7_A ) && (__TARGET_ARCH_7_A == 1))
  #define __ARM_ARCH_7A__             1
#endif
/* CMSIS compiler specific defines */
#ifndef   __ASM
  #define __ASM                       __asm
#endif
#define __INLINE                      __inline
#define __FORCEINLINE                 __forceinline
#ifndef   __STATIC_INLINE
  #define __STATIC_INLINE             static __inline
#endif
#ifndef   __STATIC_FORCEINLINE
  #define __STATIC_FORCEINLINE        static __forceinline
#endif
#define __NO_RETURN                   __declspec(noreturn)
#define __USED                        __attribute__((used))
#define __WEAK                        __attribute__((weak))
#define __PACKED                      __attribute__((packed))
#ifndef   __PACKED_STRUCT
  #define __PACKED_STRUCT             __packed struct
#endif
#ifndef   __UNALIGNED_UINT16_WRITE
  #define __UNALIGNED_UINT16_WRITE(addr, val)    ((*((__packed uint16_t *)(addr))) = (val))
#endif
#ifndef   __UNALIGNED_UINT16_READ
  #define __UNALIGNED_UINT16_READ(addr)          (*((const __packed uint16_t *)(addr)))
#endif
#ifndef   __UNALIGNED_UINT32_WRITE
  #define __UNALIGNED_UINT32_WRITE(addr, val)    ((*((__packed uint32_t *)(addr))) = (val))
#endif
#ifndef   __UNALIGNED_UINT32_READ
  #define __UNALIGNED_UINT32_READ(addr)          (*((const __packed uint32_t *)(addr)))
#endif
#define __ALIGNED(x)                  __attribute__((aligned(x)))
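/* Usage sketch (not part of the original header): reading a 32-bit field that
   is not naturally aligned. The frame layout and the names below are
   assumptions made for this example only; uint8_t/uint32_t are assumed to be
   in scope, as elsewhere in this header. */
typedef __PACKED_STRUCT
{
  uint8_t  tag;     /* one byte of header ...                 */
  uint32_t value;   /* ... leaves this member unaligned       */
} example_frame_t;

__STATIC_INLINE uint32_t example_read_frame_value(const uint8_t *buf)
{
  /* buf + 1U is not 4-byte aligned; the macro makes the compiler emit an
     access that is safe for unaligned addresses instead of a plain LDR. */
  return __UNALIGNED_UINT32_READ(buf + 1U);
}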
/* ##########################  Core Instruction Access  ######################### */
/**
  \brief   Wait For Interrupt
 */
#define __WFI                         __wfi
/**
  \brief   Instruction Synchronization Barrier
 */
#define __ISB() do {\
                   __schedule_barrier();\
                   __isb(0xF);\
                   __schedule_barrier();\
                } while (0U)

/**
  \brief   Data Synchronization Barrier
 */
#define __DSB() do {\
                   __schedule_barrier();\
                   __dsb(0xF);\
                   __schedule_barrier();\
                } while (0U)

/**
  \brief   Data Memory Barrier
 */
#define __DMB() do {\
                   __schedule_barrier();\
                   __dmb(0xF);\
                   __schedule_barrier();\
                } while (0U)
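/* Usage sketch (not part of the original header): publish a data word to a
   consumer (another core or an interrupt handler) that polls a flag. The
   variable and function names are assumptions made for this example only. */
__STATIC_INLINE void example_publish(volatile uint32_t *data,
                                     volatile uint32_t *ready_flag,
                                     uint32_t value)
{
  *data = value;        /* write the payload first                        */
  __DMB();              /* order the payload write before the flag write  */
  *ready_flag = 1U;     /* observers that see the flag also see the data  */
}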
/**
  \brief   Reverse byte order (32 bit)
  \details Reverses the byte order in unsigned integer value. For example, 0x12345678 becomes 0x78563412.
  \param [in]    value  Value to reverse
  \return               Reversed value
 */
#define __REV                         __rev
/**
  \brief   Reverse byte order (16 bit)
  \details Reverses the byte order within each halfword of a word. For example, 0x12345678 becomes 0x34127856.
  \param [in]    value  Value to reverse
  \return               Reversed value
 */
#ifndef __NO_EMBEDDED_ASM
__attribute__((section(".rev16_text"))) __STATIC_INLINE __ASM uint32_t __REV16(uint32_t value)
{
  rev16 r0, r0
  bx lr
}
#endif
/**
  \brief   Reverse byte order (16 bit)
  \details Reverses the byte order in a 16-bit value and returns the signed 16-bit result. For example, 0x0080 becomes 0x8000.
  \param [in]    value  Value to reverse
  \return               Reversed value
 */
#ifndef __NO_EMBEDDED_ASM
__attribute__((section(".revsh_text"))) __STATIC_INLINE __ASM int16_t __REVSH(int16_t value)
{
  revsh r0, r0
  bx lr
}
#endif
/**
  \brief   Rotate Right in unsigned value (32 bit)
  \param [in]    op1  Value to rotate
  \param [in]    op2  Number of Bits to rotate
  \return             Rotated value
 */
#define __ROR                         __ror
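/* Usage sketch (not part of the original header): convert a 32-bit value from
   little-endian core order to big-endian (network) order with __REV, and
   rotate a bit pattern with __ROR. The function names are assumptions. */
__STATIC_INLINE uint32_t example_to_big_endian(uint32_t host_value)
{
  return __REV(host_value);          /* 0x12345678 -> 0x78563412 */
}

__STATIC_INLINE uint32_t example_rotate_byte_right(uint32_t pattern)
{
  return __ROR(pattern, 8U);         /* 0x12345678 -> 0x78123456 */
}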
/**
  \brief   Breakpoint
  \param [in]    value  is ignored by the processor.
                 If required, a debugger can use it to store additional information about the breakpoint.
 */
#define __BKPT(value)                 __breakpoint(value)
/**
  \brief   Reverse bit order of value
  \param [in]    value  Value to reverse
  \return               Reversed value
 */
#define __RBIT                        __rbit
/**
  \brief   Count leading zeros
  \param [in]  value  Value to count the leading zeros
  \return             number of leading zeros in value
 */
#define __CLZ                         __clz
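/* Usage sketch (not part of the original header): derive the index of the
   highest set bit (integer log2) from the leading-zero count. The helper name
   is an assumption; the caller must ensure value is non-zero. */
__STATIC_INLINE uint32_t example_highest_set_bit(uint32_t value)
{
  return 31U - __CLZ(value);         /* e.g. 0x00008000 -> 15 */
}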
/**
  \brief   LDR Exclusive (8 bit)
  \details Executes an exclusive LDR instruction for 8 bit value.
  \param [in]    ptr  Pointer to data
  \return             value of type uint8_t at (*ptr)
 */
#if defined(__ARMCC_VERSION) && (__ARMCC_VERSION < 5060020)
  #define __LDREXB(ptr)                                                        ((uint8_t ) __ldrex(ptr))
#else
  #define __LDREXB(ptr)          _Pragma("push") _Pragma("diag_suppress 3731") ((uint8_t ) __ldrex(ptr))  _Pragma("pop")
#endif
/**
  \brief   LDR Exclusive (16 bit)
  \details Executes an exclusive LDR instruction for 16 bit values.
  \param [in]    ptr  Pointer to data
  \return             value of type uint16_t at (*ptr)
 */
#if defined(__ARMCC_VERSION) && (__ARMCC_VERSION < 5060020)
  #define __LDREXH(ptr)                                                        ((uint16_t) __ldrex(ptr))
#else
  #define __LDREXH(ptr)          _Pragma("push") _Pragma("diag_suppress 3731") ((uint16_t) __ldrex(ptr))  _Pragma("pop")
#endif
/**
  \brief   LDR Exclusive (32 bit)
  \details Executes an exclusive LDR instruction for 32 bit values.
  \param [in]    ptr  Pointer to data
  \return             value of type uint32_t at (*ptr)
 */
#if defined(__ARMCC_VERSION) && (__ARMCC_VERSION < 5060020)
  #define __LDREXW(ptr)                                                        ((uint32_t ) __ldrex(ptr))
#else
  #define __LDREXW(ptr)          _Pragma("push") _Pragma("diag_suppress 3731") ((uint32_t ) __ldrex(ptr))  _Pragma("pop")
#endif
/**
  \brief   STR Exclusive (8 bit)
  \details Executes an exclusive STR instruction for 8 bit values.
  \param [in]  value  Value to store
  \param [in]    ptr  Pointer to location
  \return          0  Function succeeded
  \return          1  Function failed
 */
#if defined(__ARMCC_VERSION) && (__ARMCC_VERSION < 5060020)
  #define __STREXB(value, ptr)                                                 __strex(value, ptr)
#else
  #define __STREXB(value, ptr)   _Pragma("push") _Pragma("diag_suppress 3731") __strex(value, ptr)        _Pragma("pop")
#endif
/**
  \brief   STR Exclusive (16 bit)
  \details Executes an exclusive STR instruction for 16 bit values.
  \param [in]  value  Value to store
  \param [in]    ptr  Pointer to location
  \return          0  Function succeeded
  \return          1  Function failed
 */
#if defined(__ARMCC_VERSION) && (__ARMCC_VERSION < 5060020)
  #define __STREXH(value, ptr)                                                 __strex(value, ptr)
#else
  #define __STREXH(value, ptr)   _Pragma("push") _Pragma("diag_suppress 3731") __strex(value, ptr)        _Pragma("pop")
#endif
/**
  \brief   STR Exclusive (32 bit)
  \details Executes an exclusive STR instruction for 32 bit values.
  \param [in]  value  Value to store
  \param [in]    ptr  Pointer to location
  \return          0  Function succeeded
  \return          1  Function failed
 */
#if defined(__ARMCC_VERSION) && (__ARMCC_VERSION < 5060020)
  #define __STREXW(value, ptr)                                                 __strex(value, ptr)
#else
  #define __STREXW(value, ptr)   _Pragma("push") _Pragma("diag_suppress 3731") __strex(value, ptr)        _Pragma("pop")
#endif
/**
  \brief   Remove the exclusive lock
  \details Removes the exclusive lock which is created by LDREX.
 */
#define __CLREX                       __clrex
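/* Usage sketch (not part of the original header): lock-free increment of a
   shared counter using the exclusive-access intrinsics defined above. The
   exclusive store returns 1 if the exclusive monitor was cleared (e.g. by
   another master or an exception), in which case the read-modify-write is
   retried. The function name is an assumption. */
__STATIC_INLINE uint32_t example_atomic_increment(volatile uint32_t *counter)
{
  uint32_t newval;
  do {
    newval = __LDREXW(counter) + 1U;          /* exclusive load, then modify          */
  } while (__STREXW(newval, counter) != 0U);  /* retry until the exclusive store wins */
  return newval;
}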
/**
  \brief   Signed Saturate
  \details Saturates a signed value.
  \param [in]  value  Value to be saturated
  \param [in]    sat  Bit position to saturate to (1..32)
  \return             Saturated value
 */
#define __SSAT                        __ssat
/**
  \brief   Unsigned Saturate
  \details Saturates an unsigned value.
  \param [in]  value  Value to be saturated
  \param [in]    sat  Bit position to saturate to (0..31)
  \return             Saturated value
 */
#define __USAT                        __usat
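/* Usage sketch (not part of the original header): clamp a 32-bit intermediate
   result into a signed 16-bit sample and an unsigned 8-bit pixel. The second
   argument must be a compile-time constant, as required by the underlying
   __ssat/__usat intrinsics; the function names are assumptions. */
__STATIC_INLINE int16_t example_clamp_q15(int32_t acc)
{
  return (int16_t)__SSAT(acc, 16);   /* result limited to -32768..32767 */
}

__STATIC_INLINE uint8_t example_clamp_u8(int32_t acc)
{
  return (uint8_t)__USAT(acc, 8);    /* result limited to 0..255 */
}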
/* ###########################  Core Function Access  ########################### */

/**
  \brief   Get FPSCR (Floating Point Status/Control)
  \return               Floating Point Status/Control register value
 */
__STATIC_INLINE uint32_t __get_FPSCR(void)
{
#if ((defined (__FPU_PRESENT) && (__FPU_PRESENT == 1U)) && \
     (defined (__FPU_USED   ) && (__FPU_USED    == 1U))     )
  register uint32_t __regfpscr __ASM("fpscr");
  return(__regfpscr);
#else
  return(0U);
#endif
}
/**
  \brief   Set FPSCR (Floating Point Status/Control)
  \param [in]    fpscr  Floating Point Status/Control value to set
 */
__STATIC_INLINE void __set_FPSCR(uint32_t fpscr)
{
#if ((defined (__FPU_PRESENT) && (__FPU_PRESENT == 1U)) && \
     (defined (__FPU_USED   ) && (__FPU_USED    == 1U))     )
  register uint32_t __regfpscr __ASM("fpscr");
  __regfpscr = (fpscr);
#else
  (void)fpscr;
#endif
}
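/* Usage sketch (not part of the original header): read and clear the sticky
   floating-point exception flags. The mask 0x0000009FU (IOC, DZC, OFC, UFC,
   IXC, IDC) follows the architectural FPSCR layout and is an assumption of
   this example, not something defined by this header. */
__STATIC_INLINE uint32_t example_fpscr_test_and_clear_exceptions(void)
{
  uint32_t fpscr = __get_FPSCR();
  __set_FPSCR(fpscr & ~0x0000009FU);   /* clear the cumulative exception bits       */
  return (fpscr & 0x0000009FU);        /* report which exceptions had been flagged  */
}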
/** \brief  Get CPSR (Current Program Status Register)
    \return               CPSR Register value
 */
__STATIC_INLINE uint32_t __get_CPSR(void)
{
  register uint32_t __regCPSR __ASM("cpsr");
  return(__regCPSR);
}
/** \brief  Set CPSR (Current Program Status Register)
    \param [in]    cpsr  CPSR value to set
 */
__STATIC_INLINE void __set_CPSR(uint32_t cpsr)
{
  register uint32_t __regCPSR __ASM("cpsr");
  __regCPSR = cpsr;
}
/** \brief  Get Mode
    \return                Processor Mode
 */
__STATIC_INLINE uint32_t __get_mode(void)
{
  return (__get_CPSR() & 0x1FU);
}
/** \brief  Set Mode
    \param [in]    mode  Mode value to set
 */
__STATIC_INLINE __ASM void __set_mode(uint32_t mode)
{
  MOV  r1, lr
  MSR  CPSR_C, r0
  BX   r1
}
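/* Usage sketch (not part of the original header): use __get_mode() to test
   whether the core is executing in a privileged mode. 0x10U is the
   architectural CPSR.M encoding for User mode; the helper name is an
   assumption. */
__STATIC_INLINE uint32_t example_is_privileged(void)
{
  return (__get_mode() != 0x10U) ? 1U : 0U;
}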
/** \brief  Get Stack Pointer
    \return Stack Pointer value
 */
__STATIC_INLINE __ASM uint32_t __get_SP(void)
{
  MOV  r0, sp
  BX   lr
}
/** \brief  Set Stack Pointer
    \param [in]    stack  Stack Pointer value to set
 */
__STATIC_INLINE __ASM void __set_SP(uint32_t stack)
{
  MOV  sp, r0
  BX   lr
}
/** \brief  Get USR/SYS Stack Pointer
    \return USR/SYS Stack Pointer value
 */
__STATIC_INLINE __ASM uint32_t __get_SP_usr(void)
{
  ARM
  PRESERVE8

  MRS     R1, CPSR
  CPS     #0x1F      ;no effect in USR mode
  MOV     R0, SP
  MSR     CPSR_c, R1 ;no effect in USR mode
  ISB
  BX      LR
}
/** \brief  Set USR/SYS Stack Pointer
    \param [in]    topOfProcStack  USR/SYS Stack Pointer value to set
 */
__STATIC_INLINE __ASM void __set_SP_usr(uint32_t topOfProcStack)
{
  ARM
  PRESERVE8

  MRS     R1, CPSR
  CPS     #0x1F      ;no effect in USR mode
  MOV     SP, R0
  MSR     CPSR_c, R1 ;no effect in USR mode
  ISB
  BX      LR
}
/** \brief  Get FPEXC (Floating Point Exception Control Register)
    \return               Floating Point Exception Control Register value
 */
__STATIC_INLINE uint32_t __get_FPEXC(void)
{
#if (__FPU_PRESENT == 1)
  register uint32_t __regfpexc __ASM("fpexc");
  return(__regfpexc);
#else
  return(0);
#endif
}
/** \brief  Set FPEXC (Floating Point Exception Control Register)
    \param [in]    fpexc  Floating Point Exception Control value to set
 */
__STATIC_INLINE void __set_FPEXC(uint32_t fpexc)
{
#if (__FPU_PRESENT == 1)
  register uint32_t __regfpexc __ASM("fpexc");
  __regfpexc = (fpexc);
#endif
}
/*
 * Include common core functions to access Coprocessor 15 registers
 */

#define __get_CP(cp, op1, Rt, CRn, CRm, op2) do { register uint32_t tmp __ASM("cp" # cp ":" # op1 ":c" # CRn ":c" # CRm ":" # op2); Rt = tmp; } while(0)
#define __set_CP(cp, op1, Rt, CRn, CRm, op2) do { register uint32_t tmp __ASM("cp" # cp ":" # op1 ":c" # CRn ":c" # CRm ":" # op2); tmp = Rt; } while(0)
#define __get_CP64(cp, op1, Rt, CRm) \
  do { \
    uint32_t ltmp, htmp; \
    __ASM volatile("MRRC p" # cp ", " # op1 ", ltmp, htmp, c" # CRm); \
    Rt = (((uint64_t)htmp) << 32U) | ((uint64_t)ltmp); \
  } while(0)

#define __set_CP64(cp, op1, Rt, CRm) \
  do { \
    const uint32_t ltmp = (uint32_t)Rt; \
    const uint32_t htmp = (uint32_t)(Rt >> 32); \
    __ASM volatile("MCRR p" # cp ", " # op1 ", ltmp, htmp, c" # CRm); \
  } while(0)

#include "cmsis_cp15.h"
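/* Usage sketch (not part of the original header): read the Main ID Register
   (MIDR), which is architecturally encoded as CP15 c0, c0 with opcode1 = 0
   and opcode2 = 0. cmsis_cp15.h builds its named accessors on these macros;
   this shows the raw macro for a register of your own choosing. The function
   name is an assumption. */
__STATIC_INLINE uint32_t example_read_midr(void)
{
  uint32_t midr;
  __get_CP(15, 0, midr, 0, 0, 0);    /* equivalent to MRC p15, 0, <Rt>, c0, c0, 0 */
  return midr;
}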
/** \brief  Clean and Invalidate the entire data or unified cache
 * \param [in] op 0 - invalidate, 1 - clean, otherwise - invalidate and clean
 */
__STATIC_INLINE __ASM void __L1C_CleanInvalidateCache(uint32_t op)
{
        ARM

        PUSH    {R4-R11}

        MRC     p15, 1, R6, c0, c0, 1   // Read CLIDR
        ANDS    R3, R6, #0x07000000     // Extract coherency level
        MOV     R3, R3, LSR #23         // Total cache levels << 1
        BEQ     Finished                // If 0, no need to clean

        MOV     R10, #0                 // R10 holds current cache level << 1
Loop1   ADD     R2, R10, R10, LSR #1    // R2 holds cache "Set" position
        MOV     R1, R6, LSR R2          // Bottom 3 bits are the Cache-type for this level
        AND     R1, R1, #7              // Isolate those lower 3 bits
        CMP     R1, #2
        BLT     Skip                    // No cache or only instruction cache at this level

        MCR     p15, 2, R10, c0, c0, 0  // Write the Cache Size selection register
        ISB                             // ISB to sync the change to the CacheSizeID reg
        MRC     p15, 1, R1, c0, c0, 0   // Reads current Cache Size ID register
        AND     R2, R1, #7              // Extract the line length field
        ADD     R2, R2, #4              // Add 4 for the line length offset (log2 of 16 bytes)
        LDR     R4, =0x3FF
        ANDS    R4, R4, R1, LSR #3      // R4 is the maximum way number (right aligned)
        CLZ     R5, R4                  // R5 is the bit position of the way size increment
        LDR     R7, =0x7FFF
        ANDS    R7, R7, R1, LSR #13     // R7 is the maximum set number (right aligned)

Loop2   MOV     R9, R4                  // R9 is a working copy of the maximum way number (right aligned)

Loop3   ORR     R11, R10, R9, LSL R5    // Factor in the Way number and cache level into R11
        ORR     R11, R11, R7, LSL R2    // Factor in the Set number
        CMP     R0, #0
        BNE     Dccsw
        MCR     p15, 0, R11, c7, c6, 2  // DCISW. Invalidate by Set/Way
        B       cont
Dccsw   CMP     R0, #1
        BNE     Dccisw
        MCR     p15, 0, R11, c7, c10, 2 // DCCSW. Clean by Set/Way
        B       cont
Dccisw  MCR     p15, 0, R11, c7, c14, 2 // DCCISW. Clean and Invalidate by Set/Way
cont    SUBS    R9, R9, #1              // Decrement the Way number
        BGE     Loop3
        SUBS    R7, R7, #1              // Decrement the Set number
        BGE     Loop2
Skip    ADD     R10, R10, #2            // Increment the cache level counter
        CMP     R3, R10
        BGT     Loop1

Finished
        DSB
        POP     {R4-R11}
        BX      lr
}
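/* Usage sketch (not part of the original header): clean and invalidate the
   entire data/unified cache, e.g. before handing a buffer to a non-coherent
   DMA master or before disabling the cache. The surrounding barriers follow
   the usual maintenance pattern; the wrapper name is an assumption. */
__STATIC_INLINE void example_dcache_clean_invalidate_all(void)
{
  __DSB();                           /* complete outstanding memory accesses          */
  __L1C_CleanInvalidateCache(2U);    /* op other than 0/1: clean and invalidate       */
  __DSB();
  __ISB();                           /* make the effect visible to the following code */
}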
/** \brief  Enable Floating Point Unit

  Critical section, called from undef handler, so systick is disabled
 */
__STATIC_INLINE __ASM void __FPU_Enable(void)
{
        ARM

        // Permit access to VFP/NEON registers by modifying CPACR
        MRC     p15,0,R1,c1,c0,2
        ORR     R1,R1,#0x00F00000
        MCR     p15,0,R1,c1,c0,2

        // Ensure that subsequent instructions occur in the context of VFP/NEON access permitted
        ISB

        // Enable VFP/NEON
        VMRS    R1,FPEXC
        ORR     R1,R1,#0x40000000
        VMSR    FPEXC,R1

        // Initialise VFP/NEON registers to 0
        MOV     R2,#0

        IF {TARGET_FEATURE_EXTENSION_REGISTER_COUNT} >= 16
        // Initialise D16 registers to 0

        IF {TARGET_FEATURE_EXTENSION_REGISTER_COUNT} == 32
        // Initialise D32 registers to 0

        // Initialise FPSCR to a known state
        VMRS    R2,FPSCR
        LDR     R3,=0x00086060 // Mask off all bits that do not have to be preserved. Non-preserved bits can/should be zero.
        AND     R2,R2,R3
        VMSR    FPSCR,R2

        BX      LR
}
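/* Usage sketch (not part of the original header): enable the FPU once during
   early start-up, before any code that touches VFP/NEON registers runs. The
   __FPU_PRESENT guard mirrors the convention used elsewhere in this file;
   the wrapper name is an assumption. */
#if (defined (__FPU_PRESENT) && (__FPU_PRESENT == 1U))
__STATIC_INLINE void example_fpu_startup(void)
{
  __FPU_Enable();                    /* grant CP10/CP11 access and reset the VFP/NEON state */
}
#endif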
#endif /* __CMSIS_ARMCC_H */