/**************************************************************************//**
 * @brief    CMSIS compiler specific macros, functions, instructions
 ******************************************************************************/
/*
 * Copyright (c) 2009-2017 ARM Limited. All rights reserved.
 *
 * SPDX-License-Identifier: Apache-2.0
 *
 * Licensed under the Apache License, Version 2.0 (the License); you may
 * not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an AS IS BASIS, WITHOUT
 * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

#ifndef __CMSIS_ARMCC_H
#define __CMSIS_ARMCC_H
#if defined(__ARMCC_VERSION) && (__ARMCC_VERSION < 400677)
  #error "Please use ARM Compiler Toolchain V4.0.677 or later!"
#endif

/* CMSIS compiler control architecture macros */
#if (defined (__TARGET_ARCH_7_A ) && (__TARGET_ARCH_7_A == 1))
  #define __ARM_ARCH_7A__           1
#endif
/* CMSIS compiler specific defines */
#ifndef   __ASM
  #define __ASM                     __asm
#endif
#ifndef   __INLINE
  #define __INLINE                  __inline
#endif
#ifndef   __FORCEINLINE
  #define __FORCEINLINE             __forceinline
#endif
#ifndef   __STATIC_INLINE
  #define __STATIC_INLINE           static __inline
#endif
#ifndef   __STATIC_FORCEINLINE
  #define __STATIC_FORCEINLINE      static __forceinline
#endif
#ifndef   __NO_RETURN
  #define __NO_RETURN               __declspec(noreturn)
#endif
#ifndef   __USED
  #define __USED                    __attribute__((used))
#endif
#ifndef   __WEAK
  #define __WEAK                    __attribute__((weak))
#endif
#ifndef   __PACKED
  #define __PACKED                  __attribute__((packed))
#endif
#ifndef   __PACKED_STRUCT
  #define __PACKED_STRUCT           __packed struct
#endif
#ifndef   __UNALIGNED_UINT16_WRITE
  #define __UNALIGNED_UINT16_WRITE(addr, val)    ((*((__packed uint16_t *)(addr))) = (val))
#endif
#ifndef   __UNALIGNED_UINT16_READ
  #define __UNALIGNED_UINT16_READ(addr)          (*((const __packed uint16_t *)(addr)))
#endif
#ifndef   __UNALIGNED_UINT32_WRITE
  #define __UNALIGNED_UINT32_WRITE(addr, val)    ((*((__packed uint32_t *)(addr))) = (val))
#endif
#ifndef   __UNALIGNED_UINT32_READ
  #define __UNALIGNED_UINT32_READ(addr)          (*((const __packed uint32_t *)(addr)))
#endif
#ifndef   __ALIGNED
  #define __ALIGNED(x)              __attribute__((aligned(x)))
#endif
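/*
  Usage sketch (illustrative addition, not part of the original header): reading a
  field that is not naturally aligned with the helpers above. The structure, buffer
  and function names are hypothetical; <stdint.h> types are assumed to be available.

    typedef __PACKED_STRUCT {
      uint8_t  type;
      uint32_t value;          // misaligned because of the preceding byte
    } msg_t;

    uint32_t msg_get_value(const uint8_t *buf)
    {
      // __UNALIGNED_UINT32_READ performs a byte-safe access on a misaligned address
      return __UNALIGNED_UINT32_READ(buf + 1U);
    }
 */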
/* ##########################  Core Instruction Access  ######################### */

/** \brief   Wait For Interrupt */
#define __WFI                             __wfi
/** \brief   Instruction Synchronization Barrier */
#define __ISB() do {\
                   __schedule_barrier();\
                   __isb(0xF);\
                   __schedule_barrier();\
                } while (0U)

/** \brief   Data Synchronization Barrier */
#define __DSB() do {\
                   __schedule_barrier();\
                   __dsb(0xF);\
                   __schedule_barrier();\
                } while (0U)

/** \brief   Data Memory Barrier */
#define __DMB() do {\
                   __schedule_barrier();\
                   __dmb(0xF);\
                   __schedule_barrier();\
                } while (0U)
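/*
  Usage sketch (illustrative, not from the original header): a typical ordering
  sequence after writing a memory-mapped control register. The register address is
  hypothetical.

    volatile uint32_t *ctrl = (volatile uint32_t *)0x40001000U;  // hypothetical device register
    *ctrl = 1U;     // store to the device register
    __DSB();        // wait for the store (and all earlier memory accesses) to complete
    __ISB();        // flush the pipeline so following instructions execute with the new state
 */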
/** \brief   Reverse byte order (32 bit)
    \param [in]    value  Value to reverse
    \return               Reversed value */
#define __REV                             __rev

/** \brief   Reverse byte order (16 bit)
    \param [in]    value  Value to reverse
    \return               Reversed value */
#ifndef __NO_EMBEDDED_ASM
__attribute__((section(".rev16_text"))) __STATIC_INLINE __ASM uint32_t __REV16(uint32_t value)
{
  rev16 r0, r0
  bx lr
}
#endif

/** \brief   Reverse byte order in signed short value
    \param [in]    value  Value to reverse
    \return               Reversed value */
#ifndef __NO_EMBEDDED_ASM
__attribute__((section(".revsh_text"))) __STATIC_INLINE __ASM int32_t __REVSH(int32_t value)
{
  revsh r0, r0
  bx lr
}
#endif
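/*
  Usage sketch (illustrative, not from the original header): converting a 32-bit
  big-endian value, e.g. a field read from a network header, to the byte order used
  by the processor. The function name is hypothetical.

    uint32_t to_host_order(uint32_t big_endian_word)
    {
      return __REV(big_endian_word);   // reverses all four bytes
    }
 */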
/** \brief   Rotate Right in unsigned value (32 bit)
    \param [in]    op1  Value to rotate
    \param [in]    op2  Number of Bits to rotate
    \return             Rotated value */
#define __ROR                             __ror

/** \brief   Breakpoint
    \param [in]    value  is ignored by the processor.
                   If required, a debugger can use it to store additional information about the breakpoint. */
#define __BKPT(value)                     __breakpoint(value)
/** \brief   Reverse bit order of value
    \param [in]    value  Value to reverse
    \return               Reversed value */
#define __RBIT                            __rbit

/** \brief   Count leading zeros
    \param [in]    value  Value to count the leading zeros
    \return               Number of leading zeros in value */
#define __CLZ                             __clz
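/*
  Usage sketch (illustrative, not from the original header): a floor(log2(x)) helper
  built on __CLZ. For a non-zero 32-bit value the index of the highest set bit is
  31 - CLZ(x). The function name is hypothetical.

    uint32_t log2_floor(uint32_t x)      // x must be non-zero
    {
      return 31U - __CLZ(x);
    }
 */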
/** \brief   LDR Exclusive (8 bit)
    \details Executes an exclusive LDR instruction for 8 bit values.
    \param [in]    ptr  Pointer to data
    \return             Value of type uint8_t at (*ptr) */
#if defined(__ARMCC_VERSION) && (__ARMCC_VERSION < 5060020)
  #define __LDREXB(ptr)                                                        ((uint8_t ) __ldrex(ptr))
#else
  #define __LDREXB(ptr)          _Pragma("push") _Pragma("diag_suppress 3731") ((uint8_t ) __ldrex(ptr))  _Pragma("pop")
#endif

/** \brief   LDR Exclusive (16 bit)
    \details Executes an exclusive LDR instruction for 16 bit values.
    \param [in]    ptr  Pointer to data
    \return             Value of type uint16_t at (*ptr) */
#if defined(__ARMCC_VERSION) && (__ARMCC_VERSION < 5060020)
  #define __LDREXH(ptr)                                                        ((uint16_t) __ldrex(ptr))
#else
  #define __LDREXH(ptr)          _Pragma("push") _Pragma("diag_suppress 3731") ((uint16_t) __ldrex(ptr))  _Pragma("pop")
#endif

/** \brief   LDR Exclusive (32 bit)
    \details Executes an exclusive LDR instruction for 32 bit values.
    \param [in]    ptr  Pointer to data
    \return             Value of type uint32_t at (*ptr) */
#if defined(__ARMCC_VERSION) && (__ARMCC_VERSION < 5060020)
  #define __LDREXW(ptr)                                                        ((uint32_t ) __ldrex(ptr))
#else
  #define __LDREXW(ptr)          _Pragma("push") _Pragma("diag_suppress 3731") ((uint32_t ) __ldrex(ptr))  _Pragma("pop")
#endif
/** \brief   STR Exclusive (8 bit)
    \details Executes an exclusive STR instruction for 8 bit values.
    \param [in]    value  Value to store
    \param [in]    ptr    Pointer to location
    \return 0  Function succeeded
    \return 1  Function failed */
#if defined(__ARMCC_VERSION) && (__ARMCC_VERSION < 5060020)
  #define __STREXB(value, ptr)                                                 __strex(value, ptr)
#else
  #define __STREXB(value, ptr)   _Pragma("push") _Pragma("diag_suppress 3731") __strex(value, ptr)        _Pragma("pop")
#endif

/** \brief   STR Exclusive (16 bit)
    \details Executes an exclusive STR instruction for 16 bit values.
    \param [in]    value  Value to store
    \param [in]    ptr    Pointer to location
    \return 0  Function succeeded
    \return 1  Function failed */
#if defined(__ARMCC_VERSION) && (__ARMCC_VERSION < 5060020)
  #define __STREXH(value, ptr)                                                 __strex(value, ptr)
#else
  #define __STREXH(value, ptr)   _Pragma("push") _Pragma("diag_suppress 3731") __strex(value, ptr)        _Pragma("pop")
#endif

/** \brief   STR Exclusive (32 bit)
    \details Executes an exclusive STR instruction for 32 bit values.
    \param [in]    value  Value to store
    \param [in]    ptr    Pointer to location
    \return 0  Function succeeded
    \return 1  Function failed */
#if defined(__ARMCC_VERSION) && (__ARMCC_VERSION < 5060020)
  #define __STREXW(value, ptr)                                                 __strex(value, ptr)
#else
  #define __STREXW(value, ptr)   _Pragma("push") _Pragma("diag_suppress 3731") __strex(value, ptr)        _Pragma("pop")
#endif

/** \brief   Remove the exclusive lock
    \details Removes the exclusive lock which is created by LDREX. */
#define __CLREX                           __clrex
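/*
  Usage sketch (illustrative, not from the original header): a lock-free increment
  built from the exclusive-access intrinsics above. The counter variable and function
  name are hypothetical; __STREXW returns 0 only if nothing broke the exclusive
  monitor between the load and the store.

    static volatile uint32_t counter;

    void counter_increment(void)
    {
      uint32_t val;
      do {
        val = __LDREXW(&counter);                    // load-exclusive the current value
      } while (__STREXW(val + 1U, &counter) != 0U);  // retry until the store-exclusive succeeds
      __DMB();                                       // make the update visible before continuing
    }
 */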
/** \brief   Signed Saturate
    \details Saturates a signed value.
    \param [in]    value  Value to be saturated
    \param [in]    sat    Bit position to saturate to (1..32)
    \return               Saturated value */
#define __SSAT                            __ssat

/** \brief   Unsigned Saturate
    \details Saturates an unsigned value.
    \param [in]    value  Value to be saturated
    \param [in]    sat    Bit position to saturate to (0..31)
    \return               Saturated value */
#define __USAT                            __usat
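/*
  Usage sketch (illustrative, not from the original header): clamping a 32-bit
  intermediate result into the signed 16-bit range before storing it as a sample.
  The function name is hypothetical.

    int16_t clamp_to_q15(int32_t acc)
    {
      return (int16_t)__SSAT(acc, 16);   // saturates to the range [-32768, 32767]
    }
 */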
/* ###########################  Core Function Access  ########################### */

/** \brief   Get FPSCR (Floating Point Status/Control)
    \return  Floating Point Status/Control register value */
__STATIC_INLINE uint32_t __get_FPSCR(void)
{
#if ((defined (__FPU_PRESENT) && (__FPU_PRESENT == 1U)) && \
     (defined (__FPU_USED   ) && (__FPU_USED    == 1U))     )
  register uint32_t __regfpscr         __ASM("fpscr");
  return(__regfpscr);
#else
  return(0U);
#endif
}

/** \brief   Set FPSCR (Floating Point Status/Control)
    \param [in]    fpscr  Floating Point Status/Control value to set */
__STATIC_INLINE void __set_FPSCR(uint32_t fpscr)
{
#if ((defined (__FPU_PRESENT) && (__FPU_PRESENT == 1U)) && \
     (defined (__FPU_USED   ) && (__FPU_USED    == 1U))     )
  register uint32_t __regfpscr         __ASM("fpscr");
  __regfpscr = (fpscr);
#else
  (void)fpscr;
#endif
}
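/*
  Usage sketch (illustrative, not from the original header): clearing the cumulative
  floating-point exception flags (IOC, DZC, OFC, UFC, IXC in bits 0..4 and IDC in
  bit 7) with a read-modify-write of FPSCR. The mask value is an assumption based on
  the architectural bit layout.

    __set_FPSCR(__get_FPSCR() & ~0x0000009FUL);
 */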
/** \brief  Get CPSR (Current Program Status Register)
    \return CPSR Register value */
__STATIC_INLINE uint32_t __get_CPSR(void)
{
  register uint32_t __regCPSR          __ASM("cpsr");
  return(__regCPSR);
}

/** \brief  Set CPSR (Current Program Status Register)
    \param [in]    cpsr  CPSR value to set */
__STATIC_INLINE void __set_CPSR(uint32_t cpsr)
{
  register uint32_t __regCPSR          __ASM("cpsr");
  __regCPSR = cpsr;
}

/** \brief  Get Mode
    \return Processor Mode */
__STATIC_INLINE uint32_t __get_mode(void) {
  return (__get_CPSR() & 0x1FU);
}

/** \brief  Set Mode
    \param [in]    mode  Mode value to set */
__STATIC_INLINE __ASM void __set_mode(uint32_t mode) {
  MOV  r1, lr
  MSR  CPSR_C, r0
  BX   r1
}
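/*
  Usage sketch (illustrative, not from the original header): checking the current
  processor mode, e.g. to detect whether code runs in IRQ mode. The mode encodings
  (0x12 = IRQ, 0x13 = SVC, 0x1F = SYS) follow the CPSR M-field definition; the
  function name is hypothetical.

    uint32_t in_irq_mode(void)
    {
      return (__get_mode() == 0x12U) ? 1U : 0U;
    }
 */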
/** \brief  Get Stack Pointer
    \return Stack Pointer */
__STATIC_INLINE __ASM uint32_t __get_SP(void)
{
  MOV  r0, sp
  BX   lr
}

/** \brief  Set Stack Pointer
    \param [in]    stack  Stack Pointer value to set */
__STATIC_INLINE __ASM void __set_SP(uint32_t stack)
{
  MOV  sp, r0
  BX   lr
}

/** \brief  Get USR/SYS Stack Pointer
    \return USR/SYS Stack Pointer */
__STATIC_INLINE __ASM uint32_t __get_SP_usr(void)
{
  ARM
  PRESERVE8
  MRS     R1, CPSR
  CPS     #0x1F       ;no effect in USR mode
  MOV     R0, SP
  MSR     CPSR_c, R1  ;no effect in USR mode
  ISB
  BX      LR
}

/** \brief  Set USR/SYS Stack Pointer
    \param [in]    topOfProcStack  USR/SYS Stack Pointer value to set */
__STATIC_INLINE __ASM void __set_SP_usr(uint32_t topOfProcStack)
{
  ARM
  PRESERVE8
  MRS     R1, CPSR
  CPS     #0x1F       ;no effect in USR mode
  MOV     SP, R0
  MSR     CPSR_c, R1  ;no effect in USR mode
  ISB
  BX      LR
}
/** \brief  Get FPEXC (Floating Point Exception Control Register)
    \return Floating Point Exception Control Register value */
__STATIC_INLINE uint32_t __get_FPEXC(void)
{
#if (__FPU_PRESENT == 1)
  register uint32_t __regfpexc         __ASM("fpexc");
  return(__regfpexc);
#else
  return(0);
#endif
}

/** \brief  Set FPEXC (Floating Point Exception Control Register)
    \param [in]    fpexc  Floating Point Exception Control value to set */
__STATIC_INLINE void __set_FPEXC(uint32_t fpexc)
{
#if (__FPU_PRESENT == 1)
  register uint32_t __regfpexc         __ASM("fpexc");
  __regfpexc = (fpexc);
#endif
}
/*
 * Include common core functions to access Coprocessor 15 registers
 */

#define __get_CP(cp, op1, Rt, CRn, CRm, op2) do { register uint32_t tmp __ASM("cp" # cp ":" # op1 ":c" # CRn ":c" # CRm ":" # op2); (Rt) = tmp; } while(0)
#define __set_CP(cp, op1, Rt, CRn, CRm, op2) do { register uint32_t tmp __ASM("cp" # cp ":" # op1 ":c" # CRn ":c" # CRm ":" # op2); tmp = (Rt); } while(0)

#include "cmsis_cp15.h"
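/*
  Usage sketch (illustrative, not from the original header): reading the SCTLR
  (System Control Register, CP15 c1/c0/0) through the generic accessor. The variable
  name is hypothetical; cmsis_cp15.h provides named wrappers built the same way.

    uint32_t sctlr;
    __get_CP(15, 0, sctlr, 1, 0, 0);   // MRC p15, 0, <Rt>, c1, c0, 0
 */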
/** \brief  Clean and Invalidate the entire data or unified cache
 *  \param [in] op 0 - invalidate, 1 - clean, otherwise - invalidate and clean
 */
__STATIC_INLINE __ASM void __L1C_CleanInvalidateCache(uint32_t op) {
        ARM

        PUSH    {R4-R11}

        MRC     p15, 1, R6, c0, c0, 1      // Read CLIDR
        ANDS    R3, R6, #0x07000000        // Extract coherency level
        MOV     R3, R3, LSR #23            // Total cache levels << 1
        BEQ     Finished                   // If 0, no need to clean

        MOV     R10, #0                    // R10 holds current cache level << 1
Loop1   ADD     R2, R10, R10, LSR #1       // R2 holds cache "Set" position
        MOV     R1, R6, LSR R2             // Bottom 3 bits are the Cache-type for this level
        AND     R1, R1, #7                 // Isolate those lower 3 bits
        CMP     R1, #2
        BLT     Skip                       // No cache or only instruction cache at this level

        MCR     p15, 2, R10, c0, c0, 0     // Write the Cache Size selection register
        ISB                                // ISB to sync the change to the CacheSizeID reg
        MRC     p15, 1, R1, c0, c0, 0      // Reads current Cache Size ID register
        AND     R2, R1, #7                 // Extract the line length field
        ADD     R2, R2, #4                 // Add 4 for the line length offset (log2 16 bytes)
        LDR     R4, =0x3FF
        ANDS    R4, R4, R1, LSR #3         // R4 is the max number on the way size (right aligned)
        CLZ     R5, R4                     // R5 is the bit position of the way size increment
        LDR     R7, =0x7FFF
        ANDS    R7, R7, R1, LSR #13        // R7 is the max number of the index size (right aligned)

Loop2   MOV     R9, R4                     // R9 working copy of the max way size (right aligned)

Loop3   ORR     R11, R10, R9, LSL R5       // Factor in the Way number and cache number into R11
        ORR     R11, R11, R7, LSL R2       // Factor in the Set number
        CMP     R0, #0
        BNE     Dccsw
        MCR     p15, 0, R11, c7, c6, 2     // DCISW. Invalidate by Set/Way
        B       cont
Dccsw   CMP     R0, #1
        BNE     Dccisw
        MCR     p15, 0, R11, c7, c10, 2    // DCCSW. Clean by Set/Way
        B       cont
Dccisw  MCR     p15, 0, R11, c7, c14, 2    // DCCISW. Clean and Invalidate by Set/Way
cont    SUBS    R9, R9, #1                 // Decrement the Way number
        BGE     Loop3
        SUBS    R7, R7, #1                 // Decrement the Set number
        BGE     Loop2
Skip    ADD     R10, R10, #2               // Increment the cache number
        CMP     R3, R10
        BGT     Loop1

Finished
        DSB
        POP    {R4-R11}
        BX     lr
}
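/*
  Usage sketch (illustrative, not from the original header): the op argument selects
  the set/way maintenance operation applied to the entire data/unified cache.

    __L1C_CleanInvalidateCache(0U);   // invalidate only (e.g. before enabling the D-cache)
    __L1C_CleanInvalidateCache(1U);   // clean only (write dirty lines back to memory)
    __L1C_CleanInvalidateCache(2U);   // clean, then invalidate (e.g. before disabling the D-cache)
 */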
/** \brief  Enable Floating Point Unit

  Critical section, called from undef handler, so systick is disabled
 */
__STATIC_INLINE __ASM void __FPU_Enable(void) {
        ARM

        //Permit access to VFP/NEON, registers by modifying CPACR
        MRC     p15,0,R1,c1,c0,2
        ORR     R1,R1,#0x00F00000
        MCR     p15,0,R1,c1,c0,2

        //Ensure that subsequent instructions occur in the context of VFP/NEON access permitted
        ISB

        //Enable VFP/NEON
        VMRS    R1,FPEXC
        ORR     R1,R1,#0x40000000
        VMSR    FPEXC,R1

        //Initialise VFP/NEON registers to 0
        MOV     R2,#0

        IF {TARGET_FEATURE_EXTENSION_REGISTER_COUNT} >= 16
        //Initialise D16 registers to 0
        VMOV    D0, R2,R2
        VMOV    D1, R2,R2
        VMOV    D2, R2,R2
        VMOV    D3, R2,R2
        VMOV    D4, R2,R2
        VMOV    D5, R2,R2
        VMOV    D6, R2,R2
        VMOV    D7, R2,R2
        VMOV    D8, R2,R2
        VMOV    D9, R2,R2
        VMOV    D10,R2,R2
        VMOV    D11,R2,R2
        VMOV    D12,R2,R2
        VMOV    D13,R2,R2
        VMOV    D14,R2,R2
        VMOV    D15,R2,R2
        ENDIF

        IF {TARGET_FEATURE_EXTENSION_REGISTER_COUNT} == 32
        //Initialise D32 registers to 0
        VMOV    D16,R2,R2
        VMOV    D17,R2,R2
        VMOV    D18,R2,R2
        VMOV    D19,R2,R2
        VMOV    D20,R2,R2
        VMOV    D21,R2,R2
        VMOV    D22,R2,R2
        VMOV    D23,R2,R2
        VMOV    D24,R2,R2
        VMOV    D25,R2,R2
        VMOV    D26,R2,R2
        VMOV    D27,R2,R2
        VMOV    D28,R2,R2
        VMOV    D29,R2,R2
        VMOV    D30,R2,R2
        VMOV    D31,R2,R2
        ENDIF

        //Initialise FPSCR to a known state
        VMRS    R2,FPSCR
        LDR     R3,=0x00086060 //Mask off all bits that do not have to be preserved. Non-preserved bits can/should be zero.
        AND     R2,R2,R3
        VMSR    FPSCR,R2

        BX      LR
}
#endif /* __CMSIS_ARMCC_H */