/**************************************************************************//**
 * @brief    CMSIS compiler ICCARM (IAR Compiler for Arm) header file
 ******************************************************************************/
//------------------------------------------------------------------------------
//
// Copyright (c) 2017-2018 IAR Systems
// Copyright (c) 2018-2019 Arm Limited
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//     http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
//
//------------------------------------------------------------------------------
#ifndef __CMSIS_ICCARM_H__
#define __CMSIS_ICCARM_H__

#ifndef __ICCARM__
  #error This file should only be compiled by ICCARM
#endif

#pragma system_include

#define __IAR_FT _Pragma("inline=forced") __intrinsic
#if (__VER__ >= 8000000)
  #define __ICCARM_V8 1
#else
  #define __ICCARM_V8 0
#endif

#pragma language=extended

#ifndef __ALIGNED
  #if __ICCARM_V8
    #define __ALIGNED(x) __attribute__((aligned(x)))
  #elif (__VER__ >= 7080000)
    /* Needs IAR language extensions */
    #define __ALIGNED(x) __attribute__((aligned(x)))
  #else
    #warning No compiler specific solution for __ALIGNED. __ALIGNED is ignored.
    #define __ALIGNED(x)
  #endif
#endif
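
/* Usage sketch (illustrative only, not part of this header): __ALIGNED is
 * typically used for buffers that hardware requires on a fixed boundary,
 * e.g. a DMA buffer aligned to a 32-byte cache line:
 *
 *   static uint8_t dma_buf[64] __ALIGNED(32);
 */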
/* Define compiler macros for CPU architecture, used in CMSIS 5. */
#ifdef __ARM_ARCH_7A__
  /* Macro already defined */
#else
  #if defined(__ARM7A__)
    #define __ARM_ARCH_7A__ 1
  #endif
#endif

#ifndef __ASM
  #define __ASM __asm
#endif

#ifndef __COMPILER_BARRIER
  #define __COMPILER_BARRIER() __ASM volatile("":::"memory")
#endif

#ifndef __INLINE
  #define __INLINE inline
#endif
#ifndef __NO_RETURN
  #if __ICCARM_V8
    #define __NO_RETURN __attribute__((__noreturn__))
  #else
    #define __NO_RETURN _Pragma("object_attribute=__noreturn")
  #endif
#endif

#ifndef __PACKED
  #if __ICCARM_V8
    #define __PACKED __attribute__((packed, aligned(1)))
  #else
    /* Needs IAR language extensions */
    #define __PACKED __packed
  #endif
#endif

#ifndef __PACKED_STRUCT
  #if __ICCARM_V8
    #define __PACKED_STRUCT struct __attribute__((packed, aligned(1)))
  #else
    /* Needs IAR language extensions */
    #define __PACKED_STRUCT __packed struct
  #endif
#endif

#ifndef __PACKED_UNION
  #if __ICCARM_V8
    #define __PACKED_UNION union __attribute__((packed, aligned(1)))
  #else
    /* Needs IAR language extensions */
    #define __PACKED_UNION __packed union
  #endif
#endif
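
/* Usage sketch (illustrative only): __PACKED_STRUCT removes padding so a
 * struct maps onto a wire format byte-for-byte; here sizeof drops from 8
 * to 5 because the uint32_t member is no longer padded to a 4-byte offset:
 *
 *   __PACKED_STRUCT msg { uint8_t tag; uint32_t value; };
 *   // sizeof(struct msg) == 5
 */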
#ifndef __RESTRICT
  #if __ICCARM_V8
    #define __RESTRICT __restrict
  #else
    /* Needs IAR language extensions */
    #define __RESTRICT restrict
  #endif
#endif

#ifndef __STATIC_INLINE
  #define __STATIC_INLINE static inline
#endif

#ifndef __FORCEINLINE
  #define __FORCEINLINE _Pragma("inline=forced")
#endif

#ifndef __STATIC_FORCEINLINE
  #define __STATIC_FORCEINLINE __FORCEINLINE __STATIC_INLINE
#endif

#ifndef CMSIS_DEPRECATED
  #define CMSIS_DEPRECATED __attribute__((deprecated))
#endif
#ifndef __UNALIGNED_UINT16_READ
  #pragma language=save
  #pragma language=extended
  __IAR_FT uint16_t __iar_uint16_read(void const *ptr)
  {
    return *(__packed uint16_t*)(ptr);
  }
  #pragma language=restore
  #define __UNALIGNED_UINT16_READ(PTR) __iar_uint16_read(PTR)
#endif

#ifndef __UNALIGNED_UINT16_WRITE
  #pragma language=save
  #pragma language=extended
  __IAR_FT void __iar_uint16_write(void const *ptr, uint16_t val)
  {
    *(__packed uint16_t*)(ptr) = val;
  }
  #pragma language=restore
  #define __UNALIGNED_UINT16_WRITE(PTR,VAL) __iar_uint16_write(PTR,VAL)
#endif

#ifndef __UNALIGNED_UINT32_READ
  #pragma language=save
  #pragma language=extended
  __IAR_FT uint32_t __iar_uint32_read(void const *ptr)
  {
    return *(__packed uint32_t*)(ptr);
  }
  #pragma language=restore
  #define __UNALIGNED_UINT32_READ(PTR) __iar_uint32_read(PTR)
#endif

#ifndef __UNALIGNED_UINT32_WRITE
  #pragma language=save
  #pragma language=extended
  __IAR_FT void __iar_uint32_write(void const *ptr, uint32_t val)
  {
    *(__packed uint32_t*)(ptr) = val;
  }
  #pragma language=restore
  #define __UNALIGNED_UINT32_WRITE(PTR,VAL) __iar_uint32_write(PTR,VAL)
#endif

#ifndef __UNALIGNED_UINT32 /* deprecated */
  #pragma language=save
  #pragma language=extended
  __packed struct __iar_u32 { uint32_t v; };
  #pragma language=restore
  #define __UNALIGNED_UINT32(PTR) (((struct __iar_u32 *)(PTR))->v)
#endif
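
/* Usage sketch (illustrative only): the helpers above read and write values
 * at addresses that need not be naturally aligned, e.g. when parsing a byte
 * stream (result shown for a little-endian core):
 *
 *   uint8_t pkt[6] = { 0xAA, 0x12, 0x34, 0x56, 0x78, 0xBB };
 *   uint32_t v = __UNALIGNED_UINT32_READ(&pkt[1]);  // v == 0x78563412
 *   __UNALIGNED_UINT32_WRITE(&pkt[1], v + 1u);
 */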
#ifndef __USED
  #if __ICCARM_V8
    #define __USED __attribute__((used))
  #else
    #define __USED _Pragma("__root")
  #endif
#endif

#ifndef __WEAK
  #if __ICCARM_V8
    #define __WEAK __attribute__((weak))
  #else
    #define __WEAK _Pragma("__weak")
  #endif
#endif

#ifndef __ICCARM_INTRINSICS_VERSION__
  #define __ICCARM_INTRINSICS_VERSION__ 0
#endif
#if __ICCARM_INTRINSICS_VERSION__ == 2

  #include "iccarm_builtin.h"
  #define __enable_irq         __iar_builtin_enable_interrupt
  #define __disable_irq        __iar_builtin_disable_interrupt
  #define __enable_fault_irq   __iar_builtin_enable_fiq
  #define __disable_fault_irq  __iar_builtin_disable_fiq
  #define __arm_rsr            __iar_builtin_rsr
  #define __arm_wsr            __iar_builtin_wsr

  #if (__FPU_PRESENT == 1)
    #define __get_FPSCR()      (__arm_rsr("FPSCR"))
  #else
    #define __get_FPSCR()      ( 0 )
  #endif
  #define __set_FPSCR(VALUE)   (__arm_wsr("FPSCR", VALUE))

  #define __get_CPSR()         (__arm_rsr("CPSR"))
  #define __get_mode()         (__get_CPSR() & 0x1FU)

  #define __set_CPSR(VALUE)    (__arm_wsr("CPSR", (VALUE)))
  #define __set_mode(VALUE)    (__arm_wsr("CPSR_c", (VALUE)))

  #define __get_FPEXC()        (__arm_rsr("FPEXC"))
  #define __set_FPEXC(VALUE)   (__arm_wsr("FPEXC", VALUE))
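
  /* Usage sketch (illustrative only, not part of this header): __get_mode()
   * extracts the CPSR mode field (bits [4:0]); for example, 0x13 is
   * Supervisor (SVC) mode:
   *
   *   if (__get_mode() == 0x13U) {
   *     // running in SVC mode
   *   }
   */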
  #define __get_CP(cp, op1, RT, CRn, CRm, op2) \
    ((RT) = __arm_rsr("p" # cp ":" # op1 ":c" # CRn ":c" # CRm ":" # op2))

  #define __set_CP(cp, op1, RT, CRn, CRm, op2) \
    (__arm_wsr("p" # cp ":" # op1 ":c" # CRn ":c" # CRm ":" # op2, (RT)))

  #define __get_CP64(cp, op1, Rt, CRm) \
    __ASM volatile("MRRC p" # cp ", " # op1 ", %Q0, %R0, c" # CRm : "=r" (Rt) : : "memory" )

  #define __set_CP64(cp, op1, Rt, CRm) \
    __ASM volatile("MCRR p" # cp ", " # op1 ", %Q0, %R0, c" # CRm : : "r" (Rt) : "memory" )
  #include "cmsis_cp15.h"
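
  /* Usage sketch (illustrative only): __get_CP/__set_CP wrap single-register
   * coprocessor accesses; cmsis_cp15.h builds its named CP15 accessors on
   * top of them. For example, reading SCTLR (MRC p15, 0, <Rt>, c1, c0, 0):
   *
   *   uint32_t sctlr;
   *   __get_CP(15, 0, sctlr, 1, 0, 0);
   */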
  #define __NOP     __iar_builtin_no_operation

  #define __CLZ     __iar_builtin_CLZ
  #define __CLREX   __iar_builtin_CLREX

  #define __DMB     __iar_builtin_DMB
  #define __DSB     __iar_builtin_DSB
  #define __ISB     __iar_builtin_ISB

  #define __LDREXB  __iar_builtin_LDREXB
  #define __LDREXH  __iar_builtin_LDREXH
  #define __LDREXW  __iar_builtin_LDREX

  #define __RBIT    __iar_builtin_RBIT
  #define __REV     __iar_builtin_REV
  #define __REV16   __iar_builtin_REV16
  __IAR_FT int16_t __REVSH(int16_t val)
  {
    return (int16_t) __iar_builtin_REVSH(val);
  }
  #define __ROR     __iar_builtin_ROR
  #define __RRX     __iar_builtin_RRX

  #define __SEV     __iar_builtin_SEV

  #define __SSAT    __iar_builtin_SSAT

  #define __STREXB  __iar_builtin_STREXB
  #define __STREXH  __iar_builtin_STREXH
  #define __STREXW  __iar_builtin_STREX

  #define __USAT    __iar_builtin_USAT

  #define __WFE     __iar_builtin_WFE
  #define __WFI     __iar_builtin_WFI

  #define __SADD8   __iar_builtin_SADD8
  #define __QADD8   __iar_builtin_QADD8
  #define __SHADD8  __iar_builtin_SHADD8
  #define __UADD8   __iar_builtin_UADD8
  #define __UQADD8  __iar_builtin_UQADD8
  #define __UHADD8  __iar_builtin_UHADD8
  #define __SSUB8   __iar_builtin_SSUB8
  #define __QSUB8   __iar_builtin_QSUB8
  #define __SHSUB8  __iar_builtin_SHSUB8
  #define __USUB8   __iar_builtin_USUB8
  #define __UQSUB8  __iar_builtin_UQSUB8
  #define __UHSUB8  __iar_builtin_UHSUB8
  #define __SADD16  __iar_builtin_SADD16
  #define __QADD16  __iar_builtin_QADD16
  #define __SHADD16 __iar_builtin_SHADD16
  #define __UADD16  __iar_builtin_UADD16
  #define __UQADD16 __iar_builtin_UQADD16
  #define __UHADD16 __iar_builtin_UHADD16
  #define __SSUB16  __iar_builtin_SSUB16
  #define __QSUB16  __iar_builtin_QSUB16
  #define __SHSUB16 __iar_builtin_SHSUB16
  #define __USUB16  __iar_builtin_USUB16
  #define __UQSUB16 __iar_builtin_UQSUB16
  #define __UHSUB16 __iar_builtin_UHSUB16
  #define __SASX    __iar_builtin_SASX
  #define __QASX    __iar_builtin_QASX
  #define __SHASX   __iar_builtin_SHASX
  #define __UASX    __iar_builtin_UASX
  #define __UQASX   __iar_builtin_UQASX
  #define __UHASX   __iar_builtin_UHASX
  #define __SSAX    __iar_builtin_SSAX
  #define __QSAX    __iar_builtin_QSAX
  #define __SHSAX   __iar_builtin_SHSAX
  #define __USAX    __iar_builtin_USAX
  #define __UQSAX   __iar_builtin_UQSAX
  #define __UHSAX   __iar_builtin_UHSAX
  #define __USAD8   __iar_builtin_USAD8
  #define __USADA8  __iar_builtin_USADA8
  #define __SSAT16  __iar_builtin_SSAT16
  #define __USAT16  __iar_builtin_USAT16
  #define __UXTB16  __iar_builtin_UXTB16
  #define __UXTAB16 __iar_builtin_UXTAB16
  #define __SXTB16  __iar_builtin_SXTB16
  #define __SXTAB16 __iar_builtin_SXTAB16
  #define __SMUAD   __iar_builtin_SMUAD
  #define __SMUADX  __iar_builtin_SMUADX
  #define __SMMLA   __iar_builtin_SMMLA
  #define __SMLAD   __iar_builtin_SMLAD
  #define __SMLADX  __iar_builtin_SMLADX
  #define __SMLALD  __iar_builtin_SMLALD
  #define __SMLALDX __iar_builtin_SMLALDX
  #define __SMUSD   __iar_builtin_SMUSD
  #define __SMUSDX  __iar_builtin_SMUSDX
  #define __SMLSD   __iar_builtin_SMLSD
  #define __SMLSDX  __iar_builtin_SMLSDX
  #define __SMLSLD  __iar_builtin_SMLSLD
  #define __SMLSLDX __iar_builtin_SMLSLDX
  #define __SEL     __iar_builtin_SEL
  #define __QADD    __iar_builtin_QADD
  #define __QSUB    __iar_builtin_QSUB
  #define __PKHBT   __iar_builtin_PKHBT
  #define __PKHTB   __iar_builtin_PKHTB
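
  /* Usage sketch (illustrative only): the SIMD intrinsics treat a uint32_t
   * as four byte lanes (or two halfword lanes). __UQADD8, for instance,
   * performs four independent unsigned saturating byte additions:
   *
   *   uint32_t a = 0x80FF0102u;
   *   uint32_t b = 0x80010304u;
   *   uint32_t s = __UQADD8(a, b);  // 0xFFFF0406: the two high lanes saturate at 0xFF
   */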
#else /* __ICCARM_INTRINSICS_VERSION__ == 2 */

  #if !__FPU_PRESENT
    /* Map the intrinsics.h declaration of __get_FPSCR out of the way when
       no FPU is configured; a stub is provided after the include. */
    #define __get_FPSCR __cmsis_iar_get_FPSR_not_active
  #endif

  #ifdef __INTRINSICS_INCLUDED
    #error intrinsics.h is already included!
  #endif

  #include <intrinsics.h>

  #if !__FPU_PRESENT
    #define __get_FPSCR() (0)
  #endif

  #pragma diag_suppress=Pe940
  #pragma diag_suppress=Pe177
  #define __enable_irq         __enable_interrupt
  #define __disable_irq        __disable_interrupt
  #define __enable_fault_irq   __enable_fiq
  #define __disable_fault_irq  __disable_fiq
  #define __NOP                __no_operation

  #define __get_xPSR           __get_PSR
  __IAR_FT void __set_mode(uint32_t mode)
  {
    __ASM volatile("MSR cpsr_c, %0" : : "r" (mode) : "memory");
  }

  __IAR_FT uint32_t __LDREXW(uint32_t volatile *ptr)
  {
    return __LDREX((unsigned long *)ptr);
  }

  __IAR_FT uint32_t __STREXW(uint32_t value, uint32_t volatile *ptr)
  {
    return __STREX(value, (unsigned long *)ptr);
  }

  __IAR_FT uint32_t __RRX(uint32_t value)
  {
    uint32_t result;
    __ASM("RRX %0, %1" : "=r"(result) : "r" (value) : "cc");
    return(result);
  }

  __IAR_FT uint32_t __ROR(uint32_t op1, uint32_t op2)
  {
    /* Normalise the rotate count and avoid the undefined 32-bit shift when op2 == 0 */
    op2 %= 32U;
    if (op2 == 0U)
    {
      return op1;
    }
    return (op1 >> op2) | (op1 << (32U - op2));
  }
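
  /* Worked example (illustrative only): rotating right by one moves bit 0
   * into bit 31:
   *
   *   uint32_t r = __ROR(0x80000001u, 1u);  // r == 0xC0000000
   */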
  __IAR_FT uint32_t __get_FPEXC(void)
  {
  #if (__FPU_PRESENT == 1)
    uint32_t result;
    __ASM volatile("VMRS %0, fpexc" : "=r" (result) : : "memory");
    return(result);
  #else
    return(0);
  #endif
  }

  __IAR_FT void __set_FPEXC(uint32_t fpexc)
  {
  #if (__FPU_PRESENT == 1)
    __ASM volatile ("VMSR fpexc, %0" : : "r" (fpexc) : "memory");
  #endif
  }
  #define __get_CP(cp, op1, Rt, CRn, CRm, op2) \
    __ASM volatile("MRC p" # cp ", " # op1 ", %0, c" # CRn ", c" # CRm ", " # op2 : "=r" (Rt) : : "memory" )
  #define __set_CP(cp, op1, Rt, CRn, CRm, op2) \
    __ASM volatile("MCR p" # cp ", " # op1 ", %0, c" # CRn ", c" # CRm ", " # op2 : : "r" (Rt) : "memory" )
  #define __get_CP64(cp, op1, Rt, CRm) \
    __ASM volatile("MRRC p" # cp ", " # op1 ", %Q0, %R0, c" # CRm : "=r" (Rt) : : "memory" )
  #define __set_CP64(cp, op1, Rt, CRm) \
    __ASM volatile("MCRR p" # cp ", " # op1 ", %Q0, %R0, c" # CRm : : "r" (Rt) : "memory" )

  #include "cmsis_cp15.h"
#endif /* __ICCARM_INTRINSICS_VERSION__ == 2 */

#define __BKPT(value) __asm volatile ("BKPT %0" : : "i"(value))
__IAR_FT uint32_t __get_SP_usr(void)
{
  uint32_t cpsr;
  uint32_t result;
  __ASM volatile(
    "MRS     %0, cpsr   \n"
    "CPS     #0x1F      \n" // no effect in USR mode
    "MOV     %1, sp     \n"
    "MSR     cpsr_c, %2 \n" // no effect in USR mode
    "ISB" : "=r"(cpsr), "=r"(result) : "r"(cpsr) : "memory"
  );
  return result;
}

__IAR_FT void __set_SP_usr(uint32_t topOfProcStack)
{
  uint32_t cpsr;
  __ASM volatile(
    "MRS     %0, cpsr   \n"
    "CPS     #0x1F      \n" // no effect in USR mode
    "MOV     sp, %1     \n"
    "MSR     cpsr_c, %2 \n" // no effect in USR mode
    "ISB" : "=r"(cpsr) : "r" (topOfProcStack), "r"(cpsr) : "memory"
  );
}
#define __get_mode() (__get_CPSR() & 0x1FU)
__STATIC_INLINE
void __FPU_Enable(void)
{
  __ASM volatile(
    //Permit access to VFP/NEON, registers by modifying CPACR
    "        MRC     p15,0,R1,c1,c0,2  \n"
    "        ORR     R1,R1,#0x00F00000 \n"
    "        MCR     p15,0,R1,c1,c0,2  \n"

    //Ensure that subsequent instructions occur in the context of VFP/NEON access permitted
    "        ISB                       \n"

    //Enable VFP
    "        VMRS    R1,FPEXC          \n"
    "        ORR     R1,R1,#0x40000000 \n"
    "        VMSR    FPEXC,R1          \n"

    //Initialise VFP/NEON registers to 0
    "        MOV     R2,#0             \n"

    //Initialise D16 registers to 0
    "        VMOV    D0, R2,R2         \n"
    "        VMOV    D1, R2,R2         \n"
    "        VMOV    D2, R2,R2         \n"
    "        VMOV    D3, R2,R2         \n"
    "        VMOV    D4, R2,R2         \n"
    "        VMOV    D5, R2,R2         \n"
    "        VMOV    D6, R2,R2         \n"
    "        VMOV    D7, R2,R2         \n"
    "        VMOV    D8, R2,R2         \n"
    "        VMOV    D9, R2,R2         \n"
    "        VMOV    D10,R2,R2         \n"
    "        VMOV    D11,R2,R2         \n"
    "        VMOV    D12,R2,R2         \n"
    "        VMOV    D13,R2,R2         \n"
    "        VMOV    D14,R2,R2         \n"
    "        VMOV    D15,R2,R2         \n"

#ifdef __ARM_ADVANCED_SIMD__
    //Initialise D32 registers to 0
    "        VMOV    D16,R2,R2         \n"
    "        VMOV    D17,R2,R2         \n"
    "        VMOV    D18,R2,R2         \n"
    "        VMOV    D19,R2,R2         \n"
    "        VMOV    D20,R2,R2         \n"
    "        VMOV    D21,R2,R2         \n"
    "        VMOV    D22,R2,R2         \n"
    "        VMOV    D23,R2,R2         \n"
    "        VMOV    D24,R2,R2         \n"
    "        VMOV    D25,R2,R2         \n"
    "        VMOV    D26,R2,R2         \n"
    "        VMOV    D27,R2,R2         \n"
    "        VMOV    D28,R2,R2         \n"
    "        VMOV    D29,R2,R2         \n"
    "        VMOV    D30,R2,R2         \n"
    "        VMOV    D31,R2,R2         \n"
#endif

    //Initialise FPSCR to a known state
    "        VMRS    R1,FPSCR          \n"
    "        MOV32   R2,#0x00086060    \n" //Mask off all bits that do not have to be preserved. Non-preserved bits can/should be zero.
    "        AND     R1,R1,R2          \n"
    "        VMSR    FPSCR,R1          \n"
    : : : "cc", "r1", "r2"
  );
}
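
/* Usage sketch (illustrative only, assuming an FPU-equipped core and a
 * CMSIS-style startup): call __FPU_Enable() once, early in SystemInit(),
 * before any code touches VFP/NEON registers:
 *
 *   void SystemInit(void)
 *   {
 *     __FPU_Enable();
 *     // ... remaining system initialisation ...
 *   }
 */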
#pragma diag_default=Pe940
#pragma diag_default=Pe177

#endif /* __CMSIS_ICCARM_H__ */