#pragma c99 on
#endif
-#if defined(__STDC_VERSION__) && (__STDC_VERSION__ >= 199901L)
- #define COMPILER_C99 1
-#else
- #define COMPILER_C99 0
+#if defined(__STDC_VERSION__)
+ #if (__STDC_VERSION__ == 199409L) // IAR reports C94
+ #define COMPILER_C99 1 // Not strictly true (IAR supports only partial C99), but avoids miscompiling C99-only paths
+ #elif (__STDC_VERSION__ >= 199901L) // GCC
+ #define COMPILER_C99 1
+ #else
+ #define COMPILER_C99 0
+ #endif
+#else
+ #define COMPILER_C99 0
#endif
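+
+/*
+ * Illustrative only (macro name hypothetical): COMPILER_C99 gates
+ * C99-only constructs elsewhere in the tree, e.g.
+ *
+ *	#if COMPILER_C99
+ *		#define TRACEMSG(...) kprintf(__VA_ARGS__)
+ *	#endif
+ */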
#pragma language=extended
- #if CPU_ARM
+ /* IAR iccarm specific functions */
+ #include <intrinsics.h>
+ #pragma diag_suppress=Pe940 /* "missing return statement at end of non-void function" */
+ #pragma inline = forced
+
+ #define MEMORY_BARRIER asm("") /* Best effort: IAR inline asm has no clobber list */
+
+ #if CPU_ARM || CPU_CM3
#define COMPILER_VARIADIC_MACROS 1
#define INTERRUPT(x) __irq __arm void x (void)
#define INLINE static inline
+ #define NAKED /* Expands to nothing under IAR */
/* Include some standard C89/C99 stuff */
#include <stddef.h>
#endif
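+
+/*
+ * Illustrative only (handler name hypothetical): with the macros above,
+ * an interrupt handler is declared as
+ *
+ *	INTERRUPT(timer_irq_handler)
+ *	{
+ *		// acknowledge and service the interrupt
+ *	}
+ *
+ * which expands to "__irq __arm void timer_irq_handler (void)".
+ */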
/**
- * Force compiler to realod context variable.
+ * Force compiler to reload context variable.
*/
#define MEMORY_BARRIER asm volatile ("" : : : "memory")
 * a refetch from memory. This also forbids the compiler from reordering
 * successive instances of ACCESS_SAFE().
*/
+#ifdef __ICCARM__
+#define ACCESS_SAFE(x) x /* IAR has no typeof(): degrades to a plain access */
+#else
#define ACCESS_SAFE(x) (*(volatile typeof(x) *)&(x))
+#endif
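+
+/*
+ * Usage sketch (variable name hypothetical): force a fresh read of a
+ * flag shared with an ISR. On GCC the volatile cast defeats register
+ * caching; on IAR this degrades to a plain access.
+ *
+ *	extern int event_pending;
+ *	int snapshot = ACCESS_SAFE(event_pending);
+ */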
#endif /* BERTOS_COMPILER_H */
#define CPU_RAM_START 0x20000000
#endif
- #if defined(__ARMEB__)
- #define CPU_BYTE_ORDER CPU_BIG_ENDIAN
- #elif defined(__ARMEL__)
- #define CPU_BYTE_ORDER CPU_LITTLE_ENDIAN
- #else
- #error Unable to detect Cortex-M3 endianess!
- #endif
+ #if defined(__ICCARM__)
+ #if ((defined __LITTLE_ENDIAN__) && (__LITTLE_ENDIAN__ == 0))
+ #define CPU_BYTE_ORDER CPU_BIG_ENDIAN
+ #elif ((defined __LITTLE_ENDIAN__) && (__LITTLE_ENDIAN__ == 1))
+ #define CPU_BYTE_ORDER CPU_LITTLE_ENDIAN
+ #else
+ #error Unable to detect Cortex-M3 endianness!
+ #endif
+
+ #define NOP __no_operation()
+ #else
+ #if defined(__ARMEB__) // GCC
+ #define CPU_BYTE_ORDER CPU_BIG_ENDIAN
+ #elif defined(__ARMEL__) // GCC
+ #define CPU_BYTE_ORDER CPU_LITTLE_ENDIAN
+ #else
+ #error Unable to detect Cortex-M3 endianness!
+ #endif
#define NOP asm volatile ("nop")
#define PAUSE asm volatile ("wfi" ::: "memory")
* Function attribute to move it into ram memory.
*/
#define RAM_FUNC __attribute__((section(".ramfunc")))
+ #endif
#elif CPU_PPC
#ifndef PAUSE
		/// Generic PAUSE implementation: do/while(0) keeps it safe as a single statement.
- #define PAUSE {NOP; MEMORY_BARRIER;}
+ #define PAUSE do {NOP; MEMORY_BARRIER;} while (0)
#endif
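+
+/*
+ * Usage sketch (flag name hypothetical): low-power wait for an ISR to
+ * signal completion; on Cortex-M3 each PAUSE executes a "wfi".
+ *
+ *	while (!ACCESS_SAFE(done_flag))
+ *		PAUSE;
+ */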
#endif /* CPU_ATTR_H */
#define CPU_DETECT_H
#if defined(__ARM_ARCH_4T__) /* GCC */ \
- || defined(__ARM4TM__) /* IAR: defined for all cores >= 4tm */
+ || (defined(__ICCARM__) && (__CORE__ == __ARM4TM__)) /* IAR: __CORE__ identifies a 4TM (ARM7TDMI-class) core */
#define CPU_ARM 1
#define CPU_ID arm
#define CPU_CORE_NAME "ARM7TDMI"
- // AT91SAM7S core family
+ // AT91SAM7S product series
#if defined(__ARM_AT91SAM7S32__)
#define CPU_ARM_AT91 1
#define CPU_ARM_AT91SAM7S32 1
#define CPU_ARM_AT91SAM7S512 0
#endif
- // AT91SAM7X core family
+ // AT91SAM7X product series
#if defined(__ARM_AT91SAM7X128__)
#define CPU_ARM_AT91 1
#define CPU_ARM_SAM7X 1
#define CPU_ARM_LPC2378 0
#endif
-#if defined(__ARM_ARCH_7M__)
+#if defined(__ARM_ARCH_7M__) /* GCC */ \
+ || (defined(__ICCARM__) && (__CORE__ == __ARM7M__)) /* IAR: ARMv7-M cores */
/* Cortex-M3 */
#define CPU_CM3 1
#define CPU_ID cm3
#define CPU_CM3_STM32F103RE 0
#endif
-
+ // AT91SAM3N product series
#if defined (__ARM_SAM3N4__)
#define CPU_CM3_SAM3 1
#define CPU_CM3_SAM3N 1
#define CPU_CM3_SAM3N4 0
#endif
+ // AT91SAM3S product series
#if defined (__ARM_SAM3S4__)
#define CPU_CM3_SAM3 1
#define CPU_CM3_SAM3S 1
#define CPU_CM3_SAM3S4 0
#endif
+ // AT91SAM3U product series
#if defined (__ARM_SAM3U4__)
#define CPU_CM3_SAM3 1
#define CPU_CM3_SAM3U 1
#define CPU_CM3_SAM3U4 0
#endif
+ // AT91SAM3X product series
#if defined (__ARM_SAM3X8__)
#define CPU_CM3_SAM3 1
#define CPU_CM3_SAM3X 1
#define CPU_CM3_STM32 0
#define CPU_CM3_STM32F103RB 0
#define CPU_CM3_STM32F101C4 0
- #define CPU_CM3_STM32F103RE 0
+ #define CPU_CM3_STM32F103RE 0
#define CPU_CM3_SAM3 0
#define CPU_CM3_SAM3N 0
#endif
#if (defined(__IAR_SYSTEMS_ICC__) || defined(__IAR_SYSTEMS_ICC)) \
- && !defined(__ARM4TM__) /* IAR: if not ARM assume I196 */
+ && !defined(__ICCARM__) /* IAR: if not ARM assume I196 */
#warning Assuming CPU is I196
#define CPU_I196 1
#define CPU_ID i196
* NOTE: 0 means that an interrupt is not affected by the global IRQ
* priority settings.
*/
- #define IRQ_PRIO 0x80
+ #define IRQ_PRIO 0x80
#define IRQ_PRIO_MIN 0xf0
#define IRQ_PRIO_MAX 0
/*
#define IRQ_PRIO_DISABLED 0x40
#define IRQ_PRIO_ENABLED 0
- #define IRQ_DISABLE \
- ({ \
- register cpu_flags_t reg = IRQ_PRIO_DISABLED; \
- asm volatile ( \
- "msr basepri, %0" \
- : : "r"(reg) : "memory", "cc"); \
- })
+ #ifdef __IAR_SYSTEMS_ICC__
+ INLINE cpu_flags_t CPU_READ_FLAGS(void)
+ {
+ return __get_BASEPRI();
+ }
- #define IRQ_ENABLE \
- ({ \
- register cpu_flags_t reg = IRQ_PRIO_ENABLED; \
- asm volatile ( \
- "msr basepri, %0" \
- : : "r"(reg) : "memory", "cc"); \
- })
+ INLINE void CPU_WRITE_FLAGS(cpu_flags_t flags)
+ {
+ __set_BASEPRI(flags);
+ }
- #define CPU_READ_FLAGS() \
- ({ \
- register cpu_flags_t reg; \
- asm volatile ( \
- "mrs %0, basepri" \
- : "=r"(reg) : : "memory", "cc"); \
- reg; \
- })
+ extern uint32_t CPU_READ_IPSR(void);
+ extern bool irq_running(void);
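+
+ /*
+ * A minimal sketch of the externs above, assuming the IAR
+ * __get_IPSR() intrinsic is available: a nonzero IPSR means the
+ * CPU is in Handler mode, i.e. inside an ISR.
+ *
+ *	uint32_t CPU_READ_IPSR(void) { return __get_IPSR(); }
+ *	bool irq_running(void) { return CPU_READ_IPSR() != 0; }
+ */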
- #define IRQ_SAVE_DISABLE(x) \
- ({ \
- x = CPU_READ_FLAGS(); \
- IRQ_DISABLE; \
- })
+ #define IRQ_DISABLE CPU_WRITE_FLAGS(IRQ_PRIO_DISABLED)
- #define IRQ_RESTORE(x) \
- ({ \
- asm volatile ( \
- "msr basepri, %0" \
- : : "r"(x) : "memory", "cc"); \
- })
+ #define IRQ_ENABLE CPU_WRITE_FLAGS(IRQ_PRIO_ENABLED)
- #define IRQ_ENABLED() (CPU_READ_FLAGS() == IRQ_PRIO_ENABLED)
+ #define IRQ_SAVE_DISABLE(x) \
+ do { \
+ x = CPU_READ_FLAGS(); \
+ IRQ_DISABLE; \
+ } while (0)
- INLINE bool irq_running(void)
- {
- register uint32_t ret;
+ #define IRQ_RESTORE(x) \
+ do { \
+ CPU_WRITE_FLAGS(x); \
+ } while (0)
+ #else /* !__IAR_SYSTEMS_ICC__ */
+ #define IRQ_DISABLE \
+ ({ \
+ register cpu_flags_t reg = IRQ_PRIO_DISABLED; \
+ asm volatile ( \
+ "msr basepri, %0" \
+ : : "r"(reg) : "memory", "cc"); \
+ })
+
+ #define IRQ_ENABLE \
+ ({ \
+ register cpu_flags_t reg = IRQ_PRIO_ENABLED; \
+ asm volatile ( \
+ "msr basepri, %0" \
+ : : "r"(reg) : "memory", "cc"); \
+ })
+
+ #define CPU_READ_FLAGS() \
+ ({ \
+ register cpu_flags_t reg; \
+ asm volatile ( \
+ "mrs %0, basepri" \
+ : "=r"(reg) : : "memory", "cc"); \
+ reg; \
+ })
+
+ #define IRQ_SAVE_DISABLE(x) \
+ ({ \
+ x = CPU_READ_FLAGS(); \
+ IRQ_DISABLE; \
+ })
+
+ #define IRQ_RESTORE(x) \
+ ({ \
+ asm volatile ( \
+ "msr basepri, %0" \
+ : : "r"(x) : "memory", "cc"); \
+ })
+
+ INLINE bool irq_running(void)
+ {
+ register uint32_t ret;
+
+ /*
+ * Check if the current stack pointer is the main stack or
+ * process stack: we use the main stack only in Handler mode,
+ * so this means we're running inside an ISR.
+ */
+ asm volatile (
+ "mrs %0, msp\n\t"
+ "cmp sp, %0\n\t"
+ "ite ne\n\t"
+ "movne %0, #0\n\t"
+ "moveq %0, #1\n\t" : "=r"(ret) : : "cc");
+ return ret;
+ }
+ #endif /* __IAR_SYSTEMS_ICC__ */
+
+ #define IRQ_ENABLED() (CPU_READ_FLAGS() == IRQ_PRIO_ENABLED)
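+
+ /*
+ * Typical usage (variable names hypothetical): bracket access to
+ * data shared with an ISR in a critical section.
+ *
+ *	cpu_flags_t flags;
+ *	IRQ_SAVE_DISABLE(flags);
+ *	shared_count++;
+ *	IRQ_RESTORE(flags);
+ */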
- /*
- * Check if the current stack pointer is the main stack or
- * process stack: we use the main stack only in Handler mode,
- * so this means we're running inside an ISR.
- */
- asm volatile (
- "mrs %0, msp\n\t"
- "cmp sp, %0\n\t"
- "ite ne\n\t"
- "movne %0, #0\n\t"
- "moveq %0, #1\n\t" : "=r"(ret) : : "cc");
- return ret;
- }
#define IRQ_RUNNING() irq_running()
#if (CONFIG_KERN && CONFIG_KERN_PREEMPT)