AS = as
AR = ar
OBJCOPY = objcopy
+STRIP = strip
MCUFLAGS = -mthumb -mcpu=cortex-m4 -mfloat-abi=hard -mfpu=fpv4-sp-d16
AFLAGS = $(MCUFLAGS)
CFLAGS = $(MCUFLAGS) -ggdb \
- -Iinclude -Iinclude/it \
+ -Iinclude -Iinclude/it -Iinclude/cmsis \
-fno-builtin -fsigned-char -ffreestanding \
-Wall -Werror -Wextra -pedantic \
-Wno-overlength-strings -Wno-discarded-qualifiers
$(patsubst src/%.s, $(OUTDIR)/%.asm.o, $(AFILES))
OUT = out/main.elf
+INITRD = initrd.img
all: $(OUT)
+#@$(CROSS)$(STRIP) --only-keep-debug $(OUT)
$(OUT): $(OFILES) initrd/init libinterp.a
+ @echo " INITRD " $(INITRD)
+ @rm -f $(INITRD)
+ @$(CROSS)$(AR) r $(INITRD) initrd/*
+ @$(CROSS)$(OBJCOPY) -B arm -I binary -O elf32-littlearm $(INITRD) out/initrd.img.o
@echo " LINK " $(OUT)
- @./mkinitrd.sh
- @$(CROSS)$(OBJCOPY) -B arm -I binary -O elf32-littlearm initrd.img out/initrd.img.o
@$(CROSS)$(CC) $(CFLAGS) $(LFLAGS) out/*.o -o $(OUT) -L. -linterp
$(OUTDIR)/%.o: src/%.c
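For reference, the object produced by the objcopy step above exposes the embedded archive to C through objcopy's `_binary_<file>_*` symbol convention. A minimal sketch of how kernel code might locate it (only the symbol names derived from an input called initrd.img are assumed; the helper itself is illustrative):

/* Symbols emitted by `objcopy -I binary` for the input file "initrd.img" */
extern const unsigned char _binary_initrd_img_start[];
extern const unsigned char _binary_initrd_img_end[];

/* Illustrative helper: size of the embedded initrd archive in bytes */
static inline unsigned long initrd_size(void)
{
    return (unsigned long)(_binary_initrd_img_end - _binary_initrd_img_start);
}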
--- /dev/null
+/**************************************************************************//**\r
+ * @file cmsis_gcc.h\r
+ * @brief CMSIS Cortex-M Core Function/Instruction Header File\r
+ * @version V4.30\r
+ * @date 20. October 2015\r
+ ******************************************************************************/\r
+/* Copyright (c) 2009 - 2015 ARM LIMITED\r
+\r
+ All rights reserved.\r
+ Redistribution and use in source and binary forms, with or without\r
+ modification, are permitted provided that the following conditions are met:\r
+ - Redistributions of source code must retain the above copyright\r
+ notice, this list of conditions and the following disclaimer.\r
+ - Redistributions in binary form must reproduce the above copyright\r
+ notice, this list of conditions and the following disclaimer in the\r
+ documentation and/or other materials provided with the distribution.\r
+ - Neither the name of ARM nor the names of its contributors may be used\r
+ to endorse or promote products derived from this software without\r
+ specific prior written permission.\r
+ *\r
+ THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"\r
+ AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE\r
+ IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE\r
+ ARE DISCLAIMED. IN NO EVENT SHALL COPYRIGHT HOLDERS AND CONTRIBUTORS BE\r
+ LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR\r
+ CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF\r
+ SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS\r
+ INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN\r
+ CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)\r
+ ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE\r
+ POSSIBILITY OF SUCH DAMAGE.\r
+ ---------------------------------------------------------------------------*/\r
+\r
+\r
+#ifndef __CMSIS_GCC_H\r
+#define __CMSIS_GCC_H\r
+\r
+/* ignore some GCC warnings */\r
+#if defined ( __GNUC__ )\r
+#pragma GCC diagnostic push\r
+#pragma GCC diagnostic ignored "-Wsign-conversion"\r
+#pragma GCC diagnostic ignored "-Wconversion"\r
+#pragma GCC diagnostic ignored "-Wunused-parameter"\r
+#endif\r
+\r
+\r
+/* ########################### Core Function Access ########################### */\r
+/** \ingroup CMSIS_Core_FunctionInterface\r
+ \defgroup CMSIS_Core_RegAccFunctions CMSIS Core Register Access Functions\r
+ @{\r
+ */\r
+\r
+/**\r
+ \brief Enable IRQ Interrupts\r
+ \details Enables IRQ interrupts by clearing the I-bit in the CPSR.\r
+ Can only be executed in Privileged modes.\r
+ */\r
+__attribute__( ( always_inline ) ) __STATIC_INLINE void __enable_irq(void)\r
+{\r
+ __ASM volatile ("cpsie i" : : : "memory");\r
+}\r
+\r
+\r
+/**\r
+ \brief Disable IRQ Interrupts\r
+ \details Disables IRQ interrupts by setting the I-bit in the CPSR.\r
+ Can only be executed in Privileged modes.\r
+ */\r
+__attribute__( ( always_inline ) ) __STATIC_INLINE void __disable_irq(void)\r
+{\r
+ __ASM volatile ("cpsid i" : : : "memory");\r
+}\r
+\r
+\r
+/**\r
+ \brief Get Control Register\r
+ \details Returns the content of the Control Register.\r
+ \return Control Register value\r
+ */\r
+__attribute__( ( always_inline ) ) __STATIC_INLINE uint32_t __get_CONTROL(void)\r
+{\r
+ uint32_t result;\r
+\r
+ __ASM volatile ("MRS %0, control" : "=r" (result) );\r
+ return(result);\r
+}\r
+\r
+\r
+/**\r
+ \brief Set Control Register\r
+ \details Writes the given value to the Control Register.\r
+ \param [in] control Control Register value to set\r
+ */\r
+__attribute__( ( always_inline ) ) __STATIC_INLINE void __set_CONTROL(uint32_t control)\r
+{\r
+ __ASM volatile ("MSR control, %0" : : "r" (control) : "memory");\r
+}\r
+\r
+\r
+/**\r
+ \brief Get IPSR Register\r
+ \details Returns the content of the IPSR Register.\r
+ \return IPSR Register value\r
+ */\r
+__attribute__( ( always_inline ) ) __STATIC_INLINE uint32_t __get_IPSR(void)\r
+{\r
+ uint32_t result;\r
+\r
+ __ASM volatile ("MRS %0, ipsr" : "=r" (result) );\r
+ return(result);\r
+}\r
+\r
+\r
+/**\r
+ \brief Get APSR Register\r
+ \details Returns the content of the APSR Register.\r
+ \return APSR Register value\r
+ */\r
+__attribute__( ( always_inline ) ) __STATIC_INLINE uint32_t __get_APSR(void)\r
+{\r
+ uint32_t result;\r
+\r
+ __ASM volatile ("MRS %0, apsr" : "=r" (result) );\r
+ return(result);\r
+}\r
+\r
+\r
+/**\r
+ \brief Get xPSR Register\r
+ \details Returns the content of the xPSR Register.\r
+\r
+ \return xPSR Register value\r
+ */\r
+__attribute__( ( always_inline ) ) __STATIC_INLINE uint32_t __get_xPSR(void)\r
+{\r
+ uint32_t result;\r
+\r
+ __ASM volatile ("MRS %0, xpsr" : "=r" (result) );\r
+ return(result);\r
+}\r
+\r
+\r
+/**\r
+ \brief Get Process Stack Pointer\r
+ \details Returns the current value of the Process Stack Pointer (PSP).\r
+ \return PSP Register value\r
+ */\r
+__attribute__( ( always_inline ) ) __STATIC_INLINE uint32_t __get_PSP(void)\r
+{\r
+ register uint32_t result;\r
+\r
+ __ASM volatile ("MRS %0, psp\n" : "=r" (result) );\r
+ return(result);\r
+}\r
+\r
+\r
+/**\r
+ \brief Set Process Stack Pointer\r
+ \details Assigns the given value to the Process Stack Pointer (PSP).\r
+ \param [in] topOfProcStack Process Stack Pointer value to set\r
+ */\r
+__attribute__( ( always_inline ) ) __STATIC_INLINE void __set_PSP(uint32_t topOfProcStack)\r
+{\r
+ __ASM volatile ("MSR psp, %0\n" : : "r" (topOfProcStack) : "sp");\r
+}\r
+\r
+\r
+/**\r
+ \brief Get Main Stack Pointer\r
+ \details Returns the current value of the Main Stack Pointer (MSP).\r
+ \return MSP Register value\r
+ */\r
+__attribute__( ( always_inline ) ) __STATIC_INLINE uint32_t __get_MSP(void)\r
+{\r
+ register uint32_t result;\r
+\r
+ __ASM volatile ("MRS %0, msp\n" : "=r" (result) );\r
+ return(result);\r
+}\r
+\r
+\r
+/**\r
+ \brief Set Main Stack Pointer\r
+ \details Assigns the given value to the Main Stack Pointer (MSP).\r
+\r
+ \param [in] topOfMainStack Main Stack Pointer value to set\r
+ */\r
+__attribute__( ( always_inline ) ) __STATIC_INLINE void __set_MSP(uint32_t topOfMainStack)\r
+{\r
+ __ASM volatile ("MSR msp, %0\n" : : "r" (topOfMainStack) : "sp");\r
+}\r
+\r
+\r
+/**\r
+ \brief Get Priority Mask\r
+ \details Returns the current state of the priority mask bit from the Priority Mask Register.\r
+ \return Priority Mask value\r
+ */\r
+__attribute__( ( always_inline ) ) __STATIC_INLINE uint32_t __get_PRIMASK(void)\r
+{\r
+ uint32_t result;\r
+\r
+ __ASM volatile ("MRS %0, primask" : "=r" (result) );\r
+ return(result);\r
+}\r
+\r
+\r
+/**\r
+ \brief Set Priority Mask\r
+ \details Assigns the given value to the Priority Mask Register.\r
+ \param [in] priMask Priority Mask\r
+ */\r
+__attribute__( ( always_inline ) ) __STATIC_INLINE void __set_PRIMASK(uint32_t priMask)\r
+{\r
+ __ASM volatile ("MSR primask, %0" : : "r" (priMask) : "memory");\r
+}\r
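A common pattern built on the PRIMASK accessors above is a nestable critical section that saves the current mask, disables interrupts, and later restores the saved state; a minimal illustrative sketch (not part of CMSIS):

static inline uint32_t critical_enter(void)
{
    uint32_t primask = __get_PRIMASK();  /* remember whether IRQs were already masked */
    __disable_irq();                     /* set PRIMASK: block all maskable interrupts */
    return primask;
}

static inline void critical_exit(uint32_t primask)
{
    __set_PRIMASK(primask);              /* restore the state saved by critical_enter() */
}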
+\r
+\r
+#if (__CORTEX_M >= 0x03U)\r
+\r
+/**\r
+ \brief Enable FIQ\r
+ \details Enables FIQ interrupts by clearing the F-bit in the CPSR.\r
+ Can only be executed in Privileged modes.\r
+ */\r
+__attribute__( ( always_inline ) ) __STATIC_INLINE void __enable_fault_irq(void)\r
+{\r
+ __ASM volatile ("cpsie f" : : : "memory");\r
+}\r
+\r
+\r
+/**\r
+ \brief Disable FIQ\r
+ \details Disables FIQ interrupts by setting the F-bit in the CPSR.\r
+ Can only be executed in Privileged modes.\r
+ */\r
+__attribute__( ( always_inline ) ) __STATIC_INLINE void __disable_fault_irq(void)\r
+{\r
+ __ASM volatile ("cpsid f" : : : "memory");\r
+}\r
+\r
+\r
+/**\r
+ \brief Get Base Priority\r
+ \details Returns the current value of the Base Priority register.\r
+ \return Base Priority register value\r
+ */\r
+__attribute__( ( always_inline ) ) __STATIC_INLINE uint32_t __get_BASEPRI(void)\r
+{\r
+ uint32_t result;\r
+\r
+ __ASM volatile ("MRS %0, basepri" : "=r" (result) );\r
+ return(result);\r
+}\r
+\r
+\r
+/**\r
+ \brief Set Base Priority\r
+ \details Assigns the given value to the Base Priority register.\r
+ \param [in] value Base Priority value to set\r
+ */\r
+__attribute__( ( always_inline ) ) __STATIC_INLINE void __set_BASEPRI(uint32_t value)\r
+{\r
+ __ASM volatile ("MSR basepri, %0" : : "r" (value) : "memory");\r
+}\r
+\r
+\r
+/**\r
+ \brief Set Base Priority with condition\r
+ \details Assigns the given value to the Base Priority register only if BASEPRI masking is disabled,\r
+ or the new value increases the BASEPRI priority level.\r
+ \param [in] value Base Priority value to set\r
+ */\r
+__attribute__( ( always_inline ) ) __STATIC_INLINE void __set_BASEPRI_MAX(uint32_t value)\r
+{\r
+ __ASM volatile ("MSR basepri_max, %0" : : "r" (value) : "memory");\r
+}\r
+\r
+\r
+/**\r
+ \brief Get Fault Mask\r
+ \details Returns the current value of the Fault Mask register.\r
+ \return Fault Mask register value\r
+ */\r
+__attribute__( ( always_inline ) ) __STATIC_INLINE uint32_t __get_FAULTMASK(void)\r
+{\r
+ uint32_t result;\r
+\r
+ __ASM volatile ("MRS %0, faultmask" : "=r" (result) );\r
+ return(result);\r
+}\r
+\r
+\r
+/**\r
+ \brief Set Fault Mask\r
+ \details Assigns the given value to the Fault Mask register.\r
+ \param [in] faultMask Fault Mask value to set\r
+ */\r
+__attribute__( ( always_inline ) ) __STATIC_INLINE void __set_FAULTMASK(uint32_t faultMask)\r
+{\r
+ __ASM volatile ("MSR faultmask, %0" : : "r" (faultMask) : "memory");\r
+}\r
+\r
+#endif /* (__CORTEX_M >= 0x03U) */\r
+\r
+\r
+#if (__CORTEX_M == 0x04U) || (__CORTEX_M == 0x07U)\r
+\r
+/**\r
+ \brief Get FPSCR\r
+ \details Returns the current value of the Floating Point Status/Control register.\r
+ \return Floating Point Status/Control register value\r
+ */\r
+__attribute__( ( always_inline ) ) __STATIC_INLINE uint32_t __get_FPSCR(void)\r
+{\r
+#if (__FPU_PRESENT == 1U) && (__FPU_USED == 1U)\r
+ uint32_t result;\r
+\r
+ /* Empty asm statement works as a scheduling barrier */\r
+ __ASM volatile ("");\r
+ __ASM volatile ("VMRS %0, fpscr" : "=r" (result) );\r
+ __ASM volatile ("");\r
+ return(result);\r
+#else\r
+ return(0);\r
+#endif\r
+}\r
+\r
+\r
+/**\r
+ \brief Set FPSCR\r
+ \details Assigns the given value to the Floating Point Status/Control register.\r
+ \param [in] fpscr Floating Point Status/Control value to set\r
+ */\r
+__attribute__( ( always_inline ) ) __STATIC_INLINE void __set_FPSCR(uint32_t fpscr)\r
+{\r
+#if (__FPU_PRESENT == 1U) && (__FPU_USED == 1U)\r
+ /* Empty asm statement works as a scheduling barrier */\r
+ __ASM volatile ("");\r
+ __ASM volatile ("VMSR fpscr, %0" : : "r" (fpscr) : "vfpcc");\r
+ __ASM volatile ("");\r
+#endif\r
+}\r
+\r
+#endif /* (__CORTEX_M == 0x04U) || (__CORTEX_M == 0x07U) */\r
+\r
+\r
+\r
+/*@} end of CMSIS_Core_RegAccFunctions */\r
+\r
+\r
+/* ########################## Core Instruction Access ######################### */\r
+/** \defgroup CMSIS_Core_InstructionInterface CMSIS Core Instruction Interface\r
+ Access to dedicated instructions\r
+ @{\r
+*/\r
+\r
+/* Define macros for porting to both thumb1 and thumb2.\r
+ * For thumb1, use low register (r0-r7), specified by constraint "l"\r
+ * Otherwise, use general registers, specified by constraint "r" */\r
+#if defined (__thumb__) && !defined (__thumb2__)\r
+#define __CMSIS_GCC_OUT_REG(r) "=l" (r)\r
+#define __CMSIS_GCC_USE_REG(r) "l" (r)\r
+#else\r
+#define __CMSIS_GCC_OUT_REG(r) "=r" (r)\r
+#define __CMSIS_GCC_USE_REG(r) "r" (r)\r
+#endif\r
+\r
+/**\r
+ \brief No Operation\r
+ \details No Operation does nothing. This instruction can be used for code alignment purposes.\r
+ */\r
+__attribute__((always_inline)) __STATIC_INLINE void __NOP(void)\r
+{\r
+ __ASM volatile ("nop");\r
+}\r
+\r
+\r
+/**\r
+ \brief Wait For Interrupt\r
+ \details Wait For Interrupt is a hint instruction that suspends execution until one of a number of events occurs.\r
+ */\r
+__attribute__((always_inline)) __STATIC_INLINE void __WFI(void)\r
+{\r
+ __ASM volatile ("wfi");\r
+}\r
+\r
+\r
+/**\r
+ \brief Wait For Event\r
+ \details Wait For Event is a hint instruction that permits the processor to enter\r
+ a low-power state until one of a number of events occurs.\r
+ */\r
+__attribute__((always_inline)) __STATIC_INLINE void __WFE(void)\r
+{\r
+ __ASM volatile ("wfe");\r
+}\r
+\r
+\r
+/**\r
+ \brief Send Event\r
+ \details Send Event is a hint instruction. It causes an event to be signaled to the CPU.\r
+ */\r
+__attribute__((always_inline)) __STATIC_INLINE void __SEV(void)\r
+{\r
+ __ASM volatile ("sev");\r
+}\r
+\r
+\r
+/**\r
+ \brief Instruction Synchronization Barrier\r
+ \details Instruction Synchronization Barrier flushes the pipeline in the processor,\r
+ so that all instructions following the ISB are fetched from cache or memory,\r
+ after the instruction has been completed.\r
+ */\r
+__attribute__((always_inline)) __STATIC_INLINE void __ISB(void)\r
+{\r
+ __ASM volatile ("isb 0xF":::"memory");\r
+}\r
+\r
+\r
+/**\r
+ \brief Data Synchronization Barrier\r
+ \details Acts as a special kind of Data Memory Barrier.\r
+ It completes when all explicit memory accesses before this instruction complete.\r
+ */\r
+__attribute__((always_inline)) __STATIC_INLINE void __DSB(void)\r
+{\r
+ __ASM volatile ("dsb 0xF":::"memory");\r
+}\r
+\r
+\r
+/**\r
+ \brief Data Memory Barrier\r
+ \details Ensures the apparent order of the explicit memory operations before\r
+ and after the instruction, without ensuring their completion.\r
+ */\r
+__attribute__((always_inline)) __STATIC_INLINE void __DMB(void)\r
+{\r
+ __ASM volatile ("dmb 0xF":::"memory");\r
+}\r
+\r
+\r
+/**\r
+ \brief Reverse byte order (32 bit)\r
+ \details Reverses the byte order in integer value.\r
+ \param [in] value Value to reverse\r
+ \return Reversed value\r
+ */\r
+__attribute__((always_inline)) __STATIC_INLINE uint32_t __REV(uint32_t value)\r
+{\r
+#if (__GNUC__ > 4) || (__GNUC__ == 4 && __GNUC_MINOR__ >= 5)\r
+ return __builtin_bswap32(value);\r
+#else\r
+ uint32_t result;\r
+\r
+ __ASM volatile ("rev %0, %1" : __CMSIS_GCC_OUT_REG (result) : __CMSIS_GCC_USE_REG (value) );\r
+ return(result);\r
+#endif\r
+}\r
+\r
+\r
+/**\r
+ \brief Reverse byte order (16 bit)\r
+ \details Reverses the byte order in two unsigned short values.\r
+ \param [in] value Value to reverse\r
+ \return Reversed value\r
+ */\r
+__attribute__((always_inline)) __STATIC_INLINE uint32_t __REV16(uint32_t value)\r
+{\r
+ uint32_t result;\r
+\r
+ __ASM volatile ("rev16 %0, %1" : __CMSIS_GCC_OUT_REG (result) : __CMSIS_GCC_USE_REG (value) );\r
+ return(result);\r
+}\r
+\r
+\r
+/**\r
+ \brief Reverse byte order in signed short value\r
+ \details Reverses the byte order in a signed short value with sign extension to integer.\r
+ \param [in] value Value to reverse\r
+ \return Reversed value\r
+ */\r
+__attribute__((always_inline)) __STATIC_INLINE int32_t __REVSH(int32_t value)\r
+{\r
+#if (__GNUC__ > 4) || (__GNUC__ == 4 && __GNUC_MINOR__ >= 8)\r
+ return (short)__builtin_bswap16(value);\r
+#else\r
+ int32_t result;\r
+\r
+ __ASM volatile ("revsh %0, %1" : __CMSIS_GCC_OUT_REG (result) : __CMSIS_GCC_USE_REG (value) );\r
+ return(result);\r
+#endif\r
+}\r
+\r
+\r
+/**\r
+ \brief Rotate Right in unsigned value (32 bit)\r
+ \details Rotate Right provides the value of the contents of a register rotated by a variable number of bits.\r
+ \param [in] op1 Value to rotate\r
+ \param [in] op2 Number of bits to rotate\r
+ \return Rotated value\r
+ */\r
+__attribute__((always_inline)) __STATIC_INLINE uint32_t __ROR(uint32_t op1, uint32_t op2)\r
+{\r
+ return (op1 >> op2) | (op1 << (32U - op2));\r
+}\r
+\r
+\r
+/**\r
+ \brief Breakpoint\r
+ \details Causes the processor to enter Debug state.\r
+ Debug tools can use this to investigate system state when the instruction at a particular address is reached.\r
+ \param [in] value is ignored by the processor.\r
+ If required, a debugger can use it to store additional information about the breakpoint.\r
+ */\r
+#define __BKPT(value) __ASM volatile ("bkpt "#value)\r
+\r
+\r
+/**\r
+ \brief Reverse bit order of value\r
+ \details Reverses the bit order of the given value.\r
+ \param [in] value Value to reverse\r
+ \return Reversed value\r
+ */\r
+__attribute__((always_inline)) __STATIC_INLINE uint32_t __RBIT(uint32_t value)\r
+{\r
+ uint32_t result;\r
+\r
+#if (__CORTEX_M >= 0x03U) || (__CORTEX_SC >= 300U)\r
+ __ASM volatile ("rbit %0, %1" : "=r" (result) : "r" (value) );\r
+#else\r
+ int32_t s = 4 /*sizeof(v)*/ * 8 - 1; /* extra shift needed at end */\r
+\r
+ result = value; /* r will be reversed bits of v; first get LSB of v */\r
+ for (value >>= 1U; value; value >>= 1U)\r
+ {\r
+ result <<= 1U;\r
+ result |= value & 1U;\r
+ s--;\r
+ }\r
+ result <<= s; /* shift when v's highest bits are zero */\r
+#endif\r
+ return(result);\r
+}\r
+\r
+\r
+/**\r
+ \brief Count leading zeros\r
+ \details Counts the number of leading zeros of a data value.\r
+ \param [in] value Value to count the leading zeros\r
+ \return number of leading zeros in value\r
+ */\r
+#define __CLZ __builtin_clz\r
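__RBIT and __CLZ combine into the usual trick for locating the least-significant set bit; an illustrative helper (not part of the header, and only valid for a non-zero argument because __builtin_clz(0) is undefined):

static inline uint32_t lowest_set_bit(uint32_t mask)
{
    /* reverse the bits, then count leading zeros of the reversed word */
    return __CLZ(__RBIT(mask));
}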
+\r
+\r
+#if (__CORTEX_M >= 0x03U) || (__CORTEX_SC >= 300U)\r
+\r
+/**\r
+ \brief LDR Exclusive (8 bit)\r
+ \details Executes an exclusive LDR instruction for an 8-bit value.\r
+ \param [in] ptr Pointer to data\r
+ \return value of type uint8_t at (*ptr)\r
+ */\r
+__attribute__((always_inline)) __STATIC_INLINE uint8_t __LDREXB(volatile uint8_t *addr)\r
+{\r
+ uint32_t result;\r
+\r
+#if (__GNUC__ > 4) || (__GNUC__ == 4 && __GNUC_MINOR__ >= 8)\r
+ __ASM volatile ("ldrexb %0, %1" : "=r" (result) : "Q" (*addr) );\r
+#else\r
+ /* Prior to GCC 4.8, "Q" will be expanded to [rx, #0], which is not\r
+ accepted by the assembler, so the following, less efficient pattern has to be used.\r
+ */\r
+ __ASM volatile ("ldrexb %0, [%1]" : "=r" (result) : "r" (addr) : "memory" );\r
+#endif\r
+ return ((uint8_t) result); /* Add explicit type cast here */\r
+}\r
+\r
+\r
+/**\r
+ \brief LDR Exclusive (16 bit)\r
+ \details Executes an exclusive LDR instruction for 16-bit values.\r
+ \param [in] ptr Pointer to data\r
+ \return value of type uint16_t at (*ptr)\r
+ */\r
+__attribute__((always_inline)) __STATIC_INLINE uint16_t __LDREXH(volatile uint16_t *addr)\r
+{\r
+ uint32_t result;\r
+\r
+#if (__GNUC__ > 4) || (__GNUC__ == 4 && __GNUC_MINOR__ >= 8)\r
+ __ASM volatile ("ldrexh %0, %1" : "=r" (result) : "Q" (*addr) );\r
+#else\r
+ /* Prior to GCC 4.8, "Q" will be expanded to [rx, #0], which is not\r
+ accepted by the assembler, so the following, less efficient pattern has to be used.\r
+ */\r
+ __ASM volatile ("ldrexh %0, [%1]" : "=r" (result) : "r" (addr) : "memory" );\r
+#endif\r
+ return ((uint16_t) result); /* Add explicit type cast here */\r
+}\r
+\r
+\r
+/**\r
+ \brief LDR Exclusive (32 bit)\r
+ \details Executes an exclusive LDR instruction for 32-bit values.\r
+ \param [in] ptr Pointer to data\r
+ \return value of type uint32_t at (*ptr)\r
+ */\r
+__attribute__((always_inline)) __STATIC_INLINE uint32_t __LDREXW(volatile uint32_t *addr)\r
+{\r
+ uint32_t result;\r
+\r
+ __ASM volatile ("ldrex %0, %1" : "=r" (result) : "Q" (*addr) );\r
+ return(result);\r
+}\r
+\r
+\r
+/**\r
+ \brief STR Exclusive (8 bit)\r
+ \details Executes an exclusive STR instruction for 8-bit values.\r
+ \param [in] value Value to store\r
+ \param [in] ptr Pointer to location\r
+ \return 0 Function succeeded\r
+ \return 1 Function failed\r
+ */\r
+__attribute__((always_inline)) __STATIC_INLINE uint32_t __STREXB(uint8_t value, volatile uint8_t *addr)\r
+{\r
+ uint32_t result;\r
+\r
+ __ASM volatile ("strexb %0, %2, %1" : "=&r" (result), "=Q" (*addr) : "r" ((uint32_t)value) );\r
+ return(result);\r
+}\r
+\r
+\r
+/**\r
+ \brief STR Exclusive (16 bit)\r
+ \details Executes an exclusive STR instruction for 16-bit values.\r
+ \param [in] value Value to store\r
+ \param [in] ptr Pointer to location\r
+ \return 0 Function succeeded\r
+ \return 1 Function failed\r
+ */\r
+__attribute__((always_inline)) __STATIC_INLINE uint32_t __STREXH(uint16_t value, volatile uint16_t *addr)\r
+{\r
+ uint32_t result;\r
+\r
+ __ASM volatile ("strexh %0, %2, %1" : "=&r" (result), "=Q" (*addr) : "r" ((uint32_t)value) );\r
+ return(result);\r
+}\r
+\r
+\r
+/**\r
+ \brief STR Exclusive (32 bit)\r
+ \details Executes an exclusive STR instruction for 32-bit values.\r
+ \param [in] value Value to store\r
+ \param [in] ptr Pointer to location\r
+ \return 0 Function succeeded\r
+ \return 1 Function failed\r
+ */\r
+__attribute__((always_inline)) __STATIC_INLINE uint32_t __STREXW(uint32_t value, volatile uint32_t *addr)\r
+{\r
+ uint32_t result;\r
+\r
+ __ASM volatile ("strex %0, %2, %1" : "=&r" (result), "=Q" (*addr) : "r" (value) );\r
+ return(result);\r
+}\r
+\r
+\r
+/**\r
+ \brief Remove the exclusive lock\r
+ \details Removes the exclusive lock which is created by LDREX.\r
+ */\r
+__attribute__((always_inline)) __STATIC_INLINE void __CLREX(void)\r
+{\r
+ __ASM volatile ("clrex" ::: "memory");\r
+}\r
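The exclusive-access intrinsics above are normally used in a retry loop: load with LDREX, compute the new value, and repeat the STREX until it reports success (0). An illustrative atomic add, not part of the header:

static inline uint32_t atomic_add_u32(volatile uint32_t *addr, uint32_t inc)
{
    uint32_t old;
    do {
        old = __LDREXW(addr);                    /* open exclusive access */
    } while (__STREXW(old + inc, addr) != 0U);   /* 0 = store succeeded, else retry */
    return old;                                  /* value before the addition */
}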
+\r
+\r
+/**\r
+ \brief Signed Saturate\r
+ \details Saturates a signed value.\r
+ \param [in] value Value to be saturated\r
+ \param [in] sat Bit position to saturate to (1..32)\r
+ \return Saturated value\r
+ */\r
+#define __SSAT(ARG1,ARG2) \\r
+({ \\r
+ uint32_t __RES, __ARG1 = (ARG1); \\r
+ __ASM ("ssat %0, %1, %2" : "=r" (__RES) : "I" (ARG2), "r" (__ARG1) ); \\r
+ __RES; \\r
+ })\r
+\r
+\r
+/**\r
+ \brief Unsigned Saturate\r
+ \details Saturates an unsigned value.\r
+ \param [in] value Value to be saturated\r
+ \param [in] sat Bit position to saturate to (0..31)\r
+ \return Saturated value\r
+ */\r
+#define __USAT(ARG1,ARG2) \\r
+({ \\r
+ uint32_t __RES, __ARG1 = (ARG1); \\r
+ __ASM ("usat %0, %1, %2" : "=r" (__RES) : "I" (ARG2), "r" (__ARG1) ); \\r
+ __RES; \\r
+ })\r
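Both saturate macros require the bit position to be a compile-time constant (the "I" constraint). As an illustration, clamping a wide accumulator to a signed 16-bit sample might look like this (the helper name is hypothetical):

static inline int16_t clamp_sample(int32_t acc)
{
    /* SSAT with bit position 16 clamps to the range -32768..32767 */
    return (int16_t)__SSAT(acc, 16);
}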
+\r
+\r
+/**\r
+ \brief Rotate Right with Extend (32 bit)\r
+ \details Moves each bit of a bitstring right by one bit.\r
+ The carry input is shifted in at the left end of the bitstring.\r
+ \param [in] value Value to rotate\r
+ \return Rotated value\r
+ */\r
+__attribute__((always_inline)) __STATIC_INLINE uint32_t __RRX(uint32_t value)\r
+{\r
+ uint32_t result;\r
+\r
+ __ASM volatile ("rrx %0, %1" : __CMSIS_GCC_OUT_REG (result) : __CMSIS_GCC_USE_REG (value) );\r
+ return(result);\r
+}\r
+\r
+\r
+/**\r
+ \brief LDRT Unprivileged (8 bit)\r
+ \details Executes an unprivileged LDRT instruction for an 8-bit value.\r
+ \param [in] ptr Pointer to data\r
+ \return value of type uint8_t at (*ptr)\r
+ */\r
+__attribute__((always_inline)) __STATIC_INLINE uint8_t __LDRBT(volatile uint8_t *addr)\r
+{\r
+ uint32_t result;\r
+\r
+#if (__GNUC__ > 4) || (__GNUC__ == 4 && __GNUC_MINOR__ >= 8)\r
+ __ASM volatile ("ldrbt %0, %1" : "=r" (result) : "Q" (*addr) );\r
+#else\r
+ /* Prior to GCC 4.8, "Q" will be expanded to [rx, #0], which is not\r
+ accepted by the assembler, so the following, less efficient pattern has to be used.\r
+ */\r
+ __ASM volatile ("ldrbt %0, [%1]" : "=r" (result) : "r" (addr) : "memory" );\r
+#endif\r
+ return ((uint8_t) result); /* Add explicit type cast here */\r
+}\r
+\r
+\r
+/**\r
+ \brief LDRT Unprivileged (16 bit)\r
+ \details Executes an unprivileged LDRT instruction for 16-bit values.\r
+ \param [in] ptr Pointer to data\r
+ \return value of type uint16_t at (*ptr)\r
+ */\r
+__attribute__((always_inline)) __STATIC_INLINE uint16_t __LDRHT(volatile uint16_t *addr)\r
+{\r
+ uint32_t result;\r
+\r
+#if (__GNUC__ > 4) || (__GNUC__ == 4 && __GNUC_MINOR__ >= 8)\r
+ __ASM volatile ("ldrht %0, %1" : "=r" (result) : "Q" (*addr) );\r
+#else\r
+ /* Prior to GCC 4.8, "Q" will be expanded to [rx, #0], which is not\r
+ accepted by the assembler, so the following, less efficient pattern has to be used.\r
+ */\r
+ __ASM volatile ("ldrht %0, [%1]" : "=r" (result) : "r" (addr) : "memory" );\r
+#endif\r
+ return ((uint16_t) result); /* Add explicit type cast here */\r
+}\r
+\r
+\r
+/**\r
+ \brief LDRT Unprivileged (32 bit)\r
+ \details Executes an unprivileged LDRT instruction for 32-bit values.\r
+ \param [in] ptr Pointer to data\r
+ \return value of type uint32_t at (*ptr)\r
+ */\r
+__attribute__((always_inline)) __STATIC_INLINE uint32_t __LDRT(volatile uint32_t *addr)\r
+{\r
+ uint32_t result;\r
+\r
+ __ASM volatile ("ldrt %0, %1" : "=r" (result) : "Q" (*addr) );\r
+ return(result);\r
+}\r
+\r
+\r
+/**\r
+ \brief STRT Unprivileged (8 bit)\r
+ \details Executes an unprivileged STRT instruction for 8-bit values.\r
+ \param [in] value Value to store\r
+ \param [in] ptr Pointer to location\r
+ */\r
+__attribute__((always_inline)) __STATIC_INLINE void __STRBT(uint8_t value, volatile uint8_t *addr)\r
+{\r
+ __ASM volatile ("strbt %1, %0" : "=Q" (*addr) : "r" ((uint32_t)value) );\r
+}\r
+\r
+\r
+/**\r
+ \brief STRT Unprivileged (16 bit)\r
+ \details Executes an unprivileged STRT instruction for 16-bit values.\r
+ \param [in] value Value to store\r
+ \param [in] ptr Pointer to location\r
+ */\r
+__attribute__((always_inline)) __STATIC_INLINE void __STRHT(uint16_t value, volatile uint16_t *addr)\r
+{\r
+ __ASM volatile ("strht %1, %0" : "=Q" (*addr) : "r" ((uint32_t)value) );\r
+}\r
+\r
+\r
+/**\r
+ \brief STRT Unprivileged (32 bit)\r
+ \details Executes an unprivileged STRT instruction for 32-bit values.\r
+ \param [in] value Value to store\r
+ \param [in] ptr Pointer to location\r
+ */\r
+__attribute__((always_inline)) __STATIC_INLINE void __STRT(uint32_t value, volatile uint32_t *addr)\r
+{\r
+ __ASM volatile ("strt %1, %0" : "=Q" (*addr) : "r" (value) );\r
+}\r
+\r
+#endif /* (__CORTEX_M >= 0x03U) || (__CORTEX_SC >= 300U) */\r
+\r
+/*@}*/ /* end of group CMSIS_Core_InstructionInterface */\r
+\r
+\r
+/* ################### Compiler specific Intrinsics ########################### */\r
+/** \defgroup CMSIS_SIMD_intrinsics CMSIS SIMD Intrinsics\r
+ Access to dedicated SIMD instructions\r
+ @{\r
+*/\r
+\r
+#if (__CORTEX_M >= 0x04U) /* only for Cortex-M4 and above */\r
+\r
+__attribute__( ( always_inline ) ) __STATIC_INLINE uint32_t __SADD8(uint32_t op1, uint32_t op2)\r
+{\r
+ uint32_t result;\r
+\r
+ __ASM volatile ("sadd8 %0, %1, %2" : "=r" (result) : "r" (op1), "r" (op2) );\r
+ return(result);\r
+}\r
+\r
+__attribute__( ( always_inline ) ) __STATIC_INLINE uint32_t __QADD8(uint32_t op1, uint32_t op2)\r
+{\r
+ uint32_t result;\r
+\r
+ __ASM volatile ("qadd8 %0, %1, %2" : "=r" (result) : "r" (op1), "r" (op2) );\r
+ return(result);\r
+}\r
+\r
+__attribute__( ( always_inline ) ) __STATIC_INLINE uint32_t __SHADD8(uint32_t op1, uint32_t op2)\r
+{\r
+ uint32_t result;\r
+\r
+ __ASM volatile ("shadd8 %0, %1, %2" : "=r" (result) : "r" (op1), "r" (op2) );\r
+ return(result);\r
+}\r
+\r
+__attribute__( ( always_inline ) ) __STATIC_INLINE uint32_t __UADD8(uint32_t op1, uint32_t op2)\r
+{\r
+ uint32_t result;\r
+\r
+ __ASM volatile ("uadd8 %0, %1, %2" : "=r" (result) : "r" (op1), "r" (op2) );\r
+ return(result);\r
+}\r
+\r
+__attribute__( ( always_inline ) ) __STATIC_INLINE uint32_t __UQADD8(uint32_t op1, uint32_t op2)\r
+{\r
+ uint32_t result;\r
+\r
+ __ASM volatile ("uqadd8 %0, %1, %2" : "=r" (result) : "r" (op1), "r" (op2) );\r
+ return(result);\r
+}\r
+\r
+__attribute__( ( always_inline ) ) __STATIC_INLINE uint32_t __UHADD8(uint32_t op1, uint32_t op2)\r
+{\r
+ uint32_t result;\r
+\r
+ __ASM volatile ("uhadd8 %0, %1, %2" : "=r" (result) : "r" (op1), "r" (op2) );\r
+ return(result);\r
+}\r
+\r
+\r
+__attribute__( ( always_inline ) ) __STATIC_INLINE uint32_t __SSUB8(uint32_t op1, uint32_t op2)\r
+{\r
+ uint32_t result;\r
+\r
+ __ASM volatile ("ssub8 %0, %1, %2" : "=r" (result) : "r" (op1), "r" (op2) );\r
+ return(result);\r
+}\r
+\r
+__attribute__( ( always_inline ) ) __STATIC_INLINE uint32_t __QSUB8(uint32_t op1, uint32_t op2)\r
+{\r
+ uint32_t result;\r
+\r
+ __ASM volatile ("qsub8 %0, %1, %2" : "=r" (result) : "r" (op1), "r" (op2) );\r
+ return(result);\r
+}\r
+\r
+__attribute__( ( always_inline ) ) __STATIC_INLINE uint32_t __SHSUB8(uint32_t op1, uint32_t op2)\r
+{\r
+ uint32_t result;\r
+\r
+ __ASM volatile ("shsub8 %0, %1, %2" : "=r" (result) : "r" (op1), "r" (op2) );\r
+ return(result);\r
+}\r
+\r
+__attribute__( ( always_inline ) ) __STATIC_INLINE uint32_t __USUB8(uint32_t op1, uint32_t op2)\r
+{\r
+ uint32_t result;\r
+\r
+ __ASM volatile ("usub8 %0, %1, %2" : "=r" (result) : "r" (op1), "r" (op2) );\r
+ return(result);\r
+}\r
+\r
+__attribute__( ( always_inline ) ) __STATIC_INLINE uint32_t __UQSUB8(uint32_t op1, uint32_t op2)\r
+{\r
+ uint32_t result;\r
+\r
+ __ASM volatile ("uqsub8 %0, %1, %2" : "=r" (result) : "r" (op1), "r" (op2) );\r
+ return(result);\r
+}\r
+\r
+__attribute__( ( always_inline ) ) __STATIC_INLINE uint32_t __UHSUB8(uint32_t op1, uint32_t op2)\r
+{\r
+ uint32_t result;\r
+\r
+ __ASM volatile ("uhsub8 %0, %1, %2" : "=r" (result) : "r" (op1), "r" (op2) );\r
+ return(result);\r
+}\r
+\r
+\r
+__attribute__( ( always_inline ) ) __STATIC_INLINE uint32_t __SADD16(uint32_t op1, uint32_t op2)\r
+{\r
+ uint32_t result;\r
+\r
+ __ASM volatile ("sadd16 %0, %1, %2" : "=r" (result) : "r" (op1), "r" (op2) );\r
+ return(result);\r
+}\r
+\r
+__attribute__( ( always_inline ) ) __STATIC_INLINE uint32_t __QADD16(uint32_t op1, uint32_t op2)\r
+{\r
+ uint32_t result;\r
+\r
+ __ASM volatile ("qadd16 %0, %1, %2" : "=r" (result) : "r" (op1), "r" (op2) );\r
+ return(result);\r
+}\r
+\r
+__attribute__( ( always_inline ) ) __STATIC_INLINE uint32_t __SHADD16(uint32_t op1, uint32_t op2)\r
+{\r
+ uint32_t result;\r
+\r
+ __ASM volatile ("shadd16 %0, %1, %2" : "=r" (result) : "r" (op1), "r" (op2) );\r
+ return(result);\r
+}\r
+\r
+__attribute__( ( always_inline ) ) __STATIC_INLINE uint32_t __UADD16(uint32_t op1, uint32_t op2)\r
+{\r
+ uint32_t result;\r
+\r
+ __ASM volatile ("uadd16 %0, %1, %2" : "=r" (result) : "r" (op1), "r" (op2) );\r
+ return(result);\r
+}\r
+\r
+__attribute__( ( always_inline ) ) __STATIC_INLINE uint32_t __UQADD16(uint32_t op1, uint32_t op2)\r
+{\r
+ uint32_t result;\r
+\r
+ __ASM volatile ("uqadd16 %0, %1, %2" : "=r" (result) : "r" (op1), "r" (op2) );\r
+ return(result);\r
+}\r
+\r
+__attribute__( ( always_inline ) ) __STATIC_INLINE uint32_t __UHADD16(uint32_t op1, uint32_t op2)\r
+{\r
+ uint32_t result;\r
+\r
+ __ASM volatile ("uhadd16 %0, %1, %2" : "=r" (result) : "r" (op1), "r" (op2) );\r
+ return(result);\r
+}\r
+\r
+__attribute__( ( always_inline ) ) __STATIC_INLINE uint32_t __SSUB16(uint32_t op1, uint32_t op2)\r
+{\r
+ uint32_t result;\r
+\r
+ __ASM volatile ("ssub16 %0, %1, %2" : "=r" (result) : "r" (op1), "r" (op2) );\r
+ return(result);\r
+}\r
+\r
+__attribute__( ( always_inline ) ) __STATIC_INLINE uint32_t __QSUB16(uint32_t op1, uint32_t op2)\r
+{\r
+ uint32_t result;\r
+\r
+ __ASM volatile ("qsub16 %0, %1, %2" : "=r" (result) : "r" (op1), "r" (op2) );\r
+ return(result);\r
+}\r
+\r
+__attribute__( ( always_inline ) ) __STATIC_INLINE uint32_t __SHSUB16(uint32_t op1, uint32_t op2)\r
+{\r
+ uint32_t result;\r
+\r
+ __ASM volatile ("shsub16 %0, %1, %2" : "=r" (result) : "r" (op1), "r" (op2) );\r
+ return(result);\r
+}\r
+\r
+__attribute__( ( always_inline ) ) __STATIC_INLINE uint32_t __USUB16(uint32_t op1, uint32_t op2)\r
+{\r
+ uint32_t result;\r
+\r
+ __ASM volatile ("usub16 %0, %1, %2" : "=r" (result) : "r" (op1), "r" (op2) );\r
+ return(result);\r
+}\r
+\r
+__attribute__( ( always_inline ) ) __STATIC_INLINE uint32_t __UQSUB16(uint32_t op1, uint32_t op2)\r
+{\r
+ uint32_t result;\r
+\r
+ __ASM volatile ("uqsub16 %0, %1, %2" : "=r" (result) : "r" (op1), "r" (op2) );\r
+ return(result);\r
+}\r
+\r
+__attribute__( ( always_inline ) ) __STATIC_INLINE uint32_t __UHSUB16(uint32_t op1, uint32_t op2)\r
+{\r
+ uint32_t result;\r
+\r
+ __ASM volatile ("uhsub16 %0, %1, %2" : "=r" (result) : "r" (op1), "r" (op2) );\r
+ return(result);\r
+}\r
+\r
+__attribute__( ( always_inline ) ) __STATIC_INLINE uint32_t __SASX(uint32_t op1, uint32_t op2)\r
+{\r
+ uint32_t result;\r
+\r
+ __ASM volatile ("sasx %0, %1, %2" : "=r" (result) : "r" (op1), "r" (op2) );\r
+ return(result);\r
+}\r
+\r
+__attribute__( ( always_inline ) ) __STATIC_INLINE uint32_t __QASX(uint32_t op1, uint32_t op2)\r
+{\r
+ uint32_t result;\r
+\r
+ __ASM volatile ("qasx %0, %1, %2" : "=r" (result) : "r" (op1), "r" (op2) );\r
+ return(result);\r
+}\r
+\r
+__attribute__( ( always_inline ) ) __STATIC_INLINE uint32_t __SHASX(uint32_t op1, uint32_t op2)\r
+{\r
+ uint32_t result;\r
+\r
+ __ASM volatile ("shasx %0, %1, %2" : "=r" (result) : "r" (op1), "r" (op2) );\r
+ return(result);\r
+}\r
+\r
+__attribute__( ( always_inline ) ) __STATIC_INLINE uint32_t __UASX(uint32_t op1, uint32_t op2)\r
+{\r
+ uint32_t result;\r
+\r
+ __ASM volatile ("uasx %0, %1, %2" : "=r" (result) : "r" (op1), "r" (op2) );\r
+ return(result);\r
+}\r
+\r
+__attribute__( ( always_inline ) ) __STATIC_INLINE uint32_t __UQASX(uint32_t op1, uint32_t op2)\r
+{\r
+ uint32_t result;\r
+\r
+ __ASM volatile ("uqasx %0, %1, %2" : "=r" (result) : "r" (op1), "r" (op2) );\r
+ return(result);\r
+}\r
+\r
+__attribute__( ( always_inline ) ) __STATIC_INLINE uint32_t __UHASX(uint32_t op1, uint32_t op2)\r
+{\r
+ uint32_t result;\r
+\r
+ __ASM volatile ("uhasx %0, %1, %2" : "=r" (result) : "r" (op1), "r" (op2) );\r
+ return(result);\r
+}\r
+\r
+__attribute__( ( always_inline ) ) __STATIC_INLINE uint32_t __SSAX(uint32_t op1, uint32_t op2)\r
+{\r
+ uint32_t result;\r
+\r
+ __ASM volatile ("ssax %0, %1, %2" : "=r" (result) : "r" (op1), "r" (op2) );\r
+ return(result);\r
+}\r
+\r
+__attribute__( ( always_inline ) ) __STATIC_INLINE uint32_t __QSAX(uint32_t op1, uint32_t op2)\r
+{\r
+ uint32_t result;\r
+\r
+ __ASM volatile ("qsax %0, %1, %2" : "=r" (result) : "r" (op1), "r" (op2) );\r
+ return(result);\r
+}\r
+\r
+__attribute__( ( always_inline ) ) __STATIC_INLINE uint32_t __SHSAX(uint32_t op1, uint32_t op2)\r
+{\r
+ uint32_t result;\r
+\r
+ __ASM volatile ("shsax %0, %1, %2" : "=r" (result) : "r" (op1), "r" (op2) );\r
+ return(result);\r
+}\r
+\r
+__attribute__( ( always_inline ) ) __STATIC_INLINE uint32_t __USAX(uint32_t op1, uint32_t op2)\r
+{\r
+ uint32_t result;\r
+\r
+ __ASM volatile ("usax %0, %1, %2" : "=r" (result) : "r" (op1), "r" (op2) );\r
+ return(result);\r
+}\r
+\r
+__attribute__( ( always_inline ) ) __STATIC_INLINE uint32_t __UQSAX(uint32_t op1, uint32_t op2)\r
+{\r
+ uint32_t result;\r
+\r
+ __ASM volatile ("uqsax %0, %1, %2" : "=r" (result) : "r" (op1), "r" (op2) );\r
+ return(result);\r
+}\r
+\r
+__attribute__( ( always_inline ) ) __STATIC_INLINE uint32_t __UHSAX(uint32_t op1, uint32_t op2)\r
+{\r
+ uint32_t result;\r
+\r
+ __ASM volatile ("uhsax %0, %1, %2" : "=r" (result) : "r" (op1), "r" (op2) );\r
+ return(result);\r
+}\r
+\r
+__attribute__( ( always_inline ) ) __STATIC_INLINE uint32_t __USAD8(uint32_t op1, uint32_t op2)\r
+{\r
+ uint32_t result;\r
+\r
+ __ASM volatile ("usad8 %0, %1, %2" : "=r" (result) : "r" (op1), "r" (op2) );\r
+ return(result);\r
+}\r
+\r
+__attribute__( ( always_inline ) ) __STATIC_INLINE uint32_t __USADA8(uint32_t op1, uint32_t op2, uint32_t op3)\r
+{\r
+ uint32_t result;\r
+\r
+ __ASM volatile ("usada8 %0, %1, %2, %3" : "=r" (result) : "r" (op1), "r" (op2), "r" (op3) );\r
+ return(result);\r
+}\r
+\r
+#define __SSAT16(ARG1,ARG2) \\r
+({ \\r
+ int32_t __RES, __ARG1 = (ARG1); \\r
+ __ASM ("ssat16 %0, %1, %2" : "=r" (__RES) : "I" (ARG2), "r" (__ARG1) ); \\r
+ __RES; \\r
+ })\r
+\r
+#define __USAT16(ARG1,ARG2) \\r
+({ \\r
+ uint32_t __RES, __ARG1 = (ARG1); \\r
+ __ASM ("usat16 %0, %1, %2" : "=r" (__RES) : "I" (ARG2), "r" (__ARG1) ); \\r
+ __RES; \\r
+ })\r
+\r
+__attribute__( ( always_inline ) ) __STATIC_INLINE uint32_t __UXTB16(uint32_t op1)\r
+{\r
+ uint32_t result;\r
+\r
+ __ASM volatile ("uxtb16 %0, %1" : "=r" (result) : "r" (op1));\r
+ return(result);\r
+}\r
+\r
+__attribute__( ( always_inline ) ) __STATIC_INLINE uint32_t __UXTAB16(uint32_t op1, uint32_t op2)\r
+{\r
+ uint32_t result;\r
+\r
+ __ASM volatile ("uxtab16 %0, %1, %2" : "=r" (result) : "r" (op1), "r" (op2) );\r
+ return(result);\r
+}\r
+\r
+__attribute__( ( always_inline ) ) __STATIC_INLINE uint32_t __SXTB16(uint32_t op1)\r
+{\r
+ uint32_t result;\r
+\r
+ __ASM volatile ("sxtb16 %0, %1" : "=r" (result) : "r" (op1));\r
+ return(result);\r
+}\r
+\r
+__attribute__( ( always_inline ) ) __STATIC_INLINE uint32_t __SXTAB16(uint32_t op1, uint32_t op2)\r
+{\r
+ uint32_t result;\r
+\r
+ __ASM volatile ("sxtab16 %0, %1, %2" : "=r" (result) : "r" (op1), "r" (op2) );\r
+ return(result);\r
+}\r
+\r
+__attribute__( ( always_inline ) ) __STATIC_INLINE uint32_t __SMUAD (uint32_t op1, uint32_t op2)\r
+{\r
+ uint32_t result;\r
+\r
+ __ASM volatile ("smuad %0, %1, %2" : "=r" (result) : "r" (op1), "r" (op2) );\r
+ return(result);\r
+}\r
+\r
+__attribute__( ( always_inline ) ) __STATIC_INLINE uint32_t __SMUADX (uint32_t op1, uint32_t op2)\r
+{\r
+ uint32_t result;\r
+\r
+ __ASM volatile ("smuadx %0, %1, %2" : "=r" (result) : "r" (op1), "r" (op2) );\r
+ return(result);\r
+}\r
+\r
+__attribute__( ( always_inline ) ) __STATIC_INLINE uint32_t __SMLAD (uint32_t op1, uint32_t op2, uint32_t op3)\r
+{\r
+ uint32_t result;\r
+\r
+ __ASM volatile ("smlad %0, %1, %2, %3" : "=r" (result) : "r" (op1), "r" (op2), "r" (op3) );\r
+ return(result);\r
+}\r
+\r
+__attribute__( ( always_inline ) ) __STATIC_INLINE uint32_t __SMLADX (uint32_t op1, uint32_t op2, uint32_t op3)\r
+{\r
+ uint32_t result;\r
+\r
+ __ASM volatile ("smladx %0, %1, %2, %3" : "=r" (result) : "r" (op1), "r" (op2), "r" (op3) );\r
+ return(result);\r
+}\r
+\r
+__attribute__( ( always_inline ) ) __STATIC_INLINE uint64_t __SMLALD (uint32_t op1, uint32_t op2, uint64_t acc)\r
+{\r
+ union llreg_u{\r
+ uint32_t w32[2];\r
+ uint64_t w64;\r
+ } llr;\r
+ llr.w64 = acc;\r
+\r
+#ifndef __ARMEB__ /* Little endian */\r
+ __ASM volatile ("smlald %0, %1, %2, %3" : "=r" (llr.w32[0]), "=r" (llr.w32[1]): "r" (op1), "r" (op2) , "0" (llr.w32[0]), "1" (llr.w32[1]) );\r
+#else /* Big endian */\r
+ __ASM volatile ("smlald %0, %1, %2, %3" : "=r" (llr.w32[1]), "=r" (llr.w32[0]): "r" (op1), "r" (op2) , "0" (llr.w32[1]), "1" (llr.w32[0]) );\r
+#endif\r
+\r
+ return(llr.w64);\r
+}\r
+\r
+__attribute__( ( always_inline ) ) __STATIC_INLINE uint64_t __SMLALDX (uint32_t op1, uint32_t op2, uint64_t acc)\r
+{\r
+ union llreg_u{\r
+ uint32_t w32[2];\r
+ uint64_t w64;\r
+ } llr;\r
+ llr.w64 = acc;\r
+\r
+#ifndef __ARMEB__ /* Little endian */\r
+ __ASM volatile ("smlaldx %0, %1, %2, %3" : "=r" (llr.w32[0]), "=r" (llr.w32[1]): "r" (op1), "r" (op2) , "0" (llr.w32[0]), "1" (llr.w32[1]) );\r
+#else /* Big endian */\r
+ __ASM volatile ("smlaldx %0, %1, %2, %3" : "=r" (llr.w32[1]), "=r" (llr.w32[0]): "r" (op1), "r" (op2) , "0" (llr.w32[1]), "1" (llr.w32[0]) );\r
+#endif\r
+\r
+ return(llr.w64);\r
+}\r
+\r
+__attribute__( ( always_inline ) ) __STATIC_INLINE uint32_t __SMUSD (uint32_t op1, uint32_t op2)\r
+{\r
+ uint32_t result;\r
+\r
+ __ASM volatile ("smusd %0, %1, %2" : "=r" (result) : "r" (op1), "r" (op2) );\r
+ return(result);\r
+}\r
+\r
+__attribute__( ( always_inline ) ) __STATIC_INLINE uint32_t __SMUSDX (uint32_t op1, uint32_t op2)\r
+{\r
+ uint32_t result;\r
+\r
+ __ASM volatile ("smusdx %0, %1, %2" : "=r" (result) : "r" (op1), "r" (op2) );\r
+ return(result);\r
+}\r
+\r
+__attribute__( ( always_inline ) ) __STATIC_INLINE uint32_t __SMLSD (uint32_t op1, uint32_t op2, uint32_t op3)\r
+{\r
+ uint32_t result;\r
+\r
+ __ASM volatile ("smlsd %0, %1, %2, %3" : "=r" (result) : "r" (op1), "r" (op2), "r" (op3) );\r
+ return(result);\r
+}\r
+\r
+__attribute__( ( always_inline ) ) __STATIC_INLINE uint32_t __SMLSDX (uint32_t op1, uint32_t op2, uint32_t op3)\r
+{\r
+ uint32_t result;\r
+\r
+ __ASM volatile ("smlsdx %0, %1, %2, %3" : "=r" (result) : "r" (op1), "r" (op2), "r" (op3) );\r
+ return(result);\r
+}\r
+\r
+__attribute__( ( always_inline ) ) __STATIC_INLINE uint64_t __SMLSLD (uint32_t op1, uint32_t op2, uint64_t acc)\r
+{\r
+ union llreg_u{\r
+ uint32_t w32[2];\r
+ uint64_t w64;\r
+ } llr;\r
+ llr.w64 = acc;\r
+\r
+#ifndef __ARMEB__ /* Little endian */\r
+ __ASM volatile ("smlsld %0, %1, %2, %3" : "=r" (llr.w32[0]), "=r" (llr.w32[1]): "r" (op1), "r" (op2) , "0" (llr.w32[0]), "1" (llr.w32[1]) );\r
+#else /* Big endian */\r
+ __ASM volatile ("smlsld %0, %1, %2, %3" : "=r" (llr.w32[1]), "=r" (llr.w32[0]): "r" (op1), "r" (op2) , "0" (llr.w32[1]), "1" (llr.w32[0]) );\r
+#endif\r
+\r
+ return(llr.w64);\r
+}\r
+\r
+__attribute__( ( always_inline ) ) __STATIC_INLINE uint64_t __SMLSLDX (uint32_t op1, uint32_t op2, uint64_t acc)\r
+{\r
+ union llreg_u{\r
+ uint32_t w32[2];\r
+ uint64_t w64;\r
+ } llr;\r
+ llr.w64 = acc;\r
+\r
+#ifndef __ARMEB__ /* Little endian */\r
+ __ASM volatile ("smlsldx %0, %1, %2, %3" : "=r" (llr.w32[0]), "=r" (llr.w32[1]): "r" (op1), "r" (op2) , "0" (llr.w32[0]), "1" (llr.w32[1]) );\r
+#else /* Big endian */\r
+ __ASM volatile ("smlsldx %0, %1, %2, %3" : "=r" (llr.w32[1]), "=r" (llr.w32[0]): "r" (op1), "r" (op2) , "0" (llr.w32[1]), "1" (llr.w32[0]) );\r
+#endif\r
+\r
+ return(llr.w64);\r
+}\r
+\r
+__attribute__( ( always_inline ) ) __STATIC_INLINE uint32_t __SEL (uint32_t op1, uint32_t op2)\r
+{\r
+ uint32_t result;\r
+\r
+ __ASM volatile ("sel %0, %1, %2" : "=r" (result) : "r" (op1), "r" (op2) );\r
+ return(result);\r
+}\r
+\r
+__attribute__( ( always_inline ) ) __STATIC_INLINE int32_t __QADD( int32_t op1, int32_t op2)\r
+{\r
+ int32_t result;\r
+\r
+ __ASM volatile ("qadd %0, %1, %2" : "=r" (result) : "r" (op1), "r" (op2) );\r
+ return(result);\r
+}\r
+\r
+__attribute__( ( always_inline ) ) __STATIC_INLINE int32_t __QSUB( int32_t op1, int32_t op2)\r
+{\r
+ int32_t result;\r
+\r
+ __ASM volatile ("qsub %0, %1, %2" : "=r" (result) : "r" (op1), "r" (op2) );\r
+ return(result);\r
+}\r
+\r
+#define __PKHBT(ARG1,ARG2,ARG3) \\r
+({ \\r
+ uint32_t __RES, __ARG1 = (ARG1), __ARG2 = (ARG2); \\r
+ __ASM ("pkhbt %0, %1, %2, lsl %3" : "=r" (__RES) : "r" (__ARG1), "r" (__ARG2), "I" (ARG3) ); \\r
+ __RES; \\r
+ })\r
+\r
+#define __PKHTB(ARG1,ARG2,ARG3) \\r
+({ \\r
+ uint32_t __RES, __ARG1 = (ARG1), __ARG2 = (ARG2); \\r
+ if (ARG3 == 0) \\r
+ __ASM ("pkhtb %0, %1, %2" : "=r" (__RES) : "r" (__ARG1), "r" (__ARG2) ); \\r
+ else \\r
+ __ASM ("pkhtb %0, %1, %2, asr %3" : "=r" (__RES) : "r" (__ARG1), "r" (__ARG2), "I" (ARG3) ); \\r
+ __RES; \\r
+ })\r
+\r
+__attribute__( ( always_inline ) ) __STATIC_INLINE uint32_t __SMMLA (int32_t op1, int32_t op2, int32_t op3)\r
+{\r
+ int32_t result;\r
+\r
+ __ASM volatile ("smmla %0, %1, %2, %3" : "=r" (result): "r" (op1), "r" (op2), "r" (op3) );\r
+ return(result);\r
+}\r
+\r
+#endif /* (__CORTEX_M >= 0x04) */\r
+/*@} end of group CMSIS_SIMD_intrinsics */\r
+\r
+\r
+#if defined ( __GNUC__ )\r
+#pragma GCC diagnostic pop\r
+#endif\r
+\r
+#endif /* __CMSIS_GCC_H */\r
--- /dev/null
+/**************************************************************************//**\r
+ * @file core_cm4.h\r
+ * @brief CMSIS Cortex-M4 Core Peripheral Access Layer Header File\r
+ * @version V4.30\r
+ * @date 20. October 2015\r
+ ******************************************************************************/\r
+/* Copyright (c) 2009 - 2015 ARM LIMITED\r
+\r
+ All rights reserved.\r
+ Redistribution and use in source and binary forms, with or without\r
+ modification, are permitted provided that the following conditions are met:\r
+ - Redistributions of source code must retain the above copyright\r
+ notice, this list of conditions and the following disclaimer.\r
+ - Redistributions in binary form must reproduce the above copyright\r
+ notice, this list of conditions and the following disclaimer in the\r
+ documentation and/or other materials provided with the distribution.\r
+ - Neither the name of ARM nor the names of its contributors may be used\r
+ to endorse or promote products derived from this software without\r
+ specific prior written permission.\r
+ *\r
+ THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"\r
+ AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE\r
+ IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE\r
+ ARE DISCLAIMED. IN NO EVENT SHALL COPYRIGHT HOLDERS AND CONTRIBUTORS BE\r
+ LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR\r
+ CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF\r
+ SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS\r
+ INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN\r
+ CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)\r
+ ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE\r
+ POSSIBILITY OF SUCH DAMAGE.\r
+ ---------------------------------------------------------------------------*/\r
+\r
+\r
+#if defined ( __ICCARM__ )\r
+ #pragma system_include /* treat file as system include file for MISRA check */\r
+#elif defined(__ARMCC_VERSION) && (__ARMCC_VERSION >= 6010050)\r
+ #pragma clang system_header /* treat file as system include file */\r
+#endif\r
+\r
+#ifndef __CORE_CM4_H_GENERIC\r
+#define __CORE_CM4_H_GENERIC\r
+\r
+#include <stdint.h>\r
+\r
+#ifdef __cplusplus\r
+ extern "C" {\r
+#endif\r
+\r
+/**\r
+ \page CMSIS_MISRA_Exceptions MISRA-C:2004 Compliance Exceptions\r
+ CMSIS violates the following MISRA-C:2004 rules:\r
+\r
+ \li Required Rule 8.5, object/function definition in header file.<br>\r
+ Function definitions in header files are used to allow 'inlining'.\r
+\r
+ \li Required Rule 18.4, declaration of union type or object of union type: '{...}'.<br>\r
+ Unions are used for effective representation of core registers.\r
+\r
+ \li Advisory Rule 19.7, Function-like macro defined.<br>\r
+ Function-like macros are used to allow more efficient code.\r
+ */\r
+\r
+\r
+/*******************************************************************************\r
+ * CMSIS definitions\r
+ ******************************************************************************/\r
+/**\r
+ \ingroup Cortex_M4\r
+ @{\r
+ */\r
+\r
+/* CMSIS CM4 definitions */\r
+#define __CM4_CMSIS_VERSION_MAIN (0x04U) /*!< [31:16] CMSIS HAL main version */\r
+#define __CM4_CMSIS_VERSION_SUB (0x1EU) /*!< [15:0] CMSIS HAL sub version */\r
+#define __CM4_CMSIS_VERSION ((__CM4_CMSIS_VERSION_MAIN << 16U) | \\r
+ __CM4_CMSIS_VERSION_SUB ) /*!< CMSIS HAL version number */\r
+\r
+#define __CORTEX_M (0x04U) /*!< Cortex-M Core */\r
+\r
+\r
+#if defined ( __CC_ARM )\r
+ #define __ASM __asm /*!< asm keyword for ARM Compiler */\r
+ #define __INLINE __inline /*!< inline keyword for ARM Compiler */\r
+ #define __STATIC_INLINE static __inline\r
+\r
+#elif defined(__ARMCC_VERSION) && (__ARMCC_VERSION >= 6010050)\r
+ #define __ASM __asm /*!< asm keyword for ARM Compiler */\r
+ #define __INLINE __inline /*!< inline keyword for ARM Compiler */\r
+ #define __STATIC_INLINE static __inline\r
+\r
+#elif defined ( __GNUC__ )\r
+ #define __ASM __asm /*!< asm keyword for GNU Compiler */\r
+ #define __INLINE inline /*!< inline keyword for GNU Compiler */\r
+ #define __STATIC_INLINE static inline\r
+\r
+#elif defined ( __ICCARM__ )\r
+ #define __ASM __asm /*!< asm keyword for IAR Compiler */\r
+ #define __INLINE inline /*!< inline keyword for IAR Compiler. Only available in High optimization mode! */\r
+ #define __STATIC_INLINE static inline\r
+\r
+#elif defined ( __TMS470__ )\r
+ #define __ASM __asm /*!< asm keyword for TI CCS Compiler */\r
+ #define __STATIC_INLINE static inline\r
+\r
+#elif defined ( __TASKING__ )\r
+ #define __ASM __asm /*!< asm keyword for TASKING Compiler */\r
+ #define __INLINE inline /*!< inline keyword for TASKING Compiler */\r
+ #define __STATIC_INLINE static inline\r
+\r
+#elif defined ( __CSMC__ )\r
+ #define __packed\r
+ #define __ASM _asm /*!< asm keyword for COSMIC Compiler */\r
+ #define __INLINE inline /*!< inline keyword for COSMIC Compiler. Use -pc99 on compile line */\r
+ #define __STATIC_INLINE static inline\r
+\r
+#else\r
+ #error Unknown compiler\r
+#endif\r
+\r
+/** __FPU_USED indicates whether an FPU is used or not.\r
+ For this, __FPU_PRESENT has to be checked prior to making use of FPU specific registers and functions.\r
+*/\r
+#if defined ( __CC_ARM )\r
+ #if defined __TARGET_FPU_VFP\r
+ #if (__FPU_PRESENT == 1U)\r
+ #define __FPU_USED 1U\r
+ #else\r
+ #error "Compiler generates FPU instructions for a device without an FPU (check __FPU_PRESENT)"\r
+ #define __FPU_USED 0U\r
+ #endif\r
+ #else\r
+ #define __FPU_USED 0U\r
+ #endif\r
+\r
+#elif defined(__ARMCC_VERSION) && (__ARMCC_VERSION >= 6010050)\r
+ #if defined __ARM_PCS_VFP\r
+ #if (__FPU_PRESENT == 1)\r
+ #define __FPU_USED 1U\r
+ #else\r
+ #warning "Compiler generates FPU instructions for a device without an FPU (check __FPU_PRESENT)"\r
+ #define __FPU_USED 0U\r
+ #endif\r
+ #else\r
+ #define __FPU_USED 0U\r
+ #endif\r
+\r
+#elif defined ( __GNUC__ )\r
+ #if defined (__VFP_FP__) && !defined(__SOFTFP__)\r
+ #if (__FPU_PRESENT == 1U)\r
+ #define __FPU_USED 1U\r
+ #else\r
+ #error "Compiler generates FPU instructions for a device without an FPU (check __FPU_PRESENT)"\r
+ #define __FPU_USED 0U\r
+ #endif\r
+ #else\r
+ #define __FPU_USED 0U\r
+ #endif\r
+\r
+#elif defined ( __ICCARM__ )\r
+ #if defined __ARMVFP__\r
+ #if (__FPU_PRESENT == 1U)\r
+ #define __FPU_USED 1U\r
+ #else\r
+ #error "Compiler generates FPU instructions for a device without an FPU (check __FPU_PRESENT)"\r
+ #define __FPU_USED 0U\r
+ #endif\r
+ #else\r
+ #define __FPU_USED 0U\r
+ #endif\r
+\r
+#elif defined ( __TMS470__ )\r
+ #if defined __TI_VFP_SUPPORT__\r
+ #if (__FPU_PRESENT == 1U)\r
+ #define __FPU_USED 1U\r
+ #else\r
+ #error "Compiler generates FPU instructions for a device without an FPU (check __FPU_PRESENT)"\r
+ #define __FPU_USED 0U\r
+ #endif\r
+ #else\r
+ #define __FPU_USED 0U\r
+ #endif\r
+\r
+#elif defined ( __TASKING__ )\r
+ #if defined __FPU_VFP__\r
+ #if (__FPU_PRESENT == 1U)\r
+ #define __FPU_USED 1U\r
+ #else\r
+ #error "Compiler generates FPU instructions for a device without an FPU (check __FPU_PRESENT)"\r
+ #define __FPU_USED 0U\r
+ #endif\r
+ #else\r
+ #define __FPU_USED 0U\r
+ #endif\r
+\r
+#elif defined ( __CSMC__ )\r
+ #if ( __CSMC__ & 0x400U)\r
+ #if (__FPU_PRESENT == 1U)\r
+ #define __FPU_USED 1U\r
+ #else\r
+ #error "Compiler generates FPU instructions for a device without an FPU (check __FPU_PRESENT)"\r
+ #define __FPU_USED 0U\r
+ #endif\r
+ #else\r
+ #define __FPU_USED 0U\r
+ #endif\r
+\r
+#endif\r
+\r
+#include "core_cmInstr.h" /* Core Instruction Access */\r
+#include "core_cmFunc.h" /* Core Function Access */\r
+#include "core_cmSimd.h" /* Compiler specific SIMD Intrinsics */\r
+\r
+#ifdef __cplusplus\r
+}\r
+#endif\r
+\r
+#endif /* __CORE_CM4_H_GENERIC */\r
+\r
+#ifndef __CMSIS_GENERIC\r
+\r
+#ifndef __CORE_CM4_H_DEPENDANT\r
+#define __CORE_CM4_H_DEPENDANT\r
+\r
+#ifdef __cplusplus\r
+ extern "C" {\r
+#endif\r
+\r
+/* check device defines and use defaults */\r
+#if defined __CHECK_DEVICE_DEFINES\r
+ #ifndef __CM4_REV\r
+ #define __CM4_REV 0x0000U\r
+ #warning "__CM4_REV not defined in device header file; using default!"\r
+ #endif\r
+\r
+ #ifndef __FPU_PRESENT\r
+ #define __FPU_PRESENT 0U\r
+ #warning "__FPU_PRESENT not defined in device header file; using default!"\r
+ #endif\r
+\r
+ #ifndef __MPU_PRESENT\r
+ #define __MPU_PRESENT 0U\r
+ #warning "__MPU_PRESENT not defined in device header file; using default!"\r
+ #endif\r
+\r
+ #ifndef __NVIC_PRIO_BITS\r
+ #define __NVIC_PRIO_BITS 4U\r
+ #warning "__NVIC_PRIO_BITS not defined in device header file; using default!"\r
+ #endif\r
+\r
+ #ifndef __Vendor_SysTickConfig\r
+ #define __Vendor_SysTickConfig 0U\r
+ #warning "__Vendor_SysTickConfig not defined in device header file; using default!"\r
+ #endif\r
+#endif\r
+\r
+/* IO definitions (access restrictions to peripheral registers) */\r
+/**\r
+ \defgroup CMSIS_glob_defs CMSIS Global Defines\r
+\r
+ <strong>IO Type Qualifiers</strong> are used\r
+ \li to specify the access to peripheral variables.\r
+ \li for automatic generation of peripheral register debug information.\r
+*/\r
+#ifdef __cplusplus\r
+ #define __I volatile /*!< Defines 'read only' permissions */\r
+#else\r
+ #define __I volatile const /*!< Defines 'read only' permissions */\r
+#endif\r
+#define __O volatile /*!< Defines 'write only' permissions */\r
+#define __IO volatile /*!< Defines 'read / write' permissions */\r
+\r
+/* following defines should be used for structure members */\r
+#define __IM volatile const /*! Defines 'read only' structure member permissions */\r
+#define __OM volatile /*! Defines 'write only' structure member permissions */\r
+#define __IOM volatile /*! Defines 'read / write' structure member permissions */\r
+\r
+/*@} end of group Cortex_M4 */\r
+\r
+\r
+\r
+/*******************************************************************************\r
+ * Register Abstraction\r
+ Core Registers contain:\r
+ - Core Register\r
+ - Core NVIC Register\r
+ - Core SCB Register\r
+ - Core SysTick Register\r
+ - Core Debug Register\r
+ - Core MPU Register\r
+ - Core FPU Register\r
+ ******************************************************************************/\r
+/**\r
+ \defgroup CMSIS_core_register Defines and Type Definitions\r
+ \brief Type definitions and defines for Cortex-M processor based devices.\r
+*/\r
+\r
+/**\r
+ \ingroup CMSIS_core_register\r
+ \defgroup CMSIS_CORE Status and Control Registers\r
+ \brief Core Register type definitions.\r
+ @{\r
+ */\r
+\r
+/**\r
+ \brief Union type to access the Application Program Status Register (APSR).\r
+ */\r
+typedef union\r
+{\r
+ struct\r
+ {\r
+ uint32_t _reserved0:16; /*!< bit: 0..15 Reserved */\r
+ uint32_t GE:4; /*!< bit: 16..19 Greater than or Equal flags */\r
+ uint32_t _reserved1:7; /*!< bit: 20..26 Reserved */\r
+ uint32_t Q:1; /*!< bit: 27 Saturation condition flag */\r
+ uint32_t V:1; /*!< bit: 28 Overflow condition code flag */\r
+ uint32_t C:1; /*!< bit: 29 Carry condition code flag */\r
+ uint32_t Z:1; /*!< bit: 30 Zero condition code flag */\r
+ uint32_t N:1; /*!< bit: 31 Negative condition code flag */\r
+ } b; /*!< Structure used for bit access */\r
+ uint32_t w; /*!< Type used for word access */\r
+} APSR_Type;\r
+\r
+/* APSR Register Definitions */\r
+#define APSR_N_Pos 31U /*!< APSR: N Position */\r
+#define APSR_N_Msk (1UL << APSR_N_Pos) /*!< APSR: N Mask */\r
+\r
+#define APSR_Z_Pos 30U /*!< APSR: Z Position */\r
+#define APSR_Z_Msk (1UL << APSR_Z_Pos) /*!< APSR: Z Mask */\r
+\r
+#define APSR_C_Pos 29U /*!< APSR: C Position */\r
+#define APSR_C_Msk (1UL << APSR_C_Pos) /*!< APSR: C Mask */\r
+\r
+#define APSR_V_Pos 28U /*!< APSR: V Position */\r
+#define APSR_V_Msk (1UL << APSR_V_Pos) /*!< APSR: V Mask */\r
+\r
+#define APSR_Q_Pos 27U /*!< APSR: Q Position */\r
+#define APSR_Q_Msk (1UL << APSR_Q_Pos) /*!< APSR: Q Mask */\r
+\r
+#define APSR_GE_Pos 16U /*!< APSR: GE Position */\r
+#define APSR_GE_Msk (0xFUL << APSR_GE_Pos) /*!< APSR: GE Mask */\r
+\r
+\r
+/**\r
+ \brief Union type to access the Interrupt Program Status Register (IPSR).\r
+ */\r
+typedef union\r
+{\r
+ struct\r
+ {\r
+ uint32_t ISR:9; /*!< bit: 0.. 8 Exception number */\r
+ uint32_t _reserved0:23; /*!< bit: 9..31 Reserved */\r
+ } b; /*!< Structure used for bit access */\r
+ uint32_t w; /*!< Type used for word access */\r
+} IPSR_Type;\r
+\r
+/* IPSR Register Definitions */\r
+#define IPSR_ISR_Pos 0U /*!< IPSR: ISR Position */\r
+#define IPSR_ISR_Msk (0x1FFUL /*<< IPSR_ISR_Pos*/) /*!< IPSR: ISR Mask */\r
+\r
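Since IPSR holds the active exception number, a zero ISR field means Thread mode; a small illustrative check using __get_IPSR() from the core function header:

static inline int in_handler_mode(void)
{
    /* non-zero exception number: the CPU is currently executing a handler */
    return (__get_IPSR() & IPSR_ISR_Msk) != 0U;
}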
+\r
+/**\r
+ \brief Union type to access the Special-Purpose Program Status Registers (xPSR).\r
+ */\r
+typedef union\r
+{\r
+ struct\r
+ {\r
+ uint32_t ISR:9; /*!< bit: 0.. 8 Exception number */\r
+ uint32_t _reserved0:7; /*!< bit: 9..15 Reserved */\r
+ uint32_t GE:4; /*!< bit: 16..19 Greater than or Equal flags */\r
+ uint32_t _reserved1:4; /*!< bit: 20..23 Reserved */\r
+ uint32_t T:1; /*!< bit: 24 Thumb bit (read 0) */\r
+ uint32_t IT:2; /*!< bit: 25..26 saved IT state (read 0) */\r
+ uint32_t Q:1; /*!< bit: 27 Saturation condition flag */\r
+ uint32_t V:1; /*!< bit: 28 Overflow condition code flag */\r
+ uint32_t C:1; /*!< bit: 29 Carry condition code flag */\r
+ uint32_t Z:1; /*!< bit: 30 Zero condition code flag */\r
+ uint32_t N:1; /*!< bit: 31 Negative condition code flag */\r
+ } b; /*!< Structure used for bit access */\r
+ uint32_t w; /*!< Type used for word access */\r
+} xPSR_Type;\r
+\r
+/* xPSR Register Definitions */\r
+#define xPSR_N_Pos 31U /*!< xPSR: N Position */\r
+#define xPSR_N_Msk (1UL << xPSR_N_Pos) /*!< xPSR: N Mask */\r
+\r
+#define xPSR_Z_Pos 30U /*!< xPSR: Z Position */\r
+#define xPSR_Z_Msk (1UL << xPSR_Z_Pos) /*!< xPSR: Z Mask */\r
+\r
+#define xPSR_C_Pos 29U /*!< xPSR: C Position */\r
+#define xPSR_C_Msk (1UL << xPSR_C_Pos) /*!< xPSR: C Mask */\r
+\r
+#define xPSR_V_Pos 28U /*!< xPSR: V Position */\r
+#define xPSR_V_Msk (1UL << xPSR_V_Pos) /*!< xPSR: V Mask */\r
+\r
+#define xPSR_Q_Pos 27U /*!< xPSR: Q Position */\r
+#define xPSR_Q_Msk (1UL << xPSR_Q_Pos) /*!< xPSR: Q Mask */\r
+\r
+#define xPSR_IT_Pos 25U /*!< xPSR: IT Position */\r
+#define xPSR_IT_Msk (3UL << xPSR_IT_Pos) /*!< xPSR: IT Mask */\r
+\r
+#define xPSR_T_Pos 24U /*!< xPSR: T Position */\r
+#define xPSR_T_Msk (1UL << xPSR_T_Pos) /*!< xPSR: T Mask */\r
+\r
+#define xPSR_GE_Pos 16U /*!< xPSR: GE Position */\r
+#define xPSR_GE_Msk (0xFUL << xPSR_GE_Pos) /*!< xPSR: GE Mask */\r
+\r
+#define xPSR_ISR_Pos 0U /*!< xPSR: ISR Position */\r
+#define xPSR_ISR_Msk (0x1FFUL /*<< xPSR_ISR_Pos*/) /*!< xPSR: ISR Mask */\r
+\r
+\r
+/**\r
+ \brief Union type to access the Control Registers (CONTROL).\r
+ */\r
+typedef union\r
+{\r
+ struct\r
+ {\r
+ uint32_t nPRIV:1; /*!< bit: 0 Execution privilege in Thread mode */\r
+ uint32_t SPSEL:1; /*!< bit: 1 Stack to be used */\r
+ uint32_t FPCA:1; /*!< bit: 2 FP extension active flag */\r
+ uint32_t _reserved0:29; /*!< bit: 3..31 Reserved */\r
+ } b; /*!< Structure used for bit access */\r
+ uint32_t w; /*!< Type used for word access */\r
+} CONTROL_Type;\r
+\r
+/* CONTROL Register Definitions */\r
+#define CONTROL_FPCA_Pos 2U /*!< CONTROL: FPCA Position */\r
+#define CONTROL_FPCA_Msk (1UL << CONTROL_FPCA_Pos) /*!< CONTROL: FPCA Mask */\r
+\r
+#define CONTROL_SPSEL_Pos 1U /*!< CONTROL: SPSEL Position */\r
+#define CONTROL_SPSEL_Msk (1UL << CONTROL_SPSEL_Pos) /*!< CONTROL: SPSEL Mask */\r
+\r
+#define CONTROL_nPRIV_Pos 0U /*!< CONTROL: nPRIV Position */\r
+#define CONTROL_nPRIV_Msk (1UL /*<< CONTROL_nPRIV_Pos*/) /*!< CONTROL: nPRIV Mask */\r
+\r
+/*@} end of group CMSIS_CORE */\r
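+\r
+/* Usage sketch: the _Pos/_Msk pairs above are meant to be combined with the\r
+   register-access intrinsics from cmsis_gcc.h (__get_IPSR, __get_CONTROL,\r
+   __set_CONTROL, __ISB). For example, to decode the active exception number\r
+   and to move Thread mode onto the process stack:\r
+\r
+       uint32_t active = (__get_IPSR() & IPSR_ISR_Msk) >> IPSR_ISR_Pos;  // 0 = Thread mode\r
+\r
+       __set_CONTROL(__get_CONTROL() | CONTROL_SPSEL_Msk);               // select PSP in Thread mode\r
+       __ISB();                                                          // re-fetch after CONTROL change\r
+*/\r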
+\r
+\r
+/**\r
+ \ingroup CMSIS_core_register\r
+ \defgroup CMSIS_NVIC Nested Vectored Interrupt Controller (NVIC)\r
+ \brief Type definitions for the NVIC Registers\r
+ @{\r
+ */\r
+\r
+/**\r
+ \brief Structure type to access the Nested Vectored Interrupt Controller (NVIC).\r
+ */\r
+typedef struct\r
+{\r
+ __IOM uint32_t ISER[8U]; /*!< Offset: 0x000 (R/W) Interrupt Set Enable Register */\r
+ uint32_t RESERVED0[24U];\r
+ __IOM uint32_t ICER[8U]; /*!< Offset: 0x080 (R/W) Interrupt Clear Enable Register */\r
+ uint32_t RESERVED1[24U];\r
+ __IOM uint32_t ISPR[8U]; /*!< Offset: 0x100 (R/W) Interrupt Set Pending Register */\r
+ uint32_t RESERVED2[24U];\r
+ __IOM uint32_t ICPR[8U]; /*!< Offset: 0x180 (R/W) Interrupt Clear Pending Register */\r
+ uint32_t RESERVED3[24U];\r
+ __IOM uint32_t IABR[8U]; /*!< Offset: 0x200 (R/W) Interrupt Active bit Register */\r
+ uint32_t RESERVED4[56U];\r
+ __IOM uint8_t IP[240U]; /*!< Offset: 0x300 (R/W) Interrupt Priority Register (8Bit wide) */\r
+ uint32_t RESERVED5[644U];\r
+ __OM uint32_t STIR; /*!< Offset: 0xE00 ( /W) Software Trigger Interrupt Register */\r
+} NVIC_Type;\r
+\r
+/* Software Triggered Interrupt Register Definitions */\r
+#define NVIC_STIR_INTID_Pos 0U /*!< STIR: INTLINESNUM Position */\r
+#define NVIC_STIR_INTID_Msk (0x1FFUL /*<< NVIC_STIR_INTID_Pos*/) /*!< STIR: INTLINESNUM Mask */\r
+\r
+/*@} end of group CMSIS_NVIC */\r
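+\r
+/* Usage sketch: ISER/ICER/ISPR/ICPR hold one bit per interrupt (32 interrupts\r
+   per word) and IP holds one 8-bit priority entry per interrupt; the CMSIS\r
+   helpers such as NVIC_EnableIRQ() and NVIC_SetPriority() index them exactly\r
+   this way. For a device interrupt number irqn >= 0 and a priority value prio\r
+   (shifted into the implemented __NVIC_PRIO_BITS bits from the device header):\r
+\r
+       NVIC->ISER[irqn >> 5UL] = 1UL << (irqn & 0x1FUL);                                // enable\r
+       NVIC->IP[irqn]          = (uint8_t)((prio << (8U - __NVIC_PRIO_BITS)) & 0xFFUL); // set priority\r
+       NVIC->STIR              = irqn & NVIC_STIR_INTID_Msk;                            // pend it from software\r
+*/\r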
+\r
+\r
+/**\r
+ \ingroup CMSIS_core_register\r
+ \defgroup CMSIS_SCB System Control Block (SCB)\r
+ \brief Type definitions for the System Control Block Registers\r
+ @{\r
+ */\r
+\r
+/**\r
+ \brief Structure type to access the System Control Block (SCB).\r
+ */\r
+typedef struct\r
+{\r
+ __IM uint32_t CPUID; /*!< Offset: 0x000 (R/ ) CPUID Base Register */\r
+ __IOM uint32_t ICSR; /*!< Offset: 0x004 (R/W) Interrupt Control and State Register */\r
+ __IOM uint32_t VTOR; /*!< Offset: 0x008 (R/W) Vector Table Offset Register */\r
+ __IOM uint32_t AIRCR; /*!< Offset: 0x00C (R/W) Application Interrupt and Reset Control Register */\r
+ __IOM uint32_t SCR; /*!< Offset: 0x010 (R/W) System Control Register */\r
+ __IOM uint32_t CCR; /*!< Offset: 0x014 (R/W) Configuration Control Register */\r
+ __IOM uint8_t SHP[12U]; /*!< Offset: 0x018 (R/W) System Handlers Priority Registers (4-7, 8-11, 12-15) */\r
+ __IOM uint32_t SHCSR; /*!< Offset: 0x024 (R/W) System Handler Control and State Register */\r
+ __IOM uint32_t CFSR; /*!< Offset: 0x028 (R/W) Configurable Fault Status Register */\r
+ __IOM uint32_t HFSR; /*!< Offset: 0x02C (R/W) HardFault Status Register */\r
+ __IOM uint32_t DFSR; /*!< Offset: 0x030 (R/W) Debug Fault Status Register */\r
+ __IOM uint32_t MMFAR; /*!< Offset: 0x034 (R/W) MemManage Fault Address Register */\r
+ __IOM uint32_t BFAR; /*!< Offset: 0x038 (R/W) BusFault Address Register */\r
+ __IOM uint32_t AFSR; /*!< Offset: 0x03C (R/W) Auxiliary Fault Status Register */\r
+ __IM uint32_t PFR[2U]; /*!< Offset: 0x040 (R/ ) Processor Feature Register */\r
+ __IM uint32_t DFR; /*!< Offset: 0x048 (R/ ) Debug Feature Register */\r
+ __IM uint32_t ADR; /*!< Offset: 0x04C (R/ ) Auxiliary Feature Register */\r
+ __IM uint32_t MMFR[4U]; /*!< Offset: 0x050 (R/ ) Memory Model Feature Register */\r
+ __IM uint32_t ISAR[5U]; /*!< Offset: 0x060 (R/ ) Instruction Set Attributes Register */\r
+ uint32_t RESERVED0[5U];\r
+ __IOM uint32_t CPACR; /*!< Offset: 0x088 (R/W) Coprocessor Access Control Register */\r
+} SCB_Type;\r
+\r
+/* SCB CPUID Register Definitions */\r
+#define SCB_CPUID_IMPLEMENTER_Pos 24U /*!< SCB CPUID: IMPLEMENTER Position */\r
+#define SCB_CPUID_IMPLEMENTER_Msk (0xFFUL << SCB_CPUID_IMPLEMENTER_Pos) /*!< SCB CPUID: IMPLEMENTER Mask */\r
+\r
+#define SCB_CPUID_VARIANT_Pos 20U /*!< SCB CPUID: VARIANT Position */\r
+#define SCB_CPUID_VARIANT_Msk (0xFUL << SCB_CPUID_VARIANT_Pos) /*!< SCB CPUID: VARIANT Mask */\r
+\r
+#define SCB_CPUID_ARCHITECTURE_Pos 16U /*!< SCB CPUID: ARCHITECTURE Position */\r
+#define SCB_CPUID_ARCHITECTURE_Msk (0xFUL << SCB_CPUID_ARCHITECTURE_Pos) /*!< SCB CPUID: ARCHITECTURE Mask */\r
+\r
+#define SCB_CPUID_PARTNO_Pos 4U /*!< SCB CPUID: PARTNO Position */\r
+#define SCB_CPUID_PARTNO_Msk (0xFFFUL << SCB_CPUID_PARTNO_Pos) /*!< SCB CPUID: PARTNO Mask */\r
+\r
+#define SCB_CPUID_REVISION_Pos 0U /*!< SCB CPUID: REVISION Position */\r
+#define SCB_CPUID_REVISION_Msk (0xFUL /*<< SCB_CPUID_REVISION_Pos*/) /*!< SCB CPUID: REVISION Mask */\r
+\r
+/* SCB Interrupt Control State Register Definitions */\r
+#define SCB_ICSR_NMIPENDSET_Pos 31U /*!< SCB ICSR: NMIPENDSET Position */\r
+#define SCB_ICSR_NMIPENDSET_Msk (1UL << SCB_ICSR_NMIPENDSET_Pos) /*!< SCB ICSR: NMIPENDSET Mask */\r
+\r
+#define SCB_ICSR_PENDSVSET_Pos 28U /*!< SCB ICSR: PENDSVSET Position */\r
+#define SCB_ICSR_PENDSVSET_Msk (1UL << SCB_ICSR_PENDSVSET_Pos) /*!< SCB ICSR: PENDSVSET Mask */\r
+\r
+#define SCB_ICSR_PENDSVCLR_Pos 27U /*!< SCB ICSR: PENDSVCLR Position */\r
+#define SCB_ICSR_PENDSVCLR_Msk (1UL << SCB_ICSR_PENDSVCLR_Pos) /*!< SCB ICSR: PENDSVCLR Mask */\r
+\r
+#define SCB_ICSR_PENDSTSET_Pos 26U /*!< SCB ICSR: PENDSTSET Position */\r
+#define SCB_ICSR_PENDSTSET_Msk (1UL << SCB_ICSR_PENDSTSET_Pos) /*!< SCB ICSR: PENDSTSET Mask */\r
+\r
+#define SCB_ICSR_PENDSTCLR_Pos 25U /*!< SCB ICSR: PENDSTCLR Position */\r
+#define SCB_ICSR_PENDSTCLR_Msk (1UL << SCB_ICSR_PENDSTCLR_Pos) /*!< SCB ICSR: PENDSTCLR Mask */\r
+\r
+#define SCB_ICSR_ISRPREEMPT_Pos 23U /*!< SCB ICSR: ISRPREEMPT Position */\r
+#define SCB_ICSR_ISRPREEMPT_Msk (1UL << SCB_ICSR_ISRPREEMPT_Pos) /*!< SCB ICSR: ISRPREEMPT Mask */\r
+\r
+#define SCB_ICSR_ISRPENDING_Pos 22U /*!< SCB ICSR: ISRPENDING Position */\r
+#define SCB_ICSR_ISRPENDING_Msk (1UL << SCB_ICSR_ISRPENDING_Pos) /*!< SCB ICSR: ISRPENDING Mask */\r
+\r
+#define SCB_ICSR_VECTPENDING_Pos 12U /*!< SCB ICSR: VECTPENDING Position */\r
+#define SCB_ICSR_VECTPENDING_Msk (0x1FFUL << SCB_ICSR_VECTPENDING_Pos) /*!< SCB ICSR: VECTPENDING Mask */\r
+\r
+#define SCB_ICSR_RETTOBASE_Pos 11U /*!< SCB ICSR: RETTOBASE Position */\r
+#define SCB_ICSR_RETTOBASE_Msk (1UL << SCB_ICSR_RETTOBASE_Pos) /*!< SCB ICSR: RETTOBASE Mask */\r
+\r
+#define SCB_ICSR_VECTACTIVE_Pos 0U /*!< SCB ICSR: VECTACTIVE Position */\r
+#define SCB_ICSR_VECTACTIVE_Msk (0x1FFUL /*<< SCB_ICSR_VECTACTIVE_Pos*/) /*!< SCB ICSR: VECTACTIVE Mask */\r
+\r
+/* SCB Vector Table Offset Register Definitions */\r
+#define SCB_VTOR_TBLOFF_Pos 7U /*!< SCB VTOR: TBLOFF Position */\r
+#define SCB_VTOR_TBLOFF_Msk (0x1FFFFFFUL << SCB_VTOR_TBLOFF_Pos) /*!< SCB VTOR: TBLOFF Mask */\r
+\r
+/* SCB Application Interrupt and Reset Control Register Definitions */\r
+#define SCB_AIRCR_VECTKEY_Pos 16U /*!< SCB AIRCR: VECTKEY Position */\r
+#define SCB_AIRCR_VECTKEY_Msk (0xFFFFUL << SCB_AIRCR_VECTKEY_Pos) /*!< SCB AIRCR: VECTKEY Mask */\r
+\r
+#define SCB_AIRCR_VECTKEYSTAT_Pos 16U /*!< SCB AIRCR: VECTKEYSTAT Position */\r
+#define SCB_AIRCR_VECTKEYSTAT_Msk (0xFFFFUL << SCB_AIRCR_VECTKEYSTAT_Pos) /*!< SCB AIRCR: VECTKEYSTAT Mask */\r
+\r
+#define SCB_AIRCR_ENDIANESS_Pos 15U /*!< SCB AIRCR: ENDIANESS Position */\r
+#define SCB_AIRCR_ENDIANESS_Msk (1UL << SCB_AIRCR_ENDIANESS_Pos) /*!< SCB AIRCR: ENDIANESS Mask */\r
+\r
+#define SCB_AIRCR_PRIGROUP_Pos 8U /*!< SCB AIRCR: PRIGROUP Position */\r
+#define SCB_AIRCR_PRIGROUP_Msk (7UL << SCB_AIRCR_PRIGROUP_Pos) /*!< SCB AIRCR: PRIGROUP Mask */\r
+\r
+#define SCB_AIRCR_SYSRESETREQ_Pos 2U /*!< SCB AIRCR: SYSRESETREQ Position */\r
+#define SCB_AIRCR_SYSRESETREQ_Msk (1UL << SCB_AIRCR_SYSRESETREQ_Pos) /*!< SCB AIRCR: SYSRESETREQ Mask */\r
+\r
+#define SCB_AIRCR_VECTCLRACTIVE_Pos 1U /*!< SCB AIRCR: VECTCLRACTIVE Position */\r
+#define SCB_AIRCR_VECTCLRACTIVE_Msk (1UL << SCB_AIRCR_VECTCLRACTIVE_Pos) /*!< SCB AIRCR: VECTCLRACTIVE Mask */\r
+\r
+#define SCB_AIRCR_VECTRESET_Pos 0U /*!< SCB AIRCR: VECTRESET Position */\r
+#define SCB_AIRCR_VECTRESET_Msk (1UL /*<< SCB_AIRCR_VECTRESET_Pos*/) /*!< SCB AIRCR: VECTRESET Mask */\r
+\r
+/* SCB System Control Register Definitions */\r
+#define SCB_SCR_SEVONPEND_Pos 4U /*!< SCB SCR: SEVONPEND Position */\r
+#define SCB_SCR_SEVONPEND_Msk (1UL << SCB_SCR_SEVONPEND_Pos) /*!< SCB SCR: SEVONPEND Mask */\r
+\r
+#define SCB_SCR_SLEEPDEEP_Pos 2U /*!< SCB SCR: SLEEPDEEP Position */\r
+#define SCB_SCR_SLEEPDEEP_Msk (1UL << SCB_SCR_SLEEPDEEP_Pos) /*!< SCB SCR: SLEEPDEEP Mask */\r
+\r
+#define SCB_SCR_SLEEPONEXIT_Pos 1U /*!< SCB SCR: SLEEPONEXIT Position */\r
+#define SCB_SCR_SLEEPONEXIT_Msk (1UL << SCB_SCR_SLEEPONEXIT_Pos) /*!< SCB SCR: SLEEPONEXIT Mask */\r
+\r
+/* SCB Configuration Control Register Definitions */\r
+#define SCB_CCR_STKALIGN_Pos 9U /*!< SCB CCR: STKALIGN Position */\r
+#define SCB_CCR_STKALIGN_Msk (1UL << SCB_CCR_STKALIGN_Pos) /*!< SCB CCR: STKALIGN Mask */\r
+\r
+#define SCB_CCR_BFHFNMIGN_Pos 8U /*!< SCB CCR: BFHFNMIGN Position */\r
+#define SCB_CCR_BFHFNMIGN_Msk (1UL << SCB_CCR_BFHFNMIGN_Pos) /*!< SCB CCR: BFHFNMIGN Mask */\r
+\r
+#define SCB_CCR_DIV_0_TRP_Pos 4U /*!< SCB CCR: DIV_0_TRP Position */\r
+#define SCB_CCR_DIV_0_TRP_Msk (1UL << SCB_CCR_DIV_0_TRP_Pos) /*!< SCB CCR: DIV_0_TRP Mask */\r
+\r
+#define SCB_CCR_UNALIGN_TRP_Pos 3U /*!< SCB CCR: UNALIGN_TRP Position */\r
+#define SCB_CCR_UNALIGN_TRP_Msk (1UL << SCB_CCR_UNALIGN_TRP_Pos) /*!< SCB CCR: UNALIGN_TRP Mask */\r
+\r
+#define SCB_CCR_USERSETMPEND_Pos 1U /*!< SCB CCR: USERSETMPEND Position */\r
+#define SCB_CCR_USERSETMPEND_Msk (1UL << SCB_CCR_USERSETMPEND_Pos) /*!< SCB CCR: USERSETMPEND Mask */\r
+\r
+#define SCB_CCR_NONBASETHRDENA_Pos 0U /*!< SCB CCR: NONBASETHRDENA Position */\r
+#define SCB_CCR_NONBASETHRDENA_Msk (1UL /*<< SCB_CCR_NONBASETHRDENA_Pos*/) /*!< SCB CCR: NONBASETHRDENA Mask */\r
+\r
+/* SCB System Handler Control and State Register Definitions */\r
+#define SCB_SHCSR_USGFAULTENA_Pos 18U /*!< SCB SHCSR: USGFAULTENA Position */\r
+#define SCB_SHCSR_USGFAULTENA_Msk (1UL << SCB_SHCSR_USGFAULTENA_Pos) /*!< SCB SHCSR: USGFAULTENA Mask */\r
+\r
+#define SCB_SHCSR_BUSFAULTENA_Pos 17U /*!< SCB SHCSR: BUSFAULTENA Position */\r
+#define SCB_SHCSR_BUSFAULTENA_Msk (1UL << SCB_SHCSR_BUSFAULTENA_Pos) /*!< SCB SHCSR: BUSFAULTENA Mask */\r
+\r
+#define SCB_SHCSR_MEMFAULTENA_Pos 16U /*!< SCB SHCSR: MEMFAULTENA Position */\r
+#define SCB_SHCSR_MEMFAULTENA_Msk (1UL << SCB_SHCSR_MEMFAULTENA_Pos) /*!< SCB SHCSR: MEMFAULTENA Mask */\r
+\r
+#define SCB_SHCSR_SVCALLPENDED_Pos 15U /*!< SCB SHCSR: SVCALLPENDED Position */\r
+#define SCB_SHCSR_SVCALLPENDED_Msk (1UL << SCB_SHCSR_SVCALLPENDED_Pos) /*!< SCB SHCSR: SVCALLPENDED Mask */\r
+\r
+#define SCB_SHCSR_BUSFAULTPENDED_Pos 14U /*!< SCB SHCSR: BUSFAULTPENDED Position */\r
+#define SCB_SHCSR_BUSFAULTPENDED_Msk (1UL << SCB_SHCSR_BUSFAULTPENDED_Pos) /*!< SCB SHCSR: BUSFAULTPENDED Mask */\r
+\r
+#define SCB_SHCSR_MEMFAULTPENDED_Pos 13U /*!< SCB SHCSR: MEMFAULTPENDED Position */\r
+#define SCB_SHCSR_MEMFAULTPENDED_Msk (1UL << SCB_SHCSR_MEMFAULTPENDED_Pos) /*!< SCB SHCSR: MEMFAULTPENDED Mask */\r
+\r
+#define SCB_SHCSR_USGFAULTPENDED_Pos 12U /*!< SCB SHCSR: USGFAULTPENDED Position */\r
+#define SCB_SHCSR_USGFAULTPENDED_Msk (1UL << SCB_SHCSR_USGFAULTPENDED_Pos) /*!< SCB SHCSR: USGFAULTPENDED Mask */\r
+\r
+#define SCB_SHCSR_SYSTICKACT_Pos 11U /*!< SCB SHCSR: SYSTICKACT Position */\r
+#define SCB_SHCSR_SYSTICKACT_Msk (1UL << SCB_SHCSR_SYSTICKACT_Pos) /*!< SCB SHCSR: SYSTICKACT Mask */\r
+\r
+#define SCB_SHCSR_PENDSVACT_Pos 10U /*!< SCB SHCSR: PENDSVACT Position */\r
+#define SCB_SHCSR_PENDSVACT_Msk (1UL << SCB_SHCSR_PENDSVACT_Pos) /*!< SCB SHCSR: PENDSVACT Mask */\r
+\r
+#define SCB_SHCSR_MONITORACT_Pos 8U /*!< SCB SHCSR: MONITORACT Position */\r
+#define SCB_SHCSR_MONITORACT_Msk (1UL << SCB_SHCSR_MONITORACT_Pos) /*!< SCB SHCSR: MONITORACT Mask */\r
+\r
+#define SCB_SHCSR_SVCALLACT_Pos 7U /*!< SCB SHCSR: SVCALLACT Position */\r
+#define SCB_SHCSR_SVCALLACT_Msk (1UL << SCB_SHCSR_SVCALLACT_Pos) /*!< SCB SHCSR: SVCALLACT Mask */\r
+\r
+#define SCB_SHCSR_USGFAULTACT_Pos 3U /*!< SCB SHCSR: USGFAULTACT Position */\r
+#define SCB_SHCSR_USGFAULTACT_Msk (1UL << SCB_SHCSR_USGFAULTACT_Pos) /*!< SCB SHCSR: USGFAULTACT Mask */\r
+\r
+#define SCB_SHCSR_BUSFAULTACT_Pos 1U /*!< SCB SHCSR: BUSFAULTACT Position */\r
+#define SCB_SHCSR_BUSFAULTACT_Msk (1UL << SCB_SHCSR_BUSFAULTACT_Pos) /*!< SCB SHCSR: BUSFAULTACT Mask */\r
+\r
+#define SCB_SHCSR_MEMFAULTACT_Pos 0U /*!< SCB SHCSR: MEMFAULTACT Position */\r
+#define SCB_SHCSR_MEMFAULTACT_Msk (1UL /*<< SCB_SHCSR_MEMFAULTACT_Pos*/) /*!< SCB SHCSR: MEMFAULTACT Mask */\r
+\r
+/* SCB Configurable Fault Status Register Definitions */\r
+#define SCB_CFSR_USGFAULTSR_Pos 16U /*!< SCB CFSR: Usage Fault Status Register Position */\r
+#define SCB_CFSR_USGFAULTSR_Msk (0xFFFFUL << SCB_CFSR_USGFAULTSR_Pos) /*!< SCB CFSR: Usage Fault Status Register Mask */\r
+\r
+#define SCB_CFSR_BUSFAULTSR_Pos 8U /*!< SCB CFSR: Bus Fault Status Register Position */\r
+#define SCB_CFSR_BUSFAULTSR_Msk (0xFFUL << SCB_CFSR_BUSFAULTSR_Pos) /*!< SCB CFSR: Bus Fault Status Register Mask */\r
+\r
+#define SCB_CFSR_MEMFAULTSR_Pos 0U /*!< SCB CFSR: Memory Manage Fault Status Register Position */\r
+#define SCB_CFSR_MEMFAULTSR_Msk (0xFFUL /*<< SCB_CFSR_MEMFAULTSR_Pos*/) /*!< SCB CFSR: Memory Manage Fault Status Register Mask */\r
+\r
+/* SCB Hard Fault Status Register Definitions */\r
+#define SCB_HFSR_DEBUGEVT_Pos 31U /*!< SCB HFSR: DEBUGEVT Position */\r
+#define SCB_HFSR_DEBUGEVT_Msk (1UL << SCB_HFSR_DEBUGEVT_Pos) /*!< SCB HFSR: DEBUGEVT Mask */\r
+\r
+#define SCB_HFSR_FORCED_Pos 30U /*!< SCB HFSR: FORCED Position */\r
+#define SCB_HFSR_FORCED_Msk (1UL << SCB_HFSR_FORCED_Pos) /*!< SCB HFSR: FORCED Mask */\r
+\r
+#define SCB_HFSR_VECTTBL_Pos 1U /*!< SCB HFSR: VECTTBL Position */\r
+#define SCB_HFSR_VECTTBL_Msk (1UL << SCB_HFSR_VECTTBL_Pos) /*!< SCB HFSR: VECTTBL Mask */\r
+\r
+/* SCB Debug Fault Status Register Definitions */\r
+#define SCB_DFSR_EXTERNAL_Pos 4U /*!< SCB DFSR: EXTERNAL Position */\r
+#define SCB_DFSR_EXTERNAL_Msk (1UL << SCB_DFSR_EXTERNAL_Pos) /*!< SCB DFSR: EXTERNAL Mask */\r
+\r
+#define SCB_DFSR_VCATCH_Pos 3U /*!< SCB DFSR: VCATCH Position */\r
+#define SCB_DFSR_VCATCH_Msk (1UL << SCB_DFSR_VCATCH_Pos) /*!< SCB DFSR: VCATCH Mask */\r
+\r
+#define SCB_DFSR_DWTTRAP_Pos 2U /*!< SCB DFSR: DWTTRAP Position */\r
+#define SCB_DFSR_DWTTRAP_Msk (1UL << SCB_DFSR_DWTTRAP_Pos) /*!< SCB DFSR: DWTTRAP Mask */\r
+\r
+#define SCB_DFSR_BKPT_Pos 1U /*!< SCB DFSR: BKPT Position */\r
+#define SCB_DFSR_BKPT_Msk (1UL << SCB_DFSR_BKPT_Pos) /*!< SCB DFSR: BKPT Mask */\r
+\r
+#define SCB_DFSR_HALTED_Pos 0U /*!< SCB DFSR: HALTED Position */\r
+#define SCB_DFSR_HALTED_Msk (1UL /*<< SCB_DFSR_HALTED_Pos*/) /*!< SCB DFSR: HALTED Mask */\r
+\r
+/*@} end of group CMSIS_SCB */\r
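+\r
+/* Usage sketch: writes to SCB->AIRCR only take effect when the VECTKEY field\r
+   carries the key value 0x5FA, so a CMSIS-style system reset (cf. the\r
+   NVIC_SystemReset() helper) looks like this:\r
+\r
+       SCB->AIRCR = (0x5FAUL << SCB_AIRCR_VECTKEY_Pos)    |\r
+                    (SCB->AIRCR & SCB_AIRCR_PRIGROUP_Msk) |   // keep priority grouping\r
+                    SCB_AIRCR_SYSRESETREQ_Msk;\r
+       __DSB();                                               // ensure the write completes\r
+       for (;;) { __NOP(); }                                  // wait for the reset\r
+*/\r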
+\r
+\r
+/**\r
+ \ingroup CMSIS_core_register\r
+ \defgroup CMSIS_SCnSCB System Controls not in SCB (SCnSCB)\r
+ \brief Type definitions for the System Control and ID Register not in the SCB\r
+ @{\r
+ */\r
+\r
+/**\r
+ \brief Structure type to access the System Control and ID Register not in the SCB.\r
+ */\r
+typedef struct\r
+{\r
+ uint32_t RESERVED0[1U];\r
+ __IM uint32_t ICTR; /*!< Offset: 0x004 (R/ ) Interrupt Controller Type Register */\r
+ __IOM uint32_t ACTLR; /*!< Offset: 0x008 (R/W) Auxiliary Control Register */\r
+} SCnSCB_Type;\r
+\r
+/* Interrupt Controller Type Register Definitions */\r
+#define SCnSCB_ICTR_INTLINESNUM_Pos 0U /*!< ICTR: INTLINESNUM Position */\r
+#define SCnSCB_ICTR_INTLINESNUM_Msk (0xFUL /*<< SCnSCB_ICTR_INTLINESNUM_Pos*/) /*!< ICTR: INTLINESNUM Mask */\r
+\r
+/* Auxiliary Control Register Definitions */\r
+#define SCnSCB_ACTLR_DISOOFP_Pos 9U /*!< ACTLR: DISOOFP Position */\r
+#define SCnSCB_ACTLR_DISOOFP_Msk (1UL << SCnSCB_ACTLR_DISOOFP_Pos) /*!< ACTLR: DISOOFP Mask */\r
+\r
+#define SCnSCB_ACTLR_DISFPCA_Pos 8U /*!< ACTLR: DISFPCA Position */\r
+#define SCnSCB_ACTLR_DISFPCA_Msk (1UL << SCnSCB_ACTLR_DISFPCA_Pos) /*!< ACTLR: DISFPCA Mask */\r
+\r
+#define SCnSCB_ACTLR_DISFOLD_Pos 2U /*!< ACTLR: DISFOLD Position */\r
+#define SCnSCB_ACTLR_DISFOLD_Msk (1UL << SCnSCB_ACTLR_DISFOLD_Pos) /*!< ACTLR: DISFOLD Mask */\r
+\r
+#define SCnSCB_ACTLR_DISDEFWBUF_Pos 1U /*!< ACTLR: DISDEFWBUF Position */\r
+#define SCnSCB_ACTLR_DISDEFWBUF_Msk (1UL << SCnSCB_ACTLR_DISDEFWBUF_Pos) /*!< ACTLR: DISDEFWBUF Mask */\r
+\r
+#define SCnSCB_ACTLR_DISMCYCINT_Pos 0U /*!< ACTLR: DISMCYCINT Position */\r
+#define SCnSCB_ACTLR_DISMCYCINT_Msk (1UL /*<< SCnSCB_ACTLR_DISMCYCINT_Pos*/) /*!< ACTLR: DISMCYCINT Mask */\r
+\r
+/*@} end of group CMSIS_SCnSCB */\r
+\r
+\r
+/**\r
+ \ingroup CMSIS_core_register\r
+ \defgroup CMSIS_SysTick System Tick Timer (SysTick)\r
+ \brief Type definitions for the System Timer Registers.\r
+ @{\r
+ */\r
+\r
+/**\r
+ \brief Structure type to access the System Timer (SysTick).\r
+ */\r
+typedef struct\r
+{\r
+ __IOM uint32_t CTRL; /*!< Offset: 0x000 (R/W) SysTick Control and Status Register */\r
+ __IOM uint32_t LOAD; /*!< Offset: 0x004 (R/W) SysTick Reload Value Register */\r
+ __IOM uint32_t VAL; /*!< Offset: 0x008 (R/W) SysTick Current Value Register */\r
+ __IM uint32_t CALIB; /*!< Offset: 0x00C (R/ ) SysTick Calibration Register */\r
+} SysTick_Type;\r
+\r
+/* SysTick Control / Status Register Definitions */\r
+#define SysTick_CTRL_COUNTFLAG_Pos 16U /*!< SysTick CTRL: COUNTFLAG Position */\r
+#define SysTick_CTRL_COUNTFLAG_Msk (1UL << SysTick_CTRL_COUNTFLAG_Pos) /*!< SysTick CTRL: COUNTFLAG Mask */\r
+\r
+#define SysTick_CTRL_CLKSOURCE_Pos 2U /*!< SysTick CTRL: CLKSOURCE Position */\r
+#define SysTick_CTRL_CLKSOURCE_Msk (1UL << SysTick_CTRL_CLKSOURCE_Pos) /*!< SysTick CTRL: CLKSOURCE Mask */\r
+\r
+#define SysTick_CTRL_TICKINT_Pos 1U /*!< SysTick CTRL: TICKINT Position */\r
+#define SysTick_CTRL_TICKINT_Msk (1UL << SysTick_CTRL_TICKINT_Pos) /*!< SysTick CTRL: TICKINT Mask */\r
+\r
+#define SysTick_CTRL_ENABLE_Pos 0U /*!< SysTick CTRL: ENABLE Position */\r
+#define SysTick_CTRL_ENABLE_Msk (1UL /*<< SysTick_CTRL_ENABLE_Pos*/) /*!< SysTick CTRL: ENABLE Mask */\r
+\r
+/* SysTick Reload Register Definitions */\r
+#define SysTick_LOAD_RELOAD_Pos 0U /*!< SysTick LOAD: RELOAD Position */\r
+#define SysTick_LOAD_RELOAD_Msk (0xFFFFFFUL /*<< SysTick_LOAD_RELOAD_Pos*/) /*!< SysTick LOAD: RELOAD Mask */\r
+\r
+/* SysTick Current Register Definitions */\r
+#define SysTick_VAL_CURRENT_Pos 0U /*!< SysTick VAL: CURRENT Position */\r
+#define SysTick_VAL_CURRENT_Msk (0xFFFFFFUL /*<< SysTick_VAL_CURRENT_Pos*/) /*!< SysTick VAL: CURRENT Mask */\r
+\r
+/* SysTick Calibration Register Definitions */\r
+#define SysTick_CALIB_NOREF_Pos 31U /*!< SysTick CALIB: NOREF Position */\r
+#define SysTick_CALIB_NOREF_Msk (1UL << SysTick_CALIB_NOREF_Pos) /*!< SysTick CALIB: NOREF Mask */\r
+\r
+#define SysTick_CALIB_SKEW_Pos 30U /*!< SysTick CALIB: SKEW Position */\r
+#define SysTick_CALIB_SKEW_Msk (1UL << SysTick_CALIB_SKEW_Pos) /*!< SysTick CALIB: SKEW Mask */\r
+\r
+#define SysTick_CALIB_TENMS_Pos 0U /*!< SysTick CALIB: TENMS Position */\r
+#define SysTick_CALIB_TENMS_Msk (0xFFFFFFUL /*<< SysTick_CALIB_TENMS_Pos*/) /*!< SysTick CALIB: TENMS Mask */\r
+\r
+/*@} end of group CMSIS_SysTick */\r
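+\r
+/* Usage sketch: a periodic tick is started by programming LOAD (24-bit reload\r
+   value, so at most 0xFFFFFF), clearing VAL and setting CLKSOURCE/TICKINT/ENABLE\r
+   in CTRL, which is essentially what the SysTick_Config() helper does:\r
+\r
+       SysTick->LOAD = (ticks - 1UL) & SysTick_LOAD_RELOAD_Msk;   // reload value\r
+       SysTick->VAL  = 0UL;                                       // clear current count\r
+       SysTick->CTRL = SysTick_CTRL_CLKSOURCE_Msk |               // processor clock\r
+                       SysTick_CTRL_TICKINT_Msk   |               // request SysTick IRQ\r
+                       SysTick_CTRL_ENABLE_Msk;                   // start counting\r
+*/\r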
+\r
+\r
+/**\r
+ \ingroup CMSIS_core_register\r
+ \defgroup CMSIS_ITM Instrumentation Trace Macrocell (ITM)\r
+ \brief Type definitions for the Instrumentation Trace Macrocell (ITM)\r
+ @{\r
+ */\r
+\r
+/**\r
+ \brief Structure type to access the Instrumentation Trace Macrocell Register (ITM).\r
+ */\r
+typedef struct\r
+{\r
+ __OM union\r
+ {\r
+ __OM uint8_t u8; /*!< Offset: 0x000 ( /W) ITM Stimulus Port 8-bit */\r
+ __OM uint16_t u16; /*!< Offset: 0x000 ( /W) ITM Stimulus Port 16-bit */\r
+ __OM uint32_t u32; /*!< Offset: 0x000 ( /W) ITM Stimulus Port 32-bit */\r
+ } PORT [32U]; /*!< Offset: 0x000 ( /W) ITM Stimulus Port Registers */\r
+ uint32_t RESERVED0[864U];\r
+ __IOM uint32_t TER; /*!< Offset: 0xE00 (R/W) ITM Trace Enable Register */\r
+ uint32_t RESERVED1[15U];\r
+ __IOM uint32_t TPR; /*!< Offset: 0xE40 (R/W) ITM Trace Privilege Register */\r
+ uint32_t RESERVED2[15U];\r
+ __IOM uint32_t TCR; /*!< Offset: 0xE80 (R/W) ITM Trace Control Register */\r
+ uint32_t RESERVED3[29U];\r
+ __OM uint32_t IWR; /*!< Offset: 0xEF8 ( /W) ITM Integration Write Register */\r
+ __IM uint32_t IRR; /*!< Offset: 0xEFC (R/ ) ITM Integration Read Register */\r
+ __IOM uint32_t IMCR; /*!< Offset: 0xF00 (R/W) ITM Integration Mode Control Register */\r
+ uint32_t RESERVED4[43U];\r
+ __OM uint32_t LAR; /*!< Offset: 0xFB0 ( /W) ITM Lock Access Register */\r
+ __IM uint32_t LSR; /*!< Offset: 0xFB4 (R/ ) ITM Lock Status Register */\r
+ uint32_t RESERVED5[6U];\r
+ __IM uint32_t PID4; /*!< Offset: 0xFD0 (R/ ) ITM Peripheral Identification Register #4 */\r
+ __IM uint32_t PID5; /*!< Offset: 0xFD4 (R/ ) ITM Peripheral Identification Register #5 */\r
+ __IM uint32_t PID6; /*!< Offset: 0xFD8 (R/ ) ITM Peripheral Identification Register #6 */\r
+ __IM uint32_t PID7; /*!< Offset: 0xFDC (R/ ) ITM Peripheral Identification Register #7 */\r
+ __IM uint32_t PID0; /*!< Offset: 0xFE0 (R/ ) ITM Peripheral Identification Register #0 */\r
+ __IM uint32_t PID1; /*!< Offset: 0xFE4 (R/ ) ITM Peripheral Identification Register #1 */\r
+ __IM uint32_t PID2; /*!< Offset: 0xFE8 (R/ ) ITM Peripheral Identification Register #2 */\r
+ __IM uint32_t PID3; /*!< Offset: 0xFEC (R/ ) ITM Peripheral Identification Register #3 */\r
+ __IM uint32_t CID0; /*!< Offset: 0xFF0 (R/ ) ITM Component Identification Register #0 */\r
+ __IM uint32_t CID1; /*!< Offset: 0xFF4 (R/ ) ITM Component Identification Register #1 */\r
+ __IM uint32_t CID2; /*!< Offset: 0xFF8 (R/ ) ITM Component Identification Register #2 */\r
+ __IM uint32_t CID3; /*!< Offset: 0xFFC (R/ ) ITM Component Identification Register #3 */\r
+} ITM_Type;\r
+\r
+/* ITM Trace Privilege Register Definitions */\r
+#define ITM_TPR_PRIVMASK_Pos 0U /*!< ITM TPR: PRIVMASK Position */\r
+#define ITM_TPR_PRIVMASK_Msk (0xFUL /*<< ITM_TPR_PRIVMASK_Pos*/) /*!< ITM TPR: PRIVMASK Mask */\r
+\r
+/* ITM Trace Control Register Definitions */\r
+#define ITM_TCR_BUSY_Pos 23U /*!< ITM TCR: BUSY Position */\r
+#define ITM_TCR_BUSY_Msk (1UL << ITM_TCR_BUSY_Pos) /*!< ITM TCR: BUSY Mask */\r
+\r
+#define ITM_TCR_TraceBusID_Pos 16U /*!< ITM TCR: ATBID Position */\r
+#define ITM_TCR_TraceBusID_Msk (0x7FUL << ITM_TCR_TraceBusID_Pos) /*!< ITM TCR: ATBID Mask */\r
+\r
+#define ITM_TCR_GTSFREQ_Pos 10U /*!< ITM TCR: Global timestamp frequency Position */\r
+#define ITM_TCR_GTSFREQ_Msk (3UL << ITM_TCR_GTSFREQ_Pos) /*!< ITM TCR: Global timestamp frequency Mask */\r
+\r
+#define ITM_TCR_TSPrescale_Pos 8U /*!< ITM TCR: TSPrescale Position */\r
+#define ITM_TCR_TSPrescale_Msk (3UL << ITM_TCR_TSPrescale_Pos) /*!< ITM TCR: TSPrescale Mask */\r
+\r
+#define ITM_TCR_SWOENA_Pos 4U /*!< ITM TCR: SWOENA Position */\r
+#define ITM_TCR_SWOENA_Msk (1UL << ITM_TCR_SWOENA_Pos) /*!< ITM TCR: SWOENA Mask */\r
+\r
+#define ITM_TCR_DWTENA_Pos 3U /*!< ITM TCR: DWTENA Position */\r
+#define ITM_TCR_DWTENA_Msk (1UL << ITM_TCR_DWTENA_Pos) /*!< ITM TCR: DWTENA Mask */\r
+\r
+#define ITM_TCR_SYNCENA_Pos 2U /*!< ITM TCR: SYNCENA Position */\r
+#define ITM_TCR_SYNCENA_Msk (1UL << ITM_TCR_SYNCENA_Pos) /*!< ITM TCR: SYNCENA Mask */\r
+\r
+#define ITM_TCR_TSENA_Pos 1U /*!< ITM TCR: TSENA Position */\r
+#define ITM_TCR_TSENA_Msk (1UL << ITM_TCR_TSENA_Pos) /*!< ITM TCR: TSENA Mask */\r
+\r
+#define ITM_TCR_ITMENA_Pos 0U /*!< ITM TCR: ITM Enable bit Position */\r
+#define ITM_TCR_ITMENA_Msk (1UL /*<< ITM_TCR_ITMENA_Pos*/) /*!< ITM TCR: ITM Enable bit Mask */\r
+\r
+/* ITM Integration Write Register Definitions */\r
+#define ITM_IWR_ATVALIDM_Pos 0U /*!< ITM IWR: ATVALIDM Position */\r
+#define ITM_IWR_ATVALIDM_Msk (1UL /*<< ITM_IWR_ATVALIDM_Pos*/) /*!< ITM IWR: ATVALIDM Mask */\r
+\r
+/* ITM Integration Read Register Definitions */\r
+#define ITM_IRR_ATREADYM_Pos 0U /*!< ITM IRR: ATREADYM Position */\r
+#define ITM_IRR_ATREADYM_Msk (1UL /*<< ITM_IRR_ATREADYM_Pos*/) /*!< ITM IRR: ATREADYM Mask */\r
+\r
+/* ITM Integration Mode Control Register Definitions */\r
+#define ITM_IMCR_INTEGRATION_Pos 0U /*!< ITM IMCR: INTEGRATION Position */\r
+#define ITM_IMCR_INTEGRATION_Msk (1UL /*<< ITM_IMCR_INTEGRATION_Pos*/) /*!< ITM IMCR: INTEGRATION Mask */\r
+\r
+/* ITM Lock Status Register Definitions */\r
+#define ITM_LSR_ByteAcc_Pos 2U /*!< ITM LSR: ByteAcc Position */\r
+#define ITM_LSR_ByteAcc_Msk (1UL << ITM_LSR_ByteAcc_Pos) /*!< ITM LSR: ByteAcc Mask */\r
+\r
+#define ITM_LSR_Access_Pos 1U /*!< ITM LSR: Access Position */\r
+#define ITM_LSR_Access_Msk (1UL << ITM_LSR_Access_Pos) /*!< ITM LSR: Access Mask */\r
+\r
+#define ITM_LSR_Present_Pos 0U /*!< ITM LSR: Present Position */\r
+#define ITM_LSR_Present_Msk (1UL /*<< ITM_LSR_Present_Pos*/) /*!< ITM LSR: Present Mask */\r
+\r
+/*@}*/ /* end of group CMSIS_ITM */\r
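+\r
+/* Usage sketch: ITM_SendChar()-style trace output writes one byte to stimulus\r
+   port 0 once the ITM and that port are enabled; a read of the port returning\r
+   non-zero means its FIFO can accept data:\r
+\r
+       if (((ITM->TCR & ITM_TCR_ITMENA_Msk) != 0UL) &&   // ITM enabled\r
+           ((ITM->TER & 1UL)                != 0UL))     // stimulus port 0 enabled\r
+       {\r
+           while (ITM->PORT[0U].u32 == 0UL) { __NOP(); } // wait until the port is ready\r
+           ITM->PORT[0U].u8 = (uint8_t)ch;               // emit one character\r
+       }\r
+*/\r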
+\r
+\r
+/**\r
+ \ingroup CMSIS_core_register\r
+ \defgroup CMSIS_DWT Data Watchpoint and Trace (DWT)\r
+ \brief Type definitions for the Data Watchpoint and Trace (DWT)\r
+ @{\r
+ */\r
+\r
+/**\r
+ \brief Structure type to access the Data Watchpoint and Trace Register (DWT).\r
+ */\r
+typedef struct\r
+{\r
+ __IOM uint32_t CTRL; /*!< Offset: 0x000 (R/W) Control Register */\r
+ __IOM uint32_t CYCCNT; /*!< Offset: 0x004 (R/W) Cycle Count Register */\r
+ __IOM uint32_t CPICNT; /*!< Offset: 0x008 (R/W) CPI Count Register */\r
+ __IOM uint32_t EXCCNT; /*!< Offset: 0x00C (R/W) Exception Overhead Count Register */\r
+ __IOM uint32_t SLEEPCNT; /*!< Offset: 0x010 (R/W) Sleep Count Register */\r
+ __IOM uint32_t LSUCNT; /*!< Offset: 0x014 (R/W) LSU Count Register */\r
+ __IOM uint32_t FOLDCNT; /*!< Offset: 0x018 (R/W) Folded-instruction Count Register */\r
+ __IM uint32_t PCSR; /*!< Offset: 0x01C (R/ ) Program Counter Sample Register */\r
+ __IOM uint32_t COMP0; /*!< Offset: 0x020 (R/W) Comparator Register 0 */\r
+ __IOM uint32_t MASK0; /*!< Offset: 0x024 (R/W) Mask Register 0 */\r
+ __IOM uint32_t FUNCTION0; /*!< Offset: 0x028 (R/W) Function Register 0 */\r
+ uint32_t RESERVED0[1U];\r
+ __IOM uint32_t COMP1; /*!< Offset: 0x030 (R/W) Comparator Register 1 */\r
+ __IOM uint32_t MASK1; /*!< Offset: 0x034 (R/W) Mask Register 1 */\r
+ __IOM uint32_t FUNCTION1; /*!< Offset: 0x038 (R/W) Function Register 1 */\r
+ uint32_t RESERVED1[1U];\r
+ __IOM uint32_t COMP2; /*!< Offset: 0x040 (R/W) Comparator Register 2 */\r
+ __IOM uint32_t MASK2; /*!< Offset: 0x044 (R/W) Mask Register 2 */\r
+ __IOM uint32_t FUNCTION2; /*!< Offset: 0x048 (R/W) Function Register 2 */\r
+ uint32_t RESERVED2[1U];\r
+ __IOM uint32_t COMP3; /*!< Offset: 0x050 (R/W) Comparator Register 3 */\r
+ __IOM uint32_t MASK3; /*!< Offset: 0x054 (R/W) Mask Register 3 */\r
+ __IOM uint32_t FUNCTION3; /*!< Offset: 0x058 (R/W) Function Register 3 */\r
+} DWT_Type;\r
+\r
+/* DWT Control Register Definitions */\r
+#define DWT_CTRL_NUMCOMP_Pos 28U /*!< DWT CTRL: NUMCOMP Position */\r
+#define DWT_CTRL_NUMCOMP_Msk (0xFUL << DWT_CTRL_NUMCOMP_Pos) /*!< DWT CTRL: NUMCOMP Mask */\r
+\r
+#define DWT_CTRL_NOTRCPKT_Pos 27U /*!< DWT CTRL: NOTRCPKT Position */\r
+#define DWT_CTRL_NOTRCPKT_Msk (0x1UL << DWT_CTRL_NOTRCPKT_Pos) /*!< DWT CTRL: NOTRCPKT Mask */\r
+\r
+#define DWT_CTRL_NOEXTTRIG_Pos 26U /*!< DWT CTRL: NOEXTTRIG Position */\r
+#define DWT_CTRL_NOEXTTRIG_Msk (0x1UL << DWT_CTRL_NOEXTTRIG_Pos) /*!< DWT CTRL: NOEXTTRIG Mask */\r
+\r
+#define DWT_CTRL_NOCYCCNT_Pos 25U /*!< DWT CTRL: NOCYCCNT Position */\r
+#define DWT_CTRL_NOCYCCNT_Msk (0x1UL << DWT_CTRL_NOCYCCNT_Pos) /*!< DWT CTRL: NOCYCCNT Mask */\r
+\r
+#define DWT_CTRL_NOPRFCNT_Pos 24U /*!< DWT CTRL: NOPRFCNT Position */\r
+#define DWT_CTRL_NOPRFCNT_Msk (0x1UL << DWT_CTRL_NOPRFCNT_Pos) /*!< DWT CTRL: NOPRFCNT Mask */\r
+\r
+#define DWT_CTRL_CYCEVTENA_Pos 22U /*!< DWT CTRL: CYCEVTENA Position */\r
+#define DWT_CTRL_CYCEVTENA_Msk (0x1UL << DWT_CTRL_CYCEVTENA_Pos) /*!< DWT CTRL: CYCEVTENA Mask */\r
+\r
+#define DWT_CTRL_FOLDEVTENA_Pos 21U /*!< DWT CTRL: FOLDEVTENA Position */\r
+#define DWT_CTRL_FOLDEVTENA_Msk (0x1UL << DWT_CTRL_FOLDEVTENA_Pos) /*!< DWT CTRL: FOLDEVTENA Mask */\r
+\r
+#define DWT_CTRL_LSUEVTENA_Pos 20U /*!< DWT CTRL: LSUEVTENA Position */\r
+#define DWT_CTRL_LSUEVTENA_Msk (0x1UL << DWT_CTRL_LSUEVTENA_Pos) /*!< DWT CTRL: LSUEVTENA Mask */\r
+\r
+#define DWT_CTRL_SLEEPEVTENA_Pos 19U /*!< DWT CTRL: SLEEPEVTENA Position */\r
+#define DWT_CTRL_SLEEPEVTENA_Msk (0x1UL << DWT_CTRL_SLEEPEVTENA_Pos) /*!< DWT CTRL: SLEEPEVTENA Mask */\r
+\r
+#define DWT_CTRL_EXCEVTENA_Pos 18U /*!< DWT CTRL: EXCEVTENA Position */\r
+#define DWT_CTRL_EXCEVTENA_Msk (0x1UL << DWT_CTRL_EXCEVTENA_Pos) /*!< DWT CTRL: EXCEVTENA Mask */\r
+\r
+#define DWT_CTRL_CPIEVTENA_Pos 17U /*!< DWT CTRL: CPIEVTENA Position */\r
+#define DWT_CTRL_CPIEVTENA_Msk (0x1UL << DWT_CTRL_CPIEVTENA_Pos) /*!< DWT CTRL: CPIEVTENA Mask */\r
+\r
+#define DWT_CTRL_EXCTRCENA_Pos 16U /*!< DWT CTRL: EXCTRCENA Position */\r
+#define DWT_CTRL_EXCTRCENA_Msk (0x1UL << DWT_CTRL_EXCTRCENA_Pos) /*!< DWT CTRL: EXCTRCENA Mask */\r
+\r
+#define DWT_CTRL_PCSAMPLENA_Pos 12U /*!< DWT CTRL: PCSAMPLENA Position */\r
+#define DWT_CTRL_PCSAMPLENA_Msk (0x1UL << DWT_CTRL_PCSAMPLENA_Pos) /*!< DWT CTRL: PCSAMPLENA Mask */\r
+\r
+#define DWT_CTRL_SYNCTAP_Pos 10U /*!< DWT CTRL: SYNCTAP Position */\r
+#define DWT_CTRL_SYNCTAP_Msk (0x3UL << DWT_CTRL_SYNCTAP_Pos) /*!< DWT CTRL: SYNCTAP Mask */\r
+\r
+#define DWT_CTRL_CYCTAP_Pos 9U /*!< DWT CTRL: CYCTAP Position */\r
+#define DWT_CTRL_CYCTAP_Msk (0x1UL << DWT_CTRL_CYCTAP_Pos) /*!< DWT CTRL: CYCTAP Mask */\r
+\r
+#define DWT_CTRL_POSTINIT_Pos 5U /*!< DWT CTRL: POSTINIT Position */\r
+#define DWT_CTRL_POSTINIT_Msk (0xFUL << DWT_CTRL_POSTINIT_Pos) /*!< DWT CTRL: POSTINIT Mask */\r
+\r
+#define DWT_CTRL_POSTPRESET_Pos 1U /*!< DWT CTRL: POSTPRESET Position */\r
+#define DWT_CTRL_POSTPRESET_Msk (0xFUL << DWT_CTRL_POSTPRESET_Pos) /*!< DWT CTRL: POSTPRESET Mask */\r
+\r
+#define DWT_CTRL_CYCCNTENA_Pos 0U /*!< DWT CTRL: CYCCNTENA Position */\r
+#define DWT_CTRL_CYCCNTENA_Msk (0x1UL /*<< DWT_CTRL_CYCCNTENA_Pos*/) /*!< DWT CTRL: CYCCNTENA Mask */\r
+\r
+/* DWT CPI Count Register Definitions */\r
+#define DWT_CPICNT_CPICNT_Pos 0U /*!< DWT CPICNT: CPICNT Position */\r
+#define DWT_CPICNT_CPICNT_Msk (0xFFUL /*<< DWT_CPICNT_CPICNT_Pos*/) /*!< DWT CPICNT: CPICNT Mask */\r
+\r
+/* DWT Exception Overhead Count Register Definitions */\r
+#define DWT_EXCCNT_EXCCNT_Pos 0U /*!< DWT EXCCNT: EXCCNT Position */\r
+#define DWT_EXCCNT_EXCCNT_Msk (0xFFUL /*<< DWT_EXCCNT_EXCCNT_Pos*/) /*!< DWT EXCCNT: EXCCNT Mask */\r
+\r
+/* DWT Sleep Count Register Definitions */\r
+#define DWT_SLEEPCNT_SLEEPCNT_Pos 0U /*!< DWT SLEEPCNT: SLEEPCNT Position */\r
+#define DWT_SLEEPCNT_SLEEPCNT_Msk (0xFFUL /*<< DWT_SLEEPCNT_SLEEPCNT_Pos*/) /*!< DWT SLEEPCNT: SLEEPCNT Mask */\r
+\r
+/* DWT LSU Count Register Definitions */\r
+#define DWT_LSUCNT_LSUCNT_Pos 0U /*!< DWT LSUCNT: LSUCNT Position */\r
+#define DWT_LSUCNT_LSUCNT_Msk (0xFFUL /*<< DWT_LSUCNT_LSUCNT_Pos*/) /*!< DWT LSUCNT: LSUCNT Mask */\r
+\r
+/* DWT Folded-instruction Count Register Definitions */\r
+#define DWT_FOLDCNT_FOLDCNT_Pos 0U /*!< DWT FOLDCNT: FOLDCNT Position */\r
+#define DWT_FOLDCNT_FOLDCNT_Msk (0xFFUL /*<< DWT_FOLDCNT_FOLDCNT_Pos*/) /*!< DWT FOLDCNT: FOLDCNT Mask */\r
+\r
+/* DWT Comparator Mask Register Definitions */\r
+#define DWT_MASK_MASK_Pos 0U /*!< DWT MASK: MASK Position */\r
+#define DWT_MASK_MASK_Msk (0x1FUL /*<< DWT_MASK_MASK_Pos*/) /*!< DWT MASK: MASK Mask */\r
+\r
+/* DWT Comparator Function Register Definitions */\r
+#define DWT_FUNCTION_MATCHED_Pos 24U /*!< DWT FUNCTION: MATCHED Position */\r
+#define DWT_FUNCTION_MATCHED_Msk (0x1UL << DWT_FUNCTION_MATCHED_Pos) /*!< DWT FUNCTION: MATCHED Mask */\r
+\r
+#define DWT_FUNCTION_DATAVADDR1_Pos 16U /*!< DWT FUNCTION: DATAVADDR1 Position */\r
+#define DWT_FUNCTION_DATAVADDR1_Msk (0xFUL << DWT_FUNCTION_DATAVADDR1_Pos) /*!< DWT FUNCTION: DATAVADDR1 Mask */\r
+\r
+#define DWT_FUNCTION_DATAVADDR0_Pos 12U /*!< DWT FUNCTION: DATAVADDR0 Position */\r
+#define DWT_FUNCTION_DATAVADDR0_Msk (0xFUL << DWT_FUNCTION_DATAVADDR0_Pos) /*!< DWT FUNCTION: DATAVADDR0 Mask */\r
+\r
+#define DWT_FUNCTION_DATAVSIZE_Pos 10U /*!< DWT FUNCTION: DATAVSIZE Position */\r
+#define DWT_FUNCTION_DATAVSIZE_Msk (0x3UL << DWT_FUNCTION_DATAVSIZE_Pos) /*!< DWT FUNCTION: DATAVSIZE Mask */\r
+\r
+#define DWT_FUNCTION_LNK1ENA_Pos 9U /*!< DWT FUNCTION: LNK1ENA Position */\r
+#define DWT_FUNCTION_LNK1ENA_Msk (0x1UL << DWT_FUNCTION_LNK1ENA_Pos) /*!< DWT FUNCTION: LNK1ENA Mask */\r
+\r
+#define DWT_FUNCTION_DATAVMATCH_Pos 8U /*!< DWT FUNCTION: DATAVMATCH Position */\r
+#define DWT_FUNCTION_DATAVMATCH_Msk (0x1UL << DWT_FUNCTION_DATAVMATCH_Pos) /*!< DWT FUNCTION: DATAVMATCH Mask */\r
+\r
+#define DWT_FUNCTION_CYCMATCH_Pos 7U /*!< DWT FUNCTION: CYCMATCH Position */\r
+#define DWT_FUNCTION_CYCMATCH_Msk (0x1UL << DWT_FUNCTION_CYCMATCH_Pos) /*!< DWT FUNCTION: CYCMATCH Mask */\r
+\r
+#define DWT_FUNCTION_EMITRANGE_Pos 5U /*!< DWT FUNCTION: EMITRANGE Position */\r
+#define DWT_FUNCTION_EMITRANGE_Msk (0x1UL << DWT_FUNCTION_EMITRANGE_Pos) /*!< DWT FUNCTION: EMITRANGE Mask */\r
+\r
+#define DWT_FUNCTION_FUNCTION_Pos 0U /*!< DWT FUNCTION: FUNCTION Position */\r
+#define DWT_FUNCTION_FUNCTION_Msk (0xFUL /*<< DWT_FUNCTION_FUNCTION_Pos*/) /*!< DWT FUNCTION: FUNCTION Mask */\r
+\r
+/*@}*/ /* end of group CMSIS_DWT */\r
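+\r
+/* Usage sketch: CYCCNT is a free-running cycle counter, useful for profiling.\r
+   It only counts after TRCENA in CoreDebug->DEMCR (defined further down) has\r
+   switched on the DWT unit and CYCCNTENA has been set:\r
+\r
+       CoreDebug->DEMCR |= CoreDebug_DEMCR_TRCENA_Msk;   // power up DWT/ITM\r
+       DWT->CYCCNT       = 0UL;                          // reset the counter\r
+       DWT->CTRL        |= DWT_CTRL_CYCCNTENA_Msk;       // start counting cycles\r
+\r
+       uint32_t start  = DWT->CYCCNT;\r
+       // ... code under measurement ...\r
+       uint32_t cycles = DWT->CYCCNT - start;            // unsigned math handles wrap-around\r
+*/\r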
+\r
+\r
+/**\r
+ \ingroup CMSIS_core_register\r
+ \defgroup CMSIS_TPI Trace Port Interface (TPI)\r
+ \brief Type definitions for the Trace Port Interface (TPI)\r
+ @{\r
+ */\r
+\r
+/**\r
+ \brief Structure type to access the Trace Port Interface Register (TPI).\r
+ */\r
+typedef struct\r
+{\r
+ __IOM uint32_t SSPSR; /*!< Offset: 0x000 (R/ ) Supported Parallel Port Size Register */\r
+ __IOM uint32_t CSPSR; /*!< Offset: 0x004 (R/W) Current Parallel Port Size Register */\r
+ uint32_t RESERVED0[2U];\r
+ __IOM uint32_t ACPR; /*!< Offset: 0x010 (R/W) Asynchronous Clock Prescaler Register */\r
+ uint32_t RESERVED1[55U];\r
+ __IOM uint32_t SPPR; /*!< Offset: 0x0F0 (R/W) Selected Pin Protocol Register */\r
+ uint32_t RESERVED2[131U];\r
+ __IM uint32_t FFSR; /*!< Offset: 0x300 (R/ ) Formatter and Flush Status Register */\r
+ __IOM uint32_t FFCR; /*!< Offset: 0x304 (R/W) Formatter and Flush Control Register */\r
+ __IM uint32_t FSCR; /*!< Offset: 0x308 (R/ ) Formatter Synchronization Counter Register */\r
+ uint32_t RESERVED3[759U];\r
+ __IM uint32_t TRIGGER; /*!< Offset: 0xEE8 (R/ ) TRIGGER */\r
+ __IM uint32_t FIFO0; /*!< Offset: 0xEEC (R/ ) Integration ETM Data */\r
+ __IM uint32_t ITATBCTR2; /*!< Offset: 0xEF0 (R/ ) ITATBCTR2 */\r
+ uint32_t RESERVED4[1U];\r
+ __IM uint32_t ITATBCTR0; /*!< Offset: 0xEF8 (R/ ) ITATBCTR0 */\r
+ __IM uint32_t FIFO1; /*!< Offset: 0xEFC (R/ ) Integration ITM Data */\r
+ __IOM uint32_t ITCTRL; /*!< Offset: 0xF00 (R/W) Integration Mode Control */\r
+ uint32_t RESERVED5[39U];\r
+ __IOM uint32_t CLAIMSET; /*!< Offset: 0xFA0 (R/W) Claim tag set */\r
+ __IOM uint32_t CLAIMCLR; /*!< Offset: 0xFA4 (R/W) Claim tag clear */\r
+ uint32_t RESERVED7[8U];\r
+ __IM uint32_t DEVID; /*!< Offset: 0xFC8 (R/ ) TPIU_DEVID */\r
+ __IM uint32_t DEVTYPE; /*!< Offset: 0xFCC (R/ ) TPIU_DEVTYPE */\r
+} TPI_Type;\r
+\r
+/* TPI Asynchronous Clock Prescaler Register Definitions */\r
+#define TPI_ACPR_PRESCALER_Pos 0U /*!< TPI ACPR: PRESCALER Position */\r
+#define TPI_ACPR_PRESCALER_Msk (0x1FFFUL /*<< TPI_ACPR_PRESCALER_Pos*/) /*!< TPI ACPR: PRESCALER Mask */\r
+\r
+/* TPI Selected Pin Protocol Register Definitions */\r
+#define TPI_SPPR_TXMODE_Pos 0U /*!< TPI SPPR: TXMODE Position */\r
+#define TPI_SPPR_TXMODE_Msk (0x3UL /*<< TPI_SPPR_TXMODE_Pos*/) /*!< TPI SPPR: TXMODE Mask */\r
+\r
+/* TPI Formatter and Flush Status Register Definitions */\r
+#define TPI_FFSR_FtNonStop_Pos 3U /*!< TPI FFSR: FtNonStop Position */\r
+#define TPI_FFSR_FtNonStop_Msk (0x1UL << TPI_FFSR_FtNonStop_Pos) /*!< TPI FFSR: FtNonStop Mask */\r
+\r
+#define TPI_FFSR_TCPresent_Pos 2U /*!< TPI FFSR: TCPresent Position */\r
+#define TPI_FFSR_TCPresent_Msk (0x1UL << TPI_FFSR_TCPresent_Pos) /*!< TPI FFSR: TCPresent Mask */\r
+\r
+#define TPI_FFSR_FtStopped_Pos 1U /*!< TPI FFSR: FtStopped Position */\r
+#define TPI_FFSR_FtStopped_Msk (0x1UL << TPI_FFSR_FtStopped_Pos) /*!< TPI FFSR: FtStopped Mask */\r
+\r
+#define TPI_FFSR_FlInProg_Pos 0U /*!< TPI FFSR: FlInProg Position */\r
+#define TPI_FFSR_FlInProg_Msk (0x1UL /*<< TPI_FFSR_FlInProg_Pos*/) /*!< TPI FFSR: FlInProg Mask */\r
+\r
+/* TPI Formatter and Flush Control Register Definitions */\r
+#define TPI_FFCR_TrigIn_Pos 8U /*!< TPI FFCR: TrigIn Position */\r
+#define TPI_FFCR_TrigIn_Msk (0x1UL << TPI_FFCR_TrigIn_Pos) /*!< TPI FFCR: TrigIn Mask */\r
+\r
+#define TPI_FFCR_EnFCont_Pos 1U /*!< TPI FFCR: EnFCont Position */\r
+#define TPI_FFCR_EnFCont_Msk (0x1UL << TPI_FFCR_EnFCont_Pos) /*!< TPI FFCR: EnFCont Mask */\r
+\r
+/* TPI TRIGGER Register Definitions */\r
+#define TPI_TRIGGER_TRIGGER_Pos 0U /*!< TPI TRIGGER: TRIGGER Position */\r
+#define TPI_TRIGGER_TRIGGER_Msk (0x1UL /*<< TPI_TRIGGER_TRIGGER_Pos*/) /*!< TPI TRIGGER: TRIGGER Mask */\r
+\r
+/* TPI Integration ETM Data Register Definitions (FIFO0) */\r
+#define TPI_FIFO0_ITM_ATVALID_Pos 29U /*!< TPI FIFO0: ITM_ATVALID Position */\r
+#define TPI_FIFO0_ITM_ATVALID_Msk (0x3UL << TPI_FIFO0_ITM_ATVALID_Pos) /*!< TPI FIFO0: ITM_ATVALID Mask */\r
+\r
+#define TPI_FIFO0_ITM_bytecount_Pos 27U /*!< TPI FIFO0: ITM_bytecount Position */\r
+#define TPI_FIFO0_ITM_bytecount_Msk (0x3UL << TPI_FIFO0_ITM_bytecount_Pos) /*!< TPI FIFO0: ITM_bytecount Mask */\r
+\r
+#define TPI_FIFO0_ETM_ATVALID_Pos 26U /*!< TPI FIFO0: ETM_ATVALID Position */\r
+#define TPI_FIFO0_ETM_ATVALID_Msk (0x3UL << TPI_FIFO0_ETM_ATVALID_Pos) /*!< TPI FIFO0: ETM_ATVALID Mask */\r
+\r
+#define TPI_FIFO0_ETM_bytecount_Pos 24U /*!< TPI FIFO0: ETM_bytecount Position */\r
+#define TPI_FIFO0_ETM_bytecount_Msk (0x3UL << TPI_FIFO0_ETM_bytecount_Pos) /*!< TPI FIFO0: ETM_bytecount Mask */\r
+\r
+#define TPI_FIFO0_ETM2_Pos 16U /*!< TPI FIFO0: ETM2 Position */\r
+#define TPI_FIFO0_ETM2_Msk (0xFFUL << TPI_FIFO0_ETM2_Pos) /*!< TPI FIFO0: ETM2 Mask */\r
+\r
+#define TPI_FIFO0_ETM1_Pos 8U /*!< TPI FIFO0: ETM1 Position */\r
+#define TPI_FIFO0_ETM1_Msk (0xFFUL << TPI_FIFO0_ETM1_Pos) /*!< TPI FIFO0: ETM1 Mask */\r
+\r
+#define TPI_FIFO0_ETM0_Pos 0U /*!< TPI FIFO0: ETM0 Position */\r
+#define TPI_FIFO0_ETM0_Msk (0xFFUL /*<< TPI_FIFO0_ETM0_Pos*/) /*!< TPI FIFO0: ETM0 Mask */\r
+\r
+/* TPI ITATBCTR2 Register Definitions */\r
+#define TPI_ITATBCTR2_ATREADY_Pos 0U /*!< TPI ITATBCTR2: ATREADY Position */\r
+#define TPI_ITATBCTR2_ATREADY_Msk (0x1UL /*<< TPI_ITATBCTR2_ATREADY_Pos*/) /*!< TPI ITATBCTR2: ATREADY Mask */\r
+\r
+/* TPI Integration ITM Data Register Definitions (FIFO1) */\r
+#define TPI_FIFO1_ITM_ATVALID_Pos 29U /*!< TPI FIFO1: ITM_ATVALID Position */\r
+#define TPI_FIFO1_ITM_ATVALID_Msk (0x3UL << TPI_FIFO1_ITM_ATVALID_Pos) /*!< TPI FIFO1: ITM_ATVALID Mask */\r
+\r
+#define TPI_FIFO1_ITM_bytecount_Pos 27U /*!< TPI FIFO1: ITM_bytecount Position */\r
+#define TPI_FIFO1_ITM_bytecount_Msk (0x3UL << TPI_FIFO1_ITM_bytecount_Pos) /*!< TPI FIFO1: ITM_bytecount Mask */\r
+\r
+#define TPI_FIFO1_ETM_ATVALID_Pos 26U /*!< TPI FIFO1: ETM_ATVALID Position */\r
+#define TPI_FIFO1_ETM_ATVALID_Msk (0x3UL << TPI_FIFO1_ETM_ATVALID_Pos) /*!< TPI FIFO1: ETM_ATVALID Mask */\r
+\r
+#define TPI_FIFO1_ETM_bytecount_Pos 24U /*!< TPI FIFO1: ETM_bytecount Position */\r
+#define TPI_FIFO1_ETM_bytecount_Msk (0x3UL << TPI_FIFO1_ETM_bytecount_Pos) /*!< TPI FIFO1: ETM_bytecount Mask */\r
+\r
+#define TPI_FIFO1_ITM2_Pos 16U /*!< TPI FIFO1: ITM2 Position */\r
+#define TPI_FIFO1_ITM2_Msk (0xFFUL << TPI_FIFO1_ITM2_Pos) /*!< TPI FIFO1: ITM2 Mask */\r
+\r
+#define TPI_FIFO1_ITM1_Pos 8U /*!< TPI FIFO1: ITM1 Position */\r
+#define TPI_FIFO1_ITM1_Msk (0xFFUL << TPI_FIFO1_ITM1_Pos) /*!< TPI FIFO1: ITM1 Mask */\r
+\r
+#define TPI_FIFO1_ITM0_Pos 0U /*!< TPI FIFO1: ITM0 Position */\r
+#define TPI_FIFO1_ITM0_Msk (0xFFUL /*<< TPI_FIFO1_ITM0_Pos*/) /*!< TPI FIFO1: ITM0 Mask */\r
+\r
+/* TPI ITATBCTR0 Register Definitions */\r
+#define TPI_ITATBCTR0_ATREADY_Pos 0U /*!< TPI ITATBCTR0: ATREADY Position */\r
+#define TPI_ITATBCTR0_ATREADY_Msk (0x1UL /*<< TPI_ITATBCTR0_ATREADY_Pos*/) /*!< TPI ITATBCTR0: ATREADY Mask */\r
+\r
+/* TPI Integration Mode Control Register Definitions */\r
+#define TPI_ITCTRL_Mode_Pos 0U /*!< TPI ITCTRL: Mode Position */\r
+#define TPI_ITCTRL_Mode_Msk (0x1UL /*<< TPI_ITCTRL_Mode_Pos*/) /*!< TPI ITCTRL: Mode Mask */\r
+\r
+/* TPI DEVID Register Definitions */\r
+#define TPI_DEVID_NRZVALID_Pos 11U /*!< TPI DEVID: NRZVALID Position */\r
+#define TPI_DEVID_NRZVALID_Msk (0x1UL << TPI_DEVID_NRZVALID_Pos) /*!< TPI DEVID: NRZVALID Mask */\r
+\r
+#define TPI_DEVID_MANCVALID_Pos 10U /*!< TPI DEVID: MANCVALID Position */\r
+#define TPI_DEVID_MANCVALID_Msk (0x1UL << TPI_DEVID_MANCVALID_Pos) /*!< TPI DEVID: MANCVALID Mask */\r
+\r
+#define TPI_DEVID_PTINVALID_Pos 9U /*!< TPI DEVID: PTINVALID Position */\r
+#define TPI_DEVID_PTINVALID_Msk (0x1UL << TPI_DEVID_PTINVALID_Pos) /*!< TPI DEVID: PTINVALID Mask */\r
+\r
+#define TPI_DEVID_MinBufSz_Pos 6U /*!< TPI DEVID: MinBufSz Position */\r
+#define TPI_DEVID_MinBufSz_Msk (0x7UL << TPI_DEVID_MinBufSz_Pos) /*!< TPI DEVID: MinBufSz Mask */\r
+\r
+#define TPI_DEVID_AsynClkIn_Pos 5U /*!< TPI DEVID: AsynClkIn Position */\r
+#define TPI_DEVID_AsynClkIn_Msk (0x1UL << TPI_DEVID_AsynClkIn_Pos) /*!< TPI DEVID: AsynClkIn Mask */\r
+\r
+#define TPI_DEVID_NrTraceInput_Pos 0U /*!< TPI DEVID: NrTraceInput Position */\r
+#define TPI_DEVID_NrTraceInput_Msk (0x1FUL /*<< TPI_DEVID_NrTraceInput_Pos*/) /*!< TPI DEVID: NrTraceInput Mask */\r
+\r
+/* TPI DEVTYPE Register Definitions */\r
+#define TPI_DEVTYPE_MajorType_Pos 4U /*!< TPI DEVTYPE: MajorType Position */\r
+#define TPI_DEVTYPE_MajorType_Msk (0xFUL << TPI_DEVTYPE_MajorType_Pos) /*!< TPI DEVTYPE: MajorType Mask */\r
+\r
+#define TPI_DEVTYPE_SubType_Pos 0U /*!< TPI DEVTYPE: SubType Position */\r
+#define TPI_DEVTYPE_SubType_Msk (0xFUL /*<< TPI_DEVTYPE_SubType_Pos*/) /*!< TPI DEVTYPE: SubType Mask */\r
+\r
+/*@}*/ /* end of group CMSIS_TPI */\r
+\r
+\r
+#if (__MPU_PRESENT == 1U)\r
+/**\r
+ \ingroup CMSIS_core_register\r
+ \defgroup CMSIS_MPU Memory Protection Unit (MPU)\r
+ \brief Type definitions for the Memory Protection Unit (MPU)\r
+ @{\r
+ */\r
+\r
+/**\r
+ \brief Structure type to access the Memory Protection Unit (MPU).\r
+ */\r
+typedef struct\r
+{\r
+ __IM uint32_t TYPE; /*!< Offset: 0x000 (R/ ) MPU Type Register */\r
+ __IOM uint32_t CTRL; /*!< Offset: 0x004 (R/W) MPU Control Register */\r
+ __IOM uint32_t RNR; /*!< Offset: 0x008 (R/W) MPU Region Number Register */\r
+ __IOM uint32_t RBAR; /*!< Offset: 0x00C (R/W) MPU Region Base Address Register */\r
+ __IOM uint32_t RASR; /*!< Offset: 0x010 (R/W) MPU Region Attribute and Size Register */\r
+ __IOM uint32_t RBAR_A1; /*!< Offset: 0x014 (R/W) MPU Alias 1 Region Base Address Register */\r
+ __IOM uint32_t RASR_A1; /*!< Offset: 0x018 (R/W) MPU Alias 1 Region Attribute and Size Register */\r
+ __IOM uint32_t RBAR_A2; /*!< Offset: 0x01C (R/W) MPU Alias 2 Region Base Address Register */\r
+ __IOM uint32_t RASR_A2; /*!< Offset: 0x020 (R/W) MPU Alias 2 Region Attribute and Size Register */\r
+ __IOM uint32_t RBAR_A3; /*!< Offset: 0x024 (R/W) MPU Alias 3 Region Base Address Register */\r
+ __IOM uint32_t RASR_A3; /*!< Offset: 0x028 (R/W) MPU Alias 3 Region Attribute and Size Register */\r
+} MPU_Type;\r
+\r
+/* MPU Type Register Definitions */\r
+#define MPU_TYPE_IREGION_Pos 16U /*!< MPU TYPE: IREGION Position */\r
+#define MPU_TYPE_IREGION_Msk (0xFFUL << MPU_TYPE_IREGION_Pos) /*!< MPU TYPE: IREGION Mask */\r
+\r
+#define MPU_TYPE_DREGION_Pos 8U /*!< MPU TYPE: DREGION Position */\r
+#define MPU_TYPE_DREGION_Msk (0xFFUL << MPU_TYPE_DREGION_Pos) /*!< MPU TYPE: DREGION Mask */\r
+\r
+#define MPU_TYPE_SEPARATE_Pos 0U /*!< MPU TYPE: SEPARATE Position */\r
+#define MPU_TYPE_SEPARATE_Msk (1UL /*<< MPU_TYPE_SEPARATE_Pos*/) /*!< MPU TYPE: SEPARATE Mask */\r
+\r
+/* MPU Control Register Definitions */\r
+#define MPU_CTRL_PRIVDEFENA_Pos 2U /*!< MPU CTRL: PRIVDEFENA Position */\r
+#define MPU_CTRL_PRIVDEFENA_Msk (1UL << MPU_CTRL_PRIVDEFENA_Pos) /*!< MPU CTRL: PRIVDEFENA Mask */\r
+\r
+#define MPU_CTRL_HFNMIENA_Pos 1U /*!< MPU CTRL: HFNMIENA Position */\r
+#define MPU_CTRL_HFNMIENA_Msk (1UL << MPU_CTRL_HFNMIENA_Pos) /*!< MPU CTRL: HFNMIENA Mask */\r
+\r
+#define MPU_CTRL_ENABLE_Pos 0U /*!< MPU CTRL: ENABLE Position */\r
+#define MPU_CTRL_ENABLE_Msk (1UL /*<< MPU_CTRL_ENABLE_Pos*/) /*!< MPU CTRL: ENABLE Mask */\r
+\r
+/* MPU Region Number Register Definitions */\r
+#define MPU_RNR_REGION_Pos 0U /*!< MPU RNR: REGION Position */\r
+#define MPU_RNR_REGION_Msk (0xFFUL /*<< MPU_RNR_REGION_Pos*/) /*!< MPU RNR: REGION Mask */\r
+\r
+/* MPU Region Base Address Register Definitions */\r
+#define MPU_RBAR_ADDR_Pos 5U /*!< MPU RBAR: ADDR Position */\r
+#define MPU_RBAR_ADDR_Msk (0x7FFFFFFUL << MPU_RBAR_ADDR_Pos) /*!< MPU RBAR: ADDR Mask */\r
+\r
+#define MPU_RBAR_VALID_Pos 4U /*!< MPU RBAR: VALID Position */\r
+#define MPU_RBAR_VALID_Msk (1UL << MPU_RBAR_VALID_Pos) /*!< MPU RBAR: VALID Mask */\r
+\r
+#define MPU_RBAR_REGION_Pos 0U /*!< MPU RBAR: REGION Position */\r
+#define MPU_RBAR_REGION_Msk (0xFUL /*<< MPU_RBAR_REGION_Pos*/) /*!< MPU RBAR: REGION Mask */\r
+\r
+/* MPU Region Attribute and Size Register Definitions */\r
+#define MPU_RASR_ATTRS_Pos 16U /*!< MPU RASR: MPU Region Attribute field Position */\r
+#define MPU_RASR_ATTRS_Msk (0xFFFFUL << MPU_RASR_ATTRS_Pos) /*!< MPU RASR: MPU Region Attribute field Mask */\r
+\r
+#define MPU_RASR_XN_Pos 28U /*!< MPU RASR: ATTRS.XN Position */\r
+#define MPU_RASR_XN_Msk (1UL << MPU_RASR_XN_Pos) /*!< MPU RASR: ATTRS.XN Mask */\r
+\r
+#define MPU_RASR_AP_Pos 24U /*!< MPU RASR: ATTRS.AP Position */\r
+#define MPU_RASR_AP_Msk (0x7UL << MPU_RASR_AP_Pos) /*!< MPU RASR: ATTRS.AP Mask */\r
+\r
+#define MPU_RASR_TEX_Pos 19U /*!< MPU RASR: ATTRS.TEX Position */\r
+#define MPU_RASR_TEX_Msk (0x7UL << MPU_RASR_TEX_Pos) /*!< MPU RASR: ATTRS.TEX Mask */\r
+\r
+#define MPU_RASR_S_Pos 18U /*!< MPU RASR: ATTRS.S Position */\r
+#define MPU_RASR_S_Msk (1UL << MPU_RASR_S_Pos) /*!< MPU RASR: ATTRS.S Mask */\r
+\r
+#define MPU_RASR_C_Pos 17U /*!< MPU RASR: ATTRS.C Position */\r
+#define MPU_RASR_C_Msk (1UL << MPU_RASR_C_Pos) /*!< MPU RASR: ATTRS.C Mask */\r
+\r
+#define MPU_RASR_B_Pos 16U /*!< MPU RASR: ATTRS.B Position */\r
+#define MPU_RASR_B_Msk (1UL << MPU_RASR_B_Pos) /*!< MPU RASR: ATTRS.B Mask */\r
+\r
+#define MPU_RASR_SRD_Pos 8U /*!< MPU RASR: Sub-Region Disable Position */\r
+#define MPU_RASR_SRD_Msk (0xFFUL << MPU_RASR_SRD_Pos) /*!< MPU RASR: Sub-Region Disable Mask */\r
+\r
+#define MPU_RASR_SIZE_Pos 1U /*!< MPU RASR: Region Size Field Position */\r
+#define MPU_RASR_SIZE_Msk (0x1FUL << MPU_RASR_SIZE_Pos) /*!< MPU RASR: Region Size Field Mask */\r
+\r
+#define MPU_RASR_ENABLE_Pos 0U /*!< MPU RASR: Region enable bit Position */\r
+#define MPU_RASR_ENABLE_Msk (1UL /*<< MPU_RASR_ENABLE_Pos*/) /*!< MPU RASR: Region enable bit Mask */\r
+\r
+/*@} end of group CMSIS_MPU */\r
+#endif\r
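+\r
+/* Usage sketch (illustrative values): a region is programmed by selecting it in\r
+   RNR, writing its base address to RBAR and its attributes/size to RASR, then\r
+   enabling the MPU in CTRL. Sizes are encoded as 2^(SIZE+1) bytes, so a 256 KB\r
+   region at the SRAM base 0x20000000 could look like:\r
+\r
+       MPU->RNR  = 0UL;                                       // select region 0\r
+       MPU->RBAR = 0x20000000UL & MPU_RBAR_ADDR_Msk;          // region base address\r
+       MPU->RASR = (0x3UL << MPU_RASR_AP_Pos)   |             // AP = 011: full access\r
+                   (17UL  << MPU_RASR_SIZE_Pos) |             // 2^(17+1) = 256 KB\r
+                   MPU_RASR_C_Msk | MPU_RASR_S_Msk |          // cacheable, shareable\r
+                   MPU_RASR_ENABLE_Msk;\r
+       MPU->CTRL = MPU_CTRL_PRIVDEFENA_Msk | MPU_CTRL_ENABLE_Msk;\r
+       __DSB(); __ISB();                                      // make the new map visible\r
+*/\r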
+\r
+\r
+#if (__FPU_PRESENT == 1U)\r
+/**\r
+ \ingroup CMSIS_core_register\r
+ \defgroup CMSIS_FPU Floating Point Unit (FPU)\r
+ \brief Type definitions for the Floating Point Unit (FPU)\r
+ @{\r
+ */\r
+\r
+/**\r
+ \brief Structure type to access the Floating Point Unit (FPU).\r
+ */\r
+typedef struct\r
+{\r
+ uint32_t RESERVED0[1U];\r
+ __IOM uint32_t FPCCR; /*!< Offset: 0x004 (R/W) Floating-Point Context Control Register */\r
+ __IOM uint32_t FPCAR; /*!< Offset: 0x008 (R/W) Floating-Point Context Address Register */\r
+ __IOM uint32_t FPDSCR; /*!< Offset: 0x00C (R/W) Floating-Point Default Status Control Register */\r
+ __IM uint32_t MVFR0; /*!< Offset: 0x010 (R/ ) Media and FP Feature Register 0 */\r
+ __IM uint32_t MVFR1; /*!< Offset: 0x014 (R/ ) Media and FP Feature Register 1 */\r
+} FPU_Type;\r
+\r
+/* Floating-Point Context Control Register Definitions */\r
+#define FPU_FPCCR_ASPEN_Pos 31U /*!< FPCCR: ASPEN bit Position */\r
+#define FPU_FPCCR_ASPEN_Msk (1UL << FPU_FPCCR_ASPEN_Pos) /*!< FPCCR: ASPEN bit Mask */\r
+\r
+#define FPU_FPCCR_LSPEN_Pos 30U /*!< FPCCR: LSPEN Position */\r
+#define FPU_FPCCR_LSPEN_Msk (1UL << FPU_FPCCR_LSPEN_Pos) /*!< FPCCR: LSPEN bit Mask */\r
+\r
+#define FPU_FPCCR_MONRDY_Pos 8U /*!< FPCCR: MONRDY Position */\r
+#define FPU_FPCCR_MONRDY_Msk (1UL << FPU_FPCCR_MONRDY_Pos) /*!< FPCCR: MONRDY bit Mask */\r
+\r
+#define FPU_FPCCR_BFRDY_Pos 6U /*!< FPCCR: BFRDY Position */\r
+#define FPU_FPCCR_BFRDY_Msk (1UL << FPU_FPCCR_BFRDY_Pos) /*!< FPCCR: BFRDY bit Mask */\r
+\r
+#define FPU_FPCCR_MMRDY_Pos 5U /*!< FPCCR: MMRDY Position */\r
+#define FPU_FPCCR_MMRDY_Msk (1UL << FPU_FPCCR_MMRDY_Pos) /*!< FPCCR: MMRDY bit Mask */\r
+\r
+#define FPU_FPCCR_HFRDY_Pos 4U /*!< FPCCR: HFRDY Position */\r
+#define FPU_FPCCR_HFRDY_Msk (1UL << FPU_FPCCR_HFRDY_Pos) /*!< FPCCR: HFRDY bit Mask */\r
+\r
+#define FPU_FPCCR_THREAD_Pos 3U /*!< FPCCR: processor mode bit Position */\r
+#define FPU_FPCCR_THREAD_Msk (1UL << FPU_FPCCR_THREAD_Pos) /*!< FPCCR: processor mode active bit Mask */\r
+\r
+#define FPU_FPCCR_USER_Pos 1U /*!< FPCCR: privilege level bit Position */\r
+#define FPU_FPCCR_USER_Msk (1UL << FPU_FPCCR_USER_Pos) /*!< FPCCR: privilege level bit Mask */\r
+\r
+#define FPU_FPCCR_LSPACT_Pos 0U /*!< FPCCR: Lazy state preservation active bit Position */\r
+#define FPU_FPCCR_LSPACT_Msk (1UL /*<< FPU_FPCCR_LSPACT_Pos*/) /*!< FPCCR: Lazy state preservation active bit Mask */\r
+\r
+/* Floating-Point Context Address Register Definitions */\r
+#define FPU_FPCAR_ADDRESS_Pos 3U /*!< FPCAR: ADDRESS bit Position */\r
+#define FPU_FPCAR_ADDRESS_Msk (0x1FFFFFFFUL << FPU_FPCAR_ADDRESS_Pos) /*!< FPCAR: ADDRESS bit Mask */\r
+\r
+/* Floating-Point Default Status Control Register Definitions */\r
+#define FPU_FPDSCR_AHP_Pos 26U /*!< FPDSCR: AHP bit Position */\r
+#define FPU_FPDSCR_AHP_Msk (1UL << FPU_FPDSCR_AHP_Pos) /*!< FPDSCR: AHP bit Mask */\r
+\r
+#define FPU_FPDSCR_DN_Pos 25U /*!< FPDSCR: DN bit Position */\r
+#define FPU_FPDSCR_DN_Msk (1UL << FPU_FPDSCR_DN_Pos) /*!< FPDSCR: DN bit Mask */\r
+\r
+#define FPU_FPDSCR_FZ_Pos 24U /*!< FPDSCR: FZ bit Position */\r
+#define FPU_FPDSCR_FZ_Msk (1UL << FPU_FPDSCR_FZ_Pos) /*!< FPDSCR: FZ bit Mask */\r
+\r
+#define FPU_FPDSCR_RMode_Pos 22U /*!< FPDSCR: RMode bit Position */\r
+#define FPU_FPDSCR_RMode_Msk (3UL << FPU_FPDSCR_RMode_Pos) /*!< FPDSCR: RMode bit Mask */\r
+\r
+/* Media and FP Feature Register 0 Definitions */\r
+#define FPU_MVFR0_FP_rounding_modes_Pos 28U /*!< MVFR0: FP rounding modes bits Position */\r
+#define FPU_MVFR0_FP_rounding_modes_Msk (0xFUL << FPU_MVFR0_FP_rounding_modes_Pos) /*!< MVFR0: FP rounding modes bits Mask */\r
+\r
+#define FPU_MVFR0_Short_vectors_Pos 24U /*!< MVFR0: Short vectors bits Position */\r
+#define FPU_MVFR0_Short_vectors_Msk (0xFUL << FPU_MVFR0_Short_vectors_Pos) /*!< MVFR0: Short vectors bits Mask */\r
+\r
+#define FPU_MVFR0_Square_root_Pos 20U /*!< MVFR0: Square root bits Position */\r
+#define FPU_MVFR0_Square_root_Msk (0xFUL << FPU_MVFR0_Square_root_Pos) /*!< MVFR0: Square root bits Mask */\r
+\r
+#define FPU_MVFR0_Divide_Pos 16U /*!< MVFR0: Divide bits Position */\r
+#define FPU_MVFR0_Divide_Msk (0xFUL << FPU_MVFR0_Divide_Pos) /*!< MVFR0: Divide bits Mask */\r
+\r
+#define FPU_MVFR0_FP_excep_trapping_Pos 12U /*!< MVFR0: FP exception trapping bits Position */\r
+#define FPU_MVFR0_FP_excep_trapping_Msk (0xFUL << FPU_MVFR0_FP_excep_trapping_Pos) /*!< MVFR0: FP exception trapping bits Mask */\r
+\r
+#define FPU_MVFR0_Double_precision_Pos 8U /*!< MVFR0: Double-precision bits Position */\r
+#define FPU_MVFR0_Double_precision_Msk (0xFUL << FPU_MVFR0_Double_precision_Pos) /*!< MVFR0: Double-precision bits Mask */\r
+\r
+#define FPU_MVFR0_Single_precision_Pos 4U /*!< MVFR0: Single-precision bits Position */\r
+#define FPU_MVFR0_Single_precision_Msk (0xFUL << FPU_MVFR0_Single_precision_Pos) /*!< MVFR0: Single-precision bits Mask */\r
+\r
+#define FPU_MVFR0_A_SIMD_registers_Pos 0U /*!< MVFR0: A_SIMD registers bits Position */\r
+#define FPU_MVFR0_A_SIMD_registers_Msk (0xFUL /*<< FPU_MVFR0_A_SIMD_registers_Pos*/) /*!< MVFR0: A_SIMD registers bits Mask */\r
+\r
+/* Media and FP Feature Register 1 Definitions */\r
+#define FPU_MVFR1_FP_fused_MAC_Pos 28U /*!< MVFR1: FP fused MAC bits Position */\r
+#define FPU_MVFR1_FP_fused_MAC_Msk (0xFUL << FPU_MVFR1_FP_fused_MAC_Pos) /*!< MVFR1: FP fused MAC bits Mask */\r
+\r
+#define FPU_MVFR1_FP_HPFP_Pos 24U /*!< MVFR1: FP HPFP bits Position */\r
+#define FPU_MVFR1_FP_HPFP_Msk (0xFUL << FPU_MVFR1_FP_HPFP_Pos) /*!< MVFR1: FP HPFP bits Mask */\r
+\r
+#define FPU_MVFR1_D_NaN_mode_Pos 4U /*!< MVFR1: D_NaN mode bits Position */\r
+#define FPU_MVFR1_D_NaN_mode_Msk (0xFUL << FPU_MVFR1_D_NaN_mode_Pos) /*!< MVFR1: D_NaN mode bits Mask */\r
+\r
+#define FPU_MVFR1_FtZ_mode_Pos 0U /*!< MVFR1: FtZ mode bits Position */\r
+#define FPU_MVFR1_FtZ_mode_Msk (0xFUL /*<< FPU_MVFR1_FtZ_mode_Pos*/) /*!< MVFR1: FtZ mode bits Mask */\r
+\r
+/*@} end of group CMSIS_FPU */\r
+#endif\r
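+\r
+/* Usage sketch: before the first floating-point instruction the FPU must be\r
+   granted coprocessor access through SCB->CPACR (CP10/CP11 = full access);\r
+   lazy context save (ASPEN/LSPEN in FPU->FPCCR) is already enabled at reset:\r
+\r
+       SCB->CPACR |= (3UL << 10U*2U) | (3UL << 11U*2U);   // CP10/CP11 full access\r
+       __DSB();\r
+       __ISB();                                           // re-fetch with the FPU enabled\r
+*/\r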
+\r
+\r
+/**\r
+ \ingroup CMSIS_core_register\r
+ \defgroup CMSIS_CoreDebug Core Debug Registers (CoreDebug)\r
+ \brief Type definitions for the Core Debug Registers\r
+ @{\r
+ */\r
+\r
+/**\r
+ \brief Structure type to access the Core Debug Register (CoreDebug).\r
+ */\r
+typedef struct\r
+{\r
+ __IOM uint32_t DHCSR; /*!< Offset: 0x000 (R/W) Debug Halting Control and Status Register */\r
+ __OM uint32_t DCRSR; /*!< Offset: 0x004 ( /W) Debug Core Register Selector Register */\r
+ __IOM uint32_t DCRDR; /*!< Offset: 0x008 (R/W) Debug Core Register Data Register */\r
+ __IOM uint32_t DEMCR; /*!< Offset: 0x00C (R/W) Debug Exception and Monitor Control Register */\r
+} CoreDebug_Type;\r
+\r
+/* Debug Halting Control and Status Register Definitions */\r
+#define CoreDebug_DHCSR_DBGKEY_Pos 16U /*!< CoreDebug DHCSR: DBGKEY Position */\r
+#define CoreDebug_DHCSR_DBGKEY_Msk (0xFFFFUL << CoreDebug_DHCSR_DBGKEY_Pos) /*!< CoreDebug DHCSR: DBGKEY Mask */\r
+\r
+#define CoreDebug_DHCSR_S_RESET_ST_Pos 25U /*!< CoreDebug DHCSR: S_RESET_ST Position */\r
+#define CoreDebug_DHCSR_S_RESET_ST_Msk (1UL << CoreDebug_DHCSR_S_RESET_ST_Pos) /*!< CoreDebug DHCSR: S_RESET_ST Mask */\r
+\r
+#define CoreDebug_DHCSR_S_RETIRE_ST_Pos 24U /*!< CoreDebug DHCSR: S_RETIRE_ST Position */\r
+#define CoreDebug_DHCSR_S_RETIRE_ST_Msk (1UL << CoreDebug_DHCSR_S_RETIRE_ST_Pos) /*!< CoreDebug DHCSR: S_RETIRE_ST Mask */\r
+\r
+#define CoreDebug_DHCSR_S_LOCKUP_Pos 19U /*!< CoreDebug DHCSR: S_LOCKUP Position */\r
+#define CoreDebug_DHCSR_S_LOCKUP_Msk (1UL << CoreDebug_DHCSR_S_LOCKUP_Pos) /*!< CoreDebug DHCSR: S_LOCKUP Mask */\r
+\r
+#define CoreDebug_DHCSR_S_SLEEP_Pos 18U /*!< CoreDebug DHCSR: S_SLEEP Position */\r
+#define CoreDebug_DHCSR_S_SLEEP_Msk (1UL << CoreDebug_DHCSR_S_SLEEP_Pos) /*!< CoreDebug DHCSR: S_SLEEP Mask */\r
+\r
+#define CoreDebug_DHCSR_S_HALT_Pos 17U /*!< CoreDebug DHCSR: S_HALT Position */\r
+#define CoreDebug_DHCSR_S_HALT_Msk (1UL << CoreDebug_DHCSR_S_HALT_Pos) /*!< CoreDebug DHCSR: S_HALT Mask */\r
+\r
+#define CoreDebug_DHCSR_S_REGRDY_Pos 16U /*!< CoreDebug DHCSR: S_REGRDY Position */\r
+#define CoreDebug_DHCSR_S_REGRDY_Msk (1UL << CoreDebug_DHCSR_S_REGRDY_Pos) /*!< CoreDebug DHCSR: S_REGRDY Mask */\r
+\r
+#define CoreDebug_DHCSR_C_SNAPSTALL_Pos 5U /*!< CoreDebug DHCSR: C_SNAPSTALL Position */\r
+#define CoreDebug_DHCSR_C_SNAPSTALL_Msk (1UL << CoreDebug_DHCSR_C_SNAPSTALL_Pos) /*!< CoreDebug DHCSR: C_SNAPSTALL Mask */\r
+\r
+#define CoreDebug_DHCSR_C_MASKINTS_Pos 3U /*!< CoreDebug DHCSR: C_MASKINTS Position */\r
+#define CoreDebug_DHCSR_C_MASKINTS_Msk (1UL << CoreDebug_DHCSR_C_MASKINTS_Pos) /*!< CoreDebug DHCSR: C_MASKINTS Mask */\r
+\r
+#define CoreDebug_DHCSR_C_STEP_Pos 2U /*!< CoreDebug DHCSR: C_STEP Position */\r
+#define CoreDebug_DHCSR_C_STEP_Msk (1UL << CoreDebug_DHCSR_C_STEP_Pos) /*!< CoreDebug DHCSR: C_STEP Mask */\r
+\r
+#define CoreDebug_DHCSR_C_HALT_Pos 1U /*!< CoreDebug DHCSR: C_HALT Position */\r
+#define CoreDebug_DHCSR_C_HALT_Msk (1UL << CoreDebug_DHCSR_C_HALT_Pos) /*!< CoreDebug DHCSR: C_HALT Mask */\r
+\r
+#define CoreDebug_DHCSR_C_DEBUGEN_Pos 0U /*!< CoreDebug DHCSR: C_DEBUGEN Position */\r
+#define CoreDebug_DHCSR_C_DEBUGEN_Msk (1UL /*<< CoreDebug_DHCSR_C_DEBUGEN_Pos*/) /*!< CoreDebug DHCSR: C_DEBUGEN Mask */\r
+\r
+/* Debug Core Register Selector Register Definitions */\r
+#define CoreDebug_DCRSR_REGWnR_Pos 16U /*!< CoreDebug DCRSR: REGWnR Position */\r
+#define CoreDebug_DCRSR_REGWnR_Msk (1UL << CoreDebug_DCRSR_REGWnR_Pos) /*!< CoreDebug DCRSR: REGWnR Mask */\r
+\r
+#define CoreDebug_DCRSR_REGSEL_Pos 0U /*!< CoreDebug DCRSR: REGSEL Position */\r
+#define CoreDebug_DCRSR_REGSEL_Msk (0x1FUL /*<< CoreDebug_DCRSR_REGSEL_Pos*/) /*!< CoreDebug DCRSR: REGSEL Mask */\r
+\r
+/* Debug Exception and Monitor Control Register Definitions */\r
+#define CoreDebug_DEMCR_TRCENA_Pos 24U /*!< CoreDebug DEMCR: TRCENA Position */\r
+#define CoreDebug_DEMCR_TRCENA_Msk (1UL << CoreDebug_DEMCR_TRCENA_Pos) /*!< CoreDebug DEMCR: TRCENA Mask */\r
+\r
+#define CoreDebug_DEMCR_MON_REQ_Pos 19U /*!< CoreDebug DEMCR: MON_REQ Position */\r
+#define CoreDebug_DEMCR_MON_REQ_Msk (1UL << CoreDebug_DEMCR_MON_REQ_Pos) /*!< CoreDebug DEMCR: MON_REQ Mask */\r
+\r
+#define CoreDebug_DEMCR_MON_STEP_Pos 18U /*!< CoreDebug DEMCR: MON_STEP Position */\r
+#define CoreDebug_DEMCR_MON_STEP_Msk (1UL << CoreDebug_DEMCR_MON_STEP_Pos) /*!< CoreDebug DEMCR: MON_STEP Mask */\r
+\r
+#define CoreDebug_DEMCR_MON_PEND_Pos 17U /*!< CoreDebug DEMCR: MON_PEND Position */\r
+#define CoreDebug_DEMCR_MON_PEND_Msk (1UL << CoreDebug_DEMCR_MON_PEND_Pos) /*!< CoreDebug DEMCR: MON_PEND Mask */\r
+\r
+#define CoreDebug_DEMCR_MON_EN_Pos 16U /*!< CoreDebug DEMCR: MON_EN Position */\r
+#define CoreDebug_DEMCR_MON_EN_Msk (1UL << CoreDebug_DEMCR_MON_EN_Pos) /*!< CoreDebug DEMCR: MON_EN Mask */\r
+\r
+#define CoreDebug_DEMCR_VC_HARDERR_Pos 10U /*!< CoreDebug DEMCR: VC_HARDERR Position */\r
+#define CoreDebug_DEMCR_VC_HARDERR_Msk (1UL << CoreDebug_DEMCR_VC_HARDERR_Pos) /*!< CoreDebug DEMCR: VC_HARDERR Mask */\r
+\r
+#define CoreDebug_DEMCR_VC_INTERR_Pos 9U /*!< CoreDebug DEMCR: VC_INTERR Position */\r
+#define CoreDebug_DEMCR_VC_INTERR_Msk (1UL << CoreDebug_DEMCR_VC_INTERR_Pos) /*!< CoreDebug DEMCR: VC_INTERR Mask */\r
+\r
+#define CoreDebug_DEMCR_VC_BUSERR_Pos 8U /*!< CoreDebug DEMCR: VC_BUSERR Position */\r
+#define CoreDebug_DEMCR_VC_BUSERR_Msk (1UL << CoreDebug_DEMCR_VC_BUSERR_Pos) /*!< CoreDebug DEMCR: VC_BUSERR Mask */\r
+\r
+#define CoreDebug_DEMCR_VC_STATERR_Pos 7U /*!< CoreDebug DEMCR: VC_STATERR Position */\r
+#define CoreDebug_DEMCR_VC_STATERR_Msk (1UL << CoreDebug_DEMCR_VC_STATERR_Pos) /*!< CoreDebug DEMCR: VC_STATERR Mask */\r
+\r
+#define CoreDebug_DEMCR_VC_CHKERR_Pos 6U /*!< CoreDebug DEMCR: VC_CHKERR Position */\r
+#define CoreDebug_DEMCR_VC_CHKERR_Msk (1UL << CoreDebug_DEMCR_VC_CHKERR_Pos) /*!< CoreDebug DEMCR: VC_CHKERR Mask */\r
+\r
+#define CoreDebug_DEMCR_VC_NOCPERR_Pos 5U /*!< CoreDebug DEMCR: VC_NOCPERR Position */\r
+#define CoreDebug_DEMCR_VC_NOCPERR_Msk (1UL << CoreDebug_DEMCR_VC_NOCPERR_Pos) /*!< CoreDebug DEMCR: VC_NOCPERR Mask */\r
+\r
+#define CoreDebug_DEMCR_VC_MMERR_Pos 4U /*!< CoreDebug DEMCR: VC_MMERR Position */\r
+#define CoreDebug_DEMCR_VC_MMERR_Msk (1UL << CoreDebug_DEMCR_VC_MMERR_Pos) /*!< CoreDebug DEMCR: VC_MMERR Mask */\r
+\r
+#define CoreDebug_DEMCR_VC_CORERESET_Pos 0U /*!< CoreDebug DEMCR: VC_CORERESET Position */\r
+#define CoreDebug_DEMCR_VC_CORERESET_Msk (1UL /*<< CoreDebug_DEMCR_VC_CORERESET_Pos*/) /*!< CoreDebug DEMCR: VC_CORERESET Mask */\r
+\r
+/*@} end of group CMSIS_CoreDebug */\r
+\r
+\r
+/**\r
+ \ingroup CMSIS_core_register\r
+ \defgroup CMSIS_core_bitfield Core register bit field macros\r
+ \brief Macros for use with bit field definitions (xxx_Pos, xxx_Msk).\r
+ @{\r
+ */\r
+\r
+/**\r
+ \brief Mask and shift a bit field value for use in a register bit range.\r
+ \param[in] field Name of the register bit field.\r
+ \param[in] value Value of the bit field.\r
+ \return Masked and shifted value.\r
+*/\r
+#define _VAL2FLD(field, value) ((value << field ## _Pos) & field ## _Msk)\r
+\r
+/**\r
+ \brief Mask and shift a register value to extract a bit field value.\r
+ \param[in] field Name of the register bit field.\r
+ \param[in] value Value of register.\r
+ \return Masked and shifted bit field value.\r
+*/\r
+#define _FLD2VAL(field, value) ((value & field ## _Msk) >> field ## _Pos)\r
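+\r
+/*\r
+  Illustrative example (not part of the original CMSIS sources): reading the\r
+  PRIGROUP field of SCB->AIRCR with _FLD2VAL and writing it back with _VAL2FLD.\r
+\r
+    uint32_t grouping = _FLD2VAL(SCB_AIRCR_PRIGROUP, SCB->AIRCR);\r
+    uint32_t aircr    = SCB->AIRCR & ~(SCB_AIRCR_VECTKEY_Msk | SCB_AIRCR_PRIGROUP_Msk);\r
+    SCB->AIRCR = aircr | _VAL2FLD(SCB_AIRCR_VECTKEY, 0x5FAUL)\r
+                       | _VAL2FLD(SCB_AIRCR_PRIGROUP, grouping);\r
+*/\r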
+\r
+/*@} end of group CMSIS_core_bitfield */\r
+\r
+\r
+/**\r
+ \ingroup CMSIS_core_register\r
+ \defgroup CMSIS_core_base Core Definitions\r
+ \brief Definitions for base addresses, unions, and structures.\r
+ @{\r
+ */\r
+\r
+/* Memory mapping of Cortex-M4 Hardware */\r
+#define SCS_BASE (0xE000E000UL) /*!< System Control Space Base Address */\r
+#define ITM_BASE (0xE0000000UL) /*!< ITM Base Address */\r
+#define DWT_BASE (0xE0001000UL) /*!< DWT Base Address */\r
+#define TPI_BASE (0xE0040000UL) /*!< TPI Base Address */\r
+#define CoreDebug_BASE (0xE000EDF0UL) /*!< Core Debug Base Address */\r
+#define SysTick_BASE (SCS_BASE + 0x0010UL) /*!< SysTick Base Address */\r
+#define NVIC_BASE (SCS_BASE + 0x0100UL) /*!< NVIC Base Address */\r
+#define SCB_BASE (SCS_BASE + 0x0D00UL) /*!< System Control Block Base Address */\r
+\r
+#define SCnSCB ((SCnSCB_Type *) SCS_BASE ) /*!< System control Register not in SCB */\r
+#define SCB ((SCB_Type *) SCB_BASE ) /*!< SCB configuration struct */\r
+#define SysTick ((SysTick_Type *) SysTick_BASE ) /*!< SysTick configuration struct */\r
+#define NVIC ((NVIC_Type *) NVIC_BASE ) /*!< NVIC configuration struct */\r
+#define ITM ((ITM_Type *) ITM_BASE ) /*!< ITM configuration struct */\r
+#define DWT ((DWT_Type *) DWT_BASE ) /*!< DWT configuration struct */\r
+#define TPI ((TPI_Type *) TPI_BASE ) /*!< TPI configuration struct */\r
+#define CoreDebug ((CoreDebug_Type *) CoreDebug_BASE) /*!< Core Debug configuration struct */\r
+\r
+#if (__MPU_PRESENT == 1U)\r
+ #define MPU_BASE (SCS_BASE + 0x0D90UL) /*!< Memory Protection Unit */\r
+ #define MPU ((MPU_Type *) MPU_BASE ) /*!< Memory Protection Unit */\r
+#endif\r
+\r
+#if (__FPU_PRESENT == 1U)\r
+ #define FPU_BASE (SCS_BASE + 0x0F30UL) /*!< Floating Point Unit */\r
+ #define FPU ((FPU_Type *) FPU_BASE ) /*!< Floating Point Unit */\r
+#endif\r
+\r
+/*@} */\r
+\r
+\r
+\r
+/*******************************************************************************\r
+ * Hardware Abstraction Layer\r
+ Core Function Interface contains:\r
+ - Core NVIC Functions\r
+ - Core SysTick Functions\r
+ - Core Debug Functions\r
+ - Core Register Access Functions\r
+ ******************************************************************************/\r
+/**\r
+ \defgroup CMSIS_Core_FunctionInterface Functions and Instructions Reference\r
+*/\r
+\r
+\r
+\r
+/* ########################## NVIC functions #################################### */\r
+/**\r
+ \ingroup CMSIS_Core_FunctionInterface\r
+ \defgroup CMSIS_Core_NVICFunctions NVIC Functions\r
+ \brief Functions that manage interrupts and exceptions via the NVIC.\r
+ @{\r
+ */\r
+\r
+/**\r
+ \brief Set Priority Grouping\r
+ \details Sets the priority grouping field using the required unlock sequence.\r
+ The parameter PriorityGroup is assigned to the field SCB->AIRCR [10:8] PRIGROUP field.\r
+ Only values from 0..7 are used.\r
+ In case of a conflict between priority grouping and available\r
+ priority bits (__NVIC_PRIO_BITS), the smallest possible priority group is set.\r
+ \param [in] PriorityGroup Priority grouping field.\r
+ */\r
+__STATIC_INLINE void NVIC_SetPriorityGrouping(uint32_t PriorityGroup)\r
+{\r
+ uint32_t reg_value;\r
+ uint32_t PriorityGroupTmp = (PriorityGroup & (uint32_t)0x07UL); /* only values 0..7 are used */\r
+\r
+ reg_value = SCB->AIRCR; /* read old register configuration */\r
+ reg_value &= ~((uint32_t)(SCB_AIRCR_VECTKEY_Msk | SCB_AIRCR_PRIGROUP_Msk)); /* clear bits to change */\r
+ reg_value = (reg_value |\r
+ ((uint32_t)0x5FAUL << SCB_AIRCR_VECTKEY_Pos) |\r
+ (PriorityGroupTmp << 8U) ); /* Insert write key and priority group */\r
+ SCB->AIRCR = reg_value;\r
+}\r
+\r
+\r
+/**\r
+ \brief Get Priority Grouping\r
+ \details Reads the priority grouping field from the NVIC Interrupt Controller.\r
+ \return Priority grouping field (SCB->AIRCR [10:8] PRIGROUP field).\r
+ */\r
+__STATIC_INLINE uint32_t NVIC_GetPriorityGrouping(void)\r
+{\r
+ return ((uint32_t)((SCB->AIRCR & SCB_AIRCR_PRIGROUP_Msk) >> SCB_AIRCR_PRIGROUP_Pos));\r
+}\r
+\r
+\r
+/**\r
+ \brief Enable External Interrupt\r
+ \details Enables a device-specific interrupt in the NVIC interrupt controller.\r
+ \param [in] IRQn External interrupt number. Value cannot be negative.\r
+ */\r
+__STATIC_INLINE void NVIC_EnableIRQ(IRQn_Type IRQn)\r
+{\r
+ NVIC->ISER[(((uint32_t)(int32_t)IRQn) >> 5UL)] = (uint32_t)(1UL << (((uint32_t)(int32_t)IRQn) & 0x1FUL));\r
+}\r
+\r
+\r
+/**\r
+ \brief Disable External Interrupt\r
+ \details Disables a device-specific interrupt in the NVIC interrupt controller.\r
+ \param [in] IRQn External interrupt number. Value cannot be negative.\r
+ */\r
+__STATIC_INLINE void NVIC_DisableIRQ(IRQn_Type IRQn)\r
+{\r
+ NVIC->ICER[(((uint32_t)(int32_t)IRQn) >> 5UL)] = (uint32_t)(1UL << (((uint32_t)(int32_t)IRQn) & 0x1FUL));\r
+}\r
+\r
+\r
+/**\r
+ \brief Get Pending Interrupt\r
+ \details Reads the pending register in the NVIC and returns the pending bit for the specified interrupt.\r
+ \param [in] IRQn Interrupt number.\r
+ \return 0 Interrupt status is not pending.\r
+ \return 1 Interrupt status is pending.\r
+ */\r
+__STATIC_INLINE uint32_t NVIC_GetPendingIRQ(IRQn_Type IRQn)\r
+{\r
+ return((uint32_t)(((NVIC->ISPR[(((uint32_t)(int32_t)IRQn) >> 5UL)] & (1UL << (((uint32_t)(int32_t)IRQn) & 0x1FUL))) != 0UL) ? 1UL : 0UL));\r
+}\r
+\r
+\r
+/**\r
+ \brief Set Pending Interrupt\r
+ \details Sets the pending bit of an external interrupt.\r
+ \param [in] IRQn Interrupt number. Value cannot be negative.\r
+ */\r
+__STATIC_INLINE void NVIC_SetPendingIRQ(IRQn_Type IRQn)\r
+{\r
+ NVIC->ISPR[(((uint32_t)(int32_t)IRQn) >> 5UL)] = (uint32_t)(1UL << (((uint32_t)(int32_t)IRQn) & 0x1FUL));\r
+}\r
+\r
+\r
+/**\r
+ \brief Clear Pending Interrupt\r
+ \details Clears the pending bit of an external interrupt.\r
+ \param [in] IRQn External interrupt number. Value cannot be negative.\r
+ */\r
+__STATIC_INLINE void NVIC_ClearPendingIRQ(IRQn_Type IRQn)\r
+{\r
+ NVIC->ICPR[(((uint32_t)(int32_t)IRQn) >> 5UL)] = (uint32_t)(1UL << (((uint32_t)(int32_t)IRQn) & 0x1FUL));\r
+}\r
+\r
+\r
+/**\r
+ \brief Get Active Interrupt\r
+ \details Reads the active register in NVIC and returns the active bit.\r
+ \param [in] IRQn Interrupt number.\r
+ \return 0 Interrupt status is not active.\r
+ \return 1 Interrupt status is active.\r
+ */\r
+__STATIC_INLINE uint32_t NVIC_GetActive(IRQn_Type IRQn)\r
+{\r
+ return((uint32_t)(((NVIC->IABR[(((uint32_t)(int32_t)IRQn) >> 5UL)] & (1UL << (((uint32_t)(int32_t)IRQn) & 0x1FUL))) != 0UL) ? 1UL : 0UL));\r
+}\r
+\r
+\r
+/**\r
+ \brief Set Interrupt Priority\r
+ \details Sets the priority of an interrupt.\r
+ \note The priority cannot be set for every core interrupt.\r
+ \param [in] IRQn Interrupt number.\r
+ \param [in] priority Priority to set.\r
+ */\r
+__STATIC_INLINE void NVIC_SetPriority(IRQn_Type IRQn, uint32_t priority)\r
+{\r
+ if ((int32_t)(IRQn) < 0)\r
+ {\r
+ SCB->SHP[(((uint32_t)(int32_t)IRQn) & 0xFUL)-4UL] = (uint8_t)((priority << (8U - __NVIC_PRIO_BITS)) & (uint32_t)0xFFUL);\r
+ }\r
+ else\r
+ {\r
+ NVIC->IP[((uint32_t)(int32_t)IRQn)] = (uint8_t)((priority << (8U - __NVIC_PRIO_BITS)) & (uint32_t)0xFFUL);\r
+ }\r
+}\r
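+\r
+/*\r
+  Illustrative example (not part of the original CMSIS sources): set the\r
+  priority of a device interrupt and then enable it. TIM2_IRQn stands in for\r
+  any device-specific interrupt number defined by the vendor device header.\r
+\r
+    NVIC_SetPriority(TIM2_IRQn, 5U);   // larger value = lower urgency\r
+    NVIC_EnableIRQ(TIM2_IRQn);\r
+*/\r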
+\r
+\r
+/**\r
+ \brief Get Interrupt Priority\r
+ \details Reads the priority of an interrupt.\r
+ The interrupt number can be positive to specify an external (device specific) interrupt,\r
+ or negative to specify an internal (core) interrupt.\r
+ \param [in] IRQn Interrupt number.\r
+ \return Interrupt Priority.\r
+ Value is aligned automatically to the implemented priority bits of the microcontroller.\r
+ */\r
+__STATIC_INLINE uint32_t NVIC_GetPriority(IRQn_Type IRQn)\r
+{\r
+\r
+ if ((int32_t)(IRQn) < 0)\r
+ {\r
+ return(((uint32_t)SCB->SHP[(((uint32_t)(int32_t)IRQn) & 0xFUL)-4UL] >> (8U - __NVIC_PRIO_BITS)));\r
+ }\r
+ else\r
+ {\r
+ return(((uint32_t)NVIC->IP[((uint32_t)(int32_t)IRQn)] >> (8U - __NVIC_PRIO_BITS)));\r
+ }\r
+}\r
+\r
+\r
+/**\r
+ \brief Encode Priority\r
+ \details Encodes the priority for an interrupt with the given priority group,\r
+ preemptive priority value, and subpriority value.\r
+ In case of a conflict between priority grouping and available\r
+ priority bits (__NVIC_PRIO_BITS), the smallest possible priority group is set.\r
+ \param [in] PriorityGroup Used priority group.\r
+ \param [in] PreemptPriority Preemptive priority value (starting from 0).\r
+ \param [in] SubPriority Subpriority value (starting from 0).\r
+ \return Encoded priority. Value can be used in the function \ref NVIC_SetPriority().\r
+ */\r
+__STATIC_INLINE uint32_t NVIC_EncodePriority (uint32_t PriorityGroup, uint32_t PreemptPriority, uint32_t SubPriority)\r
+{\r
+ uint32_t PriorityGroupTmp = (PriorityGroup & (uint32_t)0x07UL); /* only values 0..7 are used */\r
+ uint32_t PreemptPriorityBits;\r
+ uint32_t SubPriorityBits;\r
+\r
+ PreemptPriorityBits = ((7UL - PriorityGroupTmp) > (uint32_t)(__NVIC_PRIO_BITS)) ? (uint32_t)(__NVIC_PRIO_BITS) : (uint32_t)(7UL - PriorityGroupTmp);\r
+ SubPriorityBits = ((PriorityGroupTmp + (uint32_t)(__NVIC_PRIO_BITS)) < (uint32_t)7UL) ? (uint32_t)0UL : (uint32_t)((PriorityGroupTmp - 7UL) + (uint32_t)(__NVIC_PRIO_BITS));\r
+\r
+ return (\r
+ ((PreemptPriority & (uint32_t)((1UL << (PreemptPriorityBits)) - 1UL)) << SubPriorityBits) |\r
+ ((SubPriority & (uint32_t)((1UL << (SubPriorityBits )) - 1UL)))\r
+ );\r
+}\r
+\r
+\r
+/**\r
+ \brief Decode Priority\r
+ \details Decodes an interrupt priority value with a given priority group to\r
+ preemptive priority value and subpriority value.\r
+ In case of a conflict between priority grouping and available\r
+ priority bits (__NVIC_PRIO_BITS) the smallest possible priority group is set.\r
+ \param [in] Priority Priority value, which can be retrieved with the function \ref NVIC_GetPriority().\r
+ \param [in] PriorityGroup Used priority group.\r
+ \param [out] pPreemptPriority Preemptive priority value (starting from 0).\r
+ \param [out] pSubPriority Subpriority value (starting from 0).\r
+ */\r
+__STATIC_INLINE void NVIC_DecodePriority (uint32_t Priority, uint32_t PriorityGroup, uint32_t* const pPreemptPriority, uint32_t* const pSubPriority)\r
+{\r
+ uint32_t PriorityGroupTmp = (PriorityGroup & (uint32_t)0x07UL); /* only values 0..7 are used */\r
+ uint32_t PreemptPriorityBits;\r
+ uint32_t SubPriorityBits;\r
+\r
+ PreemptPriorityBits = ((7UL - PriorityGroupTmp) > (uint32_t)(__NVIC_PRIO_BITS)) ? (uint32_t)(__NVIC_PRIO_BITS) : (uint32_t)(7UL - PriorityGroupTmp);\r
+ SubPriorityBits = ((PriorityGroupTmp + (uint32_t)(__NVIC_PRIO_BITS)) < (uint32_t)7UL) ? (uint32_t)0UL : (uint32_t)((PriorityGroupTmp - 7UL) + (uint32_t)(__NVIC_PRIO_BITS));\r
+\r
+ *pPreemptPriority = (Priority >> SubPriorityBits) & (uint32_t)((1UL << (PreemptPriorityBits)) - 1UL);\r
+ *pSubPriority = (Priority ) & (uint32_t)((1UL << (SubPriorityBits )) - 1UL);\r
+}\r
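+\r
+/*\r
+  Illustrative example (not part of the original CMSIS sources): select a\r
+  priority grouping, then build an encoded priority from a preemption level\r
+  and a subpriority. TIM2_IRQn again stands in for a device-specific IRQ.\r
+\r
+    NVIC_SetPriorityGrouping(5U);      /* e.g. 2 preemption bits, 2 sub bits on 4-bit devices */\r
+    uint32_t prio = NVIC_EncodePriority(NVIC_GetPriorityGrouping(), 1U, 2U);\r
+    NVIC_SetPriority(TIM2_IRQn, prio);\r
+*/\r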
+\r
+\r
+/**\r
+ \brief System Reset\r
+ \details Initiates a system reset request to reset the MCU.\r
+ */\r
+__STATIC_INLINE void NVIC_SystemReset(void)\r
+{\r
+ __DSB(); /* Ensure all outstanding memory accesses, including\r
+ buffered writes, are completed before reset */\r
+ SCB->AIRCR = (uint32_t)((0x5FAUL << SCB_AIRCR_VECTKEY_Pos) |\r
+ (SCB->AIRCR & SCB_AIRCR_PRIGROUP_Msk) |\r
+ SCB_AIRCR_SYSRESETREQ_Msk ); /* Keep priority group unchanged */\r
+ __DSB(); /* Ensure completion of memory access */\r
+\r
+ for(;;) /* wait until reset */\r
+ {\r
+ __NOP();\r
+ }\r
+}\r
+\r
+/*@} end of CMSIS_Core_NVICFunctions */\r
+\r
+\r
+\r
+/* ################################## SysTick function ############################################ */\r
+/**\r
+ \ingroup CMSIS_Core_FunctionInterface\r
+ \defgroup CMSIS_Core_SysTickFunctions SysTick Functions\r
+ \brief Functions that configure the System.\r
+ @{\r
+ */\r
+\r
+#if (__Vendor_SysTickConfig == 0U)\r
+\r
+/**\r
+ \brief System Tick Configuration\r
+ \details Initializes the System Timer and its interrupt, and starts the System Tick Timer.\r
+ Counter is in free running mode to generate periodic interrupts.\r
+ \param [in] ticks Number of ticks between two interrupts.\r
+ \return 0 Function succeeded.\r
+ \return 1 Function failed.\r
+ \note When the variable <b>__Vendor_SysTickConfig</b> is set to 1, then the\r
+ function <b>SysTick_Config</b> is not included. In this case, the file <b><i>device</i>.h</b>\r
+ must contain a vendor-specific implementation of this function.\r
+ */\r
+__STATIC_INLINE uint32_t SysTick_Config(uint32_t ticks)\r
+{\r
+ if ((ticks - 1UL) > SysTick_LOAD_RELOAD_Msk)\r
+ {\r
+ return (1UL); /* Reload value impossible */\r
+ }\r
+\r
+ SysTick->LOAD = (uint32_t)(ticks - 1UL); /* set reload register */\r
+ NVIC_SetPriority (SysTick_IRQn, (1UL << __NVIC_PRIO_BITS) - 1UL); /* set Priority for Systick Interrupt */\r
+ SysTick->VAL = 0UL; /* Load the SysTick Counter Value */\r
+ SysTick->CTRL = SysTick_CTRL_CLKSOURCE_Msk |\r
+ SysTick_CTRL_TICKINT_Msk |\r
+ SysTick_CTRL_ENABLE_Msk; /* Enable SysTick IRQ and SysTick Timer */\r
+ return (0UL); /* Function successful */\r
+}\r
+\r
+#endif\r
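+\r
+/*\r
+  Illustrative example (not part of the original CMSIS sources): start a 1 ms\r
+  tick, assuming the device startup code provides SystemCoreClock in Hz.\r
+\r
+    if (SysTick_Config(SystemCoreClock / 1000U) != 0U)\r
+    {\r
+      for (;;) { __NOP(); }   // reload value did not fit the 24-bit LOAD register\r
+    }\r
+*/\r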
+\r
+/*@} end of CMSIS_Core_SysTickFunctions */\r
+\r
+\r
+\r
+/* ##################################### Debug In/Output function ########################################### */\r
+/**\r
+ \ingroup CMSIS_Core_FunctionInterface\r
+ \defgroup CMSIS_core_DebugFunctions ITM Functions\r
+ \brief Functions that access the ITM debug interface.\r
+ @{\r
+ */\r
+\r
+extern volatile int32_t ITM_RxBuffer; /*!< External variable to receive characters. */\r
+#define ITM_RXBUFFER_EMPTY 0x5AA55AA5U /*!< Value identifying \ref ITM_RxBuffer is ready for next character. */\r
+\r
+\r
+/**\r
+ \brief ITM Send Character\r
+ \details Transmits a character via the ITM channel 0, and\r
+ \li Just returns when no debugger is connected that has booked the output.\r
+ \li Is blocking when a debugger is connected, but the previous character sent has not been transmitted.\r
+ \param [in] ch Character to transmit.\r
+ \returns Character to transmit.\r
+ */\r
+__STATIC_INLINE uint32_t ITM_SendChar (uint32_t ch)\r
+{\r
+ if (((ITM->TCR & ITM_TCR_ITMENA_Msk) != 0UL) && /* ITM enabled */\r
+ ((ITM->TER & 1UL ) != 0UL) ) /* ITM Port #0 enabled */\r
+ {\r
+ while (ITM->PORT[0U].u32 == 0UL)\r
+ {\r
+ __NOP();\r
+ }\r
+ ITM->PORT[0U].u8 = (uint8_t)ch;\r
+ }\r
+ return (ch);\r
+}\r
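+\r
+/*\r
+  Illustrative example (not part of the original CMSIS sources): route a\r
+  printf-style output function to ITM stimulus port 0. The _write name is a\r
+  hypothetical newlib-style retarget hook and may differ per toolchain.\r
+\r
+    int _write(int fd, const char *buf, int len)\r
+    {\r
+      (void)fd;\r
+      for (int i = 0; i < len; i++) { ITM_SendChar((uint32_t)buf[i]); }\r
+      return len;\r
+    }\r
+*/\r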
+\r
+\r
+/**\r
+ \brief ITM Receive Character\r
+ \details Inputs a character via the external variable \ref ITM_RxBuffer.\r
+ \return Received character.\r
+ \return -1 No character pending.\r
+ */\r
+__STATIC_INLINE int32_t ITM_ReceiveChar (void)\r
+{\r
+ int32_t ch = -1; /* no character available */\r
+\r
+ if (ITM_RxBuffer != ITM_RXBUFFER_EMPTY)\r
+ {\r
+ ch = ITM_RxBuffer;\r
+ ITM_RxBuffer = ITM_RXBUFFER_EMPTY; /* ready for next character */\r
+ }\r
+\r
+ return (ch);\r
+}\r
+\r
+\r
+/**\r
+ \brief ITM Check Character\r
+ \details Checks whether a character is pending for reading in the variable \ref ITM_RxBuffer.\r
+ \return 0 No character available.\r
+ \return 1 Character available.\r
+ */\r
+__STATIC_INLINE int32_t ITM_CheckChar (void)\r
+{\r
+\r
+ if (ITM_RxBuffer == ITM_RXBUFFER_EMPTY)\r
+ {\r
+ return (0); /* no character available */\r
+ }\r
+ else\r
+ {\r
+ return (1); /* character available */\r
+ }\r
+}\r
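+\r
+/*\r
+  Illustrative example (not part of the original CMSIS sources): poll for a\r
+  character placed by the debugger into ITM_RxBuffer and echo it back.\r
+\r
+    if (ITM_CheckChar() != 0)\r
+    {\r
+      int32_t c = ITM_ReceiveChar();\r
+      ITM_SendChar((uint32_t)c);\r
+    }\r
+*/\r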
+\r
+/*@} end of CMSIS_core_DebugFunctions */\r
+\r
+\r
+\r
+\r
+#ifdef __cplusplus\r
+}\r
+#endif\r
+\r
+#endif /* __CORE_CM4_H_DEPENDANT */\r
+\r
+#endif /* __CMSIS_GENERIC */\r
--- /dev/null
+/**************************************************************************//**\r
+ * @file core_cmFunc.h\r
+ * @brief CMSIS Cortex-M Core Function Access Header File\r
+ * @version V4.30\r
+ * @date 20. October 2015\r
+ ******************************************************************************/\r
+/* Copyright (c) 2009 - 2015 ARM LIMITED\r
+\r
+ All rights reserved.\r
+ Redistribution and use in source and binary forms, with or without\r
+ modification, are permitted provided that the following conditions are met:\r
+ - Redistributions of source code must retain the above copyright\r
+ notice, this list of conditions and the following disclaimer.\r
+ - Redistributions in binary form must reproduce the above copyright\r
+ notice, this list of conditions and the following disclaimer in the\r
+ documentation and/or other materials provided with the distribution.\r
+ - Neither the name of ARM nor the names of its contributors may be used\r
+ to endorse or promote products derived from this software without\r
+ specific prior written permission.\r
+ *\r
+ THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"\r
+ AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE\r
+ IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE\r
+ ARE DISCLAIMED. IN NO EVENT SHALL COPYRIGHT HOLDERS AND CONTRIBUTORS BE\r
+ LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR\r
+ CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF\r
+ SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS\r
+ INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN\r
+ CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)\r
+ ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE\r
+ POSSIBILITY OF SUCH DAMAGE.\r
+ ---------------------------------------------------------------------------*/\r
+\r
+\r
+#if defined ( __ICCARM__ )\r
+ #pragma system_include /* treat file as system include file for MISRA check */\r
+#elif defined(__ARMCC_VERSION) && (__ARMCC_VERSION >= 6010050)\r
+ #pragma clang system_header /* treat file as system include file */\r
+#endif\r
+\r
+#ifndef __CORE_CMFUNC_H\r
+#define __CORE_CMFUNC_H\r
+\r
+\r
+/* ########################### Core Function Access ########################### */\r
+/** \ingroup CMSIS_Core_FunctionInterface\r
+ \defgroup CMSIS_Core_RegAccFunctions CMSIS Core Register Access Functions\r
+ @{\r
+*/\r
+\r
+/*------------------ RealView Compiler -----------------*/\r
+#if defined ( __CC_ARM )\r
+ #include "cmsis_armcc.h"\r
+\r
+/*------------------ ARM Compiler V6 -------------------*/\r
+#elif defined(__ARMCC_VERSION) && (__ARMCC_VERSION >= 6010050)\r
+ #include "cmsis_armcc_V6.h"\r
+\r
+/*------------------ GNU Compiler ----------------------*/\r
+#elif defined ( __GNUC__ )\r
+ #include "cmsis_gcc.h"\r
+\r
+/*------------------ ICC Compiler ----------------------*/\r
+#elif defined ( __ICCARM__ )\r
+ #include <cmsis_iar.h>\r
+\r
+/*------------------ TI CCS Compiler -------------------*/\r
+#elif defined ( __TMS470__ )\r
+ #include <cmsis_ccs.h>\r
+\r
+/*------------------ TASKING Compiler ------------------*/\r
+#elif defined ( __TASKING__ )\r
+ /*\r
+ * The CMSIS functions have been implemented as intrinsics in the compiler.\r
+ * Please use "carm -?i" to get an up to date list of all intrinsics,\r
+ * Including the CMSIS ones.\r
+ */\r
+\r
+/*------------------ COSMIC Compiler -------------------*/\r
+#elif defined ( __CSMC__ )\r
+ #include <cmsis_csm.h>\r
+\r
+#endif\r
+\r
+/*@} end of CMSIS_Core_RegAccFunctions */\r
+\r
+#endif /* __CORE_CMFUNC_H */\r
--- /dev/null
+/**************************************************************************//**\r
+ * @file core_cmInstr.h\r
+ * @brief CMSIS Cortex-M Core Instruction Access Header File\r
+ * @version V4.30\r
+ * @date 20. October 2015\r
+ ******************************************************************************/\r
+/* Copyright (c) 2009 - 2015 ARM LIMITED\r
+\r
+ All rights reserved.\r
+ Redistribution and use in source and binary forms, with or without\r
+ modification, are permitted provided that the following conditions are met:\r
+ - Redistributions of source code must retain the above copyright\r
+ notice, this list of conditions and the following disclaimer.\r
+ - Redistributions in binary form must reproduce the above copyright\r
+ notice, this list of conditions and the following disclaimer in the\r
+ documentation and/or other materials provided with the distribution.\r
+ - Neither the name of ARM nor the names of its contributors may be used\r
+ to endorse or promote products derived from this software without\r
+ specific prior written permission.\r
+ *\r
+ THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"\r
+ AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE\r
+ IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE\r
+ ARE DISCLAIMED. IN NO EVENT SHALL COPYRIGHT HOLDERS AND CONTRIBUTORS BE\r
+ LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR\r
+ CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF\r
+ SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS\r
+ INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN\r
+ CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)\r
+ ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE\r
+ POSSIBILITY OF SUCH DAMAGE.\r
+ ---------------------------------------------------------------------------*/\r
+\r
+\r
+#if defined ( __ICCARM__ )\r
+ #pragma system_include /* treat file as system include file for MISRA check */\r
+#elif defined(__ARMCC_VERSION) && (__ARMCC_VERSION >= 6010050)\r
+ #pragma clang system_header /* treat file as system include file */\r
+#endif\r
+\r
+#ifndef __CORE_CMINSTR_H\r
+#define __CORE_CMINSTR_H\r
+\r
+\r
+/* ########################## Core Instruction Access ######################### */\r
+/** \defgroup CMSIS_Core_InstructionInterface CMSIS Core Instruction Interface\r
+ Access to dedicated instructions\r
+ @{\r
+*/\r
+\r
+/* CHIBIOS FIX */\r
+#if !defined(__CORTEX_SC)\r
+#define __CORTEX_SC 0\r
+#endif\r
+\r
+/*------------------ RealView Compiler -----------------*/\r
+#if defined ( __CC_ARM )\r
+ #include "cmsis_armcc.h"\r
+\r
+/*------------------ ARM Compiler V6 -------------------*/\r
+#elif defined(__ARMCC_VERSION) && (__ARMCC_VERSION >= 6010050)\r
+ #include "cmsis_armcc_V6.h"\r
+\r
+/*------------------ GNU Compiler ----------------------*/\r
+#elif defined ( __GNUC__ )\r
+ #include "cmsis_gcc.h"\r
+\r
+/*------------------ ICC Compiler ----------------------*/\r
+#elif defined ( __ICCARM__ )\r
+ #include <cmsis_iar.h>\r
+\r
+/*------------------ TI CCS Compiler -------------------*/\r
+#elif defined ( __TMS470__ )\r
+ #include <cmsis_ccs.h>\r
+\r
+/*------------------ TASKING Compiler ------------------*/\r
+#elif defined ( __TASKING__ )\r
+ /*\r
+ * The CMSIS functions have been implemented as intrinsics in the compiler.\r
+ * Please use "carm -?i" to get an up to date list of all intrinsics,\r
+ * Including the CMSIS ones.\r
+ */\r
+\r
+/*------------------ COSMIC Compiler -------------------*/\r
+#elif defined ( __CSMC__ )\r
+ #include <cmsis_csm.h>\r
+\r
+#endif\r
+\r
+/*@}*/ /* end of group CMSIS_Core_InstructionInterface */\r
+\r
+#endif /* __CORE_CMINSTR_H */\r
--- /dev/null
+/**************************************************************************//**\r
+ * @file core_cmSimd.h\r
+ * @brief CMSIS Cortex-M SIMD Header File\r
+ * @version V4.30\r
+ * @date 20. October 2015\r
+ ******************************************************************************/\r
+/* Copyright (c) 2009 - 2015 ARM LIMITED\r
+\r
+ All rights reserved.\r
+ Redistribution and use in source and binary forms, with or without\r
+ modification, are permitted provided that the following conditions are met:\r
+ - Redistributions of source code must retain the above copyright\r
+ notice, this list of conditions and the following disclaimer.\r
+ - Redistributions in binary form must reproduce the above copyright\r
+ notice, this list of conditions and the following disclaimer in the\r
+ documentation and/or other materials provided with the distribution.\r
+ - Neither the name of ARM nor the names of its contributors may be used\r
+ to endorse or promote products derived from this software without\r
+ specific prior written permission.\r
+ *\r
+ THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"\r
+ AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE\r
+ IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE\r
+ ARE DISCLAIMED. IN NO EVENT SHALL COPYRIGHT HOLDERS AND CONTRIBUTORS BE\r
+ LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR\r
+ CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF\r
+ SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS\r
+ INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN\r
+ CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)\r
+ ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE\r
+ POSSIBILITY OF SUCH DAMAGE.\r
+ ---------------------------------------------------------------------------*/\r
+\r
+\r
+#if defined ( __ICCARM__ )\r
+ #pragma system_include /* treat file as system include file for MISRA check */\r
+#elif defined(__ARMCC_VERSION) && (__ARMCC_VERSION >= 6010050)\r
+ #pragma clang system_header /* treat file as system include file */\r
+#endif\r
+\r
+#ifndef __CORE_CMSIMD_H\r
+#define __CORE_CMSIMD_H\r
+\r
+#ifdef __cplusplus\r
+ extern "C" {\r
+#endif\r
+\r
+\r
+/* ################### Compiler specific Intrinsics ########################### */\r
+/** \defgroup CMSIS_SIMD_intrinsics CMSIS SIMD Intrinsics\r
+ Access to dedicated SIMD instructions\r
+ @{\r
+*/\r
+\r
+/*------------------ RealView Compiler -----------------*/\r
+#if defined ( __CC_ARM )\r
+ #include "cmsis_armcc.h"\r
+\r
+/*------------------ ARM Compiler V6 -------------------*/\r
+#elif defined(__ARMCC_VERSION) && (__ARMCC_VERSION >= 6010050)\r
+ #include "cmsis_armcc_V6.h"\r
+\r
+/*------------------ GNU Compiler ----------------------*/\r
+#elif defined ( __GNUC__ )\r
+ #include "cmsis_gcc.h"\r
+\r
+/*------------------ ICC Compiler ----------------------*/\r
+#elif defined ( __ICCARM__ )\r
+ #include <cmsis_iar.h>\r
+\r
+/*------------------ TI CCS Compiler -------------------*/\r
+#elif defined ( __TMS470__ )\r
+ #include <cmsis_ccs.h>\r
+\r
+/*------------------ TASKING Compiler ------------------*/\r
+#elif defined ( __TASKING__ )\r
+ /*\r
+ * The CMSIS functions have been implemented as intrinsics in the compiler.\r
+ * Please use "carm -?i" to get an up to date list of all intrinsics,\r
+ * Including the CMSIS ones.\r
+ */\r
+\r
+/*------------------ COSMIC Compiler -------------------*/\r
+#elif defined ( __CSMC__ )\r
+ #include <cmsis_csm.h>\r
+\r
+#endif\r
+\r
+/*@} end of group CMSIS_SIMD_intrinsics */\r
+\r
+\r
+#ifdef __cplusplus\r
+}\r
+#endif\r
+\r
+#endif /* __CORE_CMSIMD_H */\r
+++ /dev/null
-/**************************************************************************//**\r
- * @file cmsis_gcc.h\r
- * @brief CMSIS Cortex-M Core Function/Instruction Header File\r
- * @version V4.30\r
- * @date 20. October 2015\r
- ******************************************************************************/\r
-/* Copyright (c) 2009 - 2015 ARM LIMITED\r
-\r
- All rights reserved.\r
- Redistribution and use in source and binary forms, with or without\r
- modification, are permitted provided that the following conditions are met:\r
- - Redistributions of source code must retain the above copyright\r
- notice, this list of conditions and the following disclaimer.\r
- - Redistributions in binary form must reproduce the above copyright\r
- notice, this list of conditions and the following disclaimer in the\r
- documentation and/or other materials provided with the distribution.\r
- - Neither the name of ARM nor the names of its contributors may be used\r
- to endorse or promote products derived from this software without\r
- specific prior written permission.\r
- *\r
- THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"\r
- AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE\r
- IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE\r
- ARE DISCLAIMED. IN NO EVENT SHALL COPYRIGHT HOLDERS AND CONTRIBUTORS BE\r
- LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR\r
- CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF\r
- SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS\r
- INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN\r
- CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)\r
- ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE\r
- POSSIBILITY OF SUCH DAMAGE.\r
- ---------------------------------------------------------------------------*/\r
-\r
-\r
-#ifndef __CMSIS_GCC_H\r
-#define __CMSIS_GCC_H\r
-\r
-/* ignore some GCC warnings */\r
-#if defined ( __GNUC__ )\r
-#pragma GCC diagnostic push\r
-#pragma GCC diagnostic ignored "-Wsign-conversion"\r
-#pragma GCC diagnostic ignored "-Wconversion"\r
-#pragma GCC diagnostic ignored "-Wunused-parameter"\r
-#endif\r
-\r
-\r
-/* ########################### Core Function Access ########################### */\r
-/** \ingroup CMSIS_Core_FunctionInterface\r
- \defgroup CMSIS_Core_RegAccFunctions CMSIS Core Register Access Functions\r
- @{\r
- */\r
-\r
-/**\r
- \brief Enable IRQ Interrupts\r
- \details Enables IRQ interrupts by clearing the I-bit in the CPSR.\r
- Can only be executed in Privileged modes.\r
- */\r
-__attribute__( ( always_inline ) ) __STATIC_INLINE void __enable_irq(void)\r
-{\r
- __ASM volatile ("cpsie i" : : : "memory");\r
-}\r
-\r
-\r
-/**\r
- \brief Disable IRQ Interrupts\r
- \details Disables IRQ interrupts by setting the I-bit in the CPSR.\r
- Can only be executed in Privileged modes.\r
- */\r
-__attribute__( ( always_inline ) ) __STATIC_INLINE void __disable_irq(void)\r
-{\r
- __ASM volatile ("cpsid i" : : : "memory");\r
-}\r
-\r
-\r
-/**\r
- \brief Get Control Register\r
- \details Returns the content of the Control Register.\r
- \return Control Register value\r
- */\r
-__attribute__( ( always_inline ) ) __STATIC_INLINE uint32_t __get_CONTROL(void)\r
-{\r
- uint32_t result;\r
-\r
- __ASM volatile ("MRS %0, control" : "=r" (result) );\r
- return(result);\r
-}\r
-\r
-\r
-/**\r
- \brief Set Control Register\r
- \details Writes the given value to the Control Register.\r
- \param [in] control Control Register value to set\r
- */\r
-__attribute__( ( always_inline ) ) __STATIC_INLINE void __set_CONTROL(uint32_t control)\r
-{\r
- __ASM volatile ("MSR control, %0" : : "r" (control) : "memory");\r
-}\r
-\r
-\r
-/**\r
- \brief Get IPSR Register\r
- \details Returns the content of the IPSR Register.\r
- \return IPSR Register value\r
- */\r
-__attribute__( ( always_inline ) ) __STATIC_INLINE uint32_t __get_IPSR(void)\r
-{\r
- uint32_t result;\r
-\r
- __ASM volatile ("MRS %0, ipsr" : "=r" (result) );\r
- return(result);\r
-}\r
-\r
-\r
-/**\r
- \brief Get APSR Register\r
- \details Returns the content of the APSR Register.\r
- \return APSR Register value\r
- */\r
-__attribute__( ( always_inline ) ) __STATIC_INLINE uint32_t __get_APSR(void)\r
-{\r
- uint32_t result;\r
-\r
- __ASM volatile ("MRS %0, apsr" : "=r" (result) );\r
- return(result);\r
-}\r
-\r
-\r
-/**\r
- \brief Get xPSR Register\r
- \details Returns the content of the xPSR Register.\r
-\r
- \return xPSR Register value\r
- */\r
-__attribute__( ( always_inline ) ) __STATIC_INLINE uint32_t __get_xPSR(void)\r
-{\r
- uint32_t result;\r
-\r
- __ASM volatile ("MRS %0, xpsr" : "=r" (result) );\r
- return(result);\r
-}\r
-\r
-\r
-/**\r
- \brief Get Process Stack Pointer\r
- \details Returns the current value of the Process Stack Pointer (PSP).\r
- \return PSP Register value\r
- */\r
-__attribute__( ( always_inline ) ) __STATIC_INLINE uint32_t __get_PSP(void)\r
-{\r
- register uint32_t result;\r
-\r
- __ASM volatile ("MRS %0, psp\n" : "=r" (result) );\r
- return(result);\r
-}\r
-\r
-\r
-/**\r
- \brief Set Process Stack Pointer\r
- \details Assigns the given value to the Process Stack Pointer (PSP).\r
- \param [in] topOfProcStack Process Stack Pointer value to set\r
- */\r
-__attribute__( ( always_inline ) ) __STATIC_INLINE void __set_PSP(uint32_t topOfProcStack)\r
-{\r
- __ASM volatile ("MSR psp, %0\n" : : "r" (topOfProcStack) : "sp");\r
-}\r
-\r
-\r
-/**\r
- \brief Get Main Stack Pointer\r
- \details Returns the current value of the Main Stack Pointer (MSP).\r
- \return MSP Register value\r
- */\r
-__attribute__( ( always_inline ) ) __STATIC_INLINE uint32_t __get_MSP(void)\r
-{\r
- register uint32_t result;\r
-\r
- __ASM volatile ("MRS %0, msp\n" : "=r" (result) );\r
- return(result);\r
-}\r
-\r
-\r
-/**\r
- \brief Set Main Stack Pointer\r
- \details Assigns the given value to the Main Stack Pointer (MSP).\r
-\r
- \param [in] topOfMainStack Main Stack Pointer value to set\r
- */\r
-__attribute__( ( always_inline ) ) __STATIC_INLINE void __set_MSP(uint32_t topOfMainStack)\r
-{\r
- __ASM volatile ("MSR msp, %0\n" : : "r" (topOfMainStack) : "sp");\r
-}\r
-\r
-\r
-/**\r
- \brief Get Priority Mask\r
- \details Returns the current state of the priority mask bit from the Priority Mask Register.\r
- \return Priority Mask value\r
- */\r
-__attribute__( ( always_inline ) ) __STATIC_INLINE uint32_t __get_PRIMASK(void)\r
-{\r
- uint32_t result;\r
-\r
- __ASM volatile ("MRS %0, primask" : "=r" (result) );\r
- return(result);\r
-}\r
-\r
-\r
-/**\r
- \brief Set Priority Mask\r
- \details Assigns the given value to the Priority Mask Register.\r
- \param [in] priMask Priority Mask\r
- */\r
-__attribute__( ( always_inline ) ) __STATIC_INLINE void __set_PRIMASK(uint32_t priMask)\r
-{\r
- __ASM volatile ("MSR primask, %0" : : "r" (priMask) : "memory");\r
-}\r
-\r
-\r
-#if (__CORTEX_M >= 0x03U)\r
-\r
-/**\r
- \brief Enable FIQ\r
- \details Enables FIQ interrupts by clearing the F-bit in the CPSR.\r
- Can only be executed in Privileged modes.\r
- */\r
-__attribute__( ( always_inline ) ) __STATIC_INLINE void __enable_fault_irq(void)\r
-{\r
- __ASM volatile ("cpsie f" : : : "memory");\r
-}\r
-\r
-\r
-/**\r
- \brief Disable FIQ\r
- \details Disables FIQ interrupts by setting the F-bit in the CPSR.\r
- Can only be executed in Privileged modes.\r
- */\r
-__attribute__( ( always_inline ) ) __STATIC_INLINE void __disable_fault_irq(void)\r
-{\r
- __ASM volatile ("cpsid f" : : : "memory");\r
-}\r
-\r
-\r
-/**\r
- \brief Get Base Priority\r
- \details Returns the current value of the Base Priority register.\r
- \return Base Priority register value\r
- */\r
-__attribute__( ( always_inline ) ) __STATIC_INLINE uint32_t __get_BASEPRI(void)\r
-{\r
- uint32_t result;\r
-\r
- __ASM volatile ("MRS %0, basepri" : "=r" (result) );\r
- return(result);\r
-}\r
-\r
-\r
-/**\r
- \brief Set Base Priority\r
- \details Assigns the given value to the Base Priority register.\r
- \param [in] basePri Base Priority value to set\r
- */\r
-__attribute__( ( always_inline ) ) __STATIC_INLINE void __set_BASEPRI(uint32_t value)\r
-{\r
- __ASM volatile ("MSR basepri, %0" : : "r" (value) : "memory");\r
-}\r
-\r
-\r
-/**\r
- \brief Set Base Priority with condition\r
- \details Assigns the given value to the Base Priority register only if BASEPRI masking is disabled,\r
- or the new value increases the BASEPRI priority level.\r
- \param [in] basePri Base Priority value to set\r
- */\r
-__attribute__( ( always_inline ) ) __STATIC_INLINE void __set_BASEPRI_MAX(uint32_t value)\r
-{\r
- __ASM volatile ("MSR basepri_max, %0" : : "r" (value) : "memory");\r
-}\r
-\r
-\r
-/**\r
- \brief Get Fault Mask\r
- \details Returns the current value of the Fault Mask register.\r
- \return Fault Mask register value\r
- */\r
-__attribute__( ( always_inline ) ) __STATIC_INLINE uint32_t __get_FAULTMASK(void)\r
-{\r
- uint32_t result;\r
-\r
- __ASM volatile ("MRS %0, faultmask" : "=r" (result) );\r
- return(result);\r
-}\r
-\r
-\r
-/**\r
- \brief Set Fault Mask\r
- \details Assigns the given value to the Fault Mask register.\r
- \param [in] faultMask Fault Mask value to set\r
- */\r
-__attribute__( ( always_inline ) ) __STATIC_INLINE void __set_FAULTMASK(uint32_t faultMask)\r
-{\r
- __ASM volatile ("MSR faultmask, %0" : : "r" (faultMask) : "memory");\r
-}\r
-\r
-#endif /* (__CORTEX_M >= 0x03U) */\r
-\r
-\r
-#if (__CORTEX_M == 0x04U) || (__CORTEX_M == 0x07U)\r
-\r
-/**\r
- \brief Get FPSCR\r
- \details Returns the current value of the Floating Point Status/Control register.\r
- \return Floating Point Status/Control register value\r
- */\r
-__attribute__( ( always_inline ) ) __STATIC_INLINE uint32_t __get_FPSCR(void)\r
-{\r
-#if (__FPU_PRESENT == 1U) && (__FPU_USED == 1U)\r
- uint32_t result;\r
-\r
- /* Empty asm statement works as a scheduling barrier */\r
- __ASM volatile ("");\r
- __ASM volatile ("VMRS %0, fpscr" : "=r" (result) );\r
- __ASM volatile ("");\r
- return(result);\r
-#else\r
- return(0);\r
-#endif\r
-}\r
-\r
-\r
-/**\r
- \brief Set FPSCR\r
- \details Assigns the given value to the Floating Point Status/Control register.\r
- \param [in] fpscr Floating Point Status/Control value to set\r
- */\r
-__attribute__( ( always_inline ) ) __STATIC_INLINE void __set_FPSCR(uint32_t fpscr)\r
-{\r
-#if (__FPU_PRESENT == 1U) && (__FPU_USED == 1U)\r
- /* Empty asm statement works as a scheduling barrier */\r
- __ASM volatile ("");\r
- __ASM volatile ("VMSR fpscr, %0" : : "r" (fpscr) : "vfpcc");\r
- __ASM volatile ("");\r
-#endif\r
-}\r
-\r
-#endif /* (__CORTEX_M == 0x04U) || (__CORTEX_M == 0x07U) */\r
-\r
-\r
-\r
-/*@} end of CMSIS_Core_RegAccFunctions */\r
-\r
-\r
-/* ########################## Core Instruction Access ######################### */\r
-/** \defgroup CMSIS_Core_InstructionInterface CMSIS Core Instruction Interface\r
- Access to dedicated instructions\r
- @{\r
-*/\r
-\r
-/* Define macros for porting to both thumb1 and thumb2.\r
- * For thumb1, use low register (r0-r7), specified by constraint "l"\r
- * Otherwise, use general registers, specified by constraint "r" */\r
-#if defined (__thumb__) && !defined (__thumb2__)\r
-#define __CMSIS_GCC_OUT_REG(r) "=l" (r)\r
-#define __CMSIS_GCC_USE_REG(r) "l" (r)\r
-#else\r
-#define __CMSIS_GCC_OUT_REG(r) "=r" (r)\r
-#define __CMSIS_GCC_USE_REG(r) "r" (r)\r
-#endif\r
-\r
-/**\r
- \brief No Operation\r
- \details No Operation does nothing. This instruction can be used for code alignment purposes.\r
- */\r
-__attribute__((always_inline)) __STATIC_INLINE void __NOP(void)\r
-{\r
- __ASM volatile ("nop");\r
-}\r
-\r
-\r
-/**\r
- \brief Wait For Interrupt\r
- \details Wait For Interrupt is a hint instruction that suspends execution until one of a number of events occurs.\r
- */\r
-__attribute__((always_inline)) __STATIC_INLINE void __WFI(void)\r
-{\r
- __ASM volatile ("wfi");\r
-}\r
-\r
-\r
-/**\r
- \brief Wait For Event\r
- \details Wait For Event is a hint instruction that permits the processor to enter\r
- a low-power state until one of a number of events occurs.\r
- */\r
-__attribute__((always_inline)) __STATIC_INLINE void __WFE(void)\r
-{\r
- __ASM volatile ("wfe");\r
-}\r
-\r
-\r
-/**\r
- \brief Send Event\r
- \details Send Event is a hint instruction. It causes an event to be signaled to the CPU.\r
- */\r
-__attribute__((always_inline)) __STATIC_INLINE void __SEV(void)\r
-{\r
- __ASM volatile ("sev");\r
-}\r
-\r
-\r
-/**\r
- \brief Instruction Synchronization Barrier\r
- \details Instruction Synchronization Barrier flushes the pipeline in the processor,\r
- so that all instructions following the ISB are fetched from cache or memory,\r
- after the instruction has been completed.\r
- */\r
-__attribute__((always_inline)) __STATIC_INLINE void __ISB(void)\r
-{\r
- __ASM volatile ("isb 0xF":::"memory");\r
-}\r
-\r
-\r
-/**\r
- \brief Data Synchronization Barrier\r
- \details Acts as a special kind of Data Memory Barrier.\r
- It completes when all explicit memory accesses before this instruction complete.\r
- */\r
-__attribute__((always_inline)) __STATIC_INLINE void __DSB(void)\r
-{\r
- __ASM volatile ("dsb 0xF":::"memory");\r
-}\r
-\r
-\r
-/**\r
- \brief Data Memory Barrier\r
- \details Ensures the apparent order of the explicit memory operations before\r
- and after the instruction, without ensuring their completion.\r
- */\r
-__attribute__((always_inline)) __STATIC_INLINE void __DMB(void)\r
-{\r
- __ASM volatile ("dmb 0xF":::"memory");\r
-}\r
-\r
-\r
-/**\r
- \brief Reverse byte order (32 bit)\r
- \details Reverses the byte order in integer value.\r
- \param [in] value Value to reverse\r
- \return Reversed value\r
- */\r
-__attribute__((always_inline)) __STATIC_INLINE uint32_t __REV(uint32_t value)\r
-{\r
-#if (__GNUC__ > 4) || (__GNUC__ == 4 && __GNUC_MINOR__ >= 5)\r
- return __builtin_bswap32(value);\r
-#else\r
- uint32_t result;\r
-\r
- __ASM volatile ("rev %0, %1" : __CMSIS_GCC_OUT_REG (result) : __CMSIS_GCC_USE_REG (value) );\r
- return(result);\r
-#endif\r
-}\r
-\r
-\r
-/**\r
- \brief Reverse byte order (16 bit)\r
- \details Reverses the byte order in two unsigned short values.\r
- \param [in] value Value to reverse\r
- \return Reversed value\r
- */\r
-__attribute__((always_inline)) __STATIC_INLINE uint32_t __REV16(uint32_t value)\r
-{\r
- uint32_t result;\r
-\r
- __ASM volatile ("rev16 %0, %1" : __CMSIS_GCC_OUT_REG (result) : __CMSIS_GCC_USE_REG (value) );\r
- return(result);\r
-}\r
-\r
-\r
-/**\r
- \brief Reverse byte order in signed short value\r
- \details Reverses the byte order in a signed short value with sign extension to integer.\r
- \param [in] value Value to reverse\r
- \return Reversed value\r
- */\r
-__attribute__((always_inline)) __STATIC_INLINE int32_t __REVSH(int32_t value)\r
-{\r
-#if (__GNUC__ > 4) || (__GNUC__ == 4 && __GNUC_MINOR__ >= 8)\r
- return (short)__builtin_bswap16(value);\r
-#else\r
- int32_t result;\r
-\r
- __ASM volatile ("revsh %0, %1" : __CMSIS_GCC_OUT_REG (result) : __CMSIS_GCC_USE_REG (value) );\r
- return(result);\r
-#endif\r
-}\r
-\r
-\r
-/**\r
- \brief Rotate Right in unsigned value (32 bit)\r
- \details Rotate Right (immediate) provides the value of the contents of a register rotated by a variable number of bits.\r
- \param [in] value Value to rotate\r
- \param [in] value Number of Bits to rotate\r
- \return Rotated value\r
- */\r
-__attribute__((always_inline)) __STATIC_INLINE uint32_t __ROR(uint32_t op1, uint32_t op2)\r
-{\r
- return (op1 >> op2) | (op1 << (32U - op2));\r
-}\r
-\r
-\r
-/**\r
- \brief Breakpoint\r
- \details Causes the processor to enter Debug state.\r
- Debug tools can use this to investigate system state when the instruction at a particular address is reached.\r
- \param [in] value is ignored by the processor.\r
- If required, a debugger can use it to store additional information about the breakpoint.\r
- */\r
-#define __BKPT(value) __ASM volatile ("bkpt "#value)\r
-\r
-\r
-/**\r
- \brief Reverse bit order of value\r
- \details Reverses the bit order of the given value.\r
- \param [in] value Value to reverse\r
- \return Reversed value\r
- */\r
-__attribute__((always_inline)) __STATIC_INLINE uint32_t __RBIT(uint32_t value)\r
-{\r
- uint32_t result;\r
-\r
-#if (__CORTEX_M >= 0x03U) || (__CORTEX_SC >= 300U)\r
- __ASM volatile ("rbit %0, %1" : "=r" (result) : "r" (value) );\r
-#else\r
- int32_t s = 4 /*sizeof(v)*/ * 8 - 1; /* extra shift needed at end */\r
-\r
- result = value; /* r will be reversed bits of v; first get LSB of v */\r
- for (value >>= 1U; value; value >>= 1U)\r
- {\r
- result <<= 1U;\r
- result |= value & 1U;\r
- s--;\r
- }\r
- result <<= s; /* shift when v's highest bits are zero */\r
-#endif\r
- return(result);\r
-}\r
-\r
-\r
-/**\r
- \brief Count leading zeros\r
- \details Counts the number of leading zeros of a data value.\r
- \param [in] value Value to count the leading zeros\r
- \return number of leading zeros in value\r
- */\r
-#define __CLZ __builtin_clz\r
-\r
-\r
-#if (__CORTEX_M >= 0x03U) || (__CORTEX_SC >= 300U)\r
-\r
-/**\r
- \brief LDR Exclusive (8 bit)\r
- \details Executes a exclusive LDR instruction for 8 bit value.\r
- \param [in] ptr Pointer to data\r
- \return value of type uint8_t at (*ptr)\r
- */\r
-__attribute__((always_inline)) __STATIC_INLINE uint8_t __LDREXB(volatile uint8_t *addr)\r
-{\r
- uint32_t result;\r
-\r
-#if (__GNUC__ > 4) || (__GNUC__ == 4 && __GNUC_MINOR__ >= 8)\r
- __ASM volatile ("ldrexb %0, %1" : "=r" (result) : "Q" (*addr) );\r
-#else\r
- /* Prior to GCC 4.8, "Q" will be expanded to [rx, #0] which is not\r
- accepted by assembler. So has to use following less efficient pattern.\r
- */\r
- __ASM volatile ("ldrexb %0, [%1]" : "=r" (result) : "r" (addr) : "memory" );\r
-#endif\r
- return ((uint8_t) result); /* Add explicit type cast here */\r
-}\r
-\r
-\r
-/**\r
- \brief LDR Exclusive (16 bit)\r
- \details Executes a exclusive LDR instruction for 16 bit values.\r
- \param [in] ptr Pointer to data\r
- \return value of type uint16_t at (*ptr)\r
- */\r
-__attribute__((always_inline)) __STATIC_INLINE uint16_t __LDREXH(volatile uint16_t *addr)\r
-{\r
- uint32_t result;\r
-\r
-#if (__GNUC__ > 4) || (__GNUC__ == 4 && __GNUC_MINOR__ >= 8)\r
- __ASM volatile ("ldrexh %0, %1" : "=r" (result) : "Q" (*addr) );\r
-#else\r
- /* Prior to GCC 4.8, "Q" will be expanded to [rx, #0] which is not\r
- accepted by assembler. So has to use following less efficient pattern.\r
- */\r
- __ASM volatile ("ldrexh %0, [%1]" : "=r" (result) : "r" (addr) : "memory" );\r
-#endif\r
- return ((uint16_t) result); /* Add explicit type cast here */\r
-}\r
-\r
-\r
-/**\r
- \brief LDR Exclusive (32 bit)\r
- \details Executes a exclusive LDR instruction for 32 bit values.\r
- \param [in] ptr Pointer to data\r
- \return value of type uint32_t at (*ptr)\r
- */\r
-__attribute__((always_inline)) __STATIC_INLINE uint32_t __LDREXW(volatile uint32_t *addr)\r
-{\r
- uint32_t result;\r
-\r
- __ASM volatile ("ldrex %0, %1" : "=r" (result) : "Q" (*addr) );\r
- return(result);\r
-}\r
-\r
-\r
-/**\r
- \brief STR Exclusive (8 bit)\r
- \details Executes a exclusive STR instruction for 8 bit values.\r
- \param [in] value Value to store\r
- \param [in] ptr Pointer to location\r
- \return 0 Function succeeded\r
- \return 1 Function failed\r
- */\r
-__attribute__((always_inline)) __STATIC_INLINE uint32_t __STREXB(uint8_t value, volatile uint8_t *addr)\r
-{\r
- uint32_t result;\r
-\r
- __ASM volatile ("strexb %0, %2, %1" : "=&r" (result), "=Q" (*addr) : "r" ((uint32_t)value) );\r
- return(result);\r
-}\r
-\r
-\r
-/**\r
- \brief STR Exclusive (16 bit)\r
- \details Executes a exclusive STR instruction for 16 bit values.\r
- \param [in] value Value to store\r
- \param [in] ptr Pointer to location\r
- \return 0 Function succeeded\r
- \return 1 Function failed\r
- */\r
-__attribute__((always_inline)) __STATIC_INLINE uint32_t __STREXH(uint16_t value, volatile uint16_t *addr)\r
-{\r
- uint32_t result;\r
-\r
- __ASM volatile ("strexh %0, %2, %1" : "=&r" (result), "=Q" (*addr) : "r" ((uint32_t)value) );\r
- return(result);\r
-}\r
-\r
-\r
-/**\r
- \brief STR Exclusive (32 bit)\r
- \details Executes a exclusive STR instruction for 32 bit values.\r
- \param [in] value Value to store\r
- \param [in] ptr Pointer to location\r
- \return 0 Function succeeded\r
- \return 1 Function failed\r
- */\r
-__attribute__((always_inline)) __STATIC_INLINE uint32_t __STREXW(uint32_t value, volatile uint32_t *addr)\r
-{\r
- uint32_t result;\r
-\r
- __ASM volatile ("strex %0, %2, %1" : "=&r" (result), "=Q" (*addr) : "r" (value) );\r
- return(result);\r
-}\r
-\r
-\r
-/**\r
- \brief Remove the exclusive lock\r
- \details Removes the exclusive lock which is created by LDREX.\r
- */\r
-__attribute__((always_inline)) __STATIC_INLINE void __CLREX(void)\r
-{\r
- __ASM volatile ("clrex" ::: "memory");\r
-}\r
-\r
-\r
-/**\r
- \brief Signed Saturate\r
- \details Saturates a signed value.\r
- \param [in] value Value to be saturated\r
- \param [in] sat Bit position to saturate to (1..32)\r
- \return Saturated value\r
- */\r
-#define __SSAT(ARG1,ARG2) \\r
-({ \\r
- uint32_t __RES, __ARG1 = (ARG1); \\r
- __ASM ("ssat %0, %1, %2" : "=r" (__RES) : "I" (ARG2), "r" (__ARG1) ); \\r
- __RES; \\r
- })\r
-\r
-\r
-/**\r
- \brief Unsigned Saturate\r
- \details Saturates an unsigned value.\r
- \param [in] value Value to be saturated\r
- \param [in] sat Bit position to saturate to (0..31)\r
- \return Saturated value\r
- */\r
-#define __USAT(ARG1,ARG2) \\r
-({ \\r
- uint32_t __RES, __ARG1 = (ARG1); \\r
- __ASM ("usat %0, %1, %2" : "=r" (__RES) : "I" (ARG2), "r" (__ARG1) ); \\r
- __RES; \\r
- })\r
-\r
-\r
-/**\r
- \brief Rotate Right with Extend (32 bit)\r
- \details Moves each bit of a bitstring right by one bit.\r
- The carry input is shifted in at the left end of the bitstring.\r
- \param [in] value Value to rotate\r
- \return Rotated value\r
- */\r
-__attribute__((always_inline)) __STATIC_INLINE uint32_t __RRX(uint32_t value)\r
-{\r
- uint32_t result;\r
-\r
- __ASM volatile ("rrx %0, %1" : __CMSIS_GCC_OUT_REG (result) : __CMSIS_GCC_USE_REG (value) );\r
- return(result);\r
-}\r
-\r
-\r
-/**\r
- \brief LDRT Unprivileged (8 bit)\r
- \details Executes a Unprivileged LDRT instruction for 8 bit value.\r
- \param [in] ptr Pointer to data\r
- \return value of type uint8_t at (*ptr)\r
- */\r
-__attribute__((always_inline)) __STATIC_INLINE uint8_t __LDRBT(volatile uint8_t *addr)\r
-{\r
- uint32_t result;\r
-\r
-#if (__GNUC__ > 4) || (__GNUC__ == 4 && __GNUC_MINOR__ >= 8)\r
- __ASM volatile ("ldrbt %0, %1" : "=r" (result) : "Q" (*addr) );\r
-#else\r
- /* Prior to GCC 4.8, "Q" will be expanded to [rx, #0] which is not\r
- accepted by assembler. So has to use following less efficient pattern.\r
- */\r
- __ASM volatile ("ldrbt %0, [%1]" : "=r" (result) : "r" (addr) : "memory" );\r
-#endif\r
- return ((uint8_t) result); /* Add explicit type cast here */\r
-}\r
-\r
-\r
-/**\r
- \brief LDRT Unprivileged (16 bit)\r
- \details Executes a Unprivileged LDRT instruction for 16 bit values.\r
- \param [in] ptr Pointer to data\r
- \return value of type uint16_t at (*ptr)\r
- */\r
-__attribute__((always_inline)) __STATIC_INLINE uint16_t __LDRHT(volatile uint16_t *addr)\r
-{\r
- uint32_t result;\r
-\r
-#if (__GNUC__ > 4) || (__GNUC__ == 4 && __GNUC_MINOR__ >= 8)\r
- __ASM volatile ("ldrht %0, %1" : "=r" (result) : "Q" (*addr) );\r
-#else\r
- /* Prior to GCC 4.8, "Q" will be expanded to [rx, #0] which is not\r
- accepted by assembler. So has to use following less efficient pattern.\r
- */\r
- __ASM volatile ("ldrht %0, [%1]" : "=r" (result) : "r" (addr) : "memory" );\r
-#endif\r
- return ((uint16_t) result); /* Add explicit type cast here */\r
-}\r
-\r
-\r
-/**\r
- \brief LDRT Unprivileged (32 bit)\r
- \details Executes a Unprivileged LDRT instruction for 32 bit values.\r
- \param [in] ptr Pointer to data\r
- \return value of type uint32_t at (*ptr)\r
- */\r
-__attribute__((always_inline)) __STATIC_INLINE uint32_t __LDRT(volatile uint32_t *addr)\r
-{\r
- uint32_t result;\r
-\r
- __ASM volatile ("ldrt %0, %1" : "=r" (result) : "Q" (*addr) );\r
- return(result);\r
-}\r
-\r
-\r
-/**\r
- \brief STRT Unprivileged (8 bit)\r
- \details Executes a Unprivileged STRT instruction for 8 bit values.\r
- \param [in] value Value to store\r
- \param [in] ptr Pointer to location\r
- */\r
-__attribute__((always_inline)) __STATIC_INLINE void __STRBT(uint8_t value, volatile uint8_t *addr)\r
-{\r
- __ASM volatile ("strbt %1, %0" : "=Q" (*addr) : "r" ((uint32_t)value) );\r
-}\r
-\r
-\r
-/**\r
- \brief STRT Unprivileged (16 bit)\r
- \details Executes a Unprivileged STRT instruction for 16 bit values.\r
- \param [in] value Value to store\r
- \param [in] ptr Pointer to location\r
- */\r
-__attribute__((always_inline)) __STATIC_INLINE void __STRHT(uint16_t value, volatile uint16_t *addr)\r
-{\r
- __ASM volatile ("strht %1, %0" : "=Q" (*addr) : "r" ((uint32_t)value) );\r
-}\r
-\r
-\r
-/**\r
- \brief STRT Unprivileged (32 bit)\r
- \details Executes a Unprivileged STRT instruction for 32 bit values.\r
- \param [in] value Value to store\r
- \param [in] ptr Pointer to location\r
- */\r
-__attribute__((always_inline)) __STATIC_INLINE void __STRT(uint32_t value, volatile uint32_t *addr)\r
-{\r
- __ASM volatile ("strt %1, %0" : "=Q" (*addr) : "r" (value) );\r
-}\r
-\r
-#endif /* (__CORTEX_M >= 0x03U) || (__CORTEX_SC >= 300U) */\r
-\r
-/*@}*/ /* end of group CMSIS_Core_InstructionInterface */\r
-\r
-\r
-/* ################### Compiler specific Intrinsics ########################### */\r
-/** \defgroup CMSIS_SIMD_intrinsics CMSIS SIMD Intrinsics\r
- Access to dedicated SIMD instructions\r
- @{\r
-*/\r
-\r
-#if (__CORTEX_M >= 0x04U) /* only for Cortex-M4 and above */\r
-\r
-__attribute__( ( always_inline ) ) __STATIC_INLINE uint32_t __SADD8(uint32_t op1, uint32_t op2)\r
-{\r
- uint32_t result;\r
-\r
- __ASM volatile ("sadd8 %0, %1, %2" : "=r" (result) : "r" (op1), "r" (op2) );\r
- return(result);\r
-}\r
-\r
-__attribute__( ( always_inline ) ) __STATIC_INLINE uint32_t __QADD8(uint32_t op1, uint32_t op2)\r
-{\r
- uint32_t result;\r
-\r
- __ASM volatile ("qadd8 %0, %1, %2" : "=r" (result) : "r" (op1), "r" (op2) );\r
- return(result);\r
-}\r
-\r
-__attribute__( ( always_inline ) ) __STATIC_INLINE uint32_t __SHADD8(uint32_t op1, uint32_t op2)\r
-{\r
- uint32_t result;\r
-\r
- __ASM volatile ("shadd8 %0, %1, %2" : "=r" (result) : "r" (op1), "r" (op2) );\r
- return(result);\r
-}\r
-\r
-__attribute__( ( always_inline ) ) __STATIC_INLINE uint32_t __UADD8(uint32_t op1, uint32_t op2)\r
-{\r
- uint32_t result;\r
-\r
- __ASM volatile ("uadd8 %0, %1, %2" : "=r" (result) : "r" (op1), "r" (op2) );\r
- return(result);\r
-}\r
-\r
-__attribute__( ( always_inline ) ) __STATIC_INLINE uint32_t __UQADD8(uint32_t op1, uint32_t op2)\r
-{\r
- uint32_t result;\r
-\r
- __ASM volatile ("uqadd8 %0, %1, %2" : "=r" (result) : "r" (op1), "r" (op2) );\r
- return(result);\r
-}\r
-\r
-__attribute__( ( always_inline ) ) __STATIC_INLINE uint32_t __UHADD8(uint32_t op1, uint32_t op2)\r
-{\r
- uint32_t result;\r
-\r
- __ASM volatile ("uhadd8 %0, %1, %2" : "=r" (result) : "r" (op1), "r" (op2) );\r
- return(result);\r
-}\r
-\r
-\r
-__attribute__( ( always_inline ) ) __STATIC_INLINE uint32_t __SSUB8(uint32_t op1, uint32_t op2)\r
-{\r
- uint32_t result;\r
-\r
- __ASM volatile ("ssub8 %0, %1, %2" : "=r" (result) : "r" (op1), "r" (op2) );\r
- return(result);\r
-}\r
-\r
-__attribute__( ( always_inline ) ) __STATIC_INLINE uint32_t __QSUB8(uint32_t op1, uint32_t op2)\r
-{\r
- uint32_t result;\r
-\r
- __ASM volatile ("qsub8 %0, %1, %2" : "=r" (result) : "r" (op1), "r" (op2) );\r
- return(result);\r
-}\r
-\r
-__attribute__( ( always_inline ) ) __STATIC_INLINE uint32_t __SHSUB8(uint32_t op1, uint32_t op2)\r
-{\r
- uint32_t result;\r
-\r
- __ASM volatile ("shsub8 %0, %1, %2" : "=r" (result) : "r" (op1), "r" (op2) );\r
- return(result);\r
-}\r
-\r
-__attribute__( ( always_inline ) ) __STATIC_INLINE uint32_t __USUB8(uint32_t op1, uint32_t op2)\r
-{\r
- uint32_t result;\r
-\r
- __ASM volatile ("usub8 %0, %1, %2" : "=r" (result) : "r" (op1), "r" (op2) );\r
- return(result);\r
-}\r
-\r
-__attribute__( ( always_inline ) ) __STATIC_INLINE uint32_t __UQSUB8(uint32_t op1, uint32_t op2)\r
-{\r
- uint32_t result;\r
-\r
- __ASM volatile ("uqsub8 %0, %1, %2" : "=r" (result) : "r" (op1), "r" (op2) );\r
- return(result);\r
-}\r
-\r
-__attribute__( ( always_inline ) ) __STATIC_INLINE uint32_t __UHSUB8(uint32_t op1, uint32_t op2)\r
-{\r
- uint32_t result;\r
-\r
- __ASM volatile ("uhsub8 %0, %1, %2" : "=r" (result) : "r" (op1), "r" (op2) );\r
- return(result);\r
-}\r
-\r
-\r
-__attribute__( ( always_inline ) ) __STATIC_INLINE uint32_t __SADD16(uint32_t op1, uint32_t op2)\r
-{\r
- uint32_t result;\r
-\r
- __ASM volatile ("sadd16 %0, %1, %2" : "=r" (result) : "r" (op1), "r" (op2) );\r
- return(result);\r
-}\r
-\r
-__attribute__( ( always_inline ) ) __STATIC_INLINE uint32_t __QADD16(uint32_t op1, uint32_t op2)\r
-{\r
- uint32_t result;\r
-\r
- __ASM volatile ("qadd16 %0, %1, %2" : "=r" (result) : "r" (op1), "r" (op2) );\r
- return(result);\r
-}\r
-\r
-__attribute__( ( always_inline ) ) __STATIC_INLINE uint32_t __SHADD16(uint32_t op1, uint32_t op2)\r
-{\r
- uint32_t result;\r
-\r
- __ASM volatile ("shadd16 %0, %1, %2" : "=r" (result) : "r" (op1), "r" (op2) );\r
- return(result);\r
-}\r
-\r
-__attribute__( ( always_inline ) ) __STATIC_INLINE uint32_t __UADD16(uint32_t op1, uint32_t op2)\r
-{\r
- uint32_t result;\r
-\r
- __ASM volatile ("uadd16 %0, %1, %2" : "=r" (result) : "r" (op1), "r" (op2) );\r
- return(result);\r
-}\r
-\r
-__attribute__( ( always_inline ) ) __STATIC_INLINE uint32_t __UQADD16(uint32_t op1, uint32_t op2)\r
-{\r
- uint32_t result;\r
-\r
- __ASM volatile ("uqadd16 %0, %1, %2" : "=r" (result) : "r" (op1), "r" (op2) );\r
- return(result);\r
-}\r
-\r
-__attribute__( ( always_inline ) ) __STATIC_INLINE uint32_t __UHADD16(uint32_t op1, uint32_t op2)\r
-{\r
- uint32_t result;\r
-\r
- __ASM volatile ("uhadd16 %0, %1, %2" : "=r" (result) : "r" (op1), "r" (op2) );\r
- return(result);\r
-}\r
-\r
-__attribute__( ( always_inline ) ) __STATIC_INLINE uint32_t __SSUB16(uint32_t op1, uint32_t op2)\r
-{\r
- uint32_t result;\r
-\r
- __ASM volatile ("ssub16 %0, %1, %2" : "=r" (result) : "r" (op1), "r" (op2) );\r
- return(result);\r
-}\r
-\r
-__attribute__( ( always_inline ) ) __STATIC_INLINE uint32_t __QSUB16(uint32_t op1, uint32_t op2)\r
-{\r
- uint32_t result;\r
-\r
- __ASM volatile ("qsub16 %0, %1, %2" : "=r" (result) : "r" (op1), "r" (op2) );\r
- return(result);\r
-}\r
-\r
-__attribute__( ( always_inline ) ) __STATIC_INLINE uint32_t __SHSUB16(uint32_t op1, uint32_t op2)\r
-{\r
- uint32_t result;\r
-\r
- __ASM volatile ("shsub16 %0, %1, %2" : "=r" (result) : "r" (op1), "r" (op2) );\r
- return(result);\r
-}\r
-\r
-__attribute__( ( always_inline ) ) __STATIC_INLINE uint32_t __USUB16(uint32_t op1, uint32_t op2)\r
-{\r
- uint32_t result;\r
-\r
- __ASM volatile ("usub16 %0, %1, %2" : "=r" (result) : "r" (op1), "r" (op2) );\r
- return(result);\r
-}\r
-\r
-__attribute__( ( always_inline ) ) __STATIC_INLINE uint32_t __UQSUB16(uint32_t op1, uint32_t op2)\r
-{\r
- uint32_t result;\r
-\r
- __ASM volatile ("uqsub16 %0, %1, %2" : "=r" (result) : "r" (op1), "r" (op2) );\r
- return(result);\r
-}\r
-\r
-__attribute__( ( always_inline ) ) __STATIC_INLINE uint32_t __UHSUB16(uint32_t op1, uint32_t op2)\r
-{\r
- uint32_t result;\r
-\r
- __ASM volatile ("uhsub16 %0, %1, %2" : "=r" (result) : "r" (op1), "r" (op2) );\r
- return(result);\r
-}\r
-\r
-__attribute__( ( always_inline ) ) __STATIC_INLINE uint32_t __SASX(uint32_t op1, uint32_t op2)\r
-{\r
- uint32_t result;\r
-\r
- __ASM volatile ("sasx %0, %1, %2" : "=r" (result) : "r" (op1), "r" (op2) );\r
- return(result);\r
-}\r
-\r
-__attribute__( ( always_inline ) ) __STATIC_INLINE uint32_t __QASX(uint32_t op1, uint32_t op2)\r
-{\r
- uint32_t result;\r
-\r
- __ASM volatile ("qasx %0, %1, %2" : "=r" (result) : "r" (op1), "r" (op2) );\r
- return(result);\r
-}\r
-\r
-__attribute__( ( always_inline ) ) __STATIC_INLINE uint32_t __SHASX(uint32_t op1, uint32_t op2)\r
-{\r
- uint32_t result;\r
-\r
- __ASM volatile ("shasx %0, %1, %2" : "=r" (result) : "r" (op1), "r" (op2) );\r
- return(result);\r
-}\r
-\r
-__attribute__( ( always_inline ) ) __STATIC_INLINE uint32_t __UASX(uint32_t op1, uint32_t op2)\r
-{\r
- uint32_t result;\r
-\r
- __ASM volatile ("uasx %0, %1, %2" : "=r" (result) : "r" (op1), "r" (op2) );\r
- return(result);\r
-}\r
-\r
-__attribute__( ( always_inline ) ) __STATIC_INLINE uint32_t __UQASX(uint32_t op1, uint32_t op2)\r
-{\r
- uint32_t result;\r
-\r
- __ASM volatile ("uqasx %0, %1, %2" : "=r" (result) : "r" (op1), "r" (op2) );\r
- return(result);\r
-}\r
-\r
-__attribute__( ( always_inline ) ) __STATIC_INLINE uint32_t __UHASX(uint32_t op1, uint32_t op2)\r
-{\r
- uint32_t result;\r
-\r
- __ASM volatile ("uhasx %0, %1, %2" : "=r" (result) : "r" (op1), "r" (op2) );\r
- return(result);\r
-}\r
-\r
-__attribute__( ( always_inline ) ) __STATIC_INLINE uint32_t __SSAX(uint32_t op1, uint32_t op2)\r
-{\r
- uint32_t result;\r
-\r
- __ASM volatile ("ssax %0, %1, %2" : "=r" (result) : "r" (op1), "r" (op2) );\r
- return(result);\r
-}\r
-\r
-__attribute__( ( always_inline ) ) __STATIC_INLINE uint32_t __QSAX(uint32_t op1, uint32_t op2)\r
-{\r
- uint32_t result;\r
-\r
- __ASM volatile ("qsax %0, %1, %2" : "=r" (result) : "r" (op1), "r" (op2) );\r
- return(result);\r
-}\r
-\r
-__attribute__( ( always_inline ) ) __STATIC_INLINE uint32_t __SHSAX(uint32_t op1, uint32_t op2)\r
-{\r
- uint32_t result;\r
-\r
- __ASM volatile ("shsax %0, %1, %2" : "=r" (result) : "r" (op1), "r" (op2) );\r
- return(result);\r
-}\r
-\r
-__attribute__( ( always_inline ) ) __STATIC_INLINE uint32_t __USAX(uint32_t op1, uint32_t op2)\r
-{\r
- uint32_t result;\r
-\r
- __ASM volatile ("usax %0, %1, %2" : "=r" (result) : "r" (op1), "r" (op2) );\r
- return(result);\r
-}\r
-\r
-__attribute__( ( always_inline ) ) __STATIC_INLINE uint32_t __UQSAX(uint32_t op1, uint32_t op2)\r
-{\r
- uint32_t result;\r
-\r
- __ASM volatile ("uqsax %0, %1, %2" : "=r" (result) : "r" (op1), "r" (op2) );\r
- return(result);\r
-}\r
-\r
-__attribute__( ( always_inline ) ) __STATIC_INLINE uint32_t __UHSAX(uint32_t op1, uint32_t op2)\r
-{\r
- uint32_t result;\r
-\r
- __ASM volatile ("uhsax %0, %1, %2" : "=r" (result) : "r" (op1), "r" (op2) );\r
- return(result);\r
-}\r
-\r
-__attribute__( ( always_inline ) ) __STATIC_INLINE uint32_t __USAD8(uint32_t op1, uint32_t op2)\r
-{\r
- uint32_t result;\r
-\r
- __ASM volatile ("usad8 %0, %1, %2" : "=r" (result) : "r" (op1), "r" (op2) );\r
- return(result);\r
-}\r
-\r
-__attribute__( ( always_inline ) ) __STATIC_INLINE uint32_t __USADA8(uint32_t op1, uint32_t op2, uint32_t op3)\r
-{\r
- uint32_t result;\r
-\r
- __ASM volatile ("usada8 %0, %1, %2, %3" : "=r" (result) : "r" (op1), "r" (op2), "r" (op3) );\r
- return(result);\r
-}\r
-\r
-#define __SSAT16(ARG1,ARG2) \\r
-({ \\r
- int32_t __RES, __ARG1 = (ARG1); \\r
- __ASM ("ssat16 %0, %1, %2" : "=r" (__RES) : "I" (ARG2), "r" (__ARG1) ); \\r
- __RES; \\r
- })\r
-\r
-#define __USAT16(ARG1,ARG2) \\r
-({ \\r
- uint32_t __RES, __ARG1 = (ARG1); \\r
- __ASM ("usat16 %0, %1, %2" : "=r" (__RES) : "I" (ARG2), "r" (__ARG1) ); \\r
- __RES; \\r
- })\r
-\r
-__attribute__( ( always_inline ) ) __STATIC_INLINE uint32_t __UXTB16(uint32_t op1)\r
-{\r
- uint32_t result;\r
-\r
- __ASM volatile ("uxtb16 %0, %1" : "=r" (result) : "r" (op1));\r
- return(result);\r
-}\r
-\r
-__attribute__( ( always_inline ) ) __STATIC_INLINE uint32_t __UXTAB16(uint32_t op1, uint32_t op2)\r
-{\r
- uint32_t result;\r
-\r
- __ASM volatile ("uxtab16 %0, %1, %2" : "=r" (result) : "r" (op1), "r" (op2) );\r
- return(result);\r
-}\r
-\r
-__attribute__( ( always_inline ) ) __STATIC_INLINE uint32_t __SXTB16(uint32_t op1)\r
-{\r
- uint32_t result;\r
-\r
- __ASM volatile ("sxtb16 %0, %1" : "=r" (result) : "r" (op1));\r
- return(result);\r
-}\r
-\r
-__attribute__( ( always_inline ) ) __STATIC_INLINE uint32_t __SXTAB16(uint32_t op1, uint32_t op2)\r
-{\r
- uint32_t result;\r
-\r
- __ASM volatile ("sxtab16 %0, %1, %2" : "=r" (result) : "r" (op1), "r" (op2) );\r
- return(result);\r
-}\r
-\r
-__attribute__( ( always_inline ) ) __STATIC_INLINE uint32_t __SMUAD (uint32_t op1, uint32_t op2)\r
-{\r
- uint32_t result;\r
-\r
- __ASM volatile ("smuad %0, %1, %2" : "=r" (result) : "r" (op1), "r" (op2) );\r
- return(result);\r
-}\r
-\r
-__attribute__( ( always_inline ) ) __STATIC_INLINE uint32_t __SMUADX (uint32_t op1, uint32_t op2)\r
-{\r
- uint32_t result;\r
-\r
- __ASM volatile ("smuadx %0, %1, %2" : "=r" (result) : "r" (op1), "r" (op2) );\r
- return(result);\r
-}\r
-\r
-__attribute__( ( always_inline ) ) __STATIC_INLINE uint32_t __SMLAD (uint32_t op1, uint32_t op2, uint32_t op3)\r
-{\r
- uint32_t result;\r
-\r
- __ASM volatile ("smlad %0, %1, %2, %3" : "=r" (result) : "r" (op1), "r" (op2), "r" (op3) );\r
- return(result);\r
-}\r
-\r
-__attribute__( ( always_inline ) ) __STATIC_INLINE uint32_t __SMLADX (uint32_t op1, uint32_t op2, uint32_t op3)\r
-{\r
- uint32_t result;\r
-\r
- __ASM volatile ("smladx %0, %1, %2, %3" : "=r" (result) : "r" (op1), "r" (op2), "r" (op3) );\r
- return(result);\r
-}\r
-\r
-__attribute__( ( always_inline ) ) __STATIC_INLINE uint64_t __SMLALD (uint32_t op1, uint32_t op2, uint64_t acc)\r
-{\r
- union llreg_u{\r
- uint32_t w32[2];\r
- uint64_t w64;\r
- } llr;\r
- llr.w64 = acc;\r
-\r
-#ifndef __ARMEB__ /* Little endian */\r
- __ASM volatile ("smlald %0, %1, %2, %3" : "=r" (llr.w32[0]), "=r" (llr.w32[1]): "r" (op1), "r" (op2) , "0" (llr.w32[0]), "1" (llr.w32[1]) );\r
-#else /* Big endian */\r
- __ASM volatile ("smlald %0, %1, %2, %3" : "=r" (llr.w32[1]), "=r" (llr.w32[0]): "r" (op1), "r" (op2) , "0" (llr.w32[1]), "1" (llr.w32[0]) );\r
-#endif\r
-\r
- return(llr.w64);\r
-}\r
-\r
-__attribute__( ( always_inline ) ) __STATIC_INLINE uint64_t __SMLALDX (uint32_t op1, uint32_t op2, uint64_t acc)\r
-{\r
- union llreg_u{\r
- uint32_t w32[2];\r
- uint64_t w64;\r
- } llr;\r
- llr.w64 = acc;\r
-\r
-#ifndef __ARMEB__ /* Little endian */\r
- __ASM volatile ("smlaldx %0, %1, %2, %3" : "=r" (llr.w32[0]), "=r" (llr.w32[1]): "r" (op1), "r" (op2) , "0" (llr.w32[0]), "1" (llr.w32[1]) );\r
-#else /* Big endian */\r
- __ASM volatile ("smlaldx %0, %1, %2, %3" : "=r" (llr.w32[1]), "=r" (llr.w32[0]): "r" (op1), "r" (op2) , "0" (llr.w32[1]), "1" (llr.w32[0]) );\r
-#endif\r
-\r
- return(llr.w64);\r
-}\r
-\r
-__attribute__( ( always_inline ) ) __STATIC_INLINE uint32_t __SMUSD (uint32_t op1, uint32_t op2)\r
-{\r
- uint32_t result;\r
-\r
- __ASM volatile ("smusd %0, %1, %2" : "=r" (result) : "r" (op1), "r" (op2) );\r
- return(result);\r
-}\r
-\r
-__attribute__( ( always_inline ) ) __STATIC_INLINE uint32_t __SMUSDX (uint32_t op1, uint32_t op2)\r
-{\r
- uint32_t result;\r
-\r
- __ASM volatile ("smusdx %0, %1, %2" : "=r" (result) : "r" (op1), "r" (op2) );\r
- return(result);\r
-}\r
-\r
-__attribute__( ( always_inline ) ) __STATIC_INLINE uint32_t __SMLSD (uint32_t op1, uint32_t op2, uint32_t op3)\r
-{\r
- uint32_t result;\r
-\r
- __ASM volatile ("smlsd %0, %1, %2, %3" : "=r" (result) : "r" (op1), "r" (op2), "r" (op3) );\r
- return(result);\r
-}\r
-\r
-__attribute__( ( always_inline ) ) __STATIC_INLINE uint32_t __SMLSDX (uint32_t op1, uint32_t op2, uint32_t op3)\r
-{\r
- uint32_t result;\r
-\r
- __ASM volatile ("smlsdx %0, %1, %2, %3" : "=r" (result) : "r" (op1), "r" (op2), "r" (op3) );\r
- return(result);\r
-}\r
-\r
-__attribute__( ( always_inline ) ) __STATIC_INLINE uint64_t __SMLSLD (uint32_t op1, uint32_t op2, uint64_t acc)\r
-{\r
- union llreg_u{\r
- uint32_t w32[2];\r
- uint64_t w64;\r
- } llr;\r
- llr.w64 = acc;\r
-\r
-#ifndef __ARMEB__ /* Little endian */\r
- __ASM volatile ("smlsld %0, %1, %2, %3" : "=r" (llr.w32[0]), "=r" (llr.w32[1]): "r" (op1), "r" (op2) , "0" (llr.w32[0]), "1" (llr.w32[1]) );\r
-#else /* Big endian */\r
- __ASM volatile ("smlsld %0, %1, %2, %3" : "=r" (llr.w32[1]), "=r" (llr.w32[0]): "r" (op1), "r" (op2) , "0" (llr.w32[1]), "1" (llr.w32[0]) );\r
-#endif\r
-\r
- return(llr.w64);\r
-}\r
-\r
-__attribute__( ( always_inline ) ) __STATIC_INLINE uint64_t __SMLSLDX (uint32_t op1, uint32_t op2, uint64_t acc)\r
-{\r
- union llreg_u{\r
- uint32_t w32[2];\r
- uint64_t w64;\r
- } llr;\r
- llr.w64 = acc;\r
-\r
-#ifndef __ARMEB__ /* Little endian */\r
- __ASM volatile ("smlsldx %0, %1, %2, %3" : "=r" (llr.w32[0]), "=r" (llr.w32[1]): "r" (op1), "r" (op2) , "0" (llr.w32[0]), "1" (llr.w32[1]) );\r
-#else /* Big endian */\r
- __ASM volatile ("smlsldx %0, %1, %2, %3" : "=r" (llr.w32[1]), "=r" (llr.w32[0]): "r" (op1), "r" (op2) , "0" (llr.w32[1]), "1" (llr.w32[0]) );\r
-#endif\r
-\r
- return(llr.w64);\r
-}\r
-\r
-__attribute__( ( always_inline ) ) __STATIC_INLINE uint32_t __SEL (uint32_t op1, uint32_t op2)\r
-{\r
- uint32_t result;\r
-\r
- __ASM volatile ("sel %0, %1, %2" : "=r" (result) : "r" (op1), "r" (op2) );\r
- return(result);\r
-}\r
-\r
-__attribute__( ( always_inline ) ) __STATIC_INLINE int32_t __QADD( int32_t op1, int32_t op2)\r
-{\r
- int32_t result;\r
-\r
- __ASM volatile ("qadd %0, %1, %2" : "=r" (result) : "r" (op1), "r" (op2) );\r
- return(result);\r
-}\r
-\r
-__attribute__( ( always_inline ) ) __STATIC_INLINE int32_t __QSUB( int32_t op1, int32_t op2)\r
-{\r
- int32_t result;\r
-\r
- __ASM volatile ("qsub %0, %1, %2" : "=r" (result) : "r" (op1), "r" (op2) );\r
- return(result);\r
-}\r
-\r
-#define __PKHBT(ARG1,ARG2,ARG3) \\r
-({ \\r
- uint32_t __RES, __ARG1 = (ARG1), __ARG2 = (ARG2); \\r
- __ASM ("pkhbt %0, %1, %2, lsl %3" : "=r" (__RES) : "r" (__ARG1), "r" (__ARG2), "I" (ARG3) ); \\r
- __RES; \\r
- })\r
-\r
-#define __PKHTB(ARG1,ARG2,ARG3) \\r
-({ \\r
- uint32_t __RES, __ARG1 = (ARG1), __ARG2 = (ARG2); \\r
- if (ARG3 == 0) \\r
- __ASM ("pkhtb %0, %1, %2" : "=r" (__RES) : "r" (__ARG1), "r" (__ARG2) ); \\r
- else \\r
- __ASM ("pkhtb %0, %1, %2, asr %3" : "=r" (__RES) : "r" (__ARG1), "r" (__ARG2), "I" (ARG3) ); \\r
- __RES; \\r
- })\r
-\r
-__attribute__( ( always_inline ) ) __STATIC_INLINE uint32_t __SMMLA (int32_t op1, int32_t op2, int32_t op3)\r
-{\r
- int32_t result;\r
-\r
- __ASM volatile ("smmla %0, %1, %2, %3" : "=r" (result): "r" (op1), "r" (op2), "r" (op3) );\r
- return(result);\r
-}\r
-\r
-#endif /* (__CORTEX_M >= 0x04) */\r
-/*@} end of group CMSIS_SIMD_intrinsics */\r
-\r
-\r
-#if defined ( __GNUC__ )\r
-#pragma GCC diagnostic pop\r
-#endif\r
-\r
-#endif /* __CMSIS_GCC_H */\r
+++ /dev/null
-/**************************************************************************//**\r
- * @file core_cm4.h\r
- * @brief CMSIS Cortex-M4 Core Peripheral Access Layer Header File\r
- * @version V4.30\r
- * @date 20. October 2015\r
- ******************************************************************************/\r
-/* Copyright (c) 2009 - 2015 ARM LIMITED\r
-\r
- All rights reserved.\r
- Redistribution and use in source and binary forms, with or without\r
- modification, are permitted provided that the following conditions are met:\r
- - Redistributions of source code must retain the above copyright\r
- notice, this list of conditions and the following disclaimer.\r
- - Redistributions in binary form must reproduce the above copyright\r
- notice, this list of conditions and the following disclaimer in the\r
- documentation and/or other materials provided with the distribution.\r
- - Neither the name of ARM nor the names of its contributors may be used\r
- to endorse or promote products derived from this software without\r
- specific prior written permission.\r
- *\r
- THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"\r
- AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE\r
- IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE\r
- ARE DISCLAIMED. IN NO EVENT SHALL COPYRIGHT HOLDERS AND CONTRIBUTORS BE\r
- LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR\r
- CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF\r
- SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS\r
- INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN\r
- CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)\r
- ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE\r
- POSSIBILITY OF SUCH DAMAGE.\r
- ---------------------------------------------------------------------------*/\r
-\r
-\r
-#if defined ( __ICCARM__ )\r
- #pragma system_include /* treat file as system include file for MISRA check */\r
-#elif defined(__ARMCC_VERSION) && (__ARMCC_VERSION >= 6010050)\r
- #pragma clang system_header /* treat file as system include file */\r
-#endif\r
-\r
-#ifndef __CORE_CM4_H_GENERIC\r
-#define __CORE_CM4_H_GENERIC\r
-\r
-#include <stdint.h>\r
-\r
-#ifdef __cplusplus\r
- extern "C" {\r
-#endif\r
-\r
-/**\r
- \page CMSIS_MISRA_Exceptions MISRA-C:2004 Compliance Exceptions\r
- CMSIS violates the following MISRA-C:2004 rules:\r
-\r
- \li Required Rule 8.5, object/function definition in header file.<br>\r
- Function definitions in header files are used to allow 'inlining'.\r
-\r
- \li Required Rule 18.4, declaration of union type or object of union type: '{...}'.<br>\r
- Unions are used for effective representation of core registers.\r
-\r
- \li Advisory Rule 19.7, Function-like macro defined.<br>\r
- Function-like macros are used to allow more efficient code.\r
- */\r
-\r
-\r
-/*******************************************************************************\r
- * CMSIS definitions\r
- ******************************************************************************/\r
-/**\r
- \ingroup Cortex_M4\r
- @{\r
- */\r
-\r
-/* CMSIS CM4 definitions */\r
-#define __CM4_CMSIS_VERSION_MAIN (0x04U) /*!< [31:16] CMSIS HAL main version */\r
-#define __CM4_CMSIS_VERSION_SUB (0x1EU) /*!< [15:0] CMSIS HAL sub version */\r
-#define __CM4_CMSIS_VERSION ((__CM4_CMSIS_VERSION_MAIN << 16U) | \\r
- __CM4_CMSIS_VERSION_SUB ) /*!< CMSIS HAL version number */\r
-\r
-#define __CORTEX_M (0x04U) /*!< Cortex-M Core */\r
-\r
-\r
-#if defined ( __CC_ARM )\r
- #define __ASM __asm /*!< asm keyword for ARM Compiler */\r
- #define __INLINE __inline /*!< inline keyword for ARM Compiler */\r
- #define __STATIC_INLINE static __inline\r
-\r
-#elif defined(__ARMCC_VERSION) && (__ARMCC_VERSION >= 6010050)\r
- #define __ASM __asm /*!< asm keyword for ARM Compiler */\r
- #define __INLINE __inline /*!< inline keyword for ARM Compiler */\r
- #define __STATIC_INLINE static __inline\r
-\r
-#elif defined ( __GNUC__ )\r
- #define __ASM __asm /*!< asm keyword for GNU Compiler */\r
- #define __INLINE inline /*!< inline keyword for GNU Compiler */\r
- #define __STATIC_INLINE static inline\r
-\r
-#elif defined ( __ICCARM__ )\r
- #define __ASM __asm /*!< asm keyword for IAR Compiler */\r
- #define __INLINE inline /*!< inline keyword for IAR Compiler. Only available in High optimization mode! */\r
- #define __STATIC_INLINE static inline\r
-\r
-#elif defined ( __TMS470__ )\r
- #define __ASM __asm /*!< asm keyword for TI CCS Compiler */\r
- #define __STATIC_INLINE static inline\r
-\r
-#elif defined ( __TASKING__ )\r
- #define __ASM __asm /*!< asm keyword for TASKING Compiler */\r
- #define __INLINE inline /*!< inline keyword for TASKING Compiler */\r
- #define __STATIC_INLINE static inline\r
-\r
-#elif defined ( __CSMC__ )\r
- #define __packed\r
- #define __ASM _asm /*!< asm keyword for COSMIC Compiler */\r
- #define __INLINE inline /*!< inline keyword for COSMIC Compiler. Use -pc99 on compile line */\r
- #define __STATIC_INLINE static inline\r
-\r
-#else\r
- #error Unknown compiler\r
-#endif\r
-\r
-/** __FPU_USED indicates whether an FPU is used or not.\r
- For this, __FPU_PRESENT has to be checked prior to making use of FPU specific registers and functions.\r
-*/\r
-#if defined ( __CC_ARM )\r
- #if defined __TARGET_FPU_VFP\r
- #if (__FPU_PRESENT == 1U)\r
- #define __FPU_USED 1U\r
- #else\r
- #error "Compiler generates FPU instructions for a device without an FPU (check __FPU_PRESENT)"\r
- #define __FPU_USED 0U\r
- #endif\r
- #else\r
- #define __FPU_USED 0U\r
- #endif\r
-\r
-#elif defined(__ARMCC_VERSION) && (__ARMCC_VERSION >= 6010050)\r
- #if defined __ARM_PCS_VFP\r
- #if (__FPU_PRESENT == 1)\r
- #define __FPU_USED 1U\r
- #else\r
- #warning "Compiler generates FPU instructions for a device without an FPU (check __FPU_PRESENT)"\r
- #define __FPU_USED 0U\r
- #endif\r
- #else\r
- #define __FPU_USED 0U\r
- #endif\r
-\r
-#elif defined ( __GNUC__ )\r
- #if defined (__VFP_FP__) && !defined(__SOFTFP__)\r
- #if (__FPU_PRESENT == 1U)\r
- #define __FPU_USED 1U\r
- #else\r
- #error "Compiler generates FPU instructions for a device without an FPU (check __FPU_PRESENT)"\r
- #define __FPU_USED 0U\r
- #endif\r
- #else\r
- #define __FPU_USED 0U\r
- #endif\r
-\r
-#elif defined ( __ICCARM__ )\r
- #if defined __ARMVFP__\r
- #if (__FPU_PRESENT == 1U)\r
- #define __FPU_USED 1U\r
- #else\r
- #error "Compiler generates FPU instructions for a device without an FPU (check __FPU_PRESENT)"\r
- #define __FPU_USED 0U\r
- #endif\r
- #else\r
- #define __FPU_USED 0U\r
- #endif\r
-\r
-#elif defined ( __TMS470__ )\r
- #if defined __TI_VFP_SUPPORT__\r
- #if (__FPU_PRESENT == 1U)\r
- #define __FPU_USED 1U\r
- #else\r
- #error "Compiler generates FPU instructions for a device without an FPU (check __FPU_PRESENT)"\r
- #define __FPU_USED 0U\r
- #endif\r
- #else\r
- #define __FPU_USED 0U\r
- #endif\r
-\r
-#elif defined ( __TASKING__ )\r
- #if defined __FPU_VFP__\r
- #if (__FPU_PRESENT == 1U)\r
- #define __FPU_USED 1U\r
- #else\r
- #error "Compiler generates FPU instructions for a device without an FPU (check __FPU_PRESENT)"\r
- #define __FPU_USED 0U\r
- #endif\r
- #else\r
- #define __FPU_USED 0U\r
- #endif\r
-\r
-#elif defined ( __CSMC__ )\r
- #if ( __CSMC__ & 0x400U)\r
- #if (__FPU_PRESENT == 1U)\r
- #define __FPU_USED 1U\r
- #else\r
- #error "Compiler generates FPU instructions for a device without an FPU (check __FPU_PRESENT)"\r
- #define __FPU_USED 0U\r
- #endif\r
- #else\r
- #define __FPU_USED 0U\r
- #endif\r
-\r
-#endif\r
-\r
-#include "core_cmInstr.h" /* Core Instruction Access */\r
-#include "core_cmFunc.h" /* Core Function Access */\r
-#include "core_cmSimd.h" /* Compiler specific SIMD Intrinsics */\r
-\r
-#ifdef __cplusplus\r
-}\r
-#endif\r
-\r
-#endif /* __CORE_CM4_H_GENERIC */\r
-\r
-#ifndef __CMSIS_GENERIC\r
-\r
-#ifndef __CORE_CM4_H_DEPENDANT\r
-#define __CORE_CM4_H_DEPENDANT\r
-\r
-#ifdef __cplusplus\r
- extern "C" {\r
-#endif\r
-\r
-/* check device defines and use defaults */\r
-#if defined __CHECK_DEVICE_DEFINES\r
- #ifndef __CM4_REV\r
- #define __CM4_REV 0x0000U\r
- #warning "__CM4_REV not defined in device header file; using default!"\r
- #endif\r
-\r
- #ifndef __FPU_PRESENT\r
- #define __FPU_PRESENT 0U\r
- #warning "__FPU_PRESENT not defined in device header file; using default!"\r
- #endif\r
-\r
- #ifndef __MPU_PRESENT\r
- #define __MPU_PRESENT 0U\r
- #warning "__MPU_PRESENT not defined in device header file; using default!"\r
- #endif\r
-\r
- #ifndef __NVIC_PRIO_BITS\r
- #define __NVIC_PRIO_BITS 4U\r
- #warning "__NVIC_PRIO_BITS not defined in device header file; using default!"\r
- #endif\r
-\r
- #ifndef __Vendor_SysTickConfig\r
- #define __Vendor_SysTickConfig 0U\r
- #warning "__Vendor_SysTickConfig not defined in device header file; using default!"\r
- #endif\r
-#endif\r
-\r
-/* IO definitions (access restrictions to peripheral registers) */\r
-/**\r
- \defgroup CMSIS_glob_defs CMSIS Global Defines\r
-\r
- <strong>IO Type Qualifiers</strong> are used\r
- \li to specify the access to peripheral variables.\r
- \li for automatic generation of peripheral register debug information.\r
-*/\r
-#ifdef __cplusplus\r
- #define __I volatile /*!< Defines 'read only' permissions */\r
-#else\r
- #define __I volatile const /*!< Defines 'read only' permissions */\r
-#endif\r
-#define __O volatile /*!< Defines 'write only' permissions */\r
-#define __IO volatile /*!< Defines 'read / write' permissions */\r
-\r
-/* following defines should be used for structure members */\r
-#define __IM volatile const /*! Defines 'read only' structure member permissions */\r
-#define __OM volatile /*! Defines 'write only' structure member permissions */\r
-#define __IOM volatile /*! Defines 'read / write' structure member permissions */\r
-\r
-/*@} end of group Cortex_M4 */\r
-\r
-\r
-\r
-/*******************************************************************************\r
- * Register Abstraction\r
- Core Register contain:\r
- - Core Register\r
- - Core NVIC Register\r
- - Core SCB Register\r
- - Core SysTick Register\r
- - Core Debug Register\r
- - Core MPU Register\r
- - Core FPU Register\r
- ******************************************************************************/\r
-/**\r
- \defgroup CMSIS_core_register Defines and Type Definitions\r
- \brief Type definitions and defines for Cortex-M processor based devices.\r
-*/\r
-\r
-/**\r
- \ingroup CMSIS_core_register\r
- \defgroup CMSIS_CORE Status and Control Registers\r
- \brief Core Register type definitions.\r
- @{\r
- */\r
-\r
-/**\r
- \brief Union type to access the Application Program Status Register (APSR).\r
- */\r
-typedef union\r
-{\r
- struct\r
- {\r
- uint32_t _reserved0:16; /*!< bit: 0..15 Reserved */\r
- uint32_t GE:4; /*!< bit: 16..19 Greater than or Equal flags */\r
- uint32_t _reserved1:7; /*!< bit: 20..26 Reserved */\r
- uint32_t Q:1; /*!< bit: 27 Saturation condition flag */\r
- uint32_t V:1; /*!< bit: 28 Overflow condition code flag */\r
- uint32_t C:1; /*!< bit: 29 Carry condition code flag */\r
- uint32_t Z:1; /*!< bit: 30 Zero condition code flag */\r
- uint32_t N:1; /*!< bit: 31 Negative condition code flag */\r
- } b; /*!< Structure used for bit access */\r
- uint32_t w; /*!< Type used for word access */\r
-} APSR_Type;\r
-\r
-/* APSR Register Definitions */\r
-#define APSR_N_Pos 31U /*!< APSR: N Position */\r
-#define APSR_N_Msk (1UL << APSR_N_Pos) /*!< APSR: N Mask */\r
-\r
-#define APSR_Z_Pos 30U /*!< APSR: Z Position */\r
-#define APSR_Z_Msk (1UL << APSR_Z_Pos) /*!< APSR: Z Mask */\r
-\r
-#define APSR_C_Pos 29U /*!< APSR: C Position */\r
-#define APSR_C_Msk (1UL << APSR_C_Pos) /*!< APSR: C Mask */\r
-\r
-#define APSR_V_Pos 28U /*!< APSR: V Position */\r
-#define APSR_V_Msk (1UL << APSR_V_Pos) /*!< APSR: V Mask */\r
-\r
-#define APSR_Q_Pos 27U /*!< APSR: Q Position */\r
-#define APSR_Q_Msk (1UL << APSR_Q_Pos) /*!< APSR: Q Mask */\r
-\r
-#define APSR_GE_Pos 16U /*!< APSR: GE Position */\r
-#define APSR_GE_Msk (0xFUL << APSR_GE_Pos) /*!< APSR: GE Mask */\r
-\r
-\r
-/**\r
- \brief Union type to access the Interrupt Program Status Register (IPSR).\r
- */\r
-typedef union\r
-{\r
- struct\r
- {\r
- uint32_t ISR:9; /*!< bit: 0.. 8 Exception number */\r
- uint32_t _reserved0:23; /*!< bit: 9..31 Reserved */\r
- } b; /*!< Structure used for bit access */\r
- uint32_t w; /*!< Type used for word access */\r
-} IPSR_Type;\r
-\r
-/* IPSR Register Definitions */\r
-#define IPSR_ISR_Pos 0U /*!< IPSR: ISR Position */\r
-#define IPSR_ISR_Msk (0x1FFUL /*<< IPSR_ISR_Pos*/) /*!< IPSR: ISR Mask */\r
-\r
-\r
-/**\r
- \brief Union type to access the Special-Purpose Program Status Registers (xPSR).\r
- */\r
-typedef union\r
-{\r
- struct\r
- {\r
- uint32_t ISR:9; /*!< bit: 0.. 8 Exception number */\r
- uint32_t _reserved0:7; /*!< bit: 9..15 Reserved */\r
- uint32_t GE:4; /*!< bit: 16..19 Greater than or Equal flags */\r
- uint32_t _reserved1:4; /*!< bit: 20..23 Reserved */\r
- uint32_t T:1; /*!< bit: 24 Thumb bit (read 0) */\r
- uint32_t IT:2; /*!< bit: 25..26 saved IT state (read 0) */\r
- uint32_t Q:1; /*!< bit: 27 Saturation condition flag */\r
- uint32_t V:1; /*!< bit: 28 Overflow condition code flag */\r
- uint32_t C:1; /*!< bit: 29 Carry condition code flag */\r
- uint32_t Z:1; /*!< bit: 30 Zero condition code flag */\r
- uint32_t N:1; /*!< bit: 31 Negative condition code flag */\r
- } b; /*!< Structure used for bit access */\r
- uint32_t w; /*!< Type used for word access */\r
-} xPSR_Type;\r
-\r
-/* xPSR Register Definitions */\r
-#define xPSR_N_Pos 31U /*!< xPSR: N Position */\r
-#define xPSR_N_Msk (1UL << xPSR_N_Pos) /*!< xPSR: N Mask */\r
-\r
-#define xPSR_Z_Pos 30U /*!< xPSR: Z Position */\r
-#define xPSR_Z_Msk (1UL << xPSR_Z_Pos) /*!< xPSR: Z Mask */\r
-\r
-#define xPSR_C_Pos 29U /*!< xPSR: C Position */\r
-#define xPSR_C_Msk (1UL << xPSR_C_Pos) /*!< xPSR: C Mask */\r
-\r
-#define xPSR_V_Pos 28U /*!< xPSR: V Position */\r
-#define xPSR_V_Msk (1UL << xPSR_V_Pos) /*!< xPSR: V Mask */\r
-\r
-#define xPSR_Q_Pos 27U /*!< xPSR: Q Position */\r
-#define xPSR_Q_Msk (1UL << xPSR_Q_Pos) /*!< xPSR: Q Mask */\r
-\r
-#define xPSR_IT_Pos 25U /*!< xPSR: IT Position */\r
-#define xPSR_IT_Msk (3UL << xPSR_IT_Pos) /*!< xPSR: IT Mask */\r
-\r
-#define xPSR_T_Pos 24U /*!< xPSR: T Position */\r
-#define xPSR_T_Msk (1UL << xPSR_T_Pos) /*!< xPSR: T Mask */\r
-\r
-#define xPSR_GE_Pos 16U /*!< xPSR: GE Position */\r
-#define xPSR_GE_Msk (0xFUL << xPSR_GE_Pos) /*!< xPSR: GE Mask */\r
-\r
-#define xPSR_ISR_Pos 0U /*!< xPSR: ISR Position */\r
-#define xPSR_ISR_Msk (0x1FFUL /*<< xPSR_ISR_Pos*/) /*!< xPSR: ISR Mask */\r
-\r
-\r
-/**\r
- \brief Union type to access the Control Registers (CONTROL).\r
- */\r
-typedef union\r
-{\r
- struct\r
- {\r
- uint32_t nPRIV:1; /*!< bit: 0 Execution privilege in Thread mode */\r
- uint32_t SPSEL:1; /*!< bit: 1 Stack to be used */\r
- uint32_t FPCA:1; /*!< bit: 2 FP extension active flag */\r
- uint32_t _reserved0:29; /*!< bit: 3..31 Reserved */\r
- } b; /*!< Structure used for bit access */\r
- uint32_t w; /*!< Type used for word access */\r
-} CONTROL_Type;\r
-\r
-/* CONTROL Register Definitions */\r
-#define CONTROL_FPCA_Pos 2U /*!< CONTROL: FPCA Position */\r
-#define CONTROL_FPCA_Msk (1UL << CONTROL_FPCA_Pos) /*!< CONTROL: FPCA Mask */\r
-\r
-#define CONTROL_SPSEL_Pos 1U /*!< CONTROL: SPSEL Position */\r
-#define CONTROL_SPSEL_Msk (1UL << CONTROL_SPSEL_Pos) /*!< CONTROL: SPSEL Mask */\r
-\r
-#define CONTROL_nPRIV_Pos 0U /*!< CONTROL: nPRIV Position */\r
-#define CONTROL_nPRIV_Msk (1UL /*<< CONTROL_nPRIV_Pos*/) /*!< CONTROL: nPRIV Mask */\r
-\r
-/*@} end of group CMSIS_CORE */\r
-\r
-\r
-/**\r
- \ingroup CMSIS_core_register\r
- \defgroup CMSIS_NVIC Nested Vectored Interrupt Controller (NVIC)\r
- \brief Type definitions for the NVIC Registers\r
- @{\r
- */\r
-\r
-/**\r
- \brief Structure type to access the Nested Vectored Interrupt Controller (NVIC).\r
- */\r
-typedef struct\r
-{\r
- __IOM uint32_t ISER[8U]; /*!< Offset: 0x000 (R/W) Interrupt Set Enable Register */\r
- uint32_t RESERVED0[24U];\r
- __IOM uint32_t ICER[8U]; /*!< Offset: 0x080 (R/W) Interrupt Clear Enable Register */\r
- uint32_t RSERVED1[24U];\r
- __IOM uint32_t ISPR[8U]; /*!< Offset: 0x100 (R/W) Interrupt Set Pending Register */\r
- uint32_t RESERVED2[24U];\r
- __IOM uint32_t ICPR[8U]; /*!< Offset: 0x180 (R/W) Interrupt Clear Pending Register */\r
- uint32_t RESERVED3[24U];\r
- __IOM uint32_t IABR[8U]; /*!< Offset: 0x200 (R/W) Interrupt Active bit Register */\r
- uint32_t RESERVED4[56U];\r
- __IOM uint8_t IP[240U]; /*!< Offset: 0x300 (R/W) Interrupt Priority Register (8Bit wide) */\r
- uint32_t RESERVED5[644U];\r
- __OM uint32_t STIR; /*!< Offset: 0xE00 ( /W) Software Trigger Interrupt Register */\r
-} NVIC_Type;\r
-\r
-/* Software Triggered Interrupt Register Definitions */\r
-#define NVIC_STIR_INTID_Pos 0U /*!< STIR: INTLINESNUM Position */\r
-#define NVIC_STIR_INTID_Msk (0x1FFUL /*<< NVIC_STIR_INTID_Pos*/) /*!< STIR: INTLINESNUM Mask */\r
-\r
-/*@} end of group CMSIS_NVIC */\r
-\r
-\r
-/**\r
- \ingroup CMSIS_core_register\r
- \defgroup CMSIS_SCB System Control Block (SCB)\r
- \brief Type definitions for the System Control Block Registers\r
- @{\r
- */\r
-\r
-/**\r
- \brief Structure type to access the System Control Block (SCB).\r
- */\r
-typedef struct\r
-{\r
- __IM uint32_t CPUID; /*!< Offset: 0x000 (R/ ) CPUID Base Register */\r
- __IOM uint32_t ICSR; /*!< Offset: 0x004 (R/W) Interrupt Control and State Register */\r
- __IOM uint32_t VTOR; /*!< Offset: 0x008 (R/W) Vector Table Offset Register */\r
- __IOM uint32_t AIRCR; /*!< Offset: 0x00C (R/W) Application Interrupt and Reset Control Register */\r
- __IOM uint32_t SCR; /*!< Offset: 0x010 (R/W) System Control Register */\r
- __IOM uint32_t CCR; /*!< Offset: 0x014 (R/W) Configuration Control Register */\r
- __IOM uint8_t SHP[12U]; /*!< Offset: 0x018 (R/W) System Handlers Priority Registers (4-7, 8-11, 12-15) */\r
- __IOM uint32_t SHCSR; /*!< Offset: 0x024 (R/W) System Handler Control and State Register */\r
- __IOM uint32_t CFSR; /*!< Offset: 0x028 (R/W) Configurable Fault Status Register */\r
- __IOM uint32_t HFSR; /*!< Offset: 0x02C (R/W) HardFault Status Register */\r
- __IOM uint32_t DFSR; /*!< Offset: 0x030 (R/W) Debug Fault Status Register */\r
- __IOM uint32_t MMFAR; /*!< Offset: 0x034 (R/W) MemManage Fault Address Register */\r
- __IOM uint32_t BFAR; /*!< Offset: 0x038 (R/W) BusFault Address Register */\r
- __IOM uint32_t AFSR; /*!< Offset: 0x03C (R/W) Auxiliary Fault Status Register */\r
- __IM uint32_t PFR[2U]; /*!< Offset: 0x040 (R/ ) Processor Feature Register */\r
- __IM uint32_t DFR; /*!< Offset: 0x048 (R/ ) Debug Feature Register */\r
- __IM uint32_t ADR; /*!< Offset: 0x04C (R/ ) Auxiliary Feature Register */\r
- __IM uint32_t MMFR[4U]; /*!< Offset: 0x050 (R/ ) Memory Model Feature Register */\r
- __IM uint32_t ISAR[5U]; /*!< Offset: 0x060 (R/ ) Instruction Set Attributes Register */\r
- uint32_t RESERVED0[5U];\r
- __IOM uint32_t CPACR; /*!< Offset: 0x088 (R/W) Coprocessor Access Control Register */\r
-} SCB_Type;\r
-\r
-/* SCB CPUID Register Definitions */\r
-#define SCB_CPUID_IMPLEMENTER_Pos 24U /*!< SCB CPUID: IMPLEMENTER Position */\r
-#define SCB_CPUID_IMPLEMENTER_Msk (0xFFUL << SCB_CPUID_IMPLEMENTER_Pos) /*!< SCB CPUID: IMPLEMENTER Mask */\r
-\r
-#define SCB_CPUID_VARIANT_Pos 20U /*!< SCB CPUID: VARIANT Position */\r
-#define SCB_CPUID_VARIANT_Msk (0xFUL << SCB_CPUID_VARIANT_Pos) /*!< SCB CPUID: VARIANT Mask */\r
-\r
-#define SCB_CPUID_ARCHITECTURE_Pos 16U /*!< SCB CPUID: ARCHITECTURE Position */\r
-#define SCB_CPUID_ARCHITECTURE_Msk (0xFUL << SCB_CPUID_ARCHITECTURE_Pos) /*!< SCB CPUID: ARCHITECTURE Mask */\r
-\r
-#define SCB_CPUID_PARTNO_Pos 4U /*!< SCB CPUID: PARTNO Position */\r
-#define SCB_CPUID_PARTNO_Msk (0xFFFUL << SCB_CPUID_PARTNO_Pos) /*!< SCB CPUID: PARTNO Mask */\r
-\r
-#define SCB_CPUID_REVISION_Pos 0U /*!< SCB CPUID: REVISION Position */\r
-#define SCB_CPUID_REVISION_Msk (0xFUL /*<< SCB_CPUID_REVISION_Pos*/) /*!< SCB CPUID: REVISION Mask */\r
-\r
-/* SCB Interrupt Control State Register Definitions */\r
-#define SCB_ICSR_NMIPENDSET_Pos 31U /*!< SCB ICSR: NMIPENDSET Position */\r
-#define SCB_ICSR_NMIPENDSET_Msk (1UL << SCB_ICSR_NMIPENDSET_Pos) /*!< SCB ICSR: NMIPENDSET Mask */\r
-\r
-#define SCB_ICSR_PENDSVSET_Pos 28U /*!< SCB ICSR: PENDSVSET Position */\r
-#define SCB_ICSR_PENDSVSET_Msk (1UL << SCB_ICSR_PENDSVSET_Pos) /*!< SCB ICSR: PENDSVSET Mask */\r
-\r
-#define SCB_ICSR_PENDSVCLR_Pos 27U /*!< SCB ICSR: PENDSVCLR Position */\r
-#define SCB_ICSR_PENDSVCLR_Msk (1UL << SCB_ICSR_PENDSVCLR_Pos) /*!< SCB ICSR: PENDSVCLR Mask */\r
-\r
-#define SCB_ICSR_PENDSTSET_Pos 26U /*!< SCB ICSR: PENDSTSET Position */\r
-#define SCB_ICSR_PENDSTSET_Msk (1UL << SCB_ICSR_PENDSTSET_Pos) /*!< SCB ICSR: PENDSTSET Mask */\r
-\r
-#define SCB_ICSR_PENDSTCLR_Pos 25U /*!< SCB ICSR: PENDSTCLR Position */\r
-#define SCB_ICSR_PENDSTCLR_Msk (1UL << SCB_ICSR_PENDSTCLR_Pos) /*!< SCB ICSR: PENDSTCLR Mask */\r
-\r
-#define SCB_ICSR_ISRPREEMPT_Pos 23U /*!< SCB ICSR: ISRPREEMPT Position */\r
-#define SCB_ICSR_ISRPREEMPT_Msk (1UL << SCB_ICSR_ISRPREEMPT_Pos) /*!< SCB ICSR: ISRPREEMPT Mask */\r
-\r
-#define SCB_ICSR_ISRPENDING_Pos 22U /*!< SCB ICSR: ISRPENDING Position */\r
-#define SCB_ICSR_ISRPENDING_Msk (1UL << SCB_ICSR_ISRPENDING_Pos) /*!< SCB ICSR: ISRPENDING Mask */\r
-\r
-#define SCB_ICSR_VECTPENDING_Pos 12U /*!< SCB ICSR: VECTPENDING Position */\r
-#define SCB_ICSR_VECTPENDING_Msk (0x1FFUL << SCB_ICSR_VECTPENDING_Pos) /*!< SCB ICSR: VECTPENDING Mask */\r
-\r
-#define SCB_ICSR_RETTOBASE_Pos 11U /*!< SCB ICSR: RETTOBASE Position */\r
-#define SCB_ICSR_RETTOBASE_Msk (1UL << SCB_ICSR_RETTOBASE_Pos) /*!< SCB ICSR: RETTOBASE Mask */\r
-\r
-#define SCB_ICSR_VECTACTIVE_Pos 0U /*!< SCB ICSR: VECTACTIVE Position */\r
-#define SCB_ICSR_VECTACTIVE_Msk (0x1FFUL /*<< SCB_ICSR_VECTACTIVE_Pos*/) /*!< SCB ICSR: VECTACTIVE Mask */\r
-\r
-/* SCB Vector Table Offset Register Definitions */\r
-#define SCB_VTOR_TBLOFF_Pos 7U /*!< SCB VTOR: TBLOFF Position */\r
-#define SCB_VTOR_TBLOFF_Msk (0x1FFFFFFUL << SCB_VTOR_TBLOFF_Pos) /*!< SCB VTOR: TBLOFF Mask */\r
-\r
-/* SCB Application Interrupt and Reset Control Register Definitions */\r
-#define SCB_AIRCR_VECTKEY_Pos 16U /*!< SCB AIRCR: VECTKEY Position */\r
-#define SCB_AIRCR_VECTKEY_Msk (0xFFFFUL << SCB_AIRCR_VECTKEY_Pos) /*!< SCB AIRCR: VECTKEY Mask */\r
-\r
-#define SCB_AIRCR_VECTKEYSTAT_Pos 16U /*!< SCB AIRCR: VECTKEYSTAT Position */\r
-#define SCB_AIRCR_VECTKEYSTAT_Msk (0xFFFFUL << SCB_AIRCR_VECTKEYSTAT_Pos) /*!< SCB AIRCR: VECTKEYSTAT Mask */\r
-\r
-#define SCB_AIRCR_ENDIANESS_Pos 15U /*!< SCB AIRCR: ENDIANESS Position */\r
-#define SCB_AIRCR_ENDIANESS_Msk (1UL << SCB_AIRCR_ENDIANESS_Pos) /*!< SCB AIRCR: ENDIANESS Mask */\r
-\r
-#define SCB_AIRCR_PRIGROUP_Pos 8U /*!< SCB AIRCR: PRIGROUP Position */\r
-#define SCB_AIRCR_PRIGROUP_Msk (7UL << SCB_AIRCR_PRIGROUP_Pos) /*!< SCB AIRCR: PRIGROUP Mask */\r
-\r
-#define SCB_AIRCR_SYSRESETREQ_Pos 2U /*!< SCB AIRCR: SYSRESETREQ Position */\r
-#define SCB_AIRCR_SYSRESETREQ_Msk (1UL << SCB_AIRCR_SYSRESETREQ_Pos) /*!< SCB AIRCR: SYSRESETREQ Mask */\r
-\r
-#define SCB_AIRCR_VECTCLRACTIVE_Pos 1U /*!< SCB AIRCR: VECTCLRACTIVE Position */\r
-#define SCB_AIRCR_VECTCLRACTIVE_Msk (1UL << SCB_AIRCR_VECTCLRACTIVE_Pos) /*!< SCB AIRCR: VECTCLRACTIVE Mask */\r
-\r
-#define SCB_AIRCR_VECTRESET_Pos 0U /*!< SCB AIRCR: VECTRESET Position */\r
-#define SCB_AIRCR_VECTRESET_Msk (1UL /*<< SCB_AIRCR_VECTRESET_Pos*/) /*!< SCB AIRCR: VECTRESET Mask */\r
-\r
-/* SCB System Control Register Definitions */\r
-#define SCB_SCR_SEVONPEND_Pos 4U /*!< SCB SCR: SEVONPEND Position */\r
-#define SCB_SCR_SEVONPEND_Msk (1UL << SCB_SCR_SEVONPEND_Pos) /*!< SCB SCR: SEVONPEND Mask */\r
-\r
-#define SCB_SCR_SLEEPDEEP_Pos 2U /*!< SCB SCR: SLEEPDEEP Position */\r
-#define SCB_SCR_SLEEPDEEP_Msk (1UL << SCB_SCR_SLEEPDEEP_Pos) /*!< SCB SCR: SLEEPDEEP Mask */\r
-\r
-#define SCB_SCR_SLEEPONEXIT_Pos 1U /*!< SCB SCR: SLEEPONEXIT Position */\r
-#define SCB_SCR_SLEEPONEXIT_Msk (1UL << SCB_SCR_SLEEPONEXIT_Pos) /*!< SCB SCR: SLEEPONEXIT Mask */\r
-\r
-/* SCB Configuration Control Register Definitions */\r
-#define SCB_CCR_STKALIGN_Pos 9U /*!< SCB CCR: STKALIGN Position */\r
-#define SCB_CCR_STKALIGN_Msk (1UL << SCB_CCR_STKALIGN_Pos) /*!< SCB CCR: STKALIGN Mask */\r
-\r
-#define SCB_CCR_BFHFNMIGN_Pos 8U /*!< SCB CCR: BFHFNMIGN Position */\r
-#define SCB_CCR_BFHFNMIGN_Msk (1UL << SCB_CCR_BFHFNMIGN_Pos) /*!< SCB CCR: BFHFNMIGN Mask */\r
-\r
-#define SCB_CCR_DIV_0_TRP_Pos 4U /*!< SCB CCR: DIV_0_TRP Position */\r
-#define SCB_CCR_DIV_0_TRP_Msk (1UL << SCB_CCR_DIV_0_TRP_Pos) /*!< SCB CCR: DIV_0_TRP Mask */\r
-\r
-#define SCB_CCR_UNALIGN_TRP_Pos 3U /*!< SCB CCR: UNALIGN_TRP Position */\r
-#define SCB_CCR_UNALIGN_TRP_Msk (1UL << SCB_CCR_UNALIGN_TRP_Pos) /*!< SCB CCR: UNALIGN_TRP Mask */\r
-\r
-#define SCB_CCR_USERSETMPEND_Pos 1U /*!< SCB CCR: USERSETMPEND Position */\r
-#define SCB_CCR_USERSETMPEND_Msk (1UL << SCB_CCR_USERSETMPEND_Pos) /*!< SCB CCR: USERSETMPEND Mask */\r
-\r
-#define SCB_CCR_NONBASETHRDENA_Pos 0U /*!< SCB CCR: NONBASETHRDENA Position */\r
-#define SCB_CCR_NONBASETHRDENA_Msk (1UL /*<< SCB_CCR_NONBASETHRDENA_Pos*/) /*!< SCB CCR: NONBASETHRDENA Mask */\r
-\r
-/* SCB System Handler Control and State Register Definitions */\r
-#define SCB_SHCSR_USGFAULTENA_Pos 18U /*!< SCB SHCSR: USGFAULTENA Position */\r
-#define SCB_SHCSR_USGFAULTENA_Msk (1UL << SCB_SHCSR_USGFAULTENA_Pos) /*!< SCB SHCSR: USGFAULTENA Mask */\r
-\r
-#define SCB_SHCSR_BUSFAULTENA_Pos 17U /*!< SCB SHCSR: BUSFAULTENA Position */\r
-#define SCB_SHCSR_BUSFAULTENA_Msk (1UL << SCB_SHCSR_BUSFAULTENA_Pos) /*!< SCB SHCSR: BUSFAULTENA Mask */\r
-\r
-#define SCB_SHCSR_MEMFAULTENA_Pos 16U /*!< SCB SHCSR: MEMFAULTENA Position */\r
-#define SCB_SHCSR_MEMFAULTENA_Msk (1UL << SCB_SHCSR_MEMFAULTENA_Pos) /*!< SCB SHCSR: MEMFAULTENA Mask */\r
-\r
-#define SCB_SHCSR_SVCALLPENDED_Pos 15U /*!< SCB SHCSR: SVCALLPENDED Position */\r
-#define SCB_SHCSR_SVCALLPENDED_Msk (1UL << SCB_SHCSR_SVCALLPENDED_Pos) /*!< SCB SHCSR: SVCALLPENDED Mask */\r
-\r
-#define SCB_SHCSR_BUSFAULTPENDED_Pos 14U /*!< SCB SHCSR: BUSFAULTPENDED Position */\r
-#define SCB_SHCSR_BUSFAULTPENDED_Msk (1UL << SCB_SHCSR_BUSFAULTPENDED_Pos) /*!< SCB SHCSR: BUSFAULTPENDED Mask */\r
-\r
-#define SCB_SHCSR_MEMFAULTPENDED_Pos 13U /*!< SCB SHCSR: MEMFAULTPENDED Position */\r
-#define SCB_SHCSR_MEMFAULTPENDED_Msk (1UL << SCB_SHCSR_MEMFAULTPENDED_Pos) /*!< SCB SHCSR: MEMFAULTPENDED Mask */\r
-\r
-#define SCB_SHCSR_USGFAULTPENDED_Pos 12U /*!< SCB SHCSR: USGFAULTPENDED Position */\r
-#define SCB_SHCSR_USGFAULTPENDED_Msk (1UL << SCB_SHCSR_USGFAULTPENDED_Pos) /*!< SCB SHCSR: USGFAULTPENDED Mask */\r
-\r
-#define SCB_SHCSR_SYSTICKACT_Pos 11U /*!< SCB SHCSR: SYSTICKACT Position */\r
-#define SCB_SHCSR_SYSTICKACT_Msk (1UL << SCB_SHCSR_SYSTICKACT_Pos) /*!< SCB SHCSR: SYSTICKACT Mask */\r
-\r
-#define SCB_SHCSR_PENDSVACT_Pos 10U /*!< SCB SHCSR: PENDSVACT Position */\r
-#define SCB_SHCSR_PENDSVACT_Msk (1UL << SCB_SHCSR_PENDSVACT_Pos) /*!< SCB SHCSR: PENDSVACT Mask */\r
-\r
-#define SCB_SHCSR_MONITORACT_Pos 8U /*!< SCB SHCSR: MONITORACT Position */\r
-#define SCB_SHCSR_MONITORACT_Msk (1UL << SCB_SHCSR_MONITORACT_Pos) /*!< SCB SHCSR: MONITORACT Mask */\r
-\r
-#define SCB_SHCSR_SVCALLACT_Pos 7U /*!< SCB SHCSR: SVCALLACT Position */\r
-#define SCB_SHCSR_SVCALLACT_Msk (1UL << SCB_SHCSR_SVCALLACT_Pos) /*!< SCB SHCSR: SVCALLACT Mask */\r
-\r
-#define SCB_SHCSR_USGFAULTACT_Pos 3U /*!< SCB SHCSR: USGFAULTACT Position */\r
-#define SCB_SHCSR_USGFAULTACT_Msk (1UL << SCB_SHCSR_USGFAULTACT_Pos) /*!< SCB SHCSR: USGFAULTACT Mask */\r
-\r
-#define SCB_SHCSR_BUSFAULTACT_Pos 1U /*!< SCB SHCSR: BUSFAULTACT Position */\r
-#define SCB_SHCSR_BUSFAULTACT_Msk (1UL << SCB_SHCSR_BUSFAULTACT_Pos) /*!< SCB SHCSR: BUSFAULTACT Mask */\r
-\r
-#define SCB_SHCSR_MEMFAULTACT_Pos 0U /*!< SCB SHCSR: MEMFAULTACT Position */\r
-#define SCB_SHCSR_MEMFAULTACT_Msk (1UL /*<< SCB_SHCSR_MEMFAULTACT_Pos*/) /*!< SCB SHCSR: MEMFAULTACT Mask */\r
-\r
-/* SCB Configurable Fault Status Register Definitions */\r
-#define SCB_CFSR_USGFAULTSR_Pos 16U /*!< SCB CFSR: Usage Fault Status Register Position */\r
-#define SCB_CFSR_USGFAULTSR_Msk (0xFFFFUL << SCB_CFSR_USGFAULTSR_Pos) /*!< SCB CFSR: Usage Fault Status Register Mask */\r
-\r
-#define SCB_CFSR_BUSFAULTSR_Pos 8U /*!< SCB CFSR: Bus Fault Status Register Position */\r
-#define SCB_CFSR_BUSFAULTSR_Msk (0xFFUL << SCB_CFSR_BUSFAULTSR_Pos) /*!< SCB CFSR: Bus Fault Status Register Mask */\r
-\r
-#define SCB_CFSR_MEMFAULTSR_Pos 0U /*!< SCB CFSR: Memory Manage Fault Status Register Position */\r
-#define SCB_CFSR_MEMFAULTSR_Msk (0xFFUL /*<< SCB_CFSR_MEMFAULTSR_Pos*/) /*!< SCB CFSR: Memory Manage Fault Status Register Mask */\r
-\r
-/* SCB Hard Fault Status Register Definitions */\r
-#define SCB_HFSR_DEBUGEVT_Pos 31U /*!< SCB HFSR: DEBUGEVT Position */\r
-#define SCB_HFSR_DEBUGEVT_Msk (1UL << SCB_HFSR_DEBUGEVT_Pos) /*!< SCB HFSR: DEBUGEVT Mask */\r
-\r
-#define SCB_HFSR_FORCED_Pos 30U /*!< SCB HFSR: FORCED Position */\r
-#define SCB_HFSR_FORCED_Msk (1UL << SCB_HFSR_FORCED_Pos) /*!< SCB HFSR: FORCED Mask */\r
-\r
-#define SCB_HFSR_VECTTBL_Pos 1U /*!< SCB HFSR: VECTTBL Position */\r
-#define SCB_HFSR_VECTTBL_Msk (1UL << SCB_HFSR_VECTTBL_Pos) /*!< SCB HFSR: VECTTBL Mask */\r
-\r
-/* SCB Debug Fault Status Register Definitions */\r
-#define SCB_DFSR_EXTERNAL_Pos 4U /*!< SCB DFSR: EXTERNAL Position */\r
-#define SCB_DFSR_EXTERNAL_Msk (1UL << SCB_DFSR_EXTERNAL_Pos) /*!< SCB DFSR: EXTERNAL Mask */\r
-\r
-#define SCB_DFSR_VCATCH_Pos 3U /*!< SCB DFSR: VCATCH Position */\r
-#define SCB_DFSR_VCATCH_Msk (1UL << SCB_DFSR_VCATCH_Pos) /*!< SCB DFSR: VCATCH Mask */\r
-\r
-#define SCB_DFSR_DWTTRAP_Pos 2U /*!< SCB DFSR: DWTTRAP Position */\r
-#define SCB_DFSR_DWTTRAP_Msk (1UL << SCB_DFSR_DWTTRAP_Pos) /*!< SCB DFSR: DWTTRAP Mask */\r
-\r
-#define SCB_DFSR_BKPT_Pos 1U /*!< SCB DFSR: BKPT Position */\r
-#define SCB_DFSR_BKPT_Msk (1UL << SCB_DFSR_BKPT_Pos) /*!< SCB DFSR: BKPT Mask */\r
-\r
-#define SCB_DFSR_HALTED_Pos 0U /*!< SCB DFSR: HALTED Position */\r
-#define SCB_DFSR_HALTED_Msk (1UL /*<< SCB_DFSR_HALTED_Pos*/) /*!< SCB DFSR: HALTED Mask */\r
-\r
-/*@} end of group CMSIS_SCB */\r
-\r
-\r
-/**\r
- \ingroup CMSIS_core_register\r
- \defgroup CMSIS_SCnSCB System Controls not in SCB (SCnSCB)\r
- \brief Type definitions for the System Control and ID Register not in the SCB\r
- @{\r
- */\r
-\r
-/**\r
- \brief Structure type to access the System Control and ID Register not in the SCB.\r
- */\r
-typedef struct\r
-{\r
- uint32_t RESERVED0[1U];\r
- __IM uint32_t ICTR; /*!< Offset: 0x004 (R/ ) Interrupt Controller Type Register */\r
- __IOM uint32_t ACTLR; /*!< Offset: 0x008 (R/W) Auxiliary Control Register */\r
-} SCnSCB_Type;\r
-\r
-/* Interrupt Controller Type Register Definitions */\r
-#define SCnSCB_ICTR_INTLINESNUM_Pos 0U /*!< ICTR: INTLINESNUM Position */\r
-#define SCnSCB_ICTR_INTLINESNUM_Msk (0xFUL /*<< SCnSCB_ICTR_INTLINESNUM_Pos*/) /*!< ICTR: INTLINESNUM Mask */\r
-\r
-/* Auxiliary Control Register Definitions */\r
-#define SCnSCB_ACTLR_DISOOFP_Pos 9U /*!< ACTLR: DISOOFP Position */\r
-#define SCnSCB_ACTLR_DISOOFP_Msk (1UL << SCnSCB_ACTLR_DISOOFP_Pos) /*!< ACTLR: DISOOFP Mask */\r
-\r
-#define SCnSCB_ACTLR_DISFPCA_Pos 8U /*!< ACTLR: DISFPCA Position */\r
-#define SCnSCB_ACTLR_DISFPCA_Msk (1UL << SCnSCB_ACTLR_DISFPCA_Pos) /*!< ACTLR: DISFPCA Mask */\r
-\r
-#define SCnSCB_ACTLR_DISFOLD_Pos 2U /*!< ACTLR: DISFOLD Position */\r
-#define SCnSCB_ACTLR_DISFOLD_Msk (1UL << SCnSCB_ACTLR_DISFOLD_Pos) /*!< ACTLR: DISFOLD Mask */\r
-\r
-#define SCnSCB_ACTLR_DISDEFWBUF_Pos 1U /*!< ACTLR: DISDEFWBUF Position */\r
-#define SCnSCB_ACTLR_DISDEFWBUF_Msk (1UL << SCnSCB_ACTLR_DISDEFWBUF_Pos) /*!< ACTLR: DISDEFWBUF Mask */\r
-\r
-#define SCnSCB_ACTLR_DISMCYCINT_Pos 0U /*!< ACTLR: DISMCYCINT Position */\r
-#define SCnSCB_ACTLR_DISMCYCINT_Msk (1UL /*<< SCnSCB_ACTLR_DISMCYCINT_Pos*/) /*!< ACTLR: DISMCYCINT Mask */\r
-\r
-/*@} end of group CMSIS_SCnotSCB */\r
-\r
-\r
-/**\r
- \ingroup CMSIS_core_register\r
- \defgroup CMSIS_SysTick System Tick Timer (SysTick)\r
- \brief Type definitions for the System Timer Registers.\r
- @{\r
- */\r
-\r
-/**\r
- \brief Structure type to access the System Timer (SysTick).\r
- */\r
-typedef struct\r
-{\r
- __IOM uint32_t CTRL; /*!< Offset: 0x000 (R/W) SysTick Control and Status Register */\r
- __IOM uint32_t LOAD; /*!< Offset: 0x004 (R/W) SysTick Reload Value Register */\r
- __IOM uint32_t VAL; /*!< Offset: 0x008 (R/W) SysTick Current Value Register */\r
- __IM uint32_t CALIB; /*!< Offset: 0x00C (R/ ) SysTick Calibration Register */\r
-} SysTick_Type;\r
-\r
-/* SysTick Control / Status Register Definitions */\r
-#define SysTick_CTRL_COUNTFLAG_Pos 16U /*!< SysTick CTRL: COUNTFLAG Position */\r
-#define SysTick_CTRL_COUNTFLAG_Msk (1UL << SysTick_CTRL_COUNTFLAG_Pos) /*!< SysTick CTRL: COUNTFLAG Mask */\r
-\r
-#define SysTick_CTRL_CLKSOURCE_Pos 2U /*!< SysTick CTRL: CLKSOURCE Position */\r
-#define SysTick_CTRL_CLKSOURCE_Msk (1UL << SysTick_CTRL_CLKSOURCE_Pos) /*!< SysTick CTRL: CLKSOURCE Mask */\r
-\r
-#define SysTick_CTRL_TICKINT_Pos 1U /*!< SysTick CTRL: TICKINT Position */\r
-#define SysTick_CTRL_TICKINT_Msk (1UL << SysTick_CTRL_TICKINT_Pos) /*!< SysTick CTRL: TICKINT Mask */\r
-\r
-#define SysTick_CTRL_ENABLE_Pos 0U /*!< SysTick CTRL: ENABLE Position */\r
-#define SysTick_CTRL_ENABLE_Msk (1UL /*<< SysTick_CTRL_ENABLE_Pos*/) /*!< SysTick CTRL: ENABLE Mask */\r
-\r
-/* SysTick Reload Register Definitions */\r
-#define SysTick_LOAD_RELOAD_Pos 0U /*!< SysTick LOAD: RELOAD Position */\r
-#define SysTick_LOAD_RELOAD_Msk (0xFFFFFFUL /*<< SysTick_LOAD_RELOAD_Pos*/) /*!< SysTick LOAD: RELOAD Mask */\r
-\r
-/* SysTick Current Register Definitions */\r
-#define SysTick_VAL_CURRENT_Pos 0U /*!< SysTick VAL: CURRENT Position */\r
-#define SysTick_VAL_CURRENT_Msk (0xFFFFFFUL /*<< SysTick_VAL_CURRENT_Pos*/) /*!< SysTick VAL: CURRENT Mask */\r
-\r
-/* SysTick Calibration Register Definitions */\r
-#define SysTick_CALIB_NOREF_Pos 31U /*!< SysTick CALIB: NOREF Position */\r
-#define SysTick_CALIB_NOREF_Msk (1UL << SysTick_CALIB_NOREF_Pos) /*!< SysTick CALIB: NOREF Mask */\r
-\r
-#define SysTick_CALIB_SKEW_Pos 30U /*!< SysTick CALIB: SKEW Position */\r
-#define SysTick_CALIB_SKEW_Msk (1UL << SysTick_CALIB_SKEW_Pos) /*!< SysTick CALIB: SKEW Mask */\r
-\r
-#define SysTick_CALIB_TENMS_Pos 0U /*!< SysTick CALIB: TENMS Position */\r
-#define SysTick_CALIB_TENMS_Msk (0xFFFFFFUL /*<< SysTick_CALIB_TENMS_Pos*/) /*!< SysTick CALIB: TENMS Mask */\r
-\r
-/*@} end of group CMSIS_SysTick */\r
-\r
-\r
-/**\r
- \ingroup CMSIS_core_register\r
- \defgroup CMSIS_ITM Instrumentation Trace Macrocell (ITM)\r
- \brief Type definitions for the Instrumentation Trace Macrocell (ITM)\r
- @{\r
- */\r
-\r
-/**\r
- \brief Structure type to access the Instrumentation Trace Macrocell Register (ITM).\r
- */\r
-typedef struct\r
-{\r
- __OM union\r
- {\r
- __OM uint8_t u8; /*!< Offset: 0x000 ( /W) ITM Stimulus Port 8-bit */\r
- __OM uint16_t u16; /*!< Offset: 0x000 ( /W) ITM Stimulus Port 16-bit */\r
- __OM uint32_t u32; /*!< Offset: 0x000 ( /W) ITM Stimulus Port 32-bit */\r
- } PORT [32U]; /*!< Offset: 0x000 ( /W) ITM Stimulus Port Registers */\r
- uint32_t RESERVED0[864U];\r
- __IOM uint32_t TER; /*!< Offset: 0xE00 (R/W) ITM Trace Enable Register */\r
- uint32_t RESERVED1[15U];\r
- __IOM uint32_t TPR; /*!< Offset: 0xE40 (R/W) ITM Trace Privilege Register */\r
- uint32_t RESERVED2[15U];\r
- __IOM uint32_t TCR; /*!< Offset: 0xE80 (R/W) ITM Trace Control Register */\r
- uint32_t RESERVED3[29U];\r
- __OM uint32_t IWR; /*!< Offset: 0xEF8 ( /W) ITM Integration Write Register */\r
- __IM uint32_t IRR; /*!< Offset: 0xEFC (R/ ) ITM Integration Read Register */\r
- __IOM uint32_t IMCR; /*!< Offset: 0xF00 (R/W) ITM Integration Mode Control Register */\r
- uint32_t RESERVED4[43U];\r
- __OM uint32_t LAR; /*!< Offset: 0xFB0 ( /W) ITM Lock Access Register */\r
- __IM uint32_t LSR; /*!< Offset: 0xFB4 (R/ ) ITM Lock Status Register */\r
- uint32_t RESERVED5[6U];\r
- __IM uint32_t PID4; /*!< Offset: 0xFD0 (R/ ) ITM Peripheral Identification Register #4 */\r
- __IM uint32_t PID5; /*!< Offset: 0xFD4 (R/ ) ITM Peripheral Identification Register #5 */\r
- __IM uint32_t PID6; /*!< Offset: 0xFD8 (R/ ) ITM Peripheral Identification Register #6 */\r
- __IM uint32_t PID7; /*!< Offset: 0xFDC (R/ ) ITM Peripheral Identification Register #7 */\r
- __IM uint32_t PID0; /*!< Offset: 0xFE0 (R/ ) ITM Peripheral Identification Register #0 */\r
- __IM uint32_t PID1; /*!< Offset: 0xFE4 (R/ ) ITM Peripheral Identification Register #1 */\r
- __IM uint32_t PID2; /*!< Offset: 0xFE8 (R/ ) ITM Peripheral Identification Register #2 */\r
- __IM uint32_t PID3; /*!< Offset: 0xFEC (R/ ) ITM Peripheral Identification Register #3 */\r
- __IM uint32_t CID0; /*!< Offset: 0xFF0 (R/ ) ITM Component Identification Register #0 */\r
- __IM uint32_t CID1; /*!< Offset: 0xFF4 (R/ ) ITM Component Identification Register #1 */\r
- __IM uint32_t CID2; /*!< Offset: 0xFF8 (R/ ) ITM Component Identification Register #2 */\r
- __IM uint32_t CID3; /*!< Offset: 0xFFC (R/ ) ITM Component Identification Register #3 */\r
-} ITM_Type;\r
-\r
-/* ITM Trace Privilege Register Definitions */\r
-#define ITM_TPR_PRIVMASK_Pos 0U /*!< ITM TPR: PRIVMASK Position */\r
-#define ITM_TPR_PRIVMASK_Msk (0xFUL /*<< ITM_TPR_PRIVMASK_Pos*/) /*!< ITM TPR: PRIVMASK Mask */\r
-\r
-/* ITM Trace Control Register Definitions */\r
-#define ITM_TCR_BUSY_Pos 23U /*!< ITM TCR: BUSY Position */\r
-#define ITM_TCR_BUSY_Msk (1UL << ITM_TCR_BUSY_Pos) /*!< ITM TCR: BUSY Mask */\r
-\r
-#define ITM_TCR_TraceBusID_Pos 16U /*!< ITM TCR: ATBID Position */\r
-#define ITM_TCR_TraceBusID_Msk (0x7FUL << ITM_TCR_TraceBusID_Pos) /*!< ITM TCR: ATBID Mask */\r
-\r
-#define ITM_TCR_GTSFREQ_Pos 10U /*!< ITM TCR: Global timestamp frequency Position */\r
-#define ITM_TCR_GTSFREQ_Msk (3UL << ITM_TCR_GTSFREQ_Pos) /*!< ITM TCR: Global timestamp frequency Mask */\r
-\r
-#define ITM_TCR_TSPrescale_Pos 8U /*!< ITM TCR: TSPrescale Position */\r
-#define ITM_TCR_TSPrescale_Msk (3UL << ITM_TCR_TSPrescale_Pos) /*!< ITM TCR: TSPrescale Mask */\r
-\r
-#define ITM_TCR_SWOENA_Pos 4U /*!< ITM TCR: SWOENA Position */\r
-#define ITM_TCR_SWOENA_Msk (1UL << ITM_TCR_SWOENA_Pos) /*!< ITM TCR: SWOENA Mask */\r
-\r
-#define ITM_TCR_DWTENA_Pos 3U /*!< ITM TCR: DWTENA Position */\r
-#define ITM_TCR_DWTENA_Msk (1UL << ITM_TCR_DWTENA_Pos) /*!< ITM TCR: DWTENA Mask */\r
-\r
-#define ITM_TCR_SYNCENA_Pos 2U /*!< ITM TCR: SYNCENA Position */\r
-#define ITM_TCR_SYNCENA_Msk (1UL << ITM_TCR_SYNCENA_Pos) /*!< ITM TCR: SYNCENA Mask */\r
-\r
-#define ITM_TCR_TSENA_Pos 1U /*!< ITM TCR: TSENA Position */\r
-#define ITM_TCR_TSENA_Msk (1UL << ITM_TCR_TSENA_Pos) /*!< ITM TCR: TSENA Mask */\r
-\r
-#define ITM_TCR_ITMENA_Pos 0U /*!< ITM TCR: ITM Enable bit Position */\r
-#define ITM_TCR_ITMENA_Msk (1UL /*<< ITM_TCR_ITMENA_Pos*/) /*!< ITM TCR: ITM Enable bit Mask */\r
-\r
-/* ITM Integration Write Register Definitions */\r
-#define ITM_IWR_ATVALIDM_Pos 0U /*!< ITM IWR: ATVALIDM Position */\r
-#define ITM_IWR_ATVALIDM_Msk (1UL /*<< ITM_IWR_ATVALIDM_Pos*/) /*!< ITM IWR: ATVALIDM Mask */\r
-\r
-/* ITM Integration Read Register Definitions */\r
-#define ITM_IRR_ATREADYM_Pos 0U /*!< ITM IRR: ATREADYM Position */\r
-#define ITM_IRR_ATREADYM_Msk (1UL /*<< ITM_IRR_ATREADYM_Pos*/) /*!< ITM IRR: ATREADYM Mask */\r
-\r
-/* ITM Integration Mode Control Register Definitions */\r
-#define ITM_IMCR_INTEGRATION_Pos 0U /*!< ITM IMCR: INTEGRATION Position */\r
-#define ITM_IMCR_INTEGRATION_Msk (1UL /*<< ITM_IMCR_INTEGRATION_Pos*/) /*!< ITM IMCR: INTEGRATION Mask */\r
-\r
-/* ITM Lock Status Register Definitions */\r
-#define ITM_LSR_ByteAcc_Pos 2U /*!< ITM LSR: ByteAcc Position */\r
-#define ITM_LSR_ByteAcc_Msk (1UL << ITM_LSR_ByteAcc_Pos) /*!< ITM LSR: ByteAcc Mask */\r
-\r
-#define ITM_LSR_Access_Pos 1U /*!< ITM LSR: Access Position */\r
-#define ITM_LSR_Access_Msk (1UL << ITM_LSR_Access_Pos) /*!< ITM LSR: Access Mask */\r
-\r
-#define ITM_LSR_Present_Pos 0U /*!< ITM LSR: Present Position */\r
-#define ITM_LSR_Present_Msk (1UL /*<< ITM_LSR_Present_Pos*/) /*!< ITM LSR: Present Mask */\r
-\r
-/*@}*/ /* end of group CMSIS_ITM */\r
-\r
-\r
-/**\r
- \ingroup CMSIS_core_register\r
- \defgroup CMSIS_DWT Data Watchpoint and Trace (DWT)\r
- \brief Type definitions for the Data Watchpoint and Trace (DWT)\r
- @{\r
- */\r
-\r
-/**\r
- \brief Structure type to access the Data Watchpoint and Trace Register (DWT).\r
- */\r
-typedef struct\r
-{\r
- __IOM uint32_t CTRL; /*!< Offset: 0x000 (R/W) Control Register */\r
- __IOM uint32_t CYCCNT; /*!< Offset: 0x004 (R/W) Cycle Count Register */\r
- __IOM uint32_t CPICNT; /*!< Offset: 0x008 (R/W) CPI Count Register */\r
- __IOM uint32_t EXCCNT; /*!< Offset: 0x00C (R/W) Exception Overhead Count Register */\r
- __IOM uint32_t SLEEPCNT; /*!< Offset: 0x010 (R/W) Sleep Count Register */\r
- __IOM uint32_t LSUCNT; /*!< Offset: 0x014 (R/W) LSU Count Register */\r
- __IOM uint32_t FOLDCNT; /*!< Offset: 0x018 (R/W) Folded-instruction Count Register */\r
- __IM uint32_t PCSR; /*!< Offset: 0x01C (R/ ) Program Counter Sample Register */\r
- __IOM uint32_t COMP0; /*!< Offset: 0x020 (R/W) Comparator Register 0 */\r
- __IOM uint32_t MASK0; /*!< Offset: 0x024 (R/W) Mask Register 0 */\r
- __IOM uint32_t FUNCTION0; /*!< Offset: 0x028 (R/W) Function Register 0 */\r
- uint32_t RESERVED0[1U];\r
- __IOM uint32_t COMP1; /*!< Offset: 0x030 (R/W) Comparator Register 1 */\r
- __IOM uint32_t MASK1; /*!< Offset: 0x034 (R/W) Mask Register 1 */\r
- __IOM uint32_t FUNCTION1; /*!< Offset: 0x038 (R/W) Function Register 1 */\r
- uint32_t RESERVED1[1U];\r
- __IOM uint32_t COMP2; /*!< Offset: 0x040 (R/W) Comparator Register 2 */\r
- __IOM uint32_t MASK2; /*!< Offset: 0x044 (R/W) Mask Register 2 */\r
- __IOM uint32_t FUNCTION2; /*!< Offset: 0x048 (R/W) Function Register 2 */\r
- uint32_t RESERVED2[1U];\r
- __IOM uint32_t COMP3; /*!< Offset: 0x050 (R/W) Comparator Register 3 */\r
- __IOM uint32_t MASK3; /*!< Offset: 0x054 (R/W) Mask Register 3 */\r
- __IOM uint32_t FUNCTION3; /*!< Offset: 0x058 (R/W) Function Register 3 */\r
-} DWT_Type;\r
-\r
-/* DWT Control Register Definitions */\r
-#define DWT_CTRL_NUMCOMP_Pos 28U /*!< DWT CTRL: NUMCOMP Position */\r
-#define DWT_CTRL_NUMCOMP_Msk (0xFUL << DWT_CTRL_NUMCOMP_Pos) /*!< DWT CTRL: NUMCOMP Mask */\r
-\r
-#define DWT_CTRL_NOTRCPKT_Pos 27U /*!< DWT CTRL: NOTRCPKT Position */\r
-#define DWT_CTRL_NOTRCPKT_Msk (0x1UL << DWT_CTRL_NOTRCPKT_Pos) /*!< DWT CTRL: NOTRCPKT Mask */\r
-\r
-#define DWT_CTRL_NOEXTTRIG_Pos 26U /*!< DWT CTRL: NOEXTTRIG Position */\r
-#define DWT_CTRL_NOEXTTRIG_Msk (0x1UL << DWT_CTRL_NOEXTTRIG_Pos) /*!< DWT CTRL: NOEXTTRIG Mask */\r
-\r
-#define DWT_CTRL_NOCYCCNT_Pos 25U /*!< DWT CTRL: NOCYCCNT Position */\r
-#define DWT_CTRL_NOCYCCNT_Msk (0x1UL << DWT_CTRL_NOCYCCNT_Pos) /*!< DWT CTRL: NOCYCCNT Mask */\r
-\r
-#define DWT_CTRL_NOPRFCNT_Pos 24U /*!< DWT CTRL: NOPRFCNT Position */\r
-#define DWT_CTRL_NOPRFCNT_Msk (0x1UL << DWT_CTRL_NOPRFCNT_Pos) /*!< DWT CTRL: NOPRFCNT Mask */\r
-\r
-#define DWT_CTRL_CYCEVTENA_Pos 22U /*!< DWT CTRL: CYCEVTENA Position */\r
-#define DWT_CTRL_CYCEVTENA_Msk (0x1UL << DWT_CTRL_CYCEVTENA_Pos) /*!< DWT CTRL: CYCEVTENA Mask */\r
-\r
-#define DWT_CTRL_FOLDEVTENA_Pos 21U /*!< DWT CTRL: FOLDEVTENA Position */\r
-#define DWT_CTRL_FOLDEVTENA_Msk (0x1UL << DWT_CTRL_FOLDEVTENA_Pos) /*!< DWT CTRL: FOLDEVTENA Mask */\r
-\r
-#define DWT_CTRL_LSUEVTENA_Pos 20U /*!< DWT CTRL: LSUEVTENA Position */\r
-#define DWT_CTRL_LSUEVTENA_Msk (0x1UL << DWT_CTRL_LSUEVTENA_Pos) /*!< DWT CTRL: LSUEVTENA Mask */\r
-\r
-#define DWT_CTRL_SLEEPEVTENA_Pos 19U /*!< DWT CTRL: SLEEPEVTENA Position */\r
-#define DWT_CTRL_SLEEPEVTENA_Msk (0x1UL << DWT_CTRL_SLEEPEVTENA_Pos) /*!< DWT CTRL: SLEEPEVTENA Mask */\r
-\r
-#define DWT_CTRL_EXCEVTENA_Pos 18U /*!< DWT CTRL: EXCEVTENA Position */\r
-#define DWT_CTRL_EXCEVTENA_Msk (0x1UL << DWT_CTRL_EXCEVTENA_Pos) /*!< DWT CTRL: EXCEVTENA Mask */\r
-\r
-#define DWT_CTRL_CPIEVTENA_Pos 17U /*!< DWT CTRL: CPIEVTENA Position */\r
-#define DWT_CTRL_CPIEVTENA_Msk (0x1UL << DWT_CTRL_CPIEVTENA_Pos) /*!< DWT CTRL: CPIEVTENA Mask */\r
-\r
-#define DWT_CTRL_EXCTRCENA_Pos 16U /*!< DWT CTRL: EXCTRCENA Position */\r
-#define DWT_CTRL_EXCTRCENA_Msk (0x1UL << DWT_CTRL_EXCTRCENA_Pos) /*!< DWT CTRL: EXCTRCENA Mask */\r
-\r
-#define DWT_CTRL_PCSAMPLENA_Pos 12U /*!< DWT CTRL: PCSAMPLENA Position */\r
-#define DWT_CTRL_PCSAMPLENA_Msk (0x1UL << DWT_CTRL_PCSAMPLENA_Pos) /*!< DWT CTRL: PCSAMPLENA Mask */\r
-\r
-#define DWT_CTRL_SYNCTAP_Pos 10U /*!< DWT CTRL: SYNCTAP Position */\r
-#define DWT_CTRL_SYNCTAP_Msk (0x3UL << DWT_CTRL_SYNCTAP_Pos) /*!< DWT CTRL: SYNCTAP Mask */\r
-\r
-#define DWT_CTRL_CYCTAP_Pos 9U /*!< DWT CTRL: CYCTAP Position */\r
-#define DWT_CTRL_CYCTAP_Msk (0x1UL << DWT_CTRL_CYCTAP_Pos) /*!< DWT CTRL: CYCTAP Mask */\r
-\r
-#define DWT_CTRL_POSTINIT_Pos 5U /*!< DWT CTRL: POSTINIT Position */\r
-#define DWT_CTRL_POSTINIT_Msk (0xFUL << DWT_CTRL_POSTINIT_Pos) /*!< DWT CTRL: POSTINIT Mask */\r
-\r
-#define DWT_CTRL_POSTPRESET_Pos 1U /*!< DWT CTRL: POSTPRESET Position */\r
-#define DWT_CTRL_POSTPRESET_Msk (0xFUL << DWT_CTRL_POSTPRESET_Pos) /*!< DWT CTRL: POSTPRESET Mask */\r
-\r
-#define DWT_CTRL_CYCCNTENA_Pos 0U /*!< DWT CTRL: CYCCNTENA Position */\r
-#define DWT_CTRL_CYCCNTENA_Msk (0x1UL /*<< DWT_CTRL_CYCCNTENA_Pos*/) /*!< DWT CTRL: CYCCNTENA Mask */\r
-\r
-/* DWT CPI Count Register Definitions */\r
-#define DWT_CPICNT_CPICNT_Pos 0U /*!< DWT CPICNT: CPICNT Position */\r
-#define DWT_CPICNT_CPICNT_Msk (0xFFUL /*<< DWT_CPICNT_CPICNT_Pos*/) /*!< DWT CPICNT: CPICNT Mask */\r
-\r
-/* DWT Exception Overhead Count Register Definitions */\r
-#define DWT_EXCCNT_EXCCNT_Pos 0U /*!< DWT EXCCNT: EXCCNT Position */\r
-#define DWT_EXCCNT_EXCCNT_Msk (0xFFUL /*<< DWT_EXCCNT_EXCCNT_Pos*/) /*!< DWT EXCCNT: EXCCNT Mask */\r
-\r
-/* DWT Sleep Count Register Definitions */\r
-#define DWT_SLEEPCNT_SLEEPCNT_Pos 0U /*!< DWT SLEEPCNT: SLEEPCNT Position */\r
-#define DWT_SLEEPCNT_SLEEPCNT_Msk (0xFFUL /*<< DWT_SLEEPCNT_SLEEPCNT_Pos*/) /*!< DWT SLEEPCNT: SLEEPCNT Mask */\r
-\r
-/* DWT LSU Count Register Definitions */\r
-#define DWT_LSUCNT_LSUCNT_Pos 0U /*!< DWT LSUCNT: LSUCNT Position */\r
-#define DWT_LSUCNT_LSUCNT_Msk (0xFFUL /*<< DWT_LSUCNT_LSUCNT_Pos*/) /*!< DWT LSUCNT: LSUCNT Mask */\r
-\r
-/* DWT Folded-instruction Count Register Definitions */\r
-#define DWT_FOLDCNT_FOLDCNT_Pos 0U /*!< DWT FOLDCNT: FOLDCNT Position */\r
-#define DWT_FOLDCNT_FOLDCNT_Msk (0xFFUL /*<< DWT_FOLDCNT_FOLDCNT_Pos*/) /*!< DWT FOLDCNT: FOLDCNT Mask */\r
-\r
-/* DWT Comparator Mask Register Definitions */\r
-#define DWT_MASK_MASK_Pos 0U /*!< DWT MASK: MASK Position */\r
-#define DWT_MASK_MASK_Msk (0x1FUL /*<< DWT_MASK_MASK_Pos*/) /*!< DWT MASK: MASK Mask */\r
-\r
-/* DWT Comparator Function Register Definitions */\r
-#define DWT_FUNCTION_MATCHED_Pos 24U /*!< DWT FUNCTION: MATCHED Position */\r
-#define DWT_FUNCTION_MATCHED_Msk (0x1UL << DWT_FUNCTION_MATCHED_Pos) /*!< DWT FUNCTION: MATCHED Mask */\r
-\r
-#define DWT_FUNCTION_DATAVADDR1_Pos 16U /*!< DWT FUNCTION: DATAVADDR1 Position */\r
-#define DWT_FUNCTION_DATAVADDR1_Msk (0xFUL << DWT_FUNCTION_DATAVADDR1_Pos) /*!< DWT FUNCTION: DATAVADDR1 Mask */\r
-\r
-#define DWT_FUNCTION_DATAVADDR0_Pos 12U /*!< DWT FUNCTION: DATAVADDR0 Position */\r
-#define DWT_FUNCTION_DATAVADDR0_Msk (0xFUL << DWT_FUNCTION_DATAVADDR0_Pos) /*!< DWT FUNCTION: DATAVADDR0 Mask */\r
-\r
-#define DWT_FUNCTION_DATAVSIZE_Pos 10U /*!< DWT FUNCTION: DATAVSIZE Position */\r
-#define DWT_FUNCTION_DATAVSIZE_Msk (0x3UL << DWT_FUNCTION_DATAVSIZE_Pos) /*!< DWT FUNCTION: DATAVSIZE Mask */\r
-\r
-#define DWT_FUNCTION_LNK1ENA_Pos 9U /*!< DWT FUNCTION: LNK1ENA Position */\r
-#define DWT_FUNCTION_LNK1ENA_Msk (0x1UL << DWT_FUNCTION_LNK1ENA_Pos) /*!< DWT FUNCTION: LNK1ENA Mask */\r
-\r
-#define DWT_FUNCTION_DATAVMATCH_Pos 8U /*!< DWT FUNCTION: DATAVMATCH Position */\r
-#define DWT_FUNCTION_DATAVMATCH_Msk (0x1UL << DWT_FUNCTION_DATAVMATCH_Pos) /*!< DWT FUNCTION: DATAVMATCH Mask */\r
-\r
-#define DWT_FUNCTION_CYCMATCH_Pos 7U /*!< DWT FUNCTION: CYCMATCH Position */\r
-#define DWT_FUNCTION_CYCMATCH_Msk (0x1UL << DWT_FUNCTION_CYCMATCH_Pos) /*!< DWT FUNCTION: CYCMATCH Mask */\r
-\r
-#define DWT_FUNCTION_EMITRANGE_Pos 5U /*!< DWT FUNCTION: EMITRANGE Position */\r
-#define DWT_FUNCTION_EMITRANGE_Msk (0x1UL << DWT_FUNCTION_EMITRANGE_Pos) /*!< DWT FUNCTION: EMITRANGE Mask */\r
-\r
-#define DWT_FUNCTION_FUNCTION_Pos 0U /*!< DWT FUNCTION: FUNCTION Position */\r
-#define DWT_FUNCTION_FUNCTION_Msk (0xFUL /*<< DWT_FUNCTION_FUNCTION_Pos*/) /*!< DWT FUNCTION: FUNCTION Mask */\r
-\r
-/*@}*/ /* end of group CMSIS_DWT */\r
-\r
-\r
-/**\r
- \ingroup CMSIS_core_register\r
- \defgroup CMSIS_TPI Trace Port Interface (TPI)\r
- \brief Type definitions for the Trace Port Interface (TPI)\r
- @{\r
- */\r
-\r
-/**\r
- \brief Structure type to access the Trace Port Interface Register (TPI).\r
- */\r
-typedef struct\r
-{\r
- __IOM uint32_t SSPSR; /*!< Offset: 0x000 (R/ ) Supported Parallel Port Size Register */\r
- __IOM uint32_t CSPSR; /*!< Offset: 0x004 (R/W) Current Parallel Port Size Register */\r
- uint32_t RESERVED0[2U];\r
- __IOM uint32_t ACPR; /*!< Offset: 0x010 (R/W) Asynchronous Clock Prescaler Register */\r
- uint32_t RESERVED1[55U];\r
- __IOM uint32_t SPPR; /*!< Offset: 0x0F0 (R/W) Selected Pin Protocol Register */\r
- uint32_t RESERVED2[131U];\r
- __IM uint32_t FFSR; /*!< Offset: 0x300 (R/ ) Formatter and Flush Status Register */\r
- __IOM uint32_t FFCR; /*!< Offset: 0x304 (R/W) Formatter and Flush Control Register */\r
- __IM uint32_t FSCR; /*!< Offset: 0x308 (R/ ) Formatter Synchronization Counter Register */\r
- uint32_t RESERVED3[759U];\r
- __IM uint32_t TRIGGER; /*!< Offset: 0xEE8 (R/ ) TRIGGER */\r
- __IM uint32_t FIFO0; /*!< Offset: 0xEEC (R/ ) Integration ETM Data */\r
- __IM uint32_t ITATBCTR2; /*!< Offset: 0xEF0 (R/ ) ITATBCTR2 */\r
- uint32_t RESERVED4[1U];\r
- __IM uint32_t ITATBCTR0; /*!< Offset: 0xEF8 (R/ ) ITATBCTR0 */\r
- __IM uint32_t FIFO1; /*!< Offset: 0xEFC (R/ ) Integration ITM Data */\r
- __IOM uint32_t ITCTRL; /*!< Offset: 0xF00 (R/W) Integration Mode Control */\r
- uint32_t RESERVED5[39U];\r
- __IOM uint32_t CLAIMSET; /*!< Offset: 0xFA0 (R/W) Claim tag set */\r
- __IOM uint32_t CLAIMCLR; /*!< Offset: 0xFA4 (R/W) Claim tag clear */\r
- uint32_t RESERVED7[8U];\r
- __IM uint32_t DEVID; /*!< Offset: 0xFC8 (R/ ) TPIU_DEVID */\r
- __IM uint32_t DEVTYPE; /*!< Offset: 0xFCC (R/ ) TPIU_DEVTYPE */\r
-} TPI_Type;\r
-\r
-/* TPI Asynchronous Clock Prescaler Register Definitions */\r
-#define TPI_ACPR_PRESCALER_Pos 0U /*!< TPI ACPR: PRESCALER Position */\r
-#define TPI_ACPR_PRESCALER_Msk (0x1FFFUL /*<< TPI_ACPR_PRESCALER_Pos*/) /*!< TPI ACPR: PRESCALER Mask */\r
-\r
-/* TPI Selected Pin Protocol Register Definitions */\r
-#define TPI_SPPR_TXMODE_Pos 0U /*!< TPI SPPR: TXMODE Position */\r
-#define TPI_SPPR_TXMODE_Msk (0x3UL /*<< TPI_SPPR_TXMODE_Pos*/) /*!< TPI SPPR: TXMODE Mask */\r
-\r
-/* TPI Formatter and Flush Status Register Definitions */\r
-#define TPI_FFSR_FtNonStop_Pos 3U /*!< TPI FFSR: FtNonStop Position */\r
-#define TPI_FFSR_FtNonStop_Msk (0x1UL << TPI_FFSR_FtNonStop_Pos) /*!< TPI FFSR: FtNonStop Mask */\r
-\r
-#define TPI_FFSR_TCPresent_Pos 2U /*!< TPI FFSR: TCPresent Position */\r
-#define TPI_FFSR_TCPresent_Msk (0x1UL << TPI_FFSR_TCPresent_Pos) /*!< TPI FFSR: TCPresent Mask */\r
-\r
-#define TPI_FFSR_FtStopped_Pos 1U /*!< TPI FFSR: FtStopped Position */\r
-#define TPI_FFSR_FtStopped_Msk (0x1UL << TPI_FFSR_FtStopped_Pos) /*!< TPI FFSR: FtStopped Mask */\r
-\r
-#define TPI_FFSR_FlInProg_Pos 0U /*!< TPI FFSR: FlInProg Position */\r
-#define TPI_FFSR_FlInProg_Msk (0x1UL /*<< TPI_FFSR_FlInProg_Pos*/) /*!< TPI FFSR: FlInProg Mask */\r
-\r
-/* TPI Formatter and Flush Control Register Definitions */\r
-#define TPI_FFCR_TrigIn_Pos 8U /*!< TPI FFCR: TrigIn Position */\r
-#define TPI_FFCR_TrigIn_Msk (0x1UL << TPI_FFCR_TrigIn_Pos) /*!< TPI FFCR: TrigIn Mask */\r
-\r
-#define TPI_FFCR_EnFCont_Pos 1U /*!< TPI FFCR: EnFCont Position */\r
-#define TPI_FFCR_EnFCont_Msk (0x1UL << TPI_FFCR_EnFCont_Pos) /*!< TPI FFCR: EnFCont Mask */\r
-\r
-/* TPI TRIGGER Register Definitions */\r
-#define TPI_TRIGGER_TRIGGER_Pos 0U /*!< TPI TRIGGER: TRIGGER Position */\r
-#define TPI_TRIGGER_TRIGGER_Msk (0x1UL /*<< TPI_TRIGGER_TRIGGER_Pos*/) /*!< TPI TRIGGER: TRIGGER Mask */\r
-\r
-/* TPI Integration ETM Data Register Definitions (FIFO0) */\r
-#define TPI_FIFO0_ITM_ATVALID_Pos 29U /*!< TPI FIFO0: ITM_ATVALID Position */\r
-#define TPI_FIFO0_ITM_ATVALID_Msk (0x3UL << TPI_FIFO0_ITM_ATVALID_Pos) /*!< TPI FIFO0: ITM_ATVALID Mask */\r
-\r
-#define TPI_FIFO0_ITM_bytecount_Pos 27U /*!< TPI FIFO0: ITM_bytecount Position */\r
-#define TPI_FIFO0_ITM_bytecount_Msk (0x3UL << TPI_FIFO0_ITM_bytecount_Pos) /*!< TPI FIFO0: ITM_bytecount Mask */\r
-\r
-#define TPI_FIFO0_ETM_ATVALID_Pos 26U /*!< TPI FIFO0: ETM_ATVALID Position */\r
-#define TPI_FIFO0_ETM_ATVALID_Msk (0x3UL << TPI_FIFO0_ETM_ATVALID_Pos) /*!< TPI FIFO0: ETM_ATVALID Mask */\r
-\r
-#define TPI_FIFO0_ETM_bytecount_Pos 24U /*!< TPI FIFO0: ETM_bytecount Position */\r
-#define TPI_FIFO0_ETM_bytecount_Msk (0x3UL << TPI_FIFO0_ETM_bytecount_Pos) /*!< TPI FIFO0: ETM_bytecount Mask */\r
-\r
-#define TPI_FIFO0_ETM2_Pos 16U /*!< TPI FIFO0: ETM2 Position */\r
-#define TPI_FIFO0_ETM2_Msk (0xFFUL << TPI_FIFO0_ETM2_Pos) /*!< TPI FIFO0: ETM2 Mask */\r
-\r
-#define TPI_FIFO0_ETM1_Pos 8U /*!< TPI FIFO0: ETM1 Position */\r
-#define TPI_FIFO0_ETM1_Msk (0xFFUL << TPI_FIFO0_ETM1_Pos) /*!< TPI FIFO0: ETM1 Mask */\r
-\r
-#define TPI_FIFO0_ETM0_Pos 0U /*!< TPI FIFO0: ETM0 Position */\r
-#define TPI_FIFO0_ETM0_Msk (0xFFUL /*<< TPI_FIFO0_ETM0_Pos*/) /*!< TPI FIFO0: ETM0 Mask */\r
-\r
-/* TPI ITATBCTR2 Register Definitions */\r
-#define TPI_ITATBCTR2_ATREADY_Pos 0U /*!< TPI ITATBCTR2: ATREADY Position */\r
-#define TPI_ITATBCTR2_ATREADY_Msk (0x1UL /*<< TPI_ITATBCTR2_ATREADY_Pos*/) /*!< TPI ITATBCTR2: ATREADY Mask */\r
-\r
-/* TPI Integration ITM Data Register Definitions (FIFO1) */\r
-#define TPI_FIFO1_ITM_ATVALID_Pos 29U /*!< TPI FIFO1: ITM_ATVALID Position */\r
-#define TPI_FIFO1_ITM_ATVALID_Msk (0x3UL << TPI_FIFO1_ITM_ATVALID_Pos) /*!< TPI FIFO1: ITM_ATVALID Mask */\r
-\r
-#define TPI_FIFO1_ITM_bytecount_Pos 27U /*!< TPI FIFO1: ITM_bytecount Position */\r
-#define TPI_FIFO1_ITM_bytecount_Msk (0x3UL << TPI_FIFO1_ITM_bytecount_Pos) /*!< TPI FIFO1: ITM_bytecount Mask */\r
-\r
-#define TPI_FIFO1_ETM_ATVALID_Pos 26U /*!< TPI FIFO1: ETM_ATVALID Position */\r
-#define TPI_FIFO1_ETM_ATVALID_Msk (0x3UL << TPI_FIFO1_ETM_ATVALID_Pos) /*!< TPI FIFO1: ETM_ATVALID Mask */\r
-\r
-#define TPI_FIFO1_ETM_bytecount_Pos 24U /*!< TPI FIFO1: ETM_bytecount Position */\r
-#define TPI_FIFO1_ETM_bytecount_Msk (0x3UL << TPI_FIFO1_ETM_bytecount_Pos) /*!< TPI FIFO1: ETM_bytecount Mask */\r
-\r
-#define TPI_FIFO1_ITM2_Pos 16U /*!< TPI FIFO1: ITM2 Position */\r
-#define TPI_FIFO1_ITM2_Msk (0xFFUL << TPI_FIFO1_ITM2_Pos) /*!< TPI FIFO1: ITM2 Mask */\r
-\r
-#define TPI_FIFO1_ITM1_Pos 8U /*!< TPI FIFO1: ITM1 Position */\r
-#define TPI_FIFO1_ITM1_Msk (0xFFUL << TPI_FIFO1_ITM1_Pos) /*!< TPI FIFO1: ITM1 Mask */\r
-\r
-#define TPI_FIFO1_ITM0_Pos 0U /*!< TPI FIFO1: ITM0 Position */\r
-#define TPI_FIFO1_ITM0_Msk (0xFFUL /*<< TPI_FIFO1_ITM0_Pos*/) /*!< TPI FIFO1: ITM0 Mask */\r
-\r
-/* TPI ITATBCTR0 Register Definitions */\r
-#define TPI_ITATBCTR0_ATREADY_Pos 0U /*!< TPI ITATBCTR0: ATREADY Position */\r
-#define TPI_ITATBCTR0_ATREADY_Msk (0x1UL /*<< TPI_ITATBCTR0_ATREADY_Pos*/) /*!< TPI ITATBCTR0: ATREADY Mask */\r
-\r
-/* TPI Integration Mode Control Register Definitions */\r
-#define TPI_ITCTRL_Mode_Pos 0U /*!< TPI ITCTRL: Mode Position */\r
-#define TPI_ITCTRL_Mode_Msk (0x1UL /*<< TPI_ITCTRL_Mode_Pos*/) /*!< TPI ITCTRL: Mode Mask */\r
-\r
-/* TPI DEVID Register Definitions */\r
-#define TPI_DEVID_NRZVALID_Pos 11U /*!< TPI DEVID: NRZVALID Position */\r
-#define TPI_DEVID_NRZVALID_Msk (0x1UL << TPI_DEVID_NRZVALID_Pos) /*!< TPI DEVID: NRZVALID Mask */\r
-\r
-#define TPI_DEVID_MANCVALID_Pos 10U /*!< TPI DEVID: MANCVALID Position */\r
-#define TPI_DEVID_MANCVALID_Msk (0x1UL << TPI_DEVID_MANCVALID_Pos) /*!< TPI DEVID: MANCVALID Mask */\r
-\r
-#define TPI_DEVID_PTINVALID_Pos 9U /*!< TPI DEVID: PTINVALID Position */\r
-#define TPI_DEVID_PTINVALID_Msk (0x1UL << TPI_DEVID_PTINVALID_Pos) /*!< TPI DEVID: PTINVALID Mask */\r
-\r
-#define TPI_DEVID_MinBufSz_Pos 6U /*!< TPI DEVID: MinBufSz Position */\r
-#define TPI_DEVID_MinBufSz_Msk (0x7UL << TPI_DEVID_MinBufSz_Pos) /*!< TPI DEVID: MinBufSz Mask */\r
-\r
-#define TPI_DEVID_AsynClkIn_Pos 5U /*!< TPI DEVID: AsynClkIn Position */\r
-#define TPI_DEVID_AsynClkIn_Msk (0x1UL << TPI_DEVID_AsynClkIn_Pos) /*!< TPI DEVID: AsynClkIn Mask */\r
-\r
-#define TPI_DEVID_NrTraceInput_Pos 0U /*!< TPI DEVID: NrTraceInput Position */\r
-#define TPI_DEVID_NrTraceInput_Msk (0x1FUL /*<< TPI_DEVID_NrTraceInput_Pos*/) /*!< TPI DEVID: NrTraceInput Mask */\r
-\r
-/* TPI DEVTYPE Register Definitions */\r
-#define TPI_DEVTYPE_MajorType_Pos 4U /*!< TPI DEVTYPE: MajorType Position */\r
-#define TPI_DEVTYPE_MajorType_Msk (0xFUL << TPI_DEVTYPE_MajorType_Pos) /*!< TPI DEVTYPE: MajorType Mask */\r
-\r
-#define TPI_DEVTYPE_SubType_Pos 0U /*!< TPI DEVTYPE: SubType Position */\r
-#define TPI_DEVTYPE_SubType_Msk (0xFUL /*<< TPI_DEVTYPE_SubType_Pos*/) /*!< TPI DEVTYPE: SubType Mask */\r
-\r
-/*@}*/ /* end of group CMSIS_TPI */\r
-\r
-\r
-#if (__MPU_PRESENT == 1U)\r
-/**\r
- \ingroup CMSIS_core_register\r
- \defgroup CMSIS_MPU Memory Protection Unit (MPU)\r
- \brief Type definitions for the Memory Protection Unit (MPU)\r
- @{\r
- */\r
-\r
-/**\r
- \brief Structure type to access the Memory Protection Unit (MPU).\r
- */\r
-typedef struct\r
-{\r
- __IM uint32_t TYPE; /*!< Offset: 0x000 (R/ ) MPU Type Register */\r
- __IOM uint32_t CTRL; /*!< Offset: 0x004 (R/W) MPU Control Register */\r
- __IOM uint32_t RNR; /*!< Offset: 0x008 (R/W) MPU Region Number Register */\r
- __IOM uint32_t RBAR; /*!< Offset: 0x00C (R/W) MPU Region Base Address Register */\r
- __IOM uint32_t RASR; /*!< Offset: 0x010 (R/W) MPU Region Attribute and Size Register */\r
- __IOM uint32_t RBAR_A1; /*!< Offset: 0x014 (R/W) MPU Alias 1 Region Base Address Register */\r
- __IOM uint32_t RASR_A1; /*!< Offset: 0x018 (R/W) MPU Alias 1 Region Attribute and Size Register */\r
- __IOM uint32_t RBAR_A2; /*!< Offset: 0x01C (R/W) MPU Alias 2 Region Base Address Register */\r
- __IOM uint32_t RASR_A2; /*!< Offset: 0x020 (R/W) MPU Alias 2 Region Attribute and Size Register */\r
- __IOM uint32_t RBAR_A3; /*!< Offset: 0x024 (R/W) MPU Alias 3 Region Base Address Register */\r
- __IOM uint32_t RASR_A3; /*!< Offset: 0x028 (R/W) MPU Alias 3 Region Attribute and Size Register */\r
-} MPU_Type;\r
-\r
-/* MPU Type Register Definitions */\r
-#define MPU_TYPE_IREGION_Pos 16U /*!< MPU TYPE: IREGION Position */\r
-#define MPU_TYPE_IREGION_Msk (0xFFUL << MPU_TYPE_IREGION_Pos) /*!< MPU TYPE: IREGION Mask */\r
-\r
-#define MPU_TYPE_DREGION_Pos 8U /*!< MPU TYPE: DREGION Position */\r
-#define MPU_TYPE_DREGION_Msk (0xFFUL << MPU_TYPE_DREGION_Pos) /*!< MPU TYPE: DREGION Mask */\r
-\r
-#define MPU_TYPE_SEPARATE_Pos 0U /*!< MPU TYPE: SEPARATE Position */\r
-#define MPU_TYPE_SEPARATE_Msk (1UL /*<< MPU_TYPE_SEPARATE_Pos*/) /*!< MPU TYPE: SEPARATE Mask */\r
-\r
-/* MPU Control Register Definitions */\r
-#define MPU_CTRL_PRIVDEFENA_Pos 2U /*!< MPU CTRL: PRIVDEFENA Position */\r
-#define MPU_CTRL_PRIVDEFENA_Msk (1UL << MPU_CTRL_PRIVDEFENA_Pos) /*!< MPU CTRL: PRIVDEFENA Mask */\r
-\r
-#define MPU_CTRL_HFNMIENA_Pos 1U /*!< MPU CTRL: HFNMIENA Position */\r
-#define MPU_CTRL_HFNMIENA_Msk (1UL << MPU_CTRL_HFNMIENA_Pos) /*!< MPU CTRL: HFNMIENA Mask */\r
-\r
-#define MPU_CTRL_ENABLE_Pos 0U /*!< MPU CTRL: ENABLE Position */\r
-#define MPU_CTRL_ENABLE_Msk (1UL /*<< MPU_CTRL_ENABLE_Pos*/) /*!< MPU CTRL: ENABLE Mask */\r
-\r
-/* MPU Region Number Register Definitions */\r
-#define MPU_RNR_REGION_Pos 0U /*!< MPU RNR: REGION Position */\r
-#define MPU_RNR_REGION_Msk (0xFFUL /*<< MPU_RNR_REGION_Pos*/) /*!< MPU RNR: REGION Mask */\r
-\r
-/* MPU Region Base Address Register Definitions */\r
-#define MPU_RBAR_ADDR_Pos 5U /*!< MPU RBAR: ADDR Position */\r
-#define MPU_RBAR_ADDR_Msk (0x7FFFFFFUL << MPU_RBAR_ADDR_Pos) /*!< MPU RBAR: ADDR Mask */\r
-\r
-#define MPU_RBAR_VALID_Pos 4U /*!< MPU RBAR: VALID Position */\r
-#define MPU_RBAR_VALID_Msk (1UL << MPU_RBAR_VALID_Pos) /*!< MPU RBAR: VALID Mask */\r
-\r
-#define MPU_RBAR_REGION_Pos 0U /*!< MPU RBAR: REGION Position */\r
-#define MPU_RBAR_REGION_Msk (0xFUL /*<< MPU_RBAR_REGION_Pos*/) /*!< MPU RBAR: REGION Mask */\r
-\r
-/* MPU Region Attribute and Size Register Definitions */\r
-#define MPU_RASR_ATTRS_Pos 16U /*!< MPU RASR: MPU Region Attribute field Position */\r
-#define MPU_RASR_ATTRS_Msk (0xFFFFUL << MPU_RASR_ATTRS_Pos) /*!< MPU RASR: MPU Region Attribute field Mask */\r
-\r
-#define MPU_RASR_XN_Pos 28U /*!< MPU RASR: ATTRS.XN Position */\r
-#define MPU_RASR_XN_Msk (1UL << MPU_RASR_XN_Pos) /*!< MPU RASR: ATTRS.XN Mask */\r
-\r
-#define MPU_RASR_AP_Pos 24U /*!< MPU RASR: ATTRS.AP Position */\r
-#define MPU_RASR_AP_Msk (0x7UL << MPU_RASR_AP_Pos) /*!< MPU RASR: ATTRS.AP Mask */\r
-\r
-#define MPU_RASR_TEX_Pos 19U /*!< MPU RASR: ATTRS.TEX Position */\r
-#define MPU_RASR_TEX_Msk (0x7UL << MPU_RASR_TEX_Pos) /*!< MPU RASR: ATTRS.TEX Mask */\r
-\r
-#define MPU_RASR_S_Pos 18U /*!< MPU RASR: ATTRS.S Position */\r
-#define MPU_RASR_S_Msk (1UL << MPU_RASR_S_Pos) /*!< MPU RASR: ATTRS.S Mask */\r
-\r
-#define MPU_RASR_C_Pos 17U /*!< MPU RASR: ATTRS.C Position */\r
-#define MPU_RASR_C_Msk (1UL << MPU_RASR_C_Pos) /*!< MPU RASR: ATTRS.C Mask */\r
-\r
-#define MPU_RASR_B_Pos 16U /*!< MPU RASR: ATTRS.B Position */\r
-#define MPU_RASR_B_Msk (1UL << MPU_RASR_B_Pos) /*!< MPU RASR: ATTRS.B Mask */\r
-\r
-#define MPU_RASR_SRD_Pos 8U /*!< MPU RASR: Sub-Region Disable Position */\r
-#define MPU_RASR_SRD_Msk (0xFFUL << MPU_RASR_SRD_Pos) /*!< MPU RASR: Sub-Region Disable Mask */\r
-\r
-#define MPU_RASR_SIZE_Pos 1U /*!< MPU RASR: Region Size Field Position */\r
-#define MPU_RASR_SIZE_Msk (0x1FUL << MPU_RASR_SIZE_Pos) /*!< MPU RASR: Region Size Field Mask */\r
-\r
-#define MPU_RASR_ENABLE_Pos 0U /*!< MPU RASR: Region enable bit Position */\r
-#define MPU_RASR_ENABLE_Msk (1UL /*<< MPU_RASR_ENABLE_Pos*/) /*!< MPU RASR: Region enable bit Mask */\r
-\r
-/*@} end of group CMSIS_MPU */\r
-#endif\r
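The RNR/RBAR/RASR definitions above are easiest to read alongside a concrete region setup. The following is a minimal sketch, not part of the original header: it assumes an ARMv7-M style MPU, a region base that is suitably aligned for the chosen size, and the __DSB()/__ISB() intrinsics from the compiler-specific CMSIS header to make the new settings take effect.

/* Sketch: program MPU region 0 as a 32 KB, full-access, execute-never region, then enable the MPU. */
static inline void mpu_region0_example(uint32_t region_base)   /* region_base: 32 KB aligned (assumption) */
{
  MPU->RNR  = 0U;                                              /* select region 0 */
  MPU->RBAR = region_base & MPU_RBAR_ADDR_Msk;                 /* region base address */
  MPU->RASR = (0x3UL << MPU_RASR_AP_Pos)                       /* AP = 0b011: read/write for all */
            | MPU_RASR_XN_Msk                                  /* never execute from this region */
            | (14UL << MPU_RASR_SIZE_Pos)                      /* SIZE = 14 -> 2^(14+1) = 32 KB */
            | MPU_RASR_ENABLE_Msk;                             /* enable this region */
  MPU->CTRL = MPU_CTRL_PRIVDEFENA_Msk | MPU_CTRL_ENABLE_Msk;   /* keep default map for privileged code */
  __DSB();                                                     /* ensure the MPU writes complete */
  __ISB();                                                     /* flush the pipeline */
}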
-\r
-\r
-#if (__FPU_PRESENT == 1U)\r
-/**\r
- \ingroup CMSIS_core_register\r
- \defgroup CMSIS_FPU Floating Point Unit (FPU)\r
- \brief Type definitions for the Floating Point Unit (FPU)\r
- @{\r
- */\r
-\r
-/**\r
- \brief Structure type to access the Floating Point Unit (FPU).\r
- */\r
-typedef struct\r
-{\r
- uint32_t RESERVED0[1U];\r
- __IOM uint32_t FPCCR; /*!< Offset: 0x004 (R/W) Floating-Point Context Control Register */\r
- __IOM uint32_t FPCAR; /*!< Offset: 0x008 (R/W) Floating-Point Context Address Register */\r
- __IOM uint32_t FPDSCR; /*!< Offset: 0x00C (R/W) Floating-Point Default Status Control Register */\r
- __IM uint32_t MVFR0; /*!< Offset: 0x010 (R/ ) Media and FP Feature Register 0 */\r
- __IM uint32_t MVFR1; /*!< Offset: 0x014 (R/ ) Media and FP Feature Register 1 */\r
-} FPU_Type;\r
-\r
-/* Floating-Point Context Control Register Definitions */\r
-#define FPU_FPCCR_ASPEN_Pos 31U /*!< FPCCR: ASPEN bit Position */\r
-#define FPU_FPCCR_ASPEN_Msk (1UL << FPU_FPCCR_ASPEN_Pos) /*!< FPCCR: ASPEN bit Mask */\r
-\r
-#define FPU_FPCCR_LSPEN_Pos 30U /*!< FPCCR: LSPEN Position */\r
-#define FPU_FPCCR_LSPEN_Msk (1UL << FPU_FPCCR_LSPEN_Pos) /*!< FPCCR: LSPEN bit Mask */\r
-\r
-#define FPU_FPCCR_MONRDY_Pos 8U /*!< FPCCR: MONRDY Position */\r
-#define FPU_FPCCR_MONRDY_Msk (1UL << FPU_FPCCR_MONRDY_Pos) /*!< FPCCR: MONRDY bit Mask */\r
-\r
-#define FPU_FPCCR_BFRDY_Pos 6U /*!< FPCCR: BFRDY Position */\r
-#define FPU_FPCCR_BFRDY_Msk (1UL << FPU_FPCCR_BFRDY_Pos) /*!< FPCCR: BFRDY bit Mask */\r
-\r
-#define FPU_FPCCR_MMRDY_Pos 5U /*!< FPCCR: MMRDY Position */\r
-#define FPU_FPCCR_MMRDY_Msk (1UL << FPU_FPCCR_MMRDY_Pos) /*!< FPCCR: MMRDY bit Mask */\r
-\r
-#define FPU_FPCCR_HFRDY_Pos 4U /*!< FPCCR: HFRDY Position */\r
-#define FPU_FPCCR_HFRDY_Msk (1UL << FPU_FPCCR_HFRDY_Pos) /*!< FPCCR: HFRDY bit Mask */\r
-\r
-#define FPU_FPCCR_THREAD_Pos 3U /*!< FPCCR: processor mode bit Position */\r
-#define FPU_FPCCR_THREAD_Msk (1UL << FPU_FPCCR_THREAD_Pos) /*!< FPCCR: processor mode active bit Mask */\r
-\r
-#define FPU_FPCCR_USER_Pos 1U /*!< FPCCR: privilege level bit Position */\r
-#define FPU_FPCCR_USER_Msk (1UL << FPU_FPCCR_USER_Pos) /*!< FPCCR: privilege level bit Mask */\r
-\r
-#define FPU_FPCCR_LSPACT_Pos 0U /*!< FPCCR: Lazy state preservation active bit Position */\r
-#define FPU_FPCCR_LSPACT_Msk (1UL /*<< FPU_FPCCR_LSPACT_Pos*/) /*!< FPCCR: Lazy state preservation active bit Mask */\r
-\r
-/* Floating-Point Context Address Register Definitions */\r
-#define FPU_FPCAR_ADDRESS_Pos 3U /*!< FPCAR: ADDRESS bit Position */\r
-#define FPU_FPCAR_ADDRESS_Msk (0x1FFFFFFFUL << FPU_FPCAR_ADDRESS_Pos) /*!< FPCAR: ADDRESS bit Mask */\r
-\r
-/* Floating-Point Default Status Control Register Definitions */\r
-#define FPU_FPDSCR_AHP_Pos 26U /*!< FPDSCR: AHP bit Position */\r
-#define FPU_FPDSCR_AHP_Msk (1UL << FPU_FPDSCR_AHP_Pos) /*!< FPDSCR: AHP bit Mask */\r
-\r
-#define FPU_FPDSCR_DN_Pos 25U /*!< FPDSCR: DN bit Position */\r
-#define FPU_FPDSCR_DN_Msk (1UL << FPU_FPDSCR_DN_Pos) /*!< FPDSCR: DN bit Mask */\r
-\r
-#define FPU_FPDSCR_FZ_Pos 24U /*!< FPDSCR: FZ bit Position */\r
-#define FPU_FPDSCR_FZ_Msk (1UL << FPU_FPDSCR_FZ_Pos) /*!< FPDSCR: FZ bit Mask */\r
-\r
-#define FPU_FPDSCR_RMode_Pos 22U /*!< FPDSCR: RMode bit Position */\r
-#define FPU_FPDSCR_RMode_Msk (3UL << FPU_FPDSCR_RMode_Pos) /*!< FPDSCR: RMode bit Mask */\r
-\r
-/* Media and FP Feature Register 0 Definitions */\r
-#define FPU_MVFR0_FP_rounding_modes_Pos 28U /*!< MVFR0: FP rounding modes bits Position */\r
-#define FPU_MVFR0_FP_rounding_modes_Msk (0xFUL << FPU_MVFR0_FP_rounding_modes_Pos) /*!< MVFR0: FP rounding modes bits Mask */\r
-\r
-#define FPU_MVFR0_Short_vectors_Pos 24U /*!< MVFR0: Short vectors bits Position */\r
-#define FPU_MVFR0_Short_vectors_Msk (0xFUL << FPU_MVFR0_Short_vectors_Pos) /*!< MVFR0: Short vectors bits Mask */\r
-\r
-#define FPU_MVFR0_Square_root_Pos 20U /*!< MVFR0: Square root bits Position */\r
-#define FPU_MVFR0_Square_root_Msk (0xFUL << FPU_MVFR0_Square_root_Pos) /*!< MVFR0: Square root bits Mask */\r
-\r
-#define FPU_MVFR0_Divide_Pos 16U /*!< MVFR0: Divide bits Position */\r
-#define FPU_MVFR0_Divide_Msk (0xFUL << FPU_MVFR0_Divide_Pos) /*!< MVFR0: Divide bits Mask */\r
-\r
-#define FPU_MVFR0_FP_excep_trapping_Pos 12U /*!< MVFR0: FP exception trapping bits Position */\r
-#define FPU_MVFR0_FP_excep_trapping_Msk (0xFUL << FPU_MVFR0_FP_excep_trapping_Pos) /*!< MVFR0: FP exception trapping bits Mask */\r
-\r
-#define FPU_MVFR0_Double_precision_Pos 8U /*!< MVFR0: Double-precision bits Position */\r
-#define FPU_MVFR0_Double_precision_Msk (0xFUL << FPU_MVFR0_Double_precision_Pos) /*!< MVFR0: Double-precision bits Mask */\r
-\r
-#define FPU_MVFR0_Single_precision_Pos 4U /*!< MVFR0: Single-precision bits Position */\r
-#define FPU_MVFR0_Single_precision_Msk (0xFUL << FPU_MVFR0_Single_precision_Pos) /*!< MVFR0: Single-precision bits Mask */\r
-\r
-#define FPU_MVFR0_A_SIMD_registers_Pos 0U /*!< MVFR0: A_SIMD registers bits Position */\r
-#define FPU_MVFR0_A_SIMD_registers_Msk (0xFUL /*<< FPU_MVFR0_A_SIMD_registers_Pos*/) /*!< MVFR0: A_SIMD registers bits Mask */\r
-\r
-/* Media and FP Feature Register 1 Definitions */\r
-#define FPU_MVFR1_FP_fused_MAC_Pos 28U /*!< MVFR1: FP fused MAC bits Position */\r
-#define FPU_MVFR1_FP_fused_MAC_Msk (0xFUL << FPU_MVFR1_FP_fused_MAC_Pos) /*!< MVFR1: FP fused MAC bits Mask */\r
-\r
-#define FPU_MVFR1_FP_HPFP_Pos 24U /*!< MVFR1: FP HPFP bits Position */\r
-#define FPU_MVFR1_FP_HPFP_Msk (0xFUL << FPU_MVFR1_FP_HPFP_Pos) /*!< MVFR1: FP HPFP bits Mask */\r
-\r
-#define FPU_MVFR1_D_NaN_mode_Pos 4U /*!< MVFR1: D_NaN mode bits Position */\r
-#define FPU_MVFR1_D_NaN_mode_Msk (0xFUL << FPU_MVFR1_D_NaN_mode_Pos) /*!< MVFR1: D_NaN mode bits Mask */\r
-\r
-#define FPU_MVFR1_FtZ_mode_Pos 0U /*!< MVFR1: FtZ mode bits Position */\r
-#define FPU_MVFR1_FtZ_mode_Msk (0xFUL /*<< FPU_MVFR1_FtZ_mode_Pos*/) /*!< MVFR1: FtZ mode bits Mask */\r
-\r
-/*@} end of group CMSIS_FPU */\r
-#endif\r
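As a small, hedged illustration of how the MVFR0 field definitions above can be consumed (the FPU instance pointer used here is defined further down in this same header):

/* Sketch: report whether the implemented FPU supports double precision (0 = not supported). */
static inline uint32_t fpu_double_precision_example(void)
{
  return (FPU->MVFR0 & FPU_MVFR0_Double_precision_Msk) >> FPU_MVFR0_Double_precision_Pos;
}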
-\r
-\r
-/**\r
- \ingroup CMSIS_core_register\r
- \defgroup CMSIS_CoreDebug Core Debug Registers (CoreDebug)\r
- \brief Type definitions for the Core Debug Registers\r
- @{\r
- */\r
-\r
-/**\r
- \brief Structure type to access the Core Debug Register (CoreDebug).\r
- */\r
-typedef struct\r
-{\r
- __IOM uint32_t DHCSR; /*!< Offset: 0x000 (R/W) Debug Halting Control and Status Register */\r
- __OM uint32_t DCRSR; /*!< Offset: 0x004 ( /W) Debug Core Register Selector Register */\r
- __IOM uint32_t DCRDR; /*!< Offset: 0x008 (R/W) Debug Core Register Data Register */\r
- __IOM uint32_t DEMCR; /*!< Offset: 0x00C (R/W) Debug Exception and Monitor Control Register */\r
-} CoreDebug_Type;\r
-\r
-/* Debug Halting Control and Status Register Definitions */\r
-#define CoreDebug_DHCSR_DBGKEY_Pos 16U /*!< CoreDebug DHCSR: DBGKEY Position */\r
-#define CoreDebug_DHCSR_DBGKEY_Msk (0xFFFFUL << CoreDebug_DHCSR_DBGKEY_Pos) /*!< CoreDebug DHCSR: DBGKEY Mask */\r
-\r
-#define CoreDebug_DHCSR_S_RESET_ST_Pos 25U /*!< CoreDebug DHCSR: S_RESET_ST Position */\r
-#define CoreDebug_DHCSR_S_RESET_ST_Msk (1UL << CoreDebug_DHCSR_S_RESET_ST_Pos) /*!< CoreDebug DHCSR: S_RESET_ST Mask */\r
-\r
-#define CoreDebug_DHCSR_S_RETIRE_ST_Pos 24U /*!< CoreDebug DHCSR: S_RETIRE_ST Position */\r
-#define CoreDebug_DHCSR_S_RETIRE_ST_Msk (1UL << CoreDebug_DHCSR_S_RETIRE_ST_Pos) /*!< CoreDebug DHCSR: S_RETIRE_ST Mask */\r
-\r
-#define CoreDebug_DHCSR_S_LOCKUP_Pos 19U /*!< CoreDebug DHCSR: S_LOCKUP Position */\r
-#define CoreDebug_DHCSR_S_LOCKUP_Msk (1UL << CoreDebug_DHCSR_S_LOCKUP_Pos) /*!< CoreDebug DHCSR: S_LOCKUP Mask */\r
-\r
-#define CoreDebug_DHCSR_S_SLEEP_Pos 18U /*!< CoreDebug DHCSR: S_SLEEP Position */\r
-#define CoreDebug_DHCSR_S_SLEEP_Msk (1UL << CoreDebug_DHCSR_S_SLEEP_Pos) /*!< CoreDebug DHCSR: S_SLEEP Mask */\r
-\r
-#define CoreDebug_DHCSR_S_HALT_Pos 17U /*!< CoreDebug DHCSR: S_HALT Position */\r
-#define CoreDebug_DHCSR_S_HALT_Msk (1UL << CoreDebug_DHCSR_S_HALT_Pos) /*!< CoreDebug DHCSR: S_HALT Mask */\r
-\r
-#define CoreDebug_DHCSR_S_REGRDY_Pos 16U /*!< CoreDebug DHCSR: S_REGRDY Position */\r
-#define CoreDebug_DHCSR_S_REGRDY_Msk (1UL << CoreDebug_DHCSR_S_REGRDY_Pos) /*!< CoreDebug DHCSR: S_REGRDY Mask */\r
-\r
-#define CoreDebug_DHCSR_C_SNAPSTALL_Pos 5U /*!< CoreDebug DHCSR: C_SNAPSTALL Position */\r
-#define CoreDebug_DHCSR_C_SNAPSTALL_Msk (1UL << CoreDebug_DHCSR_C_SNAPSTALL_Pos) /*!< CoreDebug DHCSR: C_SNAPSTALL Mask */\r
-\r
-#define CoreDebug_DHCSR_C_MASKINTS_Pos 3U /*!< CoreDebug DHCSR: C_MASKINTS Position */\r
-#define CoreDebug_DHCSR_C_MASKINTS_Msk (1UL << CoreDebug_DHCSR_C_MASKINTS_Pos) /*!< CoreDebug DHCSR: C_MASKINTS Mask */\r
-\r
-#define CoreDebug_DHCSR_C_STEP_Pos 2U /*!< CoreDebug DHCSR: C_STEP Position */\r
-#define CoreDebug_DHCSR_C_STEP_Msk (1UL << CoreDebug_DHCSR_C_STEP_Pos) /*!< CoreDebug DHCSR: C_STEP Mask */\r
-\r
-#define CoreDebug_DHCSR_C_HALT_Pos 1U /*!< CoreDebug DHCSR: C_HALT Position */\r
-#define CoreDebug_DHCSR_C_HALT_Msk (1UL << CoreDebug_DHCSR_C_HALT_Pos) /*!< CoreDebug DHCSR: C_HALT Mask */\r
-\r
-#define CoreDebug_DHCSR_C_DEBUGEN_Pos 0U /*!< CoreDebug DHCSR: C_DEBUGEN Position */\r
-#define CoreDebug_DHCSR_C_DEBUGEN_Msk (1UL /*<< CoreDebug_DHCSR_C_DEBUGEN_Pos*/) /*!< CoreDebug DHCSR: C_DEBUGEN Mask */\r
-\r
-/* Debug Core Register Selector Register Definitions */\r
-#define CoreDebug_DCRSR_REGWnR_Pos 16U /*!< CoreDebug DCRSR: REGWnR Position */\r
-#define CoreDebug_DCRSR_REGWnR_Msk (1UL << CoreDebug_DCRSR_REGWnR_Pos) /*!< CoreDebug DCRSR: REGWnR Mask */\r
-\r
-#define CoreDebug_DCRSR_REGSEL_Pos 0U /*!< CoreDebug DCRSR: REGSEL Position */\r
-#define CoreDebug_DCRSR_REGSEL_Msk (0x1FUL /*<< CoreDebug_DCRSR_REGSEL_Pos*/) /*!< CoreDebug DCRSR: REGSEL Mask */\r
-\r
-/* Debug Exception and Monitor Control Register Definitions */\r
-#define CoreDebug_DEMCR_TRCENA_Pos 24U /*!< CoreDebug DEMCR: TRCENA Position */\r
-#define CoreDebug_DEMCR_TRCENA_Msk (1UL << CoreDebug_DEMCR_TRCENA_Pos) /*!< CoreDebug DEMCR: TRCENA Mask */\r
-\r
-#define CoreDebug_DEMCR_MON_REQ_Pos 19U /*!< CoreDebug DEMCR: MON_REQ Position */\r
-#define CoreDebug_DEMCR_MON_REQ_Msk (1UL << CoreDebug_DEMCR_MON_REQ_Pos) /*!< CoreDebug DEMCR: MON_REQ Mask */\r
-\r
-#define CoreDebug_DEMCR_MON_STEP_Pos 18U /*!< CoreDebug DEMCR: MON_STEP Position */\r
-#define CoreDebug_DEMCR_MON_STEP_Msk (1UL << CoreDebug_DEMCR_MON_STEP_Pos) /*!< CoreDebug DEMCR: MON_STEP Mask */\r
-\r
-#define CoreDebug_DEMCR_MON_PEND_Pos 17U /*!< CoreDebug DEMCR: MON_PEND Position */\r
-#define CoreDebug_DEMCR_MON_PEND_Msk (1UL << CoreDebug_DEMCR_MON_PEND_Pos) /*!< CoreDebug DEMCR: MON_PEND Mask */\r
-\r
-#define CoreDebug_DEMCR_MON_EN_Pos 16U /*!< CoreDebug DEMCR: MON_EN Position */\r
-#define CoreDebug_DEMCR_MON_EN_Msk (1UL << CoreDebug_DEMCR_MON_EN_Pos) /*!< CoreDebug DEMCR: MON_EN Mask */\r
-\r
-#define CoreDebug_DEMCR_VC_HARDERR_Pos 10U /*!< CoreDebug DEMCR: VC_HARDERR Position */\r
-#define CoreDebug_DEMCR_VC_HARDERR_Msk (1UL << CoreDebug_DEMCR_VC_HARDERR_Pos) /*!< CoreDebug DEMCR: VC_HARDERR Mask */\r
-\r
-#define CoreDebug_DEMCR_VC_INTERR_Pos 9U /*!< CoreDebug DEMCR: VC_INTERR Position */\r
-#define CoreDebug_DEMCR_VC_INTERR_Msk (1UL << CoreDebug_DEMCR_VC_INTERR_Pos) /*!< CoreDebug DEMCR: VC_INTERR Mask */\r
-\r
-#define CoreDebug_DEMCR_VC_BUSERR_Pos 8U /*!< CoreDebug DEMCR: VC_BUSERR Position */\r
-#define CoreDebug_DEMCR_VC_BUSERR_Msk (1UL << CoreDebug_DEMCR_VC_BUSERR_Pos) /*!< CoreDebug DEMCR: VC_BUSERR Mask */\r
-\r
-#define CoreDebug_DEMCR_VC_STATERR_Pos 7U /*!< CoreDebug DEMCR: VC_STATERR Position */\r
-#define CoreDebug_DEMCR_VC_STATERR_Msk (1UL << CoreDebug_DEMCR_VC_STATERR_Pos) /*!< CoreDebug DEMCR: VC_STATERR Mask */\r
-\r
-#define CoreDebug_DEMCR_VC_CHKERR_Pos 6U /*!< CoreDebug DEMCR: VC_CHKERR Position */\r
-#define CoreDebug_DEMCR_VC_CHKERR_Msk (1UL << CoreDebug_DEMCR_VC_CHKERR_Pos) /*!< CoreDebug DEMCR: VC_CHKERR Mask */\r
-\r
-#define CoreDebug_DEMCR_VC_NOCPERR_Pos 5U /*!< CoreDebug DEMCR: VC_NOCPERR Position */\r
-#define CoreDebug_DEMCR_VC_NOCPERR_Msk (1UL << CoreDebug_DEMCR_VC_NOCPERR_Pos) /*!< CoreDebug DEMCR: VC_NOCPERR Mask */\r
-\r
-#define CoreDebug_DEMCR_VC_MMERR_Pos 4U /*!< CoreDebug DEMCR: VC_MMERR Position */\r
-#define CoreDebug_DEMCR_VC_MMERR_Msk (1UL << CoreDebug_DEMCR_VC_MMERR_Pos) /*!< CoreDebug DEMCR: VC_MMERR Mask */\r
-\r
-#define CoreDebug_DEMCR_VC_CORERESET_Pos 0U /*!< CoreDebug DEMCR: VC_CORERESET Position */\r
-#define CoreDebug_DEMCR_VC_CORERESET_Msk (1UL /*<< CoreDebug_DEMCR_VC_CORERESET_Pos*/) /*!< CoreDebug DEMCR: VC_CORERESET Mask */\r
-\r
-/*@} end of group CMSIS_CoreDebug */\r
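A common use of the DHCSR definitions above is checking whether a debugger has enabled halting debug before routing diagnostics to debug-only channels. A minimal sketch (the CoreDebug instance pointer is defined further down in this header):

/* Sketch: returns non-zero when a debugger has set C_DEBUGEN in DHCSR. */
static inline int debugger_attached_example(void)
{
  return (CoreDebug->DHCSR & CoreDebug_DHCSR_C_DEBUGEN_Msk) != 0UL;
}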
-\r
-\r
-/**\r
- \ingroup CMSIS_core_register\r
- \defgroup CMSIS_core_bitfield Core register bit field macros\r
- \brief Macros for use with bit field definitions (xxx_Pos, xxx_Msk).\r
- @{\r
- */\r
-\r
-/**\r
- \brief Mask and shift a bit field value for use in a register bit range.\r
- \param[in] field Name of the register bit field.\r
- \param[in] value Value of the bit field.\r
- \return Masked and shifted value.\r
-*/\r
-#define _VAL2FLD(field, value) ((value << field ## _Pos) & field ## _Msk)\r
-\r
-/**\r
- \brief Mask and shift a register value to extract a bit field value.\r
- \param[in] field Name of the register bit field.\r
- \param[in] value Value of register.\r
- \return Masked and shifted bit field value.\r
-*/\r
-#define _FLD2VAL(field, value) ((value & field ## _Msk) >> field ## _Pos)\r
-\r
-/*@} end of group CMSIS_core_bitfield */\r
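A short usage sketch for the two macros above (not part of the original header); DWT and SCB are the peripheral instances defined elsewhere in this file:

/* Sketch: _FLD2VAL extracts a field from a register value, _VAL2FLD builds a field for insertion. */
static inline void bitfield_macro_example(void)
{
  uint32_t num_comp = _FLD2VAL(DWT_CTRL_NUMCOMP, DWT->CTRL);   /* number of implemented DWT comparators */
  uint32_t prigroup = _VAL2FLD(SCB_AIRCR_PRIGROUP, 3UL);       /* value ready to merge into SCB->AIRCR */
  (void)num_comp;
  (void)prigroup;
}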
-\r
-\r
-/**\r
- \ingroup CMSIS_core_register\r
- \defgroup CMSIS_core_base Core Definitions\r
- \brief Definitions for base addresses, unions, and structures.\r
- @{\r
- */\r
-\r
-/* Memory mapping of Cortex-M4 Hardware */\r
-#define SCS_BASE (0xE000E000UL) /*!< System Control Space Base Address */\r
-#define ITM_BASE (0xE0000000UL) /*!< ITM Base Address */\r
-#define DWT_BASE (0xE0001000UL) /*!< DWT Base Address */\r
-#define TPI_BASE (0xE0040000UL) /*!< TPI Base Address */\r
-#define CoreDebug_BASE (0xE000EDF0UL) /*!< Core Debug Base Address */\r
-#define SysTick_BASE (SCS_BASE + 0x0010UL) /*!< SysTick Base Address */\r
-#define NVIC_BASE (SCS_BASE + 0x0100UL) /*!< NVIC Base Address */\r
-#define SCB_BASE (SCS_BASE + 0x0D00UL) /*!< System Control Block Base Address */\r
-\r
-#define SCnSCB ((SCnSCB_Type *) SCS_BASE ) /*!< System control Register not in SCB */\r
-#define SCB ((SCB_Type *) SCB_BASE ) /*!< SCB configuration struct */\r
-#define SysTick ((SysTick_Type *) SysTick_BASE ) /*!< SysTick configuration struct */\r
-#define NVIC ((NVIC_Type *) NVIC_BASE ) /*!< NVIC configuration struct */\r
-#define ITM ((ITM_Type *) ITM_BASE ) /*!< ITM configuration struct */\r
-#define DWT ((DWT_Type *) DWT_BASE ) /*!< DWT configuration struct */\r
-#define TPI ((TPI_Type *) TPI_BASE ) /*!< TPI configuration struct */\r
-#define CoreDebug ((CoreDebug_Type *) CoreDebug_BASE) /*!< Core Debug configuration struct */\r
-\r
-#if (__MPU_PRESENT == 1U)\r
- #define MPU_BASE (SCS_BASE + 0x0D90UL) /*!< Memory Protection Unit */\r
- #define MPU ((MPU_Type *) MPU_BASE ) /*!< Memory Protection Unit */\r
-#endif\r
-\r
-#if (__FPU_PRESENT == 1U)\r
- #define FPU_BASE (SCS_BASE + 0x0F30UL) /*!< Floating Point Unit */\r
- #define FPU ((FPU_Type *) FPU_BASE ) /*!< Floating Point Unit */\r
-#endif\r
-\r
-/*@} */\r
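To show how the instance pointers above combine with the bit-field definitions earlier in this header, here is a minimal cycle-counter sketch. It assumes the device actually implements CYCCNT (i.e. DWT_CTRL_NOCYCCNT reads as 0):

/* Sketch: enable and read the DWT cycle counter for rough cycle-accurate timing. */
static inline void cyccnt_start_example(void)
{
  CoreDebug->DEMCR |= CoreDebug_DEMCR_TRCENA_Msk;   /* power up the DWT/ITM trace blocks */
  DWT->CYCCNT       = 0U;                           /* clear the counter */
  DWT->CTRL        |= DWT_CTRL_CYCCNTENA_Msk;       /* start counting core cycles */
}

static inline uint32_t cyccnt_read_example(void)
{
  return DWT->CYCCNT;                               /* cycles elapsed since cyccnt_start_example() */
}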
-\r
-\r
-\r
-/*******************************************************************************\r
- * Hardware Abstraction Layer\r
- Core Function Interface contains:\r
- - Core NVIC Functions\r
- - Core SysTick Functions\r
- - Core Debug Functions\r
- - Core Register Access Functions\r
- ******************************************************************************/\r
-/**\r
- \defgroup CMSIS_Core_FunctionInterface Functions and Instructions Reference\r
-*/\r
-\r
-\r
-\r
-/* ########################## NVIC functions #################################### */\r
-/**\r
- \ingroup CMSIS_Core_FunctionInterface\r
- \defgroup CMSIS_Core_NVICFunctions NVIC Functions\r
- \brief Functions that manage interrupts and exceptions via the NVIC.\r
- @{\r
- */\r
-\r
-/**\r
- \brief Set Priority Grouping\r
- \details Sets the priority grouping field using the required unlock sequence.\r
- The parameter PriorityGroup is assigned to the SCB->AIRCR [10:8] PRIGROUP field.\r
- Only values from 0..7 are used.\r
- In case of a conflict between priority grouping and available\r
- priority bits (__NVIC_PRIO_BITS), the smallest possible priority group is set.\r
- \param [in] PriorityGroup Priority grouping field.\r
- */\r
-__STATIC_INLINE void NVIC_SetPriorityGrouping(uint32_t PriorityGroup)\r
-{\r
- uint32_t reg_value;\r
- uint32_t PriorityGroupTmp = (PriorityGroup & (uint32_t)0x07UL); /* only values 0..7 are used */\r
-\r
- reg_value = SCB->AIRCR; /* read old register configuration */\r
- reg_value &= ~((uint32_t)(SCB_AIRCR_VECTKEY_Msk | SCB_AIRCR_PRIGROUP_Msk)); /* clear bits to change */\r
- reg_value = (reg_value |\r
- ((uint32_t)0x5FAUL << SCB_AIRCR_VECTKEY_Pos) |\r
- (PriorityGroupTmp << 8U) ); /* Insert write key and priority group */\r
- SCB->AIRCR = reg_value;\r
-}\r
-\r
-\r
-/**\r
- \brief Get Priority Grouping\r
- \details Reads the priority grouping field from the NVIC Interrupt Controller.\r
- \return Priority grouping field (SCB->AIRCR [10:8] PRIGROUP field).\r
- */\r
-__STATIC_INLINE uint32_t NVIC_GetPriorityGrouping(void)\r
-{\r
- return ((uint32_t)((SCB->AIRCR & SCB_AIRCR_PRIGROUP_Msk) >> SCB_AIRCR_PRIGROUP_Pos));\r
-}\r
-\r
-\r
-/**\r
- \brief Enable External Interrupt\r
- \details Enables a device-specific interrupt in the NVIC interrupt controller.\r
- \param [in] IRQn External interrupt number. Value cannot be negative.\r
- */\r
-__STATIC_INLINE void NVIC_EnableIRQ(IRQn_Type IRQn)\r
-{\r
- NVIC->ISER[(((uint32_t)(int32_t)IRQn) >> 5UL)] = (uint32_t)(1UL << (((uint32_t)(int32_t)IRQn) & 0x1FUL));\r
-}\r
-\r
-\r
-/**\r
- \brief Disable External Interrupt\r
- \details Disables a device-specific interrupt in the NVIC interrupt controller.\r
- \param [in] IRQn External interrupt number. Value cannot be negative.\r
- */\r
-__STATIC_INLINE void NVIC_DisableIRQ(IRQn_Type IRQn)\r
-{\r
- NVIC->ICER[(((uint32_t)(int32_t)IRQn) >> 5UL)] = (uint32_t)(1UL << (((uint32_t)(int32_t)IRQn) & 0x1FUL));\r
-}\r
-\r
-\r
-/**\r
- \brief Get Pending Interrupt\r
- \details Reads the pending register in the NVIC and returns the pending bit for the specified interrupt.\r
- \param [in] IRQn Interrupt number.\r
- \return 0 Interrupt status is not pending.\r
- \return 1 Interrupt status is pending.\r
- */\r
-__STATIC_INLINE uint32_t NVIC_GetPendingIRQ(IRQn_Type IRQn)\r
-{\r
- return((uint32_t)(((NVIC->ISPR[(((uint32_t)(int32_t)IRQn) >> 5UL)] & (1UL << (((uint32_t)(int32_t)IRQn) & 0x1FUL))) != 0UL) ? 1UL : 0UL));\r
-}\r
-\r
-\r
-/**\r
- \brief Set Pending Interrupt\r
- \details Sets the pending bit of an external interrupt.\r
- \param [in] IRQn Interrupt number. Value cannot be negative.\r
- */\r
-__STATIC_INLINE void NVIC_SetPendingIRQ(IRQn_Type IRQn)\r
-{\r
- NVIC->ISPR[(((uint32_t)(int32_t)IRQn) >> 5UL)] = (uint32_t)(1UL << (((uint32_t)(int32_t)IRQn) & 0x1FUL));\r
-}\r
-\r
-\r
-/**\r
- \brief Clear Pending Interrupt\r
- \details Clears the pending bit of an external interrupt.\r
- \param [in] IRQn External interrupt number. Value cannot be negative.\r
- */\r
-__STATIC_INLINE void NVIC_ClearPendingIRQ(IRQn_Type IRQn)\r
-{\r
- NVIC->ICPR[(((uint32_t)(int32_t)IRQn) >> 5UL)] = (uint32_t)(1UL << (((uint32_t)(int32_t)IRQn) & 0x1FUL));\r
-}\r
-\r
-\r
-/**\r
- \brief Get Active Interrupt\r
- \details Reads the active register in NVIC and returns the active bit.\r
- \param [in] IRQn Interrupt number.\r
- \return 0 Interrupt status is not active.\r
- \return 1 Interrupt status is active.\r
- */\r
-__STATIC_INLINE uint32_t NVIC_GetActive(IRQn_Type IRQn)\r
-{\r
- return((uint32_t)(((NVIC->IABR[(((uint32_t)(int32_t)IRQn) >> 5UL)] & (1UL << (((uint32_t)(int32_t)IRQn) & 0x1FUL))) != 0UL) ? 1UL : 0UL));\r
-}\r
-\r
-\r
-/**\r
- \brief Set Interrupt Priority\r
- \details Sets the priority of an interrupt.\r
- \note The priority cannot be set for every core interrupt.\r
- \param [in] IRQn Interrupt number.\r
- \param [in] priority Priority to set.\r
- */\r
-__STATIC_INLINE void NVIC_SetPriority(IRQn_Type IRQn, uint32_t priority)\r
-{\r
- if ((int32_t)(IRQn) < 0)\r
- {\r
- SCB->SHP[(((uint32_t)(int32_t)IRQn) & 0xFUL)-4UL] = (uint8_t)((priority << (8U - __NVIC_PRIO_BITS)) & (uint32_t)0xFFUL);\r
- }\r
- else\r
- {\r
- NVIC->IP[((uint32_t)(int32_t)IRQn)] = (uint8_t)((priority << (8U - __NVIC_PRIO_BITS)) & (uint32_t)0xFFUL);\r
- }\r
-}\r
-\r
-\r
-/**\r
- \brief Get Interrupt Priority\r
- \details Reads the priority of an interrupt.\r
- The interrupt number can be positive to specify an external (device specific) interrupt,\r
- or negative to specify an internal (core) interrupt.\r
- \param [in] IRQn Interrupt number.\r
- \return Interrupt Priority.\r
- Value is aligned automatically to the implemented priority bits of the microcontroller.\r
- */\r
-__STATIC_INLINE uint32_t NVIC_GetPriority(IRQn_Type IRQn)\r
-{\r
-\r
- if ((int32_t)(IRQn) < 0)\r
- {\r
- return(((uint32_t)SCB->SHP[(((uint32_t)(int32_t)IRQn) & 0xFUL)-4UL] >> (8U - __NVIC_PRIO_BITS)));\r
- }\r
- else\r
- {\r
- return(((uint32_t)NVIC->IP[((uint32_t)(int32_t)IRQn)] >> (8U - __NVIC_PRIO_BITS)));\r
- }\r
-}\r
-\r
-\r
-/**\r
- \brief Encode Priority\r
- \details Encodes the priority for an interrupt with the given priority group,\r
- preemptive priority value, and subpriority value.\r
- In case of a conflict between priority grouping and available\r
- priority bits (__NVIC_PRIO_BITS), the smallest possible priority group is set.\r
- \param [in] PriorityGroup Used priority group.\r
- \param [in] PreemptPriority Preemptive priority value (starting from 0).\r
- \param [in] SubPriority Subpriority value (starting from 0).\r
- \return Encoded priority. Value can be used in the function \ref NVIC_SetPriority().\r
- */\r
-__STATIC_INLINE uint32_t NVIC_EncodePriority (uint32_t PriorityGroup, uint32_t PreemptPriority, uint32_t SubPriority)\r
-{\r
- uint32_t PriorityGroupTmp = (PriorityGroup & (uint32_t)0x07UL); /* only values 0..7 are used */\r
- uint32_t PreemptPriorityBits;\r
- uint32_t SubPriorityBits;\r
-\r
- PreemptPriorityBits = ((7UL - PriorityGroupTmp) > (uint32_t)(__NVIC_PRIO_BITS)) ? (uint32_t)(__NVIC_PRIO_BITS) : (uint32_t)(7UL - PriorityGroupTmp);\r
- SubPriorityBits = ((PriorityGroupTmp + (uint32_t)(__NVIC_PRIO_BITS)) < (uint32_t)7UL) ? (uint32_t)0UL : (uint32_t)((PriorityGroupTmp - 7UL) + (uint32_t)(__NVIC_PRIO_BITS));\r
-\r
- return (\r
- ((PreemptPriority & (uint32_t)((1UL << (PreemptPriorityBits)) - 1UL)) << SubPriorityBits) |\r
- ((SubPriority & (uint32_t)((1UL << (SubPriorityBits )) - 1UL)))\r
- );\r
-}\r
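The grouping, encode and set functions above are normally used together. The sketch below is illustrative only; TIM2_IRQn stands in for whatever IRQn_Type value the device header actually defines:

/* Sketch: 2 bits of preempt priority + 2 bits of subpriority on a 4-bit NVIC (PRIGROUP = 5). */
static inline void nvic_priority_example(void)
{
  NVIC_SetPriorityGrouping(5U);

  uint32_t prio = NVIC_EncodePriority(NVIC_GetPriorityGrouping(), 1U, 1U);
  NVIC_SetPriority(TIM2_IRQn, prio);                /* TIM2_IRQn: placeholder device interrupt number */
  NVIC_EnableIRQ(TIM2_IRQn);
}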
-\r
-\r
-/**\r
- \brief Decode Priority\r
- \details Decodes an interrupt priority value with a given priority group to\r
- preemptive priority value and subpriority value.\r
- In case of a conflict between priority grouping and available\r
- priority bits (__NVIC_PRIO_BITS) the smallest possible priority group is set.\r
- \param [in] Priority Priority value, which can be retrieved with the function \ref NVIC_GetPriority().\r
- \param [in] PriorityGroup Used priority group.\r
- \param [out] pPreemptPriority Preemptive priority value (starting from 0).\r
- \param [out] pSubPriority Subpriority value (starting from 0).\r
- */\r
-__STATIC_INLINE void NVIC_DecodePriority (uint32_t Priority, uint32_t PriorityGroup, uint32_t* const pPreemptPriority, uint32_t* const pSubPriority)\r
-{\r
- uint32_t PriorityGroupTmp = (PriorityGroup & (uint32_t)0x07UL); /* only values 0..7 are used */\r
- uint32_t PreemptPriorityBits;\r
- uint32_t SubPriorityBits;\r
-\r
- PreemptPriorityBits = ((7UL - PriorityGroupTmp) > (uint32_t)(__NVIC_PRIO_BITS)) ? (uint32_t)(__NVIC_PRIO_BITS) : (uint32_t)(7UL - PriorityGroupTmp);\r
- SubPriorityBits = ((PriorityGroupTmp + (uint32_t)(__NVIC_PRIO_BITS)) < (uint32_t)7UL) ? (uint32_t)0UL : (uint32_t)((PriorityGroupTmp - 7UL) + (uint32_t)(__NVIC_PRIO_BITS));\r
-\r
- *pPreemptPriority = (Priority >> SubPriorityBits) & (uint32_t)((1UL << (PreemptPriorityBits)) - 1UL);\r
- *pSubPriority = (Priority ) & (uint32_t)((1UL << (SubPriorityBits )) - 1UL);\r
-}\r
-\r
-\r
-/**\r
- \brief System Reset\r
- \details Initiates a system reset request to reset the MCU.\r
- */\r
-__STATIC_INLINE void NVIC_SystemReset(void)\r
-{\r
- __DSB(); /* Ensure all outstanding memory accesses, including\r
- buffered writes, are completed before reset */\r
- SCB->AIRCR = (uint32_t)((0x5FAUL << SCB_AIRCR_VECTKEY_Pos) |\r
- (SCB->AIRCR & SCB_AIRCR_PRIGROUP_Msk) |\r
- SCB_AIRCR_SYSRESETREQ_Msk ); /* Keep priority group unchanged */\r
- __DSB(); /* Ensure completion of memory access */\r
-\r
- for(;;) /* wait until reset */\r
- {\r
- __NOP();\r
- }\r
-}\r
-\r
-/*@} end of CMSIS_Core_NVICFunctions */\r
-\r
-\r
-\r
-/* ################################## SysTick function ############################################ */\r
-/**\r
- \ingroup CMSIS_Core_FunctionInterface\r
- \defgroup CMSIS_Core_SysTickFunctions SysTick Functions\r
- \brief Functions that configure the System.\r
- @{\r
- */\r
-\r
-#if (__Vendor_SysTickConfig == 0U)\r
-\r
-/**\r
- \brief System Tick Configuration\r
- \details Initializes the System Timer and its interrupt, and starts the System Tick Timer.\r
- Counter is in free running mode to generate periodic interrupts.\r
- \param [in] ticks Number of ticks between two interrupts.\r
- \return 0 Function succeeded.\r
- \return 1 Function failed.\r
- \note When the variable <b>__Vendor_SysTickConfig</b> is set to 1, then the\r
- function <b>SysTick_Config</b> is not included. In this case, the file <b><i>device</i>.h</b>\r
- must contain a vendor-specific implementation of this function.\r
- */\r
-__STATIC_INLINE uint32_t SysTick_Config(uint32_t ticks)\r
-{\r
- if ((ticks - 1UL) > SysTick_LOAD_RELOAD_Msk)\r
- {\r
- return (1UL); /* Reload value impossible */\r
- }\r
-\r
- SysTick->LOAD = (uint32_t)(ticks - 1UL); /* set reload register */\r
- NVIC_SetPriority (SysTick_IRQn, (1UL << __NVIC_PRIO_BITS) - 1UL); /* set Priority for Systick Interrupt */\r
- SysTick->VAL = 0UL; /* Load the SysTick Counter Value */\r
- SysTick->CTRL = SysTick_CTRL_CLKSOURCE_Msk |\r
- SysTick_CTRL_TICKINT_Msk |\r
- SysTick_CTRL_ENABLE_Msk; /* Enable SysTick IRQ and SysTick Timer */\r
- return (0UL); /* Function successful */\r
-}\r
-\r
-#endif\r
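A typical call site for SysTick_Config, as a hedged sketch: SystemCoreClock is assumed to be maintained by the device support code, and the startup file is assumed to route the SysTick exception to SysTick_Handler.

/* Sketch: configure a 1 ms system tick and trap if the reload value does not fit in 24 bits. */
extern uint32_t SystemCoreClock;                    /* provided by device support code (assumption) */

static inline void systick_1ms_example(void)
{
  if (SysTick_Config(SystemCoreClock / 1000U) != 0U)
  {
    for (;;) { }                                    /* reload value too large for the LOAD register */
  }
}

void SysTick_Handler(void)
{
  /* runs every millisecond once systick_1ms_example() has been called */
}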
-\r
-/*@} end of CMSIS_Core_SysTickFunctions */\r
-\r
-\r
-\r
-/* ##################################### Debug In/Output function ########################################### */\r
-/**\r
- \ingroup CMSIS_Core_FunctionInterface\r
- \defgroup CMSIS_core_DebugFunctions ITM Functions\r
- \brief Functions that access the ITM debug interface.\r
- @{\r
- */\r
-\r
-extern volatile int32_t ITM_RxBuffer; /*!< External variable to receive characters. */\r
-#define ITM_RXBUFFER_EMPTY 0x5AA55AA5U /*!< Value identifying \ref ITM_RxBuffer is ready for next character. */\r
-\r
-\r
-/**\r
- \brief ITM Send Character\r
- \details Transmits a character via the ITM channel 0, and\r
- \li Just returns when no debugger is connected that has booked the output.\r
- \li Is blocking when a debugger is connected, but the previous character sent has not been transmitted.\r
- \param [in] ch Character to transmit.\r
- \returns Character to transmit.\r
- */\r
-__STATIC_INLINE uint32_t ITM_SendChar (uint32_t ch)\r
-{\r
- if (((ITM->TCR & ITM_TCR_ITMENA_Msk) != 0UL) && /* ITM enabled */\r
- ((ITM->TER & 1UL ) != 0UL) ) /* ITM Port #0 enabled */\r
- {\r
- while (ITM->PORT[0U].u32 == 0UL)\r
- {\r
- __NOP();\r
- }\r
- ITM->PORT[0U].u8 = (uint8_t)ch;\r
- }\r
- return (ch);\r
-}\r
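ITM_SendChar is commonly used to retarget console output onto stimulus port 0. The sketch below assumes a newlib-style _write hook; the hook name and signature are assumptions about the C library in use, not something this header defines:

/* Sketch: route low-level character output to ITM stimulus port 0. */
int _write(int fd, const char *buf, int len)        /* newlib-style hook (assumption) */
{
  (void)fd;
  for (int i = 0; i < len; i++)
  {
    ITM_SendChar((uint32_t)buf[i]);                 /* waits only while the port FIFO is full */
  }
  return len;
}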
-\r
-\r
-/**\r
- \brief ITM Receive Character\r
- \details Inputs a character via the external variable \ref ITM_RxBuffer.\r
- \return Received character.\r
- \return -1 No character pending.\r
- */\r
-__STATIC_INLINE int32_t ITM_ReceiveChar (void)\r
-{\r
- int32_t ch = -1; /* no character available */\r
-\r
- if (ITM_RxBuffer != ITM_RXBUFFER_EMPTY)\r
- {\r
- ch = ITM_RxBuffer;\r
- ITM_RxBuffer = ITM_RXBUFFER_EMPTY; /* ready for next character */\r
- }\r
-\r
- return (ch);\r
-}\r
-\r
-\r
-/**\r
- \brief ITM Check Character\r
- \details Checks whether a character is pending for reading in the variable \ref ITM_RxBuffer.\r
- \return 0 No character available.\r
- \return 1 Character available.\r
- */\r
-__STATIC_INLINE int32_t ITM_CheckChar (void)\r
-{\r
-\r
- if (ITM_RxBuffer == ITM_RXBUFFER_EMPTY)\r
- {\r
- return (0); /* no character available */\r
- }\r
- else\r
- {\r
- return (1); /* character available */\r
- }\r
-}\r
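On the receive side, ITM_CheckChar/ITM_ReceiveChar only work if the application defines ITM_RxBuffer (declared extern above) and the debugger writes characters into it. A minimal polling sketch:

/* Sketch: application-side definition of the receive buffer plus a non-blocking poll. */
volatile int32_t ITM_RxBuffer = ITM_RXBUFFER_EMPTY; /* must be defined exactly once in the application */

static inline void poll_debugger_input_example(void)
{
  if (ITM_CheckChar() != 0)
  {
    int32_t ch = ITM_ReceiveChar();                 /* a character is pending, so -1 is not returned */
    (void)ch;                                       /* handle the received character */
  }
}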
-\r
-/*@} end of CMSIS_core_DebugFunctions */\r
-\r
-\r
-\r
-\r
-#ifdef __cplusplus\r
-}\r
-#endif\r
-\r
-#endif /* __CORE_CM4_H_DEPENDANT */\r
-\r
-#endif /* __CMSIS_GENERIC */\r
+++ /dev/null
-/**************************************************************************//**\r
- * @file core_cmFunc.h\r
- * @brief CMSIS Cortex-M Core Function Access Header File\r
- * @version V4.30\r
- * @date 20. October 2015\r
- ******************************************************************************/\r
-/* Copyright (c) 2009 - 2015 ARM LIMITED\r
-\r
- All rights reserved.\r
- Redistribution and use in source and binary forms, with or without\r
- modification, are permitted provided that the following conditions are met:\r
- - Redistributions of source code must retain the above copyright\r
- notice, this list of conditions and the following disclaimer.\r
- - Redistributions in binary form must reproduce the above copyright\r
- notice, this list of conditions and the following disclaimer in the\r
- documentation and/or other materials provided with the distribution.\r
- - Neither the name of ARM nor the names of its contributors may be used\r
- to endorse or promote products derived from this software without\r
- specific prior written permission.\r
- *\r
- THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"\r
- AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE\r
- IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE\r
- ARE DISCLAIMED. IN NO EVENT SHALL COPYRIGHT HOLDERS AND CONTRIBUTORS BE\r
- LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR\r
- CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF\r
- SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS\r
- INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN\r
- CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)\r
- ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE\r
- POSSIBILITY OF SUCH DAMAGE.\r
- ---------------------------------------------------------------------------*/\r
-\r
-\r
-#if defined ( __ICCARM__ )\r
- #pragma system_include /* treat file as system include file for MISRA check */\r
-#elif defined(__ARMCC_VERSION) && (__ARMCC_VERSION >= 6010050)\r
- #pragma clang system_header /* treat file as system include file */\r
-#endif\r
-\r
-#ifndef __CORE_CMFUNC_H\r
-#define __CORE_CMFUNC_H\r
-\r
-\r
-/* ########################### Core Function Access ########################### */\r
-/** \ingroup CMSIS_Core_FunctionInterface\r
- \defgroup CMSIS_Core_RegAccFunctions CMSIS Core Register Access Functions\r
- @{\r
-*/\r
-\r
-/*------------------ RealView Compiler -----------------*/\r
-#if defined ( __CC_ARM )\r
- #include "cmsis_armcc.h"\r
-\r
-/*------------------ ARM Compiler V6 -------------------*/\r
-#elif defined(__ARMCC_VERSION) && (__ARMCC_VERSION >= 6010050)\r
- #include "cmsis_armcc_V6.h"\r
-\r
-/*------------------ GNU Compiler ----------------------*/\r
-#elif defined ( __GNUC__ )\r
- #include "cmsis_gcc.h"\r
-\r
-/*------------------ ICC Compiler ----------------------*/\r
-#elif defined ( __ICCARM__ )\r
- #include <cmsis_iar.h>\r
-\r
-/*------------------ TI CCS Compiler -------------------*/\r
-#elif defined ( __TMS470__ )\r
- #include <cmsis_ccs.h>\r
-\r
-/*------------------ TASKING Compiler ------------------*/\r
-#elif defined ( __TASKING__ )\r
- /*\r
- * The CMSIS functions have been implemented as intrinsics in the compiler.\r
- * Please use "carm -?i" to get an up-to-date list of all intrinsics,\r
- * including the CMSIS ones.\r
- */\r
-\r
-/*------------------ COSMIC Compiler -------------------*/\r
-#elif defined ( __CSMC__ )\r
- #include <cmsis_csm.h>\r
-\r
-#endif\r
-\r
-/*@} end of CMSIS_Core_RegAccFunctions */\r
-\r
-#endif /* __CORE_CMFUNC_H */\r
+++ /dev/null
-/**************************************************************************//**\r
- * @file core_cmInstr.h\r
- * @brief CMSIS Cortex-M Core Instruction Access Header File\r
- * @version V4.30\r
- * @date 20. October 2015\r
- ******************************************************************************/\r
-/* Copyright (c) 2009 - 2015 ARM LIMITED\r
-\r
- All rights reserved.\r
- Redistribution and use in source and binary forms, with or without\r
- modification, are permitted provided that the following conditions are met:\r
- - Redistributions of source code must retain the above copyright\r
- notice, this list of conditions and the following disclaimer.\r
- - Redistributions in binary form must reproduce the above copyright\r
- notice, this list of conditions and the following disclaimer in the\r
- documentation and/or other materials provided with the distribution.\r
- - Neither the name of ARM nor the names of its contributors may be used\r
- to endorse or promote products derived from this software without\r
- specific prior written permission.\r
- *\r
- THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"\r
- AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE\r
- IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE\r
- ARE DISCLAIMED. IN NO EVENT SHALL COPYRIGHT HOLDERS AND CONTRIBUTORS BE\r
- LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR\r
- CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF\r
- SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS\r
- INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN\r
- CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)\r
- ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE\r
- POSSIBILITY OF SUCH DAMAGE.\r
- ---------------------------------------------------------------------------*/\r
-\r
-\r
-#if defined ( __ICCARM__ )\r
- #pragma system_include /* treat file as system include file for MISRA check */\r
-#elif defined(__ARMCC_VERSION) && (__ARMCC_VERSION >= 6010050)\r
- #pragma clang system_header /* treat file as system include file */\r
-#endif\r
-\r
-#ifndef __CORE_CMINSTR_H\r
-#define __CORE_CMINSTR_H\r
-\r
-\r
-/* ########################## Core Instruction Access ######################### */\r
-/** \defgroup CMSIS_Core_InstructionInterface CMSIS Core Instruction Interface\r
- Access to dedicated instructions\r
- @{\r
-*/\r
-\r
-/* CHIBIOS FIX */\r
-#if !defined(__CORTEX_SC)\r
-#define __CORTEX_SC 0\r
-#endif\r
-\r
-/*------------------ RealView Compiler -----------------*/\r
-#if defined ( __CC_ARM )\r
- #include "cmsis_armcc.h"\r
-\r
-/*------------------ ARM Compiler V6 -------------------*/\r
-#elif defined(__ARMCC_VERSION) && (__ARMCC_VERSION >= 6010050)\r
- #include "cmsis_armcc_V6.h"\r
-\r
-/*------------------ GNU Compiler ----------------------*/\r
-#elif defined ( __GNUC__ )\r
- #include "cmsis_gcc.h"\r
-\r
-/*------------------ ICC Compiler ----------------------*/\r
-#elif defined ( __ICCARM__ )\r
- #include <cmsis_iar.h>\r
-\r
-/*------------------ TI CCS Compiler -------------------*/\r
-#elif defined ( __TMS470__ )\r
- #include <cmsis_ccs.h>\r
-\r
-/*------------------ TASKING Compiler ------------------*/\r
-#elif defined ( __TASKING__ )\r
- /*\r
- * The CMSIS functions have been implemented as intrinsics in the compiler.\r
- * Please use "carm -?i" to get an up to date list of all intrinsics,\r
- * Including the CMSIS ones.\r
- */\r
-\r
-/*------------------ COSMIC Compiler -------------------*/\r
-#elif defined ( __CSMC__ )\r
- #include <cmsis_csm.h>\r
-\r
-#endif\r
-\r
-/*@}*/ /* end of group CMSIS_Core_InstructionInterface */\r
-\r
-#endif /* __CORE_CMINSTR_H */\r
+++ /dev/null
-/**************************************************************************//**\r
- * @file core_cmSimd.h\r
- * @brief CMSIS Cortex-M SIMD Header File\r
- * @version V4.30\r
- * @date 20. October 2015\r
- ******************************************************************************/\r
-/* Copyright (c) 2009 - 2015 ARM LIMITED\r
-\r
- All rights reserved.\r
- Redistribution and use in source and binary forms, with or without\r
- modification, are permitted provided that the following conditions are met:\r
- - Redistributions of source code must retain the above copyright\r
- notice, this list of conditions and the following disclaimer.\r
- - Redistributions in binary form must reproduce the above copyright\r
- notice, this list of conditions and the following disclaimer in the\r
- documentation and/or other materials provided with the distribution.\r
- - Neither the name of ARM nor the names of its contributors may be used\r
- to endorse or promote products derived from this software without\r
- specific prior written permission.\r
- *\r
- THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"\r
- AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE\r
- IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE\r
- ARE DISCLAIMED. IN NO EVENT SHALL COPYRIGHT HOLDERS AND CONTRIBUTORS BE\r
- LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR\r
- CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF\r
- SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS\r
- INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN\r
- CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)\r
- ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE\r
- POSSIBILITY OF SUCH DAMAGE.\r
- ---------------------------------------------------------------------------*/\r
-\r
-\r
-#if defined ( __ICCARM__ )\r
- #pragma system_include /* treat file as system include file for MISRA check */\r
-#elif defined(__ARMCC_VERSION) && (__ARMCC_VERSION >= 6010050)\r
- #pragma clang system_header /* treat file as system include file */\r
-#endif\r
-\r
-#ifndef __CORE_CMSIMD_H\r
-#define __CORE_CMSIMD_H\r
-\r
-#ifdef __cplusplus\r
- extern "C" {\r
-#endif\r
-\r
-\r
-/* ################### Compiler specific Intrinsics ########################### */\r
-/** \defgroup CMSIS_SIMD_intrinsics CMSIS SIMD Intrinsics\r
- Access to dedicated SIMD instructions\r
- @{\r
-*/\r
-\r
-/*------------------ RealView Compiler -----------------*/\r
-#if defined ( __CC_ARM )\r
- #include "cmsis_armcc.h"\r
-\r
-/*------------------ ARM Compiler V6 -------------------*/\r
-#elif defined(__ARMCC_VERSION) && (__ARMCC_VERSION >= 6010050)\r
- #include "cmsis_armcc_V6.h"\r
-\r
-/*------------------ GNU Compiler ----------------------*/\r
-#elif defined ( __GNUC__ )\r
- #include "cmsis_gcc.h"\r
-\r
-/*------------------ ICC Compiler ----------------------*/\r
-#elif defined ( __ICCARM__ )\r
- #include <cmsis_iar.h>\r
-\r
-/*------------------ TI CCS Compiler -------------------*/\r
-#elif defined ( __TMS470__ )\r
- #include <cmsis_ccs.h>\r
-\r
-/*------------------ TASKING Compiler ------------------*/\r
-#elif defined ( __TASKING__ )\r
- /*\r
- * The CMSIS functions have been implemented as intrinsics in the compiler.\r
- * Please use "carm -?i" to get an up to date list of all intrinsics,\r
- * Including the CMSIS ones.\r
- */\r
-\r
-/*------------------ COSMIC Compiler -------------------*/\r
-#elif defined ( __CSMC__ )\r
- #include <cmsis_csm.h>\r
-\r
-#endif\r
-\r
-/*@} end of group CMSIS_SIMD_intrinsics */\r
-\r
-\r
-#ifdef __cplusplus\r
-}\r
-#endif\r
-\r
-#endif /* __CORE_CMSIMD_H */\r
+/**
+ * @file gpio.h
+ * Abstracts GPIO access to make pin configuration simpler
+ */
+
#ifndef GPIO_H_
#define GPIO_H_
#include <stm32l476xx.h>
+/**
+ * Expands to a (port, pin) argument pair to simplify gpio calls.
+ * @param p port, e.g. GPIOA
+ * @param b pin, e.g. 4
+ */
#define GPIO_PORT(p, b) GPIO##p, b
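+/*
+ * For example, GPIO_PORT(A, 11) expands to "GPIOA, 11", supplying both the
+ * port and the pin wherever such a pair is expected; keypad.c below uses it
+ * to build its pin map.
+ */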
+/**
+ * Defines possible modes for a gpio pin
+ */
enum GPIO_MODE
{
- INPUT = 0,
- OUTPUT,
- ALTERNATE,
- ANALOG
+ INPUT = 0, /**< digital input */
+ OUTPUT, /**< digital output */
+ ALTERNATE, /**< alternate function */
+ ANALOG /**< analog function */
};
+/**
+ * Defines whether to use push-pull or open drain.
+ */
enum GPIO_TYPE
{
- PUSHPULL = 0,
- OPENDRAIN
+ PUSHPULL = 0, /**< push-pull */
+ OPENDRAIN /**< open drain */
};
+/**
+ * Defines the pin's speed
+ */
enum GPIO_SPEED
{
- LOW = 0,
- MEDIUM,
- HIGH,
- VERYHIGH
+ LOW = 0, /**< low */
+ MEDIUM, /**< medium */
+ HIGH, /**< high */
+ VERYHIGH /**< very high/maximum */
};
+/**
+ * Defines if a pullup or pulldown should be used.
+ */
enum GPIO_PUPD
{
- NOPUPD,
- PULLUP,
- PULLDOWN
+ NOPUPD, /**< no pullup/pulldown */
+ PULLUP, /**< use pullup */
+ PULLDOWN /**< use pulldown */
};
+/**
+ * Initializes the gpio.
+ */
void gpio_init(void);
+/**
+ * Enables or disables pullup/pulldown for the given pin.
+ * @param port the port, e.g. GPIOA
+ * @param pin the pin
+ * @param pupd pullup/pulldown enable
+ * @see GPIO_PUPD
+ */
void gpio_pupd(GPIO_TypeDef *port, uint8_t pin, uint8_t pupd);
+
+/**
+ * Sets whether to use push-pull or open drain for the given pin.
+ * @param port the port
+ * @param pin the pin
+ * @param type what to use
+ * @see GPIO_TYPE
+ */
void gpio_type(GPIO_TypeDef *port, uint8_t pin, uint8_t type);
+
+/**
+ * Sets the pin's speed.
+ * @param port the port
+ * @param pin the pin
+ * @param speed the speed to use
+ * @see GPIO_SPEED
+ */
void gpio_speed(GPIO_TypeDef *port, uint8_t pin, uint8_t speed);
+
+/**
+ * Sets the pin's i/o mode.
+ * @param port the port
+ * @param pin the pin
+ * @param mode the mode to use
+ * @see GPIO_MODE
+ */
void gpio_mode(GPIO_TypeDef *port, uint8_t pin, uint8_t mode);
+
+/**
+ * Sets the state of a digital output pin.
+ * @param port the port
+ * @param pin the pin
+ * @param val non-zero for high, zero for low
+ */
void gpio_dout(GPIO_TypeDef *port, uint8_t pin, uint8_t val);
+
+/**
+ * Reads a digital input pin.
+ * @param port the port
+ * @param pin the pin
+ * @return non-zero for high, zero for low
+ */
uint8_t gpio_din(GPIO_TypeDef *port, uint8_t pin);
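+
+/*
+ * Usage sketch (pin and settings are illustrative only):
+ *
+ *   gpio_mode(GPIOA, 5, OUTPUT);
+ *   gpio_type(GPIOA, 5, PUSHPULL);
+ *   gpio_speed(GPIOA, 5, LOW);
+ *   gpio_dout(GPIOA, 5, 1);    // drive the pin high
+ */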
#endif // GPIO_H_
+/**
+ * @file heap.h
+ * A basic memory manager
+ */
+
#ifndef HEAP_H_
#define HEAP_H_
#include <stdint.h>
+/**
+ * Initializes memory management of the given heap.
+ * No bounds or overflow checking is performed, so allocate with care.
+ * @param buf the heap to use for allocations
+ */
void heap_init(void *buf);
+/**
+ * Allocates a chunk of memory.
+ * @param size how many bytes to claim
+ * @return pointer to the allocated buffer
+ */
void *malloc(uint32_t size);
+
+/**
+ * Allocates and zeros a chunk of memory.
+ * @param count number of elements to allocate
+ * @param size size in bytes of each element
+ * @return pointer to the allocated buffer
+ */
void *calloc(uint32_t count, uint32_t size);
+
+/**
+ * Frees the buffer allocated through malloc/calloc.
+ * Please don't double-free.
+ * @param buf the buffer to release
+ */
void free(void *buf);
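+
+/*
+ * Usage sketch (sizes are arbitrary):
+ *
+ *   uint32_t *words = calloc(8, sizeof(uint32_t));   // 8 zeroed words
+ *   ...
+ *   free(words);
+ */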
#endif // HEAP_H_
--- /dev/null
+#ifndef KEYPAD_H_
+#define KEYPAD_H_
+
+#include <stdint.h>
+
+#define K0 (1 << 0)
+#define K1 (1 << 1)
+#define K2 (1 << 2)
+#define K3 (1 << 3)
+#define K4 (1 << 4)
+#define K5 (1 << 5)
+#define K6 (1 << 6)
+#define K7 (1 << 7)
+#define K8 (1 << 8)
+#define K9 (1 << 9)
+#define KS (1 << 10)
+#define KP (1 << 11)
+
+void keypad_init(void);
+
+uint16_t keypad_get(void);
+uint8_t keypad_isdown(uint16_t);
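+/*
+ * Usage sketch:
+ *
+ *   uint16_t keys = keypad_get();    // bitmask of currently pressed keys
+ *   if (keys & K4) { ... }           // test one key from the snapshot
+ *   if (keypad_isdown(KP)) { ... }   // or poll a single key directly
+ */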
+
+#endif // KEYPAD_H_
+/**
+ * @file random.h
+ * Provides true random number generation functionality
+ */
+
#ifndef RANDOM_H_
#define RANDOM_H_
#include <stdint.h>
+/**
+ * Initializes the STM32's true random number generator (RNG) peripheral.
+ */
void random_init(void);
+
+/**
+ * Gets the next random number from the generator.
+ * @return the random number
+ */
uint32_t random_get(void);
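+
+/*
+ * Usage sketch: bound the result with a modulus, e.g. a value in 0..479:
+ *
+ *   uint32_t x = random_get() % 480;
+ */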
#endif // RANDOM_H_
+/**
+ * @file script.h
+ * Provides the script library for accessing the calculator's hardware
+ */
+
#ifndef SCRIPT_H_
#define SCRIPT_H_
#include <parser.h>
+/**
+ * Loads the library for the given interpreter.
+ * @param it the interpreter to use
+ */
void script_loadlib(interpreter *it);
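+
+/*
+ * The functions registered by script_loadlib (see script.c) include line,
+ * color, rand, and getkey, so a loaded script can do, e.g.:
+ *
+ *   getkey > input
+ *   rand 479 > x
+ */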
#endif // SCRIPT_H_
*/
void task_start(void (*task)(void), uint16_t stackSize);
+/**
+ * Allows task switching to be disabled, for low-level actions.
+ * Multiple holds can be placed, and all must be removed to continue task
+ * switching.
+ * @param hold non-zero for hold, zero to remove hold
+ */
void task_hold(uint8_t hold);
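+
+/*
+ * Usage sketch: bracket a non-reentrant operation so the scheduler does
+ * not switch tasks in the middle of it.
+ *
+ *   task_hold(1);   // place a hold
+ *   ...critical work...
+ *   task_hold(0);   // release it
+ */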
#endif // TASK_H_
do
- rand 479 > x
- rand 319 > y
- rand 479 > i
- rand 319 > j
- rand 32767 > purple
+ getkey > input
+ if (input & 4)
+ rand 479 > x
+ rand 319 > y
+ rand 479 > i
+ rand 319 > j
+ rand 32767 > purple
- line x y i j purple
+ line x y i j purple
+ end
while (1)
print "done"
+++ /dev/null
-#!/bin/bash
-echo "Making initrd.img..."
-rm -f initrd.img
-arm-none-eabi-ar r initrd.img initrd/*
#define STK_CALIB *((uint32_t *)0xE000E01C)
// ticks since init
-static uint32_t ticks = 0;
+volatile uint32_t ticks = 0;
void clock_init(void)
{
// set system clock to PLL
RCC->CFGR &= ~(RCC_CFGR_SW);
+ RCC->CFGR &= ~(RCC_CFGR_HPRE_Msk);
RCC->CFGR |= RCC_CFGR_SW_PLL;
while ((RCC->CFGR & RCC_CFGR_SWS_PLL) != RCC_CFGR_SWS_PLL);
while (ticks < target);
}
-__attribute__ ((naked))
void SysTick_Handler(void)
{
- uint32_t lr;
- asm("mov %0, lr" : "=r" (lr));
-
// just keep counting
ticks++;
if (!(ticks & 3))
SCB->ICSR |= SCB_ICSR_PENDSVSET_Msk;
-
- asm("mov lr, %0; bx lr" :: "r" (lr));
}
{
dsp_set_addr(x, y, x + w - 1, y + h - 1);
int countdown = w * h;
+ LOCK;
do {
- LOCK;
dsp_write_data(color >> 8);
dsp_write_data(color & 0xFF);
- UNLOCK;
} while (countdown--);
+ UNLOCK;
}
void dsp_line(int x, int y, int i, int j, uint16_t color)
void gpio_dout(GPIO_TypeDef *port, uint8_t pin, uint8_t val)
{
- if (val)
- port->BSRR |= (1 << pin);
- else
- port->BRR |= (1 << pin);
+ port->BSRR |= (1 << (val ? pin : pin + 16));
}
uint8_t gpio_din(GPIO_TypeDef *port, uint8_t pin)
--- /dev/null
+#include <keypad.h>
+#include <gpio.h>
+
+#define PIN_0 GPIO_PORT(A, 11)
+#define PIN_1 GPIO_PORT(B, 13)
+#define PIN_2 GPIO_PORT(B, 2)
+#define PIN_3 GPIO_PORT(A, 12)
+#define PIN_4 GPIO_PORT(B, 14)
+#define PIN_5 GPIO_PORT(B, 11)
+#define PIN_6 GPIO_PORT(C, 5)
+#define PIN_7 GPIO_PORT(B, 15)
+#define PIN_8 GPIO_PORT(B, 12)
+#define PIN_9 GPIO_PORT(C, 6)
+#define PIN_S GPIO_PORT(B, 1)
+#define PIN_P GPIO_PORT(C, 8)
+
+typedef struct {
+ GPIO_TypeDef *port;
+ uint16_t pin;
+ uint16_t keycode;
+} key_t;
+
+static const key_t keypad_map[12] = {
+ { PIN_0, K0 },
+ { PIN_1, K1 },
+ { PIN_2, K2 },
+ { PIN_3, K3 },
+ { PIN_4, K4 },
+ { PIN_5, K5 },
+ { PIN_6, K6 },
+ { PIN_7, K7 },
+ { PIN_8, K8 },
+ { PIN_9, K9 },
+ { PIN_S, KS },
+ { PIN_P, KP }
+};
+
+void keypad_init(void)
+{
+ for (uint8_t i = 0; i < 12; i++) {
+ GPIO_TypeDef *p = keypad_map[i].port;
+ uint16_t pin = keypad_map[i].pin;
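+		/* drive each line low as an output first, then leave it as a
+		 * pulled-down input so an unpressed key reads 0 */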
+ gpio_mode(p, pin, OUTPUT);
+ gpio_dout(p, pin, 0);
+ gpio_mode(p, pin, INPUT);
+ gpio_pupd(p, pin, PULLDOWN);
+ }
+}
+
+uint16_t keypad_get(void)
+{
+ uint16_t state = 0;
+ for (uint8_t i = 0; i < 12; i++) {
+ if (gpio_din(keypad_map[i].port, keypad_map[i].pin))
+ state |= keypad_map[i].keycode;
+ }
+ return state;
+}
+
+uint8_t keypad_isdown(uint16_t keycode)
+{
+	/* collapse the mask test to 0/1 so keycodes above bit 7 are not
+	 * truncated away by the uint8_t return type */
+	return (keypad_get() & keycode) ? 1 : 0;
+}
#include <string.h>\r
#include <script.h>\r
#include <random.h>\r
+#include <keypad.h>\r
\r
extern uint8_t _ebss;\r
extern char *itoa(int, char *, int);\r
int main(void)\r
{\r
asm("cpsid i");\r
- // disable write buffer\r
- *((uint32_t *)0xE000E008) |= 2;\r
\r
// prepare flash latency for 80MHz operation\r
FLASH->ACR &= ~(FLASH_ACR_LATENCY);\r
FLASH->ACR |= FLASH_ACR_LATENCY_4WS;\r
\r
- //MPU->CTRL |= MPU_CTRL_ENABLE_Msk | MPU_CTRL_PRIVDEFENA_Msk;\r
clock_init();\r
heap_init(&_ebss);\r
gpio_init();\r
+ keypad_init();\r
serial_init();\r
random_init();\r
\r
gpio_mode(GPIOA, 5, OUTPUT);\r
\r
// enable FPU\r
SCB->CPACR |= (0xF << 20);\r
+ // enable MPU\r
+ //MPU->CTRL |= MPU_CTRL_PRIVDEFENA_Msk | MPU_CTRL_ENABLE_Msk;\r
\r
task_init(kmain);\r
while (1);\r
\r
void kmain(void)\r
{\r
- asm("cpsie i");\r
dsp_init();\r
dsp_rect(0, 0, LCD_WIDTH, LCD_HEIGHT, dsp_color(0, 0, 0));\r
dsp_cursoron();\r
task_start(task_interpreter, 4096);\r
\r
while (1) {\r
- gpio_dout(GPIOA, 5, 1);\r
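+		// mirror key K0 on GPIOA pin 5\r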
+		gpio_dout(GPIOA, 5, keypad_isdown(K0));\r
+ /*gpio_dout(GPIOA, 5, 1);\r
delay(250);\r
gpio_dout(GPIOA, 5, 0);\r
- delay(250);\r
+ delay(250);*/\r
}\r
}\r
\r
#include <random.h>
#include <serial.h>
#include <stack.h>
+#include <keypad.h>
int script_puts(interpreter *it);
int script_gets(interpreter *it);
int script_line(interpreter *it);
int script_color(interpreter *it);
int script_rand(interpreter *it);
+int script_getkey(interpreter *it);
void script_loadlib(interpreter *it)
{
inew_cfunc(it, "line", script_line);
inew_cfunc(it, "color", script_color);
inew_cfunc(it, "rand", script_rand);
+ inew_cfunc(it, "getkey", script_getkey);
}
int script_puts(interpreter *it)
free(v);
return 0;
}
+
+int script_getkey(interpreter *it)
+{
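+	/* build an integer variable holding the current key bitmask and pass
+	 * it to iret() as the result seen by the script */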
+ variable *v = (variable *)malloc(sizeof(variable));
+ v->valtype = INTEGER;
+ INT(v) = keypad_get();
+ v->svalue = 0;
+ isetstr(v);
+ iret(it, v);
+ free(v->svalue);
+ free(v);
+ return 0;
+}
\r
void perror(const char *s)\r
{\r
- extern task_t *current;\r
+ //extern task_t *current;\r
serial_puts(s);\r
- char buf[200];\r
- snprintf(buf, 200, "xPSR: %x\r\nPC: %x\r\nLR: %x\r\n", current->sp[0],\r
- current->sp[1], current->sp[2]);\r
- serial_puts(buf);\r
+ //char buf[200];\r
+ //snprintf(buf, 200, "xPSR: %x\r\nPC: %x\r\nLR: %x\r\n", current->sp[0],\r
+ // current->sp[1], current->sp[2]);\r
+ //serial_puts(buf);\r
}\r
\r
__attribute__ ((naked))\r
void HardFault_Handler(void)\r
{\r
GPIOA->BSRR |= (1 << 5);\r
- //perror("Hard Fault!");\r
+ perror("Hard Fault!");\r
while (1);\r
}\r
\r
t->sp[14] = 0xFFFFFFFD;
t->sp[15] = (uint32_t)code;
t->sp[16] = 0x01000000;
-
- //void *sp = (uint8_t *)t->stack + stackSize - 64; // 16 words
- //t->sp = (uint32_t *)sp;
- //t->sp[13] = (uint32_t)task_exit;
- //t->sp[14] = (uint32_t)code;
- //t->sp[15] = 0x01000000;
return t;
}
asm("\
msr psp, %0; \
mrs r0, control; \
- orr r0, r0, #2; \
+ orr r0, r0, #3; \
+ cpsie i; \
msr control, r0; \
isb; \
bx %1; \