Initial commit

commit f82ab23898
2025-08-17 02:39:52 +08:00
95 changed files with 52342 additions and 0 deletions

SDK/CMSIS/CMakeLists.txt Normal file

@@ -0,0 +1,13 @@
project(CMSIS LANGUAGES C CXX ASM)
add_library(CMSIS INTERFACE)
target_include_directories(CMSIS INTERFACE
${CMAKE_SOURCE_DIR}/SDK/CMSIS
${CMAKE_SOURCE_DIR}/SDK/CMSIS/GD/GD32E23x/Include
# Directory that provides "gd32e23x_libopt.h".
${CMAKE_SOURCE_DIR}/Inc
# Add further include directories here as needed.
)
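
The Inc directory is added because gd32e23x.h (below) ends with #include "gd32e23x_libopt.h", which the application is expected to supply. A minimal sketch of such a file follows; the two driver header names are assumptions based on the GD32E23x standard peripheral library naming, and the real Inc/gd32e23x_libopt.h in this commit may include the full driver set.

/* Sketch of a minimal Inc/gd32e23x_libopt.h (assumed contents, for illustration only). */
#ifndef GD32E23X_LIBOPT_H
#define GD32E23X_LIBOPT_H

#include "gd32e23x_rcu.h"     /* clock control driver (assumed header name) */
#include "gd32e23x_gpio.h"    /* GPIO driver (assumed header name) */

#endif /* GD32E23X_LIBOPT_H */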

gd32e23x.h Normal file

@@ -0,0 +1,213 @@
/*!
\file gd32e23x.h
\brief general definitions for GD32E23x
\version 2025-02-10, V2.3.0, firmware for GD32E23x
*/
/* Copyright (c) 2012 ARM LIMITED
Copyright (c) 2025, GigaDevice Semiconductor Inc.
All rights reserved.
Redistribution and use in source and binary forms, with or without
modification, are permitted provided that the following conditions are met:
- Redistributions of source code must retain the above copyright
notice, this list of conditions and the following disclaimer.
- Redistributions in binary form must reproduce the above copyright
notice, this list of conditions and the following disclaimer in the
documentation and/or other materials provided with the distribution.
- Neither the name of ARM nor the names of its contributors may be used
to endorse or promote products derived from this software without
specific prior written permission.
*
THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
ARE DISCLAIMED. IN NO EVENT SHALL COPYRIGHT HOLDERS AND CONTRIBUTORS BE
LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS
INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN
CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE
POSSIBILITY OF SUCH DAMAGE.
---------------------------------------------------------------------------*/
/* This file follows the CMSIS standard; some adjustments are made for GigaDevice chips */
#ifndef GD32E23X_H
#define GD32E23X_H
#ifdef __cplusplus
extern "C" {
#endif
/* define GD32E23x */
#if !defined (GD32E23x)
#define GD32E23x
#endif /* define GD32E23x */
#if !defined (GD32E23x)
#error "Please select the target GD32E23x device used in your application (in gd32e23x.h file)"
#endif /* undefine GD32E23x tip */
/* define value of high speed crystal oscillator (HXTAL) in Hz */
#if !defined (HXTAL_VALUE)
#define HXTAL_VALUE ((uint32_t)8000000)
#endif /* high speed crystal oscillator value */
/* define startup timeout value of high speed crystal oscillator (HXTAL) */
#if !defined (HXTAL_STARTUP_TIMEOUT)
#define HXTAL_STARTUP_TIMEOUT ((uint16_t)0x0FFFF)
#endif /* high speed crystal oscillator startup timeout */
/* define value of internal 8MHz RC oscillator (IRC8M) in Hz */
#if !defined (IRC8M_VALUE)
#define IRC8M_VALUE ((uint32_t)8000000)
#endif /* internal 8MHz RC oscillator value */
/* define startup timeout value of internal 8MHz RC oscillator (IRC8M) */
#if !defined (IRC8M_STARTUP_TIMEOUT)
#define IRC8M_STARTUP_TIMEOUT ((uint16_t)0x0500)
#endif /* internal 8MHz RC oscillator startup timeout */
/* define value of internal RC oscillator for ADC in Hz */
#if !defined (IRC28M_VALUE)
#define IRC28M_VALUE ((uint32_t)28000000)
#endif /* IRC28M_VALUE */
#if !defined (IRC48M_VALUE)
#define IRC48M_VALUE ((uint32_t)48000000)
#endif /* IRC48M_VALUE */
/* define value of internal 40KHz RC oscillator (IRC40K) in Hz */
#if !defined (IRC40K_VALUE)
#define IRC40K_VALUE ((uint32_t)40000)
#endif /* internal 40KHz RC oscillator value */
/* define value of low speed crystal oscillator (LXTAL) in Hz */
#if !defined (LXTAL_VALUE)
#define LXTAL_VALUE ((uint32_t)32768)
#endif /* low speed crystal oscillator value */
/* GD32E23x firmware library version number */
#define __GD32E23x_STDPERIPH_VERSION_MAIN (0x02) /*!< [31:24] main version */
#define __GD32E23x_STDPERIPH_VERSION_SUB1 (0x02) /*!< [23:16] sub1 version */
#define __GD32E23x_STDPERIPH_VERSION_SUB2 (0x00) /*!< [15:8] sub2 version */
#define __GD32E23x_STDPERIPH_VERSION_RC (0x00) /*!< [7:0] release candidate */
#define __GD32E23x_STDPERIPH_VERSION ((__GD32E23x_STDPERIPH_VERSION_MAIN << 24)\
|(__GD32E23x_STDPERIPH_VERSION_SUB1 << 16)\
|(__GD32E23x_STDPERIPH_VERSION_SUB2 << 8)\
|(__GD32E23x_STDPERIPH_VERSION_RC))
/* configuration of the Cortex-M23 processor and core peripherals */
#define __CM23_REV 0x0100U /*!< Core revision r1p0 */
#define __SAUREGION_PRESENT 0U /*!< SAU regions are not present */
#define __MPU_PRESENT 0U /*!< MPU is not present */
#define __VTOR_PRESENT 1U /*!< VTOR is present */
#define __NVIC_PRIO_BITS 2U /*!< Number of Bits used for Priority Levels */
#define __Vendor_SysTickConfig 0U /*!< Set to 1 if different SysTick Config is used */
/* define interrupt number */
typedef enum IRQn
{
/* Cortex-M23 processor exceptions numbers */
NonMaskableInt_IRQn = -14, /*!< non maskable interrupt */
HardFault_IRQn = -13, /*!< hardfault interrupt */
SVCall_IRQn = -5, /*!< sv call interrupt */
PendSV_IRQn = -2, /*!< pend sv interrupt */
SysTick_IRQn = -1, /*!< system tick interrupt */
/* interrupt numbers */
WWDGT_IRQn = 0, /*!< window watchdog timer interrupt */
LVD_IRQn = 1, /*!< LVD through EXTI line detect interrupt */
RTC_IRQn = 2, /*!< RTC through EXTI line interrupt */
FMC_IRQn = 3, /*!< FMC interrupt */
RCU_IRQn = 4, /*!< RCU interrupt */
EXTI0_1_IRQn = 5, /*!< EXTI line 0 and 1 interrupts */
EXTI2_3_IRQn = 6, /*!< EXTI line 2 and 3 interrupts */
EXTI4_15_IRQn = 7, /*!< EXTI line 4 to 15 interrupts */
DMA_Channel0_IRQn = 9, /*!< DMA channel 0 interrupt */
DMA_Channel1_2_IRQn = 10, /*!< DMA channel 1 and channel 2 interrupts */
DMA_Channel3_4_IRQn = 11, /*!< DMA channel 3 and channel 4 interrupts */
ADC_CMP_IRQn = 12, /*!< ADC, CMP interrupts */
TIMER0_BRK_UP_TRG_COM_IRQn = 13, /*!< TIMER0 break, update, trigger and commutation interrupts */
TIMER0_Channel_IRQn = 14, /*!< TIMER0 channel capture compare interrupts */
TIMER2_IRQn = 16, /*!< TIMER2 interrupt */
TIMER5_IRQn = 17, /*!< TIMER5 interrupt */
TIMER13_IRQn = 19, /*!< TIMER13 interrupt */
TIMER14_IRQn = 20, /*!< TIMER14 interrupt */
TIMER15_IRQn = 21, /*!< TIMER15 interrupt */
TIMER16_IRQn = 22, /*!< TIMER16 interrupt */
I2C0_EV_IRQn = 23, /*!< I2C0 event interrupt */
I2C1_EV_IRQn = 24, /*!< I2C1 event interrupt */
SPI0_IRQn = 25, /*!< SPI0 interrupt */
SPI1_IRQn = 26, /*!< SPI1 interrupt */
USART0_IRQn = 27, /*!< USART0 interrupt */
USART1_IRQn = 28, /*!< USART1 interrupt */
I2C0_ER_IRQn = 32, /*!< I2C0 error interrupt */
I2C1_ER_IRQn = 34, /*!< I2C1 error interrupt */
} IRQn_Type;
/* includes */
#include "core_cm23.h"
#include "system_gd32e23x.h"
#include <stdint.h>
/* enum definitions */
typedef enum {DISABLE = 0, ENABLE = !DISABLE} EventStatus, ControlStatus;
typedef enum {RESET = 0, SET = !RESET} FlagStatus;
typedef enum {ERROR = 0, SUCCESS = !ERROR} ErrStatus;
/* bit operations */
#define REG32(addr) (*(volatile uint32_t *)(uint32_t)(addr))
#define REG16(addr) (*(volatile uint16_t *)(uint32_t)(addr))
#define REG8(addr) (*(volatile uint8_t *)(uint32_t)(addr))
#define BIT(x) ((uint32_t)((uint32_t)0x01U<<(x)))
#define BITS(start, end) ((0xFFFFFFFFUL << (start)) & (0xFFFFFFFFUL >> (31U - (uint32_t)(end))))
#define GET_BITS(regval, start, end) (((regval) & BITS((start),(end))) >> (start))
/* main flash and SRAM memory map */
#define FLASH_BASE ((uint32_t)0x08000000U) /*!< main FLASH base address */
#define SRAM_BASE ((uint32_t)0x20000000U) /*!< SRAM base address */
/* SRAM and peripheral base bit-band region */
#define SRAM_BB_BASE ((uint32_t)0x22000000U) /*!< SRAM bit-band base address */
#define PERIPH_BB_BASE ((uint32_t)0x42000000U) /*!< peripheral bit-band base address */
/* peripheral memory map */
#define APB1_BUS_BASE ((uint32_t)0x40000000U) /*!< apb1 base address */
#define APB2_BUS_BASE ((uint32_t)0x40010000U) /*!< apb2 base address */
#define AHB1_BUS_BASE ((uint32_t)0x40020000U) /*!< ahb1 base address */
#define AHB2_BUS_BASE ((uint32_t)0x48000000U) /*!< ahb2 base address */
/* advanced peripheral bus 1 memory map */
#define TIMER_BASE (APB1_BUS_BASE + 0x00000000U) /*!< TIMER base address */
#define RTC_BASE (APB1_BUS_BASE + 0x00002800U) /*!< RTC base address */
#define WWDGT_BASE (APB1_BUS_BASE + 0x00002C00U) /*!< WWDGT base address */
#define FWDGT_BASE (APB1_BUS_BASE + 0x00003000U) /*!< FWDGT base address */
#define SPI_BASE (APB1_BUS_BASE + 0x00003800U) /*!< SPI base address */
#define USART_BASE (APB1_BUS_BASE + 0x00004400U) /*!< USART base address */
#define I2C_BASE (APB1_BUS_BASE + 0x00005400U) /*!< I2C base address */
#define PMU_BASE (APB1_BUS_BASE + 0x00007000U) /*!< PMU base address */
/* advanced peripheral bus 2 memory map */
#define SYSCFG_BASE (APB2_BUS_BASE + 0x00000000U) /*!< SYSCFG base address */
#define CMP_BASE (APB2_BUS_BASE + 0x0000001CU) /*!< CMP base address */
#define EXTI_BASE (APB2_BUS_BASE + 0x00000400U) /*!< EXTI base address */
#define ADC_BASE (APB2_BUS_BASE + 0x00002400U) /*!< ADC base address */
/* advanced high performance bus 1 memory map */
#define DMA_BASE (AHB1_BUS_BASE + 0x00000000U) /*!< DMA base address */
#define DMA_CHANNEL_BASE (DMA_BASE + 0x00000008U) /*!< DMA channel base address */
#define RCU_BASE (AHB1_BUS_BASE + 0x00001000U) /*!< RCU base address */
#define FMC_BASE (AHB1_BUS_BASE + 0x00002000U) /*!< FMC base address */
#define CRC_BASE (AHB1_BUS_BASE + 0x00003000U) /*!< CRC base address */
/* advanced high performance bus 2 memory map */
#define GPIO_BASE (AHB2_BUS_BASE + 0x00000000U) /*!< GPIO base address */
/* option byte and debug memory map */
#define OB_BASE ((uint32_t)0x1FFFF800U) /*!< OB base address */
#define DBG_BASE ((uint32_t)0x40015800U) /*!< DBG base address */
#include "gd32e23x_libopt.h"
#ifdef __cplusplus
}
#endif
#endif /* GD32E23X_H */
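
The REG32/BIT/BITS/GET_BITS macros above are the building blocks the peripheral drivers use for register access. A minimal sketch of how they combine is shown below; the 0x00 offset into the RCU block is chosen purely for illustration and is not taken from this header.

/* Sketch only: exercise the bit-manipulation macros on the first word of the RCU block. */
#include "gd32e23x.h"

#define DEMO_REG REG32(RCU_BASE + 0x00U)     /* illustrative offset, not a documented register name */

static uint32_t demo_bit_macros(void)
{
    uint32_t v = DEMO_REG;              /* volatile 32-bit read via REG32() */
    v |= BIT(0);                        /* set bit 0 */
    v &= ~BITS(4, 7);                   /* clear bits 7..4 */
    DEMO_REG = v;                       /* write back */
    return GET_BITS(DEMO_REG, 4, 7);    /* extract field [7:4], right-aligned */
}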

system_gd32e23x.h Normal file

@@ -0,0 +1,66 @@
/*!
\file system_gd32e23x.h
\brief CMSIS Cortex-M23 Device Peripheral Access Layer Header File for
GD32E23x Device Series
*/
/* Copyright (c) 2012 ARM LIMITED
Copyright (c) 2025, GigaDevice Semiconductor Inc.
All rights reserved.
Redistribution and use in source and binary forms, with or without
modification, are permitted provided that the following conditions are met:
- Redistributions of source code must retain the above copyright
notice, this list of conditions and the following disclaimer.
- Redistributions in binary form must reproduce the above copyright
notice, this list of conditions and the following disclaimer in the
documentation and/or other materials provided with the distribution.
- Neither the name of ARM nor the names of its contributors may be used
to endorse or promote products derived from this software without
specific prior written permission.
*
THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
ARE DISCLAIMED. IN NO EVENT SHALL COPYRIGHT HOLDERS AND CONTRIBUTORS BE
LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS
INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN
CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE
POSSIBILITY OF SUCH DAMAGE.
---------------------------------------------------------------------------*/
/* This file follows the CMSIS standard; some adjustments are made for GigaDevice chips */
#ifndef SYSTEM_GD32E23X_H
#define SYSTEM_GD32E23X_H
#ifdef __cplusplus
extern "C" {
#endif
#include <stdint.h>
/* the firmware version API is available when the following macro is defined */
#define __FIRMWARE_VERSION_DEFINE
/* system clock frequency (core clock) */
extern uint32_t SystemCoreClock;
/* function declarations */
/* initialize the system and update the SystemCoreClock variable */
extern void SystemInit (void);
/* update the SystemCoreClock with current core clock retrieved from cpu registers */
extern void SystemCoreClockUpdate (void);
#ifdef __FIRMWARE_VERSION_DEFINE
/* get firmware version */
extern uint32_t gd32e23x_firmware_version_get(void);
#endif /* __FIRMWARE_VERSION_DEFINE */
#ifdef __cplusplus
}
#endif
#endif /* SYSTEM_GD32E23X_H */
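
SystemCoreClock is only refreshed when SystemCoreClockUpdate() is called, so the application should call it after any clock reconfiguration before deriving timing from the variable. A minimal sketch, assuming the standard SysTick_Config() helper from core_cm23.h:

#include "gd32e23x.h"

/* Sketch: re-derive the core clock from the RCU registers, then program a
   1 ms SysTick from it. SysTick_Config() returns 0 on success. */
static void demo_systick_1ms(void)
{
    SystemCoreClockUpdate();
    (void)SysTick_Config(SystemCoreClock / 1000U);
}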

startup_gd32e23x.s (Arm/Keil toolchain) Normal file

@@ -0,0 +1,270 @@
;/*!
; \file startup_gd32e23x.s
; \brief start up file
;
; \version 2025-02-10, V2.3.0, firmware for GD32E23x
;*/
;/* Copyright (c) 2012 ARM LIMITED
; Copyright (c) 2025, GigaDevice Semiconductor Inc.
;
; All rights reserved.
; Redistribution and use in source and binary forms, with or without
; modification, are permitted provided that the following conditions are met:
; - Redistributions of source code must retain the above copyright
; notice, this list of conditions and the following disclaimer.
; - Redistributions in binary form must reproduce the above copyright
; notice, this list of conditions and the following disclaimer in the
; documentation and/or other materials provided with the distribution.
; - Neither the name of ARM nor the names of its contributors may be used
; to endorse or promote products derived from this software without
; specific prior written permission.
; *
; THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
; AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
; IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
; ARE DISCLAIMED. IN NO EVENT SHALL COPYRIGHT HOLDERS AND CONTRIBUTORS BE
; LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
; CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
; SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS
; INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN
; CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
; ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE
; POSSIBILITY OF SUCH DAMAGE.
;*/
;/* This file follows the CMSIS standard; some adjustments are made for GigaDevice chips */
; <h> Stack Configuration
; <o> Stack Size (in Bytes) <0x0-0xFFFFFFFF:8>
; </h>
Stack_Size EQU 0x00000400
AREA STACK, NOINIT, READWRITE, ALIGN=3
Stack_Mem SPACE Stack_Size
__initial_sp
; <h> Heap Configuration
; <o> Heap Size (in Bytes) <0x0-0xFFFFFFFF:8>
; </h>
Heap_Size EQU 0x00000400
AREA HEAP, NOINIT, READWRITE, ALIGN=3
__heap_base
Heap_Mem SPACE Heap_Size
__heap_limit
PRESERVE8
THUMB
; /* reset Vector Mapped to at Address 0 */
AREA RESET, DATA, READONLY
EXPORT __Vectors
EXPORT __Vectors_End
EXPORT __Vectors_Size
__Vectors DCD __initial_sp ; Top of Stack
DCD Reset_Handler ; Reset Handler
DCD NMI_Handler ; NMI Handler
DCD HardFault_Handler ; Hard Fault Handler
DCD 0 ; Reserved
DCD 0 ; Reserved
DCD 0 ; Reserved
DCD 0 ; Reserved
DCD 0 ; Reserved
DCD 0 ; Reserved
DCD 0 ; Reserved
DCD SVC_Handler ; SVCall Handler
DCD 0 ; Reserved
DCD 0 ; Reserved
DCD PendSV_Handler ; PendSV Handler
DCD SysTick_Handler ; SysTick Handler
; /* external interrupts handler */
DCD WWDGT_IRQHandler ; 16:Window Watchdog Timer
DCD LVD_IRQHandler ; 17:LVD through EXTI Line detect
DCD RTC_IRQHandler ; 18:RTC through EXTI Line
DCD FMC_IRQHandler ; 19:FMC
DCD RCU_IRQHandler ; 20:RCU
DCD EXTI0_1_IRQHandler ; 21:EXTI Line 0 and EXTI Line 1
DCD EXTI2_3_IRQHandler ; 22:EXTI Line 2 and EXTI Line 3
DCD EXTI4_15_IRQHandler ; 23:EXTI Line 4 to EXTI Line 15
DCD 0 ; Reserved
DCD DMA_Channel0_IRQHandler ; 25:DMA Channel 0
DCD DMA_Channel1_2_IRQHandler ; 26:DMA Channel 1 and DMA Channel 2
DCD DMA_Channel3_4_IRQHandler ; 27:DMA Channel 3 and DMA Channel 4
DCD ADC_CMP_IRQHandler ; 28:ADC and Comparator
DCD TIMER0_BRK_UP_TRG_COM_IRQHandler ; 29:TIMER0 Break,Update,Trigger and Commutation
DCD TIMER0_Channel_IRQHandler ; 30:TIMER0 Channel Capture Compare
DCD 0 ; Reserved
DCD TIMER2_IRQHandler ; 32:TIMER2
DCD TIMER5_IRQHandler ; 33:TIMER5
DCD 0 ; Reserved
DCD TIMER13_IRQHandler ; 35:TIMER13
DCD TIMER14_IRQHandler ; 36:TIMER14
DCD TIMER15_IRQHandler ; 37:TIMER15
DCD TIMER16_IRQHandler ; 38:TIMER16
DCD I2C0_EV_IRQHandler ; 39:I2C0 Event
DCD I2C1_EV_IRQHandler ; 40:I2C1 Event
DCD SPI0_IRQHandler ; 41:SPI0
DCD SPI1_IRQHandler ; 42:SPI1
DCD USART0_IRQHandler ; 43:USART0
DCD USART1_IRQHandler ; 44:USART1
DCD 0 ; Reserved
DCD 0 ; Reserved
DCD 0 ; Reserved
DCD I2C0_ER_IRQHandler ; 48:I2C0 Error
DCD 0 ; Reserved
DCD I2C1_ER_IRQHandler ; 50:I2C1 Error
__Vectors_End
__Vectors_Size EQU __Vectors_End - __Vectors
AREA |.text|, CODE, READONLY
;/* reset Handler */
Reset_Handler PROC
EXPORT Reset_Handler [WEAK]
IMPORT SystemInit
IMPORT __main
LDR R0, =0x1FFFF7E0
LDR R2, [R0]
LDR R0, = 0xFFFF0000
ANDS R2, R2, R0
LSRS R2, R2, #16
LSLS R2, R2, #10
LDR R1, =0x20000000
MOV R0, #0x00
SRAM_INIT STM R1!, {R0}
SUBS R2, R2, #4
CMP R2, #0x00
BNE SRAM_INIT
LDR R0, =SystemInit
BLX R0
LDR R0, =__main
BX R0
ENDP
;/* dummy Exception Handlers */
NMI_Handler\
PROC
EXPORT NMI_Handler [WEAK]
B .
ENDP
HardFault_Handler\
PROC
EXPORT HardFault_Handler [WEAK]
B .
ENDP
SVC_Handler\
PROC
EXPORT SVC_Handler [WEAK]
B .
ENDP
PendSV_Handler\
PROC
EXPORT PendSV_Handler [WEAK]
B .
ENDP
SysTick_Handler\
PROC
EXPORT SysTick_Handler [WEAK]
B .
ENDP
Default_Handler PROC
; /* external interrupts handler */
EXPORT WWDGT_IRQHandler [WEAK]
EXPORT LVD_IRQHandler [WEAK]
EXPORT RTC_IRQHandler [WEAK]
EXPORT FMC_IRQHandler [WEAK]
EXPORT RCU_IRQHandler [WEAK]
EXPORT EXTI0_1_IRQHandler [WEAK]
EXPORT EXTI2_3_IRQHandler [WEAK]
EXPORT EXTI4_15_IRQHandler [WEAK]
EXPORT DMA_Channel0_IRQHandler [WEAK]
EXPORT DMA_Channel1_2_IRQHandler [WEAK]
EXPORT DMA_Channel3_4_IRQHandler [WEAK]
EXPORT ADC_CMP_IRQHandler [WEAK]
EXPORT TIMER0_BRK_UP_TRG_COM_IRQHandler [WEAK]
EXPORT TIMER0_Channel_IRQHandler [WEAK]
EXPORT TIMER2_IRQHandler [WEAK]
EXPORT TIMER5_IRQHandler [WEAK]
EXPORT TIMER13_IRQHandler [WEAK]
EXPORT TIMER14_IRQHandler [WEAK]
EXPORT TIMER15_IRQHandler [WEAK]
EXPORT TIMER16_IRQHandler [WEAK]
EXPORT I2C0_EV_IRQHandler [WEAK]
EXPORT I2C1_EV_IRQHandler [WEAK]
EXPORT SPI0_IRQHandler [WEAK]
EXPORT SPI1_IRQHandler [WEAK]
EXPORT USART0_IRQHandler [WEAK]
EXPORT USART1_IRQHandler [WEAK]
EXPORT I2C0_ER_IRQHandler [WEAK]
EXPORT I2C1_ER_IRQHandler [WEAK]
;/* external interrupts handler */
WWDGT_IRQHandler
LVD_IRQHandler
RTC_IRQHandler
FMC_IRQHandler
RCU_IRQHandler
EXTI0_1_IRQHandler
EXTI2_3_IRQHandler
EXTI4_15_IRQHandler
DMA_Channel0_IRQHandler
DMA_Channel1_2_IRQHandler
DMA_Channel3_4_IRQHandler
ADC_CMP_IRQHandler
TIMER0_BRK_UP_TRG_COM_IRQHandler
TIMER0_Channel_IRQHandler
TIMER2_IRQHandler
TIMER5_IRQHandler
TIMER13_IRQHandler
TIMER14_IRQHandler
TIMER15_IRQHandler
TIMER16_IRQHandler
I2C0_EV_IRQHandler
I2C1_EV_IRQHandler
SPI0_IRQHandler
SPI1_IRQHandler
USART0_IRQHandler
USART1_IRQHandler
I2C0_ER_IRQHandler
I2C1_ER_IRQHandler
B .
ENDP
ALIGN
; user Initial Stack & Heap
IF :DEF:__MICROLIB
EXPORT __initial_sp
EXPORT __heap_base
EXPORT __heap_limit
ELSE
IMPORT __use_two_region_memory
EXPORT __user_initial_stackheap
__user_initial_stackheap PROC
LDR R0, = Heap_Mem
LDR R1, =(Stack_Mem + Stack_Size)
LDR R2, = (Heap_Mem + Heap_Size)
LDR R3, = Stack_Mem
BX LR
ENDP
ALIGN
ENDIF
END
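
Before branching to SystemInit and __main, the Reset_Handler above reads the memory-density word at 0x1FFFF7E0, treats bits [31:16] as the SRAM size in Kbytes, and zeroes that much SRAM starting at 0x20000000. A rough C equivalent is sketched below for documentation only; it must not be called from running application code, because it would wipe the live stack.

/* Documentation-only sketch of the SRAM clear performed by Reset_Handler. */
#include <stdint.h>

static void sram_clear_like_reset_handler(void)
{
    /* bits [31:16] of 0x1FFFF7E0 = SRAM size in KB; <<10 converts KB to bytes */
    uint32_t remaining = ((*(volatile uint32_t *)0x1FFFF7E0U) >> 16) << 10;
    volatile uint32_t *p = (volatile uint32_t *)0x20000000U;   /* SRAM_BASE */

    while (remaining != 0U) {
        *p++ = 0U;
        remaining -= 4U;
    }
}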

startup_gd32e23x.s (IAR toolchain) Normal file

@@ -0,0 +1,296 @@
;/*!
; \file startup_gd32e23x.s
; \brief start up file
;
; \version 2025-02-10, V2.3.0, firmware for GD32E23x
;*/
;/* Copyright (c) 2012 ARM LIMITED
; Copyright (c) 2025, GigaDevice Semiconductor Inc.
;
; All rights reserved.
; Redistribution and use in source and binary forms, with or without
; modification, are permitted provided that the following conditions are met:
; - Redistributions of source code must retain the above copyright
; notice, this list of conditions and the following disclaimer.
; - Redistributions in binary form must reproduce the above copyright
; notice, this list of conditions and the following disclaimer in the
; documentation and/or other materials provided with the distribution.
; - Neither the name of ARM nor the names of its contributors may be used
; to endorse or promote products derived from this software without
; specific prior written permission.
; *
; THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
; AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
; IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
; ARE DISCLAIMED. IN NO EVENT SHALL COPYRIGHT HOLDERS AND CONTRIBUTORS BE
; LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
; CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
; SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS
; INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN
; CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
; ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE
; POSSIBILITY OF SUCH DAMAGE.
;*/
;/* This file follows the CMSIS standard; some adjustments are made for GigaDevice chips */
MODULE ?cstartup
;; Forward declaration of sections.
SECTION CSTACK:DATA:NOROOT(3)
SECTION .intvec:CODE:NOROOT(2)
EXTERN __iar_program_start
EXTERN SystemInit
PUBLIC __vector_table
DATA
__vector_table
DCD sfe(CSTACK) ; top of stack
DCD Reset_Handler ; Vector Number 1,Reset Handler
DCD NMI_Handler ; Vector Number 2,NMI Handler
DCD HardFault_Handler ; Vector Number 3,Hard Fault Handler
DCD 0 ; Reserved
DCD 0 ; Reserved
DCD 0 ; Reserved
DCD 0 ; Reserved
DCD 0 ; Reserved
DCD 0 ; Reserved
DCD 0 ; Reserved
DCD SVC_Handler ; Vector Number 11,SVCall Handler
DCD 0 ; Reserved
DCD 0 ; Reserved
DCD PendSV_Handler ; Vector Number 14,PendSV Handler
DCD SysTick_Handler ; Vector Number 15,SysTick Handler
; External Interrupts
DCD WWDGT_IRQHandler ; 16:Window Watchdog Timer
DCD LVD_IRQHandler ; 17:LVD through EXTI Line detect
DCD RTC_IRQHandler ; 18:RTC through EXTI Line
DCD FMC_IRQHandler ; 19:FMC
DCD RCU_IRQHandler ; 20:RCU
DCD EXTI0_1_IRQHandler ; 21:EXTI Line 0 and EXTI Line 1
DCD EXTI2_3_IRQHandler ; 22:EXTI Line 2 and EXTI Line 3
DCD EXTI4_15_IRQHandler ; 23:EXTI Line 4 to EXTI Line 15
DCD 0 ; Reserved
DCD DMA_Channel0_IRQHandler ; 25:DMA Channel 0
DCD DMA_Channel1_2_IRQHandler ; 26:DMA Channel 1 and DMA Channel 2
DCD DMA_Channel3_4_IRQHandler ; 27:DMA Channel 3 and DMA Channel 4
DCD ADC_CMP_IRQHandler ; 28:ADC and Comparator
DCD TIMER0_BRK_UP_TRG_COM_IRQHandler ; 29:TIMER0 Break,Update,Trigger and Commutation
DCD TIMER0_Channel_IRQHandler ; 30:TIMER0 Channel Capture Compare
DCD 0 ; Reserved
DCD TIMER2_IRQHandler ; 32:TIMER2
DCD TIMER5_IRQHandler ; 33:TIMER5
DCD 0 ; Reserved
DCD TIMER13_IRQHandler ; 35:TIMER13
DCD TIMER14_IRQHandler ; 36:TIMER14
DCD TIMER15_IRQHandler ; 37:TIMER15
DCD TIMER16_IRQHandler ; 38:TIMER16
DCD I2C0_EV_IRQHandler ; 39:I2C0 Event
DCD I2C1_EV_IRQHandler ; 40:I2C1 Event
DCD SPI0_IRQHandler ; 41:SPI0
DCD SPI1_IRQHandler ; 42:SPI1
DCD USART0_IRQHandler ; 43:USART0
DCD USART1_IRQHandler ; 44:USART1
DCD 0 ; Reserved
DCD 0 ; Reserved
DCD 0 ; Reserved
DCD I2C0_ER_IRQHandler ; 48:I2C0 Error
DCD 0 ; Reserved
DCD I2C1_ER_IRQHandler ; 50:I2C1 Error
;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;
;;
;; Default interrupt handlers.
;;
THUMB
PUBWEAK Reset_Handler
SECTION .text:CODE:NOROOT:REORDER(2)
Reset_Handler
LDR R0, =0x1FFFF7E0
LDR R2, [R0]
LDR R0, = 0xFFFF0000
ANDS R2, R2, R0
LSRS R2, R2, #16
LSLS R2, R2, #10
LDR R1, =0x20000000
MOV R0, #0x00
SRAM_INIT STM R1!, {R0}
SUBS R2, R2, #4
CMP R2, #0x00
BNE SRAM_INIT
LDR R0, =SystemInit
BLX R0
LDR R0, =__iar_program_start
BX R0
PUBWEAK NMI_Handler
SECTION .text:CODE:NOROOT:REORDER(1)
NMI_Handler
B NMI_Handler
PUBWEAK HardFault_Handler
SECTION .text:CODE:NOROOT:REORDER(1)
HardFault_Handler
B HardFault_Handler
PUBWEAK SVC_Handler
SECTION .text:CODE:NOROOT:REORDER(1)
SVC_Handler
B SVC_Handler
PUBWEAK PendSV_Handler
SECTION .text:CODE:NOROOT:REORDER(1)
PendSV_Handler
B PendSV_Handler
PUBWEAK SysTick_Handler
SECTION .text:CODE:NOROOT:REORDER(1)
SysTick_Handler
B SysTick_Handler
PUBWEAK WWDGT_IRQHandler
SECTION .text:CODE:NOROOT:REORDER(1)
WWDGT_IRQHandler
B WWDGT_IRQHandler
PUBWEAK LVD_IRQHandler
SECTION .text:CODE:NOROOT:REORDER(1)
LVD_IRQHandler
B LVD_IRQHandler
PUBWEAK RTC_IRQHandler
SECTION .text:CODE:NOROOT:REORDER(1)
RTC_IRQHandler
B RTC_IRQHandler
PUBWEAK FMC_IRQHandler
SECTION .text:CODE:NOROOT:REORDER(1)
FMC_IRQHandler
B FMC_IRQHandler
PUBWEAK RCU_IRQHandler
SECTION .text:CODE:NOROOT:REORDER(1)
RCU_IRQHandler
B RCU_IRQHandler
PUBWEAK EXTI0_1_IRQHandler
SECTION .text:CODE:NOROOT:REORDER(1)
EXTI0_1_IRQHandler
B EXTI0_1_IRQHandler
PUBWEAK EXTI2_3_IRQHandler
SECTION .text:CODE:NOROOT:REORDER(1)
EXTI2_3_IRQHandler
B EXTI2_3_IRQHandler
PUBWEAK EXTI4_15_IRQHandler
SECTION .text:CODE:NOROOT:REORDER(1)
EXTI4_15_IRQHandler
B EXTI4_15_IRQHandler
PUBWEAK DMA_Channel0_IRQHandler
SECTION .text:CODE:NOROOT:REORDER(1)
DMA_Channel0_IRQHandler
B DMA_Channel0_IRQHandler
PUBWEAK DMA_Channel1_2_IRQHandler
SECTION .text:CODE:NOROOT:REORDER(1)
DMA_Channel1_2_IRQHandler
B DMA_Channel1_2_IRQHandler
PUBWEAK DMA_Channel3_4_IRQHandler
SECTION .text:CODE:NOROOT:REORDER(1)
DMA_Channel3_4_IRQHandler
B DMA_Channel3_4_IRQHandler
PUBWEAK ADC_CMP_IRQHandler
SECTION .text:CODE:NOROOT:REORDER(1)
ADC_CMP_IRQHandler
B ADC_CMP_IRQHandler
PUBWEAK TIMER0_BRK_UP_TRG_COM_IRQHandler
SECTION .text:CODE:NOROOT:REORDER(1)
TIMER0_BRK_UP_TRG_COM_IRQHandler
B TIMER0_BRK_UP_TRG_COM_IRQHandler
PUBWEAK TIMER0_Channel_IRQHandler
SECTION .text:CODE:NOROOT:REORDER(1)
TIMER0_Channel_IRQHandler
B TIMER0_Channel_IRQHandler
PUBWEAK TIMER2_IRQHandler
SECTION .text:CODE:NOROOT:REORDER(1)
TIMER2_IRQHandler
B TIMER2_IRQHandler
PUBWEAK TIMER5_IRQHandler
SECTION .text:CODE:NOROOT:REORDER(1)
TIMER5_IRQHandler
B TIMER5_IRQHandler
PUBWEAK TIMER13_IRQHandler
SECTION .text:CODE:NOROOT:REORDER(1)
TIMER13_IRQHandler
B TIMER13_IRQHandler
PUBWEAK TIMER14_IRQHandler
SECTION .text:CODE:NOROOT:REORDER(1)
TIMER14_IRQHandler
B TIMER14_IRQHandler
PUBWEAK TIMER15_IRQHandler
SECTION .text:CODE:NOROOT:REORDER(1)
TIMER15_IRQHandler
B TIMER15_IRQHandler
PUBWEAK TIMER16_IRQHandler
SECTION .text:CODE:NOROOT:REORDER(1)
TIMER16_IRQHandler
B TIMER16_IRQHandler
PUBWEAK I2C0_EV_IRQHandler
SECTION .text:CODE:NOROOT:REORDER(1)
I2C0_EV_IRQHandler
B I2C0_EV_IRQHandler
PUBWEAK I2C1_EV_IRQHandler
SECTION .text:CODE:NOROOT:REORDER(1)
I2C1_EV_IRQHandler
B I2C1_EV_IRQHandler
PUBWEAK SPI0_IRQHandler
SECTION .text:CODE:NOROOT:REORDER(1)
SPI0_IRQHandler
B SPI0_IRQHandler
PUBWEAK SPI1_IRQHandler
SECTION .text:CODE:NOROOT:REORDER(1)
SPI1_IRQHandler
B SPI1_IRQHandler
PUBWEAK USART0_IRQHandler
SECTION .text:CODE:NOROOT:REORDER(1)
USART0_IRQHandler
B USART0_IRQHandler
PUBWEAK USART1_IRQHandler
SECTION .text:CODE:NOROOT:REORDER(1)
USART1_IRQHandler
B USART1_IRQHandler
PUBWEAK I2C0_ER_IRQHandler
SECTION .text:CODE:NOROOT:REORDER(1)
I2C0_ER_IRQHandler
B I2C0_ER_IRQHandler
PUBWEAK I2C1_ER_IRQHandler
SECTION .text:CODE:NOROOT:REORDER(1)
I2C1_ER_IRQHandler
B I2C1_ER_IRQHandler
END
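
Both startup files publish every interrupt handler as a weak symbol that spins in place, so an application overrides one simply by defining a function with the same name in C. A minimal sketch, using the NVIC helpers provided by core_cm23.h:

#include "gd32e23x.h"

/* Overrides the weak TIMER2 vector from the startup file. */
void TIMER2_IRQHandler(void)
{
    /* clear the timer interrupt flag and handle the update event here */
}

static void demo_enable_timer2_irq(void)
{
    NVIC_SetPriority(TIMER2_IRQn, 1U);   /* only 2 priority bits on this core */
    NVIC_EnableIRQ(TIMER2_IRQn);
}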

system_gd32e23x.c Normal file

@@ -0,0 +1,451 @@
/*!
\file system_gd32e23x.c
\brief CMSIS Cortex-M23 Device Peripheral Access Layer Source File for
GD32E23x Device Series
*/
/* Copyright (c) 2012 ARM LIMITED
Copyright (c) 2025, GigaDevice Semiconductor Inc.
All rights reserved.
Redistribution and use in source and binary forms, with or without
modification, are permitted provided that the following conditions are met:
- Redistributions of source code must retain the above copyright
notice, this list of conditions and the following disclaimer.
- Redistributions in binary form must reproduce the above copyright
notice, this list of conditions and the following disclaimer in the
documentation and/or other materials provided with the distribution.
- Neither the name of ARM nor the names of its contributors may be used
to endorse or promote products derived from this software without
specific prior written permission.
*
THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
ARE DISCLAIMED. IN NO EVENT SHALL COPYRIGHT HOLDERS AND CONTRIBUTORS BE
LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS
INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN
CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE
POSSIBILITY OF SUCH DAMAGE.
---------------------------------------------------------------------------*/
/* This file follows the CMSIS standard; some adjustments are made for GigaDevice chips */
#include "gd32e23x.h"
/* system frequency define */
#define __IRC8M (IRC8M_VALUE) /* internal 8 MHz RC oscillator frequency */
#define __HXTAL (HXTAL_VALUE) /* high speed crystal oscillator frequency */
#define __SYS_OSC_CLK (__IRC8M) /* main oscillator frequency */
#define VECT_TAB_OFFSET (uint32_t)0x00 /* vector table base offset */
/* select a system clock by uncommenting the following line */
//#define __SYSTEM_CLOCK_8M_HXTAL (__HXTAL)
//#define __SYSTEM_CLOCK_8M_IRC8M (__IRC8M)
#define __SYSTEM_CLOCK_72M_PLL_HXTAL (uint32_t)(72000000)
//#define __SYSTEM_CLOCK_72M_PLL_IRC8M_DIV2 (uint32_t)(72000000)
/* The following macro guards against Vcore fluctuations caused by frequency switching.
   It is strongly recommended to keep it; removing it can cause instability.
*/
#define RCU_MODIFY(__delay) do{ \
volatile uint32_t i,reg; \
if(0 != __delay){ \
reg = RCU_CFG0; \
reg &= ~(RCU_CFG0_AHBPSC); \
/* CK_AHB = SYSCLK/2 */ \
reg |= RCU_AHB_CKSYS_DIV2; \
RCU_CFG0 = reg; \
for(i=0; i<__delay; i++){ \
} \
reg = RCU_CFG0; \
reg &= ~(RCU_CFG0_AHBPSC); \
reg |= RCU_AHB_CKSYS_DIV4; \
/* CK_AHB = SYSCLK/4 */ \
RCU_CFG0 = reg; \
for(i=0; i<__delay; i++){ \
} \
} \
}while(0)
#define SEL_IRC8M 0x00
#define SEL_HXTAL 0x01
#define SEL_PLL 0x02
/* set the system clock frequency and declare the system clock configuration function */
#ifdef __SYSTEM_CLOCK_8M_HXTAL
uint32_t SystemCoreClock = __SYSTEM_CLOCK_8M_HXTAL;
static void system_clock_8m_hxtal(void);
#elif defined (__SYSTEM_CLOCK_72M_PLL_HXTAL)
uint32_t SystemCoreClock = __SYSTEM_CLOCK_72M_PLL_HXTAL;
static void system_clock_72m_hxtal(void);
#elif defined (__SYSTEM_CLOCK_72M_PLL_IRC8M_DIV2)
uint32_t SystemCoreClock = __SYSTEM_CLOCK_72M_PLL_IRC8M_DIV2;
static void system_clock_72m_irc8m(void);
#else
uint32_t SystemCoreClock = __SYSTEM_CLOCK_8M_IRC8M;
static void system_clock_8m_irc8m(void);
#endif /* __SYSTEM_CLOCK_8M_HXTAL */
/* configure the system clock */
static void system_clock_config(void);
/* software delay that limits the impact of Vcore fluctuations during frequency switching.
   It is strongly recommended to keep it; removing it can cause instability. */
static void _soft_delay_(uint32_t time)
{
__IO uint32_t i;
for(i=0; i<time*10; i++){
}
}
/*!
\brief setup the microcontroller system, initialize the system
\param[in] none
\param[out] none
\retval none
*/
void SystemInit (void)
{
/* enable IRC8M */
RCU_CTL0 |= RCU_CTL0_IRC8MEN;
while(0U == (RCU_CTL0 & RCU_CTL0_IRC8MSTB)){
}
if(((RCU_CFG0 & RCU_CFG0_SCSS) == RCU_SCSS_PLL)){
RCU_MODIFY(0x80);
}
RCU_CFG0 &= ~RCU_CFG0_SCS;
_soft_delay_(100);
RCU_CTL0 &= ~(RCU_CTL0_HXTALEN | RCU_CTL0_CKMEN | RCU_CTL0_PLLEN | RCU_CTL0_HXTALBPS);
/* reset RCU */
RCU_CFG0 &= ~(RCU_CFG0_SCS | RCU_CFG0_AHBPSC | RCU_CFG0_APB1PSC | RCU_CFG0_APB2PSC |\
RCU_CFG0_ADCPSC | RCU_CFG0_CKOUTSEL | RCU_CFG0_CKOUTDIV | RCU_CFG0_PLLDV);
RCU_CFG0 &= ~(RCU_CFG0_PLLSEL | RCU_CFG0_PLLMF | RCU_CFG0_PLLMF4 | RCU_CFG0_PLLDV);
RCU_CFG1 &= ~(RCU_CFG1_PREDV);
RCU_CFG2 &= ~(RCU_CFG2_USART0SEL | RCU_CFG2_ADCSEL);
RCU_CFG2 &= ~RCU_CFG2_IRC28MDIV;
RCU_CFG2 &= ~RCU_CFG2_ADCPSC2;
RCU_CTL1 &= ~RCU_CTL1_IRC28MEN;
RCU_INT = 0x00000000U;
/* configure system clock */
system_clock_config();
#ifdef VECT_TAB_SRAM
nvic_vector_table_set(NVIC_VECTTAB_RAM,VECT_TAB_OFFSET);
#else
nvic_vector_table_set(NVIC_VECTTAB_FLASH,VECT_TAB_OFFSET);
#endif
}
/*!
\brief configure the system clock
\param[in] none
\param[out] none
\retval none
*/
static void system_clock_config(void)
{
#ifdef __SYSTEM_CLOCK_8M_HXTAL
system_clock_8m_hxtal();
#elif defined (__SYSTEM_CLOCK_72M_PLL_HXTAL)
system_clock_72m_hxtal();
#elif defined (__SYSTEM_CLOCK_72M_PLL_IRC8M_DIV2)
system_clock_72m_irc8m();
#elif defined (__SYSTEM_CLOCK_72M_PLL_IRC48M_DIV2)
system_clock_72m_irc48m();
#else
system_clock_8m_irc8m();
#endif /* __SYSTEM_CLOCK_8M_HXTAL */
}
#ifdef __SYSTEM_CLOCK_8M_HXTAL
/*!
\brief configure the system clock to 8M by HXTAL
\param[in] none
\param[out] none
\retval none
*/
static void system_clock_8m_hxtal(void)
{
uint32_t timeout = 0U;
uint32_t stab_flag = 0U;
__IO uint32_t reg_temp;
/* enable HXTAL */
RCU_CTL0 |= RCU_CTL0_HXTALEN;
/* wait until HXTAL is stable or the startup time is longer than HXTAL_STARTUP_TIMEOUT */
do{
timeout++;
stab_flag = (RCU_CTL0 & RCU_CTL0_HXTALSTB);
}
while((0U == stab_flag) && (HXTAL_STARTUP_TIMEOUT != timeout));
/* if fail */
if(0U == (RCU_CTL0 & RCU_CTL0_HXTALSTB)){
while(1){
}
}
/* HXTAL is stable */
/* AHB = SYSCLK */
RCU_CFG0 |= RCU_AHB_CKSYS_DIV1;
/* APB2 = AHB */
RCU_CFG0 |= RCU_APB2_CKAHB_DIV1;
/* APB1 = AHB */
RCU_CFG0 |= RCU_APB1_CKAHB_DIV1;
reg_temp = RCU_CFG0;
/* select HXTAL as system clock */
reg_temp &= ~RCU_CFG0_SCS;
reg_temp |= RCU_CKSYSSRC_HXTAL;
RCU_CFG0 = reg_temp;
/* wait until HXTAL is selected as system clock */
while(RCU_SCSS_HXTAL != (RCU_CFG0 & RCU_CFG0_SCSS)){
}
}
#elif defined (__SYSTEM_CLOCK_72M_PLL_HXTAL)
/*!
\brief configure the system clock to 72M by PLL which selects HXTAL as its clock source
\param[in] none
\param[out] none
\retval none
*/
static void system_clock_72m_hxtal(void)
{
uint32_t timeout = 0U;
uint32_t stab_flag = 0U;
__IO uint32_t reg_temp;
/* enable HXTAL */
RCU_CTL0 |= RCU_CTL0_HXTALEN;
/* wait until HXTAL is stable or the startup time is longer than HXTAL_STARTUP_TIMEOUT */
do{
timeout++;
stab_flag = (RCU_CTL0 & RCU_CTL0_HXTALSTB);
}
while((0U == stab_flag) && (HXTAL_STARTUP_TIMEOUT != timeout));
/* if fail */
if(0U == (RCU_CTL0 & RCU_CTL0_HXTALSTB)){
while(1){
}
}
FMC_WS = (FMC_WS & (~FMC_WS_WSCNT)) | WS_WSCNT_2;
/* HXTAL is stable */
/* AHB = SYSCLK */
RCU_CFG0 |= RCU_AHB_CKSYS_DIV1;
/* APB2 = AHB */
RCU_CFG0 |= RCU_APB2_CKAHB_DIV1;
/* APB1 = AHB */
RCU_CFG0 |= RCU_APB1_CKAHB_DIV1;
/* PLL = HXTAL * 9 = 72 MHz */
RCU_CFG0 &= ~(RCU_CFG0_PLLSEL | RCU_CFG0_PLLMF | RCU_CFG0_PLLDV);
RCU_CFG0 |= (RCU_PLLSRC_HXTAL | RCU_PLL_MUL9);
/* enable PLL */
RCU_CTL0 |= RCU_CTL0_PLLEN;
/* wait until PLL is stable */
while(0U == (RCU_CTL0 & RCU_CTL0_PLLSTB)){
}
reg_temp = RCU_CFG0;
/* select PLL as system clock */
reg_temp &= ~RCU_CFG0_SCS;
reg_temp |= RCU_CKSYSSRC_PLL;
RCU_CFG0 = reg_temp;
/* wait until PLL is selected as system clock */
while(RCU_SCSS_PLL != (RCU_CFG0 & RCU_CFG0_SCSS)){
}
}
#elif defined (__SYSTEM_CLOCK_72M_PLL_IRC8M_DIV2)
/*!
\brief configure the system clock to 72M by PLL which selects IRC8M/2 as its clock source
\param[in] none
\param[out] none
\retval none
*/
static void system_clock_72m_irc8m(void)
{
uint32_t timeout = 0U;
uint32_t stab_flag = 0U;
__IO uint32_t reg_temp;
/* enable IRC8M */
RCU_CTL0 |= RCU_CTL0_IRC8MEN;
/* wait until IRC8M is stable or the startup time is longer than IRC8M_STARTUP_TIMEOUT */
do{
timeout++;
stab_flag = (RCU_CTL0 & RCU_CTL0_IRC8MSTB);
}
while((0U == stab_flag) && (IRC8M_STARTUP_TIMEOUT != timeout));
/* if fail */
if(0U == (RCU_CTL0 & RCU_CTL0_IRC8MSTB)){
while(1){
}
}
FMC_WS = (FMC_WS & (~FMC_WS_WSCNT)) | WS_WSCNT_2;
/* AHB = SYSCLK */
RCU_CFG0 |= RCU_AHB_CKSYS_DIV1;
/* APB2 = AHB */
RCU_CFG0 |= RCU_APB2_CKAHB_DIV1;
/* APB1 = AHB */
RCU_CFG0 |= RCU_APB1_CKAHB_DIV1;
/* PLL = (IRC8M/2) * 18 = 72 MHz */
RCU_CFG0 &= ~(RCU_CFG0_PLLSEL | RCU_CFG0_PLLMF);
RCU_CFG0 |= (RCU_PLLSRC_IRC8M_DIV2 | RCU_PLL_MUL18);
/* enable PLL */
RCU_CTL0 |= RCU_CTL0_PLLEN;
/* wait until PLL is stable */
while(0U == (RCU_CTL0 & RCU_CTL0_PLLSTB)){
}
reg_temp = RCU_CFG0;
/* select PLL as system clock */
reg_temp &= ~RCU_CFG0_SCS;
reg_temp |= RCU_CKSYSSRC_PLL;
RCU_CFG0 = reg_temp;
/* wait until PLL is selected as system clock */
while(RCU_SCSS_PLL != (RCU_CFG0 & RCU_CFG0_SCSS)){
}
}
#else
/*!
\brief configure the system clock to 8M by IRC8M
\param[in] none
\param[out] none
\retval none
*/
static void system_clock_8m_irc8m(void)
{
uint32_t timeout = 0U;
uint32_t stab_flag = 0U;
__IO uint32_t reg_temp;
/* enable IRC8M */
RCU_CTL0 |= RCU_CTL0_IRC8MEN;
/* wait until IRC8M is stable or the startup time is longer than IRC8M_STARTUP_TIMEOUT */
do{
timeout++;
stab_flag = (RCU_CTL0 & RCU_CTL0_IRC8MSTB);
}
while((0U == stab_flag) && (IRC8M_STARTUP_TIMEOUT != timeout));
/* if fail */
if(0U == (RCU_CTL0 & RCU_CTL0_IRC8MSTB)){
while(1){
}
}
/* AHB = SYSCLK */
RCU_CFG0 |= RCU_AHB_CKSYS_DIV1;
/* APB2 = AHB */
RCU_CFG0 |= RCU_APB2_CKAHB_DIV1;
/* APB1 = AHB */
RCU_CFG0 |= RCU_APB1_CKAHB_DIV1;
reg_temp = RCU_CFG0;
/* select IRC8M as system clock */
reg_temp &= ~RCU_CFG0_SCS;
reg_temp |= RCU_CKSYSSRC_IRC8M;
RCU_CFG0 = reg_temp;
/* wait until IRC8M is selected as system clock */
while(RCU_SCSS_IRC8M != (RCU_CFG0 & RCU_CFG0_SCSS)){
}
}
#endif /* __SYSTEM_CLOCK_8M_HXTAL */
/*!
\brief update the SystemCoreClock with current core clock retrieved from cpu registers
\param[in] none
\param[out] none
\retval none
*/
void SystemCoreClockUpdate (void)
{
uint32_t sws = 0U;
uint32_t pllmf = 0U, pllmf4 = 0U, pllsel = 0U, prediv = 0U, idx = 0U, clk_exp = 0U;
/* exponent of AHB clock divider */
const uint8_t ahb_exp[16] = {0, 0, 0, 0, 0, 0, 0, 0, 1, 2, 3, 4, 6, 7, 8, 9};
sws = GET_BITS(RCU_CFG0, 2, 3);
switch(sws){
/* IRC8M is selected as CK_SYS */
case SEL_IRC8M:
SystemCoreClock = IRC8M_VALUE;
break;
/* HXTAL is selected as CK_SYS */
case SEL_HXTAL:
SystemCoreClock = HXTAL_VALUE;
break;
/* PLL is selected as CK_SYS */
case SEL_PLL:
/* get the value of PLLMF[3:0] */
pllmf = GET_BITS(RCU_CFG0, 18, 21);
pllmf4 = GET_BITS(RCU_CFG0, 27, 27);
/* PLLMF[4] is the high bit of the multiplier field */
if(1U == pllmf4){
pllmf += 17U;
}else if(15U == pllmf){
pllmf = 16U;
} else {
pllmf += 2U;
}
/* PLL clock source selection, HXTAL or IRC8M/2 */
pllsel = GET_BITS(RCU_CFG0, 16, 16);
if(0U != pllsel){
prediv = (GET_BITS(RCU_CFG1, 0, 3) + 1U);
SystemCoreClock = (HXTAL_VALUE / prediv) * pllmf;
} else {
SystemCoreClock = (IRC8M_VALUE >> 1) * pllmf;
}
break;
/* IRC8M is selected as CK_SYS */
default:
SystemCoreClock = IRC8M_VALUE;
break;
}
/* calculate AHB clock frequency */
idx = GET_BITS(RCU_CFG0, 4, 7);
clk_exp = ahb_exp[idx];
SystemCoreClock >>= clk_exp;
}
#ifdef __FIRMWARE_VERSION_DEFINE
/*!
\brief get firmware version
\param[in] none
\param[out] none
\retval firmware version
*/
uint32_t gd32e23x_firmware_version_get(void)
{
return __GD32E23x_STDPERIPH_VERSION;
}
#endif /* __FIRMWARE_VERSION_DEFINE */
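
gd32e23x_firmware_version_get() returns the packed word built from the __GD32E23x_STDPERIPH_VERSION_* fields in gd32e23x.h ([31:24] main, [23:16] sub1, [15:8] sub2, [7:0] release candidate). A small sketch that unpacks it; printf is used only for illustration and assumes some retargeted output:

#include <stdio.h>
#include "gd32e23x.h"

static void demo_print_fw_version(void)
{
    uint32_t v = gd32e23x_firmware_version_get();
    printf("firmware V%u.%u.%u rc%u\n",
           (unsigned)(v >> 24), (unsigned)((v >> 16) & 0xFFU),
           (unsigned)((v >> 8) & 0xFFU), (unsigned)(v & 0xFFU));
}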

SDK/CMSIS/cmsis_armclang.h Normal file

@@ -0,0 +1,707 @@
/**************************************************************************//**
* @file cmsis_armclang.h
* @brief CMSIS compiler armclang (Arm Compiler 6) header file
* @version V6.0.0
* @date 27. July 2024
******************************************************************************/
/*
* Copyright (c) 2009-2023 Arm Limited. All rights reserved.
*
* SPDX-License-Identifier: Apache-2.0
*
* Licensed under the Apache License, Version 2.0 (the License); you may
* not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an AS IS BASIS, WITHOUT
* WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
#ifndef __CMSIS_ARMCLANG_H
#define __CMSIS_ARMCLANG_H
#pragma clang system_header /* treat file as system include file */
#if (__ARM_ACLE >= 200)
#include <arm_acle.h>
#else
#error Compiler must support ACLE V2.0
#endif /* (__ARM_ACLE >= 200) */
/* CMSIS compiler specific defines */
#ifndef __ASM
#define __ASM __asm
#endif
#ifndef __INLINE
#define __INLINE inline
#endif
#ifndef __STATIC_INLINE
#define __STATIC_INLINE static inline
#endif
#ifndef __STATIC_FORCEINLINE
#define __STATIC_FORCEINLINE __attribute__((always_inline)) static inline
#endif
#ifndef __NO_RETURN
#define __NO_RETURN __attribute__((__noreturn__))
#endif
#ifndef CMSIS_DEPRECATED
#define CMSIS_DEPRECATED __attribute__((deprecated))
#endif
#ifndef __USED
#define __USED __attribute__((used))
#endif
#ifndef __WEAK
#define __WEAK __attribute__((weak))
#endif
#ifndef __PACKED
#define __PACKED __attribute__((packed, aligned(1)))
#endif
#ifndef __PACKED_STRUCT
#define __PACKED_STRUCT struct __attribute__((packed, aligned(1)))
#endif
#ifndef __PACKED_UNION
#define __PACKED_UNION union __attribute__((packed, aligned(1)))
#endif
#ifndef __UNALIGNED_UINT16_WRITE
#pragma clang diagnostic push
#pragma clang diagnostic ignored "-Wpacked"
__PACKED_STRUCT T_UINT16_WRITE { uint16_t v; };
#pragma clang diagnostic pop
#define __UNALIGNED_UINT16_WRITE(addr, val) (void)((((struct T_UINT16_WRITE *)(void *)(addr))->v) = (val))
#endif
#ifndef __UNALIGNED_UINT16_READ
#pragma clang diagnostic push
#pragma clang diagnostic ignored "-Wpacked"
__PACKED_STRUCT T_UINT16_READ { uint16_t v; };
#pragma clang diagnostic pop
#define __UNALIGNED_UINT16_READ(addr) (((const struct T_UINT16_READ *)(const void *)(addr))->v)
#endif
#ifndef __UNALIGNED_UINT32_WRITE
#pragma clang diagnostic push
#pragma clang diagnostic ignored "-Wpacked"
__PACKED_STRUCT T_UINT32_WRITE { uint32_t v; };
#pragma clang diagnostic pop
#define __UNALIGNED_UINT32_WRITE(addr, val) (void)((((struct T_UINT32_WRITE *)(void *)(addr))->v) = (val))
#endif
#ifndef __UNALIGNED_UINT32_READ
#pragma clang diagnostic push
#pragma clang diagnostic ignored "-Wpacked"
__PACKED_STRUCT T_UINT32_READ { uint32_t v; };
#pragma clang diagnostic pop
#define __UNALIGNED_UINT32_READ(addr) (((const struct T_UINT32_READ *)(const void *)(addr))->v)
#endif
#ifndef __ALIGNED
#define __ALIGNED(x) __attribute__((aligned(x)))
#endif
#ifndef __RESTRICT
#define __RESTRICT __restrict
#endif
#ifndef __COMPILER_BARRIER
#define __COMPILER_BARRIER() __ASM volatile("":::"memory")
#endif
#ifndef __NO_INIT
#define __NO_INIT __attribute__ ((section (".bss.noinit")))
#endif
#ifndef __ALIAS
#define __ALIAS(x) __attribute__ ((alias(x)))
#endif
/* ########################## Core Instruction Access ######################### */
/** \defgroup CMSIS_Core_InstructionInterface CMSIS Core Instruction Interface
Access to dedicated instructions
@{
*/
/* Define macros for porting to both thumb1 and thumb2.
* For thumb1, use low register (r0-r7), specified by constraint "l"
* Otherwise, use general registers, specified by constraint "r" */
#if defined (__thumb__) && !defined (__thumb2__)
#define __CMSIS_GCC_OUT_REG(r) "=l" (r)
#define __CMSIS_GCC_RW_REG(r) "+l" (r)
#define __CMSIS_GCC_USE_REG(r) "l" (r)
#else
#define __CMSIS_GCC_OUT_REG(r) "=r" (r)
#define __CMSIS_GCC_RW_REG(r) "+r" (r)
#define __CMSIS_GCC_USE_REG(r) "r" (r)
#endif
/**
\brief No Operation
\details No Operation does nothing. This instruction can be used for code alignment purposes.
*/
#define __NOP() __nop()
/**
\brief Wait For Interrupt
\details Wait For Interrupt is a hint instruction that suspends execution until one of a number of events occurs.
*/
#define __WFI() __wfi()
/**
\brief Wait For Event
\details Wait For Event is a hint instruction that permits the processor to enter
a low-power state until one of a number of events occurs.
*/
#define __WFE() __wfe()
/**
\brief Send Event
\details Send Event is a hint instruction. It causes an event to be signaled to the CPU.
*/
#define __SEV() __sev()
/**
\brief Instruction Synchronization Barrier
\details Instruction Synchronization Barrier flushes the pipeline in the processor,
so that all instructions following the ISB are fetched from cache or memory,
after the instruction has been completed.
*/
#define __ISB() __isb(0xF)
/**
\brief Data Synchronization Barrier
\details Acts as a special kind of Data Memory Barrier.
It completes when all explicit memory accesses before this instruction complete.
*/
#define __DSB() __dsb(0xF)
/**
\brief Data Memory Barrier
\details Ensures the apparent order of the explicit memory operations before
and after the instruction, without ensuring their completion.
*/
#define __DMB() __dmb(0xF)
/**
\brief Reverse byte order (32 bit)
\details Reverses the byte order in unsigned integer value. For example, 0x12345678 becomes 0x78563412.
\param [in] value Value to reverse
\return Reversed value
*/
#define __REV(value) __rev(value)
/**
\brief Reverse byte order (16 bit)
\details Reverses the byte order within each halfword of a word. For example, 0x12345678 becomes 0x34127856.
\param [in] value Value to reverse
\return Reversed value
*/
#define __REV16(value) __rev16(value)
/**
\brief Reverse byte order (16 bit)
\details Reverses the byte order in a 16-bit value and returns the signed 16-bit result. For example, 0x0080 becomes 0x8000.
\param [in] value Value to reverse
\return Reversed value
*/
#define __REVSH(value) __revsh(value)
/**
\brief Rotate Right in unsigned value (32 bit)
\details Rotate Right (immediate) provides the value of the contents of a register rotated by a variable number of bits.
\param [in] op1 Value to rotate
\param [in] op2 Number of Bits to rotate
\return Rotated value
*/
#define __ROR(op1, op2) __ror(op1, op2)
/**
\brief Breakpoint
\details Causes the processor to enter Debug state.
Debug tools can use this to investigate system state when the instruction at a particular address is reached.
\param [in] value is ignored by the processor.
If required, a debugger can use it to store additional information about the breakpoint.
*/
#define __BKPT(value) __ASM volatile ("bkpt "#value)
/**
\brief Reverse bit order of value
\details Reverses the bit order of the given value.
\param [in] value Value to reverse
\return Reversed value
*/
#define __RBIT(value) __rbit(value)
/**
\brief Count leading zeros
\details Counts the number of leading zeros of a data value.
\param [in] value Value to count the leading zeros
\return number of leading zeros in value
*/
#define __CLZ(value) __clz(value)
#if ((__ARM_FEATURE_SAT >= 1) && \
(__ARM_ARCH_ISA_THUMB >= 2) )
/* __ARM_FEATURE_SAT is wrong for Armv8-M Baseline devices */
/**
\brief Signed Saturate
\details Saturates a signed value.
\param [in] value Value to be saturated
\param [in] sat Bit position to saturate to (1..32)
\return Saturated value
*/
#define __SSAT(value, sat) __ssat(value, sat)
/**
\brief Unsigned Saturate
\details Saturates an unsigned value.
\param [in] value Value to be saturated
\param [in] sat Bit position to saturate to (0..31)
\return Saturated value
*/
#define __USAT(value, sat) __usat(value, sat)
#else /* (__ARM_FEATURE_SAT >= 1) */
/**
\brief Signed Saturate
\details Saturates a signed value.
\param [in] value Value to be saturated
\param [in] sat Bit position to saturate to (1..32)
\return Saturated value
*/
__STATIC_FORCEINLINE int32_t __SSAT(int32_t val, uint32_t sat)
{
if ((sat >= 1U) && (sat <= 32U))
{
const int32_t max = (int32_t)((1U << (sat - 1U)) - 1U);
const int32_t min = -1 - max ;
if (val > max)
{
return (max);
}
else if (val < min)
{
return (min);
}
}
return (val);
}
/**
\brief Unsigned Saturate
\details Saturates an unsigned value.
\param [in] value Value to be saturated
\param [in] sat Bit position to saturate to (0..31)
\return Saturated value
*/
__STATIC_FORCEINLINE uint32_t __USAT(int32_t val, uint32_t sat)
{
if (sat <= 31U)
{
const uint32_t max = ((1U << sat) - 1U);
if (val > (int32_t)max)
{
return (max);
}
else if (val < 0)
{
return (0U);
}
}
return ((uint32_t)val);
}
#endif /* (__ARM_FEATURE_SAT >= 1) */
#if (__ARM_FEATURE_LDREX >= 1)
/**
\brief Remove the exclusive lock
\details Removes the exclusive lock which is created by LDREX.
*/
#define __CLREX __builtin_arm_clrex
/**
\brief LDR Exclusive (8 bit)
\details Executes a exclusive LDR instruction for 8 bit value.
\param [in] ptr Pointer to data
\return value of type uint8_t at (*ptr)
*/
#define __LDREXB (uint8_t)__builtin_arm_ldrex
/**
\brief STR Exclusive (8 bit)
\details Executes a exclusive STR instruction for 8 bit values.
\param [in] value Value to store
\param [in] ptr Pointer to location
\return 0 Function succeeded
\return 1 Function failed
*/
#define __STREXB (uint32_t)__builtin_arm_strex
#endif /* (__ARM_FEATURE_LDREX >= 1) */
#if (__ARM_FEATURE_LDREX >= 2)
/**
\brief LDR Exclusive (16 bit)
\details Executes a exclusive LDR instruction for 16 bit values.
\param [in] ptr Pointer to data
\return value of type uint16_t at (*ptr)
*/
#define __LDREXH (uint16_t)__builtin_arm_ldrex
/**
\brief STR Exclusive (16 bit)
\details Executes a exclusive STR instruction for 16 bit values.
\param [in] value Value to store
\param [in] ptr Pointer to location
\return 0 Function succeeded
\return 1 Function failed
*/
#define __STREXH (uint32_t)__builtin_arm_strex
#endif /* (__ARM_FEATURE_LDREX >= 2) */
#if (__ARM_FEATURE_LDREX >= 4)
/**
\brief LDR Exclusive (32 bit)
\details Executes a exclusive LDR instruction for 32 bit values.
\param [in] ptr Pointer to data
\return value of type uint32_t at (*ptr)
*/
#define __LDREXW (uint32_t)__builtin_arm_ldrex
/**
\brief STR Exclusive (32 bit)
\details Executes a exclusive STR instruction for 32 bit values.
\param [in] value Value to store
\param [in] ptr Pointer to location
\return 0 Function succeeded
\return 1 Function failed
*/
#define __STREXW (uint32_t)__builtin_arm_strex
#endif /* (__ARM_FEATURE_LDREX >= 4) */
#if (__ARM_ARCH_ISA_THUMB >= 2)
/**
\brief Rotate Right with Extend (32 bit)
\details Moves each bit of a bitstring right by one bit.
The carry input is shifted in at the left end of the bitstring.
\param [in] value Value to rotate
\return Rotated value
*/
__STATIC_FORCEINLINE uint32_t __RRX(uint32_t value)
{
uint32_t result;
__ASM volatile ("rrx %0, %1" : "=r" (result) : "r" (value));
return (result);
}
/**
\brief LDRT Unprivileged (8 bit)
\details Executes a Unprivileged LDRT instruction for 8 bit value.
\param [in] ptr Pointer to data
\return value of type uint8_t at (*ptr)
*/
__STATIC_FORCEINLINE uint8_t __LDRBT(volatile uint8_t *ptr)
{
uint32_t result;
__ASM volatile ("ldrbt %0, %1" : "=r" (result) : "Q" (*ptr) );
return ((uint8_t)result); /* Add explicit type cast here */
}
/**
\brief LDRT Unprivileged (16 bit)
\details Executes a Unprivileged LDRT instruction for 16 bit values.
\param [in] ptr Pointer to data
\return value of type uint16_t at (*ptr)
*/
__STATIC_FORCEINLINE uint16_t __LDRHT(volatile uint16_t *ptr)
{
uint32_t result;
__ASM volatile ("ldrht %0, %1" : "=r" (result) : "Q" (*ptr) );
return ((uint16_t)result); /* Add explicit type cast here */
}
/**
\brief LDRT Unprivileged (32 bit)
\details Executes a Unprivileged LDRT instruction for 32 bit values.
\param [in] ptr Pointer to data
\return value of type uint32_t at (*ptr)
*/
__STATIC_FORCEINLINE uint32_t __LDRT(volatile uint32_t *ptr)
{
uint32_t result;
__ASM volatile ("ldrt %0, %1" : "=r" (result) : "Q" (*ptr) );
return (result);
}
#endif /* (__ARM_ARCH_ISA_THUMB >= 2) */
#if (__ARM_ARCH >= 8)
/**
\brief Load-Acquire (8 bit)
\details Executes a LDAB instruction for 8 bit value.
\param [in] ptr Pointer to data
\return value of type uint8_t at (*ptr)
*/
__STATIC_FORCEINLINE uint8_t __LDAB(volatile uint8_t *ptr)
{
uint32_t result;
__ASM volatile ("ldab %0, %1" : "=r" (result) : "Q" (*ptr) : "memory" );
return ((uint8_t)result); /* Add explicit type cast here */
}
/**
\brief Load-Acquire (16 bit)
\details Executes a LDAH instruction for 16 bit values.
\param [in] ptr Pointer to data
\return value of type uint16_t at (*ptr)
*/
__STATIC_FORCEINLINE uint16_t __LDAH(volatile uint16_t *ptr)
{
uint32_t result;
__ASM volatile ("ldah %0, %1" : "=r" (result) : "Q" (*ptr) : "memory" );
return ((uint16_t)result); /* Add explicit type cast here */
}
/**
\brief Load-Acquire (32 bit)
\details Executes a LDA instruction for 32 bit values.
\param [in] ptr Pointer to data
\return value of type uint32_t at (*ptr)
*/
__STATIC_FORCEINLINE uint32_t __LDA(volatile uint32_t *ptr)
{
uint32_t result;
__ASM volatile ("lda %0, %1" : "=r" (result) : "Q" (*ptr) : "memory" );
return (result);
}
/**
\brief Store-Release (8 bit)
\details Executes a STLB instruction for 8 bit values.
\param [in] value Value to store
\param [in] ptr Pointer to location
*/
__STATIC_FORCEINLINE void __STLB(uint8_t value, volatile uint8_t *ptr)
{
__ASM volatile ("stlb %1, %0" : "=Q" (*ptr) : "r" ((uint32_t)value) : "memory" );
}
/**
\brief Store-Release (16 bit)
\details Executes a STLH instruction for 16 bit values.
\param [in] value Value to store
\param [in] ptr Pointer to location
*/
__STATIC_FORCEINLINE void __STLH(uint16_t value, volatile uint16_t *ptr)
{
__ASM volatile ("stlh %1, %0" : "=Q" (*ptr) : "r" ((uint32_t)value) : "memory" );
}
/**
\brief Store-Release (32 bit)
\details Executes a STL instruction for 32 bit values.
\param [in] value Value to store
\param [in] ptr Pointer to location
*/
__STATIC_FORCEINLINE void __STL(uint32_t value, volatile uint32_t *ptr)
{
__ASM volatile ("stl %1, %0" : "=Q" (*ptr) : "r" ((uint32_t)value) : "memory" );
}
/**
\brief Load-Acquire Exclusive (8 bit)
\details Executes a LDAB exclusive instruction for 8 bit value.
\param [in] ptr Pointer to data
\return value of type uint8_t at (*ptr)
*/
#define __LDAEXB (uint8_t)__builtin_arm_ldaex
/**
\brief Load-Acquire Exclusive (16 bit)
\details Executes a LDAH exclusive instruction for 16 bit values.
\param [in] ptr Pointer to data
\return value of type uint16_t at (*ptr)
*/
#define __LDAEXH (uint16_t)__builtin_arm_ldaex
/**
\brief Load-Acquire Exclusive (32 bit)
\details Executes a LDA exclusive instruction for 32 bit values.
\param [in] ptr Pointer to data
\return value of type uint32_t at (*ptr)
*/
#define __LDAEX (uint32_t)__builtin_arm_ldaex
/**
\brief Store-Release Exclusive (8 bit)
\details Executes a STLB exclusive instruction for 8 bit values.
\param [in] value Value to store
\param [in] ptr Pointer to location
\return 0 Function succeeded
\return 1 Function failed
*/
#define __STLEXB (uint32_t)__builtin_arm_stlex
/**
\brief Store-Release Exclusive (16 bit)
\details Executes a STLH exclusive instruction for 16 bit values.
\param [in] value Value to store
\param [in] ptr Pointer to location
\return 0 Function succeeded
\return 1 Function failed
*/
#define __STLEXH (uint32_t)__builtin_arm_stlex
/**
\brief Store-Release Exclusive (32 bit)
\details Executes a STL exclusive instruction for 32 bit values.
\param [in] value Value to store
\param [in] ptr Pointer to location
\return 0 Function succeeded
\return 1 Function failed
*/
#define __STLEX (uint32_t)__builtin_arm_stlex
#endif /* (__ARM_ARCH >= 8) */
/** @}*/ /* end of group CMSIS_Core_InstructionInterface */
/* ########################### Core Function Access ########################### */
/** \ingroup CMSIS_Core_FunctionInterface
\defgroup CMSIS_Core_RegAccFunctions CMSIS Core Register Access Functions
@{
*/
/**
\brief Enable IRQ Interrupts
\details Enables IRQ interrupts by clearing special-purpose register PRIMASK.
Can only be executed in Privileged modes.
*/
#ifndef __ARM_COMPAT_H
__STATIC_FORCEINLINE void __enable_irq(void)
{
__ASM volatile ("cpsie i" : : : "memory");
}
#endif
/**
\brief Disable IRQ Interrupts
\details Disables IRQ interrupts by setting special-purpose register PRIMASK.
Can only be executed in Privileged modes.
*/
#ifndef __ARM_COMPAT_H
__STATIC_FORCEINLINE void __disable_irq(void)
{
__ASM volatile ("cpsid i" : : : "memory");
}
#endif
#if (__ARM_ARCH_ISA_THUMB >= 2)
/**
\brief Enable FIQ
\details Enables FIQ interrupts by clearing special-purpose register FAULTMASK.
Can only be executed in Privileged modes.
*/
__STATIC_FORCEINLINE void __enable_fault_irq(void)
{
__ASM volatile ("cpsie f" : : : "memory");
}
/**
\brief Disable FIQ
\details Disables FIQ interrupts by setting special-purpose register FAULTMASK.
Can only be executed in Privileged modes.
*/
__STATIC_FORCEINLINE void __disable_fault_irq(void)
{
__ASM volatile ("cpsid f" : : : "memory");
}
#endif
/**
\brief Get FPSCR
\details Returns the current value of the Floating Point Status/Control register.
\return Floating Point Status/Control register value
*/
__STATIC_FORCEINLINE uint32_t __get_FPSCR(void)
{
#if (defined(__ARM_FP) && (__ARM_FP >= 1))
return (__builtin_arm_get_fpscr());
#else
return (0U);
#endif
}
/**
\brief Set FPSCR
\details Assigns the given value to the Floating Point Status/Control register.
\param [in] fpscr Floating Point Status/Control value to set
*/
__STATIC_FORCEINLINE void __set_FPSCR(uint32_t fpscr)
{
#if (defined(__ARM_FP) && (__ARM_FP >= 1))
__builtin_arm_set_fpscr(fpscr);
#else
(void)fpscr;
#endif
}
/** @} end of CMSIS_Core_RegAccFunctions */
// Include the profile specific settings:
#if __ARM_ARCH_PROFILE == 'A'
#include "./a-profile/cmsis_armclang_a.h"
#elif __ARM_ARCH_PROFILE == 'R'
#include "./r-profile/cmsis_armclang_r.h"
#elif __ARM_ARCH_PROFILE == 'M'
#include "./m-profile/cmsis_armclang_m.h"
#else
#error "Unknown Arm architecture profile"
#endif
#endif /* __CMSIS_ARMCLANG_H */

708
SDK/CMSIS/cmsis_clang.h Normal file
View File

@@ -0,0 +1,708 @@
/**************************************************************************//**
* @file cmsis_clang.h
* @brief CMSIS compiler LLVM/Clang header file
* @version V6.0.0
* @date 27. July 2024
******************************************************************************/
/*
* Copyright (c) 2009-2023 Arm Limited. All rights reserved.
*
* SPDX-License-Identifier: Apache-2.0
*
* Licensed under the Apache License, Version 2.0 (the License); you may
* not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an AS IS BASIS, WITHOUT
* WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
#ifndef __CMSIS_CLANG_H
#define __CMSIS_CLANG_H
#pragma clang system_header /* treat file as system include file */
#if (__ARM_ACLE >= 200)
#include <arm_acle.h>
#else
#error Compiler must support ACLE V2.0
#endif /* (__ARM_ACLE >= 200) */
/* Fallback for __has_builtin */
#ifndef __has_builtin
#define __has_builtin(x) (0)
#endif
/* CMSIS compiler specific defines */
#ifndef __ASM
#define __ASM __asm
#endif
#ifndef __INLINE
#define __INLINE inline
#endif
#ifndef __STATIC_INLINE
#define __STATIC_INLINE static inline
#endif
#ifndef __STATIC_FORCEINLINE
#define __STATIC_FORCEINLINE __attribute__((always_inline)) static inline
#endif
#ifndef __NO_RETURN
#define __NO_RETURN __attribute__((__noreturn__))
#endif
#ifndef CMSIS_DEPRECATED
#define CMSIS_DEPRECATED __attribute__((deprecated))
#endif
#ifndef __USED
#define __USED __attribute__((used))
#endif
#ifndef __WEAK
#define __WEAK __attribute__((weak))
#endif
#ifndef __PACKED
#define __PACKED __attribute__((packed, aligned(1)))
#endif
#ifndef __PACKED_STRUCT
#define __PACKED_STRUCT struct __attribute__((packed, aligned(1)))
#endif
#ifndef __PACKED_UNION
#define __PACKED_UNION union __attribute__((packed, aligned(1)))
#endif
#ifndef __UNALIGNED_UINT16_WRITE
#pragma clang diagnostic push
#pragma clang diagnostic ignored "-Wpacked"
__PACKED_STRUCT T_UINT16_WRITE { uint16_t v; };
#pragma clang diagnostic pop
#define __UNALIGNED_UINT16_WRITE(addr, val) (void)((((struct T_UINT16_WRITE *)(void *)(addr))->v) = (val))
#endif
#ifndef __UNALIGNED_UINT16_READ
#pragma clang diagnostic push
#pragma clang diagnostic ignored "-Wpacked"
__PACKED_STRUCT T_UINT16_READ { uint16_t v; };
#pragma clang diagnostic pop
#define __UNALIGNED_UINT16_READ(addr) (((const struct T_UINT16_READ *)(const void *)(addr))->v)
#endif
#ifndef __UNALIGNED_UINT32_WRITE
#pragma clang diagnostic push
#pragma clang diagnostic ignored "-Wpacked"
__PACKED_STRUCT T_UINT32_WRITE { uint32_t v; };
#pragma clang diagnostic pop
#define __UNALIGNED_UINT32_WRITE(addr, val) (void)((((struct T_UINT32_WRITE *)(void *)(addr))->v) = (val))
#endif
#ifndef __UNALIGNED_UINT32_READ
#pragma clang diagnostic push
#pragma clang diagnostic ignored "-Wpacked"
__PACKED_STRUCT T_UINT32_READ { uint32_t v; };
#pragma clang diagnostic pop
#define __UNALIGNED_UINT32_READ(addr) (((const struct T_UINT32_READ *)(const void *)(addr))->v)
#endif
#ifndef __ALIGNED
#define __ALIGNED(x) __attribute__((aligned(x)))
#endif
#ifndef __RESTRICT
#define __RESTRICT __restrict
#endif
#ifndef __COMPILER_BARRIER
#define __COMPILER_BARRIER() __ASM volatile("":::"memory")
#endif
#ifndef __NO_INIT
#define __NO_INIT __attribute__ ((section (".noinit")))
#endif
#ifndef __ALIAS
#define __ALIAS(x) __attribute__ ((alias(x)))
#endif
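/* Usage sketch (illustrative, not part of the original header): typical use of
 * the attribute macros defined above; the type, variable and function names are
 * made up for the example.
 *
 *   __PACKED_STRUCT frame_hdr {              // 1-byte aligned wire format
 *       uint8_t  type;
 *       uint32_t length;                     // sits unaligned inside the struct
 *   };
 *
 *   __NO_INIT static uint32_t boot_count;    // kept in .noinit, survives reset
 *   __ALIGNED(8) static uint8_t dma_buf[64]; // 8-byte aligned buffer
 *
 *   static inline uint32_t read_len(const uint8_t *raw)
 *   {
 *       // read a 32-bit field from an arbitrarily aligned buffer offset
 *       return __UNALIGNED_UINT32_READ(raw + 1);
 *   }
 */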
/* ########################## Core Instruction Access ######################### */
/** \defgroup CMSIS_Core_InstructionInterface CMSIS Core Instruction Interface
Access to dedicated instructions
@{
*/
/* Define macros for porting to both thumb1 and thumb2.
* For thumb1, use low register (r0-r7), specified by constraint "l"
* Otherwise, use general registers, specified by constraint "r" */
#if defined (__thumb__) && !defined (__thumb2__)
#define __CMSIS_GCC_OUT_REG(r) "=l" (r)
#define __CMSIS_GCC_RW_REG(r) "+l" (r)
#define __CMSIS_GCC_USE_REG(r) "l" (r)
#else
#define __CMSIS_GCC_OUT_REG(r) "=r" (r)
#define __CMSIS_GCC_RW_REG(r) "+r" (r)
#define __CMSIS_GCC_USE_REG(r) "r" (r)
#endif
/**
\brief No Operation
\details No Operation does nothing. This instruction can be used for code alignment purposes.
*/
#define __NOP() __nop()
/**
\brief Wait For Interrupt
\details Wait For Interrupt is a hint instruction that suspends execution until one of a number of events occurs.
*/
#define __WFI() __wfi()
/**
\brief Wait For Event
\details Wait For Event is a hint instruction that permits the processor to enter
a low-power state until one of a number of events occurs.
*/
#define __WFE() __wfe()
/**
\brief Send Event
\details Send Event is a hint instruction. It causes an event to be signaled to the CPU.
*/
#define __SEV() __sev()
/**
\brief Instruction Synchronization Barrier
\details Instruction Synchronization Barrier flushes the pipeline in the processor,
so that all instructions following the ISB are fetched from cache or memory,
after the instruction has been completed.
*/
#define __ISB() __isb(0xF)
/**
\brief Data Synchronization Barrier
\details Acts as a special kind of Data Memory Barrier.
It completes when all explicit memory accesses before this instruction complete.
*/
#define __DSB() __dsb(0xF)
/**
\brief Data Memory Barrier
\details Ensures the apparent order of the explicit memory operations before
and after the instruction, without ensuring their completion.
*/
#define __DMB() __dmb(0xF)
/**
\brief Reverse byte order (32 bit)
\details Reverses the byte order in unsigned integer value. For example, 0x12345678 becomes 0x78563412.
\param [in] value Value to reverse
\return Reversed value
*/
#define __REV(value) __rev(value)
/**
\brief Reverse byte order (16 bit)
\details Reverses the byte order within each halfword of a word. For example, 0x12345678 becomes 0x34127856.
\param [in] value Value to reverse
\return Reversed value
*/
#define __REV16(value) __rev16(value)
/**
\brief Reverse byte order (16 bit)
\details Reverses the byte order in a 16-bit value and returns the signed 16-bit result. For example, 0x0080 becomes 0x8000.
\param [in] value Value to reverse
\return Reversed value
*/
#define __REVSH(value) __revsh(value)
/**
\brief Rotate Right in unsigned value (32 bit)
\details Rotate Right (immediate) provides the value of the contents of a register rotated by a variable number of bits.
\param [in] op1 Value to rotate
\param [in] op2 Number of Bits to rotate
\return Rotated value
*/
#define __ROR(op1, op2) __ror(op1, op2)
/**
\brief Breakpoint
\details Causes the processor to enter Debug state.
Debug tools can use this to investigate system state when the instruction at a particular address is reached.
\param [in] value is ignored by the processor.
If required, a debugger can use it to store additional information about the breakpoint.
*/
#define __BKPT(value) __ASM volatile ("bkpt "#value)
/**
\brief Reverse bit order of value
\details Reverses the bit order of the given value.
\param [in] value Value to reverse
\return Reversed value
*/
#define __RBIT(value) __rbit(value)
/**
\brief Count leading zeros
\details Counts the number of leading zeros of a data value.
\param [in] value Value to count the leading zeros
\return number of leading zeros in value
*/
#define __CLZ(value) __clz(value)
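/* Usage sketch (illustrative): the byte- and bit-reversal intrinsics above each
 * map to a single instruction; the comments show the expected results.
 *
 *   uint32_t be  = __REV(0x12345678U);        // 0x78563412 (bytes swapped for big-endian protocols)
 *   uint32_t rb  = __RBIT(0x00000001U);       // 0x80000000 (bit order reversed)
 *   uint32_t msb = 31U - __CLZ(0x00080000U);  // 19, index of the highest set bit
 */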
#if ((__ARM_FEATURE_SAT >= 1) && \
(__ARM_ARCH_ISA_THUMB >= 2) )
/* __ARM_FEATURE_SAT is wrong for Armv8-M Baseline devices */
/**
\brief Signed Saturate
\details Saturates a signed value.
\param [in] value Value to be saturated
\param [in] sat Bit position to saturate to (1..32)
\return Saturated value
*/
#define __SSAT(value, sat) __ssat(value, sat)
/**
\brief Unsigned Saturate
\details Saturates an unsigned value.
\param [in] value Value to be saturated
\param [in] sat Bit position to saturate to (0..31)
\return Saturated value
*/
#define __USAT(value, sat) __usat(value, sat)
#else /* (__ARM_FEATURE_SAT >= 1) */
/**
\brief Signed Saturate
\details Saturates a signed value.
\param [in] value Value to be saturated
\param [in] sat Bit position to saturate to (1..32)
\return Saturated value
*/
__STATIC_FORCEINLINE int32_t __SSAT(int32_t val, uint32_t sat)
{
if ((sat >= 1U) && (sat <= 32U))
{
const int32_t max = (int32_t)((1U << (sat - 1U)) - 1U);
const int32_t min = -1 - max ;
if (val > max)
{
return (max);
}
else if (val < min)
{
return (min);
}
}
return (val);
}
/**
\brief Unsigned Saturate
\details Saturates an unsigned value.
\param [in] value Value to be saturated
\param [in] sat Bit position to saturate to (0..31)
\return Saturated value
*/
__STATIC_FORCEINLINE uint32_t __USAT(int32_t val, uint32_t sat)
{
if (sat <= 31U)
{
const uint32_t max = ((1U << sat) - 1U);
if (val > (int32_t)max)
{
return (max);
}
else if (val < 0)
{
return (0U);
}
}
return ((uint32_t)val);
}
#endif /* (__ARM_FEATURE_SAT >= 1) */
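/* Usage sketch (illustrative): whichever variant is selected above, the
 * saturation helpers clamp to the range implied by 'sat'.
 *
 *   int32_t  a = __SSAT( 1000, 8);  //  127 (signed range -128..127 for sat = 8)
 *   int32_t  b = __SSAT(-1000, 8);  // -128
 *   uint32_t c = __USAT( 1000, 8);  //  255 (unsigned range 0..255 for sat = 8)
 *   uint32_t d = __USAT(   -5, 8);  //    0 (negative input saturates to 0)
 */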
#if (__ARM_FEATURE_LDREX >= 1)
/**
\brief Remove the exclusive lock
\details Removes the exclusive lock which is created by LDREX.
*/
#define __CLREX __builtin_arm_clrex
/**
\brief LDR Exclusive (8 bit)
\details Executes an exclusive LDR instruction for 8 bit value.
\param [in] ptr Pointer to data
\return value of type uint8_t at (*ptr)
*/
#define __LDREXB (uint8_t)__builtin_arm_ldrex
/**
\brief STR Exclusive (8 bit)
\details Executes an exclusive STR instruction for 8 bit values.
\param [in] value Value to store
\param [in] ptr Pointer to location
\return 0 Function succeeded
\return 1 Function failed
*/
#define __STREXB (uint32_t)__builtin_arm_strex
#endif /* (__ARM_FEATURE_LDREX >= 1) */
#if (__ARM_FEATURE_LDREX >= 2)
/**
\brief LDR Exclusive (16 bit)
\details Executes an exclusive LDR instruction for 16 bit values.
\param [in] ptr Pointer to data
\return value of type uint16_t at (*ptr)
*/
#define __LDREXH (uint16_t)__builtin_arm_ldrex
/**
\brief STR Exclusive (16 bit)
\details Executes an exclusive STR instruction for 16 bit values.
\param [in] value Value to store
\param [in] ptr Pointer to location
\return 0 Function succeeded
\return 1 Function failed
*/
#define __STREXH (uint32_t)__builtin_arm_strex
#endif /* (__ARM_FEATURE_LDREX >= 2) */
#if (__ARM_FEATURE_LDREX >= 4)
/**
\brief LDR Exclusive (32 bit)
\details Executes an exclusive LDR instruction for 32 bit values.
\param [in] ptr Pointer to data
\return value of type uint32_t at (*ptr)
*/
#define __LDREXW (uint32_t)__builtin_arm_ldrex
/**
\brief STR Exclusive (32 bit)
\details Executes an exclusive STR instruction for 32 bit values.
\param [in] value Value to store
\param [in] ptr Pointer to location
\return 0 Function succeeded
\return 1 Function failed
*/
#define __STREXW (uint32_t)__builtin_arm_strex
#endif /* (__ARM_FEATURE_LDREX >= 4) */
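/* Usage sketch (illustrative): the classic load/store-exclusive retry loop
 * built from the 32-bit intrinsics above (available when __ARM_FEATURE_LDREX
 * covers word accesses, as on Armv7-M and Armv8-M cores).
 *
 *   static inline void atomic_inc(volatile uint32_t *cnt)
 *   {
 *       uint32_t v;
 *       do {
 *           v = __LDREXW(cnt);            // open the exclusive monitor
 *       } while (__STREXW(v + 1U, cnt));  // non-zero means another access intervened
 *   }
 */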
#if (__ARM_ARCH_ISA_THUMB >= 2)
/**
\brief Rotate Right with Extend (32 bit)
\details Moves each bit of a bitstring right by one bit.
The carry input is shifted in at the left end of the bitstring.
\param [in] value Value to rotate
\return Rotated value
*/
__STATIC_FORCEINLINE uint32_t __RRX(uint32_t value)
{
uint32_t result;
__ASM volatile ("rrx %0, %1" : "=r" (result) : "r" (value));
return (result);
}
/**
\brief LDRT Unprivileged (8 bit)
\details Executes an unprivileged LDRT instruction for 8 bit value.
\param [in] ptr Pointer to data
\return value of type uint8_t at (*ptr)
*/
__STATIC_FORCEINLINE uint8_t __LDRBT(volatile uint8_t *ptr)
{
uint32_t result;
__ASM volatile ("ldrbt %0, %1" : "=r" (result) : "Q" (*ptr) );
return ((uint8_t)result); /* Add explicit type cast here */
}
/**
\brief LDRT Unprivileged (16 bit)
\details Executes an unprivileged LDRT instruction for 16 bit values.
\param [in] ptr Pointer to data
\return value of type uint16_t at (*ptr)
*/
__STATIC_FORCEINLINE uint16_t __LDRHT(volatile uint16_t *ptr)
{
uint32_t result;
__ASM volatile ("ldrht %0, %1" : "=r" (result) : "Q" (*ptr) );
return ((uint16_t)result); /* Add explicit type cast here */
}
/**
\brief LDRT Unprivileged (32 bit)
\details Executes an unprivileged LDRT instruction for 32 bit values.
\param [in] ptr Pointer to data
\return value of type uint32_t at (*ptr)
*/
__STATIC_FORCEINLINE uint32_t __LDRT(volatile uint32_t *ptr)
{
uint32_t result;
__ASM volatile ("ldrt %0, %1" : "=r" (result) : "Q" (*ptr) );
return (result);
}
#endif /* (__ARM_ARCH_ISA_THUMB >= 2) */
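/* Usage sketch (illustrative): the *T intrinsics above perform the access with
 * unprivileged permissions, which lets privileged code (for example an SVC
 * handler) dereference a pointer handed in by an unprivileged caller and take a
 * fault if that caller would not be allowed to read it. The function name is
 * made up.
 *
 *   static inline uint8_t svc_read_user_byte(volatile uint8_t *user_ptr)
 *   {
 *       return __LDRBT(user_ptr);  // checked against unprivileged MPU permissions
 *   }
 */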
#if (__ARM_ARCH >= 8)
/**
\brief Load-Acquire (8 bit)
\details Executes a LDAB instruction for 8 bit value.
\param [in] ptr Pointer to data
\return value of type uint8_t at (*ptr)
*/
__STATIC_FORCEINLINE uint8_t __LDAB(volatile uint8_t *ptr)
{
uint32_t result;
__ASM volatile ("ldab %0, %1" : "=r" (result) : "Q" (*ptr) : "memory" );
return ((uint8_t)result); /* Add explicit type cast here */
}
/**
\brief Load-Acquire (16 bit)
\details Executes a LDAH instruction for 16 bit values.
\param [in] ptr Pointer to data
\return value of type uint16_t at (*ptr)
*/
__STATIC_FORCEINLINE uint16_t __LDAH(volatile uint16_t *ptr)
{
uint32_t result;
__ASM volatile ("ldah %0, %1" : "=r" (result) : "Q" (*ptr) : "memory" );
return ((uint16_t)result); /* Add explicit type cast here */
}
/**
\brief Load-Acquire (32 bit)
\details Executes a LDA instruction for 32 bit values.
\param [in] ptr Pointer to data
\return value of type uint32_t at (*ptr)
*/
__STATIC_FORCEINLINE uint32_t __LDA(volatile uint32_t *ptr)
{
uint32_t result;
__ASM volatile ("lda %0, %1" : "=r" (result) : "Q" (*ptr) : "memory" );
return (result);
}
/**
\brief Store-Release (8 bit)
\details Executes a STLB instruction for 8 bit values.
\param [in] value Value to store
\param [in] ptr Pointer to location
*/
__STATIC_FORCEINLINE void __STLB(uint8_t value, volatile uint8_t *ptr)
{
__ASM volatile ("stlb %1, %0" : "=Q" (*ptr) : "r" ((uint32_t)value) : "memory" );
}
/**
\brief Store-Release (16 bit)
\details Executes a STLH instruction for 16 bit values.
\param [in] value Value to store
\param [in] ptr Pointer to location
*/
__STATIC_FORCEINLINE void __STLH(uint16_t value, volatile uint16_t *ptr)
{
__ASM volatile ("stlh %1, %0" : "=Q" (*ptr) : "r" ((uint32_t)value) : "memory" );
}
/**
\brief Store-Release (32 bit)
\details Executes a STL instruction for 32 bit values.
\param [in] value Value to store
\param [in] ptr Pointer to location
*/
__STATIC_FORCEINLINE void __STL(uint32_t value, volatile uint32_t *ptr)
{
__ASM volatile ("stl %1, %0" : "=Q" (*ptr) : "r" ((uint32_t)value) : "memory" );
}
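/* Usage sketch (illustrative): pairing __STL with __LDA gives release/acquire
 * ordering between two contexts without an explicit barrier. Variable and
 * function names are made up.
 *
 *   static uint32_t payload;
 *   static volatile uint32_t ready;
 *
 *   void producer(uint32_t value)
 *   {
 *       payload = value;
 *       __STL(1U, &ready);               // release: payload is visible before the flag
 *   }
 *
 *   uint32_t consumer(void)
 *   {
 *       while (__LDA(&ready) == 0U) { }  // acquire: flag is read before the payload
 *       return payload;
 *   }
 */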
/**
\brief Load-Acquire Exclusive (8 bit)
\details Executes a LDAB exclusive instruction for 8 bit value.
\param [in] ptr Pointer to data
\return value of type uint8_t at (*ptr)
*/
#define __LDAEXB (uint8_t)__builtin_arm_ldaex
/**
\brief Load-Acquire Exclusive (16 bit)
\details Executes a LDAH exclusive instruction for 16 bit values.
\param [in] ptr Pointer to data
\return value of type uint16_t at (*ptr)
*/
#define __LDAEXH (uint16_t)__builtin_arm_ldaex
/**
\brief Load-Acquire Exclusive (32 bit)
\details Executes a LDA exclusive instruction for 32 bit values.
\param [in] ptr Pointer to data
\return value of type uint32_t at (*ptr)
*/
#define __LDAEX (uint32_t)__builtin_arm_ldaex
/**
\brief Store-Release Exclusive (8 bit)
\details Executes a STLB exclusive instruction for 8 bit values.
\param [in] value Value to store
\param [in] ptr Pointer to location
\return 0 Function succeeded
\return 1 Function failed
*/
#define __STLEXB (uint32_t)__builtin_arm_stlex
/**
\brief Store-Release Exclusive (16 bit)
\details Executes a STLH exclusive instruction for 16 bit values.
\param [in] value Value to store
\param [in] ptr Pointer to location
\return 0 Function succeeded
\return 1 Function failed
*/
#define __STLEXH (uint32_t)__builtin_arm_stlex
/**
\brief Store-Release Exclusive (32 bit)
\details Executes a STL exclusive instruction for 32 bit values.
\param [in] value Value to store
\param [in] ptr Pointer to location
\return 0 Function succeeded
\return 1 Function failed
*/
#define __STLEX (uint32_t)__builtin_arm_stlex
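/* Usage sketch (illustrative): a minimal spinlock built from the
 * load-acquire/store-release exclusives above; it relies on the surrounding
 * (__ARM_ARCH >= 8) guard being satisfied.
 *
 *   static inline void lock_acquire(volatile uint32_t *lock)
 *   {
 *       do {
 *           while (__LDAEX(lock) != 0U) { }  // spin until the lock reads as free
 *       } while (__STLEX(1U, lock));         // claim it; non-zero means retry
 *   }
 *
 *   static inline void lock_release(volatile uint32_t *lock)
 *   {
 *       __STL(0U, lock);                     // release ordering on unlock
 *   }
 */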
#endif /* (__ARM_ARCH >= 8) */
/** @}*/ /* end of group CMSIS_Core_InstructionInterface */
/* ########################### Core Function Access ########################### */
/** \ingroup CMSIS_Core_FunctionInterface
\defgroup CMSIS_Core_RegAccFunctions CMSIS Core Register Access Functions
@{
*/
/**
\brief Enable IRQ Interrupts
\details Enables IRQ interrupts by clearing special-purpose register PRIMASK.
Can only be executed in Privileged modes.
*/
__STATIC_FORCEINLINE void __enable_irq(void)
{
__ASM volatile ("cpsie i" : : : "memory");
}
/**
\brief Disable IRQ Interrupts
\details Disables IRQ interrupts by setting special-purpose register PRIMASK.
Can only be executed in Privileged modes.
*/
__STATIC_FORCEINLINE void __disable_irq(void)
{
__ASM volatile ("cpsid i" : : : "memory");
}
#if (__ARM_ARCH_ISA_THUMB >= 2)
/**
\brief Enable FIQ
\details Enables FIQ interrupts by clearing special-purpose register FAULTMASK.
Can only be executed in Privileged modes.
*/
__STATIC_FORCEINLINE void __enable_fault_irq(void)
{
__ASM volatile ("cpsie f" : : : "memory");
}
/**
\brief Disable FIQ
\details Disables FIQ interrupts by setting special-purpose register FAULTMASK.
Can only be executed in Privileged modes.
*/
__STATIC_FORCEINLINE void __disable_fault_irq(void)
{
__ASM volatile ("cpsid f" : : : "memory");
}
#endif
/**
\brief Get FPSCR
\details Returns the current value of the Floating Point Status/Control register.
\return Floating Point Status/Control register value
*/
__STATIC_FORCEINLINE uint32_t __get_FPSCR(void)
{
#if (defined(__ARM_FP) && (__ARM_FP >= 1))
return (__builtin_arm_get_fpscr());
#else
return (0U);
#endif
}
/**
\brief Set FPSCR
\details Assigns the given value to the Floating Point Status/Control register.
\param [in] fpscr Floating Point Status/Control value to set
*/
__STATIC_FORCEINLINE void __set_FPSCR(uint32_t fpscr)
{
#if (defined(__ARM_FP) && (__ARM_FP >= 1))
__builtin_arm_set_fpscr(fpscr);
#else
(void)fpscr;
#endif
}
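/* Usage sketch (illustrative): clearing the cumulative exception flags in
 * FPSCR[4:0]. On FPU-less parts such as the Cortex-M23 used in GD32E23x the
 * fallbacks above turn this into a no-op (the read returns 0 and the write is
 * discarded).
 *
 *   uint32_t fpscr = __get_FPSCR();
 *   __set_FPSCR(fpscr & ~0x0000001FU);  // clear IOC/DZC/OFC/UFC/IXC
 */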
/** @} end of CMSIS_Core_RegAccFunctions */
// Include the profile specific settings:
#if __ARM_ARCH_PROFILE == 'A'
#include "./a-profile/cmsis_clang_a.h"
#elif __ARM_ARCH_PROFILE == 'R'
#include "./r-profile/cmsis_clang_r.h"
#elif __ARM_ARCH_PROFILE == 'M'
#include "./m-profile/cmsis_clang_m.h"
#else
#error "Unknown Arm architecture profile"
#endif
#endif /* __CMSIS_CLANG_H */

292
SDK/CMSIS/cmsis_compiler.h Normal file
View File

@@ -0,0 +1,292 @@
/*
* Copyright (c) 2009-2023 Arm Limited. All rights reserved.
*
* SPDX-License-Identifier: Apache-2.0
*
* Licensed under the Apache License, Version 2.0 (the License); you may
* not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an AS IS BASIS, WITHOUT
* WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
/*
* CMSIS Compiler Generic Header File
*/
#ifndef __CMSIS_COMPILER_H
#define __CMSIS_COMPILER_H
#include <stdint.h>
/*
* Arm Compiler above 6.10.1 (armclang)
*/
#if defined (__ARMCC_VERSION) && (__ARMCC_VERSION >= 6100100)
#include "cmsis_armclang.h"
/*
* TI Arm Clang Compiler (tiarmclang)
*/
#elif defined (__ti__)
#include "cmsis_tiarmclang.h"
/*
* LLVM/Clang Compiler
*/
#elif defined ( __clang__ )
#include "cmsis_clang.h"
/*
* GNU Compiler
*/
#elif defined ( __GNUC__ )
#include "cmsis_gcc.h"
/*
* IAR Compiler
*/
#elif defined ( __ICCARM__ )
#if __ARM_ARCH_PROFILE == 'A'
#include "a-profile/cmsis_iccarm_a.h"
#elif __ARM_ARCH_PROFILE == 'R'
#include "r-profile/cmsis_iccarm_r.h"
#elif __ARM_ARCH_PROFILE == 'M'
#include "m-profile/cmsis_iccarm_m.h"
#else
#error "Unknown Arm architecture profile"
#endif
/*
* TI Arm Compiler (armcl)
*/
#elif defined ( __TI_ARM__ )
#include <cmsis_ccs.h>
#ifndef __ASM
#define __ASM __asm
#endif
#ifndef __INLINE
#define __INLINE inline
#endif
#ifndef __STATIC_INLINE
#define __STATIC_INLINE static inline
#endif
#ifndef __STATIC_FORCEINLINE
#define __STATIC_FORCEINLINE __STATIC_INLINE
#endif
#ifndef __NO_RETURN
#define __NO_RETURN __attribute__((noreturn))
#endif
#ifndef __USED
#define __USED __attribute__((used))
#endif
#ifndef __WEAK
#define __WEAK __attribute__((weak))
#endif
#ifndef __PACKED
#define __PACKED __attribute__((packed))
#endif
#ifndef __PACKED_STRUCT
#define __PACKED_STRUCT struct __attribute__((packed))
#endif
#ifndef __PACKED_UNION
#define __PACKED_UNION union __attribute__((packed))
#endif
#ifndef __UNALIGNED_UINT16_WRITE
__PACKED_STRUCT T_UINT16_WRITE { uint16_t v; };
#define __UNALIGNED_UINT16_WRITE(addr, val) (void)((((struct T_UINT16_WRITE *)(void*)(addr))->v) = (val))
#endif
#ifndef __UNALIGNED_UINT16_READ
__PACKED_STRUCT T_UINT16_READ { uint16_t v; };
#define __UNALIGNED_UINT16_READ(addr) (((const struct T_UINT16_READ *)(const void *)(addr))->v)
#endif
#ifndef __UNALIGNED_UINT32_WRITE
__PACKED_STRUCT T_UINT32_WRITE { uint32_t v; };
#define __UNALIGNED_UINT32_WRITE(addr, val) (void)((((struct T_UINT32_WRITE *)(void *)(addr))->v) = (val))
#endif
#ifndef __UNALIGNED_UINT32_READ
__PACKED_STRUCT T_UINT32_READ { uint32_t v; };
#define __UNALIGNED_UINT32_READ(addr) (((const struct T_UINT32_READ *)(const void *)(addr))->v)
#endif
#ifndef __ALIGNED
#define __ALIGNED(x) __attribute__((aligned(x)))
#endif
#ifndef __RESTRICT
#define __RESTRICT __restrict
#endif
#ifndef __COMPILER_BARRIER
#warning No compiler specific solution for __COMPILER_BARRIER. __COMPILER_BARRIER is ignored.
#define __COMPILER_BARRIER() (void)0
#endif
#ifndef __NO_INIT
#define __NO_INIT __attribute__ ((section (".noinit")))
#endif
#ifndef __ALIAS
#define __ALIAS(x) __attribute__ ((alias(x)))
#endif
/*
* TASKING Compiler
*/
#elif defined ( __TASKING__ )
/*
* The CMSIS functions have been implemented as intrinsics in the compiler.
* Please use "carm -?i" to get an up to date list of all intrinsics,
* Including the CMSIS ones.
*/
#ifndef __ASM
#define __ASM __asm
#endif
#ifndef __INLINE
#define __INLINE inline
#endif
#ifndef __STATIC_INLINE
#define __STATIC_INLINE static inline
#endif
#ifndef __STATIC_FORCEINLINE
#define __STATIC_FORCEINLINE __STATIC_INLINE
#endif
#ifndef __NO_RETURN
#define __NO_RETURN __attribute__((noreturn))
#endif
#ifndef __USED
#define __USED __attribute__((used))
#endif
#ifndef __WEAK
#define __WEAK __attribute__((weak))
#endif
#ifndef __PACKED
#define __PACKED __packed__
#endif
#ifndef __PACKED_STRUCT
#define __PACKED_STRUCT struct __packed__
#endif
#ifndef __PACKED_UNION
#define __PACKED_UNION union __packed__
#endif
#ifndef __UNALIGNED_UINT16_WRITE
__PACKED_STRUCT T_UINT16_WRITE { uint16_t v; };
#define __UNALIGNED_UINT16_WRITE(addr, val) (void)((((struct T_UINT16_WRITE *)(void *)(addr))->v) = (val))
#endif
#ifndef __UNALIGNED_UINT16_READ
__PACKED_STRUCT T_UINT16_READ { uint16_t v; };
#define __UNALIGNED_UINT16_READ(addr) (((const struct T_UINT16_READ *)(const void *)(addr))->v)
#endif
#ifndef __UNALIGNED_UINT32_WRITE
__PACKED_STRUCT T_UINT32_WRITE { uint32_t v; };
#define __UNALIGNED_UINT32_WRITE(addr, val) (void)((((struct T_UINT32_WRITE *)(void *)(addr))->v) = (val))
#endif
#ifndef __UNALIGNED_UINT32_READ
__PACKED_STRUCT T_UINT32_READ { uint32_t v; };
#define __UNALIGNED_UINT32_READ(addr) (((const struct T_UINT32_READ *)(const void *)(addr))->v)
#endif
#ifndef __ALIGNED
#define __ALIGNED(x) __align(x)
#endif
#ifndef __RESTRICT
#warning No compiler specific solution for __RESTRICT. __RESTRICT is ignored.
#define __RESTRICT
#endif
#ifndef __COMPILER_BARRIER
#warning No compiler specific solution for __COMPILER_BARRIER. __COMPILER_BARRIER is ignored.
#define __COMPILER_BARRIER() (void)0
#endif
#ifndef __NO_INIT
#define __NO_INIT __attribute__ ((section (".noinit")))
#endif
#ifndef __ALIAS
#define __ALIAS(x) __attribute__ ((alias(x)))
#endif
/*
* COSMIC Compiler
*/
#elif defined ( __CSMC__ )
#include <cmsis_csm.h>
#ifndef __ASM
#define __ASM _asm
#endif
#ifndef __INLINE
#define __INLINE inline
#endif
#ifndef __STATIC_INLINE
#define __STATIC_INLINE static inline
#endif
#ifndef __STATIC_FORCEINLINE
#define __STATIC_FORCEINLINE __STATIC_INLINE
#endif
#ifndef __NO_RETURN
// NO RETURN is automatically detected hence no warning here
#define __NO_RETURN
#endif
#ifndef __USED
#warning No compiler specific solution for __USED. __USED is ignored.
#define __USED
#endif
#ifndef __WEAK
#define __WEAK __weak
#endif
#ifndef __PACKED
#define __PACKED @packed
#endif
#ifndef __PACKED_STRUCT
#define __PACKED_STRUCT @packed struct
#endif
#ifndef __PACKED_UNION
#define __PACKED_UNION @packed union
#endif
#ifndef __UNALIGNED_UINT16_WRITE
__PACKED_STRUCT T_UINT16_WRITE { uint16_t v; };
#define __UNALIGNED_UINT16_WRITE(addr, val) (void)((((struct T_UINT16_WRITE *)(void *)(addr))->v) = (val))
#endif
#ifndef __UNALIGNED_UINT16_READ
__PACKED_STRUCT T_UINT16_READ { uint16_t v; };
#define __UNALIGNED_UINT16_READ(addr) (((const struct T_UINT16_READ *)(const void *)(addr))->v)
#endif
#ifndef __UNALIGNED_UINT32_WRITE
__PACKED_STRUCT T_UINT32_WRITE { uint32_t v; };
#define __UNALIGNED_UINT32_WRITE(addr, val) (void)((((struct T_UINT32_WRITE *)(void *)(addr))->v) = (val))
#endif
#ifndef __UNALIGNED_UINT32_READ
__PACKED_STRUCT T_UINT32_READ { uint32_t v; };
#define __UNALIGNED_UINT32_READ(addr) (((const struct T_UINT32_READ *)(const void *)(addr))->v)
#endif
#ifndef __ALIGNED
#warning No compiler specific solution for __ALIGNED. __ALIGNED is ignored.
#define __ALIGNED(x)
#endif
#ifndef __RESTRICT
#warning No compiler specific solution for __RESTRICT. __RESTRICT is ignored.
#define __RESTRICT
#endif
#ifndef __COMPILER_BARRIER
#warning No compiler specific solution for __COMPILER_BARRIER. __COMPILER_BARRIER is ignored.
#define __COMPILER_BARRIER() (void)0
#endif
#ifndef __NO_INIT
#define __NO_INIT __attribute__ ((section (".noinit")))
#endif
#ifndef __ALIAS
#define __ALIAS(x) __attribute__ ((alias(x)))
#endif
#else
#error Unknown compiler.
#endif
#endif /* __CMSIS_COMPILER_H */

1006
SDK/CMSIS/cmsis_gcc.h Normal file

File diff suppressed because it is too large

44
SDK/CMSIS/cmsis_version.h Normal file
View File

@@ -0,0 +1,44 @@
/*
* Copyright (c) 2009-2023 ARM Limited. All rights reserved.
*
* SPDX-License-Identifier: Apache-2.0
*
* Licensed under the Apache License, Version 2.0 (the License); you may
* not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an AS IS BASIS, WITHOUT
* WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
/*
* CMSIS Core Version Definitions
*/
#if defined ( __ICCARM__ )
#pragma system_include /* treat file as system include file for MISRA check */
#elif defined (__clang__)
#pragma clang system_header /* treat file as system include file */
#endif
#ifndef __CMSIS_VERSION_H
#define __CMSIS_VERSION_H
/* CMSIS-Core(M) Version definitions */
#define __CM_CMSIS_VERSION_MAIN ( 6U) /*!< \brief [31:16] CMSIS-Core(M) main version */
#define __CM_CMSIS_VERSION_SUB ( 1U) /*!< \brief [15:0] CMSIS-Core(M) sub version */
#define __CM_CMSIS_VERSION ((__CM_CMSIS_VERSION_MAIN << 16U) | \
__CM_CMSIS_VERSION_SUB ) /*!< \brief CMSIS Core(M) version number */
/* CMSIS-Core(A) Version definitions */
#define __CA_CMSIS_VERSION_MAIN ( 6U) /*!< \brief [31:16] CMSIS-Core(A) main version */
#define __CA_CMSIS_VERSION_SUB ( 1U) /*!< \brief [15:0] CMSIS-Core(A) sub version */
#define __CA_CMSIS_VERSION ((__CA_CMSIS_VERSION_MAIN << 16U) | \
__CA_CMSIS_VERSION_SUB ) /*!< \brief CMSIS-Core(A) version number */
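/* Usage sketch (illustrative): application or library code can gate itself on
 * the packed version word defined above.
 *
 *   #if (__CM_CMSIS_VERSION < ((6U << 16U) | 0U))
 *   #error "This code requires CMSIS-Core(M) 6.0 or newer"
 *   #endif
 */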
#endif

2253
SDK/CMSIS/core_cm23.h Normal file

File diff suppressed because it is too large

View File

@@ -0,0 +1,439 @@
/*
* Copyright (c) 2020-2021 Arm Limited. All rights reserved.
*
* SPDX-License-Identifier: Apache-2.0
*
* Licensed under the Apache License, Version 2.0 (the License); you may
* not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an AS IS BASIS, WITHOUT
* WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
/*
* CMSIS-Core(M) Level 1 Cache API for Armv7-M and later
*/
#ifndef ARM_ARMV7M_CACHEL1_H
#define ARM_ARMV7M_CACHEL1_H
#if defined ( __ICCARM__ )
#pragma system_include /* treat file as system include file for MISRA check */
#elif defined (__clang__)
#pragma clang system_header /* treat file as system include file */
#endif
/**
\ingroup CMSIS_Core_FunctionInterface
\defgroup CMSIS_Core_CacheFunctions Cache Functions
\brief Functions that configure Instruction and Data cache.
@{
*/
/* Cache Size ID Register Macros */
#define CCSIDR_WAYS(x) (((x) & SCB_CCSIDR_ASSOCIATIVITY_Msk) >> SCB_CCSIDR_ASSOCIATIVITY_Pos)
#define CCSIDR_SETS(x) (((x) & SCB_CCSIDR_NUMSETS_Msk ) >> SCB_CCSIDR_NUMSETS_Pos )
#ifndef __SCB_DCACHE_LINE_SIZE
#define __SCB_DCACHE_LINE_SIZE 32U /*!< Cortex-M7 cache line size is fixed to 32 bytes (8 words). See also register SCB_CCSIDR */
#endif
#ifndef __SCB_ICACHE_LINE_SIZE
#define __SCB_ICACHE_LINE_SIZE 32U /*!< Cortex-M7 cache line size is fixed to 32 bytes (8 words). See also register SCB_CCSIDR */
#endif
/**
\brief Enable I-Cache
\details Turns on I-Cache
*/
__STATIC_FORCEINLINE void SCB_EnableICache (void)
{
#if defined (__ICACHE_PRESENT) && (__ICACHE_PRESENT == 1U)
if (SCB->CCR & SCB_CCR_IC_Msk) return; /* return if ICache is already enabled */
__DSB();
__ISB();
SCB->ICIALLU = 0UL; /* invalidate I-Cache */
__DSB();
__ISB();
SCB->CCR |= (uint32_t)SCB_CCR_IC_Msk; /* enable I-Cache */
__DSB();
__ISB();
#endif
}
/**
\brief Disable I-Cache
\details Turns off I-Cache
*/
__STATIC_FORCEINLINE void SCB_DisableICache (void)
{
#if defined (__ICACHE_PRESENT) && (__ICACHE_PRESENT == 1U)
__DSB();
__ISB();
SCB->CCR &= ~(uint32_t)SCB_CCR_IC_Msk; /* disable I-Cache */
SCB->ICIALLU = 0UL; /* invalidate I-Cache */
__DSB();
__ISB();
#endif
}
/**
\brief Invalidate I-Cache
\details Invalidates I-Cache
*/
__STATIC_FORCEINLINE void SCB_InvalidateICache (void)
{
#if defined (__ICACHE_PRESENT) && (__ICACHE_PRESENT == 1U)
__DSB();
__ISB();
SCB->ICIALLU = 0UL;
__DSB();
__ISB();
#endif
}
/**
\brief I-Cache Invalidate by address
\details Invalidates I-Cache for the given address.
I-Cache is invalidated starting from a 32 byte aligned address in 32 byte granularity.
I-Cache memory blocks which are part of given address + given size are invalidated.
\param[in] addr address
\param[in] isize size of memory block (in number of bytes)
*/
__STATIC_FORCEINLINE void SCB_InvalidateICache_by_Addr (volatile void *addr, int32_t isize)
{
#if defined (__ICACHE_PRESENT) && (__ICACHE_PRESENT == 1U)
if ( isize > 0 ) {
int32_t op_size = isize + (((uint32_t)addr) & (__SCB_ICACHE_LINE_SIZE - 1U));
uint32_t op_addr = (uint32_t)addr /* & ~(__SCB_ICACHE_LINE_SIZE - 1U) */;
__DSB();
do {
SCB->ICIMVAU = op_addr; /* register accepts only 32byte aligned values, only bits 31..5 are valid */
op_addr += __SCB_ICACHE_LINE_SIZE;
op_size -= __SCB_ICACHE_LINE_SIZE;
} while ( op_size > 0 );
__DSB();
__ISB();
}
#endif
}
/**
\brief Enable D-Cache
\details Turns on D-Cache
*/
__STATIC_FORCEINLINE void SCB_EnableDCache (void)
{
#if defined (__DCACHE_PRESENT) && (__DCACHE_PRESENT == 1U)
uint32_t ccsidr;
uint32_t sets;
uint32_t ways;
if (SCB->CCR & SCB_CCR_DC_Msk) return; /* return if DCache is already enabled */
SCB->CSSELR = 0U; /* select Level 1 data cache */
__DSB();
ccsidr = SCB->CCSIDR;
/* invalidate D-Cache */
sets = (uint32_t)(CCSIDR_SETS(ccsidr));
do {
ways = (uint32_t)(CCSIDR_WAYS(ccsidr));
do {
SCB->DCISW = (((sets << SCB_DCISW_SET_Pos) & SCB_DCISW_SET_Msk) |
((ways << SCB_DCISW_WAY_Pos) & SCB_DCISW_WAY_Msk) );
#if defined ( __CC_ARM )
__schedule_barrier();
#endif
} while (ways-- != 0U);
} while(sets-- != 0U);
__DSB();
SCB->CCR |= (uint32_t)SCB_CCR_DC_Msk; /* enable D-Cache */
__DSB();
__ISB();
#endif
}
/**
\brief Disable D-Cache
\details Turns off D-Cache
*/
__STATIC_FORCEINLINE void SCB_DisableDCache (void)
{
#if defined (__DCACHE_PRESENT) && (__DCACHE_PRESENT == 1U)
struct {
uint32_t ccsidr;
uint32_t sets;
uint32_t ways;
} locals
#if ((defined(__GNUC__) || defined(__clang__)) && !defined(__OPTIMIZE__))
__ALIGNED(__SCB_DCACHE_LINE_SIZE)
#endif
;
SCB->CSSELR = 0U; /* select Level 1 data cache */
__DSB();
SCB->CCR &= ~(uint32_t)SCB_CCR_DC_Msk; /* disable D-Cache */
__DSB();
#if !defined(__OPTIMIZE__)
/*
* For the endless loop issue with no optimization builds.
* More details, see https://github.com/ARM-software/CMSIS_5/issues/620
*
* The issue only happens when local variables are in stack. If
* local variables are saved in general purpose register, then the function
* is OK.
*
* When local variables are in stack, after disabling the cache, flush the
* local variables cache line for data consistency.
*/
/* Clean and invalidate the local variable cache. */
#if defined(__ICCARM__)
/* As we can't align the stack to the cache line size, invalidate each of the variables */
SCB->DCCIMVAC = (uint32_t)&locals.sets;
SCB->DCCIMVAC = (uint32_t)&locals.ways;
SCB->DCCIMVAC = (uint32_t)&locals.ccsidr;
#else
SCB->DCCIMVAC = (uint32_t)&locals;
#endif
__DSB();
__ISB();
#endif
locals.ccsidr = SCB->CCSIDR;
/* clean & invalidate D-Cache */
locals.sets = (uint32_t)(CCSIDR_SETS(locals.ccsidr));
do {
locals.ways = (uint32_t)(CCSIDR_WAYS(locals.ccsidr));
do {
SCB->DCCISW = (((locals.sets << SCB_DCCISW_SET_Pos) & SCB_DCCISW_SET_Msk) |
((locals.ways << SCB_DCCISW_WAY_Pos) & SCB_DCCISW_WAY_Msk) );
#if defined ( __CC_ARM )
__schedule_barrier();
#endif
} while (locals.ways-- != 0U);
} while(locals.sets-- != 0U);
__DSB();
__ISB();
#endif
}
/**
\brief Invalidate D-Cache
\details Invalidates D-Cache
*/
__STATIC_FORCEINLINE void SCB_InvalidateDCache (void)
{
#if defined (__DCACHE_PRESENT) && (__DCACHE_PRESENT == 1U)
uint32_t ccsidr;
uint32_t sets;
uint32_t ways;
SCB->CSSELR = 0U; /* select Level 1 data cache */
__DSB();
ccsidr = SCB->CCSIDR;
/* invalidate D-Cache */
sets = (uint32_t)(CCSIDR_SETS(ccsidr));
do {
ways = (uint32_t)(CCSIDR_WAYS(ccsidr));
do {
SCB->DCISW = (((sets << SCB_DCISW_SET_Pos) & SCB_DCISW_SET_Msk) |
((ways << SCB_DCISW_WAY_Pos) & SCB_DCISW_WAY_Msk) );
#if defined ( __CC_ARM )
__schedule_barrier();
#endif
} while (ways-- != 0U);
} while(sets-- != 0U);
__DSB();
__ISB();
#endif
}
/**
\brief Clean D-Cache
\details Cleans D-Cache
*/
__STATIC_FORCEINLINE void SCB_CleanDCache (void)
{
#if defined (__DCACHE_PRESENT) && (__DCACHE_PRESENT == 1U)
uint32_t ccsidr;
uint32_t sets;
uint32_t ways;
SCB->CSSELR = 0U; /* select Level 1 data cache */
__DSB();
ccsidr = SCB->CCSIDR;
/* clean D-Cache */
sets = (uint32_t)(CCSIDR_SETS(ccsidr));
do {
ways = (uint32_t)(CCSIDR_WAYS(ccsidr));
do {
SCB->DCCSW = (((sets << SCB_DCCSW_SET_Pos) & SCB_DCCSW_SET_Msk) |
((ways << SCB_DCCSW_WAY_Pos) & SCB_DCCSW_WAY_Msk) );
#if defined ( __CC_ARM )
__schedule_barrier();
#endif
} while (ways-- != 0U);
} while(sets-- != 0U);
__DSB();
__ISB();
#endif
}
/**
\brief Clean & Invalidate D-Cache
\details Cleans and Invalidates D-Cache
*/
__STATIC_FORCEINLINE void SCB_CleanInvalidateDCache (void)
{
#if defined (__DCACHE_PRESENT) && (__DCACHE_PRESENT == 1U)
uint32_t ccsidr;
uint32_t sets;
uint32_t ways;
SCB->CSSELR = 0U; /* select Level 1 data cache */
__DSB();
ccsidr = SCB->CCSIDR;
/* clean & invalidate D-Cache */
sets = (uint32_t)(CCSIDR_SETS(ccsidr));
do {
ways = (uint32_t)(CCSIDR_WAYS(ccsidr));
do {
SCB->DCCISW = (((sets << SCB_DCCISW_SET_Pos) & SCB_DCCISW_SET_Msk) |
((ways << SCB_DCCISW_WAY_Pos) & SCB_DCCISW_WAY_Msk) );
#if defined ( __CC_ARM )
__schedule_barrier();
#endif
} while (ways-- != 0U);
} while(sets-- != 0U);
__DSB();
__ISB();
#endif
}
/**
\brief D-Cache Invalidate by address
\details Invalidates D-Cache for the given address.
D-Cache is invalidated starting from a 32 byte aligned address in 32 byte granularity.
D-Cache memory blocks which are part of given address + given size are invalidated.
\param[in] addr address
\param[in] dsize size of memory block (in number of bytes)
*/
__STATIC_FORCEINLINE void SCB_InvalidateDCache_by_Addr (volatile void *addr, int32_t dsize)
{
#if defined (__DCACHE_PRESENT) && (__DCACHE_PRESENT == 1U)
if ( dsize > 0 ) {
int32_t op_size = dsize + (((uint32_t)addr) & (__SCB_DCACHE_LINE_SIZE - 1U));
uint32_t op_addr = (uint32_t)addr /* & ~(__SCB_DCACHE_LINE_SIZE - 1U) */;
__DSB();
do {
SCB->DCIMVAC = op_addr; /* register accepts only 32byte aligned values, only bits 31..5 are valid */
op_addr += __SCB_DCACHE_LINE_SIZE;
op_size -= __SCB_DCACHE_LINE_SIZE;
} while ( op_size > 0 );
__DSB();
__ISB();
}
#endif
}
/**
\brief D-Cache Clean by address
\details Cleans D-Cache for the given address
D-Cache is cleaned starting from a 32 byte aligned address in 32 byte granularity.
D-Cache memory blocks which are part of given address + given size are cleaned.
\param[in] addr address
\param[in] dsize size of memory block (in number of bytes)
*/
__STATIC_FORCEINLINE void SCB_CleanDCache_by_Addr (volatile void *addr, int32_t dsize)
{
#if defined (__DCACHE_PRESENT) && (__DCACHE_PRESENT == 1U)
if ( dsize > 0 ) {
int32_t op_size = dsize + (((uint32_t)addr) & (__SCB_DCACHE_LINE_SIZE - 1U));
uint32_t op_addr = (uint32_t)addr /* & ~(__SCB_DCACHE_LINE_SIZE - 1U) */;
__DSB();
do {
SCB->DCCMVAC = op_addr; /* register accepts only 32byte aligned values, only bits 31..5 are valid */
op_addr += __SCB_DCACHE_LINE_SIZE;
op_size -= __SCB_DCACHE_LINE_SIZE;
} while ( op_size > 0 );
__DSB();
__ISB();
}
#endif
}
/**
\brief D-Cache Clean and Invalidate by address
\details Cleans and invalidates D-Cache for the given address
D-Cache is cleaned and invalidated starting from a 32 byte aligned address in 32 byte granularity.
D-Cache memory blocks which are part of given address + given size are cleaned and invalidated.
\param[in] addr address (aligned to 32-byte boundary)
\param[in] dsize size of memory block (in number of bytes)
*/
__STATIC_FORCEINLINE void SCB_CleanInvalidateDCache_by_Addr (volatile void *addr, int32_t dsize)
{
#if defined (__DCACHE_PRESENT) && (__DCACHE_PRESENT == 1U)
if ( dsize > 0 ) {
int32_t op_size = dsize + (((uint32_t)addr) & (__SCB_DCACHE_LINE_SIZE - 1U));
uint32_t op_addr = (uint32_t)addr /* & ~(__SCB_DCACHE_LINE_SIZE - 1U) */;
__DSB();
do {
SCB->DCCIMVAC = op_addr; /* register accepts only 32byte aligned values, only bits 31..5 are valid */
op_addr += __SCB_DCACHE_LINE_SIZE;
op_size -= __SCB_DCACHE_LINE_SIZE;
} while ( op_size > 0 );
__DSB();
__ISB();
}
#endif
}
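/* Usage sketch (illustrative): typical D-cache maintenance around a DMA
 * transfer on a cached core (Cortex-M7 class; on cacheless cores such as the
 * GD32E23x Cortex-M23 these functions compile to empty bodies). dma_start()
 * and dma_wait() are placeholders for the device's DMA driver.
 *
 *   __ALIGNED(32) static uint8_t tx_buf[256];
 *   __ALIGNED(32) static uint8_t rx_buf[256];
 *
 *   void dma_transfer(void)
 *   {
 *       SCB_CleanDCache_by_Addr(tx_buf, sizeof(tx_buf));       // push dirty lines to memory
 *       dma_start(tx_buf, rx_buf, sizeof(tx_buf));
 *       dma_wait();
 *       SCB_InvalidateDCache_by_Addr(rx_buf, sizeof(rx_buf));  // drop stale cached lines
 *   }
 */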
/*@} end of CMSIS_Core_CacheFunctions */
#endif /* ARM_ARMV7M_CACHEL1_H */

View File

@@ -0,0 +1,273 @@
/*
* Copyright (c) 2017-2020 Arm Limited. All rights reserved.
*
* SPDX-License-Identifier: Apache-2.0
*
* Licensed under the Apache License, Version 2.0 (the License); you may
* not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an AS IS BASIS, WITHOUT
* WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
/*
* CMSIS-Core(M) MPU API for Armv7-M MPU
*/
#ifndef ARM_MPU_ARMV7_H
#define ARM_MPU_ARMV7_H
#if defined ( __ICCARM__ )
#pragma system_include /* treat file as system include file for MISRA check */
#elif defined (__clang__)
#pragma clang system_header /* treat file as system include file */
#endif
#define ARM_MPU_REGION_SIZE_32B ((uint8_t)0x04U) ///!< MPU Region Size 32 Bytes
#define ARM_MPU_REGION_SIZE_64B ((uint8_t)0x05U) ///!< MPU Region Size 64 Bytes
#define ARM_MPU_REGION_SIZE_128B ((uint8_t)0x06U) ///!< MPU Region Size 128 Bytes
#define ARM_MPU_REGION_SIZE_256B ((uint8_t)0x07U) ///!< MPU Region Size 256 Bytes
#define ARM_MPU_REGION_SIZE_512B ((uint8_t)0x08U) ///!< MPU Region Size 512 Bytes
#define ARM_MPU_REGION_SIZE_1KB ((uint8_t)0x09U) ///!< MPU Region Size 1 KByte
#define ARM_MPU_REGION_SIZE_2KB ((uint8_t)0x0AU) ///!< MPU Region Size 2 KBytes
#define ARM_MPU_REGION_SIZE_4KB ((uint8_t)0x0BU) ///!< MPU Region Size 4 KBytes
#define ARM_MPU_REGION_SIZE_8KB ((uint8_t)0x0CU) ///!< MPU Region Size 8 KBytes
#define ARM_MPU_REGION_SIZE_16KB ((uint8_t)0x0DU) ///!< MPU Region Size 16 KBytes
#define ARM_MPU_REGION_SIZE_32KB ((uint8_t)0x0EU) ///!< MPU Region Size 32 KBytes
#define ARM_MPU_REGION_SIZE_64KB ((uint8_t)0x0FU) ///!< MPU Region Size 64 KBytes
#define ARM_MPU_REGION_SIZE_128KB ((uint8_t)0x10U) ///!< MPU Region Size 128 KBytes
#define ARM_MPU_REGION_SIZE_256KB ((uint8_t)0x11U) ///!< MPU Region Size 256 KBytes
#define ARM_MPU_REGION_SIZE_512KB ((uint8_t)0x12U) ///!< MPU Region Size 512 KBytes
#define ARM_MPU_REGION_SIZE_1MB ((uint8_t)0x13U) ///!< MPU Region Size 1 MByte
#define ARM_MPU_REGION_SIZE_2MB ((uint8_t)0x14U) ///!< MPU Region Size 2 MBytes
#define ARM_MPU_REGION_SIZE_4MB ((uint8_t)0x15U) ///!< MPU Region Size 4 MBytes
#define ARM_MPU_REGION_SIZE_8MB ((uint8_t)0x16U) ///!< MPU Region Size 8 MBytes
#define ARM_MPU_REGION_SIZE_16MB ((uint8_t)0x17U) ///!< MPU Region Size 16 MBytes
#define ARM_MPU_REGION_SIZE_32MB ((uint8_t)0x18U) ///!< MPU Region Size 32 MBytes
#define ARM_MPU_REGION_SIZE_64MB ((uint8_t)0x19U) ///!< MPU Region Size 64 MBytes
#define ARM_MPU_REGION_SIZE_128MB ((uint8_t)0x1AU) ///!< MPU Region Size 128 MBytes
#define ARM_MPU_REGION_SIZE_256MB ((uint8_t)0x1BU) ///!< MPU Region Size 256 MBytes
#define ARM_MPU_REGION_SIZE_512MB ((uint8_t)0x1CU) ///!< MPU Region Size 512 MBytes
#define ARM_MPU_REGION_SIZE_1GB ((uint8_t)0x1DU) ///!< MPU Region Size 1 GByte
#define ARM_MPU_REGION_SIZE_2GB ((uint8_t)0x1EU) ///!< MPU Region Size 2 GBytes
#define ARM_MPU_REGION_SIZE_4GB ((uint8_t)0x1FU) ///!< MPU Region Size 4 GBytes
#define ARM_MPU_AP_NONE 0U ///!< MPU Access Permission no access
#define ARM_MPU_AP_PRIV 1U ///!< MPU Access Permission privileged access only
#define ARM_MPU_AP_URO 2U ///!< MPU Access Permission unprivileged access read-only
#define ARM_MPU_AP_FULL 3U ///!< MPU Access Permission full access
#define ARM_MPU_AP_PRO 5U ///!< MPU Access Permission privileged access read-only
#define ARM_MPU_AP_RO 6U ///!< MPU Access Permission read-only access
/** MPU Region Base Address Register Value
*
* \param Region The region to be configured, number 0 to 15.
* \param BaseAddress The base address for the region.
*/
#define ARM_MPU_RBAR(Region, BaseAddress) \
(((BaseAddress) & MPU_RBAR_ADDR_Msk) | \
((Region) & MPU_RBAR_REGION_Msk) | \
(MPU_RBAR_VALID_Msk))
/**
* MPU Memory Access Attributes
*
* \param TypeExtField Type extension field, allows you to configure memory access type, for example strongly ordered, peripheral.
* \param IsShareable Region is shareable between multiple bus masters.
* \param IsCacheable Region is cacheable, i.e. its value may be kept in cache.
* \param IsBufferable Region is bufferable, i.e. using write-back caching. Cacheable but non-bufferable regions use write-through policy.
*/
#define ARM_MPU_ACCESS_(TypeExtField, IsShareable, IsCacheable, IsBufferable) \
((((TypeExtField) << MPU_RASR_TEX_Pos) & MPU_RASR_TEX_Msk) | \
(((IsShareable) << MPU_RASR_S_Pos) & MPU_RASR_S_Msk) | \
(((IsCacheable) << MPU_RASR_C_Pos) & MPU_RASR_C_Msk) | \
(((IsBufferable) << MPU_RASR_B_Pos) & MPU_RASR_B_Msk))
/**
* MPU Region Attribute and Size Register Value
*
* \param DisableExec Instruction access disable bit, 1= disable instruction fetches.
* \param AccessPermission Data access permissions, allows you to configure read/write access for User and Privileged mode.
* \param AccessAttributes Memory access attribution, see \ref ARM_MPU_ACCESS_.
* \param SubRegionDisable Sub-region disable field.
* \param Size Region size of the region to be configured, for example 4K, 8K.
*/
#define ARM_MPU_RASR_EX(DisableExec, AccessPermission, AccessAttributes, SubRegionDisable, Size) \
((((DisableExec) << MPU_RASR_XN_Pos) & MPU_RASR_XN_Msk) | \
(((AccessPermission) << MPU_RASR_AP_Pos) & MPU_RASR_AP_Msk) | \
(((AccessAttributes) & (MPU_RASR_TEX_Msk | MPU_RASR_S_Msk | MPU_RASR_C_Msk | MPU_RASR_B_Msk))) | \
(((SubRegionDisable) << MPU_RASR_SRD_Pos) & MPU_RASR_SRD_Msk) | \
(((Size) << MPU_RASR_SIZE_Pos) & MPU_RASR_SIZE_Msk) | \
(((MPU_RASR_ENABLE_Msk))))
/**
* MPU Region Attribute and Size Register Value
*
* \param DisableExec Instruction access disable bit, 1= disable instruction fetches.
* \param AccessPermission Data access permissions, allows you to configure read/write access for User and Privileged mode.
* \param TypeExtField Type extension field, allows you to configure memory access type, for example strongly ordered, peripheral.
* \param IsShareable Region is shareable between multiple bus masters.
* \param IsCacheable Region is cacheable, i.e. its value may be kept in cache.
* \param IsBufferable Region is bufferable, i.e. using write-back caching. Cacheable but non-bufferable regions use write-through policy.
* \param SubRegionDisable Sub-region disable field.
* \param Size Region size of the region to be configured, for example 4K, 8K.
*/
#define ARM_MPU_RASR(DisableExec, AccessPermission, TypeExtField, IsShareable, IsCacheable, IsBufferable, SubRegionDisable, Size) \
ARM_MPU_RASR_EX(DisableExec, AccessPermission, ARM_MPU_ACCESS_(TypeExtField, IsShareable, IsCacheable, IsBufferable), SubRegionDisable, Size)
/**
* MPU Memory Access Attribute for strongly ordered memory.
* - TEX: 000b
* - Shareable
* - Non-cacheable
* - Non-bufferable
*/
#define ARM_MPU_ACCESS_ORDERED ARM_MPU_ACCESS_(0U, 1U, 0U, 0U)
/**
* MPU Memory Access Attribute for device memory.
* - TEX: 000b (if shareable) or 010b (if non-shareable)
* - Shareable or non-shareable
* - Non-cacheable
* - Bufferable (if shareable) or non-bufferable (if non-shareable)
*
* \param IsShareable Configures the device memory as shareable or non-shareable.
*/
#define ARM_MPU_ACCESS_DEVICE(IsShareable) ((IsShareable) ? ARM_MPU_ACCESS_(0U, 1U, 0U, 1U) : ARM_MPU_ACCESS_(2U, 0U, 0U, 0U))
/**
* MPU Memory Access Attribute for normal memory.
* - TEX: 1BBb (reflecting outer cacheability rules)
* - Shareable or non-shareable
* - Cacheable or non-cacheable (reflecting inner cacheability rules)
* - Bufferable or non-bufferable (reflecting inner cacheability rules)
*
* \param OuterCp Configures the outer cache policy.
* \param InnerCp Configures the inner cache policy.
* \param IsShareable Configures the memory as shareable or non-shareable.
*/
#define ARM_MPU_ACCESS_NORMAL(OuterCp, InnerCp, IsShareable) ARM_MPU_ACCESS_((4U | (OuterCp)), IsShareable, ((InnerCp) >> 1U), ((InnerCp) & 1U))
/**
* MPU Memory Access Attribute non-cacheable policy.
*/
#define ARM_MPU_CACHEP_NOCACHE 0U
/**
* MPU Memory Access Attribute write-back, write and read allocate policy.
*/
#define ARM_MPU_CACHEP_WB_WRA 1U
/**
* MPU Memory Access Attribute write-through, no write allocate policy.
*/
#define ARM_MPU_CACHEP_WT_NWA 2U
/**
* MPU Memory Access Attribute write-back, no write allocate policy.
*/
#define ARM_MPU_CACHEP_WB_NWA 3U
/**
* Struct for a single MPU Region
*/
typedef struct {
uint32_t RBAR; //!< The region base address register value (RBAR)
uint32_t RASR; //!< The region attribute and size register value (RASR) \ref MPU_RASR
} ARM_MPU_Region_t;
/** Enable the MPU.
* \param MPU_Control Default access permissions for unconfigured regions.
*/
__STATIC_INLINE void ARM_MPU_Enable(uint32_t MPU_Control)
{
__DMB();
MPU->CTRL = MPU_Control | MPU_CTRL_ENABLE_Msk;
#ifdef SCB_SHCSR_MEMFAULTENA_Msk
SCB->SHCSR |= SCB_SHCSR_MEMFAULTENA_Msk;
#endif
__DSB();
__ISB();
}
/** Disable the MPU.
*/
__STATIC_INLINE void ARM_MPU_Disable(void)
{
__DMB();
#ifdef SCB_SHCSR_MEMFAULTENA_Msk
SCB->SHCSR &= ~SCB_SHCSR_MEMFAULTENA_Msk;
#endif
MPU->CTRL &= ~MPU_CTRL_ENABLE_Msk;
__DSB();
__ISB();
}
/** Clear and disable the given MPU region.
* \param rnr Region number to be cleared.
*/
__STATIC_INLINE void ARM_MPU_ClrRegion(uint32_t rnr)
{
MPU->RNR = rnr;
MPU->RASR = 0U;
}
/** Configure an MPU region.
* \param rbar Value for RBAR register.
* \param rasr Value for RASR register.
*/
__STATIC_INLINE void ARM_MPU_SetRegion(uint32_t rbar, uint32_t rasr)
{
MPU->RBAR = rbar;
MPU->RASR = rasr;
}
/** Configure the given MPU region.
* \param rnr Region number to be configured.
* \param rbar Value for RBAR register.
* \param rasr Value for RASR register.
*/
__STATIC_INLINE void ARM_MPU_SetRegionEx(uint32_t rnr, uint32_t rbar, uint32_t rasr)
{
MPU->RNR = rnr;
MPU->RBAR = rbar;
MPU->RASR = rasr;
}
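/* Usage sketch (illustrative): programming one Armv7-M MPU region with the
 * helpers above: region 0, 64 KB of SRAM at 0x20000000, full access, normal
 * write-back memory. MPU_CTRL_PRIVDEFENA_Msk comes from the core header.
 * (Armv8-M parts such as the Cortex-M23 use the mpu_armv8.h API instead.)
 *
 *   ARM_MPU_Disable();
 *   ARM_MPU_SetRegionEx(0U,
 *       ARM_MPU_RBAR(0U, 0x20000000U),
 *       ARM_MPU_RASR(0U,                      // DisableExec = 0 (execution allowed)
 *                    ARM_MPU_AP_FULL,         // read/write in both modes
 *                    0U, 0U, 1U, 1U,          // TEX = 0, non-shareable, cacheable, bufferable
 *                    0x00U,                   // no sub-regions disabled
 *                    ARM_MPU_REGION_SIZE_64KB));
 *   ARM_MPU_Enable(MPU_CTRL_PRIVDEFENA_Msk);  // keep the default map for privileged code
 */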
/** Memcpy with strictly ordered memory access, e.g. used by code in ARM_MPU_Load().
* \param dst Destination data is copied to.
* \param src Source data is copied from.
* \param len Amount of data words to be copied.
*/
__STATIC_INLINE void ARM_MPU_OrderedMemcpy(volatile uint32_t* dst, const uint32_t* __RESTRICT src, uint32_t len)
{
uint32_t i;
for (i = 0U; i < len; ++i)
{
dst[i] = src[i];
}
}
/** Load the given number of MPU regions from a table.
* \param table Pointer to the MPU configuration table.
* \param cnt Amount of regions to be configured.
*/
__STATIC_INLINE void ARM_MPU_Load(ARM_MPU_Region_t const* table, uint32_t cnt)
{
const uint32_t rowWordSize = sizeof(ARM_MPU_Region_t)/4U;
while (cnt > MPU_TYPE_RALIASES) {
ARM_MPU_OrderedMemcpy(&(MPU->RBAR), &(table->RBAR), MPU_TYPE_RALIASES*rowWordSize);
table += MPU_TYPE_RALIASES;
cnt -= MPU_TYPE_RALIASES;
}
ARM_MPU_OrderedMemcpy(&(MPU->RBAR), &(table->RBAR), cnt*rowWordSize);
}
#endif

View File

@@ -0,0 +1,203 @@
/*
* Copyright (c) 2022 Arm Limited. All rights reserved.
*
* SPDX-License-Identifier: Apache-2.0
*
* Licensed under the Apache License, Version 2.0 (the License); you may
* not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an AS IS BASIS, WITHOUT
* WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
/*
* CMSIS-Core(M) PAC key functions for Armv8.1-M PAC extension
*/
#ifndef PAC_ARMV81_H
#define PAC_ARMV81_H
#if defined ( __ICCARM__ )
#pragma system_include /* treat file as system include file for MISRA check */
#elif defined (__clang__)
#pragma clang system_header /* treat file as system include file */
#endif
/* ################### PAC Key functions ########################### */
/**
\ingroup CMSIS_Core_FunctionInterface
\defgroup CMSIS_Core_PacKeyFunctions PAC Key functions
\brief Functions that access the PAC keys.
@{
*/
#if (defined (__ARM_FEATURE_PAUTH) && (__ARM_FEATURE_PAUTH == 1))
/**
\brief read the PAC key used for privileged mode
\details Reads the PAC key stored in the PAC_KEY_P registers.
\param [out] pPacKey 128bit PAC key
*/
__STATIC_FORCEINLINE void __get_PAC_KEY_P (uint32_t* pPacKey) {
__ASM volatile (
"mrs r1, pac_key_p_0\n"
"str r1,[%0,#0]\n"
"mrs r1, pac_key_p_1\n"
"str r1,[%0,#4]\n"
"mrs r1, pac_key_p_2\n"
"str r1,[%0,#8]\n"
"mrs r1, pac_key_p_3\n"
"str r1,[%0,#12]\n"
: : "r" (pPacKey) : "memory", "r1"
);
}
/**
\brief write the PAC key used for privileged mode
\details writes the given PAC key to the PAC_KEY_P registers.
\param [in] pPacKey 128bit PAC key
*/
__STATIC_FORCEINLINE void __set_PAC_KEY_P (uint32_t* pPacKey) {
__ASM volatile (
"ldr r1,[%0,#0]\n"
"msr pac_key_p_0, r1\n"
"ldr r1,[%0,#4]\n"
"msr pac_key_p_1, r1\n"
"ldr r1,[%0,#8]\n"
"msr pac_key_p_2, r1\n"
"ldr r1,[%0,#12]\n"
"msr pac_key_p_3, r1\n"
: : "r" (pPacKey) : "memory", "r1"
);
}
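/* Usage sketch (illustrative): installing and reading back the privileged-mode
 * PAC key, e.g. from early boot code. Requires the Armv8.1-M PAC extension
 * (not present on the Cortex-M23 used in GD32E23x); the key values are
 * arbitrary example data.
 *
 *   uint32_t key[4] = { 0x01234567U, 0x89ABCDEFU, 0x0F1E2D3CU, 0x4B5A6978U };
 *   __set_PAC_KEY_P(key);    // writes PAC_KEY_P_0..3
 *
 *   uint32_t check[4];
 *   __get_PAC_KEY_P(check);  // reads the four words back
 */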
/**
\brief read the PAC key used for unprivileged mode
\details Reads the PAC key stored in the PAC_KEY_U registers.
\param [out] pPacKey 128bit PAC key
*/
__STATIC_FORCEINLINE void __get_PAC_KEY_U (uint32_t* pPacKey) {
__ASM volatile (
"mrs r1, pac_key_u_0\n"
"str r1,[%0,#0]\n"
"mrs r1, pac_key_u_1\n"
"str r1,[%0,#4]\n"
"mrs r1, pac_key_u_2\n"
"str r1,[%0,#8]\n"
"mrs r1, pac_key_u_3\n"
"str r1,[%0,#12]\n"
: : "r" (pPacKey) : "memory", "r1"
);
}
/**
\brief write the PAC key used for unprivileged mode
\details writes the given PAC key to the PAC_KEY_U registers.
\param [in] pPacKey 128bit PAC key
*/
__STATIC_FORCEINLINE void __set_PAC_KEY_U (uint32_t* pPacKey) {
__ASM volatile (
"ldr r1,[%0,#0]\n"
"msr pac_key_u_0, r1\n"
"ldr r1,[%0,#4]\n"
"msr pac_key_u_1, r1\n"
"ldr r1,[%0,#8]\n"
"msr pac_key_u_2, r1\n"
"ldr r1,[%0,#12]\n"
"msr pac_key_u_3, r1\n"
: : "r" (pPacKey) : "memory", "r1"
);
}
#if (defined (__ARM_FEATURE_CMSE ) && (__ARM_FEATURE_CMSE == 3))
/**
\brief read the PAC key used for privileged mode (non-secure)
\details Reads the PAC key stored in the non-secure PAC_KEY_P registers when in secure mode.
\param [out] pPacKey 128bit PAC key
*/
__STATIC_FORCEINLINE void __TZ_get_PAC_KEY_P_NS (uint32_t* pPacKey) {
__ASM volatile (
"mrs r1, pac_key_p_0_ns\n"
"str r1,[%0,#0]\n"
"mrs r1, pac_key_p_1_ns\n"
"str r1,[%0,#4]\n"
"mrs r1, pac_key_p_2_ns\n"
"str r1,[%0,#8]\n"
"mrs r1, pac_key_p_3_ns\n"
"str r1,[%0,#12]\n"
: : "r" (pPacKey) : "memory", "r1"
);
}
/**
\brief write the PAC key used for privileged mode (non-secure)
\details writes the given PAC key to the non-secure PAC_KEY_P registers when in secure mode.
\param [in] pPacKey 128bit PAC key
*/
__STATIC_FORCEINLINE void __TZ_set_PAC_KEY_P_NS (uint32_t* pPacKey) {
__ASM volatile (
"ldr r1,[%0,#0]\n"
"msr pac_key_p_0_ns, r1\n"
"ldr r1,[%0,#4]\n"
"msr pac_key_p_1_ns, r1\n"
"ldr r1,[%0,#8]\n"
"msr pac_key_p_2_ns, r1\n"
"ldr r1,[%0,#12]\n"
"msr pac_key_p_3_ns, r1\n"
: : "r" (pPacKey) : "memory", "r1"
);
}
/**
\brief read the PAC key used for unprivileged mode (non-secure)
\details Reads the PAC key stored in the non-secure PAC_KEY_U registers when in secure mode.
\param [out] pPacKey 128bit PAC key
*/
__STATIC_FORCEINLINE void __TZ_get_PAC_KEY_U_NS (uint32_t* pPacKey) {
__ASM volatile (
"mrs r1, pac_key_u_0_ns\n"
"str r1,[%0,#0]\n"
"mrs r1, pac_key_u_1_ns\n"
"str r1,[%0,#4]\n"
"mrs r1, pac_key_u_2_ns\n"
"str r1,[%0,#8]\n"
"mrs r1, pac_key_u_3_ns\n"
"str r1,[%0,#12]\n"
: : "r" (pPacKey) : "memory", "r1"
);
}
/**
\brief write the PAC key used for unprivileged mode (non-secure)
\details writes the given PAC key to the non-secure PAC_KEY_U registers when in secure mode.
\param [in] pPacKey 128bit PAC key
*/
__STATIC_FORCEINLINE void __TZ_set_PAC_KEY_U_NS (uint32_t* pPacKey) {
__ASM volatile (
"ldr r1,[%0,#0]\n"
"msr pac_key_u_0_ns, r1\n"
"ldr r1,[%0,#4]\n"
"msr pac_key_u_1_ns, r1\n"
"ldr r1,[%0,#8]\n"
"msr pac_key_u_2_ns, r1\n"
"ldr r1,[%0,#12]\n"
"msr pac_key_u_3_ns, r1\n"
: : "r" (pPacKey) : "memory", "r1"
);
}
#endif /* (defined (__ARM_FEATURE_CMSE ) && (__ARM_FEATURE_CMSE == 3)) */
#endif /* (defined (__ARM_FEATURE_PAUTH) && (__ARM_FEATURE_PAUTH == 1)) */
/*@} end of CMSIS_Core_PacKeyFunctions */
#endif /* PAC_ARMV81_H */

View File

@@ -0,0 +1,421 @@
/*
* Copyright (c) 2017-2022 Arm Limited. All rights reserved.
*
* SPDX-License-Identifier: Apache-2.0
*
* Licensed under the Apache License, Version 2.0 (the License); you may
* not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an AS IS BASIS, WITHOUT
* WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
/*
* CMSIS-Core(M) MPU API for Armv8-M and Armv8.1-M MPU
*/
#ifndef ARM_MPU_ARMV8_H
#define ARM_MPU_ARMV8_H
#if defined ( __ICCARM__ )
#pragma system_include /* treat file as system include file for MISRA check */
#elif defined (__clang__)
#pragma clang system_header /* treat file as system include file */
#endif
/** \brief Attribute for device memory (outer only) */
#define ARM_MPU_ATTR_DEVICE ( 0U )
/** \brief Attribute for non-cacheable, normal memory */
#define ARM_MPU_ATTR_NON_CACHEABLE ( 4U )
/** \brief Attribute for Normal memory, Outer and Inner cacheability.
* \param NT Non-Transient: Set to 1 for Non-transient data. Set to 0 for Transient data.
* \param WB Write-Back: Set to 1 to use a Write-Back policy. Set to 0 to use a Write-Through policy.
* \param RA Read Allocation: Set to 1 to enable cache allocation on read miss. Set to 0 to disable cache allocation on read miss.
* \param WA Write Allocation: Set to 1 to enable cache allocation on write miss. Set to 0 to disable cache allocation on write miss.
*/
#define ARM_MPU_ATTR_MEMORY_(NT, WB, RA, WA) \
((((NT) & 1U) << 3U) | (((WB) & 1U) << 2U) | (((RA) & 1U) << 1U) | ((WA) & 1U))
/** \brief Device memory type non Gathering, non Re-ordering, non Early Write Acknowledgement */
#define ARM_MPU_ATTR_DEVICE_nGnRnE (0U)
/** \brief Device memory type non Gathering, non Re-ordering, Early Write Acknowledgement */
#define ARM_MPU_ATTR_DEVICE_nGnRE (1U)
/** \brief Device memory type non Gathering, Re-ordering, Early Write Acknowledgement */
#define ARM_MPU_ATTR_DEVICE_nGRE (2U)
/** \brief Device memory type Gathering, Re-ordering, Early Write Acknowledgement */
#define ARM_MPU_ATTR_DEVICE_GRE (3U)
/** \brief Normal memory outer-cacheable and inner-cacheable attributes
* WT = Write Through, WB = Write Back, TR = Transient, RA = Read-Allocate, WA = Write Allocate
*/
#define MPU_ATTR_NORMAL_OUTER_NON_CACHEABLE (0b0100)
#define MPU_ATTR_NORMAL_OUTER_WT_TR_RA (0b0010)
#define MPU_ATTR_NORMAL_OUTER_WT_TR_WA (0b0001)
#define MPU_ATTR_NORMAL_OUTER_WT_TR_RA_WA (0b0011)
#define MPU_ATTR_NORMAL_OUTER_WT_RA (0b1010)
#define MPU_ATTR_NORMAL_OUTER_WT_WA (0b1001)
#define MPU_ATTR_NORMAL_OUTER_WT_RA_WA (0b1011)
#define MPU_ATTR_NORMAL_OUTER_WB_TR_RA (0b0101)
#define MPU_ATTR_NORMAL_OUTER_WB_TR_WA (0b0110)
#define MPU_ATTR_NORMAL_OUTER_WB_TR_RA_WA (0b0111)
#define MPU_ATTR_NORMAL_OUTER_WB_RA (0b1101)
#define MPU_ATTR_NORMAL_OUTER_WB_WA (0b1110)
#define MPU_ATTR_NORMAL_OUTER_WB_RA_WA (0b1111)
#define MPU_ATTR_NORMAL_INNER_NON_CACHEABLE (0b0100)
#define MPU_ATTR_NORMAL_INNER_WT_TR_RA (0b0010)
#define MPU_ATTR_NORMAL_INNER_WT_TR_WA (0b0001)
#define MPU_ATTR_NORMAL_INNER_WT_TR_RA_WA (0b0011)
#define MPU_ATTR_NORMAL_INNER_WT_RA (0b1010)
#define MPU_ATTR_NORMAL_INNER_WT_WA (0b1001)
#define MPU_ATTR_NORMAL_INNER_WT_RA_WA (0b1011)
#define MPU_ATTR_NORMAL_INNER_WB_TR_RA (0b0101)
#define MPU_ATTR_NORMAL_INNER_WB_TR_WA (0b0110)
#define MPU_ATTR_NORMAL_INNER_WB_TR_RA_WA (0b0111)
#define MPU_ATTR_NORMAL_INNER_WB_RA (0b1101)
#define MPU_ATTR_NORMAL_INNER_WB_WA (0b1110)
#define MPU_ATTR_NORMAL_INNER_WB_RA_WA (0b1111)
/** \brief Memory Attribute
* \param O Outer memory attributes
* \param I O == ARM_MPU_ATTR_DEVICE: Device memory attributes, else: Inner memory attributes
*/
#define ARM_MPU_ATTR(O, I) ((((O) & 0xFU) << 4U) | ((((O) & 0xFU) != 0U) ? ((I) & 0xFU) : (((I) & 0x3U) << 2U)))
/* \brief Specifies MAIR_ATTR number */
#define MAIR_ATTR(x) ((x > 7 || x < 0) ? 0 : x)
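/* Illustrative usage sketch (not compiled, not part of the original header):
   composing MAIR attribute values with ARM_MPU_ATTR(). The chosen policies
   and the EXAMPLE_* macro names are examples only. */
#if 0
/* Normal memory, outer and inner write-back, non-transient, read/write allocate (0xFF) */
#define EXAMPLE_ATTR_NORMAL_WBWA   ARM_MPU_ATTR(ARM_MPU_ATTR_MEMORY_(1U, 1U, 1U, 1U), \
                                                ARM_MPU_ATTR_MEMORY_(1U, 1U, 1U, 1U))
/* Device memory, non-Gathering, non-Reordering, Early Write Acknowledgement (0x04) */
#define EXAMPLE_ATTR_DEVICE_nGnRE  ARM_MPU_ATTR(ARM_MPU_ATTR_DEVICE, ARM_MPU_ATTR_DEVICE_nGnRE)
#endif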
/**
* Shareability
*/
/** \brief Normal memory, non-shareable */
#define ARM_MPU_SH_NON (0U)
/** \brief Normal memory, outer shareable */
#define ARM_MPU_SH_OUTER (2U)
/** \brief Normal memory, inner shareable */
#define ARM_MPU_SH_INNER (3U)
/**
* Access permissions
* AP = Access permission, RO = Read-only, RW = Read/Write, NP = Any privilege, PO = Privileged code only
*/
/** \brief Normal memory, read/write */
#define ARM_MPU_AP_RW (0U)
/** \brief Normal memory, read-only */
#define ARM_MPU_AP_RO (1U)
/** \brief Normal memory, any privilege level */
#define ARM_MPU_AP_NP (1U)
/** \brief Normal memory, privileged access only */
#define ARM_MPU_AP_PO (0U)
/*
* Execute-never
* XN = Execute-never, EX = Executable
*/
/** \brief Normal memory, execution from this region is never permitted */
#define ARM_MPU_XN (1U)
/** \brief Normal memory, execution only permitted if read permitted */
#define ARM_MPU_EX (0U)
/** \brief Memory access permissions
* \param RO Read-Only: Set to 1 for read-only memory. Set to 0 for a read/write memory.
* \param NP Non-Privileged: Set to 1 for non-privileged memory. Set to 0 for privileged memory.
*/
#define ARM_MPU_AP_(RO, NP) ((((RO) & 1U) << 1U) | ((NP) & 1U))
/** \brief Region Base Address Register value
* \param BASE The base address bits [31:5] of a memory region. The value is zero extended. Effective address gets 32 byte aligned.
* \param SH Defines the Shareability domain for this memory region.
* \param RO Read-Only: Set to 1 for a read-only memory region. Set to 0 for a read/write memory region.
* \param NP Non-Privileged: Set to 1 for a non-privileged memory region. Set to 0 for privileged memory region.
* \param XN eXecute Never: Set to 1 for a non-executable memory region. Set to 0 for an executable memory region.
*/
#define ARM_MPU_RBAR(BASE, SH, RO, NP, XN) \
(((BASE) & MPU_RBAR_BASE_Msk) | \
(((SH) << MPU_RBAR_SH_Pos) & MPU_RBAR_SH_Msk) | \
((ARM_MPU_AP_(RO, NP) << MPU_RBAR_AP_Pos) & MPU_RBAR_AP_Msk) | \
(((XN) << MPU_RBAR_XN_Pos) & MPU_RBAR_XN_Msk))
/** \brief Region Limit Address Register value
* \param LIMIT The limit address bits [31:5] for this memory region. The value is one extended.
* \param IDX The attribute index to be associated with this memory region.
*/
#define ARM_MPU_RLAR(LIMIT, IDX) \
(((LIMIT) & MPU_RLAR_LIMIT_Msk) | \
(((IDX) << MPU_RLAR_AttrIndx_Pos) & MPU_RLAR_AttrIndx_Msk) | \
(MPU_RLAR_EN_Msk))
#if defined(MPU_RLAR_PXN_Pos)
/** \brief Region Limit Address Register with PXN value
* \param LIMIT The limit address bits [31:5] for this memory region. The value is one extended.
* \param PXN Privileged execute never. Defines whether code can be executed from this privileged region.
* \param IDX The attribute index to be associated with this memory region.
*/
#define ARM_MPU_RLAR_PXN(LIMIT, PXN, IDX) \
(((LIMIT) & MPU_RLAR_LIMIT_Msk) | \
(((PXN) << MPU_RLAR_PXN_Pos) & MPU_RLAR_PXN_Msk) | \
(((IDX) << MPU_RLAR_AttrIndx_Pos) & MPU_RLAR_AttrIndx_Msk) | \
(MPU_RLAR_EN_Msk))
#endif
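/* Illustrative usage sketch (not compiled, not part of the original header):
   encoding the RBAR/RLAR pair for a hypothetical 64 KiB SRAM region at
   0x20000000: non-shareable, read/write at any privilege level, execute-never,
   using attribute index 0. The address range is an assumption. */
#if 0
#define EXAMPLE_SRAM_RBAR  ARM_MPU_RBAR(0x20000000UL, ARM_MPU_SH_NON, \
                                        0U /* RO */, 1U /* NP */, 1U /* XN */)
#define EXAMPLE_SRAM_RLAR  ARM_MPU_RLAR(0x2000FFFFUL, 0U /* attribute index */)
#endif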
/**
* Struct for a single MPU Region
*/
typedef struct {
uint32_t RBAR; /*!< Region Base Address Register value */
uint32_t RLAR; /*!< Region Limit Address Register value */
} ARM_MPU_Region_t;
/**
\brief Read MPU Type Register
\return Number of MPU regions
*/
__STATIC_INLINE uint32_t ARM_MPU_TYPE()
{
return ((MPU->TYPE) >> 8);
}
/** Enable the MPU.
* \param MPU_Control Default access permissions for unconfigured regions.
*/
__STATIC_INLINE void ARM_MPU_Enable(uint32_t MPU_Control)
{
__DMB();
MPU->CTRL = MPU_Control | MPU_CTRL_ENABLE_Msk;
#ifdef SCB_SHCSR_MEMFAULTENA_Msk
SCB->SHCSR |= SCB_SHCSR_MEMFAULTENA_Msk;
#endif
__DSB();
__ISB();
}
/** Disable the MPU.
*/
__STATIC_INLINE void ARM_MPU_Disable(void)
{
__DMB();
#ifdef SCB_SHCSR_MEMFAULTENA_Msk
SCB->SHCSR &= ~SCB_SHCSR_MEMFAULTENA_Msk;
#endif
MPU->CTRL &= ~MPU_CTRL_ENABLE_Msk;
__DSB();
__ISB();
}
#ifdef MPU_NS
/** Enable the Non-secure MPU.
* \param MPU_Control Default access permissions for unconfigured regions.
*/
__STATIC_INLINE void ARM_MPU_Enable_NS(uint32_t MPU_Control)
{
__DMB();
MPU_NS->CTRL = MPU_Control | MPU_CTRL_ENABLE_Msk;
#ifdef SCB_SHCSR_MEMFAULTENA_Msk
SCB_NS->SHCSR |= SCB_SHCSR_MEMFAULTENA_Msk;
#endif
__DSB();
__ISB();
}
/** Disable the Non-secure MPU.
*/
__STATIC_INLINE void ARM_MPU_Disable_NS(void)
{
__DMB();
#ifdef SCB_SHCSR_MEMFAULTENA_Msk
SCB_NS->SHCSR &= ~SCB_SHCSR_MEMFAULTENA_Msk;
#endif
MPU_NS->CTRL &= ~MPU_CTRL_ENABLE_Msk;
__DSB();
__ISB();
}
#endif
/** Set the memory attribute encoding to the given MPU.
* \param mpu Pointer to the MPU to be configured.
* \param idx The attribute index to be set [0-7]
* \param attr The attribute value to be set.
*/
__STATIC_INLINE void ARM_MPU_SetMemAttrEx(MPU_Type* mpu, uint8_t idx, uint8_t attr)
{
const uint8_t reg = idx / 4U;
const uint32_t pos = ((idx % 4U) * 8U);
const uint32_t mask = 0xFFU << pos;
if (reg >= (sizeof(mpu->MAIR) / sizeof(mpu->MAIR[0]))) {
return; // invalid index
}
mpu->MAIR[reg] = ((mpu->MAIR[reg] & ~mask) | ((attr << pos) & mask));
}
/** Set the memory attribute encoding.
* \param idx The attribute index to be set [0-7]
* \param attr The attribute value to be set.
*/
__STATIC_INLINE void ARM_MPU_SetMemAttr(uint8_t idx, uint8_t attr)
{
ARM_MPU_SetMemAttrEx(MPU, idx, attr);
}
#ifdef MPU_NS
/** Set the memory attribute encoding to the Non-secure MPU.
* \param idx The attribute index to be set [0-7]
* \param attr The attribute value to be set.
*/
__STATIC_INLINE void ARM_MPU_SetMemAttr_NS(uint8_t idx, uint8_t attr)
{
ARM_MPU_SetMemAttrEx(MPU_NS, idx, attr);
}
#endif
/** Clear and disable the given MPU region of the given MPU.
* \param mpu Pointer to MPU to be used.
* \param rnr Region number to be cleared.
*/
__STATIC_INLINE void ARM_MPU_ClrRegionEx(MPU_Type* mpu, uint32_t rnr)
{
mpu->RNR = rnr;
mpu->RLAR = 0U;
}
/** Clear and disable the given MPU region.
* \param rnr Region number to be cleared.
*/
__STATIC_INLINE void ARM_MPU_ClrRegion(uint32_t rnr)
{
ARM_MPU_ClrRegionEx(MPU, rnr);
}
#ifdef MPU_NS
/** Clear and disable the given Non-secure MPU region.
* \param rnr Region number to be cleared.
*/
__STATIC_INLINE void ARM_MPU_ClrRegion_NS(uint32_t rnr)
{
ARM_MPU_ClrRegionEx(MPU_NS, rnr);
}
#endif
/** Configure the given MPU region of the given MPU.
* \param mpu Pointer to MPU to be used.
* \param rnr Region number to be configured.
* \param rbar Value for RBAR register.
* \param rlar Value for RLAR register.
*/
__STATIC_INLINE void ARM_MPU_SetRegionEx(MPU_Type* mpu, uint32_t rnr, uint32_t rbar, uint32_t rlar)
{
mpu->RNR = rnr;
mpu->RBAR = rbar;
mpu->RLAR = rlar;
}
/** Configure the given MPU region.
* \param rnr Region number to be configured.
* \param rbar Value for RBAR register.
* \param rlar Value for RLAR register.
*/
__STATIC_INLINE void ARM_MPU_SetRegion(uint32_t rnr, uint32_t rbar, uint32_t rlar)
{
ARM_MPU_SetRegionEx(MPU, rnr, rbar, rlar);
}
#ifdef MPU_NS
/** Configure the given Non-secure MPU region.
* \param rnr Region number to be configured.
* \param rbar Value for RBAR register.
* \param rlar Value for RLAR register.
*/
__STATIC_INLINE void ARM_MPU_SetRegion_NS(uint32_t rnr, uint32_t rbar, uint32_t rlar)
{
ARM_MPU_SetRegionEx(MPU_NS, rnr, rbar, rlar);
}
#endif
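/* Illustrative usage sketch (not compiled, not part of the original header):
   minimal MPU bring-up for one region. The address range, attribute choice and
   background-region policy are assumptions; MPU_CTRL_PRIVDEFENA_Msk is expected
   to come from the CMSIS core header. */
#if 0
static void example_mpu_setup(void)
{
  ARM_MPU_Disable();
  /* Attribute index 0: normal memory, outer/inner write-back, read/write allocate */
  ARM_MPU_SetMemAttr(0U, ARM_MPU_ATTR(ARM_MPU_ATTR_MEMORY_(1U, 1U, 1U, 1U),
                                      ARM_MPU_ATTR_MEMORY_(1U, 1U, 1U, 1U)));
  /* Region 0: 64 KiB SRAM at 0x20000000, non-shareable, RW any privilege, execute-never */
  ARM_MPU_SetRegion(0U,
                    ARM_MPU_RBAR(0x20000000UL, ARM_MPU_SH_NON, 0U, 1U, 1U),
                    ARM_MPU_RLAR(0x2000FFFFUL, 0U));
  /* Enable the MPU, keeping the default memory map for privileged accesses that hit no region */
  ARM_MPU_Enable(MPU_CTRL_PRIVDEFENA_Msk);
}
#endif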
/** Memcpy with strictly ordered memory access, e.g. used by code in ARM_MPU_LoadEx()
* \param dst Destination data is copied to.
* \param src Source data is copied from.
* \param len Amount of data words to be copied.
*/
__STATIC_INLINE void ARM_MPU_OrderedMemcpy(volatile uint32_t* dst, const uint32_t* __RESTRICT src, uint32_t len)
{
uint32_t i;
for (i = 0U; i < len; ++i)
{
dst[i] = src[i];
}
}
/** Load the given number of MPU regions from a table to the given MPU.
* \param mpu Pointer to the MPU registers to be used.
* \param rnr First region number to be configured.
* \param table Pointer to the MPU configuration table.
* \param cnt Amount of regions to be configured.
*/
__STATIC_INLINE void ARM_MPU_LoadEx(MPU_Type* mpu, uint32_t rnr, ARM_MPU_Region_t const* table, uint32_t cnt)
{
const uint32_t rowWordSize = sizeof(ARM_MPU_Region_t)/4U;
if (cnt == 1U) {
mpu->RNR = rnr;
ARM_MPU_OrderedMemcpy(&(mpu->RBAR), &(table->RBAR), rowWordSize);
} else {
uint32_t rnrBase = rnr & ~(MPU_TYPE_RALIASES-1U);
uint32_t rnrOffset = rnr % MPU_TYPE_RALIASES;
mpu->RNR = rnrBase;
while ((rnrOffset + cnt) > MPU_TYPE_RALIASES) {
uint32_t c = MPU_TYPE_RALIASES - rnrOffset;
ARM_MPU_OrderedMemcpy(&(mpu->RBAR)+(rnrOffset*2U), &(table->RBAR), c*rowWordSize);
table += c;
cnt -= c;
rnrOffset = 0U;
rnrBase += MPU_TYPE_RALIASES;
mpu->RNR = rnrBase;
}
ARM_MPU_OrderedMemcpy(&(mpu->RBAR)+(rnrOffset*2U), &(table->RBAR), cnt*rowWordSize);
}
}
/** Load the given number of MPU regions from a table.
* \param rnr First region number to be configured.
* \param table Pointer to the MPU configuration table.
* \param cnt Amount of regions to be configured.
*/
__STATIC_INLINE void ARM_MPU_Load(uint32_t rnr, ARM_MPU_Region_t const* table, uint32_t cnt)
{
ARM_MPU_LoadEx(MPU, rnr, table, cnt);
}
#ifdef MPU_NS
/** Load the given number of MPU regions from a table to the Non-secure MPU.
* \param rnr First region number to be configured.
* \param table Pointer to the MPU configuration table.
* \param cnt Amount of regions to be configured.
*/
__STATIC_INLINE void ARM_MPU_Load_NS(uint32_t rnr, ARM_MPU_Region_t const* table, uint32_t cnt)
{
ARM_MPU_LoadEx(MPU_NS, rnr, table, cnt);
}
#endif
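/* Illustrative usage sketch (not compiled, not part of the original header):
   table-driven region programming with ARM_MPU_Load(). The two regions and
   their address ranges are placeholders for the example only. */
#if 0
static const ARM_MPU_Region_t example_mpu_table[2] = {
  { /* region 0: code, read-only, executable */
    .RBAR = ARM_MPU_RBAR(0x08000000UL, ARM_MPU_SH_NON, 1U, 1U, 0U),
    .RLAR = ARM_MPU_RLAR(0x0801FFFFUL, 0U)
  },
  { /* region 1: SRAM, read/write, execute-never */
    .RBAR = ARM_MPU_RBAR(0x20000000UL, ARM_MPU_SH_NON, 0U, 1U, 1U),
    .RLAR = ARM_MPU_RLAR(0x2000FFFFUL, 0U)
  }
};

static void example_mpu_load(void)
{
  ARM_MPU_Load(0U, example_mpu_table, 2U);
}
#endif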
#endif

View File

@@ -0,0 +1,335 @@
/*
* Copyright (c) 2020 Arm Limited. All rights reserved.
*
* SPDX-License-Identifier: Apache-2.0
*
* Licensed under the Apache License, Version 2.0 (the License); you may
* not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an AS IS BASIS, WITHOUT
* WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
/*
* CMSIS-Core(M) PMU API for Armv8.1-M PMU
*/
#ifndef ARM_PMU_ARMV8_H
#define ARM_PMU_ARMV8_H
#if defined ( __ICCARM__ )
#pragma system_include /* treat file as system include file for MISRA check */
#elif defined (__clang__)
#pragma clang system_header /* treat file as system include file */
#endif
/**
* \brief PMU Events
* \note See the Armv8.1-M Architecture Reference Manual for full details on these PMU events.
* */
#define ARM_PMU_SW_INCR 0x0000 /*!< Software update to the PMU_SWINC register, architecturally executed and condition code check pass */
#define ARM_PMU_L1I_CACHE_REFILL 0x0001 /*!< L1 I-Cache refill */
#define ARM_PMU_L1D_CACHE_REFILL 0x0003 /*!< L1 D-Cache refill */
#define ARM_PMU_L1D_CACHE 0x0004 /*!< L1 D-Cache access */
#define ARM_PMU_LD_RETIRED 0x0006 /*!< Memory-reading instruction architecturally executed and condition code check pass */
#define ARM_PMU_ST_RETIRED 0x0007 /*!< Memory-writing instruction architecturally executed and condition code check pass */
#define ARM_PMU_INST_RETIRED 0x0008 /*!< Instruction architecturally executed */
#define ARM_PMU_EXC_TAKEN 0x0009 /*!< Exception entry */
#define ARM_PMU_EXC_RETURN 0x000A /*!< Exception return instruction architecturally executed and the condition code check pass */
#define ARM_PMU_PC_WRITE_RETIRED 0x000C /*!< Software change to the Program Counter (PC). Instruction is architecturally executed and condition code check pass */
#define ARM_PMU_BR_IMMED_RETIRED 0x000D /*!< Immediate branch architecturally executed */
#define ARM_PMU_BR_RETURN_RETIRED 0x000E /*!< Function return instruction architecturally executed and the condition code check pass */
#define ARM_PMU_UNALIGNED_LDST_RETIRED 0x000F /*!< Unaligned memory-reading or memory-writing instruction architecturally executed and condition code check pass */
#define ARM_PMU_BR_MIS_PRED 0x0010 /*!< Mispredicted or not predicted branch speculatively executed */
#define ARM_PMU_CPU_CYCLES 0x0011 /*!< Cycle */
#define ARM_PMU_BR_PRED 0x0012 /*!< Predictable branch speculatively executed */
#define ARM_PMU_MEM_ACCESS 0x0013 /*!< Data memory access */
#define ARM_PMU_L1I_CACHE 0x0014 /*!< Level 1 instruction cache access */
#define ARM_PMU_L1D_CACHE_WB 0x0015 /*!< Level 1 data cache write-back */
#define ARM_PMU_L2D_CACHE 0x0016 /*!< Level 2 data cache access */
#define ARM_PMU_L2D_CACHE_REFILL 0x0017 /*!< Level 2 data cache refill */
#define ARM_PMU_L2D_CACHE_WB 0x0018 /*!< Level 2 data cache write-back */
#define ARM_PMU_BUS_ACCESS 0x0019 /*!< Bus access */
#define ARM_PMU_MEMORY_ERROR 0x001A /*!< Local memory error */
#define ARM_PMU_INST_SPEC 0x001B /*!< Instruction speculatively executed */
#define ARM_PMU_BUS_CYCLES 0x001D /*!< Bus cycles */
#define ARM_PMU_CHAIN 0x001E /*!< For an odd numbered counter, increment when an overflow occurs on the preceding even-numbered counter on the same PE */
#define ARM_PMU_L1D_CACHE_ALLOCATE 0x001F /*!< Level 1 data cache allocation without refill */
#define ARM_PMU_L2D_CACHE_ALLOCATE 0x0020 /*!< Level 2 data cache allocation without refill */
#define ARM_PMU_BR_RETIRED 0x0021 /*!< Branch instruction architecturally executed */
#define ARM_PMU_BR_MIS_PRED_RETIRED 0x0022 /*!< Mispredicted branch instruction architecturally executed */
#define ARM_PMU_STALL_FRONTEND 0x0023 /*!< No operation issued because of the frontend */
#define ARM_PMU_STALL_BACKEND 0x0024 /*!< No operation issued because of the backend */
#define ARM_PMU_L2I_CACHE 0x0027 /*!< Level 2 instruction cache access */
#define ARM_PMU_L2I_CACHE_REFILL 0x0028 /*!< Level 2 instruction cache refill */
#define ARM_PMU_L3D_CACHE_ALLOCATE 0x0029 /*!< Level 3 data cache allocation without refill */
#define ARM_PMU_L3D_CACHE_REFILL 0x002A /*!< Level 3 data cache refill */
#define ARM_PMU_L3D_CACHE 0x002B /*!< Level 3 data cache access */
#define ARM_PMU_L3D_CACHE_WB 0x002C /*!< Level 3 data cache write-back */
#define ARM_PMU_LL_CACHE_RD 0x0036 /*!< Last level data cache read */
#define ARM_PMU_LL_CACHE_MISS_RD 0x0037 /*!< Last level data cache read miss */
#define ARM_PMU_L1D_CACHE_MISS_RD 0x0039 /*!< Level 1 data cache read miss */
#define ARM_PMU_OP_COMPLETE 0x003A /*!< Operation retired */
#define ARM_PMU_OP_SPEC 0x003B /*!< Operation speculatively executed */
#define ARM_PMU_STALL 0x003C /*!< Stall cycle for instruction or operation not sent for execution */
#define ARM_PMU_STALL_OP_BACKEND 0x003D /*!< Stall cycle for instruction or operation not sent for execution due to pipeline backend */
#define ARM_PMU_STALL_OP_FRONTEND 0x003E /*!< Stall cycle for instruction or operation not sent for execution due to pipeline frontend */
#define ARM_PMU_STALL_OP 0x003F /*!< Instruction or operation slots not occupied each cycle */
#define ARM_PMU_L1D_CACHE_RD 0x0040 /*!< Level 1 data cache read */
#define ARM_PMU_LE_RETIRED 0x0100 /*!< Loop end instruction executed */
#define ARM_PMU_LE_SPEC 0x0101 /*!< Loop end instruction speculatively executed */
#define ARM_PMU_BF_RETIRED 0x0104 /*!< Branch future instruction architecturally executed and condition code check pass */
#define ARM_PMU_BF_SPEC 0x0105 /*!< Branch future instruction speculatively executed and condition code check pass */
#define ARM_PMU_LE_CANCEL 0x0108 /*!< Loop end instruction not taken */
#define ARM_PMU_BF_CANCEL 0x0109 /*!< Branch future instruction not taken */
#define ARM_PMU_SE_CALL_S 0x0114 /*!< Call to secure function, resulting in Security state change */
#define ARM_PMU_SE_CALL_NS 0x0115 /*!< Call to non-secure function, resulting in Security state change */
#define ARM_PMU_DWT_CMPMATCH0 0x0118 /*!< DWT comparator 0 match */
#define ARM_PMU_DWT_CMPMATCH1 0x0119 /*!< DWT comparator 1 match */
#define ARM_PMU_DWT_CMPMATCH2 0x011A /*!< DWT comparator 2 match */
#define ARM_PMU_DWT_CMPMATCH3 0x011B /*!< DWT comparator 3 match */
#define ARM_PMU_MVE_INST_RETIRED 0x0200 /*!< MVE instruction architecturally executed */
#define ARM_PMU_MVE_INST_SPEC 0x0201 /*!< MVE instruction speculatively executed */
#define ARM_PMU_MVE_FP_RETIRED 0x0204 /*!< MVE floating-point instruction architecturally executed */
#define ARM_PMU_MVE_FP_SPEC 0x0205 /*!< MVE floating-point instruction speculatively executed */
#define ARM_PMU_MVE_FP_HP_RETIRED 0x0208 /*!< MVE half-precision floating-point instruction architecturally executed */
#define ARM_PMU_MVE_FP_HP_SPEC 0x0209 /*!< MVE half-precision floating-point instruction speculatively executed */
#define ARM_PMU_MVE_FP_SP_RETIRED 0x020C /*!< MVE single-precision floating-point instruction architecturally executed */
#define ARM_PMU_MVE_FP_SP_SPEC 0x020D /*!< MVE single-precision floating-point instruction speculatively executed */
#define ARM_PMU_MVE_FP_MAC_RETIRED 0x0214 /*!< MVE floating-point multiply or multiply-accumulate instruction architecturally executed */
#define ARM_PMU_MVE_FP_MAC_SPEC 0x0215 /*!< MVE floating-point multiply or multiply-accumulate instruction speculatively executed */
#define ARM_PMU_MVE_INT_RETIRED 0x0224 /*!< MVE integer instruction architecturally executed */
#define ARM_PMU_MVE_INT_SPEC 0x0225 /*!< MVE integer instruction speculatively executed */
#define ARM_PMU_MVE_INT_MAC_RETIRED 0x0228 /*!< MVE multiply or multiply-accumulate instruction architecturally executed */
#define ARM_PMU_MVE_INT_MAC_SPEC 0x0229 /*!< MVE multiply or multiply-accumulate instruction speculatively executed */
#define ARM_PMU_MVE_LDST_RETIRED 0x0238 /*!< MVE load or store instruction architecturally executed */
#define ARM_PMU_MVE_LDST_SPEC 0x0239 /*!< MVE load or store instruction speculatively executed */
#define ARM_PMU_MVE_LD_RETIRED 0x023C /*!< MVE load instruction architecturally executed */
#define ARM_PMU_MVE_LD_SPEC 0x023D /*!< MVE load instruction speculatively executed */
#define ARM_PMU_MVE_ST_RETIRED 0x0240 /*!< MVE store instruction architecturally executed */
#define ARM_PMU_MVE_ST_SPEC 0x0241 /*!< MVE store instruction speculatively executed */
#define ARM_PMU_MVE_LDST_CONTIG_RETIRED 0x0244 /*!< MVE contiguous load or store instruction architecturally executed */
#define ARM_PMU_MVE_LDST_CONTIG_SPEC 0x0245 /*!< MVE contiguous load or store instruction speculatively executed */
#define ARM_PMU_MVE_LD_CONTIG_RETIRED 0x0248 /*!< MVE contiguous load instruction architecturally executed */
#define ARM_PMU_MVE_LD_CONTIG_SPEC 0x0249 /*!< MVE contiguous load instruction speculatively executed */
#define ARM_PMU_MVE_ST_CONTIG_RETIRED 0x024C /*!< MVE contiguous store instruction architecturally executed */
#define ARM_PMU_MVE_ST_CONTIG_SPEC 0x024D /*!< MVE contiguous store instruction speculatively executed */
#define ARM_PMU_MVE_LDST_NONCONTIG_RETIRED 0x0250 /*!< MVE non-contiguous load or store instruction architecturally executed */
#define ARM_PMU_MVE_LDST_NONCONTIG_SPEC 0x0251 /*!< MVE non-contiguous load or store instruction speculatively executed */
#define ARM_PMU_MVE_LD_NONCONTIG_RETIRED 0x0254 /*!< MVE non-contiguous load instruction architecturally executed */
#define ARM_PMU_MVE_LD_NONCONTIG_SPEC 0x0255 /*!< MVE non-contiguous load instruction speculatively executed */
#define ARM_PMU_MVE_ST_NONCONTIG_RETIRED 0x0258 /*!< MVE non-contiguous store instruction architecturally executed */
#define ARM_PMU_MVE_ST_NONCONTIG_SPEC 0x0259 /*!< MVE non-contiguous store instruction speculatively executed */
#define ARM_PMU_MVE_LDST_MULTI_RETIRED 0x025C /*!< MVE memory instruction targeting multiple registers architecturally executed */
#define ARM_PMU_MVE_LDST_MULTI_SPEC 0x025D /*!< MVE memory instruction targeting multiple registers speculatively executed */
#define ARM_PMU_MVE_LD_MULTI_RETIRED 0x0260 /*!< MVE memory load instruction targeting multiple registers architecturally executed */
#define ARM_PMU_MVE_LD_MULTI_SPEC 0x0261 /*!< MVE memory load instruction targeting multiple registers speculatively executed */
#define ARM_PMU_MVE_ST_MULTI_RETIRED 0x0264 /*!< MVE memory store instruction targeting multiple registers architecturally executed */
#define ARM_PMU_MVE_ST_MULTI_SPEC 0x0265 /*!< MVE memory store instruction targeting multiple registers speculatively executed */
#define ARM_PMU_MVE_LDST_UNALIGNED_RETIRED 0x028C /*!< MVE unaligned memory load or store instruction architecturally executed */
#define ARM_PMU_MVE_LDST_UNALIGNED_SPEC 0x028D /*!< MVE unaligned memory load or store instruction speculatively executed */
#define ARM_PMU_MVE_LD_UNALIGNED_RETIRED 0x0290 /*!< MVE unaligned load instruction architecturally executed */
#define ARM_PMU_MVE_LD_UNALIGNED_SPEC 0x0291 /*!< MVE unaligned load instruction speculatively executed */
#define ARM_PMU_MVE_ST_UNALIGNED_RETIRED 0x0294 /*!< MVE unaligned store instruction architecturally executed */
#define ARM_PMU_MVE_ST_UNALIGNED_SPEC 0x0295 /*!< MVE unaligned store instruction speculatively executed */
#define ARM_PMU_MVE_LDST_UNALIGNED_NONCONTIG_RETIRED 0x0298 /*!< MVE unaligned noncontiguous load or store instruction architecturally executed */
#define ARM_PMU_MVE_LDST_UNALIGNED_NONCONTIG_SPEC 0x0299 /*!< MVE unaligned noncontiguous load or store instruction speculatively executed */
#define ARM_PMU_MVE_VREDUCE_RETIRED 0x02A0 /*!< MVE vector reduction instruction architecturally executed */
#define ARM_PMU_MVE_VREDUCE_SPEC 0x02A1 /*!< MVE vector reduction instruction speculatively executed */
#define ARM_PMU_MVE_VREDUCE_FP_RETIRED 0x02A4 /*!< MVE floating-point vector reduction instruction architecturally executed */
#define ARM_PMU_MVE_VREDUCE_FP_SPEC 0x02A5 /*!< MVE floating-point vector reduction instruction speculatively executed */
#define ARM_PMU_MVE_VREDUCE_INT_RETIRED 0x02A8 /*!< MVE integer vector reduction instruction architecturally executed */
#define ARM_PMU_MVE_VREDUCE_INT_SPEC 0x02A9 /*!< MVE integer vector reduction instruction speculatively executed */
#define ARM_PMU_MVE_PRED 0x02B8 /*!< Cycles where one or more predicated beats architecturally executed */
#define ARM_PMU_MVE_STALL 0x02CC /*!< Stall cycles caused by an MVE instruction */
#define ARM_PMU_MVE_STALL_RESOURCE 0x02CD /*!< Stall cycles caused by an MVE instruction because of resource conflicts */
#define ARM_PMU_MVE_STALL_RESOURCE_MEM 0x02CE /*!< Stall cycles caused by an MVE instruction because of memory resource conflicts */
#define ARM_PMU_MVE_STALL_RESOURCE_FP 0x02CF /*!< Stall cycles caused by an MVE instruction because of floating-point resource conflicts */
#define ARM_PMU_MVE_STALL_RESOURCE_INT 0x02D0 /*!< Stall cycles caused by an MVE instruction because of integer resource conflicts */
#define ARM_PMU_MVE_STALL_BREAK 0x02D3 /*!< Stall cycles caused by an MVE chain break */
#define ARM_PMU_MVE_STALL_DEPENDENCY 0x02D4 /*!< Stall cycles caused by MVE register dependency */
#define ARM_PMU_ITCM_ACCESS 0x4007 /*!< Instruction TCM access */
#define ARM_PMU_DTCM_ACCESS 0x4008 /*!< Data TCM access */
#define ARM_PMU_TRCEXTOUT0 0x4010 /*!< ETM external output 0 */
#define ARM_PMU_TRCEXTOUT1 0x4011 /*!< ETM external output 1 */
#define ARM_PMU_TRCEXTOUT2 0x4012 /*!< ETM external output 2 */
#define ARM_PMU_TRCEXTOUT3 0x4013 /*!< ETM external output 3 */
#define ARM_PMU_CTI_TRIGOUT4 0x4018 /*!< Cross-trigger Interface output trigger 4 */
#define ARM_PMU_CTI_TRIGOUT5 0x4019 /*!< Cross-trigger Interface output trigger 5 */
#define ARM_PMU_CTI_TRIGOUT6 0x401A /*!< Cross-trigger Interface output trigger 6 */
#define ARM_PMU_CTI_TRIGOUT7 0x401B /*!< Cross-trigger Interface output trigger 7 */
/** \brief PMU Functions */
__STATIC_INLINE void ARM_PMU_Enable(void);
__STATIC_INLINE void ARM_PMU_Disable(void);
__STATIC_INLINE void ARM_PMU_Set_EVTYPER(uint32_t num, uint32_t type);
__STATIC_INLINE void ARM_PMU_CYCCNT_Reset(void);
__STATIC_INLINE void ARM_PMU_EVCNTR_ALL_Reset(void);
__STATIC_INLINE void ARM_PMU_CNTR_Enable(uint32_t mask);
__STATIC_INLINE void ARM_PMU_CNTR_Disable(uint32_t mask);
__STATIC_INLINE uint32_t ARM_PMU_Get_CCNTR(void);
__STATIC_INLINE uint32_t ARM_PMU_Get_EVCNTR(uint32_t num);
__STATIC_INLINE uint32_t ARM_PMU_Get_CNTR_OVS(void);
__STATIC_INLINE void ARM_PMU_Set_CNTR_OVS(uint32_t mask);
__STATIC_INLINE void ARM_PMU_Set_CNTR_IRQ_Enable(uint32_t mask);
__STATIC_INLINE void ARM_PMU_Set_CNTR_IRQ_Disable(uint32_t mask);
__STATIC_INLINE void ARM_PMU_CNTR_Increment(uint32_t mask);
/**
\brief Enable the PMU
*/
__STATIC_INLINE void ARM_PMU_Enable(void)
{
PMU->CTRL |= PMU_CTRL_ENABLE_Msk;
}
/**
\brief Disable the PMU
*/
__STATIC_INLINE void ARM_PMU_Disable(void)
{
PMU->CTRL &= ~PMU_CTRL_ENABLE_Msk;
}
/**
\brief Set event to count for a PMU event counter
\param [in] num Event counter (0-30) to configure
\param [in] type Event to count
*/
__STATIC_INLINE void ARM_PMU_Set_EVTYPER(uint32_t num, uint32_t type)
{
PMU->EVTYPER[num] = type;
}
/**
\brief Reset cycle counter
*/
__STATIC_INLINE void ARM_PMU_CYCCNT_Reset(void)
{
PMU->CTRL |= PMU_CTRL_CYCCNT_RESET_Msk;
}
/**
\brief Reset all event counters
*/
__STATIC_INLINE void ARM_PMU_EVCNTR_ALL_Reset(void)
{
PMU->CTRL |= PMU_CTRL_EVENTCNT_RESET_Msk;
}
/**
\brief Enable counters
\param [in] mask Counters to enable
\note Enables one or more of the following:
- event counters (0-30)
- cycle counter
*/
__STATIC_INLINE void ARM_PMU_CNTR_Enable(uint32_t mask)
{
PMU->CNTENSET = mask;
}
/**
\brief Disable counters
\param [in] mask Counters to disable
\note Disables one or more of the following:
- event counters (0-30)
- cycle counter
*/
__STATIC_INLINE void ARM_PMU_CNTR_Disable(uint32_t mask)
{
PMU->CNTENCLR = mask;
}
/**
\brief Read cycle counter
\return Cycle count
*/
__STATIC_INLINE uint32_t ARM_PMU_Get_CCNTR(void)
{
return PMU->CCNTR;
}
/**
\brief Read event counter
\param [in] num Event counter (0-30) to read
\return Event count
*/
__STATIC_INLINE uint32_t ARM_PMU_Get_EVCNTR(uint32_t num)
{
return PMU_EVCNTR_CNT_Msk & PMU->EVCNTR[num];
}
/**
\brief Read counter overflow status
\return Counter overflow status bits for the following:
- event counters (0-30)
- cycle counter
*/
__STATIC_INLINE uint32_t ARM_PMU_Get_CNTR_OVS(void)
{
return PMU->OVSSET;
}
/**
\brief Clear counter overflow status
\param [in] mask Counter overflow status bits to clear
\note Clears overflow status bits for one or more of the following:
- event counters (0-30)
- cycle counter
*/
__STATIC_INLINE void ARM_PMU_Set_CNTR_OVS(uint32_t mask)
{
PMU->OVSCLR = mask;
}
/**
\brief Enable counter overflow interrupt request
\param [in] mask Counter overflow interrupt request bits to set
\note Sets overflow interrupt request bits for one or more of the following:
- event counters (0-30)
- cycle counter
*/
__STATIC_INLINE void ARM_PMU_Set_CNTR_IRQ_Enable(uint32_t mask)
{
PMU->INTENSET = mask;
}
/**
\brief Disable counter overflow interrupt request
\param [in] mask Counter overflow interrupt request bits to clear
\note Clears overflow interrupt request bits for one or more of the following:
- event counters (0-30)
- cycle counter
*/
__STATIC_INLINE void ARM_PMU_Set_CNTR_IRQ_Disable(uint32_t mask)
{
PMU->INTENCLR = mask;
}
/**
\brief Software increment event counter
\param [in] mask Counters to increment
\note Software increment bits for one or more event counters (0-30)
*/
__STATIC_INLINE void ARM_PMU_CNTR_Increment(uint32_t mask)
{
PMU->SWINC = mask;
}
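/* Illustrative usage sketch (not compiled, not part of the original header):
   count architecturally executed instructions on event counter 0 alongside the
   cycle counter. The counter/event choices and the function name are
   assumptions; bit 31 of CNTENSET/CNTENCLR selects the cycle counter and
   bits 0-30 select the event counters. */
#if 0
static uint32_t example_pmu_measure(void)
{
  ARM_PMU_Enable();
  ARM_PMU_Set_EVTYPER(0U, ARM_PMU_INST_RETIRED);     /* event counter 0 counts retired instructions */
  ARM_PMU_EVCNTR_ALL_Reset();
  ARM_PMU_CYCCNT_Reset();
  ARM_PMU_CNTR_Enable((1UL << 31) | (1UL << 0));     /* cycle counter + event counter 0 */

  /* ... code under measurement ... */

  ARM_PMU_CNTR_Disable((1UL << 31) | (1UL << 0));
  return ARM_PMU_Get_EVCNTR(0U);                     /* instructions retired */
}
#endif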
#endif

View File

@@ -0,0 +1,818 @@
/*
* Copyright (c) 2009-2024 Arm Limited. All rights reserved.
*
* SPDX-License-Identifier: Apache-2.0
*
* Licensed under the Apache License, Version 2.0 (the License); you may
* not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an AS IS BASIS, WITHOUT
* WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
/*
* CMSIS-Core(M) Compiler ARMClang (Arm Compiler 6) Header File
*/
#ifndef __CMSIS_ARMCLANG_M_H
#define __CMSIS_ARMCLANG_M_H
#pragma clang system_header /* treat file as system include file */
#ifndef __CMSIS_ARMCLANG_H
#error "This file must not be included directly"
#endif
#if (__ARM_ACLE >= 200)
#include <arm_acle.h>
#else
#error Compiler must support ACLE V2.0
#endif /* (__ARM_ACLE >= 200) */
/* ######################### Startup and Lowlevel Init ######################## */
#ifndef __PROGRAM_START
#define __PROGRAM_START __main
#endif
#ifndef __INITIAL_SP
#define __INITIAL_SP Image$$ARM_LIB_STACK$$ZI$$Limit
#endif
#ifndef __STACK_LIMIT
#define __STACK_LIMIT Image$$ARM_LIB_STACK$$ZI$$Base
#endif
#ifndef __VECTOR_TABLE
#define __VECTOR_TABLE __Vectors
#endif
#ifndef __VECTOR_TABLE_ATTRIBUTE
#define __VECTOR_TABLE_ATTRIBUTE __attribute__((used, section("RESET")))
#endif
#if (__ARM_FEATURE_CMSE == 3)
#ifndef __STACK_SEAL
#define __STACK_SEAL Image$$STACKSEAL$$ZI$$Base
#endif
#ifndef __TZ_STACK_SEAL_SIZE
#define __TZ_STACK_SEAL_SIZE 8U
#endif
#ifndef __TZ_STACK_SEAL_VALUE
#define __TZ_STACK_SEAL_VALUE 0xFEF5EDA5FEF5EDA5ULL
#endif
__STATIC_FORCEINLINE void __TZ_set_STACKSEAL_S (uint32_t* stackTop) {
*((uint64_t *)stackTop) = __TZ_STACK_SEAL_VALUE;
}
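/* Illustrative usage sketch (not compiled, not part of the original header):
   seal the top of the secure stack during secure startup, before any
   non-secure code runs. It assumes the scatter file reserves
   __TZ_STACK_SEAL_SIZE bytes at the location named by __STACK_SEAL. */
#if 0
extern uint32_t __STACK_SEAL;

static void example_seal_secure_stack(void)
{
  __TZ_set_STACKSEAL_S((uint32_t *)&__STACK_SEAL);
}
#endif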
#endif
#if (__ARM_ARCH_ISA_THUMB >= 2)
/**
\brief STRBT Unprivileged (8 bit)
\details Executes an unprivileged STRBT instruction for 8 bit values.
\param [in] value Value to store
\param [in] ptr Pointer to location
*/
__STATIC_FORCEINLINE void __STRBT(uint8_t value, volatile uint8_t *ptr)
{
__ASM volatile ("strbt %1, %0" : "=Q" (*ptr) : "r" ((uint32_t)value) );
}
/**
\brief STRHT Unprivileged (16 bit)
\details Executes an unprivileged STRHT instruction for 16 bit values.
\param [in] value Value to store
\param [in] ptr Pointer to location
*/
__STATIC_FORCEINLINE void __STRHT(uint16_t value, volatile uint16_t *ptr)
{
__ASM volatile ("strht %1, %0" : "=Q" (*ptr) : "r" ((uint32_t)value) );
}
/**
\brief STRT Unprivileged (32 bit)
\details Executes an unprivileged STRT instruction for 32 bit values.
\param [in] value Value to store
\param [in] ptr Pointer to location
*/
__STATIC_FORCEINLINE void __STRT(uint32_t value, volatile uint32_t *ptr)
{
__ASM volatile ("strt %1, %0" : "=Q" (*ptr) : "r" (value) );
}
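/* Illustrative usage sketch (not compiled, not part of the original header):
   a privileged service writing through a pointer supplied by unprivileged
   code. Using __STRT makes the MPU check the access with unprivileged
   permissions, so a bad pointer faults instead of bypassing protection.
   The function name and scenario are assumptions. */
#if 0
static void example_store_user_word(uint32_t *user_ptr, uint32_t value)
{
  __STRT(value, user_ptr);
}
#endif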
#endif /* (__ARM_ARCH_ISA_THUMB >= 2) */
/* ########################### Core Function Access ########################### */
/** \ingroup CMSIS_Core_FunctionInterface
\defgroup CMSIS_Core_RegAccFunctions CMSIS Core Register Access Functions
@{
*/
/**
\brief Get Control Register
\details Returns the content of the Control Register.
\return Control Register value
*/
__STATIC_FORCEINLINE uint32_t __get_CONTROL(void)
{
uint32_t result;
__ASM volatile ("MRS %0, control" : "=r" (result) );
return (result);
}
#if (__ARM_FEATURE_CMSE == 3)
/**
\brief Get Control Register (non-secure)
\details Returns the content of the non-secure Control Register when in secure mode.
\return non-secure Control Register value
*/
__STATIC_FORCEINLINE uint32_t __TZ_get_CONTROL_NS(void)
{
uint32_t result;
__ASM volatile ("MRS %0, control_ns" : "=r" (result) );
return (result);
}
#endif
/**
\brief Set Control Register
\details Writes the given value to the Control Register.
\param [in] control Control Register value to set
*/
__STATIC_FORCEINLINE void __set_CONTROL(uint32_t control)
{
__ASM volatile ("MSR control, %0" : : "r" (control) : "memory");
__ISB();
}
#if (__ARM_FEATURE_CMSE == 3)
/**
\brief Set Control Register (non-secure)
\details Writes the given value to the non-secure Control Register when in secure state.
\param [in] control Control Register value to set
*/
__STATIC_FORCEINLINE void __TZ_set_CONTROL_NS(uint32_t control)
{
__ASM volatile ("MSR control_ns, %0" : : "r" (control) : "memory");
__ISB();
}
#endif
/**
\brief Get IPSR Register
\details Returns the content of the IPSR Register.
\return IPSR Register value
*/
__STATIC_FORCEINLINE uint32_t __get_IPSR(void)
{
uint32_t result;
__ASM volatile ("MRS %0, ipsr" : "=r" (result) );
return (result);
}
/**
\brief Get APSR Register
\details Returns the content of the APSR Register.
\return APSR Register value
*/
__STATIC_FORCEINLINE uint32_t __get_APSR(void)
{
uint32_t result;
__ASM volatile ("MRS %0, apsr" : "=r" (result) );
return (result);
}
/**
\brief Get xPSR Register
\details Returns the content of the xPSR Register.
\return xPSR Register value
*/
__STATIC_FORCEINLINE uint32_t __get_xPSR(void)
{
uint32_t result;
__ASM volatile ("MRS %0, xpsr" : "=r" (result) );
return (result);
}
/**
\brief Get Process Stack Pointer
\details Returns the current value of the Process Stack Pointer (PSP).
\return PSP Register value
*/
__STATIC_FORCEINLINE uint32_t __get_PSP(void)
{
uint32_t result;
__ASM volatile ("MRS %0, psp" : "=r" (result) );
return (result);
}
#if (__ARM_FEATURE_CMSE == 3)
/**
\brief Get Process Stack Pointer (non-secure)
\details Returns the current value of the non-secure Process Stack Pointer (PSP) when in secure state.
\return PSP Register value
*/
__STATIC_FORCEINLINE uint32_t __TZ_get_PSP_NS(void)
{
uint32_t result;
__ASM volatile ("MRS %0, psp_ns" : "=r" (result) );
return (result);
}
#endif
/**
\brief Set Process Stack Pointer
\details Assigns the given value to the Process Stack Pointer (PSP).
\param [in] topOfProcStack Process Stack Pointer value to set
*/
__STATIC_FORCEINLINE void __set_PSP(uint32_t topOfProcStack)
{
__ASM volatile ("MSR psp, %0" : : "r" (topOfProcStack) : );
}
#if (__ARM_FEATURE_CMSE == 3)
/**
\brief Set Process Stack Pointer (non-secure)
\details Assigns the given value to the non-secure Process Stack Pointer (PSP) when in secure state.
\param [in] topOfProcStack Process Stack Pointer value to set
*/
__STATIC_FORCEINLINE void __TZ_set_PSP_NS(uint32_t topOfProcStack)
{
__ASM volatile ("MSR psp_ns, %0" : : "r" (topOfProcStack) : );
}
#endif
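/* Illustrative usage sketch (not compiled, not part of the original header):
   switch thread mode onto the process stack, e.g. before starting a
   hypothetical scheduler. The stack array and its size are assumptions;
   CONTROL_SPSEL_Msk is expected to come from the CMSIS core header. */
#if 0
static uint64_t example_psp_stack[256];                 /* 2 KiB process stack, 8-byte aligned */

static void example_switch_to_psp(void)
{
  __set_PSP((uint32_t)&example_psp_stack[256]);         /* top of the process stack */
  __set_CONTROL(__get_CONTROL() | CONTROL_SPSEL_Msk);   /* thread mode now uses PSP */
  /* __set_CONTROL() already issues the required ISB */
}
#endif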
/**
\brief Get Main Stack Pointer
\details Returns the current value of the Main Stack Pointer (MSP).
\return MSP Register value
*/
__STATIC_FORCEINLINE uint32_t __get_MSP(void)
{
uint32_t result;
__ASM volatile ("MRS %0, msp" : "=r" (result) );
return (result);
}
#if (__ARM_FEATURE_CMSE == 3)
/**
\brief Get Main Stack Pointer (non-secure)
\details Returns the current value of the non-secure Main Stack Pointer (MSP) when in secure state.
\return MSP Register value
*/
__STATIC_FORCEINLINE uint32_t __TZ_get_MSP_NS(void)
{
uint32_t result;
__ASM volatile ("MRS %0, msp_ns" : "=r" (result) );
return (result);
}
#endif
/**
\brief Set Main Stack Pointer
\details Assigns the given value to the Main Stack Pointer (MSP).
\param [in] topOfMainStack Main Stack Pointer value to set
*/
__STATIC_FORCEINLINE void __set_MSP(uint32_t topOfMainStack)
{
__ASM volatile ("MSR msp, %0" : : "r" (topOfMainStack) : );
}
#if (__ARM_FEATURE_CMSE == 3)
/**
\brief Set Main Stack Pointer (non-secure)
\details Assigns the given value to the non-secure Main Stack Pointer (MSP) when in secure state.
\param [in] topOfMainStack Main Stack Pointer value to set
*/
__STATIC_FORCEINLINE void __TZ_set_MSP_NS(uint32_t topOfMainStack)
{
__ASM volatile ("MSR msp_ns, %0" : : "r" (topOfMainStack) : );
}
#endif
#if (__ARM_FEATURE_CMSE == 3)
/**
\brief Get Stack Pointer (non-secure)
\details Returns the current value of the non-secure Stack Pointer (SP) when in secure state.
\return SP Register value
*/
__STATIC_FORCEINLINE uint32_t __TZ_get_SP_NS(void)
{
uint32_t result;
__ASM volatile ("MRS %0, sp_ns" : "=r" (result) );
return (result);
}
/**
\brief Set Stack Pointer (non-secure)
\details Assigns the given value to the non-secure Stack Pointer (SP) when in secure state.
\param [in] topOfStack Stack Pointer value to set
*/
__STATIC_FORCEINLINE void __TZ_set_SP_NS(uint32_t topOfStack)
{
__ASM volatile ("MSR sp_ns, %0" : : "r" (topOfStack) : );
}
#endif
/**
\brief Get Priority Mask
\details Returns the current state of the priority mask bit from the Priority Mask Register.
\return Priority Mask value
*/
__STATIC_FORCEINLINE uint32_t __get_PRIMASK(void)
{
uint32_t result;
__ASM volatile ("MRS %0, primask" : "=r" (result) );
return (result);
}
#if (__ARM_FEATURE_CMSE == 3)
/**
\brief Get Priority Mask (non-secure)
\details Returns the current state of the non-secure priority mask bit from the Priority Mask Register when in secure state.
\return Priority Mask value
*/
__STATIC_FORCEINLINE uint32_t __TZ_get_PRIMASK_NS(void)
{
uint32_t result;
__ASM volatile ("MRS %0, primask_ns" : "=r" (result) );
return (result);
}
#endif
/**
\brief Set Priority Mask
\details Assigns the given value to the Priority Mask Register.
\param [in] priMask Priority Mask
*/
__STATIC_FORCEINLINE void __set_PRIMASK(uint32_t priMask)
{
__ASM volatile ("MSR primask, %0" : : "r" (priMask) : "memory");
}
#if (__ARM_FEATURE_CMSE == 3)
/**
\brief Set Priority Mask (non-secure)
\details Assigns the given value to the non-secure Priority Mask Register when in secure state.
\param [in] priMask Priority Mask
*/
__STATIC_FORCEINLINE void __TZ_set_PRIMASK_NS(uint32_t priMask)
{
__ASM volatile ("MSR primask_ns, %0" : : "r" (priMask) : "memory");
}
#endif
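/* Illustrative usage sketch (not compiled, not part of the original header):
   a nesting-safe critical section built from the PRIMASK accessors. Saving the
   previous value lets the pattern be used from contexts where interrupts are
   already masked. The function names are assumptions. */
#if 0
static uint32_t example_enter_critical(void)
{
  uint32_t primask = __get_PRIMASK();
  __set_PRIMASK(1U);                 /* mask all configurable-priority interrupts */
  return primask;
}

static void example_exit_critical(uint32_t primask)
{
  __set_PRIMASK(primask);            /* restore the previous mask state */
}
#endif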
#if (__ARM_ARCH_ISA_THUMB >= 2)
/**
\brief Get Base Priority
\details Returns the current value of the Base Priority register.
\return Base Priority register value
*/
__STATIC_FORCEINLINE uint32_t __get_BASEPRI(void)
{
uint32_t result;
__ASM volatile ("MRS %0, basepri" : "=r" (result) );
return (result);
}
#if (__ARM_FEATURE_CMSE == 3)
/**
\brief Get Base Priority (non-secure)
\details Returns the current value of the non-secure Base Priority register when in secure state.
\return Base Priority register value
*/
__STATIC_FORCEINLINE uint32_t __TZ_get_BASEPRI_NS(void)
{
uint32_t result;
__ASM volatile ("MRS %0, basepri_ns" : "=r" (result) );
return (result);
}
#endif
/**
\brief Set Base Priority
\details Assigns the given value to the Base Priority register.
\param [in] basePri Base Priority value to set
*/
__STATIC_FORCEINLINE void __set_BASEPRI(uint32_t basePri)
{
__ASM volatile ("MSR basepri, %0" : : "r" (basePri) : "memory");
}
#if (__ARM_FEATURE_CMSE == 3)
/**
\brief Set Base Priority (non-secure)
\details Assigns the given value to the non-secure Base Priority register when in secure state.
\param [in] basePri Base Priority value to set
*/
__STATIC_FORCEINLINE void __TZ_set_BASEPRI_NS(uint32_t basePri)
{
__ASM volatile ("MSR basepri_ns, %0" : : "r" (basePri) : "memory");
}
#endif
/**
\brief Set Base Priority with condition
\details Assigns the given value to the Base Priority register only if BASEPRI masking is disabled,
or the new value increases the BASEPRI priority level.
\param [in] basePri Base Priority value to set
*/
__STATIC_FORCEINLINE void __set_BASEPRI_MAX(uint32_t basePri)
{
__ASM volatile ("MSR basepri_max, %0" : : "r" (basePri) : "memory");
}
/**
\brief Get Fault Mask
\details Returns the current value of the Fault Mask register.
\return Fault Mask register value
*/
__STATIC_FORCEINLINE uint32_t __get_FAULTMASK(void)
{
uint32_t result;
__ASM volatile ("MRS %0, faultmask" : "=r" (result) );
return (result);
}
#if (__ARM_FEATURE_CMSE == 3)
/**
\brief Get Fault Mask (non-secure)
\details Returns the current value of the non-secure Fault Mask register when in secure state.
\return Fault Mask register value
*/
__STATIC_FORCEINLINE uint32_t __TZ_get_FAULTMASK_NS(void)
{
uint32_t result;
__ASM volatile ("MRS %0, faultmask_ns" : "=r" (result) );
return (result);
}
#endif
/**
\brief Set Fault Mask
\details Assigns the given value to the Fault Mask register.
\param [in] faultMask Fault Mask value to set
*/
__STATIC_FORCEINLINE void __set_FAULTMASK(uint32_t faultMask)
{
__ASM volatile ("MSR faultmask, %0" : : "r" (faultMask) : "memory");
}
#if (__ARM_FEATURE_CMSE == 3)
/**
\brief Set Fault Mask (non-secure)
\details Assigns the given value to the non-secure Fault Mask register when in secure state.
\param [in] faultMask Fault Mask value to set
*/
__STATIC_FORCEINLINE void __TZ_set_FAULTMASK_NS(uint32_t faultMask)
{
__ASM volatile ("MSR faultmask_ns, %0" : : "r" (faultMask) : "memory");
}
#endif
#endif /* (__ARM_ARCH_ISA_THUMB >= 2) */
#if (__ARM_ARCH >= 8)
/**
\brief Get Process Stack Pointer Limit
Devices without ARMv8-M Main Extensions (i.e. Cortex-M23) lack the non-secure
Stack Pointer Limit register hence zero is returned always in non-secure
mode.
\details Returns the current value of the Process Stack Pointer Limit (PSPLIM).
\return PSPLIM Register value
*/
__STATIC_FORCEINLINE uint32_t __get_PSPLIM(void)
{
#if (((__ARM_ARCH_8M_MAIN__ < 1) && \
(__ARM_ARCH_8_1M_MAIN__ < 1) ) && \
(__ARM_FEATURE_CMSE < 3) )
/* without main extensions, the non-secure PSPLIM is RAZ/WI */
return (0U);
#else
uint32_t result;
__ASM volatile ("MRS %0, psplim" : "=r" (result) );
return (result);
#endif
}
#if (__ARM_FEATURE_CMSE == 3)
/**
\brief Get Process Stack Pointer Limit (non-secure)
Devices without ARMv8-M Main Extensions (i.e. Cortex-M23) lack the non-secure
Stack Pointer Limit register hence zero is returned always.
\details Returns the current value of the non-secure Process Stack Pointer Limit (PSPLIM) when in secure state.
\return PSPLIM Register value
*/
__STATIC_FORCEINLINE uint32_t __TZ_get_PSPLIM_NS(void)
{
#if ((__ARM_ARCH_8M_MAIN__ < 1) && \
(__ARM_ARCH_8_1M_MAIN__ < 1) )
/* without main extensions, the non-secure PSPLIM is RAZ/WI */
return (0U);
#else
uint32_t result;
__ASM volatile ("MRS %0, psplim_ns" : "=r" (result) );
return (result);
#endif
}
#endif
/**
\brief Set Process Stack Pointer Limit
Devices without ARMv8-M Main Extensions (i.e. Cortex-M23) lack the non-secure
Stack Pointer Limit register hence the write is silently ignored in non-secure
mode.
\details Assigns the given value to the Process Stack Pointer Limit (PSPLIM).
\param [in] ProcStackPtrLimit Process Stack Pointer Limit value to set
*/
__STATIC_FORCEINLINE void __set_PSPLIM(uint32_t ProcStackPtrLimit)
{
#if (((__ARM_ARCH_8M_MAIN__ < 1) && \
(__ARM_ARCH_8_1M_MAIN__ < 1) ) && \
(__ARM_FEATURE_CMSE < 3) )
/* without main extensions, the non-secure PSPLIM is RAZ/WI */
(void)ProcStackPtrLimit;
#else
__ASM volatile ("MSR psplim, %0" : : "r" (ProcStackPtrLimit));
#endif
}
#if (__ARM_FEATURE_CMSE == 3)
/**
\brief Set Process Stack Pointer Limit (non-secure)
Devices without ARMv8-M Main Extensions (i.e. Cortex-M23) lack the non-secure
Stack Pointer Limit register hence the write is silently ignored.
\details Assigns the given value to the non-secure Process Stack Pointer Limit (PSPLIM) when in secure state.
\param [in] ProcStackPtrLimit Process Stack Pointer Limit value to set
*/
__STATIC_FORCEINLINE void __TZ_set_PSPLIM_NS(uint32_t ProcStackPtrLimit)
{
#if ((__ARM_ARCH_8M_MAIN__ < 1) && \
(__ARM_ARCH_8_1M_MAIN__ < 1) )
/* without main extensions, the non-secure PSPLIM is RAZ/WI */
(void)ProcStackPtrLimit;
#else
__ASM volatile ("MSR psplim_ns, %0\n" : : "r" (ProcStackPtrLimit));
#endif
}
#endif
/**
\brief Get Main Stack Pointer Limit
Devices without ARMv8-M Main Extensions (i.e. Cortex-M23) lack the non-secure
Stack Pointer Limit register hence zero is returned always.
\details Returns the current value of the Main Stack Pointer Limit (MSPLIM).
\return MSPLIM Register value
*/
__STATIC_FORCEINLINE uint32_t __get_MSPLIM(void)
{
#if (((__ARM_ARCH_8M_MAIN__ < 1) && \
(__ARM_ARCH_8_1M_MAIN__ < 1) ) && \
(__ARM_FEATURE_CMSE < 3) )
/* without main extensions, the non-secure MSPLIM is RAZ/WI */
return (0U);
#else
uint32_t result;
__ASM volatile ("MRS %0, msplim" : "=r" (result) );
return (result);
#endif
}
#if (__ARM_FEATURE_CMSE == 3)
/**
\brief Get Main Stack Pointer Limit (non-secure)
Devices without ARMv8-M Main Extensions (i.e. Cortex-M23) lack the non-secure
Stack Pointer Limit register hence zero is returned always.
\details Returns the current value of the non-secure Main Stack Pointer Limit (MSPLIM) when in secure state.
\return MSPLIM Register value
*/
__STATIC_FORCEINLINE uint32_t __TZ_get_MSPLIM_NS(void)
{
#if ((__ARM_ARCH_8M_MAIN__ < 1) && \
(__ARM_ARCH_8_1M_MAIN__ < 1) )
/* without main extensions, the non-secure MSPLIM is RAZ/WI */
return (0U);
#else
uint32_t result;
__ASM volatile ("MRS %0, msplim_ns" : "=r" (result) );
return (result);
#endif
}
#endif
/**
\brief Set Main Stack Pointer Limit
Devices without ARMv8-M Main Extensions (i.e. Cortex-M23) lack the non-secure
Stack Pointer Limit register hence the write is silently ignored.
\details Assigns the given value to the Main Stack Pointer Limit (MSPLIM).
\param [in] MainStackPtrLimit Main Stack Pointer Limit value to set
*/
__STATIC_FORCEINLINE void __set_MSPLIM(uint32_t MainStackPtrLimit)
{
#if (((__ARM_ARCH_8M_MAIN__ < 1) && \
(__ARM_ARCH_8_1M_MAIN__ < 1) ) && \
(__ARM_FEATURE_CMSE < 3) )
/* without main extensions, the non-secure MSPLIM is RAZ/WI */
(void)MainStackPtrLimit;
#else
__ASM volatile ("MSR msplim, %0" : : "r" (MainStackPtrLimit));
#endif
}
#if (__ARM_FEATURE_CMSE == 3)
/**
\brief Set Main Stack Pointer Limit (non-secure)
Devices without ARMv8-M Main Extensions (i.e. Cortex-M23) lack the non-secure
Stack Pointer Limit register hence the write is silently ignored.
\details Assigns the given value to the non-secure Main Stack Pointer Limit (MSPLIM) when in secure state.
\param [in] MainStackPtrLimit Main Stack Pointer Limit value to set
*/
__STATIC_FORCEINLINE void __TZ_set_MSPLIM_NS(uint32_t MainStackPtrLimit)
{
#if ((__ARM_ARCH_8M_MAIN__ < 1) && \
(__ARM_ARCH_8_1M_MAIN__ < 1) )
/* without main extensions, the non-secure MSPLIM is RAZ/WI */
(void)MainStackPtrLimit;
#else
__ASM volatile ("MSR msplim_ns, %0" : : "r" (MainStackPtrLimit));
#endif
}
#endif
#endif /* (__ARM_ARCH >= 8) */
/** @} end of CMSIS_Core_RegAccFunctions */
/* ################### Compiler specific Intrinsics ########################### */
/** \defgroup CMSIS_SIMD_intrinsics CMSIS SIMD Intrinsics
Access to dedicated SIMD instructions
@{
*/
#if (defined (__ARM_FEATURE_DSP) && (__ARM_FEATURE_DSP == 1))
#define __SADD8 __sadd8
#define __QADD8 __qadd8
#define __SHADD8 __shadd8
#define __UADD8 __uadd8
#define __UQADD8 __uqadd8
#define __UHADD8 __uhadd8
#define __SSUB8 __ssub8
#define __QSUB8 __qsub8
#define __SHSUB8 __shsub8
#define __USUB8 __usub8
#define __UQSUB8 __uqsub8
#define __UHSUB8 __uhsub8
#define __SADD16 __sadd16
#define __QADD16 __qadd16
#define __SHADD16 __shadd16
#define __UADD16 __uadd16
#define __UQADD16 __uqadd16
#define __UHADD16 __uhadd16
#define __SSUB16 __ssub16
#define __QSUB16 __qsub16
#define __SHSUB16 __shsub16
#define __USUB16 __usub16
#define __UQSUB16 __uqsub16
#define __UHSUB16 __uhsub16
#define __SASX __sasx
#define __QASX __qasx
#define __SHASX __shasx
#define __UASX __uasx
#define __UQASX __uqasx
#define __UHASX __uhasx
#define __SSAX __ssax
#define __QSAX __qsax
#define __SHSAX __shsax
#define __USAX __usax
#define __UQSAX __uqsax
#define __UHSAX __uhsax
#define __USAD8 __usad8
#define __USADA8 __usada8
#define __SSAT16 __ssat16
#define __USAT16 __usat16
#define __UXTB16 __uxtb16
#define __UXTAB16 __uxtab16
#define __SXTB16 __sxtb16
#define __SXTAB16 __sxtab16
#define __SMUAD __smuad
#define __SMUADX __smuadx
#define __SMLAD __smlad
#define __SMLADX __smladx
#define __SMLALD __smlald
#define __SMLALDX __smlaldx
#define __SMUSD __smusd
#define __SMUSDX __smusdx
#define __SMLSD __smlsd
#define __SMLSDX __smlsdx
#define __SMLSLD __smlsld
#define __SMLSLDX __smlsldx
#define __SEL __sel
#define __QADD __qadd
#define __QSUB __qsub
#define __PKHBT(ARG1,ARG2,ARG3) \
__extension__ \
({ \
uint32_t __RES, __ARG1 = (ARG1), __ARG2 = (ARG2); \
__ASM ("pkhbt %0, %1, %2, lsl %3" : "=r" (__RES) : "r" (__ARG1), "r" (__ARG2), "I" (ARG3) ); \
__RES; \
})
#define __PKHTB(ARG1,ARG2,ARG3) \
__extension__ \
({ \
uint32_t __RES, __ARG1 = (ARG1), __ARG2 = (ARG2); \
if (ARG3 == 0) \
__ASM ("pkhtb %0, %1, %2" : "=r" (__RES) : "r" (__ARG1), "r" (__ARG2) ); \
else \
__ASM ("pkhtb %0, %1, %2, asr %3" : "=r" (__RES) : "r" (__ARG1), "r" (__ARG2), "I" (ARG3) ); \
__RES; \
})
#define __SXTB16_RORn(ARG1, ARG2) __SXTB16(__ROR(ARG1, ARG2))
#define __SXTAB16_RORn(ARG1, ARG2, ARG3) __SXTAB16(ARG1, __ROR(ARG2, ARG3))
__STATIC_FORCEINLINE int32_t __SMMLA (int32_t op1, int32_t op2, int32_t op3)
{
int32_t result;
__ASM volatile ("smmla %0, %1, %2, %3" : "=r" (result): "r" (op1), "r" (op2), "r" (op3) );
return (result);
}
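/* Illustrative usage sketch (not compiled, not part of the original header):
   saturating addition of two pairs of packed int16 samples with __QADD16.
   Packing two halfwords per word lets one instruction process both lanes;
   the stereo data layout and function name are assumptions. */
#if 0
static uint32_t example_add_stereo_samples(uint32_t left_right_a, uint32_t left_right_b)
{
  /* each argument holds one int16 sample per halfword; each lane saturates independently */
  return __QADD16(left_right_a, left_right_b);
}
#endif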
#endif /* (defined (__ARM_FEATURE_DSP) && (__ARM_FEATURE_DSP == 1)) */
/** @} end of group CMSIS_SIMD_intrinsics */
#endif /* __CMSIS_ARMCLANG_M_H */

View File

@@ -0,0 +1,824 @@
/*
* Copyright (c) 2009-2024 Arm Limited. All rights reserved.
*
* SPDX-License-Identifier: Apache-2.0
*
* Licensed under the Apache License, Version 2.0 (the License); you may
* not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an AS IS BASIS, WITHOUT
* WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
/*
* CMSIS-Core(M) Compiler LLVM/Clang Header File
*/
#ifndef __CMSIS_CLANG_M_H
#define __CMSIS_CLANG_M_H
#pragma clang system_header /* treat file as system include file */
#ifndef __CMSIS_CLANG_H
#error "This file must not be included directly"
#endif
#if (__ARM_ACLE >= 200)
#include <arm_acle.h>
#else
#error Compiler must support ACLE V2.0
#endif /* (__ARM_ACLE >= 200) */
/* Fallback for __has_builtin */
#ifndef __has_builtin
#define __has_builtin(x) (0)
#endif
/* ######################### Startup and Lowlevel Init ######################## */
#ifndef __PROGRAM_START
#define __PROGRAM_START _start
#endif
#ifndef __INITIAL_SP
#define __INITIAL_SP __stack
#endif
#ifndef __STACK_LIMIT
#define __STACK_LIMIT __stack_limit
#endif
#ifndef __VECTOR_TABLE
#define __VECTOR_TABLE __Vectors
#endif
#ifndef __VECTOR_TABLE_ATTRIBUTE
#define __VECTOR_TABLE_ATTRIBUTE __attribute__((used, section(".vectors")))
#endif
#if (__ARM_FEATURE_CMSE == 3)
#ifndef __STACK_SEAL
#define __STACK_SEAL __stack_seal
#endif
#ifndef __TZ_STACK_SEAL_SIZE
#define __TZ_STACK_SEAL_SIZE 8U
#endif
#ifndef __TZ_STACK_SEAL_VALUE
#define __TZ_STACK_SEAL_VALUE 0xFEF5EDA5FEF5EDA5ULL
#endif
__STATIC_FORCEINLINE void __TZ_set_STACKSEAL_S (uint32_t* stackTop) {
*((uint64_t *)stackTop) = __TZ_STACK_SEAL_VALUE;
}
#endif
#if (__ARM_ARCH_ISA_THUMB >= 2)
/**
\brief STRBT Unprivileged (8 bit)
\details Executes an unprivileged STRBT instruction for 8 bit values.
\param [in] value Value to store
\param [in] ptr Pointer to location
*/
__STATIC_FORCEINLINE void __STRBT(uint8_t value, volatile uint8_t *ptr)
{
__ASM volatile ("strbt %1, %0" : "=Q" (*ptr) : "r" ((uint32_t)value) );
}
/**
\brief STRHT Unprivileged (16 bit)
\details Executes an unprivileged STRHT instruction for 16 bit values.
\param [in] value Value to store
\param [in] ptr Pointer to location
*/
__STATIC_FORCEINLINE void __STRHT(uint16_t value, volatile uint16_t *ptr)
{
__ASM volatile ("strht %1, %0" : "=Q" (*ptr) : "r" ((uint32_t)value) );
}
/**
\brief STRT Unprivileged (32 bit)
\details Executes an unprivileged STRT instruction for 32 bit values.
\param [in] value Value to store
\param [in] ptr Pointer to location
*/
__STATIC_FORCEINLINE void __STRT(uint32_t value, volatile uint32_t *ptr)
{
__ASM volatile ("strt %1, %0" : "=Q" (*ptr) : "r" (value) );
}
#endif /* (__ARM_ARCH_ISA_THUMB >= 2) */
/* ########################### Core Function Access ########################### */
/** \ingroup CMSIS_Core_FunctionInterface
\defgroup CMSIS_Core_RegAccFunctions CMSIS Core Register Access Functions
@{
*/
/**
\brief Get Control Register
\details Returns the content of the Control Register.
\return Control Register value
*/
__STATIC_FORCEINLINE uint32_t __get_CONTROL(void)
{
uint32_t result;
__ASM volatile ("MRS %0, control" : "=r" (result) );
return (result);
}
#if (__ARM_FEATURE_CMSE == 3)
/**
\brief Get Control Register (non-secure)
\details Returns the content of the non-secure Control Register when in secure mode.
\return non-secure Control Register value
*/
__STATIC_FORCEINLINE uint32_t __TZ_get_CONTROL_NS(void)
{
uint32_t result;
__ASM volatile ("MRS %0, control_ns" : "=r" (result) );
return (result);
}
#endif
/**
\brief Set Control Register
\details Writes the given value to the Control Register.
\param [in] control Control Register value to set
*/
__STATIC_FORCEINLINE void __set_CONTROL(uint32_t control)
{
__ASM volatile ("MSR control, %0" : : "r" (control) : "memory");
__ISB();
}
#if (__ARM_FEATURE_CMSE == 3)
/**
\brief Set Control Register (non-secure)
\details Writes the given value to the non-secure Control Register when in secure state.
\param [in] control Control Register value to set
*/
__STATIC_FORCEINLINE void __TZ_set_CONTROL_NS(uint32_t control)
{
__ASM volatile ("MSR control_ns, %0" : : "r" (control) : "memory");
__ISB();
}
#endif
/**
\brief Get IPSR Register
\details Returns the content of the IPSR Register.
\return IPSR Register value
*/
__STATIC_FORCEINLINE uint32_t __get_IPSR(void)
{
uint32_t result;
__ASM volatile ("MRS %0, ipsr" : "=r" (result) );
return (result);
}
/**
\brief Get APSR Register
\details Returns the content of the APSR Register.
\return APSR Register value
*/
__STATIC_FORCEINLINE uint32_t __get_APSR(void)
{
uint32_t result;
__ASM volatile ("MRS %0, apsr" : "=r" (result) );
return (result);
}
/**
\brief Get xPSR Register
\details Returns the content of the xPSR Register.
\return xPSR Register value
*/
__STATIC_FORCEINLINE uint32_t __get_xPSR(void)
{
uint32_t result;
__ASM volatile ("MRS %0, xpsr" : "=r" (result) );
return (result);
}
/**
\brief Get Process Stack Pointer
\details Returns the current value of the Process Stack Pointer (PSP).
\return PSP Register value
*/
__STATIC_FORCEINLINE uint32_t __get_PSP(void)
{
uint32_t result;
__ASM volatile ("MRS %0, psp" : "=r" (result) );
return (result);
}
#if (__ARM_FEATURE_CMSE == 3)
/**
\brief Get Process Stack Pointer (non-secure)
\details Returns the current value of the non-secure Process Stack Pointer (PSP) when in secure state.
\return PSP Register value
*/
__STATIC_FORCEINLINE uint32_t __TZ_get_PSP_NS(void)
{
uint32_t result;
__ASM volatile ("MRS %0, psp_ns" : "=r" (result) );
return (result);
}
#endif
/**
\brief Set Process Stack Pointer
\details Assigns the given value to the Process Stack Pointer (PSP).
\param [in] topOfProcStack Process Stack Pointer value to set
*/
__STATIC_FORCEINLINE void __set_PSP(uint32_t topOfProcStack)
{
__ASM volatile ("MSR psp, %0" : : "r" (topOfProcStack) : );
}
#if (__ARM_FEATURE_CMSE == 3)
/**
\brief Set Process Stack Pointer (non-secure)
\details Assigns the given value to the non-secure Process Stack Pointer (PSP) when in secure state.
\param [in] topOfProcStack Process Stack Pointer value to set
*/
__STATIC_FORCEINLINE void __TZ_set_PSP_NS(uint32_t topOfProcStack)
{
__ASM volatile ("MSR psp_ns, %0" : : "r" (topOfProcStack) : );
}
#endif
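/*
  Usage sketch (illustrative, not part of CMSIS): before starting a scheduler,
  startup code often points PSP at a task stack and then sets CONTROL.SPSEL so
  that thread mode runs on the process stack. The names below are hypothetical
  and the stack top is assumed to be 8-byte aligned.

      extern uint32_t task_stack_top[];              // provided elsewhere (assumption)

      static void switch_thread_mode_to_psp(void)
      {
          __set_PSP((uint32_t)task_stack_top);       // load the process stack pointer
          __set_CONTROL(__get_CONTROL() | 0x2U);     // CONTROL.SPSEL = 1: thread mode uses PSP
          // __set_CONTROL() already issues the required ISB after the MSR
      }
*/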
/**
\brief Get Main Stack Pointer
\details Returns the current value of the Main Stack Pointer (MSP).
\return MSP Register value
*/
__STATIC_FORCEINLINE uint32_t __get_MSP(void)
{
uint32_t result;
__ASM volatile ("MRS %0, msp" : "=r" (result) );
return (result);
}
#if (__ARM_FEATURE_CMSE == 3)
/**
\brief Get Main Stack Pointer (non-secure)
\details Returns the current value of the non-secure Main Stack Pointer (MSP) when in secure state.
\return MSP Register value
*/
__STATIC_FORCEINLINE uint32_t __TZ_get_MSP_NS(void)
{
uint32_t result;
__ASM volatile ("MRS %0, msp_ns" : "=r" (result) );
return (result);
}
#endif
/**
\brief Set Main Stack Pointer
\details Assigns the given value to the Main Stack Pointer (MSP).
\param [in] topOfMainStack Main Stack Pointer value to set
*/
__STATIC_FORCEINLINE void __set_MSP(uint32_t topOfMainStack)
{
__ASM volatile ("MSR msp, %0" : : "r" (topOfMainStack) : );
}
#if (__ARM_FEATURE_CMSE == 3)
/**
\brief Set Main Stack Pointer (non-secure)
\details Assigns the given value to the non-secure Main Stack Pointer (MSP) when in secure state.
\param [in] topOfMainStack Main Stack Pointer value to set
*/
__STATIC_FORCEINLINE void __TZ_set_MSP_NS(uint32_t topOfMainStack)
{
__ASM volatile ("MSR msp_ns, %0" : : "r" (topOfMainStack) : );
}
#endif
#if (__ARM_FEATURE_CMSE == 3)
/**
\brief Get Stack Pointer (non-secure)
\details Returns the current value of the non-secure Stack Pointer (SP) when in secure state.
\return SP Register value
*/
__STATIC_FORCEINLINE uint32_t __TZ_get_SP_NS(void)
{
uint32_t result;
__ASM volatile ("MRS %0, sp_ns" : "=r" (result) );
return (result);
}
/**
\brief Set Stack Pointer (non-secure)
\details Assigns the given value to the non-secure Stack Pointer (SP) when in secure state.
\param [in] topOfStack Stack Pointer value to set
*/
__STATIC_FORCEINLINE void __TZ_set_SP_NS(uint32_t topOfStack)
{
__ASM volatile ("MSR sp_ns, %0" : : "r" (topOfStack) : );
}
#endif
/**
\brief Get Priority Mask
\details Returns the current state of the priority mask bit from the Priority Mask Register.
\return Priority Mask value
*/
__STATIC_FORCEINLINE uint32_t __get_PRIMASK(void)
{
uint32_t result;
__ASM volatile ("MRS %0, primask" : "=r" (result) );
return (result);
}
#if (__ARM_FEATURE_CMSE == 3)
/**
\brief Get Priority Mask (non-secure)
\details Returns the current state of the non-secure priority mask bit from the Priority Mask Register when in secure state.
\return Priority Mask value
*/
__STATIC_FORCEINLINE uint32_t __TZ_get_PRIMASK_NS(void)
{
uint32_t result;
__ASM volatile ("MRS %0, primask_ns" : "=r" (result) );
return (result);
}
#endif
/**
\brief Set Priority Mask
\details Assigns the given value to the Priority Mask Register.
\param [in] priMask Priority Mask
*/
__STATIC_FORCEINLINE void __set_PRIMASK(uint32_t priMask)
{
__ASM volatile ("MSR primask, %0" : : "r" (priMask) : "memory");
}
#if (__ARM_FEATURE_CMSE == 3)
/**
\brief Set Priority Mask (non-secure)
\details Assigns the given value to the non-secure Priority Mask Register when in secure state.
\param [in] priMask Priority Mask
*/
__STATIC_FORCEINLINE void __TZ_set_PRIMASK_NS(uint32_t priMask)
{
__ASM volatile ("MSR primask_ns, %0" : : "r" (priMask) : "memory");
}
#endif
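/*
  Usage sketch (illustrative, not part of CMSIS): a common pattern is a nestable
  critical section that saves PRIMASK, masks interrupts, and later restores the
  saved value. The helper names are hypothetical.

      static inline uint32_t critical_enter(void)
      {
          uint32_t primask = __get_PRIMASK();   // remember the current mask state
          __set_PRIMASK(1U);                    // mask all configurable-priority interrupts
          return primask;
      }

      static inline void critical_exit(uint32_t primask)
      {
          __set_PRIMASK(primask);               // restore the previous mask state
      }
*/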
#if (__ARM_ARCH_ISA_THUMB >= 2)
/**
\brief Get Base Priority
\details Returns the current value of the Base Priority register.
\return Base Priority register value
*/
__STATIC_FORCEINLINE uint32_t __get_BASEPRI(void)
{
uint32_t result;
__ASM volatile ("MRS %0, basepri" : "=r" (result) );
return (result);
}
#if (__ARM_FEATURE_CMSE == 3)
/**
\brief Get Base Priority (non-secure)
\details Returns the current value of the non-secure Base Priority register when in secure state.
\return Base Priority register value
*/
__STATIC_FORCEINLINE uint32_t __TZ_get_BASEPRI_NS(void)
{
uint32_t result;
__ASM volatile ("MRS %0, basepri_ns" : "=r" (result) );
return (result);
}
#endif
/**
\brief Set Base Priority
\details Assigns the given value to the Base Priority register.
\param [in] basePri Base Priority value to set
*/
__STATIC_FORCEINLINE void __set_BASEPRI(uint32_t basePri)
{
__ASM volatile ("MSR basepri, %0" : : "r" (basePri) : "memory");
}
#if (__ARM_FEATURE_CMSE == 3)
/**
\brief Set Base Priority (non-secure)
\details Assigns the given value to the non-secure Base Priority register when in secure state.
\param [in] basePri Base Priority value to set
*/
__STATIC_FORCEINLINE void __TZ_set_BASEPRI_NS(uint32_t basePri)
{
__ASM volatile ("MSR basepri_ns, %0" : : "r" (basePri) : "memory");
}
#endif
/**
\brief Set Base Priority with condition
\details Assigns the given value to the Base Priority register only if BASEPRI masking is disabled,
or the new value increases the BASEPRI priority level.
\param [in] basePri Base Priority value to set
*/
__STATIC_FORCEINLINE void __set_BASEPRI_MAX(uint32_t basePri)
{
__ASM volatile ("MSR basepri_max, %0" : : "r" (basePri) : "memory");
}
/**
\brief Get Fault Mask
\details Returns the current value of the Fault Mask register.
\return Fault Mask register value
*/
__STATIC_FORCEINLINE uint32_t __get_FAULTMASK(void)
{
uint32_t result;
__ASM volatile ("MRS %0, faultmask" : "=r" (result) );
return (result);
}
#if (__ARM_FEATURE_CMSE == 3)
/**
\brief Get Fault Mask (non-secure)
\details Returns the current value of the non-secure Fault Mask register when in secure state.
\return Fault Mask register value
*/
__STATIC_FORCEINLINE uint32_t __TZ_get_FAULTMASK_NS(void)
{
uint32_t result;
__ASM volatile ("MRS %0, faultmask_ns" : "=r" (result) );
return (result);
}
#endif
/**
\brief Set Fault Mask
\details Assigns the given value to the Fault Mask register.
\param [in] faultMask Fault Mask value to set
*/
__STATIC_FORCEINLINE void __set_FAULTMASK(uint32_t faultMask)
{
__ASM volatile ("MSR faultmask, %0" : : "r" (faultMask) : "memory");
}
#if (__ARM_FEATURE_CMSE == 3)
/**
\brief Set Fault Mask (non-secure)
\details Assigns the given value to the non-secure Fault Mask register when in secure state.
\param [in] faultMask Fault Mask value to set
*/
__STATIC_FORCEINLINE void __TZ_set_FAULTMASK_NS(uint32_t faultMask)
{
__ASM volatile ("MSR faultmask_ns, %0" : : "r" (faultMask) : "memory");
}
#endif
#endif /* (__ARM_ARCH_ISA_THUMB >= 2) */
#if (__ARM_ARCH >= 8)
/**
\brief Get Process Stack Pointer Limit
Devices without ARMv8-M Main Extensions (i.e. Cortex-M23) lack the non-secure
Stack Pointer Limit register hence zero is returned always in non-secure
mode.
\details Returns the current value of the Process Stack Pointer Limit (PSPLIM).
\return PSPLIM Register value
*/
__STATIC_FORCEINLINE uint32_t __get_PSPLIM(void)
{
#if (((__ARM_ARCH_8M_MAIN__ < 1) && \
(__ARM_ARCH_8_1M_MAIN__ < 1) ) && \
(__ARM_FEATURE_CMSE < 3) )
/* without main extensions, the non-secure PSPLIM is RAZ/WI */
return (0U);
#else
uint32_t result;
__ASM volatile ("MRS %0, psplim" : "=r" (result) );
return (result);
#endif
}
#if (__ARM_FEATURE_CMSE == 3)
/**
\brief Get Process Stack Pointer Limit (non-secure)
Devices without ARMv8-M Main Extensions (i.e. Cortex-M23) lack the non-secure
  Stack Pointer Limit register hence zero is always returned.
\details Returns the current value of the non-secure Process Stack Pointer Limit (PSPLIM) when in secure state.
\return PSPLIM Register value
*/
__STATIC_FORCEINLINE uint32_t __TZ_get_PSPLIM_NS(void)
{
#if ((__ARM_ARCH_8M_MAIN__ < 1) && \
(__ARM_ARCH_8_1M_MAIN__ < 1) )
/* without main extensions, the non-secure PSPLIM is RAZ/WI */
return (0U);
#else
uint32_t result;
__ASM volatile ("MRS %0, psplim_ns" : "=r" (result) );
return (result);
#endif
}
#endif
/**
\brief Set Process Stack Pointer Limit
Devices without ARMv8-M Main Extensions (i.e. Cortex-M23) lack the non-secure
Stack Pointer Limit register hence the write is silently ignored in non-secure
mode.
\details Assigns the given value to the Process Stack Pointer Limit (PSPLIM).
\param [in] ProcStackPtrLimit Process Stack Pointer Limit value to set
*/
__STATIC_FORCEINLINE void __set_PSPLIM(uint32_t ProcStackPtrLimit)
{
#if (((__ARM_ARCH_8M_MAIN__ < 1) && \
(__ARM_ARCH_8_1M_MAIN__ < 1) ) && \
(__ARM_FEATURE_CMSE < 3) )
/* without main extensions, the non-secure PSPLIM is RAZ/WI */
(void)ProcStackPtrLimit;
#else
__ASM volatile ("MSR psplim, %0" : : "r" (ProcStackPtrLimit));
#endif
}
#if (__ARM_FEATURE_CMSE == 3)
/**
  \brief Set Process Stack Pointer Limit (non-secure)
Devices without ARMv8-M Main Extensions (i.e. Cortex-M23) lack the non-secure
Stack Pointer Limit register hence the write is silently ignored.
\details Assigns the given value to the non-secure Process Stack Pointer Limit (PSPLIM) when in secure state.
\param [in] ProcStackPtrLimit Process Stack Pointer Limit value to set
*/
__STATIC_FORCEINLINE void __TZ_set_PSPLIM_NS(uint32_t ProcStackPtrLimit)
{
#if ((__ARM_ARCH_8M_MAIN__ < 1) && \
(__ARM_ARCH_8_1M_MAIN__ < 1) )
/* without main extensions, the non-secure PSPLIM is RAZ/WI */
(void)ProcStackPtrLimit;
#else
__ASM volatile ("MSR psplim_ns, %0\n" : : "r" (ProcStackPtrLimit));
#endif
}
#endif
/**
\brief Get Main Stack Pointer Limit
Devices without ARMv8-M Main Extensions (i.e. Cortex-M23) lack the non-secure
  Stack Pointer Limit register hence zero is always returned.
\details Returns the current value of the Main Stack Pointer Limit (MSPLIM).
\return MSPLIM Register value
*/
__STATIC_FORCEINLINE uint32_t __get_MSPLIM(void)
{
#if (((__ARM_ARCH_8M_MAIN__ < 1) && \
(__ARM_ARCH_8_1M_MAIN__ < 1) ) && \
(__ARM_FEATURE_CMSE < 3) )
/* without main extensions, the non-secure MSPLIM is RAZ/WI */
return (0U);
#else
uint32_t result;
__ASM volatile ("MRS %0, msplim" : "=r" (result) );
return (result);
#endif
}
#if (__ARM_FEATURE_CMSE == 3)
/**
\brief Get Main Stack Pointer Limit (non-secure)
Devices without ARMv8-M Main Extensions (i.e. Cortex-M23) lack the non-secure
  Stack Pointer Limit register hence zero is always returned.
  \details Returns the current value of the non-secure Main Stack Pointer Limit (MSPLIM) when in secure state.
\return MSPLIM Register value
*/
__STATIC_FORCEINLINE uint32_t __TZ_get_MSPLIM_NS(void)
{
#if ((__ARM_ARCH_8M_MAIN__ < 1) && \
(__ARM_ARCH_8_1M_MAIN__ < 1) )
/* without main extensions, the non-secure MSPLIM is RAZ/WI */
return (0U);
#else
uint32_t result;
__ASM volatile ("MRS %0, msplim_ns" : "=r" (result) );
return (result);
#endif
}
#endif
/**
\brief Set Main Stack Pointer Limit
Devices without ARMv8-M Main Extensions (i.e. Cortex-M23) lack the non-secure
Stack Pointer Limit register hence the write is silently ignored.
\details Assigns the given value to the Main Stack Pointer Limit (MSPLIM).
\param [in] MainStackPtrLimit Main Stack Pointer Limit value to set
*/
__STATIC_FORCEINLINE void __set_MSPLIM(uint32_t MainStackPtrLimit)
{
#if (((__ARM_ARCH_8M_MAIN__ < 1) && \
(__ARM_ARCH_8_1M_MAIN__ < 1) ) && \
(__ARM_FEATURE_CMSE < 3) )
/* without main extensions, the non-secure MSPLIM is RAZ/WI */
(void)MainStackPtrLimit;
#else
__ASM volatile ("MSR msplim, %0" : : "r" (MainStackPtrLimit));
#endif
}
#if (__ARM_FEATURE_CMSE == 3)
/**
\brief Set Main Stack Pointer Limit (non-secure)
Devices without ARMv8-M Main Extensions (i.e. Cortex-M23) lack the non-secure
Stack Pointer Limit register hence the write is silently ignored.
\details Assigns the given value to the non-secure Main Stack Pointer Limit (MSPLIM) when in secure state.
  \param [in]    MainStackPtrLimit  Main Stack Pointer Limit value to set
*/
__STATIC_FORCEINLINE void __TZ_set_MSPLIM_NS(uint32_t MainStackPtrLimit)
{
#if ((__ARM_ARCH_8M_MAIN__ < 1) && \
(__ARM_ARCH_8_1M_MAIN__ < 1) )
/* without main extensions, the non-secure MSPLIM is RAZ/WI */
(void)MainStackPtrLimit;
#else
__ASM volatile ("MSR msplim_ns, %0" : : "r" (MainStackPtrLimit));
#endif
}
#endif
#endif /* (__ARM_ARCH >= 8) */
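/*
  Usage sketch (illustrative, not part of CMSIS): on ARMv8-M Mainline devices an
  RTOS can arm the stack limit registers to catch stack overflow, e.g. by setting
  PSPLIM to the lowest address of a task stack before switching to it. On a
  Cortex-M23 built without the Security Extension the accessors above fall back to
  the RAZ/WI stubs, so the call below is a harmless no-op. Names are hypothetical.

      extern uint32_t task_stack_base[];                // lowest address of the task stack (assumption)

      static void arm_task_stack_guard(void)
      {
          __set_PSPLIM((uint32_t)task_stack_base);      // a limit violation raises a fault where the register exists
      }
*/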
/* ################### Compiler specific Intrinsics ########################### */
/** \defgroup CMSIS_SIMD_intrinsics CMSIS SIMD Intrinsics
Access to dedicated SIMD instructions
@{
*/
#if (defined (__ARM_FEATURE_DSP) && (__ARM_FEATURE_DSP == 1))
#define __SADD8 __sadd8
#define __QADD8 __qadd8
#define __SHADD8 __shadd8
#define __UADD8 __uadd8
#define __UQADD8 __uqadd8
#define __UHADD8 __uhadd8
#define __SSUB8 __ssub8
#define __QSUB8 __qsub8
#define __SHSUB8 __shsub8
#define __USUB8 __usub8
#define __UQSUB8 __uqsub8
#define __UHSUB8 __uhsub8
#define __SADD16 __sadd16
#define __QADD16 __qadd16
#define __SHADD16 __shadd16
#define __UADD16 __uadd16
#define __UQADD16 __uqadd16
#define __UHADD16 __uhadd16
#define __SSUB16 __ssub16
#define __QSUB16 __qsub16
#define __SHSUB16 __shsub16
#define __USUB16 __usub16
#define __UQSUB16 __uqsub16
#define __UHSUB16 __uhsub16
#define __SASX __sasx
#define __QASX __qasx
#define __SHASX __shasx
#define __UASX __uasx
#define __UQASX __uqasx
#define __UHASX __uhasx
#define __SSAX __ssax
#define __QSAX __qsax
#define __SHSAX __shsax
#define __USAX __usax
#define __UQSAX __uqsax
#define __UHSAX __uhsax
#define __USAD8 __usad8
#define __USADA8 __usada8
#define __SSAT16 __ssat16
#define __USAT16 __usat16
#define __UXTB16 __uxtb16
#define __UXTAB16 __uxtab16
#define __SXTB16 __sxtb16
#define __SXTAB16 __sxtab16
#define __SMUAD __smuad
#define __SMUADX __smuadx
#define __SMLAD __smlad
#define __SMLADX __smladx
#define __SMLALD __smlald
#define __SMLALDX __smlaldx
#define __SMUSD __smusd
#define __SMUSDX __smusdx
#define __SMLSD __smlsd
#define __SMLSDX __smlsdx
#define __SMLSLD __smlsld
#define __SMLSLDX __smlsldx
#define __SEL __sel
#define __QADD __qadd
#define __QSUB __qsub
#define __PKHBT(ARG1,ARG2,ARG3) \
__extension__ \
({ \
uint32_t __RES, __ARG1 = (ARG1), __ARG2 = (ARG2); \
__ASM ("pkhbt %0, %1, %2, lsl %3" : "=r" (__RES) : "r" (__ARG1), "r" (__ARG2), "I" (ARG3) ); \
__RES; \
})
#define __PKHTB(ARG1,ARG2,ARG3) \
__extension__ \
({ \
uint32_t __RES, __ARG1 = (ARG1), __ARG2 = (ARG2); \
if (ARG3 == 0) \
__ASM ("pkhtb %0, %1, %2" : "=r" (__RES) : "r" (__ARG1), "r" (__ARG2) ); \
else \
__ASM ("pkhtb %0, %1, %2, asr %3" : "=r" (__RES) : "r" (__ARG1), "r" (__ARG2), "I" (ARG3) ); \
__RES; \
})
#define __SXTB16_RORn(ARG1, ARG2) __SXTB16(__ROR(ARG1, ARG2))
#define __SXTAB16_RORn(ARG1, ARG2, ARG3) __SXTAB16(ARG1, __ROR(ARG2, ARG3))
__STATIC_FORCEINLINE int32_t __SMMLA (int32_t op1, int32_t op2, int32_t op3)
{
int32_t result;
__ASM volatile ("smmla %0, %1, %2, %3" : "=r" (result): "r" (op1), "r" (op2), "r" (op3) );
return (result);
}
#endif /* (defined (__ARM_FEATURE_DSP) && (__ARM_FEATURE_DSP == 1)) */
/** @} end of group CMSIS_SIMD_intrinsics */
/** @} end of CMSIS_Core_RegAccFunctions */
#endif /* __CMSIS_CLANG_M_H */

View File

@@ -0,0 +1,717 @@
/*
* Copyright (c) 2009-2023 Arm Limited. All rights reserved.
*
* SPDX-License-Identifier: Apache-2.0
*
* Licensed under the Apache License, Version 2.0 (the License); you may
* not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an AS IS BASIS, WITHOUT
* WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
/*
* CMSIS-Core(M) Compiler GCC Header File
*/
#ifndef __CMSIS_GCC_M_H
#define __CMSIS_GCC_M_H
#ifndef __CMSIS_GCC_H
#error "This file must not be included directly"
#endif
#include <arm_acle.h>
/* ######################### Startup and Lowlevel Init ######################## */
#ifndef __PROGRAM_START
/**
\brief Initializes data and bss sections
  \details This default implementation initializes all data and additional bss
  sections, relying on .copy.table and .zero.table being specified properly
  in the linker script used.
*/
__STATIC_FORCEINLINE __NO_RETURN void __cmsis_start(void)
{
extern void _start(void) __NO_RETURN;
typedef struct __copy_table {
uint32_t const* src;
uint32_t* dest;
uint32_t wlen;
} __copy_table_t;
typedef struct __zero_table {
uint32_t* dest;
uint32_t wlen;
} __zero_table_t;
extern const __copy_table_t __copy_table_start__;
extern const __copy_table_t __copy_table_end__;
extern const __zero_table_t __zero_table_start__;
extern const __zero_table_t __zero_table_end__;
for (__copy_table_t const* pTable = &__copy_table_start__; pTable < &__copy_table_end__; ++pTable) {
for(uint32_t i=0u; i<pTable->wlen; ++i) {
pTable->dest[i] = pTable->src[i];
}
}
for (__zero_table_t const* pTable = &__zero_table_start__; pTable < &__zero_table_end__; ++pTable) {
for(uint32_t i=0u; i<pTable->wlen; ++i) {
pTable->dest[i] = 0u;
}
}
_start();
}
#define __PROGRAM_START __cmsis_start
#endif
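/*
  Usage sketch (illustrative, not part of CMSIS): a CMSIS-style C startup normally
  ends its Reset_Handler by jumping into __PROGRAM_START, which by default resolves
  to __cmsis_start above and walks the .copy.table / .zero.table entries before
  entering the C library. The assembly startup shipped in this SDK initializes
  .data/.bss itself instead, so this path is shown only for reference.

      extern void SystemInit(void);   // device init, assumed to come from system_gd32e23x.c

      void Reset_Handler(void)
      {
          SystemInit();               // clock/system setup before any static data is used
          __PROGRAM_START();          // copy .data, zero .bss, then run _start() -> main()
      }
*/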
#ifndef __INITIAL_SP
#define __INITIAL_SP __StackTop
#endif
#ifndef __STACK_LIMIT
#define __STACK_LIMIT __StackLimit
#endif
#ifndef __VECTOR_TABLE
#define __VECTOR_TABLE __Vectors
#endif
#ifndef __VECTOR_TABLE_ATTRIBUTE
#define __VECTOR_TABLE_ATTRIBUTE __attribute__((used, section(".vectors")))
#endif
#if defined (__ARM_FEATURE_CMSE) && (__ARM_FEATURE_CMSE == 3)
#ifndef __STACK_SEAL
#define __STACK_SEAL __StackSeal
#endif
#ifndef __TZ_STACK_SEAL_SIZE
#define __TZ_STACK_SEAL_SIZE 8U
#endif
#ifndef __TZ_STACK_SEAL_VALUE
#define __TZ_STACK_SEAL_VALUE 0xFEF5EDA5FEF5EDA5ULL
#endif
__STATIC_FORCEINLINE void __TZ_set_STACKSEAL_S (uint32_t* stackTop) {
*((uint64_t *)stackTop) = __TZ_STACK_SEAL_VALUE;
}
#endif
/* ########################### Core Function Access ########################### */
/** \ingroup CMSIS_Core_FunctionInterface
\defgroup CMSIS_Core_RegAccFunctions CMSIS Core Register Access Functions
@{
*/
/**
\brief Get Control Register
\details Returns the content of the Control Register.
\return Control Register value
*/
__STATIC_FORCEINLINE uint32_t __get_CONTROL(void)
{
uint32_t result;
__ASM volatile ("MRS %0, control" : "=r" (result) );
return (result);
}
#if defined (__ARM_FEATURE_CMSE) && (__ARM_FEATURE_CMSE == 3)
/**
\brief Get Control Register (non-secure)
\details Returns the content of the non-secure Control Register when in secure mode.
\return non-secure Control Register value
*/
__STATIC_FORCEINLINE uint32_t __TZ_get_CONTROL_NS(void)
{
uint32_t result;
__ASM volatile ("MRS %0, control_ns" : "=r" (result) );
return (result);
}
#endif
/**
\brief Set Control Register
\details Writes the given value to the Control Register.
\param [in] control Control Register value to set
*/
__STATIC_FORCEINLINE void __set_CONTROL(uint32_t control)
{
__ASM volatile ("MSR control, %0" : : "r" (control) : "memory");
__ISB();
}
#if defined (__ARM_FEATURE_CMSE) && (__ARM_FEATURE_CMSE == 3)
/**
\brief Set Control Register (non-secure)
\details Writes the given value to the non-secure Control Register when in secure state.
\param [in] control Control Register value to set
*/
__STATIC_FORCEINLINE void __TZ_set_CONTROL_NS(uint32_t control)
{
__ASM volatile ("MSR control_ns, %0" : : "r" (control) : "memory");
__ISB();
}
#endif
/**
\brief Get IPSR Register
\details Returns the content of the IPSR Register.
\return IPSR Register value
*/
__STATIC_FORCEINLINE uint32_t __get_IPSR(void)
{
uint32_t result;
__ASM volatile ("MRS %0, ipsr" : "=r" (result) );
return (result);
}
/**
\brief Get APSR Register
\details Returns the content of the APSR Register.
\return APSR Register value
*/
__STATIC_FORCEINLINE uint32_t __get_APSR(void)
{
uint32_t result;
__ASM volatile ("MRS %0, apsr" : "=r" (result) );
return (result);
}
/**
\brief Get xPSR Register
\details Returns the content of the xPSR Register.
\return xPSR Register value
*/
__STATIC_FORCEINLINE uint32_t __get_xPSR(void)
{
uint32_t result;
__ASM volatile ("MRS %0, xpsr" : "=r" (result) );
return (result);
}
/**
\brief Get Process Stack Pointer
\details Returns the current value of the Process Stack Pointer (PSP).
\return PSP Register value
*/
__STATIC_FORCEINLINE uint32_t __get_PSP(void)
{
uint32_t result;
__ASM volatile ("MRS %0, psp" : "=r" (result) );
return (result);
}
#if defined (__ARM_FEATURE_CMSE) && (__ARM_FEATURE_CMSE == 3)
/**
\brief Get Process Stack Pointer (non-secure)
\details Returns the current value of the non-secure Process Stack Pointer (PSP) when in secure state.
\return PSP Register value
*/
__STATIC_FORCEINLINE uint32_t __TZ_get_PSP_NS(void)
{
uint32_t result;
__ASM volatile ("MRS %0, psp_ns" : "=r" (result) );
return (result);
}
#endif
/**
\brief Set Process Stack Pointer
\details Assigns the given value to the Process Stack Pointer (PSP).
\param [in] topOfProcStack Process Stack Pointer value to set
*/
__STATIC_FORCEINLINE void __set_PSP(uint32_t topOfProcStack)
{
__ASM volatile ("MSR psp, %0" : : "r" (topOfProcStack) : );
}
#if defined (__ARM_FEATURE_CMSE) && (__ARM_FEATURE_CMSE == 3)
/**
\brief Set Process Stack Pointer (non-secure)
\details Assigns the given value to the non-secure Process Stack Pointer (PSP) when in secure state.
\param [in] topOfProcStack Process Stack Pointer value to set
*/
__STATIC_FORCEINLINE void __TZ_set_PSP_NS(uint32_t topOfProcStack)
{
__ASM volatile ("MSR psp_ns, %0" : : "r" (topOfProcStack) : );
}
#endif
/**
\brief Get Main Stack Pointer
\details Returns the current value of the Main Stack Pointer (MSP).
\return MSP Register value
*/
__STATIC_FORCEINLINE uint32_t __get_MSP(void)
{
uint32_t result;
__ASM volatile ("MRS %0, msp" : "=r" (result) );
return (result);
}
#if defined (__ARM_FEATURE_CMSE) && (__ARM_FEATURE_CMSE == 3)
/**
\brief Get Main Stack Pointer (non-secure)
\details Returns the current value of the non-secure Main Stack Pointer (MSP) when in secure state.
\return MSP Register value
*/
__STATIC_FORCEINLINE uint32_t __TZ_get_MSP_NS(void)
{
uint32_t result;
__ASM volatile ("MRS %0, msp_ns" : "=r" (result) );
return (result);
}
#endif
/**
\brief Set Main Stack Pointer
\details Assigns the given value to the Main Stack Pointer (MSP).
\param [in] topOfMainStack Main Stack Pointer value to set
*/
__STATIC_FORCEINLINE void __set_MSP(uint32_t topOfMainStack)
{
__ASM volatile ("MSR msp, %0" : : "r" (topOfMainStack) : );
}
#if defined (__ARM_FEATURE_CMSE) && (__ARM_FEATURE_CMSE == 3)
/**
\brief Set Main Stack Pointer (non-secure)
\details Assigns the given value to the non-secure Main Stack Pointer (MSP) when in secure state.
\param [in] topOfMainStack Main Stack Pointer value to set
*/
__STATIC_FORCEINLINE void __TZ_set_MSP_NS(uint32_t topOfMainStack)
{
__ASM volatile ("MSR msp_ns, %0" : : "r" (topOfMainStack) : );
}
#endif
#if defined (__ARM_FEATURE_CMSE) && (__ARM_FEATURE_CMSE == 3)
/**
\brief Get Stack Pointer (non-secure)
\details Returns the current value of the non-secure Stack Pointer (SP) when in secure state.
\return SP Register value
*/
__STATIC_FORCEINLINE uint32_t __TZ_get_SP_NS(void)
{
uint32_t result;
__ASM volatile ("MRS %0, sp_ns" : "=r" (result) );
return (result);
}
/**
\brief Set Stack Pointer (non-secure)
\details Assigns the given value to the non-secure Stack Pointer (SP) when in secure state.
\param [in] topOfStack Stack Pointer value to set
*/
__STATIC_FORCEINLINE void __TZ_set_SP_NS(uint32_t topOfStack)
{
__ASM volatile ("MSR sp_ns, %0" : : "r" (topOfStack) : );
}
#endif
/**
\brief Get Priority Mask
\details Returns the current state of the priority mask bit from the Priority Mask Register.
\return Priority Mask value
*/
__STATIC_FORCEINLINE uint32_t __get_PRIMASK(void)
{
uint32_t result;
__ASM volatile ("MRS %0, primask" : "=r" (result) );
return (result);
}
#if defined (__ARM_FEATURE_CMSE) && (__ARM_FEATURE_CMSE == 3)
/**
\brief Get Priority Mask (non-secure)
\details Returns the current state of the non-secure priority mask bit from the Priority Mask Register when in secure state.
\return Priority Mask value
*/
__STATIC_FORCEINLINE uint32_t __TZ_get_PRIMASK_NS(void)
{
uint32_t result;
__ASM volatile ("MRS %0, primask_ns" : "=r" (result) );
return (result);
}
#endif
/**
\brief Set Priority Mask
\details Assigns the given value to the Priority Mask Register.
\param [in] priMask Priority Mask
*/
__STATIC_FORCEINLINE void __set_PRIMASK(uint32_t priMask)
{
__ASM volatile ("MSR primask, %0" : : "r" (priMask) : "memory");
}
#if defined (__ARM_FEATURE_CMSE) && (__ARM_FEATURE_CMSE == 3)
/**
\brief Set Priority Mask (non-secure)
\details Assigns the given value to the non-secure Priority Mask Register when in secure state.
\param [in] priMask Priority Mask
*/
__STATIC_FORCEINLINE void __TZ_set_PRIMASK_NS(uint32_t priMask)
{
__ASM volatile ("MSR primask_ns, %0" : : "r" (priMask) : "memory");
}
#endif
#if (__ARM_ARCH_ISA_THUMB >= 2)
/**
\brief Get Base Priority
\details Returns the current value of the Base Priority register.
\return Base Priority register value
*/
__STATIC_FORCEINLINE uint32_t __get_BASEPRI(void)
{
uint32_t result;
__ASM volatile ("MRS %0, basepri" : "=r" (result) );
return (result);
}
#if defined (__ARM_FEATURE_CMSE) && (__ARM_FEATURE_CMSE == 3)
/**
\brief Get Base Priority (non-secure)
\details Returns the current value of the non-secure Base Priority register when in secure state.
\return Base Priority register value
*/
__STATIC_FORCEINLINE uint32_t __TZ_get_BASEPRI_NS(void)
{
uint32_t result;
__ASM volatile ("MRS %0, basepri_ns" : "=r" (result) );
return (result);
}
#endif
/**
\brief Set Base Priority
\details Assigns the given value to the Base Priority register.
\param [in] basePri Base Priority value to set
*/
__STATIC_FORCEINLINE void __set_BASEPRI(uint32_t basePri)
{
__ASM volatile ("MSR basepri, %0" : : "r" (basePri) : "memory");
}
#if defined (__ARM_FEATURE_CMSE) && (__ARM_FEATURE_CMSE == 3)
/**
\brief Set Base Priority (non-secure)
\details Assigns the given value to the non-secure Base Priority register when in secure state.
\param [in] basePri Base Priority value to set
*/
__STATIC_FORCEINLINE void __TZ_set_BASEPRI_NS(uint32_t basePri)
{
__ASM volatile ("MSR basepri_ns, %0" : : "r" (basePri) : "memory");
}
#endif
/**
\brief Set Base Priority with condition
\details Assigns the given value to the Base Priority register only if BASEPRI masking is disabled,
or the new value increases the BASEPRI priority level.
\param [in] basePri Base Priority value to set
*/
__STATIC_FORCEINLINE void __set_BASEPRI_MAX(uint32_t basePri)
{
__ASM volatile ("MSR basepri_max, %0" : : "r" (basePri) : "memory");
}
/**
\brief Get Fault Mask
\details Returns the current value of the Fault Mask register.
\return Fault Mask register value
*/
__STATIC_FORCEINLINE uint32_t __get_FAULTMASK(void)
{
uint32_t result;
__ASM volatile ("MRS %0, faultmask" : "=r" (result) );
return (result);
}
#if defined (__ARM_FEATURE_CMSE) && (__ARM_FEATURE_CMSE == 3)
/**
\brief Get Fault Mask (non-secure)
\details Returns the current value of the non-secure Fault Mask register when in secure state.
\return Fault Mask register value
*/
__STATIC_FORCEINLINE uint32_t __TZ_get_FAULTMASK_NS(void)
{
uint32_t result;
__ASM volatile ("MRS %0, faultmask_ns" : "=r" (result) );
return (result);
}
#endif
/**
\brief Set Fault Mask
\details Assigns the given value to the Fault Mask register.
\param [in] faultMask Fault Mask value to set
*/
__STATIC_FORCEINLINE void __set_FAULTMASK(uint32_t faultMask)
{
__ASM volatile ("MSR faultmask, %0" : : "r" (faultMask) : "memory");
}
#if defined (__ARM_FEATURE_CMSE) && (__ARM_FEATURE_CMSE == 3)
/**
\brief Set Fault Mask (non-secure)
\details Assigns the given value to the non-secure Fault Mask register when in secure state.
\param [in] faultMask Fault Mask value to set
*/
__STATIC_FORCEINLINE void __TZ_set_FAULTMASK_NS(uint32_t faultMask)
{
__ASM volatile ("MSR faultmask_ns, %0" : : "r" (faultMask) : "memory");
}
#endif
#endif /* (__ARM_ARCH_ISA_THUMB >= 2) */
#if (__ARM_ARCH >= 8)
/**
\brief Get Process Stack Pointer Limit
Devices without ARMv8-M Main Extensions (i.e. Cortex-M23) lack the non-secure
  Stack Pointer Limit register hence zero is always returned in non-secure
mode.
\details Returns the current value of the Process Stack Pointer Limit (PSPLIM).
\return PSPLIM Register value
*/
__STATIC_FORCEINLINE uint32_t __get_PSPLIM(void)
{
#if (!(defined (__ARM_ARCH_8M_MAIN__ ) && (__ARM_ARCH_8M_MAIN__ == 1)) && \
!(defined (__ARM_ARCH_8_1M_MAIN__ ) && (__ARM_ARCH_8_1M_MAIN__ == 1)) && \
(!defined (__ARM_FEATURE_CMSE ) || (__ARM_FEATURE_CMSE < 3)))
/* without main extensions, the non-secure PSPLIM is RAZ/WI */
return (0U);
#else
uint32_t result;
__ASM volatile ("MRS %0, psplim" : "=r" (result) );
return (result);
#endif
}
#if defined (__ARM_FEATURE_CMSE) && (__ARM_FEATURE_CMSE == 3)
/**
\brief Get Process Stack Pointer Limit (non-secure)
Devices without ARMv8-M Main Extensions (i.e. Cortex-M23) lack the non-secure
  Stack Pointer Limit register hence zero is always returned.
\details Returns the current value of the non-secure Process Stack Pointer Limit (PSPLIM) when in secure state.
\return PSPLIM Register value
*/
__STATIC_FORCEINLINE uint32_t __TZ_get_PSPLIM_NS(void)
{
#if (!(defined (__ARM_ARCH_8M_MAIN__ ) && (__ARM_ARCH_8M_MAIN__ == 1)) && \
!(defined (__ARM_ARCH_8_1M_MAIN__ ) && (__ARM_ARCH_8_1M_MAIN__ == 1)))
/* without main extensions, the non-secure PSPLIM is RAZ/WI */
return (0U);
#else
uint32_t result;
__ASM volatile ("MRS %0, psplim_ns" : "=r" (result) );
return (result);
#endif
}
#endif
/**
\brief Set Process Stack Pointer Limit
Devices without ARMv8-M Main Extensions (i.e. Cortex-M23) lack the non-secure
Stack Pointer Limit register hence the write is silently ignored in non-secure
mode.
\details Assigns the given value to the Process Stack Pointer Limit (PSPLIM).
\param [in] ProcStackPtrLimit Process Stack Pointer Limit value to set
*/
__STATIC_FORCEINLINE void __set_PSPLIM(uint32_t ProcStackPtrLimit)
{
#if (!(defined (__ARM_ARCH_8M_MAIN__ ) && (__ARM_ARCH_8M_MAIN__ == 1)) && \
!(defined (__ARM_ARCH_8_1M_MAIN__ ) && (__ARM_ARCH_8_1M_MAIN__ == 1)) && \
(!defined (__ARM_FEATURE_CMSE ) || (__ARM_FEATURE_CMSE < 3)))
/* without main extensions, the non-secure PSPLIM is RAZ/WI */
(void)ProcStackPtrLimit;
#else
__ASM volatile ("MSR psplim, %0" : : "r" (ProcStackPtrLimit));
#endif
}
#if defined (__ARM_FEATURE_CMSE) && (__ARM_FEATURE_CMSE == 3)
/**
  \brief Set Process Stack Pointer Limit (non-secure)
Devices without ARMv8-M Main Extensions (i.e. Cortex-M23) lack the non-secure
Stack Pointer Limit register hence the write is silently ignored.
\details Assigns the given value to the non-secure Process Stack Pointer Limit (PSPLIM) when in secure state.
\param [in] ProcStackPtrLimit Process Stack Pointer Limit value to set
*/
__STATIC_FORCEINLINE void __TZ_set_PSPLIM_NS(uint32_t ProcStackPtrLimit)
{
#if (!(defined (__ARM_ARCH_8M_MAIN__ ) && (__ARM_ARCH_8M_MAIN__ == 1)) && \
!(defined (__ARM_ARCH_8_1M_MAIN__ ) && (__ARM_ARCH_8_1M_MAIN__ == 1)))
/* without main extensions, the non-secure PSPLIM is RAZ/WI */
(void)ProcStackPtrLimit;
#else
__ASM volatile ("MSR psplim_ns, %0\n" : : "r" (ProcStackPtrLimit));
#endif
}
#endif
/**
\brief Get Main Stack Pointer Limit
Devices without ARMv8-M Main Extensions (i.e. Cortex-M23) lack the non-secure
  Stack Pointer Limit register hence zero is always returned.
\details Returns the current value of the Main Stack Pointer Limit (MSPLIM).
\return MSPLIM Register value
*/
__STATIC_FORCEINLINE uint32_t __get_MSPLIM(void)
{
#if (!(defined (__ARM_ARCH_8M_MAIN__ ) && (__ARM_ARCH_8M_MAIN__ == 1)) && \
!(defined (__ARM_ARCH_8_1M_MAIN__ ) && (__ARM_ARCH_8_1M_MAIN__ == 1)) && \
(!defined (__ARM_FEATURE_CMSE ) || (__ARM_FEATURE_CMSE < 3)))
/* without main extensions, the non-secure MSPLIM is RAZ/WI */
return (0U);
#else
uint32_t result;
__ASM volatile ("MRS %0, msplim" : "=r" (result) );
return (result);
#endif
}
#if defined (__ARM_FEATURE_CMSE) && (__ARM_FEATURE_CMSE == 3)
/**
\brief Get Main Stack Pointer Limit (non-secure)
Devices without ARMv8-M Main Extensions (i.e. Cortex-M23) lack the non-secure
  Stack Pointer Limit register hence zero is always returned.
  \details Returns the current value of the non-secure Main Stack Pointer Limit (MSPLIM) when in secure state.
\return MSPLIM Register value
*/
__STATIC_FORCEINLINE uint32_t __TZ_get_MSPLIM_NS(void)
{
#if (!(defined (__ARM_ARCH_8M_MAIN__ ) && (__ARM_ARCH_8M_MAIN__ == 1)) && \
!(defined (__ARM_ARCH_8_1M_MAIN__ ) && (__ARM_ARCH_8_1M_MAIN__ == 1)))
/* without main extensions, the non-secure MSPLIM is RAZ/WI */
return (0U);
#else
uint32_t result;
__ASM volatile ("MRS %0, msplim_ns" : "=r" (result) );
return (result);
#endif
}
#endif
/**
\brief Set Main Stack Pointer Limit
Devices without ARMv8-M Main Extensions (i.e. Cortex-M23) lack the non-secure
Stack Pointer Limit register hence the write is silently ignored.
\details Assigns the given value to the Main Stack Pointer Limit (MSPLIM).
\param [in] MainStackPtrLimit Main Stack Pointer Limit value to set
*/
__STATIC_FORCEINLINE void __set_MSPLIM(uint32_t MainStackPtrLimit)
{
#if (!(defined (__ARM_ARCH_8M_MAIN__ ) && (__ARM_ARCH_8M_MAIN__ == 1)) && \
!(defined (__ARM_ARCH_8_1M_MAIN__ ) && (__ARM_ARCH_8_1M_MAIN__ == 1)) && \
(!defined (__ARM_FEATURE_CMSE ) || (__ARM_FEATURE_CMSE < 3)))
/* without main extensions, the non-secure MSPLIM is RAZ/WI */
(void)MainStackPtrLimit;
#else
__ASM volatile ("MSR msplim, %0" : : "r" (MainStackPtrLimit));
#endif
}
#if defined (__ARM_FEATURE_CMSE) && (__ARM_FEATURE_CMSE == 3)
/**
\brief Set Main Stack Pointer Limit (non-secure)
Devices without ARMv8-M Main Extensions (i.e. Cortex-M23) lack the non-secure
Stack Pointer Limit register hence the write is silently ignored.
\details Assigns the given value to the non-secure Main Stack Pointer Limit (MSPLIM) when in secure state.
  \param [in]    MainStackPtrLimit  Main Stack Pointer Limit value to set
*/
__STATIC_FORCEINLINE void __TZ_set_MSPLIM_NS(uint32_t MainStackPtrLimit)
{
#if (!(defined (__ARM_ARCH_8M_MAIN__ ) && (__ARM_ARCH_8M_MAIN__ == 1)) && \
!(defined (__ARM_ARCH_8_1M_MAIN__ ) && (__ARM_ARCH_8_1M_MAIN__ == 1)))
/* without main extensions, the non-secure MSPLIM is RAZ/WI */
(void)MainStackPtrLimit;
#else
__ASM volatile ("MSR msplim_ns, %0" : : "r" (MainStackPtrLimit));
#endif
}
#endif
#endif /* (__ARM_ARCH >= 8) */
/*@} end of CMSIS_Core_RegAccFunctions */
#endif /* __CMSIS_GCC_M_H */

File diff suppressed because it is too large

File diff suppressed because it is too large

View File

@@ -0,0 +1,232 @@
.syntax unified
.cpu cortex-m23
.fpu softvfp
.thumb
.global g_pfnVectors
.global Default_Handler
/* necessary symbols defined in linker script to initialize data */
.word _sidata
.word _sdata
.word _edata
.word _sbss
.word _ebss
.section .text.Reset_Handler
.weak Reset_Handler
.type Reset_Handler, %function
/* reset Handler */
Reset_Handler:
ldr r0, =_sp
mov sp, r0
/* copy the data segment into ram */
movs r1, #0
b LoopCopyDataInit
CopyDataInit:
ldr r3, =_sidata
ldr r3, [r3, r1]
str r3, [r0, r1]
adds r1, r1, #4
LoopCopyDataInit:
ldr r0, =_sdata
ldr r3, =_edata
adds r2, r0, r1
cmp r2, r3
bcc CopyDataInit
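/* zero fill the bss segment */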
ldr r2, =_sbss
b LoopFillZerobss
FillZerobss:
movs r3, #0
str r3, [r2]
adds r2, r2, #4
LoopFillZerobss:
ldr r3, =_ebss
cmp r2, r3
bcc FillZerobss
/* Call SystemInit function */
bl SystemInit
/* Call static constructors */
bl __libc_init_array
/* Call the main function */
bl main
LoopForever:
b LoopForever
.size Reset_Handler, .-Reset_Handler
.section .text.Default_Handler,"ax",%progbits
Default_Handler:
Infinite_Loop:
b Infinite_Loop
.size Default_Handler, .-Default_Handler
.section .vectors,"a",%progbits
.global __gVectors
__gVectors:
.word _sp /* Top of Stack */
.word Reset_Handler /* 1:Reset Handler */
.word NMI_Handler /* 2:NMI Handler */
.word HardFault_Handler /* 3:Hard Fault Handler */
.word 0 /* Reserved */
.word 0 /* Reserved */
.word 0 /* Reserved */
.word 0 /* Reserved */
.word 0 /* Reserved */
.word 0 /* Reserved */
.word 0 /* Reserved */
.word SVC_Handler /* 11:SVCall Handler */
.word 0 /* Reserved */
.word 0 /* Reserved */
.word PendSV_Handler /* 14:PendSV Handler */
.word SysTick_Handler /* 15:SysTick Handler */
/* external interrupts handler */
.word WWDGT_IRQHandler /* 16:Window Watchdog Timer */
.word LVD_IRQHandler /* 17:LVD through EXTI Line detect */
.word RTC_IRQHandler /* 18:RTC through EXTI Line */
.word FMC_IRQHandler /* 19:FMC */
.word RCU_IRQHandler /* 20:RCU */
.word EXTI0_1_IRQHandler /* 21:EXTI Line 0 and EXTI Line 1 */
.word EXTI2_3_IRQHandler /* 22:EXTI Line 2 and EXTI Line 3 */
.word EXTI4_15_IRQHandler /* 23:EXTI Line 4 to EXTI Line 15 */
.word 0 /* Reserved */
.word DMA_Channel0_IRQHandler /* 25:DMA Channel 0 */
.word DMA_Channel1_2_IRQHandler /* 26:DMA Channel 1 and DMA Channel 2 */
.word DMA_Channel3_4_IRQHandler /* 27:DMA Channel 3 and DMA Channel 4 */
.word ADC_CMP_IRQHandler /* 28:ADC and Comparator */
.word TIMER0_BRK_UP_TRG_COM_IRQHandler /* 29:TIMER0 Break,Update,Trigger and Commutation */
.word TIMER0_Channel_IRQHandler /* 30:TIMER0 Channel Capture Compare */
.word 0 /* Reserved */
.word TIMER2_IRQHandler /* 32:TIMER2 */
.word TIMER5_IRQHandler /* 33:TIMER5 */
.word 0 /* Reserved */
.word TIMER13_IRQHandler /* 35:TIMER13 */
.word TIMER14_IRQHandler /* 36:TIMER14 */
.word TIMER15_IRQHandler /* 37:TIMER15 */
.word TIMER16_IRQHandler /* 38:TIMER16 */
.word I2C0_EV_IRQHandler /* 39:I2C0 Event */
.word I2C1_EV_IRQHandler /* 40:I2C1 Event */
.word SPI0_IRQHandler /* 41:SPI0 */
.word SPI1_IRQHandler /* 42:SPI1 */
.word USART0_IRQHandler /* 43:USART0 */
.word USART1_IRQHandler /* 44:USART1 */
.word 0 /* Reserved */
.word 0 /* Reserved */
.word 0 /* Reserved */
.word I2C0_ER_IRQHandler /* 48:I2C0 Error */
.word 0 /* Reserved */
.word I2C1_ER_IRQHandler /* 50:I2C1 Error */
.size __gVectors, . - __gVectors
.weak NMI_Handler
.thumb_set NMI_Handler,Default_Handler
.weak HardFault_Handler
.thumb_set HardFault_Handler,Default_Handler
.weak SVC_Handler
.thumb_set SVC_Handler,Default_Handler
.weak PendSV_Handler
.thumb_set PendSV_Handler,Default_Handler
.weak SysTick_Handler
.thumb_set SysTick_Handler,Default_Handler
.weak WWDGT_IRQHandler
.thumb_set WWDGT_IRQHandler,Default_Handler
.weak LVD_IRQHandler
.thumb_set LVD_IRQHandler,Default_Handler
.weak TAMPER_IRQHandler
.thumb_set TAMPER_IRQHandler,Default_Handler
.weak RTC_IRQHandler
.thumb_set RTC_IRQHandler,Default_Handler
.weak FMC_IRQHandler
.thumb_set FMC_IRQHandler,Default_Handler
.weak RCU_IRQHandler
.thumb_set RCU_IRQHandler,Default_Handler
.weak EXTI0_1_IRQHandler
.thumb_set EXTI0_1_IRQHandler,Default_Handler
.weak EXTI2_3_IRQHandler
.thumb_set EXTI2_3_IRQHandler,Default_Handler
.weak EXTI4_15_IRQHandler
.thumb_set EXTI4_15_IRQHandler,Default_Handler
.weak DMA_Channel0_IRQHandler
.thumb_set DMA_Channel0_IRQHandler,Default_Handler
.weak DMA_Channel1_2_IRQHandler
.thumb_set DMA_Channel1_2_IRQHandler,Default_Handler
.weak DMA_Channel3_4_IRQHandler
.thumb_set DMA_Channel3_4_IRQHandler,Default_Handler
.weak ADC_CMP_IRQHandler
.thumb_set ADC_CMP_IRQHandler,Default_Handler
.weak TIMER0_BRK_UP_TRG_COM_IRQHandler
.thumb_set TIMER0_BRK_UP_TRG_COM_IRQHandler,Default_Handler
.weak TIMER0_Channel_IRQHandler
.thumb_set TIMER0_Channel_IRQHandler,Default_Handler
.weak TIMER2_IRQHandler
.thumb_set TIMER2_IRQHandler,Default_Handler
.weak TIMER5_IRQHandler
.thumb_set TIMER5_IRQHandler,Default_Handler
.weak TIMER13_IRQHandler
.thumb_set TIMER13_IRQHandler,Default_Handler
.weak TIMER14_IRQHandler
.thumb_set TIMER14_IRQHandler,Default_Handler
.weak TIMER15_IRQHandler
.thumb_set TIMER15_IRQHandler,Default_Handler
.weak TIMER16_IRQHandler
.thumb_set TIMER16_IRQHandler,Default_Handler
.weak I2C0_EV_IRQHandler
.thumb_set I2C0_EV_IRQHandler,Default_Handler
.weak I2C1_EV_IRQHandler
.thumb_set I2C1_EV_IRQHandler,Default_Handler
.weak SPI0_IRQHandler
.thumb_set SPI0_IRQHandler,Default_Handler
.weak SPI1_IRQHandler
.thumb_set SPI1_IRQHandler,Default_Handler
.weak USART0_IRQHandler
.thumb_set USART0_IRQHandler,Default_Handler
.weak USART1_IRQHandler
.thumb_set USART1_IRQHandler,Default_Handler
.weak I2C0_ER_IRQHandler
.thumb_set I2C0_ER_IRQHandler,Default_Handler
.weak I2C1_ER_IRQHandler
.thumb_set I2C1_ER_IRQHandler,Default_Handler
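/* Note (illustrative, not part of the vendor file): every handler above is a weak
   alias of Default_Handler, so an application overrides one simply by defining a
   C function with the same name; no edit to this startup file is needed.

       // e.g. in application code (hypothetical)
       void USART0_IRQHandler(void)
       {
           // clear the pending flag and handle the received data here
       }
*/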