From 9b88e7f0ca087380d28ad8c46edeb6a08a0c40be Mon Sep 17 00:00:00 2001
From: Markus Klein
Date: Sun, 4 Dec 2022 18:14:19 +0100
Subject: [PATCH] use a single core folder for all ARM architectures.

This should reduce code duplication. Move the GIC driver to a separate file
so that it can be used by both ARMV7A and ARMV7R cores. Add initial support
for some Cortex-A and Cortex-R devices
---
 ARM.CMSIS.pdsc                                |    8 +-
 .../core_ca.h => Core/Include/armv7a.h}       | 1493 +++++------------
 .../Include/armv7a_cp15.h}                    |    4 +-
 CMSIS/Core/Include/armv7r.h                   |  470 ++++++
 CMSIS/Core/Include/armv8a.h                   |  621 +++++++
 CMSIS/Core/Include/armv8a_system_control.h    |  143 ++
 CMSIS/Core/Include/armv8r.h                   |  354 ++++
 CMSIS/Core/Include/cmsis_armclang.h           |  636 +------
 CMSIS/Core/Include/cmsis_armclang_corea.h     |  238 +++
 CMSIS/Core/Include/cmsis_armclang_corem.h     |  670 ++++++++
 CMSIS/Core/Include/cmsis_armclang_corer.h     |  150 ++
 CMSIS/Core/Include/cmsis_clang.h              |  636 +------
 CMSIS/Core/Include/cmsis_clang_corea.h        |  236 +++
 CMSIS/Core/Include/cmsis_clang_corem.h        |  670 ++++++++
 CMSIS/Core/Include/cmsis_clang_corer.h        |  150 ++
 CMSIS/Core/Include/cmsis_compiler.h           |   67 +-
 CMSIS/Core/Include/cmsis_gcc.h                |  714 +------
 CMSIS/Core/Include/cmsis_gcc_corea.h          |  257 +++
 CMSIS/Core/Include/cmsis_gcc_corem.h          |  723 ++++++++
 CMSIS/Core/Include/cmsis_gcc_corer.h          |  153 ++
 CMSIS/Core/Include/cmsis_iccarm.h             | 1090 ++++++------
 CMSIS/Core/Include/cmsis_iccarm_corea.h       |  283 ++++
 CMSIS/Core/Include/cmsis_iccarm_corem.h       |  464 +++++
 CMSIS/Core/Include/cmsis_iccarm_corer.h       |   34 +
 CMSIS/Core/Include/cmsis_tiarmclang.h         |  638 +------
 CMSIS/Core/Include/cmsis_tiarmclang_corem.h   |  670 ++++++++
 CMSIS/Core/Include/cmsis_version.h            |   34 +-
 CMSIS/Core/Include/core_ca.h                  |   36 +
 CMSIS/Core/Include/core_ca35.h                |   41 +
 CMSIS/Core/Include/core_ca5.h                 |   41 +
 CMSIS/Core/Include/core_ca53.h                |   41 +
 CMSIS/Core/Include/core_ca57.h                |   41 +
 CMSIS/Core/Include/core_ca7.h                 |   41 +
 CMSIS/Core/Include/core_ca9.h                 |   41 +
 CMSIS/Core/Include/core_cr4.h                 |   41 +
 CMSIS/Core/Include/core_cr5.h                 |   41 +
 CMSIS/Core/Include/core_cr52.h                |   41 +
 CMSIS/Core/Include/core_cr7.h                 |   41 +
 CMSIS/Core/Include/core_cr8.h                 |   41 +
 CMSIS/Core/Include/gic_v20.h                  |  757 +++++++
 CMSIS/{Core_A => Core}/Include/irq_ctrl.h     |    2 +-
 CMSIS/{Core_A => Core}/Source/irq_ctrl_gic.c  |    0
 .../Target/CA5/RTE/Device/ARMCA5/mmu_ARMCA5.c |    2 +-
 .../Target/CA7/RTE/Device/ARMCA7/mmu_ARMCA7.c |    2 +-
 .../Target/CA9/RTE/Device/ARMCA9/mmu_ARMCA9.c |    2 +-
 CMSIS/CoreValidation/Project/avh.yml          |    1 -
 CMSIS/Core_A/Include/cmsis_armcc.h            |  605 -------
 CMSIS/Core_A/Include/cmsis_armclang.h         |  644 -------
 CMSIS/Core_A/Include/cmsis_compiler.h         |  245 ---
 CMSIS/Core_A/Include/cmsis_gcc.h              |  960 ----------
 CMSIS/Core_A/Include/cmsis_iccarm.h           |  573 -------
 CMSIS/DoxyGen/Core/Core.dxy.in                |    2 +-
 CMSIS/DoxyGen/Core_A/Core_A.dxy.in            |   18 +-
 CMSIS/DoxyGen/Core_A/src/Overview.txt         |    2 +-
 CMSIS/DoxyGen/Core_A/src/Template.txt         |   40 +-
 .../Core_A/src/{core_ca.txt => arm7a.txt}     |    4 +-
 README.md                                     |    3 +-
 gen_pack.sh                                   |    1 -
 58 files changed, 8572 insertions(+), 7384 deletions(-)
 rename CMSIS/{Core_A/Include/core_ca.h => Core/Include/armv7a.h} (55%)
 rename CMSIS/{Core_A/Include/cmsis_cp15.h => Core/Include/armv7a_cp15.h} (99%)
 create mode 100644 CMSIS/Core/Include/armv7r.h
 create mode 100644 CMSIS/Core/Include/armv8a.h
 create mode 100644 CMSIS/Core/Include/armv8a_system_control.h
 create mode 100644 CMSIS/Core/Include/armv8r.h
 create mode 100644 CMSIS/Core/Include/cmsis_armclang_corea.h
 create mode 100644 CMSIS/Core/Include/cmsis_armclang_corem.h
 create mode 100644
CMSIS/Core/Include/cmsis_armclang_corer.h create mode 100644 CMSIS/Core/Include/cmsis_clang_corea.h create mode 100644 CMSIS/Core/Include/cmsis_clang_corem.h create mode 100644 CMSIS/Core/Include/cmsis_clang_corer.h create mode 100644 CMSIS/Core/Include/cmsis_gcc_corea.h create mode 100644 CMSIS/Core/Include/cmsis_gcc_corem.h create mode 100644 CMSIS/Core/Include/cmsis_gcc_corer.h create mode 100644 CMSIS/Core/Include/cmsis_iccarm_corea.h create mode 100644 CMSIS/Core/Include/cmsis_iccarm_corem.h create mode 100644 CMSIS/Core/Include/cmsis_iccarm_corer.h create mode 100644 CMSIS/Core/Include/cmsis_tiarmclang_corem.h create mode 100644 CMSIS/Core/Include/core_ca.h create mode 100644 CMSIS/Core/Include/core_ca35.h create mode 100644 CMSIS/Core/Include/core_ca5.h create mode 100644 CMSIS/Core/Include/core_ca53.h create mode 100644 CMSIS/Core/Include/core_ca57.h create mode 100644 CMSIS/Core/Include/core_ca7.h create mode 100644 CMSIS/Core/Include/core_ca9.h create mode 100644 CMSIS/Core/Include/core_cr4.h create mode 100644 CMSIS/Core/Include/core_cr5.h create mode 100644 CMSIS/Core/Include/core_cr52.h create mode 100644 CMSIS/Core/Include/core_cr7.h create mode 100644 CMSIS/Core/Include/core_cr8.h create mode 100644 CMSIS/Core/Include/gic_v20.h rename CMSIS/{Core_A => Core}/Include/irq_ctrl.h (99%) rename CMSIS/{Core_A => Core}/Source/irq_ctrl_gic.c (100%) delete mode 100644 CMSIS/Core_A/Include/cmsis_armcc.h delete mode 100644 CMSIS/Core_A/Include/cmsis_armclang.h delete mode 100644 CMSIS/Core_A/Include/cmsis_compiler.h delete mode 100644 CMSIS/Core_A/Include/cmsis_gcc.h delete mode 100644 CMSIS/Core_A/Include/cmsis_iccarm.h rename CMSIS/DoxyGen/Core_A/src/{core_ca.txt => arm7a.txt} (99%) diff --git a/ARM.CMSIS.pdsc b/ARM.CMSIS.pdsc index ba27c9cbc..6fb0efecc 100644 --- a/ARM.CMSIS.pdsc +++ b/ARM.CMSIS.pdsc @@ -427,7 +427,7 @@ Device interrupt controller interface - + @@ -648,7 +648,7 @@ - + CMSIS-CORE for Cortex-M, SC000, SC300, Star-MC1, ARMv8-M, ARMv8.1-M @@ -666,7 +666,7 @@ - + @@ -674,7 +674,7 @@ IRQ Controller implementation using GIC - + diff --git a/CMSIS/Core_A/Include/core_ca.h b/CMSIS/Core/Include/armv7a.h similarity index 55% rename from CMSIS/Core_A/Include/core_ca.h rename to CMSIS/Core/Include/armv7a.h index 12a1a6d73..5101ed40e 100644 --- a/CMSIS/Core_A/Include/core_ca.h +++ b/CMSIS/Core/Include/armv7a.h @@ -1,11 +1,11 @@ /**************************************************************************//** - * @file core_ca.h - * @brief CMSIS Cortex-A Core Peripheral Access Layer Header File + * @file armv7a.h + * @brief CMSIS Cortex-A Core Peripheral Access Layer Header File for ARMv7-A * @version V1.0.8 * @date 23. March 2023 ******************************************************************************/ /* - * Copyright (c) 2009-2022 ARM Limited. All rights reserved. + * Copyright (c) 2009-2023 ARM Limited. All rights reserved. 
* * SPDX-License-Identifier: Apache-2.0 * @@ -28,8 +28,8 @@ #pragma clang system_header /* treat file as system include file */ #endif -#ifndef __CORE_CA_H_GENERIC -#define __CORE_CA_H_GENERIC +#ifndef __ARM_V7A_GENERIC +#define __ARM_V7A_GENERIC #ifdef __cplusplus extern "C" { @@ -38,12 +38,12 @@ /******************************************************************************* * CMSIS definitions ******************************************************************************/ + /** + \ingroup ARMv7-A + @{ + */ -/* CMSIS CA definitions */ -#define __CA_CMSIS_VERSION_MAIN (1U) /*!< \brief [31:16] CMSIS-Core(A) main version */ -#define __CA_CMSIS_VERSION_SUB (1U) /*!< \brief [15:0] CMSIS-Core(A) sub version */ -#define __CA_CMSIS_VERSION ((__CA_CMSIS_VERSION_MAIN << 16U) | \ - __CA_CMSIS_VERSION_SUB ) /*!< \brief CMSIS-Core(A) version number */ +#include "cmsis_version.h" #if defined ( __CC_ARM ) #if defined (__TARGET_FPU_VFP) @@ -124,12 +124,12 @@ } #endif -#endif /* __CORE_CA_H_GENERIC */ +#endif /* __ARM_V7A_GENERIC */ #ifndef __CMSIS_GENERIC -#ifndef __CORE_CA_H_DEPENDANT -#define __CORE_CA_H_DEPENDANT +#ifndef __ARM_V7A_DEPENDANT +#define __ARM_V7A_DEPENDANT #ifdef __cplusplus extern "C" { @@ -137,11 +137,6 @@ /* check device defines and use defaults */ #if defined __CHECK_DEVICE_DEFINES - #ifndef __CA_REV - #define __CA_REV 0x0000U - #warning "__CA_REV not defined in device header file; using default!" - #endif - #ifndef __FPU_PRESENT #define __FPU_PRESENT 0U #warning "__FPU_PRESENT not defined in device header file; using default!" @@ -178,6 +173,10 @@ #define __IOM volatile /*!< \brief Defines 'read / write' structure member permissions */ #define RESERVED(N, T) T RESERVED##N; // placeholder struct members used for "reserved" areas +/** @} end of group ARMv7-A */ + + + /******************************************************************************* * Register Abstraction Core Register contain: @@ -676,477 +675,141 @@ typedef union \brief Union type to access the L2C_310 Cache Controller. 
*/ #if (__L2C_PRESENT == 1U) || defined(DOXYGEN) -typedef struct -{ - __IM uint32_t CACHE_ID; /*!< \brief Offset: 0x0000 (R/ ) Cache ID Register */ - __IM uint32_t CACHE_TYPE; /*!< \brief Offset: 0x0004 (R/ ) Cache Type Register */ - RESERVED(0[0x3e], uint32_t) - __IOM uint32_t CONTROL; /*!< \brief Offset: 0x0100 (R/W) Control Register */ - __IOM uint32_t AUX_CNT; /*!< \brief Offset: 0x0104 (R/W) Auxiliary Control */ - RESERVED(1[0x3e], uint32_t) - __IOM uint32_t EVENT_CONTROL; /*!< \brief Offset: 0x0200 (R/W) Event Counter Control */ - __IOM uint32_t EVENT_COUNTER1_CONF; /*!< \brief Offset: 0x0204 (R/W) Event Counter 1 Configuration */ - __IOM uint32_t EVENT_COUNTER0_CONF; /*!< \brief Offset: 0x0208 (R/W) Event Counter 1 Configuration */ - RESERVED(2[0x2], uint32_t) - __IOM uint32_t INTERRUPT_MASK; /*!< \brief Offset: 0x0214 (R/W) Interrupt Mask */ - __IM uint32_t MASKED_INT_STATUS; /*!< \brief Offset: 0x0218 (R/ ) Masked Interrupt Status */ - __IM uint32_t RAW_INT_STATUS; /*!< \brief Offset: 0x021c (R/ ) Raw Interrupt Status */ - __OM uint32_t INTERRUPT_CLEAR; /*!< \brief Offset: 0x0220 ( /W) Interrupt Clear */ - RESERVED(3[0x143], uint32_t) - __IOM uint32_t CACHE_SYNC; /*!< \brief Offset: 0x0730 (R/W) Cache Sync */ - RESERVED(4[0xf], uint32_t) - __IOM uint32_t INV_LINE_PA; /*!< \brief Offset: 0x0770 (R/W) Invalidate Line By PA */ - RESERVED(6[2], uint32_t) - __IOM uint32_t INV_WAY; /*!< \brief Offset: 0x077c (R/W) Invalidate by Way */ - RESERVED(5[0xc], uint32_t) - __IOM uint32_t CLEAN_LINE_PA; /*!< \brief Offset: 0x07b0 (R/W) Clean Line by PA */ - RESERVED(7[1], uint32_t) - __IOM uint32_t CLEAN_LINE_INDEX_WAY; /*!< \brief Offset: 0x07b8 (R/W) Clean Line by Index/Way */ - __IOM uint32_t CLEAN_WAY; /*!< \brief Offset: 0x07bc (R/W) Clean by Way */ - RESERVED(8[0xc], uint32_t) - __IOM uint32_t CLEAN_INV_LINE_PA; /*!< \brief Offset: 0x07f0 (R/W) Clean and Invalidate Line by PA */ - RESERVED(9[1], uint32_t) - __IOM uint32_t CLEAN_INV_LINE_INDEX_WAY; /*!< \brief Offset: 0x07f8 (R/W) Clean and Invalidate Line by Index/Way */ - __IOM uint32_t CLEAN_INV_WAY; /*!< \brief Offset: 0x07fc (R/W) Clean and Invalidate by Way */ - RESERVED(10[0x40], uint32_t) - __IOM uint32_t DATA_LOCK_0_WAY; /*!< \brief Offset: 0x0900 (R/W) Data Lockdown 0 by Way */ - __IOM uint32_t INST_LOCK_0_WAY; /*!< \brief Offset: 0x0904 (R/W) Instruction Lockdown 0 by Way */ - __IOM uint32_t DATA_LOCK_1_WAY; /*!< \brief Offset: 0x0908 (R/W) Data Lockdown 1 by Way */ - __IOM uint32_t INST_LOCK_1_WAY; /*!< \brief Offset: 0x090c (R/W) Instruction Lockdown 1 by Way */ - __IOM uint32_t DATA_LOCK_2_WAY; /*!< \brief Offset: 0x0910 (R/W) Data Lockdown 2 by Way */ - __IOM uint32_t INST_LOCK_2_WAY; /*!< \brief Offset: 0x0914 (R/W) Instruction Lockdown 2 by Way */ - __IOM uint32_t DATA_LOCK_3_WAY; /*!< \brief Offset: 0x0918 (R/W) Data Lockdown 3 by Way */ - __IOM uint32_t INST_LOCK_3_WAY; /*!< \brief Offset: 0x091c (R/W) Instruction Lockdown 3 by Way */ - __IOM uint32_t DATA_LOCK_4_WAY; /*!< \brief Offset: 0x0920 (R/W) Data Lockdown 4 by Way */ - __IOM uint32_t INST_LOCK_4_WAY; /*!< \brief Offset: 0x0924 (R/W) Instruction Lockdown 4 by Way */ - __IOM uint32_t DATA_LOCK_5_WAY; /*!< \brief Offset: 0x0928 (R/W) Data Lockdown 5 by Way */ - __IOM uint32_t INST_LOCK_5_WAY; /*!< \brief Offset: 0x092c (R/W) Instruction Lockdown 5 by Way */ - __IOM uint32_t DATA_LOCK_6_WAY; /*!< \brief Offset: 0x0930 (R/W) Data Lockdown 5 by Way */ - __IOM uint32_t INST_LOCK_6_WAY; /*!< \brief Offset: 0x0934 (R/W) Instruction Lockdown 5 by Way */ - __IOM uint32_t 
DATA_LOCK_7_WAY; /*!< \brief Offset: 0x0938 (R/W) Data Lockdown 6 by Way */ - __IOM uint32_t INST_LOCK_7_WAY; /*!< \brief Offset: 0x093c (R/W) Instruction Lockdown 6 by Way */ - RESERVED(11[0x4], uint32_t) - __IOM uint32_t LOCK_LINE_EN; /*!< \brief Offset: 0x0950 (R/W) Lockdown by Line Enable */ - __IOM uint32_t UNLOCK_ALL_BY_WAY; /*!< \brief Offset: 0x0954 (R/W) Unlock All Lines by Way */ - RESERVED(12[0xaa], uint32_t) - __IOM uint32_t ADDRESS_FILTER_START; /*!< \brief Offset: 0x0c00 (R/W) Address Filtering Start */ - __IOM uint32_t ADDRESS_FILTER_END; /*!< \brief Offset: 0x0c04 (R/W) Address Filtering End */ - RESERVED(13[0xce], uint32_t) - __IOM uint32_t DEBUG_CONTROL; /*!< \brief Offset: 0x0f40 (R/W) Debug Control Register */ -} L2C_310_TypeDef; - -#define L2C_310 ((L2C_310_TypeDef *)L2C_310_BASE) /*!< \brief L2C_310 register set access pointer */ -#endif + typedef struct + { + __IM uint32_t CACHE_ID; /*!< \brief Offset: 0x0000 (R/ ) Cache ID Register */ + __IM uint32_t CACHE_TYPE; /*!< \brief Offset: 0x0004 (R/ ) Cache Type Register */ + RESERVED(0[0x3e], uint32_t) + __IOM uint32_t CONTROL; /*!< \brief Offset: 0x0100 (R/W) Control Register */ + __IOM uint32_t AUX_CNT; /*!< \brief Offset: 0x0104 (R/W) Auxiliary Control */ + RESERVED(1[0x3e], uint32_t) + __IOM uint32_t EVENT_CONTROL; /*!< \brief Offset: 0x0200 (R/W) Event Counter Control */ + __IOM uint32_t EVENT_COUNTER1_CONF; /*!< \brief Offset: 0x0204 (R/W) Event Counter 1 Configuration */ + __IOM uint32_t EVENT_COUNTER0_CONF; /*!< \brief Offset: 0x0208 (R/W) Event Counter 1 Configuration */ + RESERVED(2[0x2], uint32_t) + __IOM uint32_t INTERRUPT_MASK; /*!< \brief Offset: 0x0214 (R/W) Interrupt Mask */ + __IM uint32_t MASKED_INT_STATUS; /*!< \brief Offset: 0x0218 (R/ ) Masked Interrupt Status */ + __IM uint32_t RAW_INT_STATUS; /*!< \brief Offset: 0x021c (R/ ) Raw Interrupt Status */ + __OM uint32_t INTERRUPT_CLEAR; /*!< \brief Offset: 0x0220 ( /W) Interrupt Clear */ + RESERVED(3[0x143], uint32_t) + __IOM uint32_t CACHE_SYNC; /*!< \brief Offset: 0x0730 (R/W) Cache Sync */ + RESERVED(4[0xf], uint32_t) + __IOM uint32_t INV_LINE_PA; /*!< \brief Offset: 0x0770 (R/W) Invalidate Line By PA */ + RESERVED(6[2], uint32_t) + __IOM uint32_t INV_WAY; /*!< \brief Offset: 0x077c (R/W) Invalidate by Way */ + RESERVED(5[0xc], uint32_t) + __IOM uint32_t CLEAN_LINE_PA; /*!< \brief Offset: 0x07b0 (R/W) Clean Line by PA */ + RESERVED(7[1], uint32_t) + __IOM uint32_t CLEAN_LINE_INDEX_WAY; /*!< \brief Offset: 0x07b8 (R/W) Clean Line by Index/Way */ + __IOM uint32_t CLEAN_WAY; /*!< \brief Offset: 0x07bc (R/W) Clean by Way */ + RESERVED(8[0xc], uint32_t) + __IOM uint32_t CLEAN_INV_LINE_PA; /*!< \brief Offset: 0x07f0 (R/W) Clean and Invalidate Line by PA */ + RESERVED(9[1], uint32_t) + __IOM uint32_t CLEAN_INV_LINE_INDEX_WAY; /*!< \brief Offset: 0x07f8 (R/W) Clean and Invalidate Line by Index/Way */ + __IOM uint32_t CLEAN_INV_WAY; /*!< \brief Offset: 0x07fc (R/W) Clean and Invalidate by Way */ + RESERVED(10[0x40], uint32_t) + __IOM uint32_t DATA_LOCK_0_WAY; /*!< \brief Offset: 0x0900 (R/W) Data Lockdown 0 by Way */ + __IOM uint32_t INST_LOCK_0_WAY; /*!< \brief Offset: 0x0904 (R/W) Instruction Lockdown 0 by Way */ + __IOM uint32_t DATA_LOCK_1_WAY; /*!< \brief Offset: 0x0908 (R/W) Data Lockdown 1 by Way */ + __IOM uint32_t INST_LOCK_1_WAY; /*!< \brief Offset: 0x090c (R/W) Instruction Lockdown 1 by Way */ + __IOM uint32_t DATA_LOCK_2_WAY; /*!< \brief Offset: 0x0910 (R/W) Data Lockdown 2 by Way */ + __IOM uint32_t INST_LOCK_2_WAY; /*!< \brief Offset: 0x0914 (R/W) 
Instruction Lockdown 2 by Way */ + __IOM uint32_t DATA_LOCK_3_WAY; /*!< \brief Offset: 0x0918 (R/W) Data Lockdown 3 by Way */ + __IOM uint32_t INST_LOCK_3_WAY; /*!< \brief Offset: 0x091c (R/W) Instruction Lockdown 3 by Way */ + __IOM uint32_t DATA_LOCK_4_WAY; /*!< \brief Offset: 0x0920 (R/W) Data Lockdown 4 by Way */ + __IOM uint32_t INST_LOCK_4_WAY; /*!< \brief Offset: 0x0924 (R/W) Instruction Lockdown 4 by Way */ + __IOM uint32_t DATA_LOCK_5_WAY; /*!< \brief Offset: 0x0928 (R/W) Data Lockdown 5 by Way */ + __IOM uint32_t INST_LOCK_5_WAY; /*!< \brief Offset: 0x092c (R/W) Instruction Lockdown 5 by Way */ + __IOM uint32_t DATA_LOCK_6_WAY; /*!< \brief Offset: 0x0930 (R/W) Data Lockdown 5 by Way */ + __IOM uint32_t INST_LOCK_6_WAY; /*!< \brief Offset: 0x0934 (R/W) Instruction Lockdown 5 by Way */ + __IOM uint32_t DATA_LOCK_7_WAY; /*!< \brief Offset: 0x0938 (R/W) Data Lockdown 6 by Way */ + __IOM uint32_t INST_LOCK_7_WAY; /*!< \brief Offset: 0x093c (R/W) Instruction Lockdown 6 by Way */ + RESERVED(11[0x4], uint32_t) + __IOM uint32_t LOCK_LINE_EN; /*!< \brief Offset: 0x0950 (R/W) Lockdown by Line Enable */ + __IOM uint32_t UNLOCK_ALL_BY_WAY; /*!< \brief Offset: 0x0954 (R/W) Unlock All Lines by Way */ + RESERVED(12[0xaa], uint32_t) + __IOM uint32_t ADDRESS_FILTER_START; /*!< \brief Offset: 0x0c00 (R/W) Address Filtering Start */ + __IOM uint32_t ADDRESS_FILTER_END; /*!< \brief Offset: 0x0c04 (R/W) Address Filtering End */ + RESERVED(13[0xce], uint32_t) + __IOM uint32_t DEBUG_CONTROL; /*!< \brief Offset: 0x0f40 (R/W) Debug Control Register */ + } L2C_310_TypeDef; + + #define L2C_310 ((L2C_310_TypeDef *)L2C_310_BASE) /*!< \brief L2C_310 register set access pointer */ +#endif /* #if (__L2C_PRESENT == 1U) || defined(DOXYGEN) */ #if (__GIC_PRESENT == 1U) || defined(DOXYGEN) - -/** \brief Structure type to access the Generic Interrupt Controller Distributor (GICD) -*/ -typedef struct -{ - __IOM uint32_t CTLR; /*!< \brief Offset: 0x000 (R/W) Distributor Control Register */ - __IM uint32_t TYPER; /*!< \brief Offset: 0x004 (R/ ) Interrupt Controller Type Register */ - __IM uint32_t IIDR; /*!< \brief Offset: 0x008 (R/ ) Distributor Implementer Identification Register */ - RESERVED(0, uint32_t) - __IOM uint32_t STATUSR; /*!< \brief Offset: 0x010 (R/W) Error Reporting Status Register, optional */ - RESERVED(1[11], uint32_t) - __OM uint32_t SETSPI_NSR; /*!< \brief Offset: 0x040 ( /W) Set SPI Register */ - RESERVED(2, uint32_t) - __OM uint32_t CLRSPI_NSR; /*!< \brief Offset: 0x048 ( /W) Clear SPI Register */ - RESERVED(3, uint32_t) - __OM uint32_t SETSPI_SR; /*!< \brief Offset: 0x050 ( /W) Set SPI, Secure Register */ - RESERVED(4, uint32_t) - __OM uint32_t CLRSPI_SR; /*!< \brief Offset: 0x058 ( /W) Clear SPI, Secure Register */ - RESERVED(5[9], uint32_t) - __IOM uint32_t IGROUPR[32]; /*!< \brief Offset: 0x080 (R/W) Interrupt Group Registers */ - __IOM uint32_t ISENABLER[32]; /*!< \brief Offset: 0x100 (R/W) Interrupt Set-Enable Registers */ - __IOM uint32_t ICENABLER[32]; /*!< \brief Offset: 0x180 (R/W) Interrupt Clear-Enable Registers */ - __IOM uint32_t ISPENDR[32]; /*!< \brief Offset: 0x200 (R/W) Interrupt Set-Pending Registers */ - __IOM uint32_t ICPENDR[32]; /*!< \brief Offset: 0x280 (R/W) Interrupt Clear-Pending Registers */ - __IOM uint32_t ISACTIVER[32]; /*!< \brief Offset: 0x300 (R/W) Interrupt Set-Active Registers */ - __IOM uint32_t ICACTIVER[32]; /*!< \brief Offset: 0x380 (R/W) Interrupt Clear-Active Registers */ - __IOM uint32_t IPRIORITYR[255]; /*!< \brief Offset: 0x400 (R/W) Interrupt Priority 
Registers */ - RESERVED(6, uint32_t) - __IOM uint32_t ITARGETSR[255]; /*!< \brief Offset: 0x800 (R/W) Interrupt Targets Registers */ - RESERVED(7, uint32_t) - __IOM uint32_t ICFGR[64]; /*!< \brief Offset: 0xC00 (R/W) Interrupt Configuration Registers */ - __IOM uint32_t IGRPMODR[32]; /*!< \brief Offset: 0xD00 (R/W) Interrupt Group Modifier Registers */ - RESERVED(8[32], uint32_t) - __IOM uint32_t NSACR[64]; /*!< \brief Offset: 0xE00 (R/W) Non-secure Access Control Registers */ - __OM uint32_t SGIR; /*!< \brief Offset: 0xF00 ( /W) Software Generated Interrupt Register */ - RESERVED(9[3], uint32_t) - __IOM uint32_t CPENDSGIR[4]; /*!< \brief Offset: 0xF10 (R/W) SGI Clear-Pending Registers */ - __IOM uint32_t SPENDSGIR[4]; /*!< \brief Offset: 0xF20 (R/W) SGI Set-Pending Registers */ - RESERVED(10[5236], uint32_t) - __IOM uint64_t IROUTER[988]; /*!< \brief Offset: 0x6100(R/W) Interrupt Routing Registers */ -} GICDistributor_Type; - -#define GICDistributor ((GICDistributor_Type *) GIC_DISTRIBUTOR_BASE ) /*!< \brief GIC Distributor register set access pointer */ - -/* GICDistributor CTLR Register */ -#define GICDistributor_CTLR_EnableGrp0_Pos 0U /*!< GICDistributor CTLR: EnableGrp0 Position */ -#define GICDistributor_CTLR_EnableGrp0_Msk (0x1U /*<< GICDistributor_CTLR_EnableGrp0_Pos*/) /*!< GICDistributor CTLR: EnableGrp0 Mask */ -#define GICDistributor_CTLR_EnableGrp0(x) (((uint32_t)(((uint32_t)(x)) /*<< GICDistributor_CTLR_EnableGrp0_Pos*/)) & GICDistributor_CTLR_EnableGrp0_Msk) - -#define GICDistributor_CTLR_EnableGrp1_Pos 1U /*!< GICDistributor CTLR: EnableGrp1 Position */ -#define GICDistributor_CTLR_EnableGrp1_Msk (0x1U << GICDistributor_CTLR_EnableGrp1_Pos) /*!< GICDistributor CTLR: EnableGrp1 Mask */ -#define GICDistributor_CTLR_EnableGrp1(x) (((uint32_t)(((uint32_t)(x)) << GICDistributor_CTLR_EnableGrp1_Pos)) & GICDistributor_CTLR_EnableGrp1_Msk) - -#define GICDistributor_CTLR_ARE_Pos 4U /*!< GICDistributor CTLR: ARE Position */ -#define GICDistributor_CTLR_ARE_Msk (0x1U << GICDistributor_CTLR_ARE_Pos) /*!< GICDistributor CTLR: ARE Mask */ -#define GICDistributor_CTLR_ARE(x) (((uint32_t)(((uint32_t)(x)) << GICDistributor_CTLR_ARE_Pos)) & GICDistributor_CTLR_ARE_Msk) - -#define GICDistributor_CTLR_DC_Pos 6U /*!< GICDistributor CTLR: DC Position */ -#define GICDistributor_CTLR_DC_Msk (0x1U << GICDistributor_CTLR_DC_Pos) /*!< GICDistributor CTLR: DC Mask */ -#define GICDistributor_CTLR_DC(x) (((uint32_t)(((uint32_t)(x)) << GICDistributor_CTLR_DC_Pos)) & GICDistributor_CTLR_DC_Msk) - -#define GICDistributor_CTLR_EINWF_Pos 7U /*!< GICDistributor CTLR: EINWF Position */ -#define GICDistributor_CTLR_EINWF_Msk (0x1U << GICDistributor_CTLR_EINWF_Pos) /*!< GICDistributor CTLR: EINWF Mask */ -#define GICDistributor_CTLR_EINWF(x) (((uint32_t)(((uint32_t)(x)) << GICDistributor_CTLR_EINWF_Pos)) & GICDistributor_CTLR_EINWF_Msk) - -#define GICDistributor_CTLR_RWP_Pos 31U /*!< GICDistributor CTLR: RWP Position */ -#define GICDistributor_CTLR_RWP_Msk (0x1U << GICDistributor_CTLR_RWP_Pos) /*!< GICDistributor CTLR: RWP Mask */ -#define GICDistributor_CTLR_RWP(x) (((uint32_t)(((uint32_t)(x)) << GICDistributor_CTLR_RWP_Pos)) & GICDistributor_CTLR_RWP_Msk) - -/* GICDistributor TYPER Register */ -#define GICDistributor_TYPER_ITLinesNumber_Pos 0U /*!< GICDistributor TYPER: ITLinesNumber Position */ -#define GICDistributor_TYPER_ITLinesNumber_Msk (0x1FU /*<< GICDistributor_TYPER_ITLinesNumber_Pos*/) /*!< GICDistributor TYPER: ITLinesNumber Mask */ -#define GICDistributor_TYPER_ITLinesNumber(x) 
(((uint32_t)(((uint32_t)(x)) /*<< GICDistributor_TYPER_ITLinesNumber_Pos*/)) & GICDistributor_CTLR_ITLinesNumber_Msk) - -#define GICDistributor_TYPER_CPUNumber_Pos 5U /*!< GICDistributor TYPER: CPUNumber Position */ -#define GICDistributor_TYPER_CPUNumber_Msk (0x7U << GICDistributor_TYPER_CPUNumber_Pos) /*!< GICDistributor TYPER: CPUNumber Mask */ -#define GICDistributor_TYPER_CPUNumber(x) (((uint32_t)(((uint32_t)(x)) << GICDistributor_TYPER_CPUNumber_Pos)) & GICDistributor_TYPER_CPUNumber_Msk) - -#define GICDistributor_TYPER_SecurityExtn_Pos 10U /*!< GICDistributor TYPER: SecurityExtn Position */ -#define GICDistributor_TYPER_SecurityExtn_Msk (0x1U << GICDistributor_TYPER_SecurityExtn_Pos) /*!< GICDistributor TYPER: SecurityExtn Mask */ -#define GICDistributor_TYPER_SecurityExtn(x) (((uint32_t)(((uint32_t)(x)) << GICDistributor_TYPER_SecurityExtn_Pos)) & GICDistributor_TYPER_SecurityExtn_Msk) - -#define GICDistributor_TYPER_LSPI_Pos 11U /*!< GICDistributor TYPER: LSPI Position */ -#define GICDistributor_TYPER_LSPI_Msk (0x1FU << GICDistributor_TYPER_LSPI_Pos) /*!< GICDistributor TYPER: LSPI Mask */ -#define GICDistributor_TYPER_LSPI(x) (((uint32_t)(((uint32_t)(x)) << GICDistributor_TYPER_LSPI_Pos)) & GICDistributor_TYPER_LSPI_Msk) - -/* GICDistributor IIDR Register */ -#define GICDistributor_IIDR_Implementer_Pos 0U /*!< GICDistributor IIDR: Implementer Position */ -#define GICDistributor_IIDR_Implementer_Msk (0xFFFU /*<< GICDistributor_IIDR_Implementer_Pos*/) /*!< GICDistributor IIDR: Implementer Mask */ -#define GICDistributor_IIDR_Implementer(x) (((uint32_t)(((uint32_t)(x)) /*<< GICDistributor_IIDR_Implementer_Pos*/)) & GICDistributor_IIDR_Implementer_Msk) - -#define GICDistributor_IIDR_Revision_Pos 12U /*!< GICDistributor IIDR: Revision Position */ -#define GICDistributor_IIDR_Revision_Msk (0xFU << GICDistributor_IIDR_Revision_Pos) /*!< GICDistributor IIDR: Revision Mask */ -#define GICDistributor_IIDR_Revision(x) (((uint32_t)(((uint32_t)(x)) << GICDistributor_IIDR_Revision_Pos)) & GICDistributor_IIDR_Revision_Msk) - -#define GICDistributor_IIDR_Variant_Pos 16U /*!< GICDistributor IIDR: Variant Position */ -#define GICDistributor_IIDR_Variant_Msk (0xFU << GICDistributor_IIDR_Variant_Pos) /*!< GICDistributor IIDR: Variant Mask */ -#define GICDistributor_IIDR_Variant(x) (((uint32_t)(((uint32_t)(x)) << GICDistributor_IIDR_Variant_Pos)) & GICDistributor_IIDR_Variant_Msk) - -#define GICDistributor_IIDR_ProductID_Pos 24U /*!< GICDistributor IIDR: ProductID Position */ -#define GICDistributor_IIDR_ProductID_Msk (0xFFU << GICDistributor_IIDR_ProductID_Pos) /*!< GICDistributor IIDR: ProductID Mask */ -#define GICDistributor_IIDR_ProductID(x) (((uint32_t)(((uint32_t)(x)) << GICDistributor_IIDR_ProductID_Pos)) & GICDistributor_IIDR_ProductID_Msk) - -/* GICDistributor STATUSR Register */ -#define GICDistributor_STATUSR_RRD_Pos 0U /*!< GICDistributor STATUSR: RRD Position */ -#define GICDistributor_STATUSR_RRD_Msk (0x1U /*<< GICDistributor_STATUSR_RRD_Pos*/) /*!< GICDistributor STATUSR: RRD Mask */ -#define GICDistributor_STATUSR_RRD(x) (((uint32_t)(((uint32_t)(x)) /*<< GICDistributor_STATUSR_RRD_Pos*/)) & GICDistributor_STATUSR_RRD_Msk) - -#define GICDistributor_STATUSR_WRD_Pos 1U /*!< GICDistributor STATUSR: WRD Position */ -#define GICDistributor_STATUSR_WRD_Msk (0x1U << GICDistributor_STATUSR_WRD_Pos) /*!< GICDistributor STATUSR: WRD Mask */ -#define GICDistributor_STATUSR_WRD(x) (((uint32_t)(((uint32_t)(x)) << GICDistributor_STATUSR_WRD_Pos)) & GICDistributor_STATUSR_WRD_Msk) - -#define 
GICDistributor_STATUSR_RWOD_Pos 2U /*!< GICDistributor STATUSR: RWOD Position */ -#define GICDistributor_STATUSR_RWOD_Msk (0x1U << GICDistributor_STATUSR_RWOD_Pos) /*!< GICDistributor STATUSR: RWOD Mask */ -#define GICDistributor_STATUSR_RWOD(x) (((uint32_t)(((uint32_t)(x)) << GICDistributor_STATUSR_RWOD_Pos)) & GICDistributor_STATUSR_RWOD_Msk) - -#define GICDistributor_STATUSR_WROD_Pos 3U /*!< GICDistributor STATUSR: WROD Position */ -#define GICDistributor_STATUSR_WROD_Msk (0x1U << GICDistributor_STATUSR_WROD_Pos) /*!< GICDistributor STATUSR: WROD Mask */ -#define GICDistributor_STATUSR_WROD(x) (((uint32_t)(((uint32_t)(x)) << GICDistributor_STATUSR_WROD_Pos)) & GICDistributor_STATUSR_WROD_Msk) - -/* GICDistributor SETSPI_NSR Register */ -#define GICDistributor_SETSPI_NSR_INTID_Pos 0U /*!< GICDistributor SETSPI_NSR: INTID Position */ -#define GICDistributor_SETSPI_NSR_INTID_Msk (0x3FFU /*<< GICDistributor_SETSPI_NSR_INTID_Pos*/) /*!< GICDistributor SETSPI_NSR: INTID Mask */ -#define GICDistributor_SETSPI_NSR_INTID(x) (((uint32_t)(((uint32_t)(x)) /*<< GICDistributor_SETSPI_NSR_INTID_Pos*/)) & GICDistributor_SETSPI_NSR_INTID_Msk) - -/* GICDistributor CLRSPI_NSR Register */ -#define GICDistributor_CLRSPI_NSR_INTID_Pos 0U /*!< GICDistributor CLRSPI_NSR: INTID Position */ -#define GICDistributor_CLRSPI_NSR_INTID_Msk (0x3FFU /*<< GICDistributor_CLRSPI_NSR_INTID_Pos*/) /*!< GICDistributor CLRSPI_NSR: INTID Mask */ -#define GICDistributor_CLRSPI_NSR_INTID(x) (((uint32_t)(((uint32_t)(x)) /*<< GICDistributor_CLRSPI_NSR_INTID_Pos*/)) & GICDistributor_CLRSPI_NSR_INTID_Msk) - -/* GICDistributor SETSPI_SR Register */ -#define GICDistributor_SETSPI_SR_INTID_Pos 0U /*!< GICDistributor SETSPI_SR: INTID Position */ -#define GICDistributor_SETSPI_SR_INTID_Msk (0x3FFU /*<< GICDistributor_SETSPI_SR_INTID_Pos*/) /*!< GICDistributor SETSPI_SR: INTID Mask */ -#define GICDistributor_SETSPI_SR_INTID(x) (((uint32_t)(((uint32_t)(x)) /*<< GICDistributor_SETSPI_SR_INTID_Pos*/)) & GICDistributor_SETSPI_SR_INTID_Msk) - -/* GICDistributor CLRSPI_SR Register */ -#define GICDistributor_CLRSPI_SR_INTID_Pos 0U /*!< GICDistributor CLRSPI_SR: INTID Position */ -#define GICDistributor_CLRSPI_SR_INTID_Msk (0x3FFU /*<< GICDistributor_CLRSPI_SR_INTID_Pos*/) /*!< GICDistributor CLRSPI_SR: INTID Mask */ -#define GICDistributor_CLRSPI_SR_INTID(x) (((uint32_t)(((uint32_t)(x)) /*<< GICDistributor_CLRSPI_SR_INTID_Pos*/)) & GICDistributor_CLRSPI_SR_INTID_Msk) - -/* GICDistributor ITARGETSR Register */ -#define GICDistributor_ITARGETSR_CPU0_Pos 0U /*!< GICDistributor ITARGETSR: CPU0 Position */ -#define GICDistributor_ITARGETSR_CPU0_Msk (0x1U /*<< GICDistributor_ITARGETSR_CPU0_Pos*/) /*!< GICDistributor ITARGETSR: CPU0 Mask */ -#define GICDistributor_ITARGETSR_CPU0(x) (((uint8_t)(((uint8_t)(x)) /*<< GICDistributor_ITARGETSR_CPU0_Pos*/)) & GICDistributor_ITARGETSR_CPU0_Msk) - -#define GICDistributor_ITARGETSR_CPU1_Pos 1U /*!< GICDistributor ITARGETSR: CPU1 Position */ -#define GICDistributor_ITARGETSR_CPU1_Msk (0x1U << GICDistributor_ITARGETSR_CPU1_Pos) /*!< GICDistributor ITARGETSR: CPU1 Mask */ -#define GICDistributor_ITARGETSR_CPU1(x) (((uint8_t)(((uint8_t)(x)) << GICDistributor_ITARGETSR_CPU1_Pos)) & GICDistributor_ITARGETSR_CPU1_Msk) - -#define GICDistributor_ITARGETSR_CPU2_Pos 2U /*!< GICDistributor ITARGETSR: CPU2 Position */ -#define GICDistributor_ITARGETSR_CPU2_Msk (0x1U << GICDistributor_ITARGETSR_CPU2_Pos) /*!< GICDistributor ITARGETSR: CPU2 Mask */ -#define GICDistributor_ITARGETSR_CPU2(x) (((uint8_t)(((uint8_t)(x)) << 
GICDistributor_ITARGETSR_CPU2_Pos)) & GICDistributor_ITARGETSR_CPU2_Msk) - -#define GICDistributor_ITARGETSR_CPU3_Pos 3U /*!< GICDistributor ITARGETSR: CPU3 Position */ -#define GICDistributor_ITARGETSR_CPU3_Msk (0x1U << GICDistributor_ITARGETSR_CPU3_Pos) /*!< GICDistributor ITARGETSR: CPU3 Mask */ -#define GICDistributor_ITARGETSR_CPU3(x) (((uint8_t)(((uint8_t)(x)) << GICDistributor_ITARGETSR_CPU3_Pos)) & GICDistributor_ITARGETSR_CPU3_Msk) - -#define GICDistributor_ITARGETSR_CPU4_Pos 4U /*!< GICDistributor ITARGETSR: CPU4 Position */ -#define GICDistributor_ITARGETSR_CPU4_Msk (0x1U << GICDistributor_ITARGETSR_CPU4_Pos) /*!< GICDistributor ITARGETSR: CPU4 Mask */ -#define GICDistributor_ITARGETSR_CPU4(x) (((uint8_t)(((uint8_t)(x)) << GICDistributor_ITARGETSR_CPU4_Pos)) & GICDistributor_ITARGETSR_CPU4_Msk) - -#define GICDistributor_ITARGETSR_CPU5_Pos 5U /*!< GICDistributor ITARGETSR: CPU5 Position */ -#define GICDistributor_ITARGETSR_CPU5_Msk (0x1U << GICDistributor_ITARGETSR_CPU5_Pos) /*!< GICDistributor ITARGETSR: CPU5 Mask */ -#define GICDistributor_ITARGETSR_CPU5(x) (((uint8_t)(((uint8_t)(x)) << GICDistributor_ITARGETSR_CPU5_Pos)) & GICDistributor_ITARGETSR_CPU5_Msk) - -#define GICDistributor_ITARGETSR_CPU6_Pos 6U /*!< GICDistributor ITARGETSR: CPU6 Position */ -#define GICDistributor_ITARGETSR_CPU6_Msk (0x1U << GICDistributor_ITARGETSR_CPU6_Pos) /*!< GICDistributor ITARGETSR: CPU6 Mask */ -#define GICDistributor_ITARGETSR_CPU6(x) (((uint8_t)(((uint8_t)(x)) << GICDistributor_ITARGETSR_CPU6_Pos)) & GICDistributor_ITARGETSR_CPU6_Msk) - -#define GICDistributor_ITARGETSR_CPU7_Pos 7U /*!< GICDistributor ITARGETSR: CPU7 Position */ -#define GICDistributor_ITARGETSR_CPU7_Msk (0x1U << GICDistributor_ITARGETSR_CPU7_Pos) /*!< GICDistributor ITARGETSR: CPU7 Mask */ -#define GICDistributor_ITARGETSR_CPU7(x) (((uint8_t)(((uint8_t)(x)) << GICDistributor_ITARGETSR_CPU7_Pos)) & GICDistributor_ITARGETSR_CPU7_Msk) - -/* GICDistributor SGIR Register */ -#define GICDistributor_SGIR_INTID_Pos 0U /*!< GICDistributor SGIR: INTID Position */ -#define GICDistributor_SGIR_INTID_Msk (0x7U /*<< GICDistributor_SGIR_INTID_Pos*/) /*!< GICDistributor SGIR: INTID Mask */ -#define GICDistributor_SGIR_INTID(x) (((uint32_t)(((uint32_t)(x)) /*<< GICDistributor_SGIR_INTID_Pos*/)) & GICDistributor_SGIR_INTID_Msk) - -#define GICDistributor_SGIR_NSATT_Pos 15U /*!< GICDistributor SGIR: NSATT Position */ -#define GICDistributor_SGIR_NSATT_Msk (0x1U << GICDistributor_SGIR_NSATT_Pos) /*!< GICDistributor SGIR: NSATT Mask */ -#define GICDistributor_SGIR_NSATT(x) (((uint32_t)(((uint32_t)(x)) << GICDistributor_SGIR_NSATT_Pos)) & GICDistributor_SGIR_NSATT_Msk) - -#define GICDistributor_SGIR_CPUTargetList_Pos 16U /*!< GICDistributor SGIR: CPUTargetList Position */ -#define GICDistributor_SGIR_CPUTargetList_Msk (0xFFU << GICDistributor_SGIR_CPUTargetList_Pos) /*!< GICDistributor SGIR: CPUTargetList Mask */ -#define GICDistributor_SGIR_CPUTargetList(x) (((uint32_t)(((uint32_t)(x)) << GICDistributor_SGIR_CPUTargetList_Pos)) & GICDistributor_SGIR_CPUTargetList_Msk) - -#define GICDistributor_SGIR_TargetFilterList_Pos 24U /*!< GICDistributor SGIR: TargetFilterList Position */ -#define GICDistributor_SGIR_TargetFilterList_Msk (0x3U << GICDistributor_SGIR_TargetFilterList_Pos) /*!< GICDistributor SGIR: TargetFilterList Mask */ -#define GICDistributor_SGIR_TargetFilterList(x) (((uint32_t)(((uint32_t)(x)) << GICDistributor_SGIR_TargetFilterList_Pos)) & GICDistributor_SGIR_TargetFilterList_Msk) - -/* GICDistributor IROUTER Register */ -#define 
GICDistributor_IROUTER_Aff0_Pos 0UL /*!< GICDistributor IROUTER: Aff0 Position */ -#define GICDistributor_IROUTER_Aff0_Msk (0xFFUL /*<< GICDistributor_IROUTER_Aff0_Pos*/) /*!< GICDistributor IROUTER: Aff0 Mask */ -#define GICDistributor_IROUTER_Aff0(x) (((uint64_t)(((uint64_t)(x)) /*<< GICDistributor_IROUTER_Aff0_Pos*/)) & GICDistributor_IROUTER_Aff0_Msk) - -#define GICDistributor_IROUTER_Aff1_Pos 8UL /*!< GICDistributor IROUTER: Aff1 Position */ -#define GICDistributor_IROUTER_Aff1_Msk (0xFFUL << GICDistributor_IROUTER_Aff1_Pos) /*!< GICDistributor IROUTER: Aff1 Mask */ -#define GICDistributor_IROUTER_Aff1(x) (((uint64_t)(((uint64_t)(x)) << GICDistributor_IROUTER_Aff1_Pos)) & GICDistributor_IROUTER_Aff1_Msk) - -#define GICDistributor_IROUTER_Aff2_Pos 16UL /*!< GICDistributor IROUTER: Aff2 Position */ -#define GICDistributor_IROUTER_Aff2_Msk (0xFFUL << GICDistributor_IROUTER_Aff2_Pos) /*!< GICDistributor IROUTER: Aff2 Mask */ -#define GICDistributor_IROUTER_Aff2(x) (((uint64_t)(((uint64_t)(x)) << GICDistributor_IROUTER_Aff2_Pos)) & GICDistributor_IROUTER_Aff2_Msk) - -#define GICDistributor_IROUTER_IRM_Pos 31UL /*!< GICDistributor IROUTER: IRM Position */ -#define GICDistributor_IROUTER_IRM_Msk (0xFFUL << GICDistributor_IROUTER_IRM_Pos) /*!< GICDistributor IROUTER: IRM Mask */ -#define GICDistributor_IROUTER_IRM(x) (((uint64_t)(((uint64_t)(x)) << GICDistributor_IROUTER_IRM_Pos)) & GICDistributor_IROUTER_IRM_Msk) - -#define GICDistributor_IROUTER_Aff3_Pos 32UL /*!< GICDistributor IROUTER: Aff3 Position */ -#define GICDistributor_IROUTER_Aff3_Msk (0xFFUL << GICDistributor_IROUTER_Aff3_Pos) /*!< GICDistributor IROUTER: Aff3 Mask */ -#define GICDistributor_IROUTER_Aff3(x) (((uint64_t)(((uint64_t)(x)) << GICDistributor_IROUTER_Aff3_Pos)) & GICDistributor_IROUTER_Aff3_Msk) - - - -/** \brief Structure type to access the Generic Interrupt Controller Interface (GICC) -*/ -typedef struct -{ - __IOM uint32_t CTLR; /*!< \brief Offset: 0x000 (R/W) CPU Interface Control Register */ - __IOM uint32_t PMR; /*!< \brief Offset: 0x004 (R/W) Interrupt Priority Mask Register */ - __IOM uint32_t BPR; /*!< \brief Offset: 0x008 (R/W) Binary Point Register */ - __IM uint32_t IAR; /*!< \brief Offset: 0x00C (R/ ) Interrupt Acknowledge Register */ - __OM uint32_t EOIR; /*!< \brief Offset: 0x010 ( /W) End Of Interrupt Register */ - __IM uint32_t RPR; /*!< \brief Offset: 0x014 (R/ ) Running Priority Register */ - __IM uint32_t HPPIR; /*!< \brief Offset: 0x018 (R/ ) Highest Priority Pending Interrupt Register */ - __IOM uint32_t ABPR; /*!< \brief Offset: 0x01C (R/W) Aliased Binary Point Register */ - __IM uint32_t AIAR; /*!< \brief Offset: 0x020 (R/ ) Aliased Interrupt Acknowledge Register */ - __OM uint32_t AEOIR; /*!< \brief Offset: 0x024 ( /W) Aliased End Of Interrupt Register */ - __IM uint32_t AHPPIR; /*!< \brief Offset: 0x028 (R/ ) Aliased Highest Priority Pending Interrupt Register */ - __IOM uint32_t STATUSR; /*!< \brief Offset: 0x02C (R/W) Error Reporting Status Register, optional */ - RESERVED(1[40], uint32_t) - __IOM uint32_t APR[4]; /*!< \brief Offset: 0x0D0 (R/W) Active Priority Register */ - __IOM uint32_t NSAPR[4]; /*!< \brief Offset: 0x0E0 (R/W) Non-secure Active Priority Register */ - RESERVED(2[3], uint32_t) - __IM uint32_t IIDR; /*!< \brief Offset: 0x0FC (R/ ) CPU Interface Identification Register */ - RESERVED(3[960], uint32_t) - __OM uint32_t DIR; /*!< \brief Offset: 0x1000( /W) Deactivate Interrupt Register */ -} GICInterface_Type; - -#define GICInterface ((GICInterface_Type *) GIC_INTERFACE_BASE ) 
/*!< \brief GIC Interface register set access pointer */ - -/* GICInterface CTLR Register */ -#define GICInterface_CTLR_Enable_Pos 0U /*!< PTIM CTLR: Enable Position */ -#define GICInterface_CTLR_Enable_Msk (0x1U /*<< GICInterface_CTLR_Enable_Pos*/) /*!< PTIM CTLR: Enable Mask */ -#define GICInterface_CTLR_Enable(x) (((uint32_t)(((uint32_t)(x)) /*<< GICInterface_CTLR_Enable_Pos*/)) & GICInterface_CTLR_Enable_Msk) - -/* GICInterface PMR Register */ -#define GICInterface_PMR_Priority_Pos 0U /*!< PTIM PMR: Priority Position */ -#define GICInterface_PMR_Priority_Msk (0xFFU /*<< GICInterface_PMR_Priority_Pos*/) /*!< PTIM PMR: Priority Mask */ -#define GICInterface_PMR_Priority(x) (((uint32_t)(((uint32_t)(x)) /*<< GICInterface_PMR_Priority_Pos*/)) & GICInterface_PMR_Priority_Msk) - -/* GICInterface BPR Register */ -#define GICInterface_BPR_Binary_Point_Pos 0U /*!< PTIM BPR: Binary_Point Position */ -#define GICInterface_BPR_Binary_Point_Msk (0x7U /*<< GICInterface_BPR_Binary_Point_Pos*/) /*!< PTIM BPR: Binary_Point Mask */ -#define GICInterface_BPR_Binary_Point(x) (((uint32_t)(((uint32_t)(x)) /*<< GICInterface_BPR_Binary_Point_Pos*/)) & GICInterface_BPR_Binary_Point_Msk) - -/* GICInterface IAR Register */ -#define GICInterface_IAR_INTID_Pos 0U /*!< PTIM IAR: INTID Position */ -#define GICInterface_IAR_INTID_Msk (0xFFFFFFU /*<< GICInterface_IAR_INTID_Pos*/) /*!< PTIM IAR: INTID Mask */ -#define GICInterface_IAR_INTID(x) (((uint32_t)(((uint32_t)(x)) /*<< GICInterface_IAR_INTID_Pos*/)) & GICInterface_IAR_INTID_Msk) - -/* GICInterface EOIR Register */ -#define GICInterface_EOIR_INTID_Pos 0U /*!< PTIM EOIR: INTID Position */ -#define GICInterface_EOIR_INTID_Msk (0xFFFFFFU /*<< GICInterface_EOIR_INTID_Pos*/) /*!< PTIM EOIR: INTID Mask */ -#define GICInterface_EOIR_INTID(x) (((uint32_t)(((uint32_t)(x)) /*<< GICInterface_EOIR_INTID_Pos*/)) & GICInterface_EOIR_INTID_Msk) - -/* GICInterface RPR Register */ -#define GICInterface_RPR_INTID_Pos 0U /*!< PTIM RPR: INTID Position */ -#define GICInterface_RPR_INTID_Msk (0xFFU /*<< GICInterface_RPR_INTID_Pos*/) /*!< PTIM RPR: INTID Mask */ -#define GICInterface_RPR_INTID(x) (((uint32_t)(((uint32_t)(x)) /*<< GICInterface_RPR_INTID_Pos*/)) & GICInterface_RPR_INTID_Msk) - -/* GICInterface HPPIR Register */ -#define GICInterface_HPPIR_INTID_Pos 0U /*!< PTIM HPPIR: INTID Position */ -#define GICInterface_HPPIR_INTID_Msk (0xFFFFFFU /*<< GICInterface_HPPIR_INTID_Pos*/) /*!< PTIM HPPIR: INTID Mask */ -#define GICInterface_HPPIR_INTID(x) (((uint32_t)(((uint32_t)(x)) /*<< GICInterface_HPPIR_INTID_Pos*/)) & GICInterface_HPPIR_INTID_Msk) - -/* GICInterface ABPR Register */ -#define GICInterface_ABPR_Binary_Point_Pos 0U /*!< PTIM ABPR: Binary_Point Position */ -#define GICInterface_ABPR_Binary_Point_Msk (0x7U /*<< GICInterface_ABPR_Binary_Point_Pos*/) /*!< PTIM ABPR: Binary_Point Mask */ -#define GICInterface_ABPR_Binary_Point(x) (((uint32_t)(((uint32_t)(x)) /*<< GICInterface_ABPR_Binary_Point_Pos*/)) & GICInterface_ABPR_Binary_Point_Msk) - -/* GICInterface AIAR Register */ -#define GICInterface_AIAR_INTID_Pos 0U /*!< PTIM AIAR: INTID Position */ -#define GICInterface_AIAR_INTID_Msk (0xFFFFFFU /*<< GICInterface_AIAR_INTID_Pos*/) /*!< PTIM AIAR: INTID Mask */ -#define GICInterface_AIAR_INTID(x) (((uint32_t)(((uint32_t)(x)) /*<< GICInterface_AIAR_INTID_Pos*/)) & GICInterface_AIAR_INTID_Msk) - -/* GICInterface AEOIR Register */ -#define GICInterface_AEOIR_INTID_Pos 0U /*!< PTIM AEOIR: INTID Position */ -#define GICInterface_AEOIR_INTID_Msk (0xFFFFFFU /*<< 
GICInterface_AEOIR_INTID_Pos*/) /*!< PTIM AEOIR: INTID Mask */ -#define GICInterface_AEOIR_INTID(x) (((uint32_t)(((uint32_t)(x)) /*<< GICInterface_AEOIR_INTID_Pos*/)) & GICInterface_AEOIR_INTID_Msk) - -/* GICInterface AHPPIR Register */ -#define GICInterface_AHPPIR_INTID_Pos 0U /*!< PTIM AHPPIR: INTID Position */ -#define GICInterface_AHPPIR_INTID_Msk (0xFFFFFFU /*<< GICInterface_AHPPIR_INTID_Pos*/) /*!< PTIM AHPPIR: INTID Mask */ -#define GICInterface_AHPPIR_INTID(x) (((uint32_t)(((uint32_t)(x)) /*<< GICInterface_AHPPIR_INTID_Pos*/)) & GICInterface_AHPPIR_INTID_Msk) - -/* GICInterface STATUSR Register */ -#define GICInterface_STATUSR_RRD_Pos 0U /*!< GICInterface STATUSR: RRD Position */ -#define GICInterface_STATUSR_RRD_Msk (0x1U /*<< GICInterface_STATUSR_RRD_Pos*/) /*!< GICInterface STATUSR: RRD Mask */ -#define GICInterface_STATUSR_RRD(x) (((uint32_t)(((uint32_t)(x)) /*<< GICInterface_STATUSR_RRD_Pos*/)) & GICInterface_STATUSR_RRD_Msk) - -#define GICInterface_STATUSR_WRD_Pos 1U /*!< GICInterface STATUSR: WRD Position */ -#define GICInterface_STATUSR_WRD_Msk (0x1U << GICInterface_STATUSR_WRD_Pos) /*!< GICInterface STATUSR: WRD Mask */ -#define GICInterface_STATUSR_WRD(x) (((uint32_t)(((uint32_t)(x)) << GICInterface_STATUSR_WRD_Pos)) & GICInterface_STATUSR_WRD_Msk) - -#define GICInterface_STATUSR_RWOD_Pos 2U /*!< GICInterface STATUSR: RWOD Position */ -#define GICInterface_STATUSR_RWOD_Msk (0x1U << GICInterface_STATUSR_RWOD_Pos) /*!< GICInterface STATUSR: RWOD Mask */ -#define GICInterface_STATUSR_RWOD(x) (((uint32_t)(((uint32_t)(x)) << GICInterface_STATUSR_RWOD_Pos)) & GICInterface_STATUSR_RWOD_Msk) - -#define GICInterface_STATUSR_WROD_Pos 3U /*!< GICInterface STATUSR: WROD Position */ -#define GICInterface_STATUSR_WROD_Msk (0x1U << GICInterface_STATUSR_WROD_Pos) /*!< GICInterface STATUSR: WROD Mask */ -#define GICInterface_STATUSR_WROD(x) (((uint32_t)(((uint32_t)(x)) << GICInterface_STATUSR_WROD_Pos)) & GICInterface_STATUSR_WROD_Msk) - -#define GICInterface_STATUSR_ASV_Pos 4U /*!< GICInterface STATUSR: ASV Position */ -#define GICInterface_STATUSR_ASV_Msk (0x1U << GICInterface_STATUSR_ASV_Pos) /*!< GICInterface STATUSR: ASV Mask */ -#define GICInterface_STATUSR_ASV(x) (((uint32_t)(((uint32_t)(x)) << GICInterface_STATUSR_ASV_Pos)) & GICInterface_STATUSR_ASV_Msk) - -/* GICInterface IIDR Register */ -#define GICInterface_IIDR_Implementer_Pos 0U /*!< GICInterface IIDR: Implementer Position */ -#define GICInterface_IIDR_Implementer_Msk (0xFFFU /*<< GICInterface_IIDR_Implementer_Pos*/) /*!< GICInterface IIDR: Implementer Mask */ -#define GICInterface_IIDR_Implementer(x) (((uint32_t)(((uint32_t)(x)) /*<< GICInterface_IIDR_Implementer_Pos*/)) & GICInterface_IIDR_Implementer_Msk) - -#define GICInterface_IIDR_Revision_Pos 12U /*!< GICInterface IIDR: Revision Position */ -#define GICInterface_IIDR_Revision_Msk (0xFU << GICInterface_IIDR_Revision_Pos) /*!< GICInterface IIDR: Revision Mask */ -#define GICInterface_IIDR_Revision(x) (((uint32_t)(((uint32_t)(x)) << GICInterface_IIDR_Revision_Pos)) & GICInterface_IIDR_Revision_Msk) - -#define GICInterface_IIDR_Arch_version_Pos 16U /*!< GICInterface IIDR: Arch_version Position */ -#define GICInterface_IIDR_Arch_version_Msk (0xFU << GICInterface_IIDR_Arch_version_Pos) /*!< GICInterface IIDR: Arch_version Mask */ -#define GICInterface_IIDR_Arch_version(x) (((uint32_t)(((uint32_t)(x)) << GICInterface_IIDR_Arch_version_Pos)) & GICInterface_IIDR_Arch_version_Msk) - -#define GICInterface_IIDR_ProductID_Pos 20U /*!< GICInterface IIDR: ProductID Position */ 
-#define GICInterface_IIDR_ProductID_Msk (0xFFFU << GICInterface_IIDR_ProductID_Pos) /*!< GICInterface IIDR: ProductID Mask */ -#define GICInterface_IIDR_ProductID(x) (((uint32_t)(((uint32_t)(x)) << GICInterface_IIDR_ProductID_Pos)) & GICInterface_IIDR_ProductID_Msk) - -/* GICInterface DIR Register */ -#define GICInterface_DIR_INTID_Pos 0U /*!< PTIM DIR: INTID Position */ -#define GICInterface_DIR_INTID_Msk (0xFFFFFFU /*<< GICInterface_DIR_INTID_Pos*/) /*!< PTIM DIR: INTID Mask */ -#define GICInterface_DIR_INTID(x) (((uint32_t)(((uint32_t)(x)) /*<< GICInterface_DIR_INTID_Pos*/)) & GICInterface_DIR_INTID_Msk) + #include "gic_v20.h" #endif /* (__GIC_PRESENT == 1U) || defined(DOXYGEN) */ #if (__TIM_PRESENT == 1U) || defined(DOXYGEN) -#if ((__CORTEX_A == 5U) || (__CORTEX_A == 9U)) || defined(DOXYGEN) -/** \brief Structure type to access the Private Timer -*/ -typedef struct -{ - __IOM uint32_t LOAD; //!< \brief Offset: 0x000 (R/W) Private Timer Load Register - __IOM uint32_t COUNTER; //!< \brief Offset: 0x004 (R/W) Private Timer Counter Register - __IOM uint32_t CONTROL; //!< \brief Offset: 0x008 (R/W) Private Timer Control Register - __IOM uint32_t ISR; //!< \brief Offset: 0x00C (R/W) Private Timer Interrupt Status Register - RESERVED(0[4], uint32_t) - __IOM uint32_t WLOAD; //!< \brief Offset: 0x020 (R/W) Watchdog Load Register - __IOM uint32_t WCOUNTER; //!< \brief Offset: 0x024 (R/W) Watchdog Counter Register - __IOM uint32_t WCONTROL; //!< \brief Offset: 0x028 (R/W) Watchdog Control Register - __IOM uint32_t WISR; //!< \brief Offset: 0x02C (R/W) Watchdog Interrupt Status Register - __IOM uint32_t WRESET; //!< \brief Offset: 0x030 (R/W) Watchdog Reset Status Register - __OM uint32_t WDISABLE; //!< \brief Offset: 0x034 ( /W) Watchdog Disable Register -} Timer_Type; -#define PTIM ((Timer_Type *) TIMER_BASE ) /*!< \brief Timer register struct */ - -/* PTIM Control Register */ -#define PTIM_CONTROL_Enable_Pos 0U /*!< PTIM CONTROL: Enable Position */ -#define PTIM_CONTROL_Enable_Msk (0x1U /*<< PTIM_CONTROL_Enable_Pos*/) /*!< PTIM CONTROL: Enable Mask */ -#define PTIM_CONTROL_Enable(x) (((uint32_t)(((uint32_t)(x)) /*<< PTIM_CONTROL_Enable_Pos*/)) & PTIM_CONTROL_Enable_Msk) - -#define PTIM_CONTROL_AutoReload_Pos 1U /*!< PTIM CONTROL: Auto Reload Position */ -#define PTIM_CONTROL_AutoReload_Msk (0x1U << PTIM_CONTROL_AutoReload_Pos) /*!< PTIM CONTROL: Auto Reload Mask */ -#define PTIM_CONTROL_AutoReload(x) (((uint32_t)(((uint32_t)(x)) << PTIM_CONTROL_AutoReload_Pos)) & PTIM_CONTROL_AutoReload_Msk) - -#define PTIM_CONTROL_IRQenable_Pos 2U /*!< PTIM CONTROL: IRQ Enabel Position */ -#define PTIM_CONTROL_IRQenable_Msk (0x1U << PTIM_CONTROL_IRQenable_Pos) /*!< PTIM CONTROL: IRQ Enabel Mask */ -#define PTIM_CONTROL_IRQenable(x) (((uint32_t)(((uint32_t)(x)) << PTIM_CONTROL_IRQenable_Pos)) & PTIM_CONTROL_IRQenable_Msk) - -#define PTIM_CONTROL_Prescaler_Pos 8U /*!< PTIM CONTROL: Prescaler Position */ -#define PTIM_CONTROL_Prescaler_Msk (0xFFU << PTIM_CONTROL_Prescaler_Pos) /*!< PTIM CONTROL: Prescaler Mask */ -#define PTIM_CONTROL_Prescaler(x) (((uint32_t)(((uint32_t)(x)) << PTIM_CONTROL_Prescaler_Pos)) & PTIM_CONTROL_Prescaler_Msk) - -/* WCONTROL Watchdog Control Register */ -#define PTIM_WCONTROL_Enable_Pos 0U /*!< PTIM WCONTROL: Enable Position */ -#define PTIM_WCONTROL_Enable_Msk (0x1U /*<< PTIM_WCONTROL_Enable_Pos*/) /*!< PTIM WCONTROL: Enable Mask */ -#define PTIM_WCONTROL_Enable(x) (((uint32_t)(((uint32_t)(x)) /*<< PTIM_WCONTROL_Enable_Pos*/)) & PTIM_WCONTROL_Enable_Msk) - -#define 
PTIM_WCONTROL_AutoReload_Pos 1U /*!< PTIM WCONTROL: Auto Reload Position */ -#define PTIM_WCONTROL_AutoReload_Msk (0x1U << PTIM_WCONTROL_AutoReload_Pos) /*!< PTIM WCONTROL: Auto Reload Mask */ -#define PTIM_WCONTROL_AutoReload(x) (((uint32_t)(((uint32_t)(x)) << PTIM_WCONTROL_AutoReload_Pos)) & PTIM_WCONTROL_AutoReload_Msk) - -#define PTIM_WCONTROL_IRQenable_Pos 2U /*!< PTIM WCONTROL: IRQ Enable Position */ -#define PTIM_WCONTROL_IRQenable_Msk (0x1U << PTIM_WCONTROL_IRQenable_Pos) /*!< PTIM WCONTROL: IRQ Enable Mask */ -#define PTIM_WCONTROL_IRQenable(x) (((uint32_t)(((uint32_t)(x)) << PTIM_WCONTROL_IRQenable_Pos)) & PTIM_WCONTROL_IRQenable_Msk) - -#define PTIM_WCONTROL_Mode_Pos 3U /*!< PTIM WCONTROL: Watchdog Mode Position */ -#define PTIM_WCONTROL_Mode_Msk (0x1U << PTIM_WCONTROL_Mode_Pos) /*!< PTIM WCONTROL: Watchdog Mode Mask */ -#define PTIM_WCONTROL_Mode(x) (((uint32_t)(((uint32_t)(x)) << PTIM_WCONTROL_Mode_Pos)) & PTIM_WCONTROL_Mode_Msk) - -#define PTIM_WCONTROL_Presacler_Pos 8U /*!< PTIM WCONTROL: Prescaler Position */ -#define PTIM_WCONTROL_Presacler_Msk (0xFFU << PTIM_WCONTROL_Presacler_Pos) /*!< PTIM WCONTROL: Prescaler Mask */ -#define PTIM_WCONTROL_Presacler(x) (((uint32_t)(((uint32_t)(x)) << PTIM_WCONTROL_Presacler_Pos)) & PTIM_WCONTROL_Presacler_Msk) - -/* WISR Watchdog Interrupt Status Register */ -#define PTIM_WISR_EventFlag_Pos 0U /*!< PTIM WISR: Event Flag Position */ -#define PTIM_WISR_EventFlag_Msk (0x1U /*<< PTIM_WISR_EventFlag_Pos*/) /*!< PTIM WISR: Event Flag Mask */ -#define PTIM_WISR_EventFlag(x) (((uint32_t)(((uint32_t)(x)) /*<< PTIM_WISR_EventFlag_Pos*/)) & PTIM_WISR_EventFlag_Msk) - -/* WRESET Watchdog Reset Status */ -#define PTIM_WRESET_ResetFlag_Pos 0U /*!< PTIM WRESET: Reset Flag Position */ -#define PTIM_WRESET_ResetFlag_Msk (0x1U /*<< PTIM_WRESET_ResetFlag_Pos*/) /*!< PTIM WRESET: Reset Flag Mask */ -#define PTIM_WRESET_ResetFlag(x) (((uint32_t)(((uint32_t)(x)) /*<< PTIM_WRESET_ResetFlag_Pos*/)) & PTIM_WRESET_ResetFlag_Msk) - -#endif /* ((__CORTEX_A == 5U) || (__CORTEX_A == 9U)) || defined(DOXYGEN) */ + #if ((__CORTEX_A == 5U) || (__CORTEX_A == 9U)) || defined(DOXYGEN) + /** \brief Structure type to access the Private Timer + */ + typedef struct + { + __IOM uint32_t LOAD; //!< \brief Offset: 0x000 (R/W) Private Timer Load Register + __IOM uint32_t COUNTER; //!< \brief Offset: 0x004 (R/W) Private Timer Counter Register + __IOM uint32_t CONTROL; //!< \brief Offset: 0x008 (R/W) Private Timer Control Register + __IOM uint32_t ISR; //!< \brief Offset: 0x00C (R/W) Private Timer Interrupt Status Register + RESERVED(0[4], uint32_t) + __IOM uint32_t WLOAD; //!< \brief Offset: 0x020 (R/W) Watchdog Load Register + __IOM uint32_t WCOUNTER; //!< \brief Offset: 0x024 (R/W) Watchdog Counter Register + __IOM uint32_t WCONTROL; //!< \brief Offset: 0x028 (R/W) Watchdog Control Register + __IOM uint32_t WISR; //!< \brief Offset: 0x02C (R/W) Watchdog Interrupt Status Register + __IOM uint32_t WRESET; //!< \brief Offset: 0x030 (R/W) Watchdog Reset Status Register + __OM uint32_t WDISABLE; //!< \brief Offset: 0x034 ( /W) Watchdog Disable Register + } Timer_Type; + #define PTIM ((Timer_Type *) TIMER_BASE ) /*!< \brief Timer register struct */ + + /* PTIM Control Register */ + #define PTIM_CONTROL_Enable_Pos 0U /*!< PTIM CONTROL: Enable Position */ + #define PTIM_CONTROL_Enable_Msk (0x1U /*<< PTIM_CONTROL_Enable_Pos*/) /*!< PTIM CONTROL: Enable Mask */ + #define PTIM_CONTROL_Enable(x) (((uint32_t)(((uint32_t)(x)) /*<< PTIM_CONTROL_Enable_Pos*/)) & PTIM_CONTROL_Enable_Msk) + + 
#define PTIM_CONTROL_AutoReload_Pos 1U /*!< PTIM CONTROL: Auto Reload Position */ + #define PTIM_CONTROL_AutoReload_Msk (0x1U << PTIM_CONTROL_AutoReload_Pos) /*!< PTIM CONTROL: Auto Reload Mask */ + #define PTIM_CONTROL_AutoReload(x) (((uint32_t)(((uint32_t)(x)) << PTIM_CONTROL_AutoReload_Pos)) & PTIM_CONTROL_AutoReload_Msk) + + #define PTIM_CONTROL_IRQenable_Pos 2U /*!< PTIM CONTROL: IRQ Enabel Position */ + #define PTIM_CONTROL_IRQenable_Msk (0x1U << PTIM_CONTROL_IRQenable_Pos) /*!< PTIM CONTROL: IRQ Enabel Mask */ + #define PTIM_CONTROL_IRQenable(x) (((uint32_t)(((uint32_t)(x)) << PTIM_CONTROL_IRQenable_Pos)) & PTIM_CONTROL_IRQenable_Msk) + + #define PTIM_CONTROL_Prescaler_Pos 8U /*!< PTIM CONTROL: Prescaler Position */ + #define PTIM_CONTROL_Prescaler_Msk (0xFFU << PTIM_CONTROL_Prescaler_Pos) /*!< PTIM CONTROL: Prescaler Mask */ + #define PTIM_CONTROL_Prescaler(x) (((uint32_t)(((uint32_t)(x)) << PTIM_CONTROL_Prescaler_Pos)) & PTIM_CONTROL_Prescaler_Msk) + + /* WCONTROL Watchdog Control Register */ + #define PTIM_WCONTROL_Enable_Pos 0U /*!< PTIM WCONTROL: Enable Position */ + #define PTIM_WCONTROL_Enable_Msk (0x1U /*<< PTIM_WCONTROL_Enable_Pos*/) /*!< PTIM WCONTROL: Enable Mask */ + #define PTIM_WCONTROL_Enable(x) (((uint32_t)(((uint32_t)(x)) /*<< PTIM_WCONTROL_Enable_Pos*/)) & PTIM_WCONTROL_Enable_Msk) + + #define PTIM_WCONTROL_AutoReload_Pos 1U /*!< PTIM WCONTROL: Auto Reload Position */ + #define PTIM_WCONTROL_AutoReload_Msk (0x1U << PTIM_WCONTROL_AutoReload_Pos) /*!< PTIM WCONTROL: Auto Reload Mask */ + #define PTIM_WCONTROL_AutoReload(x) (((uint32_t)(((uint32_t)(x)) << PTIM_WCONTROL_AutoReload_Pos)) & PTIM_WCONTROL_AutoReload_Msk) + + #define PTIM_WCONTROL_IRQenable_Pos 2U /*!< PTIM WCONTROL: IRQ Enable Position */ + #define PTIM_WCONTROL_IRQenable_Msk (0x1U << PTIM_WCONTROL_IRQenable_Pos) /*!< PTIM WCONTROL: IRQ Enable Mask */ + #define PTIM_WCONTROL_IRQenable(x) (((uint32_t)(((uint32_t)(x)) << PTIM_WCONTROL_IRQenable_Pos)) & PTIM_WCONTROL_IRQenable_Msk) + + #define PTIM_WCONTROL_Mode_Pos 3U /*!< PTIM WCONTROL: Watchdog Mode Position */ + #define PTIM_WCONTROL_Mode_Msk (0x1U << PTIM_WCONTROL_Mode_Pos) /*!< PTIM WCONTROL: Watchdog Mode Mask */ + #define PTIM_WCONTROL_Mode(x) (((uint32_t)(((uint32_t)(x)) << PTIM_WCONTROL_Mode_Pos)) & PTIM_WCONTROL_Mode_Msk) + + #define PTIM_WCONTROL_Presacler_Pos 8U /*!< PTIM WCONTROL: Prescaler Position */ + #define PTIM_WCONTROL_Presacler_Msk (0xFFU << PTIM_WCONTROL_Presacler_Pos) /*!< PTIM WCONTROL: Prescaler Mask */ + #define PTIM_WCONTROL_Presacler(x) (((uint32_t)(((uint32_t)(x)) << PTIM_WCONTROL_Presacler_Pos)) & PTIM_WCONTROL_Presacler_Msk) + + /* WISR Watchdog Interrupt Status Register */ + #define PTIM_WISR_EventFlag_Pos 0U /*!< PTIM WISR: Event Flag Position */ + #define PTIM_WISR_EventFlag_Msk (0x1U /*<< PTIM_WISR_EventFlag_Pos*/) /*!< PTIM WISR: Event Flag Mask */ + #define PTIM_WISR_EventFlag(x) (((uint32_t)(((uint32_t)(x)) /*<< PTIM_WISR_EventFlag_Pos*/)) & PTIM_WISR_EventFlag_Msk) + + /* WRESET Watchdog Reset Status */ + #define PTIM_WRESET_ResetFlag_Pos 0U /*!< PTIM WRESET: Reset Flag Position */ + #define PTIM_WRESET_ResetFlag_Msk (0x1U /*<< PTIM_WRESET_ResetFlag_Pos*/) /*!< PTIM WRESET: Reset Flag Mask */ + #define PTIM_WRESET_ResetFlag(x) (((uint32_t)(((uint32_t)(x)) /*<< PTIM_WRESET_ResetFlag_Pos*/)) & PTIM_WRESET_ResetFlag_Msk) + + #endif /* ((__CORTEX_A == 5U) || (__CORTEX_A == 9U)) || defined(DOXYGEN) */ #endif /* (__TIM_PRESENT == 1U) || defined(DOXYGEN) */ 
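The PTIM register block and the CONTROL bit-field macros above follow the usual CMSIS _Pos/_Msk/value-macro pattern, so individual fields can simply be OR-ed together when programming the Cortex-A5/A9 private timer. A minimal usage sketch, not part of the patch: the helper name ptim_start, the load value and the prescaler are illustrative, and it assumes a device header that defines TIMER_BASE and sets __TIM_PRESENT.

/* Hypothetical helper: start the private timer in auto-reload mode with its IRQ enabled.
 * 'load' is the reload value in timer ticks, 'prescaler' divides the peripheral clock. */
static inline void ptim_start(uint32_t load, uint8_t prescaler)
{
  PTIM->CONTROL = 0U;                               /* stop the timer while reconfiguring  */
  PTIM->LOAD    = load;                             /* writing LOAD also reloads COUNTER   */
  PTIM->ISR     = 1U;                               /* clear a stale event flag, if any    */
  PTIM->CONTROL = PTIM_CONTROL_Prescaler(prescaler)
                | PTIM_CONTROL_IRQenable(1U)
                | PTIM_CONTROL_AutoReload(1U)
                | PTIM_CONTROL_Enable(1U);          /* prescaler, IRQ, auto-reload, run    */
}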
/******************************************************************************* @@ -1359,653 +1022,266 @@ __STATIC_FORCEINLINE void L1C_CleanInvalidateDCacheAll(void) { /* ########################## L2 Cache functions ################################# */ #if (__L2C_PRESENT == 1U) || defined(DOXYGEN) -/** \brief Cache Sync operation by writing CACHE_SYNC register. -*/ -__STATIC_INLINE void L2C_Sync(void) -{ - L2C_310->CACHE_SYNC = 0x0; -} - -/** \brief Read cache controller cache ID from CACHE_ID register. - * \return L2C_310_TypeDef::CACHE_ID - */ -__STATIC_INLINE int L2C_GetID (void) -{ - return L2C_310->CACHE_ID; -} - -/** \brief Read cache controller cache type from CACHE_TYPE register. -* \return L2C_310_TypeDef::CACHE_TYPE -*/ -__STATIC_INLINE int L2C_GetType (void) -{ - return L2C_310->CACHE_TYPE; -} - -/** \brief Invalidate all cache by way -*/ -__STATIC_INLINE void L2C_InvAllByWay (void) -{ - unsigned int assoc; - - if (L2C_310->AUX_CNT & (1U << 16U)) { - assoc = 16U; - } else { - assoc = 8U; + /** \brief Cache Sync operation by writing CACHE_SYNC register. + */ + __STATIC_INLINE void L2C_Sync(void) + { + L2C_310->CACHE_SYNC = 0x0; } - L2C_310->INV_WAY = (1U << assoc) - 1U; - while(L2C_310->INV_WAY & ((1U << assoc) - 1U)); //poll invalidate - - L2C_Sync(); -} - -/** \brief Clean and Invalidate all cache by way -*/ -__STATIC_INLINE void L2C_CleanInvAllByWay (void) -{ - unsigned int assoc; - - if (L2C_310->AUX_CNT & (1U << 16U)) { - assoc = 16U; - } else { - assoc = 8U; + /** \brief Read cache controller cache ID from CACHE_ID register. + * \return L2C_310_TypeDef::CACHE_ID + */ + __STATIC_INLINE int L2C_GetID (void) + { + return L2C_310->CACHE_ID; } - - L2C_310->CLEAN_INV_WAY = (1U << assoc) - 1U; - while(L2C_310->CLEAN_INV_WAY & ((1U << assoc) - 1U)); //poll invalidate - - L2C_Sync(); -} - -/** \brief Enable Level 2 Cache -*/ -__STATIC_INLINE void L2C_Enable(void) -{ - L2C_310->CONTROL = 0; - L2C_310->INTERRUPT_CLEAR = 0x000001FFuL; - L2C_310->DEBUG_CONTROL = 0; - L2C_310->DATA_LOCK_0_WAY = 0; - L2C_310->CACHE_SYNC = 0; - L2C_310->CONTROL = 0x01; - L2C_Sync(); -} - -/** \brief Disable Level 2 Cache -*/ -__STATIC_INLINE void L2C_Disable(void) -{ - L2C_310->CONTROL = 0x00; - L2C_Sync(); -} - -/** \brief Invalidate cache by physical address -* \param [in] pa Pointer to data to invalidate cache for. -*/ -__STATIC_INLINE void L2C_InvPa (void *pa) -{ - L2C_310->INV_LINE_PA = (unsigned int)pa; - L2C_Sync(); -} - -/** \brief Clean cache by physical address -* \param [in] pa Pointer to data to invalidate cache for. -*/ -__STATIC_INLINE void L2C_CleanPa (void *pa) -{ - L2C_310->CLEAN_LINE_PA = (unsigned int)pa; - L2C_Sync(); -} - -/** \brief Clean and invalidate cache by physical address -* \param [in] pa Pointer to data to invalidate cache for. -*/ -__STATIC_INLINE void L2C_CleanInvPa (void *pa) -{ - L2C_310->CLEAN_INV_LINE_PA = (unsigned int)pa; - L2C_Sync(); -} -#endif - -/* ########################## GIC functions ###################################### */ -#if (__GIC_PRESENT == 1U) || defined(DOXYGEN) -/** \brief Enable the interrupt distributor using the GIC's CTLR register. -*/ -__STATIC_INLINE void GIC_EnableDistributor(void) -{ - GICDistributor->CTLR |= 1U; -} - -/** \brief Disable the interrupt distributor using the GIC's CTLR register. -*/ -__STATIC_INLINE void GIC_DisableDistributor(void) -{ - GICDistributor->CTLR &=~1U; -} - -/** \brief Read the GIC's TYPER register. 
-* \return GICDistributor_Type::TYPER -*/ -__STATIC_INLINE uint32_t GIC_DistributorInfo(void) -{ - return (GICDistributor->TYPER); -} - -/** \brief Reads the GIC's IIDR register. -* \return GICDistributor_Type::IIDR -*/ -__STATIC_INLINE uint32_t GIC_DistributorImplementer(void) -{ - return (GICDistributor->IIDR); -} - -/** \brief Sets the GIC's ITARGETSR register for the given interrupt. -* \param [in] IRQn Interrupt to be configured. -* \param [in] cpu_target CPU interfaces to assign this interrupt to. -*/ -__STATIC_INLINE void GIC_SetTarget(IRQn_Type IRQn, uint32_t cpu_target) -{ - uint32_t mask = GICDistributor->ITARGETSR[IRQn / 4U] & ~(0xFFUL << ((IRQn % 4U) * 8U)); - GICDistributor->ITARGETSR[IRQn / 4U] = mask | ((cpu_target & 0xFFUL) << ((IRQn % 4U) * 8U)); -} - -/** \brief Read the GIC's ITARGETSR register. -* \param [in] IRQn Interrupt to acquire the configuration for. -* \return GICDistributor_Type::ITARGETSR -*/ -__STATIC_INLINE uint32_t GIC_GetTarget(IRQn_Type IRQn) -{ - return (GICDistributor->ITARGETSR[IRQn / 4U] >> ((IRQn % 4U) * 8U)) & 0xFFUL; -} - -/** \brief Enable the CPU's interrupt interface. -*/ -__STATIC_INLINE void GIC_EnableInterface(void) -{ - GICInterface->CTLR |= 1U; //enable interface -} - -/** \brief Disable the CPU's interrupt interface. -*/ -__STATIC_INLINE void GIC_DisableInterface(void) -{ - GICInterface->CTLR &=~1U; //disable distributor -} - -/** \brief Read the CPU's IAR register. -* \return GICInterface_Type::IAR -*/ -__STATIC_INLINE IRQn_Type GIC_AcknowledgePending(void) -{ - return (IRQn_Type)(GICInterface->IAR); -} - -/** \brief Writes the given interrupt number to the CPU's EOIR register. -* \param [in] IRQn The interrupt to be signaled as finished. -*/ -__STATIC_INLINE void GIC_EndInterrupt(IRQn_Type IRQn) -{ - GICInterface->EOIR = IRQn; -} - -/** \brief Enables the given interrupt using GIC's ISENABLER register. -* \param [in] IRQn The interrupt to be enabled. -*/ -__STATIC_INLINE void GIC_EnableIRQ(IRQn_Type IRQn) -{ - GICDistributor->ISENABLER[IRQn / 32U] = 1U << (IRQn % 32U); -} - -/** \brief Get interrupt enable status using GIC's ISENABLER register. -* \param [in] IRQn The interrupt to be queried. -* \return 0 - interrupt is not enabled, 1 - interrupt is enabled. -*/ -__STATIC_INLINE uint32_t GIC_GetEnableIRQ(IRQn_Type IRQn) -{ - return (GICDistributor->ISENABLER[IRQn / 32U] >> (IRQn % 32U)) & 1UL; -} - -/** \brief Disables the given interrupt using GIC's ICENABLER register. -* \param [in] IRQn The interrupt to be disabled. -*/ -__STATIC_INLINE void GIC_DisableIRQ(IRQn_Type IRQn) -{ - GICDistributor->ICENABLER[IRQn / 32U] = 1U << (IRQn % 32U); -} - -/** \brief Get interrupt pending status from GIC's ISPENDR register. -* \param [in] IRQn The interrupt to be queried. -* \return 0 - interrupt is not pending, 1 - interrupt is pendig. -*/ -__STATIC_INLINE uint32_t GIC_GetPendingIRQ(IRQn_Type IRQn) -{ - uint32_t pend; - - if (IRQn >= 16U) { - pend = (GICDistributor->ISPENDR[IRQn / 32U] >> (IRQn % 32U)) & 1UL; - } else { - // INTID 0-15 Software Generated Interrupt - pend = (GICDistributor->SPENDSGIR[IRQn / 4U] >> ((IRQn % 4U) * 8U)) & 0xFFUL; - // No CPU identification offered - if (pend != 0U) { - pend = 1U; + /** \brief Read cache controller cache type from CACHE_TYPE register. 
+ * \return L2C_310_TypeDef::CACHE_TYPE + */ + __STATIC_INLINE int L2C_GetType (void) + { + return L2C_310->CACHE_TYPE; + } + + /** \brief Invalidate all cache by way + */ + __STATIC_INLINE void L2C_InvAllByWay (void) + { + unsigned int assoc; + + if (L2C_310->AUX_CNT & (1U << 16U)) { + assoc = 16U; } else { - pend = 0U; + assoc = 8U; } + + L2C_310->INV_WAY = (1U << assoc) - 1U; + while(L2C_310->INV_WAY & ((1U << assoc) - 1U)); //poll invalidate + + L2C_Sync(); } - - return (pend); -} - -/** \brief Sets the given interrupt as pending using GIC's ISPENDR register. -* \param [in] IRQn The interrupt to be enabled. -*/ -__STATIC_INLINE void GIC_SetPendingIRQ(IRQn_Type IRQn) -{ - if (IRQn >= 16U) { - GICDistributor->ISPENDR[IRQn / 32U] = 1U << (IRQn % 32U); - } else { - // INTID 0-15 Software Generated Interrupt - // Forward the interrupt to the CPU interface that requested it - GICDistributor->SGIR = (IRQn | 0x02000000U); + + /** \brief Clean and Invalidate all cache by way + */ + __STATIC_INLINE void L2C_CleanInvAllByWay (void) + { + unsigned int assoc; + + if (L2C_310->AUX_CNT & (1U << 16U)) { + assoc = 16U; + } else { + assoc = 8U; + } + + L2C_310->CLEAN_INV_WAY = (1U << assoc) - 1U; + while(L2C_310->CLEAN_INV_WAY & ((1U << assoc) - 1U)); //poll invalidate + + L2C_Sync(); } -} - -/** \brief Clears the given interrupt from being pending using GIC's ICPENDR register. -* \param [in] IRQn The interrupt to be enabled. -*/ -__STATIC_INLINE void GIC_ClearPendingIRQ(IRQn_Type IRQn) -{ - if (IRQn >= 16U) { - GICDistributor->ICPENDR[IRQn / 32U] = 1U << (IRQn % 32U); - } else { - // INTID 0-15 Software Generated Interrupt - GICDistributor->CPENDSGIR[IRQn / 4U] = 1U << ((IRQn % 4U) * 8U); + + /** \brief Enable Level 2 Cache + */ + __STATIC_INLINE void L2C_Enable(void) + { + L2C_310->CONTROL = 0; + L2C_310->INTERRUPT_CLEAR = 0x000001FFuL; + L2C_310->DEBUG_CONTROL = 0; + L2C_310->DATA_LOCK_0_WAY = 0; + L2C_310->CACHE_SYNC = 0; + L2C_310->CONTROL = 0x01; + L2C_Sync(); } -} - -/** \brief Sets the interrupt configuration using GIC's ICFGR register. -* \param [in] IRQn The interrupt to be configured. -* \param [in] int_config Int_config field value. Bit 0: Reserved (0 - N-N model, 1 - 1-N model for some GIC before v1) -* Bit 1: 0 - level sensitive, 1 - edge triggered -*/ -__STATIC_INLINE void GIC_SetConfiguration(IRQn_Type IRQn, uint32_t int_config) -{ - uint32_t icfgr = GICDistributor->ICFGR[IRQn / 16U]; /* read current register content */ - uint32_t shift = (IRQn % 16U) << 1U; /* calculate shift value */ - - int_config &= 3U; /* only 2 bits are valid */ - icfgr &= (~(3U << shift)); /* clear bits to change */ - icfgr |= ( int_config << shift); /* set new configuration */ - - GICDistributor->ICFGR[IRQn / 16U] = icfgr; /* write new register content */ -} - -/** \brief Get the interrupt configuration from the GIC's ICFGR register. -* \param [in] IRQn Interrupt to acquire the configuration for. -* \return Int_config field value. Bit 0: Reserved (0 - N-N model, 1 - 1-N model for some GIC before v1) -* Bit 1: 0 - level sensitive, 1 - edge triggered -*/ -__STATIC_INLINE uint32_t GIC_GetConfiguration(IRQn_Type IRQn) -{ - return (GICDistributor->ICFGR[IRQn / 16U] >> ((IRQn % 16U) >> 1U)); -} - -/** \brief Set the priority for the given interrupt in the GIC's IPRIORITYR register. -* \param [in] IRQn The interrupt to be configured. -* \param [in] priority The priority for the interrupt, lower values denote higher priorities. 
-*/ -__STATIC_INLINE void GIC_SetPriority(IRQn_Type IRQn, uint32_t priority) -{ - uint32_t mask = GICDistributor->IPRIORITYR[IRQn / 4U] & ~(0xFFUL << ((IRQn % 4U) * 8U)); - GICDistributor->IPRIORITYR[IRQn / 4U] = mask | ((priority & 0xFFUL) << ((IRQn % 4U) * 8U)); -} - -/** \brief Read the current interrupt priority from GIC's IPRIORITYR register. -* \param [in] IRQn The interrupt to be queried. -*/ -__STATIC_INLINE uint32_t GIC_GetPriority(IRQn_Type IRQn) -{ - return (GICDistributor->IPRIORITYR[IRQn / 4U] >> ((IRQn % 4U) * 8U)) & 0xFFUL; -} - -/** \brief Set the interrupt priority mask using CPU's PMR register. -* \param [in] priority Priority mask to be set. -*/ -__STATIC_INLINE void GIC_SetInterfacePriorityMask(uint32_t priority) -{ - GICInterface->PMR = priority & 0xFFUL; //set priority mask -} - -/** \brief Read the current interrupt priority mask from CPU's PMR register. -* \result GICInterface_Type::PMR -*/ -__STATIC_INLINE uint32_t GIC_GetInterfacePriorityMask(void) -{ - return GICInterface->PMR; -} - -/** \brief Configures the group priority and subpriority split point using CPU's BPR register. -* \param [in] binary_point Amount of bits used as subpriority. -*/ -__STATIC_INLINE void GIC_SetBinaryPoint(uint32_t binary_point) -{ - GICInterface->BPR = binary_point & 7U; //set binary point -} - -/** \brief Read the current group priority and subpriority split point from CPU's BPR register. -* \return GICInterface_Type::BPR -*/ -__STATIC_INLINE uint32_t GIC_GetBinaryPoint(void) -{ - return GICInterface->BPR; -} - -/** \brief Get the status for a given interrupt. -* \param [in] IRQn The interrupt to get status for. -* \return 0 - not pending/active, 1 - pending, 2 - active, 3 - pending and active -*/ -__STATIC_INLINE uint32_t GIC_GetIRQStatus(IRQn_Type IRQn) -{ - uint32_t pending, active; - - active = ((GICDistributor->ISACTIVER[IRQn / 32U]) >> (IRQn % 32U)) & 1UL; - pending = ((GICDistributor->ISPENDR[IRQn / 32U]) >> (IRQn % 32U)) & 1UL; - - return ((active<<1U) | pending); -} - -/** \brief Generate a software interrupt using GIC's SGIR register. -* \param [in] IRQn Software interrupt to be generated. -* \param [in] target_list List of CPUs the software interrupt should be forwarded to. -* \param [in] filter_list Filter to be applied to determine interrupt receivers. -*/ -__STATIC_INLINE void GIC_SendSGI(IRQn_Type IRQn, uint32_t target_list, uint32_t filter_list) -{ - GICDistributor->SGIR = ((filter_list & 3U) << 24U) | ((target_list & 0xFFUL) << 16U) | (IRQn & 0x0FUL); -} - -/** \brief Get the interrupt number of the highest interrupt pending from CPU's HPPIR register. -* \return GICInterface_Type::HPPIR -*/ -__STATIC_INLINE uint32_t GIC_GetHighPendingIRQ(void) -{ - return GICInterface->HPPIR; -} - -/** \brief Provides information about the implementer and revision of the CPU interface. -* \return GICInterface_Type::IIDR -*/ -__STATIC_INLINE uint32_t GIC_GetInterfaceId(void) -{ - return GICInterface->IIDR; -} - -/** \brief Set the interrupt group from the GIC's IGROUPR register. -* \param [in] IRQn The interrupt to be queried. 
-* \param [in] group Interrupt group number: 0 - Group 0, 1 - Group 1 -*/ -__STATIC_INLINE void GIC_SetGroup(IRQn_Type IRQn, uint32_t group) -{ - uint32_t igroupr = GICDistributor->IGROUPR[IRQn / 32U]; - uint32_t shift = (IRQn % 32U); - - igroupr &= (~(1U << shift)); - igroupr |= ( (group & 1U) << shift); - - GICDistributor->IGROUPR[IRQn / 32U] = igroupr; -} -#define GIC_SetSecurity GIC_SetGroup - -/** \brief Get the interrupt group from the GIC's IGROUPR register. -* \param [in] IRQn The interrupt to be queried. -* \return 0 - Group 0, 1 - Group 1 -*/ -__STATIC_INLINE uint32_t GIC_GetGroup(IRQn_Type IRQn) -{ - return (GICDistributor->IGROUPR[IRQn / 32U] >> (IRQn % 32U)) & 1UL; -} -#define GIC_GetSecurity GIC_GetGroup - -/** \brief Initialize the interrupt distributor. -*/ -__STATIC_INLINE void GIC_DistInit(void) -{ - uint32_t i; - uint32_t num_irq = 0U; - uint32_t priority_field; - - //A reset sets all bits in the IGROUPRs corresponding to the SPIs to 0, - //configuring all of the interrupts as Secure. - - //Disable interrupt forwarding - GIC_DisableDistributor(); - //Get the maximum number of interrupts that the GIC supports - num_irq = 32U * ((GIC_DistributorInfo() & 0x1FU) + 1U); - - /* Priority level is implementation defined. - To determine the number of priority bits implemented write 0xFF to an IPRIORITYR - priority field and read back the value stored.*/ - GIC_SetPriority((IRQn_Type)0U, 0xFFU); - priority_field = GIC_GetPriority((IRQn_Type)0U); - - for (i = 32U; i < num_irq; i++) + + /** \brief Disable Level 2 Cache + */ + __STATIC_INLINE void L2C_Disable(void) { - //Disable the SPI interrupt - GIC_DisableIRQ((IRQn_Type)i); - //Set level-sensitive (and N-N model) - GIC_SetConfiguration((IRQn_Type)i, 0U); - //Set priority - GIC_SetPriority((IRQn_Type)i, priority_field/2U); - //Set target list to CPU0 - GIC_SetTarget((IRQn_Type)i, 1U); + L2C_310->CONTROL = 0x00; + L2C_Sync(); } - //Enable distributor - GIC_EnableDistributor(); -} - -/** \brief Initialize the CPU's interrupt interface -*/ -__STATIC_INLINE void GIC_CPUInterfaceInit(void) -{ - uint32_t i; - uint32_t priority_field; - - //A reset sets all bits in the IGROUPRs corresponding to the SPIs to 0, - //configuring all of the interrupts as Secure. - - //Disable interrupt forwarding - GIC_DisableInterface(); - - /* Priority level is implementation defined. - To determine the number of priority bits implemented write 0xFF to an IPRIORITYR - priority field and read back the value stored.*/ - GIC_SetPriority((IRQn_Type)0U, 0xFFU); - priority_field = GIC_GetPriority((IRQn_Type)0U); - - //SGI and PPI - for (i = 0U; i < 32U; i++) + + /** \brief Invalidate cache by physical address + * \param [in] pa Pointer to data to invalidate cache for. + */ + __STATIC_INLINE void L2C_InvPa (void *pa) { - if(i > 15U) { - //Set level-sensitive (and N-N model) for PPI - GIC_SetConfiguration((IRQn_Type)i, 0U); - } - //Disable SGI and PPI interrupts - GIC_DisableIRQ((IRQn_Type)i); - //Set priority - GIC_SetPriority((IRQn_Type)i, priority_field/2U); + L2C_310->INV_LINE_PA = (unsigned int)pa; + L2C_Sync(); } - //Enable interface - GIC_EnableInterface(); - //Set binary point to 0 - GIC_SetBinaryPoint(0U); - //Set priority mask - GIC_SetInterfacePriorityMask(0xFFU); -} - -/** \brief Initialize and enable the GIC -*/ -__STATIC_INLINE void GIC_Enable(void) -{ - GIC_DistInit(); - GIC_CPUInterfaceInit(); //per CPU -} -#endif + + /** \brief Clean cache by physical address + * \param [in] pa Pointer to data to invalidate cache for. 
+ */ + __STATIC_INLINE void L2C_CleanPa (void *pa) + { + L2C_310->CLEAN_LINE_PA = (unsigned int)pa; + L2C_Sync(); + } + + /** \brief Clean and invalidate cache by physical address + * \param [in] pa Pointer to data to invalidate cache for. + */ + __STATIC_INLINE void L2C_CleanInvPa (void *pa) + { + L2C_310->CLEAN_INV_LINE_PA = (unsigned int)pa; + L2C_Sync(); + } +#endif /* #if (__L2C_PRESENT == 1U) || defined(DOXYGEN) */ /* ########################## Generic Timer functions ############################ */ #if (__TIM_PRESENT == 1U) || defined(DOXYGEN) /* PL1 Physical Timer */ #if (__CORTEX_A == 7U) || defined(DOXYGEN) + /** \brief Physical Timer Control register */ + typedef union + { + struct + { + uint32_t ENABLE:1; /*!< \brief bit: 0 Enables the timer. */ + uint32_t IMASK:1; /*!< \brief bit: 1 Timer output signal mask bit. */ + uint32_t ISTATUS:1; /*!< \brief bit: 2 The status of the timer. */ + RESERVED(0:29, uint32_t) + } b; /*!< \brief Structure used for bit access */ + uint32_t w; /*!< \brief Type used for word access */ + } CNTP_CTL_Type; -/** \brief Physical Timer Control register */ -typedef union -{ - struct + /** \brief Configures the frequency the timer shall run at. + * \param [in] value The timer frequency in Hz. + */ + __STATIC_INLINE void PL1_SetCounterFrequency(uint32_t value) { - uint32_t ENABLE:1; /*!< \brief bit: 0 Enables the timer. */ - uint32_t IMASK:1; /*!< \brief bit: 1 Timer output signal mask bit. */ - uint32_t ISTATUS:1; /*!< \brief bit: 2 The status of the timer. */ - RESERVED(0:29, uint32_t) - } b; /*!< \brief Structure used for bit access */ - uint32_t w; /*!< \brief Type used for word access */ -} CNTP_CTL_Type; - -/** \brief Configures the frequency the timer shall run at. -* \param [in] value The timer frequency in Hz. -*/ -__STATIC_INLINE void PL1_SetCounterFrequency(uint32_t value) -{ - __set_CNTFRQ(value); - __ISB(); -} - -/** \brief Sets the reset value of the timer. -* \param [in] value The value the timer is loaded with. -*/ -__STATIC_INLINE void PL1_SetLoadValue(uint32_t value) -{ - __set_CNTP_TVAL(value); - __ISB(); -} - -/** \brief Get the current counter value. -* \return Current counter value. -*/ -__STATIC_INLINE uint32_t PL1_GetCurrentValue(void) -{ - return(__get_CNTP_TVAL()); -} - -/** \brief Get the current physical counter value. -* \return Current physical counter value. -*/ -__STATIC_INLINE uint64_t PL1_GetCurrentPhysicalValue(void) -{ - return(__get_CNTPCT()); -} - -/** \brief Set the physical compare value. -* \param [in] value New physical timer compare value. -*/ -__STATIC_INLINE void PL1_SetPhysicalCompareValue(uint64_t value) -{ - __set_CNTP_CVAL(value); - __ISB(); -} - -/** \brief Get the physical compare value. -* \return Physical compare value. -*/ -__STATIC_INLINE uint64_t PL1_GetPhysicalCompareValue(void) -{ - return(__get_CNTP_CVAL()); -} - -/** \brief Configure the timer by setting the control value. -* \param [in] value New timer control value. -*/ -__STATIC_INLINE void PL1_SetControl(uint32_t value) -{ - __set_CNTP_CTL(value); - __ISB(); -} - -/** \brief Get the control value. -* \return Control value. -*/ -__STATIC_INLINE uint32_t PL1_GetControl(void) -{ - return(__get_CNTP_CTL()); -} -#endif + __set_CNTFRQ(value); + __ISB(); + } + + /** \brief Sets the reset value of the timer. + * \param [in] value The value the timer is loaded with. + */ + __STATIC_INLINE void PL1_SetLoadValue(uint32_t value) + { + __set_CNTP_TVAL(value); + __ISB(); + } + + /** \brief Get the current counter value. + * \return Current counter value. 
+ */ + __STATIC_INLINE uint32_t PL1_GetCurrentValue(void) + { + return(__get_CNTP_TVAL()); + } + + /** \brief Get the current physical counter value. + * \return Current physical counter value. + */ + __STATIC_INLINE uint64_t PL1_GetCurrentPhysicalValue(void) + { + return(__get_CNTPCT()); + } + + /** \brief Set the physical compare value. + * \param [in] value New physical timer compare value. + */ + __STATIC_INLINE void PL1_SetPhysicalCompareValue(uint64_t value) + { + __set_CNTP_CVAL(value); + __ISB(); + } + + /** \brief Get the physical compare value. + * \return Physical compare value. + */ + __STATIC_INLINE uint64_t PL1_GetPhysicalCompareValue(void) + { + return(__get_CNTP_CVAL()); + } + + /** \brief Configure the timer by setting the control value. + * \param [in] value New timer control value. + */ + __STATIC_INLINE void PL1_SetControl(uint32_t value) + { + __set_CNTP_CTL(value); + __ISB(); + } + + /** \brief Get the control value. + * \return Control value. + */ + __STATIC_INLINE uint32_t PL1_GetControl(void) + { + return(__get_CNTP_CTL()); + } +#endif /* (__CORTEX_A == 7U) || defined(DOXYGEN) */ /* Private Timer */ #if ((__CORTEX_A == 5U) || (__CORTEX_A == 9U)) || defined(DOXYGEN) -/** \brief Set the load value to timers LOAD register. -* \param [in] value The load value to be set. -*/ -__STATIC_INLINE void PTIM_SetLoadValue(uint32_t value) -{ - PTIM->LOAD = value; -} - -/** \brief Get the load value from timers LOAD register. -* \return Timer_Type::LOAD -*/ -__STATIC_INLINE uint32_t PTIM_GetLoadValue(void) -{ - return(PTIM->LOAD); -} - -/** \brief Set current counter value from its COUNTER register. -*/ -__STATIC_INLINE void PTIM_SetCurrentValue(uint32_t value) -{ - PTIM->COUNTER = value; -} - -/** \brief Get current counter value from timers COUNTER register. -* \result Timer_Type::COUNTER -*/ -__STATIC_INLINE uint32_t PTIM_GetCurrentValue(void) -{ - return(PTIM->COUNTER); -} - -/** \brief Configure the timer using its CONTROL register. -* \param [in] value The new configuration value to be set. -*/ -__STATIC_INLINE void PTIM_SetControl(uint32_t value) -{ - PTIM->CONTROL = value; -} - -/** ref Timer_Type::CONTROL Get the current timer configuration from its CONTROL register. -* \return Timer_Type::CONTROL -*/ -__STATIC_INLINE uint32_t PTIM_GetControl(void) -{ - return(PTIM->CONTROL); -} - -/** ref Timer_Type::CONTROL Get the event flag in timers ISR register. -* \return 0 - flag is not set, 1- flag is set -*/ -__STATIC_INLINE uint32_t PTIM_GetEventFlag(void) -{ - return (PTIM->ISR & 1UL); -} - -/** ref Timer_Type::CONTROL Clears the event flag in timers ISR register. -*/ -__STATIC_INLINE void PTIM_ClearEventFlag(void) -{ - PTIM->ISR = 1; -} -#endif -#endif + /** \brief Set the load value to timers LOAD register. + * \param [in] value The load value to be set. + */ + __STATIC_INLINE void PTIM_SetLoadValue(uint32_t value) + { + PTIM->LOAD = value; + } + + /** \brief Get the load value from timers LOAD register. + * \return Timer_Type::LOAD + */ + __STATIC_INLINE uint32_t PTIM_GetLoadValue(void) + { + return(PTIM->LOAD); + } + + /** \brief Set current counter value from its COUNTER register. + */ + __STATIC_INLINE void PTIM_SetCurrentValue(uint32_t value) + { + PTIM->COUNTER = value; + } + + /** \brief Get current counter value from timers COUNTER register. + * \result Timer_Type::COUNTER + */ + __STATIC_INLINE uint32_t PTIM_GetCurrentValue(void) + { + return(PTIM->COUNTER); + } + + /** \brief Configure the timer using its CONTROL register. 
+ * \param [in] value The new configuration value to be set. + */ + __STATIC_INLINE void PTIM_SetControl(uint32_t value) + { + PTIM->CONTROL = value; + } + + /** ref Timer_Type::CONTROL Get the current timer configuration from its CONTROL register. + * \return Timer_Type::CONTROL + */ + __STATIC_INLINE uint32_t PTIM_GetControl(void) + { + return(PTIM->CONTROL); + } + + /** ref Timer_Type::CONTROL Get the event flag in timers ISR register. + * \return 0 - flag is not set, 1- flag is set + */ + __STATIC_INLINE uint32_t PTIM_GetEventFlag(void) + { + return (PTIM->ISR & 1UL); + } + + /** ref Timer_Type::CONTROL Clears the event flag in timers ISR register. + */ + __STATIC_INLINE void PTIM_ClearEventFlag(void) + { + PTIM->ISR = 1; + } +#endif /* #if ((__CORTEX_A == 5U) || (__CORTEX_A == 9U)) || defined(DOXYGEN) */ +#endif /* (__TIM_PRESENT == 1U) || defined(DOXYGEN) */ /* ########################## MMU functions ###################################### */ @@ -2922,7 +2198,6 @@ __STATIC_INLINE void MMU_Disable(void) /** \brief Invalidate entire unified TLB */ - __STATIC_INLINE void MMU_InvalidateTLB(void) { __set_TLBIALL(0); @@ -2935,6 +2210,6 @@ __STATIC_INLINE void MMU_InvalidateTLB(void) } #endif -#endif /* __CORE_CA_H_DEPENDANT */ +#endif /* __ARM_V7A_DEPENDANT */ #endif /* __CMSIS_GENERIC */ diff --git a/CMSIS/Core_A/Include/cmsis_cp15.h b/CMSIS/Core/Include/armv7a_cp15.h similarity index 99% rename from CMSIS/Core_A/Include/cmsis_cp15.h rename to CMSIS/Core/Include/armv7a_cp15.h index 9d5300112..201bb9685 100644 --- a/CMSIS/Core_A/Include/cmsis_cp15.h +++ b/CMSIS/Core/Include/armv7a_cp15.h @@ -1,11 +1,11 @@ /**************************************************************************//** - * @file cmsis_cp15.h + * @file armv7a_cp15.h * @brief CMSIS compiler specific macros, functions, instructions * @version V1.0.2 * @date 19. December 2022 ******************************************************************************/ /* - * Copyright (c) 2009-2017 ARM Limited. All rights reserved. + * Copyright (c) 2009-2023 ARM Limited. All rights reserved. * * SPDX-License-Identifier: Apache-2.0 * diff --git a/CMSIS/Core/Include/armv7r.h b/CMSIS/Core/Include/armv7r.h new file mode 100644 index 000000000..984091633 --- /dev/null +++ b/CMSIS/Core/Include/armv7r.h @@ -0,0 +1,470 @@ +/**************************************************************************//** + * @file armv7r.h + * @brief CMSIS Cortex-R Core Peripheral Access Layer Header File for ARMv7-R + * @version V1.0.0 + * @date 2. July 2023 + ******************************************************************************/ +/* + * Copyright (c) 2009-2023 ARM Limited. All rights reserved. + * + * SPDX-License-Identifier: Apache-2.0 + * + * Licensed under the Apache License, Version 2.0 (the License); you may + * not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an AS IS BASIS, WITHOUT + * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +#if defined ( __ICCARM__ ) + #pragma system_include /* treat file as system include file for MISRA check */ +#elif defined (__clang__) + #pragma clang system_header /* treat file as system include file */ +#endif + +#ifndef __ARM_V7R_GENERIC +#define __ARM_V7R_GENERIC + +#ifdef __cplusplus + extern "C" { +#endif + +/******************************************************************************* + * CMSIS definitions + ******************************************************************************/ + /** + \ingroup ARMv7-R + @{ + */ + +#include "cmsis_version.h" + +#if defined ( __CC_ARM ) + #if defined (__TARGET_FPU_VFP) + #if defined (__FPU_PRESENT) && (__FPU_PRESENT == 1U) + #define __FPU_USED 1U + #else + #warning "Compiler generates FPU instructions for a device without an FPU (check __FPU_PRESENT)" + #define __FPU_USED 0U + #endif + #else + #define __FPU_USED 0U + #endif + +#elif defined (__ARMCC_VERSION) && (__ARMCC_VERSION >= 6010050) + #if defined (__ARM_FP) + #if defined (__FPU_PRESENT) && (__FPU_PRESENT == 1U) + #define __FPU_USED 1U + #else + #warning "Compiler generates FPU instructions for a device without an FPU (check __FPU_PRESENT)" + #define __FPU_USED 0U + #endif + #else + #define __FPU_USED 0U + #endif + +#elif defined ( __ICCARM__ ) + #if defined (__ARMVFP__) + #if defined (__FPU_PRESENT) && (__FPU_PRESENT == 1U) + #define __FPU_USED 1U + #else + #warning "Compiler generates FPU instructions for a device without an FPU (check __FPU_PRESENT)" + #define __FPU_USED 0U + #endif + #else + #define __FPU_USED 0U + #endif + +#elif defined ( __TMS470__ ) + #if defined __TI_VFP_SUPPORT__ + #if defined (__FPU_PRESENT) && (__FPU_PRESENT == 1U) + #define __FPU_USED 1U + #else + #warning "Compiler generates FPU instructions for a device without an FPU (check __FPU_PRESENT)" + #define __FPU_USED 0U + #endif + #else + #define __FPU_USED 0U + #endif + +#elif defined ( __GNUC__ ) + #if defined (__VFP_FP__) && !defined(__SOFTFP__) + #if defined (__FPU_PRESENT) && (__FPU_PRESENT == 1U) + #define __FPU_USED 1U + #else + #warning "Compiler generates FPU instructions for a device without an FPU (check __FPU_PRESENT)" + #define __FPU_USED 0U + #endif + #else + #define __FPU_USED 0U + #endif + +#elif defined ( __TASKING__ ) + #if defined (__FPU_VFP__) + #if defined (__FPU_PRESENT) && (__FPU_PRESENT == 1U) + #define __FPU_USED 1U + #else + #error "Compiler generates FPU instructions for a device without an FPU (check __FPU_PRESENT)" + #define __FPU_USED 0U + #endif + #else + #define __FPU_USED 0U + #endif +#endif + +#include "cmsis_compiler.h" /* CMSIS compiler specific defines */ + +#ifdef __cplusplus +} +#endif + +#endif /* __ARM_V7R_GENERIC */ + +#ifndef __CMSIS_GENERIC + +#ifndef __ARM_V7R_DEPENDANT +#define __ARM_V7R_DEPENDANT + +#ifdef __cplusplus + extern "C" { +#endif + + /* check device defines and use defaults */ +#if defined __CHECK_DEVICE_DEFINES + #ifndef __FPU_PRESENT + #define __FPU_PRESENT 0U + #warning "__FPU_PRESENT not defined in device header file; using default!" + #endif + + #ifndef __VIC_PRESENT + #define __VIC_PRESENT 0U + #warning "__VIC_PRESENT not defined in device header file; using default!" + #endif + + #ifndef __GIC_PRESENT + #define __GIC_PRESENT 1U + #warning "__GIC_PRESENT not defined in device header file; using default!" 
+ #endif + + #if (__GIC_PRESENT == 1U) && (__VIC_PRESENT == 1U) + #error "Only one Interrupt Controller can be used" + #endif + + #ifndef __MPU_PRESENT + #define __MPU_PRESENT 0U + #warning "__MPU_PRESENT not defined in device header file; using default!" + #endif + + #ifndef __ICACHE_PRESENT + #define __ICACHE_PRESENT 0U + #warning "__ICACHE_PRESENT not defined in device header file; using default!" + #endif + + #ifndef __DCACHE_PRESENT + #define __DCACHE_PRESENT 0U + #warning "__DCACHE_PRESENT not defined in device header file; using default!" + #endif + + #ifndef __DTCM_PRESENT + #define __DTCM_PRESENT 0U + #warning "__DTCM_PRESENT not defined in device header file; using default!" + #endif + + #ifndef __ECC_PRESENT + #define __ECC_PRESENT 0U + #warning "__ECC_PRESENT not defined in device header file; using default!" + #endif +#endif + +/* IO definitions (access restrictions to peripheral registers) */ +#ifdef __cplusplus + #define __I volatile /*!< \brief Defines 'read only' permissions */ +#else + #define __I volatile const /*!< \brief Defines 'read only' permissions */ +#endif +#define __O volatile /*!< \brief Defines 'write only' permissions */ +#define __IO volatile /*!< \brief Defines 'read / write' permissions */ + +/* following defines should be used for structure members */ +#define __IM volatile const /*!< \brief Defines 'read only' structure member permissions */ +#define __OM volatile /*!< \brief Defines 'write only' structure member permissions */ +#define __IOM volatile /*!< \brief Defines 'read / write' structure member permissions */ +#define RESERVED(N, T) T RESERVED##N; // placeholder struct members used for "reserved" areas + +/** @} end of group ARMv7-R */ + + + + /******************************************************************************* + * Register Abstraction + Core Register contain: + - CPSR + - CP15 Registers + - L2C-310 Cache Controller + - Generic Interrupt Controller Distributor + - Generic Interrupt Controller Interface + ******************************************************************************/ + +/* Core Register CPSR */ +typedef union +{ + struct + { + uint32_t M:5; /*!< \brief bit: 0.. 
4 Mode field */ + uint32_t T:1; /*!< \brief bit: 5 Thumb execution state bit */ + uint32_t F:1; /*!< \brief bit: 6 FIQ mask bit */ + uint32_t I:1; /*!< \brief bit: 7 IRQ mask bit */ + uint32_t A:1; /*!< \brief bit: 8 Asynchronous abort mask bit */ + uint32_t E:1; /*!< \brief bit: 9 Endianness execution state bit */ + uint32_t IT1:6; /*!< \brief bit: 10..15 If-Then execution state bits 2-7 */ + uint32_t GE:4; /*!< \brief bit: 16..19 Greater than or Equal flags */ + RESERVED(0:4, uint32_t) + uint32_t J:1; /*!< \brief bit: 24 Jazelle bit */ + uint32_t IT0:2; /*!< \brief bit: 25..26 If-Then execution state bits 0-1 */ + uint32_t Q:1; /*!< \brief bit: 27 Saturation condition flag */ + uint32_t V:1; /*!< \brief bit: 28 Overflow condition code flag */ + uint32_t C:1; /*!< \brief bit: 29 Carry condition code flag */ + uint32_t Z:1; /*!< \brief bit: 30 Zero condition code flag */ + uint32_t N:1; /*!< \brief bit: 31 Negative condition code flag */ + } b; /*!< \brief Structure used for bit access */ + uint32_t w; /*!< \brief Type used for word access */ +} CPSR_Type; + + + +/* CPSR Register Definitions */ +#define CPSR_N_Pos 31U /*!< \brief CPSR: N Position */ +#define CPSR_N_Msk (1UL << CPSR_N_Pos) /*!< \brief CPSR: N Mask */ + +#define CPSR_Z_Pos 30U /*!< \brief CPSR: Z Position */ +#define CPSR_Z_Msk (1UL << CPSR_Z_Pos) /*!< \brief CPSR: Z Mask */ + +#define CPSR_C_Pos 29U /*!< \brief CPSR: C Position */ +#define CPSR_C_Msk (1UL << CPSR_C_Pos) /*!< \brief CPSR: C Mask */ + +#define CPSR_V_Pos 28U /*!< \brief CPSR: V Position */ +#define CPSR_V_Msk (1UL << CPSR_V_Pos) /*!< \brief CPSR: V Mask */ + +#define CPSR_Q_Pos 27U /*!< \brief CPSR: Q Position */ +#define CPSR_Q_Msk (1UL << CPSR_Q_Pos) /*!< \brief CPSR: Q Mask */ + +#define CPSR_IT0_Pos 25U /*!< \brief CPSR: IT0 Position */ +#define CPSR_IT0_Msk (3UL << CPSR_IT0_Pos) /*!< \brief CPSR: IT0 Mask */ + +#define CPSR_J_Pos 24U /*!< \brief CPSR: J Position */ +#define CPSR_J_Msk (1UL << CPSR_J_Pos) /*!< \brief CPSR: J Mask */ + +#define CPSR_GE_Pos 16U /*!< \brief CPSR: GE Position */ +#define CPSR_GE_Msk (0xFUL << CPSR_GE_Pos) /*!< \brief CPSR: GE Mask */ + +#define CPSR_IT1_Pos 10U /*!< \brief CPSR: IT1 Position */ +#define CPSR_IT1_Msk (0x3FUL << CPSR_IT1_Pos) /*!< \brief CPSR: IT1 Mask */ + +#define CPSR_E_Pos 9U /*!< \brief CPSR: E Position */ +#define CPSR_E_Msk (1UL << CPSR_E_Pos) /*!< \brief CPSR: E Mask */ + +#define CPSR_A_Pos 8U /*!< \brief CPSR: A Position */ +#define CPSR_A_Msk (1UL << CPSR_A_Pos) /*!< \brief CPSR: A Mask */ + +#define CPSR_I_Pos 7U /*!< \brief CPSR: I Position */ +#define CPSR_I_Msk (1UL << CPSR_I_Pos) /*!< \brief CPSR: I Mask */ + +#define CPSR_F_Pos 6U /*!< \brief CPSR: F Position */ +#define CPSR_F_Msk (1UL << CPSR_F_Pos) /*!< \brief CPSR: F Mask */ + +#define CPSR_T_Pos 5U /*!< \brief CPSR: T Position */ +#define CPSR_T_Msk (1UL << CPSR_T_Pos) /*!< \brief CPSR: T Mask */ + +#define CPSR_M_Pos 0U /*!< \brief CPSR: M Position */ +#define CPSR_M_Msk (0x1FUL << CPSR_M_Pos) /*!< \brief CPSR: M Mask */ + +#define CPSR_M_USR 0x10U /*!< \brief CPSR: M User mode (PL0) */ +#define CPSR_M_FIQ 0x11U /*!< \brief CPSR: M Fast Interrupt mode (PL1) */ +#define CPSR_M_IRQ 0x12U /*!< \brief CPSR: M Interrupt mode (PL1) */ +#define CPSR_M_SVC 0x13U /*!< \brief CPSR: M Supervisor mode (PL1) */ +#define CPSR_M_MON 0x16U /*!< \brief CPSR: M Monitor mode (PL1) */ +#define CPSR_M_ABT 0x17U /*!< \brief CPSR: M Abort mode (PL1) */ +#define CPSR_M_HYP 0x1AU /*!< \brief CPSR: M Hypervisor mode (PL2) */ +#define CPSR_M_UND 0x1BU /*!< \brief 
CPSR: M Undefined mode (PL1) */ +#define CPSR_M_SYS 0x1FU /*!< \brief CPSR: M System mode (PL1) */ + +/* CP15 Register SCTLR */ +typedef union +{ + struct + { + uint32_t M:1; /*!< \brief bit: 0 MMU enable */ + uint32_t A:1; /*!< \brief bit: 1 Alignment check enable */ + uint32_t C:1; /*!< \brief bit: 2 Cache enable */ + RESERVED(0:2, uint32_t) + uint32_t CP15BEN:1; /*!< \brief bit: 5 CP15 barrier enable */ + RESERVED(1:1, uint32_t) + uint32_t B:1; /*!< \brief bit: 7 Endianness model */ + RESERVED(2:2, uint32_t) + uint32_t SW:1; /*!< \brief bit: 10 SWP and SWPB enable */ + uint32_t Z:1; /*!< \brief bit: 11 Branch prediction enable */ + uint32_t I:1; /*!< \brief bit: 12 Instruction cache enable */ + uint32_t V:1; /*!< \brief bit: 13 Vectors bit */ + uint32_t RR:1; /*!< \brief bit: 14 Round Robin select */ + RESERVED(3:2, uint32_t) + uint32_t HA:1; /*!< \brief bit: 17 Hardware Access flag enable */ + RESERVED(4:1, uint32_t) + uint32_t WXN:1; /*!< \brief bit: 19 Write permission implies XN */ + uint32_t UWXN:1; /*!< \brief bit: 20 Unprivileged write permission implies PL1 XN */ + uint32_t FI:1; /*!< \brief bit: 21 Fast interrupts configuration enable */ + uint32_t U:1; /*!< \brief bit: 22 Alignment model */ + RESERVED(5:1, uint32_t) + uint32_t VE:1; /*!< \brief bit: 24 Interrupt Vectors Enable */ + uint32_t EE:1; /*!< \brief bit: 25 Exception Endianness */ + RESERVED(6:1, uint32_t) + uint32_t NMFI:1; /*!< \brief bit: 27 Non-maskable FIQ (NMFI) support */ + uint32_t TRE:1; /*!< \brief bit: 28 TEX remap enable. */ + uint32_t AFE:1; /*!< \brief bit: 29 Access flag enable */ + uint32_t TE:1; /*!< \brief bit: 30 Thumb Exception enable */ + RESERVED(7:1, uint32_t) + } b; /*!< \brief Structure used for bit access */ + uint32_t w; /*!< \brief Type used for word access */ +} SCTLR_Type; + +#define SCTLR_TE_Pos 30U /*!< \brief SCTLR: TE Position */ +#define SCTLR_TE_Msk (1UL << SCTLR_TE_Pos) /*!< \brief SCTLR: TE Mask */ + +#define SCTLR_AFE_Pos 29U /*!< \brief SCTLR: AFE Position */ +#define SCTLR_AFE_Msk (1UL << SCTLR_AFE_Pos) /*!< \brief SCTLR: AFE Mask */ + +#define SCTLR_TRE_Pos 28U /*!< \brief SCTLR: TRE Position */ +#define SCTLR_TRE_Msk (1UL << SCTLR_TRE_Pos) /*!< \brief SCTLR: TRE Mask */ + +#define SCTLR_NMFI_Pos 27U /*!< \brief SCTLR: NMFI Position */ +#define SCTLR_NMFI_Msk (1UL << SCTLR_NMFI_Pos) /*!< \brief SCTLR: NMFI Mask */ + +#define SCTLR_EE_Pos 25U /*!< \brief SCTLR: EE Position */ +#define SCTLR_EE_Msk (1UL << SCTLR_EE_Pos) /*!< \brief SCTLR: EE Mask */ + +#define SCTLR_VE_Pos 24U /*!< \brief SCTLR: VE Position */ +#define SCTLR_VE_Msk (1UL << SCTLR_VE_Pos) /*!< \brief SCTLR: VE Mask */ + +#define SCTLR_U_Pos 22U /*!< \brief SCTLR: U Position */ +#define SCTLR_U_Msk (1UL << SCTLR_U_Pos) /*!< \brief SCTLR: U Mask */ + +#define SCTLR_FI_Pos 21U /*!< \brief SCTLR: FI Position */ +#define SCTLR_FI_Msk (1UL << SCTLR_FI_Pos) /*!< \brief SCTLR: FI Mask */ + +#define SCTLR_UWXN_Pos 20U /*!< \brief SCTLR: UWXN Position */ +#define SCTLR_UWXN_Msk (1UL << SCTLR_UWXN_Pos) /*!< \brief SCTLR: UWXN Mask */ + +#define SCTLR_WXN_Pos 19U /*!< \brief SCTLR: WXN Position */ +#define SCTLR_WXN_Msk (1UL << SCTLR_WXN_Pos) /*!< \brief SCTLR: WXN Mask */ + +#define SCTLR_HA_Pos 17U /*!< \brief SCTLR: HA Position */ +#define SCTLR_HA_Msk (1UL << SCTLR_HA_Pos) /*!< \brief SCTLR: HA Mask */ + +#define SCTLR_RR_Pos 14U /*!< \brief SCTLR: RR Position */ +#define SCTLR_RR_Msk (1UL << SCTLR_RR_Pos) /*!< \brief SCTLR: RR Mask */ + +#define SCTLR_V_Pos 13U /*!< \brief SCTLR: V Position */ +#define SCTLR_V_Msk (1UL 
<< SCTLR_V_Pos) /*!< \brief SCTLR: V Mask */ + +#define SCTLR_I_Pos 12U /*!< \brief SCTLR: I Position */ +#define SCTLR_I_Msk (1UL << SCTLR_I_Pos) /*!< \brief SCTLR: I Mask */ + +#define SCTLR_Z_Pos 11U /*!< \brief SCTLR: Z Position */ +#define SCTLR_Z_Msk (1UL << SCTLR_Z_Pos) /*!< \brief SCTLR: Z Mask */ + +#define SCTLR_SW_Pos 10U /*!< \brief SCTLR: SW Position */ +#define SCTLR_SW_Msk (1UL << SCTLR_SW_Pos) /*!< \brief SCTLR: SW Mask */ + +#define SCTLR_B_Pos 7U /*!< \brief SCTLR: B Position */ +#define SCTLR_B_Msk (1UL << SCTLR_B_Pos) /*!< \brief SCTLR: B Mask */ + +#define SCTLR_CP15BEN_Pos 5U /*!< \brief SCTLR: CP15BEN Position */ +#define SCTLR_CP15BEN_Msk (1UL << SCTLR_CP15BEN_Pos) /*!< \brief SCTLR: CP15BEN Mask */ + +#define SCTLR_C_Pos 2U /*!< \brief SCTLR: C Position */ +#define SCTLR_C_Msk (1UL << SCTLR_C_Pos) /*!< \brief SCTLR: C Mask */ + +#define SCTLR_A_Pos 1U /*!< \brief SCTLR: A Position */ +#define SCTLR_A_Msk (1UL << SCTLR_A_Pos) /*!< \brief SCTLR: A Mask */ + +#define SCTLR_M_Pos 0U /*!< \brief SCTLR: M Position */ +#define SCTLR_M_Msk (1UL << SCTLR_M_Pos) /*!< \brief SCTLR: M Mask */ + + +#if (__VIC_PRESENT == 1U) || defined(DOXYGEN) + /** \brief Structure type to access the Vectored Interrupt Controller (PL190) (VIC) */ + typedef struct + { + __IM uint32_t VICIRQSTATUS; /*!< \brief Offset: 0x000 (R/ ) Provides the status of interrupts [31:0] after IRQ masking */ + __IM uint32_t VICFIQSTATUS; /*!< \brief Offset: 0x004 (R/ ) Provides the status of the interrupts after FIQ masking */ + __IM uint32_t VICRAWINTR; /*!< \brief Offset: 0x008 (R/ ) Provides the status of the source interrupts, and software interrupts */ + __IOM uint32_t VICINTSELECT; /*!< \brief Offset: 0x00C (R/W) Selects whether the corresponding interrupt source generates an FIQ or an IRQ */ + __IOM uint32_t VICINTENABLE; /*!< \brief Offset: 0x010 (R/W) Enables the interrupt request lines */ + __OM uint32_t VICINTENCLEAR; /*!< \brief Offset: 0x014 ( /W) Clears bits in the VICIntEnable Register */ + __IOM uint32_t VICSOFTINT; /*!< \brief Offset: 0x018 (R/W) Generates software interrupts */ + __OM uint32_t VICSOFTINTCLEAR; /*!< \brief Offset: 0x01C ( /W) Clears the corresponding bit in the VICSOFTINT Register */ + __IOM uint32_t VICPROTECTION; /*!< \brief Offset: 0x020 (R/W) Enables or disables protected register access */ + RESERVED(0:4, uint32_t) + __IOM uint32_t VICVECTADDR; /*!< \brief Offset: 0x030 (R/W) Contains the ISR address of the currently active interrupt */ + __IOM uint32_t VICDEFVECTADDR; /*!< \brief Offset: 0x034 (R/W) Contains the default ISR address */ + RESERVED(1:50, uint32_t) + __IOM uint32_t VICVECTADDRx[16]; /*!< \brief Offset: 0x100 (R/W) Contain the ISR vector addresses */ + RESERVED(2:30, uint32_t) + __IOM uint32_t VICVECTCNTLx[16]; /*!< \brief Offset: 0x200 (R/W) Select the interrupt source for the vectored interrupt */ + RESERVED(3:872, uint32_t) + __IM uint32_t VICPERIPHID0; /*!< \brief Offset: 0xFE0 (R/ ) Peripheral Identification Register 0 */ + __IM uint32_t VICPERIPHID1; /*!< \brief Offset: 0xFE4 (R/ ) Peripheral Identification Register 1*/ + __IM uint32_t VICPERIPHID2; /*!< \brief Offset: 0xFE8 (R/ ) Peripheral Identification Register 2 */ + __IM uint32_t VICPERIPHID3; /*!< \brief Offset: 0xFEC (R/ ) Peripheral Identification Register 3 */ + __IM uint32_t VICPCELLID[4]; /*!< \brief Offset: 0xFF0 (R/ ) PrimeCell Identification Registers */ + } VIC_Type; + + #define VIC ((VIC_Type *) VIC_BASE) /*!< \brief GIC Distributor register set access pointer */ + + + #define 
VICPROTECTION_Protection_Pos 0U
+ #define VICPROTECTION_Protection_Msk (1U << VICPROTECTION_Protection_Pos)
+ #define VICPROTECTION_Protection(x) (((uint32_t)(((uint32_t)(x)) /*<< VICPROTECTION_Protection_Pos*/)) & VICPROTECTION_Protection_Msk)
+
+ #define VICVECTCNTL_IntSource_Pos 0U
+ #define VICVECTCNTL_IntSource_Msk (0x1FU << VICVECTCNTL_IntSource_Pos)
+ #define VICVECTCNTL_IntSource(x) (((uint32_t)(((uint32_t)(x)) /*<< VICVECTCNTL_IntSource_Pos*/)) & VICVECTCNTL_IntSource_Msk)
+ #define VICVECTCNTL_E_Pos 5U
+ #define VICVECTCNTL_E_Msk (1U << VICVECTCNTL_E_Pos)
+ #define VICVECTCNTL_E(x) (((uint32_t)(((uint32_t)(x)) << VICVECTCNTL_E_Pos)) & VICVECTCNTL_E_Msk)
+
+ #define VICPERIPHID0_Partnumber0_Pos 0U
+ #define VICPERIPHID0_Partnumber0_Mask (0xFFU << VICPERIPHID0_Partnumber0_Pos)
+
+ #define VICPERIPHID1_Partnumber1_Pos 0U
+ #define VICPERIPHID1_Partnumber1_Mask (0xFU << VICPERIPHID1_Partnumber1_Pos)
+ #define VICPERIPHID1_Designer0_Pos 4U
+ #define VICPERIPHID1_Designer0_Mask (0xFU << VICPERIPHID1_Designer0_Pos)
+
+ #define VICPERIPHID2_PaDesigner1_Pos 0U
+ #define VICPERIPHID2_PaDesigner1_Mask (0xFU << VICPERIPHID2_PaDesigner1_Pos)
+ #define VICPERIPHID2_Revision_Pos 4U
+ #define VICPERIPHID2_Revision_Mask (0xFU << VICPERIPHID2_Revision_Pos)
+
+ #define VICPERIPHID3_Configuration_Pos 0U
+ #define VICPERIPHID3_Configuration_Mask (0xFFU << VICPERIPHID3_Configuration_Pos)
+
+ #define VICPCELLID_VICPCellID_Pos 0U
+ #define VICPCELLID_VICPCellID_Msk (0xFFU << VICPCELLID_VICPCellID_Pos)
+#endif /* (__VIC_PRESENT == 1U) || defined(DOXYGEN) */
+
+#if (__GIC_PRESENT == 1U) || defined(DOXYGEN)
+ #include "gic_v20.h"
+#endif /* (__GIC_PRESENT == 1U) || defined(DOXYGEN) */
+
+#ifdef __cplusplus
+ }
+#endif
+
+#endif /* __ARM_V7R_DEPENDANT */
+#endif /* __CMSIS_GENERIC */
\ No newline at end of file
diff --git a/CMSIS/Core/Include/armv8a.h b/CMSIS/Core/Include/armv8a.h
new file mode 100644
index 000000000..b815fa026
--- /dev/null
+++ b/CMSIS/Core/Include/armv8a.h
@@ -0,0 +1,621 @@
+/**************************************************************************//**
+ * @file armv8a.h
+ * @brief CMSIS Cortex-A Core Peripheral Access Layer Header File for ARMv8-A
+ * @version V1.0.0
+ * @date 2. July 2023
+ ******************************************************************************/
+/*
+ * Copyright (c) 2009-2023 ARM Limited. All rights reserved.
+ *
+ * SPDX-License-Identifier: Apache-2.0
+ *
+ * Licensed under the Apache License, Version 2.0 (the License); you may
+ * not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an AS IS BASIS, WITHOUT
+ * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */ + +#if defined ( __ICCARM__ ) + #pragma system_include /* treat file as system include file for MISRA check */ +#elif defined (__clang__) + #pragma clang system_header /* treat file as system include file */ +#endif + +#ifndef __ARM_V8A_GENERIC +#define __ARM_V8A_GENERIC + +#ifdef __cplusplus + extern "C" { +#endif + +/******************************************************************************* + * CMSIS definitions + ******************************************************************************/ +/** + \ingroup ARMv8-A + @{ + */ + +#include "cmsis_version.h" + +/** __FPU_USED indicates whether an FPU is used or not. + For this, __FPU_PRESENT has to be checked prior to making use of FPU specific registers and functions. +*/ +#if defined ( __CC_ARM ) + #if defined (__TARGET_FPU_VFP) + #if defined (__FPU_PRESENT) && (__FPU_PRESENT == 1U) + #define __FPU_USED 1U + #else + #error "Compiler generates FPU instructions for a device without an FPU (check __FPU_PRESENT)" + #define __FPU_USED 0U + #endif + #else + #define __FPU_USED 0U + #endif + + #if defined (__ARM_FEATURE_DSP) && (__ARM_FEATURE_DSP == 1U) + #if defined (__DSP_PRESENT) && (__DSP_PRESENT == 1U) + #define __DSP_USED 1U + #else + #error "Compiler generates DSP (SIMD) instructions for a devices without DSP extensions (check __DSP_PRESENT)" + #define __DSP_USED 0U + #endif + #else + #define __DSP_USED 0U + #endif + +#elif defined (__ARMCC_VERSION) && (__ARMCC_VERSION >= 6010050) + #if defined (__ARM_FP) + #if defined (__FPU_PRESENT) && (__FPU_PRESENT == 1U) + #define __FPU_USED 1U + #else + #warning "Compiler generates FPU instructions for a device without an FPU (check __FPU_PRESENT)" + #define __FPU_USED 0U + #endif + #else + #define __FPU_USED 0U + #endif + + #if defined (__ARM_FEATURE_DSP) && (__ARM_FEATURE_DSP == 1U) + #if defined (__DSP_PRESENT) && (__DSP_PRESENT == 1U) + #define __DSP_USED 1U + #else + #error "Compiler generates DSP (SIMD) instructions for a devices without DSP extensions (check __DSP_PRESENT)" + #define __DSP_USED 0U + #endif + #else + #define __DSP_USED 0U + #endif + +#elif defined ( __ICCARM__ ) + #if defined (__ARMVFP__) + #if defined (__FPU_PRESENT) && (__FPU_PRESENT == 1U) + #define __FPU_USED 1U + #else + #error "Compiler generates FPU instructions for a device without an FPU (check __FPU_PRESENT)" + #define __FPU_USED 0U + #endif + #else + #define __FPU_USED 0U + #endif + + #if defined (__ARM_FEATURE_DSP) && (__ARM_FEATURE_DSP == 1U) + #if defined (__DSP_PRESENT) && (__DSP_PRESENT == 1U) + #define __DSP_USED 1U + #else + #error "Compiler generates DSP (SIMD) instructions for a devices without DSP extensions (check __DSP_PRESENT)" + #define __DSP_USED 0U + #endif + #else + #define __DSP_USED 0U + #endif + +#elif defined ( __TMS470__ ) + #if defined __TI_VFP_SUPPORT__ + #if defined (__FPU_PRESENT) && (__FPU_PRESENT == 1U) + #define __FPU_USED 1U + #else + #warning "Compiler generates FPU instructions for a device without an FPU (check __FPU_PRESENT)" + #define __FPU_USED 0U + #endif + #else + #define __FPU_USED 0U + #endif + +#elif defined ( __GNUC__ ) + #if defined (__ARM_FP) && (__ARM_FP==0xE) + #if defined (__FPU_PRESENT) && (__FPU_PRESENT == 1U) + #define __FPU_USED 1U + #else + #error "Compiler generates FPU instructions for a device without an FPU (check __FPU_PRESENT)" + #define __FPU_USED 0U + #endif + #else + #define __FPU_USED 0U + #endif + + #if defined (__ARM_FEATURE_DSP) && (__ARM_FEATURE_DSP == 1U) + #if defined (__DSP_PRESENT) && (__DSP_PRESENT == 1U) + #define __DSP_USED 1U + #else 
+ #error "Compiler generates DSP (SIMD) instructions for a devices without DSP extensions (check __DSP_PRESENT)" + #define __DSP_USED 0U + #endif + #else + #define __DSP_USED 0U + #endif + +#elif defined ( __TI_ARM__ ) + #if defined (__TI_VFP_SUPPORT__) + #if defined (__FPU_PRESENT) && (__FPU_PRESENT == 1U) + #define __FPU_USED 1U + #else + #error "Compiler generates FPU instructions for a device without an FPU (check __FPU_PRESENT)" + #define __FPU_USED 0U + #endif + #else + #define __FPU_USED 0U + #endif + +#elif defined ( __TASKING__ ) + #if defined (__FPU_VFP__) + #if defined (__FPU_PRESENT) && (__FPU_PRESENT == 1U) + #define __FPU_USED 1U + #else + #error "Compiler generates FPU instructions for a device without an FPU (check __FPU_PRESENT)" + #define __FPU_USED 0U + #endif + #else + #define __FPU_USED 0U + #endif + +#elif defined ( __CSMC__ ) + #if ( __CSMC__ & 0x400U) + #if defined (__FPU_PRESENT) && (__FPU_PRESENT == 1U) + #define __FPU_USED 1U + #else + #error "Compiler generates FPU instructions for a device without an FPU (check __FPU_PRESENT)" + #define __FPU_USED 0U + #endif + #else + #define __FPU_USED 0U + #endif + +#endif + +#include "cmsis_compiler.h" /* CMSIS compiler specific defines */ + +#ifdef __cplusplus +} +#endif + +#endif /* __ARM_V8A_GENERIC */ + +#ifndef __CMSIS_GENERIC + +#ifndef __ARM_V8A_DEPENDANT +#define __ARM_V8A_DEPENDANT + +#ifdef __cplusplus + extern "C" { +#endif + + /* check device defines and use defaults */ +#if defined __CHECK_DEVICE_DEFINES + #ifndef __FPU_PRESENT + #define __FPU_PRESENT 0U + #warning "__FPU_PRESENT not defined in device header file; using default!" + #endif + + #ifndef __GIC_PRESENT + #define __GIC_PRESENT 1U + #warning "__GIC_PRESENT not defined in device header file; using default!" + #endif + + #ifndef __TIM_PRESENT + #define __TIM_PRESENT 1U + #warning "__TIM_PRESENT not defined in device header file; using default!" + #endif + + #ifndef __L2C_PRESENT + #define __L2C_PRESENT 0U + #warning "__L2C_PRESENT not defined in device header file; using default!" + #endif + + #ifndef __L3C_PRESENT + #define __L3C_PRESENT 0U + #warning "__L3C_PRESENT not defined in device header file; using default!" + #endif +#endif + +/* IO definitions (access restrictions to peripheral registers) */ +/** + \defgroup CMSIS_glob_defs CMSIS Global Defines + + IO Type Qualifiers are used + \li to specify the access to peripheral variables. + \li for automatic generation of peripheral register debug information. 
+*/ +#ifdef __cplusplus + #define __I volatile /*!< \brief Defines 'read only' permissions */ +#else + #define __I volatile const /*!< \brief Defines 'read only' permissions */ +#endif +#define __O volatile /*!< \brief Defines 'write only' permissions */ +#define __IO volatile /*!< \brief Defines 'read / write' permissions */ + +/* following defines should be used for structure members */ +#define __IM volatile const /*!< \brief Defines 'read only' structure member permissions */ +#define __OM volatile /*!< \brief Defines 'write only' structure member permissions */ +#define __IOM volatile /*!< \brief Defines 'read / write' structure member permissions */ +#define RESERVED(N, T) T RESERVED##N; // placeholder struct members used for "reserved" areas + +/** @} end of group ARMv8-A */ + + + + /******************************************************************************* + * Register Abstraction + Core Register contain: + - CPSR + - CP15 Registers + - L2C-310 Cache Controller + - Generic Interrupt Controller Distributor + - Generic Interrupt Controller Interface + ******************************************************************************/ + +/* Core Register CPSR */ +typedef union +{ + struct + { + uint32_t M:4; /*!< \brief bit: 0.. 3 Mode field */ + RESERVED(0:2, uint32_t) /* bit: 4.. 5 Reserved */ + uint32_t F:1; /*!< \brief bit: 6 FIQ mask bit */ + uint32_t I:1; /*!< \brief bit: 7 IRQ mask bit */ + uint32_t A:1; /*!< \brief bit: 8 Asynchronous abort mask bit */ + uint32_t E:1; /*!< \brief bit: 9 Endianness execution state bit */ + RESERVED(1:6, uint32_t) /* bit: 10..15 Reserved */ + uint32_t GE:4; /*!< \brief bit: 16..19 Greater than or Equal flags */ + RESERVED(2:1, uint32_t) /* bit: 20 Reserved */ + uint32_t DIT:1; /*!< \brief bit: 21 Data Independent Timing */ + uint32_t PAN:1; /*!< \brief bit: 22 Privileged Access Never */ + uint32_t SSBS:1; /*!< \brief bit: 23 Speculative Store Bypass Safe */ + RESERVED(3:3, uint32_t) /* bit: 24..26 Reserved */ + uint32_t Q:1; /*!< \brief bit: 27 Saturation condition flag */ + uint32_t V:1; /*!< \brief bit: 28 Overflow condition code flag */ + uint32_t C:1; /*!< \brief bit: 29 Carry condition code flag */ + uint32_t Z:1; /*!< \brief bit: 30 Zero condition code flag */ + uint32_t N:1; /*!< \brief bit: 31 Negative condition code flag */ + } b; /*!< \brief Structure used for bit access */ + uint32_t w; /*!< \brief Type used for word access */ +} CPSR_Type; + + + +/* CPSR Register Definitions */ +#define CPSR_N_Pos 31U /*!< \brief CPSR: N Position */ +#define CPSR_N_Msk (1UL << CPSR_N_Pos) /*!< \brief CPSR: N Mask */ + +#define CPSR_Z_Pos 30U /*!< \brief CPSR: Z Position */ +#define CPSR_Z_Msk (1UL << CPSR_Z_Pos) /*!< \brief CPSR: Z Mask */ + +#define CPSR_C_Pos 29U /*!< \brief CPSR: C Position */ +#define CPSR_C_Msk (1UL << CPSR_C_Pos) /*!< \brief CPSR: C Mask */ + +#define CPSR_V_Pos 28U /*!< \brief CPSR: V Position */ +#define CPSR_V_Msk (1UL << CPSR_V_Pos) /*!< \brief CPSR: V Mask */ + +#define CPSR_Q_Pos 27U /*!< \brief CPSR: Q Position */ +#define CPSR_Q_Msk (1UL << CPSR_Q_Pos) /*!< \brief CPSR: Q Mask */ + +#define CPSR_SSBS_Pos 23U /*!< \brief CPSR: SSBS Position */ +#define CPSR_SSBS_Msk (0x1UL << CPSR_SSBS_Pos) /*!< \brief CPSR: SSBS Mask */ + +#define CPSR_PAN_Pos 22U /*!< \brief CPSR: PAN Position */ +#define CPSR_PAN_Msk (0x1UL << CPSR_PAN_Pos) /*!< \brief CPSR: PAN Mask */ + +#define CPSR_DIT_Pos 21U /*!< \brief CPSR: DIT Position */ +#define CPSR_DIT_Msk (0x1UL << CPSR_DIT_Pos) /*!< \brief CPSR: DIT Mask */ + +#define CPSR_GE_Pos 16U 
/*!< \brief CPSR: GE Position */ +#define CPSR_GE_Msk (0xFUL << CPSR_GE_Pos) /*!< \brief CPSR: GE Mask */ + +#define CPSR_E_Pos 9U /*!< \brief CPSR: E Position */ +#define CPSR_E_Msk (1UL << CPSR_E_Pos) /*!< \brief CPSR: E Mask */ + +#define CPSR_A_Pos 8U /*!< \brief CPSR: A Position */ +#define CPSR_A_Msk (1UL << CPSR_A_Pos) /*!< \brief CPSR: A Mask */ + +#define CPSR_I_Pos 7U /*!< \brief CPSR: I Position */ +#define CPSR_I_Msk (1UL << CPSR_I_Pos) /*!< \brief CPSR: I Mask */ + +#define CPSR_F_Pos 6U /*!< \brief CPSR: F Position */ +#define CPSR_F_Msk (1UL << CPSR_F_Pos) /*!< \brief CPSR: F Mask */ + +#define CPSR_M_Pos 0U /*!< \brief CPSR: M Position */ +#define CPSR_M_Msk (0xFUL << CPSR_M_Pos) /*!< \brief CPSR: M Mask */ + +#define CPSR_M_USR 0x10U /*!< \brief CPSR: M User mode (PL0) */ +#define CPSR_M_FIQ 0x11U /*!< \brief CPSR: M Fast Interrupt mode (PL1) */ +#define CPSR_M_IRQ 0x12U /*!< \brief CPSR: M Interrupt mode (PL1) */ +#define CPSR_M_SVC 0x13U /*!< \brief CPSR: M Supervisor mode (PL1) */ +#define CPSR_M_MON 0x16U /*!< \brief CPSR: M Monitor mode (PL1) */ +#define CPSR_M_ABT 0x17U /*!< \brief CPSR: M Abort mode (PL1) */ +#define CPSR_M_HYP 0x1AU /*!< \brief CPSR: M Hypervisor mode (PL2) */ +#define CPSR_M_UND 0x1BU /*!< \brief CPSR: M Undefined mode (PL1) */ +#define CPSR_M_SYS 0x1FU /*!< \brief CPSR: M System mode (PL1) */ + +/* Register SCTLR */ +typedef union +{ + struct + { + uint64_t M:1; /*!< \brief bit: 0 MMU enable */ + uint64_t A:1; /*!< \brief bit: 1 Alignment check enable */ + uint64_t C:1; /*!< \brief bit: 2 Cache enable */ + uint64_t SA:1; /*!< \brief bit: 3 SP Alignment check enable */ + RESERVED(1:2, uint64_t) //[5:4] + uint64_t nAA:1; /*!< \brief bit: 6 Non-aligned access */ + RESERVED(2:4, uint64_t) //[10:7] + uint64_t EOS:1; /*!< \brief bit: 11 Exception Exit is Context Synchronizing */ + uint64_t I:1; /*!< \brief bit: 12 Instruction cache enable */ + uint64_t EnDB:1; //13 + RESERVED(3:2, uint64_t) //[15:14] + RESERVED(4:1, uint64_t) //[16] + RESERVED(5:1, uint64_t) //[17] + RESERVED(6:1, uint64_t) //[18] + uint64_t WXN:1; /*!< \brief bit: 19 Write permission implies XN */ + RESERVED(7:1, uint64_t) //[20] + uint64_t IESB:1; //21 + uint64_t EIS:1; //22 + RESERVED(8:1, uint64_t) //[23] + RESERVED(9:1, uint64_t) //[24] + uint64_t EE:1; /*!< \brief bit: 25 Exception Endianness */ + RESERVED(10:1, uint64_t) //[26] + uint64_t EnDA:1; //27 + RESERVED(11:2, uint64_t) //[29:28] + uint64_t EnIB:1; //30 + uint64_t EnIA:1; //31 + RESERVED(12:4, uint64_t) //[35:32] + uint64_t BT:1; //36 + uint64_t ITFSB:1; //37 + RESERVED(13:2, uint64_t) //[39:38] + uint64_t TCF:2; //[41:40] + RESERVED(14:1, uint64_t) //[42] + uint64_t ATA:1; //43 + uint64_t DSSBS:1; //44 + RESERVED(15:19, uint64_t) //[63:45] + } b; /*!< \brief Structure used for bit access */ + uint64_t w; /*!< \brief Type used for word access */ +} SCTLR_Type; + +#define SCTLR_TE_Pos 30U /*!< \brief SCTLR: TE Position */ +#define SCTLR_TE_Msk (1UL << SCTLR_TE_Pos) /*!< \brief SCTLR: TE Mask */ + +#define SCTLR_AFE_Pos 29U /*!< \brief SCTLR: AFE Position */ +#define SCTLR_AFE_Msk (1UL << SCTLR_AFE_Pos) /*!< \brief SCTLR: AFE Mask */ + +#define SCTLR_TRE_Pos 28U /*!< \brief SCTLR: TRE Position */ +#define SCTLR_TRE_Msk (1UL << SCTLR_TRE_Pos) /*!< \brief SCTLR: TRE Mask */ + +#define SCTLR_EE_Pos 25U /*!< \brief SCTLR: EE Position */ +#define SCTLR_EE_Msk (1UL << SCTLR_EE_Pos) /*!< \brief SCTLR: EE Mask */ + +#define SCTLR_UWXN_Pos 20U /*!< \brief SCTLR: UWXN Position */ +#define SCTLR_UWXN_Msk (1UL << SCTLR_UWXN_Pos) /*!< 
\brief SCTLR: UWXN Mask */ + +#define SCTLR_WXN_Pos 19U /*!< \brief SCTLR: WXN Position */ +#define SCTLR_WXN_Msk (1UL << SCTLR_WXN_Pos) /*!< \brief SCTLR: WXN Mask */ + +#define SCTLR_nTWE_Pos 18U /*!< \brief SCTLR: nTWE Position */ +#define SCTLR_nTWE_Msk (1UL << SCTLR_nTWE_Pos) /*!< \brief SCTLR: nTWE Mask */ + +#define SCTLR_nTWI_Pos 16U /*!< \brief SCTLR: nTWI Position */ +#define SCTLR_nTWI_Msk (1UL << SCTLR_nTWI_Pos) /*!< \brief SCTLR: nTWI Mask */ + +#define SCTLR_V_Pos 13U /*!< \brief SCTLR: V Position */ +#define SCTLR_V_Msk (1UL << SCTLR_V_Pos) /*!< \brief SCTLR: V Mask */ + +#define SCTLR_I_Pos 12U /*!< \brief SCTLR: I Position */ +#define SCTLR_I_Msk (1UL << SCTLR_I_Pos) /*!< \brief SCTLR: I Mask */ + +#define SCTLR_SED_Pos 8U /*!< \brief SCTLR: SED Position */ +#define SCTLR_SED_Msk (1UL << SCTLR_SED_Pos) /*!< \brief SCTLR: SED Mask */ + +#define SCTLR_ITD_Pos 7U /*!< \brief SCTLR: ITD Position */ +#define SCTLR_ITD_Msk (1UL << SCTLR_ITD_Pos) /*!< \brief SCTLR: ITD Mask */ + +#define SCTLR_THEE_Pos 6U /*!< \brief SCTLR: THEE Position */ +#define SCTLR_THEE_Msk (1UL << SCTLR_THEE_Pos) /*!< \brief SCTLR: THEE Mask */ + +#define SCTLR_CP15BEN_Pos 5U /*!< \brief SCTLR: CP15BEN Position */ +#define SCTLR_CP15BEN_Msk (1UL << SCTLR_CP15BEN_Pos) /*!< \brief SCTLR: CP15BEN Mask */ + +#define SCTLR_C_Pos 2U /*!< \brief SCTLR: C Position */ +#define SCTLR_C_Msk (1UL << SCTLR_C_Pos) /*!< \brief SCTLR: C Mask */ + +#define SCTLR_A_Pos 1U /*!< \brief SCTLR: A Position */ +#define SCTLR_A_Msk (1UL << SCTLR_A_Pos) /*!< \brief SCTLR: A Mask */ + +#define SCTLR_M_Pos 0U /*!< \brief SCTLR: M Position */ +#define SCTLR_M_Msk (1UL << SCTLR_M_Pos) /*!< \brief SCTLR: M Mask */ + + +/* Register TCR_EL3 */ +typedef union +{ + struct + { + uint64_t T0SZ:6; //[5:0] + RESERVED(1:2, uint64_t) //[7:6] + uint64_t IRGN0:2; //[9:8] + uint64_t ORGN0:2; //[11:10] + uint64_t SH0:2; //[13:12] + uint64_t TG0:2; //[15:14] + uint64_t PS:3; //[18:16] + RESERVED(2:1, uint64_t) //[19] + uint64_t TBI:1; //[20] + uint64_t HA:1; //[21] + uint64_t HD:1; //[22] + RESERVED(3:1, uint64_t) //[23] + uint64_t HPD:1; //[24] + uint64_t HWU59:1; //[25] + uint64_t HWU60:1; //[26] + uint64_t HWU61:1; //[27] + uint64_t HWU62:1; //[28] + uint64_t TBID:1; //[29] + uint64_t TCMA:1; //[30] + RESERVED(4:1, uint64_t) //[31] + RESERVED(5:32, uint64_t) //[63:32] + } b; + uint64_t w; /*!< \brief Type used for word access */ +} TCR_EL3_Type; + + +/* Register MPIDR_EL1 */ +typedef union +{ + struct + { + uint64_t Aff0:8; + uint64_t Aff1:8; + uint64_t Aff2:8; + uint64_t MT:1; + RESERVED(0:5, uint64_t) + uint64_t U:1; + RESERVED(1:1, uint64_t) + uint64_t Aff3:8; + RESERVED(2:24, uint64_t) + } b; /*!< \brief Structure used for bit access */ + uint64_t w; /*!< \brief Type used for word access */ +} MPIDR_EL1_Type; + + + /******************************************************************************* + * Hardware Abstraction Layer + Core Function Interface contains: + - L1 Cache Functions + - L2C-310 Cache Controller Functions + - PL1 Timer Functions + - GIC Functions + - MMU Functions + ******************************************************************************/ + +/* ########################## L1 Cache functions ################################# */ + +/** \brief Enable Caches by setting I and C bits in SCTLR register. +*/ +__STATIC_FORCEINLINE void L1C_EnableCaches(void) { + __set_SCTLR_EL3( __get_SCTLR_EL3() | SCTLR_I_Msk | SCTLR_C_Msk); + __ISB(); +} + +/** \brief Disable Caches by clearing I and C bits in SCTLR register. 
+*/ +__STATIC_FORCEINLINE void L1C_DisableCaches(void) { + __set_SCTLR_EL3( __get_SCTLR_EL3() & (~SCTLR_I_Msk) & (~SCTLR_C_Msk)); + __ISB(); +} + +/** \brief Enable Branch Prediction by setting Z bit in SCTLR register. +*/ +__STATIC_FORCEINLINE void L1C_EnableBTAC(void) { + +} + +/** \brief Disable Branch Prediction by clearing Z bit in SCTLR register. +*/ +__STATIC_FORCEINLINE void L1C_DisableBTAC(void) { + +} + +/** \brief Invalidate entire branch predictor array +*/ +__STATIC_FORCEINLINE void L1C_InvalidateBTAC(void) { + +} + +/** \brief Invalidate the whole instruction cache +*/ +__STATIC_FORCEINLINE void L1C_InvalidateICacheAll(void) { + +} + +/** \brief Invalidate the whole data cache. +*/ +__STATIC_FORCEINLINE void L1C_InvalidateDCacheAll(void) { + +} + + +/* ########################## L2 Cache functions ################################# */ +#if (__L2C_PRESENT == 1U) || defined(DOXYGEN) + +/** \brief Enable Level 2 Cache +*/ +__STATIC_INLINE void L2C_Enable(void) +{ + +} +#endif + +/* ########################## L3 Cache functions ################################# */ +#if (__L3C_PRESENT == 1U) || defined(DOXYGEN) + +#endif + +/* ########################## GIC functions ###################################### */ +#if (__GIC_PRESENT == 1U) || defined(DOXYGEN) + +#endif + +/* ########################## Generic Timer functions ############################ */ +#if (__TIM_PRESENT == 1U) || defined(DOXYGEN) + +#endif + +/* ########################## MMU functions ###################################### */ + +/** \brief Enable MMU +*/ +__STATIC_INLINE void MMU_Enable(void) +{ + __set_SCTLR_EL3( __get_SCTLR_EL3() | SCTLR_M_Msk); + __ISB(); +} + +/** \brief Disable MMU +*/ +__STATIC_INLINE void MMU_Disable(void) +{ + __set_SCTLR_EL3( __get_SCTLR_EL3() & (~SCTLR_M_Msk)); + __ISB(); +} + +/** \brief Invalidate entire unified TLB +*/ + +__STATIC_INLINE void MMU_InvalidateTLB(void) +{ + __DSB(); + __ASM volatile("tlbi vmalle1is"); + __DSB(); + __ISB(); +} + + +#ifdef __cplusplus +} +#endif + +#endif /* __ARM_V8A_DEPENDANT */ + +#endif /* __CMSIS_GENERIC */ diff --git a/CMSIS/Core/Include/armv8a_system_control.h b/CMSIS/Core/Include/armv8a_system_control.h new file mode 100644 index 000000000..d03cafc40 --- /dev/null +++ b/CMSIS/Core/Include/armv8a_system_control.h @@ -0,0 +1,143 @@ +/**************************************************************************//** + * @file armv8a_system_control.h + * @brief CMSIS compiler specific macros, functions, instructions + * @version V6.0.0 + * @date 4. August 2023 + ******************************************************************************/ +/* + * Copyright (c) 2009-2023 ARM Limited. All rights reserved. + * + * SPDX-License-Identifier: Apache-2.0 + * + * Licensed under the Apache License, Version 2.0 (the License); you may + * not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an AS IS BASIS, WITHOUT + * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */
+#include <stdint.h>
+
+#if defined ( __ICCARM__ )
+  #pragma system_include         /* treat file as system include file for MISRA check */
+#elif defined (__clang__)
+  #pragma clang system_header    /* treat file as system include file */
+#endif
+
+#ifndef __CMSIS_SYSTEM_CONTROL_H
+#define __CMSIS_SYSTEM_CONTROL_H
+
+
+  /** \brief Get MPIDR EL1
+      \return Multiprocessor Affinity Register value
+   */
+  __STATIC_FORCEINLINE uint64_t __get_MPIDR_EL1(void)
+  {
+    uint64_t result;
+    __ASM volatile("MRS %0, MPIDR_EL1" : "=r" (result) : : "memory");
+    return result;
+  }
+
+  /** \brief Get MAIR EL3
+      \return MAIR value
+   */
+  __STATIC_FORCEINLINE uint64_t __get_MAIR_EL3(void)
+  {
+    uint64_t result;
+    __ASM volatile("MRS %0, mair_el3" : "=r" (result) : : "memory");
+    return result;
+  }
+
+  /** \brief Set MAIR EL3
+      \param [in] mair MAIR value to set
+   */
+  __STATIC_FORCEINLINE void __set_MAIR_EL3(uint64_t mair)
+  {
+    __ASM volatile("MSR mair_el3, %0" : : "r" (mair) : "memory");
+  }
+
+  /** \brief Get TCR EL3
+      \return TCR value
+   */
+  __STATIC_FORCEINLINE uint64_t __get_TCR_EL3(void)
+  {
+    uint64_t result;
+    __ASM volatile("MRS %0, tcr_el3" : "=r" (result) : : "memory");
+    return result;
+  }
+
+  /** \brief Set TCR EL3
+      \param [in] tcr TCR value to set
+   */
+  __STATIC_FORCEINLINE void __set_TCR_EL3(uint64_t tcr)
+  {
+    __ASM volatile("MSR tcr_el3, %0" : : "r" (tcr) : "memory");
+  }
+
+  /** \brief Get TTBR0 EL3
+      \return Translation Table Base Register 0 value
+   */
+  __STATIC_FORCEINLINE uint64_t __get_TTBR0_EL3(void)
+  {
+    uint64_t result;
+    __ASM volatile("MRS %0, ttbr0_el3" : "=r" (result) : : "memory");
+    return result;
+  }
+
+  /** \brief Set TTBR0 EL3
+      \param [in] ttbr0 Translation Table Base Register 0 value to set
+   */
+  __STATIC_FORCEINLINE void __set_TTBR0_EL3(uint64_t ttbr0)
+  {
+    __ASM volatile("MSR ttbr0_el3, %0" : : "r" (ttbr0) : "memory");
+  }
+
+  /** \brief Get SCTLR EL3
+      \return SCTLR EL3 value
+   */
+  __STATIC_FORCEINLINE uint64_t __get_SCTLR_EL3(void)
+  {
+    uint64_t result;
+    __ASM volatile("MRS %0, sctlr_el3" : "=r" (result) : : "memory");
+    return result;
+  }
+
+  /** \brief Set SCTLR EL3
+      \param [in] sctlr SCTLR value to set
+   */
+  __STATIC_FORCEINLINE void __set_SCTLR_EL3(uint64_t sctlr)
+  {
+    __ASM volatile("MSR sctlr_el3, %0" : : "r" (sctlr) : "memory");
+  }
+
+  /** \brief Set VBAR EL3
+      \param [in] vbar VBAR value to set
+   */
+  __STATIC_FORCEINLINE void __set_VBAR_EL3(uint64_t vbar)
+  {
+    __ASM volatile("MSR vbar_el3, %0" : : "r" (vbar) : "memory");
+  }
+
+  /** \brief Set VBAR EL2
+      \param [in] vbar VBAR value to set
+   */
+  __STATIC_FORCEINLINE void __set_VBAR_EL2(uint64_t vbar)
+  {
+    __ASM volatile("MSR vbar_el2, %0" : : "r" (vbar) : "memory");
+  }
+
+  /** \brief Set VBAR EL1
+      \param [in] vbar VBAR value to set
+   */
+  __STATIC_FORCEINLINE void __set_VBAR_EL1(uint64_t vbar)
+  {
+    __ASM volatile("MSR vbar_el1, %0" : : "r" (vbar) : "memory");
+  }
+
+
+#endif /* __CMSIS_SYSTEM_CONTROL_H */
\ No newline at end of file
diff --git a/CMSIS/Core/Include/armv8r.h b/CMSIS/Core/Include/armv8r.h
new file mode 100644
index 000000000..fc84dfd54
--- /dev/null
+++ b/CMSIS/Core/Include/armv8r.h
@@ -0,0 +1,354 @@
+/**************************************************************************//**
+ * @file     armv8r.h
+ * @brief    CMSIS Cortex-R Core Peripheral Access Layer Header File for ARMv8-R
+ * @version  V1.0.0
+ * @date     2. July 2023
+ ******************************************************************************/
+/*
+ * Copyright (c) 2009-2023 ARM Limited. All rights reserved.
+ * + * SPDX-License-Identifier: Apache-2.0 + * + * Licensed under the Apache License, Version 2.0 (the License); you may + * not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an AS IS BASIS, WITHOUT + * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +#if defined ( __ICCARM__ ) + #pragma system_include /* treat file as system include file for MISRA check */ +#elif defined (__clang__) + #pragma clang system_header /* treat file as system include file */ +#endif + +#ifndef __ARM_V8R_GENERIC +#define __ARM_V8R_GENERIC + +#include + +#ifdef __cplusplus + extern "C" { +#endif + +/******************************************************************************* + * CMSIS definitions + ******************************************************************************/ +/** + \ingroup ARMv8-R + @{ + */ + +#include "cmsis_version.h" + +/** __FPU_USED indicates whether an FPU is used or not. + For this, __FPU_PRESENT has to be checked prior to making use of FPU specific registers and functions. +*/ +#if defined ( __CC_ARM ) + #if defined (__TARGET_FPU_VFP) + #if defined (__FPU_PRESENT) && (__FPU_PRESENT == 1U) + #define __FPU_USED 1U + #else + #error "Compiler generates FPU instructions for a device without an FPU (check __FPU_PRESENT)" + #define __FPU_USED 0U + #endif + #else + #define __FPU_USED 0U + #endif + + #if defined (__ARM_FEATURE_DSP) && (__ARM_FEATURE_DSP == 1U) + #if defined (__DSP_PRESENT) && (__DSP_PRESENT == 1U) + #define __DSP_USED 1U + #else + #error "Compiler generates DSP (SIMD) instructions for a devices without DSP extensions (check __DSP_PRESENT)" + #define __DSP_USED 0U + #endif + #else + #define __DSP_USED 0U + #endif + +#elif defined (__ARMCC_VERSION) && (__ARMCC_VERSION >= 6010050) + #if defined (__ARM_FP) + #if defined (__FPU_PRESENT) && (__FPU_PRESENT == 1U) + #define __FPU_USED 1U + #else + #warning "Compiler generates FPU instructions for a device without an FPU (check __FPU_PRESENT)" + #define __FPU_USED 0U + #endif + #else + #define __FPU_USED 0U + #endif + + #if defined (__ARM_FEATURE_DSP) && (__ARM_FEATURE_DSP == 1U) + #if defined (__DSP_PRESENT) && (__DSP_PRESENT == 1U) + #define __DSP_USED 1U + #else + #error "Compiler generates DSP (SIMD) instructions for a devices without DSP extensions (check __DSP_PRESENT)" + #define __DSP_USED 0U + #endif + #else + #define __DSP_USED 0U + #endif + +#elif defined ( __GNUC__ ) + #if defined (__VFP_FP__) && !defined(__SOFTFP__) + #if defined (__FPU_PRESENT) && (__FPU_PRESENT == 1U) + #define __FPU_USED 1U + #else + #error "Compiler generates FPU instructions for a device without an FPU (check __FPU_PRESENT)" + #define __FPU_USED 0U + #endif + #else + #define __FPU_USED 0U + #endif + + #if (defined(__ARM_NEON) && (__ARM_NEON == 1)) + #if defined (__DSP_PRESENT) && (__DSP_PRESENT == 1U) + #define __DSP_USED 1U + #else + #error "Compiler generates DSP (SIMD) instructions for a devices without DSP extensions (check __DSP_PRESENT)" + #define __DSP_USED 0U + #endif + #else + #define __DSP_USED 0U + #endif + +#elif defined ( __ICCARM__ ) + #if defined (__ARMVFP__) + #if defined (__FPU_PRESENT) && (__FPU_PRESENT == 1U) + #define __FPU_USED 1U + #else + #error "Compiler generates FPU 
instructions for a device without an FPU (check __FPU_PRESENT)" + #define __FPU_USED 0U + #endif + #else + #define __FPU_USED 0U + #endif + + #if defined (__ARM_FEATURE_DSP) && (__ARM_FEATURE_DSP == 1U) + #if defined (__DSP_PRESENT) && (__DSP_PRESENT == 1U) + #define __DSP_USED 1U + #else + #error "Compiler generates DSP (SIMD) instructions for a devices without DSP extensions (check __DSP_PRESENT)" + #define __DSP_USED 0U + #endif + #else + #define __DSP_USED 0U + #endif + +#elif defined ( __TI_ARM__ ) + #if defined (__TI_VFP_SUPPORT__) + #if defined (__FPU_PRESENT) && (__FPU_PRESENT == 1U) + #define __FPU_USED 1U + #else + #error "Compiler generates FPU instructions for a device without an FPU (check __FPU_PRESENT)" + #define __FPU_USED 0U + #endif + #else + #define __FPU_USED 0U + #endif + +#elif defined ( __TASKING__ ) + #if defined (__FPU_VFP__) + #if defined (__FPU_PRESENT) && (__FPU_PRESENT == 1U) + #define __FPU_USED 1U + #else + #error "Compiler generates FPU instructions for a device without an FPU (check __FPU_PRESENT)" + #define __FPU_USED 0U + #endif + #else + #define __FPU_USED 0U + #endif + +#elif defined ( __CSMC__ ) + #if ( __CSMC__ & 0x400U) + #if defined (__FPU_PRESENT) && (__FPU_PRESENT == 1U) + #define __FPU_USED 1U + #else + #error "Compiler generates FPU instructions for a device without an FPU (check __FPU_PRESENT)" + #define __FPU_USED 0U + #endif + #else + #define __FPU_USED 0U + #endif + +#endif + +#include "cmsis_compiler.h" /* CMSIS compiler specific defines */ + +#ifdef __cplusplus +} +#endif + +#endif /* __ARM_V8R_GENERIC */ + +#ifndef __CMSIS_GENERIC + +#ifndef __ARM_V8R_DEPENDANT +#define __ARM_V8R_DEPENDANT + +#ifdef __cplusplus + extern "C" { +#endif + + /* check device defines and use defaults */ +#if defined __CHECK_DEVICE_DEFINES + #ifndef __FPU_PRESENT + #define __FPU_PRESENT 0U + #warning "__FPU_PRESENT not defined in device header file; using default!" + #endif + + #ifndef __MPU_PRESENT + #define __MPU_PRESENT 0U + #warning "__MPU_PRESENT not defined in device header file; using default!" + #endif + + #ifndef __ICACHE_PRESENT + #define __ICACHE_PRESENT 0U + #warning "__ICACHE_PRESENT not defined in device header file; using default!" + #endif + + #ifndef __DCACHE_PRESENT + #define __DCACHE_PRESENT 0U + #warning "__DCACHE_PRESENT not defined in device header file; using default!" + #endif + + #ifndef __DTCM_PRESENT + #define __DTCM_PRESENT 0U + #warning "__DTCM_PRESENT not defined in device header file; using default!" + #endif + + #ifndef __ECC_PRESENT + #define __ECC_PRESENT 0U + #warning "__ECC_PRESENT not defined in device header file; using default!" + #endif +#endif + +/* IO definitions (access restrictions to peripheral registers) */ +/** + \defgroup CMSIS_glob_defs CMSIS Global Defines + + IO Type Qualifiers are used + \li to specify the access to peripheral variables. + \li for automatic generation of peripheral register debug information. 
+*/ +#ifdef __cplusplus + #define __I volatile /*!< \brief Defines 'read only' permissions */ +#else + #define __I volatile const /*!< \brief Defines 'read only' permissions */ +#endif +#define __O volatile /*!< \brief Defines 'write only' permissions */ +#define __IO volatile /*!< \brief Defines 'read / write' permissions */ + +/* following defines should be used for structure members */ +#define __IM volatile const /*!< \brief Defines 'read only' structure member permissions */ +#define __OM volatile /*!< \brief Defines 'write only' structure member permissions */ +#define __IOM volatile /*!< \brief Defines 'read / write' structure member permissions */ +#define RESERVED(N, T) T RESERVED##N; // placeholder struct members used for "reserved" areas + +/** @} end of group ARMv8-R */ + + + + /******************************************************************************* + * Register Abstraction + Core Register contain: + - CPSR + ******************************************************************************/ + +/* Core Register CPSR */ +typedef union +{ + struct + { + uint32_t M:4; /*!< \brief bit: 0.. 3 Mode field */ + RESERVED(0:2, uint32_t) /* bit: 4.. 5 Reserved */ + uint32_t F:1; /*!< \brief bit: 6 FIQ mask bit */ + uint32_t I:1; /*!< \brief bit: 7 IRQ mask bit */ + uint32_t A:1; /*!< \brief bit: 8 Asynchronous abort mask bit */ + uint32_t E:1; /*!< \brief bit: 9 Endianness execution state bit */ + RESERVED(1:6, uint32_t) /* bit: 10..15 Reserved */ + uint32_t GE:4; /*!< \brief bit: 16..19 Greater than or Equal flags */ + RESERVED(2:1, uint32_t) /* bit: 20 Reserved */ + uint32_t DIT:1; /*!< \brief bit: 21 Data Independent Timing */ + uint32_t PAN:1; /*!< \brief bit: 22 Privileged Access Never */ + uint32_t SSBS:1; /*!< \brief bit: 23 Speculative Store Bypass Safe */ + RESERVED(3:3, uint32_t) /* bit: 24..26 Reserved */ + uint32_t Q:1; /*!< \brief bit: 27 Saturation condition flag */ + uint32_t V:1; /*!< \brief bit: 28 Overflow condition code flag */ + uint32_t C:1; /*!< \brief bit: 29 Carry condition code flag */ + uint32_t Z:1; /*!< \brief bit: 30 Zero condition code flag */ + uint32_t N:1; /*!< \brief bit: 31 Negative condition code flag */ + } b; /*!< \brief Structure used for bit access */ + uint32_t w; /*!< \brief Type used for word access */ +} CPSR_Type; + + + +/* CPSR Register Definitions */ +#define CPSR_N_Pos 31U /*!< \brief CPSR: N Position */ +#define CPSR_N_Msk (1UL << CPSR_N_Pos) /*!< \brief CPSR: N Mask */ + +#define CPSR_Z_Pos 30U /*!< \brief CPSR: Z Position */ +#define CPSR_Z_Msk (1UL << CPSR_Z_Pos) /*!< \brief CPSR: Z Mask */ + +#define CPSR_C_Pos 29U /*!< \brief CPSR: C Position */ +#define CPSR_C_Msk (1UL << CPSR_C_Pos) /*!< \brief CPSR: C Mask */ + +#define CPSR_V_Pos 28U /*!< \brief CPSR: V Position */ +#define CPSR_V_Msk (1UL << CPSR_V_Pos) /*!< \brief CPSR: V Mask */ + +#define CPSR_Q_Pos 27U /*!< \brief CPSR: Q Position */ +#define CPSR_Q_Msk (1UL << CPSR_Q_Pos) /*!< \brief CPSR: Q Mask */ + +#define CPSR_SSBS_Pos 23U /*!< \brief CPSR: SSBS Position */ +#define CPSR_SSBS_Msk (0x1UL << CPSR_SSBS_Pos) /*!< \brief CPSR: SSBS Mask */ + +#define CPSR_PAN_Pos 22U /*!< \brief CPSR: PAN Position */ +#define CPSR_PAN_Msk (0x1UL << CPSR_PAN_Pos) /*!< \brief CPSR: PAN Mask */ + +#define CPSR_DIT_Pos 21U /*!< \brief CPSR: DIT Position */ +#define CPSR_DIT_Msk (0x1UL << CPSR_DIT_Pos) /*!< \brief CPSR: DIT Mask */ + +#define CPSR_GE_Pos 16U /*!< \brief CPSR: GE Position */ +#define CPSR_GE_Msk (0xFUL << CPSR_GE_Pos) /*!< \brief CPSR: GE Mask */ + +#define CPSR_E_Pos 9U /*!< 
\brief CPSR: E Position */ +#define CPSR_E_Msk (1UL << CPSR_E_Pos) /*!< \brief CPSR: E Mask */ + +#define CPSR_A_Pos 8U /*!< \brief CPSR: A Position */ +#define CPSR_A_Msk (1UL << CPSR_A_Pos) /*!< \brief CPSR: A Mask */ + +#define CPSR_I_Pos 7U /*!< \brief CPSR: I Position */ +#define CPSR_I_Msk (1UL << CPSR_I_Pos) /*!< \brief CPSR: I Mask */ + +#define CPSR_F_Pos 6U /*!< \brief CPSR: F Position */ +#define CPSR_F_Msk (1UL << CPSR_F_Pos) /*!< \brief CPSR: F Mask */ + +#define CPSR_M_Pos 0U /*!< \brief CPSR: M Position */ +#define CPSR_M_Msk (0xFUL << CPSR_M_Pos) /*!< \brief CPSR: M Mask */ + +#define CPSR_M_USR 0x10U /*!< \brief CPSR: M User mode (PL0) */ +#define CPSR_M_FIQ 0x11U /*!< \brief CPSR: M Fast Interrupt mode (PL1) */ +#define CPSR_M_IRQ 0x12U /*!< \brief CPSR: M Interrupt mode (PL1) */ +#define CPSR_M_SVC 0x13U /*!< \brief CPSR: M Supervisor mode (PL1) */ +#define CPSR_M_MON 0x16U /*!< \brief CPSR: M Monitor mode (PL1) */ +#define CPSR_M_ABT 0x17U /*!< \brief CPSR: M Abort mode (PL1) */ +#define CPSR_M_HYP 0x1AU /*!< \brief CPSR: M Hypervisor mode (PL2) */ +#define CPSR_M_UND 0x1BU /*!< \brief CPSR: M Undefined mode (PL1) */ +#define CPSR_M_SYS 0x1FU /*!< \brief CPSR: M System mode (PL1) */ + + +#ifdef __cplusplus +} +#endif + +#endif /* __ARM_V8R_DEPENDANT */ + +#endif /* __CMSIS_GENERIC */ \ No newline at end of file diff --git a/CMSIS/Core/Include/cmsis_armclang.h b/CMSIS/Core/Include/cmsis_armclang.h index 470f7d4ff..c589f8479 100644 --- a/CMSIS/Core/Include/cmsis_armclang.h +++ b/CMSIS/Core/Include/cmsis_armclang.h @@ -49,6 +49,9 @@ #ifndef __NO_RETURN #define __NO_RETURN __attribute__((__noreturn__)) #endif +#ifndef CMSIS_DEPRECATED + #define CMSIS_DEPRECATED __attribute__((deprecated)) +#endif #ifndef __USED #define __USED __attribute__((used)) #endif @@ -108,47 +111,6 @@ #define __ALIAS(x) __attribute__ ((alias(x))) #endif -/* ######################### Startup and Lowlevel Init ######################## */ -#ifndef __PROGRAM_START -#define __PROGRAM_START __main -#endif - -#ifndef __INITIAL_SP -#define __INITIAL_SP Image$$ARM_LIB_STACK$$ZI$$Limit -#endif - -#ifndef __STACK_LIMIT -#define __STACK_LIMIT Image$$ARM_LIB_STACK$$ZI$$Base -#endif - -#ifndef __VECTOR_TABLE -#define __VECTOR_TABLE __Vectors -#endif - -#ifndef __VECTOR_TABLE_ATTRIBUTE -#define __VECTOR_TABLE_ATTRIBUTE __attribute__((used, section("RESET"))) -#endif - -#if (__ARM_FEATURE_CMSE == 3) -#ifndef __STACK_SEAL -#define __STACK_SEAL Image$$STACKSEAL$$ZI$$Base -#endif - -#ifndef __TZ_STACK_SEAL_SIZE -#define __TZ_STACK_SEAL_SIZE 8U -#endif - -#ifndef __TZ_STACK_SEAL_VALUE -#define __TZ_STACK_SEAL_VALUE 0xFEF5EDA5FEF5EDA5ULL -#endif - - -__STATIC_FORCEINLINE void __TZ_set_STACKSEAL_S (uint32_t* stackTop) { - *((uint64_t *)stackTop) = __TZ_STACK_SEAL_VALUE; -} -#endif - - /* ########################## Core Instruction Access ######################### */ /** \defgroup CMSIS_Core_InstructionInterface CMSIS Core Instruction Interface Access to dedicated instructions @@ -711,294 +673,6 @@ __STATIC_FORCEINLINE void __disable_irq(void) } #endif - -/** - \brief Get Control Register - \details Returns the content of the Control Register. - \return Control Register value - */ -__STATIC_FORCEINLINE uint32_t __get_CONTROL(void) -{ - uint32_t result; - - __ASM volatile ("MRS %0, control" : "=r" (result) ); - return (result); -} - - -#if (__ARM_FEATURE_CMSE == 3) -/** - \brief Get Control Register (non-secure) - \details Returns the content of the non-secure Control Register when in secure mode. 
- \return non-secure Control Register value - */ -__STATIC_FORCEINLINE uint32_t __TZ_get_CONTROL_NS(void) -{ - uint32_t result; - - __ASM volatile ("MRS %0, control_ns" : "=r" (result) ); - return (result); -} -#endif - - -/** - \brief Set Control Register - \details Writes the given value to the Control Register. - \param [in] control Control Register value to set - */ -__STATIC_FORCEINLINE void __set_CONTROL(uint32_t control) -{ - __ASM volatile ("MSR control, %0" : : "r" (control) : "memory"); - __ISB(); -} - - -#if (__ARM_FEATURE_CMSE == 3) -/** - \brief Set Control Register (non-secure) - \details Writes the given value to the non-secure Control Register when in secure state. - \param [in] control Control Register value to set - */ -__STATIC_FORCEINLINE void __TZ_set_CONTROL_NS(uint32_t control) -{ - __ASM volatile ("MSR control_ns, %0" : : "r" (control) : "memory"); - __ISB(); -} -#endif - - -/** - \brief Get IPSR Register - \details Returns the content of the IPSR Register. - \return IPSR Register value - */ -__STATIC_FORCEINLINE uint32_t __get_IPSR(void) -{ - uint32_t result; - - __ASM volatile ("MRS %0, ipsr" : "=r" (result) ); - return (result); -} - - -/** - \brief Get APSR Register - \details Returns the content of the APSR Register. - \return APSR Register value - */ -__STATIC_FORCEINLINE uint32_t __get_APSR(void) -{ - uint32_t result; - - __ASM volatile ("MRS %0, apsr" : "=r" (result) ); - return (result); -} - - -/** - \brief Get xPSR Register - \details Returns the content of the xPSR Register. - \return xPSR Register value - */ -__STATIC_FORCEINLINE uint32_t __get_xPSR(void) -{ - uint32_t result; - - __ASM volatile ("MRS %0, xpsr" : "=r" (result) ); - return (result); -} - - -/** - \brief Get Process Stack Pointer - \details Returns the current value of the Process Stack Pointer (PSP). - \return PSP Register value - */ -__STATIC_FORCEINLINE uint32_t __get_PSP(void) -{ - uint32_t result; - - __ASM volatile ("MRS %0, psp" : "=r" (result) ); - return (result); -} - - -#if (__ARM_FEATURE_CMSE == 3) -/** - \brief Get Process Stack Pointer (non-secure) - \details Returns the current value of the non-secure Process Stack Pointer (PSP) when in secure state. - \return PSP Register value - */ -__STATIC_FORCEINLINE uint32_t __TZ_get_PSP_NS(void) -{ - uint32_t result; - - __ASM volatile ("MRS %0, psp_ns" : "=r" (result) ); - return (result); -} -#endif - - -/** - \brief Set Process Stack Pointer - \details Assigns the given value to the Process Stack Pointer (PSP). - \param [in] topOfProcStack Process Stack Pointer value to set - */ -__STATIC_FORCEINLINE void __set_PSP(uint32_t topOfProcStack) -{ - __ASM volatile ("MSR psp, %0" : : "r" (topOfProcStack) : ); -} - - -#if (__ARM_FEATURE_CMSE == 3) -/** - \brief Set Process Stack Pointer (non-secure) - \details Assigns the given value to the non-secure Process Stack Pointer (PSP) when in secure state. - \param [in] topOfProcStack Process Stack Pointer value to set - */ -__STATIC_FORCEINLINE void __TZ_set_PSP_NS(uint32_t topOfProcStack) -{ - __ASM volatile ("MSR psp_ns, %0" : : "r" (topOfProcStack) : ); -} -#endif - - -/** - \brief Get Main Stack Pointer - \details Returns the current value of the Main Stack Pointer (MSP). 
- \return MSP Register value - */ -__STATIC_FORCEINLINE uint32_t __get_MSP(void) -{ - uint32_t result; - - __ASM volatile ("MRS %0, msp" : "=r" (result) ); - return (result); -} - - -#if (__ARM_FEATURE_CMSE == 3) -/** - \brief Get Main Stack Pointer (non-secure) - \details Returns the current value of the non-secure Main Stack Pointer (MSP) when in secure state. - \return MSP Register value - */ -__STATIC_FORCEINLINE uint32_t __TZ_get_MSP_NS(void) -{ - uint32_t result; - - __ASM volatile ("MRS %0, msp_ns" : "=r" (result) ); - return (result); -} -#endif - - -/** - \brief Set Main Stack Pointer - \details Assigns the given value to the Main Stack Pointer (MSP). - \param [in] topOfMainStack Main Stack Pointer value to set - */ -__STATIC_FORCEINLINE void __set_MSP(uint32_t topOfMainStack) -{ - __ASM volatile ("MSR msp, %0" : : "r" (topOfMainStack) : ); -} - - -#if (__ARM_FEATURE_CMSE == 3) -/** - \brief Set Main Stack Pointer (non-secure) - \details Assigns the given value to the non-secure Main Stack Pointer (MSP) when in secure state. - \param [in] topOfMainStack Main Stack Pointer value to set - */ -__STATIC_FORCEINLINE void __TZ_set_MSP_NS(uint32_t topOfMainStack) -{ - __ASM volatile ("MSR msp_ns, %0" : : "r" (topOfMainStack) : ); -} -#endif - - -#if (__ARM_FEATURE_CMSE == 3) -/** - \brief Get Stack Pointer (non-secure) - \details Returns the current value of the non-secure Stack Pointer (SP) when in secure state. - \return SP Register value - */ -__STATIC_FORCEINLINE uint32_t __TZ_get_SP_NS(void) -{ - uint32_t result; - - __ASM volatile ("MRS %0, sp_ns" : "=r" (result) ); - return (result); -} - - -/** - \brief Set Stack Pointer (non-secure) - \details Assigns the given value to the non-secure Stack Pointer (SP) when in secure state. - \param [in] topOfStack Stack Pointer value to set - */ -__STATIC_FORCEINLINE void __TZ_set_SP_NS(uint32_t topOfStack) -{ - __ASM volatile ("MSR sp_ns, %0" : : "r" (topOfStack) : ); -} -#endif - - -/** - \brief Get Priority Mask - \details Returns the current state of the priority mask bit from the Priority Mask Register. - \return Priority Mask value - */ -__STATIC_FORCEINLINE uint32_t __get_PRIMASK(void) -{ - uint32_t result; - - __ASM volatile ("MRS %0, primask" : "=r" (result) ); - return (result); -} - - -#if (__ARM_FEATURE_CMSE == 3) -/** - \brief Get Priority Mask (non-secure) - \details Returns the current state of the non-secure priority mask bit from the Priority Mask Register when in secure state. - \return Priority Mask value - */ -__STATIC_FORCEINLINE uint32_t __TZ_get_PRIMASK_NS(void) -{ - uint32_t result; - - __ASM volatile ("MRS %0, primask_ns" : "=r" (result) ); - return (result); -} -#endif - - -/** - \brief Set Priority Mask - \details Assigns the given value to the Priority Mask Register. - \param [in] priMask Priority Mask - */ -__STATIC_FORCEINLINE void __set_PRIMASK(uint32_t priMask) -{ - __ASM volatile ("MSR primask, %0" : : "r" (priMask) : "memory"); -} - - -#if (__ARM_FEATURE_CMSE == 3) -/** - \brief Set Priority Mask (non-secure) - \details Assigns the given value to the non-secure Priority Mask Register when in secure state. 
- \param [in] priMask Priority Mask - */ -__STATIC_FORCEINLINE void __TZ_set_PRIMASK_NS(uint32_t priMask) -{ - __ASM volatile ("MSR primask_ns, %0" : : "r" (priMask) : "memory"); -} -#endif - - #if (__ARM_ARCH_ISA_THUMB >= 2) /** \brief Enable FIQ @@ -1020,313 +694,9 @@ __STATIC_FORCEINLINE void __disable_fault_irq(void) { __ASM volatile ("cpsid f" : : : "memory"); } - - -/** - \brief Get Base Priority - \details Returns the current value of the Base Priority register. - \return Base Priority register value - */ -__STATIC_FORCEINLINE uint32_t __get_BASEPRI(void) -{ - uint32_t result; - - __ASM volatile ("MRS %0, basepri" : "=r" (result) ); - return (result); -} - - -#if (__ARM_FEATURE_CMSE == 3) -/** - \brief Get Base Priority (non-secure) - \details Returns the current value of the non-secure Base Priority register when in secure state. - \return Base Priority register value - */ -__STATIC_FORCEINLINE uint32_t __TZ_get_BASEPRI_NS(void) -{ - uint32_t result; - - __ASM volatile ("MRS %0, basepri_ns" : "=r" (result) ); - return (result); -} #endif -/** - \brief Set Base Priority - \details Assigns the given value to the Base Priority register. - \param [in] basePri Base Priority value to set - */ -__STATIC_FORCEINLINE void __set_BASEPRI(uint32_t basePri) -{ - __ASM volatile ("MSR basepri, %0" : : "r" (basePri) : "memory"); -} - - -#if (__ARM_FEATURE_CMSE == 3) -/** - \brief Set Base Priority (non-secure) - \details Assigns the given value to the non-secure Base Priority register when in secure state. - \param [in] basePri Base Priority value to set - */ -__STATIC_FORCEINLINE void __TZ_set_BASEPRI_NS(uint32_t basePri) -{ - __ASM volatile ("MSR basepri_ns, %0" : : "r" (basePri) : "memory"); -} -#endif - - -/** - \brief Set Base Priority with condition - \details Assigns the given value to the Base Priority register only if BASEPRI masking is disabled, - or the new value increases the BASEPRI priority level. - \param [in] basePri Base Priority value to set - */ -__STATIC_FORCEINLINE void __set_BASEPRI_MAX(uint32_t basePri) -{ - __ASM volatile ("MSR basepri_max, %0" : : "r" (basePri) : "memory"); -} - - -/** - \brief Get Fault Mask - \details Returns the current value of the Fault Mask register. - \return Fault Mask register value - */ -__STATIC_FORCEINLINE uint32_t __get_FAULTMASK(void) -{ - uint32_t result; - - __ASM volatile ("MRS %0, faultmask" : "=r" (result) ); - return (result); -} - - -#if (__ARM_FEATURE_CMSE == 3) -/** - \brief Get Fault Mask (non-secure) - \details Returns the current value of the non-secure Fault Mask register when in secure state. - \return Fault Mask register value - */ -__STATIC_FORCEINLINE uint32_t __TZ_get_FAULTMASK_NS(void) -{ - uint32_t result; - - __ASM volatile ("MRS %0, faultmask_ns" : "=r" (result) ); - return (result); -} -#endif - - -/** - \brief Set Fault Mask - \details Assigns the given value to the Fault Mask register. - \param [in] faultMask Fault Mask value to set - */ -__STATIC_FORCEINLINE void __set_FAULTMASK(uint32_t faultMask) -{ - __ASM volatile ("MSR faultmask, %0" : : "r" (faultMask) : "memory"); -} - - -#if (__ARM_FEATURE_CMSE == 3) -/** - \brief Set Fault Mask (non-secure) - \details Assigns the given value to the non-secure Fault Mask register when in secure state. 
- \param [in] faultMask Fault Mask value to set - */ -__STATIC_FORCEINLINE void __TZ_set_FAULTMASK_NS(uint32_t faultMask) -{ - __ASM volatile ("MSR faultmask_ns, %0" : : "r" (faultMask) : "memory"); -} -#endif - -#endif /* (__ARM_ARCH_ISA_THUMB >= 2) */ - - -#if (__ARM_ARCH >= 8) -/** - \brief Get Process Stack Pointer Limit - Devices without ARMv8-M Main Extensions (i.e. Cortex-M23) lack the non-secure - Stack Pointer Limit register hence zero is returned always in non-secure - mode. - - \details Returns the current value of the Process Stack Pointer Limit (PSPLIM). - \return PSPLIM Register value - */ -__STATIC_FORCEINLINE uint32_t __get_PSPLIM(void) -{ -#if (((__ARM_ARCH_8M_MAIN__ < 1) && \ - (__ARM_ARCH_8_1M_MAIN__ < 1) ) && \ - (__ARM_FEATURE_CMSE < 3) ) - /* without main extensions, the non-secure PSPLIM is RAZ/WI */ - return (0U); -#else - uint32_t result; - __ASM volatile ("MRS %0, psplim" : "=r" (result) ); - return (result); -#endif -} - -#if (__ARM_FEATURE_CMSE == 3) -/** - \brief Get Process Stack Pointer Limit (non-secure) - Devices without ARMv8-M Main Extensions (i.e. Cortex-M23) lack the non-secure - Stack Pointer Limit register hence zero is returned always. - - \details Returns the current value of the non-secure Process Stack Pointer Limit (PSPLIM) when in secure state. - \return PSPLIM Register value - */ -__STATIC_FORCEINLINE uint32_t __TZ_get_PSPLIM_NS(void) -{ -#if ((__ARM_ARCH_8M_MAIN__ < 1) && \ - (__ARM_ARCH_8_1M_MAIN__ < 1) ) - /* without main extensions, the non-secure PSPLIM is RAZ/WI */ - return (0U); -#else - uint32_t result; - __ASM volatile ("MRS %0, psplim_ns" : "=r" (result) ); - return (result); -#endif -} -#endif - - -/** - \brief Set Process Stack Pointer Limit - Devices without ARMv8-M Main Extensions (i.e. Cortex-M23) lack the non-secure - Stack Pointer Limit register hence the write is silently ignored in non-secure - mode. - - \details Assigns the given value to the Process Stack Pointer Limit (PSPLIM). - \param [in] ProcStackPtrLimit Process Stack Pointer Limit value to set - */ -__STATIC_FORCEINLINE void __set_PSPLIM(uint32_t ProcStackPtrLimit) -{ -#if (((__ARM_ARCH_8M_MAIN__ < 1) && \ - (__ARM_ARCH_8_1M_MAIN__ < 1) ) && \ - (__ARM_FEATURE_CMSE < 3) ) - /* without main extensions, the non-secure PSPLIM is RAZ/WI */ - (void)ProcStackPtrLimit; -#else - __ASM volatile ("MSR psplim, %0" : : "r" (ProcStackPtrLimit)); -#endif -} - - -#if (__ARM_FEATURE_CMSE == 3) -/** - \brief Set Process Stack Pointer (non-secure) - Devices without ARMv8-M Main Extensions (i.e. Cortex-M23) lack the non-secure - Stack Pointer Limit register hence the write is silently ignored. - - \details Assigns the given value to the non-secure Process Stack Pointer Limit (PSPLIM) when in secure state. - \param [in] ProcStackPtrLimit Process Stack Pointer Limit value to set - */ -__STATIC_FORCEINLINE void __TZ_set_PSPLIM_NS(uint32_t ProcStackPtrLimit) -{ -#if ((__ARM_ARCH_8M_MAIN__ < 1) && \ - (__ARM_ARCH_8_1M_MAIN__ < 1) ) - /* without main extensions, the non-secure PSPLIM is RAZ/WI */ - (void)ProcStackPtrLimit; -#else - __ASM volatile ("MSR psplim_ns, %0\n" : : "r" (ProcStackPtrLimit)); -#endif -} -#endif - - -/** - \brief Get Main Stack Pointer Limit - Devices without ARMv8-M Main Extensions (i.e. Cortex-M23) lack the non-secure - Stack Pointer Limit register hence zero is returned always. - - \details Returns the current value of the Main Stack Pointer Limit (MSPLIM). 
- \return MSPLIM Register value - */ -__STATIC_FORCEINLINE uint32_t __get_MSPLIM(void) -{ -#if (((__ARM_ARCH_8M_MAIN__ < 1) && \ - (__ARM_ARCH_8_1M_MAIN__ < 1) ) && \ - (__ARM_FEATURE_CMSE < 3) ) - /* without main extensions, the non-secure MSPLIM is RAZ/WI */ - return (0U); -#else - uint32_t result; - __ASM volatile ("MRS %0, msplim" : "=r" (result) ); - return (result); -#endif -} - - -#if (__ARM_FEATURE_CMSE == 3) -/** - \brief Get Main Stack Pointer Limit (non-secure) - Devices without ARMv8-M Main Extensions (i.e. Cortex-M23) lack the non-secure - Stack Pointer Limit register hence zero is returned always. - - \details Returns the current value of the non-secure Main Stack Pointer Limit(MSPLIM) when in secure state. - \return MSPLIM Register value - */ -__STATIC_FORCEINLINE uint32_t __TZ_get_MSPLIM_NS(void) -{ -#if ((__ARM_ARCH_8M_MAIN__ < 1) && \ - (__ARM_ARCH_8_1M_MAIN__ < 1) ) - /* without main extensions, the non-secure MSPLIM is RAZ/WI */ - return (0U); -#else - uint32_t result; - __ASM volatile ("MRS %0, msplim_ns" : "=r" (result) ); - return (result); -#endif -} -#endif - - -/** - \brief Set Main Stack Pointer Limit - Devices without ARMv8-M Main Extensions (i.e. Cortex-M23) lack the non-secure - Stack Pointer Limit register hence the write is silently ignored. - - \details Assigns the given value to the Main Stack Pointer Limit (MSPLIM). - \param [in] MainStackPtrLimit Main Stack Pointer Limit value to set - */ -__STATIC_FORCEINLINE void __set_MSPLIM(uint32_t MainStackPtrLimit) -{ -#if (((__ARM_ARCH_8M_MAIN__ < 1) && \ - (__ARM_ARCH_8_1M_MAIN__ < 1) ) && \ - (__ARM_FEATURE_CMSE < 3) ) - /* without main extensions, the non-secure MSPLIM is RAZ/WI */ - (void)MainStackPtrLimit; -#else - __ASM volatile ("MSR msplim, %0" : : "r" (MainStackPtrLimit)); -#endif -} - - -#if (__ARM_FEATURE_CMSE == 3) -/** - \brief Set Main Stack Pointer Limit (non-secure) - Devices without ARMv8-M Main Extensions (i.e. Cortex-M23) lack the non-secure - Stack Pointer Limit register hence the write is silently ignored. - - \details Assigns the given value to the non-secure Main Stack Pointer Limit (MSPLIM) when in secure state. - \param [in] MainStackPtrLimit Main Stack Pointer value to set - */ -__STATIC_FORCEINLINE void __TZ_set_MSPLIM_NS(uint32_t MainStackPtrLimit) -{ -#if ((__ARM_ARCH_8M_MAIN__ < 1) && \ - (__ARM_ARCH_8_1M_MAIN__ < 1) ) - /* without main extensions, the non-secure MSPLIM is RAZ/WI */ - (void)MainStackPtrLimit; -#else - __ASM volatile ("MSR msplim_ns, %0" : : "r" (MainStackPtrLimit)); -#endif -} -#endif - -#endif /* (__ARM_ARCH >= 8) */ - /** \brief Get FPSCR diff --git a/CMSIS/Core/Include/cmsis_armclang_corea.h b/CMSIS/Core/Include/cmsis_armclang_corea.h new file mode 100644 index 000000000..4ba92ec33 --- /dev/null +++ b/CMSIS/Core/Include/cmsis_armclang_corea.h @@ -0,0 +1,238 @@ +/**************************************************************************//** + * @file cmsis_armclang_corea.h + * @brief CMSIS compiler armclang (Arm Compiler 6) header file + * @version V5.5.0 + * @date 04. December 2022 + ******************************************************************************/ +/* + * Copyright (c) 2009-2023 Arm Limited. All rights reserved. + * + * SPDX-License-Identifier: Apache-2.0 + * + * Licensed under the Apache License, Version 2.0 (the License); you may + * not use this file except in compliance with the License. 
+ * You may obtain a copy of the License at + * + * www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an AS IS BASIS, WITHOUT + * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +/*lint -esym(9058, IRQn)*/ /* disable MISRA 2012 Rule 2.4 for IRQn */ + +#ifndef __CMSIS_ARMCLANG_COREA_H +#define __CMSIS_ARMCLANG_COREA_H + +#pragma clang system_header /* treat file as system include file */ + +// Include the generic settings: +#include "cmsis_armclang.h" + + +/* ########################### Core Function Access ########################### */ +/** \ingroup CMSIS_Core_FunctionInterface + \defgroup CMSIS_Core_RegAccFunctions CMSIS Core Register Access Functions + @{ + */ + +/** \brief Get CPSR Register + \return CPSR Register value + */ +__STATIC_FORCEINLINE uint32_t __get_CPSR(void) +{ + uint32_t result; + __ASM volatile("MRS %0, cpsr" : "=r" (result) ); + return(result); +} + +/** \brief Set CPSR Register + \param [in] cpsr CPSR value to set + */ +__STATIC_FORCEINLINE void __set_CPSR(uint32_t cpsr) +{ + __ASM volatile ("MSR cpsr, %0" : : "r" (cpsr) : "cc", "memory"); +} + +/** \brief Get Mode + \return Processor Mode + */ +__STATIC_FORCEINLINE uint32_t __get_mode(void) +{ + return (__get_CPSR() & 0x1FU); +} + +/** \brief Set Mode + \param [in] mode Mode value to set + */ +__STATIC_FORCEINLINE void __set_mode(uint32_t mode) +{ + __ASM volatile("MSR cpsr_c, %0" : : "r" (mode) : "memory"); +} + +/** \brief Get Stack Pointer + \return Stack Pointer value + */ +__STATIC_FORCEINLINE uint32_t __get_SP(void) +{ + uint32_t result; + __ASM volatile("MOV %0, sp" : "=r" (result) : : "memory"); + return result; +} + +/** \brief Set Stack Pointer + \param [in] stack Stack Pointer value to set + */ +__STATIC_FORCEINLINE void __set_SP(uint32_t stack) +{ + __ASM volatile("MOV sp, %0" : : "r" (stack) : "memory"); +} + +/** \brief Get USR/SYS Stack Pointer + \return USR/SYS Stack Pointer value + */ +__STATIC_FORCEINLINE uint32_t __get_SP_usr(void) +{ + uint32_t cpsr; + uint32_t result; + __ASM volatile( + "MRS %0, cpsr \n" + "CPS #0x1F \n" // no effect in USR mode + "MOV %1, sp \n" + "MSR cpsr_c, %0 \n" // no effect in USR mode + "ISB" : "=r"(cpsr), "=r"(result) : : "memory" + ); + return result; +} + +/** \brief Set USR/SYS Stack Pointer + \param [in] topOfProcStack USR/SYS Stack Pointer value to set + */ +__STATIC_FORCEINLINE void __set_SP_usr(uint32_t topOfProcStack) +{ + uint32_t cpsr; + __ASM volatile( + "MRS %0, cpsr \n" + "CPS #0x1F \n" // no effect in USR mode + "MOV sp, %1 \n" + "MSR cpsr_c, %0 \n" // no effect in USR mode + "ISB" : "=r"(cpsr) : "r" (topOfProcStack) : "memory" + ); +} + +/** \brief Get FPEXC + \return Floating Point Exception Control register value + */ +__STATIC_FORCEINLINE uint32_t __get_FPEXC(void) +{ +#if (__FPU_PRESENT == 1) + uint32_t result; + __ASM volatile("VMRS %0, fpexc" : "=r" (result) : : "memory"); + return(result); +#else + return(0); +#endif +} + +/** \brief Set FPEXC + \param [in] fpexc Floating Point Exception Control value to set + */ +__STATIC_FORCEINLINE void __set_FPEXC(uint32_t fpexc) +{ +#if (__FPU_PRESENT == 1) + __ASM volatile ("VMSR fpexc, %0" : : "r" (fpexc) : "memory"); +#endif +} + + +/* + * Include common core functions to access Coprocessor 15 registers + */ + +#define __get_CP(cp, op1, Rt, CRn, CRm, op2) __ASM 
volatile("MRC p" # cp ", " # op1 ", %0, c" # CRn ", c" # CRm ", " # op2 : "=r" (Rt) : : "memory" ) +#define __set_CP(cp, op1, Rt, CRn, CRm, op2) __ASM volatile("MCR p" # cp ", " # op1 ", %0, c" # CRn ", c" # CRm ", " # op2 : : "r" (Rt) : "memory" ) +#define __get_CP64(cp, op1, Rt, CRm) __ASM volatile("MRRC p" # cp ", " # op1 ", %Q0, %R0, c" # CRm : "=r" (Rt) : : "memory" ) +#define __set_CP64(cp, op1, Rt, CRm) __ASM volatile("MCRR p" # cp ", " # op1 ", %Q0, %R0, c" # CRm : : "r" (Rt) : "memory" ) + +#if __CORTEX_A == 5 ||__CORTEX_A == 7 || __CORTEX_A == 9 + #include "armv7a_cp15.h" +#elif __CORTEX_A == 35 || __CORTEX_A == 53 || __CORTEX_A == 57 + #include "armv8a_system_control.h" +#else + #warning "Unknown or unsupported core" +#endif + +/** \brief Enable Floating Point Unit + + Critical section, called from undef handler, so systick is disabled + */ +__STATIC_INLINE void __FPU_Enable(void) +{ + __ASM volatile( + //Permit access to VFP/NEON, registers by modifying CPACR + " MRC p15,0,R1,c1,c0,2 \n" + " ORR R1,R1,#0x00F00000 \n" + " MCR p15,0,R1,c1,c0,2 \n" + + //Ensure that subsequent instructions occur in the context of VFP/NEON access permitted + " ISB \n" + + //Enable VFP/NEON + " VMRS R1,FPEXC \n" + " ORR R1,R1,#0x40000000 \n" + " VMSR FPEXC,R1 \n" + + //Initialise VFP/NEON registers to 0 + " MOV R2,#0 \n" + + //Initialise D16 registers to 0 + " VMOV D0, R2,R2 \n" + " VMOV D1, R2,R2 \n" + " VMOV D2, R2,R2 \n" + " VMOV D3, R2,R2 \n" + " VMOV D4, R2,R2 \n" + " VMOV D5, R2,R2 \n" + " VMOV D6, R2,R2 \n" + " VMOV D7, R2,R2 \n" + " VMOV D8, R2,R2 \n" + " VMOV D9, R2,R2 \n" + " VMOV D10,R2,R2 \n" + " VMOV D11,R2,R2 \n" + " VMOV D12,R2,R2 \n" + " VMOV D13,R2,R2 \n" + " VMOV D14,R2,R2 \n" + " VMOV D15,R2,R2 \n" + +#if (defined(__ARM_NEON) && (__ARM_NEON == 1)) + //Initialise D32 registers to 0 + " VMOV D16,R2,R2 \n" + " VMOV D17,R2,R2 \n" + " VMOV D18,R2,R2 \n" + " VMOV D19,R2,R2 \n" + " VMOV D20,R2,R2 \n" + " VMOV D21,R2,R2 \n" + " VMOV D22,R2,R2 \n" + " VMOV D23,R2,R2 \n" + " VMOV D24,R2,R2 \n" + " VMOV D25,R2,R2 \n" + " VMOV D26,R2,R2 \n" + " VMOV D27,R2,R2 \n" + " VMOV D28,R2,R2 \n" + " VMOV D29,R2,R2 \n" + " VMOV D30,R2,R2 \n" + " VMOV D31,R2,R2 \n" +#endif + + //Initialise FPSCR to a known state + " VMRS R1,FPSCR \n" + " LDR R2,=0x00086060 \n" //Mask off all bits that do not have to be preserved. Non-preserved bits can/should be zero. + " AND R1,R1,R2 \n" + " VMSR FPSCR,R1 " + : : : "cc", "r1", "r2" + ); +} + +#endif /* __CMSIS_ARMCLANG_COREA_H */ diff --git a/CMSIS/Core/Include/cmsis_armclang_corem.h b/CMSIS/Core/Include/cmsis_armclang_corem.h new file mode 100644 index 000000000..e0b21581d --- /dev/null +++ b/CMSIS/Core/Include/cmsis_armclang_corem.h @@ -0,0 +1,670 @@ +/**************************************************************************//** + * @file cmsis_armclang_corem.h + * @brief CMSIS compiler armclang (Arm Compiler 6) header file + * @version V6.0.0 + * @date 4. August 2023 + ******************************************************************************/ +/* + * Copyright (c) 2009-2023 Arm Limited. All rights reserved. + * + * SPDX-License-Identifier: Apache-2.0 + * + * Licensed under the Apache License, Version 2.0 (the License); you may + * not use this file except in compliance with the License. 
+ * You may obtain a copy of the License at + * + * www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an AS IS BASIS, WITHOUT + * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +#ifndef __CMSIS_ARMCLANG_COREM_H +#define __CMSIS_ARMCLANG_COREM_H + +#pragma clang system_header /* treat file as system include file */ + +// Include the generic settings: +#include "cmsis_armclang.h" + +/* ######################### Startup and Lowlevel Init ######################## */ + +#ifndef __PROGRAM_START + #define __PROGRAM_START __main +#endif + +#ifndef __INITIAL_SP + #define __INITIAL_SP Image$$ARM_LIB_STACK$$ZI$$Limit +#endif + +#ifndef __STACK_LIMIT + #define __STACK_LIMIT Image$$ARM_LIB_STACK$$ZI$$Base +#endif + +#ifndef __VECTOR_TABLE + #define __VECTOR_TABLE __Vectors +#endif + +#ifndef __VECTOR_TABLE_ATTRIBUTE + #define __VECTOR_TABLE_ATTRIBUTE __attribute__((used, section("RESET"))) +#endif + +#if (__ARM_FEATURE_CMSE == 3) + #ifndef __STACK_SEAL + #define __STACK_SEAL Image$$STACKSEAL$$ZI$$Base + #endif + + #ifndef __TZ_STACK_SEAL_SIZE + #define __TZ_STACK_SEAL_SIZE 8U + #endif + + #ifndef __TZ_STACK_SEAL_VALUE + #define __TZ_STACK_SEAL_VALUE 0xFEF5EDA5FEF5EDA5ULL + #endif + + __STATIC_FORCEINLINE void __TZ_set_STACKSEAL_S (uint32_t* stackTop) { + *((uint64_t *)stackTop) = __TZ_STACK_SEAL_VALUE; + } +#endif + + +/* ########################### Core Function Access ########################### */ +/** \ingroup CMSIS_Core_FunctionInterface + \defgroup CMSIS_Core_RegAccFunctions CMSIS Core Register Access Functions + @{ + */ + + +/** + \brief Get Control Register + \details Returns the content of the Control Register. + \return Control Register value + */ +__STATIC_FORCEINLINE uint32_t __get_CONTROL(void) +{ + uint32_t result; + + __ASM volatile ("MRS %0, control" : "=r" (result) ); + return (result); +} + + +#if (__ARM_FEATURE_CMSE == 3) + /** + \brief Get Control Register (non-secure) + \details Returns the content of the non-secure Control Register when in secure mode. + \return non-secure Control Register value + */ + __STATIC_FORCEINLINE uint32_t __TZ_get_CONTROL_NS(void) + { + uint32_t result; + + __ASM volatile ("MRS %0, control_ns" : "=r" (result) ); + return (result); + } +#endif + + +/** + \brief Set Control Register + \details Writes the given value to the Control Register. + \param [in] control Control Register value to set + */ +__STATIC_FORCEINLINE void __set_CONTROL(uint32_t control) +{ + __ASM volatile ("MSR control, %0" : : "r" (control) : "memory"); + __ISB(); +} + + +#if (__ARM_FEATURE_CMSE == 3) + /** + \brief Set Control Register (non-secure) + \details Writes the given value to the non-secure Control Register when in secure state. + \param [in] control Control Register value to set + */ + __STATIC_FORCEINLINE void __TZ_set_CONTROL_NS(uint32_t control) + { + __ASM volatile ("MSR control_ns, %0" : : "r" (control) : "memory"); + __ISB(); + } +#endif + + +/** + \brief Get IPSR Register + \details Returns the content of the IPSR Register. + \return IPSR Register value + */ +__STATIC_FORCEINLINE uint32_t __get_IPSR(void) +{ + uint32_t result; + + __ASM volatile ("MRS %0, ipsr" : "=r" (result) ); + return (result); +} + + +/** + \brief Get APSR Register + \details Returns the content of the APSR Register. 
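+   Illustrative one-liner (the variable name is hypothetical):
+   \code
+     uint32_t apsr = __get_APSR();   // N, Z, C, V, Q and GE flags
+   \endcode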
+ \return APSR Register value + */ +__STATIC_FORCEINLINE uint32_t __get_APSR(void) +{ + uint32_t result; + + __ASM volatile ("MRS %0, apsr" : "=r" (result) ); + return (result); +} + + +/** + \brief Get xPSR Register + \details Returns the content of the xPSR Register. + \return xPSR Register value + */ +__STATIC_FORCEINLINE uint32_t __get_xPSR(void) +{ + uint32_t result; + + __ASM volatile ("MRS %0, xpsr" : "=r" (result) ); + return (result); +} + + +/** + \brief Get Process Stack Pointer + \details Returns the current value of the Process Stack Pointer (PSP). + \return PSP Register value + */ +__STATIC_FORCEINLINE uint32_t __get_PSP(void) +{ + uint32_t result; + + __ASM volatile ("MRS %0, psp" : "=r" (result) ); + return (result); +} + + +#if (__ARM_FEATURE_CMSE == 3) + /** + \brief Get Process Stack Pointer (non-secure) + \details Returns the current value of the non-secure Process Stack Pointer (PSP) when in secure state. + \return PSP Register value + */ + __STATIC_FORCEINLINE uint32_t __TZ_get_PSP_NS(void) + { + uint32_t result; + + __ASM volatile ("MRS %0, psp_ns" : "=r" (result) ); + return (result); + } +#endif + + +/** + \brief Set Process Stack Pointer + \details Assigns the given value to the Process Stack Pointer (PSP). + \param [in] topOfProcStack Process Stack Pointer value to set + */ +__STATIC_FORCEINLINE void __set_PSP(uint32_t topOfProcStack) +{ + __ASM volatile ("MSR psp, %0" : : "r" (topOfProcStack) : ); +} + + +#if (__ARM_FEATURE_CMSE == 3) + /** + \brief Set Process Stack Pointer (non-secure) + \details Assigns the given value to the non-secure Process Stack Pointer (PSP) when in secure state. + \param [in] topOfProcStack Process Stack Pointer value to set + */ + __STATIC_FORCEINLINE void __TZ_set_PSP_NS(uint32_t topOfProcStack) + { + __ASM volatile ("MSR psp_ns, %0" : : "r" (topOfProcStack) : ); + } +#endif + + +/** + \brief Get Main Stack Pointer + \details Returns the current value of the Main Stack Pointer (MSP). + \return MSP Register value + */ +__STATIC_FORCEINLINE uint32_t __get_MSP(void) +{ + uint32_t result; + + __ASM volatile ("MRS %0, msp" : "=r" (result) ); + return (result); +} + + +#if (__ARM_FEATURE_CMSE == 3) + /** + \brief Get Main Stack Pointer (non-secure) + \details Returns the current value of the non-secure Main Stack Pointer (MSP) when in secure state. + \return MSP Register value + */ + __STATIC_FORCEINLINE uint32_t __TZ_get_MSP_NS(void) + { + uint32_t result; + + __ASM volatile ("MRS %0, msp_ns" : "=r" (result) ); + return (result); + } +#endif + + +/** + \brief Set Main Stack Pointer + \details Assigns the given value to the Main Stack Pointer (MSP). + \param [in] topOfMainStack Main Stack Pointer value to set + */ +__STATIC_FORCEINLINE void __set_MSP(uint32_t topOfMainStack) +{ + __ASM volatile ("MSR msp, %0" : : "r" (topOfMainStack) : ); +} + + +#if (__ARM_FEATURE_CMSE == 3) + /** + \brief Set Main Stack Pointer (non-secure) + \details Assigns the given value to the non-secure Main Stack Pointer (MSP) when in secure state. + \param [in] topOfMainStack Main Stack Pointer value to set + */ + __STATIC_FORCEINLINE void __TZ_set_MSP_NS(uint32_t topOfMainStack) + { + __ASM volatile ("MSR msp_ns, %0" : : "r" (topOfMainStack) : ); + } +#endif + + +#if (__ARM_FEATURE_CMSE == 3) + /** + \brief Get Stack Pointer (non-secure) + \details Returns the current value of the non-secure Stack Pointer (SP) when in secure state. 
+ \return SP Register value + */ + __STATIC_FORCEINLINE uint32_t __TZ_get_SP_NS(void) + { + uint32_t result; + + __ASM volatile ("MRS %0, sp_ns" : "=r" (result) ); + return (result); + } + + + /** + \brief Set Stack Pointer (non-secure) + \details Assigns the given value to the non-secure Stack Pointer (SP) when in secure state. + \param [in] topOfStack Stack Pointer value to set + */ + __STATIC_FORCEINLINE void __TZ_set_SP_NS(uint32_t topOfStack) + { + __ASM volatile ("MSR sp_ns, %0" : : "r" (topOfStack) : ); + } +#endif + + +/** + \brief Get Priority Mask + \details Returns the current state of the priority mask bit from the Priority Mask Register. + \return Priority Mask value + */ +__STATIC_FORCEINLINE uint32_t __get_PRIMASK(void) +{ + uint32_t result; + + __ASM volatile ("MRS %0, primask" : "=r" (result) ); + return (result); +} + + +#if (__ARM_FEATURE_CMSE == 3) + /** + \brief Get Priority Mask (non-secure) + \details Returns the current state of the non-secure priority mask bit from the Priority Mask Register when in secure state. + \return Priority Mask value + */ + __STATIC_FORCEINLINE uint32_t __TZ_get_PRIMASK_NS(void) + { + uint32_t result; + + __ASM volatile ("MRS %0, primask_ns" : "=r" (result) ); + return (result); + } +#endif + + +/** + \brief Set Priority Mask + \details Assigns the given value to the Priority Mask Register. + \param [in] priMask Priority Mask + */ +__STATIC_FORCEINLINE void __set_PRIMASK(uint32_t priMask) +{ + __ASM volatile ("MSR primask, %0" : : "r" (priMask) : "memory"); +} + + +#if (__ARM_FEATURE_CMSE == 3) + /** + \brief Set Priority Mask (non-secure) + \details Assigns the given value to the non-secure Priority Mask Register when in secure state. + \param [in] priMask Priority Mask + */ + __STATIC_FORCEINLINE void __TZ_set_PRIMASK_NS(uint32_t priMask) + { + __ASM volatile ("MSR primask_ns, %0" : : "r" (priMask) : "memory"); + } +#endif + + +#if (__ARM_ARCH_ISA_THUMB >= 2) + /** + \brief Get Base Priority + \details Returns the current value of the Base Priority register. + \return Base Priority register value + */ + __STATIC_FORCEINLINE uint32_t __get_BASEPRI(void) + { + uint32_t result; + + __ASM volatile ("MRS %0, basepri" : "=r" (result) ); + return (result); + } + + + #if (__ARM_FEATURE_CMSE == 3) + /** + \brief Get Base Priority (non-secure) + \details Returns the current value of the non-secure Base Priority register when in secure state. + \return Base Priority register value + */ + __STATIC_FORCEINLINE uint32_t __TZ_get_BASEPRI_NS(void) + { + uint32_t result; + + __ASM volatile ("MRS %0, basepri_ns" : "=r" (result) ); + return (result); + } + #endif + + + /** + \brief Set Base Priority + \details Assigns the given value to the Base Priority register. + \param [in] basePri Base Priority value to set + */ + __STATIC_FORCEINLINE void __set_BASEPRI(uint32_t basePri) + { + __ASM volatile ("MSR basepri, %0" : : "r" (basePri) : "memory"); + } + + + #if (__ARM_FEATURE_CMSE == 3) + /** + \brief Set Base Priority (non-secure) + \details Assigns the given value to the non-secure Base Priority register when in secure state. + \param [in] basePri Base Priority value to set + */ + __STATIC_FORCEINLINE void __TZ_set_BASEPRI_NS(uint32_t basePri) + { + __ASM volatile ("MSR basepri_ns, %0" : : "r" (basePri) : "memory"); + } + #endif + + + /** + \brief Set Base Priority with condition + \details Assigns the given value to the Base Priority register only if BASEPRI masking is disabled, + or the new value increases the BASEPRI priority level. 
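+              Illustrative sketch only; 0x80U stands in for a device-specific priority level:
+              \code
+                uint32_t basepri = __get_BASEPRI();  // save the current mask
+                __set_BASEPRI_MAX(0x80U);            // raise the mask only if this tightens priority
+                // ... short timing-critical section ...
+                __set_BASEPRI(basepri);              // restore the previous mask
+              \endcode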
+ \param [in] basePri Base Priority value to set + */ + __STATIC_FORCEINLINE void __set_BASEPRI_MAX(uint32_t basePri) + { + __ASM volatile ("MSR basepri_max, %0" : : "r" (basePri) : "memory"); + } + + + /** + \brief Get Fault Mask + \details Returns the current value of the Fault Mask register. + \return Fault Mask register value + */ + __STATIC_FORCEINLINE uint32_t __get_FAULTMASK(void) + { + uint32_t result; + + __ASM volatile ("MRS %0, faultmask" : "=r" (result) ); + return (result); + } + + + #if (__ARM_FEATURE_CMSE == 3) + /** + \brief Get Fault Mask (non-secure) + \details Returns the current value of the non-secure Fault Mask register when in secure state. + \return Fault Mask register value + */ + __STATIC_FORCEINLINE uint32_t __TZ_get_FAULTMASK_NS(void) + { + uint32_t result; + + __ASM volatile ("MRS %0, faultmask_ns" : "=r" (result) ); + return (result); + } + #endif + + + /** + \brief Set Fault Mask + \details Assigns the given value to the Fault Mask register. + \param [in] faultMask Fault Mask value to set + */ + __STATIC_FORCEINLINE void __set_FAULTMASK(uint32_t faultMask) + { + __ASM volatile ("MSR faultmask, %0" : : "r" (faultMask) : "memory"); + } + + + #if (__ARM_FEATURE_CMSE == 3) + /** + \brief Set Fault Mask (non-secure) + \details Assigns the given value to the non-secure Fault Mask register when in secure state. + \param [in] faultMask Fault Mask value to set + */ + __STATIC_FORCEINLINE void __TZ_set_FAULTMASK_NS(uint32_t faultMask) + { + __ASM volatile ("MSR faultmask_ns, %0" : : "r" (faultMask) : "memory"); + } + #endif +#endif /* (__ARM_ARCH_ISA_THUMB >= 2) */ + + +#if (__ARM_ARCH >= 8) + /** + \brief Get Process Stack Pointer Limit + Devices without ARMv8-M Main Extensions (i.e. Cortex-M23) lack the non-secure + Stack Pointer Limit register hence zero is returned always in non-secure + mode. + + \details Returns the current value of the Process Stack Pointer Limit (PSPLIM). + \return PSPLIM Register value + */ + __STATIC_FORCEINLINE uint32_t __get_PSPLIM(void) + { + #if (((__ARM_ARCH_8M_MAIN__ < 1) && \ + (__ARM_ARCH_8_1M_MAIN__ < 1) ) && \ + (__ARM_FEATURE_CMSE < 3) ) + // without main extensions, the non-secure PSPLIM is RAZ/WI + return (0U); + #else + uint32_t result; + __ASM volatile ("MRS %0, psplim" : "=r" (result) ); + return (result); + #endif + } + + #if (__ARM_FEATURE_CMSE == 3) + /** + \brief Get Process Stack Pointer Limit (non-secure) + Devices without ARMv8-M Main Extensions (i.e. Cortex-M23) lack the non-secure + Stack Pointer Limit register hence zero is returned always. + + \details Returns the current value of the non-secure Process Stack Pointer Limit (PSPLIM) when in secure state. + \return PSPLIM Register value + */ + __STATIC_FORCEINLINE uint32_t __TZ_get_PSPLIM_NS(void) + { + #if ((__ARM_ARCH_8M_MAIN__ < 1) && (__ARM_ARCH_8_1M_MAIN__ < 1)) + // without main extensions, the non-secure PSPLIM is RAZ/WI + return (0U); + #else + uint32_t result; + __ASM volatile ("MRS %0, psplim_ns" : "=r" (result) ); + return (result); + #endif + } + #endif + + + /** + \brief Set Process Stack Pointer Limit + Devices without ARMv8-M Main Extensions (i.e. Cortex-M23) lack the non-secure + Stack Pointer Limit register hence the write is silently ignored in non-secure + mode. + + \details Assigns the given value to the Process Stack Pointer Limit (PSPLIM). 
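+   Illustrative sketch only; __StackLimit is an assumed linker-provided symbol,
+   not something defined by this header:
+   \code
+     extern uint32_t __StackLimit;
+     __set_PSPLIM((uint32_t)&__StackLimit);  // overflowing the process stack now raises a stack-limit fault
+   \endcode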
+ \param [in] ProcStackPtrLimit Process Stack Pointer Limit value to set + */ + __STATIC_FORCEINLINE void __set_PSPLIM(uint32_t ProcStackPtrLimit) + { + #if (((__ARM_ARCH_8M_MAIN__ < 1) && \ + (__ARM_ARCH_8_1M_MAIN__ < 1) ) && \ + (__ARM_FEATURE_CMSE < 3) ) + // without main extensions, the non-secure PSPLIM is RAZ/WI + (void)ProcStackPtrLimit; + #else + __ASM volatile ("MSR psplim, %0" : : "r" (ProcStackPtrLimit)); + #endif + } + + + #if (__ARM_FEATURE_CMSE == 3) + /** + \brief Set Process Stack Pointer (non-secure) + Devices without ARMv8-M Main Extensions (i.e. Cortex-M23) lack the non-secure + Stack Pointer Limit register hence the write is silently ignored. + + \details Assigns the given value to the non-secure Process Stack Pointer Limit (PSPLIM) when in secure state. + \param [in] ProcStackPtrLimit Process Stack Pointer Limit value to set + */ + __STATIC_FORCEINLINE void __TZ_set_PSPLIM_NS(uint32_t ProcStackPtrLimit) + { + #if ((__ARM_ARCH_8M_MAIN__ < 1) && (__ARM_ARCH_8_1M_MAIN__ < 1)) + // without main extensions, the non-secure PSPLIM is RAZ/WI + (void)ProcStackPtrLimit; + #else + __ASM volatile ("MSR psplim_ns, %0\n" : : "r" (ProcStackPtrLimit)); + #endif + } + #endif + + + /** + \brief Get Main Stack Pointer Limit + Devices without ARMv8-M Main Extensions (i.e. Cortex-M23) lack the non-secure + Stack Pointer Limit register hence zero is returned always. + + \details Returns the current value of the Main Stack Pointer Limit (MSPLIM). + \return MSPLIM Register value + */ + __STATIC_FORCEINLINE uint32_t __get_MSPLIM(void) + { + #if (((__ARM_ARCH_8M_MAIN__ < 1) && \ + (__ARM_ARCH_8_1M_MAIN__ < 1) ) && \ + (__ARM_FEATURE_CMSE < 3) ) + // without main extensions, the non-secure MSPLIM is RAZ/WI + return (0U); + #else + uint32_t result; + __ASM volatile ("MRS %0, msplim" : "=r" (result) ); + return (result); + #endif + } + + + #if (__ARM_FEATURE_CMSE == 3) + /** + \brief Get Main Stack Pointer Limit (non-secure) + Devices without ARMv8-M Main Extensions (i.e. Cortex-M23) lack the non-secure + Stack Pointer Limit register hence zero is returned always. + + \details Returns the current value of the non-secure Main Stack Pointer Limit(MSPLIM) when in secure state. + \return MSPLIM Register value + */ + __STATIC_FORCEINLINE uint32_t __TZ_get_MSPLIM_NS(void) + { + #if ((__ARM_ARCH_8M_MAIN__ < 1) && (__ARM_ARCH_8_1M_MAIN__ < 1)) + // without main extensions, the non-secure MSPLIM is RAZ/WI + return (0U); + #else + uint32_t result; + __ASM volatile ("MRS %0, msplim_ns" : "=r" (result) ); + return (result); + #endif + } + #endif + + + /** + \brief Set Main Stack Pointer Limit + Devices without ARMv8-M Main Extensions (i.e. Cortex-M23) lack the non-secure + Stack Pointer Limit register hence the write is silently ignored. + + \details Assigns the given value to the Main Stack Pointer Limit (MSPLIM). + \param [in] MainStackPtrLimit Main Stack Pointer Limit value to set + */ + __STATIC_FORCEINLINE void __set_MSPLIM(uint32_t MainStackPtrLimit) + { + #if (((__ARM_ARCH_8M_MAIN__ < 1) && \ + (__ARM_ARCH_8_1M_MAIN__ < 1) ) && \ + (__ARM_FEATURE_CMSE < 3) ) + // without main extensions, the non-secure MSPLIM is RAZ/WI + (void)MainStackPtrLimit; + #else + __ASM volatile ("MSR msplim, %0" : : "r" (MainStackPtrLimit)); + #endif + } + + + #if (__ARM_FEATURE_CMSE == 3) + /** + \brief Set Main Stack Pointer Limit (non-secure) + Devices without ARMv8-M Main Extensions (i.e. Cortex-M23) lack the non-secure + Stack Pointer Limit register hence the write is silently ignored. 
+ + \details Assigns the given value to the non-secure Main Stack Pointer Limit (MSPLIM) when in secure state. + \param [in] MainStackPtrLimit Main Stack Pointer value to set + */ + __STATIC_FORCEINLINE void __TZ_set_MSPLIM_NS(uint32_t MainStackPtrLimit) + { + #if ((__ARM_ARCH_8M_MAIN__ < 1) && (__ARM_ARCH_8_1M_MAIN__ < 1)) + // without main extensions, the non-secure MSPLIM is RAZ/WI + (void)MainStackPtrLimit; + #else + __ASM volatile ("MSR msplim_ns, %0" : : "r" (MainStackPtrLimit)); + #endif + } + #endif +#endif /* (__ARM_ARCH >= 8) */ +/** @} end of CMSIS_Core_RegAccFunctions */ + + +#endif /* __CMSIS_ARMCLANG_COREM_H */ diff --git a/CMSIS/Core/Include/cmsis_armclang_corer.h b/CMSIS/Core/Include/cmsis_armclang_corer.h new file mode 100644 index 000000000..4ccad7dfc --- /dev/null +++ b/CMSIS/Core/Include/cmsis_armclang_corer.h @@ -0,0 +1,150 @@ +/**************************************************************************//** + * @file cmsis_armclang_corer.h + * @brief CMSIS compiler armclang (Arm Compiler 6) header file + * @version V5.0.0 + * @date 04. December 2022 + ******************************************************************************/ +/* + * Copyright (c) 2009-2023 Arm Limited. All rights reserved. + * + * SPDX-License-Identifier: Apache-2.0 + * + * Licensed under the Apache License, Version 2.0 (the License); you may + * not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an AS IS BASIS, WITHOUT + * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +#ifndef __CMSIS_ARMCLANG_CORER_H +#define __CMSIS_ARMCLANG_CORER_H + +#pragma clang system_header /* treat file as system include file */ + +// Include the generic settings: +#include "cmsis_armclang.h" + + +/* ########################### Core Function Access ########################### */ +/** \ingroup CMSIS_Core_FunctionInterface + \defgroup CMSIS_Core_RegAccFunctions CMSIS Core Register Access Functions + @{ + */ + +/** \brief Get CPSR Register + \return CPSR Register value + */ +__STATIC_FORCEINLINE uint32_t __get_CPSR(void) +{ + uint32_t result; + __ASM volatile("MRS %0, cpsr" : "=r" (result) ); + return(result); +} + +/** \brief Set CPSR Register + \param [in] cpsr CPSR value to set + */ +__STATIC_FORCEINLINE void __set_CPSR(uint32_t cpsr) +{ + __ASM volatile ("MSR cpsr, %0" : : "r" (cpsr) : "cc", "memory"); +} + +/** \brief Get Mode + \return Processor Mode + */ +__STATIC_FORCEINLINE uint32_t __get_mode(void) +{ + return (__get_CPSR() & 0x1FU); +} + +/** \brief Set Mode + \param [in] mode Mode value to set + */ +__STATIC_FORCEINLINE void __set_mode(uint32_t mode) +{ + __ASM volatile("MSR cpsr_c, %0" : : "r" (mode) : "memory"); +} + +/** \brief Get Stack Pointer + \return Stack Pointer value + */ +__STATIC_FORCEINLINE uint32_t __get_SP(void) +{ + uint32_t result; + __ASM volatile("MOV %0, sp" : "=r" (result) : : "memory"); + return result; +} + +/** \brief Set Stack Pointer + \param [in] stack Stack Pointer value to set + */ +__STATIC_FORCEINLINE void __set_SP(uint32_t stack) +{ + __ASM volatile("MOV sp, %0" : : "r" (stack) : "memory"); +} + +/** \brief Get USR/SYS Stack Pointer + \return USR/SYS Stack Pointer value + */ +__STATIC_FORCEINLINE uint32_t __get_SP_usr(void) +{ + uint32_t cpsr; + uint32_t result; + __ASM volatile( + "MRS %0, cpsr \n" + "CPS #0x1F \n" // no effect in USR mode + "MOV %1, sp \n" + "MSR cpsr_c, %0 \n" // no effect in USR mode + "ISB" : "=r"(cpsr), "=r"(result) : : "memory" + ); + return result; +} + +/** \brief Set USR/SYS Stack Pointer + \param [in] topOfProcStack USR/SYS Stack Pointer value to set + */ +__STATIC_FORCEINLINE void __set_SP_usr(uint32_t topOfProcStack) +{ + uint32_t cpsr; + __ASM volatile( + "MRS %0, cpsr \n" + "CPS #0x1F \n" // no effect in USR mode + "MOV sp, %1 \n" + "MSR cpsr_c, %0 \n" // no effect in USR mode + "ISB" : "=r"(cpsr) : "r" (topOfProcStack) : "memory" + ); +} + +/** \brief Get FPEXC + \return Floating Point Exception Control register value + */ +__STATIC_FORCEINLINE uint32_t __get_FPEXC(void) +{ +#if (__FPU_PRESENT == 1) + uint32_t result; + __ASM volatile("VMRS %0, fpexc" : "=r" (result) : : "memory"); + return(result); +#else + return(0); +#endif +} + +/** \brief Set FPEXC + \param [in] fpexc Floating Point Exception Control value to set + */ +__STATIC_FORCEINLINE void __set_FPEXC(uint32_t fpexc) +{ +#if (__FPU_PRESENT == 1) + __ASM volatile ("VMSR fpexc, %0" : : "r" (fpexc) : "memory"); +#endif +} + + + +#endif /* __CMSIS_ARMCLANG_CORER_H */ diff --git a/CMSIS/Core/Include/cmsis_clang.h b/CMSIS/Core/Include/cmsis_clang.h index 6747f9cb3..06baa43e7 100644 --- a/CMSIS/Core/Include/cmsis_clang.h +++ b/CMSIS/Core/Include/cmsis_clang.h @@ -54,6 +54,9 @@ #ifndef __NO_RETURN #define __NO_RETURN __attribute__((__noreturn__)) #endif +#ifndef CMSIS_DEPRECATED + #define CMSIS_DEPRECATED __attribute__((deprecated)) +#endif #ifndef __USED #define __USED __attribute__((used)) #endif @@ -113,47 +116,6 @@ #define __ALIAS(x) __attribute__ ((alias(x))) #endif -/* ######################### Startup and Lowlevel Init 
######################## */ -#ifndef __PROGRAM_START -#define __PROGRAM_START _start -#endif - -#ifndef __INITIAL_SP -#define __INITIAL_SP __stack -#endif - -#ifndef __STACK_LIMIT -#define __STACK_LIMIT __stack_limit -#endif - -#ifndef __VECTOR_TABLE -#define __VECTOR_TABLE __Vectors -#endif - -#ifndef __VECTOR_TABLE_ATTRIBUTE -#define __VECTOR_TABLE_ATTRIBUTE __attribute__((used, section(".vectors"))) -#endif - -#if (__ARM_FEATURE_CMSE == 3) -#ifndef __STACK_SEAL -#define __STACK_SEAL __stack_seal -#endif - -#ifndef __TZ_STACK_SEAL_SIZE -#define __TZ_STACK_SEAL_SIZE 8U -#endif - -#ifndef __TZ_STACK_SEAL_VALUE -#define __TZ_STACK_SEAL_VALUE 0xFEF5EDA5FEF5EDA5ULL -#endif - - -__STATIC_FORCEINLINE void __TZ_set_STACKSEAL_S (uint32_t* stackTop) { - *((uint64_t *)stackTop) = __TZ_STACK_SEAL_VALUE; -} -#endif - - /* ########################## Core Instruction Access ######################### */ /** \defgroup CMSIS_Core_InstructionInterface CMSIS Core Instruction Interface Access to dedicated instructions @@ -712,294 +674,6 @@ __STATIC_FORCEINLINE void __disable_irq(void) __ASM volatile ("cpsid i" : : : "memory"); } - -/** - \brief Get Control Register - \details Returns the content of the Control Register. - \return Control Register value - */ -__STATIC_FORCEINLINE uint32_t __get_CONTROL(void) -{ - uint32_t result; - - __ASM volatile ("MRS %0, control" : "=r" (result) ); - return (result); -} - - -#if (__ARM_FEATURE_CMSE == 3) -/** - \brief Get Control Register (non-secure) - \details Returns the content of the non-secure Control Register when in secure mode. - \return non-secure Control Register value - */ -__STATIC_FORCEINLINE uint32_t __TZ_get_CONTROL_NS(void) -{ - uint32_t result; - - __ASM volatile ("MRS %0, control_ns" : "=r" (result) ); - return (result); -} -#endif - - -/** - \brief Set Control Register - \details Writes the given value to the Control Register. - \param [in] control Control Register value to set - */ -__STATIC_FORCEINLINE void __set_CONTROL(uint32_t control) -{ - __ASM volatile ("MSR control, %0" : : "r" (control) : "memory"); - __ISB(); -} - - -#if (__ARM_FEATURE_CMSE == 3) -/** - \brief Set Control Register (non-secure) - \details Writes the given value to the non-secure Control Register when in secure state. - \param [in] control Control Register value to set - */ -__STATIC_FORCEINLINE void __TZ_set_CONTROL_NS(uint32_t control) -{ - __ASM volatile ("MSR control_ns, %0" : : "r" (control) : "memory"); - __ISB(); -} -#endif - - -/** - \brief Get IPSR Register - \details Returns the content of the IPSR Register. - \return IPSR Register value - */ -__STATIC_FORCEINLINE uint32_t __get_IPSR(void) -{ - uint32_t result; - - __ASM volatile ("MRS %0, ipsr" : "=r" (result) ); - return (result); -} - - -/** - \brief Get APSR Register - \details Returns the content of the APSR Register. - \return APSR Register value - */ -__STATIC_FORCEINLINE uint32_t __get_APSR(void) -{ - uint32_t result; - - __ASM volatile ("MRS %0, apsr" : "=r" (result) ); - return (result); -} - - -/** - \brief Get xPSR Register - \details Returns the content of the xPSR Register. - \return xPSR Register value - */ -__STATIC_FORCEINLINE uint32_t __get_xPSR(void) -{ - uint32_t result; - - __ASM volatile ("MRS %0, xpsr" : "=r" (result) ); - return (result); -} - - -/** - \brief Get Process Stack Pointer - \details Returns the current value of the Process Stack Pointer (PSP). 
- \return PSP Register value - */ -__STATIC_FORCEINLINE uint32_t __get_PSP(void) -{ - uint32_t result; - - __ASM volatile ("MRS %0, psp" : "=r" (result) ); - return (result); -} - - -#if (__ARM_FEATURE_CMSE == 3) -/** - \brief Get Process Stack Pointer (non-secure) - \details Returns the current value of the non-secure Process Stack Pointer (PSP) when in secure state. - \return PSP Register value - */ -__STATIC_FORCEINLINE uint32_t __TZ_get_PSP_NS(void) -{ - uint32_t result; - - __ASM volatile ("MRS %0, psp_ns" : "=r" (result) ); - return (result); -} -#endif - - -/** - \brief Set Process Stack Pointer - \details Assigns the given value to the Process Stack Pointer (PSP). - \param [in] topOfProcStack Process Stack Pointer value to set - */ -__STATIC_FORCEINLINE void __set_PSP(uint32_t topOfProcStack) -{ - __ASM volatile ("MSR psp, %0" : : "r" (topOfProcStack) : ); -} - - -#if (__ARM_FEATURE_CMSE == 3) -/** - \brief Set Process Stack Pointer (non-secure) - \details Assigns the given value to the non-secure Process Stack Pointer (PSP) when in secure state. - \param [in] topOfProcStack Process Stack Pointer value to set - */ -__STATIC_FORCEINLINE void __TZ_set_PSP_NS(uint32_t topOfProcStack) -{ - __ASM volatile ("MSR psp_ns, %0" : : "r" (topOfProcStack) : ); -} -#endif - - -/** - \brief Get Main Stack Pointer - \details Returns the current value of the Main Stack Pointer (MSP). - \return MSP Register value - */ -__STATIC_FORCEINLINE uint32_t __get_MSP(void) -{ - uint32_t result; - - __ASM volatile ("MRS %0, msp" : "=r" (result) ); - return (result); -} - - -#if (__ARM_FEATURE_CMSE == 3) -/** - \brief Get Main Stack Pointer (non-secure) - \details Returns the current value of the non-secure Main Stack Pointer (MSP) when in secure state. - \return MSP Register value - */ -__STATIC_FORCEINLINE uint32_t __TZ_get_MSP_NS(void) -{ - uint32_t result; - - __ASM volatile ("MRS %0, msp_ns" : "=r" (result) ); - return (result); -} -#endif - - -/** - \brief Set Main Stack Pointer - \details Assigns the given value to the Main Stack Pointer (MSP). - \param [in] topOfMainStack Main Stack Pointer value to set - */ -__STATIC_FORCEINLINE void __set_MSP(uint32_t topOfMainStack) -{ - __ASM volatile ("MSR msp, %0" : : "r" (topOfMainStack) : ); -} - - -#if (__ARM_FEATURE_CMSE == 3) -/** - \brief Set Main Stack Pointer (non-secure) - \details Assigns the given value to the non-secure Main Stack Pointer (MSP) when in secure state. - \param [in] topOfMainStack Main Stack Pointer value to set - */ -__STATIC_FORCEINLINE void __TZ_set_MSP_NS(uint32_t topOfMainStack) -{ - __ASM volatile ("MSR msp_ns, %0" : : "r" (topOfMainStack) : ); -} -#endif - - -#if (__ARM_FEATURE_CMSE == 3) -/** - \brief Get Stack Pointer (non-secure) - \details Returns the current value of the non-secure Stack Pointer (SP) when in secure state. - \return SP Register value - */ -__STATIC_FORCEINLINE uint32_t __TZ_get_SP_NS(void) -{ - uint32_t result; - - __ASM volatile ("MRS %0, sp_ns" : "=r" (result) ); - return (result); -} - - -/** - \brief Set Stack Pointer (non-secure) - \details Assigns the given value to the non-secure Stack Pointer (SP) when in secure state. - \param [in] topOfStack Stack Pointer value to set - */ -__STATIC_FORCEINLINE void __TZ_set_SP_NS(uint32_t topOfStack) -{ - __ASM volatile ("MSR sp_ns, %0" : : "r" (topOfStack) : ); -} -#endif - - -/** - \brief Get Priority Mask - \details Returns the current state of the priority mask bit from the Priority Mask Register. 
- \return Priority Mask value - */ -__STATIC_FORCEINLINE uint32_t __get_PRIMASK(void) -{ - uint32_t result; - - __ASM volatile ("MRS %0, primask" : "=r" (result) ); - return (result); -} - - -#if (__ARM_FEATURE_CMSE == 3) -/** - \brief Get Priority Mask (non-secure) - \details Returns the current state of the non-secure priority mask bit from the Priority Mask Register when in secure state. - \return Priority Mask value - */ -__STATIC_FORCEINLINE uint32_t __TZ_get_PRIMASK_NS(void) -{ - uint32_t result; - - __ASM volatile ("MRS %0, primask_ns" : "=r" (result) ); - return (result); -} -#endif - - -/** - \brief Set Priority Mask - \details Assigns the given value to the Priority Mask Register. - \param [in] priMask Priority Mask - */ -__STATIC_FORCEINLINE void __set_PRIMASK(uint32_t priMask) -{ - __ASM volatile ("MSR primask, %0" : : "r" (priMask) : "memory"); -} - - -#if (__ARM_FEATURE_CMSE == 3) -/** - \brief Set Priority Mask (non-secure) - \details Assigns the given value to the non-secure Priority Mask Register when in secure state. - \param [in] priMask Priority Mask - */ -__STATIC_FORCEINLINE void __TZ_set_PRIMASK_NS(uint32_t priMask) -{ - __ASM volatile ("MSR primask_ns, %0" : : "r" (priMask) : "memory"); -} -#endif - - #if (__ARM_ARCH_ISA_THUMB >= 2) /** \brief Enable FIQ @@ -1021,313 +695,9 @@ __STATIC_FORCEINLINE void __disable_fault_irq(void) { __ASM volatile ("cpsid f" : : : "memory"); } - - -/** - \brief Get Base Priority - \details Returns the current value of the Base Priority register. - \return Base Priority register value - */ -__STATIC_FORCEINLINE uint32_t __get_BASEPRI(void) -{ - uint32_t result; - - __ASM volatile ("MRS %0, basepri" : "=r" (result) ); - return (result); -} - - -#if (__ARM_FEATURE_CMSE == 3) -/** - \brief Get Base Priority (non-secure) - \details Returns the current value of the non-secure Base Priority register when in secure state. - \return Base Priority register value - */ -__STATIC_FORCEINLINE uint32_t __TZ_get_BASEPRI_NS(void) -{ - uint32_t result; - - __ASM volatile ("MRS %0, basepri_ns" : "=r" (result) ); - return (result); -} #endif -/** - \brief Set Base Priority - \details Assigns the given value to the Base Priority register. - \param [in] basePri Base Priority value to set - */ -__STATIC_FORCEINLINE void __set_BASEPRI(uint32_t basePri) -{ - __ASM volatile ("MSR basepri, %0" : : "r" (basePri) : "memory"); -} - - -#if (__ARM_FEATURE_CMSE == 3) -/** - \brief Set Base Priority (non-secure) - \details Assigns the given value to the non-secure Base Priority register when in secure state. - \param [in] basePri Base Priority value to set - */ -__STATIC_FORCEINLINE void __TZ_set_BASEPRI_NS(uint32_t basePri) -{ - __ASM volatile ("MSR basepri_ns, %0" : : "r" (basePri) : "memory"); -} -#endif - - -/** - \brief Set Base Priority with condition - \details Assigns the given value to the Base Priority register only if BASEPRI masking is disabled, - or the new value increases the BASEPRI priority level. - \param [in] basePri Base Priority value to set - */ -__STATIC_FORCEINLINE void __set_BASEPRI_MAX(uint32_t basePri) -{ - __ASM volatile ("MSR basepri_max, %0" : : "r" (basePri) : "memory"); -} - - -/** - \brief Get Fault Mask - \details Returns the current value of the Fault Mask register. 
- \return Fault Mask register value - */ -__STATIC_FORCEINLINE uint32_t __get_FAULTMASK(void) -{ - uint32_t result; - - __ASM volatile ("MRS %0, faultmask" : "=r" (result) ); - return (result); -} - - -#if (__ARM_FEATURE_CMSE == 3) -/** - \brief Get Fault Mask (non-secure) - \details Returns the current value of the non-secure Fault Mask register when in secure state. - \return Fault Mask register value - */ -__STATIC_FORCEINLINE uint32_t __TZ_get_FAULTMASK_NS(void) -{ - uint32_t result; - - __ASM volatile ("MRS %0, faultmask_ns" : "=r" (result) ); - return (result); -} -#endif - - -/** - \brief Set Fault Mask - \details Assigns the given value to the Fault Mask register. - \param [in] faultMask Fault Mask value to set - */ -__STATIC_FORCEINLINE void __set_FAULTMASK(uint32_t faultMask) -{ - __ASM volatile ("MSR faultmask, %0" : : "r" (faultMask) : "memory"); -} - - -#if (__ARM_FEATURE_CMSE == 3) -/** - \brief Set Fault Mask (non-secure) - \details Assigns the given value to the non-secure Fault Mask register when in secure state. - \param [in] faultMask Fault Mask value to set - */ -__STATIC_FORCEINLINE void __TZ_set_FAULTMASK_NS(uint32_t faultMask) -{ - __ASM volatile ("MSR faultmask_ns, %0" : : "r" (faultMask) : "memory"); -} -#endif - -#endif /* (__ARM_ARCH_ISA_THUMB >= 2) */ - - -#if (__ARM_ARCH >= 8) -/** - \brief Get Process Stack Pointer Limit - Devices without ARMv8-M Main Extensions (i.e. Cortex-M23) lack the non-secure - Stack Pointer Limit register hence zero is returned always in non-secure - mode. - - \details Returns the current value of the Process Stack Pointer Limit (PSPLIM). - \return PSPLIM Register value - */ -__STATIC_FORCEINLINE uint32_t __get_PSPLIM(void) -{ -#if (((__ARM_ARCH_8M_MAIN__ < 1) && \ - (__ARM_ARCH_8_1M_MAIN__ < 1) ) && \ - (__ARM_FEATURE_CMSE < 3) ) - /* without main extensions, the non-secure PSPLIM is RAZ/WI */ - return (0U); -#else - uint32_t result; - __ASM volatile ("MRS %0, psplim" : "=r" (result) ); - return (result); -#endif -} - -#if (__ARM_FEATURE_CMSE == 3) -/** - \brief Get Process Stack Pointer Limit (non-secure) - Devices without ARMv8-M Main Extensions (i.e. Cortex-M23) lack the non-secure - Stack Pointer Limit register hence zero is returned always. - - \details Returns the current value of the non-secure Process Stack Pointer Limit (PSPLIM) when in secure state. - \return PSPLIM Register value - */ -__STATIC_FORCEINLINE uint32_t __TZ_get_PSPLIM_NS(void) -{ -#if ((__ARM_ARCH_8M_MAIN__ < 1) && \ - (__ARM_ARCH_8_1M_MAIN__ < 1) ) - /* without main extensions, the non-secure PSPLIM is RAZ/WI */ - return (0U); -#else - uint32_t result; - __ASM volatile ("MRS %0, psplim_ns" : "=r" (result) ); - return (result); -#endif -} -#endif - - -/** - \brief Set Process Stack Pointer Limit - Devices without ARMv8-M Main Extensions (i.e. Cortex-M23) lack the non-secure - Stack Pointer Limit register hence the write is silently ignored in non-secure - mode. - - \details Assigns the given value to the Process Stack Pointer Limit (PSPLIM). 
- \param [in] ProcStackPtrLimit Process Stack Pointer Limit value to set - */ -__STATIC_FORCEINLINE void __set_PSPLIM(uint32_t ProcStackPtrLimit) -{ -#if (((__ARM_ARCH_8M_MAIN__ < 1) && \ - (__ARM_ARCH_8_1M_MAIN__ < 1) ) && \ - (__ARM_FEATURE_CMSE < 3) ) - /* without main extensions, the non-secure PSPLIM is RAZ/WI */ - (void)ProcStackPtrLimit; -#else - __ASM volatile ("MSR psplim, %0" : : "r" (ProcStackPtrLimit)); -#endif -} - - -#if (__ARM_FEATURE_CMSE == 3) -/** - \brief Set Process Stack Pointer (non-secure) - Devices without ARMv8-M Main Extensions (i.e. Cortex-M23) lack the non-secure - Stack Pointer Limit register hence the write is silently ignored. - - \details Assigns the given value to the non-secure Process Stack Pointer Limit (PSPLIM) when in secure state. - \param [in] ProcStackPtrLimit Process Stack Pointer Limit value to set - */ -__STATIC_FORCEINLINE void __TZ_set_PSPLIM_NS(uint32_t ProcStackPtrLimit) -{ -#if ((__ARM_ARCH_8M_MAIN__ < 1) && \ - (__ARM_ARCH_8_1M_MAIN__ < 1) ) - /* without main extensions, the non-secure PSPLIM is RAZ/WI */ - (void)ProcStackPtrLimit; -#else - __ASM volatile ("MSR psplim_ns, %0\n" : : "r" (ProcStackPtrLimit)); -#endif -} -#endif - - -/** - \brief Get Main Stack Pointer Limit - Devices without ARMv8-M Main Extensions (i.e. Cortex-M23) lack the non-secure - Stack Pointer Limit register hence zero is returned always. - - \details Returns the current value of the Main Stack Pointer Limit (MSPLIM). - \return MSPLIM Register value - */ -__STATIC_FORCEINLINE uint32_t __get_MSPLIM(void) -{ -#if (((__ARM_ARCH_8M_MAIN__ < 1) && \ - (__ARM_ARCH_8_1M_MAIN__ < 1) ) && \ - (__ARM_FEATURE_CMSE < 3) ) - /* without main extensions, the non-secure MSPLIM is RAZ/WI */ - return (0U); -#else - uint32_t result; - __ASM volatile ("MRS %0, msplim" : "=r" (result) ); - return (result); -#endif -} - - -#if (__ARM_FEATURE_CMSE == 3) -/** - \brief Get Main Stack Pointer Limit (non-secure) - Devices without ARMv8-M Main Extensions (i.e. Cortex-M23) lack the non-secure - Stack Pointer Limit register hence zero is returned always. - - \details Returns the current value of the non-secure Main Stack Pointer Limit(MSPLIM) when in secure state. - \return MSPLIM Register value - */ -__STATIC_FORCEINLINE uint32_t __TZ_get_MSPLIM_NS(void) -{ -#if ((__ARM_ARCH_8M_MAIN__ < 1) && \ - (__ARM_ARCH_8_1M_MAIN__ < 1) ) - /* without main extensions, the non-secure MSPLIM is RAZ/WI */ - return (0U); -#else - uint32_t result; - __ASM volatile ("MRS %0, msplim_ns" : "=r" (result) ); - return (result); -#endif -} -#endif - - -/** - \brief Set Main Stack Pointer Limit - Devices without ARMv8-M Main Extensions (i.e. Cortex-M23) lack the non-secure - Stack Pointer Limit register hence the write is silently ignored. - - \details Assigns the given value to the Main Stack Pointer Limit (MSPLIM). - \param [in] MainStackPtrLimit Main Stack Pointer Limit value to set - */ -__STATIC_FORCEINLINE void __set_MSPLIM(uint32_t MainStackPtrLimit) -{ -#if (((__ARM_ARCH_8M_MAIN__ < 1) && \ - (__ARM_ARCH_8_1M_MAIN__ < 1) ) && \ - (__ARM_FEATURE_CMSE < 3) ) - /* without main extensions, the non-secure MSPLIM is RAZ/WI */ - (void)MainStackPtrLimit; -#else - __ASM volatile ("MSR msplim, %0" : : "r" (MainStackPtrLimit)); -#endif -} - - -#if (__ARM_FEATURE_CMSE == 3) -/** - \brief Set Main Stack Pointer Limit (non-secure) - Devices without ARMv8-M Main Extensions (i.e. Cortex-M23) lack the non-secure - Stack Pointer Limit register hence the write is silently ignored. 
- - \details Assigns the given value to the non-secure Main Stack Pointer Limit (MSPLIM) when in secure state. - \param [in] MainStackPtrLimit Main Stack Pointer value to set - */ -__STATIC_FORCEINLINE void __TZ_set_MSPLIM_NS(uint32_t MainStackPtrLimit) -{ -#if ((__ARM_ARCH_8M_MAIN__ < 1) && \ - (__ARM_ARCH_8_1M_MAIN__ < 1) ) - /* without main extensions, the non-secure MSPLIM is RAZ/WI */ - (void)MainStackPtrLimit; -#else - __ASM volatile ("MSR msplim_ns, %0" : : "r" (MainStackPtrLimit)); -#endif -} -#endif - -#endif /* (__ARM_ARCH >= 8) */ - /** \brief Get FPSCR diff --git a/CMSIS/Core/Include/cmsis_clang_corea.h b/CMSIS/Core/Include/cmsis_clang_corea.h new file mode 100644 index 000000000..25c22081b --- /dev/null +++ b/CMSIS/Core/Include/cmsis_clang_corea.h @@ -0,0 +1,236 @@ +/**************************************************************************//** + * @file cmsis_armclang_corea.h + * @brief CMSIS compiler armclang (Arm Compiler 6) header file + * @version V5.5.0 + * @date 04. December 2022 + ******************************************************************************/ +/* + * Copyright (c) 2009-2023 Arm Limited. All rights reserved. + * + * SPDX-License-Identifier: Apache-2.0 + * + * Licensed under the Apache License, Version 2.0 (the License); you may + * not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an AS IS BASIS, WITHOUT + * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +#ifndef __CMSIS_CLANG_COREA_H +#define __CMSIS_CLANG_COREA_H + +#pragma clang system_header /* treat file as system include file */ + +// Include the generic settings: +#include "cmsis_clang.h" + + +/* ########################### Core Function Access ########################### */ +/** \ingroup CMSIS_Core_FunctionInterface + \defgroup CMSIS_Core_RegAccFunctions CMSIS Core Register Access Functions + @{ + */ + +/** \brief Get CPSR Register + \return CPSR Register value + */ +__STATIC_FORCEINLINE uint32_t __get_CPSR(void) +{ + uint32_t result; + __ASM volatile("MRS %0, cpsr" : "=r" (result) ); + return(result); +} + +/** \brief Set CPSR Register + \param [in] cpsr CPSR value to set + */ +__STATIC_FORCEINLINE void __set_CPSR(uint32_t cpsr) +{ + __ASM volatile ("MSR cpsr, %0" : : "r" (cpsr) : "cc", "memory"); +} + +/** \brief Get Mode + \return Processor Mode + */ +__STATIC_FORCEINLINE uint32_t __get_mode(void) +{ + return (__get_CPSR() & 0x1FU); +} + +/** \brief Set Mode + \param [in] mode Mode value to set + */ +__STATIC_FORCEINLINE void __set_mode(uint32_t mode) +{ + __ASM volatile("MSR cpsr_c, %0" : : "r" (mode) : "memory"); +} + +/** \brief Get Stack Pointer + \return Stack Pointer value + */ +__STATIC_FORCEINLINE uint32_t __get_SP(void) +{ + uint32_t result; + __ASM volatile("MOV %0, sp" : "=r" (result) : : "memory"); + return result; +} + +/** \brief Set Stack Pointer + \param [in] stack Stack Pointer value to set + */ +__STATIC_FORCEINLINE void __set_SP(uint32_t stack) +{ + __ASM volatile("MOV sp, %0" : : "r" (stack) : "memory"); +} + +/** \brief Get USR/SYS Stack Pointer + \return USR/SYS Stack Pointer value + */ +__STATIC_FORCEINLINE uint32_t __get_SP_usr(void) +{ + uint32_t cpsr; + uint32_t result; + __ASM volatile( + "MRS %0, cpsr \n" + "CPS 
#0x1F \n" // no effect in USR mode + "MOV %1, sp \n" + "MSR cpsr_c, %0 \n" // no effect in USR mode + "ISB" : "=r"(cpsr), "=r"(result) : : "memory" + ); + return result; +} + +/** \brief Set USR/SYS Stack Pointer + \param [in] topOfProcStack USR/SYS Stack Pointer value to set + */ +__STATIC_FORCEINLINE void __set_SP_usr(uint32_t topOfProcStack) +{ + uint32_t cpsr; + __ASM volatile( + "MRS %0, cpsr \n" + "CPS #0x1F \n" // no effect in USR mode + "MOV sp, %1 \n" + "MSR cpsr_c, %0 \n" // no effect in USR mode + "ISB" : "=r"(cpsr) : "r" (topOfProcStack) : "memory" + ); +} + +/** \brief Get FPEXC + \return Floating Point Exception Control register value + */ +__STATIC_FORCEINLINE uint32_t __get_FPEXC(void) +{ +#if (__FPU_PRESENT == 1) + uint32_t result; + __ASM volatile("VMRS %0, fpexc" : "=r" (result) : : "memory"); + return(result); +#else + return(0); +#endif +} + +/** \brief Set FPEXC + \param [in] fpexc Floating Point Exception Control value to set + */ +__STATIC_FORCEINLINE void __set_FPEXC(uint32_t fpexc) +{ +#if (__FPU_PRESENT == 1) + __ASM volatile ("VMSR fpexc, %0" : : "r" (fpexc) : "memory"); +#endif +} + + +/* + * Include common core functions to access Coprocessor 15 registers + */ + +#define __get_CP(cp, op1, Rt, CRn, CRm, op2) __ASM volatile("MRC p" # cp ", " # op1 ", %0, c" # CRn ", c" # CRm ", " # op2 : "=r" (Rt) : : "memory" ) +#define __set_CP(cp, op1, Rt, CRn, CRm, op2) __ASM volatile("MCR p" # cp ", " # op1 ", %0, c" # CRn ", c" # CRm ", " # op2 : : "r" (Rt) : "memory" ) +#define __get_CP64(cp, op1, Rt, CRm) __ASM volatile("MRRC p" # cp ", " # op1 ", %Q0, %R0, c" # CRm : "=r" (Rt) : : "memory" ) +#define __set_CP64(cp, op1, Rt, CRm) __ASM volatile("MCRR p" # cp ", " # op1 ", %Q0, %R0, c" # CRm : : "r" (Rt) : "memory" ) + +#if __CORTEX_A == 5 ||__CORTEX_A == 7 || __CORTEX_A == 9 + #include "armv7a_cp15.h" +#elif __CORTEX_A == 35 || __CORTEX_A == 53 || __CORTEX_A == 57 + #include "armv8a_system_control.h" +#else + #warning "Unknown or unsupported core" +#endif + +/** \brief Enable Floating Point Unit + + Critical section, called from undef handler, so systick is disabled + */ +__STATIC_INLINE void __FPU_Enable(void) +{ + __ASM volatile( + //Permit access to VFP/NEON, registers by modifying CPACR + " MRC p15,0,R1,c1,c0,2 \n" + " ORR R1,R1,#0x00F00000 \n" + " MCR p15,0,R1,c1,c0,2 \n" + + //Ensure that subsequent instructions occur in the context of VFP/NEON access permitted + " ISB \n" + + //Enable VFP/NEON + " VMRS R1,FPEXC \n" + " ORR R1,R1,#0x40000000 \n" + " VMSR FPEXC,R1 \n" + + //Initialise VFP/NEON registers to 0 + " MOV R2,#0 \n" + + //Initialise D16 registers to 0 + " VMOV D0, R2,R2 \n" + " VMOV D1, R2,R2 \n" + " VMOV D2, R2,R2 \n" + " VMOV D3, R2,R2 \n" + " VMOV D4, R2,R2 \n" + " VMOV D5, R2,R2 \n" + " VMOV D6, R2,R2 \n" + " VMOV D7, R2,R2 \n" + " VMOV D8, R2,R2 \n" + " VMOV D9, R2,R2 \n" + " VMOV D10,R2,R2 \n" + " VMOV D11,R2,R2 \n" + " VMOV D12,R2,R2 \n" + " VMOV D13,R2,R2 \n" + " VMOV D14,R2,R2 \n" + " VMOV D15,R2,R2 \n" + +#if (defined(__ARM_NEON) && (__ARM_NEON == 1)) + //Initialise D32 registers to 0 + " VMOV D16,R2,R2 \n" + " VMOV D17,R2,R2 \n" + " VMOV D18,R2,R2 \n" + " VMOV D19,R2,R2 \n" + " VMOV D20,R2,R2 \n" + " VMOV D21,R2,R2 \n" + " VMOV D22,R2,R2 \n" + " VMOV D23,R2,R2 \n" + " VMOV D24,R2,R2 \n" + " VMOV D25,R2,R2 \n" + " VMOV D26,R2,R2 \n" + " VMOV D27,R2,R2 \n" + " VMOV D28,R2,R2 \n" + " VMOV D29,R2,R2 \n" + " VMOV D30,R2,R2 \n" + " VMOV D31,R2,R2 \n" +#endif + + //Initialise FPSCR to a known state + " VMRS R1,FPSCR \n" + " LDR R2,=0x00086060 \n" 
//Mask off all bits that do not have to be preserved. Non-preserved bits can/should be zero. + " AND R1,R1,R2 \n" + " VMSR FPSCR,R1 " + : : : "cc", "r1", "r2" + ); +} + +#endif /* __CMSIS_CLANG_COREA_H */ diff --git a/CMSIS/Core/Include/cmsis_clang_corem.h b/CMSIS/Core/Include/cmsis_clang_corem.h new file mode 100644 index 000000000..f9b4abd9b --- /dev/null +++ b/CMSIS/Core/Include/cmsis_clang_corem.h @@ -0,0 +1,670 @@ +/**************************************************************************//** + * @file cmsis_armclang_corem.h + * @brief CMSIS compiler armclang (Arm Compiler 6) header file + * @version V6.0.0 + * @date 4. August 2023 + ******************************************************************************/ +/* + * Copyright (c) 2009-2023 Arm Limited. All rights reserved. + * + * SPDX-License-Identifier: Apache-2.0 + * + * Licensed under the Apache License, Version 2.0 (the License); you may + * not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an AS IS BASIS, WITHOUT + * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +#ifndef __CMSIS_CLANG_COREM_H +#define __CMSIS_CLANG_COREM_H + +#pragma clang system_header /* treat file as system include file */ + +// Include the generic settings: +#include "cmsis_clang.h" + +/* ######################### Startup and Lowlevel Init ######################## */ + +#ifndef __PROGRAM_START + #define __PROGRAM_START __main +#endif + +#ifndef __INITIAL_SP + #define __INITIAL_SP Image$$ARM_LIB_STACK$$ZI$$Limit +#endif + +#ifndef __STACK_LIMIT + #define __STACK_LIMIT Image$$ARM_LIB_STACK$$ZI$$Base +#endif + +#ifndef __VECTOR_TABLE + #define __VECTOR_TABLE __Vectors +#endif + +#ifndef __VECTOR_TABLE_ATTRIBUTE + #define __VECTOR_TABLE_ATTRIBUTE __attribute__((used, section("RESET"))) +#endif + +#if (__ARM_FEATURE_CMSE == 3) + #ifndef __STACK_SEAL + #define __STACK_SEAL Image$$STACKSEAL$$ZI$$Base + #endif + + #ifndef __TZ_STACK_SEAL_SIZE + #define __TZ_STACK_SEAL_SIZE 8U + #endif + + #ifndef __TZ_STACK_SEAL_VALUE + #define __TZ_STACK_SEAL_VALUE 0xFEF5EDA5FEF5EDA5ULL + #endif + + __STATIC_FORCEINLINE void __TZ_set_STACKSEAL_S (uint32_t* stackTop) { + *((uint64_t *)stackTop) = __TZ_STACK_SEAL_VALUE; + } +#endif + + +/* ########################### Core Function Access ########################### */ +/** \ingroup CMSIS_Core_FunctionInterface + \defgroup CMSIS_Core_RegAccFunctions CMSIS Core Register Access Functions + @{ + */ + + +/** + \brief Get Control Register + \details Returns the content of the Control Register. + \return Control Register value + */ +__STATIC_FORCEINLINE uint32_t __get_CONTROL(void) +{ + uint32_t result; + + __ASM volatile ("MRS %0, control" : "=r" (result) ); + return (result); +} + + +#if (__ARM_FEATURE_CMSE == 3) + /** + \brief Get Control Register (non-secure) + \details Returns the content of the non-secure Control Register when in secure mode. + \return non-secure Control Register value + */ + __STATIC_FORCEINLINE uint32_t __TZ_get_CONTROL_NS(void) + { + uint32_t result; + + __ASM volatile ("MRS %0, control_ns" : "=r" (result) ); + return (result); + } +#endif + + +/** + \brief Set Control Register + \details Writes the given value to the Control Register. 
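+           A minimal usage sketch, assuming the Process Stack Pointer has already
+           been initialised (e.g. via __set_PSP()):
+           \code
+           uint32_t ctrl = __get_CONTROL();
+           __set_CONTROL(ctrl | 0x2U);    // CONTROL.SPSEL = 1: Thread mode uses PSP
+           \endcode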
+ \param [in] control Control Register value to set + */ +__STATIC_FORCEINLINE void __set_CONTROL(uint32_t control) +{ + __ASM volatile ("MSR control, %0" : : "r" (control) : "memory"); + __ISB(); +} + + +#if (__ARM_FEATURE_CMSE == 3) + /** + \brief Set Control Register (non-secure) + \details Writes the given value to the non-secure Control Register when in secure state. + \param [in] control Control Register value to set + */ + __STATIC_FORCEINLINE void __TZ_set_CONTROL_NS(uint32_t control) + { + __ASM volatile ("MSR control_ns, %0" : : "r" (control) : "memory"); + __ISB(); + } +#endif + + +/** + \brief Get IPSR Register + \details Returns the content of the IPSR Register. + \return IPSR Register value + */ +__STATIC_FORCEINLINE uint32_t __get_IPSR(void) +{ + uint32_t result; + + __ASM volatile ("MRS %0, ipsr" : "=r" (result) ); + return (result); +} + + +/** + \brief Get APSR Register + \details Returns the content of the APSR Register. + \return APSR Register value + */ +__STATIC_FORCEINLINE uint32_t __get_APSR(void) +{ + uint32_t result; + + __ASM volatile ("MRS %0, apsr" : "=r" (result) ); + return (result); +} + + +/** + \brief Get xPSR Register + \details Returns the content of the xPSR Register. + \return xPSR Register value + */ +__STATIC_FORCEINLINE uint32_t __get_xPSR(void) +{ + uint32_t result; + + __ASM volatile ("MRS %0, xpsr" : "=r" (result) ); + return (result); +} + + +/** + \brief Get Process Stack Pointer + \details Returns the current value of the Process Stack Pointer (PSP). + \return PSP Register value + */ +__STATIC_FORCEINLINE uint32_t __get_PSP(void) +{ + uint32_t result; + + __ASM volatile ("MRS %0, psp" : "=r" (result) ); + return (result); +} + + +#if (__ARM_FEATURE_CMSE == 3) + /** + \brief Get Process Stack Pointer (non-secure) + \details Returns the current value of the non-secure Process Stack Pointer (PSP) when in secure state. + \return PSP Register value + */ + __STATIC_FORCEINLINE uint32_t __TZ_get_PSP_NS(void) + { + uint32_t result; + + __ASM volatile ("MRS %0, psp_ns" : "=r" (result) ); + return (result); + } +#endif + + +/** + \brief Set Process Stack Pointer + \details Assigns the given value to the Process Stack Pointer (PSP). + \param [in] topOfProcStack Process Stack Pointer value to set + */ +__STATIC_FORCEINLINE void __set_PSP(uint32_t topOfProcStack) +{ + __ASM volatile ("MSR psp, %0" : : "r" (topOfProcStack) : ); +} + + +#if (__ARM_FEATURE_CMSE == 3) + /** + \brief Set Process Stack Pointer (non-secure) + \details Assigns the given value to the non-secure Process Stack Pointer (PSP) when in secure state. + \param [in] topOfProcStack Process Stack Pointer value to set + */ + __STATIC_FORCEINLINE void __TZ_set_PSP_NS(uint32_t topOfProcStack) + { + __ASM volatile ("MSR psp_ns, %0" : : "r" (topOfProcStack) : ); + } +#endif + + +/** + \brief Get Main Stack Pointer + \details Returns the current value of the Main Stack Pointer (MSP). + \return MSP Register value + */ +__STATIC_FORCEINLINE uint32_t __get_MSP(void) +{ + uint32_t result; + + __ASM volatile ("MRS %0, msp" : "=r" (result) ); + return (result); +} + + +#if (__ARM_FEATURE_CMSE == 3) + /** + \brief Get Main Stack Pointer (non-secure) + \details Returns the current value of the non-secure Main Stack Pointer (MSP) when in secure state. 
+ \return MSP Register value + */ + __STATIC_FORCEINLINE uint32_t __TZ_get_MSP_NS(void) + { + uint32_t result; + + __ASM volatile ("MRS %0, msp_ns" : "=r" (result) ); + return (result); + } +#endif + + +/** + \brief Set Main Stack Pointer + \details Assigns the given value to the Main Stack Pointer (MSP). + \param [in] topOfMainStack Main Stack Pointer value to set + */ +__STATIC_FORCEINLINE void __set_MSP(uint32_t topOfMainStack) +{ + __ASM volatile ("MSR msp, %0" : : "r" (topOfMainStack) : ); +} + + +#if (__ARM_FEATURE_CMSE == 3) + /** + \brief Set Main Stack Pointer (non-secure) + \details Assigns the given value to the non-secure Main Stack Pointer (MSP) when in secure state. + \param [in] topOfMainStack Main Stack Pointer value to set + */ + __STATIC_FORCEINLINE void __TZ_set_MSP_NS(uint32_t topOfMainStack) + { + __ASM volatile ("MSR msp_ns, %0" : : "r" (topOfMainStack) : ); + } +#endif + + +#if (__ARM_FEATURE_CMSE == 3) + /** + \brief Get Stack Pointer (non-secure) + \details Returns the current value of the non-secure Stack Pointer (SP) when in secure state. + \return SP Register value + */ + __STATIC_FORCEINLINE uint32_t __TZ_get_SP_NS(void) + { + uint32_t result; + + __ASM volatile ("MRS %0, sp_ns" : "=r" (result) ); + return (result); + } + + + /** + \brief Set Stack Pointer (non-secure) + \details Assigns the given value to the non-secure Stack Pointer (SP) when in secure state. + \param [in] topOfStack Stack Pointer value to set + */ + __STATIC_FORCEINLINE void __TZ_set_SP_NS(uint32_t topOfStack) + { + __ASM volatile ("MSR sp_ns, %0" : : "r" (topOfStack) : ); + } +#endif + + +/** + \brief Get Priority Mask + \details Returns the current state of the priority mask bit from the Priority Mask Register. + \return Priority Mask value + */ +__STATIC_FORCEINLINE uint32_t __get_PRIMASK(void) +{ + uint32_t result; + + __ASM volatile ("MRS %0, primask" : "=r" (result) ); + return (result); +} + + +#if (__ARM_FEATURE_CMSE == 3) + /** + \brief Get Priority Mask (non-secure) + \details Returns the current state of the non-secure priority mask bit from the Priority Mask Register when in secure state. + \return Priority Mask value + */ + __STATIC_FORCEINLINE uint32_t __TZ_get_PRIMASK_NS(void) + { + uint32_t result; + + __ASM volatile ("MRS %0, primask_ns" : "=r" (result) ); + return (result); + } +#endif + + +/** + \brief Set Priority Mask + \details Assigns the given value to the Priority Mask Register. + \param [in] priMask Priority Mask + */ +__STATIC_FORCEINLINE void __set_PRIMASK(uint32_t priMask) +{ + __ASM volatile ("MSR primask, %0" : : "r" (priMask) : "memory"); +} + + +#if (__ARM_FEATURE_CMSE == 3) + /** + \brief Set Priority Mask (non-secure) + \details Assigns the given value to the non-secure Priority Mask Register when in secure state. + \param [in] priMask Priority Mask + */ + __STATIC_FORCEINLINE void __TZ_set_PRIMASK_NS(uint32_t priMask) + { + __ASM volatile ("MSR primask_ns, %0" : : "r" (priMask) : "memory"); + } +#endif + + +#if (__ARM_ARCH_ISA_THUMB >= 2) + /** + \brief Get Base Priority + \details Returns the current value of the Base Priority register. + \return Base Priority register value + */ + __STATIC_FORCEINLINE uint32_t __get_BASEPRI(void) + { + uint32_t result; + + __ASM volatile ("MRS %0, basepri" : "=r" (result) ); + return (result); + } + + + #if (__ARM_FEATURE_CMSE == 3) + /** + \brief Get Base Priority (non-secure) + \details Returns the current value of the non-secure Base Priority register when in secure state. 
+ \return Base Priority register value + */ + __STATIC_FORCEINLINE uint32_t __TZ_get_BASEPRI_NS(void) + { + uint32_t result; + + __ASM volatile ("MRS %0, basepri_ns" : "=r" (result) ); + return (result); + } + #endif + + + /** + \brief Set Base Priority + \details Assigns the given value to the Base Priority register. + \param [in] basePri Base Priority value to set + */ + __STATIC_FORCEINLINE void __set_BASEPRI(uint32_t basePri) + { + __ASM volatile ("MSR basepri, %0" : : "r" (basePri) : "memory"); + } + + + #if (__ARM_FEATURE_CMSE == 3) + /** + \brief Set Base Priority (non-secure) + \details Assigns the given value to the non-secure Base Priority register when in secure state. + \param [in] basePri Base Priority value to set + */ + __STATIC_FORCEINLINE void __TZ_set_BASEPRI_NS(uint32_t basePri) + { + __ASM volatile ("MSR basepri_ns, %0" : : "r" (basePri) : "memory"); + } + #endif + + + /** + \brief Set Base Priority with condition + \details Assigns the given value to the Base Priority register only if BASEPRI masking is disabled, + or the new value increases the BASEPRI priority level. + \param [in] basePri Base Priority value to set + */ + __STATIC_FORCEINLINE void __set_BASEPRI_MAX(uint32_t basePri) + { + __ASM volatile ("MSR basepri_max, %0" : : "r" (basePri) : "memory"); + } + + + /** + \brief Get Fault Mask + \details Returns the current value of the Fault Mask register. + \return Fault Mask register value + */ + __STATIC_FORCEINLINE uint32_t __get_FAULTMASK(void) + { + uint32_t result; + + __ASM volatile ("MRS %0, faultmask" : "=r" (result) ); + return (result); + } + + + #if (__ARM_FEATURE_CMSE == 3) + /** + \brief Get Fault Mask (non-secure) + \details Returns the current value of the non-secure Fault Mask register when in secure state. + \return Fault Mask register value + */ + __STATIC_FORCEINLINE uint32_t __TZ_get_FAULTMASK_NS(void) + { + uint32_t result; + + __ASM volatile ("MRS %0, faultmask_ns" : "=r" (result) ); + return (result); + } + #endif + + + /** + \brief Set Fault Mask + \details Assigns the given value to the Fault Mask register. + \param [in] faultMask Fault Mask value to set + */ + __STATIC_FORCEINLINE void __set_FAULTMASK(uint32_t faultMask) + { + __ASM volatile ("MSR faultmask, %0" : : "r" (faultMask) : "memory"); + } + + + #if (__ARM_FEATURE_CMSE == 3) + /** + \brief Set Fault Mask (non-secure) + \details Assigns the given value to the non-secure Fault Mask register when in secure state. + \param [in] faultMask Fault Mask value to set + */ + __STATIC_FORCEINLINE void __TZ_set_FAULTMASK_NS(uint32_t faultMask) + { + __ASM volatile ("MSR faultmask_ns, %0" : : "r" (faultMask) : "memory"); + } + #endif +#endif /* (__ARM_ARCH_ISA_THUMB >= 2) */ + + +#if (__ARM_ARCH >= 8) + /** + \brief Get Process Stack Pointer Limit + Devices without ARMv8-M Main Extensions (i.e. Cortex-M23) lack the non-secure + Stack Pointer Limit register hence zero is returned always in non-secure + mode. + + \details Returns the current value of the Process Stack Pointer Limit (PSPLIM). 
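+              For example, the remaining process stack headroom could be estimated
+              as shown below (illustrative only; where PSPLIM reads as zero the
+              result simply equals the current PSP):
+              \code
+              uint32_t headroom = __get_PSP() - __get_PSPLIM();   // bytes left on the process stack
+              \endcode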
+ \return PSPLIM Register value + */ + __STATIC_FORCEINLINE uint32_t __get_PSPLIM(void) + { + #if (((__ARM_ARCH_8M_MAIN__ < 1) && \ + (__ARM_ARCH_8_1M_MAIN__ < 1) ) && \ + (__ARM_FEATURE_CMSE < 3) ) + // without main extensions, the non-secure PSPLIM is RAZ/WI + return (0U); + #else + uint32_t result; + __ASM volatile ("MRS %0, psplim" : "=r" (result) ); + return (result); + #endif + } + + #if (__ARM_FEATURE_CMSE == 3) + /** + \brief Get Process Stack Pointer Limit (non-secure) + Devices without ARMv8-M Main Extensions (i.e. Cortex-M23) lack the non-secure + Stack Pointer Limit register hence zero is returned always. + + \details Returns the current value of the non-secure Process Stack Pointer Limit (PSPLIM) when in secure state. + \return PSPLIM Register value + */ + __STATIC_FORCEINLINE uint32_t __TZ_get_PSPLIM_NS(void) + { + #if ((__ARM_ARCH_8M_MAIN__ < 1) && (__ARM_ARCH_8_1M_MAIN__ < 1)) + // without main extensions, the non-secure PSPLIM is RAZ/WI + return (0U); + #else + uint32_t result; + __ASM volatile ("MRS %0, psplim_ns" : "=r" (result) ); + return (result); + #endif + } + #endif + + + /** + \brief Set Process Stack Pointer Limit + Devices without ARMv8-M Main Extensions (i.e. Cortex-M23) lack the non-secure + Stack Pointer Limit register hence the write is silently ignored in non-secure + mode. + + \details Assigns the given value to the Process Stack Pointer Limit (PSPLIM). + \param [in] ProcStackPtrLimit Process Stack Pointer Limit value to set + */ + __STATIC_FORCEINLINE void __set_PSPLIM(uint32_t ProcStackPtrLimit) + { + #if (((__ARM_ARCH_8M_MAIN__ < 1) && \ + (__ARM_ARCH_8_1M_MAIN__ < 1) ) && \ + (__ARM_FEATURE_CMSE < 3) ) + // without main extensions, the non-secure PSPLIM is RAZ/WI + (void)ProcStackPtrLimit; + #else + __ASM volatile ("MSR psplim, %0" : : "r" (ProcStackPtrLimit)); + #endif + } + + + #if (__ARM_FEATURE_CMSE == 3) + /** + \brief Set Process Stack Pointer (non-secure) + Devices without ARMv8-M Main Extensions (i.e. Cortex-M23) lack the non-secure + Stack Pointer Limit register hence the write is silently ignored. + + \details Assigns the given value to the non-secure Process Stack Pointer Limit (PSPLIM) when in secure state. + \param [in] ProcStackPtrLimit Process Stack Pointer Limit value to set + */ + __STATIC_FORCEINLINE void __TZ_set_PSPLIM_NS(uint32_t ProcStackPtrLimit) + { + #if ((__ARM_ARCH_8M_MAIN__ < 1) && (__ARM_ARCH_8_1M_MAIN__ < 1)) + // without main extensions, the non-secure PSPLIM is RAZ/WI + (void)ProcStackPtrLimit; + #else + __ASM volatile ("MSR psplim_ns, %0\n" : : "r" (ProcStackPtrLimit)); + #endif + } + #endif + + + /** + \brief Get Main Stack Pointer Limit + Devices without ARMv8-M Main Extensions (i.e. Cortex-M23) lack the non-secure + Stack Pointer Limit register hence zero is returned always. + + \details Returns the current value of the Main Stack Pointer Limit (MSPLIM). + \return MSPLIM Register value + */ + __STATIC_FORCEINLINE uint32_t __get_MSPLIM(void) + { + #if (((__ARM_ARCH_8M_MAIN__ < 1) && \ + (__ARM_ARCH_8_1M_MAIN__ < 1) ) && \ + (__ARM_FEATURE_CMSE < 3) ) + // without main extensions, the non-secure MSPLIM is RAZ/WI + return (0U); + #else + uint32_t result; + __ASM volatile ("MRS %0, msplim" : "=r" (result) ); + return (result); + #endif + } + + + #if (__ARM_FEATURE_CMSE == 3) + /** + \brief Get Main Stack Pointer Limit (non-secure) + Devices without ARMv8-M Main Extensions (i.e. Cortex-M23) lack the non-secure + Stack Pointer Limit register hence zero is returned always. 
+ + \details Returns the current value of the non-secure Main Stack Pointer Limit(MSPLIM) when in secure state. + \return MSPLIM Register value + */ + __STATIC_FORCEINLINE uint32_t __TZ_get_MSPLIM_NS(void) + { + #if ((__ARM_ARCH_8M_MAIN__ < 1) && (__ARM_ARCH_8_1M_MAIN__ < 1)) + // without main extensions, the non-secure MSPLIM is RAZ/WI + return (0U); + #else + uint32_t result; + __ASM volatile ("MRS %0, msplim_ns" : "=r" (result) ); + return (result); + #endif + } + #endif + + + /** + \brief Set Main Stack Pointer Limit + Devices without ARMv8-M Main Extensions (i.e. Cortex-M23) lack the non-secure + Stack Pointer Limit register hence the write is silently ignored. + + \details Assigns the given value to the Main Stack Pointer Limit (MSPLIM). + \param [in] MainStackPtrLimit Main Stack Pointer Limit value to set + */ + __STATIC_FORCEINLINE void __set_MSPLIM(uint32_t MainStackPtrLimit) + { + #if (((__ARM_ARCH_8M_MAIN__ < 1) && \ + (__ARM_ARCH_8_1M_MAIN__ < 1) ) && \ + (__ARM_FEATURE_CMSE < 3) ) + // without main extensions, the non-secure MSPLIM is RAZ/WI + (void)MainStackPtrLimit; + #else + __ASM volatile ("MSR msplim, %0" : : "r" (MainStackPtrLimit)); + #endif + } + + + #if (__ARM_FEATURE_CMSE == 3) + /** + \brief Set Main Stack Pointer Limit (non-secure) + Devices without ARMv8-M Main Extensions (i.e. Cortex-M23) lack the non-secure + Stack Pointer Limit register hence the write is silently ignored. + + \details Assigns the given value to the non-secure Main Stack Pointer Limit (MSPLIM) when in secure state. + \param [in] MainStackPtrLimit Main Stack Pointer value to set + */ + __STATIC_FORCEINLINE void __TZ_set_MSPLIM_NS(uint32_t MainStackPtrLimit) + { + #if ((__ARM_ARCH_8M_MAIN__ < 1) && (__ARM_ARCH_8_1M_MAIN__ < 1)) + // without main extensions, the non-secure MSPLIM is RAZ/WI + (void)MainStackPtrLimit; + #else + __ASM volatile ("MSR msplim_ns, %0" : : "r" (MainStackPtrLimit)); + #endif + } + #endif +#endif /* (__ARM_ARCH >= 8) */ +/** @} end of CMSIS_Core_RegAccFunctions */ + + +#endif /* __CMSIS_CLANG_COREM_H */ diff --git a/CMSIS/Core/Include/cmsis_clang_corer.h b/CMSIS/Core/Include/cmsis_clang_corer.h new file mode 100644 index 000000000..c22b2edfd --- /dev/null +++ b/CMSIS/Core/Include/cmsis_clang_corer.h @@ -0,0 +1,150 @@ +/**************************************************************************//** + * @file cmsis_armclang_corer.h + * @brief CMSIS compiler armclang (Arm Compiler 6) header file + * @version V5.0.0 + * @date 04. December 2022 + ******************************************************************************/ +/* + * Copyright (c) 2009-2023 Arm Limited. All rights reserved. + * + * SPDX-License-Identifier: Apache-2.0 + * + * Licensed under the Apache License, Version 2.0 (the License); you may + * not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an AS IS BASIS, WITHOUT + * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */
+
+#ifndef __CMSIS_CLANG_CORER_H
+#define __CMSIS_CLANG_CORER_H
+
+#pragma clang system_header   /* treat file as system include file */
+
+// Include the generic settings:
+#include "cmsis_clang.h"
+
+
+/* ###########################  Core Function Access  ########################### */
+/** \ingroup  CMSIS_Core_FunctionInterface
+    \defgroup CMSIS_Core_RegAccFunctions CMSIS Core Register Access Functions
+  @{
+ */
+
+/** \brief  Get CPSR Register
+    \return CPSR Register value
+ */
+__STATIC_FORCEINLINE uint32_t __get_CPSR(void)
+{
+  uint32_t result;
+  __ASM volatile("MRS %0, cpsr" : "=r" (result) );
+  return(result);
+}
+
+/** \brief  Set CPSR Register
+    \param [in]  cpsr  CPSR value to set
+ */
+__STATIC_FORCEINLINE void __set_CPSR(uint32_t cpsr)
+{
+  __ASM volatile ("MSR cpsr, %0" : : "r" (cpsr) : "cc", "memory");
+}
+
+/** \brief  Get Mode
+    \return Processor Mode
+ */
+__STATIC_FORCEINLINE uint32_t __get_mode(void)
+{
+  return (__get_CPSR() & 0x1FU);
+}
+
+/** \brief  Set Mode
+    \param [in]  mode  Mode value to set
+ */
+__STATIC_FORCEINLINE void __set_mode(uint32_t mode)
+{
+  __ASM volatile("MSR cpsr_c, %0" : : "r" (mode) : "memory");
+}
+
+/** \brief  Get Stack Pointer
+    \return Stack Pointer value
+ */
+__STATIC_FORCEINLINE uint32_t __get_SP(void)
+{
+  uint32_t result;
+  __ASM volatile("MOV %0, sp" : "=r" (result) : : "memory");
+  return result;
+}
+
+/** \brief  Set Stack Pointer
+    \param [in]  stack  Stack Pointer value to set
+ */
+__STATIC_FORCEINLINE void __set_SP(uint32_t stack)
+{
+  __ASM volatile("MOV sp, %0" : : "r" (stack) : "memory");
+}
+
+/** \brief  Get USR/SYS Stack Pointer
+    \return USR/SYS Stack Pointer value
+ */
+__STATIC_FORCEINLINE uint32_t __get_SP_usr(void)
+{
+  uint32_t cpsr;
+  uint32_t result;
+  __ASM volatile(
+    "MRS     %0, cpsr   \n"
+    "CPS     #0x1F      \n" // no effect in USR mode
+    "MOV     %1, sp     \n"
+    "MSR     cpsr_c, %0 \n" // no effect in USR mode
+    "ISB" :  "=r"(cpsr), "=r"(result) : : "memory"
+   );
+  return result;
+}
+
+/** \brief  Set USR/SYS Stack Pointer
+    \param [in]  topOfProcStack  USR/SYS Stack Pointer value to set
+ */
+__STATIC_FORCEINLINE void __set_SP_usr(uint32_t topOfProcStack)
+{
+  uint32_t cpsr;
+  __ASM volatile(
+    "MRS     %0, cpsr   \n"
+    "CPS     #0x1F      \n" // no effect in USR mode
+    "MOV     sp, %1     \n"
+    "MSR     cpsr_c, %0 \n" // no effect in USR mode
+    "ISB" : "=r"(cpsr) : "r" (topOfProcStack) : "memory"
+   );
+}
+
+/** \brief  Get FPEXC
+    \return Floating Point Exception Control register value
+ */
+__STATIC_FORCEINLINE uint32_t __get_FPEXC(void)
+{
+#if (__FPU_PRESENT == 1)
+  uint32_t result;
+  __ASM volatile("VMRS %0, fpexc" : "=r" (result) : : "memory");
+  return(result);
+#else
+  return(0);
+#endif
+}
+
+/** \brief  Set FPEXC
+    \param [in]  fpexc  Floating Point Exception Control value to set
+ */
+__STATIC_FORCEINLINE void __set_FPEXC(uint32_t fpexc)
+{
+#if (__FPU_PRESENT == 1)
+  __ASM volatile ("VMSR fpexc, %0" : : "r" (fpexc) : "memory");
+#endif
+}
+
+
+
+#endif /* __CMSIS_CLANG_CORER_H */
diff --git a/CMSIS/Core/Include/cmsis_compiler.h b/CMSIS/Core/Include/cmsis_compiler.h
index fb3783fee..1564be6f4 100644
--- a/CMSIS/Core/Include/cmsis_compiler.h
+++ b/CMSIS/Core/Include/cmsis_compiler.h
@@ -31,34 +31,71 @@
  * Arm Compiler above 6.10.1 (armclang)
  */
 #if defined (__ARMCC_VERSION) && (__ARMCC_VERSION >= 6100100)
-  #include "cmsis_armclang.h"
+  #if defined __CORTEX_A
+    #include "cmsis_armclang_corea.h"
+  #elif defined __CORTEX_R
+    #include "cmsis_armclang_corer.h"
+  #elif defined __CORTEX_M
+    #include "cmsis_armclang_corem.h"
+  #else
+    #error "Unknown architecture"
+  #endif
 /*
  * TI Arm Clang Compiler (tiarmclang)
  */
 #elif defined (__ti__)
-  #include "cmsis_tiarmclang.h"
-
+  #if defined __CORTEX_A
+    #error "Core-A is not supported for this compiler"
+  #elif defined __CORTEX_R
+    #error "Core-R is not supported for this compiler"
+  #elif defined __CORTEX_M
+    #include "cmsis_tiarmclang_corem.h"
+  #else
+    #error "Unknown architecture"
+  #endif
 /*
  * LLVM/Clang Compiler
  */
 #elif defined ( __clang__ )
-  #include "cmsis_clang.h"
-
+  #if defined __CORTEX_A
+    #include "cmsis_clang_corea.h"
+  #elif defined __CORTEX_R
+    #include "cmsis_clang_corer.h"
+  #elif defined __CORTEX_M
+    #include "cmsis_clang_corem.h"
+  #else
+    #error "Unknown architecture"
+  #endif
 /*
  * GNU Compiler
  */
 #elif defined ( __GNUC__ )
-  #include "cmsis_gcc.h"
-
+  #if defined __CORTEX_A
+    #include "cmsis_gcc_corea.h"
+  #elif defined __CORTEX_R
+    #include "cmsis_gcc_corer.h"
+  #elif defined __CORTEX_M
+    #include "cmsis_gcc_corem.h"
+  #else
+    #error "Unknown architecture"
+  #endif
 /*
  * IAR Compiler
  */
 #elif defined ( __ICCARM__ )
-  #include <cmsis_iccarm.h>
+  #if defined __CORTEX_A
+    #include "cmsis_iccarm_corea.h"
+  #elif defined __CORTEX_R
+    #include "cmsis_iccarm_corer.h"
+  #elif defined __CORTEX_M
+    #include "cmsis_iccarm_corem.h"
+  #else
+    #error "Unknown architecture"
+  #endif
 /*
@@ -82,6 +119,9 @@
   #ifndef   __NO_RETURN
     #define __NO_RETURN                            __attribute__((noreturn))
   #endif
+  #ifndef   CMSIS_DEPRECATED
+    #define CMSIS_DEPRECATED                       __attribute__((deprecated))
+  #endif
   #ifndef   __USED
     #define __USED                                 __attribute__((used))
   #endif
@@ -134,6 +174,7 @@
     #define __ALIAS(x)                             __attribute__ ((alias(x)))
   #endif
+
 /*
  * TASKING Compiler
  */
@@ -159,6 +200,9 @@
   #ifndef   __NO_RETURN
     #define __NO_RETURN                            __attribute__((noreturn))
   #endif
+  #ifndef   CMSIS_DEPRECATED
+    #define CMSIS_DEPRECATED                       __attribute__((deprecated))
+  #endif
   #ifndef   __USED
     #define __USED                                 __attribute__((used))
   #endif
@@ -195,7 +239,7 @@
     #define __UNALIGNED_UINT32_READ(addr)          (((const struct T_UINT32_READ *)(const void *)(addr))->v)
   #endif
   #ifndef   __ALIGNED
-    #define __ALIGNED(x)                           __align(x)
+    #define __ALIGNED(x)                           __align(x)
   #endif
   #ifndef   __RESTRICT
     #warning No compiler specific solution for __RESTRICT. __RESTRICT is ignored.
@@ -212,6 +256,7 @@
     #define __ALIAS(x)                             __attribute__ ((alias(x)))
   #endif
+
 /*
  * COSMIC Compiler
  */
@@ -238,6 +283,10 @@
     #warning No compiler specific solution for __USED. __USED is ignored.
     #define __USED
   #endif
+  #ifndef   CMSIS_DEPRECATED
+    #warning No compiler specific solution for CMSIS_DEPRECATED. CMSIS_DEPRECATED is ignored.
+    #define CMSIS_DEPRECATED
+  #endif
   #ifndef   __WEAK
     #define __WEAK                                 __weak
   #endif
diff --git a/CMSIS/Core/Include/cmsis_gcc.h b/CMSIS/Core/Include/cmsis_gcc.h
index 26f517a8d..a7bdd0ee0 100644
--- a/CMSIS/Core/Include/cmsis_gcc.h
+++ b/CMSIS/Core/Include/cmsis_gcc.h
@@ -60,6 +60,9 @@
 #ifndef   __NO_RETURN
   #define __NO_RETURN                            __attribute__((__noreturn__))
 #endif
+#ifndef   CMSIS_DEPRECATED
+  #define CMSIS_DEPRECATED                       __attribute__((deprecated))
+#endif
 #ifndef   __USED
   #define __USED                                 __attribute__((used))
 #endif
@@ -123,90 +126,6 @@
   #define __ALIAS(x)                             __attribute__ ((alias(x)))
 #endif
-/* #########################  Startup and Lowlevel Init  ######################## */
-#ifndef __PROGRAM_START
-
-/**
-  \brief   Initializes data and bss sections
-  \details This default implementations initialized all data and additional bss
-           sections relying on .copy.table and .zero.table specified properly
-           in the used linker script.
-
- */
-__STATIC_FORCEINLINE __NO_RETURN void __cmsis_start(void)
-{
- extern void _start(void) __NO_RETURN;
-
- typedef struct __copy_table {
- uint32_t const* src;
- uint32_t* dest;
- uint32_t wlen;
- } __copy_table_t;
-
- typedef struct __zero_table {
- uint32_t* dest;
- uint32_t wlen;
- } __zero_table_t;
-
- extern const __copy_table_t __copy_table_start__;
- extern const __copy_table_t __copy_table_end__;
- extern const __zero_table_t __zero_table_start__;
- extern const __zero_table_t __zero_table_end__;
-
- for (__copy_table_t const* pTable = &__copy_table_start__; pTable < &__copy_table_end__; ++pTable) {
- for(uint32_t i=0u; i<pTable->wlen; ++i) {
- pTable->dest[i] = pTable->src[i];
- }
- }
-
- for (__zero_table_t const* pTable = &__zero_table_start__; pTable < &__zero_table_end__; ++pTable) {
- for(uint32_t i=0u; i<pTable->wlen; ++i) {
- pTable->dest[i] = 0u;
- }
- }
-
- _start();
-}
-
-#define __PROGRAM_START __cmsis_start
-#endif
-
-#ifndef __INITIAL_SP
-#define __INITIAL_SP __StackTop
-#endif
-
-#ifndef __STACK_LIMIT
-#define __STACK_LIMIT __StackLimit
-#endif
-
-#ifndef __VECTOR_TABLE
-#define __VECTOR_TABLE __Vectors
-#endif
-
-#ifndef __VECTOR_TABLE_ATTRIBUTE
-#define __VECTOR_TABLE_ATTRIBUTE __attribute__((used, section(".vectors")))
-#endif
-
-#if (__ARM_FEATURE_CMSE == 3)
-#ifndef __STACK_SEAL
-#define __STACK_SEAL __StackSeal
-#endif
-
-#ifndef __TZ_STACK_SEAL_SIZE
-#define __TZ_STACK_SEAL_SIZE 8U
-#endif
-
-#ifndef __TZ_STACK_SEAL_VALUE
-#define __TZ_STACK_SEAL_VALUE 0xFEF5EDA5FEF5EDA5ULL
-#endif
-
-
-__STATIC_FORCEINLINE void __TZ_set_STACKSEAL_S (uint32_t* stackTop) {
- *((uint64_t *)stackTop) = __TZ_STACK_SEAL_VALUE;
-}
-#endif
-
-
 /* ########################## Core Instruction Access ######################### */
 /** \defgroup CMSIS_Core_InstructionInterface CMSIS Core Instruction Interface
 Access to dedicated instructions
 @{
@@ -902,622 +821,29 @@ __STATIC_FORCEINLINE void __disable_irq(void)
 __ASM volatile ("cpsid i" : : : "memory");
 }
-
-/**
- \brief Get Control Register
- \details Returns the content of the Control Register.
- \return Control Register value
- */
-__STATIC_FORCEINLINE uint32_t __get_CONTROL(void)
-{
- uint32_t result;
-
- __ASM volatile ("MRS %0, control" : "=r" (result) );
- return (result);
-}
-
-
-#if (__ARM_FEATURE_CMSE == 3)
-/**
- \brief Get Control Register (non-secure)
- \details Returns the content of the non-secure Control Register when in secure mode.
- \return non-secure Control Register value
- */
-__STATIC_FORCEINLINE uint32_t __TZ_get_CONTROL_NS(void)
-{
- uint32_t result;
-
- __ASM volatile ("MRS %0, control_ns" : "=r" (result) );
- return (result);
-}
-#endif
-
-
-/**
- \brief Set Control Register
- \details Writes the given value to the Control Register.
- \param [in] control Control Register value to set
- */
-__STATIC_FORCEINLINE void __set_CONTROL(uint32_t control)
-{
- __ASM volatile ("MSR control, %0" : : "r" (control) : "memory");
- __ISB();
-}
-
-
-#if (__ARM_FEATURE_CMSE == 3)
-/**
- \brief Set Control Register (non-secure)
- \details Writes the given value to the non-secure Control Register when in secure state.
- \param [in] control Control Register value to set
- */
-__STATIC_FORCEINLINE void __TZ_set_CONTROL_NS(uint32_t control)
-{
- __ASM volatile ("MSR control_ns, %0" : : "r" (control) : "memory");
- __ISB();
-}
-#endif
-
-
-/**
- \brief Get IPSR Register
- \details Returns the content of the IPSR Register.
- \return IPSR Register value - */ -__STATIC_FORCEINLINE uint32_t __get_IPSR(void) -{ - uint32_t result; - - __ASM volatile ("MRS %0, ipsr" : "=r" (result) ); - return (result); -} - - -/** - \brief Get APSR Register - \details Returns the content of the APSR Register. - \return APSR Register value - */ -__STATIC_FORCEINLINE uint32_t __get_APSR(void) -{ - uint32_t result; - - __ASM volatile ("MRS %0, apsr" : "=r" (result) ); - return (result); -} - - -/** - \brief Get xPSR Register - \details Returns the content of the xPSR Register. - \return xPSR Register value - */ -__STATIC_FORCEINLINE uint32_t __get_xPSR(void) -{ - uint32_t result; - - __ASM volatile ("MRS %0, xpsr" : "=r" (result) ); - return (result); -} - - -/** - \brief Get Process Stack Pointer - \details Returns the current value of the Process Stack Pointer (PSP). - \return PSP Register value - */ -__STATIC_FORCEINLINE uint32_t __get_PSP(void) -{ - uint32_t result; - - __ASM volatile ("MRS %0, psp" : "=r" (result) ); - return (result); -} - - -#if (__ARM_FEATURE_CMSE == 3) -/** - \brief Get Process Stack Pointer (non-secure) - \details Returns the current value of the non-secure Process Stack Pointer (PSP) when in secure state. - \return PSP Register value - */ -__STATIC_FORCEINLINE uint32_t __TZ_get_PSP_NS(void) -{ - uint32_t result; - - __ASM volatile ("MRS %0, psp_ns" : "=r" (result) ); - return (result); -} -#endif - - -/** - \brief Set Process Stack Pointer - \details Assigns the given value to the Process Stack Pointer (PSP). - \param [in] topOfProcStack Process Stack Pointer value to set - */ -__STATIC_FORCEINLINE void __set_PSP(uint32_t topOfProcStack) -{ - __ASM volatile ("MSR psp, %0" : : "r" (topOfProcStack) : ); -} - - -#if (__ARM_FEATURE_CMSE == 3) -/** - \brief Set Process Stack Pointer (non-secure) - \details Assigns the given value to the non-secure Process Stack Pointer (PSP) when in secure state. - \param [in] topOfProcStack Process Stack Pointer value to set - */ -__STATIC_FORCEINLINE void __TZ_set_PSP_NS(uint32_t topOfProcStack) -{ - __ASM volatile ("MSR psp_ns, %0" : : "r" (topOfProcStack) : ); -} -#endif - - -/** - \brief Get Main Stack Pointer - \details Returns the current value of the Main Stack Pointer (MSP). - \return MSP Register value - */ -__STATIC_FORCEINLINE uint32_t __get_MSP(void) -{ - uint32_t result; - - __ASM volatile ("MRS %0, msp" : "=r" (result) ); - return (result); -} - - -#if (__ARM_FEATURE_CMSE == 3) -/** - \brief Get Main Stack Pointer (non-secure) - \details Returns the current value of the non-secure Main Stack Pointer (MSP) when in secure state. - \return MSP Register value - */ -__STATIC_FORCEINLINE uint32_t __TZ_get_MSP_NS(void) -{ - uint32_t result; - - __ASM volatile ("MRS %0, msp_ns" : "=r" (result) ); - return (result); -} -#endif - - -/** - \brief Set Main Stack Pointer - \details Assigns the given value to the Main Stack Pointer (MSP). - \param [in] topOfMainStack Main Stack Pointer value to set - */ -__STATIC_FORCEINLINE void __set_MSP(uint32_t topOfMainStack) -{ - __ASM volatile ("MSR msp, %0" : : "r" (topOfMainStack) : ); -} - - -#if (__ARM_FEATURE_CMSE == 3) -/** - \brief Set Main Stack Pointer (non-secure) - \details Assigns the given value to the non-secure Main Stack Pointer (MSP) when in secure state. 
- \param [in] topOfMainStack Main Stack Pointer value to set - */ -__STATIC_FORCEINLINE void __TZ_set_MSP_NS(uint32_t topOfMainStack) -{ - __ASM volatile ("MSR msp_ns, %0" : : "r" (topOfMainStack) : ); -} -#endif - - -#if (__ARM_FEATURE_CMSE == 3) -/** - \brief Get Stack Pointer (non-secure) - \details Returns the current value of the non-secure Stack Pointer (SP) when in secure state. - \return SP Register value - */ -__STATIC_FORCEINLINE uint32_t __TZ_get_SP_NS(void) -{ - uint32_t result; - - __ASM volatile ("MRS %0, sp_ns" : "=r" (result) ); - return (result); -} - - -/** - \brief Set Stack Pointer (non-secure) - \details Assigns the given value to the non-secure Stack Pointer (SP) when in secure state. - \param [in] topOfStack Stack Pointer value to set - */ -__STATIC_FORCEINLINE void __TZ_set_SP_NS(uint32_t topOfStack) -{ - __ASM volatile ("MSR sp_ns, %0" : : "r" (topOfStack) : ); -} -#endif - - -/** - \brief Get Priority Mask - \details Returns the current state of the priority mask bit from the Priority Mask Register. - \return Priority Mask value - */ -__STATIC_FORCEINLINE uint32_t __get_PRIMASK(void) -{ - uint32_t result; - - __ASM volatile ("MRS %0, primask" : "=r" (result) ); - return (result); -} - - -#if (__ARM_FEATURE_CMSE == 3) -/** - \brief Get Priority Mask (non-secure) - \details Returns the current state of the non-secure priority mask bit from the Priority Mask Register when in secure state. - \return Priority Mask value - */ -__STATIC_FORCEINLINE uint32_t __TZ_get_PRIMASK_NS(void) -{ - uint32_t result; - - __ASM volatile ("MRS %0, primask_ns" : "=r" (result) ); - return (result); -} -#endif - - -/** - \brief Set Priority Mask - \details Assigns the given value to the Priority Mask Register. - \param [in] priMask Priority Mask - */ -__STATIC_FORCEINLINE void __set_PRIMASK(uint32_t priMask) -{ - __ASM volatile ("MSR primask, %0" : : "r" (priMask) : "memory"); -} - - -#if (__ARM_FEATURE_CMSE == 3) -/** - \brief Set Priority Mask (non-secure) - \details Assigns the given value to the non-secure Priority Mask Register when in secure state. - \param [in] priMask Priority Mask - */ -__STATIC_FORCEINLINE void __TZ_set_PRIMASK_NS(uint32_t priMask) -{ - __ASM volatile ("MSR primask_ns, %0" : : "r" (priMask) : "memory"); -} -#endif - - #if (__ARM_ARCH_ISA_THUMB >= 2) -/** - \brief Enable FIQ + /** + \brief Enable FIQ \details Enables FIQ interrupts by clearing special-purpose register FAULTMASK. - Can only be executed in Privileged modes. - */ -__STATIC_FORCEINLINE void __enable_fault_irq(void) -{ - __ASM volatile ("cpsie f" : : : "memory"); -} - - -/** - \brief Disable FIQ - \details Disables FIQ interrupts by setting special-purpose register FAULTMASK. - Can only be executed in Privileged modes. - */ -__STATIC_FORCEINLINE void __disable_fault_irq(void) -{ - __ASM volatile ("cpsid f" : : : "memory"); -} - - -/** - \brief Get Base Priority - \details Returns the current value of the Base Priority register. - \return Base Priority register value - */ -__STATIC_FORCEINLINE uint32_t __get_BASEPRI(void) -{ - uint32_t result; - - __ASM volatile ("MRS %0, basepri" : "=r" (result) ); - return (result); -} - - -#if (__ARM_FEATURE_CMSE == 3) -/** - \brief Get Base Priority (non-secure) - \details Returns the current value of the non-secure Base Priority register when in secure state. 
- \return Base Priority register value - */ -__STATIC_FORCEINLINE uint32_t __TZ_get_BASEPRI_NS(void) -{ - uint32_t result; - - __ASM volatile ("MRS %0, basepri_ns" : "=r" (result) ); - return (result); -} -#endif - - -/** - \brief Set Base Priority - \details Assigns the given value to the Base Priority register. - \param [in] basePri Base Priority value to set - */ -__STATIC_FORCEINLINE void __set_BASEPRI(uint32_t basePri) -{ - __ASM volatile ("MSR basepri, %0" : : "r" (basePri) : "memory"); -} - - -#if (__ARM_FEATURE_CMSE == 3) -/** - \brief Set Base Priority (non-secure) - \details Assigns the given value to the non-secure Base Priority register when in secure state. - \param [in] basePri Base Priority value to set - */ -__STATIC_FORCEINLINE void __TZ_set_BASEPRI_NS(uint32_t basePri) -{ - __ASM volatile ("MSR basepri_ns, %0" : : "r" (basePri) : "memory"); -} -#endif - - -/** - \brief Set Base Priority with condition - \details Assigns the given value to the Base Priority register only if BASEPRI masking is disabled, - or the new value increases the BASEPRI priority level. - \param [in] basePri Base Priority value to set - */ -__STATIC_FORCEINLINE void __set_BASEPRI_MAX(uint32_t basePri) -{ - __ASM volatile ("MSR basepri_max, %0" : : "r" (basePri) : "memory"); -} - - -/** - \brief Get Fault Mask - \details Returns the current value of the Fault Mask register. - \return Fault Mask register value - */ -__STATIC_FORCEINLINE uint32_t __get_FAULTMASK(void) -{ - uint32_t result; - - __ASM volatile ("MRS %0, faultmask" : "=r" (result) ); - return (result); -} - - -#if (__ARM_FEATURE_CMSE == 3) -/** - \brief Get Fault Mask (non-secure) - \details Returns the current value of the non-secure Fault Mask register when in secure state. - \return Fault Mask register value - */ -__STATIC_FORCEINLINE uint32_t __TZ_get_FAULTMASK_NS(void) -{ - uint32_t result; - - __ASM volatile ("MRS %0, faultmask_ns" : "=r" (result) ); - return (result); -} -#endif - - -/** - \brief Set Fault Mask - \details Assigns the given value to the Fault Mask register. - \param [in] faultMask Fault Mask value to set - */ -__STATIC_FORCEINLINE void __set_FAULTMASK(uint32_t faultMask) -{ - __ASM volatile ("MSR faultmask, %0" : : "r" (faultMask) : "memory"); -} - - -#if (__ARM_FEATURE_CMSE == 3) -/** - \brief Set Fault Mask (non-secure) - \details Assigns the given value to the non-secure Fault Mask register when in secure state. - \param [in] faultMask Fault Mask value to set - */ -__STATIC_FORCEINLINE void __TZ_set_FAULTMASK_NS(uint32_t faultMask) -{ - __ASM volatile ("MSR faultmask_ns, %0" : : "r" (faultMask) : "memory"); -} -#endif - -#endif /* (__ARM_ARCH_ISA_THUMB >= 2) */ - - -#if (__ARM_ARCH >= 8) -/** - \brief Get Process Stack Pointer Limit - Devices without ARMv8-M Main Extensions (i.e. Cortex-M23) lack the non-secure - Stack Pointer Limit register hence zero is returned always in non-secure - mode. - - \details Returns the current value of the Process Stack Pointer Limit (PSPLIM). - \return PSPLIM Register value - */ -__STATIC_FORCEINLINE uint32_t __get_PSPLIM(void) -{ -#if (((__ARM_ARCH_8M_MAIN__ < 1) && \ - (__ARM_ARCH_8_1M_MAIN__ < 1) ) && \ - (__ARM_FEATURE_CMSE < 3) ) - /* without main extensions, the non-secure PSPLIM is RAZ/WI */ - return (0U); -#else - uint32_t result; - __ASM volatile ("MRS %0, psplim" : "=r" (result) ); - return (result); -#endif -} - -#if (__ARM_FEATURE_CMSE == 3) -/** - \brief Get Process Stack Pointer Limit (non-secure) - Devices without ARMv8-M Main Extensions (i.e. 
Cortex-M23) lack the non-secure - Stack Pointer Limit register hence zero is returned always. - - \details Returns the current value of the non-secure Process Stack Pointer Limit (PSPLIM) when in secure state. - \return PSPLIM Register value - */ -__STATIC_FORCEINLINE uint32_t __TZ_get_PSPLIM_NS(void) -{ -#if ((__ARM_ARCH_8M_MAIN__ < 1) && \ - (__ARM_ARCH_8_1M_MAIN__ < 1) ) - /* without main extensions, the non-secure PSPLIM is RAZ/WI */ - return (0U); -#else - uint32_t result; - __ASM volatile ("MRS %0, psplim_ns" : "=r" (result) ); - return (result); -#endif -} -#endif - - -/** - \brief Set Process Stack Pointer Limit - Devices without ARMv8-M Main Extensions (i.e. Cortex-M23) lack the non-secure - Stack Pointer Limit register hence the write is silently ignored in non-secure - mode. - - \details Assigns the given value to the Process Stack Pointer Limit (PSPLIM). - \param [in] ProcStackPtrLimit Process Stack Pointer Limit value to set - */ -__STATIC_FORCEINLINE void __set_PSPLIM(uint32_t ProcStackPtrLimit) -{ -#if (((__ARM_ARCH_8M_MAIN__ < 1) && \ - (__ARM_ARCH_8_1M_MAIN__ < 1) ) && \ - (__ARM_FEATURE_CMSE < 3) ) - /* without main extensions, the non-secure PSPLIM is RAZ/WI */ - (void)ProcStackPtrLimit; -#else - __ASM volatile ("MSR psplim, %0" : : "r" (ProcStackPtrLimit)); -#endif -} - - -#if (__ARM_FEATURE_CMSE == 3) -/** - \brief Set Process Stack Pointer (non-secure) - Devices without ARMv8-M Main Extensions (i.e. Cortex-M23) lack the non-secure - Stack Pointer Limit register hence the write is silently ignored. - - \details Assigns the given value to the non-secure Process Stack Pointer Limit (PSPLIM) when in secure state. - \param [in] ProcStackPtrLimit Process Stack Pointer Limit value to set - */ -__STATIC_FORCEINLINE void __TZ_set_PSPLIM_NS(uint32_t ProcStackPtrLimit) -{ -#if ((__ARM_ARCH_8M_MAIN__ < 1) && \ - (__ARM_ARCH_8_1M_MAIN__ < 1) ) - /* without main extensions, the non-secure PSPLIM is RAZ/WI */ - (void)ProcStackPtrLimit; -#else - __ASM volatile ("MSR psplim_ns, %0\n" : : "r" (ProcStackPtrLimit)); -#endif -} -#endif - - -/** - \brief Get Main Stack Pointer Limit - Devices without ARMv8-M Main Extensions (i.e. Cortex-M23) lack the non-secure - Stack Pointer Limit register hence zero is returned always. - - \details Returns the current value of the Main Stack Pointer Limit (MSPLIM). - \return MSPLIM Register value - */ -__STATIC_FORCEINLINE uint32_t __get_MSPLIM(void) -{ -#if (((__ARM_ARCH_8M_MAIN__ < 1) && \ - (__ARM_ARCH_8_1M_MAIN__ < 1) ) && \ - (__ARM_FEATURE_CMSE < 3) ) - /* without main extensions, the non-secure MSPLIM is RAZ/WI */ - return (0U); -#else - uint32_t result; - __ASM volatile ("MRS %0, msplim" : "=r" (result) ); - return (result); -#endif -} - - -#if (__ARM_FEATURE_CMSE == 3) -/** - \brief Get Main Stack Pointer Limit (non-secure) - Devices without ARMv8-M Main Extensions (i.e. Cortex-M23) lack the non-secure - Stack Pointer Limit register hence zero is returned always. - - \details Returns the current value of the non-secure Main Stack Pointer Limit(MSPLIM) when in secure state. - \return MSPLIM Register value - */ -__STATIC_FORCEINLINE uint32_t __TZ_get_MSPLIM_NS(void) -{ -#if ((__ARM_ARCH_8M_MAIN__ < 1) && \ - (__ARM_ARCH_8_1M_MAIN__ < 1) ) - /* without main extensions, the non-secure MSPLIM is RAZ/WI */ - return (0U); -#else - uint32_t result; - __ASM volatile ("MRS %0, msplim_ns" : "=r" (result) ); - return (result); -#endif -} -#endif - - -/** - \brief Set Main Stack Pointer Limit - Devices without ARMv8-M Main Extensions (i.e. 
Cortex-M23) lack the non-secure - Stack Pointer Limit register hence the write is silently ignored. - - \details Assigns the given value to the Main Stack Pointer Limit (MSPLIM). - \param [in] MainStackPtrLimit Main Stack Pointer Limit value to set - */ -__STATIC_FORCEINLINE void __set_MSPLIM(uint32_t MainStackPtrLimit) -{ -#if (((__ARM_ARCH_8M_MAIN__ < 1) && \ - (__ARM_ARCH_8_1M_MAIN__ < 1) ) && \ - (__ARM_FEATURE_CMSE < 3) ) - /* without main extensions, the non-secure MSPLIM is RAZ/WI */ - (void)MainStackPtrLimit; -#else - __ASM volatile ("MSR msplim, %0" : : "r" (MainStackPtrLimit)); -#endif -} - + Can only be executed in Privileged modes. + */ + __STATIC_FORCEINLINE void __enable_fault_irq(void) + { + __ASM volatile ("cpsie f" : : : "memory"); + } -#if (__ARM_FEATURE_CMSE == 3) -/** - \brief Set Main Stack Pointer Limit (non-secure) - Devices without ARMv8-M Main Extensions (i.e. Cortex-M23) lack the non-secure - Stack Pointer Limit register hence the write is silently ignored. - \details Assigns the given value to the non-secure Main Stack Pointer Limit (MSPLIM) when in secure state. - \param [in] MainStackPtrLimit Main Stack Pointer value to set - */ -__STATIC_FORCEINLINE void __TZ_set_MSPLIM_NS(uint32_t MainStackPtrLimit) -{ -#if ((__ARM_ARCH_8M_MAIN__ < 1) && \ - (__ARM_ARCH_8_1M_MAIN__ < 1) ) - /* without main extensions, the non-secure MSPLIM is RAZ/WI */ - (void)MainStackPtrLimit; -#else - __ASM volatile ("MSR msplim_ns, %0" : : "r" (MainStackPtrLimit)); -#endif -} + /** + \brief Disable FIQ + \details Disables FIQ interrupts by setting special-purpose register FAULTMASK. + Can only be executed in Privileged modes. + */ + __STATIC_FORCEINLINE void __disable_fault_irq(void) + { + __ASM volatile ("cpsid f" : : : "memory"); + } #endif -#endif /* (__ARM_ARCH >= 8) */ - /** \brief Get FPSCR diff --git a/CMSIS/Core/Include/cmsis_gcc_corea.h b/CMSIS/Core/Include/cmsis_gcc_corea.h new file mode 100644 index 000000000..f7934bcd4 --- /dev/null +++ b/CMSIS/Core/Include/cmsis_gcc_corea.h @@ -0,0 +1,257 @@ +/**************************************************************************//** + * @file cmsis_gcc_corea.h + * @brief CMSIS compiler GCC header file + * @version V6.0.0 + * @date 4. August 2023 + ******************************************************************************/ +/* + * Copyright (c) 2009-2023 Arm Limited. All rights reserved. + * + * SPDX-License-Identifier: Apache-2.0 + * + * Licensed under the Apache License, Version 2.0 (the License); you may + * not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an AS IS BASIS, WITHOUT + * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +#ifndef __CMSIS_GCC_COREA_H +#define __CMSIS_GCC_COREA_H + +// Include the generic settings: +#include "cmsis_gcc.h" + +/* ignore some GCC warnings */ +#pragma GCC diagnostic push +#pragma GCC diagnostic ignored "-Wsign-conversion" +#pragma GCC diagnostic ignored "-Wconversion" +#pragma GCC diagnostic ignored "-Wunused-parameter" + + +/** \defgroup CMSIS_Core_intrinsics CMSIS Core Intrinsics + Access to dedicated SIMD instructions + @{ +*/ +/** \brief Get CPSR Register + \return CPSR Register value + */ +__STATIC_FORCEINLINE uint32_t __get_CPSR(void) +{ + uint32_t result; + __ASM volatile("MRS %0, cpsr" : "=r" (result) ); + return(result); +} + +/** \brief Set CPSR Register + \param [in] cpsr CPSR value to set + */ +__STATIC_FORCEINLINE void __set_CPSR(uint32_t cpsr) +{ + __ASM volatile ("MSR cpsr, %0" : : "r" (cpsr) : "cc", "memory"); +} + +/** \brief Get Mode + \return Processor Mode + */ +__STATIC_FORCEINLINE uint32_t __get_mode(void) +{ + return (__get_CPSR() & 0x1FU); +} + +/** \brief Set Mode + \param [in] mode Mode value to set + */ +__STATIC_FORCEINLINE void __set_mode(uint32_t mode) +{ + __ASM volatile("MSR cpsr_c, %0" : : "r" (mode) : "memory"); +} + +/** \brief Get Stack Pointer + \return Stack Pointer value + */ +__STATIC_FORCEINLINE uint32_t __get_SP(void) +{ + uint32_t result; + __ASM volatile("MOV %0, sp" : "=r" (result) : : "memory"); + return result; +} + +/** \brief Set Stack Pointer + \param [in] stack Stack Pointer value to set + */ +__STATIC_FORCEINLINE void __set_SP(uint32_t stack) +{ + __ASM volatile("MOV sp, %0" : : "r" (stack) : "memory"); +} + +/** \brief Get USR/SYS Stack Pointer + \return USR/SYS Stack Pointer value + */ +__STATIC_FORCEINLINE uint32_t __get_SP_usr(void) +{ + uint32_t cpsr = __get_CPSR(); + uint32_t result; + __ASM volatile( + "CPS #0x1F \n" + "MOV %0, sp " : "=r"(result) : : "memory" + ); + __set_CPSR(cpsr); + __ISB(); + return result; +} + +/** \brief Set USR/SYS Stack Pointer + \param [in] topOfProcStack USR/SYS Stack Pointer value to set + */ +__STATIC_FORCEINLINE void __set_SP_usr(uint32_t topOfProcStack) +{ + uint32_t cpsr = __get_CPSR(); + __ASM volatile( + "CPS #0x1F \n" + "MOV sp, %0 " : : "r" (topOfProcStack) : "memory" + ); + __set_CPSR(cpsr); + __ISB(); +} + +/** \brief Get FPEXC + \return Floating Point Exception Control register value + */ +__STATIC_FORCEINLINE uint32_t __get_FPEXC(void) +{ +#if (__FPU_PRESENT == 1) + uint32_t result; + __ASM volatile("VMRS %0, fpexc" : "=r" (result) : : "memory"); + return(result); +#else + return(0); +#endif +} + +/** \brief Set FPEXC + \param [in] fpexc Floating Point Exception Control value to set + */ +__STATIC_FORCEINLINE void __set_FPEXC(uint32_t fpexc) +{ +#if (__FPU_PRESENT == 1) + __ASM volatile ("VMSR fpexc, %0" : : "r" (fpexc) : "memory"); +#endif +} + +/* + * Include common core functions to access Coprocessor 15 registers + */ + +#define __get_CP(cp, op1, Rt, CRn, CRm, op2) __ASM volatile("MRC p" # cp ", " # op1 ", %0, c" # CRn ", c" # CRm ", " # op2 : "=r" (Rt) : : "memory" ) +#define __set_CP(cp, op1, Rt, CRn, CRm, op2) __ASM volatile("MCR p" # cp ", " # op1 ", %0, c" # CRn ", c" # CRm ", " # op2 : : "r" (Rt) : "memory" ) +#define __get_CP64(cp, op1, Rt, CRm) __ASM volatile("MRRC p" # cp ", " # op1 ", %Q0, %R0, c" # CRm : "=r" (Rt) : : "memory" ) +#define __set_CP64(cp, op1, Rt, CRm) __ASM volatile("MCRR p" # cp ", " # op1 ", %Q0, %R0, c" # CRm : : "r" (Rt) : "memory" ) + +#if __CORTEX_A == 5 ||__CORTEX_A == 7 || __CORTEX_A == 9 + #include "armv7a_cp15.h" +#elif __CORTEX_A == 
35 || __CORTEX_A == 53 || __CORTEX_A == 57 + #include "armv8a_system_control.h" +#else + #warning "Unknown or unsupported core" +#endif + +/** \brief Enable Floating Point Unit + + Critical section, called from undef handler, so systick is disabled + */ +#if __CORTEX_A == 5 ||__CORTEX_A == 7 || __CORTEX_A == 9 +__STATIC_INLINE void __FPU_Enable(void) +{ + // Permit access to VFP/NEON, registers by modifying CPACR + const uint32_t cpacr = __get_CPACR(); + __set_CPACR(cpacr | 0x00F00000ul); + __ISB(); + + // Enable VFP/NEON + const uint32_t fpexc = __get_FPEXC(); + __set_FPEXC(fpexc | 0x40000000ul); + + __ASM volatile( + // Initialise VFP/NEON registers to 0 + " MOV R2,#0 \n" + + // Initialise D16 registers to 0 + " VMOV D0, R2,R2 \n" + " VMOV D1, R2,R2 \n" + " VMOV D2, R2,R2 \n" + " VMOV D3, R2,R2 \n" + " VMOV D4, R2,R2 \n" + " VMOV D5, R2,R2 \n" + " VMOV D6, R2,R2 \n" + " VMOV D7, R2,R2 \n" + " VMOV D8, R2,R2 \n" + " VMOV D9, R2,R2 \n" + " VMOV D10,R2,R2 \n" + " VMOV D11,R2,R2 \n" + " VMOV D12,R2,R2 \n" + " VMOV D13,R2,R2 \n" + " VMOV D14,R2,R2 \n" + " VMOV D15,R2,R2 \n" + +#if (defined(__ARM_NEON) && (__ARM_NEON == 1)) + // Initialise D32 registers to 0 + " VMOV D16,R2,R2 \n" + " VMOV D17,R2,R2 \n" + " VMOV D18,R2,R2 \n" + " VMOV D19,R2,R2 \n" + " VMOV D20,R2,R2 \n" + " VMOV D21,R2,R2 \n" + " VMOV D22,R2,R2 \n" + " VMOV D23,R2,R2 \n" + " VMOV D24,R2,R2 \n" + " VMOV D25,R2,R2 \n" + " VMOV D26,R2,R2 \n" + " VMOV D27,R2,R2 \n" + " VMOV D28,R2,R2 \n" + " VMOV D29,R2,R2 \n" + " VMOV D30,R2,R2 \n" + " VMOV D31,R2,R2 \n" +#endif + : : : "cc", "r2" + ); + + // Initialise FPSCR to a known state + const uint32_t fpscr = __get_FPSCR(); + __set_FPSCR(fpscr & 0x00086060ul); +} +#elif __CORTEX_A == 35 || __CORTEX_A == 53 || __CORTEX_A == 57 + __STATIC_INLINE void __FPU_Enable(void) + { + __ASM volatile( + // In AArch64, you do not need to enable access to the NEON and FP registers. + // However, access to the NEON and FP registers can still be trapped. + + // Disable trapping of accessing in EL3 and EL2. + " MSR CPTR_EL3, XZR \n" + " MSR CPTR_EL2, XZR \n" + + // Disable access trapping in EL1 and EL0. + " MOV X1, #(0x3 << 20) \n" + + // FPEN disables trapping to EL1. + " MSR CPACR_EL1, X1 \n" + + //Ensure that subsequent instructions occur in the context of VFP/NEON access permitted + " ISB " + + : : : "cc", "x1" + ); + } +#endif /* #elif __CORTEX_A == 35 || __CORTEX_A == 53 || __CORTEX_A == 57 */ +/*@} end of group CMSIS_Core_intrinsics */ + +#pragma GCC diagnostic pop + +#endif /* __CMSIS_GCC_COREA_H */ diff --git a/CMSIS/Core/Include/cmsis_gcc_corem.h b/CMSIS/Core/Include/cmsis_gcc_corem.h new file mode 100644 index 000000000..bf27754d1 --- /dev/null +++ b/CMSIS/Core/Include/cmsis_gcc_corem.h @@ -0,0 +1,723 @@ +/**************************************************************************//** + * @file cmsis_gcc_corem.h + * @brief CMSIS compiler GCC header file + * @version V6.0.0 + * @date 4. August 2023 + ******************************************************************************/ +/* + * Copyright (c) 2009-2023 Arm Limited. All rights reserved. + * + * SPDX-License-Identifier: Apache-2.0 + * + * Licensed under the Apache License, Version 2.0 (the License); you may + * not use this file except in compliance with the License. 
+ * You may obtain a copy of the License at
+ *
+ * www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an AS IS BASIS, WITHOUT
+ * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#ifndef __CMSIS_GCC_COREM_H
+#define __CMSIS_GCC_COREM_H
+
+// Include the generic settings:
+#include "cmsis_gcc.h"
+
+/* ignore some GCC warnings */
+#pragma GCC diagnostic push
+#pragma GCC diagnostic ignored "-Wsign-conversion"
+#pragma GCC diagnostic ignored "-Wconversion"
+#pragma GCC diagnostic ignored "-Wunused-parameter"
+
+
+/* ######################### Startup and Lowlevel Init ######################## */
+
+#ifndef __PROGRAM_START
+ /**
+ \brief Initializes data and bss sections
+ \details This default implementations initialized all data and additional bss
+ sections relying on .copy.table and .zero.table specified properly
+ in the used linker script.
+
+ */
+ __STATIC_FORCEINLINE __NO_RETURN void __cmsis_start(void)
+ {
+ extern void _start(void) __NO_RETURN;
+
+ typedef struct __copy_table {
+ uint32_t const* src;
+ uint32_t* dest;
+ uint32_t wlen;
+ } __copy_table_t;
+
+ typedef struct __zero_table {
+ uint32_t* dest;
+ uint32_t wlen;
+ } __zero_table_t;
+
+ extern const __copy_table_t __copy_table_start__;
+ extern const __copy_table_t __copy_table_end__;
+ extern const __zero_table_t __zero_table_start__;
+ extern const __zero_table_t __zero_table_end__;
+
+ for (__copy_table_t const* pTable = &__copy_table_start__; pTable < &__copy_table_end__; ++pTable) {
+ for(uint32_t i=0u; i<pTable->wlen; ++i) {
+ pTable->dest[i] = pTable->src[i];
+ }
+ }
+
+ for (__zero_table_t const* pTable = &__zero_table_start__; pTable < &__zero_table_end__; ++pTable) {
+ for(uint32_t i=0u; i<pTable->wlen; ++i) {
+ pTable->dest[i] = 0u;
+ }
+ }
+
+ _start();
+ }
+
+ #define __PROGRAM_START __cmsis_start
+#endif
+
+#ifndef __INITIAL_SP
+ #define __INITIAL_SP __StackTop
+#endif
+
+#ifndef __STACK_LIMIT
+ #define __STACK_LIMIT __StackLimit
+#endif
+
+#ifndef __VECTOR_TABLE
+ #define __VECTOR_TABLE __Vectors
+#endif
+
+#ifndef __VECTOR_TABLE_ATTRIBUTE
+ #define __VECTOR_TABLE_ATTRIBUTE __attribute__((used, section(".vectors")))
+#endif
+
+#if (__ARM_FEATURE_CMSE == 3)
+ #ifndef __STACK_SEAL
+ #define __STACK_SEAL __StackSeal
+ #endif
+
+ #ifndef __TZ_STACK_SEAL_SIZE
+ #define __TZ_STACK_SEAL_SIZE 8U
+ #endif
+
+ #ifndef __TZ_STACK_SEAL_VALUE
+ #define __TZ_STACK_SEAL_VALUE 0xFEF5EDA5FEF5EDA5ULL
+ #endif
+
+
+ __STATIC_FORCEINLINE void __TZ_set_STACKSEAL_S (uint32_t* stackTop) {
+ *((uint64_t *)stackTop) = __TZ_STACK_SEAL_VALUE;
+ }
+#endif
+
+
+/* ########################### Core Function Access ########################### */
+/** \ingroup CMSIS_Core_FunctionInterface
+ \defgroup CMSIS_Core_RegAccFunctions CMSIS Core Register Access Functions
+ @{
+ */
+
+/**
+ \brief Get Control Register
+ \details Returns the content of the Control Register.
+ \return Control Register value
+ */
+__STATIC_FORCEINLINE uint32_t __get_CONTROL(void)
+{
+ uint32_t result;
+
+ __ASM volatile ("MRS %0, control" : "=r" (result) );
+ return (result);
+}
+
+
+#if (__ARM_FEATURE_CMSE == 3)
+ /**
+ \brief Get Control Register (non-secure)
+ \details Returns the content of the non-secure Control Register when in secure mode.
+ \return non-secure Control Register value + */ + __STATIC_FORCEINLINE uint32_t __TZ_get_CONTROL_NS(void) + { + uint32_t result; + + __ASM volatile ("MRS %0, control_ns" : "=r" (result) ); + return (result); + } +#endif + + +/** + \brief Set Control Register + \details Writes the given value to the Control Register. + \param [in] control Control Register value to set + */ +__STATIC_FORCEINLINE void __set_CONTROL(uint32_t control) +{ + __ASM volatile ("MSR control, %0" : : "r" (control) : "memory"); + __ISB(); +} + + +#if (__ARM_FEATURE_CMSE == 3) + /** + \brief Set Control Register (non-secure) + \details Writes the given value to the non-secure Control Register when in secure state. + \param [in] control Control Register value to set + */ + __STATIC_FORCEINLINE void __TZ_set_CONTROL_NS(uint32_t control) + { + __ASM volatile ("MSR control_ns, %0" : : "r" (control) : "memory"); + __ISB(); + } +#endif + + +/** + \brief Get IPSR Register + \details Returns the content of the IPSR Register. + \return IPSR Register value + */ +__STATIC_FORCEINLINE uint32_t __get_IPSR(void) +{ + uint32_t result; + + __ASM volatile ("MRS %0, ipsr" : "=r" (result) ); + return (result); +} + + +/** + \brief Get APSR Register + \details Returns the content of the APSR Register. + \return APSR Register value + */ +__STATIC_FORCEINLINE uint32_t __get_APSR(void) +{ + uint32_t result; + + __ASM volatile ("MRS %0, apsr" : "=r" (result) ); + return (result); +} + + +/** + \brief Get xPSR Register + \details Returns the content of the xPSR Register. + \return xPSR Register value + */ +__STATIC_FORCEINLINE uint32_t __get_xPSR(void) +{ + uint32_t result; + + __ASM volatile ("MRS %0, xpsr" : "=r" (result) ); + return (result); +} + + +/** + \brief Get Process Stack Pointer + \details Returns the current value of the Process Stack Pointer (PSP). + \return PSP Register value + */ +__STATIC_FORCEINLINE uint32_t __get_PSP(void) +{ + uint32_t result; + + __ASM volatile ("MRS %0, psp" : "=r" (result) ); + return (result); +} + + +#if (__ARM_FEATURE_CMSE == 3) + /** + \brief Get Process Stack Pointer (non-secure) + \details Returns the current value of the non-secure Process Stack Pointer (PSP) when in secure state. + \return PSP Register value + */ + __STATIC_FORCEINLINE uint32_t __TZ_get_PSP_NS(void) + { + uint32_t result; + + __ASM volatile ("MRS %0, psp_ns" : "=r" (result) ); + return (result); + } +#endif + + +/** + \brief Set Process Stack Pointer + \details Assigns the given value to the Process Stack Pointer (PSP). + \param [in] topOfProcStack Process Stack Pointer value to set + */ +__STATIC_FORCEINLINE void __set_PSP(uint32_t topOfProcStack) +{ + __ASM volatile ("MSR psp, %0" : : "r" (topOfProcStack) : ); +} + + +#if (__ARM_FEATURE_CMSE == 3) + /** + \brief Set Process Stack Pointer (non-secure) + \details Assigns the given value to the non-secure Process Stack Pointer (PSP) when in secure state. + \param [in] topOfProcStack Process Stack Pointer value to set + */ + __STATIC_FORCEINLINE void __TZ_set_PSP_NS(uint32_t topOfProcStack) + { + __ASM volatile ("MSR psp_ns, %0" : : "r" (topOfProcStack) : ); + } +#endif + + +/** + \brief Get Main Stack Pointer + \details Returns the current value of the Main Stack Pointer (MSP). 
+ \return MSP Register value + */ +__STATIC_FORCEINLINE uint32_t __get_MSP(void) +{ + uint32_t result; + + __ASM volatile ("MRS %0, msp" : "=r" (result) ); + return (result); +} + + +#if (__ARM_FEATURE_CMSE == 3) + /** + \brief Get Main Stack Pointer (non-secure) + \details Returns the current value of the non-secure Main Stack Pointer (MSP) when in secure state. + \return MSP Register value + */ + __STATIC_FORCEINLINE uint32_t __TZ_get_MSP_NS(void) + { + uint32_t result; + + __ASM volatile ("MRS %0, msp_ns" : "=r" (result) ); + return (result); + } +#endif + + +/** + \brief Set Main Stack Pointer + \details Assigns the given value to the Main Stack Pointer (MSP). + \param [in] topOfMainStack Main Stack Pointer value to set + */ +__STATIC_FORCEINLINE void __set_MSP(uint32_t topOfMainStack) +{ + __ASM volatile ("MSR msp, %0" : : "r" (topOfMainStack) : ); +} + + +#if (__ARM_FEATURE_CMSE == 3) + /** + \brief Set Main Stack Pointer (non-secure) + \details Assigns the given value to the non-secure Main Stack Pointer (MSP) when in secure state. + \param [in] topOfMainStack Main Stack Pointer value to set + */ + __STATIC_FORCEINLINE void __TZ_set_MSP_NS(uint32_t topOfMainStack) + { + __ASM volatile ("MSR msp_ns, %0" : : "r" (topOfMainStack) : ); + } +#endif + + +#if (__ARM_FEATURE_CMSE == 3) + /** + \brief Get Stack Pointer (non-secure) + \details Returns the current value of the non-secure Stack Pointer (SP) when in secure state. + \return SP Register value + */ + __STATIC_FORCEINLINE uint32_t __TZ_get_SP_NS(void) + { + uint32_t result; + + __ASM volatile ("MRS %0, sp_ns" : "=r" (result) ); + return (result); + } + + + /** + \brief Set Stack Pointer (non-secure) + \details Assigns the given value to the non-secure Stack Pointer (SP) when in secure state. + \param [in] topOfStack Stack Pointer value to set + */ + __STATIC_FORCEINLINE void __TZ_set_SP_NS(uint32_t topOfStack) + { + __ASM volatile ("MSR sp_ns, %0" : : "r" (topOfStack) : ); + } +#endif + + +/** + \brief Get Priority Mask + \details Returns the current state of the priority mask bit from the Priority Mask Register. + \return Priority Mask value + */ +__STATIC_FORCEINLINE uint32_t __get_PRIMASK(void) +{ + uint32_t result; + + __ASM volatile ("MRS %0, primask" : "=r" (result) ); + return (result); +} + + +#if (__ARM_FEATURE_CMSE == 3) + /** + \brief Get Priority Mask (non-secure) + \details Returns the current state of the non-secure priority mask bit from the Priority Mask Register when in secure state. + \return Priority Mask value + */ + __STATIC_FORCEINLINE uint32_t __TZ_get_PRIMASK_NS(void) + { + uint32_t result; + + __ASM volatile ("MRS %0, primask_ns" : "=r" (result) ); + return (result); + } +#endif + + +/** + \brief Set Priority Mask + \details Assigns the given value to the Priority Mask Register. + \param [in] priMask Priority Mask + */ +__STATIC_FORCEINLINE void __set_PRIMASK(uint32_t priMask) +{ + __ASM volatile ("MSR primask, %0" : : "r" (priMask) : "memory"); +} + + +#if (__ARM_FEATURE_CMSE == 3) + /** + \brief Set Priority Mask (non-secure) + \details Assigns the given value to the non-secure Priority Mask Register when in secure state. + \param [in] priMask Priority Mask + */ + __STATIC_FORCEINLINE void __TZ_set_PRIMASK_NS(uint32_t priMask) + { + __ASM volatile ("MSR primask_ns, %0" : : "r" (priMask) : "memory"); + } +#endif + + +#if (__ARM_ARCH_ISA_THUMB >= 2) + + /** + \brief Get Base Priority + \details Returns the current value of the Base Priority register. 
+ \return Base Priority register value + */ + __STATIC_FORCEINLINE uint32_t __get_BASEPRI(void) + { + uint32_t result; + + __ASM volatile ("MRS %0, basepri" : "=r" (result) ); + return (result); + } + + + #if (__ARM_FEATURE_CMSE == 3) + /** + \brief Get Base Priority (non-secure) + \details Returns the current value of the non-secure Base Priority register when in secure state. + \return Base Priority register value + */ + __STATIC_FORCEINLINE uint32_t __TZ_get_BASEPRI_NS(void) + { + uint32_t result; + + __ASM volatile ("MRS %0, basepri_ns" : "=r" (result) ); + return (result); + } + #endif + + + /** + \brief Set Base Priority + \details Assigns the given value to the Base Priority register. + \param [in] basePri Base Priority value to set + */ + __STATIC_FORCEINLINE void __set_BASEPRI(uint32_t basePri) + { + __ASM volatile ("MSR basepri, %0" : : "r" (basePri) : "memory"); + } + + + #if (__ARM_FEATURE_CMSE == 3) + /** + \brief Set Base Priority (non-secure) + \details Assigns the given value to the non-secure Base Priority register when in secure state. + \param [in] basePri Base Priority value to set + */ + __STATIC_FORCEINLINE void __TZ_set_BASEPRI_NS(uint32_t basePri) + { + __ASM volatile ("MSR basepri_ns, %0" : : "r" (basePri) : "memory"); + } + #endif + + + /** + \brief Set Base Priority with condition + \details Assigns the given value to the Base Priority register only if BASEPRI masking is disabled, + or the new value increases the BASEPRI priority level. + \param [in] basePri Base Priority value to set + */ + __STATIC_FORCEINLINE void __set_BASEPRI_MAX(uint32_t basePri) + { + __ASM volatile ("MSR basepri_max, %0" : : "r" (basePri) : "memory"); + } + + + /** + \brief Get Fault Mask + \details Returns the current value of the Fault Mask register. + \return Fault Mask register value + */ + __STATIC_FORCEINLINE uint32_t __get_FAULTMASK(void) + { + uint32_t result; + + __ASM volatile ("MRS %0, faultmask" : "=r" (result) ); + return (result); + } + + +#if (__ARM_FEATURE_CMSE == 3) + /** + \brief Get Fault Mask (non-secure) + \details Returns the current value of the non-secure Fault Mask register when in secure state. + \return Fault Mask register value + */ + __STATIC_FORCEINLINE uint32_t __TZ_get_FAULTMASK_NS(void) + { + uint32_t result; + + __ASM volatile ("MRS %0, faultmask_ns" : "=r" (result) ); + return (result); + } + #endif + + + /** + \brief Set Fault Mask + \details Assigns the given value to the Fault Mask register. + \param [in] faultMask Fault Mask value to set + */ + __STATIC_FORCEINLINE void __set_FAULTMASK(uint32_t faultMask) + { + __ASM volatile ("MSR faultmask, %0" : : "r" (faultMask) : "memory"); + } + + + #if (__ARM_FEATURE_CMSE == 3) + /** + \brief Set Fault Mask (non-secure) + \details Assigns the given value to the non-secure Fault Mask register when in secure state. + \param [in] faultMask Fault Mask value to set + */ + __STATIC_FORCEINLINE void __TZ_set_FAULTMASK_NS(uint32_t faultMask) + { + __ASM volatile ("MSR faultmask_ns, %0" : : "r" (faultMask) : "memory"); + } + #endif +#endif /* (__ARM_ARCH_ISA_THUMB >= 2) */ + + +#if (__ARM_ARCH >= 8) + /** + \brief Get Process Stack Pointer Limit + Devices without ARMv8-M Main Extensions (i.e. Cortex-M23) lack the non-secure + Stack Pointer Limit register hence zero is returned always in non-secure + mode. + + \details Returns the current value of the Process Stack Pointer Limit (PSPLIM). 
+ \return PSPLIM Register value + */ + __STATIC_FORCEINLINE uint32_t __get_PSPLIM(void) + { + #if (((__ARM_ARCH_8M_MAIN__ < 1) && \ + (__ARM_ARCH_8_1M_MAIN__ < 1) ) && \ + (__ARM_FEATURE_CMSE < 3) ) + // without main extensions, the non-secure PSPLIM is RAZ/WI + return (0U); + #else + uint32_t result; + __ASM volatile ("MRS %0, psplim" : "=r" (result) ); + return (result); + #endif + } + +#if (__ARM_FEATURE_CMSE == 3) + /** + \brief Get Process Stack Pointer Limit (non-secure) + Devices without ARMv8-M Main Extensions (i.e. Cortex-M23) lack the non-secure + Stack Pointer Limit register hence zero is returned always. + + \details Returns the current value of the non-secure Process Stack Pointer Limit (PSPLIM) when in secure state. + \return PSPLIM Register value + */ + __STATIC_FORCEINLINE uint32_t __TZ_get_PSPLIM_NS(void) + { + #if ((__ARM_ARCH_8M_MAIN__ < 1) && (__ARM_ARCH_8_1M_MAIN__ < 1)) + // without main extensions, the non-secure PSPLIM is RAZ/WI + return (0U); + #else + uint32_t result; + __ASM volatile ("MRS %0, psplim_ns" : "=r" (result) ); + return (result); + #endif + } + #endif + + + /** + \brief Set Process Stack Pointer Limit + Devices without ARMv8-M Main Extensions (i.e. Cortex-M23) lack the non-secure + Stack Pointer Limit register hence the write is silently ignored in non-secure + mode. + + \details Assigns the given value to the Process Stack Pointer Limit (PSPLIM). + \param [in] ProcStackPtrLimit Process Stack Pointer Limit value to set + */ + __STATIC_FORCEINLINE void __set_PSPLIM(uint32_t ProcStackPtrLimit) + { + #if (((__ARM_ARCH_8M_MAIN__ < 1) && \ + (__ARM_ARCH_8_1M_MAIN__ < 1) ) && \ + (__ARM_FEATURE_CMSE < 3) ) + // without main extensions, the non-secure PSPLIM is RAZ/WI + (void)ProcStackPtrLimit; + #else + __ASM volatile ("MSR psplim, %0" : : "r" (ProcStackPtrLimit)); + #endif + } + + +#if (__ARM_FEATURE_CMSE == 3) + /** + \brief Set Process Stack Pointer (non-secure) + Devices without ARMv8-M Main Extensions (i.e. Cortex-M23) lack the non-secure + Stack Pointer Limit register hence the write is silently ignored. + + \details Assigns the given value to the non-secure Process Stack Pointer Limit (PSPLIM) when in secure state. + \param [in] ProcStackPtrLimit Process Stack Pointer Limit value to set + */ + __STATIC_FORCEINLINE void __TZ_set_PSPLIM_NS(uint32_t ProcStackPtrLimit) + { + #if ((__ARM_ARCH_8M_MAIN__ < 1) && (__ARM_ARCH_8_1M_MAIN__ < 1)) + // without main extensions, the non-secure PSPLIM is RAZ/WI + (void)ProcStackPtrLimit; + #else + __ASM volatile ("MSR psplim_ns, %0\n" : : "r" (ProcStackPtrLimit)); + #endif + } + #endif + + + /** + \brief Get Main Stack Pointer Limit + Devices without ARMv8-M Main Extensions (i.e. Cortex-M23) lack the non-secure + Stack Pointer Limit register hence zero is returned always. + + \details Returns the current value of the Main Stack Pointer Limit (MSPLIM). + \return MSPLIM Register value + */ + __STATIC_FORCEINLINE uint32_t __get_MSPLIM(void) + { + #if (((__ARM_ARCH_8M_MAIN__ < 1) && \ + (__ARM_ARCH_8_1M_MAIN__ < 1) ) && \ + (__ARM_FEATURE_CMSE < 3) ) + // without main extensions, the non-secure MSPLIM is RAZ/WI + return (0U); + #else + uint32_t result; + __ASM volatile ("MRS %0, msplim" : "=r" (result) ); + return (result); + #endif + } + + +#if (__ARM_FEATURE_CMSE == 3) + /** + \brief Get Main Stack Pointer Limit (non-secure) + Devices without ARMv8-M Main Extensions (i.e. Cortex-M23) lack the non-secure + Stack Pointer Limit register hence zero is returned always. 
+ + \details Returns the current value of the non-secure Main Stack Pointer Limit(MSPLIM) when in secure state. + \return MSPLIM Register value + */ + __STATIC_FORCEINLINE uint32_t __TZ_get_MSPLIM_NS(void) + { + #if ((__ARM_ARCH_8M_MAIN__ < 1) && (__ARM_ARCH_8_1M_MAIN__ < 1)) + // without main extensions, the non-secure MSPLIM is RAZ/WI + return (0U); + #else + uint32_t result; + __ASM volatile ("MRS %0, msplim_ns" : "=r" (result) ); + return (result); + #endif + } + #endif + + + /** + \brief Set Main Stack Pointer Limit + Devices without ARMv8-M Main Extensions (i.e. Cortex-M23) lack the non-secure + Stack Pointer Limit register hence the write is silently ignored. + + \details Assigns the given value to the Main Stack Pointer Limit (MSPLIM). + \param [in] MainStackPtrLimit Main Stack Pointer Limit value to set + */ + __STATIC_FORCEINLINE void __set_MSPLIM(uint32_t MainStackPtrLimit) + { + #if (((__ARM_ARCH_8M_MAIN__ < 1) && \ + (__ARM_ARCH_8_1M_MAIN__ < 1) ) && \ + (__ARM_FEATURE_CMSE < 3) ) + // without main extensions, the non-secure MSPLIM is RAZ/WI + (void)MainStackPtrLimit; + #else + __ASM volatile ("MSR msplim, %0" : : "r" (MainStackPtrLimit)); + #endif + } + + +#if (__ARM_FEATURE_CMSE == 3) + /** + \brief Set Main Stack Pointer Limit (non-secure) + Devices without ARMv8-M Main Extensions (i.e. Cortex-M23) lack the non-secure + Stack Pointer Limit register hence the write is silently ignored. + + \details Assigns the given value to the non-secure Main Stack Pointer Limit (MSPLIM) when in secure state. + \param [in] MainStackPtrLimit Main Stack Pointer value to set + */ + __STATIC_FORCEINLINE void __TZ_set_MSPLIM_NS(uint32_t MainStackPtrLimit) + { + #if ((__ARM_ARCH_8M_MAIN__ < 1) && (__ARM_ARCH_8_1M_MAIN__ < 1)) + // without main extensions, the non-secure MSPLIM is RAZ/WI + (void)MainStackPtrLimit; + #else + __ASM volatile ("MSR msplim_ns, %0" : : "r" (MainStackPtrLimit)); + #endif + } + #endif +#endif /* (__ARM_ARCH >= 8) */ + + + +/*@} end of CMSIS_Core_RegAccFunctions */ + + +#pragma GCC diagnostic pop + +#endif /* __CMSIS_GCC_COREM_H */ diff --git a/CMSIS/Core/Include/cmsis_gcc_corer.h b/CMSIS/Core/Include/cmsis_gcc_corer.h new file mode 100644 index 000000000..5aec44f1e --- /dev/null +++ b/CMSIS/Core/Include/cmsis_gcc_corer.h @@ -0,0 +1,153 @@ +/**************************************************************************//** + * @file cmsis_gcc_corer.h + * @brief CMSIS compiler GCC header file + * @version V6.0.0 + * @date 4. August 2023 + ******************************************************************************/ +/* + * Copyright (c) 2009-2023 Arm Limited. All rights reserved. + * + * SPDX-License-Identifier: Apache-2.0 + * + * Licensed under the Apache License, Version 2.0 (the License); you may + * not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an AS IS BASIS, WITHOUT + * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +#ifndef __CMSIS_GCC_CORER_H +#define __CMSIS_GCC_CORER_H + +// Include the generic settings: +#include "cmsis_gcc.h" + +/* ignore some GCC warnings */ +#pragma GCC diagnostic push +#pragma GCC diagnostic ignored "-Wsign-conversion" +#pragma GCC diagnostic ignored "-Wconversion" +#pragma GCC diagnostic ignored "-Wunused-parameter" + + +/** \defgroup CMSIS_Core_intrinsics CMSIS Core Intrinsics + Access to dedicated SIMD instructions + @{ +*/ +/** \brief Get CPSR Register + \return CPSR Register value + */ +__STATIC_FORCEINLINE uint32_t __get_CPSR(void) +{ + uint32_t result; + __ASM volatile("MRS %0, cpsr" : "=r" (result) ); + return(result); +} + +/** \brief Set CPSR Register + \param [in] cpsr CPSR value to set + */ +__STATIC_FORCEINLINE void __set_CPSR(uint32_t cpsr) +{ + __ASM volatile ("MSR cpsr, %0" : : "r" (cpsr) : "cc", "memory"); +} + +/** \brief Get Mode + \return Processor Mode + */ +__STATIC_FORCEINLINE uint32_t __get_mode(void) +{ + return (__get_CPSR() & 0x1FU); +} + +/** \brief Set Mode + \param [in] mode Mode value to set + */ +__STATIC_FORCEINLINE void __set_mode(uint32_t mode) +{ + __ASM volatile("MSR cpsr_c, %0" : : "r" (mode) : "memory"); +} + +/** \brief Get Stack Pointer + \return Stack Pointer value + */ +__STATIC_FORCEINLINE uint32_t __get_SP(void) +{ + uint32_t result; + __ASM volatile("MOV %0, sp" : "=r" (result) : : "memory"); + return result; +} + +/** \brief Set Stack Pointer + \param [in] stack Stack Pointer value to set + */ +__STATIC_FORCEINLINE void __set_SP(uint32_t stack) +{ + __ASM volatile("MOV sp, %0" : : "r" (stack) : "memory"); +} + +/** \brief Get USR/SYS Stack Pointer + \return USR/SYS Stack Pointer value + */ +__STATIC_FORCEINLINE uint32_t __get_SP_usr(void) +{ + uint32_t cpsr = __get_CPSR(); + uint32_t result; + __ASM volatile( + "CPS #0x1F \n" + "MOV %0, sp " : "=r"(result) : : "memory" + ); + __set_CPSR(cpsr); + __ISB(); + return result; +} + +/** \brief Set USR/SYS Stack Pointer + \param [in] topOfProcStack USR/SYS Stack Pointer value to set + */ +__STATIC_FORCEINLINE void __set_SP_usr(uint32_t topOfProcStack) +{ + uint32_t cpsr = __get_CPSR(); + __ASM volatile( + "CPS #0x1F \n" + "MOV sp, %0 " : : "r" (topOfProcStack) : "memory" + ); + __set_CPSR(cpsr); + __ISB(); +} + +/** \brief Get FPEXC + \return Floating Point Exception Control register value + */ +__STATIC_FORCEINLINE uint32_t __get_FPEXC(void) +{ +#if (__FPU_PRESENT == 1) + uint32_t result; + __ASM volatile("VMRS %0, fpexc" : "=r" (result) : : "memory"); + return(result); +#else + return(0); +#endif +} + +/** \brief Set FPEXC + \param [in] fpexc Floating Point Exception Control value to set + */ +__STATIC_FORCEINLINE void __set_FPEXC(uint32_t fpexc) +{ +#if (__FPU_PRESENT == 1) + __ASM volatile ("VMSR fpexc, %0" : : "r" (fpexc) : "memory"); +#endif +} + + +/*@} end of group CMSIS_Core_intrinsics */ + +#pragma GCC diagnostic pop + +#endif /* __CMSIS_GCC_CORER_H */ diff --git a/CMSIS/Core/Include/cmsis_iccarm.h b/CMSIS/Core/Include/cmsis_iccarm.h index 47d6a859e..c6e003a54 100644 --- a/CMSIS/Core/Include/cmsis_iccarm.h +++ b/CMSIS/Core/Include/cmsis_iccarm.h @@ -1,8 +1,8 @@ /**************************************************************************//** * @file cmsis_iccarm.h * @brief CMSIS compiler ICCARM (IAR Compiler for Arm) header file - * @version V5.4.0 - * @date 20. January 2023 + * @version V5.0.0 + * @date 04. 
December 2022 ******************************************************************************/ //------------------------------------------------------------------------------ @@ -25,7 +25,6 @@ // //------------------------------------------------------------------------------ - #ifndef __CMSIS_ICCARM_H__ #define __CMSIS_ICCARM_H__ @@ -43,6 +42,8 @@ #define __ICCARM_V8 0 #endif +//#pragma language=extended + #ifndef __ALIGNED #if __ICCARM_V8 #define __ALIGNED(x) __attribute__((aligned(x))) @@ -58,13 +59,15 @@ /* Define compiler macros for CPU architecture, used in CMSIS 5. */ -#if __ARM_ARCH_6M__ || __ARM_ARCH_7M__ || __ARM_ARCH_7EM__ || __ARM_ARCH_8M_BASE__ || __ARM_ARCH_8M_MAIN__ -/* Macros already defined */ +#if __ARM_ARCH_6M__ || __ARM_ARCH_7M__ || __ARM_ARCH_7EM__ || __ARM_ARCH_8M_BASE__ || __ARM_ARCH_8M_MAIN__ || __ARM_ARCH_7A__ + /* Macros already defined */ #else #if defined(__ARM8M_MAINLINE__) || defined(__ARM8EM_MAINLINE__) #define __ARM_ARCH_8M_MAIN__ 1 #elif defined(__ARM8M_BASELINE__) #define __ARM_ARCH_8M_BASE__ 1 + #elif defined(__ARM7A__) + #define __ARM_ARCH_7A__ 1 #elif defined(__ARM_ARCH_PROFILE) && __ARM_ARCH_PROFILE == 'M' #if __ARM_ARCH == 6 #define __ARM_ARCH_6M__ 1 @@ -108,12 +111,6 @@ #define __IAR_M0_FAMILY 0 #endif -#ifndef __NO_INIT - #define __NO_INIT __attribute__ ((section (".noinit"))) -#endif -#ifndef __ALIAS - #define __ALIAS(x) __attribute__ ((alias(x))) -#endif #ifndef __ASM #define __ASM __asm @@ -123,6 +120,13 @@ #define __COMPILER_BARRIER() __ASM volatile("":::"memory") #endif +#ifndef __NO_INIT + #define __NO_INIT __attribute__ ((section (".noinit"))) +#endif +#ifndef __ALIAS + #define __ALIAS(x) __attribute__ ((alias(x))) +#endif + #ifndef __INLINE #define __INLINE inline #endif @@ -183,57 +187,63 @@ #define __STATIC_FORCEINLINE __FORCEINLINE __STATIC_INLINE #endif +#ifndef CMSIS_DEPRECATED + #define CMSIS_DEPRECATED __attribute__((deprecated)) +#endif + #ifndef __UNALIGNED_UINT16_READ -#pragma language=save -#pragma language=extended -__IAR_FT uint16_t __iar_uint16_read(void const *ptr) -{ - return *(__packed uint16_t*)(ptr); -} -#pragma language=restore -#define __UNALIGNED_UINT16_READ(PTR) __iar_uint16_read(PTR) + #pragma language=save + #pragma language=extended + __IAR_FT uint16_t __iar_uint16_read(void const *ptr) + { + return *(__packed uint16_t*)(ptr); + } + #pragma language=restore + #define __UNALIGNED_UINT16_READ(PTR) __iar_uint16_read(PTR) #endif #ifndef __UNALIGNED_UINT16_WRITE -#pragma language=save -#pragma language=extended -__IAR_FT void __iar_uint16_write(void const *ptr, uint16_t val) -{ - *(__packed uint16_t*)(ptr) = val;; -} -#pragma language=restore -#define __UNALIGNED_UINT16_WRITE(PTR,VAL) __iar_uint16_write(PTR,VAL) + #pragma language=save + #pragma language=extended + __IAR_FT void __iar_uint16_write(void const *ptr, uint16_t val) + { + *(__packed uint16_t*)(ptr) = val;; + } + #pragma language=restore + #define __UNALIGNED_UINT16_WRITE(PTR,VAL) __iar_uint16_write(PTR,VAL) #endif #ifndef __UNALIGNED_UINT32_READ -#pragma language=save -#pragma language=extended -__IAR_FT uint32_t __iar_uint32_read(void const *ptr) -{ - return *(__packed uint32_t*)(ptr); -} -#pragma language=restore -#define __UNALIGNED_UINT32_READ(PTR) __iar_uint32_read(PTR) + #pragma language=save + #pragma language=extended + __IAR_FT uint32_t __iar_uint32_read(void const *ptr) + { + return *(__packed uint32_t*)(ptr); + } + #pragma language=restore + #define __UNALIGNED_UINT32_READ(PTR) __iar_uint32_read(PTR) #endif #ifndef __UNALIGNED_UINT32_WRITE -#pragma 
language=save -#pragma language=extended -__IAR_FT void __iar_uint32_write(void const *ptr, uint32_t val) -{ - *(__packed uint32_t*)(ptr) = val;; -} -#pragma language=restore -#define __UNALIGNED_UINT32_WRITE(PTR,VAL) __iar_uint32_write(PTR,VAL) + #pragma language=save + #pragma language=extended + __IAR_FT void __iar_uint32_write(void const *ptr, uint32_t val) + { + *(__packed uint32_t*)(ptr) = val;; + } + #pragma language=restore + #define __UNALIGNED_UINT32_WRITE(PTR,VAL) __iar_uint32_write(PTR,VAL) #endif -#ifndef __UNALIGNED_UINT32 /* deprecated */ -#pragma language=save -#pragma language=extended -__packed struct __iar_u32 { uint32_t v; }; -#pragma language=restore -#define __UNALIGNED_UINT32(PTR) (((struct __iar_u32 *)(PTR))->v) +#if !defined (__arm__) + #ifndef __UNALIGNED_UINT32 /* deprecated */ + #pragma language=save + #pragma language=extended + __packed struct __iar_u32 { uint32_t v; }; + #pragma language=restore + #define __UNALIGNED_UINT32(PTR) (((struct __iar_u32 *)(PTR))->v) + #endif #endif #ifndef __USED @@ -253,50 +263,11 @@ __packed struct __iar_u32 { uint32_t v; }; #endif #endif -#ifndef __PROGRAM_START -#define __PROGRAM_START __iar_program_start -#endif - -#ifndef __INITIAL_SP -#define __INITIAL_SP CSTACK$$Limit -#endif - -#ifndef __STACK_LIMIT -#define __STACK_LIMIT CSTACK$$Base -#endif - -#ifndef __VECTOR_TABLE -#define __VECTOR_TABLE __vector_table -#endif - -#ifndef __VECTOR_TABLE_ATTRIBUTE -#define __VECTOR_TABLE_ATTRIBUTE @".intvec" -#endif - -#if defined (__ARM_FEATURE_CMSE) && (__ARM_FEATURE_CMSE == 3U) -#ifndef __STACK_SEAL -#define __STACK_SEAL STACKSEAL$$Base -#endif - -#ifndef __TZ_STACK_SEAL_SIZE -#define __TZ_STACK_SEAL_SIZE 8U -#endif - -#ifndef __TZ_STACK_SEAL_VALUE -#define __TZ_STACK_SEAL_VALUE 0xFEF5EDA5FEF5EDA5ULL -#endif - -__STATIC_FORCEINLINE void __TZ_set_STACKSEAL_S (uint32_t* stackTop) { - *((uint64_t *)stackTop) = __TZ_STACK_SEAL_VALUE; -} -#endif - #ifndef __ICCARM_INTRINSICS_VERSION__ #define __ICCARM_INTRINSICS_VERSION__ 0 #endif #if __ICCARM_INTRINSICS_VERSION__ == 2 - #if defined(__CLZ) #undef __CLZ #endif @@ -314,526 +285,267 @@ __STATIC_FORCEINLINE void __TZ_set_STACKSEAL_S (uint32_t* stackTop) { #endif #include "iccarm_builtin.h" - - #define __disable_fault_irq __iar_builtin_disable_fiq - #define __disable_irq __iar_builtin_disable_interrupt - #define __enable_fault_irq __iar_builtin_enable_fiq - #define __enable_irq __iar_builtin_enable_interrupt - #define __arm_rsr __iar_builtin_rsr - #define __arm_wsr __iar_builtin_wsr - - - #define __get_APSR() (__arm_rsr("APSR")) - #define __get_BASEPRI() (__arm_rsr("BASEPRI")) - #define __get_CONTROL() (__arm_rsr("CONTROL")) - #define __get_FAULTMASK() (__arm_rsr("FAULTMASK")) - - #if ((defined (__FPU_PRESENT) && (__FPU_PRESENT == 1U)) && \ - (defined (__FPU_USED ) && (__FPU_USED == 1U)) ) - #define __get_FPSCR() (__arm_rsr("FPSCR")) - #define __set_FPSCR(VALUE) (__arm_wsr("FPSCR", (VALUE))) - #else - #define __get_FPSCR() ( 0 ) - #define __set_FPSCR(VALUE) ((void)VALUE) - #endif - - #define __get_IPSR() (__arm_rsr("IPSR")) - #define __get_MSP() (__arm_rsr("MSP")) - #if (!(defined (__ARM_ARCH_8M_MAIN__ ) && (__ARM_ARCH_8M_MAIN__ == 1)) && \ - (!defined (__ARM_FEATURE_CMSE) || (__ARM_FEATURE_CMSE < 3))) - // without main extensions, the non-secure MSPLIM is RAZ/WI - #define __get_MSPLIM() (0U) - #else - #define __get_MSPLIM() (__arm_rsr("MSPLIM")) - #endif - #define __get_PRIMASK() (__arm_rsr("PRIMASK")) - #define __get_PSP() (__arm_rsr("PSP")) - - #if (!(defined (__ARM_ARCH_8M_MAIN__ ) && 
(__ARM_ARCH_8M_MAIN__ == 1)) && \ - (!defined (__ARM_FEATURE_CMSE) || (__ARM_FEATURE_CMSE < 3))) - // without main extensions, the non-secure PSPLIM is RAZ/WI - #define __get_PSPLIM() (0U) - #else - #define __get_PSPLIM() (__arm_rsr("PSPLIM")) - #endif - - #define __get_xPSR() (__arm_rsr("xPSR")) - - #define __set_BASEPRI(VALUE) (__arm_wsr("BASEPRI", (VALUE))) - #define __set_BASEPRI_MAX(VALUE) (__arm_wsr("BASEPRI_MAX", (VALUE))) - -__STATIC_FORCEINLINE void __set_CONTROL(uint32_t control) -{ - __arm_wsr("CONTROL", control); - __iar_builtin_ISB(); -} - - #define __set_FAULTMASK(VALUE) (__arm_wsr("FAULTMASK", (VALUE))) - #define __set_MSP(VALUE) (__arm_wsr("MSP", (VALUE))) - - #if (!(defined (__ARM_ARCH_8M_MAIN__ ) && (__ARM_ARCH_8M_MAIN__ == 1)) && \ - (!defined (__ARM_FEATURE_CMSE) || (__ARM_FEATURE_CMSE < 3))) - // without main extensions, the non-secure MSPLIM is RAZ/WI - #define __set_MSPLIM(VALUE) ((void)(VALUE)) - #else - #define __set_MSPLIM(VALUE) (__arm_wsr("MSPLIM", (VALUE))) - #endif - #define __set_PRIMASK(VALUE) (__arm_wsr("PRIMASK", (VALUE))) - #define __set_PSP(VALUE) (__arm_wsr("PSP", (VALUE))) - #if (!(defined (__ARM_ARCH_8M_MAIN__ ) && (__ARM_ARCH_8M_MAIN__ == 1)) && \ - (!defined (__ARM_FEATURE_CMSE) || (__ARM_FEATURE_CMSE < 3))) - // without main extensions, the non-secure PSPLIM is RAZ/WI - #define __set_PSPLIM(VALUE) ((void)(VALUE)) - #else - #define __set_PSPLIM(VALUE) (__arm_wsr("PSPLIM", (VALUE))) - #endif - - #define __TZ_get_CONTROL_NS() (__arm_rsr("CONTROL_NS")) - -__STATIC_FORCEINLINE void __TZ_set_CONTROL_NS(uint32_t control) -{ - __arm_wsr("CONTROL_NS", control); - __iar_builtin_ISB(); -} - - #define __TZ_get_PSP_NS() (__arm_rsr("PSP_NS")) - #define __TZ_set_PSP_NS(VALUE) (__arm_wsr("PSP_NS", (VALUE))) - #define __TZ_get_MSP_NS() (__arm_rsr("MSP_NS")) - #define __TZ_set_MSP_NS(VALUE) (__arm_wsr("MSP_NS", (VALUE))) - #define __TZ_get_SP_NS() (__arm_rsr("SP_NS")) - #define __TZ_set_SP_NS(VALUE) (__arm_wsr("SP_NS", (VALUE))) - #define __TZ_get_PRIMASK_NS() (__arm_rsr("PRIMASK_NS")) - #define __TZ_set_PRIMASK_NS(VALUE) (__arm_wsr("PRIMASK_NS", (VALUE))) - #define __TZ_get_BASEPRI_NS() (__arm_rsr("BASEPRI_NS")) - #define __TZ_set_BASEPRI_NS(VALUE) (__arm_wsr("BASEPRI_NS", (VALUE))) - #define __TZ_get_FAULTMASK_NS() (__arm_rsr("FAULTMASK_NS")) - #define __TZ_set_FAULTMASK_NS(VALUE)(__arm_wsr("FAULTMASK_NS", (VALUE))) - - #if (!(defined (__ARM_ARCH_8M_MAIN__ ) && (__ARM_ARCH_8M_MAIN__ == 1)) && \ - (!defined (__ARM_FEATURE_CMSE) || (__ARM_FEATURE_CMSE < 3))) - // without main extensions, the non-secure PSPLIM is RAZ/WI - #define __TZ_get_PSPLIM_NS() (0U) - #define __TZ_set_PSPLIM_NS(VALUE) ((void)(VALUE)) - #else - #define __TZ_get_PSPLIM_NS() (__arm_rsr("PSPLIM_NS")) - #define __TZ_set_PSPLIM_NS(VALUE) (__arm_wsr("PSPLIM_NS", (VALUE))) +#else /* __ICCARM_INTRINSICS_VERSION__ == 2 */ + #ifdef __INTRINSICS_INCLUDED + #error intrinsics.h is already included previously! #endif - #define __TZ_get_MSPLIM_NS() (__arm_rsr("MSPLIM_NS")) - #define __TZ_set_MSPLIM_NS(VALUE) (__arm_wsr("MSPLIM_NS", (VALUE))) + #include +#endif /* __ICCARM_INTRINSICS_VERSION__ == 2 */ +#if __ICCARM_INTRINSICS_VERSION__ == 2 + /* ########################## Core Instruction Access ######################### */ + /** \defgroup CMSIS_Core_InstructionInterface CMSIS Core Instruction Interface + Access to dedicated instructions + @{ + */ + + /** + \brief No Operation + \details No Operation does nothing. This instruction can be used for code alignment purposes. 
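As an editorial illustration (not part of the patch), the re-indented unaligned-access helpers above are typically used to pull multi-byte fields out of packed protocol frames; the frame layout and helper name below are assumptions.

#include <stdint.h>

/* Hypothetical frame: one type byte followed by a 32-bit little-endian length. */
static uint32_t read_length_field(const uint8_t *frame)
{
  /* frame + 1 is not 4-byte aligned; the helper emits a safe access sequence */
  return __UNALIGNED_UINT32_READ(frame + 1);
}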
+ */ #define __NOP __iar_builtin_no_operation - - #define __CLZ __iar_builtin_CLZ - #define __CLREX __iar_builtin_CLREX - - #define __DMB __iar_builtin_DMB - #define __DSB __iar_builtin_DSB + + /** + \brief Wait For Interrupt + \details Wait For Interrupt is a hint instruction that suspends execution until one of a number of events occurs. + */ + #define __WFI __iar_builtin_WFI + + /** + \brief Wait For Event + \details Wait For Event is a hint instruction that permits the processor to enter + a low-power state until one of a number of events occurs. + */ + #define __WFE __iar_builtin_WFE + + /** + \brief Send Event + \details Send Event is a hint instruction. It causes an event to be signaled to the CPU. + */ + #define __SEV __iar_builtin_SEV + + /** + \brief Instruction Synchronization Barrier + \details Instruction Synchronization Barrier flushes the pipeline in the processor, + so that all instructions following the ISB are fetched from cache or memory, + after the instruction has been completed. + */ #define __ISB __iar_builtin_ISB - - #define __LDREXB __iar_builtin_LDREXB - #define __LDREXH __iar_builtin_LDREXH - #define __LDREXW __iar_builtin_LDREX - - #define __RBIT __iar_builtin_RBIT + + /** + \brief Data Synchronization Barrier + \details Acts as a special kind of Data Memory Barrier. + It completes when all explicit memory accesses before this instruction complete. + */ + #define __DSB __iar_builtin_DSB + + /** + \brief Data Memory Barrier + \details Ensures the apparent order of the explicit memory operations before + and after the instruction, without ensuring their completion. + */ + #define __DMB __iar_builtin_DMB + + /** + \brief Reverse byte order (32 bit) + \details Reverses the byte order in unsigned integer value. For example, 0x12345678 becomes 0x78563412. + \param [in] value Value to reverse + \return Reversed value + */ #define __REV __iar_builtin_REV + + /** + \brief Reverse byte order (16 bit) + \details Reverses the byte order within each halfword of a word. For example, 0x12345678 becomes 0x34127856. + \param [in] value Value to reverse + \return Reversed value + */ #define __REV16 __iar_builtin_REV16 + /** + \brief Reverse byte order (16 bit) + \details Reverses the byte order in a 16-bit value and returns the signed 16-bit result. For example, 0x0080 becomes 0x8000. 
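A minimal usage sketch for the hint and barrier mappings above (editorial, not part of the patch); the surrounding application code is assumed.

#include <stdint.h>

static void idle_until_interrupt(void)
{
  __DSB();   /* complete outstanding memory accesses before sleeping */
  __WFI();   /* suspend execution until an interrupt arrives */
  __ISB();   /* refill the pipeline after wake-up */
}

static uint32_t host_to_be32(uint32_t value)
{
  return __REV(value);   /* byte-swap a little-endian word for a big-endian wire format */
}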
+ \param [in] value Value to reverse + \return Reversed value + */ __IAR_FT int16_t __REVSH(int16_t val) { return (int16_t) __iar_builtin_REVSH(val); } - - #define __ROR __iar_builtin_ROR - #define __RRX __iar_builtin_RRX - - #define __SEV __iar_builtin_SEV - - #if !__IAR_M0_FAMILY - #define __SSAT __iar_builtin_SSAT - #endif - - #define __STREXB __iar_builtin_STREXB - #define __STREXH __iar_builtin_STREXH - #define __STREXW __iar_builtin_STREX - - #if !__IAR_M0_FAMILY - #define __USAT __iar_builtin_USAT - #endif - - #define __WFE __iar_builtin_WFE - #define __WFI __iar_builtin_WFI - - #if __ARM_MEDIA__ - #define __SADD8 __iar_builtin_SADD8 - #define __QADD8 __iar_builtin_QADD8 - #define __SHADD8 __iar_builtin_SHADD8 - #define __UADD8 __iar_builtin_UADD8 - #define __UQADD8 __iar_builtin_UQADD8 - #define __UHADD8 __iar_builtin_UHADD8 - #define __SSUB8 __iar_builtin_SSUB8 - #define __QSUB8 __iar_builtin_QSUB8 - #define __SHSUB8 __iar_builtin_SHSUB8 - #define __USUB8 __iar_builtin_USUB8 - #define __UQSUB8 __iar_builtin_UQSUB8 - #define __UHSUB8 __iar_builtin_UHSUB8 - #define __SADD16 __iar_builtin_SADD16 - #define __QADD16 __iar_builtin_QADD16 - #define __SHADD16 __iar_builtin_SHADD16 - #define __UADD16 __iar_builtin_UADD16 - #define __UQADD16 __iar_builtin_UQADD16 - #define __UHADD16 __iar_builtin_UHADD16 - #define __SSUB16 __iar_builtin_SSUB16 - #define __QSUB16 __iar_builtin_QSUB16 - #define __SHSUB16 __iar_builtin_SHSUB16 - #define __USUB16 __iar_builtin_USUB16 - #define __UQSUB16 __iar_builtin_UQSUB16 - #define __UHSUB16 __iar_builtin_UHSUB16 - #define __SASX __iar_builtin_SASX - #define __QASX __iar_builtin_QASX - #define __SHASX __iar_builtin_SHASX - #define __UASX __iar_builtin_UASX - #define __UQASX __iar_builtin_UQASX - #define __UHASX __iar_builtin_UHASX - #define __SSAX __iar_builtin_SSAX - #define __QSAX __iar_builtin_QSAX - #define __SHSAX __iar_builtin_SHSAX - #define __USAX __iar_builtin_USAX - #define __UQSAX __iar_builtin_UQSAX - #define __UHSAX __iar_builtin_UHSAX - #define __USAD8 __iar_builtin_USAD8 - #define __USADA8 __iar_builtin_USADA8 - #define __SSAT16 __iar_builtin_SSAT16 - #define __USAT16 __iar_builtin_USAT16 - #define __UXTB16 __iar_builtin_UXTB16 - #define __UXTAB16 __iar_builtin_UXTAB16 - #define __SXTB16 __iar_builtin_SXTB16 - #define __SXTAB16 __iar_builtin_SXTAB16 - #define __SMUAD __iar_builtin_SMUAD - #define __SMUADX __iar_builtin_SMUADX - #define __SMMLA __iar_builtin_SMMLA - #define __SMLAD __iar_builtin_SMLAD - #define __SMLADX __iar_builtin_SMLADX - #define __SMLALD __iar_builtin_SMLALD - #define __SMLALDX __iar_builtin_SMLALDX - #define __SMUSD __iar_builtin_SMUSD - #define __SMUSDX __iar_builtin_SMUSDX - #define __SMLSD __iar_builtin_SMLSD - #define __SMLSDX __iar_builtin_SMLSDX - #define __SMLSLD __iar_builtin_SMLSLD - #define __SMLSLDX __iar_builtin_SMLSLDX - #define __SEL __iar_builtin_SEL - #define __QADD __iar_builtin_QADD - #define __QSUB __iar_builtin_QSUB - #define __PKHBT __iar_builtin_PKHBT - #define __PKHTB __iar_builtin_PKHTB - #endif - -#else /* __ICCARM_INTRINSICS_VERSION__ == 2 */ - - #if __IAR_M0_FAMILY - /* Avoid clash between intrinsics.h and arm_math.h when compiling for Cortex-M0. 
*/ - #define __CLZ __cmsis_iar_clz_not_active - #define __SSAT __cmsis_iar_ssat_not_active - #define __USAT __cmsis_iar_usat_not_active - #define __RBIT __cmsis_iar_rbit_not_active - #define __get_APSR __cmsis_iar_get_APSR_not_active - #endif - - - #if (!((defined (__FPU_PRESENT) && (__FPU_PRESENT == 1U)) && \ - (defined (__FPU_USED ) && (__FPU_USED == 1U)) )) - #define __get_FPSCR __cmsis_iar_get_FPSR_not_active - #define __set_FPSCR __cmsis_iar_set_FPSR_not_active - #endif - - #ifdef __INTRINSICS_INCLUDED - #error intrinsics.h is already included previously! - #endif - - #include - - #if __IAR_M0_FAMILY - /* Avoid clash between intrinsics.h and arm_math.h when compiling for Cortex-M0. */ - #undef __CLZ - #undef __SSAT - #undef __USAT - #undef __RBIT - #undef __get_APSR - - __STATIC_INLINE uint8_t __CLZ(uint32_t data) - { - if (data == 0U) { return 32U; } - - uint32_t count = 0U; - uint32_t mask = 0x80000000U; - - while ((data & mask) == 0U) - { - count += 1U; - mask = mask >> 1U; - } - return count; - } - - __STATIC_INLINE uint32_t __RBIT(uint32_t v) - { - uint8_t sc = 31U; - uint32_t r = v; - for (v >>= 1U; v; v >>= 1U) - { - r <<= 1U; - r |= v & 1U; - sc--; - } - return (r << sc); - } - - __STATIC_INLINE uint32_t __get_APSR(void) - { - uint32_t res; - __asm("MRS %0,APSR" : "=r" (res)); - return res; - } - - #endif - - #if (!((defined (__FPU_PRESENT) && (__FPU_PRESENT == 1U)) && \ - (defined (__FPU_USED ) && (__FPU_USED == 1U)) )) - #undef __get_FPSCR - #undef __set_FPSCR - #define __get_FPSCR() (0) - #define __set_FPSCR(VALUE) ((void)VALUE) - #endif - - #pragma diag_suppress=Pe940 - #pragma diag_suppress=Pe177 - - #define __enable_irq __enable_interrupt - #define __disable_irq __disable_interrupt - #define __NOP __no_operation - - #define __get_xPSR __get_PSR - - #if (!defined(__ARM_ARCH_6M__) || __ARM_ARCH_6M__==0) - - __IAR_FT uint32_t __LDREXW(uint32_t volatile *ptr) - { - return __LDREX((unsigned long *)ptr); - } - - __IAR_FT uint32_t __STREXW(uint32_t value, uint32_t volatile *ptr) - { - return __STREX(value, (unsigned long *)ptr); - } - #endif - - - /* __CORTEX_M is defined in core_cm0.h, core_cm3.h and core_cm4.h. */ - #if (__CORTEX_M >= 0x03) - - __IAR_FT uint32_t __RRX(uint32_t value) - { - uint32_t result; - __ASM volatile("RRX %0, %1" : "=r"(result) : "r" (value)); - return(result); - } - - __IAR_FT void __set_BASEPRI_MAX(uint32_t value) - { - __asm volatile("MSR BASEPRI_MAX,%0"::"r" (value)); - } - - - #define __enable_fault_irq __enable_fiq - #define __disable_fault_irq __disable_fiq - - - #endif /* (__CORTEX_M >= 0x03) */ - + + /** + \brief Rotate Right in unsigned value (32 bit) + \details Rotate Right (immediate) provides the value of the contents of a register rotated by a variable number of bits. 
+ \param [in] op1 Value to rotate + \param [in] op2 Number of Bits to rotate + \return Rotated value + */ + #define __ROR __iar_builtin_ROR +#else __IAR_FT uint32_t __ROR(uint32_t op1, uint32_t op2) { return (op1 >> op2) | (op1 << ((sizeof(op1)*8)-op2)); } +#endif - #if ((defined (__ARM_ARCH_8M_MAIN__ ) && (__ARM_ARCH_8M_MAIN__ == 1)) || \ - (defined (__ARM_ARCH_8M_BASE__ ) && (__ARM_ARCH_8M_BASE__ == 1)) ) - - __IAR_FT uint32_t __get_MSPLIM(void) - { - uint32_t res; - #if (!(defined (__ARM_ARCH_8M_MAIN__ ) && (__ARM_ARCH_8M_MAIN__ == 1)) && \ - (!defined (__ARM_FEATURE_CMSE ) || (__ARM_FEATURE_CMSE < 3))) - // without main extensions, the non-secure MSPLIM is RAZ/WI - res = 0U; - #else - __asm volatile("MRS %0,MSPLIM" : "=r" (res)); - #endif - return res; - } - - __IAR_FT void __set_MSPLIM(uint32_t value) - { - #if (!(defined (__ARM_ARCH_8M_MAIN__ ) && (__ARM_ARCH_8M_MAIN__ == 1)) && \ - (!defined (__ARM_FEATURE_CMSE ) || (__ARM_FEATURE_CMSE < 3))) - // without main extensions, the non-secure MSPLIM is RAZ/WI - (void)value; - #else - __asm volatile("MSR MSPLIM,%0" :: "r" (value)); - #endif - } - - __IAR_FT uint32_t __get_PSPLIM(void) - { - uint32_t res; - #if (!(defined (__ARM_ARCH_8M_MAIN__ ) && (__ARM_ARCH_8M_MAIN__ == 1)) && \ - (!defined (__ARM_FEATURE_CMSE ) || (__ARM_FEATURE_CMSE < 3))) - // without main extensions, the non-secure PSPLIM is RAZ/WI - res = 0U; - #else - __asm volatile("MRS %0,PSPLIM" : "=r" (res)); - #endif - return res; - } - - __IAR_FT void __set_PSPLIM(uint32_t value) - { - #if (!(defined (__ARM_ARCH_8M_MAIN__ ) && (__ARM_ARCH_8M_MAIN__ == 1)) && \ - (!defined (__ARM_FEATURE_CMSE ) || (__ARM_FEATURE_CMSE < 3))) - // without main extensions, the non-secure PSPLIM is RAZ/WI - (void)value; - #else - __asm volatile("MSR PSPLIM,%0" :: "r" (value)); - #endif - } - - __IAR_FT uint32_t __TZ_get_CONTROL_NS(void) - { - uint32_t res; - __asm volatile("MRS %0,CONTROL_NS" : "=r" (res)); - return res; - } - - __IAR_FT void __TZ_set_CONTROL_NS(uint32_t value) - { - __asm volatile("MSR CONTROL_NS,%0" :: "r" (value)); - __iar_builtin_ISB(); - } - - __IAR_FT uint32_t __TZ_get_PSP_NS(void) - { - uint32_t res; - __asm volatile("MRS %0,PSP_NS" : "=r" (res)); - return res; - } - - __IAR_FT void __TZ_set_PSP_NS(uint32_t value) - { - __asm volatile("MSR PSP_NS,%0" :: "r" (value)); - } - - __IAR_FT uint32_t __TZ_get_MSP_NS(void) - { - uint32_t res; - __asm volatile("MRS %0,MSP_NS" : "=r" (res)); - return res; - } - - __IAR_FT void __TZ_set_MSP_NS(uint32_t value) - { - __asm volatile("MSR MSP_NS,%0" :: "r" (value)); - } - - __IAR_FT uint32_t __TZ_get_SP_NS(void) - { - uint32_t res; - __asm volatile("MRS %0,SP_NS" : "=r" (res)); - return res; - } - __IAR_FT void __TZ_set_SP_NS(uint32_t value) - { - __asm volatile("MSR SP_NS,%0" :: "r" (value)); - } - - __IAR_FT uint32_t __TZ_get_PRIMASK_NS(void) - { - uint32_t res; - __asm volatile("MRS %0,PRIMASK_NS" : "=r" (res)); - return res; - } - - __IAR_FT void __TZ_set_PRIMASK_NS(uint32_t value) - { - __asm volatile("MSR PRIMASK_NS,%0" :: "r" (value)); - } - - __IAR_FT uint32_t __TZ_get_BASEPRI_NS(void) - { - uint32_t res; - __asm volatile("MRS %0,BASEPRI_NS" : "=r" (res)); - return res; - } - - __IAR_FT void __TZ_set_BASEPRI_NS(uint32_t value) - { - __asm volatile("MSR BASEPRI_NS,%0" :: "r" (value)); - } - - __IAR_FT uint32_t __TZ_get_FAULTMASK_NS(void) - { - uint32_t res; - __asm volatile("MRS %0,FAULTMASK_NS" : "=r" (res)); - return res; - } +/** + \brief Breakpoint + \details Causes the processor to enter Debug state. 
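To show what the __ROR mapping (and its generic fallback) buys in practice, here is a small editorial sketch of a rotate-xor mixing step; the hash function itself is purely illustrative.

#include <stdint.h>

static uint32_t rotate_mix(uint32_t hash, uint32_t word)
{
  return __ROR(hash, 7U) ^ word;   /* single-instruction rotate on Thumb-2 and A-profile cores */
}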
+ Debug tools can use this to investigate system state when the instruction at a particular address is reached. + \param [in] value is ignored by the processor. + If required, a debugger can use it to store additional information about the breakpoint. + */ +#define __BKPT(value) __asm volatile ("BKPT %0" : : "i"(value)) - __IAR_FT void __TZ_set_FAULTMASK_NS(uint32_t value) +/** + \brief Reverse bit order of value + \details Reverses the bit order of the given value. + \param [in] value Value to reverse + \return Reversed value + */ +#if __ICCARM_INTRINSICS_VERSION__ == 2 + #define __RBIT __iar_builtin_RBIT +#elif __IAR_M0_FAMILY + __STATIC_INLINE uint32_t __RBIT(uint32_t v) + { + uint8_t sc = 31U; + uint32_t r = v; + for (v >>= 1U; v; v >>= 1U) { - __asm volatile("MSR FAULTMASK_NS,%0" :: "r" (value)); + r <<= 1U; + r |= v & 1U; + sc--; } + return (r << sc); + } +#endif - __IAR_FT uint32_t __TZ_get_PSPLIM_NS(void) - { - uint32_t res; - #if (!(defined (__ARM_ARCH_8M_MAIN__ ) && (__ARM_ARCH_8M_MAIN__ == 1)) && \ - (!defined (__ARM_FEATURE_CMSE ) || (__ARM_FEATURE_CMSE < 3))) - // without main extensions, the non-secure PSPLIM is RAZ/WI - res = 0U; - #else - __asm volatile("MRS %0,PSPLIM_NS" : "=r" (res)); - #endif - return res; - } +/** + \brief Count leading zeros + \details Counts the number of leading zeros of a data value. + \param [in] value Value to count the leading zeros + \return number of leading zeros in value + */ +#if __ICCARM_INTRINSICS_VERSION__ == 2 + #define __CLZ __iar_builtin_CLZ +#elif __IAR_M0_FAMILY + __STATIC_INLINE uint8_t __CLZ(uint32_t data) + { + if (data == 0U) { return 32U; } - __IAR_FT void __TZ_set_PSPLIM_NS(uint32_t value) - { - #if (!(defined (__ARM_ARCH_8M_MAIN__ ) && (__ARM_ARCH_8M_MAIN__ == 1)) && \ - (!defined (__ARM_FEATURE_CMSE ) || (__ARM_FEATURE_CMSE < 3))) - // without main extensions, the non-secure PSPLIM is RAZ/WI - (void)value; - #else - __asm volatile("MSR PSPLIM_NS,%0" :: "r" (value)); - #endif - } + uint32_t count = 0U; + uint32_t mask = 0x80000000U; - __IAR_FT uint32_t __TZ_get_MSPLIM_NS(void) + while ((data & mask) == 0U) { - uint32_t res; - __asm volatile("MRS %0,MSPLIM_NS" : "=r" (res)); - return res; + count += 1U; + mask = mask >> 1U; } + return count; + } +#endif - __IAR_FT void __TZ_set_MSPLIM_NS(uint32_t value) - { - __asm volatile("MSR MSPLIM_NS,%0" :: "r" (value)); - } +/** + \brief LDR Exclusive (8 bit) + \details Executes a exclusive LDR instruction for 8 bit value. + \param [in] ptr Pointer to data + \return value of type uint8_t at (*ptr) + */ +#define __LDREXB __iar_builtin_LDREXB - #endif /* __ARM_ARCH_8M_MAIN__ or __ARM_ARCH_8M_BASE__ */ +/** + \brief LDR Exclusive (16 bit) + \details Executes a exclusive LDR instruction for 16 bit values. + \param [in] ptr Pointer to data + \return value of type uint16_t at (*ptr) + */ +#define __LDREXH __iar_builtin_LDREXH -#endif /* __ICCARM_INTRINSICS_VERSION__ == 2 */ +/** + \brief LDR Exclusive (32 bit) + \details Executes a exclusive LDR instruction for 32 bit values. + \param [in] ptr Pointer to data + \return value of type uint32_t at (*ptr) + */ +#if __ICCARM_INTRINSICS_VERSION__ == 2 + #define __LDREXW __iar_builtin_LDREX +#elif (!defined(__ARM_ARCH_6M__) || __ARM_ARCH_6M__==0) + __IAR_FT uint32_t __LDREXW(uint32_t volatile *ptr) + { + return __LDREX((unsigned long *)ptr); + } +#endif -#define __BKPT(value) __asm volatile ("BKPT %0" : : "i"(value)) +/** + \brief STR Exclusive (8 bit) + \details Executes a exclusive STR instruction for 8 bit values. 
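An editorial sketch of the two bit-manipulation intrinsics documented above; next_pow2() and reverse_bits() are illustrative helper names, not part of the patch.

#include <stdint.h>

/* Smallest power of two >= x, valid for 1 <= x <= 0x80000000. */
static uint32_t next_pow2(uint32_t x)
{
  return 1UL << (32U - __CLZ(x - 1U));
}

/* Bit-reversed index, e.g. for FFT butterflies or CRC reflection. */
static uint32_t reverse_bits(uint32_t x)
{
  return __RBIT(x);
}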
+ \param [in] value Value to store + \param [in] ptr Pointer to location + \return 0 Function succeeded + \return 1 Function failed + */ +#define __STREXB __iar_builtin_STREXB + +/** + \brief STR Exclusive (16 bit) + \details Executes a exclusive STR instruction for 16 bit values. + \param [in] value Value to store + \param [in] ptr Pointer to location + \return 0 Function succeeded + \return 1 Function failed + */ +#define __STREXH __iar_builtin_STREXH + +/** + \brief STR Exclusive (32 bit) + \details Executes a exclusive STR instruction for 32 bit values. + \param [in] value Value to store + \param [in] ptr Pointer to location + \return 0 Function succeeded + \return 1 Function failed + */ +#if __ICCARM_INTRINSICS_VERSION__ == 2 + #define __STREXW __iar_builtin_STREX +#elif (!defined(__ARM_ARCH_6M__) || __ARM_ARCH_6M__==0) + __IAR_FT uint32_t __STREXW(uint32_t value, uint32_t volatile *ptr) + { + return __STREX(value, (unsigned long *)ptr); + } +#endif -#if __IAR_M0_FAMILY +/** + \brief Remove the exclusive lock + \details Removes the exclusive lock which is created by LDREX. + */ +#define __CLREX __iar_builtin_CLREX + +#if !__IAR_M0_FAMILY + /** + \brief Signed Saturate + \details Saturates a signed value. + \param [in] ARG1 Value to be saturated + \param [in] ARG2 Bit position to saturate to (1..32) + \return Saturated value + */ + #define __SSAT __iar_builtin_SSAT + + /** + \brief Unsigned Saturate + \details Saturates an unsigned value. + \param [in] ARG1 Value to be saturated + \param [in] ARG2 Bit position to saturate to (0..31) + \return Saturated value + */ + #define __USAT __iar_builtin_USAT +#else /* !__IAR_M0_FAMILY */ + /** + \brief Signed Saturate + \details Saturates a signed value. + \param [in] val Value to be saturated + \param [in] sat Bit position to saturate to (1..32) + \return Saturated value + */ __STATIC_INLINE int32_t __SSAT(int32_t val, uint32_t sat) { if ((sat >= 1U) && (sat <= 32U)) @@ -852,6 +564,13 @@ __STATIC_FORCEINLINE void __TZ_set_CONTROL_NS(uint32_t control) return val; } + /** + \brief Unsigned Saturate + \details Saturates an unsigned value. + \param [in] val Value to be saturated + \param [in] sat Bit position to saturate to (0..31) + \return Saturated value + */ __STATIC_INLINE uint32_t __USAT(int32_t val, uint32_t sat) { if (sat <= 31U) @@ -868,10 +587,33 @@ __STATIC_FORCEINLINE void __TZ_set_CONTROL_NS(uint32_t control) } return (uint32_t)val; } +#endif /* !__IAR_M0_FAMILY */ + +/** + \brief Rotate Right with Extend (32 bit) + \details Moves each bit of a bitstring right by one bit. + The carry input is shifted in at the left end of the bitstring. + \param [in] value Value to rotate + \return Rotated value + */ +#if __ICCARM_INTRINSICS_VERSION__ == 2 + #define __RRX __iar_builtin_RRX +#elif (defined (__CORTEX_M) && __CORTEX_M >= 0x03) /* __CORTEX_M is defined in core_cm0.h, core_cm3.h and core_cm4.h. */ + __IAR_FT uint32_t __RRX(uint32_t value) + { + uint32_t result; + __ASM volatile("RRX %0, %1" : "=r"(result) : "r" (value)); + return(result); + } #endif -#if (__CORTEX_M >= 0x03) /* __CORTEX_M is defined in core_cm0.h, core_cm3.h and core_cm4.h. */ - +#if (defined (__CORTEX_M) && __CORTEX_M >= 0x03) /* __CORTEX_M is defined in core_cm0.h, core_cm3.h and core_cm4.h. */ + /** + \brief LDRT Unprivileged (8 bit) + \details Executes a Unprivileged LDRT instruction for 8 bit value. 
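The exclusive-access mappings above support the usual LDREX/STREX retry loop; an editorial sketch of a lock-free counter follows (the __DMB placement mirrors common RTOS practice and is an assumption, not mandated by the header).

#include <stdint.h>

static void atomic_increment(volatile uint32_t *counter)
{
  uint32_t value;
  do {
    value = __LDREXW(counter);                     /* open an exclusive monitor on *counter */
  } while (__STREXW(value + 1U, counter) != 0U);   /* 0 = store succeeded, otherwise retry */
  __DMB();                                         /* order the update against later accesses */
}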
+ \param [in] ptr Pointer to data + \return value of type uint8_t at (*ptr) + */ __IAR_FT uint8_t __LDRBT(volatile uint8_t *addr) { uint32_t res; @@ -879,6 +621,12 @@ __STATIC_FORCEINLINE void __TZ_set_CONTROL_NS(uint32_t control) return ((uint8_t)res); } + /** + \brief LDRT Unprivileged (16 bit) + \details Executes a Unprivileged LDRT instruction for 16 bit values. + \param [in] ptr Pointer to data + \return value of type uint16_t at (*ptr) + */ __IAR_FT uint16_t __LDRHT(volatile uint16_t *addr) { uint32_t res; @@ -886,6 +634,12 @@ __STATIC_FORCEINLINE void __TZ_set_CONTROL_NS(uint32_t control) return ((uint16_t)res); } + /** + \brief LDRT Unprivileged (32 bit) + \details Executes a Unprivileged LDRT instruction for 32 bit values. + \param [in] ptr Pointer to data + \return value of type uint32_t at (*ptr) + */ __IAR_FT uint32_t __LDRT(volatile uint32_t *addr) { uint32_t res; @@ -893,27 +647,48 @@ __STATIC_FORCEINLINE void __TZ_set_CONTROL_NS(uint32_t control) return res; } + /** + \brief STRT Unprivileged (8 bit) + \details Executes a Unprivileged STRT instruction for 8 bit values. + \param [in] value Value to store + \param [in] ptr Pointer to location + */ __IAR_FT void __STRBT(uint8_t value, volatile uint8_t *addr) { __ASM volatile ("STRBT %1, [%0]" : : "r" (addr), "r" ((uint32_t)value) : "memory"); } + /** + \brief STRT Unprivileged (16 bit) + \details Executes a Unprivileged STRT instruction for 16 bit values. + \param [in] value Value to store + \param [in] ptr Pointer to location + */ __IAR_FT void __STRHT(uint16_t value, volatile uint16_t *addr) { __ASM volatile ("STRHT %1, [%0]" : : "r" (addr), "r" ((uint32_t)value) : "memory"); } + /** + \brief STRT Unprivileged (32 bit) + \details Executes a Unprivileged STRT instruction for 32 bit values. + \param [in] value Value to store + \param [in] ptr Pointer to location + */ __IAR_FT void __STRT(uint32_t value, volatile uint32_t *addr) { __ASM volatile ("STRT %1, [%0]" : : "r" (addr), "r" (value) : "memory"); } - -#endif /* (__CORTEX_M >= 0x03) */ +#endif /* (defined (__CORTEX_M) && __CORTEX_M >= 0x03) */ #if ((defined (__ARM_ARCH_8M_MAIN__ ) && (__ARM_ARCH_8M_MAIN__ == 1)) || \ (defined (__ARM_ARCH_8M_BASE__ ) && (__ARM_ARCH_8M_BASE__ == 1)) ) - - + /** + \brief Load-Acquire (8 bit) + \details Executes a LDAB instruction for 8 bit value. + \param [in] ptr Pointer to data + \return value of type uint8_t at (*ptr) + */ __IAR_FT uint8_t __LDAB(volatile uint8_t *ptr) { uint32_t res; @@ -921,6 +696,12 @@ __STATIC_FORCEINLINE void __TZ_set_CONTROL_NS(uint32_t control) return ((uint8_t)res); } + /** + \brief Load-Acquire (16 bit) + \details Executes a LDAH instruction for 16 bit values. + \param [in] ptr Pointer to data + \return value of type uint16_t at (*ptr) + */ __IAR_FT uint16_t __LDAH(volatile uint16_t *ptr) { uint32_t res; @@ -928,6 +709,12 @@ __STATIC_FORCEINLINE void __TZ_set_CONTROL_NS(uint32_t control) return ((uint16_t)res); } + /** + \brief Load-Acquire (32 bit) + \details Executes a LDA instruction for 32 bit values. + \param [in] ptr Pointer to data + \return value of type uint32_t at (*ptr) + */ __IAR_FT uint32_t __LDA(volatile uint32_t *ptr) { uint32_t res; @@ -935,21 +722,45 @@ __STATIC_FORCEINLINE void __TZ_set_CONTROL_NS(uint32_t control) return res; } + /** + \brief Store-Release (8 bit) + \details Executes a STLB instruction for 8 bit values. 
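An editorial sketch of the unprivileged load above inside an SVC handler, so a privileged service routine does not read user memory with its own privileges; the handler name is illustrative.

#include <stdint.h>

static uint32_t svc_read_user_word(volatile uint32_t *user_ptr)
{
  return __LDRT(user_ptr);   /* faults like an unprivileged access if the caller may not read this address */
}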
+ \param [in] value Value to store + \param [in] ptr Pointer to location + */ __IAR_FT void __STLB(uint8_t value, volatile uint8_t *ptr) { __ASM volatile ("STLB %1, [%0]" :: "r" (ptr), "r" (value) : "memory"); } + /** + \brief Store-Release (16 bit) + \details Executes a STLH instruction for 16 bit values. + \param [in] value Value to store + \param [in] ptr Pointer to location + */ __IAR_FT void __STLH(uint16_t value, volatile uint16_t *ptr) { __ASM volatile ("STLH %1, [%0]" :: "r" (ptr), "r" (value) : "memory"); } + /** + \brief Store-Release (32 bit) + \details Executes a STL instruction for 32 bit values. + \param [in] value Value to store + \param [in] ptr Pointer to location + */ __IAR_FT void __STL(uint32_t value, volatile uint32_t *ptr) { __ASM volatile ("STL %1, [%0]" :: "r" (ptr), "r" (value) : "memory"); } + /** + \brief Load-Acquire Exclusive (8 bit) + \details Executes a LDAB exclusive instruction for 8 bit value. + \param [in] ptr Pointer to data + \return value of type uint8_t at (*ptr) + */ __IAR_FT uint8_t __LDAEXB(volatile uint8_t *ptr) { uint32_t res; @@ -957,6 +768,12 @@ __STATIC_FORCEINLINE void __TZ_set_CONTROL_NS(uint32_t control) return ((uint8_t)res); } + /** + \brief Load-Acquire Exclusive (16 bit) + \details Executes a LDAH exclusive instruction for 16 bit values. + \param [in] ptr Pointer to data + \return value of type uint16_t at (*ptr) + */ __IAR_FT uint16_t __LDAEXH(volatile uint16_t *ptr) { uint32_t res; @@ -964,6 +781,12 @@ __STATIC_FORCEINLINE void __TZ_set_CONTROL_NS(uint32_t control) return ((uint16_t)res); } + /** + \brief Load-Acquire Exclusive (32 bit) + \details Executes a LDA exclusive instruction for 32 bit values. + \param [in] ptr Pointer to data + \return value of type uint32_t at (*ptr) + */ __IAR_FT uint32_t __LDAEX(volatile uint32_t *ptr) { uint32_t res; @@ -971,6 +794,14 @@ __STATIC_FORCEINLINE void __TZ_set_CONTROL_NS(uint32_t control) return res; } + /** + \brief Store-Release Exclusive (8 bit) + \details Executes a STLB exclusive instruction for 8 bit values. + \param [in] value Value to store + \param [in] ptr Pointer to location + \return 0 Function succeeded + \return 1 Function failed + */ __IAR_FT uint32_t __STLEXB(uint8_t value, volatile uint8_t *ptr) { uint32_t res; @@ -978,6 +809,14 @@ __STATIC_FORCEINLINE void __TZ_set_CONTROL_NS(uint32_t control) return res; } + /** + \brief Store-Release Exclusive (16 bit) + \details Executes a STLH exclusive instruction for 16 bit values. + \param [in] value Value to store + \param [in] ptr Pointer to location + \return 0 Function succeeded + \return 1 Function failed + */ __IAR_FT uint32_t __STLEXH(uint16_t value, volatile uint16_t *ptr) { uint32_t res; @@ -985,24 +824,85 @@ __STATIC_FORCEINLINE void __TZ_set_CONTROL_NS(uint32_t control) return res; } + /** + \brief Store-Release Exclusive (32 bit) + \details Executes a STL exclusive instruction for 32 bit values. 
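The load-acquire/store-release exclusives above (Armv8-M mainline and baseline only) allow a spinlock without separate barriers; this editorial sketch assumes the lock word is 0 when free.

#include <stdint.h>

static void spin_lock(volatile uint32_t *lock)
{
  do {
    while (__LDAEX(lock) != 0U) { }     /* wait until the lock reads free (acquire semantics) */
  } while (__STLEX(1U, lock) != 0U);    /* claim it; retry if the exclusive was lost */
}

static void spin_unlock(volatile uint32_t *lock)
{
  __STL(0U, lock);                      /* release with store-release semantics */
}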
+ \param [in] value Value to store + \param [in] ptr Pointer to location + \return 0 Function succeeded + \return 1 Function failed + */ __IAR_FT uint32_t __STLEX(uint32_t value, volatile uint32_t *ptr) { uint32_t res; __ASM volatile ("STLEX %0, %2, [%1]" : "=r" (res) : "r" (ptr), "r" (value) : "memory"); return res; } - #endif /* __ARM_ARCH_8M_MAIN__ or __ARM_ARCH_8M_BASE__ */ -#undef __IAR_FT -#undef __IAR_M0_FAMILY -#undef __ICCARM_V8 - -#pragma diag_default=Pe940 -#pragma diag_default=Pe177 - -#define __SXTB16_RORn(ARG1, ARG2) __SXTB16(__ROR(ARG1, ARG2)) - -#define __SXTAB16_RORn(ARG1, ARG2, ARG3) __SXTAB16(ARG1, __ROR(ARG2, ARG3)) +#if __ICCARM_INTRINSICS_VERSION__ == 2 && __ARM_MEDIA__ + #define __SADD8 __iar_builtin_SADD8 + #define __QADD8 __iar_builtin_QADD8 + #define __SHADD8 __iar_builtin_SHADD8 + #define __UADD8 __iar_builtin_UADD8 + #define __UQADD8 __iar_builtin_UQADD8 + #define __UHADD8 __iar_builtin_UHADD8 + #define __SSUB8 __iar_builtin_SSUB8 + #define __QSUB8 __iar_builtin_QSUB8 + #define __SHSUB8 __iar_builtin_SHSUB8 + #define __USUB8 __iar_builtin_USUB8 + #define __UQSUB8 __iar_builtin_UQSUB8 + #define __UHSUB8 __iar_builtin_UHSUB8 + #define __SADD16 __iar_builtin_SADD16 + #define __QADD16 __iar_builtin_QADD16 + #define __SHADD16 __iar_builtin_SHADD16 + #define __UADD16 __iar_builtin_UADD16 + #define __UQADD16 __iar_builtin_UQADD16 + #define __UHADD16 __iar_builtin_UHADD16 + #define __SSUB16 __iar_builtin_SSUB16 + #define __QSUB16 __iar_builtin_QSUB16 + #define __SHSUB16 __iar_builtin_SHSUB16 + #define __USUB16 __iar_builtin_USUB16 + #define __UQSUB16 __iar_builtin_UQSUB16 + #define __UHSUB16 __iar_builtin_UHSUB16 + #define __SASX __iar_builtin_SASX + #define __QASX __iar_builtin_QASX + #define __SHASX __iar_builtin_SHASX + #define __UASX __iar_builtin_UASX + #define __UQASX __iar_builtin_UQASX + #define __UHASX __iar_builtin_UHASX + #define __SSAX __iar_builtin_SSAX + #define __QSAX __iar_builtin_QSAX + #define __SHSAX __iar_builtin_SHSAX + #define __USAX __iar_builtin_USAX + #define __UQSAX __iar_builtin_UQSAX + #define __UHSAX __iar_builtin_UHSAX + #define __USAD8 __iar_builtin_USAD8 + #define __USADA8 __iar_builtin_USADA8 + #define __SSAT16 __iar_builtin_SSAT16 + #define __USAT16 __iar_builtin_USAT16 + #define __UXTB16 __iar_builtin_UXTB16 + #define __UXTAB16 __iar_builtin_UXTAB16 + #define __SXTB16 __iar_builtin_SXTB16 + #define __SXTAB16 __iar_builtin_SXTAB16 + #define __SMUAD __iar_builtin_SMUAD + #define __SMUADX __iar_builtin_SMUADX + #define __SMMLA __iar_builtin_SMMLA + #define __SMLAD __iar_builtin_SMLAD + #define __SMLADX __iar_builtin_SMLADX + #define __SMLALD __iar_builtin_SMLALD + #define __SMLALDX __iar_builtin_SMLALDX + #define __SMUSD __iar_builtin_SMUSD + #define __SMUSDX __iar_builtin_SMUSDX + #define __SMLSD __iar_builtin_SMLSD + #define __SMLSDX __iar_builtin_SMLSDX + #define __SMLSLD __iar_builtin_SMLSLD + #define __SMLSLDX __iar_builtin_SMLSLDX + #define __SEL __iar_builtin_SEL + #define __QADD __iar_builtin_QADD + #define __QSUB __iar_builtin_QSUB + #define __PKHBT __iar_builtin_PKHBT + #define __PKHTB __iar_builtin_PKHTB +#endif /* __ICCARM_INTRINSICS_VERSION__ == 2 && __ARM_MEDIA__*/ #endif /* __CMSIS_ICCARM_H__ */ diff --git a/CMSIS/Core/Include/cmsis_iccarm_corea.h b/CMSIS/Core/Include/cmsis_iccarm_corea.h new file mode 100644 index 000000000..5d55d56b9 --- /dev/null +++ b/CMSIS/Core/Include/cmsis_iccarm_corea.h @@ -0,0 +1,283 @@ +/**************************************************************************//** + * @file cmsis_iccarm_corea.h + 
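One of the SIMD mappings collected above, shown as an editorial sketch: __UHADD8 averages four packed unsigned bytes per call (available only when __ARM_MEDIA__ is set); the pixel framing is an assumption.

#include <stdint.h>

static uint32_t average4_pixels(uint32_t a, uint32_t b)
{
  return __UHADD8(a, b);   /* per-byte (a + b) / 2 without unpacking the word */
}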
* @brief CMSIS compiler ICCARM (IAR Compiler for Arm) header file + * @version V5.1.0 + * @date 04. December 2022 + ******************************************************************************/ + +//------------------------------------------------------------------------------ +// +// Copyright (c) 2017-2018 IAR Systems +// Copyright (c) 2018-2023 Arm Limited. All rights reserved. +// +// SPDX-License-Identifier: Apache-2.0 +// +// Licensed under the Apache License, Version 2.0 (the "License") +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. +// +//------------------------------------------------------------------------------ + + +#ifndef __CMSIS_ICCARM_COREA_H__ +#define __CMSIS_ICCARM_COREA_H__ + +// Include the generic settigs: +#include "cmsis_iccarm.h" + +#ifndef __ICCARM__ + #error This file should only be compiled by ICCARM +#endif + +#if __ICCARM_INTRINSICS_VERSION__ == 2 + #define __disable_fault_irq __iar_builtin_disable_fiq + #define __disable_irq __iar_builtin_disable_interrupt + #define __enable_fault_irq __iar_builtin_enable_fiq + #define __enable_irq __iar_builtin_enable_interrupt + #define __arm_rsr __iar_builtin_rsr + #define __arm_wsr __iar_builtin_wsr + + #if ((defined (__FPU_PRESENT) && (__FPU_PRESENT == 1U)) && \ + (defined (__FPU_USED ) && (__FPU_USED == 1U)) ) + #define __get_FPSCR() (__arm_rsr("FPSCR")) + #define __set_FPSCR(VALUE) (__arm_wsr("FPSCR", (VALUE))) + #else + #define __get_FPSCR() ( 0 ) + #define __set_FPSCR(VALUE) ((void)VALUE) + #endif + + #define __get_CPSR() (__arm_rsr("CPSR")) + #define __get_mode() (__get_CPSR() & 0x1FU) + + #define __set_CPSR(VALUE) (__arm_wsr("CPSR", (VALUE))) + #define __set_mode(VALUE) (__arm_wsr("CPSR_c", (VALUE))) + + + #define __get_FPEXC() (__arm_rsr("FPEXC")) + #define __set_FPEXC(VALUE) (__arm_wsr("FPEXC", VALUE)) + + #define __get_CP(cp, op1, RT, CRn, CRm, op2) \ + ((RT) = __arm_rsr("p" # cp ":" # op1 ":c" # CRn ":c" # CRm ":" # op2)) + + #define __set_CP(cp, op1, RT, CRn, CRm, op2) \ + (__arm_wsr("p" # cp ":" # op1 ":c" # CRn ":c" # CRm ":" # op2, (RT))) + + #define __get_CP64(cp, op1, Rt, CRm) \ + __ASM volatile("MRRC p" # cp ", " # op1 ", %Q0, %R0, c" # CRm : "=r" (Rt) : : "memory" ) + + #define __set_CP64(cp, op1, Rt, CRm) \ + __ASM volatile("MCRR p" # cp ", " # op1 ", %Q0, %R0, c" # CRm : : "r" (Rt) : "memory" ) + + #if __CORTEX_A == 5 ||__CORTEX_A == 7 || __CORTEX_A == 9 + #include "armv7a_cp15.h" + #elif __CORTEX_A == 35 || __CORTEX_A == 53 || __CORTEX_A == 57 + #include "armv8a_system_control.h" + #else + #warning "Unknown or unsupported core" + #endif + + + #if !((defined (__FPU_PRESENT) && (__FPU_PRESENT == 1U))) + #define __get_FPSCR __cmsis_iar_get_FPSR_not_active + #endif + + #ifdef __INTRINSICS_INCLUDED + #error intrinsics.h is already included previously! 
+ #endif + + #include + + #if !((defined (__FPU_PRESENT) && (__FPU_PRESENT == 1U))) + #define __get_FPSCR() (0) + #endif + + #pragma diag_suppress=Pe940 + #pragma diag_suppress=Pe177 + + #define __enable_irq __enable_interrupt + #define __disable_irq __disable_interrupt + #define __enable_fault_irq __enable_fiq + #define __disable_fault_irq __disable_fiq + #define __NOP __no_operation + + #define __get_xPSR __get_PSR + + __IAR_FT void __set_mode(uint32_t mode) + { + __ASM volatile("MSR cpsr_c, %0" : : "r" (mode) : "memory"); + } + + __IAR_FT uint32_t __LDREXW(uint32_t volatile *ptr) + { + return __LDREX((unsigned long *)ptr); + } + + __IAR_FT uint32_t __STREXW(uint32_t value, uint32_t volatile *ptr) + { + return __STREX(value, (unsigned long *)ptr); + } + + + __IAR_FT uint32_t __RRX(uint32_t value) + { + uint32_t result; + __ASM("RRX %0, %1" : "=r"(result) : "r" (value) : "cc"); + return(result); + } + + + __IAR_FT uint32_t __ROR(uint32_t op1, uint32_t op2) + { + return (op1 >> op2) | (op1 << ((sizeof(op1)*8)-op2)); + } + + __IAR_FT uint32_t __get_FPEXC(void) + { + #if ((defined (__FPU_PRESENT) && (__FPU_PRESENT == 1U))) + uint32_t result; + __ASM volatile("VMRS %0, fpexc" : "=r" (result) : : "memory"); + return(result); + #else + return(0); + #endif + } + + __IAR_FT void __set_FPEXC(uint32_t fpexc) + { + #if ((defined (__FPU_PRESENT) && (__FPU_PRESENT == 1U))) + __ASM volatile ("VMSR fpexc, %0" : : "r" (fpexc) : "memory"); + #endif + } + + + #define __get_CP(cp, op1, Rt, CRn, CRm, op2) \ + __ASM volatile("MRC p" # cp ", " # op1 ", %0, c" # CRn ", c" # CRm ", " # op2 : "=r" (Rt) : : "memory" ) + #define __set_CP(cp, op1, Rt, CRn, CRm, op2) \ + __ASM volatile("MCR p" # cp ", " # op1 ", %0, c" # CRn ", c" # CRm ", " # op2 : : "r" (Rt) : "memory" ) + #define __get_CP64(cp, op1, Rt, CRm) \ + __ASM volatile("MRRC p" # cp ", " # op1 ", %Q0, %R0, c" # CRm : "=r" (Rt) : : "memory" ) + #define __set_CP64(cp, op1, Rt, CRm) \ + __ASM volatile("MCRR p" # cp ", " # op1 ", %Q0, %R0, c" # CRm : : "r" (Rt) : "memory" ) + + #if __CORTEX_A == 5 ||__CORTEX_A == 7 || __CORTEX_A == 9 + #include "armv7a_cp15.h" + #elif __CORTEX_A == 35 || __CORTEX_A == 53 || __CORTEX_A == 57 + #include "armv8a_system_control.h" + #else + #warning "Unknown or unsupported core" + #endif + +#endif /* __ICCARM_INTRINSICS_VERSION__ == 2 */ + + +__IAR_FT uint32_t __get_SP_usr(void) +{ + uint32_t cpsr; + uint32_t result; + __ASM volatile( + "MRS %0, cpsr \n" + "CPS #0x1F \n" // no effect in USR mode + "MOV %1, sp \n" + "MSR cpsr_c, %2 \n" // no effect in USR mode + "ISB" : "=r"(cpsr), "=r"(result) : "r"(cpsr) : "memory" + ); + return result; +} + +__IAR_FT void __set_SP_usr(uint32_t topOfProcStack) +{ + uint32_t cpsr; + __ASM volatile( + "MRS %0, cpsr \n" + "CPS #0x1F \n" // no effect in USR mode + "MOV sp, %1 \n" + "MSR cpsr_c, %2 \n" // no effect in USR mode + "ISB" : "=r"(cpsr) : "r" (topOfProcStack), "r"(cpsr) : "memory" + ); +} + +#define __get_mode() (__get_CPSR() & 0x1FU) + +__STATIC_INLINE +void __FPU_Enable(void) +{ + __ASM volatile( + //Permit access to VFP/NEON, registers by modifying CPACR + " MRC p15,0,R1,c1,c0,2 \n" + " ORR R1,R1,#0x00F00000 \n" + " MCR p15,0,R1,c1,c0,2 \n" + + //Ensure that subsequent instructions occur in the context of VFP/NEON access permitted + " ISB \n" + + //Enable VFP/NEON + " VMRS R1,FPEXC \n" + " ORR R1,R1,#0x40000000 \n" + " VMSR FPEXC,R1 \n" + + //Initialise VFP/NEON registers to 0 + " MOV R2,#0 \n" + + //Initialise D16 registers to 0 + " VMOV D0, R2,R2 \n" + " VMOV D1, R2,R2 \n" + " VMOV D2, 
R2,R2 \n" + " VMOV D3, R2,R2 \n" + " VMOV D4, R2,R2 \n" + " VMOV D5, R2,R2 \n" + " VMOV D6, R2,R2 \n" + " VMOV D7, R2,R2 \n" + " VMOV D8, R2,R2 \n" + " VMOV D9, R2,R2 \n" + " VMOV D10,R2,R2 \n" + " VMOV D11,R2,R2 \n" + " VMOV D12,R2,R2 \n" + " VMOV D13,R2,R2 \n" + " VMOV D14,R2,R2 \n" + " VMOV D15,R2,R2 \n" + +#ifdef __ARM_ADVANCED_SIMD__ + //Initialise D32 registers to 0 + " VMOV D16,R2,R2 \n" + " VMOV D17,R2,R2 \n" + " VMOV D18,R2,R2 \n" + " VMOV D19,R2,R2 \n" + " VMOV D20,R2,R2 \n" + " VMOV D21,R2,R2 \n" + " VMOV D22,R2,R2 \n" + " VMOV D23,R2,R2 \n" + " VMOV D24,R2,R2 \n" + " VMOV D25,R2,R2 \n" + " VMOV D26,R2,R2 \n" + " VMOV D27,R2,R2 \n" + " VMOV D28,R2,R2 \n" + " VMOV D29,R2,R2 \n" + " VMOV D30,R2,R2 \n" + " VMOV D31,R2,R2 \n" +#endif + + //Initialise FPSCR to a known state + " VMRS R1,FPSCR \n" + " MOV32 R2,#0x00086060 \n" //Mask off all bits that do not have to be preserved. Non-preserved bits can/should be zero. + " AND R1,R1,R2 \n" + " VMSR FPSCR,R1 \n" + : : : "cc", "r1", "r2" + ); +} + + + +#undef __IAR_FT +#undef __ICCARM_V8 + +#pragma diag_default=Pe940 +#pragma diag_default=Pe177 + +#endif /* __CMSIS_ICCARM_COREA_H__ */ diff --git a/CMSIS/Core/Include/cmsis_iccarm_corem.h b/CMSIS/Core/Include/cmsis_iccarm_corem.h new file mode 100644 index 000000000..b90b27e41 --- /dev/null +++ b/CMSIS/Core/Include/cmsis_iccarm_corem.h @@ -0,0 +1,464 @@ +/**************************************************************************//** + * @file cmsis_iccarm_corem.h + * @brief CMSIS compiler ICCARM (IAR Compiler for Arm) header file + * @version V5.4.0 + * @date 04. December 2022 + ******************************************************************************/ + +//------------------------------------------------------------------------------ +// +// Copyright (c) 2017-2021 IAR Systems +// Copyright (c) 2017-2023 Arm Limited. All rights reserved. +// +// SPDX-License-Identifier: Apache-2.0 +// +// Licensed under the Apache License, Version 2.0 (the "License") +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. 
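An editorial sketch of how a Cortex-A reset handler might call the helpers added in cmsis_iccarm_corea.h above; the mode value 0x1F (SYS) comes from the Armv7-A CPSR.M encoding, and system_init() is an illustrative name.

#include <stdint.h>

extern void system_init(void);   /* illustrative: clocks, MMU, GIC setup */

void low_level_init(void)
{
  __FPU_Enable();      /* grant CP10/CP11 access and zero the VFP/NEON registers */
  __set_mode(0x1FU);   /* switch to SYS mode for the C run-time start-up */
  system_init();
}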
+// +//------------------------------------------------------------------------------ + + +#ifndef __CMSIS_ICCARM_COREM_H__ +#define __CMSIS_ICCARM_COREM_H__ + +// Include the generic settigs: +#include "cmsis_iccarm.h" + +#ifndef __ICCARM__ + #error This file should only be compiled by ICCARM +#endif + + +#ifndef __PROGRAM_START + #define __PROGRAM_START __iar_program_start +#endif + +#ifndef __INITIAL_SP + #define __INITIAL_SP CSTACK$$Limit +#endif + +#ifndef __STACK_LIMIT + #define __STACK_LIMIT CSTACK$$Base +#endif + +#ifndef __VECTOR_TABLE + #define __VECTOR_TABLE __vector_table +#endif + +#ifndef __VECTOR_TABLE_ATTRIBUTE + #define __VECTOR_TABLE_ATTRIBUTE @".intvec" +#endif + +#if defined (__ARM_FEATURE_CMSE) && (__ARM_FEATURE_CMSE == 3U) + #ifndef __STACK_SEAL + #define __STACK_SEAL STACKSEAL$$Base + #endif + + #ifndef __TZ_STACK_SEAL_SIZE + #define __TZ_STACK_SEAL_SIZE 8U + #endif + + #ifndef __TZ_STACK_SEAL_VALUE + #define __TZ_STACK_SEAL_VALUE 0xFEF5EDA5FEF5EDA5ULL + #endif + + __STATIC_FORCEINLINE void __TZ_set_STACKSEAL_S (uint32_t* stackTop) { + *((uint64_t *)stackTop) = __TZ_STACK_SEAL_VALUE; + } +#endif + +#if __ICCARM_INTRINSICS_VERSION__ == 2 + #define __disable_fault_irq __iar_builtin_disable_fiq + #define __disable_irq __iar_builtin_disable_interrupt + #define __enable_fault_irq __iar_builtin_enable_fiq + #define __enable_irq __iar_builtin_enable_interrupt + #define __arm_rsr __iar_builtin_rsr + #define __arm_wsr __iar_builtin_wsr + + + #define __get_APSR() (__arm_rsr("APSR")) + #define __get_BASEPRI() (__arm_rsr("BASEPRI")) + #define __get_CONTROL() (__arm_rsr("CONTROL")) + #define __get_FAULTMASK() (__arm_rsr("FAULTMASK")) + + #if ((defined (__FPU_PRESENT) && (__FPU_PRESENT == 1U)) && \ + (defined (__FPU_USED ) && (__FPU_USED == 1U)) ) + #define __get_FPSCR() (__arm_rsr("FPSCR")) + #define __set_FPSCR(VALUE) (__arm_wsr("FPSCR", (VALUE))) + #else + #define __get_FPSCR() ( 0 ) + #define __set_FPSCR(VALUE) ((void)VALUE) + #endif + + #define __get_IPSR() (__arm_rsr("IPSR")) + #define __get_MSP() (__arm_rsr("MSP")) + #if (!(defined (__ARM_ARCH_8M_MAIN__ ) && (__ARM_ARCH_8M_MAIN__ == 1)) && \ + (!defined (__ARM_FEATURE_CMSE) || (__ARM_FEATURE_CMSE < 3))) + // without main extensions, the non-secure MSPLIM is RAZ/WI + #define __get_MSPLIM() (0U) + #else + #define __get_MSPLIM() (__arm_rsr("MSPLIM")) + #endif + #define __get_PRIMASK() (__arm_rsr("PRIMASK")) + #define __get_PSP() (__arm_rsr("PSP")) + + #if (!(defined (__ARM_ARCH_8M_MAIN__ ) && (__ARM_ARCH_8M_MAIN__ == 1)) && \ + (!defined (__ARM_FEATURE_CMSE) || (__ARM_FEATURE_CMSE < 3))) + // without main extensions, the non-secure PSPLIM is RAZ/WI + #define __get_PSPLIM() (0U) + #else + #define __get_PSPLIM() (__arm_rsr("PSPLIM")) + #endif + + #define __get_xPSR() (__arm_rsr("xPSR")) + + #define __set_BASEPRI(VALUE) (__arm_wsr("BASEPRI", (VALUE))) + #define __set_BASEPRI_MAX(VALUE) (__arm_wsr("BASEPRI_MAX", (VALUE))) + + __STATIC_FORCEINLINE void __set_CONTROL(uint32_t control) + { + __arm_wsr("CONTROL", control); + __iar_builtin_ISB(); + } + + #define __set_FAULTMASK(VALUE) (__arm_wsr("FAULTMASK", (VALUE))) + #define __set_MSP(VALUE) (__arm_wsr("MSP", (VALUE))) + + #if (!(defined (__ARM_ARCH_8M_MAIN__ ) && (__ARM_ARCH_8M_MAIN__ == 1)) && \ + (!defined (__ARM_FEATURE_CMSE) || (__ARM_FEATURE_CMSE < 3))) + // without main extensions, the non-secure MSPLIM is RAZ/WI + #define __set_MSPLIM(VALUE) ((void)(VALUE)) + #else + #define __set_MSPLIM(VALUE) (__arm_wsr("MSPLIM", (VALUE))) + #endif + #define __set_PRIMASK(VALUE) 
(__arm_wsr("PRIMASK", (VALUE))) + #define __set_PSP(VALUE) (__arm_wsr("PSP", (VALUE))) + #if (!(defined (__ARM_ARCH_8M_MAIN__ ) && (__ARM_ARCH_8M_MAIN__ == 1)) && \ + (!defined (__ARM_FEATURE_CMSE) || (__ARM_FEATURE_CMSE < 3))) + // without main extensions, the non-secure PSPLIM is RAZ/WI + #define __set_PSPLIM(VALUE) ((void)(VALUE)) + #else + #define __set_PSPLIM(VALUE) (__arm_wsr("PSPLIM", (VALUE))) + #endif + + #define __TZ_get_CONTROL_NS() (__arm_rsr("CONTROL_NS")) + + __STATIC_FORCEINLINE void __TZ_set_CONTROL_NS(uint32_t control) + { + __arm_wsr("CONTROL_NS", control); + __iar_builtin_ISB(); + } + + #define __TZ_get_PSP_NS() (__arm_rsr("PSP_NS")) + #define __TZ_set_PSP_NS(VALUE) (__arm_wsr("PSP_NS", (VALUE))) + #define __TZ_get_MSP_NS() (__arm_rsr("MSP_NS")) + #define __TZ_set_MSP_NS(VALUE) (__arm_wsr("MSP_NS", (VALUE))) + #define __TZ_get_SP_NS() (__arm_rsr("SP_NS")) + #define __TZ_set_SP_NS(VALUE) (__arm_wsr("SP_NS", (VALUE))) + #define __TZ_get_PRIMASK_NS() (__arm_rsr("PRIMASK_NS")) + #define __TZ_set_PRIMASK_NS(VALUE) (__arm_wsr("PRIMASK_NS", (VALUE))) + #define __TZ_get_BASEPRI_NS() (__arm_rsr("BASEPRI_NS")) + #define __TZ_set_BASEPRI_NS(VALUE) (__arm_wsr("BASEPRI_NS", (VALUE))) + #define __TZ_get_FAULTMASK_NS() (__arm_rsr("FAULTMASK_NS")) + #define __TZ_set_FAULTMASK_NS(VALUE)(__arm_wsr("FAULTMASK_NS", (VALUE))) + + #if (!(defined (__ARM_ARCH_8M_MAIN__ ) && (__ARM_ARCH_8M_MAIN__ == 1)) && \ + (!defined (__ARM_FEATURE_CMSE) || (__ARM_FEATURE_CMSE < 3))) + // without main extensions, the non-secure PSPLIM is RAZ/WI + #define __TZ_get_PSPLIM_NS() (0U) + #define __TZ_set_PSPLIM_NS(VALUE) ((void)(VALUE)) + #else + #define __TZ_get_PSPLIM_NS() (__arm_rsr("PSPLIM_NS")) + #define __TZ_set_PSPLIM_NS(VALUE) (__arm_wsr("PSPLIM_NS", (VALUE))) + #endif + + #define __TZ_get_MSPLIM_NS() (__arm_rsr("MSPLIM_NS")) + #define __TZ_set_MSPLIM_NS(VALUE) (__arm_wsr("MSPLIM_NS", (VALUE))) + + + #if (!((defined (__FPU_PRESENT) && (__FPU_PRESENT == 1U)) && \ + (defined (__FPU_USED ) && (__FPU_USED == 1U)) )) + #define __get_FPSCR __cmsis_iar_get_FPSR_not_active + #define __set_FPSCR __cmsis_iar_set_FPSR_not_active + #endif + + #ifdef __INTRINSICS_INCLUDED + #error intrinsics.h is already included previously! + #endif + + #include + + #if __IAR_M0_FAMILY + /* Avoid clash between intrinsics.h and arm_math.h when compiling for Cortex-M0. 
*/ + #undef __CLZ + #undef __SSAT + #undef __USAT + #undef __RBIT + #undef __get_APSR + + __STATIC_INLINE uint8_t __CLZ(uint32_t data) + { + if (data == 0U) { return 32U; } + + uint32_t count = 0U; + uint32_t mask = 0x80000000U; + + while ((data & mask) == 0U) + { + count += 1U; + mask = mask >> 1U; + } + return count; + } + + __STATIC_INLINE uint32_t __RBIT(uint32_t v) + { + uint8_t sc = 31U; + uint32_t r = v; + for (v >>= 1U; v; v >>= 1U) + { + r <<= 1U; + r |= v & 1U; + sc--; + } + return (r << sc); + } + + __STATIC_INLINE uint32_t __get_APSR(void) + { + uint32_t res; + __asm("MRS %0,APSR" : "=r" (res)); + return res; + } + + #endif /* __IAR_M0_FAMILY */ +#else /* __ICCARM_INTRINSICS_VERSION__ == 2 */ + #if (!((defined (__FPU_PRESENT) && (__FPU_PRESENT == 1U)) && \ + (defined (__FPU_USED ) && (__FPU_USED == 1U)) )) + #undef __get_FPSCR + #undef __set_FPSCR + #define __get_FPSCR() (0) + #define __set_FPSCR(VALUE) ((void)VALUE) + #endif + + #pragma diag_suppress=Pe940 + #pragma diag_suppress=Pe177 + + #define __enable_irq __enable_interrupt + #define __disable_irq __disable_interrupt + #define __NOP __no_operation + + #define __get_xPSR __get_PSR + + /* __CORTEX_M is defined in core_cm0.h, core_cm3.h and core_cm4.h. */ + #if (defined (__CORTEX_M) && __CORTEX_M >= 0x03) + __IAR_FT void __set_BASEPRI_MAX(uint32_t value) + { + __asm volatile("MSR BASEPRI_MAX,%0"::"r" (value)); + } + + + #define __enable_fault_irq __enable_fiq + #define __disable_fault_irq __disable_fiq + #endif /* (__CORTEX_M >= 0x03) */ + + #if ((defined (__ARM_ARCH_8M_MAIN__ ) && (__ARM_ARCH_8M_MAIN__ == 1)) || \ + (defined (__ARM_ARCH_8M_BASE__ ) && (__ARM_ARCH_8M_BASE__ == 1)) ) + + __IAR_FT uint32_t __get_MSPLIM(void) + { + uint32_t res; + #if (!(defined (__ARM_ARCH_8M_MAIN__ ) && (__ARM_ARCH_8M_MAIN__ == 1)) && \ + (!defined (__ARM_FEATURE_CMSE ) || (__ARM_FEATURE_CMSE < 3))) + // without main extensions, the non-secure MSPLIM is RAZ/WI + res = 0U; + #else + __asm volatile("MRS %0,MSPLIM" : "=r" (res)); + #endif + return res; + } + + __IAR_FT void __set_MSPLIM(uint32_t value) + { + #if (!(defined (__ARM_ARCH_8M_MAIN__ ) && (__ARM_ARCH_8M_MAIN__ == 1)) && \ + (!defined (__ARM_FEATURE_CMSE ) || (__ARM_FEATURE_CMSE < 3))) + // without main extensions, the non-secure MSPLIM is RAZ/WI + (void)value; + #else + __asm volatile("MSR MSPLIM,%0" :: "r" (value)); + #endif + } + + __IAR_FT uint32_t __get_PSPLIM(void) + { + uint32_t res; + #if (!(defined (__ARM_ARCH_8M_MAIN__ ) && (__ARM_ARCH_8M_MAIN__ == 1)) && \ + (!defined (__ARM_FEATURE_CMSE ) || (__ARM_FEATURE_CMSE < 3))) + // without main extensions, the non-secure PSPLIM is RAZ/WI + res = 0U; + #else + __asm volatile("MRS %0,PSPLIM" : "=r" (res)); + #endif + return res; + } + + __IAR_FT void __set_PSPLIM(uint32_t value) + { + #if (!(defined (__ARM_ARCH_8M_MAIN__ ) && (__ARM_ARCH_8M_MAIN__ == 1)) && \ + (!defined (__ARM_FEATURE_CMSE ) || (__ARM_FEATURE_CMSE < 3))) + // without main extensions, the non-secure PSPLIM is RAZ/WI + (void)value; + #else + __asm volatile("MSR PSPLIM,%0" :: "r" (value)); + #endif + } + + __IAR_FT uint32_t __TZ_get_CONTROL_NS(void) + { + uint32_t res; + __asm volatile("MRS %0,CONTROL_NS" : "=r" (res)); + return res; + } + + __IAR_FT void __TZ_set_CONTROL_NS(uint32_t value) + { + __asm volatile("MSR CONTROL_NS,%0" :: "r" (value)); + __iar_builtin_ISB(); + } + + __IAR_FT uint32_t __TZ_get_PSP_NS(void) + { + uint32_t res; + __asm volatile("MRS %0,PSP_NS" : "=r" (res)); + return res; + } + + __IAR_FT void __TZ_set_PSP_NS(uint32_t value) + { + __asm 
volatile("MSR PSP_NS,%0" :: "r" (value)); + } + + __IAR_FT uint32_t __TZ_get_MSP_NS(void) + { + uint32_t res; + __asm volatile("MRS %0,MSP_NS" : "=r" (res)); + return res; + } + + __IAR_FT void __TZ_set_MSP_NS(uint32_t value) + { + __asm volatile("MSR MSP_NS,%0" :: "r" (value)); + } + + __IAR_FT uint32_t __TZ_get_SP_NS(void) + { + uint32_t res; + __asm volatile("MRS %0,SP_NS" : "=r" (res)); + return res; + } + __IAR_FT void __TZ_set_SP_NS(uint32_t value) + { + __asm volatile("MSR SP_NS,%0" :: "r" (value)); + } + + __IAR_FT uint32_t __TZ_get_PRIMASK_NS(void) + { + uint32_t res; + __asm volatile("MRS %0,PRIMASK_NS" : "=r" (res)); + return res; + } + + __IAR_FT void __TZ_set_PRIMASK_NS(uint32_t value) + { + __asm volatile("MSR PRIMASK_NS,%0" :: "r" (value)); + } + + __IAR_FT uint32_t __TZ_get_BASEPRI_NS(void) + { + uint32_t res; + __asm volatile("MRS %0,BASEPRI_NS" : "=r" (res)); + return res; + } + + __IAR_FT void __TZ_set_BASEPRI_NS(uint32_t value) + { + __asm volatile("MSR BASEPRI_NS,%0" :: "r" (value)); + } + + __IAR_FT uint32_t __TZ_get_FAULTMASK_NS(void) + { + uint32_t res; + __asm volatile("MRS %0,FAULTMASK_NS" : "=r" (res)); + return res; + } + + __IAR_FT void __TZ_set_FAULTMASK_NS(uint32_t value) + { + __asm volatile("MSR FAULTMASK_NS,%0" :: "r" (value)); + } + + __IAR_FT uint32_t __TZ_get_PSPLIM_NS(void) + { + uint32_t res; + #if (!(defined (__ARM_ARCH_8M_MAIN__ ) && (__ARM_ARCH_8M_MAIN__ == 1)) && \ + (!defined (__ARM_FEATURE_CMSE ) || (__ARM_FEATURE_CMSE < 3))) + // without main extensions, the non-secure PSPLIM is RAZ/WI + res = 0U; + #else + __asm volatile("MRS %0,PSPLIM_NS" : "=r" (res)); + #endif + return res; + } + + __IAR_FT void __TZ_set_PSPLIM_NS(uint32_t value) + { + #if (!(defined (__ARM_ARCH_8M_MAIN__ ) && (__ARM_ARCH_8M_MAIN__ == 1)) && \ + (!defined (__ARM_FEATURE_CMSE ) || (__ARM_FEATURE_CMSE < 3))) + // without main extensions, the non-secure PSPLIM is RAZ/WI + (void)value; + #else + __asm volatile("MSR PSPLIM_NS,%0" :: "r" (value)); + #endif + } + + __IAR_FT uint32_t __TZ_get_MSPLIM_NS(void) + { + uint32_t res; + __asm volatile("MRS %0,MSPLIM_NS" : "=r" (res)); + return res; + } + + __IAR_FT void __TZ_set_MSPLIM_NS(uint32_t value) + { + __asm volatile("MSR MSPLIM_NS,%0" :: "r" (value)); + } + + #endif /* (defined (__ARM_ARCH_8M_MAIN__ ) && (__ARM_ARCH_8M_MAIN__ == 1)) || \ + (defined (__ARM_ARCH_8M_BASE__ ) && (__ARM_ARCH_8M_BASE__ == 1)) ) */ +#endif /* __ICCARM_INTRINSICS_VERSION__ == 2 */ + + +#if __IAR_M0_FAMILY + +#endif + +#undef __IAR_FT +#undef __IAR_M0_FAMILY +#undef __ICCARM_V8 + +#pragma diag_default=Pe940 +#pragma diag_default=Pe177 + +#define __SXTB16_RORn(ARG1, ARG2) __SXTB16(__ROR(ARG1, ARG2)) + +#define __SXTAB16_RORn(ARG1, ARG2, ARG3) __SXTAB16(ARG1, __ROR(ARG2, ARG3)) + +#endif /* __CMSIS_ICCARM_COREM_H__ */ diff --git a/CMSIS/Core/Include/cmsis_iccarm_corer.h b/CMSIS/Core/Include/cmsis_iccarm_corer.h new file mode 100644 index 000000000..4ce96f019 --- /dev/null +++ b/CMSIS/Core/Include/cmsis_iccarm_corer.h @@ -0,0 +1,34 @@ +/**************************************************************************//** + * @file cmsis_iccarm_corer.h + * @brief CMSIS compiler ICCARM (IAR Compiler for Arm) header file + * @version V5.0.0 + * @date 04. December 2022 + ******************************************************************************/ + +//------------------------------------------------------------------------------ +// +// Copyright (c) 2018-2023 Arm Limited. All rights reserved. 
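An editorial sketch of an RTOS port using the Cortex-M accessors from cmsis_iccarm_corem.h above to launch its first thread on the process stack; first_task_stack_top is an illustrative symbol.

#include <stdint.h>

extern uint32_t first_task_stack_top[];   /* illustrative: top of the first task's stack */

static void start_first_task(void)
{
  __set_PSP((uint32_t)first_task_stack_top);   /* thread code runs on the process stack */
  __set_CONTROL(0x02U);                        /* SPSEL = 1: use PSP in thread mode */
  __ISB();                                     /* conservative; the builtin path above already issues an ISB */
}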
+// +// SPDX-License-Identifier: Apache-2.0 +// +// Licensed under the Apache License, Version 2.0 (the "License") +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. +// +//------------------------------------------------------------------------------ + + +#ifndef __CMSIS_ICCARM_CORER_H__ +#define __CMSIS_ICCARM_CORER_H__ + +// Include the generic settigs: +#include "cmsis_iccarm.h" + +#endif /* __CMSIS_ICCARM_CORER_H__ */ diff --git a/CMSIS/Core/Include/cmsis_tiarmclang.h b/CMSIS/Core/Include/cmsis_tiarmclang.h index d134d4213..a4d719ddf 100644 --- a/CMSIS/Core/Include/cmsis_tiarmclang.h +++ b/CMSIS/Core/Include/cmsis_tiarmclang.h @@ -49,6 +49,9 @@ #ifndef __NO_RETURN #define __NO_RETURN __attribute__((__noreturn__)) #endif +#ifndef CMSIS_DEPRECATED + #define CMSIS_DEPRECATED __attribute__((deprecated)) +#endif #ifndef __USED #define __USED __attribute__((used)) #endif @@ -108,47 +111,6 @@ #define __ALIAS(x) __attribute__ ((alias(x))) #endif -/* ######################### Startup and Lowlevel Init ######################## */ -#ifndef __PROGRAM_START -#define __PROGRAM_START _c_int00 -#endif - -#ifndef __INITIAL_SP -#define __INITIAL_SP __STACK_END -#endif - -#ifndef __STACK_LIMIT -#define __STACK_LIMIT __STACK_SIZE -#endif - -#ifndef __VECTOR_TABLE -#define __VECTOR_TABLE __Vectors -#endif - -#ifndef __VECTOR_TABLE_ATTRIBUTE -#define __VECTOR_TABLE_ATTRIBUTE __attribute__((used, section(".intvecs"))) -#endif - -#if (__ARM_FEATURE_CMSE == 3) -#ifndef __STACK_SEAL -#define __STACK_SEAL Image$$STACKSEAL$$ZI$$Base -#endif - -#ifndef __TZ_STACK_SEAL_SIZE -#define __TZ_STACK_SEAL_SIZE 8U -#endif - -#ifndef __TZ_STACK_SEAL_VALUE -#define __TZ_STACK_SEAL_VALUE 0xFEF5EDA5FEF5EDA5ULL -#endif - - -__STATIC_FORCEINLINE void __TZ_set_STACKSEAL_S (uint32_t* stackTop) { - *((uint64_t *)stackTop) = __TZ_STACK_SEAL_VALUE; -} -#endif - - /* ########################## Core Instruction Access ######################### */ /** \defgroup CMSIS_Core_InstructionInterface CMSIS Core Instruction Interface Access to dedicated instructions @@ -287,9 +249,9 @@ __STATIC_FORCEINLINE void __TZ_set_STACKSEAL_S (uint32_t* stackTop) { #define __CLZ(value) __clz(value) -/* __ARM_FEATURE_SAT is wrong for for Armv8-M Baseline devices */ #if ((__ARM_FEATURE_SAT >= 1) && \ (__ARM_ARCH_ISA_THUMB >= 2) ) +/* __ARM_FEATURE_SAT is wrong for for Armv8-M Baseline devices */ /** \brief Signed Saturate \details Saturates a signed value. @@ -711,294 +673,6 @@ __STATIC_FORCEINLINE void __disable_irq(void) } #endif - -/** - \brief Get Control Register - \details Returns the content of the Control Register. - \return Control Register value - */ -__STATIC_FORCEINLINE uint32_t __get_CONTROL(void) -{ - uint32_t result; - - __ASM volatile ("MRS %0, control" : "=r" (result) ); - return (result); -} - - -#if (__ARM_FEATURE_CMSE == 3) -/** - \brief Get Control Register (non-secure) - \details Returns the content of the non-secure Control Register when in secure mode. 
- \return non-secure Control Register value - */ -__STATIC_FORCEINLINE uint32_t __TZ_get_CONTROL_NS(void) -{ - uint32_t result; - - __ASM volatile ("MRS %0, control_ns" : "=r" (result) ); - return (result); -} -#endif - - -/** - \brief Set Control Register - \details Writes the given value to the Control Register. - \param [in] control Control Register value to set - */ -__STATIC_FORCEINLINE void __set_CONTROL(uint32_t control) -{ - __ASM volatile ("MSR control, %0" : : "r" (control) : "memory"); - __ISB(); -} - - -#if (__ARM_FEATURE_CMSE == 3) -/** - \brief Set Control Register (non-secure) - \details Writes the given value to the non-secure Control Register when in secure state. - \param [in] control Control Register value to set - */ -__STATIC_FORCEINLINE void __TZ_set_CONTROL_NS(uint32_t control) -{ - __ASM volatile ("MSR control_ns, %0" : : "r" (control) : "memory"); - __ISB(); -} -#endif - - -/** - \brief Get IPSR Register - \details Returns the content of the IPSR Register. - \return IPSR Register value - */ -__STATIC_FORCEINLINE uint32_t __get_IPSR(void) -{ - uint32_t result; - - __ASM volatile ("MRS %0, ipsr" : "=r" (result) ); - return (result); -} - - -/** - \brief Get APSR Register - \details Returns the content of the APSR Register. - \return APSR Register value - */ -__STATIC_FORCEINLINE uint32_t __get_APSR(void) -{ - uint32_t result; - - __ASM volatile ("MRS %0, apsr" : "=r" (result) ); - return (result); -} - - -/** - \brief Get xPSR Register - \details Returns the content of the xPSR Register. - \return xPSR Register value - */ -__STATIC_FORCEINLINE uint32_t __get_xPSR(void) -{ - uint32_t result; - - __ASM volatile ("MRS %0, xpsr" : "=r" (result) ); - return (result); -} - - -/** - \brief Get Process Stack Pointer - \details Returns the current value of the Process Stack Pointer (PSP). - \return PSP Register value - */ -__STATIC_FORCEINLINE uint32_t __get_PSP(void) -{ - uint32_t result; - - __ASM volatile ("MRS %0, psp" : "=r" (result) ); - return (result); -} - - -#if (__ARM_FEATURE_CMSE == 3) -/** - \brief Get Process Stack Pointer (non-secure) - \details Returns the current value of the non-secure Process Stack Pointer (PSP) when in secure state. - \return PSP Register value - */ -__STATIC_FORCEINLINE uint32_t __TZ_get_PSP_NS(void) -{ - uint32_t result; - - __ASM volatile ("MRS %0, psp_ns" : "=r" (result) ); - return (result); -} -#endif - - -/** - \brief Set Process Stack Pointer - \details Assigns the given value to the Process Stack Pointer (PSP). - \param [in] topOfProcStack Process Stack Pointer value to set - */ -__STATIC_FORCEINLINE void __set_PSP(uint32_t topOfProcStack) -{ - __ASM volatile ("MSR psp, %0" : : "r" (topOfProcStack) : ); -} - - -#if (__ARM_FEATURE_CMSE == 3) -/** - \brief Set Process Stack Pointer (non-secure) - \details Assigns the given value to the non-secure Process Stack Pointer (PSP) when in secure state. - \param [in] topOfProcStack Process Stack Pointer value to set - */ -__STATIC_FORCEINLINE void __TZ_set_PSP_NS(uint32_t topOfProcStack) -{ - __ASM volatile ("MSR psp_ns, %0" : : "r" (topOfProcStack) : ); -} -#endif - - -/** - \brief Get Main Stack Pointer - \details Returns the current value of the Main Stack Pointer (MSP). 
- \return MSP Register value - */ -__STATIC_FORCEINLINE uint32_t __get_MSP(void) -{ - uint32_t result; - - __ASM volatile ("MRS %0, msp" : "=r" (result) ); - return (result); -} - - -#if (__ARM_FEATURE_CMSE == 3) -/** - \brief Get Main Stack Pointer (non-secure) - \details Returns the current value of the non-secure Main Stack Pointer (MSP) when in secure state. - \return MSP Register value - */ -__STATIC_FORCEINLINE uint32_t __TZ_get_MSP_NS(void) -{ - uint32_t result; - - __ASM volatile ("MRS %0, msp_ns" : "=r" (result) ); - return (result); -} -#endif - - -/** - \brief Set Main Stack Pointer - \details Assigns the given value to the Main Stack Pointer (MSP). - \param [in] topOfMainStack Main Stack Pointer value to set - */ -__STATIC_FORCEINLINE void __set_MSP(uint32_t topOfMainStack) -{ - __ASM volatile ("MSR msp, %0" : : "r" (topOfMainStack) : ); -} - - -#if (__ARM_FEATURE_CMSE == 3) -/** - \brief Set Main Stack Pointer (non-secure) - \details Assigns the given value to the non-secure Main Stack Pointer (MSP) when in secure state. - \param [in] topOfMainStack Main Stack Pointer value to set - */ -__STATIC_FORCEINLINE void __TZ_set_MSP_NS(uint32_t topOfMainStack) -{ - __ASM volatile ("MSR msp_ns, %0" : : "r" (topOfMainStack) : ); -} -#endif - - -#if (__ARM_FEATURE_CMSE == 3) -/** - \brief Get Stack Pointer (non-secure) - \details Returns the current value of the non-secure Stack Pointer (SP) when in secure state. - \return SP Register value - */ -__STATIC_FORCEINLINE uint32_t __TZ_get_SP_NS(void) -{ - uint32_t result; - - __ASM volatile ("MRS %0, sp_ns" : "=r" (result) ); - return (result); -} - - -/** - \brief Set Stack Pointer (non-secure) - \details Assigns the given value to the non-secure Stack Pointer (SP) when in secure state. - \param [in] topOfStack Stack Pointer value to set - */ -__STATIC_FORCEINLINE void __TZ_set_SP_NS(uint32_t topOfStack) -{ - __ASM volatile ("MSR sp_ns, %0" : : "r" (topOfStack) : ); -} -#endif - - -/** - \brief Get Priority Mask - \details Returns the current state of the priority mask bit from the Priority Mask Register. - \return Priority Mask value - */ -__STATIC_FORCEINLINE uint32_t __get_PRIMASK(void) -{ - uint32_t result; - - __ASM volatile ("MRS %0, primask" : "=r" (result) ); - return (result); -} - - -#if (__ARM_FEATURE_CMSE == 3) -/** - \brief Get Priority Mask (non-secure) - \details Returns the current state of the non-secure priority mask bit from the Priority Mask Register when in secure state. - \return Priority Mask value - */ -__STATIC_FORCEINLINE uint32_t __TZ_get_PRIMASK_NS(void) -{ - uint32_t result; - - __ASM volatile ("MRS %0, primask_ns" : "=r" (result) ); - return (result); -} -#endif - - -/** - \brief Set Priority Mask - \details Assigns the given value to the Priority Mask Register. - \param [in] priMask Priority Mask - */ -__STATIC_FORCEINLINE void __set_PRIMASK(uint32_t priMask) -{ - __ASM volatile ("MSR primask, %0" : : "r" (priMask) : "memory"); -} - - -#if (__ARM_FEATURE_CMSE == 3) -/** - \brief Set Priority Mask (non-secure) - \details Assigns the given value to the non-secure Priority Mask Register when in secure state. 
- \param [in] priMask Priority Mask - */ -__STATIC_FORCEINLINE void __TZ_set_PRIMASK_NS(uint32_t priMask) -{ - __ASM volatile ("MSR primask_ns, %0" : : "r" (priMask) : "memory"); -} -#endif - - #if (__ARM_ARCH_ISA_THUMB >= 2) /** \brief Enable FIQ @@ -1020,312 +694,8 @@ __STATIC_FORCEINLINE void __disable_fault_irq(void) { __ASM volatile ("cpsid f" : : : "memory"); } - - -/** - \brief Get Base Priority - \details Returns the current value of the Base Priority register. - \return Base Priority register value - */ -__STATIC_FORCEINLINE uint32_t __get_BASEPRI(void) -{ - uint32_t result; - - __ASM volatile ("MRS %0, basepri" : "=r" (result) ); - return (result); -} - - -#if (__ARM_FEATURE_CMSE == 3) -/** - \brief Get Base Priority (non-secure) - \details Returns the current value of the non-secure Base Priority register when in secure state. - \return Base Priority register value - */ -__STATIC_FORCEINLINE uint32_t __TZ_get_BASEPRI_NS(void) -{ - uint32_t result; - - __ASM volatile ("MRS %0, basepri_ns" : "=r" (result) ); - return (result); -} -#endif - - -/** - \brief Set Base Priority - \details Assigns the given value to the Base Priority register. - \param [in] basePri Base Priority value to set - */ -__STATIC_FORCEINLINE void __set_BASEPRI(uint32_t basePri) -{ - __ASM volatile ("MSR basepri, %0" : : "r" (basePri) : "memory"); -} - - -#if (__ARM_FEATURE_CMSE == 3) -/** - \brief Set Base Priority (non-secure) - \details Assigns the given value to the non-secure Base Priority register when in secure state. - \param [in] basePri Base Priority value to set - */ -__STATIC_FORCEINLINE void __TZ_set_BASEPRI_NS(uint32_t basePri) -{ - __ASM volatile ("MSR basepri_ns, %0" : : "r" (basePri) : "memory"); -} -#endif - - -/** - \brief Set Base Priority with condition - \details Assigns the given value to the Base Priority register only if BASEPRI masking is disabled, - or the new value increases the BASEPRI priority level. - \param [in] basePri Base Priority value to set - */ -__STATIC_FORCEINLINE void __set_BASEPRI_MAX(uint32_t basePri) -{ - __ASM volatile ("MSR basepri_max, %0" : : "r" (basePri) : "memory"); -} - - -/** - \brief Get Fault Mask - \details Returns the current value of the Fault Mask register. - \return Fault Mask register value - */ -__STATIC_FORCEINLINE uint32_t __get_FAULTMASK(void) -{ - uint32_t result; - - __ASM volatile ("MRS %0, faultmask" : "=r" (result) ); - return (result); -} - - -#if (__ARM_FEATURE_CMSE == 3) -/** - \brief Get Fault Mask (non-secure) - \details Returns the current value of the non-secure Fault Mask register when in secure state. - \return Fault Mask register value - */ -__STATIC_FORCEINLINE uint32_t __TZ_get_FAULTMASK_NS(void) -{ - uint32_t result; - - __ASM volatile ("MRS %0, faultmask_ns" : "=r" (result) ); - return (result); -} -#endif - - -/** - \brief Set Fault Mask - \details Assigns the given value to the Fault Mask register. - \param [in] faultMask Fault Mask value to set - */ -__STATIC_FORCEINLINE void __set_FAULTMASK(uint32_t faultMask) -{ - __ASM volatile ("MSR faultmask, %0" : : "r" (faultMask) : "memory"); -} - - -#if (__ARM_FEATURE_CMSE == 3) -/** - \brief Set Fault Mask (non-secure) - \details Assigns the given value to the non-secure Fault Mask register when in secure state. 
- \param [in] faultMask Fault Mask value to set - */ -__STATIC_FORCEINLINE void __TZ_set_FAULTMASK_NS(uint32_t faultMask) -{ - __ASM volatile ("MSR faultmask_ns, %0" : : "r" (faultMask) : "memory"); -} -#endif - -#endif /* (__ARM_ARCH_ISA_THUMB >= 2) */ - - -#if (__ARM_ARCH >= 8) -/** - \brief Get Process Stack Pointer Limit - Devices without ARMv8-M Main Extensions (i.e. Cortex-M23) lack the non-secure - Stack Pointer Limit register hence zero is returned always in non-secure - mode. - - \details Returns the current value of the Process Stack Pointer Limit (PSPLIM). - \return PSPLIM Register value - */ -__STATIC_FORCEINLINE uint32_t __get_PSPLIM(void) -{ -#if (((__ARM_ARCH_8M_MAIN__ < 1) && \ - (__ARM_ARCH_8_1M_MAIN__ < 1) ) && \ - (__ARM_FEATURE_CMSE < 3) ) - /* without main extensions, the non-secure PSPLIM is RAZ/WI */ - return (0U); -#else - uint32_t result; - __ASM volatile ("MRS %0, psplim" : "=r" (result) ); - return (result); -#endif -} - -#if (__ARM_FEATURE_CMSE == 3) -/** - \brief Get Process Stack Pointer Limit (non-secure) - Devices without ARMv8-M Main Extensions (i.e. Cortex-M23) lack the non-secure - Stack Pointer Limit register hence zero is returned always. - - \details Returns the current value of the non-secure Process Stack Pointer Limit (PSPLIM) when in secure state. - \return PSPLIM Register value - */ -__STATIC_FORCEINLINE uint32_t __TZ_get_PSPLIM_NS(void) -{ -#if ((__ARM_ARCH_8M_MAIN__ < 1) && \ - (__ARM_ARCH_8_1M_MAIN__ < 1) ) - /* without main extensions, the non-secure PSPLIM is RAZ/WI */ - return (0U); -#else - uint32_t result; - __ASM volatile ("MRS %0, psplim_ns" : "=r" (result) ); - return (result); #endif -} -#endif - - -/** - \brief Set Process Stack Pointer Limit - Devices without ARMv8-M Main Extensions (i.e. Cortex-M23) lack the non-secure - Stack Pointer Limit register hence the write is silently ignored in non-secure - mode. - - \details Assigns the given value to the Process Stack Pointer Limit (PSPLIM). - \param [in] ProcStackPtrLimit Process Stack Pointer Limit value to set - */ -__STATIC_FORCEINLINE void __set_PSPLIM(uint32_t ProcStackPtrLimit) -{ -#if (((__ARM_ARCH_8M_MAIN__ < 1) && \ - (__ARM_ARCH_8_1M_MAIN__ < 1) ) && \ - (__ARM_FEATURE_CMSE < 3) ) - /* without main extensions, the non-secure PSPLIM is RAZ/WI */ - (void)ProcStackPtrLimit; -#else - __ASM volatile ("MSR psplim, %0" : : "r" (ProcStackPtrLimit)); -#endif -} - - -#if (__ARM_FEATURE_CMSE == 3) -/** - \brief Set Process Stack Pointer (non-secure) - Devices without ARMv8-M Main Extensions (i.e. Cortex-M23) lack the non-secure - Stack Pointer Limit register hence the write is silently ignored. - - \details Assigns the given value to the non-secure Process Stack Pointer Limit (PSPLIM) when in secure state. - \param [in] ProcStackPtrLimit Process Stack Pointer Limit value to set - */ -__STATIC_FORCEINLINE void __TZ_set_PSPLIM_NS(uint32_t ProcStackPtrLimit) -{ -#if ((__ARM_ARCH_8M_MAIN__ < 1) && \ - (__ARM_ARCH_8_1M_MAIN__ < 1) ) - /* without main extensions, the non-secure PSPLIM is RAZ/WI */ - (void)ProcStackPtrLimit; -#else - __ASM volatile ("MSR psplim_ns, %0\n" : : "r" (ProcStackPtrLimit)); -#endif -} -#endif - -/** - \brief Get Main Stack Pointer Limit - Devices without ARMv8-M Main Extensions (i.e. Cortex-M23) lack the non-secure - Stack Pointer Limit register hence zero is returned always. - - \details Returns the current value of the Main Stack Pointer Limit (MSPLIM). 
- \return MSPLIM Register value - */ -__STATIC_FORCEINLINE uint32_t __get_MSPLIM(void) -{ -#if (((__ARM_ARCH_8M_MAIN__ < 1) && \ - (__ARM_ARCH_8_1M_MAIN__ < 1) ) && \ - (__ARM_FEATURE_CMSE < 3) ) - /* without main extensions, the non-secure MSPLIM is RAZ/WI */ - return (0U); -#else - uint32_t result; - __ASM volatile ("MRS %0, msplim" : "=r" (result) ); - return (result); -#endif -} - - -#if (__ARM_FEATURE_CMSE == 3) -/** - \brief Get Main Stack Pointer Limit (non-secure) - Devices without ARMv8-M Main Extensions (i.e. Cortex-M23) lack the non-secure - Stack Pointer Limit register hence zero is returned always. - - \details Returns the current value of the non-secure Main Stack Pointer Limit(MSPLIM) when in secure state. - \return MSPLIM Register value - */ -__STATIC_FORCEINLINE uint32_t __TZ_get_MSPLIM_NS(void) -{ -#if ((__ARM_ARCH_8M_MAIN__ < 1) && \ - (__ARM_ARCH_8_1M_MAIN__ < 1) ) - /* without main extensions, the non-secure MSPLIM is RAZ/WI */ - return (0U); -#else - uint32_t result; - __ASM volatile ("MRS %0, msplim_ns" : "=r" (result) ); - return (result); -#endif -} -#endif - - -/** - \brief Set Main Stack Pointer Limit - Devices without ARMv8-M Main Extensions (i.e. Cortex-M23) lack the non-secure - Stack Pointer Limit register hence the write is silently ignored. - - \details Assigns the given value to the Main Stack Pointer Limit (MSPLIM). - \param [in] MainStackPtrLimit Main Stack Pointer Limit value to set - */ -__STATIC_FORCEINLINE void __set_MSPLIM(uint32_t MainStackPtrLimit) -{ -#if (((__ARM_ARCH_8M_MAIN__ < 1) && \ - (__ARM_ARCH_8_1M_MAIN__ < 1) ) && \ - (__ARM_FEATURE_CMSE < 3) ) - /* without main extensions, the non-secure MSPLIM is RAZ/WI */ - (void)MainStackPtrLimit; -#else - __ASM volatile ("MSR msplim, %0" : : "r" (MainStackPtrLimit)); -#endif -} - - -#if (__ARM_FEATURE_CMSE == 3) -/** - \brief Set Main Stack Pointer Limit (non-secure) - Devices without ARMv8-M Main Extensions (i.e. Cortex-M23) lack the non-secure - Stack Pointer Limit register hence the write is silently ignored. - - \details Assigns the given value to the non-secure Main Stack Pointer Limit (MSPLIM) when in secure state. - \param [in] MainStackPtrLimit Main Stack Pointer value to set - */ -__STATIC_FORCEINLINE void __TZ_set_MSPLIM_NS(uint32_t MainStackPtrLimit) -{ -#if ((__ARM_ARCH_8M_MAIN__ < 1) && \ - (__ARM_ARCH_8_1M_MAIN__ < 1) ) - /* without main extensions, the non-secure MSPLIM is RAZ/WI */ - (void)MainStackPtrLimit; -#else - __ASM volatile ("MSR msplim_ns, %0" : : "r" (MainStackPtrLimit)); -#endif -} -#endif - -#endif /* (__ARM_ARCH >= 8) */ /** diff --git a/CMSIS/Core/Include/cmsis_tiarmclang_corem.h b/CMSIS/Core/Include/cmsis_tiarmclang_corem.h new file mode 100644 index 000000000..b14d84648 --- /dev/null +++ b/CMSIS/Core/Include/cmsis_tiarmclang_corem.h @@ -0,0 +1,670 @@ +/**************************************************************************//** + * @file cmsis_tiarmclang_corem.h + * @brief CMSIS compiler tiarmclang header file + * @version V6.0.0 + * @date 04. August 2023 + ******************************************************************************/ +/* + * Copyright (c) 2023 Arm Limited. All rights reserved. + * + * SPDX-License-Identifier: Apache-2.0 + * + * Licensed under the Apache License, Version 2.0 (the License); you may + * not use this file except in compliance with the License. 
+ * You may obtain a copy of the License at + * + * www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an AS IS BASIS, WITHOUT + * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +#ifndef __CMSIS_TIARMCLANG_COREM_H +#define __CMSIS_TIARMCLANG_COREM_H + +#pragma clang system_header /* treat file as system include file */ + +// Include the generic settings: +#include "cmsis_tiarmclang.h" + +/* ######################### Startup and Lowlevel Init ######################## */ + +#ifndef __PROGRAM_START +#define __PROGRAM_START _c_int00 +#endif + +#ifndef __INITIAL_SP +#define __INITIAL_SP __STACK_END +#endif + +#ifndef __STACK_LIMIT +#define __STACK_LIMIT __STACK_SIZE +#endif + +#ifndef __VECTOR_TABLE +#define __VECTOR_TABLE __Vectors +#endif + +#ifndef __VECTOR_TABLE_ATTRIBUTE +#define __VECTOR_TABLE_ATTRIBUTE __attribute__((used, section(".intvecs"))) +#endif + +#if (__ARM_FEATURE_CMSE == 3) +#ifndef __STACK_SEAL +#define __STACK_SEAL Image$$STACKSEAL$$ZI$$Base +#endif + +#ifndef __TZ_STACK_SEAL_SIZE +#define __TZ_STACK_SEAL_SIZE 8U +#endif + +#ifndef __TZ_STACK_SEAL_VALUE +#define __TZ_STACK_SEAL_VALUE 0xFEF5EDA5FEF5EDA5ULL +#endif + +__STATIC_FORCEINLINE void __TZ_set_STACKSEAL_S (uint32_t* stackTop) { + *((uint64_t *)stackTop) = __TZ_STACK_SEAL_VALUE; +} +#endif + + +/* ########################### Core Function Access ########################### */ +/** \ingroup CMSIS_Core_FunctionInterface + \defgroup CMSIS_Core_RegAccFunctions CMSIS Core Register Access Functions + @{ + */ + + +/** + \brief Get Control Register + \details Returns the content of the Control Register. + \return Control Register value + */ +__STATIC_FORCEINLINE uint32_t __get_CONTROL(void) +{ + uint32_t result; + + __ASM volatile ("MRS %0, control" : "=r" (result) ); + return (result); +} + + +#if (__ARM_FEATURE_CMSE == 3) +/** + \brief Get Control Register (non-secure) + \details Returns the content of the non-secure Control Register when in secure mode. + \return non-secure Control Register value + */ +__STATIC_FORCEINLINE uint32_t __TZ_get_CONTROL_NS(void) +{ + uint32_t result; + + __ASM volatile ("MRS %0, control_ns" : "=r" (result) ); + return (result); +} +#endif + + +/** + \brief Set Control Register + \details Writes the given value to the Control Register. + \param [in] control Control Register value to set + */ +__STATIC_FORCEINLINE void __set_CONTROL(uint32_t control) +{ + __ASM volatile ("MSR control, %0" : : "r" (control) : "memory"); + __ISB(); +} + + +#if (__ARM_FEATURE_CMSE == 3) +/** + \brief Set Control Register (non-secure) + \details Writes the given value to the non-secure Control Register when in secure state. + \param [in] control Control Register value to set + */ +__STATIC_FORCEINLINE void __TZ_set_CONTROL_NS(uint32_t control) +{ + __ASM volatile ("MSR control_ns, %0" : : "r" (control) : "memory"); + __ISB(); +} +#endif + + +/** + \brief Get IPSR Register + \details Returns the content of the IPSR Register. + \return IPSR Register value + */ +__STATIC_FORCEINLINE uint32_t __get_IPSR(void) +{ + uint32_t result; + + __ASM volatile ("MRS %0, ipsr" : "=r" (result) ); + return (result); +} + + +/** + \brief Get APSR Register + \details Returns the content of the APSR Register. 
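Since the TrustZone stack-seal support (__TZ_STACK_SEAL_VALUE, __TZ_set_STACKSEAL_S) now lives in this Cortex-M specific header, a short usage sketch may help. It mirrors the pattern used by CMSIS startup templates, where the linker provides the seal location; the extern declaration is illustrative, and __STACK_SEAL resolves through the macro default shown above.

    #if defined (__ARM_FEATURE_CMSE) && (__ARM_FEATURE_CMSE == 3U)
    extern uint64_t __STACK_SEAL;              /* resolved via the __STACK_SEAL macro default above */

    static void seal_secure_main_stack(void)
    {
      /* Writes 0xFEF5EDA5FEF5EDA5 at the seal location so an underflowing
         secure stack hits a known invalid value instead of live data. */
      __TZ_set_STACKSEAL_S((uint32_t *)&__STACK_SEAL);
    }
    #endif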
+ \return APSR Register value + */ +__STATIC_FORCEINLINE uint32_t __get_APSR(void) +{ + uint32_t result; + + __ASM volatile ("MRS %0, apsr" : "=r" (result) ); + return (result); +} + + +/** + \brief Get xPSR Register + \details Returns the content of the xPSR Register. + \return xPSR Register value + */ +__STATIC_FORCEINLINE uint32_t __get_xPSR(void) +{ + uint32_t result; + + __ASM volatile ("MRS %0, xpsr" : "=r" (result) ); + return (result); +} + + +/** + \brief Get Process Stack Pointer + \details Returns the current value of the Process Stack Pointer (PSP). + \return PSP Register value + */ +__STATIC_FORCEINLINE uint32_t __get_PSP(void) +{ + uint32_t result; + + __ASM volatile ("MRS %0, psp" : "=r" (result) ); + return (result); +} + + +#if (__ARM_FEATURE_CMSE == 3) +/** + \brief Get Process Stack Pointer (non-secure) + \details Returns the current value of the non-secure Process Stack Pointer (PSP) when in secure state. + \return PSP Register value + */ +__STATIC_FORCEINLINE uint32_t __TZ_get_PSP_NS(void) +{ + uint32_t result; + + __ASM volatile ("MRS %0, psp_ns" : "=r" (result) ); + return (result); +} +#endif + + +/** + \brief Set Process Stack Pointer + \details Assigns the given value to the Process Stack Pointer (PSP). + \param [in] topOfProcStack Process Stack Pointer value to set + */ +__STATIC_FORCEINLINE void __set_PSP(uint32_t topOfProcStack) +{ + __ASM volatile ("MSR psp, %0" : : "r" (topOfProcStack) : ); +} + + +#if (__ARM_FEATURE_CMSE == 3) +/** + \brief Set Process Stack Pointer (non-secure) + \details Assigns the given value to the non-secure Process Stack Pointer (PSP) when in secure state. + \param [in] topOfProcStack Process Stack Pointer value to set + */ +__STATIC_FORCEINLINE void __TZ_set_PSP_NS(uint32_t topOfProcStack) +{ + __ASM volatile ("MSR psp_ns, %0" : : "r" (topOfProcStack) : ); +} +#endif + + +/** + \brief Get Main Stack Pointer + \details Returns the current value of the Main Stack Pointer (MSP). + \return MSP Register value + */ +__STATIC_FORCEINLINE uint32_t __get_MSP(void) +{ + uint32_t result; + + __ASM volatile ("MRS %0, msp" : "=r" (result) ); + return (result); +} + + +#if (__ARM_FEATURE_CMSE == 3) +/** + \brief Get Main Stack Pointer (non-secure) + \details Returns the current value of the non-secure Main Stack Pointer (MSP) when in secure state. + \return MSP Register value + */ +__STATIC_FORCEINLINE uint32_t __TZ_get_MSP_NS(void) +{ + uint32_t result; + + __ASM volatile ("MRS %0, msp_ns" : "=r" (result) ); + return (result); +} +#endif + + +/** + \brief Set Main Stack Pointer + \details Assigns the given value to the Main Stack Pointer (MSP). + \param [in] topOfMainStack Main Stack Pointer value to set + */ +__STATIC_FORCEINLINE void __set_MSP(uint32_t topOfMainStack) +{ + __ASM volatile ("MSR msp, %0" : : "r" (topOfMainStack) : ); +} + + +#if (__ARM_FEATURE_CMSE == 3) +/** + \brief Set Main Stack Pointer (non-secure) + \details Assigns the given value to the non-secure Main Stack Pointer (MSP) when in secure state. + \param [in] topOfMainStack Main Stack Pointer value to set + */ +__STATIC_FORCEINLINE void __TZ_set_MSP_NS(uint32_t topOfMainStack) +{ + __ASM volatile ("MSR msp_ns, %0" : : "r" (topOfMainStack) : ); +} +#endif + + +#if (__ARM_FEATURE_CMSE == 3) +/** + \brief Get Stack Pointer (non-secure) + \details Returns the current value of the non-secure Stack Pointer (SP) when in secure state. 
+ \return SP Register value + */ +__STATIC_FORCEINLINE uint32_t __TZ_get_SP_NS(void) +{ + uint32_t result; + + __ASM volatile ("MRS %0, sp_ns" : "=r" (result) ); + return (result); +} + + +/** + \brief Set Stack Pointer (non-secure) + \details Assigns the given value to the non-secure Stack Pointer (SP) when in secure state. + \param [in] topOfStack Stack Pointer value to set + */ +__STATIC_FORCEINLINE void __TZ_set_SP_NS(uint32_t topOfStack) +{ + __ASM volatile ("MSR sp_ns, %0" : : "r" (topOfStack) : ); +} +#endif + + +/** + \brief Get Priority Mask + \details Returns the current state of the priority mask bit from the Priority Mask Register. + \return Priority Mask value + */ +__STATIC_FORCEINLINE uint32_t __get_PRIMASK(void) +{ + uint32_t result; + + __ASM volatile ("MRS %0, primask" : "=r" (result) ); + return (result); +} + + +#if (__ARM_FEATURE_CMSE == 3) +/** + \brief Get Priority Mask (non-secure) + \details Returns the current state of the non-secure priority mask bit from the Priority Mask Register when in secure state. + \return Priority Mask value + */ +__STATIC_FORCEINLINE uint32_t __TZ_get_PRIMASK_NS(void) +{ + uint32_t result; + + __ASM volatile ("MRS %0, primask_ns" : "=r" (result) ); + return (result); +} +#endif + + +/** + \brief Set Priority Mask + \details Assigns the given value to the Priority Mask Register. + \param [in] priMask Priority Mask + */ +__STATIC_FORCEINLINE void __set_PRIMASK(uint32_t priMask) +{ + __ASM volatile ("MSR primask, %0" : : "r" (priMask) : "memory"); +} + + +#if (__ARM_FEATURE_CMSE == 3) +/** + \brief Set Priority Mask (non-secure) + \details Assigns the given value to the non-secure Priority Mask Register when in secure state. + \param [in] priMask Priority Mask + */ +__STATIC_FORCEINLINE void __TZ_set_PRIMASK_NS(uint32_t priMask) +{ + __ASM volatile ("MSR primask_ns, %0" : : "r" (priMask) : "memory"); +} +#endif + + +#if (__ARM_ARCH_ISA_THUMB >= 2) + /** + \brief Get Base Priority + \details Returns the current value of the Base Priority register. + \return Base Priority register value + */ + __STATIC_FORCEINLINE uint32_t __get_BASEPRI(void) + { + uint32_t result; + + __ASM volatile ("MRS %0, basepri" : "=r" (result) ); + return (result); + } + + + #if (__ARM_FEATURE_CMSE == 3) + /** + \brief Get Base Priority (non-secure) + \details Returns the current value of the non-secure Base Priority register when in secure state. + \return Base Priority register value + */ + __STATIC_FORCEINLINE uint32_t __TZ_get_BASEPRI_NS(void) + { + uint32_t result; + + __ASM volatile ("MRS %0, basepri_ns" : "=r" (result) ); + return (result); + } + #endif + + + /** + \brief Set Base Priority + \details Assigns the given value to the Base Priority register. + \param [in] basePri Base Priority value to set + */ + __STATIC_FORCEINLINE void __set_BASEPRI(uint32_t basePri) + { + __ASM volatile ("MSR basepri, %0" : : "r" (basePri) : "memory"); + } + + + #if (__ARM_FEATURE_CMSE == 3) + /** + \brief Set Base Priority (non-secure) + \details Assigns the given value to the non-secure Base Priority register when in secure state. + \param [in] basePri Base Priority value to set + */ + __STATIC_FORCEINLINE void __TZ_set_BASEPRI_NS(uint32_t basePri) + { + __ASM volatile ("MSR basepri_ns, %0" : : "r" (basePri) : "memory"); + } + #endif + + + /** + \brief Set Base Priority with condition + \details Assigns the given value to the Base Priority register only if BASEPRI masking is disabled, + or the new value increases the BASEPRI priority level. 
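The PRIMASK accessors just above support the classic save/disable/restore critical section that works on every Cortex-M profile; a hedged, generic sketch (the helper name is invented):

    #include <stdint.h>

    static void with_all_irqs_masked(void (*body)(void))
    {
      uint32_t primask = __get_PRIMASK();   /* remember whether interrupts were already masked */
      __disable_irq();                      /* PRIMASK = 1: mask all configurable-priority interrupts */
      body();                               /* keep this short; even SysTick is blocked here */
      __set_PRIMASK(primask);               /* restore the previous masking state */
    }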
+ \param [in] basePri Base Priority value to set + */ + __STATIC_FORCEINLINE void __set_BASEPRI_MAX(uint32_t basePri) + { + __ASM volatile ("MSR basepri_max, %0" : : "r" (basePri) : "memory"); + } + + + /** + \brief Get Fault Mask + \details Returns the current value of the Fault Mask register. + \return Fault Mask register value + */ + __STATIC_FORCEINLINE uint32_t __get_FAULTMASK(void) + { + uint32_t result; + + __ASM volatile ("MRS %0, faultmask" : "=r" (result) ); + return (result); + } + + + #if (__ARM_FEATURE_CMSE == 3) + /** + \brief Get Fault Mask (non-secure) + \details Returns the current value of the non-secure Fault Mask register when in secure state. + \return Fault Mask register value + */ + __STATIC_FORCEINLINE uint32_t __TZ_get_FAULTMASK_NS(void) + { + uint32_t result; + + __ASM volatile ("MRS %0, faultmask_ns" : "=r" (result) ); + return (result); + } + #endif + + + /** + \brief Set Fault Mask + \details Assigns the given value to the Fault Mask register. + \param [in] faultMask Fault Mask value to set + */ + __STATIC_FORCEINLINE void __set_FAULTMASK(uint32_t faultMask) + { + __ASM volatile ("MSR faultmask, %0" : : "r" (faultMask) : "memory"); + } + + + #if (__ARM_FEATURE_CMSE == 3) + /** + \brief Set Fault Mask (non-secure) + \details Assigns the given value to the non-secure Fault Mask register when in secure state. + \param [in] faultMask Fault Mask value to set + */ + __STATIC_FORCEINLINE void __TZ_set_FAULTMASK_NS(uint32_t faultMask) + { + __ASM volatile ("MSR faultmask_ns, %0" : : "r" (faultMask) : "memory"); + } + #endif +#endif /* (__ARM_ARCH_ISA_THUMB >= 2) */ + + +#if (__ARM_ARCH >= 8) + /** + \brief Get Process Stack Pointer Limit + Devices without ARMv8-M Main Extensions (i.e. Cortex-M23) lack the non-secure + Stack Pointer Limit register hence zero is returned always in non-secure + mode. + + \details Returns the current value of the Process Stack Pointer Limit (PSPLIM). + \return PSPLIM Register value + */ + __STATIC_FORCEINLINE uint32_t __get_PSPLIM(void) + { + #if (((__ARM_ARCH_8M_MAIN__ < 1) && \ + (__ARM_ARCH_8_1M_MAIN__ < 1) ) && \ + (__ARM_FEATURE_CMSE < 3) ) + // without main extensions, the non-secure PSPLIM is RAZ/WI + return (0U); + #else + uint32_t result; + __ASM volatile ("MRS %0, psplim" : "=r" (result) ); + return (result); + #endif + } + + #if (__ARM_FEATURE_CMSE == 3) + /** + \brief Get Process Stack Pointer Limit (non-secure) + Devices without ARMv8-M Main Extensions (i.e. Cortex-M23) lack the non-secure + Stack Pointer Limit register hence zero is returned always. + + \details Returns the current value of the non-secure Process Stack Pointer Limit (PSPLIM) when in secure state. + \return PSPLIM Register value + */ + __STATIC_FORCEINLINE uint32_t __TZ_get_PSPLIM_NS(void) + { + #if ((__ARM_ARCH_8M_MAIN__ < 1) && (__ARM_ARCH_8_1M_MAIN__ < 1)) + // without main extensions, the non-secure PSPLIM is RAZ/WI + return (0U); + #else + uint32_t result; + __ASM volatile ("MRS %0, psplim_ns" : "=r" (result) ); + return (result); + #endif + } + #endif + + + /** + \brief Set Process Stack Pointer Limit + Devices without ARMv8-M Main Extensions (i.e. Cortex-M23) lack the non-secure + Stack Pointer Limit register hence the write is silently ignored in non-secure + mode. + + \details Assigns the given value to the Process Stack Pointer Limit (PSPLIM). 
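In contrast to PRIMASK, the BASEPRI accessors above allow a priority-filtered critical section on Thumb-2 cores: interrupts whose priority value is numerically greater than or equal to the threshold are masked, while more urgent (numerically smaller) priorities stay live. A small illustrative fragment, with the 0x40 threshold chosen arbitrarily:

    #include <stdint.h>

    static void update_shared_state(void)
    {
      uint32_t old = __get_BASEPRI();
      __set_BASEPRI_MAX(0x40U);     /* only tightens the mask; never loosens an existing one */
      /* ... touch data shared with interrupts of priority value >= 0x40 ... */
      __set_BASEPRI(old);           /* restore the previous threshold */
    }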
+ \param [in] ProcStackPtrLimit Process Stack Pointer Limit value to set + */ + __STATIC_FORCEINLINE void __set_PSPLIM(uint32_t ProcStackPtrLimit) + { + #if (((__ARM_ARCH_8M_MAIN__ < 1) && \ + (__ARM_ARCH_8_1M_MAIN__ < 1) ) && \ + (__ARM_FEATURE_CMSE < 3) ) + // without main extensions, the non-secure PSPLIM is RAZ/WI + (void)ProcStackPtrLimit; + #else + __ASM volatile ("MSR psplim, %0" : : "r" (ProcStackPtrLimit)); + #endif + } + + + #if (__ARM_FEATURE_CMSE == 3) + /** + \brief Set Process Stack Pointer (non-secure) + Devices without ARMv8-M Main Extensions (i.e. Cortex-M23) lack the non-secure + Stack Pointer Limit register hence the write is silently ignored. + + \details Assigns the given value to the non-secure Process Stack Pointer Limit (PSPLIM) when in secure state. + \param [in] ProcStackPtrLimit Process Stack Pointer Limit value to set + */ + __STATIC_FORCEINLINE void __TZ_set_PSPLIM_NS(uint32_t ProcStackPtrLimit) + { + #if ((__ARM_ARCH_8M_MAIN__ < 1) && (__ARM_ARCH_8_1M_MAIN__ < 1)) + // without main extensions, the non-secure PSPLIM is RAZ/WI + (void)ProcStackPtrLimit; + #else + __ASM volatile ("MSR psplim_ns, %0\n" : : "r" (ProcStackPtrLimit)); + #endif + } + #endif + + + /** + \brief Get Main Stack Pointer Limit + Devices without ARMv8-M Main Extensions (i.e. Cortex-M23) lack the non-secure + Stack Pointer Limit register hence zero is returned always. + + \details Returns the current value of the Main Stack Pointer Limit (MSPLIM). + \return MSPLIM Register value + */ + __STATIC_FORCEINLINE uint32_t __get_MSPLIM(void) + { + #if (((__ARM_ARCH_8M_MAIN__ < 1) && \ + (__ARM_ARCH_8_1M_MAIN__ < 1) ) && \ + (__ARM_FEATURE_CMSE < 3) ) + // without main extensions, the non-secure MSPLIM is RAZ/WI + return (0U); + #else + uint32_t result; + __ASM volatile ("MRS %0, msplim" : "=r" (result) ); + return (result); + #endif + } + + + #if (__ARM_FEATURE_CMSE == 3) + /** + \brief Get Main Stack Pointer Limit (non-secure) + Devices without ARMv8-M Main Extensions (i.e. Cortex-M23) lack the non-secure + Stack Pointer Limit register hence zero is returned always. + + \details Returns the current value of the non-secure Main Stack Pointer Limit(MSPLIM) when in secure state. + \return MSPLIM Register value + */ + __STATIC_FORCEINLINE uint32_t __TZ_get_MSPLIM_NS(void) + { + #if ((__ARM_ARCH_8M_MAIN__ < 1) && (__ARM_ARCH_8_1M_MAIN__ < 1)) + // without main extensions, the non-secure MSPLIM is RAZ/WI + return (0U); + #else + uint32_t result; + __ASM volatile ("MRS %0, msplim_ns" : "=r" (result) ); + return (result); + #endif + } + #endif + + + /** + \brief Set Main Stack Pointer Limit + Devices without ARMv8-M Main Extensions (i.e. Cortex-M23) lack the non-secure + Stack Pointer Limit register hence the write is silently ignored. + + \details Assigns the given value to the Main Stack Pointer Limit (MSPLIM). + \param [in] MainStackPtrLimit Main Stack Pointer Limit value to set + */ + __STATIC_FORCEINLINE void __set_MSPLIM(uint32_t MainStackPtrLimit) + { + #if (((__ARM_ARCH_8M_MAIN__ < 1) && \ + (__ARM_ARCH_8_1M_MAIN__ < 1) ) && \ + (__ARM_FEATURE_CMSE < 3) ) + // without main extensions, the non-secure MSPLIM is RAZ/WI + (void)MainStackPtrLimit; + #else + __ASM volatile ("MSR msplim, %0" : : "r" (MainStackPtrLimit)); + #endif + } + + + #if (__ARM_FEATURE_CMSE == 3) + /** + \brief Set Main Stack Pointer Limit (non-secure) + Devices without ARMv8-M Main Extensions (i.e. Cortex-M23) lack the non-secure + Stack Pointer Limit register hence the write is silently ignored. 
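The PSPLIM/MSPLIM accessors collected here are typically driven by an RTOS on Armv8-M Mainline cores so that a task stack overflow faults instead of silently corrupting memory. A hedged sketch; the task control block layout is hypothetical:

    #include <stdint.h>

    typedef struct {
      uint32_t *stack_base;   /* lowest address of the task's stack        */
      uint32_t *saved_sp;     /* process stack pointer saved at switch-out */
    } tcb_t;

    static void restore_task_stack(const tcb_t *task)
    {
      __set_PSPLIM((uint32_t)task->stack_base);  /* RAZ/WI on cores without the Main Extension, as noted above */
      __set_PSP((uint32_t)task->saved_sp);
    }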
+ + \details Assigns the given value to the non-secure Main Stack Pointer Limit (MSPLIM) when in secure state. + \param [in] MainStackPtrLimit Main Stack Pointer value to set + */ + __STATIC_FORCEINLINE void __TZ_set_MSPLIM_NS(uint32_t MainStackPtrLimit) + { + #if ((__ARM_ARCH_8M_MAIN__ < 1) && (__ARM_ARCH_8_1M_MAIN__ < 1)) + // without main extensions, the non-secure MSPLIM is RAZ/WI + (void)MainStackPtrLimit; + #else + __ASM volatile ("MSR msplim_ns, %0" : : "r" (MainStackPtrLimit)); + #endif + } + #endif +#endif /* (__ARM_ARCH >= 8) */ +/** @} end of CMSIS_Core_RegAccFunctions */ + + +#endif /* __CMSIS_TIARMCLANG_COREM_H */ diff --git a/CMSIS/Core/Include/cmsis_version.h b/CMSIS/Core/Include/cmsis_version.h index 8b4765f18..90529147c 100644 --- a/CMSIS/Core/Include/cmsis_version.h +++ b/CMSIS/Core/Include/cmsis_version.h @@ -1,11 +1,11 @@ /**************************************************************************//** * @file cmsis_version.h - * @brief CMSIS Core(M) Version definitions - * @version V5.0.5 - * @date 02. February 2022 + * @brief CMSIS Core Version definitions + * @version V6.0.0 + * @date 2. July 2023 ******************************************************************************/ /* - * Copyright (c) 2009-2022 ARM Limited. All rights reserved. + * Copyright (c) 2009-2023 ARM Limited. All rights reserved. * * SPDX-License-Identifier: Apache-2.0 * @@ -32,8 +32,24 @@ #define __CMSIS_VERSION_H /* CMSIS Version definitions */ -#define __CM_CMSIS_VERSION_MAIN ( 5U) /*!< [31:16] CMSIS Core(M) main version */ -#define __CM_CMSIS_VERSION_SUB ( 6U) /*!< [15:0] CMSIS Core(M) sub version */ -#define __CM_CMSIS_VERSION ((__CM_CMSIS_VERSION_MAIN << 16U) | \ - __CM_CMSIS_VERSION_SUB ) /*!< CMSIS Core(M) version number */ -#endif +#define __CMSIS_VERSION_MAIN ( 6U) /*!< \brief [31:16] CMSIS-Core(A/R/M) main version */ +#define __CMSIS_VERSION_SUB ( 0U) /*!< \brief [15:0] CMSIS-Core(A/R/M) sub version */ +#define __CMSIS_VERSION ((__CMSIS_VERSION_MAIN << 16U) | \ + _CMSIS_VERSION_SUB ) /*!< \brief CMSIS-Core(A/R/M) version number */ + +#define __CA_CMSIS_VERSION_MAIN (6U) /*!< \brief [31:16] CMSIS-Core(A) main version */ +#define __CA_CMSIS_VERSION_SUB (0U) /*!< \brief [15:0] CMSIS-Core(A) sub version */ +#define __CA_CMSIS_VERSION ((__CA_CMSIS_VERSION_MAIN << 16U) | \ + __CA_CMSIS_VERSION_SUB ) /*!< \brief CMSIS-Core(A) version number */ + +#define __CR_CMSIS_VERSION_MAIN (6U) /*!< \brief [31:16] CMSIS-Core(R) main version */ +#define __CR_CMSIS_VERSION_SUB (0U) /*!< \brief [15:0] CMSIS-Core(R) sub version */ +#define __CR_CMSIS_VERSION ((__CR_CMSIS_VERSION_MAIN << 16U) | \ + __CR_CMSIS_VERSION_SUB ) /*!< \brief CMSIS-Core(R) version number */ + +#define __CM_CMSIS_VERSION_MAIN ( 6U) /*!< \brief [31:16] CMSIS-Core(M) main version */ +#define __CM_CMSIS_VERSION_SUB ( 0U) /*!< \brief [15:0] CMSIS-Core(M) sub version */ +#define __CM_CMSIS_VERSION ((__CM_CMSIS_VERSION_MAIN << 16U) | \ + __CM_CMSIS_VERSION_SUB ) /*!< \brief CMSIS-Core(M) version number */ + +#endif \ No newline at end of file diff --git a/CMSIS/Core/Include/core_ca.h b/CMSIS/Core/Include/core_ca.h new file mode 100644 index 000000000..2da701bb9 --- /dev/null +++ b/CMSIS/Core/Include/core_ca.h @@ -0,0 +1,36 @@ +/**************************************************************************//** + * @file core_ca9.h + * @brief CMSIS Cortex-A9 Core Peripheral Access Layer Header File + * @version V1.0.0 + * @date 2. 
July 2023 + ******************************************************************************/ +/* + * Copyright (c) 2009-2023 ARM Limited. All rights reserved. + * + * SPDX-License-Identifier: Apache-2.0 + * + * Licensed under the Apache License, Version 2.0 (the License); you may + * not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an AS IS BASIS, WITHOUT + * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + /* + * This file exists for compatibility reasons only + */ + + +#ifndef __CORE_CA_H +#define __CORE_CA_H + + +#include "armv7a.h" + + +#endif /* __CORE_CA_H */ \ No newline at end of file diff --git a/CMSIS/Core/Include/core_ca35.h b/CMSIS/Core/Include/core_ca35.h new file mode 100644 index 000000000..0fd9f6442 --- /dev/null +++ b/CMSIS/Core/Include/core_ca35.h @@ -0,0 +1,41 @@ +/**************************************************************************//** + * @file core_ca35.h + * @brief CMSIS Cortex-A57 Core Peripheral Access Layer Header File + * @version V1.0.0 + * @date 2. July 2023 + ******************************************************************************/ +/* + * Copyright (c) 2009-2023 ARM Limited. All rights reserved. + * + * SPDX-License-Identifier: Apache-2.0 + * + * Licensed under the Apache License, Version 2.0 (the License); you may + * not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an AS IS BASIS, WITHOUT + * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + + +#ifndef __CORE_CA35_H +#define __CORE_CA35_H + +#define __CORTEX_A 35U /*!< \brief Cortex-A35 Core */ + +#if defined __CHECK_DEVICE_DEFINES + #ifndef __CA35_REV + #define __CA35_REV 0x0000U + #warning "__CA35_REV not defined in device header file; using default!" + #endif +#endif + +#include "armv8a.h" + + +#endif /* __CORE_CA35_H */ \ No newline at end of file diff --git a/CMSIS/Core/Include/core_ca5.h b/CMSIS/Core/Include/core_ca5.h new file mode 100644 index 000000000..e40dbc0c3 --- /dev/null +++ b/CMSIS/Core/Include/core_ca5.h @@ -0,0 +1,41 @@ +/**************************************************************************//** + * @file core_ca5.h + * @brief CMSIS Cortex-A5 Core Peripheral Access Layer Header File + * @version V1.0.0 + * @date 2. July 2023 + ******************************************************************************/ +/* + * Copyright (c) 2009-2023 ARM Limited. All rights reserved. + * + * SPDX-License-Identifier: Apache-2.0 + * + * Licensed under the Apache License, Version 2.0 (the License); you may + * not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an AS IS BASIS, WITHOUT + * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
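Stepping back to the cmsis_version.h hunk above: the new architecture-neutral __CMSIS_VERSION_* macros let middleware guard against an incompatible CMSIS-Core generation at compile time. A minimal, hypothetical guard:

    #include "cmsis_version.h"

    #if (__CMSIS_VERSION_MAIN < 6U)
      #error "This component requires CMSIS-Core 6.0.0 or newer"
    #endif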
+ * See the License for the specific language governing permissions and + * limitations under the License. + */ + + +#ifndef __CORE_CA5_H +#define __CORE_CA5_H + +#define __CORTEX_A 5U /*!< \brief Cortex-A5 Core */ + +#if defined __CHECK_DEVICE_DEFINES + #ifndef __CA5_REV + #define __CA5_REV 0x0000U + #warning "__CA5_REV not defined in device header file; using default!" + #endif +#endif + +#include "armv7a.h" + + +#endif /* __CORE_CA5_H */ \ No newline at end of file diff --git a/CMSIS/Core/Include/core_ca53.h b/CMSIS/Core/Include/core_ca53.h new file mode 100644 index 000000000..425fa59e1 --- /dev/null +++ b/CMSIS/Core/Include/core_ca53.h @@ -0,0 +1,41 @@ +/**************************************************************************//** + * @file core_ca53.h + * @brief CMSIS Cortex-A57 Core Peripheral Access Layer Header File + * @version V1.0.0 + * @date 2. July 2023 + ******************************************************************************/ +/* + * Copyright (c) 2009-2023 ARM Limited. All rights reserved. + * + * SPDX-License-Identifier: Apache-2.0 + * + * Licensed under the Apache License, Version 2.0 (the License); you may + * not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an AS IS BASIS, WITHOUT + * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + + +#ifndef __CORE_CA53_H +#define __CORE_CA53_H + +#define __CORTEX_A 53U /*!< \brief Cortex-A53 Core */ + +#if defined __CHECK_DEVICE_DEFINES + #ifndef __CA53_REV + #define __CA53_REV 0x0000U + #warning "__CA53_REV not defined in device header file; using default!" + #endif +#endif + +#include "armv8a.h" + + +#endif /* __CORE_CA53_H */ \ No newline at end of file diff --git a/CMSIS/Core/Include/core_ca57.h b/CMSIS/Core/Include/core_ca57.h new file mode 100644 index 000000000..5a5cfeca4 --- /dev/null +++ b/CMSIS/Core/Include/core_ca57.h @@ -0,0 +1,41 @@ +/**************************************************************************//** + * @file core_ca57.h + * @brief CMSIS Cortex-A57 Core Peripheral Access Layer Header File + * @version V1.0.0 + * @date 2. July 2023 + ******************************************************************************/ +/* + * Copyright (c) 2009-2023 ARM Limited. All rights reserved. + * + * SPDX-License-Identifier: Apache-2.0 + * + * Licensed under the Apache License, Version 2.0 (the License); you may + * not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an AS IS BASIS, WITHOUT + * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + + +#ifndef __CORE_CA57_H +#define __CORE_CA57_H + +#define __CORTEX_A 57U /*!< \brief Cortex-A57 Core */ + +#if defined __CHECK_DEVICE_DEFINES + #ifndef __CA57_REV + #define __CA57_REV 0x0000U + #warning "__CA57_REV not defined in device header file; using default!" 
+ #endif +#endif + +#include "armv8a.h" + + +#endif /* __CORE_CA57_H */ \ No newline at end of file diff --git a/CMSIS/Core/Include/core_ca7.h b/CMSIS/Core/Include/core_ca7.h new file mode 100644 index 000000000..726fbbb68 --- /dev/null +++ b/CMSIS/Core/Include/core_ca7.h @@ -0,0 +1,41 @@ +/**************************************************************************//** + * @file core_ca7.h + * @brief CMSIS Cortex-A7 Core Peripheral Access Layer Header File + * @version V1.0.0 + * @date 2. July 2023 + ******************************************************************************/ +/* + * Copyright (c) 2009-2023 ARM Limited. All rights reserved. + * + * SPDX-License-Identifier: Apache-2.0 + * + * Licensed under the Apache License, Version 2.0 (the License); you may + * not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an AS IS BASIS, WITHOUT + * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + + +#ifndef __CORE_CA7_H +#define __CORE_CA7_H + +#define __CORTEX_A 7U /*!< \brief Cortex-A7 Core */ + +#if defined __CHECK_DEVICE_DEFINES + #ifndef __CA7_REV + #define __CA7_REV 0x0000U + #warning "__CA7_REV not defined in device header file; using default!" + #endif +#endif + +#include "armv7a.h" + + +#endif /* __CORE_CA7_H */ \ No newline at end of file diff --git a/CMSIS/Core/Include/core_ca9.h b/CMSIS/Core/Include/core_ca9.h new file mode 100644 index 000000000..870be82f5 --- /dev/null +++ b/CMSIS/Core/Include/core_ca9.h @@ -0,0 +1,41 @@ +/**************************************************************************//** + * @file core_ca9.h + * @brief CMSIS Cortex-A9 Core Peripheral Access Layer Header File + * @version V1.0.0 + * @date 2. July 2023 + ******************************************************************************/ +/* + * Copyright (c) 2009-2023 ARM Limited. All rights reserved. + * + * SPDX-License-Identifier: Apache-2.0 + * + * Licensed under the Apache License, Version 2.0 (the License); you may + * not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an AS IS BASIS, WITHOUT + * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + + +#ifndef __CORE_CA9_H +#define __CORE_CA9_H + +#define __CORTEX_A 9U /*!< \brief Cortex-A9 Core */ + +#if defined __CHECK_DEVICE_DEFINES + #ifndef __CA7_REV + #define __CA7_REV 0x0000U + #warning "__CA7_REV not defined in device header file; using default!" + #endif +#endif + +#include "armv7a.h" + + +#endif /* __CORE_CA9_H */ \ No newline at end of file diff --git a/CMSIS/Core/Include/core_cr4.h b/CMSIS/Core/Include/core_cr4.h new file mode 100644 index 000000000..216f56b94 --- /dev/null +++ b/CMSIS/Core/Include/core_cr4.h @@ -0,0 +1,41 @@ +/**************************************************************************//** + * @file core_cr4.h + * @brief CMSIS Cortex-R4 Core Peripheral Access Layer Header File + * @version V1.0.0 + * @date 2. 
July 2023 + ******************************************************************************/ +/* + * Copyright (c) 2009-2023 ARM Limited. All rights reserved. + * + * SPDX-License-Identifier: Apache-2.0 + * + * Licensed under the Apache License, Version 2.0 (the License); you may + * not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an AS IS BASIS, WITHOUT + * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + + +#ifndef __CORE_CR4_H +#define __CORE_CR4_H + +#define __CORTEX_R 4U /*!< \brief Cortex-R5 Core */ + +#if defined __CHECK_DEVICE_DEFINES + #ifndef __CR4_REV + #define __CR4_REV 0x0000U + #warning "__CR4_REV not defined in device header file; using default!" + #endif +#endif + +#include "armv7r.h" + + +#endif /* __CORE_CR4_H */ \ No newline at end of file diff --git a/CMSIS/Core/Include/core_cr5.h b/CMSIS/Core/Include/core_cr5.h new file mode 100644 index 000000000..14f7cd922 --- /dev/null +++ b/CMSIS/Core/Include/core_cr5.h @@ -0,0 +1,41 @@ +/**************************************************************************//** + * @file core_cr5.h + * @brief CMSIS Cortex-R5 Core Peripheral Access Layer Header File + * @version V1.0.0 + * @date 2. July 2023 + ******************************************************************************/ +/* + * Copyright (c) 2009-2023 ARM Limited. All rights reserved. + * + * SPDX-License-Identifier: Apache-2.0 + * + * Licensed under the Apache License, Version 2.0 (the License); you may + * not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an AS IS BASIS, WITHOUT + * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + + +#ifndef __CORE_CR5_H +#define __CORE_CR5_H + +#define __CORTEX_R 5U /*!< \brief Cortex-R5 Core */ + +#if defined __CHECK_DEVICE_DEFINES + #ifndef __CR5_REV + #define __CR5_REV 0x0000U + #warning "__CR5_REV not defined in device header file; using default!" + #endif +#endif + +#include "armv7r.h" + + +#endif /* __CORE_CR5_H */ \ No newline at end of file diff --git a/CMSIS/Core/Include/core_cr52.h b/CMSIS/Core/Include/core_cr52.h new file mode 100644 index 000000000..7da16cca1 --- /dev/null +++ b/CMSIS/Core/Include/core_cr52.h @@ -0,0 +1,41 @@ +/**************************************************************************//** + * @file core_cr52.h + * @brief CMSIS Cortex-R52 Core Peripheral Access Layer Header File + * @version V1.0.0 + * @date 2. July 2023 + ******************************************************************************/ +/* + * Copyright (c) 2009-2023 ARM Limited. All rights reserved. + * + * SPDX-License-Identifier: Apache-2.0 + * + * Licensed under the Apache License, Version 2.0 (the License); you may + * not use this file except in compliance with the License. 
+ * You may obtain a copy of the License at + * + * www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an AS IS BASIS, WITHOUT + * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + + +#ifndef __CORE_CR52_H +#define __CORE_CR52_H + +#define __CORTEX_R 52U /*!< \brief Cortex-R52 Core */ + +#if defined __CHECK_DEVICE_DEFINES + #ifndef __CR52_REV + #define __CR52_REV 0x0000U + #warning "__CR52_REV not defined in device header file; using default!" + #endif +#endif + +#include "armv8r.h" + + +#endif /* __CORE_CR52_H */ \ No newline at end of file diff --git a/CMSIS/Core/Include/core_cr7.h b/CMSIS/Core/Include/core_cr7.h new file mode 100644 index 000000000..3540ff88c --- /dev/null +++ b/CMSIS/Core/Include/core_cr7.h @@ -0,0 +1,41 @@ +/**************************************************************************//** + * @file core_cr7.h + * @brief CMSIS Cortex-R7 Core Peripheral Access Layer Header File + * @version V1.0.0 + * @date 2. July 2023 + ******************************************************************************/ +/* + * Copyright (c) 2009-2023 ARM Limited. All rights reserved. + * + * SPDX-License-Identifier: Apache-2.0 + * + * Licensed under the Apache License, Version 2.0 (the License); you may + * not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an AS IS BASIS, WITHOUT + * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + + +#ifndef __CORE_CR7_H +#define __CORE_CR7_H + +#define __CORTEX_R 7U /*!< \brief Cortex-R7 Core */ + +#if defined __CHECK_DEVICE_DEFINES + #ifndef __CR7_REV + #define __CR7_REV 0x0000U + #warning "__CR7_REV not defined in device header file; using default!" + #endif +#endif + +#include "armv7r.h" + + +#endif /* __CORE_CR7_H */ \ No newline at end of file diff --git a/CMSIS/Core/Include/core_cr8.h b/CMSIS/Core/Include/core_cr8.h new file mode 100644 index 000000000..c8bdfbb46 --- /dev/null +++ b/CMSIS/Core/Include/core_cr8.h @@ -0,0 +1,41 @@ +/**************************************************************************//** + * @file core_cr8.h + * @brief CMSIS Cortex-R8 Core Peripheral Access Layer Header File + * @version V1.0.0 + * @date 2. July 2023 + ******************************************************************************/ +/* + * Copyright (c) 2009-2023 ARM Limited. All rights reserved. + * + * SPDX-License-Identifier: Apache-2.0 + * + * Licensed under the Apache License, Version 2.0 (the License); you may + * not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an AS IS BASIS, WITHOUT + * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
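These per-core headers are thin selectors: they pin __CORTEX_A/__CORTEX_R, optionally sanity-check the revision macro, and include the matching architecture header. A hedged sketch of how a vendor device header would consume one of them (the device name and revision value are invented):

    /* my_cr5_device.h -- hypothetical Cortex-R5 based device */
    #ifndef MY_CR5_DEVICE_H
    #define MY_CR5_DEVICE_H

    #define __CHECK_DEVICE_DEFINES  1U        /* ask the core header to verify the defines below */
    #define __CR5_REV               0x0100U   /* r1p0 */

    #include "core_cr5.h"                     /* defines __CORTEX_R 5U and includes armv7r.h */

    #endif /* MY_CR5_DEVICE_H */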
+ */ + + +#ifndef __CORE_CR8_H +#define __CORE_CR8_H + +#define __CORTEX_R 8U /*!< \brief Cortex-R8 Core */ + +#if defined __CHECK_DEVICE_DEFINES + #ifndef __CR8_REV + #define __CR8_REV 0x0000U + #warning "__CR8_REV not defined in device header file; using default!" + #endif +#endif + +#include "armv7r.h" + + +#endif /* __CORE_CR8_H */ \ No newline at end of file diff --git a/CMSIS/Core/Include/gic_v20.h b/CMSIS/Core/Include/gic_v20.h new file mode 100644 index 000000000..ab4d427b2 --- /dev/null +++ b/CMSIS/Core/Include/gic_v20.h @@ -0,0 +1,757 @@ +/****************************************************************************** + * @file gic_v20.h + * @brief CMSIS GIC 2.0 API for Armv7-A MPU and Armv7-R MCU + * @version V6.0.0 + * @date 8. July 2023 + ******************************************************************************/ +/* + * Copyright (c) 2017-2022 Arm Limited. All rights reserved. + * + * SPDX-License-Identifier: Apache-2.0 + * + * Licensed under the Apache License, Version 2.0 (the License); you may + * not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an AS IS BASIS, WITHOUT + * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +#if defined ( __ICCARM__ ) + #pragma system_include /* treat file as system include file for MISRA check */ +#elif defined (__clang__) + #pragma clang system_header /* treat file as system include file */ +#endif + +#ifndef ARM_GIC_V20_H +#define ARM_GIC_V20_H + +#include + +/** \brief Structure type to access the Generic Interrupt Controller Distributor (GICD) +*/ +typedef struct +{ + __IOM uint32_t CTLR; /*!< \brief Offset: 0x000 (R/W) Distributor Control Register */ + __IM uint32_t TYPER; /*!< \brief Offset: 0x004 (R/ ) Interrupt Controller Type Register */ + __IM uint32_t IIDR; /*!< \brief Offset: 0x008 (R/ ) Distributor Implementer Identification Register */ + RESERVED(0, uint32_t) + __IOM uint32_t STATUSR; /*!< \brief Offset: 0x010 (R/W) Error Reporting Status Register, optional */ + RESERVED(1[11], uint32_t) + __OM uint32_t SETSPI_NSR; /*!< \brief Offset: 0x040 ( /W) Set SPI Register */ + RESERVED(2, uint32_t) + __OM uint32_t CLRSPI_NSR; /*!< \brief Offset: 0x048 ( /W) Clear SPI Register */ + RESERVED(3, uint32_t) + __OM uint32_t SETSPI_SR; /*!< \brief Offset: 0x050 ( /W) Set SPI, Secure Register */ + RESERVED(4, uint32_t) + __OM uint32_t CLRSPI_SR; /*!< \brief Offset: 0x058 ( /W) Clear SPI, Secure Register */ + RESERVED(5[9], uint32_t) + __IOM uint32_t IGROUPR[32]; /*!< \brief Offset: 0x080 (R/W) Interrupt Group Registers */ + __IOM uint32_t ISENABLER[32]; /*!< \brief Offset: 0x100 (R/W) Interrupt Set-Enable Registers */ + __IOM uint32_t ICENABLER[32]; /*!< \brief Offset: 0x180 (R/W) Interrupt Clear-Enable Registers */ + __IOM uint32_t ISPENDR[32]; /*!< \brief Offset: 0x200 (R/W) Interrupt Set-Pending Registers */ + __IOM uint32_t ICPENDR[32]; /*!< \brief Offset: 0x280 (R/W) Interrupt Clear-Pending Registers */ + __IOM uint32_t ISACTIVER[32]; /*!< \brief Offset: 0x300 (R/W) Interrupt Set-Active Registers */ + __IOM uint32_t ICACTIVER[32]; /*!< \brief Offset: 0x380 (R/W) Interrupt Clear-Active Registers */ + __IOM uint32_t IPRIORITYR[255]; /*!< \brief Offset: 0x400 (R/W) Interrupt Priority 
Registers */ + RESERVED(6, uint32_t) + __IOM uint32_t ITARGETSR[255]; /*!< \brief Offset: 0x800 (R/W) Interrupt Targets Registers */ + RESERVED(7, uint32_t) + __IOM uint32_t ICFGR[64]; /*!< \brief Offset: 0xC00 (R/W) Interrupt Configuration Registers */ + __IOM uint32_t IGRPMODR[32]; /*!< \brief Offset: 0xD00 (R/W) Interrupt Group Modifier Registers */ + RESERVED(8[32], uint32_t) + __IOM uint32_t NSACR[64]; /*!< \brief Offset: 0xE00 (R/W) Non-secure Access Control Registers */ + __OM uint32_t SGIR; /*!< \brief Offset: 0xF00 ( /W) Software Generated Interrupt Register */ + RESERVED(9[3], uint32_t) + __IOM uint32_t CPENDSGIR[4]; /*!< \brief Offset: 0xF10 (R/W) SGI Clear-Pending Registers */ + __IOM uint32_t SPENDSGIR[4]; /*!< \brief Offset: 0xF20 (R/W) SGI Set-Pending Registers */ + RESERVED(10[5236], uint32_t) + __IOM uint64_t IROUTER[988]; /*!< \brief Offset: 0x6100(R/W) Interrupt Routing Registers */ +} GICDistributor_Type; + +#define GICDistributor ((GICDistributor_Type *) GIC_DISTRIBUTOR_BASE ) /*!< \brief GIC Distributor register set access pointer */ + +/* GICDistributor CTLR Register */ +#define GICDistributor_CTLR_EnableGrp0_Pos 0U /*!< GICDistributor CTLR: EnableGrp0 Position */ +#define GICDistributor_CTLR_EnableGrp0_Msk (0x1U /*<< GICDistributor_CTLR_EnableGrp0_Pos*/) /*!< GICDistributor CTLR: EnableGrp0 Mask */ +#define GICDistributor_CTLR_EnableGrp0(x) (((uint32_t)(((uint32_t)(x)) /*<< GICDistributor_CTLR_EnableGrp0_Pos*/)) & GICDistributor_CTLR_EnableGrp0_Msk) + +#define GICDistributor_CTLR_EnableGrp1_Pos 1U /*!< GICDistributor CTLR: EnableGrp1 Position */ +#define GICDistributor_CTLR_EnableGrp1_Msk (0x1U << GICDistributor_CTLR_EnableGrp1_Pos) /*!< GICDistributor CTLR: EnableGrp1 Mask */ +#define GICDistributor_CTLR_EnableGrp1(x) (((uint32_t)(((uint32_t)(x)) << GICDistributor_CTLR_EnableGrp1_Pos)) & GICDistributor_CTLR_EnableGrp1_Msk) + +#define GICDistributor_CTLR_ARE_Pos 4U /*!< GICDistributor CTLR: ARE Position */ +#define GICDistributor_CTLR_ARE_Msk (0x1U << GICDistributor_CTLR_ARE_Pos) /*!< GICDistributor CTLR: ARE Mask */ +#define GICDistributor_CTLR_ARE(x) (((uint32_t)(((uint32_t)(x)) << GICDistributor_CTLR_ARE_Pos)) & GICDistributor_CTLR_ARE_Msk) + +#define GICDistributor_CTLR_DC_Pos 6U /*!< GICDistributor CTLR: DC Position */ +#define GICDistributor_CTLR_DC_Msk (0x1U << GICDistributor_CTLR_DC_Pos) /*!< GICDistributor CTLR: DC Mask */ +#define GICDistributor_CTLR_DC(x) (((uint32_t)(((uint32_t)(x)) << GICDistributor_CTLR_DC_Pos)) & GICDistributor_CTLR_DC_Msk) + +#define GICDistributor_CTLR_EINWF_Pos 7U /*!< GICDistributor CTLR: EINWF Position */ +#define GICDistributor_CTLR_EINWF_Msk (0x1U << GICDistributor_CTLR_EINWF_Pos) /*!< GICDistributor CTLR: EINWF Mask */ +#define GICDistributor_CTLR_EINWF(x) (((uint32_t)(((uint32_t)(x)) << GICDistributor_CTLR_EINWF_Pos)) & GICDistributor_CTLR_EINWF_Msk) + +#define GICDistributor_CTLR_RWP_Pos 31U /*!< GICDistributor CTLR: RWP Position */ +#define GICDistributor_CTLR_RWP_Msk (0x1U << GICDistributor_CTLR_RWP_Pos) /*!< GICDistributor CTLR: RWP Mask */ +#define GICDistributor_CTLR_RWP(x) (((uint32_t)(((uint32_t)(x)) << GICDistributor_CTLR_RWP_Pos)) & GICDistributor_CTLR_RWP_Msk) + +/* GICDistributor TYPER Register */ +#define GICDistributor_TYPER_ITLinesNumber_Pos 0U /*!< GICDistributor TYPER: ITLinesNumber Position */ +#define GICDistributor_TYPER_ITLinesNumber_Msk (0x1FU /*<< GICDistributor_TYPER_ITLinesNumber_Pos*/) /*!< GICDistributor TYPER: ITLinesNumber Mask */ +#define GICDistributor_TYPER_ITLinesNumber(x) 
(((uint32_t)(((uint32_t)(x)) /*<< GICDistributor_TYPER_ITLinesNumber_Pos*/)) & GICDistributor_TYPER_ITLinesNumber_Msk) + +#define GICDistributor_TYPER_CPUNumber_Pos 5U /*!< GICDistributor TYPER: CPUNumber Position */ +#define GICDistributor_TYPER_CPUNumber_Msk (0x7U << GICDistributor_TYPER_CPUNumber_Pos) /*!< GICDistributor TYPER: CPUNumber Mask */ +#define GICDistributor_TYPER_CPUNumber(x) (((uint32_t)(((uint32_t)(x)) << GICDistributor_TYPER_CPUNumber_Pos)) & GICDistributor_TYPER_CPUNumber_Msk) + +#define GICDistributor_TYPER_SecurityExtn_Pos 10U /*!< GICDistributor TYPER: SecurityExtn Position */ +#define GICDistributor_TYPER_SecurityExtn_Msk (0x1U << GICDistributor_TYPER_SecurityExtn_Pos) /*!< GICDistributor TYPER: SecurityExtn Mask */ +#define GICDistributor_TYPER_SecurityExtn(x) (((uint32_t)(((uint32_t)(x)) << GICDistributor_TYPER_SecurityExtn_Pos)) & GICDistributor_TYPER_SecurityExtn_Msk) + +#define GICDistributor_TYPER_LSPI_Pos 11U /*!< GICDistributor TYPER: LSPI Position */ +#define GICDistributor_TYPER_LSPI_Msk (0x1FU << GICDistributor_TYPER_LSPI_Pos) /*!< GICDistributor TYPER: LSPI Mask */ +#define GICDistributor_TYPER_LSPI(x) (((uint32_t)(((uint32_t)(x)) << GICDistributor_TYPER_LSPI_Pos)) & GICDistributor_TYPER_LSPI_Msk) + +/* GICDistributor IIDR Register */ +#define GICDistributor_IIDR_Implementer_Pos 0U /*!< GICDistributor IIDR: Implementer Position */ +#define GICDistributor_IIDR_Implementer_Msk (0xFFFU /*<< GICDistributor_IIDR_Implementer_Pos*/) /*!< GICDistributor IIDR: Implementer Mask */ +#define GICDistributor_IIDR_Implementer(x) (((uint32_t)(((uint32_t)(x)) /*<< GICDistributor_IIDR_Implementer_Pos*/)) & GICDistributor_IIDR_Implementer_Msk) + +#define GICDistributor_IIDR_Revision_Pos 12U /*!< GICDistributor IIDR: Revision Position */ +#define GICDistributor_IIDR_Revision_Msk (0xFU << GICDistributor_IIDR_Revision_Pos) /*!< GICDistributor IIDR: Revision Mask */ +#define GICDistributor_IIDR_Revision(x) (((uint32_t)(((uint32_t)(x)) << GICDistributor_IIDR_Revision_Pos)) & GICDistributor_IIDR_Revision_Msk) + +#define GICDistributor_IIDR_Variant_Pos 16U /*!< GICDistributor IIDR: Variant Position */ +#define GICDistributor_IIDR_Variant_Msk (0xFU << GICDistributor_IIDR_Variant_Pos) /*!< GICDistributor IIDR: Variant Mask */ +#define GICDistributor_IIDR_Variant(x) (((uint32_t)(((uint32_t)(x)) << GICDistributor_IIDR_Variant_Pos)) & GICDistributor_IIDR_Variant_Msk) + +#define GICDistributor_IIDR_ProductID_Pos 24U /*!< GICDistributor IIDR: ProductID Position */ +#define GICDistributor_IIDR_ProductID_Msk (0xFFU << GICDistributor_IIDR_ProductID_Pos) /*!< GICDistributor IIDR: ProductID Mask */ +#define GICDistributor_IIDR_ProductID(x) (((uint32_t)(((uint32_t)(x)) << GICDistributor_IIDR_ProductID_Pos)) & GICDistributor_IIDR_ProductID_Msk) + +/* GICDistributor STATUSR Register */ +#define GICDistributor_STATUSR_RRD_Pos 0U /*!< GICDistributor STATUSR: RRD Position */ +#define GICDistributor_STATUSR_RRD_Msk (0x1U /*<< GICDistributor_STATUSR_RRD_Pos*/) /*!< GICDistributor STATUSR: RRD Mask */ +#define GICDistributor_STATUSR_RRD(x) (((uint32_t)(((uint32_t)(x)) /*<< GICDistributor_STATUSR_RRD_Pos*/)) & GICDistributor_STATUSR_RRD_Msk) + +#define GICDistributor_STATUSR_WRD_Pos 1U /*!< GICDistributor STATUSR: WRD Position */ +#define GICDistributor_STATUSR_WRD_Msk (0x1U << GICDistributor_STATUSR_WRD_Pos) /*!< GICDistributor STATUSR: WRD Mask */ +#define GICDistributor_STATUSR_WRD(x) (((uint32_t)(((uint32_t)(x)) << GICDistributor_STATUSR_WRD_Pos)) & GICDistributor_STATUSR_WRD_Msk) + +#define 
GICDistributor_STATUSR_RWOD_Pos 2U /*!< GICDistributor STATUSR: RWOD Position */ +#define GICDistributor_STATUSR_RWOD_Msk (0x1U << GICDistributor_STATUSR_RWOD_Pos) /*!< GICDistributor STATUSR: RWOD Mask */ +#define GICDistributor_STATUSR_RWOD(x) (((uint32_t)(((uint32_t)(x)) << GICDistributor_STATUSR_RWOD_Pos)) & GICDistributor_STATUSR_RWOD_Msk) + +#define GICDistributor_STATUSR_WROD_Pos 3U /*!< GICDistributor STATUSR: WROD Position */ +#define GICDistributor_STATUSR_WROD_Msk (0x1U << GICDistributor_STATUSR_WROD_Pos) /*!< GICDistributor STATUSR: WROD Mask */ +#define GICDistributor_STATUSR_WROD(x) (((uint32_t)(((uint32_t)(x)) << GICDistributor_STATUSR_WROD_Pos)) & GICDistributor_STATUSR_WROD_Msk) + +/* GICDistributor SETSPI_NSR Register */ +#define GICDistributor_SETSPI_NSR_INTID_Pos 0U /*!< GICDistributor SETSPI_NSR: INTID Position */ +#define GICDistributor_SETSPI_NSR_INTID_Msk (0x3FFU /*<< GICDistributor_SETSPI_NSR_INTID_Pos*/) /*!< GICDistributor SETSPI_NSR: INTID Mask */ +#define GICDistributor_SETSPI_NSR_INTID(x) (((uint32_t)(((uint32_t)(x)) /*<< GICDistributor_SETSPI_NSR_INTID_Pos*/)) & GICDistributor_SETSPI_NSR_INTID_Msk) + +/* GICDistributor CLRSPI_NSR Register */ +#define GICDistributor_CLRSPI_NSR_INTID_Pos 0U /*!< GICDistributor CLRSPI_NSR: INTID Position */ +#define GICDistributor_CLRSPI_NSR_INTID_Msk (0x3FFU /*<< GICDistributor_CLRSPI_NSR_INTID_Pos*/) /*!< GICDistributor CLRSPI_NSR: INTID Mask */ +#define GICDistributor_CLRSPI_NSR_INTID(x) (((uint32_t)(((uint32_t)(x)) /*<< GICDistributor_CLRSPI_NSR_INTID_Pos*/)) & GICDistributor_CLRSPI_NSR_INTID_Msk) + +/* GICDistributor SETSPI_SR Register */ +#define GICDistributor_SETSPI_SR_INTID_Pos 0U /*!< GICDistributor SETSPI_SR: INTID Position */ +#define GICDistributor_SETSPI_SR_INTID_Msk (0x3FFU /*<< GICDistributor_SETSPI_SR_INTID_Pos*/) /*!< GICDistributor SETSPI_SR: INTID Mask */ +#define GICDistributor_SETSPI_SR_INTID(x) (((uint32_t)(((uint32_t)(x)) /*<< GICDistributor_SETSPI_SR_INTID_Pos*/)) & GICDistributor_SETSPI_SR_INTID_Msk) + +/* GICDistributor CLRSPI_SR Register */ +#define GICDistributor_CLRSPI_SR_INTID_Pos 0U /*!< GICDistributor CLRSPI_SR: INTID Position */ +#define GICDistributor_CLRSPI_SR_INTID_Msk (0x3FFU /*<< GICDistributor_CLRSPI_SR_INTID_Pos*/) /*!< GICDistributor CLRSPI_SR: INTID Mask */ +#define GICDistributor_CLRSPI_SR_INTID(x) (((uint32_t)(((uint32_t)(x)) /*<< GICDistributor_CLRSPI_SR_INTID_Pos*/)) & GICDistributor_CLRSPI_SR_INTID_Msk) + +/* GICDistributor ITARGETSR Register */ +#define GICDistributor_ITARGETSR_CPU0_Pos 0U /*!< GICDistributor ITARGETSR: CPU0 Position */ +#define GICDistributor_ITARGETSR_CPU0_Msk (0x1U /*<< GICDistributor_ITARGETSR_CPU0_Pos*/) /*!< GICDistributor ITARGETSR: CPU0 Mask */ +#define GICDistributor_ITARGETSR_CPU0(x) (((uint8_t)(((uint8_t)(x)) /*<< GICDistributor_ITARGETSR_CPU0_Pos*/)) & GICDistributor_ITARGETSR_CPU0_Msk) + +#define GICDistributor_ITARGETSR_CPU1_Pos 1U /*!< GICDistributor ITARGETSR: CPU1 Position */ +#define GICDistributor_ITARGETSR_CPU1_Msk (0x1U << GICDistributor_ITARGETSR_CPU1_Pos) /*!< GICDistributor ITARGETSR: CPU1 Mask */ +#define GICDistributor_ITARGETSR_CPU1(x) (((uint8_t)(((uint8_t)(x)) << GICDistributor_ITARGETSR_CPU1_Pos)) & GICDistributor_ITARGETSR_CPU1_Msk) + +#define GICDistributor_ITARGETSR_CPU2_Pos 2U /*!< GICDistributor ITARGETSR: CPU2 Position */ +#define GICDistributor_ITARGETSR_CPU2_Msk (0x1U << GICDistributor_ITARGETSR_CPU2_Pos) /*!< GICDistributor ITARGETSR: CPU2 Mask */ +#define GICDistributor_ITARGETSR_CPU2(x) (((uint8_t)(((uint8_t)(x)) << 
GICDistributor_ITARGETSR_CPU2_Pos)) & GICDistributor_ITARGETSR_CPU2_Msk) + +#define GICDistributor_ITARGETSR_CPU3_Pos 3U /*!< GICDistributor ITARGETSR: CPU3 Position */ +#define GICDistributor_ITARGETSR_CPU3_Msk (0x1U << GICDistributor_ITARGETSR_CPU3_Pos) /*!< GICDistributor ITARGETSR: CPU3 Mask */ +#define GICDistributor_ITARGETSR_CPU3(x) (((uint8_t)(((uint8_t)(x)) << GICDistributor_ITARGETSR_CPU3_Pos)) & GICDistributor_ITARGETSR_CPU3_Msk) + +#define GICDistributor_ITARGETSR_CPU4_Pos 4U /*!< GICDistributor ITARGETSR: CPU4 Position */ +#define GICDistributor_ITARGETSR_CPU4_Msk (0x1U << GICDistributor_ITARGETSR_CPU4_Pos) /*!< GICDistributor ITARGETSR: CPU4 Mask */ +#define GICDistributor_ITARGETSR_CPU4(x) (((uint8_t)(((uint8_t)(x)) << GICDistributor_ITARGETSR_CPU4_Pos)) & GICDistributor_ITARGETSR_CPU4_Msk) + +#define GICDistributor_ITARGETSR_CPU5_Pos 5U /*!< GICDistributor ITARGETSR: CPU5 Position */ +#define GICDistributor_ITARGETSR_CPU5_Msk (0x1U << GICDistributor_ITARGETSR_CPU5_Pos) /*!< GICDistributor ITARGETSR: CPU5 Mask */ +#define GICDistributor_ITARGETSR_CPU5(x) (((uint8_t)(((uint8_t)(x)) << GICDistributor_ITARGETSR_CPU5_Pos)) & GICDistributor_ITARGETSR_CPU5_Msk) + +#define GICDistributor_ITARGETSR_CPU6_Pos 6U /*!< GICDistributor ITARGETSR: CPU6 Position */ +#define GICDistributor_ITARGETSR_CPU6_Msk (0x1U << GICDistributor_ITARGETSR_CPU6_Pos) /*!< GICDistributor ITARGETSR: CPU6 Mask */ +#define GICDistributor_ITARGETSR_CPU6(x) (((uint8_t)(((uint8_t)(x)) << GICDistributor_ITARGETSR_CPU6_Pos)) & GICDistributor_ITARGETSR_CPU6_Msk) + +#define GICDistributor_ITARGETSR_CPU7_Pos 7U /*!< GICDistributor ITARGETSR: CPU7 Position */ +#define GICDistributor_ITARGETSR_CPU7_Msk (0x1U << GICDistributor_ITARGETSR_CPU7_Pos) /*!< GICDistributor ITARGETSR: CPU7 Mask */ +#define GICDistributor_ITARGETSR_CPU7(x) (((uint8_t)(((uint8_t)(x)) << GICDistributor_ITARGETSR_CPU7_Pos)) & GICDistributor_ITARGETSR_CPU7_Msk) + +/* GICDistributor SGIR Register */ +#define GICDistributor_SGIR_INTID_Pos 0U /*!< GICDistributor SGIR: INTID Position */ +#define GICDistributor_SGIR_INTID_Msk (0x7U /*<< GICDistributor_SGIR_INTID_Pos*/) /*!< GICDistributor SGIR: INTID Mask */ +#define GICDistributor_SGIR_INTID(x) (((uint32_t)(((uint32_t)(x)) /*<< GICDistributor_SGIR_INTID_Pos*/)) & GICDistributor_SGIR_INTID_Msk) + +#define GICDistributor_SGIR_NSATT_Pos 15U /*!< GICDistributor SGIR: NSATT Position */ +#define GICDistributor_SGIR_NSATT_Msk (0x1U << GICDistributor_SGIR_NSATT_Pos) /*!< GICDistributor SGIR: NSATT Mask */ +#define GICDistributor_SGIR_NSATT(x) (((uint32_t)(((uint32_t)(x)) << GICDistributor_SGIR_NSATT_Pos)) & GICDistributor_SGIR_NSATT_Msk) + +#define GICDistributor_SGIR_CPUTargetList_Pos 16U /*!< GICDistributor SGIR: CPUTargetList Position */ +#define GICDistributor_SGIR_CPUTargetList_Msk (0xFFU << GICDistributor_SGIR_CPUTargetList_Pos) /*!< GICDistributor SGIR: CPUTargetList Mask */ +#define GICDistributor_SGIR_CPUTargetList(x) (((uint32_t)(((uint32_t)(x)) << GICDistributor_SGIR_CPUTargetList_Pos)) & GICDistributor_SGIR_CPUTargetList_Msk) + +#define GICDistributor_SGIR_TargetFilterList_Pos 24U /*!< GICDistributor SGIR: TargetFilterList Position */ +#define GICDistributor_SGIR_TargetFilterList_Msk (0x3U << GICDistributor_SGIR_TargetFilterList_Pos) /*!< GICDistributor SGIR: TargetFilterList Mask */ +#define GICDistributor_SGIR_TargetFilterList(x) (((uint32_t)(((uint32_t)(x)) << GICDistributor_SGIR_TargetFilterList_Pos)) & GICDistributor_SGIR_TargetFilterList_Msk) + +/* GICDistributor IROUTER Register */ +#define 
GICDistributor_IROUTER_Aff0_Pos 0UL /*!< GICDistributor IROUTER: Aff0 Position */ +#define GICDistributor_IROUTER_Aff0_Msk (0xFFUL /*<< GICDistributor_IROUTER_Aff0_Pos*/) /*!< GICDistributor IROUTER: Aff0 Mask */ +#define GICDistributor_IROUTER_Aff0(x) (((uint64_t)(((uint64_t)(x)) /*<< GICDistributor_IROUTER_Aff0_Pos*/)) & GICDistributor_IROUTER_Aff0_Msk) + +#define GICDistributor_IROUTER_Aff1_Pos 8UL /*!< GICDistributor IROUTER: Aff1 Position */ +#define GICDistributor_IROUTER_Aff1_Msk (0xFFUL << GICDistributor_IROUTER_Aff1_Pos) /*!< GICDistributor IROUTER: Aff1 Mask */ +#define GICDistributor_IROUTER_Aff1(x) (((uint64_t)(((uint64_t)(x)) << GICDistributor_IROUTER_Aff1_Pos)) & GICDistributor_IROUTER_Aff1_Msk) + +#define GICDistributor_IROUTER_Aff2_Pos 16UL /*!< GICDistributor IROUTER: Aff2 Position */ +#define GICDistributor_IROUTER_Aff2_Msk (0xFFUL << GICDistributor_IROUTER_Aff2_Pos) /*!< GICDistributor IROUTER: Aff2 Mask */ +#define GICDistributor_IROUTER_Aff2(x) (((uint64_t)(((uint64_t)(x)) << GICDistributor_IROUTER_Aff2_Pos)) & GICDistributor_IROUTER_Aff2_Msk) + +#define GICDistributor_IROUTER_IRM_Pos 31UL /*!< GICDistributor IROUTER: IRM Position */ +#define GICDistributor_IROUTER_IRM_Msk (0xFFUL << GICDistributor_IROUTER_IRM_Pos) /*!< GICDistributor IROUTER: IRM Mask */ +#define GICDistributor_IROUTER_IRM(x) (((uint64_t)(((uint64_t)(x)) << GICDistributor_IROUTER_IRM_Pos)) & GICDistributor_IROUTER_IRM_Msk) + +#define GICDistributor_IROUTER_Aff3_Pos 32UL /*!< GICDistributor IROUTER: Aff3 Position */ +#define GICDistributor_IROUTER_Aff3_Msk (0xFFUL << GICDistributor_IROUTER_Aff3_Pos) /*!< GICDistributor IROUTER: Aff3 Mask */ +#define GICDistributor_IROUTER_Aff3(x) (((uint64_t)(((uint64_t)(x)) << GICDistributor_IROUTER_Aff3_Pos)) & GICDistributor_IROUTER_Aff3_Msk) + + + +/** \brief Structure type to access the Generic Interrupt Controller Interface (GICC) +*/ +typedef struct +{ + __IOM uint32_t CTLR; /*!< \brief Offset: 0x000 (R/W) CPU Interface Control Register */ + __IOM uint32_t PMR; /*!< \brief Offset: 0x004 (R/W) Interrupt Priority Mask Register */ + __IOM uint32_t BPR; /*!< \brief Offset: 0x008 (R/W) Binary Point Register */ + __IM uint32_t IAR; /*!< \brief Offset: 0x00C (R/ ) Interrupt Acknowledge Register */ + __OM uint32_t EOIR; /*!< \brief Offset: 0x010 ( /W) End Of Interrupt Register */ + __IM uint32_t RPR; /*!< \brief Offset: 0x014 (R/ ) Running Priority Register */ + __IM uint32_t HPPIR; /*!< \brief Offset: 0x018 (R/ ) Highest Priority Pending Interrupt Register */ + __IOM uint32_t ABPR; /*!< \brief Offset: 0x01C (R/W) Aliased Binary Point Register */ + __IM uint32_t AIAR; /*!< \brief Offset: 0x020 (R/ ) Aliased Interrupt Acknowledge Register */ + __OM uint32_t AEOIR; /*!< \brief Offset: 0x024 ( /W) Aliased End Of Interrupt Register */ + __IM uint32_t AHPPIR; /*!< \brief Offset: 0x028 (R/ ) Aliased Highest Priority Pending Interrupt Register */ + __IOM uint32_t STATUSR; /*!< \brief Offset: 0x02C (R/W) Error Reporting Status Register, optional */ + RESERVED(1[40], uint32_t) + __IOM uint32_t APR[4]; /*!< \brief Offset: 0x0D0 (R/W) Active Priority Register */ + __IOM uint32_t NSAPR[4]; /*!< \brief Offset: 0x0E0 (R/W) Non-secure Active Priority Register */ + RESERVED(2[3], uint32_t) + __IM uint32_t IIDR; /*!< \brief Offset: 0x0FC (R/ ) CPU Interface Identification Register */ + RESERVED(3[960], uint32_t) + __OM uint32_t DIR; /*!< \brief Offset: 0x1000( /W) Deactivate Interrupt Register */ +} GICInterface_Type; + +#define GICInterface ((GICInterface_Type *) GIC_INTERFACE_BASE ) 
/*!< \brief GIC Interface register set access pointer */ + +/* GICInterface CTLR Register */ +#define GICInterface_CTLR_Enable_Pos 0U /*!< PTIM CTLR: Enable Position */ +#define GICInterface_CTLR_Enable_Msk (0x1U /*<< GICInterface_CTLR_Enable_Pos*/) /*!< PTIM CTLR: Enable Mask */ +#define GICInterface_CTLR_Enable(x) (((uint32_t)(((uint32_t)(x)) /*<< GICInterface_CTLR_Enable_Pos*/)) & GICInterface_CTLR_Enable_Msk) + +/* GICInterface PMR Register */ +#define GICInterface_PMR_Priority_Pos 0U /*!< PTIM PMR: Priority Position */ +#define GICInterface_PMR_Priority_Msk (0xFFU /*<< GICInterface_PMR_Priority_Pos*/) /*!< PTIM PMR: Priority Mask */ +#define GICInterface_PMR_Priority(x) (((uint32_t)(((uint32_t)(x)) /*<< GICInterface_PMR_Priority_Pos*/)) & GICInterface_PMR_Priority_Msk) + +/* GICInterface BPR Register */ +#define GICInterface_BPR_Binary_Point_Pos 0U /*!< PTIM BPR: Binary_Point Position */ +#define GICInterface_BPR_Binary_Point_Msk (0x7U /*<< GICInterface_BPR_Binary_Point_Pos*/) /*!< PTIM BPR: Binary_Point Mask */ +#define GICInterface_BPR_Binary_Point(x) (((uint32_t)(((uint32_t)(x)) /*<< GICInterface_BPR_Binary_Point_Pos*/)) & GICInterface_BPR_Binary_Point_Msk) + +/* GICInterface IAR Register */ +#define GICInterface_IAR_INTID_Pos 0U /*!< PTIM IAR: INTID Position */ +#define GICInterface_IAR_INTID_Msk (0xFFFFFFU /*<< GICInterface_IAR_INTID_Pos*/) /*!< PTIM IAR: INTID Mask */ +#define GICInterface_IAR_INTID(x) (((uint32_t)(((uint32_t)(x)) /*<< GICInterface_IAR_INTID_Pos*/)) & GICInterface_IAR_INTID_Msk) + +/* GICInterface EOIR Register */ +#define GICInterface_EOIR_INTID_Pos 0U /*!< PTIM EOIR: INTID Position */ +#define GICInterface_EOIR_INTID_Msk (0xFFFFFFU /*<< GICInterface_EOIR_INTID_Pos*/) /*!< PTIM EOIR: INTID Mask */ +#define GICInterface_EOIR_INTID(x) (((uint32_t)(((uint32_t)(x)) /*<< GICInterface_EOIR_INTID_Pos*/)) & GICInterface_EOIR_INTID_Msk) + +/* GICInterface RPR Register */ +#define GICInterface_RPR_INTID_Pos 0U /*!< PTIM RPR: INTID Position */ +#define GICInterface_RPR_INTID_Msk (0xFFU /*<< GICInterface_RPR_INTID_Pos*/) /*!< PTIM RPR: INTID Mask */ +#define GICInterface_RPR_INTID(x) (((uint32_t)(((uint32_t)(x)) /*<< GICInterface_RPR_INTID_Pos*/)) & GICInterface_RPR_INTID_Msk) + +/* GICInterface HPPIR Register */ +#define GICInterface_HPPIR_INTID_Pos 0U /*!< PTIM HPPIR: INTID Position */ +#define GICInterface_HPPIR_INTID_Msk (0xFFFFFFU /*<< GICInterface_HPPIR_INTID_Pos*/) /*!< PTIM HPPIR: INTID Mask */ +#define GICInterface_HPPIR_INTID(x) (((uint32_t)(((uint32_t)(x)) /*<< GICInterface_HPPIR_INTID_Pos*/)) & GICInterface_HPPIR_INTID_Msk) + +/* GICInterface ABPR Register */ +#define GICInterface_ABPR_Binary_Point_Pos 0U /*!< PTIM ABPR: Binary_Point Position */ +#define GICInterface_ABPR_Binary_Point_Msk (0x7U /*<< GICInterface_ABPR_Binary_Point_Pos*/) /*!< PTIM ABPR: Binary_Point Mask */ +#define GICInterface_ABPR_Binary_Point(x) (((uint32_t)(((uint32_t)(x)) /*<< GICInterface_ABPR_Binary_Point_Pos*/)) & GICInterface_ABPR_Binary_Point_Msk) + +/* GICInterface AIAR Register */ +#define GICInterface_AIAR_INTID_Pos 0U /*!< PTIM AIAR: INTID Position */ +#define GICInterface_AIAR_INTID_Msk (0xFFFFFFU /*<< GICInterface_AIAR_INTID_Pos*/) /*!< PTIM AIAR: INTID Mask */ +#define GICInterface_AIAR_INTID(x) (((uint32_t)(((uint32_t)(x)) /*<< GICInterface_AIAR_INTID_Pos*/)) & GICInterface_AIAR_INTID_Msk) + +/* GICInterface AEOIR Register */ +#define GICInterface_AEOIR_INTID_Pos 0U /*!< PTIM AEOIR: INTID Position */ +#define GICInterface_AEOIR_INTID_Msk (0xFFFFFFU /*<< 
GICInterface_AEOIR_INTID_Pos*/) /*!< PTIM AEOIR: INTID Mask */ +#define GICInterface_AEOIR_INTID(x) (((uint32_t)(((uint32_t)(x)) /*<< GICInterface_AEOIR_INTID_Pos*/)) & GICInterface_AEOIR_INTID_Msk) + +/* GICInterface AHPPIR Register */ +#define GICInterface_AHPPIR_INTID_Pos 0U /*!< PTIM AHPPIR: INTID Position */ +#define GICInterface_AHPPIR_INTID_Msk (0xFFFFFFU /*<< GICInterface_AHPPIR_INTID_Pos*/) /*!< PTIM AHPPIR: INTID Mask */ +#define GICInterface_AHPPIR_INTID(x) (((uint32_t)(((uint32_t)(x)) /*<< GICInterface_AHPPIR_INTID_Pos*/)) & GICInterface_AHPPIR_INTID_Msk) + +/* GICInterface STATUSR Register */ +#define GICInterface_STATUSR_RRD_Pos 0U /*!< GICInterface STATUSR: RRD Position */ +#define GICInterface_STATUSR_RRD_Msk (0x1U /*<< GICInterface_STATUSR_RRD_Pos*/) /*!< GICInterface STATUSR: RRD Mask */ +#define GICInterface_STATUSR_RRD(x) (((uint32_t)(((uint32_t)(x)) /*<< GICInterface_STATUSR_RRD_Pos*/)) & GICInterface_STATUSR_RRD_Msk) + +#define GICInterface_STATUSR_WRD_Pos 1U /*!< GICInterface STATUSR: WRD Position */ +#define GICInterface_STATUSR_WRD_Msk (0x1U << GICInterface_STATUSR_WRD_Pos) /*!< GICInterface STATUSR: WRD Mask */ +#define GICInterface_STATUSR_WRD(x) (((uint32_t)(((uint32_t)(x)) << GICInterface_STATUSR_WRD_Pos)) & GICInterface_STATUSR_WRD_Msk) + +#define GICInterface_STATUSR_RWOD_Pos 2U /*!< GICInterface STATUSR: RWOD Position */ +#define GICInterface_STATUSR_RWOD_Msk (0x1U << GICInterface_STATUSR_RWOD_Pos) /*!< GICInterface STATUSR: RWOD Mask */ +#define GICInterface_STATUSR_RWOD(x) (((uint32_t)(((uint32_t)(x)) << GICInterface_STATUSR_RWOD_Pos)) & GICInterface_STATUSR_RWOD_Msk) + +#define GICInterface_STATUSR_WROD_Pos 3U /*!< GICInterface STATUSR: WROD Position */ +#define GICInterface_STATUSR_WROD_Msk (0x1U << GICInterface_STATUSR_WROD_Pos) /*!< GICInterface STATUSR: WROD Mask */ +#define GICInterface_STATUSR_WROD(x) (((uint32_t)(((uint32_t)(x)) << GICInterface_STATUSR_WROD_Pos)) & GICInterface_STATUSR_WROD_Msk) + +#define GICInterface_STATUSR_ASV_Pos 4U /*!< GICInterface STATUSR: ASV Position */ +#define GICInterface_STATUSR_ASV_Msk (0x1U << GICInterface_STATUSR_ASV_Pos) /*!< GICInterface STATUSR: ASV Mask */ +#define GICInterface_STATUSR_ASV(x) (((uint32_t)(((uint32_t)(x)) << GICInterface_STATUSR_ASV_Pos)) & GICInterface_STATUSR_ASV_Msk) + +/* GICInterface IIDR Register */ +#define GICInterface_IIDR_Implementer_Pos 0U /*!< GICInterface IIDR: Implementer Position */ +#define GICInterface_IIDR_Implementer_Msk (0xFFFU /*<< GICInterface_IIDR_Implementer_Pos*/) /*!< GICInterface IIDR: Implementer Mask */ +#define GICInterface_IIDR_Implementer(x) (((uint32_t)(((uint32_t)(x)) /*<< GICInterface_IIDR_Implementer_Pos*/)) & GICInterface_IIDR_Implementer_Msk) + +#define GICInterface_IIDR_Revision_Pos 12U /*!< GICInterface IIDR: Revision Position */ +#define GICInterface_IIDR_Revision_Msk (0xFU << GICInterface_IIDR_Revision_Pos) /*!< GICInterface IIDR: Revision Mask */ +#define GICInterface_IIDR_Revision(x) (((uint32_t)(((uint32_t)(x)) << GICInterface_IIDR_Revision_Pos)) & GICInterface_IIDR_Revision_Msk) + +#define GICInterface_IIDR_Arch_version_Pos 16U /*!< GICInterface IIDR: Arch_version Position */ +#define GICInterface_IIDR_Arch_version_Msk (0xFU << GICInterface_IIDR_Arch_version_Pos) /*!< GICInterface IIDR: Arch_version Mask */ +#define GICInterface_IIDR_Arch_version(x) (((uint32_t)(((uint32_t)(x)) << GICInterface_IIDR_Arch_version_Pos)) & GICInterface_IIDR_Arch_version_Msk) + +#define GICInterface_IIDR_ProductID_Pos 20U /*!< GICInterface IIDR: ProductID Position */ 
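As an aside, these interface register macros follow the same convention used throughout the file: Name_Pos is the bit offset, Name_Msk the mask at that offset, and Name(x) shifts a value into field position. A purely illustrative sketch of decoding a read value with the helpers defined above (it assumes the device header provides GIC_INTERFACE_BASE so that GICInterface is usable):

/* Sketch: extracting IIDR fields with the _Pos/_Msk helpers. */
uint32_t iidr     = GICInterface->IIDR;
uint32_t impl     = iidr & GICInterface_IIDR_Implementer_Msk;  /* field starts at bit 0, no shift needed */
uint32_t arch_ver = (iidr & GICInterface_IIDR_Arch_version_Msk) >> GICInterface_IIDR_Arch_version_Pos;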
+#define GICInterface_IIDR_ProductID_Msk (0xFFFU << GICInterface_IIDR_ProductID_Pos) /*!< GICInterface IIDR: ProductID Mask */ +#define GICInterface_IIDR_ProductID(x) (((uint32_t)(((uint32_t)(x)) << GICInterface_IIDR_ProductID_Pos)) & GICInterface_IIDR_ProductID_Msk) + +/* GICInterface DIR Register */ +#define GICInterface_DIR_INTID_Pos 0U /*!< PTIM DIR: INTID Position */ +#define GICInterface_DIR_INTID_Msk (0xFFFFFFU /*<< GICInterface_DIR_INTID_Pos*/) /*!< PTIM DIR: INTID Mask */ +#define GICInterface_DIR_INTID(x) (((uint32_t)(((uint32_t)(x)) /*<< GICInterface_DIR_INTID_Pos*/)) & GICInterface_DIR_INTID_Msk) + + + +/** \brief Enable the interrupt distributor using the GIC's CTLR register. +*/ +__STATIC_INLINE void GIC_EnableDistributor(void) +{ + GICDistributor->CTLR |= 1U; +} + +/** \brief Disable the interrupt distributor using the GIC's CTLR register. +*/ +__STATIC_INLINE void GIC_DisableDistributor(void) +{ + GICDistributor->CTLR &=~1U; +} + +/** \brief Read the GIC's TYPER register. +* \return GICDistributor_Type::TYPER +*/ +__STATIC_INLINE uint32_t GIC_DistributorInfo(void) +{ + return (GICDistributor->TYPER); +} + +/** \brief Reads the GIC's IIDR register. +* \return GICDistributor_Type::IIDR +*/ +__STATIC_INLINE uint32_t GIC_DistributorImplementer(void) +{ + return (GICDistributor->IIDR); +} + +/** \brief Sets the GIC's ITARGETSR register for the given interrupt. +* \param [in] IRQn Interrupt to be configured. +* \param [in] cpu_target CPU interfaces to assign this interrupt to. +*/ +__STATIC_INLINE void GIC_SetTarget(IRQn_Type IRQn, uint32_t cpu_target) +{ + uint32_t mask = GICDistributor->ITARGETSR[IRQn / 4U] & ~(0xFFUL << ((IRQn % 4U) * 8U)); + GICDistributor->ITARGETSR[IRQn / 4U] = mask | ((cpu_target & 0xFFUL) << ((IRQn % 4U) * 8U)); +} + +/** \brief Read the GIC's ITARGETSR register. +* \param [in] IRQn Interrupt to acquire the configuration for. +* \return GICDistributor_Type::ITARGETSR +*/ +__STATIC_INLINE uint32_t GIC_GetTarget(IRQn_Type IRQn) +{ + return (GICDistributor->ITARGETSR[IRQn / 4U] >> ((IRQn % 4U) * 8U)) & 0xFFUL; +} + +/** \brief Enable the CPU's interrupt interface. +*/ +__STATIC_INLINE void GIC_EnableInterface(void) +{ + GICInterface->CTLR |= 1U; //enable interface +} + +/** \brief Disable the CPU's interrupt interface. +*/ +__STATIC_INLINE void GIC_DisableInterface(void) +{ + GICInterface->CTLR &=~1U; //disable distributor +} + +/** \brief Read the CPU's IAR register. +* \return GICInterface_Type::IAR +*/ +__STATIC_INLINE IRQn_Type GIC_AcknowledgePending(void) +{ + return (IRQn_Type)(GICInterface->IAR); +} + +/** \brief Writes the given interrupt number to the CPU's EOIR register. +* \param [in] IRQn The interrupt to be signaled as finished. +*/ +__STATIC_INLINE void GIC_EndInterrupt(IRQn_Type IRQn) +{ + GICInterface->EOIR = IRQn; +} + +/** \brief Enables the given interrupt using GIC's ISENABLER register. +* \param [in] IRQn The interrupt to be enabled. +*/ +__STATIC_INLINE void GIC_EnableIRQ(IRQn_Type IRQn) +{ + GICDistributor->ISENABLER[IRQn / 32U] = 1U << (IRQn % 32U); +} + +/** \brief Get interrupt enable status using GIC's ISENABLER register. +* \param [in] IRQn The interrupt to be queried. +* \return 0 - interrupt is not enabled, 1 - interrupt is enabled. +*/ +__STATIC_INLINE uint32_t GIC_GetEnableIRQ(IRQn_Type IRQn) +{ + return (GICDistributor->ISENABLER[IRQn / 32U] >> (IRQn % 32U)) & 1UL; +} + +/** \brief Disables the given interrupt using GIC's ICENABLER register. +* \param [in] IRQn The interrupt to be disabled. 
+*/ +__STATIC_INLINE void GIC_DisableIRQ(IRQn_Type IRQn) +{ + GICDistributor->ICENABLER[IRQn / 32U] = 1U << (IRQn % 32U); +} + +/** \brief Get interrupt pending status from GIC's ISPENDR register. +* \param [in] IRQn The interrupt to be queried. +* \return 0 - interrupt is not pending, 1 - interrupt is pending. +*/ +__STATIC_INLINE uint32_t GIC_GetPendingIRQ(IRQn_Type IRQn) +{ + uint32_t pend; + + if (IRQn >= 16U) { + pend = (GICDistributor->ISPENDR[IRQn / 32U] >> (IRQn % 32U)) & 1UL; + } else { + // INTID 0-15 Software Generated Interrupt + pend = (GICDistributor->SPENDSGIR[IRQn / 4U] >> ((IRQn % 4U) * 8U)) & 0xFFUL; + // No CPU identification offered + if (pend != 0U) { + pend = 1U; + } else { + pend = 0U; + } + } + + return (pend); +} + +/** \brief Sets the given interrupt as pending using GIC's ISPENDR register. +* \param [in] IRQn The interrupt to be set pending. +*/ +__STATIC_INLINE void GIC_SetPendingIRQ(IRQn_Type IRQn) +{ + if (IRQn >= 16U) { + GICDistributor->ISPENDR[IRQn / 32U] = 1U << (IRQn % 32U); + } else { + // INTID 0-15 Software Generated Interrupt + // Forward the interrupt to the CPU interface that requested it + GICDistributor->SGIR = (IRQn | 0x02000000U); + } +} + +/** \brief Clears the given interrupt from being pending using GIC's ICPENDR register. +* \param [in] IRQn The interrupt to be cleared from pending. +*/ +__STATIC_INLINE void GIC_ClearPendingIRQ(IRQn_Type IRQn) +{ + if (IRQn >= 16U) { + GICDistributor->ICPENDR[IRQn / 32U] = 1U << (IRQn % 32U); + } else { + // INTID 0-15 Software Generated Interrupt + GICDistributor->CPENDSGIR[IRQn / 4U] = 1U << ((IRQn % 4U) * 8U); + } +} + +/** \brief Sets the interrupt configuration using GIC's ICFGR register. +* \param [in] IRQn The interrupt to be configured. +* \param [in] int_config Int_config field value. Bit 0: Reserved (0 - N-N model, 1 - 1-N model for some GIC before v1) +* Bit 1: 0 - level sensitive, 1 - edge triggered +*/ +__STATIC_INLINE void GIC_SetConfiguration(IRQn_Type IRQn, uint32_t int_config) +{ + uint32_t icfgr = GICDistributor->ICFGR[IRQn / 16U]; /* read current register content */ + uint32_t shift = (IRQn % 16U) << 1U; /* calculate shift value */ + + int_config &= 3U; /* only 2 bits are valid */ + icfgr &= (~(3U << shift)); /* clear bits to change */ + icfgr |= ( int_config << shift); /* set new configuration */ + + GICDistributor->ICFGR[IRQn / 16U] = icfgr; /* write new register content */ +} + +/** \brief Get the interrupt configuration from the GIC's ICFGR register. +* \param [in] IRQn Interrupt to acquire the configuration for. +* \return Int_config field value. Bit 0: Reserved (0 - N-N model, 1 - 1-N model for some GIC before v1) +* Bit 1: 0 - level sensitive, 1 - edge triggered +*/ +__STATIC_INLINE uint32_t GIC_GetConfiguration(IRQn_Type IRQn) +{ + return ((GICDistributor->ICFGR[IRQn / 16U] >> ((IRQn % 16U) << 1U)) & 3U); +} + +/** \brief Set the priority for the given interrupt in the GIC's IPRIORITYR register. +* \param [in] IRQn The interrupt to be configured. +* \param [in] priority The priority for the interrupt, lower values denote higher priorities. +*/ +__STATIC_INLINE void GIC_SetPriority(IRQn_Type IRQn, uint32_t priority) +{ + uint32_t mask = GICDistributor->IPRIORITYR[IRQn / 4U] & ~(0xFFUL << ((IRQn % 4U) * 8U)); + GICDistributor->IPRIORITYR[IRQn / 4U] = mask | ((priority & 0xFFUL) << ((IRQn % 4U) * 8U)); +} + +/** \brief Read the current interrupt priority from GIC's IPRIORITYR register. +* \param [in] IRQn The interrupt to be queried. 
+*/ +__STATIC_INLINE uint32_t GIC_GetPriority(IRQn_Type IRQn) +{ + return (GICDistributor->IPRIORITYR[IRQn / 4U] >> ((IRQn % 4U) * 8U)) & 0xFFUL; +} + +/** \brief Set the interrupt priority mask using CPU's PMR register. +* \param [in] priority Priority mask to be set. +*/ +__STATIC_INLINE void GIC_SetInterfacePriorityMask(uint32_t priority) +{ + GICInterface->PMR = priority & 0xFFUL; //set priority mask +} + +/** \brief Read the current interrupt priority mask from CPU's PMR register. +* \result GICInterface_Type::PMR +*/ +__STATIC_INLINE uint32_t GIC_GetInterfacePriorityMask(void) +{ + return GICInterface->PMR; +} + +/** \brief Configures the group priority and subpriority split point using CPU's BPR register. +* \param [in] binary_point Amount of bits used as subpriority. +*/ +__STATIC_INLINE void GIC_SetBinaryPoint(uint32_t binary_point) +{ + GICInterface->BPR = binary_point & 7U; //set binary point +} + +/** \brief Read the current group priority and subpriority split point from CPU's BPR register. +* \return GICInterface_Type::BPR +*/ +__STATIC_INLINE uint32_t GIC_GetBinaryPoint(void) +{ + return GICInterface->BPR; +} + +/** \brief Get the status for a given interrupt. +* \param [in] IRQn The interrupt to get status for. +* \return 0 - not pending/active, 1 - pending, 2 - active, 3 - pending and active +*/ +__STATIC_INLINE uint32_t GIC_GetIRQStatus(IRQn_Type IRQn) +{ + uint32_t pending, active; + + active = ((GICDistributor->ISACTIVER[IRQn / 32U]) >> (IRQn % 32U)) & 1UL; + pending = ((GICDistributor->ISPENDR[IRQn / 32U]) >> (IRQn % 32U)) & 1UL; + + return ((active<<1U) | pending); +} + +/** \brief Generate a software interrupt using GIC's SGIR register. +* \param [in] IRQn Software interrupt to be generated. +* \param [in] target_list List of CPUs the software interrupt should be forwarded to. +* \param [in] filter_list Filter to be applied to determine interrupt receivers. +*/ +__STATIC_INLINE void GIC_SendSGI(IRQn_Type IRQn, uint32_t target_list, uint32_t filter_list) +{ + GICDistributor->SGIR = ((filter_list & 3U) << 24U) | ((target_list & 0xFFUL) << 16U) | (IRQn & 0x0FUL); +} + +/** \brief Get the interrupt number of the highest interrupt pending from CPU's HPPIR register. +* \return GICInterface_Type::HPPIR +*/ +__STATIC_INLINE uint32_t GIC_GetHighPendingIRQ(void) +{ + return GICInterface->HPPIR; +} + +/** \brief Provides information about the implementer and revision of the CPU interface. +* \return GICInterface_Type::IIDR +*/ +__STATIC_INLINE uint32_t GIC_GetInterfaceId(void) +{ + return GICInterface->IIDR; +} + +/** \brief Set the interrupt group from the GIC's IGROUPR register. +* \param [in] IRQn The interrupt to be queried. +* \param [in] group Interrupt group number: 0 - Group 0, 1 - Group 1 +*/ +__STATIC_INLINE void GIC_SetGroup(IRQn_Type IRQn, uint32_t group) +{ + uint32_t igroupr = GICDistributor->IGROUPR[IRQn / 32U]; + uint32_t shift = (IRQn % 32U); + + igroupr &= (~(1U << shift)); + igroupr |= ( (group & 1U) << shift); + + GICDistributor->IGROUPR[IRQn / 32U] = igroupr; +} +#define GIC_SetSecurity GIC_SetGroup + +/** \brief Get the interrupt group from the GIC's IGROUPR register. +* \param [in] IRQn The interrupt to be queried. +* \return 0 - Group 0, 1 - Group 1 +*/ +__STATIC_INLINE uint32_t GIC_GetGroup(IRQn_Type IRQn) +{ + return (GICDistributor->IGROUPR[IRQn / 32U] >> (IRQn % 32U)) & 1UL; +} +#define GIC_GetSecurity GIC_GetGroup + +/** \brief Initialize the interrupt distributor. 
+*/ +__STATIC_INLINE void GIC_DistInit(void) +{ + uint32_t i; + uint32_t num_irq = 0U; + uint32_t priority_field; + + //A reset sets all bits in the IGROUPRs corresponding to the SPIs to 0, + //configuring all of the interrupts as Secure. + + //Disable interrupt forwarding + GIC_DisableDistributor(); + //Get the maximum number of interrupts that the GIC supports + num_irq = 32U * ((GIC_DistributorInfo() & 0x1FU) + 1U); + + /* Priority level is implementation defined. + To determine the number of priority bits implemented write 0xFF to an IPRIORITYR + priority field and read back the value stored.*/ + GIC_SetPriority((IRQn_Type)0U, 0xFFU); + priority_field = GIC_GetPriority((IRQn_Type)0U); + + for (i = 32U; i < num_irq; i++) + { + //Disable the SPI interrupt + GIC_DisableIRQ((IRQn_Type)i); + //Set level-sensitive (and N-N model) + GIC_SetConfiguration((IRQn_Type)i, 0U); + //Set priority + GIC_SetPriority((IRQn_Type)i, priority_field/2U); + //Set target list to CPU0 + GIC_SetTarget((IRQn_Type)i, 1U); + } + //Enable distributor + GIC_EnableDistributor(); +} + +/** \brief Initialize the CPU's interrupt interface +*/ +__STATIC_INLINE void GIC_CPUInterfaceInit(void) +{ + uint32_t i; + uint32_t priority_field; + + //A reset sets all bits in the IGROUPRs corresponding to the SPIs to 0, + //configuring all of the interrupts as Secure. + + //Disable interrupt forwarding + GIC_DisableInterface(); + + /* Priority level is implementation defined. + To determine the number of priority bits implemented write 0xFF to an IPRIORITYR + priority field and read back the value stored.*/ + GIC_SetPriority((IRQn_Type)0U, 0xFFU); + priority_field = GIC_GetPriority((IRQn_Type)0U); + + //SGI and PPI + for (i = 0U; i < 32U; i++) + { + if(i > 15U) { + //Set level-sensitive (and N-N model) for PPI + GIC_SetConfiguration((IRQn_Type)i, 0U); + } + //Disable SGI and PPI interrupts + GIC_DisableIRQ((IRQn_Type)i); + //Set priority + GIC_SetPriority((IRQn_Type)i, priority_field/2U); + } + //Enable interface + GIC_EnableInterface(); + //Set binary point to 0 + GIC_SetBinaryPoint(0U); + //Set priority mask + GIC_SetInterfacePriorityMask(0xFFU); +} + +/** \brief Initialize and enable the GIC +*/ +__STATIC_INLINE void GIC_Enable(void) +{ + GIC_DistInit(); + GIC_CPUInterfaceInit(); //per CPU +} + +#endif /* ARM_GIC_V20_H */ \ No newline at end of file diff --git a/CMSIS/Core_A/Include/irq_ctrl.h b/CMSIS/Core/Include/irq_ctrl.h similarity index 99% rename from CMSIS/Core_A/Include/irq_ctrl.h rename to CMSIS/Core/Include/irq_ctrl.h index 1ca29a27e..b61991f62 100644 --- a/CMSIS/Core_A/Include/irq_ctrl.h +++ b/CMSIS/Core/Include/irq_ctrl.h @@ -5,7 +5,7 @@ * @date 03. March 2020 ******************************************************************************/ /* - * Copyright (c) 2017-2020 ARM Limited. All rights reserved. + * Copyright (c) 2017-2023 ARM Limited. All rights reserved. 
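Putting the gic_v20.h pieces together, a typical bring-up on the primary core looks roughly like the sketch below. The interrupt ID 42, the priority value 0xA0U and the two function names are illustrative assumptions; IRQn_Type, GIC_DISTRIBUTOR_BASE and GIC_INTERFACE_BASE are expected to come from the device header.

/* Sketch: enable the GIC and route one SPI to CPU interface 0. */
void board_irq_init(void)
{
  GIC_Enable();                             /* GIC_DistInit() followed by GIC_CPUInterfaceInit() */

  GIC_SetConfiguration((IRQn_Type)42, 0U);  /* level sensitive */
  GIC_SetPriority((IRQn_Type)42, 0xA0U);    /* lower value = higher priority */
  GIC_SetTarget((IRQn_Type)42, 1U);         /* forward to CPU interface 0 */
  GIC_EnableIRQ((IRQn_Type)42);
}

/* Sketch: minimal acknowledge/dispatch/end-of-interrupt flow in the IRQ exception path. */
void irq_dispatch(void)
{
  IRQn_Type irqn = GIC_AcknowledgePending();
  /* ... call the handler registered for irqn ... */
  GIC_EndInterrupt(irqn);
}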
* * SPDX-License-Identifier: Apache-2.0 * diff --git a/CMSIS/Core_A/Source/irq_ctrl_gic.c b/CMSIS/Core/Source/irq_ctrl_gic.c similarity index 100% rename from CMSIS/Core_A/Source/irq_ctrl_gic.c rename to CMSIS/Core/Source/irq_ctrl_gic.c diff --git a/CMSIS/CoreValidation/Layer/Target/CA5/RTE/Device/ARMCA5/mmu_ARMCA5.c b/CMSIS/CoreValidation/Layer/Target/CA5/RTE/Device/ARMCA5/mmu_ARMCA5.c index 58a94805d..b6b027269 100644 --- a/CMSIS/CoreValidation/Layer/Target/CA5/RTE/Device/ARMCA5/mmu_ARMCA5.c +++ b/CMSIS/CoreValidation/Layer/Target/CA5/RTE/Device/ARMCA5/mmu_ARMCA5.c @@ -145,7 +145,7 @@ void MMU_CreateTranslationTable(void) MMU_TTSection (TTB_BASE, 0, 4096, DESCRIPTOR_FAULT); /* - * Generate descriptors. Refer to core_ca.h to get information about attributes + * Generate descriptors. Refer to armv7a.h to get information about attributes * */ //Create descriptors for Vectors, RO, RW, ZI sections diff --git a/CMSIS/CoreValidation/Layer/Target/CA7/RTE/Device/ARMCA7/mmu_ARMCA7.c b/CMSIS/CoreValidation/Layer/Target/CA7/RTE/Device/ARMCA7/mmu_ARMCA7.c index 26431f3b4..758e254f5 100644 --- a/CMSIS/CoreValidation/Layer/Target/CA7/RTE/Device/ARMCA7/mmu_ARMCA7.c +++ b/CMSIS/CoreValidation/Layer/Target/CA7/RTE/Device/ARMCA7/mmu_ARMCA7.c @@ -145,7 +145,7 @@ void MMU_CreateTranslationTable(void) MMU_TTSection (TTB_BASE, 0, 4096, DESCRIPTOR_FAULT); /* - * Generate descriptors. Refer to core_ca.h to get information about attributes + * Generate descriptors. Refer to armv7a.h to get information about attributes * */ //Create descriptors for Vectors, RO, RW, ZI sections diff --git a/CMSIS/CoreValidation/Layer/Target/CA9/RTE/Device/ARMCA9/mmu_ARMCA9.c b/CMSIS/CoreValidation/Layer/Target/CA9/RTE/Device/ARMCA9/mmu_ARMCA9.c index 1435eb92b..5447e2e81 100644 --- a/CMSIS/CoreValidation/Layer/Target/CA9/RTE/Device/ARMCA9/mmu_ARMCA9.c +++ b/CMSIS/CoreValidation/Layer/Target/CA9/RTE/Device/ARMCA9/mmu_ARMCA9.c @@ -145,7 +145,7 @@ void MMU_CreateTranslationTable(void) MMU_TTSection (TTB_BASE, 0, 4096, DESCRIPTOR_FAULT); /* - * Generate descriptors. Refer to core_ca.h to get information about attributes + * Generate descriptors. Refer to armv7a.h to get information about attributes * */ //Create descriptors for Vectors, RO, RW, ZI sections diff --git a/CMSIS/CoreValidation/Project/avh.yml b/CMSIS/CoreValidation/Project/avh.yml index c958d09fa..5d1f011c4 100644 --- a/CMSIS/CoreValidation/Project/avh.yml +++ b/CMSIS/CoreValidation/Project/avh.yml @@ -7,7 +7,6 @@ backend: upload: - ARM.CMSIS.pdsc - CMSIS/Core/**/* - - CMSIS/Core_A/**/* - CMSIS/CoreValidation/**/* - -:CMSIS/CoreValidation/Project/Core_Validation-*.zip - -:CMSIS/CoreValidation/Project/Core_Validation-*.junit diff --git a/CMSIS/Core_A/Include/cmsis_armcc.h b/CMSIS/Core_A/Include/cmsis_armcc.h deleted file mode 100644 index 66a03f8de..000000000 --- a/CMSIS/Core_A/Include/cmsis_armcc.h +++ /dev/null @@ -1,605 +0,0 @@ -/**************************************************************************//** - * @file cmsis_armcc.h - * @brief CMSIS compiler ARMCC (Arm Compiler 5) header file - * @version V1.0.6 - * @date 13. November 2022 - ******************************************************************************/ -/* - * Copyright (c) 2009-2021 Arm Limited. All rights reserved. - * - * SPDX-License-Identifier: Apache-2.0 - * - * Licensed under the Apache License, Version 2.0 (the License); you may - * not use this file except in compliance with the License. 
- * You may obtain a copy of the License at - * - * www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an AS IS BASIS, WITHOUT - * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ - -#ifndef __CMSIS_ARMCC_H -#define __CMSIS_ARMCC_H - - -#if defined(__ARMCC_VERSION) && (__ARMCC_VERSION < 400677) - #error "Please use Arm Compiler Toolchain V4.0.677 or later!" -#endif - -/* CMSIS compiler control architecture macros */ -#if (defined (__TARGET_ARCH_7_A ) && (__TARGET_ARCH_7_A == 1)) - #define __ARM_ARCH_7A__ 1 -#endif - -/* CMSIS compiler specific defines */ -#ifndef __ASM - #define __ASM __asm -#endif -#ifndef __INLINE - #define __INLINE __inline -#endif -#ifndef __FORCEINLINE - #define __FORCEINLINE __forceinline -#endif -#ifndef __STATIC_INLINE - #define __STATIC_INLINE static __inline -#endif -#ifndef __STATIC_FORCEINLINE - #define __STATIC_FORCEINLINE static __forceinline -#endif -#ifndef __NO_RETURN - #define __NO_RETURN __declspec(noreturn) -#endif -#ifndef CMSIS_DEPRECATED - #define CMSIS_DEPRECATED __attribute__((deprecated)) -#endif -#ifndef __USED - #define __USED __attribute__((used)) -#endif -#ifndef __WEAK - #define __WEAK __attribute__((weak)) -#endif -#ifndef __PACKED - #define __PACKED __attribute__((packed)) -#endif -#ifndef __PACKED_STRUCT - #define __PACKED_STRUCT __packed struct -#endif -#ifndef __PACKED_UNION - #define __PACKED_UNION __packed union -#endif -#ifndef __UNALIGNED_UINT32 /* deprecated */ - #define __UNALIGNED_UINT32(x) (*((__packed uint32_t *)(x))) -#endif -#ifndef __UNALIGNED_UINT16_WRITE - #define __UNALIGNED_UINT16_WRITE(addr, val) ((*((__packed uint16_t *)(addr))) = (val)) -#endif -#ifndef __UNALIGNED_UINT16_READ - #define __UNALIGNED_UINT16_READ(addr) (*((const __packed uint16_t *)(addr))) -#endif -#ifndef __UNALIGNED_UINT32_WRITE - #define __UNALIGNED_UINT32_WRITE(addr, val) ((*((__packed uint32_t *)(addr))) = (val)) -#endif -#ifndef __UNALIGNED_UINT32_READ - #define __UNALIGNED_UINT32_READ(addr) (*((const __packed uint32_t *)(addr))) -#endif -#ifndef __ALIGNED - #define __ALIGNED(x) __attribute__((aligned(x))) -#endif -#ifndef __RESTRICT - #define __RESTRICT __restrict -#endif -#ifndef __COMPILER_BARRIER - #define __COMPILER_BARRIER() __memory_changed() -#endif - -/* ########################## Core Instruction Access ######################### */ -/** - \brief No Operation - \details No Operation does nothing. This instruction can be used for code alignment purposes. - */ -#define __NOP __nop - - -/** - \brief Wait For Interrupt - \details Wait For Interrupt is a hint instruction that suspends execution until one of a number of events occurs. - */ -#define __WFI __wfi - - -/** - \brief Wait For Event - \details Wait For Event is a hint instruction that permits the processor to enter - a low-power state until one of a number of events occurs. - */ -#define __WFE __wfe - - -/** - \brief Send Event - \details Send Event is a hint instruction. It causes an event to be signaled to the CPU. - */ -#define __SEV __sev - - -/** - \brief Instruction Synchronization Barrier - \details Instruction Synchronization Barrier flushes the pipeline in the processor, - so that all instructions following the ISB are fetched from cache or memory, - after the instruction has been completed. 
- */ -#define __ISB() __isb(0xF) - -/** - \brief Data Synchronization Barrier - \details Acts as a special kind of Data Memory Barrier. - It completes when all explicit memory accesses before this instruction complete. - */ -#define __DSB() __dsb(0xF) - -/** - \brief Data Memory Barrier - \details Ensures the apparent order of the explicit memory operations before - and after the instruction, without ensuring their completion. - */ -#define __DMB() __dmb(0xF) - - -/** - \brief Reverse byte order (32 bit) - \details Reverses the byte order in unsigned integer value. For example, 0x12345678 becomes 0x78563412. - \param [in] value Value to reverse - \return Reversed value - */ -#define __REV __rev - - -/** - \brief Reverse byte order (16 bit) - \details Reverses the byte order within each halfword of a word. For example, 0x12345678 becomes 0x34127856. - \param [in] value Value to reverse - \return Reversed value - */ -#ifndef __NO_EMBEDDED_ASM -__attribute__((section(".rev16_text"))) __STATIC_INLINE __ASM uint32_t __REV16(uint32_t value) -{ - rev16 r0, r0 - bx lr -} -#endif - - -/** - \brief Reverse byte order (16 bit) - \details Reverses the byte order in a 16-bit value and returns the signed 16-bit result. For example, 0x0080 becomes 0x8000. - \param [in] value Value to reverse - \return Reversed value - */ -#ifndef __NO_EMBEDDED_ASM -__attribute__((section(".revsh_text"))) __STATIC_INLINE __ASM int16_t __REVSH(int16_t value) -{ - revsh r0, r0 - bx lr -} -#endif - - -/** - \brief Rotate Right in unsigned value (32 bit) - \details Rotate Right (immediate) provides the value of the contents of a register rotated by a variable number of bits. - \param [in] op1 Value to rotate - \param [in] op2 Number of Bits to rotate - \return Rotated value - */ -#define __ROR __ror - - -/** - \brief Breakpoint - \details Causes the processor to enter Debug state. - Debug tools can use this to investigate system state when the instruction at a particular address is reached. - \param [in] value is ignored by the processor. - If required, a debugger can use it to store additional information about the breakpoint. - */ -#define __BKPT(value) __breakpoint(value) - - -/** - \brief Reverse bit order of value - \details Reverses the bit order of the given value. - \param [in] value Value to reverse - \return Reversed value - */ -#define __RBIT __rbit - -/** - \brief Count leading zeros - \details Counts the number of leading zeros of a data value. - \param [in] value Value to count the leading zeros - \return number of leading zeros in value - */ -#define __CLZ __clz - -/** - \brief LDR Exclusive (8 bit) - \details Executes a exclusive LDR instruction for 8 bit value. - \param [in] ptr Pointer to data - \return value of type uint8_t at (*ptr) - */ -#if defined(__ARMCC_VERSION) && (__ARMCC_VERSION < 5060020) - #define __LDREXB(ptr) ((uint8_t ) __ldrex(ptr)) -#else - #define __LDREXB(ptr) _Pragma("push") _Pragma("diag_suppress 3731") ((uint8_t ) __ldrex(ptr)) _Pragma("pop") -#endif - - -/** - \brief LDR Exclusive (16 bit) - \details Executes a exclusive LDR instruction for 16 bit values. - \param [in] ptr Pointer to data - \return value of type uint16_t at (*ptr) - */ -#if defined(__ARMCC_VERSION) && (__ARMCC_VERSION < 5060020) - #define __LDREXH(ptr) ((uint16_t) __ldrex(ptr)) -#else - #define __LDREXH(ptr) _Pragma("push") _Pragma("diag_suppress 3731") ((uint16_t) __ldrex(ptr)) _Pragma("pop") -#endif - - -/** - \brief LDR Exclusive (32 bit) - \details Executes a exclusive LDR instruction for 32 bit values. 
- \param [in] ptr Pointer to data - \return value of type uint32_t at (*ptr) - */ -#if defined(__ARMCC_VERSION) && (__ARMCC_VERSION < 5060020) - #define __LDREXW(ptr) ((uint32_t ) __ldrex(ptr)) -#else - #define __LDREXW(ptr) _Pragma("push") _Pragma("diag_suppress 3731") ((uint32_t ) __ldrex(ptr)) _Pragma("pop") -#endif - - -/** - \brief STR Exclusive (8 bit) - \details Executes a exclusive STR instruction for 8 bit values. - \param [in] value Value to store - \param [in] ptr Pointer to location - \return 0 Function succeeded - \return 1 Function failed - */ -#if defined(__ARMCC_VERSION) && (__ARMCC_VERSION < 5060020) - #define __STREXB(value, ptr) __strex(value, ptr) -#else - #define __STREXB(value, ptr) _Pragma("push") _Pragma("diag_suppress 3731") __strex(value, ptr) _Pragma("pop") -#endif - - -/** - \brief STR Exclusive (16 bit) - \details Executes a exclusive STR instruction for 16 bit values. - \param [in] value Value to store - \param [in] ptr Pointer to location - \return 0 Function succeeded - \return 1 Function failed - */ -#if defined(__ARMCC_VERSION) && (__ARMCC_VERSION < 5060020) - #define __STREXH(value, ptr) __strex(value, ptr) -#else - #define __STREXH(value, ptr) _Pragma("push") _Pragma("diag_suppress 3731") __strex(value, ptr) _Pragma("pop") -#endif - - -/** - \brief STR Exclusive (32 bit) - \details Executes a exclusive STR instruction for 32 bit values. - \param [in] value Value to store - \param [in] ptr Pointer to location - \return 0 Function succeeded - \return 1 Function failed - */ -#if defined(__ARMCC_VERSION) && (__ARMCC_VERSION < 5060020) - #define __STREXW(value, ptr) __strex(value, ptr) -#else - #define __STREXW(value, ptr) _Pragma("push") _Pragma("diag_suppress 3731") __strex(value, ptr) _Pragma("pop") -#endif - - -/** - \brief Remove the exclusive lock - \details Removes the exclusive lock which is created by LDREX. - */ -#define __CLREX __clrex - - -/** - \brief Signed Saturate - \details Saturates a signed value. - \param [in] value Value to be saturated - \param [in] sat Bit position to saturate to (1..32) - \return Saturated value - */ -#define __SSAT __ssat - - -/** - \brief Unsigned Saturate - \details Saturates an unsigned value. - \param [in] value Value to be saturated - \param [in] sat Bit position to saturate to (0..31) - \return Saturated value - */ -#define __USAT __usat - -/* ########################### Core Function Access ########################### */ - -/** - \brief Enable IRQ Interrupts - \details Enables IRQ interrupts by clearing special-purpose register PRIMASK. - Can only be executed in Privileged modes. - */ -/* intrinsic void __enable_irq(); */ - - -/** - \brief Disable IRQ Interrupts - \details Disables IRQ interrupts by setting special-purpose register PRIMASK. - Can only be executed in Privileged modes. - */ -/* intrinsic void __disable_irq(void); */ - -/** - \brief Enable FIQ - \details Enables FIQ interrupts by clearing the F-bit in the CPSR. - Can only be executed in Privileged modes. - */ -#define __enable_fault_irq __enable_fiq - -/** - \brief Disable FIQ - \details Disables FIQ interrupts by setting the F-bit in the CPSR. - Can only be executed in Privileged modes. 
- */ -#define __disable_fault_irq __disable_fiq - -/** - \brief Get FPSCR (Floating Point Status/Control) - \return Floating Point Status/Control register value - */ -__STATIC_INLINE uint32_t __get_FPSCR(void) -{ -#if ((defined (__FPU_PRESENT) && (__FPU_PRESENT == 1U)) && \ - (defined (__FPU_USED ) && (__FPU_USED == 1U)) ) - register uint32_t __regfpscr __ASM("fpscr"); - return(__regfpscr); -#else - return(0U); -#endif -} - -/** - \brief Set FPSCR (Floating Point Status/Control) - \param [in] fpscr Floating Point Status/Control value to set - */ -__STATIC_INLINE void __set_FPSCR(uint32_t fpscr) -{ -#if ((defined (__FPU_PRESENT) && (__FPU_PRESENT == 1U)) && \ - (defined (__FPU_USED ) && (__FPU_USED == 1U)) ) - register uint32_t __regfpscr __ASM("fpscr"); - __regfpscr = (fpscr); -#else - (void)fpscr; -#endif -} - -/** \brief Get CPSR (Current Program Status Register) - \return CPSR Register value - */ -__STATIC_INLINE uint32_t __get_CPSR(void) -{ - register uint32_t __regCPSR __ASM("cpsr"); - return(__regCPSR); -} - - -/** \brief Set CPSR (Current Program Status Register) - \param [in] cpsr CPSR value to set - */ -__STATIC_INLINE void __set_CPSR(uint32_t cpsr) -{ - register uint32_t __regCPSR __ASM("cpsr"); - __regCPSR = cpsr; -} - -/** \brief Get Mode - \return Processor Mode - */ -__STATIC_INLINE uint32_t __get_mode(void) -{ - return (__get_CPSR() & 0x1FU); -} - -/** \brief Set Mode - \param [in] mode Mode value to set - */ -__STATIC_INLINE __ASM void __set_mode(uint32_t mode) -{ - MOV r1, lr - MSR CPSR_C, r0 - BX r1 -} - -/** \brief Get Stack Pointer - \return Stack Pointer - */ -__STATIC_INLINE __ASM uint32_t __get_SP(void) -{ - MOV r0, sp - BX lr -} - -/** \brief Set Stack Pointer - \param [in] stack Stack Pointer value to set - */ -__STATIC_INLINE __ASM void __set_SP(uint32_t stack) -{ - MOV sp, r0 - BX lr -} - - -/** \brief Get USR/SYS Stack Pointer - \return USR/SYSStack Pointer - */ -__STATIC_INLINE __ASM uint32_t __get_SP_usr(void) -{ - ARM - PRESERVE8 - - MRS R1, CPSR - CPS #0x1F ;no effect in USR mode - MOV R0, SP - MSR CPSR_c, R1 ;no effect in USR mode - ISB - BX LR -} - -/** \brief Set USR/SYS Stack Pointer - \param [in] topOfProcStack USR/SYS Stack Pointer value to set - */ -__STATIC_INLINE __ASM void __set_SP_usr(uint32_t topOfProcStack) -{ - ARM - PRESERVE8 - - MRS R1, CPSR - CPS #0x1F ;no effect in USR mode - MOV SP, R0 - MSR CPSR_c, R1 ;no effect in USR mode - ISB - BX LR -} - -/** \brief Get FPEXC (Floating Point Exception Control Register) - \return Floating Point Exception Control Register value - */ -__STATIC_INLINE uint32_t __get_FPEXC(void) -{ -#if (__FPU_PRESENT == 1) - register uint32_t __regfpexc __ASM("fpexc"); - return(__regfpexc); -#else - return(0); -#endif -} - -/** \brief Set FPEXC (Floating Point Exception Control Register) - \param [in] fpexc Floating Point Exception Control value to set - */ -__STATIC_INLINE void __set_FPEXC(uint32_t fpexc) -{ -#if (__FPU_PRESENT == 1) - register uint32_t __regfpexc __ASM("fpexc"); - __regfpexc = (fpexc); -#endif -} - -/* - * Include common core functions to access Coprocessor 15 registers - */ - -#define __get_CP(cp, op1, Rt, CRn, CRm, op2) do { register volatile uint32_t tmp __ASM("cp" # cp ":" # op1 ":c" # CRn ":c" # CRm ":" # op2); (Rt) = tmp; } while(0) -#define __set_CP(cp, op1, Rt, CRn, CRm, op2) do { register volatile uint32_t tmp __ASM("cp" # cp ":" # op1 ":c" # CRn ":c" # CRm ":" # op2); tmp = (Rt); } while(0) -#define __get_CP64(cp, op1, Rt, CRm) \ - do { \ - uint32_t ltmp, htmp; \ - __ASM volatile("MRRC p" # 
cp ", " # op1 ", ltmp, htmp, c" # CRm); \ - (Rt) = ((((uint64_t)htmp) << 32U) | ((uint64_t)ltmp)); \ - } while(0) - -#define __set_CP64(cp, op1, Rt, CRm) \ - do { \ - const uint64_t tmp = (Rt); \ - const uint32_t ltmp = (uint32_t)(tmp); \ - const uint32_t htmp = (uint32_t)(tmp >> 32U); \ - __ASM volatile("MCRR p" # cp ", " # op1 ", ltmp, htmp, c" # CRm); \ - } while(0) - -#include "cmsis_cp15.h" - -/** \brief Enable Floating Point Unit - - Critical section, called from undef handler, so systick is disabled - */ -__STATIC_INLINE __ASM void __FPU_Enable(void) -{ - ARM - - //Permit access to VFP/NEON, registers by modifying CPACR - MRC p15,0,R1,c1,c0,2 - ORR R1,R1,#0x00F00000 - MCR p15,0,R1,c1,c0,2 - - //Ensure that subsequent instructions occur in the context of VFP/NEON access permitted - ISB - - //Enable VFP/NEON - VMRS R1,FPEXC - ORR R1,R1,#0x40000000 - VMSR FPEXC,R1 - - //Initialise VFP/NEON registers to 0 - MOV R2,#0 - - //Initialise D16 registers to 0 - VMOV D0, R2,R2 - VMOV D1, R2,R2 - VMOV D2, R2,R2 - VMOV D3, R2,R2 - VMOV D4, R2,R2 - VMOV D5, R2,R2 - VMOV D6, R2,R2 - VMOV D7, R2,R2 - VMOV D8, R2,R2 - VMOV D9, R2,R2 - VMOV D10,R2,R2 - VMOV D11,R2,R2 - VMOV D12,R2,R2 - VMOV D13,R2,R2 - VMOV D14,R2,R2 - VMOV D15,R2,R2 - - IF {TARGET_FEATURE_EXTENSION_REGISTER_COUNT} == 32 - //Initialise D32 registers to 0 - VMOV D16,R2,R2 - VMOV D17,R2,R2 - VMOV D18,R2,R2 - VMOV D19,R2,R2 - VMOV D20,R2,R2 - VMOV D21,R2,R2 - VMOV D22,R2,R2 - VMOV D23,R2,R2 - VMOV D24,R2,R2 - VMOV D25,R2,R2 - VMOV D26,R2,R2 - VMOV D27,R2,R2 - VMOV D28,R2,R2 - VMOV D29,R2,R2 - VMOV D30,R2,R2 - VMOV D31,R2,R2 - ENDIF - - //Initialise FPSCR to a known state - VMRS R1,FPSCR - LDR R2,=0x00086060 //Mask off all bits that do not have to be preserved. Non-preserved bits can/should be zero. - AND R1,R1,R2 - VMSR FPSCR,R1 - - BX LR -} - -#endif /* __CMSIS_ARMCC_H */ diff --git a/CMSIS/Core_A/Include/cmsis_armclang.h b/CMSIS/Core_A/Include/cmsis_armclang.h deleted file mode 100644 index 2d5be0f22..000000000 --- a/CMSIS/Core_A/Include/cmsis_armclang.h +++ /dev/null @@ -1,644 +0,0 @@ -/**************************************************************************//** - * @file cmsis_armclang.h - * @brief CMSIS compiler armclang (Arm Compiler 6) header file - * @version V1.2.2 - * @date 13. November 2022 - ******************************************************************************/ -/* - * Copyright (c) 2009-2021 Arm Limited. All rights reserved. - * - * SPDX-License-Identifier: Apache-2.0 - * - * Licensed under the Apache License, Version 2.0 (the License); you may - * not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an AS IS BASIS, WITHOUT - * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. 
- */ - -#ifndef __CMSIS_ARMCLANG_H -#define __CMSIS_ARMCLANG_H - -#pragma clang system_header /* treat file as system include file */ - -/* CMSIS compiler specific defines */ -#ifndef __ASM - #define __ASM __asm -#endif -#ifndef __INLINE - #define __INLINE __inline -#endif -#ifndef __FORCEINLINE - #define __FORCEINLINE __attribute__((always_inline)) -#endif -#ifndef __STATIC_INLINE - #define __STATIC_INLINE static __inline -#endif -#ifndef __STATIC_FORCEINLINE - #define __STATIC_FORCEINLINE __attribute__((always_inline)) static __inline -#endif -#ifndef __NO_RETURN - #define __NO_RETURN __attribute__((__noreturn__)) -#endif -#ifndef CMSIS_DEPRECATED - #define CMSIS_DEPRECATED __attribute__((deprecated)) -#endif -#ifndef __USED - #define __USED __attribute__((used)) -#endif -#ifndef __WEAK - #define __WEAK __attribute__((weak)) -#endif -#ifndef __PACKED - #define __PACKED __attribute__((packed, aligned(1))) -#endif -#ifndef __PACKED_STRUCT - #define __PACKED_STRUCT struct __attribute__((packed, aligned(1))) -#endif -#ifndef __UNALIGNED_UINT16_WRITE - #pragma clang diagnostic push - #pragma clang diagnostic ignored "-Wpacked" -/*lint -esym(9058, T_UINT16_WRITE)*/ /* disable MISRA 2012 Rule 2.4 for T_UINT16_WRITE */ - __PACKED_STRUCT T_UINT16_WRITE { uint16_t v; }; - #pragma clang diagnostic pop - #define __UNALIGNED_UINT16_WRITE(addr, val) (void)((((struct T_UINT16_WRITE *)(void *)(addr))->v) = (val)) -#endif -#ifndef __UNALIGNED_UINT16_READ - #pragma clang diagnostic push - #pragma clang diagnostic ignored "-Wpacked" -/*lint -esym(9058, T_UINT16_READ)*/ /* disable MISRA 2012 Rule 2.4 for T_UINT16_READ */ - __PACKED_STRUCT T_UINT16_READ { uint16_t v; }; - #pragma clang diagnostic pop - #define __UNALIGNED_UINT16_READ(addr) (((const struct T_UINT16_READ *)(const void *)(addr))->v) -#endif -#ifndef __UNALIGNED_UINT32_WRITE - #pragma clang diagnostic push - #pragma clang diagnostic ignored "-Wpacked" -/*lint -esym(9058, T_UINT32_WRITE)*/ /* disable MISRA 2012 Rule 2.4 for T_UINT32_WRITE */ - __PACKED_STRUCT T_UINT32_WRITE { uint32_t v; }; - #pragma clang diagnostic pop - #define __UNALIGNED_UINT32_WRITE(addr, val) (void)((((struct T_UINT32_WRITE *)(void *)(addr))->v) = (val)) -#endif -#ifndef __UNALIGNED_UINT32_READ - #pragma clang diagnostic push - #pragma clang diagnostic ignored "-Wpacked" - __PACKED_STRUCT T_UINT32_READ { uint32_t v; }; - #pragma clang diagnostic pop - #define __UNALIGNED_UINT32_READ(addr) (((const struct T_UINT32_READ *)(const void *)(addr))->v) -#endif -#ifndef __ALIGNED - #define __ALIGNED(x) __attribute__((aligned(x))) -#endif -#ifndef __PACKED - #define __PACKED __attribute__((packed)) -#endif -#ifndef __COMPILER_BARRIER - #define __COMPILER_BARRIER() __ASM volatile("":::"memory") -#endif - -/* ########################## Core Instruction Access ######################### */ -/** - \brief No Operation - \details No Operation does nothing. This instruction can be used for code alignment purposes. - */ -#define __NOP __builtin_arm_nop - -/** - \brief Wait For Interrupt - \details Wait For Interrupt is a hint instruction that suspends execution until one of a number of events occurs. - */ -#define __WFI __builtin_arm_wfi - - -/** - \brief Wait For Event - \details Wait For Event is a hint instruction that permits the processor to enter - a low-power state until one of a number of events occurs. - */ -#define __WFE __builtin_arm_wfe - - -/** - \brief Send Event - \details Send Event is a hint instruction. It causes an event to be signaled to the CPU. 
- */ -#define __SEV __builtin_arm_sev - - -/** - \brief Instruction Synchronization Barrier - \details Instruction Synchronization Barrier flushes the pipeline in the processor, - so that all instructions following the ISB are fetched from cache or memory, - after the instruction has been completed. - */ -#define __ISB() __builtin_arm_isb(0xF) - -/** - \brief Data Synchronization Barrier - \details Acts as a special kind of Data Memory Barrier. - It completes when all explicit memory accesses before this instruction complete. - */ -#define __DSB() __builtin_arm_dsb(0xF) - - -/** - \brief Data Memory Barrier - \details Ensures the apparent order of the explicit memory operations before - and after the instruction, without ensuring their completion. - */ -#define __DMB() __builtin_arm_dmb(0xF) - - -/** - \brief Reverse byte order (32 bit) - \details Reverses the byte order in unsigned integer value. For example, 0x12345678 becomes 0x78563412. - \param [in] value Value to reverse - \return Reversed value - */ -#define __REV(value) __builtin_bswap32(value) - - -/** - \brief Reverse byte order (16 bit) - \details Reverses the byte order within each halfword of a word. For example, 0x12345678 becomes 0x34127856. - \param [in] value Value to reverse - \return Reversed value - */ -#define __REV16(value) __ROR(__REV(value), 16) - - -/** - \brief Reverse byte order (16 bit) - \details Reverses the byte order in a 16-bit value and returns the signed 16-bit result. For example, 0x0080 becomes 0x8000. - \param [in] value Value to reverse - \return Reversed value - */ -#define __REVSH(value) (int16_t)__builtin_bswap16(value) - - -/** - \brief Rotate Right in unsigned value (32 bit) - \details Rotate Right (immediate) provides the value of the contents of a register rotated by a variable number of bits. - \param [in] op1 Value to rotate - \param [in] op2 Number of Bits to rotate - \return Rotated value - */ -__STATIC_FORCEINLINE uint32_t __ROR(uint32_t op1, uint32_t op2) -{ - op2 %= 32U; - if (op2 == 0U) - { - return op1; - } - return (op1 >> op2) | (op1 << (32U - op2)); -} - - -/** - \brief Breakpoint - \details Causes the processor to enter Debug state. - Debug tools can use this to investigate system state when the instruction at a particular address is reached. - \param [in] value is ignored by the processor. - If required, a debugger can use it to store additional information about the breakpoint. - */ -#define __BKPT(value) __ASM volatile ("bkpt "#value) - - -/** - \brief Reverse bit order of value - \details Reverses the bit order of the given value. - \param [in] value Value to reverse - \return Reversed value - */ -#define __RBIT __builtin_arm_rbit - -/** - \brief Count leading zeros - \details Counts the number of leading zeros of a data value. - \param [in] value Value to count the leading zeros - \return number of leading zeros in value - */ -__STATIC_FORCEINLINE uint8_t __CLZ(uint32_t value) -{ - /* Even though __builtin_clz produces a CLZ instruction on ARM, formally - __builtin_clz(0) is undefined behaviour, so handle this case specially. - This guarantees ARM-compatible results if happening to compile on a non-ARM - target, and ensures the compiler doesn't decide to activate any - optimisations using the logic "value was passed to __builtin_clz, so it - is non-zero". - ARM Compiler 6.10 and possibly earlier will optimise this test away, leaving a - single CLZ instruction. 
- */ - if (value == 0U) - { - return 32U; - } - return __builtin_clz(value); -} - -/** - \brief LDR Exclusive (8 bit) - \details Executes a exclusive LDR instruction for 8 bit value. - \param [in] ptr Pointer to data - \return value of type uint8_t at (*ptr) - */ -#define __LDREXB (uint8_t)__builtin_arm_ldrex - - -/** - \brief LDR Exclusive (16 bit) - \details Executes a exclusive LDR instruction for 16 bit values. - \param [in] ptr Pointer to data - \return value of type uint16_t at (*ptr) - */ -#define __LDREXH (uint16_t)__builtin_arm_ldrex - - -/** - \brief LDR Exclusive (32 bit) - \details Executes a exclusive LDR instruction for 32 bit values. - \param [in] ptr Pointer to data - \return value of type uint32_t at (*ptr) - */ -#define __LDREXW (uint32_t)__builtin_arm_ldrex - - -/** - \brief STR Exclusive (8 bit) - \details Executes a exclusive STR instruction for 8 bit values. - \param [in] value Value to store - \param [in] ptr Pointer to location - \return 0 Function succeeded - \return 1 Function failed - */ -#define __STREXB (uint32_t)__builtin_arm_strex - - -/** - \brief STR Exclusive (16 bit) - \details Executes a exclusive STR instruction for 16 bit values. - \param [in] value Value to store - \param [in] ptr Pointer to location - \return 0 Function succeeded - \return 1 Function failed - */ -#define __STREXH (uint32_t)__builtin_arm_strex - - -/** - \brief STR Exclusive (32 bit) - \details Executes a exclusive STR instruction for 32 bit values. - \param [in] value Value to store - \param [in] ptr Pointer to location - \return 0 Function succeeded - \return 1 Function failed - */ -#define __STREXW (uint32_t)__builtin_arm_strex - - -/** - \brief Remove the exclusive lock - \details Removes the exclusive lock which is created by LDREX. - */ -#define __CLREX __builtin_arm_clrex - -/** - \brief Signed Saturate - \details Saturates a signed value. - \param [in] value Value to be saturated - \param [in] sat Bit position to saturate to (1..32) - \return Saturated value - */ -#define __SSAT __builtin_arm_ssat - - -/** - \brief Unsigned Saturate - \details Saturates an unsigned value. 
- \param [in] value Value to be saturated - \param [in] sat Bit position to saturate to (0..31) - \return Saturated value - */ -#define __USAT __builtin_arm_usat - -/* ################### Compiler specific Intrinsics ########################### */ -/** \defgroup CMSIS_SIMD_intrinsics CMSIS SIMD Intrinsics - Access to dedicated SIMD instructions - @{ -*/ - -#if (defined (__ARM_FEATURE_DSP) && (__ARM_FEATURE_DSP == 1)) - -#define __SADD8 __builtin_arm_sadd8 -#define __SADD16 __builtin_arm_sadd16 -#define __QADD8 __builtin_arm_qadd8 -#define __QSUB8 __builtin_arm_qsub8 -#define __QADD16 __builtin_arm_qadd16 -#define __SHADD16 __builtin_arm_shadd16 -#define __QSUB16 __builtin_arm_qsub16 -#define __SHSUB16 __builtin_arm_shsub16 -#define __QASX __builtin_arm_qasx -#define __SHASX __builtin_arm_shasx -#define __QSAX __builtin_arm_qsax -#define __SHSAX __builtin_arm_shsax -#define __SXTB16 __builtin_arm_sxtb16 -#define __SMUAD __builtin_arm_smuad -#define __SMUADX __builtin_arm_smuadx -#define __SMLAD __builtin_arm_smlad -#define __SMLADX __builtin_arm_smladx -#define __SMLALD __builtin_arm_smlald -#define __SMLALDX __builtin_arm_smlaldx -#define __SMUSD __builtin_arm_smusd -#define __SMUSDX __builtin_arm_smusdx -#define __SMLSDX __builtin_arm_smlsdx -#define __USAT16 __builtin_arm_usat16 -#define __SSUB8 __builtin_arm_ssub8 -#define __SXTB16 __builtin_arm_sxtb16 -#define __SXTAB16 __builtin_arm_sxtab16 - - -__STATIC_FORCEINLINE int32_t __QADD( int32_t op1, int32_t op2) -{ - int32_t result; - - __ASM volatile ("qadd %0, %1, %2" : "=r" (result) : "r" (op1), "r" (op2) ); - return(result); -} - -__STATIC_FORCEINLINE int32_t __QSUB( int32_t op1, int32_t op2) -{ - int32_t result; - - __ASM volatile ("qsub %0, %1, %2" : "=r" (result) : "r" (op1), "r" (op2) ); - return(result); -} - -#define __PKHBT(ARG1,ARG2,ARG3) ( ((((uint32_t)(ARG1)) ) & 0x0000FFFFUL) | \ - ((((uint32_t)(ARG2)) << (ARG3)) & 0xFFFF0000UL) ) - -#define __PKHTB(ARG1,ARG2,ARG3) ( ((((uint32_t)(ARG1)) ) & 0xFFFF0000UL) | \ - ((((uint32_t)(ARG2)) >> (ARG3)) & 0x0000FFFFUL) ) - -__STATIC_FORCEINLINE int32_t __SMMLA (int32_t op1, int32_t op2, int32_t op3) -{ - int32_t result; - - __ASM volatile ("smmla %0, %1, %2, %3" : "=r" (result): "r" (op1), "r" (op2), "r" (op3) ); - return(result); -} - -#endif /* (__ARM_FEATURE_DSP == 1) */ - -/* ########################### Core Function Access ########################### */ - -/** - \brief Enable IRQ Interrupts - \details Enables IRQ interrupts by clearing the I-bit in the CPSR. - Can only be executed in Privileged modes. - */ -__STATIC_FORCEINLINE void __enable_irq(void) -{ - __ASM volatile ("cpsie i" : : : "memory"); -} - -/** - \brief Disable IRQ Interrupts - \details Disables IRQ interrupts by setting the I-bit in the CPSR. - Can only be executed in Privileged modes. - */ -__STATIC_FORCEINLINE void __disable_irq(void) -{ - __ASM volatile ("cpsid i" : : : "memory"); -} - -/** - \brief Enable FIQ - \details Enables FIQ interrupts by clearing special-purpose register FAULTMASK. - Can only be executed in Privileged modes. - */ -__STATIC_FORCEINLINE void __enable_fault_irq(void) -{ - __ASM volatile ("cpsie f" : : : "memory"); -} - - -/** - \brief Disable FIQ - \details Disables FIQ interrupts by setting special-purpose register FAULTMASK. - Can only be executed in Privileged modes. - */ -__STATIC_FORCEINLINE void __disable_fault_irq(void) -{ - __ASM volatile ("cpsid f" : : : "memory"); -} - -/** - \brief Get FPSCR - \details Returns the current value of the Floating Point Status/Control register. 
- \return Floating Point Status/Control register value - */ -#define __get_FPSCR __builtin_arm_get_fpscr - -/** - \brief Set FPSCR - \details Assigns the given value to the Floating Point Status/Control register. - \param [in] fpscr Floating Point Status/Control value to set - */ -#define __set_FPSCR __builtin_arm_set_fpscr - -/** \brief Get CPSR Register - \return CPSR Register value - */ -__STATIC_FORCEINLINE uint32_t __get_CPSR(void) -{ - uint32_t result; - __ASM volatile("MRS %0, cpsr" : "=r" (result) ); - return(result); -} - -/** \brief Set CPSR Register - \param [in] cpsr CPSR value to set - */ -__STATIC_FORCEINLINE void __set_CPSR(uint32_t cpsr) -{ - __ASM volatile ("MSR cpsr, %0" : : "r" (cpsr) : "cc", "memory"); -} - -/** \brief Get Mode - \return Processor Mode - */ -__STATIC_FORCEINLINE uint32_t __get_mode(void) -{ - return (__get_CPSR() & 0x1FU); -} - -/** \brief Set Mode - \param [in] mode Mode value to set - */ -__STATIC_FORCEINLINE void __set_mode(uint32_t mode) -{ - __ASM volatile("MSR cpsr_c, %0" : : "r" (mode) : "memory"); -} - -/** \brief Get Stack Pointer - \return Stack Pointer value - */ -__STATIC_FORCEINLINE uint32_t __get_SP(void) -{ - uint32_t result; - __ASM volatile("MOV %0, sp" : "=r" (result) : : "memory"); - return result; -} - -/** \brief Set Stack Pointer - \param [in] stack Stack Pointer value to set - */ -__STATIC_FORCEINLINE void __set_SP(uint32_t stack) -{ - __ASM volatile("MOV sp, %0" : : "r" (stack) : "memory"); -} - -/** \brief Get USR/SYS Stack Pointer - \return USR/SYS Stack Pointer value - */ -__STATIC_FORCEINLINE uint32_t __get_SP_usr(void) -{ - uint32_t cpsr; - uint32_t result; - __ASM volatile( - "MRS %0, cpsr \n" - "CPS #0x1F \n" // no effect in USR mode - "MOV %1, sp \n" - "MSR cpsr_c, %0 \n" // no effect in USR mode - "ISB" : "=r"(cpsr), "=r"(result) : : "memory" - ); - return result; -} - -/** \brief Set USR/SYS Stack Pointer - \param [in] topOfProcStack USR/SYS Stack Pointer value to set - */ -__STATIC_FORCEINLINE void __set_SP_usr(uint32_t topOfProcStack) -{ - uint32_t cpsr; - __ASM volatile( - "MRS %0, cpsr \n" - "CPS #0x1F \n" // no effect in USR mode - "MOV sp, %1 \n" - "MSR cpsr_c, %0 \n" // no effect in USR mode - "ISB" : "=r"(cpsr) : "r" (topOfProcStack) : "memory" - ); -} - -/** \brief Get FPEXC - \return Floating Point Exception Control register value - */ -__STATIC_FORCEINLINE uint32_t __get_FPEXC(void) -{ -#if (__FPU_PRESENT == 1) - uint32_t result; - __ASM volatile("VMRS %0, fpexc" : "=r" (result) : : "memory"); - return(result); -#else - return(0); -#endif -} - -/** \brief Set FPEXC - \param [in] fpexc Floating Point Exception Control value to set - */ -__STATIC_FORCEINLINE void __set_FPEXC(uint32_t fpexc) -{ -#if (__FPU_PRESENT == 1) - __ASM volatile ("VMSR fpexc, %0" : : "r" (fpexc) : "memory"); -#endif -} - -/* - * Include common core functions to access Coprocessor 15 registers - */ - -#define __get_CP(cp, op1, Rt, CRn, CRm, op2) __ASM volatile("MRC p" # cp ", " # op1 ", %0, c" # CRn ", c" # CRm ", " # op2 : "=r" (Rt) : : "memory" ) -#define __set_CP(cp, op1, Rt, CRn, CRm, op2) __ASM volatile("MCR p" # cp ", " # op1 ", %0, c" # CRn ", c" # CRm ", " # op2 : : "r" (Rt) : "memory" ) -#define __get_CP64(cp, op1, Rt, CRm) __ASM volatile("MRRC p" # cp ", " # op1 ", %Q0, %R0, c" # CRm : "=r" (Rt) : : "memory" ) -#define __set_CP64(cp, op1, Rt, CRm) __ASM volatile("MCRR p" # cp ", " # op1 ", %Q0, %R0, c" # CRm : : "r" (Rt) : "memory" ) - -#include "cmsis_cp15.h" - -/** \brief Enable Floating Point Unit - - Critical section, called 
from undef handler, so systick is disabled - */ -__STATIC_INLINE void __FPU_Enable(void) -{ - __ASM volatile( - //Permit access to VFP/NEON, registers by modifying CPACR - " MRC p15,0,R1,c1,c0,2 \n" - " ORR R1,R1,#0x00F00000 \n" - " MCR p15,0,R1,c1,c0,2 \n" - - //Ensure that subsequent instructions occur in the context of VFP/NEON access permitted - " ISB \n" - - //Enable VFP/NEON - " VMRS R1,FPEXC \n" - " ORR R1,R1,#0x40000000 \n" - " VMSR FPEXC,R1 \n" - - //Initialise VFP/NEON registers to 0 - " MOV R2,#0 \n" - - //Initialise D16 registers to 0 - " VMOV D0, R2,R2 \n" - " VMOV D1, R2,R2 \n" - " VMOV D2, R2,R2 \n" - " VMOV D3, R2,R2 \n" - " VMOV D4, R2,R2 \n" - " VMOV D5, R2,R2 \n" - " VMOV D6, R2,R2 \n" - " VMOV D7, R2,R2 \n" - " VMOV D8, R2,R2 \n" - " VMOV D9, R2,R2 \n" - " VMOV D10,R2,R2 \n" - " VMOV D11,R2,R2 \n" - " VMOV D12,R2,R2 \n" - " VMOV D13,R2,R2 \n" - " VMOV D14,R2,R2 \n" - " VMOV D15,R2,R2 \n" - -#if (defined(__ARM_NEON) && (__ARM_NEON == 1)) - //Initialise D32 registers to 0 - " VMOV D16,R2,R2 \n" - " VMOV D17,R2,R2 \n" - " VMOV D18,R2,R2 \n" - " VMOV D19,R2,R2 \n" - " VMOV D20,R2,R2 \n" - " VMOV D21,R2,R2 \n" - " VMOV D22,R2,R2 \n" - " VMOV D23,R2,R2 \n" - " VMOV D24,R2,R2 \n" - " VMOV D25,R2,R2 \n" - " VMOV D26,R2,R2 \n" - " VMOV D27,R2,R2 \n" - " VMOV D28,R2,R2 \n" - " VMOV D29,R2,R2 \n" - " VMOV D30,R2,R2 \n" - " VMOV D31,R2,R2 \n" -#endif - - //Initialise FPSCR to a known state - " VMRS R1,FPSCR \n" - " LDR R2,=0x00086060 \n" //Mask off all bits that do not have to be preserved. Non-preserved bits can/should be zero. - " AND R1,R1,R2 \n" - " VMSR FPSCR,R1 " - : : : "cc", "r1", "r2" - ); -} - -#endif /* __CMSIS_ARMCLANG_H */ diff --git a/CMSIS/Core_A/Include/cmsis_compiler.h b/CMSIS/Core_A/Include/cmsis_compiler.h deleted file mode 100644 index 3adf602ae..000000000 --- a/CMSIS/Core_A/Include/cmsis_compiler.h +++ /dev/null @@ -1,245 +0,0 @@ -/**************************************************************************//** - * @file cmsis_compiler.h - * @brief CMSIS compiler specific macros, functions, instructions - * @version V1.0.3 - * @date 13. November 2022 - ******************************************************************************/ -/* - * Copyright (c) 2009-2018 Arm Limited. All rights reserved. - * - * SPDX-License-Identifier: Apache-2.0 - * - * Licensed under the Apache License, Version 2.0 (the License); you may - * not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an AS IS BASIS, WITHOUT - * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. 
- */ - -#ifndef __CMSIS_COMPILER_H -#define __CMSIS_COMPILER_H - -#include - -/* - * Arm Compiler 4/5 - */ -#if defined ( __CC_ARM ) - #include "cmsis_armcc.h" - - -/* - * Arm Compiler 6.6 LTM (armclang) - */ -#elif defined (__ARMCC_VERSION) && (__ARMCC_VERSION >= 6010050) && (__ARMCC_VERSION < 6100100) - #include "cmsis_armclang_ltm.h" - - /* - * Arm Compiler above 6.10.1 (armclang) - */ -#elif defined (__ARMCC_VERSION) && (__ARMCC_VERSION >= 6100100) - #include "cmsis_armclang.h" - - -/* - * GNU Compiler - */ -#elif defined ( __GNUC__ ) - #include "cmsis_gcc.h" - - -/* - * IAR Compiler - */ -#elif defined ( __ICCARM__ ) - #include - - -/* - * TI Arm Compiler - */ -#elif defined ( __TI_ARM__ ) - #include - - #ifndef __ASM - #define __ASM __asm - #endif - #ifndef __INLINE - #define __INLINE inline - #endif - #ifndef __STATIC_INLINE - #define __STATIC_INLINE static inline - #endif - #ifndef __STATIC_FORCEINLINE - #define __STATIC_FORCEINLINE __STATIC_INLINE - #endif - #ifndef __NO_RETURN - #define __NO_RETURN __attribute__((noreturn)) - #endif - #ifndef CMSIS_DEPRECATED - #define CMSIS_DEPRECATED __attribute__((deprecated)) - #endif - #ifndef __USED - #define __USED __attribute__((used)) - #endif - #ifndef __WEAK - #define __WEAK __attribute__((weak)) - #endif - #ifndef __PACKED - #define __PACKED __attribute__((packed)) - #endif - #ifndef __PACKED_STRUCT - #define __PACKED_STRUCT struct __attribute__((packed)) - #endif - #ifndef __PACKED_UNION - #define __PACKED_UNION union __attribute__((packed)) - #endif - #ifndef __UNALIGNED_UINT32 /* deprecated */ - struct __attribute__((packed)) T_UINT32 { uint32_t v; }; - #define __UNALIGNED_UINT32(x) (((struct T_UINT32 *)(x))->v) - #endif - #ifndef __ALIGNED - #define __ALIGNED(x) __attribute__((aligned(x))) - #endif - #ifndef __RESTRICT - #define __RESTRICT __restrict - #endif - #ifndef __COMPILER_BARRIER - #warning No compiler specific solution for __COMPILER_BARRIER. __COMPILER_BARRIER is ignored. - #define __COMPILER_BARRIER() (void)0 - #endif - - -/* - * TASKING Compiler - */ -#elif defined ( __TASKING__ ) - /* - * The CMSIS functions have been implemented as intrinsics in the compiler. - * Please use "carm -?i" to get an up to date list of all intrinsics, - * Including the CMSIS ones. - */ - - #ifndef __ASM - #define __ASM __asm - #endif - #ifndef __INLINE - #define __INLINE inline - #endif - #ifndef __STATIC_INLINE - #define __STATIC_INLINE static inline - #endif - #ifndef __STATIC_FORCEINLINE - #define __STATIC_FORCEINLINE __STATIC_INLINE - #endif - #ifndef __NO_RETURN - #define __NO_RETURN __attribute__((noreturn)) - #endif - #ifndef CMSIS_DEPRECATED - #define CMSIS_DEPRECATED __attribute__((deprecated)) - #endif - #ifndef __USED - #define __USED __attribute__((used)) - #endif - #ifndef __WEAK - #define __WEAK __attribute__((weak)) - #endif - #ifndef __PACKED - #define __PACKED __packed__ - #endif - #ifndef __PACKED_STRUCT - #define __PACKED_STRUCT struct __packed__ - #endif - #ifndef __PACKED_UNION - #define __PACKED_UNION union __packed__ - #endif - #ifndef __UNALIGNED_UINT32 /* deprecated */ - struct __packed__ T_UINT32 { uint32_t v; }; - #define __UNALIGNED_UINT32(x) (((struct T_UINT32 *)(x))->v) - #endif - #ifndef __ALIGNED - #define __ALIGNED(x) __align(x) - #endif - #ifndef __RESTRICT - #warning No compiler specific solution for __RESTRICT. __RESTRICT is ignored. - #define __RESTRICT - #endif - #ifndef __COMPILER_BARRIER - #warning No compiler specific solution for __COMPILER_BARRIER. __COMPILER_BARRIER is ignored. 
- #define __COMPILER_BARRIER() (void)0 - #endif - - -/* - * COSMIC Compiler - */ -#elif defined ( __CSMC__ ) - #include - - #ifndef __ASM - #define __ASM _asm - #endif - #ifndef __INLINE - #define __INLINE inline - #endif - #ifndef __STATIC_INLINE - #define __STATIC_INLINE static inline - #endif - #ifndef __STATIC_FORCEINLINE - #define __STATIC_FORCEINLINE __STATIC_INLINE - #endif - #ifndef __NO_RETURN - // NO RETURN is automatically detected hence no warning here - #define __NO_RETURN - #endif - #ifndef __USED - #warning No compiler specific solution for __USED. __USED is ignored. - #define __USED - #endif - #ifndef CMSIS_DEPRECATED - #warning No compiler specific solution for CMSIS_DEPRECATED. CMSIS_DEPRECATED is ignored. - #define CMSIS_DEPRECATED - #endif - #ifndef __WEAK - #define __WEAK __weak - #endif - #ifndef __PACKED - #define __PACKED @packed - #endif - #ifndef __PACKED_STRUCT - #define __PACKED_STRUCT @packed struct - #endif - #ifndef __PACKED_UNION - #define __PACKED_UNION @packed union - #endif - #ifndef __UNALIGNED_UINT32 /* deprecated */ - @packed struct T_UINT32 { uint32_t v; }; - #define __UNALIGNED_UINT32(x) (((struct T_UINT32 *)(x))->v) - #endif - #ifndef __ALIGNED - #warning No compiler specific solution for __ALIGNED. __ALIGNED is ignored. - #define __ALIGNED(x) - #endif - #ifndef __RESTRICT - #warning No compiler specific solution for __RESTRICT. __RESTRICT is ignored. - #define __RESTRICT - #endif - #ifndef __COMPILER_BARRIER - #warning No compiler specific solution for __COMPILER_BARRIER. __COMPILER_BARRIER is ignored. - #define __COMPILER_BARRIER() (void)0 - #endif - - -#else - #error Unknown compiler. -#endif - - -#endif /* __CMSIS_COMPILER_H */ - diff --git a/CMSIS/Core_A/Include/cmsis_gcc.h b/CMSIS/Core_A/Include/cmsis_gcc.h deleted file mode 100644 index 1daa85324..000000000 --- a/CMSIS/Core_A/Include/cmsis_gcc.h +++ /dev/null @@ -1,960 +0,0 @@ -/**************************************************************************//** - * @file cmsis_gcc.h - * @brief CMSIS compiler GCC header file - * @version V1.3.3 - * @date 13. November 2022 - ******************************************************************************/ -/* - * Copyright (c) 2009-2022 Arm Limited. All rights reserved. - * - * SPDX-License-Identifier: Apache-2.0 - * - * Licensed under the Apache License, Version 2.0 (the License); you may - * not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an AS IS BASIS, WITHOUT - * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. 
- */ - -#ifndef __CMSIS_GCC_H -#define __CMSIS_GCC_H - -/* ignore some GCC warnings */ -#pragma GCC diagnostic push -#pragma GCC diagnostic ignored "-Wsign-conversion" -#pragma GCC diagnostic ignored "-Wconversion" -#pragma GCC diagnostic ignored "-Wunused-parameter" - -/* Fallback for __has_builtin */ -#ifndef __has_builtin - #define __has_builtin(x) (0) -#endif - -/* CMSIS compiler specific defines */ -#ifndef __ASM - #define __ASM __asm -#endif -#ifndef __INLINE - #define __INLINE inline -#endif -#ifndef __FORCEINLINE - #define __FORCEINLINE __attribute__((always_inline)) -#endif -#ifndef __STATIC_INLINE - #define __STATIC_INLINE static inline -#endif -#ifndef __STATIC_FORCEINLINE - #define __STATIC_FORCEINLINE __attribute__((always_inline)) static inline -#endif -#ifndef __NO_RETURN - #define __NO_RETURN __attribute__((__noreturn__)) -#endif -#ifndef CMSIS_DEPRECATED - #define CMSIS_DEPRECATED __attribute__((deprecated)) -#endif -#ifndef __USED - #define __USED __attribute__((used)) -#endif -#ifndef __WEAK - #define __WEAK __attribute__((weak)) -#endif -#ifndef __PACKED - #define __PACKED __attribute__((packed, aligned(1))) -#endif -#ifndef __PACKED_STRUCT - #define __PACKED_STRUCT struct __attribute__((packed, aligned(1))) -#endif -#ifndef __UNALIGNED_UINT16_WRITE - #pragma GCC diagnostic push - #pragma GCC diagnostic ignored "-Wpacked" - #pragma GCC diagnostic ignored "-Wattributes" - __PACKED_STRUCT T_UINT16_WRITE { uint16_t v; }; - #pragma GCC diagnostic pop - #define __UNALIGNED_UINT16_WRITE(addr, val) (void)((((struct T_UINT16_WRITE *)(void *)(addr))->v) = (val)) -#endif -#ifndef __UNALIGNED_UINT16_READ - #pragma GCC diagnostic push - #pragma GCC diagnostic ignored "-Wpacked" - #pragma GCC diagnostic ignored "-Wattributes" - __PACKED_STRUCT T_UINT16_READ { uint16_t v; }; - #pragma GCC diagnostic pop - #define __UNALIGNED_UINT16_READ(addr) (((const struct T_UINT16_READ *)(const void *)(addr))->v) -#endif -#ifndef __UNALIGNED_UINT32_WRITE - #pragma GCC diagnostic push - #pragma GCC diagnostic ignored "-Wpacked" - #pragma GCC diagnostic ignored "-Wattributes" - __PACKED_STRUCT T_UINT32_WRITE { uint32_t v; }; - #pragma GCC diagnostic pop - #define __UNALIGNED_UINT32_WRITE(addr, val) (void)((((struct T_UINT32_WRITE *)(void *)(addr))->v) = (val)) -#endif -#ifndef __UNALIGNED_UINT32_READ - #pragma GCC diagnostic push - #pragma GCC diagnostic ignored "-Wpacked" - #pragma GCC diagnostic ignored "-Wattributes" - __PACKED_STRUCT T_UINT32_READ { uint32_t v; }; - #pragma GCC diagnostic pop - #define __UNALIGNED_UINT32_READ(addr) (((const struct T_UINT32_READ *)(const void *)(addr))->v) -#endif -#ifndef __ALIGNED - #define __ALIGNED(x) __attribute__((aligned(x))) -#endif -#ifndef __RESTRICT - #define __RESTRICT __restrict -#endif -#ifndef __COMPILER_BARRIER - #define __COMPILER_BARRIER() __ASM volatile("":::"memory") -#endif - - -/* ########################## Core Instruction Access ######################### */ -/** - \brief No Operation - \details No Operation does nothing. This instruction can be used for code alignment purposes. - */ -#define __NOP() __ASM volatile ("nop") - -/** - \brief Wait For Interrupt - \details Wait For Interrupt is a hint instruction that suspends execution until one of a number of events occurs. - */ -#define __WFI() __ASM volatile ("wfi":::"memory") - - -/** - \brief Wait For Event - \details Wait For Event is a hint instruction that permits the processor to enter - a low-power state until one of a number of events occurs. 
- */ -#define __WFE() __ASM volatile ("wfe":::"memory") - - -/** - \brief Send Event - \details Send Event is a hint instruction. It causes an event to be signaled to the CPU. - */ -#define __SEV() __ASM volatile ("sev") - - -/** - \brief Instruction Synchronization Barrier - \details Instruction Synchronization Barrier flushes the pipeline in the processor, - so that all instructions following the ISB are fetched from cache or memory, - after the instruction has been completed. - */ -__STATIC_FORCEINLINE void __ISB(void) -{ - __ASM volatile ("isb 0xF":::"memory"); -} - - -/** - \brief Data Synchronization Barrier - \details Acts as a special kind of Data Memory Barrier. - It completes when all explicit memory accesses before this instruction complete. - */ -__STATIC_FORCEINLINE void __DSB(void) -{ - __ASM volatile ("dsb 0xF":::"memory"); -} - - -/** - \brief Data Memory Barrier - \details Ensures the apparent order of the explicit memory operations before - and after the instruction, without ensuring their completion. - */ -__STATIC_FORCEINLINE void __DMB(void) -{ - __ASM volatile ("dmb 0xF":::"memory"); -} - - -/** - \brief Reverse byte order (32 bit) - \details Reverses the byte order in unsigned integer value. For example, 0x12345678 becomes 0x78563412. - \param [in] value Value to reverse - \return Reversed value - */ -__STATIC_FORCEINLINE uint32_t __REV(uint32_t value) -{ -#if (__GNUC__ > 4) || (__GNUC__ == 4 && __GNUC_MINOR__ >= 5) - return __builtin_bswap32(value); -#else - uint32_t result; - - __ASM ("rev %0, %1" : "=r" (result) : "r" (value) ); - return result; -#endif -} - - -/** - \brief Reverse byte order (16 bit) - \details Reverses the byte order within each halfword of a word. For example, 0x12345678 becomes 0x34127856. - \param [in] value Value to reverse - \return Reversed value - */ -__STATIC_FORCEINLINE uint32_t __REV16(uint32_t value) -{ - uint32_t result; - __ASM ("rev16 %0, %1" : "=r" (result) : "r" (value)); - return result; -} - - -/** - \brief Reverse byte order (16 bit) - \details Reverses the byte order in a 16-bit value and returns the signed 16-bit result. For example, 0x0080 becomes 0x8000. - \param [in] value Value to reverse - \return Reversed value - */ -__STATIC_FORCEINLINE int16_t __REVSH(int16_t value) -{ -#if (__GNUC__ > 4) || (__GNUC__ == 4 && __GNUC_MINOR__ >= 8) - return (int16_t)__builtin_bswap16(value); -#else - int16_t result; - - __ASM ("revsh %0, %1" : "=r" (result) : "r" (value) ); - return result; -#endif -} - - -/** - \brief Rotate Right in unsigned value (32 bit) - \details Rotate Right (immediate) provides the value of the contents of a register rotated by a variable number of bits. - \param [in] op1 Value to rotate - \param [in] op2 Number of Bits to rotate - \return Rotated value - */ -__STATIC_FORCEINLINE uint32_t __ROR(uint32_t op1, uint32_t op2) -{ - op2 %= 32U; - if (op2 == 0U) - { - return op1; - } - return (op1 >> op2) | (op1 << (32U - op2)); -} - - -/** - \brief Breakpoint - \details Causes the processor to enter Debug state. - Debug tools can use this to investigate system state when the instruction at a particular address is reached. - \param [in] value is ignored by the processor. - If required, a debugger can use it to store additional information about the breakpoint. - */ -#define __BKPT(value) __ASM volatile ("bkpt "#value) - - -/** - \brief Reverse bit order of value - \details Reverses the bit order of the given value. 
- \param [in] value Value to reverse - \return Reversed value - */ -__STATIC_FORCEINLINE uint32_t __RBIT(uint32_t value) -{ - uint32_t result; - __ASM ("rbit %0, %1" : "=r" (result) : "r" (value) ); - return result; -} - - -/** - \brief Count leading zeros - \details Counts the number of leading zeros of a data value. - \param [in] value Value to count the leading zeros - \return number of leading zeros in value - */ -__STATIC_FORCEINLINE uint8_t __CLZ(uint32_t value) -{ - /* Even though __builtin_clz produces a CLZ instruction on ARM, formally - __builtin_clz(0) is undefined behaviour, so handle this case specially. - This guarantees ARM-compatible results if happening to compile on a non-ARM - target, and ensures the compiler doesn't decide to activate any - optimisations using the logic "value was passed to __builtin_clz, so it - is non-zero". - ARM GCC 7.3 and possibly earlier will optimise this test away, leaving a - single CLZ instruction. - */ - if (value == 0U) - { - return 32U; - } - return __builtin_clz(value); -} - -/** - \brief LDR Exclusive (8 bit) - \details Executes a exclusive LDR instruction for 8 bit value. - \param [in] ptr Pointer to data - \return value of type uint8_t at (*ptr) - */ -__STATIC_FORCEINLINE uint8_t __LDREXB(volatile uint8_t *addr) -{ - uint32_t result; - -#if (__GNUC__ > 4) || (__GNUC__ == 4 && __GNUC_MINOR__ >= 8) - __ASM volatile ("ldrexb %0, %1" : "=r" (result) : "Q" (*addr) ); -#else - /* Prior to GCC 4.8, "Q" will be expanded to [rx, #0] which is not - accepted by assembler. So has to use following less efficient pattern. - */ - __ASM volatile ("ldrexb %0, [%1]" : "=r" (result) : "r" (addr) : "memory" ); -#endif - return ((uint8_t) result); /* Add explicit type cast here */ -} - - -/** - \brief LDR Exclusive (16 bit) - \details Executes a exclusive LDR instruction for 16 bit values. - \param [in] ptr Pointer to data - \return value of type uint16_t at (*ptr) - */ -__STATIC_FORCEINLINE uint16_t __LDREXH(volatile uint16_t *addr) -{ - uint32_t result; - -#if (__GNUC__ > 4) || (__GNUC__ == 4 && __GNUC_MINOR__ >= 8) - __ASM volatile ("ldrexh %0, %1" : "=r" (result) : "Q" (*addr) ); -#else - /* Prior to GCC 4.8, "Q" will be expanded to [rx, #0] which is not - accepted by assembler. So has to use following less efficient pattern. - */ - __ASM volatile ("ldrexh %0, [%1]" : "=r" (result) : "r" (addr) : "memory" ); -#endif - return ((uint16_t) result); /* Add explicit type cast here */ -} - - -/** - \brief LDR Exclusive (32 bit) - \details Executes a exclusive LDR instruction for 32 bit values. - \param [in] ptr Pointer to data - \return value of type uint32_t at (*ptr) - */ -__STATIC_FORCEINLINE uint32_t __LDREXW(volatile uint32_t *addr) -{ - uint32_t result; - - __ASM volatile ("ldrex %0, %1" : "=r" (result) : "Q" (*addr) ); - return(result); -} - - -/** - \brief STR Exclusive (8 bit) - \details Executes a exclusive STR instruction for 8 bit values. - \param [in] value Value to store - \param [in] ptr Pointer to location - \return 0 Function succeeded - \return 1 Function failed - */ -__STATIC_FORCEINLINE uint32_t __STREXB(uint8_t value, volatile uint8_t *addr) -{ - uint32_t result; - - __ASM volatile ("strexb %0, %2, %1" : "=&r" (result), "=Q" (*addr) : "r" ((uint32_t)value) ); - return(result); -} - - -/** - \brief STR Exclusive (16 bit) - \details Executes a exclusive STR instruction for 16 bit values. 
- \param [in] value Value to store - \param [in] ptr Pointer to location - \return 0 Function succeeded - \return 1 Function failed - */ -__STATIC_FORCEINLINE uint32_t __STREXH(uint16_t value, volatile uint16_t *addr) -{ - uint32_t result; - - __ASM volatile ("strexh %0, %2, %1" : "=&r" (result), "=Q" (*addr) : "r" ((uint32_t)value) ); - return(result); -} - - -/** - \brief STR Exclusive (32 bit) - \details Executes a exclusive STR instruction for 32 bit values. - \param [in] value Value to store - \param [in] ptr Pointer to location - \return 0 Function succeeded - \return 1 Function failed - */ -__STATIC_FORCEINLINE uint32_t __STREXW(uint32_t value, volatile uint32_t *addr) -{ - uint32_t result; - - __ASM volatile ("strex %0, %2, %1" : "=&r" (result), "=Q" (*addr) : "r" (value) ); - return(result); -} - - -/** - \brief Remove the exclusive lock - \details Removes the exclusive lock which is created by LDREX. - */ -__STATIC_FORCEINLINE void __CLREX(void) -{ - __ASM volatile ("clrex" ::: "memory"); -} - -/** - \brief Signed Saturate - \details Saturates a signed value. - \param [in] ARG1 Value to be saturated - \param [in] ARG2 Bit position to saturate to (1..32) - \return Saturated value - */ -#define __SSAT(ARG1, ARG2) \ -__extension__ \ -({ \ - int32_t __RES, __ARG1 = (ARG1); \ - __ASM volatile ("ssat %0, %1, %2" : "=r" (__RES) : "I" (ARG2), "r" (__ARG1) : "cc" ); \ - __RES; \ - }) - - -/** - \brief Unsigned Saturate - \details Saturates an unsigned value. - \param [in] ARG1 Value to be saturated - \param [in] ARG2 Bit position to saturate to (0..31) - \return Saturated value - */ -#define __USAT(ARG1, ARG2) \ -__extension__ \ -({ \ - uint32_t __RES, __ARG1 = (ARG1); \ - __ASM volatile ("usat %0, %1, %2" : "=r" (__RES) : "I" (ARG2), "r" (__ARG1) : "cc" ); \ - __RES; \ - }) - -/* ########################### Core Function Access ########################### */ -/** \ingroup CMSIS_Core_FunctionInterface - \defgroup CMSIS_Core_RegAccFunctions CMSIS Core Register Access Functions - @{ - */ - -/** - \brief Enable IRQ Interrupts - \details Enables IRQ interrupts by clearing special-purpose register PRIMASK. - Can only be executed in Privileged modes. - */ -__STATIC_FORCEINLINE void __enable_irq(void) -{ - __ASM volatile ("cpsie i" : : : "memory"); -} - - -/** - \brief Disable IRQ Interrupts - \details Disables IRQ interrupts by setting special-purpose register PRIMASK. - Can only be executed in Privileged modes. - */ -__STATIC_FORCEINLINE void __disable_irq(void) -{ - __ASM volatile ("cpsid i" : : : "memory"); -} - -/** - \brief Enable FIQ - \details Enables FIQ interrupts by clearing special-purpose register FAULTMASK. - Can only be executed in Privileged modes. - */ -__STATIC_FORCEINLINE void __enable_fault_irq(void) -{ - __ASM volatile ("cpsie f" : : : "memory"); -} - - -/** - \brief Disable FIQ - \details Disables FIQ interrupts by setting special-purpose register FAULTMASK. - Can only be executed in Privileged modes. - */ -__STATIC_FORCEINLINE void __disable_fault_irq(void) -{ - __ASM volatile ("cpsid f" : : : "memory"); -} - -/** - \brief Get FPSCR - \details Returns the current value of the Floating Point Status/Control register. 
- \return Floating Point Status/Control register value - */ -__STATIC_FORCEINLINE uint32_t __get_FPSCR(void) -{ - #if ((defined (__FPU_PRESENT) && (__FPU_PRESENT == 1U)) && \ - (defined (__FPU_USED ) && (__FPU_USED == 1U)) ) - #if __has_builtin(__builtin_arm_get_fpscr) - // Re-enable using built-in when GCC has been fixed - // || (__GNUC__ > 7) || (__GNUC__ == 7 && __GNUC_MINOR__ >= 2) - /* see https://gcc.gnu.org/ml/gcc-patches/2017-04/msg00443.html */ - return __builtin_arm_get_fpscr(); - #else - uint32_t result; - - __ASM volatile ("VMRS %0, fpscr" : "=r" (result) ); - return(result); - #endif - #else - return(0U); - #endif -} - - -/** - \brief Set FPSCR - \details Assigns the given value to the Floating Point Status/Control register. - \param [in] fpscr Floating Point Status/Control value to set - */ -__STATIC_FORCEINLINE void __set_FPSCR(uint32_t fpscr) -{ - #if ((defined (__FPU_PRESENT) && (__FPU_PRESENT == 1U)) && \ - (defined (__FPU_USED ) && (__FPU_USED == 1U)) ) - #if __has_builtin(__builtin_arm_set_fpscr) - // Re-enable using built-in when GCC has been fixed - // || (__GNUC__ > 7) || (__GNUC__ == 7 && __GNUC_MINOR__ >= 2) - /* see https://gcc.gnu.org/ml/gcc-patches/2017-04/msg00443.html */ - __builtin_arm_set_fpscr(fpscr); - #else - __ASM volatile ("VMSR fpscr, %0" : : "r" (fpscr) : "vfpcc", "memory"); - #endif - #else - (void)fpscr; - #endif -} - - -/*@} end of CMSIS_Core_RegAccFunctions */ - - -/* ################### Compiler specific Intrinsics ########################### */ -/** \defgroup CMSIS_SIMD_intrinsics CMSIS SIMD Intrinsics - Access to dedicated SIMD instructions - @{ -*/ - -__STATIC_FORCEINLINE uint32_t __QADD8(uint32_t op1, uint32_t op2) -{ - uint32_t result; - - __ASM ("qadd8 %0, %1, %2" : "=r" (result) : "r" (op1), "r" (op2) ); - return(result); -} - - -__STATIC_FORCEINLINE uint32_t __QSUB8(uint32_t op1, uint32_t op2) -{ - uint32_t result; - - __ASM ("qsub8 %0, %1, %2" : "=r" (result) : "r" (op1), "r" (op2) ); - return(result); -} - - -__STATIC_FORCEINLINE uint32_t __QADD16(uint32_t op1, uint32_t op2) -{ - uint32_t result; - - __ASM ("qadd16 %0, %1, %2" : "=r" (result) : "r" (op1), "r" (op2) ); - return(result); -} - -__STATIC_FORCEINLINE uint32_t __SHADD16(uint32_t op1, uint32_t op2) -{ - uint32_t result; - - __ASM ("shadd16 %0, %1, %2" : "=r" (result) : "r" (op1), "r" (op2) ); - return(result); -} - -__STATIC_FORCEINLINE uint32_t __QSUB16(uint32_t op1, uint32_t op2) -{ - uint32_t result; - - __ASM ("qsub16 %0, %1, %2" : "=r" (result) : "r" (op1), "r" (op2) ); - return(result); -} - -__STATIC_FORCEINLINE uint32_t __SHSUB16(uint32_t op1, uint32_t op2) -{ - uint32_t result; - - __ASM ("shsub16 %0, %1, %2" : "=r" (result) : "r" (op1), "r" (op2) ); - return(result); -} - -__STATIC_FORCEINLINE uint32_t __QASX(uint32_t op1, uint32_t op2) -{ - uint32_t result; - - __ASM ("qasx %0, %1, %2" : "=r" (result) : "r" (op1), "r" (op2) ); - return(result); -} - -__STATIC_FORCEINLINE uint32_t __SHASX(uint32_t op1, uint32_t op2) -{ - uint32_t result; - - __ASM ("shasx %0, %1, %2" : "=r" (result) : "r" (op1), "r" (op2) ); - return(result); -} - -__STATIC_FORCEINLINE uint32_t __QSAX(uint32_t op1, uint32_t op2) -{ - uint32_t result; - - __ASM ("qsax %0, %1, %2" : "=r" (result) : "r" (op1), "r" (op2) ); - return(result); -} - -__STATIC_FORCEINLINE uint32_t __SHSAX(uint32_t op1, uint32_t op2) -{ - uint32_t result; - - __ASM ("shsax %0, %1, %2" : "=r" (result) : "r" (op1), "r" (op2) ); - return(result); -} - -__STATIC_FORCEINLINE uint32_t __SXTB16(uint32_t op1) -{ - uint32_t result; - 
- __ASM ("sxtb16 %0, %1" : "=r" (result) : "r" (op1)); - return(result); -} - -__STATIC_FORCEINLINE uint32_t __SMUADX (uint32_t op1, uint32_t op2) -{ - uint32_t result; - - __ASM volatile ("smuadx %0, %1, %2" : "=r" (result) : "r" (op1), "r" (op2) ); - return(result); -} - -__STATIC_FORCEINLINE uint32_t __SMLADX (uint32_t op1, uint32_t op2, uint32_t op3) -{ - uint32_t result; - - __ASM volatile ("smladx %0, %1, %2, %3" : "=r" (result) : "r" (op1), "r" (op2), "r" (op3) ); - return(result); -} - -__STATIC_FORCEINLINE uint64_t __SMLALD (uint32_t op1, uint32_t op2, uint64_t acc) -{ - union llreg_u{ - uint32_t w32[2]; - uint64_t w64; - } llr; - llr.w64 = acc; - -#ifndef __ARMEB__ /* Little endian */ - __ASM volatile ("smlald %0, %1, %2, %3" : "=r" (llr.w32[0]), "=r" (llr.w32[1]): "r" (op1), "r" (op2) , "0" (llr.w32[0]), "1" (llr.w32[1]) ); -#else /* Big endian */ - __ASM volatile ("smlald %0, %1, %2, %3" : "=r" (llr.w32[1]), "=r" (llr.w32[0]): "r" (op1), "r" (op2) , "0" (llr.w32[1]), "1" (llr.w32[0]) ); -#endif - - return(llr.w64); -} - -__STATIC_FORCEINLINE uint64_t __SMLALDX (uint32_t op1, uint32_t op2, uint64_t acc) -{ - union llreg_u{ - uint32_t w32[2]; - uint64_t w64; - } llr; - llr.w64 = acc; - -#ifndef __ARMEB__ /* Little endian */ - __ASM volatile ("smlaldx %0, %1, %2, %3" : "=r" (llr.w32[0]), "=r" (llr.w32[1]): "r" (op1), "r" (op2) , "0" (llr.w32[0]), "1" (llr.w32[1]) ); -#else /* Big endian */ - __ASM volatile ("smlaldx %0, %1, %2, %3" : "=r" (llr.w32[1]), "=r" (llr.w32[0]): "r" (op1), "r" (op2) , "0" (llr.w32[1]), "1" (llr.w32[0]) ); -#endif - - return(llr.w64); -} - -__STATIC_FORCEINLINE uint32_t __SMUSD (uint32_t op1, uint32_t op2) -{ - uint32_t result; - - __ASM volatile ("smusd %0, %1, %2" : "=r" (result) : "r" (op1), "r" (op2) ); - return(result); -} - -__STATIC_FORCEINLINE uint32_t __SMUSDX (uint32_t op1, uint32_t op2) -{ - uint32_t result; - - __ASM volatile ("smusdx %0, %1, %2" : "=r" (result) : "r" (op1), "r" (op2) ); - return(result); -} - -__STATIC_FORCEINLINE uint32_t __SMLSDX (uint32_t op1, uint32_t op2, uint32_t op3) -{ - uint32_t result; - - __ASM volatile ("smlsdx %0, %1, %2, %3" : "=r" (result) : "r" (op1), "r" (op2), "r" (op3) ); - return(result); -} - -__STATIC_FORCEINLINE int32_t __QADD( int32_t op1, int32_t op2) -{ - int32_t result; - - __ASM volatile ("qadd %0, %1, %2" : "=r" (result) : "r" (op1), "r" (op2) ); - return(result); -} - -__STATIC_FORCEINLINE int32_t __QSUB( int32_t op1, int32_t op2) -{ - int32_t result; - - __ASM volatile ("qsub %0, %1, %2" : "=r" (result) : "r" (op1), "r" (op2) ); - return(result); -} - - -__STATIC_FORCEINLINE uint32_t __SMUAD (uint32_t op1, uint32_t op2) -{ - uint32_t result; - - __ASM volatile ("smuad %0, %1, %2" : "=r" (result) : "r" (op1), "r" (op2) ); - return(result); -} - - - -#define __PKHBT(ARG1,ARG2,ARG3) ( ((((uint32_t)(ARG1)) ) & 0x0000FFFFUL) | \ - ((((uint32_t)(ARG2)) << (ARG3)) & 0xFFFF0000UL) ) - -#define __PKHTB(ARG1,ARG2,ARG3) ( ((((uint32_t)(ARG1)) ) & 0xFFFF0000UL) | \ - ((((uint32_t)(ARG2)) >> (ARG3)) & 0x0000FFFFUL) ) - -__STATIC_FORCEINLINE int32_t __SMMLA (int32_t op1, int32_t op2, int32_t op3) -{ - int32_t result; - - __ASM ("smmla %0, %1, %2, %3" : "=r" (result): "r" (op1), "r" (op2), "r" (op3) ); - return(result); -} - -__STATIC_FORCEINLINE uint32_t __SMLAD (uint32_t op1, uint32_t op2, uint32_t op3) -{ - uint32_t result; - - __ASM volatile ("smlad %0, %1, %2, %3" : "=r" (result) : "r" (op1), "r" (op2), "r" (op3) ); - return(result); -} - -/*@} end of group CMSIS_SIMD_intrinsics */ - - - -/** \defgroup 
CMSIS_Core_intrinsics CMSIS Core Intrinsics - Access to dedicated SIMD instructions - @{ -*/ - -/** \brief Get CPSR Register - \return CPSR Register value - */ -__STATIC_FORCEINLINE uint32_t __get_CPSR(void) -{ - uint32_t result; - __ASM volatile("MRS %0, cpsr" : "=r" (result) ); - return(result); -} - -/** \brief Set CPSR Register - \param [in] cpsr CPSR value to set - */ -__STATIC_FORCEINLINE void __set_CPSR(uint32_t cpsr) -{ - __ASM volatile ("MSR cpsr, %0" : : "r" (cpsr) : "cc", "memory"); -} - -/** \brief Get Mode - \return Processor Mode - */ -__STATIC_FORCEINLINE uint32_t __get_mode(void) -{ - return (__get_CPSR() & 0x1FU); -} - -/** \brief Set Mode - \param [in] mode Mode value to set - */ -__STATIC_FORCEINLINE void __set_mode(uint32_t mode) -{ - __ASM volatile("MSR cpsr_c, %0" : : "r" (mode) : "memory"); -} - -/** \brief Get Stack Pointer - \return Stack Pointer value - */ -__STATIC_FORCEINLINE uint32_t __get_SP(void) -{ - uint32_t result; - __ASM volatile("MOV %0, sp" : "=r" (result) : : "memory"); - return result; -} - -/** \brief Set Stack Pointer - \param [in] stack Stack Pointer value to set - */ -__STATIC_FORCEINLINE void __set_SP(uint32_t stack) -{ - __ASM volatile("MOV sp, %0" : : "r" (stack) : "memory"); -} - -/** \brief Get USR/SYS Stack Pointer - \return USR/SYS Stack Pointer value - */ -__STATIC_FORCEINLINE uint32_t __get_SP_usr(void) -{ - uint32_t cpsr = __get_CPSR(); - uint32_t result; - __ASM volatile( - "CPS #0x1F \n" - "MOV %0, sp " : "=r"(result) : : "memory" - ); - __set_CPSR(cpsr); - __ISB(); - return result; -} - -/** \brief Set USR/SYS Stack Pointer - \param [in] topOfProcStack USR/SYS Stack Pointer value to set - */ -__STATIC_FORCEINLINE void __set_SP_usr(uint32_t topOfProcStack) -{ - uint32_t cpsr = __get_CPSR(); - __ASM volatile( - "CPS #0x1F \n" - "MOV sp, %0 " : : "r" (topOfProcStack) : "memory" - ); - __set_CPSR(cpsr); - __ISB(); -} - -/** \brief Get FPEXC - \return Floating Point Exception Control register value - */ -__STATIC_FORCEINLINE uint32_t __get_FPEXC(void) -{ -#if (__FPU_PRESENT == 1) - uint32_t result; - __ASM volatile("VMRS %0, fpexc" : "=r" (result) : : "memory"); - return(result); -#else - return(0); -#endif -} - -/** \brief Set FPEXC - \param [in] fpexc Floating Point Exception Control value to set - */ -__STATIC_FORCEINLINE void __set_FPEXC(uint32_t fpexc) -{ -#if (__FPU_PRESENT == 1) - __ASM volatile ("VMSR fpexc, %0" : : "r" (fpexc) : "memory"); -#endif -} - -/* - * Include common core functions to access Coprocessor 15 registers - */ - -#define __get_CP(cp, op1, Rt, CRn, CRm, op2) __ASM volatile("MRC p" # cp ", " # op1 ", %0, c" # CRn ", c" # CRm ", " # op2 : "=r" (Rt) : : "memory" ) -#define __set_CP(cp, op1, Rt, CRn, CRm, op2) __ASM volatile("MCR p" # cp ", " # op1 ", %0, c" # CRn ", c" # CRm ", " # op2 : : "r" (Rt) : "memory" ) -#define __get_CP64(cp, op1, Rt, CRm) __ASM volatile("MRRC p" # cp ", " # op1 ", %Q0, %R0, c" # CRm : "=r" (Rt) : : "memory" ) -#define __set_CP64(cp, op1, Rt, CRm) __ASM volatile("MCRR p" # cp ", " # op1 ", %Q0, %R0, c" # CRm : : "r" (Rt) : "memory" ) - -#include "cmsis_cp15.h" - -/** \brief Enable Floating Point Unit - - Critical section, called from undef handler, so systick is disabled - */ -__STATIC_INLINE void __FPU_Enable(void) -{ - // Permit access to VFP/NEON, registers by modifying CPACR - const uint32_t cpacr = __get_CPACR(); - __set_CPACR(cpacr | 0x00F00000ul); - __ISB(); - - // Enable VFP/NEON - const uint32_t fpexc = __get_FPEXC(); - __set_FPEXC(fpexc | 0x40000000ul); - - __ASM volatile( - // 
Initialise VFP/NEON registers to 0 - " MOV R2,#0 \n" - - // Initialise D16 registers to 0 - " VMOV D0, R2,R2 \n" - " VMOV D1, R2,R2 \n" - " VMOV D2, R2,R2 \n" - " VMOV D3, R2,R2 \n" - " VMOV D4, R2,R2 \n" - " VMOV D5, R2,R2 \n" - " VMOV D6, R2,R2 \n" - " VMOV D7, R2,R2 \n" - " VMOV D8, R2,R2 \n" - " VMOV D9, R2,R2 \n" - " VMOV D10,R2,R2 \n" - " VMOV D11,R2,R2 \n" - " VMOV D12,R2,R2 \n" - " VMOV D13,R2,R2 \n" - " VMOV D14,R2,R2 \n" - " VMOV D15,R2,R2 \n" - -#if (defined(__ARM_NEON) && (__ARM_NEON == 1)) - // Initialise D32 registers to 0 - " VMOV D16,R2,R2 \n" - " VMOV D17,R2,R2 \n" - " VMOV D18,R2,R2 \n" - " VMOV D19,R2,R2 \n" - " VMOV D20,R2,R2 \n" - " VMOV D21,R2,R2 \n" - " VMOV D22,R2,R2 \n" - " VMOV D23,R2,R2 \n" - " VMOV D24,R2,R2 \n" - " VMOV D25,R2,R2 \n" - " VMOV D26,R2,R2 \n" - " VMOV D27,R2,R2 \n" - " VMOV D28,R2,R2 \n" - " VMOV D29,R2,R2 \n" - " VMOV D30,R2,R2 \n" - " VMOV D31,R2,R2 \n" -#endif - : : : "cc", "r2" - ); - - // Initialise FPSCR to a known state - const uint32_t fpscr = __get_FPSCR(); - __set_FPSCR(fpscr & 0x00086060ul); -} - -/*@} end of group CMSIS_Core_intrinsics */ - -#pragma GCC diagnostic pop - -#endif /* __CMSIS_GCC_H */ diff --git a/CMSIS/Core_A/Include/cmsis_iccarm.h b/CMSIS/Core_A/Include/cmsis_iccarm.h deleted file mode 100644 index 10f1f92b9..000000000 --- a/CMSIS/Core_A/Include/cmsis_iccarm.h +++ /dev/null @@ -1,573 +0,0 @@ -/**************************************************************************//** - * @file cmsis_iccarm.h - * @brief CMSIS compiler ICCARM (IAR Compiler for Arm) header file - * @version V5.0.8 - * @date 13. November 2022 - ******************************************************************************/ - -//------------------------------------------------------------------------------ -// -// Copyright (c) 2017-2018 IAR Systems -// Copyright (c) 2018-2019 Arm Limited -// -// SPDX-License-Identifier: Apache-2.0 -// -// Licensed under the Apache License, Version 2.0 (the "License") -// you may not use this file except in compliance with the License. -// You may obtain a copy of the License at -// http://www.apache.org/licenses/LICENSE-2.0 -// -// Unless required by applicable law or agreed to in writing, software -// distributed under the License is distributed on an "AS IS" BASIS, -// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -// See the License for the specific language governing permissions and -// limitations under the License. -// -//------------------------------------------------------------------------------ - - -#ifndef __CMSIS_ICCARM_H__ -#define __CMSIS_ICCARM_H__ - -#ifndef __ICCARM__ - #error This file should only be compiled by ICCARM -#endif - -#pragma system_include - -#define __IAR_FT _Pragma("inline=forced") __intrinsic - -#if (__VER__ >= 8000000) - #define __ICCARM_V8 1 -#else - #define __ICCARM_V8 0 -#endif - -#pragma language=extended - -#ifndef __ALIGNED - #if __ICCARM_V8 - #define __ALIGNED(x) __attribute__((aligned(x))) - #elif (__VER__ >= 7080000) - /* Needs IAR language extensions */ - #define __ALIGNED(x) __attribute__((aligned(x))) - #else - #warning No compiler specific solution for __ALIGNED.__ALIGNED is ignored. - #define __ALIGNED(x) - #endif -#endif - - -/* Define compiler macros for CPU architecture, used in CMSIS 5. 
- */ -#if __ARM_ARCH_7A__ -/* Macro already defined */ -#else - #if defined(__ARM7A__) - #define __ARM_ARCH_7A__ 1 - #endif -#endif - -#ifndef __ASM - #define __ASM __asm -#endif - -#ifndef __COMPILER_BARRIER - #define __COMPILER_BARRIER() __ASM volatile("":::"memory") -#endif - -#ifndef __INLINE - #define __INLINE inline -#endif - -#ifndef __NO_RETURN - #if __ICCARM_V8 - #define __NO_RETURN __attribute__((__noreturn__)) - #else - #define __NO_RETURN _Pragma("object_attribute=__noreturn") - #endif -#endif - -#ifndef __PACKED - #if __ICCARM_V8 - #define __PACKED __attribute__((packed, aligned(1))) - #else - /* Needs IAR language extensions */ - #define __PACKED __packed - #endif -#endif - -#ifndef __PACKED_STRUCT - #if __ICCARM_V8 - #define __PACKED_STRUCT struct __attribute__((packed, aligned(1))) - #else - /* Needs IAR language extensions */ - #define __PACKED_STRUCT __packed struct - #endif -#endif - -#ifndef __PACKED_UNION - #if __ICCARM_V8 - #define __PACKED_UNION union __attribute__((packed, aligned(1))) - #else - /* Needs IAR language extensions */ - #define __PACKED_UNION __packed union - #endif -#endif - -#ifndef __RESTRICT - #if __ICCARM_V8 - #define __RESTRICT __restrict - #else - /* Needs IAR language extensions */ - #define __RESTRICT restrict - #endif -#endif - -#ifndef __STATIC_INLINE - #define __STATIC_INLINE static inline -#endif - -#ifndef __FORCEINLINE - #define __FORCEINLINE _Pragma("inline=forced") -#endif - -#ifndef __STATIC_FORCEINLINE - #define __STATIC_FORCEINLINE __FORCEINLINE __STATIC_INLINE -#endif - -#ifndef CMSIS_DEPRECATED - #define CMSIS_DEPRECATED __attribute__((deprecated)) -#endif - -#ifndef __UNALIGNED_UINT16_READ - #pragma language=save - #pragma language=extended - __IAR_FT uint16_t __iar_uint16_read(void const *ptr) - { - return *(__packed uint16_t*)(ptr); - } - #pragma language=restore - #define __UNALIGNED_UINT16_READ(PTR) __iar_uint16_read(PTR) -#endif - - -#ifndef __UNALIGNED_UINT16_WRITE - #pragma language=save - #pragma language=extended - __IAR_FT void __iar_uint16_write(void const *ptr, uint16_t val) - { - *(__packed uint16_t*)(ptr) = val;; - } - #pragma language=restore - #define __UNALIGNED_UINT16_WRITE(PTR,VAL) __iar_uint16_write(PTR,VAL) -#endif - -#ifndef __UNALIGNED_UINT32_READ - #pragma language=save - #pragma language=extended - __IAR_FT uint32_t __iar_uint32_read(void const *ptr) - { - return *(__packed uint32_t*)(ptr); - } - #pragma language=restore - #define __UNALIGNED_UINT32_READ(PTR) __iar_uint32_read(PTR) -#endif - -#ifndef __UNALIGNED_UINT32_WRITE - #pragma language=save - #pragma language=extended - __IAR_FT void __iar_uint32_write(void const *ptr, uint32_t val) - { - *(__packed uint32_t*)(ptr) = val;; - } - #pragma language=restore - #define __UNALIGNED_UINT32_WRITE(PTR,VAL) __iar_uint32_write(PTR,VAL) -#endif - -#if 0 -#ifndef __UNALIGNED_UINT32 /* deprecated */ - #pragma language=save - #pragma language=extended - __packed struct __iar_u32 { uint32_t v; }; - #pragma language=restore - #define __UNALIGNED_UINT32(PTR) (((struct __iar_u32 *)(PTR))->v) -#endif -#endif - -#ifndef __USED - #if __ICCARM_V8 - #define __USED __attribute__((used)) - #else - #define __USED _Pragma("__root") - #endif -#endif - -#ifndef __WEAK - #if __ICCARM_V8 - #define __WEAK __attribute__((weak)) - #else - #define __WEAK _Pragma("__weak") - #endif -#endif - - -#ifndef __ICCARM_INTRINSICS_VERSION__ - #define __ICCARM_INTRINSICS_VERSION__ 0 -#endif - -#if __ICCARM_INTRINSICS_VERSION__ == 2 - - #if defined(__CLZ) - #undef __CLZ - #endif - #if 
defined(__REVSH) - #undef __REVSH - #endif - #if defined(__RBIT) - #undef __RBIT - #endif - #if defined(__SSAT) - #undef __SSAT - #endif - #if defined(__USAT) - #undef __USAT - #endif - - #include "iccarm_builtin.h" - - #define __disable_fault_irq __iar_builtin_disable_fiq - #define __disable_irq __iar_builtin_disable_interrupt - #define __enable_fault_irq __iar_builtin_enable_fiq - #define __enable_irq __iar_builtin_enable_interrupt - #define __arm_rsr __iar_builtin_rsr - #define __arm_wsr __iar_builtin_wsr - - #if ((defined (__FPU_PRESENT) && (__FPU_PRESENT == 1U))) - #define __get_FPSCR() (__arm_rsr("FPSCR")) - #define __set_FPSCR(VALUE) (__arm_wsr("FPSCR", (VALUE))) - #else - #define __get_FPSCR() ( 0 ) - #define __set_FPSCR(VALUE) ((void)VALUE) - #endif - - #define __get_CPSR() (__arm_rsr("CPSR")) - #define __get_mode() (__get_CPSR() & 0x1FU) - - #define __set_CPSR(VALUE) (__arm_wsr("CPSR", (VALUE))) - #define __set_mode(VALUE) (__arm_wsr("CPSR_c", (VALUE))) - - - #define __get_FPEXC() (__arm_rsr("FPEXC")) - #define __set_FPEXC(VALUE) (__arm_wsr("FPEXC", VALUE)) - - #define __get_CP(cp, op1, RT, CRn, CRm, op2) \ - ((RT) = __arm_rsr("p" # cp ":" # op1 ":c" # CRn ":c" # CRm ":" # op2)) - - #define __set_CP(cp, op1, RT, CRn, CRm, op2) \ - (__arm_wsr("p" # cp ":" # op1 ":c" # CRn ":c" # CRm ":" # op2, (RT))) - - #define __get_CP64(cp, op1, Rt, CRm) \ - __ASM volatile("MRRC p" # cp ", " # op1 ", %Q0, %R0, c" # CRm : "=r" (Rt) : : "memory" ) - - #define __set_CP64(cp, op1, Rt, CRm) \ - __ASM volatile("MCRR p" # cp ", " # op1 ", %Q0, %R0, c" # CRm : : "r" (Rt) : "memory" ) - - #include "cmsis_cp15.h" - - #define __NOP __iar_builtin_no_operation - - #define __CLZ __iar_builtin_CLZ - #define __CLREX __iar_builtin_CLREX - - #define __DMB __iar_builtin_DMB - #define __DSB __iar_builtin_DSB - #define __ISB __iar_builtin_ISB - - #define __LDREXB __iar_builtin_LDREXB - #define __LDREXH __iar_builtin_LDREXH - #define __LDREXW __iar_builtin_LDREX - - #define __RBIT __iar_builtin_RBIT - #define __REV __iar_builtin_REV - #define __REV16 __iar_builtin_REV16 - - __IAR_FT int16_t __REVSH(int16_t val) - { - return (int16_t) __iar_builtin_REVSH(val); - } - - #define __ROR __iar_builtin_ROR - #define __RRX __iar_builtin_RRX - - #define __SEV __iar_builtin_SEV - - #define __SSAT __iar_builtin_SSAT - - #define __STREXB __iar_builtin_STREXB - #define __STREXH __iar_builtin_STREXH - #define __STREXW __iar_builtin_STREX - - #define __USAT __iar_builtin_USAT - - #define __WFE __iar_builtin_WFE - #define __WFI __iar_builtin_WFI - - #define __SADD8 __iar_builtin_SADD8 - #define __QADD8 __iar_builtin_QADD8 - #define __SHADD8 __iar_builtin_SHADD8 - #define __UADD8 __iar_builtin_UADD8 - #define __UQADD8 __iar_builtin_UQADD8 - #define __UHADD8 __iar_builtin_UHADD8 - #define __SSUB8 __iar_builtin_SSUB8 - #define __QSUB8 __iar_builtin_QSUB8 - #define __SHSUB8 __iar_builtin_SHSUB8 - #define __USUB8 __iar_builtin_USUB8 - #define __UQSUB8 __iar_builtin_UQSUB8 - #define __UHSUB8 __iar_builtin_UHSUB8 - #define __SADD16 __iar_builtin_SADD16 - #define __QADD16 __iar_builtin_QADD16 - #define __SHADD16 __iar_builtin_SHADD16 - #define __UADD16 __iar_builtin_UADD16 - #define __UQADD16 __iar_builtin_UQADD16 - #define __UHADD16 __iar_builtin_UHADD16 - #define __SSUB16 __iar_builtin_SSUB16 - #define __QSUB16 __iar_builtin_QSUB16 - #define __SHSUB16 __iar_builtin_SHSUB16 - #define __USUB16 __iar_builtin_USUB16 - #define __UQSUB16 __iar_builtin_UQSUB16 - #define __UHSUB16 __iar_builtin_UHSUB16 - #define __SASX __iar_builtin_SASX - 
-  #define __QASX __iar_builtin_QASX
-  #define __SHASX __iar_builtin_SHASX
-  #define __UASX __iar_builtin_UASX
-  #define __UQASX __iar_builtin_UQASX
-  #define __UHASX __iar_builtin_UHASX
-  #define __SSAX __iar_builtin_SSAX
-  #define __QSAX __iar_builtin_QSAX
-  #define __SHSAX __iar_builtin_SHSAX
-  #define __USAX __iar_builtin_USAX
-  #define __UQSAX __iar_builtin_UQSAX
-  #define __UHSAX __iar_builtin_UHSAX
-  #define __USAD8 __iar_builtin_USAD8
-  #define __USADA8 __iar_builtin_USADA8
-  #define __SSAT16 __iar_builtin_SSAT16
-  #define __USAT16 __iar_builtin_USAT16
-  #define __UXTB16 __iar_builtin_UXTB16
-  #define __UXTAB16 __iar_builtin_UXTAB16
-  #define __SXTB16 __iar_builtin_SXTB16
-  #define __SXTAB16 __iar_builtin_SXTAB16
-  #define __SMUAD __iar_builtin_SMUAD
-  #define __SMUADX __iar_builtin_SMUADX
-  #define __SMMLA __iar_builtin_SMMLA
-  #define __SMLAD __iar_builtin_SMLAD
-  #define __SMLADX __iar_builtin_SMLADX
-  #define __SMLALD __iar_builtin_SMLALD
-  #define __SMLALDX __iar_builtin_SMLALDX
-  #define __SMUSD __iar_builtin_SMUSD
-  #define __SMUSDX __iar_builtin_SMUSDX
-  #define __SMLSD __iar_builtin_SMLSD
-  #define __SMLSDX __iar_builtin_SMLSDX
-  #define __SMLSLD __iar_builtin_SMLSLD
-  #define __SMLSLDX __iar_builtin_SMLSLDX
-  #define __SEL __iar_builtin_SEL
-  #define __QADD __iar_builtin_QADD
-  #define __QSUB __iar_builtin_QSUB
-  #define __PKHBT __iar_builtin_PKHBT
-  #define __PKHTB __iar_builtin_PKHTB
-
-#else /* __ICCARM_INTRINSICS_VERSION__ == 2 */
-
-  #if !((defined (__FPU_PRESENT) && (__FPU_PRESENT == 1U)))
-    #define __get_FPSCR __cmsis_iar_get_FPSR_not_active
-  #endif
-
-  #ifdef __INTRINSICS_INCLUDED
-    #error intrinsics.h is already included previously!
-  #endif
-
-  #include <intrinsics.h>
-
-  #if !((defined (__FPU_PRESENT) && (__FPU_PRESENT == 1U)))
-    #define __get_FPSCR() (0)
-  #endif
-
-  #pragma diag_suppress=Pe940
-  #pragma diag_suppress=Pe177
-
-  #define __enable_irq __enable_interrupt
-  #define __disable_irq __disable_interrupt
-  #define __enable_fault_irq __enable_fiq
-  #define __disable_fault_irq __disable_fiq
-  #define __NOP __no_operation
-
-  #define __get_xPSR __get_PSR
-
-  __IAR_FT void __set_mode(uint32_t mode)
-  {
-    __ASM volatile("MSR cpsr_c, %0" : : "r" (mode) : "memory");
-  }
-
-  __IAR_FT uint32_t __LDREXW(uint32_t volatile *ptr)
-  {
-    return __LDREX((unsigned long *)ptr);
-  }
-
-  __IAR_FT uint32_t __STREXW(uint32_t value, uint32_t volatile *ptr)
-  {
-    return __STREX(value, (unsigned long *)ptr);
-  }
-
-
-  __IAR_FT uint32_t __RRX(uint32_t value)
-  {
-    uint32_t result;
-    __ASM("RRX %0, %1" : "=r"(result) : "r" (value) : "cc");
-    return(result);
-  }
-
-
-  __IAR_FT uint32_t __ROR(uint32_t op1, uint32_t op2)
-  {
-    return (op1 >> op2) | (op1 << ((sizeof(op1)*8)-op2));
-  }
-
-  __IAR_FT uint32_t __get_FPEXC(void)
-  {
-  #if ((defined (__FPU_PRESENT) && (__FPU_PRESENT == 1U)))
-    uint32_t result;
-    __ASM volatile("VMRS %0, fpexc" : "=r" (result) : : "memory");
-    return(result);
-  #else
-    return(0);
-  #endif
-  }
-
-  __IAR_FT void __set_FPEXC(uint32_t fpexc)
-  {
-  #if ((defined (__FPU_PRESENT) && (__FPU_PRESENT == 1U)))
-    __ASM volatile ("VMSR fpexc, %0" : : "r" (fpexc) : "memory");
-  #endif
-  }
-
-
-  #define __get_CP(cp, op1, Rt, CRn, CRm, op2) \
-    __ASM volatile("MRC p" # cp ", " # op1 ", %0, c" # CRn ", c" # CRm ", " # op2 : "=r" (Rt) : : "memory" )
-  #define __set_CP(cp, op1, Rt, CRn, CRm, op2) \
-    __ASM volatile("MCR p" # cp ", " # op1 ", %0, c" # CRn ", c" # CRm ", " # op2 : : "r" (Rt) : "memory" )
-  #define __get_CP64(cp, op1, Rt, CRm) \
-    __ASM volatile("MRRC p" # cp ", " # op1 ", %Q0, %R0, c" # CRm : "=r" (Rt) : : "memory" )
-  #define __set_CP64(cp, op1, Rt, CRm) \
-    __ASM volatile("MCRR p" # cp ", " # op1 ", %Q0, %R0, c" # CRm : : "r" (Rt) : "memory" )
-
-  #include "cmsis_cp15.h"
-
-#endif /* __ICCARM_INTRINSICS_VERSION__ == 2 */
-
-#define __BKPT(value) __asm volatile ("BKPT %0" : : "i"(value))
-
-
-__IAR_FT uint32_t __get_SP_usr(void)
-{
-  uint32_t cpsr;
-  uint32_t result;
-  __ASM volatile(
-    "MRS %0, cpsr \n"
-    "CPS #0x1F \n" // no effect in USR mode
-    "MOV %1, sp \n"
-    "MSR cpsr_c, %2 \n" // no effect in USR mode
-    "ISB" : "=r"(cpsr), "=r"(result) : "r"(cpsr) : "memory"
-  );
-  return result;
-}
-
-__IAR_FT void __set_SP_usr(uint32_t topOfProcStack)
-{
-  uint32_t cpsr;
-  __ASM volatile(
-    "MRS %0, cpsr \n"
-    "CPS #0x1F \n" // no effect in USR mode
-    "MOV sp, %1 \n"
-    "MSR cpsr_c, %2 \n" // no effect in USR mode
-    "ISB" : "=r"(cpsr) : "r" (topOfProcStack), "r"(cpsr) : "memory"
-  );
-}
-
-#define __get_mode() (__get_CPSR() & 0x1FU)
-
-__STATIC_INLINE
-void __FPU_Enable(void)
-{
-  __ASM volatile(
-    //Permit access to VFP/NEON, registers by modifying CPACR
-    " MRC p15,0,R1,c1,c0,2 \n"
-    " ORR R1,R1,#0x00F00000 \n"
-    " MCR p15,0,R1,c1,c0,2 \n"
-
-    //Ensure that subsequent instructions occur in the context of VFP/NEON access permitted
-    " ISB \n"
-
-    //Enable VFP/NEON
-    " VMRS R1,FPEXC \n"
-    " ORR R1,R1,#0x40000000 \n"
-    " VMSR FPEXC,R1 \n"
-
-    //Initialise VFP/NEON registers to 0
-    " MOV R2,#0 \n"
-
-    //Initialise D16 registers to 0
-    " VMOV D0, R2,R2 \n"
-    " VMOV D1, R2,R2 \n"
-    " VMOV D2, R2,R2 \n"
-    " VMOV D3, R2,R2 \n"
-    " VMOV D4, R2,R2 \n"
-    " VMOV D5, R2,R2 \n"
-    " VMOV D6, R2,R2 \n"
-    " VMOV D7, R2,R2 \n"
-    " VMOV D8, R2,R2 \n"
-    " VMOV D9, R2,R2 \n"
-    " VMOV D10,R2,R2 \n"
-    " VMOV D11,R2,R2 \n"
-    " VMOV D12,R2,R2 \n"
-    " VMOV D13,R2,R2 \n"
-    " VMOV D14,R2,R2 \n"
-    " VMOV D15,R2,R2 \n"
-
-#ifdef __ARM_ADVANCED_SIMD__
-    //Initialise D32 registers to 0
-    " VMOV D16,R2,R2 \n"
-    " VMOV D17,R2,R2 \n"
-    " VMOV D18,R2,R2 \n"
-    " VMOV D19,R2,R2 \n"
-    " VMOV D20,R2,R2 \n"
-    " VMOV D21,R2,R2 \n"
-    " VMOV D22,R2,R2 \n"
-    " VMOV D23,R2,R2 \n"
-    " VMOV D24,R2,R2 \n"
-    " VMOV D25,R2,R2 \n"
-    " VMOV D26,R2,R2 \n"
-    " VMOV D27,R2,R2 \n"
-    " VMOV D28,R2,R2 \n"
-    " VMOV D29,R2,R2 \n"
-    " VMOV D30,R2,R2 \n"
-    " VMOV D31,R2,R2 \n"
-#endif
-
-    //Initialise FPSCR to a known state
-    " VMRS R1,FPSCR \n"
-    " MOV32 R2,#0x00086060 \n" //Mask off all bits that do not have to be preserved. Non-preserved bits can/should be zero.
-    " AND R1,R1,R2 \n"
-    " VMSR FPSCR,R1 \n"
-    : : : "cc", "r1", "r2"
-  );
-}
-
-
-
-#undef __IAR_FT
-#undef __ICCARM_V8
-
-#pragma diag_default=Pe940
-#pragma diag_default=Pe177
-
-#endif /* __CMSIS_ICCARM_H__ */
diff --git a/CMSIS/DoxyGen/Core/Core.dxy.in b/CMSIS/DoxyGen/Core/Core.dxy.in
index e330fba08..acb25c9da 100644
--- a/CMSIS/DoxyGen/Core/Core.dxy.in
+++ b/CMSIS/DoxyGen/Core/Core.dxy.in
@@ -47,7 +47,7 @@ PROJECT_NAME = "CMSIS-Core (Cortex-M)"
 # could be handy for archiving the generated documentation or if some version
 # control system is used.
 
-PROJECT_NUMBER = "Version 5.7.0" +PROJECT_NUMBER = "Version 6.0.0" # Using the PROJECT_BRIEF tag one can provide an optional one line description # for a project that appears at the top of each page and should give viewer a diff --git a/CMSIS/DoxyGen/Core_A/Core_A.dxy.in b/CMSIS/DoxyGen/Core_A/Core_A.dxy.in index 438e56146..97b8e5c2f 100644 --- a/CMSIS/DoxyGen/Core_A/Core_A.dxy.in +++ b/CMSIS/DoxyGen/Core_A/Core_A.dxy.in @@ -47,7 +47,7 @@ PROJECT_NAME = "CMSIS-Core (Cortex-A)" # could be handy for archiving the generated documentation or if some version # control system is used. -PROJECT_NUMBER = "Version 1.2.1" +PROJECT_NUMBER = "Version 6.0.0" # Using the PROJECT_BRIEF tag one can provide an optional one line description # for a project that appears at the top of each page and should give viewer a @@ -920,18 +920,22 @@ INPUT = src/Overview.txt \ src/Using.txt \ src/Template.txt \ src/MISRA.txt \ - ../../Core_A/Include/core_ca.h \ - ../../Core_A/Include/cmsis_armcc.h \ - ../../Core_A/Include/cmsis_cp15.h \ - ../../Core_A/Source/irq_ctrl_gic.c \ - ../../Core_A/Include/irq_ctrl.h \ + ../../Core/Include/cmsis_version.h \ + ../../Core/Include/armv7a.h \ + ../../Core/Include/armv7a_cp15.h \ + ../../Core/Include/armv8a.h \ + ../../Core/Include/cmsis_armcc.h \ + ../../Core/Include/cmsis_armcc_corea.h \ + ../../Core/Include/gic_v20.h \ + ../../Core/Source/irq_ctrl_gic.c \ + ../../Core/Include/irq_ctrl.h \ src/Ref_SystemAndClock.txt \ src/ref_gic.txt \ src/ref_core_register.txt \ src/ref_cache.txt \ src/ref_timer.txt \ src/ref_mmu.txt \ - src/core_ca.txt \ + src/arm7a.txt \ src/cmsis_armcc.txt \ src/irq_ctrl.txt \ diff --git a/CMSIS/DoxyGen/Core_A/src/Overview.txt b/CMSIS/DoxyGen/Core_A/src/Overview.txt index 7719a4dce..32f113a74 100644 --- a/CMSIS/DoxyGen/Core_A/src/Overview.txt +++ b/CMSIS/DoxyGen/Core_A/src/Overview.txt @@ -26,7 +26,7 @@ Files relevant to CMSIS-Core (Cortex-A) are present in the following ARM::CMS |File/Folder |Content | |--------------------------------|------------------------------------------------------------------------| |\b CMSIS\\Documentation\\Core_A | This documentation | -|\b CMSIS\\Core_A\\Include | CMSIS-Core (Cortex-A) header files (for example core_ca.h, etc.) | +|\b CMSIS\\Core\\Include | CMSIS-Core (Cortex-A) header files (for example core_ca5.h, etc.) | |\b Device | \ref using_ARM_pg "Arm reference implementations" of Cortex-A devices | |\b Device\\\_Template_Vendor | \ref templates_pg for extension by silicon vendors | diff --git a/CMSIS/DoxyGen/Core_A/src/Template.txt b/CMSIS/DoxyGen/Core_A/src/Template.txt index 7c784cfe9..c4669ccc6 100644 --- a/CMSIS/DoxyGen/Core_A/src/Template.txt +++ b/CMSIS/DoxyGen/Core_A/src/Template.txt @@ -6,6 +6,7 @@ Arm supplies CMSIS-Core device template files for the all supported Cortex-A processors and various compiler vendors. Refer to the list of \ref tested_tools_sec for compliance. + These CMSIS-Core device template files include the following: - Register names of the Core Peripherals and names of the Core Exception Vectors. - Functions to access core peripherals, cache, MMU and special CPU instructions @@ -15,26 +16,32 @@ The detailed file structure of the CMSIS-Core device templates is shown in the f -\section CMSIS_Processor_files CMSIS-Core Processor Files +\section CMSIS_Processor_files CMSIS-Core Processor Files -The CMSIS-Core processor files provided by Arm are in the directory .\\CMSIS\\Core_A\\Include. These header files define all processor specific attributes do not need any modifications. 
+The CMSIS-Core processor files provided by Arm are in the directory .\\CMSIS\\Core\\Include. These header files define all processor specific attributes do not need any modifications. The core_<cpu>.h defines the core peripherals and provides helper functions that access the core registers. One file is available for each supported Cortex-A processor: Header File | Processor :----------------|:------------------------------ -core_ca.h | generics for all supported Cortex-A processors - +core_ca5.h | for the Cortex-A5 processor +core_ca7.h | for the Cortex-A7 processor +core_ca9.h | for the Cortex-A9 processor +core_ca35.h | for the Cortex-A35 processor +core_ca53.h | for the Cortex-A53 processor +core_ca57.h | for the Cortex-A57 processor \section device_examples Device Examples The CMSIS Software Pack defines several devices that are based on the various processors. The device related CMSIS-Core files are in the directory .\\Device\\ARM and include CMSIS-Core processor file explained before. The following sample devices are defined in the CMSIS-Pack description file ARM.CMSIS.pdsc: -Family | Device | Description -:------------------|:------------------|:--------------------------------- -ARM Cortex-A5 | ARMCA5 | Cortex-A5 based device -ARM Cortex-A7 | ARMCA7 | Cortex-A7 based device -ARM Cortex-A9 | ARMCA9 | Cortex-A9 based device - +Family | Device | Description +:-----------------|:------------------|:--------------------------------- +ARM Cortex-A5 | ARMCA5 | Cortex-A5 based device +ARM Cortex-A7 | ARMCA7 | Cortex-A7 based device +ARM Cortex-A9 | ARMCA9 | Cortex-A9 based device +ARM Cortex-A35 | ARMCA35 | Cortex-A35 based device +ARM Cortex-A53 | ARMCA53 | Cortex-A53 based device +ARM Cortex-A57 | ARMCA57 | Cortex-A57 based device \section template_files_sec Template Files @@ -53,7 +60,7 @@ Silicon vendors add to these template files the following information: .\\Device\\\_Template_Vendor\\Vendor\\Device_A\\Source\\ARM\\startup_Device.c Startup file template for Arm C/C++ Compiler. - + .\\Device\\\_Template_Vendor\\Vendor\\Device_A\\Source\\ARM\\Device.sct Linker scatter file template for Arm C/C++ Compiler. @@ -129,9 +136,10 @@ The device configuration of the template files is described in detail on the fol \page startup_c_pg Startup File startup_.c The \ref startup_c_pg contains: - - Exception vectors of the Cortex-A Processor with weak functions that implement default routines. - The reset handler which is executed after CPU reset and typically calls the \ref SystemInit function. - The setup values for the various stack pointers, i.e. per exceptional mode and main stack. + - Exception vectors of the Cortex-A Processor with weak functions that implement default routines. + - Interrupt vectors that are device specific with weak functions that implement default routines. The file exists for each supported toolchain and is the only tool-chain specific CMSIS file. @@ -238,7 +246,7 @@ If these \#defines are missing default values are used. Description - __CM0_REV + __CA5_REV 0x0000 0x0000 Core revision number ([15:8] revision number, [7:0] patch number) @@ -286,9 +294,9 @@ The following code exemplifies the configuration of the Cortex-A9 Processor and #define __GIC_PRESENT 1U /*!< GIC present */ #define __TIM_PRESENT 0U /*!< TIM not present */ #define __L2C_PRESENT 0U /*!< L2C not present */ -: -: -#include "core_ca.h" /* Cortex-A processor and core peripherals */ +. +. 
+#include "core_ca9.h" /* Cortex-A processor and core peripherals */ \endcode diff --git a/CMSIS/DoxyGen/Core_A/src/core_ca.txt b/CMSIS/DoxyGen/Core_A/src/arm7a.txt similarity index 99% rename from CMSIS/DoxyGen/Core_A/src/core_ca.txt rename to CMSIS/DoxyGen/Core_A/src/arm7a.txt index c4cbf51d4..95369cb8f 100644 --- a/CMSIS/DoxyGen/Core_A/src/core_ca.txt +++ b/CMSIS/DoxyGen/Core_A/src/arm7a.txt @@ -1,5 +1,5 @@ /**************************************************************************//** - * @file core_ca.txt + * @file arm7a.txt * @brief CMSIS Cortex-A Core Peripheral Access Layer Header File ******************************************************************************/ @@ -79,7 +79,7 @@ The registers in the various UARTs can now be referred in the user code as shown \section core_cmsis_pal_min_reqs Minimal Requirements \details - To access the peripheral registers and related function in a device, the files device.h and core_ca.h define as a minimum: + To access the peripheral registers and related function in a device, the files device.h and armv7a.h define as a minimum: \n\n - The Register Layout Typedef for each peripheral that defines all register names. RESERVED is used to introduce space into the structure for adjusting the addresses of diff --git a/README.md b/README.md index 24f4cec11..973b114c8 100644 --- a/README.md +++ b/README.md @@ -31,8 +31,7 @@ For a list of all CMSIS components refer to [**Introduction - CMSIS Components** Directory | Content :------------------- |:--------------------------------------------------------- -CMSIS/Core | CMSIS-Core(M) related files (for release) -CMSIS/Core_A | CMSIS-Core(A) related files (for release) +CMSIS/Core | CMSIS-Core(A/R/M) related files (for release) CMSIS/CoreValidation | Validation for Core(M) and Core(A) (NOT part of release) CMSIS/Driver | CMSIS-Driver API headers and template files CMSIS/RTOS2 | RTOS v2 related files (for Cortex-M & Armv8-M) diff --git a/gen_pack.sh b/gen_pack.sh index 6e491be74..e2ef35316 100755 --- a/gen_pack.sh +++ b/gen_pack.sh @@ -30,7 +30,6 @@ DEFAULT_ARGS=(-c "v") # PACK_DIRS=" CMSIS/Core - CMSIS/Core_A CMSIS/Documentation CMSIS/Driver CMSIS/RTOS2