@@ -1,25 +1,39 @@
/**************************************************************************//**
 * @file core_cmInstr.h
 * @brief CMSIS Cortex-M Core Instruction Access Header File
 * @version V3.01
 * @date 06. March 2012
 * @version V3.20
 * @date 05. March 2013
 *
 * @note
 * Copyright (C) 2009-2012 ARM Limited. All rights reserved.
 *
 * @par
 * ARM Limited (ARM) is supplying this software for use with Cortex-M
 * processor based microcontrollers. This file can be freely distributed
 * within development tools that are supporting such ARM based processors.
 *
 * @par
 * THIS SOFTWARE IS PROVIDED "AS IS". NO WARRANTIES, WHETHER EXPRESS, IMPLIED
 * OR STATUTORY, INCLUDING, BUT NOT LIMITED TO, IMPLIED WARRANTIES OF
 * MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE APPLY TO THIS SOFTWARE.
 * ARM SHALL NOT, IN ANY CIRCUMSTANCES, BE LIABLE FOR SPECIAL, INCIDENTAL, OR
 * CONSEQUENTIAL DAMAGES, FOR ANY REASON WHATSOEVER.
 *
 ******************************************************************************/
/* Copyright (c) 2009 - 2013 ARM LIMITED

   All rights reserved.
   Redistribution and use in source and binary forms, with or without
   modification, are permitted provided that the following conditions are met:
   - Redistributions of source code must retain the above copyright
     notice, this list of conditions and the following disclaimer.
   - Redistributions in binary form must reproduce the above copyright
     notice, this list of conditions and the following disclaimer in the
     documentation and/or other materials provided with the distribution.
   - Neither the name of ARM nor the names of its contributors may be used
     to endorse or promote products derived from this software without
     specific prior written permission.
   *
   THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
   AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
   IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
   ARE DISCLAIMED. IN NO EVENT SHALL COPYRIGHT HOLDERS AND CONTRIBUTORS BE
   LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
   CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
   SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS
   INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN
   CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
   ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE
   POSSIBILITY OF SUCH DAMAGE.
   ---------------------------------------------------------------------------*/


#ifndef __CORE_CMINSTR_H
#define __CORE_CMINSTR_H
@@ -111,12 +125,13 @@
    \param [in] value Value to reverse
    \return Reversed value
 */
#ifndef __NO_EMBEDDED_ASM
__attribute__((section(".rev16_text"))) __STATIC_INLINE __ASM uint32_t __REV16(uint32_t value)
{
  rev16 r0, r0
  bx lr
}

#endif

/** \brief Reverse byte order in signed short value
@@ -125,11 +140,13 @@ __attribute__((section(".rev16_text"))) __STATIC_INLINE __ASM uint32_t __REV16(u
    \param [in] value Value to reverse
    \return Reversed value
 */
#ifndef __NO_EMBEDDED_ASM
__attribute__((section(".revsh_text"))) __STATIC_INLINE __ASM int32_t __REVSH(int32_t value)
{
  revsh r0, r0
  bx lr
}
#endif
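
A quick sketch of what the two byte-reverse intrinsics above compute (hypothetical test values, assuming the CMSIS core header has already been pulled in through the device header): __REV16 swaps the bytes within each halfword of a 32-bit word, while __REVSH swaps the bytes of the low halfword and sign-extends the result.

    static void byte_reverse_demo(void)
    {
      uint32_t u = __REV16(0x12345678U);      /* 0x34127856U: bytes swapped within each halfword */
      int32_t  s = __REVSH((int32_t)0x0080);  /* 0x0080 -> 0x8000, sign-extended to (int32_t)0xFFFF8000 */
      (void)u;
      (void)s;
    }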


/** \brief Rotate Right in unsigned value (32 bit)
@@ -143,6 +160,17 @@ __attribute__((section(".revsh_text"))) __STATIC_INLINE __ASM int32_t __REVSH(in
#define __ROR __ror


/** \brief Breakpoint

    This function causes the processor to enter Debug state.
    Debug tools can use this to investigate system state when the instruction at a particular address is reached.

    \param [in] value is ignored by the processor.
    If required, a debugger can use it to store additional information about the breakpoint.
 */
#define __BKPT(value) __breakpoint(value)
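
In this ARM compiler branch, __ROR and __BKPT simply forward to the armcc intrinsics __ror and __breakpoint. A minimal usage sketch (hypothetical helper, illustrative values only):

    static uint32_t ror_bkpt_demo(uint32_t x)
    {
      uint32_t r = __ROR(x, 8);   /* rotate right by 8, e.g. __ROR(0x12345678U, 8) == 0x78123456U */
      if (r == 0U)
      {
        __BKPT(0);                /* halts in Debug state; the operand is ignored by the core */
      }
      return r;
    }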


#if (__CORTEX_M >= 0x03)

/** \brief Reverse bit order of value
@@ -279,6 +307,17 @@ __attribute__((section(".revsh_text"))) __STATIC_INLINE __ASM int32_t __REVSH(in
#elif defined ( __GNUC__ ) /*------------------ GNU Compiler ---------------------*/
/* GNU gcc specific functions */

/* Define macros for porting to both thumb1 and thumb2.
 * For thumb1, use low register (r0-r7), specified by constrant "l"
 * Otherwise, use general registers, specified by constrant "r" */
#if defined (__thumb__) && !defined (__thumb2__)
#define __CMSIS_GCC_OUT_REG(r) "=l" (r)
#define __CMSIS_GCC_USE_REG(r) "l" (r)
#else
#define __CMSIS_GCC_OUT_REG(r) "=r" (r)
#define __CMSIS_GCC_USE_REG(r) "r" (r)
#endif
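
To make the effect of this macro pair concrete: on a Thumb-1-only core the inline-assembly operands are pinned to the low registers that the 16-bit encodings can address, otherwise any register is allowed. A hedged illustration (hypothetical helper, mirroring how the intrinsics below use the macros):

    static uint32_t constraint_demo(uint32_t in)
    {
      uint32_t out;
      __ASM volatile ("rev %0, %1" : __CMSIS_GCC_OUT_REG (out) : __CMSIS_GCC_USE_REG (in) );
      /* Thumb-1 build (e.g. Cortex-M0):  constraints become "=l" (out) / "l" (in) */
      /* Thumb-2 build (e.g. Cortex-M3+): constraints become "=r" (out) / "r" (in) */
      return out;
    }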

/** \brief No Operation

    No Operation does nothing. This instruction can be used for code alignment purposes.
@@ -364,10 +403,14 @@ __attribute__( ( always_inline ) ) __STATIC_INLINE void __DMB(void)
 */
__attribute__( ( always_inline ) ) __STATIC_INLINE uint32_t __REV(uint32_t value)
{
#if (__GNUC__ > 4) || (__GNUC__ == 4 && __GNUC_MINOR__ >= 5)
  return __builtin_bswap32(value);
#else
  uint32_t result;

  __ASM volatile ("rev %0, %1" : "=r" (result) : "r" (value) );
  __ASM volatile ("rev %0, %1" : __CMSIS_GCC_OUT_REG (result) : __CMSIS_GCC_USE_REG (value) );
  return(result);
#endif
}
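
A small usage sketch for __REV (hypothetical helper, assuming a little-endian Cortex-M target): a full 32-bit byte swap is the usual step when converting between host and big-endian ("network") byte order.

    static uint32_t to_big_endian32(uint32_t host_value)
    {
      return __REV(host_value);   /* e.g. __REV(0x12345678U) == 0x78563412U */
    }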
@@ -382,7 +425,7 @@ __attribute__( ( always_inline ) ) __STATIC_INLINE uint32_t __REV16(uint32_t val
{
  uint32_t result;

  __ASM volatile ("rev16 %0, %1" : "=r" (result) : "r" (value) );
  __ASM volatile ("rev16 %0, %1" : __CMSIS_GCC_OUT_REG (result) : __CMSIS_GCC_USE_REG (value) );
  return(result);
}
@@ -396,10 +439,14 @@ __attribute__( ( always_inline ) ) __STATIC_INLINE uint32_t __REV16(uint32_t val
 */
__attribute__( ( always_inline ) ) __STATIC_INLINE int32_t __REVSH(int32_t value)
{
#if (__GNUC__ > 4) || (__GNUC__ == 4 && __GNUC_MINOR__ >= 8)
  return (short)__builtin_bswap16(value);
#else
  uint32_t result;

  __ASM volatile ("revsh %0, %1" : "=r" (result) : "r" (value) );
  __ASM volatile ("revsh %0, %1" : __CMSIS_GCC_OUT_REG (result) : __CMSIS_GCC_USE_REG (value) );
  return(result);
#endif
}
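
Note the (short) cast on the builtin path: __builtin_bswap16 returns an unsigned 16-bit value, and the cast is what reproduces the sign extension that the revsh instruction performs. One hypothetical value for illustration:

    int32_t s = __REVSH(0x0080);   /* low halfword 0x0080 -> 0x8000, sign-extended to (int32_t)0xFFFF8000 */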
@@ -413,12 +460,21 @@ __attribute__( ( always_inline ) ) __STATIC_INLINE int32_t __REVSH(int32_t value
 */
__attribute__( ( always_inline ) ) __STATIC_INLINE uint32_t __ROR(uint32_t op1, uint32_t op2)
{

  __ASM volatile ("ror %0, %0, %1" : "+r" (op1) : "r" (op2) );
  return(op1);
  return (op1 >> op2) | (op1 << (32 - op2));
}
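
The rotate is now written as plain C shifts, which compilers typically recognize and fold back into a single rotate instruction. One worked value, plus a caveat: for op2 equal to 0 the expression shifts a 32-bit value by 32, which C leaves undefined, so callers are expected to pass rotate counts in the range 1..31.

    uint32_t r = __ROR(0x80000001U, 1);   /* bit 0 rotates into bit 31: result 0xC0000000U */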
/** \brief Breakpoint

    This function causes the processor to enter Debug state.
    Debug tools can use this to investigate system state when the instruction at a particular address is reached.

    \param [in] value is ignored by the processor.
    If required, a debugger can use it to store additional information about the breakpoint.
 */
#define __BKPT(value) __ASM volatile ("bkpt "#value)
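
Because the macro pastes its argument straight into the assembly string with the # operator, value must be a plain compile-time token (typically an integer literal); a runtime variable would not stringize into something the assembler accepts. A hypothetical use:

    if (fault_detected)          /* 'fault_detected' is an illustrative flag, not part of the header */
    {
      __BKPT(0x42);              /* expands to __ASM volatile ("bkpt 0x42") */
    }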


#if (__CORTEX_M >= 0x03)

/** \brief Reverse bit order of value
@@ -446,9 +502,16 @@ __attribute__( ( always_inline ) ) __STATIC_INLINE uint32_t __RBIT(uint32_t valu
 */
__attribute__( ( always_inline ) ) __STATIC_INLINE uint8_t __LDREXB(volatile uint8_t *addr)
{
  uint8_t result;
  uint32_t result;

  __ASM volatile ("ldrexb %0, [%1]" : "=r" (result) : "r" (addr) );
#if (__GNUC__ > 4) || (__GNUC__ == 4 && __GNUC_MINOR__ >= 8)
  __ASM volatile ("ldrexb %0, %1" : "=r" (result) : "Q" (*addr) );
#else
  /* Prior to GCC 4.8, "Q" will be expanded to [rx, #0] which is not
     accepted by assembler. So has to use following less efficient pattern.
  */
  __ASM volatile ("ldrexb %0, [%1]" : "=r" (result) : "r" (addr) : "memory" );
#endif
  return(result);
}
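
Spelling out the version check above: with "Q", GCC treats *addr itself as the memory operand and, from 4.8 on, prints it as a plain base-register reference, whereas older releases printed it with an explicit zero offset; the exclusive loads have no immediate-offset encoding, so that form does not assemble. The fallback therefore passes the pointer through an "r" operand and adds a "memory" clobber so the compiler will not cache the pointed-to value across the asm. Roughly, the generated operand looks like this (illustrative, register numbers arbitrary):

    /* GCC >= 4.8 : ldrexb r0, [r1]        -- accepted                 */
    /* GCC <  4.8 : ldrexb r0, [r1, #0]    -- rejected, no offset form */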
@@ -462,9 +525,16 @@ __attribute__( ( always_inline ) ) __STATIC_INLINE uint8_t __LDREXB(volatile uin
 */
__attribute__( ( always_inline ) ) __STATIC_INLINE uint16_t __LDREXH(volatile uint16_t *addr)
{
  uint16_t result;
  uint32_t result;

  __ASM volatile ("ldrexh %0, [%1]" : "=r" (result) : "r" (addr) );
#if (__GNUC__ > 4) || (__GNUC__ == 4 && __GNUC_MINOR__ >= 8)
  __ASM volatile ("ldrexh %0, %1" : "=r" (result) : "Q" (*addr) );
#else
  /* Prior to GCC 4.8, "Q" will be expanded to [rx, #0] which is not
     accepted by assembler. So has to use following less efficient pattern.
  */
  __ASM volatile ("ldrexh %0, [%1]" : "=r" (result) : "r" (addr) : "memory" );
#endif
  return(result);
}
@@ -480,7 +550,7 @@ __attribute__( ( always_inline ) ) __STATIC_INLINE uint32_t __LDREXW(volatile ui
{
  uint32_t result;

  __ASM volatile ("ldrex %0, [%1]" : "=r" (result) : "r" (addr) );
  __ASM volatile ("ldrex %0, %1" : "=r" (result) : "Q" (*addr) );
  return(result);
}
@@ -498,7 +568,7 @@ __attribute__( ( always_inline ) ) __STATIC_INLINE uint32_t __STREXB(uint8_t val
{
  uint32_t result;

  __ASM volatile ("strexb %0, %2, [%1]" : "=&r" (result) : "r" (addr), "r" (value) );
  __ASM volatile ("strexb %0, %2, %1" : "=&r" (result), "=Q" (*addr) : "r" (value) );
  return(result);
}
@@ -516,7 +586,7 @@ __attribute__( ( always_inline ) ) __STATIC_INLINE uint32_t __STREXH(uint16_t va
{
  uint32_t result;

  __ASM volatile ("strexh %0, %2, [%1]" : "=&r" (result) : "r" (addr), "r" (value) );
  __ASM volatile ("strexh %0, %2, %1" : "=&r" (result), "=Q" (*addr) : "r" (value) );
  return(result);
}
@@ -534,7 +604,7 @@ __attribute__( ( always_inline ) ) __STATIC_INLINE uint32_t __STREXW(uint32_t va
{
  uint32_t result;

  __ASM volatile ("strex %0, %2, [%1]" : "=&r" (result) : "r" (addr), "r" (value) );
  __ASM volatile ("strex %0, %2, %1" : "=&r" (result), "=Q" (*addr) : "r" (value) );
  return(result);
}
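
The exclusive load/store pairs above are normally used in a retry loop: STREX writes only if the exclusive monitor still holds the reservation set by the matching LDREX, and returns 0 on success or 1 if the store must be retried. A minimal sketch (hypothetical helper, Cortex-M3 or later):

    static void atomic_increment(volatile uint32_t *counter)
    {
      uint32_t v;
      do
      {
        v = __LDREXW(counter);               /* load and set the exclusive reservation */
        v++;
      } while (__STREXW(v, counter) != 0U);  /* 0 = stored, 1 = lost reservation, retry */
    }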
@@ -546,7 +616,7 @@ __attribute__( ( always_inline ) ) __STATIC_INLINE uint32_t __STREXW(uint32_t va
 */
__attribute__( ( always_inline ) ) __STATIC_INLINE void __CLREX(void)
{
  __ASM volatile ("clrex");
  __ASM volatile ("clrex" ::: "memory");
}
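
The added "memory" clobber keeps the compiler from caching or reordering memory accesses across the clrex. A typical place to call it is a context-switch or exception-return path, so that a reservation left behind by an interrupted LDREX cannot let an unrelated later STREX succeed; a hedged sketch:

    void on_context_switch_hook(void)   /* hypothetical scheduler hook */
    {
      __CLREX();                        /* drop any outstanding exclusive reservation */
    }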
@@ -591,7 +661,7 @@ __attribute__( ( always_inline ) ) __STATIC_INLINE void __CLREX(void)
 */
__attribute__( ( always_inline ) ) __STATIC_INLINE uint8_t __CLZ(uint32_t value)
{
  uint8_t result;
  uint32_t result;

  __ASM volatile ("clz %0, %1" : "=r" (result) : "r" (value) );
  return(result);