/*
 * Copyright (c) 2013-2014 Wind River Systems, Inc.
 *
 * SPDX-License-Identifier: Apache-2.0
 */

/**
 * @file
 * @brief ARM Cortex-M wrapper for ISRs with parameter
 *
 * Wrapper installed in vector table for handling dynamic interrupts that accept
 * a parameter.
 */
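/*
 * Illustrative usage sketch (not part of this file): an ISR that takes a
 * parameter is registered from C, for example with IRQ_CONNECT() or its
 * dynamic counterpart, which is what populates _sw_isr_table with the
 * argument/handler pairs this wrapper consumes. The names my_isr, my_dev,
 * MY_IRQ and MY_PRIO are hypothetical, and the exact macro signature is
 * assumed from Zephyr's IRQ API of this era.
 *
 *	void my_isr(void *arg)
 *	{
 *		struct device *dev = arg;
 *
 *		... service the device ...
 *	}
 *
 *	IRQ_CONNECT(MY_IRQ, MY_PRIO, my_isr, my_dev, 0);
 *	irq_enable(MY_IRQ);
 */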
#include <offsets_short.h>
#include <toolchain.h>
#include <linker/sections.h>
#include <sw_isr_table.h>
#include <kernel_structs.h>
#include <arch/cpu.h>

_ASM_FILE_PROLOGUE

GDATA(_sw_isr_table)

GTEXT(_isr_wrapper)
GTEXT(_IntExit)

/**
 *
 * @brief Wrapper around ISRs when inserted in software ISR table
 *
 * When inserted in the vector table, _isr_wrapper() demuxes the ISR table using
 * the running interrupt number as the index, and invokes the registered ISR
 * with its corresponding argument. When returning from the ISR, it determines
 * if a context switch needs to happen (see documentation for __pendsv()) and
 * pends the PendSV exception if so: the latter will perform the context switch
 * itself.
 *
 * @return N/A
 */
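/*
 * Roughly equivalent C for the demux performed below (illustrative only;
 * it assumes the two-word argument/handler entry layout that sw_isr_table.h
 * declares for _sw_isr_table, i.e. 8 bytes per entry):
 *
 *	unsigned int irq = IPSR - 16;	(exception numbers 16 and up are IRQs)
 *	struct _isr_table_entry *entry = &_sw_isr_table[irq];
 *	entry->isr(entry->arg);
 */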
SECTION_FUNC(TEXT, _isr_wrapper)

	push {lr} /* lr is now the first item on the stack */

#ifdef CONFIG_EXECUTION_BENCHMARKING
	bl read_timer_start_of_isr
#endif

#ifdef CONFIG_KERNEL_EVENT_LOGGER_INTERRUPT
	bl _sys_k_event_logger_interrupt
#endif

#ifdef CONFIG_KERNEL_EVENT_LOGGER_SLEEP
	bl _sys_k_event_logger_exit_sleep
#endif

#ifdef CONFIG_SYS_POWER_MANAGEMENT
	/*
	 * All interrupts are disabled when handling idle wakeup. For tickless
	 * idle, this ensures that the calculation and programming of the device
	 * for the next timer deadline is not interrupted. For non-tickless idle,
	 * this ensures that the clearing of the kernel idle state is not
	 * interrupted. In each case, _sys_power_save_idle_exit is called with
	 * interrupts disabled.
	 */
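	/*
	 * In C terms, the wakeup handling below is roughly (sketch only,
	 * assuming the _kernel.idle field addressed through
	 * _kernel_offset_to_idle and a one-argument
	 * _sys_power_save_idle_exit()):
	 *
	 *	if (_kernel.idle) {
	 *		s32_t ticks = _kernel.idle;
	 *
	 *		_kernel.idle = 0;
	 *		_sys_power_save_idle_exit(ticks);
	 *	}
	 */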
	cpsid i /* PRIMASK = 1 */

	/* is this a wakeup from idle ? */
	ldr r2, =_kernel
	/* requested idle duration, in ticks */
	ldr r0, [r2, #_kernel_offset_to_idle]
	cmp r0, #0

#if defined(CONFIG_ARMV6_M)
	beq _idle_state_cleared
	movs.n r1, #0
	/* clear kernel idle state */
	str r1, [r2, #_kernel_offset_to_idle]
	blx _sys_power_save_idle_exit
_idle_state_cleared:

#elif defined(CONFIG_ARMV7_M)
	ittt ne
	movne r1, #0
	/* clear kernel idle state */
	strne r1, [r2, #_kernel_offset_to_idle]
	blxne _sys_power_save_idle_exit
#else
#error Unknown ARM architecture
#endif /* CONFIG_ARMV6_M */

	cpsie i /* re-enable interrupts (PRIMASK = 0) */
#endif

	mrs r0, IPSR /* get exception number */
#if defined(CONFIG_ARMV6_M)
	ldr r1, =16
	subs r0, r1 /* get IRQ number */
	lsls r0, #3 /* each table entry is 8 bytes wide */
#elif defined(CONFIG_ARMV7_M)
	sub r0, r0, #16 /* get IRQ number */
	lsl r0, r0, #3 /* each table entry is 8 bytes wide */
#else
#error Unknown ARM architecture
#endif /* CONFIG_ARMV6_M */
	ldr r1, =_sw_isr_table
	add r1, r1, r0 /* table entry: ISRs must have their LSB set to stay
			* in thumb mode */

	ldm r1!,{r0,r3} /* arg in r0, ISR in r3 */
#ifdef CONFIG_EXECUTION_BENCHMARKING
	push {r0-r3} /* save r0-r3: ISR argument and address must survive the call */
	push {lr}
	bl read_timer_end_of_isr
#if defined(CONFIG_ARMV6_M)
	pop {r3}
	mov lr,r3
#else
	pop {lr}
#endif
	pop {r0-r3} /* restore r0-r3 */
#endif
	blx r3 /* call ISR */

#if defined(CONFIG_ARMV6_M)
	pop {r3}
	mov lr, r3
#elif defined(CONFIG_ARMV7_M)
	pop {lr}
#else
#error Unknown ARM architecture
#endif /* CONFIG_ARMV6_M */

	/* exception return is done in _IntExit() */
	b _IntExit