// SPDX-License-Identifier: GPL-2.0-or-later
//
// Copyright (C) 2006, 2019 Texas Instruments.
//
// Interrupt handler for DaVinci boards.

#include <linux/kernel.h>
#include <linux/init.h>
#include <linux/interrupt.h>
#include <linux/irq.h>
#include <linux/irqchip/irq-davinci-aintc.h>
#include <linux/io.h>
#include <linux/irqdomain.h>

#include <asm/exception.h>

#define DAVINCI_AINTC_FIQ_REG0		0x00
#define DAVINCI_AINTC_FIQ_REG1		0x04
#define DAVINCI_AINTC_IRQ_REG0		0x08
#define DAVINCI_AINTC_IRQ_REG1		0x0c
#define DAVINCI_AINTC_IRQ_IRQENTRY	0x14
#define DAVINCI_AINTC_IRQ_ENT_REG0	0x18
#define DAVINCI_AINTC_IRQ_ENT_REG1	0x1c
#define DAVINCI_AINTC_IRQ_INCTL_REG	0x20
#define DAVINCI_AINTC_IRQ_EABASE_REG	0x24
#define DAVINCI_AINTC_IRQ_INTPRI0_REG	0x30
#define DAVINCI_AINTC_IRQ_INTPRI7_REG	0x4c

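/* Driver state: MMIO base of the controller and the legacy irqdomain. */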
static void __iomem *davinci_aintc_base;
static struct irq_domain *davinci_aintc_irq_domain;

static inline void davinci_aintc_writel(unsigned long value, int offset)
{
	writel_relaxed(value, davinci_aintc_base + offset);
}

static inline unsigned long davinci_aintc_readl(int offset)
{
	return readl_relaxed(davinci_aintc_base + offset);
}

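/*
 * Set up one generic irq chip covering a bank of 32 interrupts: ack is a
 * write-one-to-clear into the bank's status register, mask/unmask clear
 * and set the corresponding bit in the bank's enable register. The register
 * offsets below are relative to the per-bank base passed in by the caller.
 */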
static __init void
davinci_aintc_setup_gc(void __iomem *base,
		       unsigned int irq_start, unsigned int num)
{
	struct irq_chip_generic *gc;
	struct irq_chip_type *ct;

	gc = irq_get_domain_generic_chip(davinci_aintc_irq_domain, irq_start);
	gc->reg_base = base;
	gc->irq_base = irq_start;

	ct = gc->chip_types;
	ct->chip.irq_ack = irq_gc_ack_set_bit;
	ct->chip.irq_mask = irq_gc_mask_clr_bit;
	ct->chip.irq_unmask = irq_gc_mask_set_bit;

	ct->regs.ack = DAVINCI_AINTC_IRQ_REG0;
	ct->regs.mask = DAVINCI_AINTC_IRQ_ENT_REG0;
	irq_setup_generic_chip(gc, IRQ_MSK(num), IRQ_GC_INIT_MASK_CACHE,
			       IRQ_NOREQUEST | IRQ_NOPROBE, 0);
}

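/*
 * Top-level IRQ dispatcher: read the currently active interrupt from the
 * IRQENTRY register and hand it to the irqdomain for handling.
 */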
static asmlinkage void __exception_irq_entry
davinci_aintc_handle_irq(struct pt_regs *regs)
{
	int irqnr = davinci_aintc_readl(DAVINCI_AINTC_IRQ_IRQENTRY);

	/*
	 * IRQENTRY holds the byte offset of the active interrupt's entry
	 * in the hardware vector table, i.e. (irq + 1) * 4 per the entry
	 * vector index formula in section 8.3.3 of the manual, so undo
	 * that encoding to recover the interrupt number.
	 */
	irqnr >>= 2;
	irqnr -= 1;

	handle_domain_irq(davinci_aintc_irq_domain, irqnr, regs);
}

/* ARM Interrupt Controller Initialization */
void __init davinci_aintc_init(const struct davinci_aintc_config *config)
{
	unsigned int irq_off, reg_off, prio, shift;
	struct resource *req;
	int ret, irq_base;
	const u8 *prios;

	req = request_mem_region(config->reg.start,
				 resource_size(&config->reg),
				 "davinci-cp-intc");
	if (!req) {
		pr_err("%s: register range busy\n", __func__);
		return;
	}

	davinci_aintc_base = ioremap(config->reg.start,
				     resource_size(&config->reg));
	if (!davinci_aintc_base) {
		pr_err("%s: unable to ioremap register range\n", __func__);
		return;
	}

	/* Clear all interrupt requests */
	davinci_aintc_writel(~0x0, DAVINCI_AINTC_FIQ_REG0);
	davinci_aintc_writel(~0x0, DAVINCI_AINTC_FIQ_REG1);
	davinci_aintc_writel(~0x0, DAVINCI_AINTC_IRQ_REG0);
	davinci_aintc_writel(~0x0, DAVINCI_AINTC_IRQ_REG1);

	/* Disable all interrupts */
	davinci_aintc_writel(0x0, DAVINCI_AINTC_IRQ_ENT_REG0);
	davinci_aintc_writel(0x0, DAVINCI_AINTC_IRQ_ENT_REG1);

	/* Interrupts disabled immediately, IRQ entry reflects all */
	davinci_aintc_writel(0x0, DAVINCI_AINTC_IRQ_INCTL_REG);

	/* we don't use the hardware vector table, just its entry addresses */
	davinci_aintc_writel(0, DAVINCI_AINTC_IRQ_EABASE_REG);

	/* Clear all interrupt requests */
	davinci_aintc_writel(~0x0, DAVINCI_AINTC_FIQ_REG0);
	davinci_aintc_writel(~0x0, DAVINCI_AINTC_FIQ_REG1);
	davinci_aintc_writel(~0x0, DAVINCI_AINTC_IRQ_REG0);
	davinci_aintc_writel(~0x0, DAVINCI_AINTC_IRQ_REG1);

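	/*
	 * Each INTPRI register packs eight 3-bit priority values, one
	 * nibble per interrupt, so INTPRI0..INTPRI7 cover 64 interrupts.
	 * Program them from the board-supplied priority table.
	 */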
	prios = config->prios;
	for (reg_off = DAVINCI_AINTC_IRQ_INTPRI0_REG;
	     reg_off <= DAVINCI_AINTC_IRQ_INTPRI7_REG; reg_off += 4) {
		for (shift = 0, prio = 0; shift < 32; shift += 4, prios++)
			prio |= (*prios & 0x07) << shift;
		davinci_aintc_writel(prio, reg_off);
	}

	irq_base = irq_alloc_descs(-1, 0, config->num_irqs, 0);
	if (irq_base < 0) {
		pr_err("%s: unable to allocate interrupt descriptors: %d\n",
		       __func__, irq_base);
		return;
	}

	davinci_aintc_irq_domain = irq_domain_add_legacy(NULL,
						config->num_irqs, irq_base, 0,
						&irq_domain_simple_ops, NULL);
	if (!davinci_aintc_irq_domain) {
		pr_err("%s: unable to create interrupt domain\n", __func__);
		return;
	}

	ret = irq_alloc_domain_generic_chips(davinci_aintc_irq_domain, 32, 1,
					     "AINTC", handle_edge_irq,
					     IRQ_NOREQUEST | IRQ_NOPROBE, 0, 0);
	if (ret) {
		pr_err("%s: unable to allocate generic irq chips for domain\n",
		       __func__);
		return;
	}

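	/*
	 * Hook up one generic chip per bank of 32 interrupts. The status
	 * and enable registers of each successive bank sit 4 bytes above
	 * those of the previous one (IRQ_REG1/ENT_REG1 follow
	 * IRQ_REG0/ENT_REG0), hence the 0x04 stride on the per-bank base.
	 */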
	for (irq_off = 0, reg_off = 0;
	     irq_off < config->num_irqs;
	     irq_off += 32, reg_off += 0x04)
		davinci_aintc_setup_gc(davinci_aintc_base + reg_off,
				       irq_base + irq_off, 32);

	set_handle_irq(davinci_aintc_handle_irq);
}
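
/*
 * Example (hypothetical, for illustration only): a board file would call
 * davinci_aintc_init() from its ->init_irq() hook with a config describing
 * the controller's register window, interrupt count and per-interrupt
 * priority table. The field names match the accesses above; the base
 * address, size and priorities below are purely illustrative:
 *
 *	static u8 board_irq_prios[64] = { [0 ... 63] = 7 };
 *
 *	static struct davinci_aintc_config board_aintc_config = {
 *		.reg		= DEFINE_RES_MEM(0x01c48000, SZ_1K),
 *		.num_irqs	= 64,
 *		.prios		= board_irq_prios,
 *	};
 *
 *	davinci_aintc_init(&board_aintc_config);
 */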