// SPDX-License-Identifier: GPL-2.0-or-later
/*
 * Zynq power management
 *
 * Copyright (C) 2012 - 2014 Xilinx
 *
 * Sören Brinkmann <soren.brinkmann@xilinx.com>
 */

#include <linux/io.h>
#include <linux/of_address.h>
#include <linux/of_device.h>
#include "common.h"

/* register offsets */
#define DDRC_CTRL_REG1_OFFS		0x60
#define DDRC_DRAM_PARAM_REG3_OFFS	0x20

/* bitfields */
#define DDRC_CLOCKSTOP_MASK	BIT(23)
#define DDRC_SELFREFRESH_MASK	BIT(12)
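
/*
 * Note: DDRC_CTRL_REG1_OFFS and DDRC_SELFREFRESH_MASK (DDR self-refresh
 * control, going by the macro names) are not referenced by the clock-stop
 * setup below.
 */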

/* DDR controller (DDRC) register base, mapped in zynq_pm_late_init() */
static void __iomem *ddrc_base;

/**
 * zynq_pm_ioremap() - Create IO mappings
 * @comp:	DT compatible string
 * Return: Pointer to the mapped memory or NULL.
 *
 * Remap the memory region for a compatible DT node.
 */
static void __iomem *zynq_pm_ioremap(const char *comp)
{
	struct device_node *np;
	void __iomem *base = NULL;

	np = of_find_compatible_node(NULL, NULL, comp);
	if (np) {
		base = of_iomap(np, 0);
		of_node_put(np);
	} else {
		pr_warn("%s: no compatible node found for '%s'\n", __func__,
			comp);
	}

	return base;
}

/**
 * zynq_pm_late_init() - Power management init
 *
 * Initialization of power management related features and infrastructure.
 */
void __init zynq_pm_late_init(void)
{
	u32 reg;

	ddrc_base = zynq_pm_ioremap("xlnx,zynq-ddrc-a05");
	if (!ddrc_base) {
		pr_warn("%s: Unable to map DDRC IO memory.\n", __func__);
	} else {
		/*
		 * Enable DDRC clock stop feature. The HW takes care of
		 * entering/exiting the correct mode depending
		 * on activity state.
		 */
		reg = readl(ddrc_base + DDRC_DRAM_PARAM_REG3_OFFS);
		reg |= DDRC_CLOCKSTOP_MASK;
		writel(reg, ddrc_base + DDRC_DRAM_PARAM_REG3_OFFS);
	}
}
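
/*
 * Illustrative sketch only, not part of this file: zynq_pm_late_init() is
 * expected to be invoked from the SoC's late init hook, along the lines of
 *
 *	static void __init zynq_init_late(void)
 *	{
 *		zynq_core_pm_init();
 *		zynq_pm_late_init();
 *	}
 *
 * with zynq_init_late() wired up through the machine descriptor's
 * .init_late callback (see arch/arm/mach-zynq/common.c). The call to
 * zynq_core_pm_init() is an assumption here, not something defined in
 * this file.
 */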