^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1) // SPDX-License-Identifier: (GPL-2.0 OR MIT)
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2) /*
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 3) * Copyright 2018 NXP.
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 4) *
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 5) * This driver supports the SCCG plls found in the imx8m SOCs
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 6) *
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 7) * Documentation for this SCCG pll can be found at:
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 8) * https://www.nxp.com/docs/en/reference-manual/IMX8MDQLQRM.pdf#page=834
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 9) */
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 10)
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 11) #include <linux/clk-provider.h>
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 12) #include <linux/err.h>
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 13) #include <linux/export.h>
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 14) #include <linux/io.h>
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 15) #include <linux/iopoll.h>
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 16) #include <linux/slab.h>
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 17) #include <linux/bitfield.h>
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 18)
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 19) #include "clk.h"
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 20)
/* PLL CFGs (register offsets from the PLL MMIO base) */
#define PLL_CFG0 0x0
#define PLL_CFG1 0x4
#define PLL_CFG2 0x8

/* Divider fields, read/written in PLL_CFG2 (see recalc_rate/set_rate) */
#define PLL_DIVF1_MASK GENMASK(18, 13)
#define PLL_DIVF2_MASK GENMASK(12, 7)
#define PLL_DIVR1_MASK GENMASK(27, 25)
#define PLL_DIVR2_MASK GENMASK(24, 19)
#define PLL_DIVQ_MASK GENMASK(6, 1)
/*
 * NOTE(review): this bit range overlaps PLL_DIVQ_MASK, so it presumably
 * belongs to a different CFG register — confirm against the i.MX8M RM.
 */
#define PLL_REF_MASK GENMASK(2, 0)

/* Status/control bits in PLL_CFG0 */
#define PLL_LOCK_MASK BIT(31)
#define PLL_PD_MASK BIT(7)

/* These are the specification limits for the SSCG PLL */
#define PLL_REF_MIN_FREQ 25000000UL
#define PLL_REF_MAX_FREQ 235000000UL

#define PLL_STAGE1_MIN_FREQ 1600000000UL
#define PLL_STAGE1_MAX_FREQ 2400000000UL

#define PLL_STAGE1_REF_MIN_FREQ 25000000UL
#define PLL_STAGE1_REF_MAX_FREQ 54000000UL

#define PLL_STAGE2_MIN_FREQ 1200000000UL
#define PLL_STAGE2_MAX_FREQ 2400000000UL

#define PLL_STAGE2_REF_MIN_FREQ 54000000UL
#define PLL_STAGE2_REF_MAX_FREQ 75000000UL

#define PLL_OUT_MIN_FREQ 20000000UL
#define PLL_OUT_MAX_FREQ 1200000000UL

/* Maximum raw field values for each divider (inclusive search bounds) */
#define PLL_DIVR1_MAX 7
#define PLL_DIVR2_MAX 63
#define PLL_DIVF1_MAX 63
#define PLL_DIVF2_MAX 63
#define PLL_DIVQ_MAX 63

/* Field values programmed into SSCG_PLL_BYPASS_MASK */
#define PLL_BYPASS_NONE 0x0
#define PLL_BYPASS1 0x2
#define PLL_BYPASS2 0x1

/* Bypass control bits in PLL_CFG0 */
#define SSCG_PLL_BYPASS1_MASK BIT(5)
#define SSCG_PLL_BYPASS2_MASK BIT(4)
#define SSCG_PLL_BYPASS_MASK GENMASK(5, 4)

/* Lock poll timeout, in microseconds (timeout_us of readl_poll_timeout) */
#define PLL_SCCG_LOCK_TIMEOUT 70
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 70)
/*
 * One complete divider/bypass configuration for the SSCG PLL, together
 * with the intermediate frequencies it produces. Filled in by the
 * clk_sscg_*_lookup()/_find_setup() search helpers and later written to
 * the hardware by clk_sscg_pll_set_rate().
 */
struct clk_sscg_pll_setup {
	int divr1, divf1;	/* stage 1 reference / feedback dividers */
	int divr2, divf2;	/* stage 2 reference / feedback dividers */
	int divq;		/* output divider */
	int bypass;		/* PLL_BYPASS_NONE/1/2 value for CFG0 */
	uint64_t vco1;		/* stage 1 VCO frequency */
	uint64_t vco2;		/* stage 2 VCO frequency */
	uint64_t fout;		/* resulting output frequency */
	uint64_t ref;		/* input reference frequency */
	uint64_t ref_div1;	/* reference after the stage 1 divider */
	uint64_t ref_div2;	/* stage 2 input after its ref divider */
	uint64_t fout_request;	/* rate the caller asked for */
	int fout_error;		/* fout - fout_request of best match so far */
};
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 85)
/*
 * Per-instance state of one SSCG PLL.
 */
struct clk_sscg_pll {
	struct clk_hw hw;	/* clk framework handle (container_of anchor) */
	const struct clk_ops ops;	/* NOTE(review): embedded by value; initialised outside this chunk */
	void __iomem *base;	/* MMIO base of the PLL register block */
	struct clk_sscg_pll_setup setup;	/* config chosen by determine_rate */
	u8 parent;	/* parent index when not bypassed */
	u8 bypass1;	/* parent index used in PLL_BYPASS1 mode */
	u8 bypass2;	/* parent index used in PLL_BYPASS2 mode */
};

/* Map a clk framework handle back to its containing PLL instance. */
#define to_clk_sscg_pll(_hw) container_of(_hw, struct clk_sscg_pll, hw)
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 97)
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 98) static int clk_sscg_pll_wait_lock(struct clk_sscg_pll *pll)
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 99) {
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 100) u32 val;
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 101)
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 102) val = readl_relaxed(pll->base + PLL_CFG0);
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 103)
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 104) /* don't wait for lock if all plls are bypassed */
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 105) if (!(val & SSCG_PLL_BYPASS2_MASK))
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 106) return readl_poll_timeout(pll->base, val, val & PLL_LOCK_MASK,
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 107) 0, PLL_SCCG_LOCK_TIMEOUT);
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 108)
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 109) return 0;
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 110) }
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 111)
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 112) static int clk_sscg_pll2_check_match(struct clk_sscg_pll_setup *setup,
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 113) struct clk_sscg_pll_setup *temp_setup)
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 114) {
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 115) int new_diff = temp_setup->fout - temp_setup->fout_request;
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 116) int diff = temp_setup->fout_error;
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 117)
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 118) if (abs(diff) > abs(new_diff)) {
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 119) temp_setup->fout_error = new_diff;
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 120) memcpy(setup, temp_setup, sizeof(struct clk_sscg_pll_setup));
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 121)
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 122) if (temp_setup->fout_request == temp_setup->fout)
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 123) return 0;
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 124) }
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 125) return -1;
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 126) }
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 127)
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 128) static int clk_sscg_divq_lookup(struct clk_sscg_pll_setup *setup,
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 129) struct clk_sscg_pll_setup *temp_setup)
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 130) {
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 131) int ret = -EINVAL;
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 132)
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 133) for (temp_setup->divq = 0; temp_setup->divq <= PLL_DIVQ_MAX;
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 134) temp_setup->divq++) {
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 135) temp_setup->vco2 = temp_setup->vco1;
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 136) do_div(temp_setup->vco2, temp_setup->divr2 + 1);
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 137) temp_setup->vco2 *= 2;
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 138) temp_setup->vco2 *= temp_setup->divf2 + 1;
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 139) if (temp_setup->vco2 >= PLL_STAGE2_MIN_FREQ &&
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 140) temp_setup->vco2 <= PLL_STAGE2_MAX_FREQ) {
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 141) temp_setup->fout = temp_setup->vco2;
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 142) do_div(temp_setup->fout, 2 * (temp_setup->divq + 1));
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 143)
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 144) ret = clk_sscg_pll2_check_match(setup, temp_setup);
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 145) if (!ret) {
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 146) temp_setup->bypass = PLL_BYPASS1;
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 147) return ret;
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 148) }
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 149) }
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 150) }
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 151)
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 152) return ret;
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 153) }
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 154)
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 155) static int clk_sscg_divf2_lookup(struct clk_sscg_pll_setup *setup,
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 156) struct clk_sscg_pll_setup *temp_setup)
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 157) {
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 158) int ret = -EINVAL;
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 159)
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 160) for (temp_setup->divf2 = 0; temp_setup->divf2 <= PLL_DIVF2_MAX;
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 161) temp_setup->divf2++) {
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 162) ret = clk_sscg_divq_lookup(setup, temp_setup);
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 163) if (!ret)
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 164) return ret;
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 165) }
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 166)
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 167) return ret;
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 168) }
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 169)
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 170) static int clk_sscg_divr2_lookup(struct clk_sscg_pll_setup *setup,
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 171) struct clk_sscg_pll_setup *temp_setup)
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 172) {
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 173) int ret = -EINVAL;
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 174)
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 175) for (temp_setup->divr2 = 0; temp_setup->divr2 <= PLL_DIVR2_MAX;
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 176) temp_setup->divr2++) {
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 177) temp_setup->ref_div2 = temp_setup->vco1;
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 178) do_div(temp_setup->ref_div2, temp_setup->divr2 + 1);
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 179) if (temp_setup->ref_div2 >= PLL_STAGE2_REF_MIN_FREQ &&
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 180) temp_setup->ref_div2 <= PLL_STAGE2_REF_MAX_FREQ) {
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 181) ret = clk_sscg_divf2_lookup(setup, temp_setup);
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 182) if (!ret)
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 183) return ret;
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 184) }
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 185) }
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 186)
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 187) return ret;
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 188) }
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 189)
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 190) static int clk_sscg_pll2_find_setup(struct clk_sscg_pll_setup *setup,
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 191) struct clk_sscg_pll_setup *temp_setup,
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 192) uint64_t ref)
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 193) {
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 194) int ret;
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 195)
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 196) if (ref < PLL_STAGE1_MIN_FREQ || ref > PLL_STAGE1_MAX_FREQ)
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 197) return -EINVAL;
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 198)
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 199) temp_setup->vco1 = ref;
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 200)
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 201) ret = clk_sscg_divr2_lookup(setup, temp_setup);
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 202) return ret;
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 203) }
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 204)
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 205) static int clk_sscg_divf1_lookup(struct clk_sscg_pll_setup *setup,
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 206) struct clk_sscg_pll_setup *temp_setup)
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 207) {
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 208) int ret = -EINVAL;
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 209)
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 210) for (temp_setup->divf1 = 0; temp_setup->divf1 <= PLL_DIVF1_MAX;
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 211) temp_setup->divf1++) {
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 212) uint64_t vco1 = temp_setup->ref;
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 213)
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 214) do_div(vco1, temp_setup->divr1 + 1);
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 215) vco1 *= 2;
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 216) vco1 *= temp_setup->divf1 + 1;
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 217)
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 218) ret = clk_sscg_pll2_find_setup(setup, temp_setup, vco1);
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 219) if (!ret) {
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 220) temp_setup->bypass = PLL_BYPASS_NONE;
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 221) return ret;
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 222) }
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 223) }
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 224)
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 225) return ret;
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 226) }
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 227)
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 228) static int clk_sscg_divr1_lookup(struct clk_sscg_pll_setup *setup,
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 229) struct clk_sscg_pll_setup *temp_setup)
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 230) {
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 231) int ret = -EINVAL;
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 232)
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 233) for (temp_setup->divr1 = 0; temp_setup->divr1 <= PLL_DIVR1_MAX;
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 234) temp_setup->divr1++) {
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 235) temp_setup->ref_div1 = temp_setup->ref;
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 236) do_div(temp_setup->ref_div1, temp_setup->divr1 + 1);
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 237) if (temp_setup->ref_div1 >= PLL_STAGE1_REF_MIN_FREQ &&
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 238) temp_setup->ref_div1 <= PLL_STAGE1_REF_MAX_FREQ) {
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 239) ret = clk_sscg_divf1_lookup(setup, temp_setup);
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 240) if (!ret)
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 241) return ret;
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 242) }
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 243) }
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 244)
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 245) return ret;
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 246) }
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 247)
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 248) static int clk_sscg_pll1_find_setup(struct clk_sscg_pll_setup *setup,
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 249) struct clk_sscg_pll_setup *temp_setup,
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 250) uint64_t ref)
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 251) {
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 252) int ret;
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 253)
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 254) if (ref < PLL_REF_MIN_FREQ || ref > PLL_REF_MAX_FREQ)
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 255) return -EINVAL;
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 256)
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 257) temp_setup->ref = ref;
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 258)
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 259) ret = clk_sscg_divr1_lookup(setup, temp_setup);
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 260)
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 261) return ret;
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 262) }
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 263)
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 264) static int clk_sscg_pll_find_setup(struct clk_sscg_pll_setup *setup,
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 265) uint64_t prate,
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 266) uint64_t rate, int try_bypass)
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 267) {
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 268) struct clk_sscg_pll_setup temp_setup;
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 269) int ret = -EINVAL;
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 270)
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 271) memset(&temp_setup, 0, sizeof(struct clk_sscg_pll_setup));
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 272) memset(setup, 0, sizeof(struct clk_sscg_pll_setup));
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 273)
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 274) temp_setup.fout_error = PLL_OUT_MAX_FREQ;
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 275) temp_setup.fout_request = rate;
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 276)
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 277) switch (try_bypass) {
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 278) case PLL_BYPASS2:
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 279) if (prate == rate) {
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 280) setup->bypass = PLL_BYPASS2;
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 281) setup->fout = rate;
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 282) ret = 0;
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 283) }
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 284) break;
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 285) case PLL_BYPASS1:
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 286) ret = clk_sscg_pll2_find_setup(setup, &temp_setup, prate);
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 287) break;
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 288) case PLL_BYPASS_NONE:
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 289) ret = clk_sscg_pll1_find_setup(setup, &temp_setup, prate);
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 290) break;
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 291) }
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 292)
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 293) return ret;
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 294) }
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 295)
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 296) static int clk_sscg_pll_is_prepared(struct clk_hw *hw)
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 297) {
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 298) struct clk_sscg_pll *pll = to_clk_sscg_pll(hw);
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 299)
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 300) u32 val = readl_relaxed(pll->base + PLL_CFG0);
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 301)
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 302) return (val & PLL_PD_MASK) ? 0 : 1;
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 303) }
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 304)
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 305) static int clk_sscg_pll_prepare(struct clk_hw *hw)
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 306) {
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 307) struct clk_sscg_pll *pll = to_clk_sscg_pll(hw);
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 308) u32 val;
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 309)
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 310) val = readl_relaxed(pll->base + PLL_CFG0);
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 311) val &= ~PLL_PD_MASK;
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 312) writel_relaxed(val, pll->base + PLL_CFG0);
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 313)
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 314) return clk_sscg_pll_wait_lock(pll);
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 315) }
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 316)
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 317) static void clk_sscg_pll_unprepare(struct clk_hw *hw)
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 318) {
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 319) struct clk_sscg_pll *pll = to_clk_sscg_pll(hw);
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 320) u32 val;
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 321)
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 322) val = readl_relaxed(pll->base + PLL_CFG0);
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 323) val |= PLL_PD_MASK;
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 324) writel_relaxed(val, pll->base + PLL_CFG0);
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 325) }
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 326)
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 327) static unsigned long clk_sscg_pll_recalc_rate(struct clk_hw *hw,
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 328) unsigned long parent_rate)
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 329) {
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 330) struct clk_sscg_pll *pll = to_clk_sscg_pll(hw);
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 331) u32 val, divr1, divf1, divr2, divf2, divq;
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 332) u64 temp64;
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 333)
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 334) val = readl_relaxed(pll->base + PLL_CFG2);
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 335) divr1 = FIELD_GET(PLL_DIVR1_MASK, val);
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 336) divr2 = FIELD_GET(PLL_DIVR2_MASK, val);
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 337) divf1 = FIELD_GET(PLL_DIVF1_MASK, val);
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 338) divf2 = FIELD_GET(PLL_DIVF2_MASK, val);
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 339) divq = FIELD_GET(PLL_DIVQ_MASK, val);
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 340)
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 341) temp64 = parent_rate;
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 342)
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 343) val = readl(pll->base + PLL_CFG0);
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 344) if (val & SSCG_PLL_BYPASS2_MASK) {
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 345) temp64 = parent_rate;
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 346) } else if (val & SSCG_PLL_BYPASS1_MASK) {
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 347) temp64 *= divf2;
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 348) do_div(temp64, (divr2 + 1) * (divq + 1));
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 349) } else {
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 350) temp64 *= 2;
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 351) temp64 *= (divf1 + 1) * (divf2 + 1);
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 352) do_div(temp64, (divr1 + 1) * (divr2 + 1) * (divq + 1));
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 353) }
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 354)
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 355) return temp64;
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 356) }
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 357)
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 358) static int clk_sscg_pll_set_rate(struct clk_hw *hw, unsigned long rate,
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 359) unsigned long parent_rate)
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 360) {
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 361) struct clk_sscg_pll *pll = to_clk_sscg_pll(hw);
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 362) struct clk_sscg_pll_setup *setup = &pll->setup;
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 363) u32 val;
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 364)
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 365) /* set bypass here too since the parent might be the same */
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 366) val = readl(pll->base + PLL_CFG0);
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 367) val &= ~SSCG_PLL_BYPASS_MASK;
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 368) val |= FIELD_PREP(SSCG_PLL_BYPASS_MASK, setup->bypass);
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 369) writel(val, pll->base + PLL_CFG0);
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 370)
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 371) val = readl_relaxed(pll->base + PLL_CFG2);
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 372) val &= ~(PLL_DIVF1_MASK | PLL_DIVF2_MASK);
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 373) val &= ~(PLL_DIVR1_MASK | PLL_DIVR2_MASK | PLL_DIVQ_MASK);
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 374) val |= FIELD_PREP(PLL_DIVF1_MASK, setup->divf1);
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 375) val |= FIELD_PREP(PLL_DIVF2_MASK, setup->divf2);
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 376) val |= FIELD_PREP(PLL_DIVR1_MASK, setup->divr1);
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 377) val |= FIELD_PREP(PLL_DIVR2_MASK, setup->divr2);
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 378) val |= FIELD_PREP(PLL_DIVQ_MASK, setup->divq);
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 379) writel_relaxed(val, pll->base + PLL_CFG2);
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 380)
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 381) return clk_sscg_pll_wait_lock(pll);
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 382) }
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 383)
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 384) static u8 clk_sscg_pll_get_parent(struct clk_hw *hw)
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 385) {
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 386) struct clk_sscg_pll *pll = to_clk_sscg_pll(hw);
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 387) u32 val;
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 388) u8 ret = pll->parent;
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 389)
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 390) val = readl(pll->base + PLL_CFG0);
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 391) if (val & SSCG_PLL_BYPASS2_MASK)
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 392) ret = pll->bypass2;
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 393) else if (val & SSCG_PLL_BYPASS1_MASK)
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 394) ret = pll->bypass1;
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 395) return ret;
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 396) }
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 397)
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 398) static int clk_sscg_pll_set_parent(struct clk_hw *hw, u8 index)
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 399) {
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 400) struct clk_sscg_pll *pll = to_clk_sscg_pll(hw);
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 401) u32 val;
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 402)
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 403) val = readl(pll->base + PLL_CFG0);
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 404) val &= ~SSCG_PLL_BYPASS_MASK;
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 405) val |= FIELD_PREP(SSCG_PLL_BYPASS_MASK, pll->setup.bypass);
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 406) writel(val, pll->base + PLL_CFG0);
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 407)
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 408) return clk_sscg_pll_wait_lock(pll);
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 409) }
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 410)
/*
 * Try to satisfy a rate request in one specific bypass mode.
 *
 * Clamps the parent rate request to [@min, @max], asks the clk
 * framework what the parent selected for this bypass mode can deliver,
 * then searches for divider settings that produce @rate from that
 * parent rate. The best setup found is stored in pll->setup and
 * req->rate is set to the achievable output frequency (setup->fout).
 *
 * Returns 0 on an exact match, a negative error otherwise.
 */
static int __clk_sscg_pll_determine_rate(struct clk_hw *hw,
					struct clk_rate_request *req,
					uint64_t min,
					uint64_t max,
					uint64_t rate,
					int bypass)
{
	struct clk_sscg_pll *pll = to_clk_sscg_pll(hw);
	struct clk_sscg_pll_setup *setup = &pll->setup;
	struct clk_hw *parent_hw = NULL;
	int bypass_parent_index;
	int ret;

	req->max_rate = max;
	req->min_rate = min;

	/* each bypass mode takes its input from a different parent */
	switch (bypass) {
	case PLL_BYPASS2:
		bypass_parent_index = pll->bypass2;
		break;
	case PLL_BYPASS1:
		bypass_parent_index = pll->bypass1;
		break;
	default:
		bypass_parent_index = pll->parent;
		break;
	}

	parent_hw = clk_hw_get_parent_by_index(hw, bypass_parent_index);
	ret = __clk_determine_rate(parent_hw, req);
	if (!ret) {
		ret = clk_sscg_pll_find_setup(setup, req->rate,
						rate, bypass);
	}

	req->best_parent_hw = parent_hw;
	req->best_parent_rate = req->rate;
	/*
	 * NOTE(review): on failure setup->fout holds whatever the
	 * (zero-initialised) search left behind; the fallback check in
	 * clk_sscg_pll_determine_rate() relies on this residue.
	 */
	req->rate = setup->fout;

	return ret;
}
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 452)
/*
 * Choose a configuration that can deliver req->rate, trying the bypass
 * modes in order: full bypass first, then stage-1-bypassed, then both
 * stages active. If none hits the rate exactly, accept the closest
 * approximation found (left in pll->setup by the attempts above) as
 * long as it lies within the request's min/max bounds.
 */
static int clk_sscg_pll_determine_rate(struct clk_hw *hw,
				       struct clk_rate_request *req)
{
	struct clk_sscg_pll *pll = to_clk_sscg_pll(hw);
	struct clk_sscg_pll_setup *setup = &pll->setup;
	/* capture the caller's bounds before the attempts mutate req */
	uint64_t rate = req->rate;
	uint64_t min = req->min_rate;
	uint64_t max = req->max_rate;
	int ret;

	if (rate < PLL_OUT_MIN_FREQ || rate > PLL_OUT_MAX_FREQ)
		return -EINVAL;

	/* full bypass: only works if a parent can supply the rate 1:1 */
	ret = __clk_sscg_pll_determine_rate(hw, req, req->rate, req->rate,
						rate, PLL_BYPASS2);
	if (!ret)
		return ret;

	/* stage 1 bypassed: the parent feeds stage 2 directly */
	ret = __clk_sscg_pll_determine_rate(hw, req, PLL_STAGE1_REF_MIN_FREQ,
						PLL_STAGE1_REF_MAX_FREQ, rate,
						PLL_BYPASS1);
	if (!ret)
		return ret;

	/* no bypass: full two-stage operation */
	ret = __clk_sscg_pll_determine_rate(hw, req, PLL_REF_MIN_FREQ,
						PLL_REF_MAX_FREQ, rate,
						PLL_BYPASS_NONE);
	if (!ret)
		return ret;

	/*
	 * No exact match: fall back to the best approximation recorded
	 * in pll->setup (req->rate was already set to setup->fout by the
	 * last attempt) if it satisfies the caller's original bounds.
	 */
	if (setup->fout >= min && setup->fout <= max)
		ret = 0;

	return ret;
}
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 488)
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 489) static const struct clk_ops clk_sscg_pll_ops = {
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 490) .prepare = clk_sscg_pll_prepare,
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 491) .unprepare = clk_sscg_pll_unprepare,
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 492) .is_prepared = clk_sscg_pll_is_prepared,
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 493) .recalc_rate = clk_sscg_pll_recalc_rate,
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 494) .set_rate = clk_sscg_pll_set_rate,
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 495) .set_parent = clk_sscg_pll_set_parent,
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 496) .get_parent = clk_sscg_pll_get_parent,
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 497) .determine_rate = clk_sscg_pll_determine_rate,
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 498) };
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 499)
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 500) struct clk_hw *imx_clk_hw_sscg_pll(const char *name,
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 501) const char * const *parent_names,
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 502) u8 num_parents,
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 503) u8 parent, u8 bypass1, u8 bypass2,
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 504) void __iomem *base,
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 505) unsigned long flags)
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 506) {
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 507) struct clk_sscg_pll *pll;
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 508) struct clk_init_data init;
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 509) struct clk_hw *hw;
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 510) int ret;
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 511)
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 512) pll = kzalloc(sizeof(*pll), GFP_KERNEL);
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 513) if (!pll)
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 514) return ERR_PTR(-ENOMEM);
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 515)
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 516) pll->parent = parent;
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 517) pll->bypass1 = bypass1;
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 518) pll->bypass2 = bypass2;
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 519)
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 520) pll->base = base;
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 521) init.name = name;
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 522) init.ops = &clk_sscg_pll_ops;
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 523)
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 524) init.flags = flags;
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 525) init.parent_names = parent_names;
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 526) init.num_parents = num_parents;
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 527)
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 528) pll->base = base;
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 529) pll->hw.init = &init;
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 530)
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 531) hw = &pll->hw;
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 532)
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 533) ret = clk_hw_register(NULL, hw);
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 534) if (ret) {
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 535) kfree(pll);
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 536) return ERR_PTR(ret);
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 537) }
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 538)
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 539) return hw;
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 540) }
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 541) EXPORT_SYMBOL_GPL(imx_clk_hw_sscg_pll);