// SPDX-License-Identifier: GPL-2.0
/*
 * Copyright (C) 2014 Intel Corporation
 *
 * Adjustable fractional divider clock implementation.
 * Output rate = (m / n) * parent_rate.
 * Uses rational best approximation algorithm.
 */

#include <linux/clk-provider.h>
#include <linux/io.h>
#include <linux/module.h>
#include <linux/device.h>
#include <linux/slab.h>
#include <linux/rational.h>

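/*
 * Helpers for accessing the m/n control register. The register may sit on
 * a big-endian bus, so honour CLK_FRAC_DIVIDER_BIG_ENDIAN when selected.
 */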
static inline u32 clk_fd_readl(struct clk_fractional_divider *fd)
{
	if (fd->flags & CLK_FRAC_DIVIDER_BIG_ENDIAN)
		return ioread32be(fd->reg);

	return readl(fd->reg);
}

static inline void clk_fd_writel(struct clk_fractional_divider *fd, u32 val)
{
	if (fd->flags & CLK_FRAC_DIVIDER_BIG_ENDIAN)
		iowrite32be(val, fd->reg);
	else
		writel(val, fd->reg);
}

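/*
 * Read m and n back from the hardware and report the resulting rate:
 * rate = parent_rate * m / n. With CLK_FRAC_DIVIDER_ZERO_BASED the register
 * fields hold (m - 1) and (n - 1), hence the increments below.
 */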
static unsigned long clk_fd_recalc_rate(struct clk_hw *hw,
					unsigned long parent_rate)
{
	struct clk_fractional_divider *fd = to_clk_fd(hw);
	unsigned long flags = 0;
	unsigned long m, n;
	u32 val;
	u64 ret;

	if (fd->lock)
		spin_lock_irqsave(fd->lock, flags);
	else
		__acquire(fd->lock);

	val = clk_fd_readl(fd);

	if (fd->lock)
		spin_unlock_irqrestore(fd->lock, flags);
	else
		__release(fd->lock);

	m = (val & fd->mmask) >> fd->mshift;
	n = (val & fd->nmask) >> fd->nshift;

	if (fd->flags & CLK_FRAC_DIVIDER_ZERO_BASED) {
		m++;
		n++;
	}

	if (!n || !m)
		return parent_rate;

	ret = (u64)parent_rate * m;
	do_div(ret, n);

	return ret;
}

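/*
 * Default m/n search. When the requested rate is much lower than the parent
 * rate, the exact ratio may need more denominator bits than the register
 * provides; pre-scale the rate so the best approximation still fits into the
 * m and n fields, at the cost of some precision.
 */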
static void clk_fd_general_approximation(struct clk_hw *hw, unsigned long rate,
					 unsigned long *parent_rate,
					 unsigned long *m, unsigned long *n)
{
	struct clk_fractional_divider *fd = to_clk_fd(hw);
	unsigned long scale;

	/*
	 * Get rate closer to *parent_rate to guarantee there is no overflow
	 * for m and n. In the result it will be the nearest rate left shifted
	 * by (scale - fd->nwidth) bits.
	 */
	scale = fls_long(*parent_rate / rate - 1);
	if (scale > fd->nwidth)
		rate <<= scale - fd->nwidth;

	rational_best_approximation(rate, *parent_rate,
			GENMASK(fd->mwidth - 1, 0), GENMASK(fd->nwidth - 1, 0),
			m, n);
}

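/*
 * Report the closest achievable rate. A provider may supply its own m/n
 * search via fd->approximation; otherwise fall back to the generic helper.
 */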
static long clk_fd_round_rate(struct clk_hw *hw, unsigned long rate,
			      unsigned long *parent_rate)
{
	struct clk_fractional_divider *fd = to_clk_fd(hw);
	unsigned long m, n;
	u64 ret;

	if (!rate || rate >= *parent_rate)
		return *parent_rate;

	if (fd->approximation)
		fd->approximation(hw, rate, parent_rate, &m, &n);
	else
		clk_fd_general_approximation(hw, rate, parent_rate, &m, &n);

	ret = (u64)*parent_rate * m;
	do_div(ret, n);

	return ret;
}

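/*
 * Program the m/n fields for the requested rate, recomputing the
 * approximation against the parent rate chosen by the clock framework.
 */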
static int clk_fd_set_rate(struct clk_hw *hw, unsigned long rate,
			   unsigned long parent_rate)
{
	struct clk_fractional_divider *fd = to_clk_fd(hw);
	unsigned long flags = 0;
	unsigned long m, n;
	u32 val;

	rational_best_approximation(rate, parent_rate,
			GENMASK(fd->mwidth - 1, 0), GENMASK(fd->nwidth - 1, 0),
			&m, &n);

	if (fd->flags & CLK_FRAC_DIVIDER_ZERO_BASED) {
		m--;
		n--;
	}
	/*
	 * When compensating the fractional divider, the [1:0] bits of the
	 * numerator register are omitted, which leads to a large deviation
	 * in the result. The numerator therefore has to be at least 4.
	 *
	 * There is one exception: if the denominator n is even, keep the
	 * original numerator (< 4) and denominator, otherwise the duty
	 * cycle may no longer be 50%.
	 */
	if (m < 4 && m != 0) {
		if (n % 2 == 0)
			val = 1;
		else
			val = DIV_ROUND_UP(4, m);

		n *= val;
		m *= val;
		if (n > (fd->nmask >> fd->nshift)) {
			pr_debug("%s: n (%lu) overflows its field, clamping to the field maximum\n",
				 __func__, n);
			n = fd->nmask >> fd->nshift;
		}
	}

	if (fd->lock)
		spin_lock_irqsave(fd->lock, flags);
	else
		__acquire(fd->lock);

	val = clk_fd_readl(fd);
	val &= ~(fd->mmask | fd->nmask);
	val |= (m << fd->mshift) | (n << fd->nshift);
	clk_fd_writel(fd, val);

	if (fd->lock)
		spin_unlock_irqrestore(fd->lock, flags);
	else
		__release(fd->lock);

	return 0;
}

const struct clk_ops clk_fractional_divider_ops = {
	.recalc_rate = clk_fd_recalc_rate,
	.round_rate = clk_fd_round_rate,
	.set_rate = clk_fd_set_rate,
};
EXPORT_SYMBOL_GPL(clk_fractional_divider_ops);

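/**
 * clk_hw_register_fractional_divider - register a fractional divider clock
 * @dev: device registering this clock, may be NULL
 * @name: name of this clock
 * @parent_name: name of the parent clock, or NULL for a root clock
 * @flags: framework-level CLK_* flags
 * @reg: register containing the m/n bit fields
 * @mshift: shift to the numerator (m) bit field
 * @mwidth: width of the numerator (m) bit field
 * @nshift: shift to the denominator (n) bit field
 * @nwidth: width of the denominator (n) bit field
 * @clk_divider_flags: CLK_FRAC_DIVIDER_* flags describing the register layout
 * @lock: spinlock serialising access to @reg, or NULL if not needed
 *
 * Returns the registered clk_hw on success or an ERR_PTR() on failure.
 *
 * Illustrative call (the clock names and register layout below are made up):
 *
 *	hw = clk_hw_register_fractional_divider(dev, "uart_frac", "uart_src",
 *			CLK_SET_RATE_PARENT, base + 0x30, 16, 16, 0, 16,
 *			CLK_FRAC_DIVIDER_ZERO_BASED, &uart_clk_lock);
 *	if (IS_ERR(hw))
 *		return PTR_ERR(hw);
 */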
struct clk_hw *clk_hw_register_fractional_divider(struct device *dev,
		const char *name, const char *parent_name, unsigned long flags,
		void __iomem *reg, u8 mshift, u8 mwidth, u8 nshift, u8 nwidth,
		u8 clk_divider_flags, spinlock_t *lock)
{
	struct clk_fractional_divider *fd;
	struct clk_init_data init;
	struct clk_hw *hw;
	int ret;

	fd = kzalloc(sizeof(*fd), GFP_KERNEL);
	if (!fd)
		return ERR_PTR(-ENOMEM);

	init.name = name;
	init.ops = &clk_fractional_divider_ops;
	init.flags = flags;
	init.parent_names = parent_name ? &parent_name : NULL;
	init.num_parents = parent_name ? 1 : 0;

	fd->reg = reg;
	fd->mshift = mshift;
	fd->mwidth = mwidth;
	fd->mmask = GENMASK(mwidth - 1, 0) << mshift;
	fd->nshift = nshift;
	fd->nwidth = nwidth;
	fd->nmask = GENMASK(nwidth - 1, 0) << nshift;
	fd->flags = clk_divider_flags;
	fd->lock = lock;
	fd->hw.init = &init;

	hw = &fd->hw;
	ret = clk_hw_register(dev, hw);
	if (ret) {
		kfree(fd);
		hw = ERR_PTR(ret);
	}

	return hw;
}
EXPORT_SYMBOL_GPL(clk_hw_register_fractional_divider);

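/*
 * Legacy wrapper around clk_hw_register_fractional_divider() that returns
 * the underlying struct clk instead of the struct clk_hw.
 */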
struct clk *clk_register_fractional_divider(struct device *dev,
		const char *name, const char *parent_name, unsigned long flags,
		void __iomem *reg, u8 mshift, u8 mwidth, u8 nshift, u8 nwidth,
		u8 clk_divider_flags, spinlock_t *lock)
{
	struct clk_hw *hw;

	hw = clk_hw_register_fractional_divider(dev, name, parent_name, flags,
			reg, mshift, mwidth, nshift, nwidth, clk_divider_flags,
			lock);
	if (IS_ERR(hw))
		return ERR_CAST(hw);
	return hw->clk;
}
EXPORT_SYMBOL_GPL(clk_register_fractional_divider);

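/*
 * Undo clk_hw_register_fractional_divider(): unregister the clock and free
 * the clk_fractional_divider allocated at registration time.
 */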
void clk_hw_unregister_fractional_divider(struct clk_hw *hw)
{
	struct clk_fractional_divider *fd;

	fd = to_clk_fd(hw);

	clk_hw_unregister(hw);
	kfree(fd);
}