mirror of
https://github.com/torvalds/linux.git
synced 2024-11-24 05:02:12 +00:00
d54fb4b25a
ULONG_MAX is used by a few drivers to figure out the highest available
clock rate via clk_round_rate(clk, ULONG_MAX). Since abs() takes a
signed value as input, the current logic effectively calculates with
ULONG_MAX = -1, which results in the worst parent clock being chosen
instead of the best one.
For example on Rockchip RK3588 the eMMC driver tries to figure out
the highest available clock rate. There are three parent clocks
available resulting in the following rate diffs with the existing
logic:
GPLL: abs(18446744073709551615 - 1188000000) = 1188000001
CPLL: abs(18446744073709551615 - 1500000000) = 1500000001
XIN24M: abs(18446744073709551615 - 24000000) = 24000001
As a result the clock framework will promote a maximum supported
clock rate of 24 MHz, even though 1.5GHz are possible. With the
updated logic any casting between signed and unsigned is avoided
and the numbers look like this instead:
GPLL: 18446744073709551615 - 1188000000 = 18446744072521551615
CPLL: 18446744073709551615 - 1500000000 = 18446744072209551615
XIN24M: 18446744073709551615 - 24000000 = 18446744073685551615
As a result the parent with the highest acceptable rate is chosen
instead of the parent clock with the lowest one.
Cc: stable@vger.kernel.org
Fixes: 4950240800
("mmc: sdhci-of-dwcmshc: properly determine max clock on Rockchip")
Tested-by: Christopher Obbard <chris.obbard@collabora.com>
Signed-off-by: Sebastian Reichel <sebastian.reichel@collabora.com>
Link: https://lore.kernel.org/r/20230526171057.66876-2-sebastian.reichel@collabora.com
Reviewed-by: AngeloGioacchino Del Regno <angelogioacchino.delregno@collabora.com>
Signed-off-by: Stephen Boyd <sboyd@kernel.org>
493 lines
13 KiB
C
// SPDX-License-Identifier: GPL-2.0
|
|
/*
|
|
* Copyright (c) 2013 NVIDIA CORPORATION. All rights reserved.
|
|
*/
|
|
|
|
#include <linux/clk-provider.h>
|
|
#include <linux/device.h>
|
|
#include <linux/err.h>
|
|
#include <linux/slab.h>
|
|
|
|
static u8 clk_composite_get_parent(struct clk_hw *hw)
|
|
{
|
|
struct clk_composite *composite = to_clk_composite(hw);
|
|
const struct clk_ops *mux_ops = composite->mux_ops;
|
|
struct clk_hw *mux_hw = composite->mux_hw;
|
|
|
|
__clk_hw_set_clk(mux_hw, hw);
|
|
|
|
return mux_ops->get_parent(mux_hw);
|
|
}
|
|
|
|
/* Switch the composite clock to the parent at @index via the mux ops. */
static int clk_composite_set_parent(struct clk_hw *hw, u8 index)
{
	struct clk_composite *composite = to_clk_composite(hw);

	__clk_hw_set_clk(composite->mux_hw, hw);

	return composite->mux_ops->set_parent(composite->mux_hw, index);
}
|
|
|
|
static unsigned long clk_composite_recalc_rate(struct clk_hw *hw,
|
|
unsigned long parent_rate)
|
|
{
|
|
struct clk_composite *composite = to_clk_composite(hw);
|
|
const struct clk_ops *rate_ops = composite->rate_ops;
|
|
struct clk_hw *rate_hw = composite->rate_hw;
|
|
|
|
__clk_hw_set_clk(rate_hw, hw);
|
|
|
|
return rate_ops->recalc_rate(rate_hw, parent_rate);
|
|
}
|
|
|
|
static int clk_composite_determine_rate_for_parent(struct clk_hw *rate_hw,
|
|
struct clk_rate_request *req,
|
|
struct clk_hw *parent_hw,
|
|
const struct clk_ops *rate_ops)
|
|
{
|
|
long rate;
|
|
|
|
req->best_parent_hw = parent_hw;
|
|
req->best_parent_rate = clk_hw_get_rate(parent_hw);
|
|
|
|
if (rate_ops->determine_rate)
|
|
return rate_ops->determine_rate(rate_hw, req);
|
|
|
|
rate = rate_ops->round_rate(rate_hw, req->rate,
|
|
&req->best_parent_rate);
|
|
if (rate < 0)
|
|
return rate;
|
|
|
|
req->rate = rate;
|
|
|
|
return 0;
|
|
}
|
|
|
|
/*
 * clk_composite_determine_rate - pick the best rate (and parent) for @req
 *
 * When both a rate component and a reparentable mux are present, evaluate
 * every available parent and keep the one whose achievable rate is closest
 * to the requested rate. Otherwise fall back to whichever single component
 * implements .determine_rate. Returns 0 on success, negative errno on
 * failure.
 */
static int clk_composite_determine_rate(struct clk_hw *hw,
					struct clk_rate_request *req)
{
	struct clk_composite *composite = to_clk_composite(hw);
	const struct clk_ops *rate_ops = composite->rate_ops;
	const struct clk_ops *mux_ops = composite->mux_ops;
	struct clk_hw *rate_hw = composite->rate_hw;
	struct clk_hw *mux_hw = composite->mux_hw;
	struct clk_hw *parent;
	unsigned long rate_diff;
	unsigned long best_rate_diff = ULONG_MAX;
	unsigned long best_rate = 0;
	int i, ret;

	if (rate_hw && rate_ops &&
	    (rate_ops->determine_rate || rate_ops->round_rate) &&
	    mux_hw && mux_ops && mux_ops->set_parent) {
		req->best_parent_hw = NULL;

		/*
		 * Reparenting is forbidden: only the current parent may be
		 * considered, so evaluate it and return immediately.
		 */
		if (clk_hw_get_flags(hw) & CLK_SET_RATE_NO_REPARENT) {
			struct clk_rate_request tmp_req;

			parent = clk_hw_get_parent(mux_hw);

			clk_hw_forward_rate_request(hw, req, parent, &tmp_req, req->rate);
			ret = clk_composite_determine_rate_for_parent(rate_hw,
								      &tmp_req,
								      parent,
								      rate_ops);
			if (ret)
				return ret;

			req->rate = tmp_req.rate;
			req->best_parent_hw = tmp_req.best_parent_hw;
			req->best_parent_rate = tmp_req.best_parent_rate;

			return 0;
		}

		/* Try each parent and track the closest achievable rate. */
		for (i = 0; i < clk_hw_get_num_parents(mux_hw); i++) {
			struct clk_rate_request tmp_req;

			parent = clk_hw_get_parent_by_index(mux_hw, i);
			if (!parent)
				continue;

			/* Work on a scratch request so failures don't taint @req. */
			clk_hw_forward_rate_request(hw, req, parent, &tmp_req, req->rate);
			ret = clk_composite_determine_rate_for_parent(rate_hw,
								      &tmp_req,
								      parent,
								      rate_ops);
			if (ret)
				continue;

			/*
			 * Compute |req->rate - tmp_req.rate| in unsigned
			 * arithmetic; abs() would truncate rates such as
			 * ULONG_MAX through a signed cast and pick the
			 * wrong parent (see commit message).
			 */
			if (req->rate >= tmp_req.rate)
				rate_diff = req->rate - tmp_req.rate;
			else
				rate_diff = tmp_req.rate - req->rate;

			/* First usable parent, exact match, or a closer rate wins. */
			if (!rate_diff || !req->best_parent_hw
				       || best_rate_diff > rate_diff) {
				req->best_parent_hw = parent;
				req->best_parent_rate = tmp_req.best_parent_rate;
				best_rate_diff = rate_diff;
				best_rate = tmp_req.rate;
			}

			/* An exact match cannot be improved upon; stop early. */
			if (!rate_diff)
				return 0;
		}

		req->rate = best_rate;
		return 0;
	} else if (rate_hw && rate_ops && rate_ops->determine_rate) {
		/* Rate component alone decides; no reparenting possible. */
		__clk_hw_set_clk(rate_hw, hw);
		return rate_ops->determine_rate(rate_hw, req);
	} else if (mux_hw && mux_ops && mux_ops->determine_rate) {
		/* Mux component alone decides (rate follows the parent). */
		__clk_hw_set_clk(mux_hw, hw);
		return mux_ops->determine_rate(mux_hw, req);
	} else {
		pr_err("clk: clk_composite_determine_rate function called, but no mux or rate callback set!\n");
		return -EINVAL;
	}
}
|
|
|
|
static long clk_composite_round_rate(struct clk_hw *hw, unsigned long rate,
|
|
unsigned long *prate)
|
|
{
|
|
struct clk_composite *composite = to_clk_composite(hw);
|
|
const struct clk_ops *rate_ops = composite->rate_ops;
|
|
struct clk_hw *rate_hw = composite->rate_hw;
|
|
|
|
__clk_hw_set_clk(rate_hw, hw);
|
|
|
|
return rate_ops->round_rate(rate_hw, rate, prate);
|
|
}
|
|
|
|
static int clk_composite_set_rate(struct clk_hw *hw, unsigned long rate,
|
|
unsigned long parent_rate)
|
|
{
|
|
struct clk_composite *composite = to_clk_composite(hw);
|
|
const struct clk_ops *rate_ops = composite->rate_ops;
|
|
struct clk_hw *rate_hw = composite->rate_hw;
|
|
|
|
__clk_hw_set_clk(rate_hw, hw);
|
|
|
|
return rate_ops->set_rate(rate_hw, rate, parent_rate);
|
|
}
|
|
|
|
/*
 * clk_composite_set_rate_and_parent - atomically-ish change rate and parent
 *
 * Orders the two hardware writes so the clock never runs faster than the
 * requested @rate during the transition: if the current divider setting
 * combined with the new @parent_rate would overshoot @rate, the divider is
 * programmed before the mux; otherwise the mux is switched first.
 * Always returns 0 (component op return values are not propagated).
 */
static int clk_composite_set_rate_and_parent(struct clk_hw *hw,
					     unsigned long rate,
					     unsigned long parent_rate,
					     u8 index)
{
	struct clk_composite *composite = to_clk_composite(hw);
	const struct clk_ops *rate_ops = composite->rate_ops;
	const struct clk_ops *mux_ops = composite->mux_ops;
	struct clk_hw *rate_hw = composite->rate_hw;
	struct clk_hw *mux_hw = composite->mux_hw;
	unsigned long temp_rate;

	__clk_hw_set_clk(rate_hw, hw);
	__clk_hw_set_clk(mux_hw, hw);

	/* Rate the current divider would yield from the new parent rate. */
	temp_rate = rate_ops->recalc_rate(rate_hw, parent_rate);
	if (temp_rate > rate) {
		/* Would overshoot: lower the divider before reparenting. */
		rate_ops->set_rate(rate_hw, rate, parent_rate);
		mux_ops->set_parent(mux_hw, index);
	} else {
		/* Safe to reparent first, then raise the rate. */
		mux_ops->set_parent(mux_hw, index);
		rate_ops->set_rate(rate_hw, rate, parent_rate);
	}

	return 0;
}
|
|
|
|
static int clk_composite_is_enabled(struct clk_hw *hw)
|
|
{
|
|
struct clk_composite *composite = to_clk_composite(hw);
|
|
const struct clk_ops *gate_ops = composite->gate_ops;
|
|
struct clk_hw *gate_hw = composite->gate_hw;
|
|
|
|
__clk_hw_set_clk(gate_hw, hw);
|
|
|
|
return gate_ops->is_enabled(gate_hw);
|
|
}
|
|
|
|
static int clk_composite_enable(struct clk_hw *hw)
|
|
{
|
|
struct clk_composite *composite = to_clk_composite(hw);
|
|
const struct clk_ops *gate_ops = composite->gate_ops;
|
|
struct clk_hw *gate_hw = composite->gate_hw;
|
|
|
|
__clk_hw_set_clk(gate_hw, hw);
|
|
|
|
return gate_ops->enable(gate_hw);
|
|
}
|
|
|
|
static void clk_composite_disable(struct clk_hw *hw)
|
|
{
|
|
struct clk_composite *composite = to_clk_composite(hw);
|
|
const struct clk_ops *gate_ops = composite->gate_ops;
|
|
struct clk_hw *gate_hw = composite->gate_hw;
|
|
|
|
__clk_hw_set_clk(gate_hw, hw);
|
|
|
|
gate_ops->disable(gate_hw);
|
|
}
|
|
|
|
/*
 * __clk_hw_register_composite - allocate and register a composite clock
 *
 * Builds a struct clk_composite from up to three components (mux, rate,
 * gate), wiring a clk_composite_* wrapper into the composite's clk_ops for
 * every callback the component ops provide. Exactly one of @parent_names
 * or @pdata describes the parents. Returns the registered clk_hw or an
 * ERR_PTR on failure; the composite allocation is freed on every error
 * path.
 */
static struct clk_hw *__clk_hw_register_composite(struct device *dev,
			const char *name, const char * const *parent_names,
			const struct clk_parent_data *pdata, int num_parents,
			struct clk_hw *mux_hw, const struct clk_ops *mux_ops,
			struct clk_hw *rate_hw, const struct clk_ops *rate_ops,
			struct clk_hw *gate_hw, const struct clk_ops *gate_ops,
			unsigned long flags)
{
	struct clk_hw *hw;
	struct clk_init_data init = {};
	struct clk_composite *composite;
	struct clk_ops *clk_composite_ops;
	int ret;

	composite = kzalloc(sizeof(*composite), GFP_KERNEL);
	if (!composite)
		return ERR_PTR(-ENOMEM);

	init.name = name;
	init.flags = flags;
	/* Callers pass either name strings or clk_parent_data, never both. */
	if (parent_names)
		init.parent_names = parent_names;
	else
		init.parent_data = pdata;
	init.num_parents = num_parents;
	hw = &composite->hw;

	clk_composite_ops = &composite->ops;

	if (mux_hw && mux_ops) {
		/* A mux without .get_parent is unusable. */
		if (!mux_ops->get_parent) {
			hw = ERR_PTR(-EINVAL);
			goto err;
		}

		composite->mux_hw = mux_hw;
		composite->mux_ops = mux_ops;
		clk_composite_ops->get_parent = clk_composite_get_parent;
		if (mux_ops->set_parent)
			clk_composite_ops->set_parent = clk_composite_set_parent;
		if (mux_ops->determine_rate)
			clk_composite_ops->determine_rate = clk_composite_determine_rate;
	}

	if (rate_hw && rate_ops) {
		/* A rate component without .recalc_rate is unusable. */
		if (!rate_ops->recalc_rate) {
			hw = ERR_PTR(-EINVAL);
			goto err;
		}
		clk_composite_ops->recalc_rate = clk_composite_recalc_rate;

		/* .determine_rate takes precedence over .round_rate. */
		if (rate_ops->determine_rate)
			clk_composite_ops->determine_rate =
				clk_composite_determine_rate;
		else if (rate_ops->round_rate)
			clk_composite_ops->round_rate =
				clk_composite_round_rate;

		/* .set_rate requires either .round_rate or .determine_rate */
		if (rate_ops->set_rate) {
			if (rate_ops->determine_rate || rate_ops->round_rate)
				clk_composite_ops->set_rate =
						clk_composite_set_rate;
			else
				WARN(1, "%s: missing round_rate op is required\n",
						__func__);
		}

		composite->rate_hw = rate_hw;
		composite->rate_ops = rate_ops;
	}

	/* Combined op only makes sense when both components can change. */
	if (mux_hw && mux_ops && rate_hw && rate_ops) {
		if (mux_ops->set_parent && rate_ops->set_rate)
			clk_composite_ops->set_rate_and_parent =
			clk_composite_set_rate_and_parent;
	}

	if (gate_hw && gate_ops) {
		/* A gate must support query, enable and disable. */
		if (!gate_ops->is_enabled || !gate_ops->enable ||
		    !gate_ops->disable) {
			hw = ERR_PTR(-EINVAL);
			goto err;
		}

		composite->gate_hw = gate_hw;
		composite->gate_ops = gate_ops;
		clk_composite_ops->is_enabled = clk_composite_is_enabled;
		clk_composite_ops->enable = clk_composite_enable;
		clk_composite_ops->disable = clk_composite_disable;
	}

	init.ops = clk_composite_ops;
	composite->hw.init = &init;

	ret = clk_hw_register(dev, hw);
	if (ret) {
		hw = ERR_PTR(ret);
		goto err;
	}

	/* Let each component hw reach the registered clk. */
	if (composite->mux_hw)
		composite->mux_hw->clk = hw->clk;

	if (composite->rate_hw)
		composite->rate_hw->clk = hw->clk;

	if (composite->gate_hw)
		composite->gate_hw->clk = hw->clk;

	return hw;

err:
	kfree(composite);
	return hw;
}
|
|
|
|
struct clk_hw *clk_hw_register_composite(struct device *dev, const char *name,
|
|
const char * const *parent_names, int num_parents,
|
|
struct clk_hw *mux_hw, const struct clk_ops *mux_ops,
|
|
struct clk_hw *rate_hw, const struct clk_ops *rate_ops,
|
|
struct clk_hw *gate_hw, const struct clk_ops *gate_ops,
|
|
unsigned long flags)
|
|
{
|
|
return __clk_hw_register_composite(dev, name, parent_names, NULL,
|
|
num_parents, mux_hw, mux_ops,
|
|
rate_hw, rate_ops, gate_hw,
|
|
gate_ops, flags);
|
|
}
|
|
EXPORT_SYMBOL_GPL(clk_hw_register_composite);
|
|
|
|
struct clk_hw *clk_hw_register_composite_pdata(struct device *dev,
|
|
const char *name,
|
|
const struct clk_parent_data *parent_data,
|
|
int num_parents,
|
|
struct clk_hw *mux_hw, const struct clk_ops *mux_ops,
|
|
struct clk_hw *rate_hw, const struct clk_ops *rate_ops,
|
|
struct clk_hw *gate_hw, const struct clk_ops *gate_ops,
|
|
unsigned long flags)
|
|
{
|
|
return __clk_hw_register_composite(dev, name, NULL, parent_data,
|
|
num_parents, mux_hw, mux_ops,
|
|
rate_hw, rate_ops, gate_hw,
|
|
gate_ops, flags);
|
|
}
|
|
|
|
struct clk *clk_register_composite(struct device *dev, const char *name,
|
|
const char * const *parent_names, int num_parents,
|
|
struct clk_hw *mux_hw, const struct clk_ops *mux_ops,
|
|
struct clk_hw *rate_hw, const struct clk_ops *rate_ops,
|
|
struct clk_hw *gate_hw, const struct clk_ops *gate_ops,
|
|
unsigned long flags)
|
|
{
|
|
struct clk_hw *hw;
|
|
|
|
hw = clk_hw_register_composite(dev, name, parent_names, num_parents,
|
|
mux_hw, mux_ops, rate_hw, rate_ops, gate_hw, gate_ops,
|
|
flags);
|
|
if (IS_ERR(hw))
|
|
return ERR_CAST(hw);
|
|
return hw->clk;
|
|
}
|
|
EXPORT_SYMBOL_GPL(clk_register_composite);
|
|
|
|
struct clk *clk_register_composite_pdata(struct device *dev, const char *name,
|
|
const struct clk_parent_data *parent_data,
|
|
int num_parents,
|
|
struct clk_hw *mux_hw, const struct clk_ops *mux_ops,
|
|
struct clk_hw *rate_hw, const struct clk_ops *rate_ops,
|
|
struct clk_hw *gate_hw, const struct clk_ops *gate_ops,
|
|
unsigned long flags)
|
|
{
|
|
struct clk_hw *hw;
|
|
|
|
hw = clk_hw_register_composite_pdata(dev, name, parent_data,
|
|
num_parents, mux_hw, mux_ops, rate_hw, rate_ops,
|
|
gate_hw, gate_ops, flags);
|
|
if (IS_ERR(hw))
|
|
return ERR_CAST(hw);
|
|
return hw->clk;
|
|
}
|
|
|
|
/* Unregister a composite clock and free its bookkeeping structure. */
void clk_unregister_composite(struct clk *clk)
{
	struct clk_hw *hw = __clk_get_hw(clk);
	struct clk_composite *composite;

	if (!hw)
		return;

	/* Resolve the container before the clk goes away. */
	composite = to_clk_composite(hw);

	clk_unregister(clk);
	kfree(composite);
}
|
|
|
|
/* clk_hw counterpart of clk_unregister_composite(). */
void clk_hw_unregister_composite(struct clk_hw *hw)
{
	struct clk_composite *composite = to_clk_composite(hw);

	clk_hw_unregister(hw);
	kfree(composite);
}
EXPORT_SYMBOL_GPL(clk_hw_unregister_composite);
|
|
|
|
/* devres destructor: unregister the composite clk_hw stored in @res. */
static void devm_clk_hw_release_composite(struct device *dev, void *res)
{
	struct clk_hw **hw = res;

	clk_hw_unregister_composite(*hw);
}
|
|
|
|
static struct clk_hw *__devm_clk_hw_register_composite(struct device *dev,
|
|
const char *name, const char * const *parent_names,
|
|
const struct clk_parent_data *pdata, int num_parents,
|
|
struct clk_hw *mux_hw, const struct clk_ops *mux_ops,
|
|
struct clk_hw *rate_hw, const struct clk_ops *rate_ops,
|
|
struct clk_hw *gate_hw, const struct clk_ops *gate_ops,
|
|
unsigned long flags)
|
|
{
|
|
struct clk_hw **ptr, *hw;
|
|
|
|
ptr = devres_alloc(devm_clk_hw_release_composite, sizeof(*ptr),
|
|
GFP_KERNEL);
|
|
if (!ptr)
|
|
return ERR_PTR(-ENOMEM);
|
|
|
|
hw = __clk_hw_register_composite(dev, name, parent_names, pdata,
|
|
num_parents, mux_hw, mux_ops, rate_hw,
|
|
rate_ops, gate_hw, gate_ops, flags);
|
|
|
|
if (!IS_ERR(hw)) {
|
|
*ptr = hw;
|
|
devres_add(dev, ptr);
|
|
} else {
|
|
devres_free(ptr);
|
|
}
|
|
|
|
return hw;
|
|
}
|
|
|
|
struct clk_hw *devm_clk_hw_register_composite_pdata(struct device *dev,
|
|
const char *name,
|
|
const struct clk_parent_data *parent_data,
|
|
int num_parents,
|
|
struct clk_hw *mux_hw, const struct clk_ops *mux_ops,
|
|
struct clk_hw *rate_hw, const struct clk_ops *rate_ops,
|
|
struct clk_hw *gate_hw, const struct clk_ops *gate_ops,
|
|
unsigned long flags)
|
|
{
|
|
return __devm_clk_hw_register_composite(dev, name, NULL, parent_data,
|
|
num_parents, mux_hw, mux_ops,
|
|
rate_hw, rate_ops, gate_hw,
|
|
gate_ops, flags);
|
|
}
|