mirror of
https://github.com/torvalds/linux.git
synced 2024-12-04 01:51:34 +00:00
262ca38f4b
If the clock cannot modify its rate and has CLK_SET_RATE_PARENT, clk_mux_determine_rate_flags(), clk_core_round_rate_nolock() and a number of drivers will forward the clk_rate_request to the parent clock. clk_core_round_rate_nolock() will pass the pointer directly, which means that we pass a clk_rate_request to the parent that has the rate, min_rate and max_rate of the child, and the best_parent_rate and best_parent_hw fields will be relative to the child as well, so will point to our current clock and its rate. The most common case for CLK_SET_RATE_PARENT is that the child and parent clock rates will be equal, so the rate field isn't a worry, but the other fields are. Similarly, if the parent clock driver ever modifies the best_parent_rate or best_parent_hw, this will be applied to the child once the call to clk_core_round_rate_nolock() is done. best_parent_hw is probably not going to be a valid parent, and best_parent_rate might lead to a parent rate change different to the one that was initially computed. clk_mux_determine_rate_flags() and the affected drivers will copy the request before forwarding it to the parents, so they won't be affected by the latter issue, but the former is still going to be there and will lead to erroneous data and context being passed to the various clock drivers in the same sub-tree. Let's create two new functions, clk_core_forward_rate_req() and clk_hw_forward_rate_request() for the framework and the clock providers that will copy a request from a child clock and update the context to match the parent's. We also update the relevant call sites in the framework and drivers to use that new function. Let's also add a test to make sure we avoid regressions there. 
Tested-by: Alexander Stein <alexander.stein@ew.tq-group.com> # imx8mp Tested-by: Marek Szyprowski <m.szyprowski@samsung.com> # exynos4210, meson g12b Signed-off-by: Maxime Ripard <maxime@cerno.tech> Link: https://lore.kernel.org/r/20220816112530.1837489-22-maxime@cerno.tech Tested-by: Linux Kernel Functional Testing <lkft@linaro.org> Tested-by: Naresh Kamboju <naresh.kamboju@linaro.org> Signed-off-by: Stephen Boyd <sboyd@kernel.org>
490 lines
13 KiB
C
490 lines
13 KiB
C
// SPDX-License-Identifier: GPL-2.0
|
|
/*
|
|
* Copyright (c) 2013 NVIDIA CORPORATION. All rights reserved.
|
|
*/
|
|
|
|
#include <linux/clk-provider.h>
|
|
#include <linux/device.h>
|
|
#include <linux/err.h>
|
|
#include <linux/slab.h>
|
|
|
|
static u8 clk_composite_get_parent(struct clk_hw *hw)
|
|
{
|
|
struct clk_composite *composite = to_clk_composite(hw);
|
|
const struct clk_ops *mux_ops = composite->mux_ops;
|
|
struct clk_hw *mux_hw = composite->mux_hw;
|
|
|
|
__clk_hw_set_clk(mux_hw, hw);
|
|
|
|
return mux_ops->get_parent(mux_hw);
|
|
}
|
|
|
|
/*
 * Select parent @index by delegating to the mux component's
 * ->set_parent() op, after syncing the shared clk pointer.
 */
static int clk_composite_set_parent(struct clk_hw *hw, u8 index)
{
	struct clk_composite *composite = to_clk_composite(hw);

	__clk_hw_set_clk(composite->mux_hw, hw);

	return composite->mux_ops->set_parent(composite->mux_hw, index);
}
|
|
|
|
static unsigned long clk_composite_recalc_rate(struct clk_hw *hw,
|
|
unsigned long parent_rate)
|
|
{
|
|
struct clk_composite *composite = to_clk_composite(hw);
|
|
const struct clk_ops *rate_ops = composite->rate_ops;
|
|
struct clk_hw *rate_hw = composite->rate_hw;
|
|
|
|
__clk_hw_set_clk(rate_hw, hw);
|
|
|
|
return rate_ops->recalc_rate(rate_hw, parent_rate);
|
|
}
|
|
|
|
static int clk_composite_determine_rate_for_parent(struct clk_hw *rate_hw,
|
|
struct clk_rate_request *req,
|
|
struct clk_hw *parent_hw,
|
|
const struct clk_ops *rate_ops)
|
|
{
|
|
long rate;
|
|
|
|
req->best_parent_hw = parent_hw;
|
|
req->best_parent_rate = clk_hw_get_rate(parent_hw);
|
|
|
|
if (rate_ops->determine_rate)
|
|
return rate_ops->determine_rate(rate_hw, req);
|
|
|
|
rate = rate_ops->round_rate(rate_hw, req->rate,
|
|
&req->best_parent_rate);
|
|
if (rate < 0)
|
|
return rate;
|
|
|
|
req->rate = rate;
|
|
|
|
return 0;
|
|
}
|
|
|
|
/*
 * Resolve a rate request for the composite clock.
 *
 * When both a rate component (with determine_rate or round_rate) and a
 * reparentable mux component are present, every candidate parent is tried
 * and the one yielding the rate closest to the request wins. Otherwise the
 * request is delegated to whichever single component can handle it.
 *
 * Returns 0 on success or a negative errno.
 */
static int clk_composite_determine_rate(struct clk_hw *hw,
					struct clk_rate_request *req)
{
	struct clk_composite *composite = to_clk_composite(hw);
	const struct clk_ops *rate_ops = composite->rate_ops;
	const struct clk_ops *mux_ops = composite->mux_ops;
	struct clk_hw *rate_hw = composite->rate_hw;
	struct clk_hw *mux_hw = composite->mux_hw;
	struct clk_hw *parent;
	unsigned long rate_diff;
	unsigned long best_rate_diff = ULONG_MAX;
	unsigned long best_rate = 0;
	int i, ret;

	if (rate_hw && rate_ops &&
	    (rate_ops->determine_rate || rate_ops->round_rate) &&
	    mux_hw && mux_ops && mux_ops->set_parent) {
		req->best_parent_hw = NULL;

		if (clk_hw_get_flags(hw) & CLK_SET_RATE_NO_REPARENT) {
			struct clk_rate_request tmp_req;

			/* Reparenting is forbidden: only the current parent
			 * may be considered. */
			parent = clk_hw_get_parent(mux_hw);

			/* Copy the request into tmp_req with the context
			 * adjusted for the parent, so the child's fields are
			 * not clobbered by the sub-clock's driver. */
			clk_hw_forward_rate_request(hw, req, parent, &tmp_req, req->rate);
			ret = clk_composite_determine_rate_for_parent(rate_hw,
								      &tmp_req,
								      parent,
								      rate_ops);
			if (ret)
				return ret;

			/* Propagate the outcome back into the caller's request. */
			req->rate = tmp_req.rate;
			req->best_parent_hw = tmp_req.best_parent_hw;
			req->best_parent_rate = tmp_req.best_parent_rate;

			return 0;
		}

		/* Try every possible parent and keep the closest match. */
		for (i = 0; i < clk_hw_get_num_parents(mux_hw); i++) {
			struct clk_rate_request tmp_req;

			parent = clk_hw_get_parent_by_index(mux_hw, i);
			if (!parent)
				continue;

			/* Forward a per-parent copy of the request, as above. */
			clk_hw_forward_rate_request(hw, req, parent, &tmp_req, req->rate);
			ret = clk_composite_determine_rate_for_parent(rate_hw,
								      &tmp_req,
								      parent,
								      rate_ops);
			if (ret)
				continue;

			rate_diff = abs(req->rate - tmp_req.rate);

			/* Accept the first viable parent unconditionally,
			 * then only strictly better (or exact) matches. */
			if (!rate_diff || !req->best_parent_hw
				|| best_rate_diff > rate_diff) {
				req->best_parent_hw = parent;
				req->best_parent_rate = tmp_req.best_parent_rate;
				best_rate_diff = rate_diff;
				best_rate = tmp_req.rate;
			}

			/* Exact hit: no better parent can exist. */
			if (!rate_diff)
				return 0;
		}

		req->rate = best_rate;
		return 0;
	} else if (rate_hw && rate_ops && rate_ops->determine_rate) {
		/* Rate component alone decides; mux cannot reparent. */
		__clk_hw_set_clk(rate_hw, hw);
		return rate_ops->determine_rate(rate_hw, req);
	} else if (mux_hw && mux_ops && mux_ops->determine_rate) {
		/* No usable rate component; let the mux pick a parent. */
		__clk_hw_set_clk(mux_hw, hw);
		return mux_ops->determine_rate(mux_hw, req);
	} else {
		pr_err("clk: clk_composite_determine_rate function called, but no mux or rate callback set!\n");
		return -EINVAL;
	}
}
|
|
|
|
static long clk_composite_round_rate(struct clk_hw *hw, unsigned long rate,
|
|
unsigned long *prate)
|
|
{
|
|
struct clk_composite *composite = to_clk_composite(hw);
|
|
const struct clk_ops *rate_ops = composite->rate_ops;
|
|
struct clk_hw *rate_hw = composite->rate_hw;
|
|
|
|
__clk_hw_set_clk(rate_hw, hw);
|
|
|
|
return rate_ops->round_rate(rate_hw, rate, prate);
|
|
}
|
|
|
|
static int clk_composite_set_rate(struct clk_hw *hw, unsigned long rate,
|
|
unsigned long parent_rate)
|
|
{
|
|
struct clk_composite *composite = to_clk_composite(hw);
|
|
const struct clk_ops *rate_ops = composite->rate_ops;
|
|
struct clk_hw *rate_hw = composite->rate_hw;
|
|
|
|
__clk_hw_set_clk(rate_hw, hw);
|
|
|
|
return rate_ops->set_rate(rate_hw, rate, parent_rate);
|
|
}
|
|
|
|
static int clk_composite_set_rate_and_parent(struct clk_hw *hw,
|
|
unsigned long rate,
|
|
unsigned long parent_rate,
|
|
u8 index)
|
|
{
|
|
struct clk_composite *composite = to_clk_composite(hw);
|
|
const struct clk_ops *rate_ops = composite->rate_ops;
|
|
const struct clk_ops *mux_ops = composite->mux_ops;
|
|
struct clk_hw *rate_hw = composite->rate_hw;
|
|
struct clk_hw *mux_hw = composite->mux_hw;
|
|
unsigned long temp_rate;
|
|
|
|
__clk_hw_set_clk(rate_hw, hw);
|
|
__clk_hw_set_clk(mux_hw, hw);
|
|
|
|
temp_rate = rate_ops->recalc_rate(rate_hw, parent_rate);
|
|
if (temp_rate > rate) {
|
|
rate_ops->set_rate(rate_hw, rate, parent_rate);
|
|
mux_ops->set_parent(mux_hw, index);
|
|
} else {
|
|
mux_ops->set_parent(mux_hw, index);
|
|
rate_ops->set_rate(rate_hw, rate, parent_rate);
|
|
}
|
|
|
|
return 0;
|
|
}
|
|
|
|
static int clk_composite_is_enabled(struct clk_hw *hw)
|
|
{
|
|
struct clk_composite *composite = to_clk_composite(hw);
|
|
const struct clk_ops *gate_ops = composite->gate_ops;
|
|
struct clk_hw *gate_hw = composite->gate_hw;
|
|
|
|
__clk_hw_set_clk(gate_hw, hw);
|
|
|
|
return gate_ops->is_enabled(gate_hw);
|
|
}
|
|
|
|
static int clk_composite_enable(struct clk_hw *hw)
|
|
{
|
|
struct clk_composite *composite = to_clk_composite(hw);
|
|
const struct clk_ops *gate_ops = composite->gate_ops;
|
|
struct clk_hw *gate_hw = composite->gate_hw;
|
|
|
|
__clk_hw_set_clk(gate_hw, hw);
|
|
|
|
return gate_ops->enable(gate_hw);
|
|
}
|
|
|
|
static void clk_composite_disable(struct clk_hw *hw)
|
|
{
|
|
struct clk_composite *composite = to_clk_composite(hw);
|
|
const struct clk_ops *gate_ops = composite->gate_ops;
|
|
struct clk_hw *gate_hw = composite->gate_hw;
|
|
|
|
__clk_hw_set_clk(gate_hw, hw);
|
|
|
|
gate_ops->disable(gate_hw);
|
|
}
|
|
|
|
/*
 * Allocate and register a composite clock built from optional mux, rate
 * and gate components. Parents are given either as @parent_names or as
 * @pdata (exactly one of the two). Each provided component pair must
 * supply its mandatory ops (get_parent for the mux, recalc_rate for the
 * rate part, is_enabled/enable/disable for the gate), and the composite
 * ops table is wired up accordingly.
 *
 * Returns the registered clk_hw, or an ERR_PTR on failure.
 */
static struct clk_hw *__clk_hw_register_composite(struct device *dev,
			const char *name, const char * const *parent_names,
			const struct clk_parent_data *pdata, int num_parents,
			struct clk_hw *mux_hw, const struct clk_ops *mux_ops,
			struct clk_hw *rate_hw, const struct clk_ops *rate_ops,
			struct clk_hw *gate_hw, const struct clk_ops *gate_ops,
			unsigned long flags)
{
	struct clk_hw *hw;
	struct clk_init_data init = {};
	struct clk_composite *composite;
	struct clk_ops *clk_composite_ops;
	int ret;

	composite = kzalloc(sizeof(*composite), GFP_KERNEL);
	if (!composite)
		return ERR_PTR(-ENOMEM);

	init.name = name;
	init.flags = flags;
	/* Callers pass either a name table or a parent_data table. */
	if (parent_names)
		init.parent_names = parent_names;
	else
		init.parent_data = pdata;
	init.num_parents = num_parents;
	hw = &composite->hw;

	/* The ops table lives inside the composite and is filled in below. */
	clk_composite_ops = &composite->ops;

	if (mux_hw && mux_ops) {
		/* A mux without get_parent is unusable. */
		if (!mux_ops->get_parent) {
			hw = ERR_PTR(-EINVAL);
			goto err;
		}

		composite->mux_hw = mux_hw;
		composite->mux_ops = mux_ops;
		clk_composite_ops->get_parent = clk_composite_get_parent;
		if (mux_ops->set_parent)
			clk_composite_ops->set_parent = clk_composite_set_parent;
		if (mux_ops->determine_rate)
			clk_composite_ops->determine_rate = clk_composite_determine_rate;
	}

	if (rate_hw && rate_ops) {
		/* A rate component without recalc_rate is unusable. */
		if (!rate_ops->recalc_rate) {
			hw = ERR_PTR(-EINVAL);
			goto err;
		}
		clk_composite_ops->recalc_rate = clk_composite_recalc_rate;

		/* Prefer determine_rate; round_rate is the legacy fallback.
		 * This may override the determine_rate set for the mux above. */
		if (rate_ops->determine_rate)
			clk_composite_ops->determine_rate =
				clk_composite_determine_rate;
		else if (rate_ops->round_rate)
			clk_composite_ops->round_rate =
				clk_composite_round_rate;

		/* .set_rate requires either .round_rate or .determine_rate */
		if (rate_ops->set_rate) {
			if (rate_ops->determine_rate || rate_ops->round_rate)
				clk_composite_ops->set_rate =
						clk_composite_set_rate;
			else
				WARN(1, "%s: missing round_rate op is required\n",
						__func__);
		}

		composite->rate_hw = rate_hw;
		composite->rate_ops = rate_ops;
	}

	/* Coordinated rate+parent changes need both capabilities. */
	if (mux_hw && mux_ops && rate_hw && rate_ops) {
		if (mux_ops->set_parent && rate_ops->set_rate)
			clk_composite_ops->set_rate_and_parent =
			clk_composite_set_rate_and_parent;
	}

	if (gate_hw && gate_ops) {
		/* A gate must support the full query/enable/disable trio. */
		if (!gate_ops->is_enabled || !gate_ops->enable ||
		    !gate_ops->disable) {
			hw = ERR_PTR(-EINVAL);
			goto err;
		}

		composite->gate_hw = gate_hw;
		composite->gate_ops = gate_ops;
		clk_composite_ops->is_enabled = clk_composite_is_enabled;
		clk_composite_ops->enable = clk_composite_enable;
		clk_composite_ops->disable = clk_composite_disable;
	}

	init.ops = clk_composite_ops;
	/* init is on the stack; clk_hw_register() copies what it needs. */
	composite->hw.init = &init;

	ret = clk_hw_register(dev, hw);
	if (ret) {
		hw = ERR_PTR(ret);
		goto err;
	}

	/* Point every component at the registered clk so the component ops
	 * see a valid hw->clk even when invoked directly. */
	if (composite->mux_hw)
		composite->mux_hw->clk = hw->clk;

	if (composite->rate_hw)
		composite->rate_hw->clk = hw->clk;

	if (composite->gate_hw)
		composite->gate_hw->clk = hw->clk;

	return hw;

err:
	kfree(composite);
	return hw;
}
|
|
|
|
struct clk_hw *clk_hw_register_composite(struct device *dev, const char *name,
|
|
const char * const *parent_names, int num_parents,
|
|
struct clk_hw *mux_hw, const struct clk_ops *mux_ops,
|
|
struct clk_hw *rate_hw, const struct clk_ops *rate_ops,
|
|
struct clk_hw *gate_hw, const struct clk_ops *gate_ops,
|
|
unsigned long flags)
|
|
{
|
|
return __clk_hw_register_composite(dev, name, parent_names, NULL,
|
|
num_parents, mux_hw, mux_ops,
|
|
rate_hw, rate_ops, gate_hw,
|
|
gate_ops, flags);
|
|
}
|
|
EXPORT_SYMBOL_GPL(clk_hw_register_composite);
|
|
|
|
struct clk_hw *clk_hw_register_composite_pdata(struct device *dev,
|
|
const char *name,
|
|
const struct clk_parent_data *parent_data,
|
|
int num_parents,
|
|
struct clk_hw *mux_hw, const struct clk_ops *mux_ops,
|
|
struct clk_hw *rate_hw, const struct clk_ops *rate_ops,
|
|
struct clk_hw *gate_hw, const struct clk_ops *gate_ops,
|
|
unsigned long flags)
|
|
{
|
|
return __clk_hw_register_composite(dev, name, NULL, parent_data,
|
|
num_parents, mux_hw, mux_ops,
|
|
rate_hw, rate_ops, gate_hw,
|
|
gate_ops, flags);
|
|
}
|
|
|
|
struct clk *clk_register_composite(struct device *dev, const char *name,
|
|
const char * const *parent_names, int num_parents,
|
|
struct clk_hw *mux_hw, const struct clk_ops *mux_ops,
|
|
struct clk_hw *rate_hw, const struct clk_ops *rate_ops,
|
|
struct clk_hw *gate_hw, const struct clk_ops *gate_ops,
|
|
unsigned long flags)
|
|
{
|
|
struct clk_hw *hw;
|
|
|
|
hw = clk_hw_register_composite(dev, name, parent_names, num_parents,
|
|
mux_hw, mux_ops, rate_hw, rate_ops, gate_hw, gate_ops,
|
|
flags);
|
|
if (IS_ERR(hw))
|
|
return ERR_CAST(hw);
|
|
return hw->clk;
|
|
}
|
|
EXPORT_SYMBOL_GPL(clk_register_composite);
|
|
|
|
struct clk *clk_register_composite_pdata(struct device *dev, const char *name,
|
|
const struct clk_parent_data *parent_data,
|
|
int num_parents,
|
|
struct clk_hw *mux_hw, const struct clk_ops *mux_ops,
|
|
struct clk_hw *rate_hw, const struct clk_ops *rate_ops,
|
|
struct clk_hw *gate_hw, const struct clk_ops *gate_ops,
|
|
unsigned long flags)
|
|
{
|
|
struct clk_hw *hw;
|
|
|
|
hw = clk_hw_register_composite_pdata(dev, name, parent_data,
|
|
num_parents, mux_hw, mux_ops, rate_hw, rate_ops,
|
|
gate_hw, gate_ops, flags);
|
|
if (IS_ERR(hw))
|
|
return ERR_CAST(hw);
|
|
return hw->clk;
|
|
}
|
|
|
|
/*
 * Unregister a composite clock and free the containing structure.
 * Silently does nothing when @clk has no backing clk_hw.
 */
void clk_unregister_composite(struct clk *clk)
{
	struct clk_composite *composite;
	struct clk_hw *hw = __clk_get_hw(clk);

	if (!hw)
		return;

	/* Grab the container before unregistering invalidates the clk. */
	composite = to_clk_composite(hw);

	clk_unregister(clk);
	kfree(composite);
}
|
|
|
|
/*
 * Unregister a composite clock given its clk_hw and free the
 * containing structure.
 */
void clk_hw_unregister_composite(struct clk_hw *hw)
{
	struct clk_composite *composite = to_clk_composite(hw);

	clk_hw_unregister(hw);
	kfree(composite);
}
EXPORT_SYMBOL_GPL(clk_hw_unregister_composite);
|
|
|
|
/* devres release callback: @res holds a pointer to the registered hw. */
static void devm_clk_hw_release_composite(struct device *dev, void *res)
{
	struct clk_hw **hwp = res;

	clk_hw_unregister_composite(*hwp);
}
|
|
|
|
static struct clk_hw *__devm_clk_hw_register_composite(struct device *dev,
|
|
const char *name, const char * const *parent_names,
|
|
const struct clk_parent_data *pdata, int num_parents,
|
|
struct clk_hw *mux_hw, const struct clk_ops *mux_ops,
|
|
struct clk_hw *rate_hw, const struct clk_ops *rate_ops,
|
|
struct clk_hw *gate_hw, const struct clk_ops *gate_ops,
|
|
unsigned long flags)
|
|
{
|
|
struct clk_hw **ptr, *hw;
|
|
|
|
ptr = devres_alloc(devm_clk_hw_release_composite, sizeof(*ptr),
|
|
GFP_KERNEL);
|
|
if (!ptr)
|
|
return ERR_PTR(-ENOMEM);
|
|
|
|
hw = __clk_hw_register_composite(dev, name, parent_names, pdata,
|
|
num_parents, mux_hw, mux_ops, rate_hw,
|
|
rate_ops, gate_hw, gate_ops, flags);
|
|
|
|
if (!IS_ERR(hw)) {
|
|
*ptr = hw;
|
|
devres_add(dev, ptr);
|
|
} else {
|
|
devres_free(ptr);
|
|
}
|
|
|
|
return hw;
|
|
}
|
|
|
|
struct clk_hw *devm_clk_hw_register_composite_pdata(struct device *dev,
|
|
const char *name,
|
|
const struct clk_parent_data *parent_data,
|
|
int num_parents,
|
|
struct clk_hw *mux_hw, const struct clk_ops *mux_ops,
|
|
struct clk_hw *rate_hw, const struct clk_ops *rate_ops,
|
|
struct clk_hw *gate_hw, const struct clk_ops *gate_ops,
|
|
unsigned long flags)
|
|
{
|
|
return __devm_clk_hw_register_composite(dev, name, NULL, parent_data,
|
|
num_parents, mux_hw, mux_ops,
|
|
rate_hw, rate_ops, gate_hw,
|
|
gate_ops, flags);
|
|
}
|