Merge tag 'clk-fixes-for-linus' of git://git.kernel.org/pub/scm/linux/kernel/git/clk/linux

Pull clk fix from Stephen Boyd:
 "One fix for the composite clk that broke when we changed this clk type
  to use the determine_rate instead of round_rate clk op by default.
  This caused lots of problems on Rockchip SoCs because they heavily use
  the composite clk code to model the clk tree"

* tag 'clk-fixes-for-linus' of git://git.kernel.org/pub/scm/linux/kernel/git/clk/linux:
  clk: composite: Also consider .determine_rate for rate + mux composites
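
For context: the description above hinges on the difference between the .round_rate and .determine_rate clk ops. .round_rate only returns an adjusted rate (and may tweak the parent rate passed by reference), while .determine_rate receives a struct clk_rate_request and may also pick a different parent through best_parent_hw. Below is a minimal, abridged sketch of the relevant declarations, paraphrased from include/linux/clk-provider.h of that era; every other member is elided and the comments are explanatory rather than verbatim.

/*
 * Abridged excerpt, paraphrased from include/linux/clk-provider.h
 * (v5.15 era); all other members are elided.
 */
struct clk_hw;

struct clk_rate_request {
	unsigned long rate;		/* requested rate */
	unsigned long min_rate;
	unsigned long max_rate;
	unsigned long best_parent_rate;	/* rate of the chosen parent */
	struct clk_hw *best_parent_hw;	/* chosen parent; only .determine_rate can set this */
};

struct clk_ops {
	/* ... other ops elided ... */
	long	(*round_rate)(struct clk_hw *hw, unsigned long rate,
			      unsigned long *parent_rate);
	int	(*determine_rate)(struct clk_hw *hw,
				  struct clk_rate_request *req);
	/* ... */
};

Because only .determine_rate can reparent, a composite clk with both a rate and a mux component must keep its own parent-selection loop ahead of any delegation to the rate component's .determine_rate, which is exactly the reordering in the diff below.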
commit 3a4347d82e
Linus Torvalds, 2021-10-30 09:55:46 -07:00
1 changed file with 5 additions and 5 deletions

drivers/clk/clk-composite.c

@@ -58,11 +58,8 @@ static int clk_composite_determine_rate(struct clk_hw *hw,
 	long rate;
 	int i;
 
-	if (rate_hw && rate_ops && rate_ops->determine_rate) {
-		__clk_hw_set_clk(rate_hw, hw);
-		return rate_ops->determine_rate(rate_hw, req);
-	} else if (rate_hw && rate_ops && rate_ops->round_rate &&
-		   mux_hw && mux_ops && mux_ops->set_parent) {
+	if (rate_hw && rate_ops && rate_ops->round_rate &&
+	    mux_hw && mux_ops && mux_ops->set_parent) {
 		req->best_parent_hw = NULL;
 
 		if (clk_hw_get_flags(hw) & CLK_SET_RATE_NO_REPARENT) {
@@ -107,6 +104,9 @@ static int clk_composite_determine_rate(struct clk_hw *hw,
 
 		req->rate = best_rate;
 		return 0;
+	} else if (rate_hw && rate_ops && rate_ops->determine_rate) {
+		__clk_hw_set_clk(rate_hw, hw);
+		return rate_ops->determine_rate(rate_hw, req);
 	} else if (mux_hw && mux_ops && mux_ops->determine_rate) {
 		__clk_hw_set_clk(mux_hw, hw);
 		return mux_ops->determine_rate(mux_hw, req);