Merge branches 'clk-bulk-optional', 'clk-kirkwood', 'clk-socfpga' and 'clk-docs' into clk-next
- Add a clk_bulk_get_optional() API (with devm too)
- Support for Marvell 98DX1135 SoCs

* clk-bulk-optional:
  clk: Document some devm_clk_bulk*() APIs
  clk: Add devm_clk_bulk_get_optional() function
  clk: Add clk_bulk_get_optional() function

* clk-kirkwood:
  clk: kirkwood: Add support for MV98DX1135
  dt-bindings: clock: mvebu: Add compatible string for 98dx1135 core clock

* clk-socfpga:
  clk: socfpga: stratix10: fix divider entry for the emac clocks
  clk: socfpga: stratix10: add additional clocks needed for the NAND IP

* clk-docs:
  clk: Grammar missing "and", Spelling s/statisfied/satisfied/
commit dfe1d3a283
@@ -59,6 +59,7 @@ Required properties:
 	"marvell,dove-core-clock" - for Dove SoC core clocks
 	"marvell,kirkwood-core-clock" - for Kirkwood SoC (except mv88f6180)
 	"marvell,mv88f6180-core-clock" - for Kirkwood MV88f6180 SoC
+	"marvell,mv98dx1135-core-clock" - for Kirkwood 98dx1135 SoC
 	"marvell,mv88f5181-core-clock" - for Orion MV88F5181 SoC
 	"marvell,mv88f5182-core-clock" - for Orion MV88F5182 SoC
 	"marvell,mv88f5281-core-clock" - for Orion MV88F5281 SoC
@@ -244,6 +244,10 @@ CLOCK
   devm_clk_get()
   devm_clk_get_optional()
   devm_clk_put()
+  devm_clk_bulk_get()
+  devm_clk_bulk_get_all()
+  devm_clk_bulk_get_optional()
+  devm_get_clk_from_child()
   devm_clk_hw_register()
   devm_of_clk_add_hw_provider()
   devm_clk_hw_register_clkdev()
@@ -75,8 +75,8 @@ void clk_bulk_put(int num_clks, struct clk_bulk_data *clks)
 }
 EXPORT_SYMBOL_GPL(clk_bulk_put);
 
-int __must_check clk_bulk_get(struct device *dev, int num_clks,
-			      struct clk_bulk_data *clks)
+static int __clk_bulk_get(struct device *dev, int num_clks,
+			  struct clk_bulk_data *clks, bool optional)
 {
 	int ret;
 	int i;
@@ -88,10 +88,14 @@ int __must_check clk_bulk_get(struct device *dev, int num_clks,
 		clks[i].clk = clk_get(dev, clks[i].id);
 		if (IS_ERR(clks[i].clk)) {
 			ret = PTR_ERR(clks[i].clk);
+			clks[i].clk = NULL;
+
+			if (ret == -ENOENT && optional)
+				continue;
+
 			if (ret != -EPROBE_DEFER)
 				dev_err(dev, "Failed to get clk '%s': %d\n",
 					clks[i].id, ret);
-			clks[i].clk = NULL;
 			goto err;
 		}
 	}
@@ -103,8 +107,21 @@ err:
 
 	return ret;
 }
+
+int __must_check clk_bulk_get(struct device *dev, int num_clks,
+			      struct clk_bulk_data *clks)
+{
+	return __clk_bulk_get(dev, num_clks, clks, false);
+}
 EXPORT_SYMBOL(clk_bulk_get);
 
+int __must_check clk_bulk_get_optional(struct device *dev, int num_clks,
+				       struct clk_bulk_data *clks)
+{
+	return __clk_bulk_get(dev, num_clks, clks, true);
+}
+EXPORT_SYMBOL_GPL(clk_bulk_get_optional);
+
 void clk_bulk_put_all(int num_clks, struct clk_bulk_data *clks)
 {
 	if (IS_ERR_OR_NULL(clks))
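For orientation only: a minimal consumer sketch of the non-managed API added above. It is not part of the commit, and the "bus" and "debounce" clock names are invented for illustration. Real errors such as -EPROBE_DEFER still propagate, while absent clocks simply come back as NULL entries that the bulk prepare/enable helpers treat as no-ops.

#include <linux/clk.h>
#include <linux/device.h>
#include <linux/kernel.h>

/* Hypothetical consumer: "bus" is expected on every board, "debounce"
 * may legitimately be missing from some hardware descriptions. */
static struct clk_bulk_data my_clks[] = {
	{ .id = "bus" },
	{ .id = "debounce" },
};

static int my_enable_clocks(struct device *dev)
{
	int ret;

	/* Missing producers leave .clk == NULL instead of returning -ENOENT */
	ret = clk_bulk_get_optional(dev, ARRAY_SIZE(my_clks), my_clks);
	if (ret)
		return ret;

	/* NULL clk pointers are ignored by clk_prepare()/clk_enable() */
	ret = clk_bulk_prepare_enable(ARRAY_SIZE(my_clks), my_clks);
	if (ret)
		clk_bulk_put(ARRAY_SIZE(my_clks), my_clks);

	return ret;
}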
@@ -52,8 +52,8 @@ static void devm_clk_bulk_release(struct device *dev, void *res)
 	clk_bulk_put(devres->num_clks, devres->clks);
 }
 
-int __must_check devm_clk_bulk_get(struct device *dev, int num_clks,
-				   struct clk_bulk_data *clks)
+static int __devm_clk_bulk_get(struct device *dev, int num_clks,
+			       struct clk_bulk_data *clks, bool optional)
 {
 	struct clk_bulk_devres *devres;
 	int ret;
@@ -63,6 +63,9 @@ int __must_check devm_clk_bulk_get(struct device *dev, int num_clks,
 	if (!devres)
 		return -ENOMEM;
 
-	ret = clk_bulk_get(dev, num_clks, clks);
+	if (optional)
+		ret = clk_bulk_get_optional(dev, num_clks, clks);
+	else
+		ret = clk_bulk_get(dev, num_clks, clks);
 	if (!ret) {
 		devres->clks = clks;
@@ -74,8 +77,21 @@ int __must_check devm_clk_bulk_get(struct device *dev, int num_clks,
 
 	return ret;
 }
+
+int __must_check devm_clk_bulk_get(struct device *dev, int num_clks,
+				   struct clk_bulk_data *clks)
+{
+	return __devm_clk_bulk_get(dev, num_clks, clks, false);
+}
 EXPORT_SYMBOL_GPL(devm_clk_bulk_get);
 
+int __must_check devm_clk_bulk_get_optional(struct device *dev, int num_clks,
+					    struct clk_bulk_data *clks)
+{
+	return __devm_clk_bulk_get(dev, num_clks, clks, true);
+}
+EXPORT_SYMBOL_GPL(devm_clk_bulk_get_optional);
+
 int __must_check devm_clk_bulk_get_all(struct device *dev,
 				       struct clk_bulk_data **clks)
 {
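Similarly, a hedged sketch (not from this commit; driver and clock names are made up) of how a probe routine might use the managed variant, so no explicit clk_bulk_put() is needed on the error and remove paths:

#include <linux/clk.h>
#include <linux/kernel.h>
#include <linux/platform_device.h>

static struct clk_bulk_data foo_clks[] = {
	{ .id = "core" },
	{ .id = "ref" },	/* optional on some boards */
};

static int foo_probe(struct platform_device *pdev)
{
	int ret;

	/* Managed: the clocks are released automatically on driver detach */
	ret = devm_clk_bulk_get_optional(&pdev->dev, ARRAY_SIZE(foo_clks),
					 foo_clks);
	if (ret)
		return ret;

	return clk_bulk_prepare_enable(ARRAY_SIZE(foo_clks), foo_clks);
}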
@@ -2194,7 +2194,7 @@ int clk_set_rate(struct clk *clk, unsigned long rate)
 EXPORT_SYMBOL_GPL(clk_set_rate);
 
 /**
- * clk_set_rate_exclusive - specify a new rate get exclusive control
+ * clk_set_rate_exclusive - specify a new rate and get exclusive control
  * @clk: the clk whose rate is being changed
  * @rate: the new rate for clk
  *
@@ -2202,7 +2202,7 @@ EXPORT_SYMBOL_GPL(clk_set_rate);
  * within a critical section
  *
  * This can be used initially to ensure that at least 1 consumer is
- * statisfied when several consumers are competing for exclusivity over the
+ * satisfied when several consumers are competing for exclusivity over the
  * same clock provider.
 *
 * The exclusivity is not applied if setting the rate failed.
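As a reminder of what the corrected comment documents, a small consumer-side illustration (assumed usage, not part of this change; the rate value is arbitrary):

#include <linux/clk.h>

/* Pin the clock at 100 MHz and keep other consumers from changing it
 * until the exclusivity reference is dropped again. */
static int hold_rate(struct clk *clk)
{
	int ret;

	ret = clk_set_rate_exclusive(clk, 100000000);
	if (ret)
		return ret;

	/* ... work that depends on the pinned rate ... */

	/* Balance the exclusivity taken by clk_set_rate_exclusive() */
	clk_rate_exclusive_put(clk);

	return 0;
}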
@@ -185,6 +185,11 @@ static void __init mv88f6180_get_clk_ratio(
 	}
 }
 
+static u32 __init mv98dx1135_get_tclk_freq(void __iomem *sar)
+{
+	return 166666667;
+}
+
 static const struct coreclk_soc_desc kirkwood_coreclks = {
 	.get_tclk_freq = kirkwood_get_tclk_freq,
 	.get_cpu_freq = kirkwood_get_cpu_freq,
@@ -201,6 +206,14 @@ static const struct coreclk_soc_desc mv88f6180_coreclks = {
 	.num_ratios = ARRAY_SIZE(kirkwood_coreclk_ratios),
 };
 
+static const struct coreclk_soc_desc mv98dx1135_coreclks = {
+	.get_tclk_freq = mv98dx1135_get_tclk_freq,
+	.get_cpu_freq = kirkwood_get_cpu_freq,
+	.get_clk_ratio = kirkwood_get_clk_ratio,
+	.ratios = kirkwood_coreclk_ratios,
+	.num_ratios = ARRAY_SIZE(kirkwood_coreclk_ratios),
+};
+
 /*
  * Clock Gating Control
  */
@@ -325,6 +338,8 @@ static void __init kirkwood_clk_init(struct device_node *np)
 
 	if (of_device_is_compatible(np, "marvell,mv88f6180-core-clock"))
 		mvebu_coreclk_setup(np, &mv88f6180_coreclks);
+	else if (of_device_is_compatible(np, "marvell,mv98dx1135-core-clock"))
+		mvebu_coreclk_setup(np, &mv98dx1135_coreclks);
 	else
 		mvebu_coreclk_setup(np, &kirkwood_coreclks);
 
@@ -339,3 +354,5 @@ CLK_OF_DECLARE(kirkwood_clk, "marvell,kirkwood-core-clock",
 	       kirkwood_clk_init);
 CLK_OF_DECLARE(mv88f6180_clk, "marvell,mv88f6180-core-clock",
 	       kirkwood_clk_init);
+CLK_OF_DECLARE(98dx1135_clk, "marvell,mv98dx1135-core-clock",
+	       kirkwood_clk_init);
@@ -103,9 +103,9 @@ static const struct stratix10_perip_cnt_clock s10_main_perip_cnt_clks[] = {
 	{ STRATIX10_NOC_CLK, "noc_clk", NULL, noc_mux, ARRAY_SIZE(noc_mux),
 	  0, 0, 0, 0x3C, 1},
 	{ STRATIX10_EMAC_A_FREE_CLK, "emaca_free_clk", NULL, emaca_free_mux, ARRAY_SIZE(emaca_free_mux),
-	  0, 0, 4, 0xB0, 0},
+	  0, 0, 2, 0xB0, 0},
 	{ STRATIX10_EMAC_B_FREE_CLK, "emacb_free_clk", NULL, emacb_free_mux, ARRAY_SIZE(emacb_free_mux),
-	  0, 0, 4, 0xB0, 1},
+	  0, 0, 2, 0xB0, 1},
 	{ STRATIX10_EMAC_PTP_FREE_CLK, "emac_ptp_free_clk", NULL, emac_ptp_free_mux,
 	  ARRAY_SIZE(emac_ptp_free_mux), 0, 0, 4, 0xB0, 2},
 	{ STRATIX10_GPIO_DB_FREE_CLK, "gpio_db_free_clk", NULL, gpio_db_free_mux,
@@ -161,8 +161,12 @@ static const struct stratix10_gate_clock s10_gate_clks[] = {
 	  8, 0, 0, 0, 0, 0, 0},
 	{ STRATIX10_SPI_M_CLK, "spi_m_clk", "l4_mp_clk", NULL, 1, 0, 0xA4,
 	  9, 0, 0, 0, 0, 0, 0},
-	{ STRATIX10_NAND_CLK, "nand_clk", "l4_main_clk", NULL, 1, 0, 0xA4,
+	{ STRATIX10_NAND_X_CLK, "nand_x_clk", "l4_mp_clk", NULL, 1, 0, 0xA4,
 	  10, 0, 0, 0, 0, 0, 0},
+	{ STRATIX10_NAND_CLK, "nand_clk", "nand_x_clk", NULL, 1, 0, 0xA4,
+	  10, 0, 0, 0, 0, 0, 4},
+	{ STRATIX10_NAND_ECC_CLK, "nand_ecc_clk", "nand_x_clk", NULL, 1, 0, 0xA4,
+	  10, 0, 0, 0, 0, 0, 4},
 };
 
 static int s10_clk_register_c_perip(const struct stratix10_perip_c_clock *clks,
@@ -79,6 +79,8 @@
 #define STRATIX10_USB_CLK		59
 #define STRATIX10_SPI_M_CLK		60
 #define STRATIX10_NAND_CLK		61
-#define STRATIX10_NUM_CLKS		62
+#define STRATIX10_NAND_X_CLK		62
+#define STRATIX10_NAND_ECC_CLK		63
+#define STRATIX10_NUM_CLKS		64
 
 #endif /* __STRATIX10_CLOCK_H */
@@ -332,6 +332,19 @@ int __must_check clk_bulk_get(struct device *dev, int num_clks,
  */
 int __must_check clk_bulk_get_all(struct device *dev,
 				  struct clk_bulk_data **clks);
+
+/**
+ * clk_bulk_get_optional - lookup and obtain a number of references to clock producer
+ * @dev: device for clock "consumer"
+ * @num_clks: the number of clk_bulk_data
+ * @clks: the clk_bulk_data table of consumer
+ *
+ * Behaves the same as clk_bulk_get() except where there is no clock producer.
+ * In this case, instead of returning -ENOENT, the function returns 0 and
+ * NULL for a clk for which a clock producer could not be determined.
+ */
+int __must_check clk_bulk_get_optional(struct device *dev, int num_clks,
+				       struct clk_bulk_data *clks);
 /**
  * devm_clk_bulk_get - managed get multiple clk consumers
  * @dev: device for clock "consumer"
@@ -346,6 +359,28 @@ int __must_check clk_bulk_get_all(struct device *dev,
  */
 int __must_check devm_clk_bulk_get(struct device *dev, int num_clks,
 				   struct clk_bulk_data *clks);
+/**
+ * devm_clk_bulk_get_optional - managed get multiple optional consumer clocks
+ * @dev: device for clock "consumer"
+ * @clks: pointer to the clk_bulk_data table of consumer
+ *
+ * Behaves the same as devm_clk_bulk_get() except where there is no clock
+ * producer. In this case, instead of returning -ENOENT, the function returns
+ * NULL for given clk. It is assumed all clocks in clk_bulk_data are optional.
+ *
+ * Returns 0 if all clocks specified in clk_bulk_data table are obtained
+ * successfully or for any clk there was no clk provider available, otherwise
+ * returns valid IS_ERR() condition containing errno.
+ * The implementation uses @dev and @clk_bulk_data.id to determine the
+ * clock consumer, and thereby the clock producer.
+ * The clock returned is stored in each @clk_bulk_data.clk field.
+ *
+ * Drivers must assume that the clock source is not enabled.
+ *
+ * clk_bulk_get should not be called from within interrupt context.
+ */
+int __must_check devm_clk_bulk_get_optional(struct device *dev, int num_clks,
+					    struct clk_bulk_data *clks);
 /**
  * devm_clk_bulk_get_all - managed get multiple clk consumers
  * @dev: device for clock "consumer"
@@ -718,6 +753,12 @@ static inline int __must_check clk_bulk_get(struct device *dev, int num_clks,
 	return 0;
 }
 
+static inline int __must_check clk_bulk_get_optional(struct device *dev,
+					int num_clks, struct clk_bulk_data *clks)
+{
+	return 0;
+}
+
 static inline int __must_check clk_bulk_get_all(struct device *dev,
 						struct clk_bulk_data **clks)
 {
@@ -741,6 +782,12 @@ static inline int __must_check devm_clk_bulk_get(struct device *dev, int num_clk
 	return 0;
 }
 
+static inline int __must_check devm_clk_bulk_get_optional(struct device *dev,
+					int num_clks, struct clk_bulk_data *clks)
+{
+	return 0;
+}
+
 static inline int __must_check devm_clk_bulk_get_all(struct device *dev,
 						      struct clk_bulk_data **clks)
 {
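To make the documented semantics concrete: after a successful clk_bulk_get_optional() call a consumer can detect an absent optional clock by checking for a NULL .clk entry. A hedged example, with invented clock names:

#include <linux/clk.h>
#include <linux/device.h>
#include <linux/kernel.h>

static struct clk_bulk_data bulk[] = {
	{ .id = "axi" },
	{ .id = "ptp" },	/* optional on some platforms */
};

static int get_clocks(struct device *dev)
{
	int ret;

	ret = clk_bulk_get_optional(dev, ARRAY_SIZE(bulk), bulk);
	if (ret)
		return ret;	/* real errors, e.g. -EPROBE_DEFER, still fail */

	/* A missing producer shows up as a NULL clk, not as an error */
	if (!bulk[1].clk)
		dev_info(dev, "ptp clock not provided, feature disabled\n");

	return 0;
}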