thunderbolt: Make bandwidth allocation mode function names consistent
Make sure the DisplayPort bandwidth allocation mode function names are consistent with the existing ones, such as the USB3 counterparts. No functional changes.

Signed-off-by: Mika Westerberg <mika.westerberg@linux.intel.com>
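Illustrative only (not part of the diff below): a minimal before/after sketch of a typical caller, using a check that appears in the tunnel code, to show how the rename reads at a call site:

        /* before: abbreviated "bw" naming */
        if (!usb4_dp_port_bw_mode_enabled(in))
                return -EOPNOTSUPP;

        /* after: spelled-out naming, matching helpers such as usb4_usb3_port_release_bandwidth() */
        if (!usb4_dp_port_bandwidth_mode_enabled(in))
                return -EOPNOTSUPP;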
commit 8d73f6b8e0
parent fd4d58d1fe
drivers/thunderbolt/tb.c

@@ -131,7 +131,7 @@ tb_attach_bandwidth_group(struct tb_cm *tcm, struct tb_port *in,
 static void tb_discover_bandwidth_group(struct tb_cm *tcm, struct tb_port *in,
                                         struct tb_port *out)
 {
-        if (usb4_dp_port_bw_mode_enabled(in)) {
+        if (usb4_dp_port_bandwidth_mode_enabled(in)) {
                 int index, i;
 
                 index = usb4_dp_port_group_id(in);
@@ -1169,7 +1169,7 @@ tb_recalc_estimated_bandwidth_for_group(struct tb_bandwidth_group *group)
                 struct tb_tunnel *tunnel;
                 struct tb_port *out;
 
-                if (!usb4_dp_port_bw_mode_enabled(in))
+                if (!usb4_dp_port_bandwidth_mode_enabled(in))
                         continue;
 
                 tunnel = tb_find_tunnel(tb, TB_TUNNEL_DP, in, NULL);
@@ -1217,7 +1217,7 @@ tb_recalc_estimated_bandwidth_for_group(struct tb_bandwidth_group *group)
                 else
                         estimated_bw = estimated_up;
 
-                if (usb4_dp_port_set_estimated_bw(in, estimated_bw))
+                if (usb4_dp_port_set_estimated_bandwidth(in, estimated_bw))
                         tb_port_warn(in, "failed to update estimated bandwidth\n");
         }
 
@@ -1912,12 +1912,12 @@ static void tb_handle_dp_bandwidth_request(struct work_struct *work)
 
         tb_port_dbg(in, "handling bandwidth allocation request\n");
 
-        if (!usb4_dp_port_bw_mode_enabled(in)) {
+        if (!usb4_dp_port_bandwidth_mode_enabled(in)) {
                 tb_port_warn(in, "bandwidth allocation mode not enabled\n");
                 goto unlock;
         }
 
-        ret = usb4_dp_port_requested_bw(in);
+        ret = usb4_dp_port_requested_bandwidth(in);
         if (ret < 0) {
                 if (ret == -ENODATA)
                         tb_port_dbg(in, "no bandwidth request active\n");
drivers/thunderbolt/tb.h

@@ -1292,19 +1292,20 @@ int usb4_usb3_port_release_bandwidth(struct tb_port *port, int *upstream_bw,
                                      int *downstream_bw);
 
 int usb4_dp_port_set_cm_id(struct tb_port *port, int cm_id);
-bool usb4_dp_port_bw_mode_supported(struct tb_port *port);
-bool usb4_dp_port_bw_mode_enabled(struct tb_port *port);
-int usb4_dp_port_set_cm_bw_mode_supported(struct tb_port *port, bool supported);
+bool usb4_dp_port_bandwidth_mode_supported(struct tb_port *port);
+bool usb4_dp_port_bandwidth_mode_enabled(struct tb_port *port);
+int usb4_dp_port_set_cm_bandwidth_mode_supported(struct tb_port *port,
+                                                 bool supported);
 int usb4_dp_port_group_id(struct tb_port *port);
 int usb4_dp_port_set_group_id(struct tb_port *port, int group_id);
 int usb4_dp_port_nrd(struct tb_port *port, int *rate, int *lanes);
 int usb4_dp_port_set_nrd(struct tb_port *port, int rate, int lanes);
 int usb4_dp_port_granularity(struct tb_port *port);
 int usb4_dp_port_set_granularity(struct tb_port *port, int granularity);
-int usb4_dp_port_set_estimated_bw(struct tb_port *port, int bw);
-int usb4_dp_port_allocated_bw(struct tb_port *port);
-int usb4_dp_port_allocate_bw(struct tb_port *port, int bw);
-int usb4_dp_port_requested_bw(struct tb_port *port);
+int usb4_dp_port_set_estimated_bandwidth(struct tb_port *port, int bw);
+int usb4_dp_port_allocated_bandwidth(struct tb_port *port);
+int usb4_dp_port_allocate_bandwidth(struct tb_port *port, int bw);
+int usb4_dp_port_requested_bandwidth(struct tb_port *port);
 
 int usb4_pci_port_set_ext_encapsulation(struct tb_port *port, bool enable);
drivers/thunderbolt/tunnel.c

@@ -641,7 +641,7 @@ static int tb_dp_xchg_caps(struct tb_tunnel *tunnel)
                               in->cap_adap + DP_REMOTE_CAP, 1);
 }
 
-static int tb_dp_bw_alloc_mode_enable(struct tb_tunnel *tunnel)
+static int tb_dp_bandwidth_alloc_mode_enable(struct tb_tunnel *tunnel)
 {
         int ret, estimated_bw, granularity, tmp;
         struct tb_port *out = tunnel->dst_port;
@@ -653,7 +653,7 @@ static int tb_dp_bw_alloc_mode_enable(struct tb_tunnel *tunnel)
         if (!bw_alloc_mode)
                 return 0;
 
-        ret = usb4_dp_port_set_cm_bw_mode_supported(in, true);
+        ret = usb4_dp_port_set_cm_bandwidth_mode_supported(in, true);
         if (ret)
                 return ret;
 
@@ -717,12 +717,12 @@ static int tb_dp_bw_alloc_mode_enable(struct tb_tunnel *tunnel)
 
         tb_port_dbg(in, "estimated bandwidth %d Mb/s\n", estimated_bw);
 
-        ret = usb4_dp_port_set_estimated_bw(in, estimated_bw);
+        ret = usb4_dp_port_set_estimated_bandwidth(in, estimated_bw);
         if (ret)
                 return ret;
 
         /* Initial allocation should be 0 according the spec */
-        ret = usb4_dp_port_allocate_bw(in, 0);
+        ret = usb4_dp_port_allocate_bandwidth(in, 0);
         if (ret)
                 return ret;
 
@@ -744,7 +744,7 @@ static int tb_dp_init(struct tb_tunnel *tunnel)
         if (!tb_switch_is_usb4(sw))
                 return 0;
 
-        if (!usb4_dp_port_bw_mode_supported(in))
+        if (!usb4_dp_port_bandwidth_mode_supported(in))
                 return 0;
 
         tb_port_dbg(in, "bandwidth allocation mode supported\n");
@@ -753,17 +753,17 @@ static int tb_dp_init(struct tb_tunnel *tunnel)
         if (ret)
                 return ret;
 
-        return tb_dp_bw_alloc_mode_enable(tunnel);
+        return tb_dp_bandwidth_alloc_mode_enable(tunnel);
 }
 
 static void tb_dp_deinit(struct tb_tunnel *tunnel)
 {
         struct tb_port *in = tunnel->src_port;
 
-        if (!usb4_dp_port_bw_mode_supported(in))
+        if (!usb4_dp_port_bandwidth_mode_supported(in))
                 return;
-        if (usb4_dp_port_bw_mode_enabled(in)) {
-                usb4_dp_port_set_cm_bw_mode_supported(in, false);
+        if (usb4_dp_port_bandwidth_mode_enabled(in)) {
+                usb4_dp_port_set_cm_bandwidth_mode_supported(in, false);
                 tb_port_dbg(in, "bandwidth allocation mode disabled\n");
         }
 }
@@ -827,21 +827,22 @@ static int tb_dp_nrd_bandwidth(struct tb_tunnel *tunnel, int *max_bw)
         return nrd_bw;
 }
 
-static int tb_dp_bw_mode_consumed_bandwidth(struct tb_tunnel *tunnel,
-                                            int *consumed_up, int *consumed_down)
+static int tb_dp_bandwidth_mode_consumed_bandwidth(struct tb_tunnel *tunnel,
+                                                   int *consumed_up,
+                                                   int *consumed_down)
 {
         struct tb_port *out = tunnel->dst_port;
         struct tb_port *in = tunnel->src_port;
         int ret, allocated_bw, max_bw;
 
-        if (!usb4_dp_port_bw_mode_enabled(in))
+        if (!usb4_dp_port_bandwidth_mode_enabled(in))
                 return -EOPNOTSUPP;
 
         if (!tunnel->bw_mode)
                 return -EOPNOTSUPP;
 
         /* Read what was allocated previously if any */
-        ret = usb4_dp_port_allocated_bw(in);
+        ret = usb4_dp_port_allocated_bandwidth(in);
         if (ret < 0)
                 return ret;
         allocated_bw = ret;
 
@@ -876,10 +877,10 @@ static int tb_dp_allocated_bandwidth(struct tb_tunnel *tunnel, int *allocated_up
          * If we have already set the allocated bandwidth then use that.
          * Otherwise we read it from the DPRX.
          */
-        if (usb4_dp_port_bw_mode_enabled(in) && tunnel->bw_mode) {
+        if (usb4_dp_port_bandwidth_mode_enabled(in) && tunnel->bw_mode) {
                 int ret, allocated_bw, max_bw;
 
-                ret = usb4_dp_port_allocated_bw(in);
+                ret = usb4_dp_port_allocated_bandwidth(in);
                 if (ret < 0)
                         return ret;
                 allocated_bw = ret;
 
@@ -911,7 +912,7 @@ static int tb_dp_alloc_bandwidth(struct tb_tunnel *tunnel, int *alloc_up,
         struct tb_port *in = tunnel->src_port;
         int max_bw, ret, tmp;
 
-        if (!usb4_dp_port_bw_mode_enabled(in))
+        if (!usb4_dp_port_bandwidth_mode_enabled(in))
                 return -EOPNOTSUPP;
 
         ret = tb_dp_nrd_bandwidth(tunnel, &max_bw);
@@ -920,14 +921,14 @@ static int tb_dp_alloc_bandwidth(struct tb_tunnel *tunnel, int *alloc_up,
 
         if (in->sw->config.depth < out->sw->config.depth) {
                 tmp = min(*alloc_down, max_bw);
-                ret = usb4_dp_port_allocate_bw(in, tmp);
+                ret = usb4_dp_port_allocate_bandwidth(in, tmp);
                 if (ret)
                         return ret;
                 *alloc_down = tmp;
                 *alloc_up = 0;
         } else {
                 tmp = min(*alloc_up, max_bw);
-                ret = usb4_dp_port_allocate_bw(in, tmp);
+                ret = usb4_dp_port_allocate_bandwidth(in, tmp);
                 if (ret)
                         return ret;
                 *alloc_down = 0;
@@ -1048,8 +1049,8 @@ static int tb_dp_consumed_bandwidth(struct tb_tunnel *tunnel, int *consumed_up,
                  * mode is enabled first and then read the bandwidth
                  * through those registers.
                  */
-                ret = tb_dp_bw_mode_consumed_bandwidth(tunnel, consumed_up,
-                                                       consumed_down);
+                ret = tb_dp_bandwidth_mode_consumed_bandwidth(tunnel, consumed_up,
+                                                              consumed_down);
                 if (ret < 0) {
                         if (ret != -EOPNOTSUPP)
                                 return ret;
drivers/thunderbolt/usb4.c

@@ -2294,13 +2294,14 @@ int usb4_dp_port_set_cm_id(struct tb_port *port, int cm_id)
 }
 
 /**
- * usb4_dp_port_bw_mode_supported() - Is the bandwidth allocation mode supported
+ * usb4_dp_port_bandwidth_mode_supported() - Is the bandwidth allocation mode
+ *                                           supported
  * @port: DP IN adapter to check
  *
  * Can be called to any DP IN adapter. Returns true if the adapter
  * supports USB4 bandwidth allocation mode, false otherwise.
  */
-bool usb4_dp_port_bw_mode_supported(struct tb_port *port)
+bool usb4_dp_port_bandwidth_mode_supported(struct tb_port *port)
 {
         int ret;
         u32 val;
@@ -2317,13 +2318,14 @@ bool usb4_dp_port_bw_mode_supported(struct tb_port *port)
 }
 
 /**
- * usb4_dp_port_bw_mode_enabled() - Is the bandwidth allocation mode enabled
+ * usb4_dp_port_bandwidth_mode_enabled() - Is the bandwidth allocation mode
+ *                                         enabled
  * @port: DP IN adapter to check
  *
 * Can be called to any DP IN adapter. Returns true if the bandwidth
 * allocation mode has been enabled, false otherwise.
 */
-bool usb4_dp_port_bw_mode_enabled(struct tb_port *port)
+bool usb4_dp_port_bandwidth_mode_enabled(struct tb_port *port)
 {
         int ret;
         u32 val;
@@ -2340,7 +2342,8 @@ bool usb4_dp_port_bw_mode_enabled(struct tb_port *port)
 }
 
 /**
- * usb4_dp_port_set_cm_bw_mode_supported() - Set/clear CM support for bandwidth allocation mode
+ * usb4_dp_port_set_cm_bandwidth_mode_supported() - Set/clear CM support for
+ *                                                  bandwidth allocation mode
  * @port: DP IN adapter
  * @supported: Does the CM support bandwidth allocation mode
  *
@@ -2349,7 +2352,8 @@ bool usb4_dp_port_bw_mode_enabled(struct tb_port *port)
  * otherwise. Specifically returns %-OPNOTSUPP if the passed in adapter
  * does not support this.
  */
-int usb4_dp_port_set_cm_bw_mode_supported(struct tb_port *port, bool supported)
+int usb4_dp_port_set_cm_bandwidth_mode_supported(struct tb_port *port,
+                                                 bool supported)
 {
         u32 val;
         int ret;
@@ -2623,7 +2627,7 @@ int usb4_dp_port_set_granularity(struct tb_port *port, int granularity)
 }
 
 /**
- * usb4_dp_port_set_estimated_bw() - Set estimated bandwidth
+ * usb4_dp_port_set_estimated_bandwidth() - Set estimated bandwidth
  * @port: DP IN adapter
  * @bw: Estimated bandwidth in Mb/s.
  *
@@ -2633,7 +2637,7 @@ int usb4_dp_port_set_granularity(struct tb_port *port, int granularity)
  * and negative errno otherwise. Specifically returns %-EOPNOTSUPP if
 * the adapter does not support this.
 */
-int usb4_dp_port_set_estimated_bw(struct tb_port *port, int bw)
+int usb4_dp_port_set_estimated_bandwidth(struct tb_port *port, int bw)
 {
         u32 val, granularity;
         int ret;
@@ -2659,14 +2663,14 @@ int usb4_dp_port_set_estimated_bw(struct tb_port *port, int bw)
 }
 
 /**
- * usb4_dp_port_allocated_bw() - Return allocated bandwidth
+ * usb4_dp_port_allocated_bandwidth() - Return allocated bandwidth
  * @port: DP IN adapter
  *
  * Reads and returns allocated bandwidth for @port in Mb/s (taking into
  * account the programmed granularity). Returns negative errno in case
  * of error.
  */
-int usb4_dp_port_allocated_bw(struct tb_port *port)
+int usb4_dp_port_allocated_bandwidth(struct tb_port *port)
 {
         u32 val, granularity;
         int ret;
@@ -2752,7 +2756,7 @@ static int usb4_dp_port_wait_and_clear_cm_ack(struct tb_port *port,
 }
 
 /**
- * usb4_dp_port_allocate_bw() - Set allocated bandwidth
+ * usb4_dp_port_allocate_bandwidth() - Set allocated bandwidth
  * @port: DP IN adapter
  * @bw: New allocated bandwidth in Mb/s
  *
@@ -2760,7 +2764,7 @@ static int usb4_dp_port_wait_and_clear_cm_ack(struct tb_port *port,
  * driver). Takes into account the programmed granularity. Returns %0 in
  * success and negative errno in case of error.
  */
-int usb4_dp_port_allocate_bw(struct tb_port *port, int bw)
+int usb4_dp_port_allocate_bandwidth(struct tb_port *port, int bw)
 {
         u32 val, granularity;
         int ret;
@@ -2794,7 +2798,7 @@ int usb4_dp_port_allocate_bw(struct tb_port *port, int bw)
 }
 
 /**
- * usb4_dp_port_requested_bw() - Read requested bandwidth
+ * usb4_dp_port_requested_bandwidth() - Read requested bandwidth
  * @port: DP IN adapter
  *
  * Reads the DPCD (graphics driver) requested bandwidth and returns it
@@ -2803,7 +2807,7 @@ int usb4_dp_port_allocate_bw(struct tb_port *port, int bw)
  * the adapter does not support bandwidth allocation mode, and %ENODATA
 * if there is no active bandwidth request from the graphics driver.
 */
-int usb4_dp_port_requested_bw(struct tb_port *port)
+int usb4_dp_port_requested_bandwidth(struct tb_port *port)
 {
         u32 val, granularity;
         int ret;