drm/amd/display: Preserve gpu memory allocation for life of dc

[Why]
We want to keep the same buffer allocated for use during multiple
hardware initializations.

[How]
 - allocate the GPU memory buffer when the clock manager is constructed
 - free the GPU memory buffer when the clock manager is destructed (sketched below)
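
For orientation, a minimal sketch of the resulting buffer ownership, condensed from
the diff below; it assumes nothing beyond the helpers and fields the patch itself
touches (dm_helpers_allocate_gpu_mem(), dm_helpers_free_gpu_mem(), wm_range_table,
wm_range_table_addr):

	/* dcn3_clk_mgr_construct(): allocate once, keep the PMFW-visible address. */
	clk_mgr->wm_range_table = dm_helpers_allocate_gpu_mem(clk_mgr->base.ctx,
			DC_MEM_ALLOC_TYPE_GART, sizeof(WatermarksExternal_t),
			&clk_mgr->wm_range_table_addr);

	/* dcn3_notify_wm_ranges(): reuse the same buffer on every hardware init. */
	WatermarksExternal_t *table = (WatermarksExternal_t *) clk_mgr->wm_range_table;

	/* dcn3_clk_mgr_destroy(): release it only when the clock manager is torn down. */
	if (clk_mgr->wm_range_table)
		dm_helpers_free_gpu_mem(clk_mgr->base.ctx, DC_MEM_ALLOC_TYPE_GART,
				clk_mgr->wm_range_table);

This keeps dcn3_notify_wm_ranges() allocation-free, so repeated hardware
initializations no longer allocate and free a fresh GART buffer each time.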

Signed-off-by: Joshua Aberback <joshua.aberback@amd.com>
Reviewed-by: Jun Lei <Jun.Lei@amd.com>
Acked-by: Eryk Brol <eryk.brol@amd.com>
Signed-off-by: Alex Deucher <alexander.deucher@amd.com>
Authored by Joshua Aberback on 2020-06-15 17:58:00 -04:00; committed by Alex Deucher
parent 6bc1242960
commit dd827a489c
3 changed files with 14 additions and 12 deletions

drivers/gpu/drm/amd/display/dc/clk_mgr/clk_mgr.c

@@ -174,9 +174,6 @@ struct clk_mgr *dc_clk_mgr_create(struct dc_context *ctx, struct pp_smu_funcs *p
 	case FAMILY_NV:
 #if defined(CONFIG_DRM_AMD_DC_DCN3_0)
 		if (ASICREV_IS_SIENNA_CICHLID_P(asic_id.hw_internal_rev)) {
-			/* TODO: to add SIENNA_CICHLID clk_mgr support, once CLK IP header files are available,
-			 * for now use DCN3AG clk mgr.
-			 */
 			dcn3_clk_mgr_construct(ctx, clk_mgr, pp_smu, dccg);
 			break;
 		}

drivers/gpu/drm/amd/display/dc/clk_mgr/dcn30/dcn30_clk_mgr.c

@@ -344,16 +344,12 @@ static void dcn3_update_clocks(struct clk_mgr *clk_mgr_base,
 static void dcn3_notify_wm_ranges(struct clk_mgr *clk_mgr_base)
 {
 	unsigned int i;
-	long long table_addr;
-	WatermarksExternal_t *table;
 	struct clk_mgr_internal *clk_mgr = TO_CLK_MGR_INTERNAL(clk_mgr_base);
+	WatermarksExternal_t *table = (WatermarksExternal_t *) clk_mgr->wm_range_table;
 
 	if (!clk_mgr->smu_present)
 		return;
 
-	/* need physical address of table to give to PMFW */
-	table = (WatermarksExternal_t *) dm_helpers_allocate_gpu_mem(clk_mgr->base.ctx, DC_MEM_ALLOC_TYPE_GART, sizeof(WatermarksExternal_t), &table_addr);
-
 	if (!table)
 		// should log failure
 		return;
@@ -371,11 +367,9 @@ static void dcn3_notify_wm_ranges(struct clk_mgr *clk_mgr_base)
 		table->Watermarks.WatermarkRow[WM_DCEFCLK][i].Flags = clk_mgr->base.bw_params->wm_table.nv_entries[i].pmfw_breakdown.wm_type;
 	}
 
-	dcn30_smu_set_dram_addr_high(clk_mgr, table_addr >> 32);
-	dcn30_smu_set_dram_addr_low(clk_mgr, table_addr & 0xFFFFFFFF);
+	dcn30_smu_set_dram_addr_high(clk_mgr, clk_mgr->wm_range_table_addr >> 32);
+	dcn30_smu_set_dram_addr_low(clk_mgr, clk_mgr->wm_range_table_addr & 0xFFFFFFFF);
 	dcn30_smu_transfer_wm_table_dram_2_smu(clk_mgr);
-
-	dm_helpers_free_gpu_mem(clk_mgr->base.ctx, DC_MEM_ALLOC_TYPE_GART, table);
 }
 
 /* Set min memclk to minimum, either constrained by the current mode or DPM0 */
@@ -534,10 +528,19 @@ void dcn3_clk_mgr_construct(
 	dce_clock_read_ss_info(clk_mgr);
 
 	clk_mgr->base.bw_params = kzalloc(sizeof(*clk_mgr->base.bw_params), GFP_KERNEL);
+
+	/* need physical address of table to give to PMFW */
+	clk_mgr->wm_range_table = dm_helpers_allocate_gpu_mem(clk_mgr->base.ctx,
+			DC_MEM_ALLOC_TYPE_GART, sizeof(WatermarksExternal_t),
+			&clk_mgr->wm_range_table_addr);
 }
 
 void dcn3_clk_mgr_destroy(struct clk_mgr_internal *clk_mgr)
 {
 	if (clk_mgr->base.bw_params)
 		kfree(clk_mgr->base.bw_params);
+
+	if (clk_mgr->wm_range_table)
+		dm_helpers_free_gpu_mem(clk_mgr->base.ctx, DC_MEM_ALLOC_TYPE_GART,
+				clk_mgr->wm_range_table);
 }

drivers/gpu/drm/amd/display/dc/inc/hw/clk_mgr_internal.h

@@ -273,6 +273,8 @@ struct clk_mgr_internal {
 #ifdef CONFIG_DRM_AMD_DC_DCN3_0
 	bool smu_present;
+	void *wm_range_table;
+	long long wm_range_table_addr;
 #endif
 };