cachepc-linux

Fork of AMDESE/linux with modifications for the CachePC side-channel attack
git clone https://git.sinitax.com/sinitax/cachepc-linux

clk_mgr.c (9984B)


/*
 * Copyright 2012-16 Advanced Micro Devices, Inc.
 *
 * Permission is hereby granted, free of charge, to any person obtaining a
 * copy of this software and associated documentation files (the "Software"),
 * to deal in the Software without restriction, including without limitation
 * the rights to use, copy, modify, merge, publish, distribute, sublicense,
 * and/or sell copies of the Software, and to permit persons to whom the
 * Software is furnished to do so, subject to the following conditions:
 *
 * The above copyright notice and this permission notice shall be included in
 * all copies or substantial portions of the Software.
 *
 * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
 * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
 * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT.  IN NO EVENT SHALL
 * THE COPYRIGHT HOLDER(S) OR AUTHOR(S) BE LIABLE FOR ANY CLAIM, DAMAGES OR
 * OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE,
 * ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR
 * OTHER DEALINGS IN THE SOFTWARE.
 *
 * Authors: AMD
 *
 */

#include <linux/slab.h>

#include "dal_asic_id.h"
#include "dc_types.h"
#include "dccg.h"
#include "clk_mgr_internal.h"

#include "dce100/dce_clk_mgr.h"
#include "dce110/dce110_clk_mgr.h"
#include "dce112/dce112_clk_mgr.h"
#include "dce120/dce120_clk_mgr.h"
#include "dce60/dce60_clk_mgr.h"
#include "dcn10/rv1_clk_mgr.h"
#include "dcn10/rv2_clk_mgr.h"
#include "dcn20/dcn20_clk_mgr.h"
#include "dcn21/rn_clk_mgr.h"
#include "dcn201/dcn201_clk_mgr.h"
#include "dcn30/dcn30_clk_mgr.h"
#include "dcn301/vg_clk_mgr.h"
#include "dcn31/dcn31_clk_mgr.h"
#include "dcn315/dcn315_clk_mgr.h"
#include "dcn316/dcn316_clk_mgr.h"

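/*
 * Return the number of streams in @context that should be counted as
 * driving a display: every stream that is not DPMS-off, plus virtual
 * streams (see the comment in the loop below for why virtual streams
 * are included).
 */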
int clk_mgr_helper_get_active_display_cnt(
		struct dc *dc,
		struct dc_state *context)
{
	int i, display_count;

	display_count = 0;
	for (i = 0; i < context->stream_count; i++) {
		const struct dc_stream_state *stream = context->streams[i];

		/*
		 * Only count active streams and virtual streams.
		 * Virtual streams must be included to work around the
		 * headless case: HPD does not fire when the system is
		 * in S0i2.
		 */
		if (!stream->dpms_off || stream->signal == SIGNAL_TYPE_VIRTUAL)
			display_count++;
	}

	return display_count;
}

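/* Return the total number of planes across all streams in @context. */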
int clk_mgr_helper_get_active_plane_cnt(
		struct dc *dc,
		struct dc_state *context)
{
	int i, total_plane_count;

	total_plane_count = 0;
	for (i = 0; i < context->stream_count; i++) {
		const struct dc_stream_status stream_status = context->stream_status[i];

		/*
		 * Sum up plane_count for all streams (active and virtual).
		 */
		total_plane_count += stream_status.plane_count;
	}

	return total_plane_count;
}

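/*
 * Leave any optimized (low-power) display state: run the hardware
 * sequencer's exit_optimized_pwr_state hook if one is provided, then,
 * for each PSR-capable eDP link, remember its current psr_allow_active
 * setting in the clk_mgr and force PSR inactive.  The cached value is
 * restored later by clk_mgr_optimize_pwr_state().
 */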
void clk_mgr_exit_optimized_pwr_state(const struct dc *dc, struct clk_mgr *clk_mgr)
{
	struct dc_link *edp_links[MAX_NUM_EDP];
	struct dc_link *edp_link = NULL;
	int edp_num;
	unsigned int panel_inst;

	get_edp_links(dc, edp_links, &edp_num);
	if (dc->hwss.exit_optimized_pwr_state)
		dc->hwss.exit_optimized_pwr_state(dc, dc->current_state);

	if (edp_num) {
		for (panel_inst = 0; panel_inst < edp_num; panel_inst++) {
			bool allow_active = false;

			edp_link = edp_links[panel_inst];
			if (!edp_link->psr_settings.psr_feature_enabled)
				continue;
			clk_mgr->psr_allow_active_cache = edp_link->psr_settings.psr_allow_active;
			dc_link_set_psr_allow_active(edp_link, &allow_active, false, false, NULL);
		}
	}
}

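/*
 * Counterpart of clk_mgr_exit_optimized_pwr_state(): restore the cached
 * psr_allow_active setting on each PSR-capable eDP link, then run the
 * hardware sequencer's optimize_pwr_state hook if one is provided.
 */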
void clk_mgr_optimize_pwr_state(const struct dc *dc, struct clk_mgr *clk_mgr)
{
	struct dc_link *edp_links[MAX_NUM_EDP];
	struct dc_link *edp_link = NULL;
	int edp_num;
	unsigned int panel_inst;

	get_edp_links(dc, edp_links, &edp_num);
	if (edp_num) {
		for (panel_inst = 0; panel_inst < edp_num; panel_inst++) {
			edp_link = edp_links[panel_inst];
			if (!edp_link->psr_settings.psr_feature_enabled)
				continue;
			dc_link_set_psr_allow_active(edp_link,
					&clk_mgr->psr_allow_active_cache, false, false, NULL);
		}
	}

	if (dc->hwss.optimize_pwr_state)
		dc->hwss.optimize_pwr_state(dc, dc->current_state);
}

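/*
 * Allocate and construct the clock manager implementation that matches
 * the ASIC family (and, where needed, revision) given in ctx->asic_id.
 * Returns a pointer to the embedded struct clk_mgr base, or NULL on
 * allocation failure or an unrecognized ASIC.
 */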
struct clk_mgr *dc_clk_mgr_create(struct dc_context *ctx, struct pp_smu_funcs *pp_smu, struct dccg *dccg)
{
	struct hw_asic_id asic_id = ctx->asic_id;

	switch (asic_id.chip_family) {
#if defined(CONFIG_DRM_AMD_DC_SI)
	case FAMILY_SI: {
		struct clk_mgr_internal *clk_mgr = kzalloc(sizeof(*clk_mgr), GFP_KERNEL);

		if (clk_mgr == NULL) {
			BREAK_TO_DEBUGGER();
			return NULL;
		}
		dce60_clk_mgr_construct(ctx, clk_mgr);
		dce_clk_mgr_construct(ctx, clk_mgr);
		return &clk_mgr->base;
	}
#endif
	case FAMILY_CI:
	case FAMILY_KV: {
		struct clk_mgr_internal *clk_mgr = kzalloc(sizeof(*clk_mgr), GFP_KERNEL);

		if (clk_mgr == NULL) {
			BREAK_TO_DEBUGGER();
			return NULL;
		}
		dce_clk_mgr_construct(ctx, clk_mgr);
		return &clk_mgr->base;
	}
	case FAMILY_CZ: {
		struct clk_mgr_internal *clk_mgr = kzalloc(sizeof(*clk_mgr), GFP_KERNEL);

		if (clk_mgr == NULL) {
			BREAK_TO_DEBUGGER();
			return NULL;
		}
		dce110_clk_mgr_construct(ctx, clk_mgr);
		return &clk_mgr->base;
	}
	case FAMILY_VI: {
		struct clk_mgr_internal *clk_mgr = kzalloc(sizeof(*clk_mgr), GFP_KERNEL);

		if (clk_mgr == NULL) {
			BREAK_TO_DEBUGGER();
			return NULL;
		}
		if (ASIC_REV_IS_TONGA_P(asic_id.hw_internal_rev) ||
				ASIC_REV_IS_FIJI_P(asic_id.hw_internal_rev)) {
			dce_clk_mgr_construct(ctx, clk_mgr);
			return &clk_mgr->base;
		}
		if (ASIC_REV_IS_POLARIS10_P(asic_id.hw_internal_rev) ||
				ASIC_REV_IS_POLARIS11_M(asic_id.hw_internal_rev) ||
				ASIC_REV_IS_POLARIS12_V(asic_id.hw_internal_rev)) {
			dce112_clk_mgr_construct(ctx, clk_mgr);
			return &clk_mgr->base;
		}
		if (ASIC_REV_IS_VEGAM(asic_id.hw_internal_rev)) {
			dce112_clk_mgr_construct(ctx, clk_mgr);
			return &clk_mgr->base;
		}
		return &clk_mgr->base;
	}
	case FAMILY_AI: {
		struct clk_mgr_internal *clk_mgr = kzalloc(sizeof(*clk_mgr), GFP_KERNEL);

		if (clk_mgr == NULL) {
			BREAK_TO_DEBUGGER();
			return NULL;
		}
		if (ASICREV_IS_VEGA20_P(asic_id.hw_internal_rev))
			dce121_clk_mgr_construct(ctx, clk_mgr);
		else
			dce120_clk_mgr_construct(ctx, clk_mgr);
		return &clk_mgr->base;
	}
#if defined(CONFIG_DRM_AMD_DC_DCN)
	case FAMILY_RV: {
		struct clk_mgr_internal *clk_mgr = kzalloc(sizeof(*clk_mgr), GFP_KERNEL);

		if (clk_mgr == NULL) {
			BREAK_TO_DEBUGGER();
			return NULL;
		}

		if (ASICREV_IS_RENOIR(asic_id.hw_internal_rev)) {
			rn_clk_mgr_construct(ctx, clk_mgr, pp_smu, dccg);
			return &clk_mgr->base;
		}

		if (ASICREV_IS_GREEN_SARDINE(asic_id.hw_internal_rev)) {
			rn_clk_mgr_construct(ctx, clk_mgr, pp_smu, dccg);
			return &clk_mgr->base;
		}
		if (ASICREV_IS_RAVEN2(asic_id.hw_internal_rev)) {
			rv2_clk_mgr_construct(ctx, clk_mgr, pp_smu);
			return &clk_mgr->base;
		}
		if (ASICREV_IS_RAVEN(asic_id.hw_internal_rev) ||
				ASICREV_IS_PICASSO(asic_id.hw_internal_rev)) {
			rv1_clk_mgr_construct(ctx, clk_mgr, pp_smu);
			return &clk_mgr->base;
		}
		return &clk_mgr->base;
	}
	case FAMILY_NV: {
		struct clk_mgr_internal *clk_mgr = kzalloc(sizeof(*clk_mgr), GFP_KERNEL);

		if (clk_mgr == NULL) {
			BREAK_TO_DEBUGGER();
			return NULL;
		}
		if (ASICREV_IS_SIENNA_CICHLID_P(asic_id.hw_internal_rev)) {
			dcn3_clk_mgr_construct(ctx, clk_mgr, pp_smu, dccg);
			return &clk_mgr->base;
		}
		if (ASICREV_IS_DIMGREY_CAVEFISH_P(asic_id.hw_internal_rev)) {
			dcn3_clk_mgr_construct(ctx, clk_mgr, pp_smu, dccg);
			return &clk_mgr->base;
		}
		if (ASICREV_IS_BEIGE_GOBY_P(asic_id.hw_internal_rev)) {
			dcn3_clk_mgr_construct(ctx, clk_mgr, pp_smu, dccg);
			return &clk_mgr->base;
		}
		if (asic_id.chip_id == DEVICE_ID_NV_13FE) {
			dcn201_clk_mgr_construct(ctx, clk_mgr, pp_smu, dccg);
			return &clk_mgr->base;
		}
		dcn20_clk_mgr_construct(ctx, clk_mgr, pp_smu, dccg);
		return &clk_mgr->base;
	}
	case FAMILY_VGH:
		if (ASICREV_IS_VANGOGH(asic_id.hw_internal_rev)) {
			struct clk_mgr_vgh *clk_mgr = kzalloc(sizeof(*clk_mgr), GFP_KERNEL);

			if (clk_mgr == NULL) {
				BREAK_TO_DEBUGGER();
				return NULL;
			}
			vg_clk_mgr_construct(ctx, clk_mgr, pp_smu, dccg);
			return &clk_mgr->base.base;
		}
		break;
	case FAMILY_YELLOW_CARP: {
		struct clk_mgr_dcn31 *clk_mgr = kzalloc(sizeof(*clk_mgr), GFP_KERNEL);

		if (clk_mgr == NULL) {
			BREAK_TO_DEBUGGER();
			return NULL;
		}

		dcn31_clk_mgr_construct(ctx, clk_mgr, pp_smu, dccg);
		return &clk_mgr->base.base;
	}
		break;
	case AMDGPU_FAMILY_GC_10_3_6: {
		struct clk_mgr_dcn315 *clk_mgr = kzalloc(sizeof(*clk_mgr), GFP_KERNEL);

		if (clk_mgr == NULL) {
			BREAK_TO_DEBUGGER();
			return NULL;
		}

		dcn315_clk_mgr_construct(ctx, clk_mgr, pp_smu, dccg);
		return &clk_mgr->base.base;
	}
		break;
	case AMDGPU_FAMILY_GC_10_3_7: {
		struct clk_mgr_dcn316 *clk_mgr = kzalloc(sizeof(*clk_mgr), GFP_KERNEL);

		if (clk_mgr == NULL) {
			BREAK_TO_DEBUGGER();
			return NULL;
		}

		dcn316_clk_mgr_construct(ctx, clk_mgr, pp_smu, dccg);
		return &clk_mgr->base.base;
	}
		break;
#endif

	default:
		ASSERT(0); /* Unknown ASIC */
		break;
	}

	return NULL;
}

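/*
 * Destroy a clock manager created by dc_clk_mgr_create(): invoke the
 * family-specific destroy hook where one exists, then free the
 * clk_mgr allocation.
 */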
void dc_destroy_clk_mgr(struct clk_mgr *clk_mgr_base)
{
	struct clk_mgr_internal *clk_mgr = TO_CLK_MGR_INTERNAL(clk_mgr_base);

#ifdef CONFIG_DRM_AMD_DC_DCN
	switch (clk_mgr_base->ctx->asic_id.chip_family) {
	case FAMILY_NV:
		if (ASICREV_IS_SIENNA_CICHLID_P(clk_mgr_base->ctx->asic_id.hw_internal_rev)) {
			dcn3_clk_mgr_destroy(clk_mgr);
		} else if (ASICREV_IS_DIMGREY_CAVEFISH_P(clk_mgr_base->ctx->asic_id.hw_internal_rev)) {
			dcn3_clk_mgr_destroy(clk_mgr);
		}
		if (ASICREV_IS_BEIGE_GOBY_P(clk_mgr_base->ctx->asic_id.hw_internal_rev)) {
			dcn3_clk_mgr_destroy(clk_mgr);
		}
		break;

	case FAMILY_VGH:
		if (ASICREV_IS_VANGOGH(clk_mgr_base->ctx->asic_id.hw_internal_rev))
			vg_clk_mgr_destroy(clk_mgr);
		break;

	case FAMILY_YELLOW_CARP:
		dcn31_clk_mgr_destroy(clk_mgr);
		break;

	case AMDGPU_FAMILY_GC_10_3_6:
		dcn315_clk_mgr_destroy(clk_mgr);
		break;

	case AMDGPU_FAMILY_GC_10_3_7:
		dcn316_clk_mgr_destroy(clk_mgr);
		break;

	default:
		break;
	}
#endif

	kfree(clk_mgr);
}