Diffstat (limited to 'drivers/gpu/drm/amd/amdgpu/amdgpu_virt.c')
-rw-r--r--  drivers/gpu/drm/amd/amdgpu/amdgpu_virt.c  | 63
1 file changed, 62 insertions(+), 1 deletion(-)
diff --git a/drivers/gpu/drm/amd/amdgpu/amdgpu_virt.c b/drivers/gpu/drm/amd/amdgpu/amdgpu_virt.c
index 9be57389301b..9c765b04aae3 100644
--- a/drivers/gpu/drm/amd/amdgpu/amdgpu_virt.c
+++ b/drivers/gpu/drm/amd/amdgpu/amdgpu_virt.c
@@ -690,7 +690,6 @@ void amdgpu_virt_exchange_data(struct amdgpu_device *adev)
 	}
 }
 
-
 void amdgpu_detect_virtualization(struct amdgpu_device *adev)
 {
 	uint32_t reg;
@@ -707,6 +706,7 @@ void amdgpu_detect_virtualization(struct amdgpu_device *adev)
 	case CHIP_SIENNA_CICHLID:
 	case CHIP_ARCTURUS:
 	case CHIP_ALDEBARAN:
+	case CHIP_IP_DISCOVERY:
 		reg = RREG32(mmRCC_IOV_FUNC_IDENTIFIER);
 		break;
 	default: /* other chip doesn't support SRIOV */
@@ -726,6 +726,12 @@ void amdgpu_detect_virtualization(struct amdgpu_device *adev)
 			adev->virt.caps |= AMDGPU_PASSTHROUGH_MODE;
 	}
 
+	if (amdgpu_sriov_vf(adev) && adev->asic_type == CHIP_SIENNA_CICHLID)
+		/* VF MMIO access (except mailbox range) from CPU
+		 * will be blocked during sriov runtime
+		 */
+		adev->virt.caps |= AMDGPU_VF_MMIO_ACCESS_PROTECT;
+
 	/* we have the ability to check now */
 	if (amdgpu_sriov_vf(adev)) {
 		switch (adev->asic_type) {
@@ -750,6 +756,7 @@ void amdgpu_detect_virtualization(struct amdgpu_device *adev)
 		case CHIP_NAVI10:
 		case CHIP_NAVI12:
 		case CHIP_SIENNA_CICHLID:
+		case CHIP_IP_DISCOVERY:
 			nv_set_virt_ops(adev);
 			/* try send GPU_INIT_DATA request to host */
 			amdgpu_virt_request_init_data(adev);
@@ -807,6 +814,60 @@ enum amdgpu_sriov_vf_mode amdgpu_virt_get_sriov_vf_mode(struct amdgpu_device *ad
 	return mode;
 }
 
+bool amdgpu_virt_fw_load_skip_check(struct amdgpu_device *adev, uint32_t ucode_id)
+{
+	switch (adev->ip_versions[MP0_HWIP][0]) {
+	case IP_VERSION(13, 0, 0):
+		/* no vf autoload, white list */
+		if (ucode_id == AMDGPU_UCODE_ID_VCN1 ||
+		    ucode_id == AMDGPU_UCODE_ID_VCN)
+			return false;
+		else
+			return true;
+	case IP_VERSION(13, 0, 10):
+		/* white list */
+		if (ucode_id == AMDGPU_UCODE_ID_CAP
+		|| ucode_id == AMDGPU_UCODE_ID_CP_RS64_PFP
+		|| ucode_id == AMDGPU_UCODE_ID_CP_RS64_ME
+		|| ucode_id == AMDGPU_UCODE_ID_CP_RS64_MEC
+		|| ucode_id == AMDGPU_UCODE_ID_CP_RS64_PFP_P0_STACK
+		|| ucode_id == AMDGPU_UCODE_ID_CP_RS64_PFP_P1_STACK
+		|| ucode_id == AMDGPU_UCODE_ID_CP_RS64_ME_P0_STACK
+		|| ucode_id == AMDGPU_UCODE_ID_CP_RS64_ME_P1_STACK
+		|| ucode_id == AMDGPU_UCODE_ID_CP_RS64_MEC_P0_STACK
+		|| ucode_id == AMDGPU_UCODE_ID_CP_RS64_MEC_P1_STACK
+		|| ucode_id == AMDGPU_UCODE_ID_CP_RS64_MEC_P2_STACK
+		|| ucode_id == AMDGPU_UCODE_ID_CP_RS64_MEC_P3_STACK
+		|| ucode_id == AMDGPU_UCODE_ID_CP_MES
+		|| ucode_id == AMDGPU_UCODE_ID_CP_MES_DATA
+		|| ucode_id == AMDGPU_UCODE_ID_CP_MES1
+		|| ucode_id == AMDGPU_UCODE_ID_CP_MES1_DATA
+		|| ucode_id == AMDGPU_UCODE_ID_VCN1
+		|| ucode_id == AMDGPU_UCODE_ID_VCN)
+			return false;
+		else
+			return true;
+	default:
+		/* lagacy black list */
+		if (ucode_id == AMDGPU_UCODE_ID_SDMA0
+		|| ucode_id == AMDGPU_UCODE_ID_SDMA1
+		|| ucode_id == AMDGPU_UCODE_ID_SDMA2
+		|| ucode_id == AMDGPU_UCODE_ID_SDMA3
+		|| ucode_id == AMDGPU_UCODE_ID_SDMA4
+		|| ucode_id == AMDGPU_UCODE_ID_SDMA5
+		|| ucode_id == AMDGPU_UCODE_ID_SDMA6
+		|| ucode_id == AMDGPU_UCODE_ID_SDMA7
+		|| ucode_id == AMDGPU_UCODE_ID_RLC_G
+		|| ucode_id == AMDGPU_UCODE_ID_RLC_RESTORE_LIST_CNTL
+		|| ucode_id == AMDGPU_UCODE_ID_RLC_RESTORE_LIST_GPM_MEM
+		|| ucode_id == AMDGPU_UCODE_ID_RLC_RESTORE_LIST_SRM_MEM
+		|| ucode_id == AMDGPU_UCODE_ID_SMC)
+			return true;
+		else
+			return false;
+	}
+}
+
 void amdgpu_virt_update_sriov_video_codec(struct amdgpu_device *adev,
 			struct amdgpu_video_codec_info *encode, uint32_t encode_array_size,
 			struct amdgpu_video_codec_info *decode, uint32_t decode_array_size)
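Note on the last hunk: amdgpu_virt_fw_load_skip_check() appears to return true when a VF should skip loading a given firmware image itself. On MP0 13.0.0 and 13.0.10 it acts as an allow-list (only the listed ucode IDs are loaded by the guest; everything else stays with the host), while older ASICs keep the legacy deny-list of host-owned firmwares (SDMA, RLC, SMC). The user-space sketch below only models that decision shape under stated assumptions: the enum ucode_id values, fw_load_skip_check() and main() are illustrative stand-ins, not the driver's AMDGPU_UCODE_ID_* definitions, and the IP_VERSION() packing is reproduced from the amdgpu headers to the best of my knowledge.

/*
 * Stand-alone model of the allow-list/deny-list decision in the hunk
 * above.  Everything here (enum values, helper names, main()) is an
 * illustrative stand-in, not the kernel's definitions.
 */
#include <stdbool.h>
#include <stdint.h>
#include <stdio.h>

/* Hypothetical stand-ins for a few AMDGPU_UCODE_ID_* values. */
enum ucode_id {
	UCODE_ID_SMC,
	UCODE_ID_SDMA0,
	UCODE_ID_CP_MES,
	UCODE_ID_VCN,
	UCODE_ID_VCN1,
};

/* Mirrors the IP_VERSION(maj, min, rev) packing used by the driver. */
#define IP_VERSION(maj, min, rev) (((maj) << 16) | ((min) << 8) | (rev))

/* true = the VF should skip loading this firmware image. */
static bool fw_load_skip_check(uint32_t mp0_ip_version, enum ucode_id id)
{
	switch (mp0_ip_version) {
	case IP_VERSION(13, 0, 0):
		/* allow-list: only VCN firmwares are loaded by the VF */
		return !(id == UCODE_ID_VCN || id == UCODE_ID_VCN1);
	default:
		/* legacy deny-list: host owns SMC/SDMA firmwares */
		return id == UCODE_ID_SMC || id == UCODE_ID_SDMA0;
	}
}

int main(void)
{
	printf("MP0 13.0.0, VCN -> skip=%d\n",
	       fw_load_skip_check(IP_VERSION(13, 0, 0), UCODE_ID_VCN));
	printf("MP0 13.0.0, MES -> skip=%d\n",
	       fw_load_skip_check(IP_VERSION(13, 0, 0), UCODE_ID_CP_MES));
	printf("legacy,     SMC -> skip=%d\n",
	       fw_load_skip_check(IP_VERSION(11, 0, 7), UCODE_ID_SMC));
	return 0;
}

Switching from a deny-list to an allow-list on the newer PSP versions presumably means any newly added firmware type defaults to "host loads it", which is the conservative direction for SR-IOV guests.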