gpu: nvgpu: add ioctl to configure implicit ERRBAR

Add ioctl support to configure implicit ERRBAR by setting/unsetting
NV_PGRAPH_PRI_GPCS_TPCS_SM_SCH_MACRO_SCHED register.

Add gpu characteristics flag: NVGPU_SCHED_EXIT_WAIT_FOR_ERRBAR_SUPPORTED
to allow userspace driver to determine if implicit ERRBAR ioctl is
supported.

Bug: 200782861

Change-Id: I530a4cf73bc5c844e8d73094d3e23949568fe335
Signed-off-by: atanand <atanand@nvidia.com>
Reviewed-on: https://git-master.nvidia.com/r/c/linux-nvgpu/+/2718672
Reviewed-by: svc-mobile-coverity <svc-mobile-coverity@nvidia.com>
Reviewed-by: svc-mobile-cert <svc-mobile-cert@nvidia.com>
Reviewed-by: Sagar Kamble <skamble@nvidia.com>
Reviewed-by: Vaibhav Kachore <vkachore@nvidia.com>
GVS: Gerrit_Virtual_Submit
This commit is contained in:
atanand
2022-05-26 09:34:15 +00:00
committed by mobile promotions
parent faf18009cb
commit eae4593343
15 changed files with 181 additions and 2 deletions

View File

@@ -27,6 +27,7 @@
#include <nvgpu/os_sched.h>
#include <nvgpu/channel.h>
#include <nvgpu/tsg.h>
#include <nvgpu/atomic.h>
#include <nvgpu/rc.h>
#include <nvgpu/gk20a.h>
#include <nvgpu/error_notifier.h>
@@ -273,6 +274,14 @@ static int nvgpu_tsg_unbind_channel_common(struct nvgpu_tsg *tsg,
break;
}
}
while (nvgpu_atomic_read(&ch->sched_exit_wait_for_errbar_refcnt) > 0) {
err = nvgpu_tsg_set_sched_exit_wait_for_errbar(ch, false);
if (err != 0) {
nvgpu_err(g, "disable implicit ERRBAR failed ch:%u",
ch->chid);
break;
}
}
#endif
/* Remove channel from TSG and re-enable rest of the channels */
@@ -377,6 +386,14 @@ fail:
break;
}
}
while (nvgpu_atomic_read(&ch->sched_exit_wait_for_errbar_refcnt) > 0) {
err = nvgpu_tsg_set_sched_exit_wait_for_errbar(ch, false);
if (err != 0) {
nvgpu_err(g, "disable implicit ERRBAR failed ch:%u",
ch->chid);
break;
}
}
#endif
nvgpu_rwsem_down_write(&tsg->ch_list_lock);
@@ -1214,4 +1231,46 @@ int nvgpu_tsg_set_mmu_debug_mode(struct nvgpu_channel *ch, bool enable)
return err;
}
/*
 * Request (or drop a request for) implicit ERRBAR at sched exit for @ch.
 *
 * Requests are tracked with per-channel and per-TSG refcounts; the HW
 * setting follows the TSG-wide count, so ERRBAR stays enabled while at
 * least one channel in the TSG has requested it.
 *
 * Returns 0 on success, -EINVAL if @ch is not bound to a TSG, -ENOSYS if
 * the chip has no set_sched_wait_for_errbar HAL, or the HAL's error code.
 */
int nvgpu_tsg_set_sched_exit_wait_for_errbar(struct nvgpu_channel *ch, bool enable)
{
	struct gk20a *g;
	int err;
	struct nvgpu_tsg *tsg = nvgpu_tsg_from_ch(ch);

	if (tsg == NULL) {
		return -EINVAL;
	}

	g = ch->g;
	if (g->ops.gr.set_sched_wait_for_errbar == NULL) {
		return -ENOSYS;
	}

	if (enable) {
		nvgpu_atomic_inc(&ch->sched_exit_wait_for_errbar_refcnt);
		nvgpu_atomic_inc(&tsg->sched_exit_wait_for_errbar_refcnt);
	} else {
		/* Guard against underflow from unbalanced disable requests. */
		if (nvgpu_atomic_read(&ch->sched_exit_wait_for_errbar_refcnt) != 0) {
			nvgpu_atomic_dec(&ch->sched_exit_wait_for_errbar_refcnt);
		}
		if (nvgpu_atomic_read(&tsg->sched_exit_wait_for_errbar_refcnt) != 0) {
			nvgpu_atomic_dec(&tsg->sched_exit_wait_for_errbar_refcnt);
		}
	}

	/*
	 * Enable GPC implicit ERRBAR if it was requested for at
	 * least one channel in the TSG.
	 */
	err = g->ops.gr.set_sched_wait_for_errbar(g, ch,
			nvgpu_atomic_read(&tsg->sched_exit_wait_for_errbar_refcnt) > 0);
	if (err != 0) {
		nvgpu_err(g, "set implicit ERRBAR failed, err=%d", err);
		/*
		 * Roll back a failed enable so the refcounts do not claim
		 * ERRBAR is active when HW programming failed. A failed
		 * disable is left decremented on purpose: teardown loops
		 * that drain the refcount must still make progress.
		 */
		if (enable) {
			nvgpu_atomic_dec(&ch->sched_exit_wait_for_errbar_refcnt);
			nvgpu_atomic_dec(&tsg->sched_exit_wait_for_errbar_refcnt);
		}
		return err;
	}

	return 0;
}
#endif