Mirror of git://nv-tegra.nvidia.com/linux-nvgpu.git (synced 2025-12-23 09:57:08 +03:00)
gpu: nvgpu: falcon interface update
- Added nvgpu_flcn_mem_scrub_wait() to the falcon interface layer to poll
  the IMEM/DMEM scrubbing-complete status for up to 1 msec, checking every
  10 usec.
- Called nvgpu_flcn_mem_scrub_wait() from the falcon reset interface to
  verify scrubbing status after a falcon/engine reset.
- Replaced the mem scrubbing wait code in pmu_enable_hw() with a call to
  nvgpu_flcn_mem_scrub_wait().

Bug 200346134

Change-Id: Iac68e24dea466f6dd5facc371947269db64d238d
Signed-off-by: Mahantesh Kumbar <mkumbar@nvidia.com>
Reviewed-on: https://git-master.nvidia.com/r/1598644
Reviewed-by: Automatic_Commit_Validation_User
GVS: Gerrit_Virtual_Submit
Reviewed-by: Vijayakumar Subbu <vsubbu@nvidia.com>
Reviewed-by: mobile promotions <svcmobile_promotions@nvidia.com>
Tested-by: mobile promotions <svcmobile_promotions@nvidia.com>
Committed by: mobile promotions
Parent: 76ad8e9fa8
Commit: f53a0dd96b
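
For orientation, here is a minimal caller-side sketch of how the updated interface is intended to be used. It is not part of the commit: example_engine_init() is a hypothetical name, and the sketch assumes the in-tree nvgpu headers that declare nvgpu_flcn_reset() and nvgpu_err(). With this change, nvgpu_flcn_reset() itself waits for IMEM/DMEM scrub completion, so a caller only needs to check its return value.

/*
 * Hypothetical usage sketch (not part of the commit). Assumes the in-tree
 * nvgpu headers that declare nvgpu_flcn_reset() and nvgpu_err().
 */
#include <nvgpu/falcon.h>

#include "gk20a/gk20a.h"

static int example_engine_init(struct gk20a *g, struct nvgpu_falcon *flcn)
{
	int err;

	/*
	 * Reset the falcon; with this change the reset path also polls the
	 * IMEM/DMEM scrubbing-complete status for up to ~1 msec.
	 */
	err = nvgpu_flcn_reset(flcn);
	if (err != 0) {
		nvgpu_err(g, "falcon reset / mem scrub wait failed");
		return err;
	}

	/* IMEM/DMEM are scrubbed; safe to load ucode from here on. */
	return 0;
}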

Falcon interface layer (nvgpu_flcn_*):

@@ -25,6 +25,13 @@
 
 #include "gk20a/gk20a.h"
 
+/* Delay depends on memory size and pwr_clk
+ * delay = (MAX {IMEM_SIZE, DMEM_SIZE} * 64 + 1) / pwr_clk
+ * Timeout set is 1msec & status check at interval 10usec
+ */
+#define MEM_SCRUBBING_TIMEOUT_MAX 1000
+#define MEM_SCRUBBING_TIMEOUT_DEFAULT 10
+
 int nvgpu_flcn_wait_idle(struct nvgpu_falcon *flcn)
 {
 	struct gk20a *g = flcn->g;
@@ -56,15 +63,42 @@ int nvgpu_flcn_wait_idle(struct nvgpu_falcon *flcn)
 	return 0;
 }
 
+int nvgpu_flcn_mem_scrub_wait(struct nvgpu_falcon *flcn)
+{
+	struct nvgpu_timeout timeout;
+	int status = 0;
+
+	/* check IMEM/DMEM scrubbing complete status */
+	nvgpu_timeout_init(flcn->g, &timeout,
+		MEM_SCRUBBING_TIMEOUT_MAX /
+		MEM_SCRUBBING_TIMEOUT_DEFAULT,
+		NVGPU_TIMER_RETRY_TIMER);
+	do {
+		if (nvgpu_flcn_get_mem_scrubbing_status(flcn))
+			goto exit;
+		nvgpu_udelay(MEM_SCRUBBING_TIMEOUT_DEFAULT);
+	} while (!nvgpu_timeout_expired(&timeout));
+
+	if (nvgpu_timeout_peek_expired(&timeout))
+		status = -ETIMEDOUT;
+
+exit:
+	return status;
+}
+
 int nvgpu_flcn_reset(struct nvgpu_falcon *flcn)
 {
-	int status = -EINVAL;
+	int status = 0;
 
-	if (flcn->flcn_ops.reset)
+	if (flcn->flcn_ops.reset) {
 		status = flcn->flcn_ops.reset(flcn);
-	else
+		if (!status)
+			status = nvgpu_flcn_mem_scrub_wait(flcn);
+	} else {
 		nvgpu_warn(flcn->g, "Invalid op on falcon 0x%x ",
 			flcn->flcn_id);
+		status = -EINVAL;
+	}
 
 	return status;
 }
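
Note on the timeout values in nvgpu_flcn_mem_scrub_wait(): nvgpu_timeout_init() is passed MEM_SCRUBBING_TIMEOUT_MAX / MEM_SCRUBBING_TIMEOUT_DEFAULT = 1000 / 10 = 100 as the retry count, and each unsuccessful poll is followed by nvgpu_udelay(MEM_SCRUBBING_TIMEOUT_DEFAULT), i.e. 10 usec, so the loop spends roughly 100 * 10 usec = 1 msec before returning -ETIMEDOUT, matching the budget described in the commit message and in the comment above the defines.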

PMU code (pmu_enable_hw()):

@@ -31,15 +31,11 @@
 
 #include "gk20a/gk20a.h"
 
-#define PMU_MEM_SCRUBBING_TIMEOUT_MAX 1000
-#define PMU_MEM_SCRUBBING_TIMEOUT_DEFAULT 10
-
 static int nvgpu_pg_init_task(void *arg);
 
 static int pmu_enable_hw(struct nvgpu_pmu *pmu, bool enable)
 {
 	struct gk20a *g = pmu->g;
-	struct nvgpu_timeout timeout;
 	int err = 0;
 
 	nvgpu_log_fn(g, " %s ", g->name);
@@ -56,29 +52,19 @@ static int pmu_enable_hw(struct nvgpu_pmu *pmu, bool enable)
 		g->ops.clock_gating.blcg_pmu_load_gating_prod(g,
 			g->blcg_enabled);
 
-		/* check for PMU IMEM/DMEM scrubbing complete status */
-		nvgpu_timeout_init(g, &timeout,
-			PMU_MEM_SCRUBBING_TIMEOUT_MAX /
-			PMU_MEM_SCRUBBING_TIMEOUT_DEFAULT,
-			NVGPU_TIMER_RETRY_TIMER);
-		do {
-			if (nvgpu_flcn_get_mem_scrubbing_status(pmu->flcn))
-				goto exit;
-			nvgpu_udelay(PMU_MEM_SCRUBBING_TIMEOUT_DEFAULT);
-		} while (!nvgpu_timeout_expired(&timeout));
-
-		/* keep PMU falcon/engine in reset
-		 * if IMEM/DMEM scrubbing fails
-		 */
-		g->ops.pmu.reset_engine(g, false);
-		nvgpu_err(g, "Falcon mem scrubbing timeout");
-		err = -ETIMEDOUT;
-	} else
+		if (nvgpu_flcn_mem_scrub_wait(pmu->flcn)) {
+			/* keep PMU falcon/engine in reset
+			 * if IMEM/DMEM scrubbing fails
+			 */
+			g->ops.pmu.reset_engine(g, false);
+			nvgpu_err(g, "Falcon mem scrubbing timeout");
+			err = -ETIMEDOUT;
+		}
+	} else {
 		/* keep PMU falcon/engine in reset */
 		g->ops.pmu.reset_engine(g, false);
+	}
 
-exit:
 	nvgpu_log_fn(g, "%s Done, status - %d ", g->name, err);
 	return err;
 }

Falcon interface header (new declaration):

@@ -222,6 +222,7 @@ int nvgpu_flcn_reset(struct nvgpu_falcon *flcn);
 void nvgpu_flcn_set_irq(struct nvgpu_falcon *flcn, bool enable,
 	u32 intr_mask, u32 intr_dest);
 bool nvgpu_flcn_get_mem_scrubbing_status(struct nvgpu_falcon *flcn);
+int nvgpu_flcn_mem_scrub_wait(struct nvgpu_falcon *flcn);
 bool nvgpu_flcn_get_cpu_halted_status(struct nvgpu_falcon *flcn);
 bool nvgpu_flcn_get_idle_status(struct nvgpu_falcon *flcn);
 int nvgpu_flcn_copy_from_dmem(struct nvgpu_falcon *flcn,