gpu: nvgpu: move local golden context memory allocation to poweron

- Separate out local golden context memory allocation from
  nvgpu_gr_global_ctx_init_local_golden_image() into a new function
  nvgpu_gr_global_ctx_alloc_local_golden_image().
- Add a new member local_golden_image_copy to struct
  nvgpu_gr_obj_ctx_golden_image to store copy used for context
  verification.
- Allocate local golden context memory from nvgpu_gr_obj_ctx_init()
  which is called during poweron path.
- Remove memory allocation from nvgpu_gr_obj_ctx_save_golden_ctx().
- Disable test test_gr_obj_ctx_error_injection since it needs rework
  to accommodate the new changes.
- Fix below tests to allocate local golden context memory:
  test_gr_global_ctx_local_ctx_error_injection
  test_gr_setup_alloc_obj_ctx

Bug 3307637

Change-Id: I2f760d524881fd328346838ea9ce0234358f8e51
Signed-off-by: Deepak Nibade <dnibade@nvidia.com>
Reviewed-on: https://git-master.nvidia.com/r/c/linux-nvgpu/+/2633713
Tested-by: mobile promotions <svcmobile_promotions@nvidia.com>
Reviewed-by: mobile promotions <svcmobile_promotions@nvidia.com>
This commit is contained in:
Deepak Nibade
2021-11-29 17:08:06 +05:30
committed by mobile promotions
parent e1d6b8af8d
commit 9f55801a15
11 changed files with 102 additions and 127 deletions

View File

@@ -363,36 +363,35 @@ bool nvgpu_gr_global_ctx_buffer_ready(
return false;
}
struct nvgpu_gr_global_ctx_local_golden_image *
nvgpu_gr_global_ctx_init_local_golden_image(struct gk20a *g,
struct nvgpu_mem *source_mem, size_t size)
int nvgpu_gr_global_ctx_alloc_local_golden_image(struct gk20a *g,
struct nvgpu_gr_global_ctx_local_golden_image **img,
size_t size)
{
struct nvgpu_gr_global_ctx_local_golden_image *local_golden_image;
#ifdef NVGPU_UNITTEST_FAULT_INJECTION_ENABLEMENT
if (nvgpu_posix_fault_injection_handle_call(
nvgpu_local_golden_image_get_fault_injection())) {
return NULL;
}
#endif
local_golden_image = nvgpu_kzalloc(g, sizeof(*local_golden_image));
if (local_golden_image == NULL) {
return NULL;
return -ENOMEM;
}
local_golden_image->context = nvgpu_vzalloc(g, size);
if (local_golden_image->context == NULL) {
nvgpu_kfree(g, local_golden_image);
return NULL;
return -ENOMEM;
}
local_golden_image->size = size;
nvgpu_mem_rd_n(g, source_mem, 0, local_golden_image->context,
nvgpu_safe_cast_u64_to_u32(size));
*img = local_golden_image;
return 0;
}
return local_golden_image;
void nvgpu_gr_global_ctx_init_local_golden_image(struct gk20a *g,
struct nvgpu_gr_global_ctx_local_golden_image *local_golden_image,
struct nvgpu_mem *source_mem, size_t size)
{
nvgpu_mem_rd_n(g, source_mem, 0, local_golden_image->context,
nvgpu_safe_cast_u64_to_u32(local_golden_image->size));
}
#ifdef CONFIG_NVGPU_GR_GOLDEN_CTX_VERIFICATION

View File

@@ -629,14 +629,11 @@ static int nvgpu_gr_obj_ctx_save_golden_ctx(struct gk20a *g,
struct nvgpu_mem *gr_mem;
u64 size;
u32 data;
#ifdef CONFIG_NVGPU_GR_GOLDEN_CTX_VERIFICATION
struct nvgpu_gr_global_ctx_local_golden_image *local_golden_image_temp =
NULL;
#endif
nvgpu_log(g, gpu_dbg_gr, " ");
gr_mem = nvgpu_gr_ctx_get_ctx_mem(gr_ctx);
size = nvgpu_gr_obj_ctx_get_golden_image_size(golden_image);
#ifdef CONFIG_NVGPU_GR_GOLDEN_CTX_VERIFICATION
/*
@@ -644,15 +641,8 @@ static int nvgpu_gr_obj_ctx_save_golden_ctx(struct gk20a *g,
* before second golden context save. This temporary copy is
* saved in local_golden_image_temp.
*/
size = nvgpu_gr_obj_ctx_get_golden_image_size(golden_image);
local_golden_image_temp =
nvgpu_gr_global_ctx_init_local_golden_image(g, gr_mem, size);
if (local_golden_image_temp == NULL) {
err = -ENOMEM;
goto clean_up;
}
nvgpu_gr_global_ctx_init_local_golden_image(g,
golden_image->local_golden_image_copy, gr_mem, size);
#endif
data = g->ops.gr.falcon.get_fecs_current_ctx_data(g, inst_block);
@@ -662,23 +652,13 @@ static int nvgpu_gr_obj_ctx_save_golden_ctx(struct gk20a *g,
goto clean_up;
}
size = nvgpu_gr_obj_ctx_get_golden_image_size(golden_image);
golden_image->local_golden_image =
nvgpu_gr_global_ctx_init_local_golden_image(g, gr_mem, size);
if (golden_image->local_golden_image == NULL) {
err = -ENOMEM;
goto clean_up;
}
nvgpu_gr_global_ctx_init_local_golden_image(g,
golden_image->local_golden_image, gr_mem, size);
#ifdef CONFIG_NVGPU_GR_GOLDEN_CTX_VERIFICATION
/* Before second golden context save restore to before known state */
nvgpu_gr_global_ctx_load_local_golden_image(g,
local_golden_image_temp, gr_mem);
/* free local copy now */
nvgpu_gr_global_ctx_deinit_local_golden_image(g,
local_golden_image_temp);
local_golden_image_temp = NULL;
golden_image->local_golden_image_copy, gr_mem);
/* Initiate second golden context save */
data = g->ops.gr.falcon.get_fecs_current_ctx_data(g, inst_block);
@@ -689,32 +669,26 @@ static int nvgpu_gr_obj_ctx_save_golden_ctx(struct gk20a *g,
}
/* Copy the data to local buffer */
local_golden_image_temp =
nvgpu_gr_global_ctx_init_local_golden_image(g, gr_mem, size);
if (local_golden_image_temp == NULL) {
err = -ENOMEM;
goto clean_up;
}
nvgpu_gr_global_ctx_init_local_golden_image(g,
golden_image->local_golden_image_copy, gr_mem, size);
/* Compare two golden context images */
if (!nvgpu_gr_global_ctx_compare_golden_images(g,
nvgpu_mem_is_sysmem(gr_mem),
golden_image->local_golden_image,
local_golden_image_temp,
golden_image->local_golden_image_copy,
size)) {
nvgpu_err(g, "golden context mismatch");
err = -ENOMEM;
}
/* free temporary copy now */
nvgpu_gr_global_ctx_deinit_local_golden_image(g,
golden_image->local_golden_image_copy);
golden_image->local_golden_image_copy = NULL;
#endif
clean_up:
#ifdef CONFIG_NVGPU_GR_GOLDEN_CTX_VERIFICATION
if (local_golden_image_temp != NULL) {
nvgpu_gr_global_ctx_deinit_local_golden_image(g,
local_golden_image_temp);
}
#endif
if (err == 0) {
nvgpu_log(g, gpu_dbg_gr, "golden image saved with size = %llu", size);
}
@@ -961,6 +935,7 @@ int nvgpu_gr_obj_ctx_init(struct gk20a *g,
struct nvgpu_gr_obj_ctx_golden_image **gr_golden_image, u32 size)
{
struct nvgpu_gr_obj_ctx_golden_image *golden_image;
int err;
nvgpu_log(g, gpu_dbg_gr, "size = %u", size);
@@ -973,6 +948,24 @@ int nvgpu_gr_obj_ctx_init(struct gk20a *g,
nvgpu_mutex_init(&golden_image->ctx_mutex);
err = nvgpu_gr_global_ctx_alloc_local_golden_image(g,
&golden_image->local_golden_image, size);
if (err != 0) {
nvgpu_kfree(g, golden_image);
return err;
}
#ifdef CONFIG_NVGPU_GR_GOLDEN_CTX_VERIFICATION
err = nvgpu_gr_global_ctx_alloc_local_golden_image(g,
&golden_image->local_golden_image_copy, size);
if (err != 0) {
nvgpu_gr_global_ctx_deinit_local_golden_image(g,
golden_image->local_golden_image);
nvgpu_kfree(g, golden_image);
return err;
}
#endif
*gr_golden_image = golden_image;
return 0;

View File

@@ -53,6 +53,14 @@ struct nvgpu_gr_obj_ctx_golden_image {
* Pointer to local Golden context image struct.
*/
struct nvgpu_gr_global_ctx_local_golden_image *local_golden_image;
#ifdef CONFIG_NVGPU_GR_GOLDEN_CTX_VERIFICATION
/**
* Pointer to local Golden context image struct used for Golden
* context verification.
*/
struct nvgpu_gr_global_ctx_local_golden_image *local_golden_image_copy;
#endif
};
#endif /* NVGPU_GR_OBJ_CTX_PRIV_H */

View File

@@ -240,15 +240,30 @@ bool nvgpu_gr_global_ctx_buffer_ready(
u32 index);
/**
* @brief Initialize local golden context image.
* @brief Allocate memory for local golden context image.
*
* @param g [in] Pointer to GPU driver struct.
* @param source_mem [in] Pointer to source memory.
* @param size [in] Size of local golden context image.
* @param g [in] Pointer to GPU driver struct.
* @param local_golden_image [in] Pointer to local golden context image struct.
* @param size [in] Size of local golden context image.
*
* This function allocates memory to store local golden context image
* and also for #nvgpu_gr_global_ctx_local_golden_image structure.
*
* @return 0 in case of success, < 0 in case of failure.
* @retval -ENOMEM if local golden image memory allocation fails.
*/
int nvgpu_gr_global_ctx_alloc_local_golden_image(struct gk20a *g,
struct nvgpu_gr_global_ctx_local_golden_image **img,
size_t size);
/**
* @brief Initialize local golden context image.
*
* @param g [in] Pointer to GPU driver struct.
* @param local_golden_image [in] Pointer to local golden image to be initialized.
* @param source_mem [in] Pointer to source memory.
* @param size [in] Size of local golden context image.
*
* This function will then initialize local golden context image by
* copying contents of #source_mem into newly created image.
*
@@ -256,11 +271,9 @@ bool nvgpu_gr_global_ctx_buffer_ready(
* ever graphics context image for any channel. Subsequent graphics
* context allocations will re-use this local golden image to
* initialize. See #nvgpu_gr_global_ctx_load_local_golden_image.
*
* @return Pointer to local golden context image struct.
*/
struct nvgpu_gr_global_ctx_local_golden_image *
nvgpu_gr_global_ctx_init_local_golden_image(struct gk20a *g,
void nvgpu_gr_global_ctx_init_local_golden_image(struct gk20a *g,
struct nvgpu_gr_global_ctx_local_golden_image *local_golden_image,
struct nvgpu_mem *source_mem, size_t size);
/**