gpu: nvgpu: IOCTL to set stop_trigger type

Add IOCTL NVGPU_DBG_GPU_IOCTL_SET_NEXT_STOP_TRIGGER_TYPE
to set the next stop_trigger type (either single SM or
broadcast to all SMs).

Also, expose the APIs below to check and clear the broadcast flag:
gk20a_dbg_gpu_broadcast_stop_trigger()
gk20a_dbg_gpu_clear_broadcast_stop_trigger()

Bug 200156699

Change-Id: I5e6cd4b84e601889fb172e0cdbb6bd5a0d366eab
Signed-off-by: Deepak Nibade <dnibade@nvidia.com>
Reviewed-on: http://git-master/r/925882
GVS: Gerrit_Virtual_Submit
Reviewed-by: Terje Bergstrom <tbergstrom@nvidia.com>
Author: Deepak Nibade
Date: 2015-12-29 13:47:39 +05:30
Committed by: Terje Bergstrom
Parent: 8b665ac6b2
Commit: 595fa71585
3 changed files with 82 additions and 1 deletion


@@ -261,6 +261,51 @@ void gk20a_dbg_gpu_post_events(struct channel_gk20a *ch)
 	mutex_unlock(&ch->dbg_s_lock);
 }
 
+bool gk20a_dbg_gpu_broadcast_stop_trigger(struct channel_gk20a *ch)
+{
+	struct dbg_session_gk20a *dbg_s;
+	bool broadcast = false;
+
+	gk20a_dbg(gpu_dbg_fn | gpu_dbg_gpu_dbg | gpu_dbg_intr, "");
+
+	/* guard against the session list being modified */
+	mutex_lock(&ch->dbg_s_lock);
+
+	list_for_each_entry(dbg_s, &ch->dbg_s_list, dbg_s_list_node) {
+		if (dbg_s->broadcast_stop_trigger) {
+			gk20a_dbg(gpu_dbg_gpu_dbg | gpu_dbg_fn | gpu_dbg_intr,
+					"stop trigger broadcast enabled");
+			broadcast = true;
+			break;
+		}
+	}
+
+	mutex_unlock(&ch->dbg_s_lock);
+
+	return broadcast;
+}
+
+int gk20a_dbg_gpu_clear_broadcast_stop_trigger(struct channel_gk20a *ch)
+{
+	struct dbg_session_gk20a *dbg_s;
+
+	gk20a_dbg(gpu_dbg_fn | gpu_dbg_gpu_dbg | gpu_dbg_intr, "");
+
+	/* guard against the session list being modified */
+	mutex_lock(&ch->dbg_s_lock);
+
+	list_for_each_entry(dbg_s, &ch->dbg_s_list, dbg_s_list_node) {
+		if (dbg_s->broadcast_stop_trigger) {
+			gk20a_dbg(gpu_dbg_gpu_dbg | gpu_dbg_fn | gpu_dbg_intr,
+					"stop trigger broadcast disabled");
+			dbg_s->broadcast_stop_trigger = false;
+		}
+	}
+
+	mutex_unlock(&ch->dbg_s_lock);
+
+	return 0;
+}
 
 static int dbg_set_powergate(struct dbg_session_gk20a *dbg_s,
 		__u32 powermode);
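For context, a minimal sketch of how a kernel-side caller (e.g. an SM exception path) might consume these two helpers; the surrounding function and the suspend_all_sms()/suspend_single_sm() callees are hypothetical names for illustration and are not part of this change:

static void example_handle_sm_debugger_stop(struct gk20a *g,
					    struct channel_gk20a *ch, u32 sm)
{
	if (gk20a_dbg_gpu_broadcast_stop_trigger(ch)) {
		/* a debug session asked for the next stop trigger to hit all SMs */
		suspend_all_sms(g);		/* hypothetical callee */
		/* the request is one-shot: clear it once it has been honoured */
		gk20a_dbg_gpu_clear_broadcast_stop_trigger(ch);
	} else {
		/* default: stop only the SM that raised the exception */
		suspend_single_sm(g, sm);	/* hypothetical callee */
	}
}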
@@ -465,6 +510,21 @@ static void nvgpu_dbg_gpu_ioctl_get_timeout(struct dbg_session_gk20a *dbg_s,
 	args->enable = NVGPU_DBG_GPU_IOCTL_TIMEOUT_DISABLE;
 }
 
+static int nvgpu_dbg_gpu_ioctl_set_next_stop_trigger_type(
+		struct dbg_session_gk20a *dbg_s,
+		struct nvgpu_dbg_gpu_set_next_stop_trigger_type_args *args)
+{
+	gk20a_dbg(gpu_dbg_fn | gpu_dbg_gpu_dbg, "");
+
+	gk20a_dbg_session_mutex_lock(dbg_s);
+
+	dbg_s->broadcast_stop_trigger = (args->broadcast != 0);
+
+	gk20a_dbg_session_mutex_unlock(dbg_s);
+
+	return 0;
+}
 
 long gk20a_dbg_gpu_dev_ioctl(struct file *filp, unsigned int cmd,
 		unsigned long arg)
 {
@@ -542,6 +602,11 @@ long gk20a_dbg_gpu_dev_ioctl(struct file *filp, unsigned int cmd,
 			(struct nvgpu_dbg_gpu_pc_sampling_args *)buf);
 		break;
 
+	case NVGPU_DBG_GPU_IOCTL_SET_NEXT_STOP_TRIGGER_TYPE:
+		err = nvgpu_dbg_gpu_ioctl_set_next_stop_trigger_type(dbg_s,
+			(struct nvgpu_dbg_gpu_set_next_stop_trigger_type_args *)buf);
+		break;
+
 	case NVGPU_DBG_GPU_IOCTL_TIMEOUT:
 		err = nvgpu_dbg_gpu_ioctl_timeout(dbg_s,
 			(struct nvgpu_dbg_gpu_timeout_args *)buf);


@@ -79,8 +79,13 @@ struct dbg_session_gk20a {
 	/* event support */
 	struct dbg_gpu_session_events dbg_events;
 	struct list_head dbg_s_list_node;
+
+	bool broadcast_stop_trigger;
 };
 
 extern struct dbg_gpu_session_ops dbg_gpu_session_ops_gk20a;
 
+bool gk20a_dbg_gpu_broadcast_stop_trigger(struct channel_gk20a *ch);
+int gk20a_dbg_gpu_clear_broadcast_stop_trigger(struct channel_gk20a *ch);
+
 #endif /* DBG_GPU_GK20A_H */


@@ -607,8 +607,19 @@ struct nvgpu_dbg_gpu_timeout_args {
 #define NVGPU_DBG_GPU_IOCTL_GET_TIMEOUT \
 	_IOR(NVGPU_DBG_GPU_IOCTL_MAGIC, 11, struct nvgpu_dbg_gpu_timeout_args)
 
+struct nvgpu_dbg_gpu_set_next_stop_trigger_type_args {
+	__u32 broadcast;
+	__u32 reserved;
+};
+
+#define NVGPU_DBG_GPU_IOCTL_SET_NEXT_STOP_TRIGGER_TYPE \
+	_IOWR(NVGPU_DBG_GPU_IOCTL_MAGIC, 12, struct nvgpu_dbg_gpu_set_next_stop_trigger_type_args)
+
 #define NVGPU_DBG_GPU_IOCTL_LAST \
-	_IOC_NR(NVGPU_DBG_GPU_IOCTL_GET_TIMEOUT)
+	_IOC_NR(NVGPU_DBG_GPU_IOCTL_SET_NEXT_STOP_TRIGGER_TYPE)
 
 #define NVGPU_DBG_GPU_IOCTL_MAX_ARG_SIZE \
 	sizeof(struct nvgpu_dbg_gpu_perfbuf_map_args)
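
For completeness, a hedged userspace sketch of driving the new IOCTL on an already-open dbg-gpu file descriptor; the <linux/nvgpu.h> header path and the lack of error handling are assumptions for illustration only:

#include <sys/ioctl.h>
#include <linux/nvgpu.h>	/* assumed install location of the nvgpu UAPI header */

/* Request that the next stop trigger be broadcast to all SMs (pass 0 for single-SM). */
static int set_broadcast_stop_trigger(int dbg_fd, int broadcast)
{
	struct nvgpu_dbg_gpu_set_next_stop_trigger_type_args args = {
		.broadcast = broadcast ? 1 : 0,
		.reserved = 0,
	};

	return ioctl(dbg_fd, NVGPU_DBG_GPU_IOCTL_SET_NEXT_STOP_TRIGGER_TYPE, &args);
}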