From ac2e423af85241e8dadf67e299295d23f133f410 Mon Sep 17 00:00:00 2001
From: Philip Elcan
Date: Wed, 3 Oct 2018 09:15:35 -0400
Subject: [PATCH] gpu: nvgpu: add U*_MAX macros

Linux prefers U8_MAX, U16_MAX, etc to UCHAR_MAX, UINT_MAX, etc, so
define them for building the nvgpu driver on non-Linux OSes.

JIRA NVGPU-647

Change-Id: I141f87d19a561de71762f7edfe0b41dff6ad31ec
Signed-off-by: Philip Elcan
Reviewed-on: https://git-master.nvidia.com/r/1918214
Reviewed-by: svc-misra-checker
Reviewed-by: Automatic_Commit_Validation_User
GVS: Gerrit_Virtual_Submit
Reviewed-by: Adeel Raza
Reviewed-by: mobile promotions
Tested-by: mobile promotions
---
 drivers/gpu/nvgpu/include/nvgpu/types.h | 9 ++++++---
 1 file changed, 6 insertions(+), 3 deletions(-)

diff --git a/drivers/gpu/nvgpu/include/nvgpu/types.h b/drivers/gpu/nvgpu/include/nvgpu/types.h
index 3295af261..5d27daeb6 100644
--- a/drivers/gpu/nvgpu/include/nvgpu/types.h
+++ b/drivers/gpu/nvgpu/include/nvgpu/types.h
@@ -60,11 +60,14 @@
 #define U32(x) ((u32)(x))
 #define U64(x) ((u64)(x))
 
-/* Linux uses U8_MAX instead of UCHAR_MAX. We define it here for non-Linux
- * OSes
+/* Linux uses U8_MAX, U32_MAX, etc instead of UCHAR_MAX, UINT_MAX. We define
+ * them here for non-Linux OSes
  */
 #if !defined(__KERNEL__) && !defined(U8_MAX)
-#define U8_MAX ((u8)255)
+#define U8_MAX ((u8)~0U)
+#define U16_MAX ((u16)~0U)
+#define U32_MAX ((u32)~0U)
+#define U64_MAX ((u64)~0ULL)
 #endif
 
 #endif /* NVGPU_TYPES_H */
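
Reviewer note (not part of the patch): the ~0 idiom works because the cast
truncates an all-ones value to the target width. Below is a minimal standalone
sketch of the new definitions for sanity-checking; the stdint-based
u8/u16/u32/u64 typedefs are assumptions standing in for nvgpu's own type
definitions in nvgpu/types.h.

/*
 * Minimal standalone sketch; not part of the patch. The typedefs are
 * illustrative assumptions, not nvgpu's actual definitions.
 */
#include <stdint.h>
#include <stdio.h>

typedef uint8_t  u8;
typedef uint16_t u16;
typedef uint32_t u32;
typedef uint64_t u64;

#define U8_MAX  ((u8)~0U)
#define U16_MAX ((u16)~0U)
#define U32_MAX ((u32)~0U)
#define U64_MAX ((u64)~0ULL)

int main(void)
{
	/* Each cast truncates the all-ones bit pattern to the target width. */
	printf("U8_MAX  = %u\n", (unsigned)U8_MAX);              /* 255 */
	printf("U16_MAX = %u\n", (unsigned)U16_MAX);             /* 65535 */
	printf("U32_MAX = %u\n", (unsigned)U32_MAX);             /* 4294967295 */
	printf("U64_MAX = %llu\n", (unsigned long long)U64_MAX); /* 18446744073709551615 */
	return 0;
}

Compared with the old literal form ((u8)255), the ~0 form extends uniformly to
every width and cannot drift out of sync with the type's size.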