Updating prebuilts and/or headers
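
Each entry below pairs a SHA-1-style digest with the path of an imported file. As a minimal sketch (not part of the original tree), the following Python helper shows how a "<hash> - <path>" manifest like this one could be checked against a local checkout; whether the digests are plain SHA-1 of the file contents or git-style blob hashes is an assumption, so the script tries both.

    #!/usr/bin/env python3
    # Hypothetical helper: verify "<sha1> - <path>" manifest lines against a
    # local source tree. Assumption: each digest is either a plain SHA-1 of the
    # file contents or a git-style blob hash ("blob <size>\0" prefix).
    import hashlib
    import sys
    from pathlib import Path

    def plain_sha1(data: bytes) -> str:
        return hashlib.sha1(data).hexdigest()

    def git_blob_sha1(data: bytes) -> str:
        # git hashes blobs as sha1("blob <len>\0" + contents)
        header = f"blob {len(data)}\0".encode()
        return hashlib.sha1(header + data).hexdigest()

    def verify(manifest: Path, root: Path) -> int:
        mismatches = 0
        for line in manifest.read_text().splitlines():
            line = line.strip()
            if not line or " - " not in line:
                continue  # skip the heading and blank lines
            expected, rel = line.split(" - ", 1)
            path = root / rel
            if not path.is_file():
                print(f"MISSING  {rel}")
                mismatches += 1
                continue
            data = path.read_bytes()
            if expected not in (plain_sha1(data), git_blob_sha1(data)):
                print(f"MISMATCH {rel}")
                mismatches += 1
        return mismatches

    if __name__ == "__main__":
        # Usage: verify_manifest.py <manifest.txt> <dir containing the source tree>
        sys.exit(1 if verify(Path(sys.argv[1]), Path(sys.argv[2])) else 0)

Run against the directory that contains NVIDIA-kernel-module-source-TempVersion, it prints any file that is missing or whose digest no longer matches the recorded value, which is the check one would typically want when refreshing these prebuilts or headers.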

c2e810fc3453d74ee0493168dbf7981ba482acd3 - NVIDIA-kernel-module-source-TempVersion/SECURITY.md
7d577fdb9594ae572ff38fdda682a4796ab832ca - NVIDIA-kernel-module-source-TempVersion/COPYING
12f1806bdc25917299525e0e48815306159de132 - NVIDIA-kernel-module-source-TempVersion/Makefile
845f84d973e2d7122831bc1f118f27145c691080 - NVIDIA-kernel-module-source-TempVersion/README.md
4f4410c3c8db46e5a98d7a35f7d909a49de6cb43 - NVIDIA-kernel-module-source-TempVersion/kernel-open/Makefile
d8d7c839f0517ae8092f9c0679d5ca05f03ec741 - NVIDIA-kernel-module-source-TempVersion/kernel-open/conftest.sh
fb6731582ade01ed43aab7b0ad2907736547ee11 - NVIDIA-kernel-module-source-TempVersion/kernel-open/Kbuild
0b1508742a1c5a04b6c3a4be1b48b506f4180848 - NVIDIA-kernel-module-source-TempVersion/kernel-open/dkms.conf
1d17329caf26cdf931122b3c3b7edf4932f43c38 - NVIDIA-kernel-module-source-TempVersion/kernel-open/common/inc/nv-msi.h
88399279bd5e31b6e77cb32c7ef6220ce529526b - NVIDIA-kernel-module-source-TempVersion/kernel-open/common/inc/nv-hypervisor.h
60ef64c0f15526ae2d786e5cec07f28570f0663b - NVIDIA-kernel-module-source-TempVersion/kernel-open/common/inc/conftest.h
ea98628370602119afb1a065ff954784757ddb10 - NVIDIA-kernel-module-source-TempVersion/kernel-open/common/inc/os_dsi_panel_props.h
c06b2748cd7c8f86b5864d5e9abe6ecf0ab622f0 - NVIDIA-kernel-module-source-TempVersion/kernel-open/common/inc/nv-hash.h
423282211355a8cb20bff268166885ac90e2986c - NVIDIA-kernel-module-source-TempVersion/kernel-open/common/inc/nv_uvm_interface.h
c75bfc368c6ce3fc2c1a0c5062834e90d822b365 - NVIDIA-kernel-module-source-TempVersion/kernel-open/common/inc/nv-memdbg.h
35da37c070544f565d0f1de82abc7569b5df06af - NVIDIA-kernel-module-source-TempVersion/kernel-open/common/inc/nv_firmware_types.h
82940edf4650b9be67275d3a360ef4e63387a0a7 - NVIDIA-kernel-module-source-TempVersion/kernel-open/common/inc/cpuopsys.h
1d8b347e4b92c340a0e9eac77e0f63b9fb4ae977 - NVIDIA-kernel-module-source-TempVersion/kernel-open/common/inc/nv-ioctl-numbers.h
4b7414705ce10f0a1e312c36a43824b59d572661 - NVIDIA-kernel-module-source-TempVersion/kernel-open/common/inc/nvmisc.h
e4a4f57abb8769d204468b2f5000c81f5ea7c92f - NVIDIA-kernel-module-source-TempVersion/kernel-open/common/inc/nv-procfs.h
9c4a7224553926aac9af460ae4e008bb7d023add - NVIDIA-kernel-module-source-TempVersion/kernel-open/common/inc/nv-proto.h
b249abc0a7d0c9889008e98cb2f8515a9d310b85 - NVIDIA-kernel-module-source-TempVersion/kernel-open/common/inc/nvgputypes.h
e20882a9b14f2bf887e7465d3f238e5ac17bc2f5 - NVIDIA-kernel-module-source-TempVersion/kernel-open/common/inc/nv_speculation_barrier.h
5c4c05e5a638888babb5a8af2f0a61c94ecd150b - NVIDIA-kernel-module-source-TempVersion/kernel-open/common/inc/nvkms-format.h
b4c5d759f035b540648117b1bff6b1701476a398 - NVIDIA-kernel-module-source-TempVersion/kernel-open/common/inc/nvCpuUuid.h
880e45b68b19fdb91ac94991f0e6d7fc3b406b1f - NVIDIA-kernel-module-source-TempVersion/kernel-open/common/inc/nv-pci-types.h
c45b2faf17ca2a205c56daa11e3cb9d864be2238 - NVIDIA-kernel-module-source-TempVersion/kernel-open/common/inc/nv-modeset-interface.h
349696856890bdbe76f457376648522b35f874ef - NVIDIA-kernel-module-source-TempVersion/kernel-open/common/inc/nvimpshared.h
003b2cbe3d82e467c09371aee86e48d65ae6c29b - NVIDIA-kernel-module-source-TempVersion/kernel-open/common/inc/nv-ioctl-numa.h
b642fb649ce2ba17f37c8aa73f61b38f99a74986 - NVIDIA-kernel-module-source-TempVersion/kernel-open/common/inc/nv-retpoline.h
1e7eec6561b04d2d21c3515987aaa116e9401c1f - NVIDIA-kernel-module-source-TempVersion/kernel-open/common/inc/nv-kernel-interface-api.h
3b12d770f8592b94a8c7774c372e80ad08c5774c - NVIDIA-kernel-module-source-TempVersion/kernel-open/common/inc/nvi2c.h
b02c378ac0521c380fc2403f0520949f785b1db6 - NVIDIA-kernel-module-source-TempVersion/kernel-open/common/inc/nv-dmabuf.h
3100c536eb4c81ae913b92d4bc5905e752301311 - NVIDIA-kernel-module-source-TempVersion/kernel-open/common/inc/os-interface.h
143051f69a53db0e7c5d2f846a9c14d666e264b4 - NVIDIA-kernel-module-source-TempVersion/kernel-open/common/inc/nv-kref.h
3a26838c4edd3525daa68ac6fc7b06842dc6fc07 - NVIDIA-kernel-module-source-TempVersion/kernel-open/common/inc/nv-gpu-info.h
7b2e2e6ff278acddc6980b330f68e374f38e0a6c - NVIDIA-kernel-module-source-TempVersion/kernel-open/common/inc/nv-timer.h
fdbaee144adb26c00776b802560e15f775ed5aef - NVIDIA-kernel-module-source-TempVersion/kernel-open/common/inc/nv-mm.h
befb2c0bf0a31b61be5469575ce3c73a9204f4e9 - NVIDIA-kernel-module-source-TempVersion/kernel-open/common/inc/nv_stdarg.h
80fcb510fad25cb7a017139f487da1843b7cfcbd - NVIDIA-kernel-module-source-TempVersion/kernel-open/common/inc/nv-lock.h
59d537c1d1b284a9d52277aff87c237e3ec2c99d - NVIDIA-kernel-module-source-TempVersion/kernel-open/common/inc/nv-procfs-utils.h
e3362c33fe6c7cdec013eceac31e8f6f38dc465f - NVIDIA-kernel-module-source-TempVersion/kernel-open/common/inc/nv_uvm_types.h
5d8de06378994201e91c2179d149c0edcd694900 - NVIDIA-kernel-module-source-TempVersion/kernel-open/common/inc/nvstatuscodes.h
95bf694a98ba78d5a19e66463b8adda631e6ce4c - NVIDIA-kernel-module-source-TempVersion/kernel-open/common/inc/nvstatus.h
4750735d6f3b334499c81d499a06a654a052713d - NVIDIA-kernel-module-source-TempVersion/kernel-open/common/inc/nv-caps.h
009cd8e2b7ee8c0aeb05dac44cc84fc8f6f37c06 - NVIDIA-kernel-module-source-TempVersion/kernel-open/common/inc/nvkms-kapi.h
2473d97c29e22920af1cf15b845287f24e78cdda - NVIDIA-kernel-module-source-TempVersion/kernel-open/common/inc/nv-linux.h
4b1a6c372a531b0d3e0a4e9815dde74cb222447c - NVIDIA-kernel-module-source-TempVersion/kernel-open/common/inc/rm-gpu-ops.h
94ad0ba9fd6eb21445baec4fddd7c67a30cceefa - NVIDIA-kernel-module-source-TempVersion/kernel-open/common/inc/nv-pci.h
f3e0f71abf34300d322e313adcd4fcbde9aa6f87 - NVIDIA-kernel-module-source-TempVersion/kernel-open/common/inc/nv-kthread-q.h
256b5dc6f28738b3ce656c984f01d8f3e13e9faa - NVIDIA-kernel-module-source-TempVersion/kernel-open/common/inc/nv-pgprot.h
c57259130166701bf6d5e5bb1968397716d29fc0 - NVIDIA-kernel-module-source-TempVersion/kernel-open/common/inc/nv-platform.h
84e9b6cba7ba26ef4032666f769c5b43fa510aad - NVIDIA-kernel-module-source-TempVersion/kernel-open/common/inc/nv-list-helpers.h
53ceca28c6a6da14ef62a4c57545089c48e6b2be - NVIDIA-kernel-module-source-TempVersion/kernel-open/common/inc/nv.h
910255a4d92e002463175a28e38c3f24716fb654 - NVIDIA-kernel-module-source-TempVersion/kernel-open/common/inc/nvkms-api-types.h
42ece56d0459eb9f27b2497de48f08360c4f7f6b - NVIDIA-kernel-module-source-TempVersion/kernel-open/common/inc/nvlimits.h
4a8b7f3cc65fa530670f510796bef51cf8c4bb6b - NVIDIA-kernel-module-source-TempVersion/kernel-open/common/inc/nv-register-module.h
5fd1da24ae8263c43dc5dada4702564b6f0ca3d9 - NVIDIA-kernel-module-source-TempVersion/kernel-open/common/inc/dce_rm_client_ipc.h
906329ae5773732896e6fe94948f7674d0b04c17 - NVIDIA-kernel-module-source-TempVersion/kernel-open/common/inc/os_gpio.h
2f5fec803685c61c13f7955baaed056b5524652c - NVIDIA-kernel-module-source-TempVersion/kernel-open/common/inc/nv-ioctl.h
d25291d32caef187daf3589ce4976e4fa6bec70d - NVIDIA-kernel-module-source-TempVersion/kernel-open/common/inc/nv-time.h
8c041edbf4ed4fefdfd8006252cf542e34aa617b - NVIDIA-kernel-module-source-TempVersion/kernel-open/common/inc/nvtypes.h
cda75171ca7d8bf920aab6d56ef9aadec16fd15d - NVIDIA-kernel-module-source-TempVersion/kernel-open/common/inc/os/nv_memory_type.h
2ea1436104463c5e3d177e8574c3b4298976d37e - NVIDIA-kernel-module-source-TempVersion/kernel-open/nvidia-modeset/nvkms-ioctl.h
17855f638fd09abfec7d188e49b396793a9f6106 - NVIDIA-kernel-module-source-TempVersion/kernel-open/nvidia-modeset/nvkms.h
8bcd1ca9c55362c03a435e226b05796be8c92226 - NVIDIA-kernel-module-source-TempVersion/kernel-open/nvidia-modeset/nvidia-modeset-linux.c
0b7e063481a0e195c6e91a4d3464c4792c684f03 - NVIDIA-kernel-module-source-TempVersion/kernel-open/nvidia-modeset/nv-kthread-q.c
07a2d5fa54ff88a0cb30c0945ef3c33ca630a490 - NVIDIA-kernel-module-source-TempVersion/kernel-open/nvidia-modeset/nvidia-modeset.Kbuild
8a935bdda64e1d701279ef742b973c5dbed5727b - NVIDIA-kernel-module-source-TempVersion/kernel-open/nvidia-modeset/nvidia-modeset-os-interface.h
8bedc7374d7a43250e49fb09139c511b489d45e3 - NVIDIA-kernel-module-source-TempVersion/kernel-open/nvidia/nv-pci-table.h
9a0f445fda73c69e1bee7f6b121cbed33fcb01bf - NVIDIA-kernel-module-source-TempVersion/kernel-open/nvidia/nv-mmap.c
5f2dafa23c74ba7b04aaf43ef5808457ba9be2fa - NVIDIA-kernel-module-source-TempVersion/kernel-open/nvidia/nv.c
95ae148b016e4111122c2d9f8f004b53e78998f3 - NVIDIA-kernel-module-source-TempVersion/kernel-open/nvidia/nv-memdbg.c
9fb0f406f9a5af431f1b72c9c4395b4933dbcf58 - NVIDIA-kernel-module-source-TempVersion/kernel-open/nvidia/nvidia.Kbuild
3ee953312a6a246d65520fc4a65407f448d1d2b8 - NVIDIA-kernel-module-source-TempVersion/kernel-open/nvidia/nv-gpio.c
cded6e9b6324fd429b865173596c8e549a682bba - NVIDIA-kernel-module-source-TempVersion/kernel-open/nvidia/nv_uvm_interface.c
9f2298f179ad00f1a914b26b274eb2a68068eece - NVIDIA-kernel-module-source-TempVersion/kernel-open/nvidia/nv-i2c.c
c1ebcfec42f7898dd9d909eacd439d288b80523f - NVIDIA-kernel-module-source-TempVersion/kernel-open/nvidia/os-mlock.c
d11ab03a617b29efcf00f85e24ebce60f91cf82c - NVIDIA-kernel-module-source-TempVersion/kernel-open/nvidia/nv-backlight.c
dc39c4ee87f4dc5f5ccc179a98e07ddb82bb8bce - NVIDIA-kernel-module-source-TempVersion/kernel-open/nvidia/nv-modeset-interface.c
93511db745073b4a906fe28bea03c3b3d76d4df4 - NVIDIA-kernel-module-source-TempVersion/kernel-open/nvidia/nv-host1x.c
06e7ec77cd21c43f900984553a4960064753e444 - NVIDIA-kernel-module-source-TempVersion/kernel-open/nvidia/nv-platform-pm.c
335e7a5c99c7e8412a425adb82834234cd76b985 - NVIDIA-kernel-module-source-TempVersion/kernel-open/nvidia/os-interface.c
cd7e12552cb5249e5c23147d5cc924681c691e8a - NVIDIA-kernel-module-source-TempVersion/kernel-open/nvidia/nv-platform.c
805042e7cdb9663a0d3ca3064baeec8aa8eb3688 - NVIDIA-kernel-module-source-TempVersion/kernel-open/nvidia/nv-ibmnpu.c
c7f1aaa6a5f3a3cdf1e5f80adf40b3c9f185fb94 - NVIDIA-kernel-module-source-TempVersion/kernel-open/nvidia/nv-report-err.c
0b0ec8d75dfece909db55136731196162c4152d5 - NVIDIA-kernel-module-source-TempVersion/kernel-open/nvidia/nv-dmabuf.c
84d84563c003d3f568068e7322ce314387a6f579 - NVIDIA-kernel-module-source-TempVersion/kernel-open/nvidia/nv-caps.c
94c406f36836c3396b0ca08b4ff71496666b9c43 - NVIDIA-kernel-module-source-TempVersion/kernel-open/nvidia/os-usermap.c
fbae5663e3c278d8206d07ec6446ca4c2781795f - NVIDIA-kernel-module-source-TempVersion/kernel-open/nvidia/nv-ibmnpu.h
2c0d17f9babe897435c7dfa43adb96020f45da2b - NVIDIA-kernel-module-source-TempVersion/kernel-open/nvidia/nv-dsi-parse-panel-props.c
9b701fe42a0e87d62c58b15c553086a608e89f7b - NVIDIA-kernel-module-source-TempVersion/kernel-open/nvidia/nv-frontend.h
0ce95e5ed52d6d6ca2bb6aac33ca8f197145ec45 - NVIDIA-kernel-module-source-TempVersion/kernel-open/nvidia/nv-procfs-utils.c
cf90d9ea3abced81d182ab3c4161e1b5d3ad280d - NVIDIA-kernel-module-source-TempVersion/kernel-open/nvidia/nv-rsync.h
02b1936dd9a9e30141245209d79b8304b7f12eb9 - NVIDIA-kernel-module-source-TempVersion/kernel-open/nvidia/nv-cray.c
5ad9d39b1dde261b61908fa039ca1b60aae46589 - NVIDIA-kernel-module-source-TempVersion/kernel-open/nvidia/nv-p2p.c
218aac0c408be15523a2d0b70fdbdadd7e1a2e48 - NVIDIA-kernel-module-source-TempVersion/kernel-open/nvidia/nv-imp.c
6d4fbea733fdcd92fc6a8a5884e8bb359f9e8abd - NVIDIA-kernel-module-source-TempVersion/kernel-open/nvidia/rmp2pdefines.h
5ac10d9b20ccd37e1e24d4a81b8ac8f83db981e4 - NVIDIA-kernel-module-source-TempVersion/kernel-open/nvidia/nv-vtophys.c
cbfee8ea704ceb9f223e4f32c57e515350b8d9fd - NVIDIA-kernel-module-source-TempVersion/kernel-open/nvidia/nv-dma.c
fc566df59becef7bc7511ae62a9a97b1532a5af2 - NVIDIA-kernel-module-source-TempVersion/kernel-open/nvidia/nv-frontend.c
b71bf4426322ab59e78e2a1500509a5f4b2b71ab - NVIDIA-kernel-module-source-TempVersion/kernel-open/nvidia/nv-pat.h
a3626bf1b80a81c14408c5181e8bd27696df2caf - NVIDIA-kernel-module-source-TempVersion/kernel-open/nvidia/nv-pci.c
98c1be29932b843453567d4ada2f9912ea4523d7 - NVIDIA-kernel-module-source-TempVersion/kernel-open/nvidia/nv-vm.c
0b7e063481a0e195c6e91a4d3464c4792c684f03 - NVIDIA-kernel-module-source-TempVersion/kernel-open/nvidia/nv-kthread-q.c
61eadfa0f5b44a3d95e4d2d42d79321fc909c661 - NVIDIA-kernel-module-source-TempVersion/kernel-open/nvidia/nv-clk.c
4eee7319202366822e17d29ecec9f662c075e7ac - NVIDIA-kernel-module-source-TempVersion/kernel-open/nvidia/nv-rsync.c
786a71433ddc0411783cb71d4062939981c7db1f - NVIDIA-kernel-module-source-TempVersion/kernel-open/nvidia/nv-acpi.c
64f1c96761f6d9e7e02ab049dd0c810196568036 - NVIDIA-kernel-module-source-TempVersion/kernel-open/nvidia/nv-pat.c
d844fcaa5b02f1d1a753965a336287148b2ce689 - NVIDIA-kernel-module-source-TempVersion/kernel-open/nvidia/nv-p2p.h
dc165103f9196f5f9e97433ec32ef6dded86d4bb - NVIDIA-kernel-module-source-TempVersion/kernel-open/nvidia/os-registry.c
68d781e929d103e6fa55fa92b5d4f933fbfb6526 - NVIDIA-kernel-module-source-TempVersion/kernel-open/nvidia/nv-report-err.h
978d00b0d319c5ad5c0d3732b0e44f4ac0ac9a4c - NVIDIA-kernel-module-source-TempVersion/kernel-open/nvidia/nv_gpu_ops.h
fbfa2125b2bac1953af6d6fd99352898e516a686 - NVIDIA-kernel-module-source-TempVersion/kernel-open/nvidia/nv-msi.c
027fd0ab218eb98abe2b66d05f10b14ebb57e7a3 - NVIDIA-kernel-module-source-TempVersion/kernel-open/nvidia/nv-nano-timer.c
07f95171c241880c472a630d1ee38fb222be4d59 - NVIDIA-kernel-module-source-TempVersion/kernel-open/nvidia/nvidia-sources.Kbuild
a392fa800565c8345b07af5132db7078b914d59f - NVIDIA-kernel-module-source-TempVersion/kernel-open/nvidia/os-pci.c
ee894ec530acbd765c04aec93c1c312d42210aeb - NVIDIA-kernel-module-source-TempVersion/kernel-open/nvidia/nv-ipc-soc.c
f179d308e984ff44a82f6e1c6007624f1ac916ba - NVIDIA-kernel-module-source-TempVersion/kernel-open/nvidia/nv-procfs.c
50c54c3fced0934d04ef66231cc4420f6a0dda6c - NVIDIA-kernel-module-source-TempVersion/kernel-open/nvidia/nv-reg.h
7ac10bc4b3b1c5a261388c3f5f9ce0e9b35d7b44 - NVIDIA-kernel-module-source-TempVersion/kernel-open/nvidia/nv-usermap.c
d9221522e02e18b037b8929fbc075dc3c1e58654 - NVIDIA-kernel-module-source-TempVersion/kernel-open/nvidia/nv-pci-table.c
8bedc7374d7a43250e49fb09139c511b489d45e3 - NVIDIA-kernel-module-source-TempVersion/kernel-open/nvidia-drm/nv-pci-table.h
eca70b3b8146903ec678a60eebb0462e6ccf4569 - NVIDIA-kernel-module-source-TempVersion/kernel-open/nvidia-drm/nvidia-drm-encoder.h
54cd87e7f8eca85599aad4fcf70573f6361c4332 - NVIDIA-kernel-module-source-TempVersion/kernel-open/nvidia-drm/nvidia-drm.Kbuild
e4bb0073eb9d6f965923bb9874e4714518850a27 - NVIDIA-kernel-module-source-TempVersion/kernel-open/nvidia-drm/nvidia-drm-connector.h
99642b76e9a84b5a1d2e2f4a8c7fb7bcd77a44fd - NVIDIA-kernel-module-source-TempVersion/kernel-open/nvidia-drm/nvidia-drm.h
8b2063f0cc2e328f4f986c2ce556cfb626c89810 - NVIDIA-kernel-module-source-TempVersion/kernel-open/nvidia-drm/nvidia-drm-utils.c
6528efa1f8061678b8543c5c0be8761cab860858 - NVIDIA-kernel-module-source-TempVersion/kernel-open/nvidia-drm/nvidia-drm-modeset.h
ab63f2a971db8bf10585b1a05fe0e3ca180ad6c7 - NVIDIA-kernel-module-source-TempVersion/kernel-open/nvidia-drm/nvidia-drm-os-interface.h
40b5613d1fbbe6b74bff67a5d07974ad321f75f0 - NVIDIA-kernel-module-source-TempVersion/kernel-open/nvidia-drm/nvidia-drm-utils.h
f927e6af2d72cf389851b558a0b1400e0f1cec7c - NVIDIA-kernel-module-source-TempVersion/kernel-open/nvidia-drm/nvidia-drm-helper.c
8c95aa7ab01dd928974ce7880a532557209bd8e0 - NVIDIA-kernel-module-source-TempVersion/kernel-open/nvidia-drm/nvidia-drm-gem.h
b0db208983d1f403fad72067d5557a0c40410fc1 - NVIDIA-kernel-module-source-TempVersion/kernel-open/nvidia-drm/nvidia-drm-connector.c
23586447526d9ffedd7878b6cf5ba00139fadb5e - NVIDIA-kernel-module-source-TempVersion/kernel-open/nvidia-drm/nvidia-drm-gem-user-memory.h
cbcd6e13d84ea6b52db12eda98be38e321888eb0 - NVIDIA-kernel-module-source-TempVersion/kernel-open/nvidia-drm/nvidia-drm-prime-fence.h
a7bc26c1078e95f9ff49c164f3652787adf1fef3 - NVIDIA-kernel-module-source-TempVersion/kernel-open/nvidia-drm/nvidia-drm-modeset.c
3703b18511fc6e6eec502ba25c961b8026ab064b - NVIDIA-kernel-module-source-TempVersion/kernel-open/nvidia-drm/nvidia-drm-crtc.c
c8982ace6fc79f75c092662902c0c61371195f0c - NVIDIA-kernel-module-source-TempVersion/kernel-open/nvidia-drm/nvidia-drm-linux.c
66b33e4ac9abe09835635f6776c1222deefad741 - NVIDIA-kernel-module-source-TempVersion/kernel-open/nvidia-drm/nvidia-drm-fb.h
6d65ea9f067e09831a8196022bfe00a145bec270 - NVIDIA-kernel-module-source-TempVersion/kernel-open/nvidia-drm/nvidia-drm-gem-dma-buf.h
45ec9fd1abfe9a0c7f9ffaf665014cec89c9e7e6 - NVIDIA-kernel-module-source-TempVersion/kernel-open/nvidia-drm/nvidia-drm-crtc.h
7129c765da5bfb77788441fed39b46dc7dc0fa8e - NVIDIA-kernel-module-source-TempVersion/kernel-open/nvidia-drm/nvidia-drm-gem-nvkms-memory.c
59bb05ef214b5c5f2fe3cf70142dabd47ea70650 - NVIDIA-kernel-module-source-TempVersion/kernel-open/nvidia-drm/nvidia-drm-ioctl.h
6ed7d41b0740987793f6c07d472893af308cfa0f - NVIDIA-kernel-module-source-TempVersion/kernel-open/nvidia-drm/nvidia-drm-prime-fence.c
044071d60c8cc8ea66c6caaf1b70fe01c4081ad3 - NVIDIA-kernel-module-source-TempVersion/kernel-open/nvidia-drm/nvidia-drm-conftest.h
708d02c8bcdfb12e4d55896e667821357c8251ec - NVIDIA-kernel-module-source-TempVersion/kernel-open/nvidia-drm/nvidia-drm-priv.h
dc0fe38909e2f38e919495b7b4f21652a035a3ee - NVIDIA-kernel-module-source-TempVersion/kernel-open/nvidia-drm/nvidia-drm.c
e4efab24f90d397c270568abb337ab815a447fec - NVIDIA-kernel-module-source-TempVersion/kernel-open/nvidia-drm/nvidia-dma-fence-helper.h
b775af5899366845f9b87393d17a0ab0f1f6a725 - NVIDIA-kernel-module-source-TempVersion/kernel-open/nvidia-drm/nvidia-drm-gem.c
1e05d0ff4e51a10fa3fcd6519dc915bf13aa69c0 - NVIDIA-kernel-module-source-TempVersion/kernel-open/nvidia-drm/nvidia-drm-helper.h
dd478f7ddb2875fc9ff608858df8d24a62f00152 - NVIDIA-kernel-module-source-TempVersion/kernel-open/nvidia-drm/nvidia-dma-resv-helper.h
892cac6dd51ccfde68b3c29a5676504f93ee8cd7 - NVIDIA-kernel-module-source-TempVersion/kernel-open/nvidia-drm/nvidia-drm-format.c
355126d65ea1472ce3b278066811d4fb764354ec - NVIDIA-kernel-module-source-TempVersion/kernel-open/nvidia-drm/nvidia-drm-gem-user-memory.c
5209eba37913f5d621a13091783622759706e6e3 - NVIDIA-kernel-module-source-TempVersion/kernel-open/nvidia-drm/nvidia-drm-fb.c
e362c64aa67b47becdbf5c8ba2a245e135adeedf - NVIDIA-kernel-module-source-TempVersion/kernel-open/nvidia-drm/nvidia-drm-gem-dma-buf.c
9a882b31b2acc9e1ad3909c0061eee536e648aae - NVIDIA-kernel-module-source-TempVersion/kernel-open/nvidia-drm/nvidia-drm-drv.h
170fc390de57f4dd92cf5005a8feabc4e90462d2 - NVIDIA-kernel-module-source-TempVersion/kernel-open/nvidia-drm/nvidia-drm-drv.c
97b6c56b1407de976898e0a8b5a8f38a5211f8bb - NVIDIA-kernel-module-source-TempVersion/kernel-open/nvidia-drm/nvidia-drm-format.h
d862cc13c29bbce52f6b380b7a0a45a07fe9cbac - NVIDIA-kernel-module-source-TempVersion/kernel-open/nvidia-drm/nvidia-drm-encoder.c
c294224282118c70cd546ae024a95479ad9b1de4 - NVIDIA-kernel-module-source-TempVersion/kernel-open/nvidia-drm/nvidia-drm-gem-nvkms-memory.h
d9221522e02e18b037b8929fbc075dc3c1e58654 - NVIDIA-kernel-module-source-TempVersion/kernel-open/nvidia-drm/nv-pci-table.c
bda08c8398f68ffc2866ebc390dc63a09a16b0b9 - NVIDIA-kernel-module-source-TempVersion/src/common/unix/common/utils/unix_rm_handle.c
e903bbbecf4fb3085aaccca0628f0a0e4aba3e58 - NVIDIA-kernel-module-source-TempVersion/src/common/unix/common/utils/nv_mode_timings_utils.c
5ef40af650eb65b2c87572a1bbfe655d8821f2d5 - NVIDIA-kernel-module-source-TempVersion/src/common/unix/common/utils/nv_memory_tracker.c
26f2a36442266c5d2664d509ecfd31094a83e152 - NVIDIA-kernel-module-source-TempVersion/src/common/unix/common/utils/nv_vasprintf.c
9e008270f277e243f9167ab50401602378a2a6e8 - NVIDIA-kernel-module-source-TempVersion/src/common/unix/common/utils/interface/nv_vasprintf.h
8d9c4d69394b23d689a4aa6727eb3da1d383765a - NVIDIA-kernel-module-source-TempVersion/src/common/unix/common/utils/interface/unix_rm_handle.h
07c675d22c4f0f4be6647b65b6487e2d6927c347 - NVIDIA-kernel-module-source-TempVersion/src/common/unix/common/utils/interface/nv_memory_tracker.h
667b361db93e35d12d979c47e4d7a68be9aa93b6 - NVIDIA-kernel-module-source-TempVersion/src/common/unix/common/utils/interface/nv_mode_timings_utils.h
881cbcc7ed39ea9198279136205dbe40142be35e - NVIDIA-kernel-module-source-TempVersion/src/common/unix/common/inc/nv_assert.h
1c947cfc8a133b00727104684764e5bb900c9d28 - NVIDIA-kernel-module-source-TempVersion/src/common/unix/common/inc/nv_mode_timings.h
83044eb5259200922f78ad3248fbc1d4de1ec098 - NVIDIA-kernel-module-source-TempVersion/src/common/unix/common/inc/nv_common_utils.h
2476f128437c0520204e13a4ddd2239ff3f40c21 - NVIDIA-kernel-module-source-TempVersion/src/common/unix/common/inc/nv-float.h
a8e49041c1b95431e604852ad0fa3612548e3c82 - NVIDIA-kernel-module-source-TempVersion/src/common/unix/common/inc/nv_dpy_id.h
e3be7ba45506c42d2fca87e9da45db75ced750ca - NVIDIA-kernel-module-source-TempVersion/src/common/modeset/hdmipacket/nvhdmipkt_common.h
f669280a5e86ba51b691e2609fa7d8c223bd85dc - NVIDIA-kernel-module-source-TempVersion/src/common/modeset/hdmipacket/nvhdmipkt_C671.c
7c2fe72426fa304315e169e91dc6c1c58b5422fd - NVIDIA-kernel-module-source-TempVersion/src/common/modeset/hdmipacket/nvhdmipkt_0073.c
381e1b8aeaa8bd586c51db1f9b37d3634285c16a - NVIDIA-kernel-module-source-TempVersion/src/common/modeset/hdmipacket/nvhdmipkt_class.h
67db549636b67a32d646fb7fc6c8db2f13689ecc - NVIDIA-kernel-module-source-TempVersion/src/common/modeset/hdmipacket/nvhdmipkt_9271.c
5e12a290fc91202e4ba9e823b6d8457594ed72d3 - NVIDIA-kernel-module-source-TempVersion/src/common/modeset/hdmipacket/nvhdmi_frlInterface.h
d2c79c8a4e914519d653d1f14f706ec4a1f787e8 - NVIDIA-kernel-module-source-TempVersion/src/common/modeset/hdmipacket/nvhdmipkt_9171.c
15d54c86d78404639c7f151adc672e19472dcf4a - NVIDIA-kernel-module-source-TempVersion/src/common/modeset/hdmipacket/nvhdmipkt.c
9be7b7be94a35d1d9a04f269ff560dbbb7860a2a - NVIDIA-kernel-module-source-TempVersion/src/common/modeset/hdmipacket/nvhdmipkt_9571.c
54a1b5e5aaf0848a72befc896ed12f1de433ad4f - NVIDIA-kernel-module-source-TempVersion/src/common/modeset/hdmipacket/nvhdmipkt_9471.c
443c0a4b17a0019e4de3032c93c5cac258529f01 - NVIDIA-kernel-module-source-TempVersion/src/common/modeset/hdmipacket/nvhdmipkt_internal.h
e6d500269128cbd93790fe68fbcad5ba45c2ba7d - NVIDIA-kernel-module-source-TempVersion/src/common/modeset/hdmipacket/nvhdmipkt_C371.c
90e8ce7638a28cd781b5d30df565116dc1cea9e8 - NVIDIA-kernel-module-source-TempVersion/src/common/modeset/hdmipacket/nvhdmipkt.h
f75b1d98895bdccda0db2d8dd8feba53b88180c5 - NVIDIA-kernel-module-source-TempVersion/src/common/modeset/timing/displayid.h
65f2503bea8aa1847948cc0d628493e89775c4f3 - NVIDIA-kernel-module-source-TempVersion/src/common/modeset/timing/nvt_displayid20.c
28d7b753825d5f4a9402aff14488c125453e95c5 - NVIDIA-kernel-module-source-TempVersion/src/common/modeset/timing/nvt_tv.c
b4813a5e854e75fb38f460e0c27dca8e1ce8dc21 - NVIDIA-kernel-module-source-TempVersion/src/common/modeset/timing/nvt_edid.c
1290abde75d218ae24f930c3b011042a3f360c2e - NVIDIA-kernel-module-source-TempVersion/src/common/modeset/timing/displayid20.h
4a2ad30f49ed92694b717a99ce7adeeb565e8a37 - NVIDIA-kernel-module-source-TempVersion/src/common/modeset/timing/nvt_edidext_861.c
439ef00ffa340bd1b6506970d154a33ca4b64b4a - NVIDIA-kernel-module-source-TempVersion/src/common/modeset/timing/nvt_dmt.c
cfaa569ac3d63484c86e8a8d7a483dd849f96be8 - NVIDIA-kernel-module-source-TempVersion/src/common/modeset/timing/nvt_edidext_displayid20.c
1997adbf2f6f5be7eb6c7a88e6660391a85d891b - NVIDIA-kernel-module-source-TempVersion/src/common/modeset/timing/nvt_gtf.c
49df9034c1634d0a9588e5588efa832a71750a37 - NVIDIA-kernel-module-source-TempVersion/src/common/modeset/timing/nvt_cvt.c
58b68f1272b069bb7819cbe86fd9e19d8acd0571 - NVIDIA-kernel-module-source-TempVersion/src/common/modeset/timing/edid.h
890d8c2898a3277b0fed360301c2dc2688724f47 - NVIDIA-kernel-module-source-TempVersion/src/common/modeset/timing/nvt_util.c
3023a58fd19d32280607d4027b09fe51fdb7a096 - NVIDIA-kernel-module-source-TempVersion/src/common/modeset/timing/nvt_dsc_pps.h
e66a20fc1579b0dd1392033089f97cf170e8cf10 - NVIDIA-kernel-module-source-TempVersion/src/common/modeset/timing/dpsdp.h
b5bd3a58b499216e4fe0e0c9c99525b07ac237dc - NVIDIA-kernel-module-source-TempVersion/src/common/modeset/timing/nvt_dsc_pps.c
f531475d8b978bca5b79d39d729b0c9986fe7b36 - NVIDIA-kernel-module-source-TempVersion/src/common/modeset/timing/nvtiming.h
95dae946088f21339299dae48eeafaab31b97b05 - NVIDIA-kernel-module-source-TempVersion/src/common/modeset/timing/nvtiming_pvt.h
0a04709ebdc4acb12038656c433e10c4e7096518 - NVIDIA-kernel-module-source-TempVersion/src/common/modeset/timing/nvt_edidext_displayid.c
1ff879eca2a273293b5cd6048419b2d2d8063b93 - NVIDIA-kernel-module-source-TempVersion/src/common/softfloat/source/f64_mulAdd.c
1a86a6948bf6768bd23a19f1f05d40968c1d2b15 - NVIDIA-kernel-module-source-TempVersion/src/common/softfloat/source/f64_rem.c
c3ce12c227d25bc0de48fbcf914fc208e2448741 - NVIDIA-kernel-module-source-TempVersion/src/common/softfloat/source/f64_sub.c
fb062ecbe62a1f5878fd47f0c61490f2bde279dd - NVIDIA-kernel-module-source-TempVersion/src/common/softfloat/source/s_roundToI32.c
38bd00e9c4d2f1354c611404cca6209a6c417669 - NVIDIA-kernel-module-source-TempVersion/src/common/softfloat/source/s_countLeadingZeros64.c
0e9694d551848d88531f5461a9b3b91611652e9a - NVIDIA-kernel-module-source-TempVersion/src/common/softfloat/source/f64_to_ui32_r_minMag.c
9f4d355d85fbe998e243fe4c7bbf8ad23062b6e2 - NVIDIA-kernel-module-source-TempVersion/src/common/softfloat/source/i64_to_f64.c
23b76c1d0be64e27a6f7e2ea7b8919f1a45a8e7c - NVIDIA-kernel-module-source-TempVersion/src/common/softfloat/source/f32_to_ui32_r_minMag.c
5c4ee32cc78efc718aaa60ec31d0b00b1bee3c2c - NVIDIA-kernel-module-source-TempVersion/src/common/softfloat/source/f64_to_ui64_r_minMag.c
09cb0cdb90eb23b53cd9c1a76ba26021084710d1 - NVIDIA-kernel-module-source-TempVersion/src/common/softfloat/source/s_addMagsF32.c
00c612847b3bd227a006a4a2697df85866b80315 - NVIDIA-kernel-module-source-TempVersion/src/common/softfloat/source/s_mulAddF32.c
29321080baa7eab86947ac825561fdcff54a0e43 - NVIDIA-kernel-module-source-TempVersion/src/common/softfloat/source/i32_to_f32.c
2e0fec421f4defd293cf55c5f3af7d91f4b7d2cc - NVIDIA-kernel-module-source-TempVersion/src/common/softfloat/source/ui64_to_f32.c
ebb4f674b6213fec29761fc4e05c1e3ddeda6d17 - NVIDIA-kernel-module-source-TempVersion/src/common/softfloat/source/f32_mulAdd.c
2e5c29d842a8ebc5fbf987068dc9394cee609cc7 - NVIDIA-kernel-module-source-TempVersion/src/common/softfloat/source/f32_to_ui64.c
daeb408588738b3eb4c8b092d7f92ac597cf1fc6 - NVIDIA-kernel-module-source-TempVersion/src/common/softfloat/source/f32_rem.c
da3b3f94a817909a3dc93ca5fa7675805c7979e0 - NVIDIA-kernel-module-source-TempVersion/src/common/softfloat/source/f64_isSignalingNaN.c
bc992c88f3de09e3a82447cf06dbde7c6604f7f8 - NVIDIA-kernel-module-source-TempVersion/src/common/softfloat/source/f64_to_f32.c
dafa667ee5dd52c97fc0c3b7144f6b619406c225 - NVIDIA-kernel-module-source-TempVersion/src/common/softfloat/source/s_mulAddF64.c
2960704c290f29aae36b8fe006884d5c4abcabb4 - NVIDIA-kernel-module-source-TempVersion/src/common/softfloat/source/f32_div.c
d4b26dc407a891e9ff5324853f1845a99c5d5cd2 - NVIDIA-kernel-module-source-TempVersion/src/common/softfloat/source/f32_to_i32.c
0adfa7e174cdb488bb22b06642e14e7fc6f49c67 - NVIDIA-kernel-module-source-TempVersion/src/common/softfloat/source/s_roundToI64.c
fd40a71c7ebf9d632a384fadf9487cfef4f3ea98 - NVIDIA-kernel-module-source-TempVersion/src/common/softfloat/source/s_shiftRightJam128.c
9a5b93459ace2da23964da98617d6b18006fab86 - NVIDIA-kernel-module-source-TempVersion/src/common/softfloat/source/s_countLeadingZeros8.c
ae25eea499b3ea5bdd96c905fd0542da11083048 - NVIDIA-kernel-module-source-TempVersion/src/common/softfloat/source/s_normRoundPackToF64.c
729e790328168c64d65a1355e990274c249bbb3a - NVIDIA-kernel-module-source-TempVersion/src/common/softfloat/source/f32_to_i32_r_minMag.c
296c40b0589536cb9af3231ad3dcd7f2baaa6887 - NVIDIA-kernel-module-source-TempVersion/src/common/softfloat/source/f64_lt.c
5c1026617c588bcf5f1e59230bd5bb900600b9ac - NVIDIA-kernel-module-source-TempVersion/src/common/softfloat/source/f64_mul.c
4b37be398b3e73ae59245f03b2ba2394fc902b4d - NVIDIA-kernel-module-source-TempVersion/src/common/softfloat/source/s_normSubnormalF64Sig.c
69dc4cc63b2a9873a6eb636ee7cb704cbd502001 - NVIDIA-kernel-module-source-TempVersion/src/common/softfloat/source/f64_to_ui32.c
d0f8f08c225b60d88b6358d344404ba9df3038ec - NVIDIA-kernel-module-source-TempVersion/src/common/softfloat/source/s_normSubnormalF32Sig.c
c951c9dffa123e4f77ed235eca49ef9b67f9f3d2 - NVIDIA-kernel-module-source-TempVersion/src/common/softfloat/source/s_subMagsF64.c
dde685423af544e5359efdb51b4bf9457c67fa3b - NVIDIA-kernel-module-source-TempVersion/src/common/softfloat/source/f32_sqrt.c
577821f706c7de4ca327c1e2fcc34161c96c89f3 - NVIDIA-kernel-module-source-TempVersion/src/common/softfloat/source/f64_to_i64_r_minMag.c
5a5e0d9f1ee7e8c0d1d4f9fbcf6eba330a5f1792 - NVIDIA-kernel-module-source-TempVersion/src/common/softfloat/source/f32_isSignalingNaN.c
84b0a01ba2a667eb28b166d45bd91352ead83e69 - NVIDIA-kernel-module-source-TempVersion/src/common/softfloat/source/i64_to_f32.c
513a7d1c3053fc119efcd8ae1bcc9652edc45315 - NVIDIA-kernel-module-source-TempVersion/src/common/softfloat/source/f32_lt.c
4445b1fbbd507144f038fd939311ff95bc2cf5f1 - NVIDIA-kernel-module-source-TempVersion/src/common/softfloat/source/ui64_to_f64.c
b9fd15957f7ae5effeccb5d8adaa7434b43f44e1 - NVIDIA-kernel-module-source-TempVersion/src/common/softfloat/source/s_roundToUI64.c
ab19c6b50c40b8089cb915226d4553d1aa902b0e - NVIDIA-kernel-module-source-TempVersion/src/common/softfloat/source/f64_to_i32_r_minMag.c
7bc81f5bc894118c08bfd52b59e010bc068ed762 - NVIDIA-kernel-module-source-TempVersion/src/common/softfloat/source/ui32_to_f32.c
7c8e5ab3f9bf6b2764ce5fffe80b2674be566a12 - NVIDIA-kernel-module-source-TempVersion/src/common/softfloat/source/softfloat_state.c
ec1a797b11f6e846928a4a49a8756f288bda1dfa - NVIDIA-kernel-module-source-TempVersion/src/common/softfloat/source/i32_to_f64.c
86fdc2472526375539216461732d1db6a9f85b55 - NVIDIA-kernel-module-source-TempVersion/src/common/softfloat/source/s_roundPackToF32.c
b22876b0695f58ee56143c9f461f1dde32fefbf3 - NVIDIA-kernel-module-source-TempVersion/src/common/softfloat/source/f64_to_ui64.c
d701741d8d6a92bb890e53deda1b795f5787f465 - NVIDIA-kernel-module-source-TempVersion/src/common/softfloat/source/f64_le.c
baa7af4eea226140c26ffe6ab02a863d07f729fb - NVIDIA-kernel-module-source-TempVersion/src/common/softfloat/source/f64_eq_signaling.c
ce37cdce572a3b02d42120e81c4969b39d1a67b6 - NVIDIA-kernel-module-source-TempVersion/src/common/softfloat/source/f64_to_i32.c
0108fe6f0d394ad72083aff9bb58507f97a0b669 - NVIDIA-kernel-module-source-TempVersion/src/common/softfloat/source/ui32_to_f64.c
b8c5ccc1e511637d8b2ba2657de4937b80c01c07 - NVIDIA-kernel-module-source-TempVersion/src/common/softfloat/source/f32_le.c
54cbeb5872a86e822bda852ec15d3dcdad4511ce - NVIDIA-kernel-module-source-TempVersion/src/common/softfloat/source/f64_add.c
c29536f617d71fe30accac44b2f1df61c98a97dc - NVIDIA-kernel-module-source-TempVersion/src/common/softfloat/source/f64_div.c
871cb1a4037d7b4e73cb20ad18390736eea7ae36 - NVIDIA-kernel-module-source-TempVersion/src/common/softfloat/source/f32_to_ui64_r_minMag.c
21a6232d93734b01692689258a3fdfbbf4ff089d - NVIDIA-kernel-module-source-TempVersion/src/common/softfloat/source/s_roundToUI32.c
760fd7c257a1f915b61a1089b2acb143c18a082e - NVIDIA-kernel-module-source-TempVersion/src/common/softfloat/source/s_addMagsF64.c
5e6f9e120a17cc73297a35e4d57e4b9cbce01780 - NVIDIA-kernel-module-source-TempVersion/src/common/softfloat/source/s_mul64To128.c
0bf499c0e3a54186fa32b38b310cc9d98ccdcfe3 - NVIDIA-kernel-module-source-TempVersion/src/common/softfloat/source/f32_eq.c
29396b7c23941024a59d5ea06698d2fbc7e1a6ca - NVIDIA-kernel-module-source-TempVersion/src/common/softfloat/source/f64_to_i64.c
108eec2abf1cddb397ce9f652465c2e52f7c143b - NVIDIA-kernel-module-source-TempVersion/src/common/softfloat/source/f64_roundToInt.c
fe06512577e642b09196d46430d038d027491e9f - NVIDIA-kernel-module-source-TempVersion/src/common/softfloat/source/f32_eq_signaling.c
d19ff7dfece53875f2d6c6f7dd9e7772f7b0b7ec - NVIDIA-kernel-module-source-TempVersion/src/common/softfloat/source/f32_to_i64_r_minMag.c
1484fc96d7731695bda674e99947280a86990997 - NVIDIA-kernel-module-source-TempVersion/src/common/softfloat/source/f32_to_i64.c
8e58f0258218475616ff4e6317516d40ad475626 - NVIDIA-kernel-module-source-TempVersion/src/common/softfloat/source/f32_lt_quiet.c
6fa7493285fe2f7fdc0ac056a6367e90327905c2 - NVIDIA-kernel-module-source-TempVersion/src/common/softfloat/source/f32_sub.c
aaf6ccb77a1a89fa055a0fb63513297b35e2e54b - NVIDIA-kernel-module-source-TempVersion/src/common/softfloat/source/f64_le_quiet.c
bbc70102b30f152a560eb98e7a1a4b11b9ede85e - NVIDIA-kernel-module-source-TempVersion/src/common/softfloat/source/f64_sqrt.c
e0ad81cfb5d2c0e74dc4ece9518ca15ffc77beaf - NVIDIA-kernel-module-source-TempVersion/src/common/softfloat/source/f32_roundToInt.c
50b3147f8413f0595a4c3d6e6eeab84c1ffecada - NVIDIA-kernel-module-source-TempVersion/src/common/softfloat/source/s_normRoundPackToF32.c
50daf9186bc5d0180d1453c957164b136d5ffc89 - NVIDIA-kernel-module-source-TempVersion/src/common/softfloat/source/f64_eq.c
6f83fa864007e8227ae09bb36a7fdc18832d4445 - NVIDIA-kernel-module-source-TempVersion/src/common/softfloat/source/f32_mul.c
a94c8c2bd74633027e52e96f41d24714d8081eb4 - NVIDIA-kernel-module-source-TempVersion/src/common/softfloat/source/s_approxRecipSqrt_1Ks.c
e7890082ce426d88b4ec93893da32e306478c0d1 - NVIDIA-kernel-module-source-TempVersion/src/common/softfloat/source/s_approxRecipSqrt32_1.c
2db07bbb8242bc55a24ef483af6d648db0660de0 - NVIDIA-kernel-module-source-TempVersion/src/common/softfloat/source/f32_add.c
9266c83f3e50093cc45d7be6ab993a0e72af1685 - NVIDIA-kernel-module-source-TempVersion/src/common/softfloat/source/s_roundPackToF64.c
00ab2120f71117161d4f6daaa9b90a3036a99841 - NVIDIA-kernel-module-source-TempVersion/src/common/softfloat/source/f32_to_ui32.c
824383b03952c611154bea0a862da2b9e2a43827 - NVIDIA-kernel-module-source-TempVersion/src/common/softfloat/source/s_subMagsF32.c
68843a93e1f46195243ef1164f611b759cf19d17 - NVIDIA-kernel-module-source-TempVersion/src/common/softfloat/source/f32_le_quiet.c
e4930e155580a0f5aa7f3694a6205bc9aebfe7aa - NVIDIA-kernel-module-source-TempVersion/src/common/softfloat/source/f32_to_f64.c
054b23a974fc8d0bab232be433c4e516e6c1250a - NVIDIA-kernel-module-source-TempVersion/src/common/softfloat/source/f64_lt_quiet.c
0d8e42636a3409a647291fdb388001c2b11bba07 - NVIDIA-kernel-module-source-TempVersion/src/common/softfloat/source/f32_to_f16.c
d9a86343e6cc75714f65f690082dd4b0ba724be9 - NVIDIA-kernel-module-source-TempVersion/src/common/softfloat/source/s_roundPackToF16.c
1dd1b424087d9c872684df0c1b4063b077992d5f - NVIDIA-kernel-module-source-TempVersion/src/common/softfloat/source/8086-SSE/s_f64UIToCommonNaN.c
86cda6550cb02bbf595d1667573e4be83702a95e - NVIDIA-kernel-module-source-TempVersion/src/common/softfloat/source/8086-SSE/specialize.h
21a11759ed2afd746a47c4d78b67640c2d052165 - NVIDIA-kernel-module-source-TempVersion/src/common/softfloat/source/8086-SSE/s_commonNaNToF32UI.c
a6d5c83f6a0542b33ac9c23ac65ef69002cfff9d - NVIDIA-kernel-module-source-TempVersion/src/common/softfloat/source/8086-SSE/s_propagateNaNF32UI.c
3d0dbc0a672d039a6346e1c21ddf87ffc9181978 - NVIDIA-kernel-module-source-TempVersion/src/common/softfloat/source/8086-SSE/s_f32UIToCommonNaN.c
252c816378fddab616b1f2a61e9fedd549224483 - NVIDIA-kernel-module-source-TempVersion/src/common/softfloat/source/8086-SSE/s_commonNaNToF64UI.c
d8b0c55a49c4fa0b040541db6d5ff634d7d103e7 - NVIDIA-kernel-module-source-TempVersion/src/common/softfloat/source/8086-SSE/s_propagateNaNF64UI.c
d152bc457b655725185bdff42b36bb96d6e6715e - NVIDIA-kernel-module-source-TempVersion/src/common/softfloat/source/8086-SSE/s_commonNaNToF16UI.c
0cbae7a5abc336331d460cbd3640d2cda02af434 - NVIDIA-kernel-module-source-TempVersion/src/common/softfloat/source/8086-SSE/softfloat_raiseFlags.c
1ded4df85ff5fa904fa54c27d681265425be1658 - NVIDIA-kernel-module-source-TempVersion/src/common/softfloat/source/include/primitiveTypes.h
f36c896cfa01f1de9f9420189319e4e00c7fc52a - NVIDIA-kernel-module-source-TempVersion/src/common/softfloat/source/include/internals.h
9645e179cf888bcd0e3836e8126b204b4b42b315 - NVIDIA-kernel-module-source-TempVersion/src/common/softfloat/source/include/softfloat.h
de09949a0ca5cd2a84b882b5b5c874d01d3ae11a - NVIDIA-kernel-module-source-TempVersion/src/common/softfloat/source/include/primitives.h
4cd1d6cfca3936a39aab9bc0eb622f5c7c848be1 - NVIDIA-kernel-module-source-TempVersion/src/common/softfloat/source/include/softfloat_types.h
b882497ae393bf66a728dae395b64ac53602a1a5 - NVIDIA-kernel-module-source-TempVersion/src/common/softfloat/nvidia/nv-softfloat.h
be9407a273620c0ba619b53ed72d59d52620c3e4 - NVIDIA-kernel-module-source-TempVersion/src/common/softfloat/nvidia/platform.h
91e9bc3214d6bb9b20bc8001d85fe8699df5184a - NVIDIA-kernel-module-source-TempVersion/src/common/sdk/nvidia/inc/nvos.h
88399279bd5e31b6e77cb32c7ef6220ce529526b - NVIDIA-kernel-module-source-TempVersion/src/common/sdk/nvidia/inc/nv-hypervisor.h
f28f98589e65b71e47dbcb2c4230538ae0545e75 - NVIDIA-kernel-module-source-TempVersion/src/common/sdk/nvidia/inc/cpuopsys.h
4b7414705ce10f0a1e312c36a43824b59d572661 - NVIDIA-kernel-module-source-TempVersion/src/common/sdk/nvidia/inc/nvmisc.h
af0bc90b3ad4767de53b8ff91e246fdab0146e8b - NVIDIA-kernel-module-source-TempVersion/src/common/sdk/nvidia/inc/nvsecurityinfo.h
a506a41b8dcf657fb39a740ffc1dfd83835d6c89 - NVIDIA-kernel-module-source-TempVersion/src/common/sdk/nvidia/inc/nvcfg_sdk.h
b249abc0a7d0c9889008e98cb2f8515a9d310b85 - NVIDIA-kernel-module-source-TempVersion/src/common/sdk/nvidia/inc/nvgputypes.h
ae60d53603c7ddbbd72d4e16ce2951f3d42aed32 - NVIDIA-kernel-module-source-TempVersion/src/common/sdk/nvidia/inc/nverror.h
a31b82c454df785a1d7893af38e83443cfe6f2fc - NVIDIA-kernel-module-source-TempVersion/src/common/sdk/nvidia/inc/nvdisptypes.h
ffa91e1110a5cc286ec44a7bda5461b2be941ea2 - NVIDIA-kernel-module-source-TempVersion/src/common/sdk/nvidia/inc/nv_vgpu_types.h
9bca638f5832d831880f090c583fac6fc8cf6ee6 - NVIDIA-kernel-module-source-TempVersion/src/common/sdk/nvidia/inc/dpringbuffertypes.h
821a01976045d7c3d2ac35b0f115e90a9e95f8e8 - NVIDIA-kernel-module-source-TempVersion/src/common/sdk/nvidia/inc/nvimpshared.h
1e7eec6561b04d2d21c3515987aaa116e9401c1f - NVIDIA-kernel-module-source-TempVersion/src/common/sdk/nvidia/inc/nv-kernel-interface-api.h
3b12d770f8592b94a8c7774c372e80ad08c5774c - NVIDIA-kernel-module-source-TempVersion/src/common/sdk/nvidia/inc/nvi2c.h
befb2c0bf0a31b61be5469575ce3c73a9204f4e9 - NVIDIA-kernel-module-source-TempVersion/src/common/sdk/nvidia/inc/nv_stdarg.h
5d8de06378994201e91c2179d149c0edcd694900 - NVIDIA-kernel-module-source-TempVersion/src/common/sdk/nvidia/inc/nvstatuscodes.h
95bf694a98ba78d5a19e66463b8adda631e6ce4c - NVIDIA-kernel-module-source-TempVersion/src/common/sdk/nvidia/inc/nvstatus.h
50d31a6d133b0ea9230f9dc1b701ce16a88a7935 - NVIDIA-kernel-module-source-TempVersion/src/common/sdk/nvidia/inc/rs_access.h
eb42327a2b948b79edc04d9145c7aa5b2a2b420e - NVIDIA-kernel-module-source-TempVersion/src/common/sdk/nvidia/inc/nvlimits.h
9f2e225f027f5a04d1104d29a0039cd2bb7dd85a - NVIDIA-kernel-module-source-TempVersion/src/common/sdk/nvidia/inc/nvfixedtypes.h
a9bf4969ae3e39cc315b6180ee7055e0ad1279c6 - NVIDIA-kernel-module-source-TempVersion/src/common/sdk/nvidia/inc/nvtypes.h
00e9a0ace4b59958a8b048229fb22b4d9e2f8864 - NVIDIA-kernel-module-source-TempVersion/src/common/sdk/nvidia/inc/ctrl/ctrl90cd.h
3449834cb8b8c630ab1de6df30503c846b26e86b - NVIDIA-kernel-module-source-TempVersion/src/common/sdk/nvidia/inc/ctrl/ctrl90ec.h
f779cd0470e428160fc590b590f2cd4855950058 - NVIDIA-kernel-module-source-TempVersion/src/common/sdk/nvidia/inc/ctrl/ctrl402c.h
7c4aef225d174ecbe1130d63b8e8ff752bddf48e - NVIDIA-kernel-module-source-TempVersion/src/common/sdk/nvidia/inc/ctrl/ctrl0041.h
5abe75cf18a2fede23529194b406c3cf742edced - NVIDIA-kernel-module-source-TempVersion/src/common/sdk/nvidia/inc/ctrl/ctrlxxxx.h
c8490da9f200f4dbbac7ebe636f3a83485f3001c - NVIDIA-kernel-module-source-TempVersion/src/common/sdk/nvidia/inc/ctrl/ctrl0073.h
1022bba330a71b92dcc81f47ba460209fcc70cd0 - NVIDIA-kernel-module-source-TempVersion/src/common/sdk/nvidia/inc/ctrl/ctrl0002.h
b72318d58806bfd25f922107a606b222baa2e28c - NVIDIA-kernel-module-source-TempVersion/src/common/sdk/nvidia/inc/ctrl/ctrl30f1.h
7a0c878431a9b0d9dda117f165946b1cdf8ebbde - NVIDIA-kernel-module-source-TempVersion/src/common/sdk/nvidia/inc/ctrl/ctrl0020.h
e2d8133537e2687df022c6a966c55fbfea1974f3 - NVIDIA-kernel-module-source-TempVersion/src/common/sdk/nvidia/inc/ctrl/ctrl0004.h
9c6a4f1d864b5161564869b19f8cb2ce9d629c1d - NVIDIA-kernel-module-source-TempVersion/src/common/sdk/nvidia/inc/ctrl/ctrl003e.h
0639d6cd553994aff4195e8e7547eebf8e713145 - NVIDIA-kernel-module-source-TempVersion/src/common/sdk/nvidia/inc/ctrl/ctrl0080.h
79204c26eb58ee812cc2f72ee1f6d4d7d93817c7 - NVIDIA-kernel-module-source-TempVersion/src/common/sdk/nvidia/inc/ctrl/ctrl2080.h
ea9aac6f0e23f0de444ac3919c35e4b78c18c942 - NVIDIA-kernel-module-source-TempVersion/src/common/sdk/nvidia/inc/ctrl/ctrl0080/ctrl0080bif.h
f7435e356d54d682a949734574388abbe7ffe1d0 - NVIDIA-kernel-module-source-TempVersion/src/common/sdk/nvidia/inc/ctrl/ctrl0080/ctrl0080base.h
64f849ed19609320461b8938f24f0b40fb1a35b0 - NVIDIA-kernel-module-source-TempVersion/src/common/sdk/nvidia/inc/ctrl/ctrl0080/ctrl0080cipher.h
d107e41878b5bc50a5c8b29684122c9589625a6f - NVIDIA-kernel-module-source-TempVersion/src/common/sdk/nvidia/inc/ctrl/ctrl0080/ctrl0080perf.h
f4a4eeb35e15e0642d1bf4e2e5b31394f4cbbfa1 - NVIDIA-kernel-module-source-TempVersion/src/common/sdk/nvidia/inc/ctrl/ctrl0080/ctrl0080host.h
b7b0360b1a6ca78267fa10f7adcd370da86513c3 - NVIDIA-kernel-module-source-TempVersion/src/common/sdk/nvidia/inc/ctrl/ctrl0080/ctrl0080bsp.h
862a17958488d69ca3e92c42ee1bed55cb299fa4 - NVIDIA-kernel-module-source-TempVersion/src/common/sdk/nvidia/inc/ctrl/ctrl0080/ctrl0080gpu.h
bb4182eeea20779f62165d2d50ed209b6a07e54e - NVIDIA-kernel-module-source-TempVersion/src/common/sdk/nvidia/inc/ctrl/ctrl0080/ctrl0080msenc.h
b7f2957f506dc285acb87d41d34cfd60408b00ae - NVIDIA-kernel-module-source-TempVersion/src/common/sdk/nvidia/inc/ctrl/ctrl0080/ctrl0080rc.h
c72f147e8fb78126d13567278239acfcd9b9cc1f - NVIDIA-kernel-module-source-TempVersion/src/common/sdk/nvidia/inc/ctrl/ctrl0080/ctrl0080internal.h
8dd5acedc0b1613314eb3fe9130a9c282bd49ca1 - NVIDIA-kernel-module-source-TempVersion/src/common/sdk/nvidia/inc/ctrl/ctrl0080/ctrl0080clk.h
681c94b982e29049638814f6c1e4eb508f8b0bf3 - NVIDIA-kernel-module-source-TempVersion/src/common/sdk/nvidia/inc/ctrl/ctrl0080/ctrl0080unix.h
3646710984d5c3024d16f9ab346222ad6dfdb4f0 - NVIDIA-kernel-module-source-TempVersion/src/common/sdk/nvidia/inc/ctrl/ctrl0080/ctrl0080fifo.h
6c34803c213ea0a28114bc921e1867cefebec088 - NVIDIA-kernel-module-source-TempVersion/src/common/sdk/nvidia/inc/ctrl/ctrl0080/ctrl0080dma.h
76c9f104e04a8fd9e73e03ad59b2e72264c5f169 - NVIDIA-kernel-module-source-TempVersion/src/common/sdk/nvidia/inc/ctrl/ctrl0080/ctrl0080nvjpg.h
9e61da81ecdff15d63f9ae8a1c2f0960b820c65c - NVIDIA-kernel-module-source-TempVersion/src/common/sdk/nvidia/inc/ctrl/ctrl0080/ctrl0080gr.h
dac18fcaf5d652b21f84cfba455f4f5972e786c5 - NVIDIA-kernel-module-source-TempVersion/src/common/sdk/nvidia/inc/ctrl/ctrl0080/ctrl0080fb.h
d51e47795dfe1fc0bae31b9379d6a39ac4d3080f - NVIDIA-kernel-module-source-TempVersion/src/common/sdk/nvidia/inc/ctrl/ctrl2080/ctrl2080bios.h
8a613db1c31724a577c4718752c15d9754882f48 - NVIDIA-kernel-module-source-TempVersion/src/common/sdk/nvidia/inc/ctrl/ctrl2080/ctrl2080lpwr.h
3966d65c9701bf97c807cf87838a08cda10f418d - NVIDIA-kernel-module-source-TempVersion/src/common/sdk/nvidia/inc/ctrl/ctrl2080/ctrl2080tmr.h
a1830232f18afe44230d6a8598c50b3fc7656089 - NVIDIA-kernel-module-source-TempVersion/src/common/sdk/nvidia/inc/ctrl/ctrl2080/ctrl2080nvd.h
2dd40e3e41d74de3865bc700acc9ab7e0540c647 - NVIDIA-kernel-module-source-TempVersion/src/common/sdk/nvidia/inc/ctrl/ctrl2080/ctrl2080gpio.h
f97e7f88aa17788bbbebf55807e449c0ee016384 - NVIDIA-kernel-module-source-TempVersion/src/common/sdk/nvidia/inc/ctrl/ctrl2080/ctrl2080ucodefuzzer.h
b2b6b3b413ae17af1afde2fc8672cd1bf48e7b19 - NVIDIA-kernel-module-source-TempVersion/src/common/sdk/nvidia/inc/ctrl/ctrl2080/ctrl2080illum.h
3c7130d0613d3c8baef6b23bb63c6ee7a10ed21b - NVIDIA-kernel-module-source-TempVersion/src/common/sdk/nvidia/inc/ctrl/ctrl2080/ctrl2080cipher.h
39f5e838aa6ab007c56e7a59c7d2986d1a7aa34a - NVIDIA-kernel-module-source-TempVersion/src/common/sdk/nvidia/inc/ctrl/ctrl2080/ctrl2080ce.h
6679d97e3852ed78ee44780408c523b94f426ca4 - NVIDIA-kernel-module-source-TempVersion/src/common/sdk/nvidia/inc/ctrl/ctrl2080/ctrl2080perf_cf_pwr_model.h
090f908931690302e3a2c77f3ce41c4de0c61efc - NVIDIA-kernel-module-source-TempVersion/src/common/sdk/nvidia/inc/ctrl/ctrl2080/ctrl2080vfe.h
7c4e426dee0ae86c00b3bd10873a1a2bd94ed3b2 - NVIDIA-kernel-module-source-TempVersion/src/common/sdk/nvidia/inc/ctrl/ctrl2080/ctrl2080power.h
5bdddb9a949a78443f83a7da81ad5fee8a300c44 - NVIDIA-kernel-module-source-TempVersion/src/common/sdk/nvidia/inc/ctrl/ctrl2080/ctrl2080perf_cf.h
d084d99035f4cc34cd803ff4a5328b9e10ea77fc - NVIDIA-kernel-module-source-TempVersion/src/common/sdk/nvidia/inc/ctrl/ctrl2080/ctrl2080boardobj.h
4b8fa2ce546ae3f06b7dc61df3d534449cdb5b2d - NVIDIA-kernel-module-source-TempVersion/src/common/sdk/nvidia/inc/ctrl/ctrl2080/ctrl2080clkavfs.h
8855ee8bad2f2169ebd147e7ac77d9f1340cbad8 - NVIDIA-kernel-module-source-TempVersion/src/common/sdk/nvidia/inc/ctrl/ctrl2080/ctrl2080spi.h
82a2e7a2fc6501163d07870f3f640a591f4a8996 - NVIDIA-kernel-module-source-TempVersion/src/common/sdk/nvidia/inc/ctrl/ctrl2080/ctrl2080volt.h
f3a855fe7a91c2acf2be41629ce906996e01a9fc - NVIDIA-kernel-module-source-TempVersion/src/common/sdk/nvidia/inc/ctrl/ctrl2080/ctrl2080perf.h
3d8e37aa8485aadf55335d8f9f913273d90a2442 - NVIDIA-kernel-module-source-TempVersion/src/common/sdk/nvidia/inc/ctrl/ctrl2080/ctrl2080boardobjgrpclasses.h
da220a5608a0e4c73fa0315b13e2b29d92b114e9 - NVIDIA-kernel-module-source-TempVersion/src/common/sdk/nvidia/inc/ctrl/ctrl2080/ctrl2080dmabuf.h
6834a9c75265c25adfb03f0b2dbfe0559f28cadf - NVIDIA-kernel-module-source-TempVersion/src/common/sdk/nvidia/inc/ctrl/ctrl2080/ctrl2080dma.h
051dbfd1d5ff02b2771bc9b3fad8aaef29aab9ae - NVIDIA-kernel-module-source-TempVersion/src/common/sdk/nvidia/inc/ctrl/ctrl2080/ctrl2080base.h
c3a75647f5ca6cd7b456511af36a9de6d90329c3 - NVIDIA-kernel-module-source-TempVersion/src/common/sdk/nvidia/inc/ctrl/ctrl2080/ctrl2080nvlink.h
82364e263f43ea028c2d66db58887958bdef64b0 - NVIDIA-kernel-module-source-TempVersion/src/common/sdk/nvidia/inc/ctrl/ctrl2080/ctrl2080fb.h
143c1c24ec926142d1f84dec7a543f2b98541545 - NVIDIA-kernel-module-source-TempVersion/src/common/sdk/nvidia/inc/ctrl/ctrl2080/ctrl2080fuse.h
1684a3a8111fd3d83363cebe68d016a54eaaf686 - NVIDIA-kernel-module-source-TempVersion/src/common/sdk/nvidia/inc/ctrl/ctrl2080/ctrl2080clk.h
72292c9844eaf24c38967dd4a879c0c0f070a0de - NVIDIA-kernel-module-source-TempVersion/src/common/sdk/nvidia/inc/ctrl/ctrl2080/ctrl2080gr.h
091f7bac99f5c786a64b6fa59d9d27af786bab10 - NVIDIA-kernel-module-source-TempVersion/src/common/sdk/nvidia/inc/ctrl/ctrl2080/ctrl2080acr.h
c0181e959c1ba5ebfc3f130c8764687b58453f9b - NVIDIA-kernel-module-source-TempVersion/src/common/sdk/nvidia/inc/ctrl/ctrl2080/ctrl2080fla.h
2a11fc0a499f8293b83e08572f5e6be04bd1da61 - NVIDIA-kernel-module-source-TempVersion/src/common/sdk/nvidia/inc/ctrl/ctrl2080/ctrl2080gpu.h
a44d2f1b31b8ec124355018204909df19df09748 - NVIDIA-kernel-module-source-TempVersion/src/common/sdk/nvidia/inc/ctrl/ctrl2080/ctrl2080unix.h
8ef946f1d7545277ef64891b45a29db44c4e9913 - NVIDIA-kernel-module-source-TempVersion/src/common/sdk/nvidia/inc/ctrl/ctrl2080/ctrl2080fan.h
774fd1e730d1d853bf97946f7ecd24c6648c7af4 - NVIDIA-kernel-module-source-TempVersion/src/common/sdk/nvidia/inc/ctrl/ctrl2080/ctrl2080hshub.h
22d828c87b223f937c589a0e863a25d95b734371 - NVIDIA-kernel-module-source-TempVersion/src/common/sdk/nvidia/inc/ctrl/ctrl2080/ctrl2080event.h
7d3819683e9f562a87f36a3e23c043b2b6fd814e - NVIDIA-kernel-module-source-TempVersion/src/common/sdk/nvidia/inc/ctrl/ctrl2080/ctrl2080rc.h
7d27fafff043d290b2ec1d2dddbecea2f1df4704 - NVIDIA-kernel-module-source-TempVersion/src/common/sdk/nvidia/inc/ctrl/ctrl2080/ctrl2080gsp.h
27ad8b5c2406fcd572cd098dd215e93ae1db99e3 - NVIDIA-kernel-module-source-TempVersion/src/common/sdk/nvidia/inc/ctrl/ctrl2080/ctrl2080mc.h
783db6da0b92b6b8ae26b180129beb0bccb13a5b - NVIDIA-kernel-module-source-TempVersion/src/common/sdk/nvidia/inc/ctrl/ctrl2080/ctrl2080thermal.h
e6f6beaed64167088608027b442f5449cff027c1 - NVIDIA-kernel-module-source-TempVersion/src/common/sdk/nvidia/inc/ctrl/ctrl2080/ctrl2080common.h
6b4418e269bb97b9996b05ea153ccd195c661e11 - NVIDIA-kernel-module-source-TempVersion/src/common/sdk/nvidia/inc/ctrl/ctrl2080/ctrl2080fifo.h
0ac7e4eb4d952c84c6f4e697cbfcb355069377c2 - NVIDIA-kernel-module-source-TempVersion/src/common/sdk/nvidia/inc/ctrl/ctrl2080/ctrl2080flcn.h
1651ec548a2899391a05bc6463b3f7162c7807ab - NVIDIA-kernel-module-source-TempVersion/src/common/sdk/nvidia/inc/ctrl/ctrl2080/ctrl2080internal.h
bc22bf13b7d99ee6f80c30b569e084a2b03e385a - NVIDIA-kernel-module-source-TempVersion/src/common/sdk/nvidia/inc/ctrl/ctrl2080/ctrl2080grmgr.h
1ebfe9f0f9a7d2dd2873df82bbc78b1ec982ca93 - NVIDIA-kernel-module-source-TempVersion/src/common/sdk/nvidia/inc/ctrl/ctrl2080/ctrl2080pmumon.h
291f91212d5a37aae46a2944cf89f4b74b1d1809 - NVIDIA-kernel-module-source-TempVersion/src/common/sdk/nvidia/inc/ctrl/ctrl2080/ctrl2080pmgr.h
82aa4d6108ce6abebcbbc95afcb7a6350e287f5f - NVIDIA-kernel-module-source-TempVersion/src/common/sdk/nvidia/inc/ctrl/ctrl2080/ctrl2080i2c.h
c4474dc1f53661c67d8fce5303dcc636d9ad3b8f - NVIDIA-kernel-module-source-TempVersion/src/common/sdk/nvidia/inc/ctrl/ctrl2080/ctrl2080gpumon.h
18814de559257f07bad8a0a9006ac9751fcfa1cb - NVIDIA-kernel-module-source-TempVersion/src/common/sdk/nvidia/inc/ctrl/ctrl2080/ctrl2080ecc.h
e9d692b06c70951dbbd0663a89f822153bce1146 - NVIDIA-kernel-module-source-TempVersion/src/common/sdk/nvidia/inc/ctrl/ctrl2080/ctrl2080bus.h
1248e113751f8ed9e4111e86a7f7fb632b102eca - NVIDIA-kernel-module-source-TempVersion/src/common/sdk/nvidia/inc/ctrl/ctrl0073/ctrl0073stereo.h
b921747a65c67fa093de08fa782c164d048824b0 - NVIDIA-kernel-module-source-TempVersion/src/common/sdk/nvidia/inc/ctrl/ctrl0073/ctrl0073system.h
7e0773f7bf13350a9fd25b0df4d6c45a55a008df - NVIDIA-kernel-module-source-TempVersion/src/common/sdk/nvidia/inc/ctrl/ctrl0073/ctrl0073event.h
8fd661537cc4eb55c167b9daae404bfb82408bfe - NVIDIA-kernel-module-source-TempVersion/src/common/sdk/nvidia/inc/ctrl/ctrl0073/ctrl0073svp.h
f88f1c519a242dfa71221bdcdafc7deab14d8503 - NVIDIA-kernel-module-source-TempVersion/src/common/sdk/nvidia/inc/ctrl/ctrl0073/ctrl0073dp.h
ccc48726d7da49cddc4d4f86d8dbd2ad585f7b38 - NVIDIA-kernel-module-source-TempVersion/src/common/sdk/nvidia/inc/ctrl/ctrl0073/ctrl0073base.h
3dc187adc0a848e68f62a6a7eb99ac02ee6502cc - NVIDIA-kernel-module-source-TempVersion/src/common/sdk/nvidia/inc/ctrl/ctrl0073/ctrl0073dpu.h
f3b81a241efe1224798b17c062e33936469c3c2b - NVIDIA-kernel-module-source-TempVersion/src/common/sdk/nvidia/inc/ctrl/ctrl0073/ctrl0073internal.h
09dedebdcff3244ab8f607a7152e9116d821f9c1 - NVIDIA-kernel-module-source-TempVersion/src/common/sdk/nvidia/inc/ctrl/ctrl0073/ctrl0073specific.h
440314f66374d35a1628ee8bd61836a80ab421eb - NVIDIA-kernel-module-source-TempVersion/src/common/sdk/nvidia/inc/ctrl/ctrl0073/ctrl0073dfp.h
92be535d68a7f18088921faa3f1742298ad341c3 - NVIDIA-kernel-module-source-TempVersion/src/common/sdk/nvidia/inc/ctrl/ctrl0073/ctrl0073psr.h
84fb76f9cff38c797b139cba40175717591d49df - NVIDIA-kernel-module-source-TempVersion/src/common/sdk/nvidia/inc/ctrl/ctrl5070/ctrl5070common.h
2f92bebccb9da5246b19bd13ff0e6e79de79bc3b - NVIDIA-kernel-module-source-TempVersion/src/common/sdk/nvidia/inc/ctrl/ctrl5070/ctrl5070verif.h
aec1b750866e34f9626e48c535336f93c5c246fa - NVIDIA-kernel-module-source-TempVersion/src/common/sdk/nvidia/inc/ctrl/ctrl5070/ctrl5070seq.h
9031642283b59ee6d52e2e1ca54332df5c2f7acc - NVIDIA-kernel-module-source-TempVersion/src/common/sdk/nvidia/inc/ctrl/ctrl5070/ctrl5070rg.h
e10cbe4875736ef16072232789dd3f48647c022f - NVIDIA-kernel-module-source-TempVersion/src/common/sdk/nvidia/inc/ctrl/ctrl5070/ctrl5070impoverrides.h
91cccede5c4f26a6b6ca7ba4bc292f3d908a88d4 - NVIDIA-kernel-module-source-TempVersion/src/common/sdk/nvidia/inc/ctrl/ctrl5070/ctrl5070base.h
f47136417885a729f9c5dee375ec9dec1bd170e0 - NVIDIA-kernel-module-source-TempVersion/src/common/sdk/nvidia/inc/ctrl/ctrl5070/ctrl5070chnc.h
f523fe4a55a6a9d01f41f9f34ff149ed75b2e739 - NVIDIA-kernel-module-source-TempVersion/src/common/sdk/nvidia/inc/ctrl/ctrl5070/ctrl5070event.h
ad7604ced12ee18c569d2a7ebe71e185ebff3fd4 - NVIDIA-kernel-module-source-TempVersion/src/common/sdk/nvidia/inc/ctrl/ctrl5070/ctrl5070system.h
209ef519cb73395cea7d66016448ebc3c6bf6fe4 - NVIDIA-kernel-module-source-TempVersion/src/common/sdk/nvidia/inc/ctrl/ctrl5070/ctrl5070or.h
4a3e7d71b9169d703d9373ff80b02a63825a80e4 - NVIDIA-kernel-module-source-TempVersion/src/common/sdk/nvidia/inc/ctrl/ctrl0000/ctrl0000unix.h
4d9116d23d27a3fc39c366f2685243b83ef7d485 - NVIDIA-kernel-module-source-TempVersion/src/common/sdk/nvidia/inc/ctrl/ctrl0000/ctrl0000diag.h
abe79ad927e7c70b7c1a8eb687052a782efcd5f4 - NVIDIA-kernel-module-source-TempVersion/src/common/sdk/nvidia/inc/ctrl/ctrl0000/ctrl0000nvd.h
ef180860a1ccbcb9f5d2f8a6656a345eef76a2a7 - NVIDIA-kernel-module-source-TempVersion/src/common/sdk/nvidia/inc/ctrl/ctrl0000/ctrl0000base.h
f7e56d494fea02515180f21b0f56ae0aff583be4 - NVIDIA-kernel-module-source-TempVersion/src/common/sdk/nvidia/inc/ctrl/ctrl0000/ctrl0000gpuacct.h
b66a45c83c84f6d458ef19fd7e0f972f2eabd109 - NVIDIA-kernel-module-source-TempVersion/src/common/sdk/nvidia/inc/ctrl/ctrl0000/ctrl0000vgpu.h
2518a62952c72ee6f3447bc8dc417129f6ac26a4 - NVIDIA-kernel-module-source-TempVersion/src/common/sdk/nvidia/inc/ctrl/ctrl0000/ctrl0000system.h
9373c51ca29afec3368fb5b8c2a2f05b0920f291 - NVIDIA-kernel-module-source-TempVersion/src/common/sdk/nvidia/inc/ctrl/ctrl0000/ctrl0000gpu.h
0ee647b929e55cf39da7e26ffc0f027676fa52fa - NVIDIA-kernel-module-source-TempVersion/src/common/sdk/nvidia/inc/ctrl/ctrl0000/ctrl0000syncgpuboost.h
6e5b278451308efbb6911a8ab03b0feba504d035 - NVIDIA-kernel-module-source-TempVersion/src/common/sdk/nvidia/inc/ctrl/ctrl0000/ctrl0000client.h
c905766589d17fcb99a5d73846ed61f7b7db56fe - NVIDIA-kernel-module-source-TempVersion/src/common/sdk/nvidia/inc/ctrl/ctrl0000/ctrl0000event.h
323fcc6af8c30d5ef292ae90810c5c2fa2009e20 - NVIDIA-kernel-module-source-TempVersion/src/common/sdk/nvidia/inc/ctrl/ctrl0000/ctrl0000proc.h
382dc80790d870047db7cea957ef208d4439801e - NVIDIA-kernel-module-source-TempVersion/src/common/sdk/nvidia/inc/ctrl/ctrl0000/ctrl0000gspc.h
825f4d976c76d375803e42967fdab53e7814d18d - NVIDIA-kernel-module-source-TempVersion/src/common/sdk/nvidia/inc/ctrl/ctrl0000/ctrl0000gsync.h
8294d43d202a9cd78367f2e69388a6c6f2c369f7 - NVIDIA-kernel-module-source-TempVersion/src/common/sdk/nvidia/inc/ctrl/ctrlc372/ctrlc372base.h
cf78a847e0882e1d164eccdb86ea033126019599 - NVIDIA-kernel-module-source-TempVersion/src/common/sdk/nvidia/inc/ctrl/ctrlc372/ctrlc372chnc.h
76c31150e2f589fbb96cfc06cdc6c1801e128656 - NVIDIA-kernel-module-source-TempVersion/src/common/sdk/nvidia/inc/ctrl/ctrlc370/ctrlc370base.h
7f5548026751a8caaebc245945ccdc4bb037b566 - NVIDIA-kernel-module-source-TempVersion/src/common/sdk/nvidia/inc/ctrl/ctrlc370/ctrlc370chnc.h
7812ba094d95c1b6d65afc6a1d26930400b8b96f - NVIDIA-kernel-module-source-TempVersion/src/common/sdk/nvidia/inc/ctrl/ctrlc370/ctrlc370event.h
f1dae17e75a24c28135cf073bf29f9609a2418e3 - NVIDIA-kernel-module-source-TempVersion/src/common/sdk/nvidia/inc/ctrl/ctrlc370/ctrlc370rg.h
24782552a13f627e2e94ebb5f7021246a0c0dc53 - NVIDIA-kernel-module-source-TempVersion/src/common/sdk/nvidia/inc/ctrl/ctrlc370/ctrlc370verif.h
127f78d2bb92ef3f74effd00c2c67cf7db5382fe - NVIDIA-kernel-module-source-TempVersion/src/common/sdk/nvidia/inc/class/clc67d.h
bb79bbd1b0a37283802bc59f184abe0f9ced08a5 - NVIDIA-kernel-module-source-TempVersion/src/common/sdk/nvidia/inc/class/cl0040.h
4a6444c347825e06bdd62401120553469f79c188 - NVIDIA-kernel-module-source-TempVersion/src/common/sdk/nvidia/inc/class/cl917dcrcnotif.h
2f87e87bcf9f38017ad84417d332a6aa7022c88f - NVIDIA-kernel-module-source-TempVersion/src/common/sdk/nvidia/inc/class/cl9471.h
0d8975eec1e3222694e98eb69ddb2c01accf1ba6 - NVIDIA-kernel-module-source-TempVersion/src/common/sdk/nvidia/inc/class/cl0000_notification.h
c2600834921f8a6aad6a0404076fa76f9bc1c04d - NVIDIA-kernel-module-source-TempVersion/src/common/sdk/nvidia/inc/class/clc37b.h
861b9d7581eab4a2b8cc7269b5d0e0d1294048d1 - NVIDIA-kernel-module-source-TempVersion/src/common/sdk/nvidia/inc/class/cl0005.h
92c2dab6bc48f32f46c6bbc282c63cb4ec7a50bf - NVIDIA-kernel-module-source-TempVersion/src/common/sdk/nvidia/inc/class/cl9170.h
0285aed652c6aedd392092cdf2c7b28fde13a263 - NVIDIA-kernel-module-source-TempVersion/src/common/sdk/nvidia/inc/class/cl00fc.h
dec74b9cf8062f1a0a8bbeca58b4f98722fd94b0 - NVIDIA-kernel-module-source-TempVersion/src/common/sdk/nvidia/inc/class/cl0076.h
a30755b3003023c093f8724cf9a2e0b0c301b586 - NVIDIA-kernel-module-source-TempVersion/src/common/sdk/nvidia/inc/class/cl9010.h
cb610aaae807d182b4a2ee46b9b43ebfa4a49a08 - NVIDIA-kernel-module-source-TempVersion/src/common/sdk/nvidia/inc/class/clc57e.h
bb8d15aee43e1feb76fddf80398e93fd805f1ddb - NVIDIA-kernel-module-source-TempVersion/src/common/sdk/nvidia/inc/class/cl2082.h
02906b5ba8aab0736a38fd1f6d7b4f6026a5185b - NVIDIA-kernel-module-source-TempVersion/src/common/sdk/nvidia/inc/class/clc57esw.h
ccefba28a2c7979701f963f2c358b4414b84ca98 - NVIDIA-kernel-module-source-TempVersion/src/common/sdk/nvidia/inc/class/cl9570.h
2e3d5c71793820d90973d547d8afdf41ff989f89 - NVIDIA-kernel-module-source-TempVersion/src/common/sdk/nvidia/inc/class/clc67a.h
204feb997ba42deab327d570e5f12235d5160f00 - NVIDIA-kernel-module-source-TempVersion/src/common/sdk/nvidia/inc/class/clc57a.h
03ab4e08e8685696477b62eb1a825e5198d61b8a - NVIDIA-kernel-module-source-TempVersion/src/common/sdk/nvidia/inc/class/cl0080.h
545dd1899c6988ffe5f50300232bd862d915cd5b - NVIDIA-kernel-module-source-TempVersion/src/common/sdk/nvidia/inc/class/clc770.h
022e8405220e482f83629dd482efee81cc49f665 - NVIDIA-kernel-module-source-TempVersion/src/common/sdk/nvidia/inc/class/clc77f.h
36b0dd6de0d0b49d435a4662c35d1f4ae5b2b1bc - NVIDIA-kernel-module-source-TempVersion/src/common/sdk/nvidia/inc/class/cl9870.h
02ff42b6686954e4571b8a318575372239db623b - NVIDIA-kernel-module-source-TempVersion/src/common/sdk/nvidia/inc/class/cl30f1_notification.h
82c9df617999f93ebd9362851966f601b8131fdd - NVIDIA-kernel-module-source-TempVersion/src/common/sdk/nvidia/inc/class/clc570.h
eac86d7180236683b86f980f89ec7ebfe6c85791 - NVIDIA-kernel-module-source-TempVersion/src/common/sdk/nvidia/inc/class/cl957d.h
866977d299eac812b41eb702a517e27bdc56e875 - NVIDIA-kernel-module-source-TempVersion/src/common/sdk/nvidia/inc/class/clc37a.h
78259dc2a70da76ef222ac2dc460fe3caa32457a - NVIDIA-kernel-module-source-TempVersion/src/common/sdk/nvidia/inc/class/clc37e.h
31939808cd46382b1c63bc1e0bd4af953302773f - NVIDIA-kernel-module-source-TempVersion/src/common/sdk/nvidia/inc/class/cl977d.h
11fd2de68ab82b81211aa20c66a9a6595199f673 - NVIDIA-kernel-module-source-TempVersion/src/common/sdk/nvidia/inc/class/cl9270.h
05605d914edda157385e430ccdbeb3fcd8ad3c36 - NVIDIA-kernel-module-source-TempVersion/src/common/sdk/nvidia/inc/class/cl9171.h
9db39be032023bff165cd9d36bee2466617015a5 - NVIDIA-kernel-module-source-TempVersion/src/common/sdk/nvidia/inc/class/cl0002.h
76c430d54887ed14cace9409712259e10f042b4c - NVIDIA-kernel-module-source-TempVersion/src/common/sdk/nvidia/inc/class/cl00c1.h
e63ed2e1ff3fe2a5b29cfc334d3da611db2aadf6 - NVIDIA-kernel-module-source-TempVersion/src/common/sdk/nvidia/inc/class/clc37dcrcnotif.h
ea10b0d938d9314638882fdc20b9158a193f7b08 - NVIDIA-kernel-module-source-TempVersion/src/common/sdk/nvidia/inc/class/cl5070.h
f5760f5054538f4ecf04d94fb1582a80a930bc29 - NVIDIA-kernel-module-source-TempVersion/src/common/sdk/nvidia/inc/class/clc673.h
b1133e9abe15cf7b22c04d9627afa2027e781b81 - NVIDIA-kernel-module-source-TempVersion/src/common/sdk/nvidia/inc/class/cl917c.h
9bd9f416844d798f352fcc6c8aaf2c251253c068 - NVIDIA-kernel-module-source-TempVersion/src/common/sdk/nvidia/inc/class/cl90cd.h
04ab1761d913030cb7485149ecd365f2f9c0f7da - NVIDIA-kernel-module-source-TempVersion/src/common/sdk/nvidia/inc/class/cl0005_notification.h
fb5ef3d6734a2ee6baba7981cdf6419d013cee85 - NVIDIA-kernel-module-source-TempVersion/src/common/sdk/nvidia/inc/class/clc671.h
ddbffcce44afa7c07924fd64a608f7f3fe608ccc - NVIDIA-kernel-module-source-TempVersion/src/common/sdk/nvidia/inc/class/cl0071.h
68c953956a63ef8f7f9bcbe71057af510f4597c1 - NVIDIA-kernel-module-source-TempVersion/src/common/sdk/nvidia/inc/class/clb0b5sw.h
38265d86eb7c771d2d3fc5102d53e6a170a7f560 - NVIDIA-kernel-module-source-TempVersion/src/common/sdk/nvidia/inc/class/cl0041.h
941a031920c0b3bb16473a6a3d4ba8c52c1259d7 - NVIDIA-kernel-module-source-TempVersion/src/common/sdk/nvidia/inc/class/cl917e.h
a23967cf3b15eefe0cc37fef5d03dfc716770d85 - NVIDIA-kernel-module-source-TempVersion/src/common/sdk/nvidia/inc/class/clc372sw.h
9b2d08d7a37beea802642f807d40413c7f9a8212 - NVIDIA-kernel-module-source-TempVersion/src/common/sdk/nvidia/inc/class/clc37d.h
e0c9a155f829c158c02c21b49c083168f8b00cbe - NVIDIA-kernel-module-source-TempVersion/src/common/sdk/nvidia/inc/class/clc37dswspare.h
95d99f0805c8451f0f221483b3618e4dbd1e1dd8 - NVIDIA-kernel-module-source-TempVersion/src/common/sdk/nvidia/inc/class/cl90f1.h
8b75d2586151302d181f59d314b6b3f9f80b8986 - NVIDIA-kernel-module-source-TempVersion/src/common/sdk/nvidia/inc/class/clc573.h
ff47d8a4b4bdb3b9cd04ddb7666005ac7fcf2231 - NVIDIA-kernel-module-source-TempVersion/src/common/sdk/nvidia/inc/class/cl003e.h
026f66c4cc7baad36f1af740ae885dae58498e07 - NVIDIA-kernel-module-source-TempVersion/src/common/sdk/nvidia/inc/class/clc371.h
15136a724baab270914a01a8c0e8f2c2c83675b6 - NVIDIA-kernel-module-source-TempVersion/src/common/sdk/nvidia/inc/class/cl00c3.h
4bbb861011139be1c76b521eaa7ae10951d5bf9a - NVIDIA-kernel-module-source-TempVersion/src/common/sdk/nvidia/inc/class/cl2081.h
d1a19dee52b3318714026f4fcc748cfa4681cd25 - NVIDIA-kernel-module-source-TempVersion/src/common/sdk/nvidia/inc/class/clc370.h
158c98c8721d558ab64a025e6fdd04ce7a16ba9e - NVIDIA-kernel-module-source-TempVersion/src/common/sdk/nvidia/inc/class/cl947d.h
435a34753d445eb9711c7132d70bd26df2b8bdab - NVIDIA-kernel-module-source-TempVersion/src/common/sdk/nvidia/inc/class/cl917d.h
326dbbeb275b4fc29f6a7e2e42b32736474fec04 - NVIDIA-kernel-module-source-TempVersion/src/common/sdk/nvidia/inc/class/cl9571.h
1409efc057e4f0d55602f374ec006f9db7ad3926 - NVIDIA-kernel-module-source-TempVersion/src/common/sdk/nvidia/inc/class/cl0000.h
bd27ceb75c4604fef53658f16a5012d97c1534b2 - NVIDIA-kernel-module-source-TempVersion/src/common/sdk/nvidia/inc/class/cl9470.h
e6818f1728a66a70080e87dac15a6f92dd875b4e - NVIDIA-kernel-module-source-TempVersion/src/common/sdk/nvidia/inc/class/cl927d.h
11b19cb8d722146044ad5a12ae96c13ed5b122b6 - NVIDIA-kernel-module-source-TempVersion/src/common/sdk/nvidia/inc/class/cl917b.h
1efc9d4aa038f208cd19533f6188ac3a629bf31a - NVIDIA-kernel-module-source-TempVersion/src/common/sdk/nvidia/inc/class/cl917a.h
c2d8bb02052e80cd0d11695e734f5e05ab7faeb5 - NVIDIA-kernel-module-source-TempVersion/src/common/sdk/nvidia/inc/class/cl907dswspare.h
4b8f95693f79a036317ab2f85e150c102ad782e9 - NVIDIA-kernel-module-source-TempVersion/src/common/sdk/nvidia/inc/class/cl84a0.h
a7c7899429766c092ee3ecf5f672b75bef55216c - NVIDIA-kernel-module-source-TempVersion/src/common/sdk/nvidia/inc/class/cl9271.h
15d1f928a9b3f36065e377e29367577ae92ab065 - NVIDIA-kernel-module-source-TempVersion/src/common/sdk/nvidia/inc/class/cl0080_notification.h
a26ddc6c62faac1ecd5c5f43499aab32c70f32cb - NVIDIA-kernel-module-source-TempVersion/src/common/sdk/nvidia/inc/class/clc67b.h
b29ba657f62f8d8d28a8bdd2976ef3ac8aa6075f - NVIDIA-kernel-module-source-TempVersion/src/common/sdk/nvidia/inc/class/cl0073.h
c5ef1b16b2bd2e33f52b71f2b78db789ebb844f0 - NVIDIA-kernel-module-source-TempVersion/src/common/sdk/nvidia/inc/class/cl9770.h
ecc56a5803b85187aa95b788aedd4fa2262c1bb6 - NVIDIA-kernel-module-source-TempVersion/src/common/sdk/nvidia/inc/class/cl2080.h
dd4f75c438d19c27e52f25b36fc8ded1ce02133c - NVIDIA-kernel-module-source-TempVersion/src/common/sdk/nvidia/inc/class/cl917cswspare.h
6db83e33cb3432f34d4b55c3de222eaf793a90f0 - NVIDIA-kernel-module-source-TempVersion/src/common/sdk/nvidia/inc/class/cl00b1.h
b29ea3f13f501327c060b9ddfac5834ed396414a - NVIDIA-kernel-module-source-TempVersion/src/common/sdk/nvidia/inc/class/cl30f1.h
4d5ccf08ab73343343e0c804002a621996866161 - NVIDIA-kernel-module-source-TempVersion/src/common/sdk/nvidia/inc/class/cl0092.h
593384ce8938ceeec46c782d6869eda3c7b8c274 - NVIDIA-kernel-module-source-TempVersion/src/common/sdk/nvidia/inc/class/cl900e.h
95ca0b08eed54d1c6dd76fdf9cf4715007df1b20 - NVIDIA-kernel-module-source-TempVersion/src/common/sdk/nvidia/inc/class/cl0020.h
c61f8348c2978eef0a07191aaf92bd73e935f7bd - NVIDIA-kernel-module-source-TempVersion/src/common/sdk/nvidia/inc/class/clc67e.h
509c56534ed6d48b06494bb22d3cf58d63254a05 - NVIDIA-kernel-module-source-TempVersion/src/common/sdk/nvidia/inc/class/clc574.h
da8d312d2fdc6012e354df4fa71ed62ae4aac369 - NVIDIA-kernel-module-source-TempVersion/src/common/sdk/nvidia/inc/class/cl927c.h
5416c871e8d50a4e76cbad446030dbedbe1644fd - NVIDIA-kernel-module-source-TempVersion/src/common/sdk/nvidia/inc/class/cl00f2.h
b7a5b31a8c3606aa98ba823e37e21520b55ba95c - NVIDIA-kernel-module-source-TempVersion/src/common/sdk/nvidia/inc/class/cl402c.h
26c3ccc33328a66ad3bcfe999424dffda991264f - NVIDIA-kernel-module-source-TempVersion/src/common/sdk/nvidia/inc/class/clc670.h
28867d69a6ceac83da53a11a5e1ef87d9476f0be - NVIDIA-kernel-module-source-TempVersion/src/common/sdk/nvidia/inc/class/clc57d.h
053e3c0de24348d3f7e7fe9cbd1743f46be7a978 - NVIDIA-kernel-module-source-TempVersion/src/common/sdk/nvidia/inc/class/cl0004.h
060722ac6a529a379375bb399785cbf2380db4fd - NVIDIA-kernel-module-source-TempVersion/src/common/sdk/nvidia/inc/class/clc373.h
13f8e49349460ef0480b74a7043d0591cf3eb68f - NVIDIA-kernel-module-source-TempVersion/src/common/sdk/nvidia/inc/class/clc57b.h
e72a7871d872b2eb823cc67c0a7d4cafb3d0ca18 - NVIDIA-kernel-module-source-TempVersion/src/common/sdk/nvidia/inc/class/cl90ec.h
ba76ecbebe0ed71ea861ed7016abbfc16ced2df7 - NVIDIA-kernel-module-source-TempVersion/src/common/sdk/nvidia/inc/class/cl5070_notification.h
bae36cac0a8d83003ded2305409192995d264d04 - NVIDIA-kernel-module-source-TempVersion/src/common/sdk/nvidia/inc/class/cl0001.h
ab27db8414f1400a3f4d9011e83ac49628b4fe91 - NVIDIA-kernel-module-source-TempVersion/src/common/sdk/nvidia/inc/class/cl987d.h
70b155b0da07a92ede884a9cec715f67e6b5c3e8 - NVIDIA-kernel-module-source-TempVersion/src/common/displayport/src/dp_list.cpp
c70d946adb4029b3476873887488748162b88b0b - NVIDIA-kernel-module-source-TempVersion/src/common/displayport/src/dp_messagecodings.cpp
ac08ccd5c2e3fadf10ae53e46e582489d1579ed0 - NVIDIA-kernel-module-source-TempVersion/src/common/displayport/src/dp_edid.cpp
6fd536d1849ea4cce5d9b72d1dcbc1db9c818b4e - NVIDIA-kernel-module-source-TempVersion/src/common/displayport/src/dp_groupimpl.cpp
d63fed0074b22584686ad4d0cdaa4388b42194d6 - NVIDIA-kernel-module-source-TempVersion/src/common/displayport/src/dp_watermark.cpp
a5df56b2cf8df9d4d8ab6fa2b3521649ef09384a - NVIDIA-kernel-module-source-TempVersion/src/common/displayport/src/dp_wardatabase.cpp
f56f92e32710b0342805b785d34ba1a9f2a54ed3 - NVIDIA-kernel-module-source-TempVersion/src/common/displayport/src/dp_guid.cpp
554e6b7dadbb68ac0f3d2e368ca3fd90832ea254 - NVIDIA-kernel-module-source-TempVersion/src/common/displayport/src/dp_discovery.cpp
60994cb1131d4d37b2d3fce6cc59dfea5ebb4129 - NVIDIA-kernel-module-source-TempVersion/src/common/displayport/src/dp_connectorimpl.cpp
37eabb1ab51cb38660eb24e294c63c8320750b96 - NVIDIA-kernel-module-source-TempVersion/src/common/displayport/src/dp_sst_edid.cpp
a0d24a4bd71f999adbaa876168adef5a7d95f2b8 - NVIDIA-kernel-module-source-TempVersion/src/common/displayport/src/dp_configcaps.cpp
fa4f4869d3d63c0180f30ae3736600a6627284c6 - NVIDIA-kernel-module-source-TempVersion/src/common/displayport/src/dp_merger.cpp
d991afdb694634e9df756184b5951739fc3fd0ab - NVIDIA-kernel-module-source-TempVersion/src/common/displayport/src/dp_auxretry.cpp
1543bbaba8f3e149239cf44be3c0d080c624d5ba - NVIDIA-kernel-module-source-TempVersion/src/common/displayport/src/dp_buffer.cpp
56ee9318a7b51a04baa1d25d7d9a798c733dc1bc - NVIDIA-kernel-module-source-TempVersion/src/common/displayport/src/dp_vrr.cpp
9f31213ab8037d7bb18c96a67d2630d61546544a - NVIDIA-kernel-module-source-TempVersion/src/common/displayport/src/dp_mst_edid.cpp
fea946e5320e7de8e9229bca8d4a6a14b9e8db59 - NVIDIA-kernel-module-source-TempVersion/src/common/displayport/src/dp_crc.cpp
719d2ddbfb8555636496cb5dd74ee6776059db92 - NVIDIA-kernel-module-source-TempVersion/src/common/displayport/src/dp_timer.cpp
f83b3c17e9f26651f12c8835a682abdd66aed3a2 - NVIDIA-kernel-module-source-TempVersion/src/common/displayport/src/dp_splitter.cpp
e874ffeaeb6deec57605bf91eaa2af116a9762bd - NVIDIA-kernel-module-source-TempVersion/src/common/displayport/src/dp_bitstream.cpp
c62ef84471074a9ed428b4a03e644885989b0b83 - NVIDIA-kernel-module-source-TempVersion/src/common/displayport/src/dp_evoadapter.cpp
38fe8122aba8a1bc5745d81192ec7fc75934dd0d - NVIDIA-kernel-module-source-TempVersion/src/common/displayport/src/dp_deviceimpl.cpp
66e91795dc65e1bc13c545a84556d200c8eb7bd5 - NVIDIA-kernel-module-source-TempVersion/src/common/displayport/src/dp_messages.cpp
4803cde0fffcf89fed46d6deaeba5c96c669a908 - NVIDIA-kernel-module-source-TempVersion/src/common/displayport/src/dp_messageheader.cpp
fe8007b3d98dad71b17595ecb67af77b198827a0 - NVIDIA-kernel-module-source-TempVersion/src/common/displayport/src/dptestutil/dp_testmessage.cpp
62d03d24af041276ba2abb96fa1634ae4f99ea8a - NVIDIA-kernel-module-source-TempVersion/src/common/displayport/inc/dp_connectorimpl.h
aeadcb0bc061b5db0fdf8aa67c1b5703976aa946 - NVIDIA-kernel-module-source-TempVersion/src/common/displayport/inc/dp_connector.h
01f1dd58ed5bb12503fa45be7a6657cde0a857e2 - NVIDIA-kernel-module-source-TempVersion/src/common/displayport/inc/dp_guid.h
07d22f84e6a386dad251761278a828dab64b6dd5 - NVIDIA-kernel-module-source-TempVersion/src/common/displayport/inc/dp_bitstream.h
11487c992494f502d1c48ff00982998504336800 - NVIDIA-kernel-module-source-TempVersion/src/common/displayport/inc/dp_internal.h
f6e1b0850f5ed0f23f263d4104523d9290bb8669 - NVIDIA-kernel-module-source-TempVersion/src/common/displayport/inc/dp_vrr.h
02b65d96a7a345eaa87042faf6dd94052235009c - NVIDIA-kernel-module-source-TempVersion/src/common/displayport/inc/dp_messageheader.h
e27519c72e533a69f7433638a1d292fb9df8772e - NVIDIA-kernel-module-source-TempVersion/src/common/displayport/inc/dp_crc.h
543efa25367763292067245cbc39c1382c35df77 - NVIDIA-kernel-module-source-TempVersion/src/common/displayport/inc/dp_discovery.h
39aece5465100489867001bf57446bcfc4999c24 - NVIDIA-kernel-module-source-TempVersion/src/common/displayport/inc/dp_evoadapter.h
6e515f398e9ae1b603e49ec32576ccd0ce5d8828 - NVIDIA-kernel-module-source-TempVersion/src/common/displayport/inc/dp_messagecodings.h
070b4f6216f19feebb6a67cbb9c3eb22dc60cf74 - NVIDIA-kernel-module-source-TempVersion/src/common/displayport/inc/dp_buffer.h
36e80dd13c5adc64c3adc9a931d5ebbf922e9502 - NVIDIA-kernel-module-source-TempVersion/src/common/displayport/inc/dp_groupimpl.h
7974abf146f1f14cd3e3854ef63ddf52ebbeb222 - NVIDIA-kernel-module-source-TempVersion/src/common/displayport/inc/dp_deviceimpl.h
cdb1e7797c250b0a7c0449e2df5ce71e42b83432 - NVIDIA-kernel-module-source-TempVersion/src/common/displayport/inc/dp_merger.h
0f747fdf03bebdcd86dbdf16d00ee2d044bc906c - NVIDIA-kernel-module-source-TempVersion/src/common/displayport/inc/dp_messages.h
325818d0a4d1b15447923e2ed92c938d293dc079 - NVIDIA-kernel-module-source-TempVersion/src/common/displayport/inc/dp_hostimp.h
2067e2ca3b86014c3e6dfc51d6574d87ae12d907 - NVIDIA-kernel-module-source-TempVersion/src/common/displayport/inc/dp_timer.h
d876d77caef3541ae05f310857f3d32e642fba04 - NVIDIA-kernel-module-source-TempVersion/src/common/displayport/inc/dp_auxdefs.h
78595e6262d5ab0e6232392dc0852feaf83c7585 - NVIDIA-kernel-module-source-TempVersion/src/common/displayport/inc/dp_auxbus.h
b4d8c44957efc90ba97092987e6e43c48e85ac86 - NVIDIA-kernel-module-source-TempVersion/src/common/displayport/inc/dp_address.h
78c6d7d85b47636fbb21153425ef90c6d0b2d4e2 - NVIDIA-kernel-module-source-TempVersion/src/common/displayport/inc/dp_configcaps.h
3b74682e142e94b1c68bf619169f12e5805044bc - NVIDIA-kernel-module-source-TempVersion/src/common/displayport/inc/dp_watermark.h
8f83883126b853c97e5859dafd98847ec54d36ac - NVIDIA-kernel-module-source-TempVersion/src/common/displayport/inc/dp_splitter.h
7b7d9a137027fbbedfc041465987fa4ed4198ce4 - NVIDIA-kernel-module-source-TempVersion/src/common/displayport/inc/dp_edid.h
cca426d571c6b01f7953180e2e550e55c629f0f4 - NVIDIA-kernel-module-source-TempVersion/src/common/displayport/inc/dp_auxretry.h
80380945c76c58648756446435d615f74630f2da - NVIDIA-kernel-module-source-TempVersion/src/common/displayport/inc/dp_timeout.h
e2075486b392d6b231f2f133922ac096ca4bc095 - NVIDIA-kernel-module-source-TempVersion/src/common/displayport/inc/dp_ringbuffer.h
3eea80c74a22de43b6edad21ea5873c791e093e2 - NVIDIA-kernel-module-source-TempVersion/src/common/displayport/inc/dp_mainlink.h
d1e8c84f279cb30978d32c784107c0247afa6e66 - NVIDIA-kernel-module-source-TempVersion/src/common/displayport/inc/dp_linkconfig.h
750ecc85242882a9e428d5a5cf1a64f418d59c5f - NVIDIA-kernel-module-source-TempVersion/src/common/displayport/inc/dp_object.h
379d3933c90eaf9c35a0bad2bd6af960a321465f - NVIDIA-kernel-module-source-TempVersion/src/common/displayport/inc/dp_wardatabase.h
e02e5621eaea52a2266a86dcd587f4714680caf4 - NVIDIA-kernel-module-source-TempVersion/src/common/displayport/inc/dp_linkedlist.h
5dff32bd1018e2c5c2540ea7fb571dbea596d5b1 - NVIDIA-kernel-module-source-TempVersion/src/common/displayport/inc/dp_regkeydatabase.h
4a098c4d09dedc33b86748d5fe9a30d097675e9f - NVIDIA-kernel-module-source-TempVersion/src/common/displayport/inc/dp_list.h
5bd3706ceea585df76a75dda7f9581b91ee8f998 - NVIDIA-kernel-module-source-TempVersion/src/common/displayport/inc/dp_tracing.h
020194b85245bad5de4dfe372a7ccb0c247d6ede - NVIDIA-kernel-module-source-TempVersion/src/common/displayport/inc/dptestutil/dp_testmessage.h
2f60ba753549b232e1b995046a356dbe0eced04a - NVIDIA-kernel-module-source-TempVersion/src/common/shared/nvstatus/nvstatus.c
ebccc5c2af2863509e957fe98b01d9a14d8b0367 - NVIDIA-kernel-module-source-TempVersion/src/common/inc/nv_list.h
cd902d07cc83444b150453d7baefd0e234c26ac2 - NVIDIA-kernel-module-source-TempVersion/src/common/inc/nvUnixVersion.h
b85b49fc4ed38a241c79731a02b3b040a654a52a - NVIDIA-kernel-module-source-TempVersion/src/common/inc/nvctassert.h
764e5c4364922e3953b4db0411d1d3c3bdac99f4 - NVIDIA-kernel-module-source-TempVersion/src/common/inc/nvlog_defs.h
8f0d91e1a8f0d3474fb91dc3e6234e55d2c79fcc - NVIDIA-kernel-module-source-TempVersion/src/common/inc/rmosxfac.h
f59a2759281341e56372d3cb37b16715944dd8e1 - NVIDIA-kernel-module-source-TempVersion/src/common/inc/nvPNPVendorIds.h
e015e955a05908d4a2202213353eac89f1b80ff6 - NVIDIA-kernel-module-source-TempVersion/src/common/inc/nvSha1.h
b58ed1b4372a5c84d5f3755b7090b196179a2729 - NVIDIA-kernel-module-source-TempVersion/src/common/inc/nv_speculation_barrier.h
b4c5d759f035b540648117b1bff6b1701476a398 - NVIDIA-kernel-module-source-TempVersion/src/common/inc/nvCpuUuid.h
4282574b39d1bcaf394b63aca8769bb52462b89b - NVIDIA-kernel-module-source-TempVersion/src/common/inc/nvBinSegment.h
a27eb14c54c6acb647a95c264b90e25f07fc757e - NVIDIA-kernel-module-source-TempVersion/src/common/inc/nvBldVer.h
5257e84f2048b01258c78cec70987f158f6b0c44 - NVIDIA-kernel-module-source-TempVersion/src/common/inc/nvlog_inc.h
963aebc9ec7bcb9c445eee419f72289b21680cdd - NVIDIA-kernel-module-source-TempVersion/src/common/inc/hdmi_spec.h
62e510fa46465f69e9c55fabf1c8124bee3091c4 - NVIDIA-kernel-module-source-TempVersion/src/common/inc/nvHdmiFrlCommon.h
3bf0416186ee90833c727f01cc891bd568ea9d0f - NVIDIA-kernel-module-source-TempVersion/src/common/inc/nvVer.h
a346380cebac17412b4efc0aef2fad27c33b8fb5 - NVIDIA-kernel-module-source-TempVersion/src/common/inc/nvlog_inc2.h
d2b4cc6228c4b13ef77e47bf30326826c5662ed4 - NVIDIA-kernel-module-source-TempVersion/src/common/inc/swref/published/nv_ref.h
06aa739230c00998e039b0104e5d73da85c322fe - NVIDIA-kernel-module-source-TempVersion/src/common/inc/swref/published/nv_arch.h
86a59440492fd6f869aef3509f0e64a492b4550d - NVIDIA-kernel-module-source-TempVersion/src/common/inc/swref/published/turing/tu102/dev_mmu.h
38edc89fd4148b5b013b9e07081ba1e9b34516ac - NVIDIA-kernel-module-source-TempVersion/src/common/inc/swref/published/turing/tu102/kind_macros.h
f9311a35f375c7453d99fdde3876440b54d4cb5a - NVIDIA-kernel-module-source-TempVersion/src/common/inc/swref/published/disp/v03_00/dev_disp.h
1ea0c3d6ea0c79c01accc7b25d15b421ab49a55d - NVIDIA-kernel-module-source-TempVersion/src/common/inc/swref/published/disp/v04_02/dev_disp.h
a26df21c3cc3eeb395428101f11da68386e0d72b - NVIDIA-kernel-module-source-TempVersion/src/common/inc/displayport/dpcd14.h
8159b4189c577d545c1280d7d905a2dc2ba29fa7 - NVIDIA-kernel-module-source-TempVersion/src/common/inc/displayport/dpcd.h
96b9560d322f43a980db5d6cc5072e9e81fdb9d2 - NVIDIA-kernel-module-source-TempVersion/src/common/inc/displayport/displayport.h
249d4f7317ce68c3ceb64e2b1ee257cc75eb002b - NVIDIA-kernel-module-source-TempVersion/src/common/inc/displayport/dpcd20.h
e1b414712accfd7c690b2fdf7139f0aaf865fc47 - NVIDIA-kernel-module-source-TempVersion/src/nvidia-modeset/Makefile
17855f638fd09abfec7d188e49b396793a9f6106 - NVIDIA-kernel-module-source-TempVersion/src/nvidia-modeset/os-interface/include/nvkms.h
8a935bdda64e1d701279ef742b973c5dbed5727b - NVIDIA-kernel-module-source-TempVersion/src/nvidia-modeset/os-interface/include/nvidia-modeset-os-interface.h
16a2e187afedf93bade7967816b0723708544e0d - NVIDIA-kernel-module-source-TempVersion/src/nvidia-modeset/include/nvkms-modeset-workarea.h
20213d53bb52bf9f38400e35d7963d0f4db22f96 - NVIDIA-kernel-module-source-TempVersion/src/nvidia-modeset/include/nvkms-evo-states.h
70d9251f331bbf28f5c5bbdf939ebad94db9362d - NVIDIA-kernel-module-source-TempVersion/src/nvidia-modeset/include/nvkms-softfloat.h
8a6f26ccf2e563b78f6e189c999ba470ed35271d - NVIDIA-kernel-module-source-TempVersion/src/nvidia-modeset/include/nvkms-evo.h
853d9005ec695cb5a1c7966a1f93fe0c9c8278cf - NVIDIA-kernel-module-source-TempVersion/src/nvidia-modeset/include/nvkms-hdmi.h
d4889d903bf4de06d85e55b005206ed57f28af69 - NVIDIA-kernel-module-source-TempVersion/src/nvidia-modeset/include/nvkms-lut.h
6b21a68e254becdd2641bc456f194f54c23abe51 - NVIDIA-kernel-module-source-TempVersion/src/nvidia-modeset/include/nvkms-framelock.h
c1c7047929aafc849a924c7fa9f8bc206b8e7524 - NVIDIA-kernel-module-source-TempVersion/src/nvidia-modeset/include/g_nvkms-evo-states.h
71e8c5d3c4dfec6f2261654c3fc91210bff78da9 - NVIDIA-kernel-module-source-TempVersion/src/nvidia-modeset/include/nvkms-surface.h
64af1df50d2a5b827c1c829a303844de20527522 - NVIDIA-kernel-module-source-TempVersion/src/nvidia-modeset/include/nvkms-rm.h
260b6ef87c755e55a803adad4ce49f2d57315f9a - NVIDIA-kernel-module-source-TempVersion/src/nvidia-modeset/include/nvkms-event.h
4f5d723c80f607a0e5f797835d561795dbe40ada - NVIDIA-kernel-module-source-TempVersion/src/nvidia-modeset/include/nvkms-cursor.h
f5f3b11c78a8b0eef40c09e1751615a47f516edb - NVIDIA-kernel-module-source-TempVersion/src/nvidia-modeset/include/nvkms-hal.h
d3f5bc85b538a3a1d4c2389c81001be91205ec9f - NVIDIA-kernel-module-source-TempVersion/src/nvidia-modeset/include/nvkms-modeset-types.h
9c90df1fa1b6dd33a7e330c47e94b5b9194ad419 - NVIDIA-kernel-module-source-TempVersion/src/nvidia-modeset/include/nvkms-3dvision.h
be3a1682574426c1bf75fcdf88278c18f2783c3f - NVIDIA-kernel-module-source-TempVersion/src/nvidia-modeset/include/nvkms-dpy.h
8f1994f3f8d100ddcf8b23f5b24872bed939d885 - NVIDIA-kernel-module-source-TempVersion/src/nvidia-modeset/include/nvkms-vrr.h
75e8a8747795fad89b4d2b662477e5454863dcc7 - NVIDIA-kernel-module-source-TempVersion/src/nvidia-modeset/include/nvkms-flip.h
d7861e2373ac04ffaf6c15caeba887f727aa41fb - NVIDIA-kernel-module-source-TempVersion/src/nvidia-modeset/include/nvkms-dma.h
182a47c12496b8b7da1c4fe7035d6b36d7316322 - NVIDIA-kernel-module-source-TempVersion/src/nvidia-modeset/include/nvkms-prealloc-types.h
248d900394aa2b58669300af4f5d26eac23edd23 - NVIDIA-kernel-module-source-TempVersion/src/nvidia-modeset/include/nvkms-types.h
ef78e73ec9c0b8341bd83306d1f3b2c35e20c43a - NVIDIA-kernel-module-source-TempVersion/src/nvidia-modeset/include/nvkms-utils.h
867e3091a945d3d43b2f28393b40edeb9d27597b - NVIDIA-kernel-module-source-TempVersion/src/nvidia-modeset/include/nvkms-rmapi.h
c1904d38785649d2614563d0cd7de28a15ce4486 - NVIDIA-kernel-module-source-TempVersion/src/nvidia-modeset/include/nvkms-modeset.h
cc09ecd5ab724b244017929444309f8e77fc5a63 - NVIDIA-kernel-module-source-TempVersion/src/nvidia-modeset/include/nvkms-modepool.h
412d8028a548e67e9ef85cb7d3f88385e70c56f9 - NVIDIA-kernel-module-source-TempVersion/src/nvidia-modeset/include/nvkms-console-restore.h
33dbf734c9757c2c40adb2fb185e964870217743 - NVIDIA-kernel-module-source-TempVersion/src/nvidia-modeset/include/nvkms-flip-workarea.h
ebafc51b2b274cd1818e471850a5efa9618eb17d - NVIDIA-kernel-module-source-TempVersion/src/nvidia-modeset/include/nvkms-prealloc.h
4020b2a0d4f177c143db40b33d122017416dfa2e - NVIDIA-kernel-module-source-TempVersion/src/nvidia-modeset/include/nvkms-evo1.h
be6e0e97c1e7ffc0daa2f14ef7b05b9f9c11dc16 - NVIDIA-kernel-module-source-TempVersion/src/nvidia-modeset/include/nvkms-attributes.h
9dd131355ed1e25a7cee7bfef00501cf6427ae92 - NVIDIA-kernel-module-source-TempVersion/src/nvidia-modeset/include/nvkms-private.h
17f6fbbd5e0a75faec21347b691f44dcb65c01aa - NVIDIA-kernel-module-source-TempVersion/src/nvidia-modeset/include/dp/nvdp-connector.h
4625828efd425e1b29835ab91fcc3d2d85e92389 - NVIDIA-kernel-module-source-TempVersion/src/nvidia-modeset/include/dp/nvdp-connector-event-sink.h
a8fbb7a071c0e7b326f384fed7547e7b6ec81c3e - NVIDIA-kernel-module-source-TempVersion/src/nvidia-modeset/include/dp/nvdp-timer.h
52b6c19cce320677bd3a4dfcf1698b236f29e59e - NVIDIA-kernel-module-source-TempVersion/src/nvidia-modeset/include/dp/nvdp-device.h
a0cc9f36fdd73c99ad8f264efa58043d42353b0a - NVIDIA-kernel-module-source-TempVersion/src/nvidia-modeset/lib/nvkms-sync.c
381fba24abae75d98b3ada184ed0cd57335819a9 - NVIDIA-kernel-module-source-TempVersion/src/nvidia-modeset/lib/nvkms-format.c
281fdc23f82d8bdb94b26d0093b444eb0c056f51 - NVIDIA-kernel-module-source-TempVersion/src/nvidia-modeset/interface/nvkms-sync.h
445a409950ab8f36cfa24d1dc73e59718d335263 - NVIDIA-kernel-module-source-TempVersion/src/nvidia-modeset/interface/nvkms-api.h
2ea1436104463c5e3d177e8574c3b4298976d37e - NVIDIA-kernel-module-source-TempVersion/src/nvidia-modeset/interface/nvkms-ioctl.h
5c4c05e5a638888babb5a8af2f0a61c94ecd150b - NVIDIA-kernel-module-source-TempVersion/src/nvidia-modeset/interface/nvkms-format.h
910255a4d92e002463175a28e38c3f24716fb654 - NVIDIA-kernel-module-source-TempVersion/src/nvidia-modeset/interface/nvkms-api-types.h
e48c2ec8145a6f2099dddb24d2900e3ae94ec02e - NVIDIA-kernel-module-source-TempVersion/src/nvidia-modeset/kapi/include/nvkms-kapi-internal.h
727bd77cfbc9ac4989c2ab7eec171ceb516510aa - NVIDIA-kernel-module-source-TempVersion/src/nvidia-modeset/kapi/include/nvkms-kapi-notifiers.h
009cd8e2b7ee8c0aeb05dac44cc84fc8f6f37c06 - NVIDIA-kernel-module-source-TempVersion/src/nvidia-modeset/kapi/interface/nvkms-kapi.h
fb242aa7a53983118ee019415076033e596374af - NVIDIA-kernel-module-source-TempVersion/src/nvidia-modeset/kapi/interface/nvkms-kapi-private.h
f6875ef0da055900ef6ef1da5dc94cba2837e4d0 - NVIDIA-kernel-module-source-TempVersion/src/nvidia-modeset/kapi/src/nvkms-kapi-channelevent.c
01d943d6edb0c647c2b8dbc44460948665b03e7a - NVIDIA-kernel-module-source-TempVersion/src/nvidia-modeset/kapi/src/nvkms-kapi-notifiers.c
394ea31caa5957cfb2c8bb8c3cc0e4703213fe7f - NVIDIA-kernel-module-source-TempVersion/src/nvidia-modeset/kapi/src/nvkms-kapi.c
3f978853dfa0435b746ff8c954b8e5e5f0451b43 - NVIDIA-kernel-module-source-TempVersion/src/nvidia-modeset/src/nvkms-modepool.c
85ddb19f89833ca57fd2deff2e2b4566e162a56c - NVIDIA-kernel-module-source-TempVersion/src/nvidia-modeset/src/nvkms-hal.c
8415bcd6ab34e356374659e965790a0715ed7971 - NVIDIA-kernel-module-source-TempVersion/src/nvidia-modeset/src/nvkms-prealloc.c
c98f76bcfc7c654a619762ebc3a2599f9aa89f8d - NVIDIA-kernel-module-source-TempVersion/src/nvidia-modeset/src/nvkms-3dvision.c
5fb73f35841c41e7376531732cb12303224e61ad - NVIDIA-kernel-module-source-TempVersion/src/nvidia-modeset/src/nvkms-lut.c
e9626eee225e58ec2d5be756c5015775ca5e54b9 - NVIDIA-kernel-module-source-TempVersion/src/nvidia-modeset/src/nvkms-vrr.c
86da3c7c09354d2c49d95562aba15cbedb543d9b - NVIDIA-kernel-module-source-TempVersion/src/nvidia-modeset/src/nvkms-evo1.c
89baced4cf1a96b7693c9e2f85b01093bbba73f7 - NVIDIA-kernel-module-source-TempVersion/src/nvidia-modeset/src/nvkms-rm.c
7ef594aea1e80408148c3661477a4edc6e8d8d50 - NVIDIA-kernel-module-source-TempVersion/src/nvidia-modeset/src/nvkms-flip.c
07c2f10473e2fbe921b2781cc107b5e56e6373e3 - NVIDIA-kernel-module-source-TempVersion/src/nvidia-modeset/src/nvkms-attributes.c
d28cd72c8dca4cb54a15630b80026eca57a9ed80 - NVIDIA-kernel-module-source-TempVersion/src/nvidia-modeset/src/nvkms-evo.c
da726d20eea99a96af4c10aace88f419e8ee2a34 - NVIDIA-kernel-module-source-TempVersion/src/nvidia-modeset/src/nvkms-event.c
5c79c271609ebcc739f8d73d7d47f0b376298438 - NVIDIA-kernel-module-source-TempVersion/src/nvidia-modeset/src/nvkms-rmapi-dgpu.c
b55665d7bceaad04bbf29a68f44536518302c3d6 - NVIDIA-kernel-module-source-TempVersion/src/nvidia-modeset/src/nvkms-evo2.c
6b79c2ce1658722fa6b3a70fb5e36f37c40d8f96 - NVIDIA-kernel-module-source-TempVersion/src/nvidia-modeset/src/nvkms-modeset.c
1918ca3aa611cd9dfc79d46d038ab22706f0b1ed - NVIDIA-kernel-module-source-TempVersion/src/nvidia-modeset/src/nvkms-cursor3.c
add6682206360cb899ae13bae6dc2c19d830d7b7 - NVIDIA-kernel-module-source-TempVersion/src/nvidia-modeset/src/nvkms-dpy.c
c2870190ca4c4d5b3a439386583d0a7c193d6263 - NVIDIA-kernel-module-source-TempVersion/src/nvidia-modeset/src/nvkms-hw-states.c
f27f52dc428a6adeb936c8cf99e1fc2d8b0ad667 - NVIDIA-kernel-module-source-TempVersion/src/nvidia-modeset/src/nvkms-dma.c
5acf19920d56793d96c80e8461b0d0213c871b34 - NVIDIA-kernel-module-source-TempVersion/src/nvidia-modeset/src/nvkms-surface.c
c2d0e6bef0c4929a3ca4adfd74bd6168fa4aa000 - NVIDIA-kernel-module-source-TempVersion/src/nvidia-modeset/src/nvkms-framelock.c
673ad86616f9863766bfec0e118c918297d32010 - NVIDIA-kernel-module-source-TempVersion/src/nvidia-modeset/src/g_nvkms-evo-states.c
c799d52bdc792efc377fb5cd307b0eb445c44d6a - NVIDIA-kernel-module-source-TempVersion/src/nvidia-modeset/src/nvkms-cursor2.c
0d39e349fdf33d550497527fc8d43f14e752df6c - NVIDIA-kernel-module-source-TempVersion/src/nvidia-modeset/src/nvkms-hdmi.c
8f22c278a5839d36f74f85469b2d927d9265cb80 - NVIDIA-kernel-module-source-TempVersion/src/nvidia-modeset/src/nvkms-utils.c
eb09642e8b5d9333699f817caaf20483c840b376 - NVIDIA-kernel-module-source-TempVersion/src/nvidia-modeset/src/nvkms.c
ab17e5b4cafa92aa03691a0c187ef8c9ae53fa59 - NVIDIA-kernel-module-source-TempVersion/src/nvidia-modeset/src/nvkms-cursor.c
574b1268ff83e4e5ed4da15609247a5c0ec8f51b - NVIDIA-kernel-module-source-TempVersion/src/nvidia-modeset/src/nvkms-console-restore.c
b44193cbf1371ca1abfda36e705edbad1d473e88 - NVIDIA-kernel-module-source-TempVersion/src/nvidia-modeset/src/nvkms-evo3.c
8af6062034d464f778969e26d3bf5a9b4cdaccf0 - NVIDIA-kernel-module-source-TempVersion/src/nvidia-modeset/src/dp/nvdp-connector.cpp
69fed95ab3954dd5cb26590d02cd8ba09cdff1ac - NVIDIA-kernel-module-source-TempVersion/src/nvidia-modeset/src/dp/nvdp-connector-event-sink.hpp
6b985fc50b5040ce1a81418bed73a60edb5d3289 - NVIDIA-kernel-module-source-TempVersion/src/nvidia-modeset/src/dp/nvdp-timer.hpp
f2a05c29383bfc8631ad31909f31a8351501eb27 - NVIDIA-kernel-module-source-TempVersion/src/nvidia-modeset/src/dp/nvdp-device.cpp
31767fd551f3c89e5b00f54147b6a8e8fa3320e3 - NVIDIA-kernel-module-source-TempVersion/src/nvidia-modeset/src/dp/nvdp-connector-event-sink.cpp
110ac212ee8832c3fa3c4f45d6d33eed0301e992 - NVIDIA-kernel-module-source-TempVersion/src/nvidia-modeset/src/dp/nvdp-host.cpp
51af3c1ee6b74ee0c9add3fb7d50cbc502980789 - NVIDIA-kernel-module-source-TempVersion/src/nvidia-modeset/src/dp/nvdp-evo-interface.hpp
f96cd982b4c05351faa31d04ac30d6fa7c866bcb - NVIDIA-kernel-module-source-TempVersion/src/nvidia-modeset/src/dp/nvdp-timer.cpp
f6c3e8bd4ee13970737e96f9d9a3e4d8afdf9695 - NVIDIA-kernel-module-source-TempVersion/src/nvidia-modeset/src/dp/nvdp-evo-interface.cpp
9767fbc3273e17e7b2e68374bfab0824bea34add - NVIDIA-kernel-module-source-TempVersion/src/nvidia/Makefile
c5f16fdf43ca3d2845d120c219d1da11257072b0 - NVIDIA-kernel-module-source-TempVersion/src/nvidia/nv-kernel.ld
d1089d8ee0ffcdbf73a42d7c4edb90769aa79d8c - NVIDIA-kernel-module-source-TempVersion/src/nvidia/arch/nvalloc/common/inc/nvrangetypes.h
aba0bd796d932fa19e8fad55ed683ae57d68bffb - NVIDIA-kernel-module-source-TempVersion/src/nvidia/arch/nvalloc/unix/include/nv-priv.h
1d8b347e4b92c340a0e9eac77e0f63b9fb4ae977 - NVIDIA-kernel-module-source-TempVersion/src/nvidia/arch/nvalloc/unix/include/nv-ioctl-numbers.h
499e72dad20bcc283ee307471f8539b315211da4 - NVIDIA-kernel-module-source-TempVersion/src/nvidia/arch/nvalloc/unix/include/nv-unix-nvos-params-wrappers.h
40cb3c112bbcb6ae83a9186d0c9fa1857cf6a126 - NVIDIA-kernel-module-source-TempVersion/src/nvidia/arch/nvalloc/unix/include/os-interface.h
1b53bbf5f8452b8057ff2dd7828947a047db38d0 - NVIDIA-kernel-module-source-TempVersion/src/nvidia/arch/nvalloc/unix/include/nv_escape.h
3a26838c4edd3525daa68ac6fc7b06842dc6fc07 - NVIDIA-kernel-module-source-TempVersion/src/nvidia/arch/nvalloc/unix/include/nv-gpu-info.h
e3679844971ecc4447259fb1bdf4fafbbdff2395 - NVIDIA-kernel-module-source-TempVersion/src/nvidia/arch/nvalloc/unix/include/osapi.h
4750735d6f3b334499c81d499a06a654a052713d - NVIDIA-kernel-module-source-TempVersion/src/nvidia/arch/nvalloc/unix/include/nv-caps.h
1e89b4a52a5cdc6cac511ff148c7448d53cf5d5c - NVIDIA-kernel-module-source-TempVersion/src/nvidia/arch/nvalloc/unix/include/os_custom.h
d576ede913ef8cf4347ef0e8dbfe9c2d992b7965 - NVIDIA-kernel-module-source-TempVersion/src/nvidia/arch/nvalloc/unix/include/nv.h
ddfedb3b81feb09ea9daadf1a7f63f6309ee6e3b - NVIDIA-kernel-module-source-TempVersion/src/nvidia/arch/nvalloc/unix/include/rmobjexportimport.h
9c7b09c55aabbd670c860bdaf8ec9e8ff254b5e9 - NVIDIA-kernel-module-source-TempVersion/src/nvidia/arch/nvalloc/unix/include/nv-kernel-rmapi-ops.h
cc3b2163238b2a8acb7e3ca213fb1ae6c5f0a409 - NVIDIA-kernel-module-source-TempVersion/src/nvidia/arch/nvalloc/unix/include/osfuncs.h
2f5fec803685c61c13f7955baaed056b5524652c - NVIDIA-kernel-module-source-TempVersion/src/nvidia/arch/nvalloc/unix/include/nv-ioctl.h
e08f597ce97fb1691bcea37b4d017831a457d027 - NVIDIA-kernel-module-source-TempVersion/src/nvidia/arch/nvalloc/unix/include/nv-reg.h
6ebda7ea5b17b7b9bfa9387fc838db9f0c3405a5 - NVIDIA-kernel-module-source-TempVersion/src/nvidia/arch/nvalloc/unix/src/osinit.c
b5b409625fde1b640e4e93276e35248f0fccfa4c - NVIDIA-kernel-module-source-TempVersion/src/nvidia/arch/nvalloc/unix/src/gcc_helper.c
9d9035afd7af31f30cdbf2d4c75e5e09180f0981 - NVIDIA-kernel-module-source-TempVersion/src/nvidia/arch/nvalloc/unix/src/osunix.c
21ac9d6932199ce0755dbead297eb03c9900f8c9 - NVIDIA-kernel-module-source-TempVersion/src/nvidia/arch/nvalloc/unix/src/power-management-tegra.c
49dc935d4475b572478c63324f0832c972a4277d - NVIDIA-kernel-module-source-TempVersion/src/nvidia/arch/nvalloc/unix/src/os.c
532366fd9a288a812eca78b92b304ba3625f8c0a - NVIDIA-kernel-module-source-TempVersion/src/nvidia/arch/nvalloc/unix/src/exports-stubs.c
006e77a594ae98067059ad3d7e93821316859063 - NVIDIA-kernel-module-source-TempVersion/src/nvidia/arch/nvalloc/unix/src/os-hypervisor-stubs.c
f134270af5ecd7c5ba91bf5228fe3166b101dd6e - NVIDIA-kernel-module-source-TempVersion/src/nvidia/arch/nvalloc/unix/src/escape.c
690927567b5344c8030e2c52d91f824bb94e956c - NVIDIA-kernel-module-source-TempVersion/src/nvidia/arch/nvalloc/unix/src/registry.c
53cd45a8121f8acb72be746e389246e1424176f7 - NVIDIA-kernel-module-source-TempVersion/src/nvidia/arch/nvalloc/unix/src/osapi.c
05b5aa5ad6a7df974f05608622ae260d70a550db - NVIDIA-kernel-module-source-TempVersion/src/nvidia/arch/nvalloc/unix/src/osmemdesc.c
fb5272f3d0e465aedbc99ddcabb1c6c428837a6e - NVIDIA-kernel-module-source-TempVersion/src/nvidia/arch/nvalloc/unix/src/rmobjexportimport.c
0cff83f4fdcc8d025cd68e0a12faaeead09fa03b - NVIDIA-kernel-module-source-TempVersion/src/nvidia/kernel/inc/tmr.h
7df66a87c9498ae73c986e60fcb9cb1cbcd19e19 - NVIDIA-kernel-module-source-TempVersion/src/nvidia/kernel/inc/objrpc.h
1feab39692ea8796ac7675f4780dfd51e6e16326 - NVIDIA-kernel-module-source-TempVersion/src/nvidia/kernel/inc/objtmr.h
28d6a6ae495d9bc032c084980ebf5d94448bcf29 - NVIDIA-kernel-module-source-TempVersion/src/nvidia/kernel/inc/vgpu/rpc_headers.h
31deee778df2651d3d21b4d9c8ab180b8dc1ff14 - NVIDIA-kernel-module-source-TempVersion/src/nvidia/kernel/inc/vgpu/rpc_vgpu.h
961ed81de50e67eadf163a3a8008ce1fde1d880c - NVIDIA-kernel-module-source-TempVersion/src/nvidia/kernel/inc/vgpu/rpc_hal_stubs.h
4db7387cc1ce08ccc62404b80b19c7f1b685e746 - NVIDIA-kernel-module-source-TempVersion/src/nvidia/kernel/inc/vgpu/rpc.h
e4d88af4eb51d32288f913d90e490e329884970b - NVIDIA-kernel-module-source-TempVersion/src/nvidia/kernel/inc/vgpu/rpc_global_enums.h
35da37c070544f565d0f1de82abc7569b5df06af - NVIDIA-kernel-module-source-TempVersion/src/nvidia/interface/nv_firmware_types.h
df4d313c66e75fa9f4a1ff8ea2c389a6ecd6eb3d - NVIDIA-kernel-module-source-TempVersion/src/nvidia/interface/acpigenfuncs.h
bff92c9767308a13df1d0858d5f9c82af155679a - NVIDIA-kernel-module-source-TempVersion/src/nvidia/interface/nvacpitypes.h
db0dc6915302888de06e3aa094d961cfe25e0059 - NVIDIA-kernel-module-source-TempVersion/src/nvidia/interface/nvrm_registry.h
059c1ab76a5f097593f0f8a79203e14a9cec6287 - NVIDIA-kernel-module-source-TempVersion/src/nvidia/interface/deprecated/rmapi_deprecated_utils.c
d50ff73efaf5bc7e9cb3f67ed07ede01e8fad6f6 - NVIDIA-kernel-module-source-TempVersion/src/nvidia/interface/deprecated/rmapi_deprecated.h
671286de97aa63201a363fd7a22c92ee8afe4c7c - NVIDIA-kernel-module-source-TempVersion/src/nvidia/src/kernel/gpu/eng_state.c
6fa4ba2da905692cd39ec09054f2bd6621aa2a7a - NVIDIA-kernel-module-source-TempVersion/src/nvidia/src/kernel/gpu/gpu_resource_desc.c
5a97d4f8ce101908f1a67ffe9cc8ed00b6bf43b2 - NVIDIA-kernel-module-source-TempVersion/src/nvidia/src/kernel/gpu/gpu_resource.c
1653c7b99cfc86db6692d9d8d6de19f1b24b9071 - NVIDIA-kernel-module-source-TempVersion/src/nvidia/src/kernel/gpu/gpu_uuid.c
caf2b80fa0f01b9a3efcd8326bf6375455f2e1b9 - NVIDIA-kernel-module-source-TempVersion/src/nvidia/src/kernel/gpu/gpu_access.c
4e1be780ac696a61f056933e5550040a2d42c6bd - NVIDIA-kernel-module-source-TempVersion/src/nvidia/src/kernel/gpu/gpu_device_mapping.c
0824d200569def5bf480f2a5127911ed0ea881e6 - NVIDIA-kernel-module-source-TempVersion/src/nvidia/src/kernel/gpu/device_share.c
f6b4e40b638faf9770b632b404170e1ceb949be5 - NVIDIA-kernel-module-source-TempVersion/src/nvidia/src/kernel/gpu/gpu_gspclient.c
db44a803d81d42bfaf84f7ea1e09dc53c662acef - NVIDIA-kernel-module-source-TempVersion/src/nvidia/src/kernel/gpu/gpu_timeout.c
9515ea68cdac85989e4d53d4c1251115291708dd - NVIDIA-kernel-module-source-TempVersion/src/nvidia/src/kernel/gpu/gpu.c
08be13ced6566aced2f3446bb657dae8efb41fbe - NVIDIA-kernel-module-source-TempVersion/src/nvidia/src/kernel/gpu/gpu_rmapi.c
77573c8518ac7622211c4bdd16524d369cc14b96 - NVIDIA-kernel-module-source-TempVersion/src/nvidia/src/kernel/gpu/device_ctrl.c
fa854efc5cdf4d167dee13302ee8377191624d95 - NVIDIA-kernel-module-source-TempVersion/src/nvidia/src/kernel/gpu/device.c
89543f7085fbc2ca01b5a8baae33b5de921c79e9 - NVIDIA-kernel-module-source-TempVersion/src/nvidia/src/kernel/gpu/gpu_t234d_kernel.c
0e4c2d88b61a0cf63045fe70e5ba2c81c44e37af - NVIDIA-kernel-module-source-TempVersion/src/nvidia/src/kernel/gpu/arch/t23x/kern_gpu_t234d.c
acb2a62fb60e08eb6d16518c43c974783139813b - NVIDIA-kernel-module-source-TempVersion/src/nvidia/src/kernel/gpu/timer/timer.c
834efbfff64c0d01272e49a08bd6196e341985a8 - NVIDIA-kernel-module-source-TempVersion/src/nvidia/src/kernel/gpu/timer/timer_ostimer.c
dd0bd914c6c7bfeabdd9fe87fb984702e0765624 - NVIDIA-kernel-module-source-TempVersion/src/nvidia/src/kernel/gpu/disp/disp_objs.c
19447ad30b3fc2ee308bcc45e3409bafa5defe0d - NVIDIA-kernel-module-source-TempVersion/src/nvidia/src/kernel/gpu/disp/disp_object_kern_ctrl_minimal.c
3abbef0a6fc95d6f7c7c5a16cbbbb51aaa457cc0 - NVIDIA-kernel-module-source-TempVersion/src/nvidia/src/kernel/gpu/disp/disp_sf_user.c
0918cada217ca1883527fe805fc30babf7b8038d - NVIDIA-kernel-module-source-TempVersion/src/nvidia/src/kernel/gpu/disp/disp_channel.c
e1a6dfb38025abeb5adfda929f61eb6ee44b5c84 - NVIDIA-kernel-module-source-TempVersion/src/nvidia/src/kernel/gpu/disp/disp_common_kern_ctrl_minimal.c
ed25b1e99b860468bbf22c10177e0ba99c73894f - NVIDIA-kernel-module-source-TempVersion/src/nvidia/src/kernel/gpu/disp/disp_capabilities.c
8cd12c2da71acede5046c772f14aff7cbd88af12 - NVIDIA-kernel-module-source-TempVersion/src/nvidia/src/kernel/gpu/disp/kern_disp.c
01e8b56f7677f5cb7f950d9aa9bd37d04153085b - NVIDIA-kernel-module-source-TempVersion/src/nvidia/src/kernel/gpu/disp/inst_mem/disp_inst_mem.c
629566bf98be863b12e6dc6aab53d8f5ea13988c - NVIDIA-kernel-module-source-TempVersion/src/nvidia/src/kernel/gpu/disp/inst_mem/arch/v03/disp_inst_mem_0300.c
b41502d73d7781496845377cebd0d445b8ca9dc6 - NVIDIA-kernel-module-source-TempVersion/src/nvidia/src/kernel/gpu/disp/arch/v03/kern_disp_0300.c
8a418dce9fbeb99d5d6e175ed8c88811866f3450 - NVIDIA-kernel-module-source-TempVersion/src/nvidia/src/kernel/gpu/disp/arch/v04/kern_disp_0402.c
e26ade846573c08f7494f17a233b8a9e14685329 - NVIDIA-kernel-module-source-TempVersion/src/nvidia/src/kernel/gpu/disp/head/kernel_head.c
d6e1bd038fa0eff5d3684a5a2c766fdac77f1198 - NVIDIA-kernel-module-source-TempVersion/src/nvidia/src/kernel/gpu/mem_mgr/mem_utils.c
d4a07d1c6beb7ddb229ed6e5374343b6ce916d84 - NVIDIA-kernel-module-source-TempVersion/src/nvidia/src/kernel/gpu/mem_mgr/mem_desc.c
bc2b57acc8fa8644615168e3ddbaf7ac161a7a04 - NVIDIA-kernel-module-source-TempVersion/src/nvidia/src/kernel/gpu/mem_mgr/context_dma.c
2bb921b462c4b50d1f42b39b4728374c7433c8cb - NVIDIA-kernel-module-source-TempVersion/src/nvidia/src/kernel/gpu/mem_mgr/arch/turing/mem_mgr_tu102_base.c
086e9a51757c3989dfe0bf89ca6c0b9c7734104a - NVIDIA-kernel-module-source-TempVersion/src/nvidia/src/kernel/gpu/subdevice/generic_engine.c
56be7a21457145c3c6b2df7beb4c828b7bd1a3b4 - NVIDIA-kernel-module-source-TempVersion/src/nvidia/src/kernel/gpu/subdevice/subdevice.c
5be208cc0e1eae1f85f00bb0b502fdba74d6656c - NVIDIA-kernel-module-source-TempVersion/src/nvidia/src/kernel/gpu/subdevice/subdevice_ctrl_timer_kernel.c
a64c51c515eb76208a822f1f623d11e2edd8d7ac - NVIDIA-kernel-module-source-TempVersion/src/nvidia/src/kernel/gpu/subdevice/subdevice_ctrl_gpu_kernel.c
a54628e9d2733c6d0470e1e73bca1573e6486ab3 - NVIDIA-kernel-module-source-TempVersion/src/nvidia/src/kernel/gpu/subdevice/subdevice_ctrl_event_kernel.c
1f4d15f959df38f4f6ea48c7b10fc859c6e04b12 - NVIDIA-kernel-module-source-TempVersion/src/nvidia/src/kernel/gpu/audio/hda_codec_api.c
c3b93cf7e3c97beb1072135a58d211f67722ad10 - NVIDIA-kernel-module-source-TempVersion/src/nvidia/src/kernel/gpu/dce_client/dce_client_rpc.c
7db9691e2078d4b093f2e09c8ba0e6245e505ef1 - NVIDIA-kernel-module-source-TempVersion/src/nvidia/src/kernel/gpu/dce_client/dce_client.c
f89e982b0e31a1898e1e4749c9a8ae9f0bb59a0c - NVIDIA-kernel-module-source-TempVersion/src/nvidia/src/kernel/rmapi/deprecated_context.c
d92267a3394ded5d7d218530fd16ce00a920b1d6 - NVIDIA-kernel-module-source-TempVersion/src/nvidia/src/kernel/rmapi/alloc_free.c
2279fd14aab9b5f20b8fc21f04dd0fca41e418c9 - NVIDIA-kernel-module-source-TempVersion/src/nvidia/src/kernel/rmapi/event_notification.c
11a547cbfdbce000a6e5edf48492f5b930ddbdca - NVIDIA-kernel-module-source-TempVersion/src/nvidia/src/kernel/rmapi/rs_utils.c
81f66675295315cfc52be225c2e9ee912b56fbac - NVIDIA-kernel-module-source-TempVersion/src/nvidia/src/kernel/rmapi/sharing.c
569f56831cde7bdc528ac2e543eea485025ec6f0 - NVIDIA-kernel-module-source-TempVersion/src/nvidia/src/kernel/rmapi/client.c
05669e008dfd89e5c81381e6c60230c1fe17a876 - NVIDIA-kernel-module-source-TempVersion/src/nvidia/src/kernel/rmapi/resource_desc.c
820b6e63c2b11b0764305c483142f626b6f72038 - NVIDIA-kernel-module-source-TempVersion/src/nvidia/src/kernel/rmapi/rpc_common.c
bc83726df04c30d02a1852a10a22c77fdb3ef7a7 - NVIDIA-kernel-module-source-TempVersion/src/nvidia/src/kernel/rmapi/resource_desc.h
5f194ba056b018a8194c16b0bbb6e49c1b80a996 - NVIDIA-kernel-module-source-TempVersion/src/nvidia/src/kernel/rmapi/param_copy.c
e40f6742084cd04252f3ec8b8499a26547b478bc - NVIDIA-kernel-module-source-TempVersion/src/nvidia/src/kernel/rmapi/mapping.c
ac6a5b3adf15eac4a7bd9ae24981f6f5fc727097 - NVIDIA-kernel-module-source-TempVersion/src/nvidia/src/kernel/rmapi/deprecated_context.h
3a0f999e390d93b0db8272f55fbec56f6b055fe4 - NVIDIA-kernel-module-source-TempVersion/src/nvidia/src/kernel/rmapi/rmapi_utils.c
78f1e379c3d1df9e34baba77f78f48b8585bdc74 - NVIDIA-kernel-module-source-TempVersion/src/nvidia/src/kernel/rmapi/event_buffer.c
8e40d2f35828468f34cf6863f9bf99c20dbfc827 - NVIDIA-kernel-module-source-TempVersion/src/nvidia/src/kernel/rmapi/rmapi_cache.c
b441ee824e9c15c82956254704949317024ceb41 - NVIDIA-kernel-module-source-TempVersion/src/nvidia/src/kernel/rmapi/entry_points.h
277441b3da96fc01199f1d2f5102490e2e6cd830 - NVIDIA-kernel-module-source-TempVersion/src/nvidia/src/kernel/rmapi/control.c
38d0205b68ea2c82709b42eb7e8b9cf92cec8828 - NVIDIA-kernel-module-source-TempVersion/src/nvidia/src/kernel/rmapi/rmapi_stubs.c
2f89b9059467e7f67a6a52c46aecae5cb0364ab6 - NVIDIA-kernel-module-source-TempVersion/src/nvidia/src/kernel/rmapi/binary_api.c
46aa43b18480d2eb7519b2dcd0fe6a68c79b8881 - NVIDIA-kernel-module-source-TempVersion/src/nvidia/src/kernel/rmapi/resource.c
f2c7d77e4183994d7ee414e2a87745fcd23d995e - NVIDIA-kernel-module-source-TempVersion/src/nvidia/src/kernel/rmapi/mapping_cpu.c
6f46dd43e4b3f2ad803a4c9492cb927aebffc1f0 - NVIDIA-kernel-module-source-TempVersion/src/nvidia/src/kernel/rmapi/client_resource.c
59d42b6a123b062237b3b6ca382211e35057ef1e - NVIDIA-kernel-module-source-TempVersion/src/nvidia/src/kernel/rmapi/resource_list.h
ddaf2b8e424df9147a4e2fecf3942b64b1d2b001 - NVIDIA-kernel-module-source-TempVersion/src/nvidia/src/kernel/rmapi/entry_points.c
68cc7b258f934097e9dc31a38e7e3bf2ce2fe5d1 - NVIDIA-kernel-module-source-TempVersion/src/nvidia/src/kernel/rmapi/event.c
c3820fa4bb1192a9317ca834aeee3434c7eb8059 - NVIDIA-kernel-module-source-TempVersion/src/nvidia/src/kernel/rmapi/rmapi.c
ea7be8a55a3310aa1c3926ed69c86a6491925e08 - NVIDIA-kernel-module-source-TempVersion/src/nvidia/src/kernel/diagnostics/nvlog.c
70507a8d43797eb3cdc13408ae8635f4a2eebce0 - NVIDIA-kernel-module-source-TempVersion/src/nvidia/src/kernel/diagnostics/nvlog_printf.c
b3a29311cc22e2dae686f8ed2df6bc828aa826cf - NVIDIA-kernel-module-source-TempVersion/src/nvidia/src/kernel/diagnostics/profiler.c
af4ffa4b423e07cf40eb863c11dbf515c7104874 - NVIDIA-kernel-module-source-TempVersion/src/nvidia/src/kernel/os/os_timer.c
1793e056a0afcc5e1f5bb58b207b49c5f1556eca - NVIDIA-kernel-module-source-TempVersion/src/nvidia/src/kernel/os/os_stubs.c
63e5e17280d865ace8cdd8eb8a2598d3d7830ad7 - NVIDIA-kernel-module-source-TempVersion/src/nvidia/src/kernel/os/os_sanity.c
8e5af753de1725dd919185c29d03ccb0934fab6e - NVIDIA-kernel-module-source-TempVersion/src/nvidia/src/kernel/os/os_init.c
8d96c1b4c00f3a029ba8c27dd2e8e88405c3a1b6 - NVIDIA-kernel-module-source-TempVersion/src/nvidia/src/kernel/core/locks_minimal.c
c0822891f614e6ec847acb971e68aad8847e0cd7 - NVIDIA-kernel-module-source-TempVersion/src/nvidia/src/kernel/core/locks_common.c
c68f2c96bfc6fce483a332a5824656d72986a145 - NVIDIA-kernel-module-source-TempVersion/src/nvidia/src/kernel/core/system.c
37000b419d23a8b052fc1218f09815fafb1d89c9 - NVIDIA-kernel-module-source-TempVersion/src/nvidia/src/kernel/core/hal_mgr.c
7b9c95f912b203c68b6ba1f62470dffee4b4efe3 - NVIDIA-kernel-module-source-TempVersion/src/nvidia/src/kernel/core/thread_state.c
677c655b0b8e86bdab13cdd4044de38647b00eec - NVIDIA-kernel-module-source-TempVersion/src/nvidia/src/kernel/core/hal/hal.c
8eac3ea49f9a53063f7106211e5236372d87bdaf - NVIDIA-kernel-module-source-TempVersion/src/nvidia/src/kernel/core/hal/info_block.c
b9eabee9140c62385d070628948af0dcda3b0b1a - NVIDIA-kernel-module-source-TempVersion/src/nvidia/src/kernel/core/hal/hals_all.c
003e3012e87b8f8f655749db88141d74660e8d8e - NVIDIA-kernel-module-source-TempVersion/src/nvidia/src/kernel/gpu_mgr/gpu_mgr.c
a5a31b9b62e6d19b934411995c315d4fdac71ca0 - NVIDIA-kernel-module-source-TempVersion/src/nvidia/src/kernel/gpu_mgr/gpu_db.c
37d1e3dd86e6409b8e461f90386e013194c9e4d1 - NVIDIA-kernel-module-source-TempVersion/src/nvidia/src/kernel/gpu_mgr/gpu_mgmt_api.c
ed24c0406c85dc27f0fca1bac8b0dcb7a60dca2d - NVIDIA-kernel-module-source-TempVersion/src/nvidia/src/kernel/gpu_mgr/gpu_group.c
6aa752ae480e883d077de842f02444151947f82f - NVIDIA-kernel-module-source-TempVersion/src/nvidia/src/kernel/mem_mgr/virt_mem_mgr.c
956b7871a267b7d381d1cd7d4689ef1aec1da415 - NVIDIA-kernel-module-source-TempVersion/src/nvidia/src/kernel/mem_mgr/mem.c
9d9fcd87d784a758659b6cc8a522eaf9beac4b6c - NVIDIA-kernel-module-source-TempVersion/src/nvidia/src/kernel/mem_mgr/standard_mem.c
15f3290908931a9e4d74b0c0ec9e460956e39089 - NVIDIA-kernel-module-source-TempVersion/src/nvidia/src/kernel/mem_mgr/system_mem.c
623dad3ec0172ed7b3818caece0db5687d587ff3 - NVIDIA-kernel-module-source-TempVersion/src/nvidia/src/kernel/mem_mgr/os_desc_mem.c
64bd2007101cbf718beb707898e85f40071ae405 - NVIDIA-kernel-module-source-TempVersion/src/nvidia/src/kernel/mem_mgr/syncpoint_mem.c
94acdcebee0cdcbf359b15803ec841e5284e1ff2 - NVIDIA-kernel-module-source-TempVersion/src/nvidia/src/kernel/mem_mgr/vaspace.c
079893039c2802e1b0e6fcab5d0ee0e4dc608c84 - NVIDIA-kernel-module-source-TempVersion/src/nvidia/src/kernel/mem_mgr/io_vaspace.c
5b9048e62581a3fbb0227d1a46c4ee8d8397bf5b - NVIDIA-kernel-module-source-TempVersion/src/nvidia/src/kernel/mem_mgr/mem_mgr_internal.h
78cbb6428372c25eba0ccf8c08e7d36d18e4bae8 - NVIDIA-kernel-module-source-TempVersion/src/nvidia/src/lib/base_utils.c
6d5915924b4e26a5e7592427e34b77596162d0fe - NVIDIA-kernel-module-source-TempVersion/src/nvidia/src/lib/zlib/inflate.c
cade0f7049cdb2ab423a073887ed20ba1abdb17e - NVIDIA-kernel-module-source-TempVersion/src/nvidia/src/libraries/utils/nvassert.c
8a4e2aec6fc01ce1133cfc7ef80b6363c5394208 - NVIDIA-kernel-module-source-TempVersion/src/nvidia/src/libraries/nvoc/src/runtime.c
8ed5171254e51e59fc5586e729793831165b8c0c - NVIDIA-kernel-module-source-TempVersion/src/nvidia/src/libraries/tls/tls.c
206dda159ecbc0340ac9329250302c76a504e5a8 - NVIDIA-kernel-module-source-TempVersion/src/nvidia/src/libraries/prereq_tracker/prereq_tracker.c
d48d51a880fced52ad6e323d984e872ccf9ef3bd - NVIDIA-kernel-module-source-TempVersion/src/nvidia/src/libraries/resserv/src/rs_client.c
d0ae6d7a363db3fdf54ae1a760630b52a2019637 - NVIDIA-kernel-module-source-TempVersion/src/nvidia/src/libraries/resserv/src/rs_resource.c
883ad1cf4ed1714eb74d44d3b9a41d6a4723b650 - NVIDIA-kernel-module-source-TempVersion/src/nvidia/src/libraries/resserv/src/rs_server.c
0c9581aa68a77cb9977a7fbcfd2077ccb618206e - NVIDIA-kernel-module-source-TempVersion/src/nvidia/src/libraries/resserv/src/rs_access_rights.c
dac54d97b38ad722198ec918668f175dc5122e4e - NVIDIA-kernel-module-source-TempVersion/src/nvidia/src/libraries/resserv/src/rs_access_map.c
1f2e9d09e658474b36d0b0ecd9380d0d2bcc86b2 - NVIDIA-kernel-module-source-TempVersion/src/nvidia/src/libraries/resserv/src/rs_domain.c
d3e5f13be70c8e458401ec9bdad007dfadedcc11 - NVIDIA-kernel-module-source-TempVersion/src/nvidia/src/libraries/nvbitvector/nvbitvector.c
836ba8b401fb6b6fcf4ccde1b644ebaefc3d8ee1 - NVIDIA-kernel-module-source-TempVersion/src/nvidia/src/libraries/ioaccess/ioaccess.c
9c40bfebe2c57b972683e45dc15f358aaa2280f8 - NVIDIA-kernel-module-source-TempVersion/src/nvidia/src/libraries/eventbuffer/eventbufferproducer.c
8f41e7127a65102f0035c03536c701b7ecdaa909 - NVIDIA-kernel-module-source-TempVersion/src/nvidia/src/libraries/nvport/string/string_generic.c
b528ef8e238dd2c22c6549057b54fe33039c6473 - NVIDIA-kernel-module-source-TempVersion/src/nvidia/src/libraries/nvport/memory/memory_tracking.c
b6d6074ca77856fc5fe4ff1534c08c023ee592a4 - NVIDIA-kernel-module-source-TempVersion/src/nvidia/src/libraries/nvport/memory/memory_unix_kernel_os.c
caff00b37e7f58fde886abcc2737c08526fa089e - NVIDIA-kernel-module-source-TempVersion/src/nvidia/src/libraries/nvport/memory/memory_generic.h
66e79047600e0a40c50e709c6c82402d9b205ad0 - NVIDIA-kernel-module-source-TempVersion/src/nvidia/src/libraries/nvport/crypto/crypto_random_xorshift.c
da86b765702196eb0011ac9d14873fbc1589d48b - NVIDIA-kernel-module-source-TempVersion/src/nvidia/src/libraries/nvport/thread/thread_unix_kernel_os.c
7cdc50ee31b9cde14c0ce6fcd390c5d4564e433d - NVIDIA-kernel-module-source-TempVersion/src/nvidia/src/libraries/nvport/cpu/cpu_common.c
a305654bafc883ad28a134a04e83bbd409e0fc06 - NVIDIA-kernel-module-source-TempVersion/src/nvidia/src/libraries/nvport/cpu/cpu_common.h
2fa76d2d5ba7212f826b656aa683223a470e484c - NVIDIA-kernel-module-source-TempVersion/src/nvidia/src/libraries/nvport/core/core.c
6f6c83e9ee6d91fc8700e5015440f2bc72e6600b - NVIDIA-kernel-module-source-TempVersion/src/nvidia/src/libraries/nvport/sync/sync_rwlock.c
9b69fbf3efea6ba58f9ba7cb0189c9264c994657 - NVIDIA-kernel-module-source-TempVersion/src/nvidia/src/libraries/nvport/sync/sync_common.h
b55b7b59f35d848d5a3b43d63da4d2f7b0af5d3e - NVIDIA-kernel-module-source-TempVersion/src/nvidia/src/libraries/nvport/sync/sync_unix_kernel_os.c
7416712aa964befcf8fede86e5a604871a2d00b8 - NVIDIA-kernel-module-source-TempVersion/src/nvidia/src/libraries/nvport/sync/inc/sync_rwlock_def.h
6dd0c5f2384610ea075642d8e403ddd8c8db371a - NVIDIA-kernel-module-source-TempVersion/src/nvidia/src/libraries/nvport/sync/inc/sync_unix_kernel_os_def.h
87ac95cf569bb550adb3577c6a6658d094c59999 - NVIDIA-kernel-module-source-TempVersion/src/nvidia/src/libraries/nvport/util/util_gcc_clang.c
a045a19d750d48387640ab659bb30f724c34b8c8 - NVIDIA-kernel-module-source-TempVersion/src/nvidia/src/libraries/nvport/util/util_unix_kernel_os.c
f0c486c1ad0f7d9516b13a02d52b4d857d8865b1 - NVIDIA-kernel-module-source-TempVersion/src/nvidia/src/libraries/nvport/util/util_compiler_switch.c
595a6238b9f04887dd418be43ff31f3e7ca6b121 - NVIDIA-kernel-module-source-TempVersion/src/nvidia/src/libraries/containers/map.c
4418c0344b64740050ff8ef6ee085f0687a323d4 - NVIDIA-kernel-module-source-TempVersion/src/nvidia/src/libraries/containers/list.c
057ad074f6252f7809a88f918986d7d5aacff568 - NVIDIA-kernel-module-source-TempVersion/src/nvidia/src/libraries/containers/queue.c
2389c9dd3b13fd2ff26d2d1342c515579079bc71 - NVIDIA-kernel-module-source-TempVersion/src/nvidia/src/libraries/containers/multimap.c
2975e5cecee2c1fd5f69a8ffc20a49016e83025c - NVIDIA-kernel-module-source-TempVersion/src/nvidia/src/libraries/containers/btree/btree.c
f0ce913eb568f85e6e1c1b8965f2cd2b98e81928 - NVIDIA-kernel-module-source-TempVersion/src/nvidia/src/libraries/containers/eheap/eheap_old.c
cba2c17804f6f2062dc5d75583e4a03e03016d1d - NVIDIA-kernel-module-source-TempVersion/src/nvidia/generated/g_disp_capabilities_nvoc.h
133e94f73c781709f407b03d8cdfdd8865c39b4b - NVIDIA-kernel-module-source-TempVersion/src/nvidia/generated/g_disp_sf_user_nvoc.c
801eb295d07258ad70b99cb0fe85f3421690e0c4 - NVIDIA-kernel-module-source-TempVersion/src/nvidia/generated/g_rmconfig_private.h
46c1a2066ead316ea69c60dc323bdb649bc11c0f - NVIDIA-kernel-module-source-TempVersion/src/nvidia/generated/g_binary_api_nvoc.c
f9bdef39159a8475626a0edcbc3a53505a0ff80a - NVIDIA-kernel-module-source-TempVersion/src/nvidia/generated/g_os_hal.h
958d9a2cddc91edfafb5c2f3d9622443ac49a6ef - NVIDIA-kernel-module-source-TempVersion/src/nvidia/generated/g_objtmr_nvoc.c
d405e01478d26ea99cc0012fa2d6e0021bbe6213 - NVIDIA-kernel-module-source-TempVersion/src/nvidia/generated/g_gpu_db_nvoc.c
182602832a033b3e2d5f88d4ba8febe63eeb2f9e - NVIDIA-kernel-module-source-TempVersion/src/nvidia/generated/g_client_resource_nvoc.c
376572489e0d4211663da22d5b0de7c7e740fb29 - NVIDIA-kernel-module-source-TempVersion/src/nvidia/generated/g_hal_mgr_nvoc.h
e3c4822ac998ab5c7946919c85011f6172dc35ee - NVIDIA-kernel-module-source-TempVersion/src/nvidia/generated/g_mem_nvoc.c
fa5e1c6001e60f77415d0a8f87c8b548b12e1217 - NVIDIA-kernel-module-source-TempVersion/src/nvidia/generated/g_mem_mgr_nvoc.c
ddc0ac4e1d8b8aef15e147f1f85f8df37c196763 - NVIDIA-kernel-module-source-TempVersion/src/nvidia/generated/g_hal_register.h
4fe5357eabd0c5e351fb965ceead308240f68eb1 - NVIDIA-kernel-module-source-TempVersion/src/nvidia/generated/g_objtmr_nvoc.h
4f4acfdefc7b9a0cdfe2d5840cc18c9c33366053 - NVIDIA-kernel-module-source-TempVersion/src/nvidia/generated/g_object_nvoc.h
1d66bab50a7d39faa2b0fec469a4512d2c7610d5 - NVIDIA-kernel-module-source-TempVersion/src/nvidia/generated/g_rmconfig_util.c
fbcbeb92e46ba11ac26c04c9688b3ffcf10f5c53 - NVIDIA-kernel-module-source-TempVersion/src/nvidia/generated/g_prereq_tracker_nvoc.h
e449382e19e4dcfcf0aec0babe5a1c8ce2f4249b - NVIDIA-kernel-module-source-TempVersion/src/nvidia/generated/g_kern_disp_nvoc.c
87a5ae8e07103074020ba052ca45ab39e918d3bd - NVIDIA-kernel-module-source-TempVersion/src/nvidia/generated/g_resource_nvoc.c
47b7744ddd01b821bf2fd25fdb25c8d6d55ee01d - NVIDIA-kernel-module-source-TempVersion/src/nvidia/generated/g_prereq_tracker_nvoc.c
c46cae4a17181c48bafc01237b83537df61c41ae - NVIDIA-kernel-module-source-TempVersion/src/nvidia/generated/g_hal_nvoc.c
f42bfa3b5a801358d30f852625d8456290550f46 - NVIDIA-kernel-module-source-TempVersion/src/nvidia/generated/g_disp_inst_mem_nvoc.h
59a87763c6abdc54828f2785a7d90e43e607bc87 - NVIDIA-kernel-module-source-TempVersion/src/nvidia/generated/g_disp_inst_mem_nvoc.c
da3cc08f12ccee23bcb1c0d0c757b8bbcb81e4fd - NVIDIA-kernel-module-source-TempVersion/src/nvidia/generated/g_rs_server_nvoc.h
6fd6953e4ae0af707376a40ea0e4f3e70872be7b - NVIDIA-kernel-module-source-TempVersion/src/nvidia/generated/g_os_desc_mem_nvoc.h
162777624d03af2f17dfdc28bc35143e2ec6cdee - NVIDIA-kernel-module-source-TempVersion/src/nvidia/generated/g_os_nvoc.c
b82e5db65ad41764f456d6f924c89d76c165e48d - NVIDIA-kernel-module-source-TempVersion/src/nvidia/generated/g_system_nvoc.h
63e9d0416d5ca1fdf547b5fba9ec76e54690c9dc - NVIDIA-kernel-module-source-TempVersion/src/nvidia/generated/g_ref_count_nvoc.h
26b240cb74736e7ed85cb0775e4ddda45b3a804e - NVIDIA-kernel-module-source-TempVersion/src/nvidia/generated/g_gpu_nvoc.c
499a3d9c61a86b667cc77cf8653a71f7fe85078a - NVIDIA-kernel-module-source-TempVersion/src/nvidia/generated/g_nv_name_released.h
ac842d9de5eae74ef02b0a75259fb016b80c6eac - NVIDIA-kernel-module-source-TempVersion/src/nvidia/generated/g_disp_objs_nvoc.c
88d336f88c9b72ec2c1352d4ebe00c0831eafbca - NVIDIA-kernel-module-source-TempVersion/src/nvidia/generated/g_sdk-structures.h
fb78615cde6323784f51d33f2acd61fd4030fee0 - NVIDIA-kernel-module-source-TempVersion/src/nvidia/generated/g_device_nvoc.c
213ebb4fdfa3c2f64b5f998e2ad990e448d4a104 - NVIDIA-kernel-module-source-TempVersion/src/nvidia/generated/g_nv_debug_dump_nvoc.h
a6174ad345cfdf926cbb4c86c7e8eeadfccb0ddf - NVIDIA-kernel-module-source-TempVersion/src/nvidia/generated/g_gpu_class_list.c
fa785f8138598af783aefecf10b141d524e6bb42 - NVIDIA-kernel-module-source-TempVersion/src/nvidia/generated/g_virt_mem_mgr_nvoc.c
de97c5afdc34cb9aff23c3ba166e21f660cf1f47 - NVIDIA-kernel-module-source-TempVersion/src/nvidia/generated/g_hal.h
f9bdef39159a8475626a0edcbc3a53505a0ff80a - NVIDIA-kernel-module-source-TempVersion/src/nvidia/generated/g_os_private.h
53b2c39666e1da206d44d69d54009f20440503bc - NVIDIA-kernel-module-source-TempVersion/src/nvidia/generated/g_eng_state_nvoc.h
93f9738c0e8aa715592306ddf023adf6b548dcc4 - NVIDIA-kernel-module-source-TempVersion/src/nvidia/generated/g_nvh_state.h
2b49950ba8f540ed4231c3334810edbb212bb859 - NVIDIA-kernel-module-source-TempVersion/src/nvidia/generated/g_client_resource_nvoc.h
d614f90730e2ee78bc3aae47b4e7976500e166e7 - NVIDIA-kernel-module-source-TempVersion/src/nvidia/generated/g_io_vaspace_nvoc.h
4302502637f5c4146cb963801258444f2d8173e1 - NVIDIA-kernel-module-source-TempVersion/src/nvidia/generated/g_allclasses.h
7bb406aa863430507bdf07b5f3e519c0d756220a - NVIDIA-kernel-module-source-TempVersion/src/nvidia/generated/g_rs_resource_nvoc.c
6f3fc9676df77fa24c49140331b87ed5988ed57c - NVIDIA-kernel-module-source-TempVersion/src/nvidia/generated/rmconfig.h
cb02e66e5fc06aa340ab460c977961701e9ba295 - NVIDIA-kernel-module-source-TempVersion/src/nvidia/generated/g_subdevice_nvoc.c
079ac6d2a90bd2fc9413e092a729202dbc5f724a - NVIDIA-kernel-module-source-TempVersion/src/nvidia/generated/g_system_mem_nvoc.h
65d1ace1e68c9b39cce6db61aa8b86ee47a0ae4b - NVIDIA-kernel-module-source-TempVersion/src/nvidia/generated/g_gpu_halspec_nvoc.c
e0988b45cf712f1a7662b6f822eaed3ffd9938f3 - NVIDIA-kernel-module-source-TempVersion/src/nvidia/generated/g_gpu_mgmt_api_nvoc.h
40c937ca657bda9c0b67bd24c5047d39e596c16c - NVIDIA-kernel-module-source-TempVersion/src/nvidia/generated/g_disp_channel_nvoc.c
f8e842add67dc070cc011ea103fc56cfd81c8b9a - NVIDIA-kernel-module-source-TempVersion/src/nvidia/generated/g_chips2halspec_nvoc.c
3a5457a216d197af8f120c660690a55ee44bdd8e - NVIDIA-kernel-module-source-TempVersion/src/nvidia/generated/g_generic_engine_nvoc.c
21e3cf689d84b1a28e11f66cc68a0bc6713108b0 - NVIDIA-kernel-module-source-TempVersion/src/nvidia/generated/g_rs_server_nvoc.c
edead99d125425ddf8f2fa4e4261b8cc3bf566fc - NVIDIA-kernel-module-source-TempVersion/src/nvidia/generated/g_standard_mem_nvoc.c
b07c2c5e8df4de2bb9d242fd1606f1a57b8a742d - NVIDIA-kernel-module-source-TempVersion/src/nvidia/generated/g_io_vaspace_nvoc.c
bfabd5155af3172e1c0a5a0b66721ff830c7b68f - NVIDIA-kernel-module-source-TempVersion/src/nvidia/generated/g_hypervisor_nvoc.h
cc635daf3d7a9a176580951841b82e9eb0d6f5ad - NVIDIA-kernel-module-source-TempVersion/src/nvidia/generated/g_kernel_head_nvoc.c
757b3ecf94d0c8914a32c4bd302f8ccfa4027856 - NVIDIA-kernel-module-source-TempVersion/src/nvidia/generated/g_syncpoint_mem_nvoc.c
6263c1ceca0797d34a102f9846acd1fdef06fb60 - NVIDIA-kernel-module-source-TempVersion/src/nvidia/generated/g_resserv_nvoc.h
3b0e038829647cfe0d8807579db33416a420d1d2 - NVIDIA-kernel-module-source-TempVersion/src/nvidia/generated/g_chips2halspec.h
abda8536d885be1422810c184b936bbc880972eb - NVIDIA-kernel-module-source-TempVersion/src/nvidia/generated/g_os_desc_mem_nvoc.c
f6f40d568bcf2ae89547ad054f9b5357bac366ab - NVIDIA-kernel-module-source-TempVersion/src/nvidia/generated/g_event_nvoc.h
ceb4dd72148dfe4a0581631147e8d7636abfd61f - NVIDIA-kernel-module-source-TempVersion/src/nvidia/generated/g_chips2halspec_nvoc.h
41784541b2e9ee778b52e686288fe492c0276fec - NVIDIA-kernel-module-source-TempVersion/src/nvidia/generated/g_hal_mgr_nvoc.c
d32d0b65f5f76cb56ca7cd83c0adfe5cb5330924 - NVIDIA-kernel-module-source-TempVersion/src/nvidia/generated/g_resource_nvoc.h
d04adc777f547ae6d1369cf4c94963e5abf90b86 - NVIDIA-kernel-module-source-TempVersion/src/nvidia/generated/g_context_dma_nvoc.c
ac3965eea078f1998c3a3041f14212578682e599 - NVIDIA-kernel-module-source-TempVersion/src/nvidia/generated/g_vaspace_nvoc.c
0dae533422e24d91a29c82d7be619160bbb6f6be - NVIDIA-kernel-module-source-TempVersion/src/nvidia/generated/g_context_dma_nvoc.h
3f5a391895fc900396bae68761fe9b4dcb382ec0 - NVIDIA-kernel-module-source-TempVersion/src/nvidia/generated/g_event_buffer_nvoc.h
9eb042cd3feb89e0964e3f4b948ee690f02bf604 - NVIDIA-kernel-module-source-TempVersion/src/nvidia/generated/g_dce_client_nvoc.h
285af0d0517cb191387a05ad596f74291ec81737 - NVIDIA-kernel-module-source-TempVersion/src/nvidia/generated/g_mem_desc_nvoc.h
9646d1c4d472ad800c7c93eec15cc03dd9201073 - NVIDIA-kernel-module-source-TempVersion/src/nvidia/generated/g_disp_objs_nvoc.h
c370a103a4c1c9cf2df3763988e77ef8f7bc6afb - NVIDIA-kernel-module-source-TempVersion/src/nvidia/generated/g_gpu_db_nvoc.h
2239839c8a780a87e786439a49ab63e25d25001a - NVIDIA-kernel-module-source-TempVersion/src/nvidia/generated/g_rmconfig_util.h
09597f23d6a5440258656be81e7e6709390128f8 - NVIDIA-kernel-module-source-TempVersion/src/nvidia/generated/g_hal_private.h
8e0e60f6d30bbed679c43b4997875989314ee88c - NVIDIA-kernel-module-source-TempVersion/src/nvidia/generated/g_dce_client_nvoc.c
dec0f585ca46dc8e1aae49c8ea58db5a415de65c - NVIDIA-kernel-module-source-TempVersion/src/nvidia/generated/g_rpc-message-header.h
871fd0260ab9c164b8f6a7d1aba4563af622f1ac - NVIDIA-kernel-module-source-TempVersion/src/nvidia/generated/g_disp_channel_nvoc.h
205490d6651110f28009e752fa286f818bed22fb - NVIDIA-kernel-module-source-TempVersion/src/nvidia/generated/g_syncpoint_mem_nvoc.h
07a37ff685e68a703455e0ed7db7940697487ed2 - NVIDIA-kernel-module-source-TempVersion/src/nvidia/generated/g_system_nvoc.c
cc71518b4151dc2ee0592bbd2866d437043d0e1a - NVIDIA-kernel-module-source-TempVersion/src/nvidia/generated/g_kernel_head_nvoc.h
2c28d729456749f16ae03fb48b1e416706762805 - NVIDIA-kernel-module-source-TempVersion/src/nvidia/generated/g_resource_fwd_decls_nvoc.h
59c3612a596ad6b996c9d1506f9893bd1b5effee - NVIDIA-kernel-module-source-TempVersion/src/nvidia/generated/g_gpu_mgr_nvoc.c
81a6a28692f50efeebecad125de0585dd711ff36 - NVIDIA-kernel-module-source-TempVersion/src/nvidia/generated/g_device_nvoc.h
3f581df19314b273244c4c42ea915ec8ef0d8ce2 - NVIDIA-kernel-module-source-TempVersion/src/nvidia/generated/g_rs_client_nvoc.h
e839f8a5ebef5f28818bb5824bd7c52320db9a74 - NVIDIA-kernel-module-source-TempVersion/src/nvidia/generated/g_disp_sf_user_nvoc.h
e0b8f64c042dcbb6340552cb3517dabdeb490f1b - NVIDIA-kernel-module-source-TempVersion/src/nvidia/generated/g_hal_nvoc.h
7523c2ee9228ad0e2fb3566b23b9720d7896afae - NVIDIA-kernel-module-source-TempVersion/src/nvidia/generated/g_eng_state_nvoc.c
ad50b3dbe1685eefe51c4fc296f3eade70789dfb - NVIDIA-kernel-module-source-TempVersion/src/nvidia/generated/g_gpu_resource_nvoc.h
ca042cfcdfe8cc8a141f8bb5c9e6c05d8a71b707 - NVIDIA-kernel-module-source-TempVersion/src/nvidia/generated/g_hda_codec_api_nvoc.h
2ab6933e07a84c64dfcbeef3b3f4e3f14249d8c8 - NVIDIA-kernel-module-source-TempVersion/src/nvidia/generated/g_tmr_nvoc.h
ffd4f01212709e321d4097e424fe5d32038f5d8b - NVIDIA-kernel-module-source-TempVersion/src/nvidia/generated/g_gpu_mgmt_api_nvoc.c
12776c69191b583ffcf0914697cf41802f52ef01 - NVIDIA-kernel-module-source-TempVersion/src/nvidia/generated/g_hal_archimpl.h
05cb2fed8648f07b54dc2e8bacbafb323ea8262e - NVIDIA-kernel-module-source-TempVersion/src/nvidia/generated/g_standard_mem_nvoc.h
0b15dd4515c5e436a659883a48e62bf3c68bf439 - NVIDIA-kernel-module-source-TempVersion/src/nvidia/generated/g_gpu_nvoc.h
0269da77a8db8efde1debc8236f2b3de2cd2597e - NVIDIA-kernel-module-source-TempVersion/src/nvidia/generated/g_eng_desc_nvoc.h
1bdccdbabf5ae52fd65b829c35079bb7a8734939 - NVIDIA-kernel-module-source-TempVersion/src/nvidia/generated/g_tmr_nvoc.c
410a759c949904b7ae1eecafb31143fad579c0a1 - NVIDIA-kernel-module-source-TempVersion/src/nvidia/generated/g_rs_client_nvoc.c
73c598515eb7985c8f4cace0946ec9613960be6c - NVIDIA-kernel-module-source-TempVersion/src/nvidia/generated/g_gpu_group_nvoc.c
73a37ad59b9b13b61eb944748b6c2ba3cad7b630 - NVIDIA-kernel-module-source-TempVersion/src/nvidia/generated/g_traceable_nvoc.h
8915f69e67e1f3a809a5479e36280df06ce8dd90 - NVIDIA-kernel-module-source-TempVersion/src/nvidia/generated/g_system_mem_nvoc.c
d792fbb20b6ca5f2d62addf6a94b0c5027ae15fe - NVIDIA-kernel-module-source-TempVersion/src/nvidia/generated/g_subdevice_nvoc.h
6124890a54e529dff8b9d6ecf8f4bebe1e10a8a2 - NVIDIA-kernel-module-source-TempVersion/src/nvidia/generated/g_os_nvoc.h
cb03502bf603c88b709ec803b60efd1d6f8e5ee1 - NVIDIA-kernel-module-source-TempVersion/src/nvidia/generated/g_rpc-structures.h
b378d336af4d5cb4b1fb13b85042fad1fe02f4cc - NVIDIA-kernel-module-source-TempVersion/src/nvidia/generated/g_journal_nvoc.h
7c1b36cca9e8bf1fe18284685a6a80620df348cb - NVIDIA-kernel-module-source-TempVersion/src/nvidia/generated/g_client_nvoc.h
cd833a822c1ce96c79135ba7221d24f347ceadb1 - NVIDIA-kernel-module-source-TempVersion/src/nvidia/generated/g_mem_mgr_nvoc.h
a016a7d8e07389736c388cb973f3b2a177ea917d - NVIDIA-kernel-module-source-TempVersion/src/nvidia/generated/g_disp_capabilities_nvoc.c
42d784e8b478bbf48293a805aa227f0abdf1923b - NVIDIA-kernel-module-source-TempVersion/src/nvidia/generated/g_event_nvoc.c
b29061454e7d8daa0cef0787f12726d105faf5c4 - NVIDIA-kernel-module-source-TempVersion/src/nvidia/generated/g_gpu_resource_nvoc.c
4b9f2ee66b59181f226e1af5087db6ea80f1ee27 - NVIDIA-kernel-module-source-TempVersion/src/nvidia/generated/g_virt_mem_mgr_nvoc.h
23d16b4534103f24fac5bb86eb8bab40e5bcba57 - NVIDIA-kernel-module-source-TempVersion/src/nvidia/generated/g_hda_codec_api_nvoc.c
e48b8b6ba9da5630a7ade526acbb94e50d9b636d - NVIDIA-kernel-module-source-TempVersion/src/nvidia/generated/g_vaspace_nvoc.h
b86536778197748c707c3e9e4c73c5fbcb037e32 - NVIDIA-kernel-module-source-TempVersion/src/nvidia/generated/g_generic_engine_nvoc.h
07fd5f5534a6d751107f582ba187c7a53a139954 - NVIDIA-kernel-module-source-TempVersion/src/nvidia/generated/g_rs_resource_nvoc.h
f4a5684d5a877b90c7ae7b66436117c6feb65f91 - NVIDIA-kernel-module-source-TempVersion/src/nvidia/generated/g_gpu_mgr_nvoc.h
ab79a1418b65b9d65081456583169f516dd510c9 - NVIDIA-kernel-module-source-TempVersion/src/nvidia/generated/g_event_buffer_nvoc.c
bd048add5f0781d90b55a5293881a2f59ace3070 - NVIDIA-kernel-module-source-TempVersion/src/nvidia/generated/g_binary_api_nvoc.h
e50c91a674508b23b072e0dd2edbf743f24b333d - NVIDIA-kernel-module-source-TempVersion/src/nvidia/generated/g_object_nvoc.c
df070e15630a11b2f4b64d52228fa5a6e7ab2aa9 - NVIDIA-kernel-module-source-TempVersion/src/nvidia/generated/g_gpu_halspec_nvoc.h
0f3140b5eae77a6055f32a91cb13b026bbb23905 - NVIDIA-kernel-module-source-TempVersion/src/nvidia/generated/g_kern_disp_nvoc.h
76b1f545e3712a2f8e7c31b101acd9dd682c52f8 - NVIDIA-kernel-module-source-TempVersion/src/nvidia/generated/g_traceable_nvoc.c
14450b18d002d4e1786d4630ef4f1994c07ef188 - NVIDIA-kernel-module-source-TempVersion/src/nvidia/generated/g_odb.h
7b0201852361118f277ee7cc6dd16212c0192f71 - NVIDIA-kernel-module-source-TempVersion/src/nvidia/generated/g_gpu_group_nvoc.h
3d3385445934719abda1fefd4eb0762937be0e61 - NVIDIA-kernel-module-source-TempVersion/src/nvidia/generated/g_client_nvoc.c
c4fde03d5939b0eef108fde9c2f10661568f22a9 - NVIDIA-kernel-module-source-TempVersion/src/nvidia/generated/g_mem_nvoc.h
5fd1da24ae8263c43dc5dada4702564b6f0ca3d9 - NVIDIA-kernel-module-source-TempVersion/src/nvidia/inc/os/dce_rm_client_ipc.h
76b24227c65570898c19e16bf35b2cad143f3d05 - NVIDIA-kernel-module-source-TempVersion/src/nvidia/inc/kernel/gpu/gpu.h
61c7d3ac2dc61ee81abd743a6536a439592ee162 - NVIDIA-kernel-module-source-TempVersion/src/nvidia/inc/kernel/gpu/gpu_child_list.h
bf894a769c46d5d173e3875cd9667bb3fe82feb9 - NVIDIA-kernel-module-source-TempVersion/src/nvidia/inc/kernel/gpu/gpu_timeout.h
f17b704f2489ffedcc057d4a6da77c42ece42923 - NVIDIA-kernel-module-source-TempVersion/src/nvidia/inc/kernel/gpu/gpu_resource.h
0e8353854e837f0ef0fbf0d5ff5d7a25aa1eef7c - NVIDIA-kernel-module-source-TempVersion/src/nvidia/inc/kernel/gpu/eng_state.h
426c6ab6cecc3b1ba540b01309d1603301a86db1 - NVIDIA-kernel-module-source-TempVersion/src/nvidia/inc/kernel/gpu/eng_desc.h
c33ab6494c9423c327707fce2bcb771328984a3c - NVIDIA-kernel-module-source-TempVersion/src/nvidia/inc/kernel/gpu/gpu_halspec.h
6b27c9edf93f29a31787d9acaaefb2cefc31e7d4 - NVIDIA-kernel-module-source-TempVersion/src/nvidia/inc/kernel/gpu/gpu_device_mapping.h
1938fd2511213c8003864d879cf1c41ae1169a5f - NVIDIA-kernel-module-source-TempVersion/src/nvidia/inc/kernel/gpu/gpu_uuid.h
cf3d1427394c425c543e253adf443192ca613762 - NVIDIA-kernel-module-source-TempVersion/src/nvidia/inc/kernel/gpu/gpu_access.h
ce3302c1890e2f7990434f7335cb619b12dee854 - NVIDIA-kernel-module-source-TempVersion/src/nvidia/inc/kernel/gpu/gpu_resource_desc.h
97d0a067e89251672f191788abe81cf26dcb335f - NVIDIA-kernel-module-source-TempVersion/src/nvidia/inc/kernel/gpu/device/device.h
61711ed293ee6974a6ed9a8a3732ae5fedcdc666 - NVIDIA-kernel-module-source-TempVersion/src/nvidia/inc/kernel/gpu/disp/kern_disp_max.h
b39826404d84e0850aa3385691d8dde6e30d70d4 - NVIDIA-kernel-module-source-TempVersion/src/nvidia/inc/kernel/gpu/disp/disp_sf_user.h
51a209575d3e3fe8feb7269ece7df0846e18ca2a - NVIDIA-kernel-module-source-TempVersion/src/nvidia/inc/kernel/gpu/disp/kern_disp_type.h
277a2719f8c063037c6a9ed55ade2b1cb17f48ae - NVIDIA-kernel-module-source-TempVersion/src/nvidia/inc/kernel/gpu/disp/disp_capabilities.h
74bc902cd00b17da3a1dfa7fd3ebc058de439b76 - NVIDIA-kernel-module-source-TempVersion/src/nvidia/inc/kernel/gpu/disp/disp_channel.h
be7da8d1106ee14ff808d86abffb86794299b2df - NVIDIA-kernel-module-source-TempVersion/src/nvidia/inc/kernel/gpu/disp/disp_objs.h
576216219d27aa887beeccefc22bcead4d1234d7 - NVIDIA-kernel-module-source-TempVersion/src/nvidia/inc/kernel/gpu/disp/kern_disp.h
5179f01acf7e9e251552dc17c0dcd84f7d341d82 - NVIDIA-kernel-module-source-TempVersion/src/nvidia/inc/kernel/gpu/disp/inst_mem/disp_inst_mem.h
9a33a37c6cea9bad513aa14c942c689f28f7c0d8 - NVIDIA-kernel-module-source-TempVersion/src/nvidia/inc/kernel/gpu/disp/head/kernel_head.h
f758ea5f9cbd23a678290ef0b8d98d470e3499e0 - NVIDIA-kernel-module-source-TempVersion/src/nvidia/inc/kernel/gpu/disp/vblank_callback/vblank.h
6756126ddd616d6393037bebf371fceacaf3a9f1 - NVIDIA-kernel-module-source-TempVersion/src/nvidia/inc/kernel/gpu/mem_mgr/context_dma.h
20416f7239833dcaa743bbf988702610e9251289 - NVIDIA-kernel-module-source-TempVersion/src/nvidia/inc/kernel/gpu/mem_mgr/mem_mgr.h
a29f55d5fbc90dade83df3ef3263018633675284 - NVIDIA-kernel-module-source-TempVersion/src/nvidia/inc/kernel/gpu/mem_mgr/virt_mem_allocator_common.h
82abc2458910250c1a912e023f37e87c1c9bbb9e - NVIDIA-kernel-module-source-TempVersion/src/nvidia/inc/kernel/gpu/mem_mgr/heap_base.h
889ba18a43cc2b5c5e970a90ddcb770ce873b785 - NVIDIA-kernel-module-source-TempVersion/src/nvidia/inc/kernel/gpu/mem_mgr/mem_desc.h
b52e6a0499640e651aa4200b2c8a1653df04a420 - NVIDIA-kernel-module-source-TempVersion/src/nvidia/inc/kernel/gpu/mem_mgr/mem_utils.h
24d01769b39a6dd62574a95fad64443b05872151 - NVIDIA-kernel-module-source-TempVersion/src/nvidia/inc/kernel/gpu/subdevice/subdevice.h
efc50bb2ff6ccf1b7715fd413ca680034920758e - NVIDIA-kernel-module-source-TempVersion/src/nvidia/inc/kernel/gpu/subdevice/generic_engine.h
ccca322d29ae171ee81c95d58e31f1c109429ae7 - NVIDIA-kernel-module-source-TempVersion/src/nvidia/inc/kernel/gpu/gsp/message_queue.h
1e3bebe46b7f2f542eedace554a4156b3afb51f1 - NVIDIA-kernel-module-source-TempVersion/src/nvidia/inc/kernel/gpu/audio/hda_codec_api.h
ce4e0f7177f46f4fc507a68b635e5395a3f7dde6 - NVIDIA-kernel-module-source-TempVersion/src/nvidia/inc/kernel/gpu/dce_client/dce_client.h
5f60ac544252b894ac7ecc0c6dc4446e6275eae5 - NVIDIA-kernel-module-source-TempVersion/src/nvidia/inc/kernel/rmapi/rmapi.h
2baec15f4c68a9c59dd107a0db288e39914e6737 - NVIDIA-kernel-module-source-TempVersion/src/nvidia/inc/kernel/rmapi/client.h
a92dbf2870fe0df245ea8967f2f6a68f5075ecaf - NVIDIA-kernel-module-source-TempVersion/src/nvidia/inc/kernel/rmapi/resource_fwd_decls.h
61e3704cd51161c9804cb168d5ce4553b7311973 - NVIDIA-kernel-module-source-TempVersion/src/nvidia/inc/kernel/rmapi/resource.h
99a27d87c7f1487f8df5781d284c2e9a83525892 - NVIDIA-kernel-module-source-TempVersion/src/nvidia/inc/kernel/rmapi/binary_api.h
497492340cea19a93b62da69ca2000b811c8f5d6 - NVIDIA-kernel-module-source-TempVersion/src/nvidia/inc/kernel/rmapi/event_buffer.h
f3028fbcafe73212a94d295951122b532ff5445b - NVIDIA-kernel-module-source-TempVersion/src/nvidia/inc/kernel/rmapi/rs_utils.h
b4bae9ea958b4d014908459e08c93319784c47dd - NVIDIA-kernel-module-source-TempVersion/src/nvidia/inc/kernel/rmapi/event.h
ac9288d75555180c1d5dd6dd7e0e11fb57a967f2 - NVIDIA-kernel-module-source-TempVersion/src/nvidia/inc/kernel/rmapi/exports.h
2b23f2dbd8f3f63a17a1b63ebb40a2fd7fd8801a - NVIDIA-kernel-module-source-TempVersion/src/nvidia/inc/kernel/rmapi/alloc_size.h
c9cb08c7c73c0bdd75a320640d16bf4b4defe873 - NVIDIA-kernel-module-source-TempVersion/src/nvidia/inc/kernel/rmapi/mapping_list.h
f19dad1746e639d866c700c2f871fcc0144f2e5e - NVIDIA-kernel-module-source-TempVersion/src/nvidia/inc/kernel/rmapi/control.h
f1713ecc0b3e58e46c346409dbf4630aa6f7f3ed - NVIDIA-kernel-module-source-TempVersion/src/nvidia/inc/kernel/rmapi/param_copy.h
255c28b9bd27098382bace05af3ad7f195d12895 - NVIDIA-kernel-module-source-TempVersion/src/nvidia/inc/kernel/rmapi/rmapi_utils.h
4453fe6463e3155063f2bdbf36f44697606a80a5 - NVIDIA-kernel-module-source-TempVersion/src/nvidia/inc/kernel/rmapi/client_resource.h
7615ac3a83d0ad23b2160ff8ad90bec9eb1f3c6c - NVIDIA-kernel-module-source-TempVersion/src/nvidia/inc/kernel/diagnostics/journal.h
b259f23312abe56d34a8f0da36ef549ef60ba5b0 - NVIDIA-kernel-module-source-TempVersion/src/nvidia/inc/kernel/diagnostics/nv_debug_dump.h
c6efd51b8b8447829a0867cd7fb7a5a5a2fb1e3d - NVIDIA-kernel-module-source-TempVersion/src/nvidia/inc/kernel/diagnostics/traceable.h
7e75b5d99376fba058b31996d49449f8fe62d3f0 - NVIDIA-kernel-module-source-TempVersion/src/nvidia/inc/kernel/diagnostics/profiler.h
fd780f85cb1cd0fd3914fa31d1bd4933437b791d - NVIDIA-kernel-module-source-TempVersion/src/nvidia/inc/kernel/diagnostics/tracer.h
3a28bf1692efb34d2161907c3781401951cc2d4f - NVIDIA-kernel-module-source-TempVersion/src/nvidia/inc/kernel/diagnostics/journal_structs.h
c8496199cd808ed4c79d8e149961e721ad96714e - NVIDIA-kernel-module-source-TempVersion/src/nvidia/inc/kernel/os/capability.h
e5b881419bc00d925eba9f8493f6b36cf3ce7ca7 - NVIDIA-kernel-module-source-TempVersion/src/nvidia/inc/kernel/os/os_stub.h
408c0340350b813c3cba17fd36171075e156df72 - NVIDIA-kernel-module-source-TempVersion/src/nvidia/inc/kernel/os/os.h
cda75171ca7d8bf920aab6d56ef9aadec16fd15d - NVIDIA-kernel-module-source-TempVersion/src/nvidia/inc/kernel/os/nv_memory_type.h
af25180a08db4d5d20afd09f948b15d8c4d2d738 - NVIDIA-kernel-module-source-TempVersion/src/nvidia/inc/kernel/os/os_fixed_mode_timings_props.h
457c02092adfc1587d6e3cd866e28c567acbc43a - NVIDIA-kernel-module-source-TempVersion/src/nvidia/inc/kernel/core/info_block.h
bffae4da6a1f9b7dc7c879587fd674b49b46dac1 - NVIDIA-kernel-module-source-TempVersion/src/nvidia/inc/kernel/core/core.h
cbfff1f06eecc99fb5a1c82d43397043058f02fc - NVIDIA-kernel-module-source-TempVersion/src/nvidia/inc/kernel/core/printf.h
f929d43974893cd155ab2f5f77606f0040fe3e39 - NVIDIA-kernel-module-source-TempVersion/src/nvidia/inc/kernel/core/locks.h
b5859c7862fb3eeb266f7213845885789801194a - NVIDIA-kernel-module-source-TempVersion/src/nvidia/inc/kernel/core/system.h
37f267155ddfc3db38f110dbb0397f0463d055ff - NVIDIA-kernel-module-source-TempVersion/src/nvidia/inc/kernel/core/strict.h
bdc4ab675c6f6c4bd77c3aaf08aa5c865b186802 - NVIDIA-kernel-module-source-TempVersion/src/nvidia/inc/kernel/core/hal.h
ed496ab6e8b64d3398f929146e908c5a453a03d9 - NVIDIA-kernel-module-source-TempVersion/src/nvidia/inc/kernel/core/prelude.h
b319914c97f9978488e8fb049d39c72ed64fd4d2 - NVIDIA-kernel-module-source-TempVersion/src/nvidia/inc/kernel/core/thread_state.h
b00302aec7e4f4e3b89a2f699f8b1f18fc17b1ba - NVIDIA-kernel-module-source-TempVersion/src/nvidia/inc/kernel/core/hal_mgr.h
8ef620afdf720259cead00d20fae73d31e59c2f7 - NVIDIA-kernel-module-source-TempVersion/src/nvidia/inc/kernel/virtualization/hypervisor/hypervisor.h
2c48d7335bdb0b7ea88b78216c0aeab2e11e00c1 - NVIDIA-kernel-module-source-TempVersion/src/nvidia/inc/kernel/gpu_mgr/gpu_mgmt_api.h
e188d9f2d042ffe029b96d8fbb16c79a0fc0fb01 - NVIDIA-kernel-module-source-TempVersion/src/nvidia/inc/kernel/gpu_mgr/gpu_db.h
ea32018e3464bb1ac792e39227badf482fa2dc67 - NVIDIA-kernel-module-source-TempVersion/src/nvidia/inc/kernel/gpu_mgr/gpu_group.h
5b151d0d97b83c9fb76b76c476947f9e15e774ad - NVIDIA-kernel-module-source-TempVersion/src/nvidia/inc/kernel/gpu_mgr/gpu_mgr.h
0ce5d6370c086d2944b2e8d31ff72a510d98dc8f - NVIDIA-kernel-module-source-TempVersion/src/nvidia/inc/kernel/mem_mgr/virt_mem_mgr.h
4c386104eaead66c66df11258c3f1182b46e96ee - NVIDIA-kernel-module-source-TempVersion/src/nvidia/inc/kernel/mem_mgr/syncpoint_mem.h
a5f49a031db4171228a27482d091283e84632ace - NVIDIA-kernel-module-source-TempVersion/src/nvidia/inc/kernel/mem_mgr/system_mem.h
d15991bc770c5ab41fe746995294c5213efa056b - NVIDIA-kernel-module-source-TempVersion/src/nvidia/inc/kernel/mem_mgr/io_vaspace.h
5ae08b2077506cbc41e40e1b3672e615ce9d910f - NVIDIA-kernel-module-source-TempVersion/src/nvidia/inc/kernel/mem_mgr/vaspace.h
02d6a37ef1bb057604cb98a905fa02429f200c96 - NVIDIA-kernel-module-source-TempVersion/src/nvidia/inc/kernel/mem_mgr/mem.h
1a08e83fd6f0a072d6887c60c529e29211bcd007 - NVIDIA-kernel-module-source-TempVersion/src/nvidia/inc/kernel/mem_mgr/os_desc_mem.h
2d4afabd63699feec3aea5e89601db009fc51a08 - NVIDIA-kernel-module-source-TempVersion/src/nvidia/inc/kernel/mem_mgr/standard_mem.h
5e9928552086947b10092792db4a8c4c57a84adf - NVIDIA-kernel-module-source-TempVersion/src/nvidia/inc/kernel/platform/acpi_common.h
2f05394872ffa95d700b7822489fa59f74ad5819 - NVIDIA-kernel-module-source-TempVersion/src/nvidia/inc/kernel/platform/sli/sli.h
fff3ebc8527b34f8c463daad4d20ee5e33321344 - NVIDIA-kernel-module-source-TempVersion/src/nvidia/inc/lib/ref_count.h
04dba2b7a6a360f3e855a7d6a7484ddcdfb90c19 - NVIDIA-kernel-module-source-TempVersion/src/nvidia/inc/lib/base_utils.h
f8d9eb5f6a6883de962b63b4b7de35c01b20182f - NVIDIA-kernel-module-source-TempVersion/src/nvidia/inc/lib/protobuf/prb.h
601edb7333b87349d791d430f1cac84fb6fbb919 - NVIDIA-kernel-module-source-TempVersion/src/nvidia/inc/lib/zlib/inflate.h
9255fff39d7422ca4a56ba5ab60866779201d3e8 - NVIDIA-kernel-module-source-TempVersion/src/nvidia/inc/libraries/poolalloc.h
8dd7f2d9956278ed036bbc288bff4dde86a9b509 - NVIDIA-kernel-module-source-TempVersion/src/nvidia/inc/libraries/eventbufferproducer.h
e53d5fc9b66dbec4c947224050866cec30b2f537 - NVIDIA-kernel-module-source-TempVersion/src/nvidia/inc/libraries/utils/nvrange.h
398e4cd63852a18da6e42b920eacd927a2c38bc0 - NVIDIA-kernel-module-source-TempVersion/src/nvidia/inc/libraries/utils/nv_enum.h
ba3c81e9eae32eefbf81818b48fdf6ccd7e73163 - NVIDIA-kernel-module-source-TempVersion/src/nvidia/inc/libraries/utils/nvmacro.h
18321894aa7631b491ea39edc2d45d1028cdc9c6 - NVIDIA-kernel-module-source-TempVersion/src/nvidia/inc/libraries/utils/nvprintf.h
167f49cccc912430bb6b3cb77395f665a32cc8be - NVIDIA-kernel-module-source-TempVersion/src/nvidia/inc/libraries/utils/nvbitvector.h
1ed5d8ae82f37112b163187fa48d2720957e6bdf - NVIDIA-kernel-module-source-TempVersion/src/nvidia/inc/libraries/utils/nvassert.h
62a18f19f79512ebccdf286068e0b557c7926e13 - NVIDIA-kernel-module-source-TempVersion/src/nvidia/inc/libraries/nvoc/runtime.h
00433b51c4d6254fd4dfc3dcd9b4ad59e485e7c0 - NVIDIA-kernel-module-source-TempVersion/src/nvidia/inc/libraries/nvoc/object.h
1b28bd0ee2e560ca2854a73a3ee5fb1cf713d013 - NVIDIA-kernel-module-source-TempVersion/src/nvidia/inc/libraries/nvoc/utility.h
5cadc87ba685991c7d4c6d453dcc9a2cca4398bf - NVIDIA-kernel-module-source-TempVersion/src/nvidia/inc/libraries/nvoc/prelude.h
664ff0e10e893923b70425fa49c9c48ed0735573 - NVIDIA-kernel-module-source-TempVersion/src/nvidia/inc/libraries/nvoc/rtti.h
bdb558ee8f782e6be06fc262820f6bd9ce75bd51 - NVIDIA-kernel-module-source-TempVersion/src/nvidia/inc/libraries/tls/tls.h
56b8bae7756ed36d0831f76f95033f74eaab01db - NVIDIA-kernel-module-source-TempVersion/src/nvidia/inc/libraries/prereq_tracker/prereq_tracker.h
7239704e6fe88b9d75984fb5e9f4b5706502d7f3 - NVIDIA-kernel-module-source-TempVersion/src/nvidia/inc/libraries/nvlog/nvlog_printf.h
e08146f5de1596f5337c49cfbe180e30e880dedb - NVIDIA-kernel-module-source-TempVersion/src/nvidia/inc/libraries/nvlog/nvlog.h
d2c035e67e295b8f33f0fc52d9c30e43c5d7c2ba - NVIDIA-kernel-module-source-TempVersion/src/nvidia/inc/libraries/nvlog/internal/nvlog_printf_internal.h
cd033fe116a41285a979e629a2ee7b11ec99369f - NVIDIA-kernel-module-source-TempVersion/src/nvidia/inc/libraries/resserv/rs_access_rights.h
2dec1c73507f66736674d203cc4a00813ccb11bc - NVIDIA-kernel-module-source-TempVersion/src/nvidia/inc/libraries/resserv/rs_domain.h
a0d3d164eb92280353cdc4458d2561aae8a68c1d - NVIDIA-kernel-module-source-TempVersion/src/nvidia/inc/libraries/resserv/rs_server.h
89ece4711626bf1e4197c69bd5754e2798214d76 - NVIDIA-kernel-module-source-TempVersion/src/nvidia/inc/libraries/resserv/resserv.h
bacdb2c1a1dbf182a0a3be15efa0a5f83365118f - NVIDIA-kernel-module-source-TempVersion/src/nvidia/inc/libraries/resserv/rs_resource.h
df174d6b4f718ef699ca6f38c16aaeffa111ad3c - NVIDIA-kernel-module-source-TempVersion/src/nvidia/inc/libraries/resserv/rs_access_map.h
841ddca998b570feb1d59b50d644c8f2b59ae8e9 - NVIDIA-kernel-module-source-TempVersion/src/nvidia/inc/libraries/resserv/rs_client.h
b795f5cb77ecd2cc407102900b63977cfb34bbfd - NVIDIA-kernel-module-source-TempVersion/src/nvidia/inc/libraries/ioaccess/ioaccess.h
3dcee4e110f4c571e7f49fae2f2d0630d008a906 - NVIDIA-kernel-module-source-TempVersion/src/nvidia/inc/libraries/nvport/nvport.h
46345715dde843be2890b33f191b2f3b69385e0d - NVIDIA-kernel-module-source-TempVersion/src/nvidia/inc/libraries/nvport/memory.h
a1d93b6ec8ff01a3c2651e772a826ee11a7781d7 - NVIDIA-kernel-module-source-TempVersion/src/nvidia/inc/libraries/nvport/util.h
b93c2532babf176f7b91735682e7d7cdc41f96f8 - NVIDIA-kernel-module-source-TempVersion/src/nvidia/inc/libraries/nvport/debug.h
147d47ef4bd860394d1d8ae82c68d97887e2898b - NVIDIA-kernel-module-source-TempVersion/src/nvidia/inc/libraries/nvport/core.h
6d698ca4fc5e48c525f214a57e1de0cc4aa9e36b - NVIDIA-kernel-module-source-TempVersion/src/nvidia/inc/libraries/nvport/thread.h
3e656d5ed1f5df898ec444921ce77a40ead66b28 - NVIDIA-kernel-module-source-TempVersion/src/nvidia/inc/libraries/nvport/atomic.h
3ac7ddf3d402f3fd20cffe9d4e93f457de319605 - NVIDIA-kernel-module-source-TempVersion/src/nvidia/inc/libraries/nvport/sync.h
2487ffc1eb1e50b27ba07e0581da543d80bdaa72 - NVIDIA-kernel-module-source-TempVersion/src/nvidia/inc/libraries/nvport/safe.h
22420ad669a9809602f111385b7840556e58ecff - NVIDIA-kernel-module-source-TempVersion/src/nvidia/inc/libraries/nvport/cpu.h
6ad1beaa2783a57330240d47b373930cd36ca5d0 - NVIDIA-kernel-module-source-TempVersion/src/nvidia/inc/libraries/nvport/crypto.h
2805fad632acad045044e0b8417de88032177300 - NVIDIA-kernel-module-source-TempVersion/src/nvidia/inc/libraries/nvport/string.h
23afbd04f4e4b3301edcfdec003c8e936d898e38 - NVIDIA-kernel-module-source-TempVersion/src/nvidia/inc/libraries/nvport/inline/debug_unix_kernel_os.h
eedda5c4b0611c3b95f726b0a2db4b0a23b7b1cf - NVIDIA-kernel-module-source-TempVersion/src/nvidia/inc/libraries/nvport/inline/atomic_gcc.h
a8c9b83169aceb5f97d9f7a411db449496dc18f6 - NVIDIA-kernel-module-source-TempVersion/src/nvidia/inc/libraries/nvport/inline/util_generic.h
aafca30178f49676f640be9c6d34f623a3e3a9a4 - NVIDIA-kernel-module-source-TempVersion/src/nvidia/inc/libraries/nvport/inline/safe_generic.h
600ad8781585e87df49ab1aaa39a07c8e8de74f5 - NVIDIA-kernel-module-source-TempVersion/src/nvidia/inc/libraries/nvport/inline/util_gcc_clang.h
0747ee16c7e6c726f568867d0fbbad411c8795c8 - NVIDIA-kernel-module-source-TempVersion/src/nvidia/inc/libraries/nvport/inline/sync_tracking.h
2a76929dc6b0e8624d02002600bc454cc851dee4 - NVIDIA-kernel-module-source-TempVersion/src/nvidia/inc/libraries/nvport/inline/atomic_clang.h
1d6a239ed6c8dab1397f056a81ff456141ec7f9c - NVIDIA-kernel-module-source-TempVersion/src/nvidia/inc/libraries/nvport/inline/util_valist.h
31f2042e852f074970644903335af5ffa2b59c38 - NVIDIA-kernel-module-source-TempVersion/src/nvidia/inc/libraries/nvport/inline/memory_tracking.h
65a237b66732aafe39bc4a14d87debd2b094fb83 - NVIDIA-kernel-module-source-TempVersion/src/nvidia/inc/libraries/containers/map.h
c9e75f7b02241ededa5328a4f559e70dec60d159 - NVIDIA-kernel-module-source-TempVersion/src/nvidia/inc/libraries/containers/type_safety.h
3924b67e6d63e9a15876331c695daaf679454b05 - NVIDIA-kernel-module-source-TempVersion/src/nvidia/inc/libraries/containers/list.h
a28ab42de95e4878fb46e19d7b965c23f92b3213 - NVIDIA-kernel-module-source-TempVersion/src/nvidia/inc/libraries/containers/btree.h
4cd6b110470da3aee29e999e096ca582104fab21 - NVIDIA-kernel-module-source-TempVersion/src/nvidia/inc/libraries/containers/queue.h
1dacc1c1efc757c12e4c64eac171474a798b86fd - NVIDIA-kernel-module-source-TempVersion/src/nvidia/inc/libraries/containers/eheap_old.h
969cbac56935a80fafd7cceff157b27e623f9429 - NVIDIA-kernel-module-source-TempVersion/src/nvidia/inc/libraries/containers/multimap.h

Change-Id: I19565adc2503125a30a3ce9b8df155929548bcdb
This commit is contained in: svcmobrel-release
2022-08-15 08:54:29 -07:00
parent 0872bd5b3b
commit 8ef68d7c1e
1166 changed files with 460314 additions and 0 deletions


@@ -0,0 +1,215 @@
/*
* SPDX-FileCopyrightText: Copyright (c) 2021-2021 NVIDIA CORPORATION & AFFILIATES. All rights reserved.
* SPDX-License-Identifier: MIT
*
* Permission is hereby granted, free of charge, to any person obtaining a
* copy of this software and associated documentation files (the "Software"),
* to deal in the Software without restriction, including without limitation
* the rights to use, copy, modify, merge, publish, distribute, sublicense,
* and/or sell copies of the Software, and to permit persons to whom the
* Software is furnished to do so, subject to the following conditions:
*
* The above copyright notice and this permission notice shall be included in
* all copies or substantial portions of the Software.
*
* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
* IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
* FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL
* THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
* LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
* FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER
* DEALINGS IN THE SOFTWARE.
*/
/*
* g_allclasses.h
*
* Pull in all class headers or class number declarations.
* The class list is generated by chip-config from Classes.pm
*
* NOTE: this file may be included multiple times
*
*/
#if defined(SDK_ALL_CLASSES_INCLUDE_FULL_HEADER)
#include <class/cl0000.h> // NV01_ROOT
#include <class/cl0001.h> // NV01_ROOT_NON_PRIV
#include <class/cl0041.h> // NV01_ROOT_CLIENT
#include <class/cl0020.h> // NV0020_GPU_MANAGEMENT
#include <class/cl0080.h> // NV01_DEVICE_0
#include <class/cl2080.h> // NV20_SUBDEVICE_0
#include <class/cl2081.h> // NV2081_BINAPI
#include <class/cl2082.h> // NV2082_BINAPI_PRIVILEGED
#include <class/cl0002.h> // NV01_CONTEXT_DMA
#include <class/cl003e.h> // NV01_MEMORY_SYSTEM
#include <class/cl00c3.h> // NV01_MEMORY_SYNCPOINT
#include <class/cl0071.h> // NV01_MEMORY_SYSTEM_OS_DESCRIPTOR
#include <class/cl00f2.h> // IO_VASPACE_A
#include <class/cl0005.h> // NV01_EVENT
#include <nvos.h> // NV01_EVENT_KERNEL_CALLBACK
#include <nvos.h> // NV01_EVENT_OS_EVENT
#include <nvos.h> // NV01_EVENT_KERNEL_CALLBACK_EX
#include <class/clc372sw.h> // NVC372_DISPLAY_SW
#include <class/clc673.h> // NVC673_DISP_CAPABILITIES
#include <class/cl0073.h> // NV04_DISPLAY_COMMON
#include <class/clc670.h> // NVC670_DISPLAY
#include <class/clc671.h> // NVC671_DISP_SF_USER
#include <class/clc67a.h> // NVC67A_CURSOR_IMM_CHANNEL_PIO
#include <class/clc67b.h> // NVC67B_WINDOW_IMM_CHANNEL_DMA
#include <class/clc67d.h> // NVC67D_CORE_CHANNEL_DMA
#include <class/clc67e.h> // NVC67E_WINDOW_CHANNEL_DMA
#include <class/clc77f.h> // NVC77F_ANY_CHANNEL_DMA
#include <class/cl90ec.h> // GF100_HDACODEC
#else // defined(SDK_ALL_CLASSES_INCLUDE_FULL_HEADER)
#ifndef NV01_ROOT
#define NV01_ROOT (0x00000000)
#endif
#ifndef NV1_ROOT
#define NV1_ROOT (0x00000000) // alias
#endif
#ifndef NV01_NULL_OBJECT
#define NV01_NULL_OBJECT (0x00000000) // alias
#endif
#ifndef NV1_NULL_OBJECT
#define NV1_NULL_OBJECT (0x00000000) // alias
#endif
#ifndef NV01_ROOT_NON_PRIV
#define NV01_ROOT_NON_PRIV (0x00000001)
#endif
#ifndef NV1_ROOT_NON_PRIV
#define NV1_ROOT_NON_PRIV (0x00000001) // alias
#endif
#ifndef NV01_ROOT_CLIENT
#define NV01_ROOT_CLIENT (0x00000041)
#endif
#ifndef NV0020_GPU_MANAGEMENT
#define NV0020_GPU_MANAGEMENT (0x00000020)
#endif
#ifndef NV01_DEVICE_0
#define NV01_DEVICE_0 (0x00000080)
#endif
#ifndef NV20_SUBDEVICE_0
#define NV20_SUBDEVICE_0 (0x00002080)
#endif
#ifndef NV2081_BINAPI
#define NV2081_BINAPI (0x00002081)
#endif
#ifndef NV2082_BINAPI_PRIVILEGED
#define NV2082_BINAPI_PRIVILEGED (0x00002082)
#endif
#ifndef NV01_CONTEXT_DMA
#define NV01_CONTEXT_DMA (0x00000002)
#endif
#ifndef NV01_MEMORY_SYSTEM
#define NV01_MEMORY_SYSTEM (0x0000003e)
#endif
#ifndef NV1_MEMORY_SYSTEM
#define NV1_MEMORY_SYSTEM (0x0000003e) // alias
#endif
#ifndef NV01_MEMORY_SYNCPOINT
#define NV01_MEMORY_SYNCPOINT (0x000000c3)
#endif
#ifndef NV01_MEMORY_SYSTEM_OS_DESCRIPTOR
#define NV01_MEMORY_SYSTEM_OS_DESCRIPTOR (0x00000071)
#endif
#ifndef IO_VASPACE_A
#define IO_VASPACE_A (0x000000f2)
#endif
#ifndef NV01_EVENT
#define NV01_EVENT (0x00000005)
#endif
#ifndef NV1_EVENT
#define NV1_EVENT (0x00000005) // alias
#endif
#ifndef NV01_EVENT_KERNEL_CALLBACK
#define NV01_EVENT_KERNEL_CALLBACK (0x00000078)
#endif
#ifndef NV1_EVENT_KERNEL_CALLBACK
#define NV1_EVENT_KERNEL_CALLBACK (0x00000078) // alias
#endif
#ifndef NV01_EVENT_OS_EVENT
#define NV01_EVENT_OS_EVENT (0x00000079)
#endif
#ifndef NV1_EVENT_OS_EVENT
#define NV1_EVENT_OS_EVENT (0x00000079) // alias
#endif
#ifndef NV01_EVENT_WIN32_EVENT
#define NV01_EVENT_WIN32_EVENT (0x00000079) // alias
#endif
#ifndef NV1_EVENT_WIN32_EVENT
#define NV1_EVENT_WIN32_EVENT (0x00000079) // alias
#endif
#ifndef NV01_EVENT_KERNEL_CALLBACK_EX
#define NV01_EVENT_KERNEL_CALLBACK_EX (0x0000007e)
#endif
#ifndef NV1_EVENT_KERNEL_CALLBACK_EX
#define NV1_EVENT_KERNEL_CALLBACK_EX (0x0000007e) // alias
#endif
#ifndef NVC372_DISPLAY_SW
#define NVC372_DISPLAY_SW (0x0000c372)
#endif
#ifndef NVC673_DISP_CAPABILITIES
#define NVC673_DISP_CAPABILITIES (0x0000c673)
#endif
#ifndef NV04_DISPLAY_COMMON
#define NV04_DISPLAY_COMMON (0x00000073)
#endif
#ifndef NVC670_DISPLAY
#define NVC670_DISPLAY (0x0000c670)
#endif
#ifndef NVC671_DISP_SF_USER
#define NVC671_DISP_SF_USER (0x0000c671)
#endif
#ifndef NVC67A_CURSOR_IMM_CHANNEL_PIO
#define NVC67A_CURSOR_IMM_CHANNEL_PIO (0x0000c67a)
#endif
#ifndef NVC67B_WINDOW_IMM_CHANNEL_DMA
#define NVC67B_WINDOW_IMM_CHANNEL_DMA (0x0000c67b)
#endif
#ifndef NVC67D_CORE_CHANNEL_DMA
#define NVC67D_CORE_CHANNEL_DMA (0x0000c67d)
#endif
#ifndef NVC67E_WINDOW_CHANNEL_DMA
#define NVC67E_WINDOW_CHANNEL_DMA (0x0000c67e)
#endif
#ifndef NVC77F_ANY_CHANNEL_DMA
#define NVC77F_ANY_CHANNEL_DMA (0x0000c77f)
#endif
#ifndef GF100_HDACODEC
#define GF100_HDACODEC (0x000090ec)
#endif
#endif // defined(SDK_ALL_CLASSES_INCLUDE_FULL_HEADER)

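For reference, the header comment in the g_allclasses.h hunk above says the file can be consumed in two ways: with SDK_ALL_CLASSES_INCLUDE_FULL_HEADER defined it pulls in the full class headers, otherwise it only emits the class-number #defines, and it may be included multiple times. The short C sketch below is a hypothetical consumer illustrating that switch (the file name, the double inclusion, and the compile-time check are illustrative assumptions, not part of this commit, and it only compiles inside a tree that provides the referenced SDK headers):

/* consumer.c (hypothetical sketch, not part of this commit) */

/* First inclusion: SDK_ALL_CLASSES_INCLUDE_FULL_HEADER is not defined,
 * so only the class-number #defines (each guarded by #ifndef) are seen. */
#include "g_allclasses.h"

/* Second inclusion: request the full SDK class headers instead. The
 * header's note that it "may be included multiple times" is what makes
 * this pattern legal; identical re-#defines of class numbers are benign. */
#define SDK_ALL_CLASSES_INCLUDE_FULL_HEADER
#include "g_allclasses.h"

/* Either way the class numbers are plain constants, e.g. NV20_SUBDEVICE_0,
 * checked here at compile time with a negative-array-size trick. */
typedef char nv_check_subdevice_classid
    [(NV20_SUBDEVICE_0 == 0x00002080) ? 1 : -1];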

@@ -0,0 +1,659 @@
#define NVOC_BINARY_API_H_PRIVATE_ACCESS_ALLOWED
#include "nvoc/runtime.h"
#include "nvoc/rtti.h"
#include "nvtypes.h"
#include "nvport/nvport.h"
#include "nvport/inline/util_valist.h"
#include "utils/nvassert.h"
#include "g_binary_api_nvoc.h"
#ifdef DEBUG
char __nvoc_class_id_uniqueness_check_0xb7a47c = 1;
#endif
extern const struct NVOC_CLASS_DEF __nvoc_class_def_BinaryApi;
extern const struct NVOC_CLASS_DEF __nvoc_class_def_Object;
extern const struct NVOC_CLASS_DEF __nvoc_class_def_RsResource;
extern const struct NVOC_CLASS_DEF __nvoc_class_def_RmResourceCommon;
extern const struct NVOC_CLASS_DEF __nvoc_class_def_RmResource;
extern const struct NVOC_CLASS_DEF __nvoc_class_def_GpuResource;
void __nvoc_init_BinaryApi(BinaryApi*);
void __nvoc_init_funcTable_BinaryApi(BinaryApi*);
NV_STATUS __nvoc_ctor_BinaryApi(BinaryApi*, struct CALL_CONTEXT * arg_pCallContext, struct RS_RES_ALLOC_PARAMS_INTERNAL * arg_pParams);
void __nvoc_init_dataField_BinaryApi(BinaryApi*);
void __nvoc_dtor_BinaryApi(BinaryApi*);
extern const struct NVOC_EXPORT_INFO __nvoc_export_info_BinaryApi;
static const struct NVOC_RTTI __nvoc_rtti_BinaryApi_BinaryApi = {
/*pClassDef=*/ &__nvoc_class_def_BinaryApi,
/*dtor=*/ (NVOC_DYNAMIC_DTOR) &__nvoc_dtor_BinaryApi,
/*offset=*/ 0,
};
static const struct NVOC_RTTI __nvoc_rtti_BinaryApi_Object = {
/*pClassDef=*/ &__nvoc_class_def_Object,
/*dtor=*/ &__nvoc_destructFromBase,
/*offset=*/ NV_OFFSETOF(BinaryApi, __nvoc_base_GpuResource.__nvoc_base_RmResource.__nvoc_base_RsResource.__nvoc_base_Object),
};
static const struct NVOC_RTTI __nvoc_rtti_BinaryApi_RsResource = {
/*pClassDef=*/ &__nvoc_class_def_RsResource,
/*dtor=*/ &__nvoc_destructFromBase,
/*offset=*/ NV_OFFSETOF(BinaryApi, __nvoc_base_GpuResource.__nvoc_base_RmResource.__nvoc_base_RsResource),
};
static const struct NVOC_RTTI __nvoc_rtti_BinaryApi_RmResourceCommon = {
/*pClassDef=*/ &__nvoc_class_def_RmResourceCommon,
/*dtor=*/ &__nvoc_destructFromBase,
/*offset=*/ NV_OFFSETOF(BinaryApi, __nvoc_base_GpuResource.__nvoc_base_RmResource.__nvoc_base_RmResourceCommon),
};
static const struct NVOC_RTTI __nvoc_rtti_BinaryApi_RmResource = {
/*pClassDef=*/ &__nvoc_class_def_RmResource,
/*dtor=*/ &__nvoc_destructFromBase,
/*offset=*/ NV_OFFSETOF(BinaryApi, __nvoc_base_GpuResource.__nvoc_base_RmResource),
};
static const struct NVOC_RTTI __nvoc_rtti_BinaryApi_GpuResource = {
/*pClassDef=*/ &__nvoc_class_def_GpuResource,
/*dtor=*/ &__nvoc_destructFromBase,
/*offset=*/ NV_OFFSETOF(BinaryApi, __nvoc_base_GpuResource),
};
static const struct NVOC_CASTINFO __nvoc_castinfo_BinaryApi = {
/*numRelatives=*/ 6,
/*relatives=*/ {
&__nvoc_rtti_BinaryApi_BinaryApi,
&__nvoc_rtti_BinaryApi_GpuResource,
&__nvoc_rtti_BinaryApi_RmResource,
&__nvoc_rtti_BinaryApi_RmResourceCommon,
&__nvoc_rtti_BinaryApi_RsResource,
&__nvoc_rtti_BinaryApi_Object,
},
};
const struct NVOC_CLASS_DEF __nvoc_class_def_BinaryApi =
{
/*classInfo=*/ {
/*size=*/ sizeof(BinaryApi),
/*classId=*/ classId(BinaryApi),
/*providerId=*/ &__nvoc_rtti_provider,
#if NV_PRINTF_STRINGS_ALLOWED
/*name=*/ "BinaryApi",
#endif
},
/*objCreatefn=*/ (NVOC_DYNAMIC_OBJ_CREATE) &__nvoc_objCreateDynamic_BinaryApi,
/*pCastInfo=*/ &__nvoc_castinfo_BinaryApi,
/*pExportInfo=*/ &__nvoc_export_info_BinaryApi
};
static NV_STATUS __nvoc_thunk_BinaryApi_gpuresControl(struct GpuResource *pResource, struct CALL_CONTEXT *pCallContext, struct RS_RES_CONTROL_PARAMS_INTERNAL *pParams) {
return binapiControl((struct BinaryApi *)(((unsigned char *)pResource) - __nvoc_rtti_BinaryApi_GpuResource.offset), pCallContext, pParams);
}
static NvBool __nvoc_thunk_GpuResource_binapiShareCallback(struct BinaryApi *pGpuResource, struct RsClient *pInvokingClient, struct RsResourceRef *pParentRef, RS_SHARE_POLICY *pSharePolicy) {
return gpuresShareCallback((struct GpuResource *)(((unsigned char *)pGpuResource) + __nvoc_rtti_BinaryApi_GpuResource.offset), pInvokingClient, pParentRef, pSharePolicy);
}
static NV_STATUS __nvoc_thunk_GpuResource_binapiUnmap(struct BinaryApi *pGpuResource, struct CALL_CONTEXT *pCallContext, struct RsCpuMapping *pCpuMapping) {
return gpuresUnmap((struct GpuResource *)(((unsigned char *)pGpuResource) + __nvoc_rtti_BinaryApi_GpuResource.offset), pCallContext, pCpuMapping);
}
static NV_STATUS __nvoc_thunk_RmResource_binapiGetMemInterMapParams(struct BinaryApi *pRmResource, RMRES_MEM_INTER_MAP_PARAMS *pParams) {
return rmresGetMemInterMapParams((struct RmResource *)(((unsigned char *)pRmResource) + __nvoc_rtti_BinaryApi_RmResource.offset), pParams);
}
static NV_STATUS __nvoc_thunk_RmResource_binapiGetMemoryMappingDescriptor(struct BinaryApi *pRmResource, struct MEMORY_DESCRIPTOR **ppMemDesc) {
return rmresGetMemoryMappingDescriptor((struct RmResource *)(((unsigned char *)pRmResource) + __nvoc_rtti_BinaryApi_RmResource.offset), ppMemDesc);
}
static NV_STATUS __nvoc_thunk_GpuResource_binapiGetMapAddrSpace(struct BinaryApi *pGpuResource, struct CALL_CONTEXT *pCallContext, NvU32 mapFlags, NV_ADDRESS_SPACE *pAddrSpace) {
return gpuresGetMapAddrSpace((struct GpuResource *)(((unsigned char *)pGpuResource) + __nvoc_rtti_BinaryApi_GpuResource.offset), pCallContext, mapFlags, pAddrSpace);
}
static NvHandle __nvoc_thunk_GpuResource_binapiGetInternalObjectHandle(struct BinaryApi *pGpuResource) {
return gpuresGetInternalObjectHandle((struct GpuResource *)(((unsigned char *)pGpuResource) + __nvoc_rtti_BinaryApi_GpuResource.offset));
}
static NV_STATUS __nvoc_thunk_RsResource_binapiControlFilter(struct BinaryApi *pResource, struct CALL_CONTEXT *pCallContext, struct RS_RES_CONTROL_PARAMS_INTERNAL *pParams) {
return resControlFilter((struct RsResource *)(((unsigned char *)pResource) + __nvoc_rtti_BinaryApi_RsResource.offset), pCallContext, pParams);
}
static void __nvoc_thunk_RsResource_binapiAddAdditionalDependants(struct RsClient *pClient, struct BinaryApi *pResource, RsResourceRef *pReference) {
resAddAdditionalDependants(pClient, (struct RsResource *)(((unsigned char *)pResource) + __nvoc_rtti_BinaryApi_RsResource.offset), pReference);
}
static NvU32 __nvoc_thunk_RsResource_binapiGetRefCount(struct BinaryApi *pResource) {
return resGetRefCount((struct RsResource *)(((unsigned char *)pResource) + __nvoc_rtti_BinaryApi_RsResource.offset));
}
static NV_STATUS __nvoc_thunk_RmResource_binapiCheckMemInterUnmap(struct BinaryApi *pRmResource, NvBool bSubdeviceHandleProvided) {
return rmresCheckMemInterUnmap((struct RmResource *)(((unsigned char *)pRmResource) + __nvoc_rtti_BinaryApi_RmResource.offset), bSubdeviceHandleProvided);
}
static NV_STATUS __nvoc_thunk_RsResource_binapiMapTo(struct BinaryApi *pResource, RS_RES_MAP_TO_PARAMS *pParams) {
return resMapTo((struct RsResource *)(((unsigned char *)pResource) + __nvoc_rtti_BinaryApi_RsResource.offset), pParams);
}
static NV_STATUS __nvoc_thunk_RmResource_binapiControl_Prologue(struct BinaryApi *pResource, struct CALL_CONTEXT *pCallContext, struct RS_RES_CONTROL_PARAMS_INTERNAL *pParams) {
return rmresControl_Prologue((struct RmResource *)(((unsigned char *)pResource) + __nvoc_rtti_BinaryApi_RmResource.offset), pCallContext, pParams);
}
static NV_STATUS __nvoc_thunk_GpuResource_binapiGetRegBaseOffsetAndSize(struct BinaryApi *pGpuResource, struct OBJGPU *pGpu, NvU32 *pOffset, NvU32 *pSize) {
return gpuresGetRegBaseOffsetAndSize((struct GpuResource *)(((unsigned char *)pGpuResource) + __nvoc_rtti_BinaryApi_GpuResource.offset), pGpu, pOffset, pSize);
}
static NvBool __nvoc_thunk_RsResource_binapiCanCopy(struct BinaryApi *pResource) {
return resCanCopy((struct RsResource *)(((unsigned char *)pResource) + __nvoc_rtti_BinaryApi_RsResource.offset));
}
static NV_STATUS __nvoc_thunk_GpuResource_binapiInternalControlForward(struct BinaryApi *pGpuResource, NvU32 command, void *pParams, NvU32 size) {
return gpuresInternalControlForward((struct GpuResource *)(((unsigned char *)pGpuResource) + __nvoc_rtti_BinaryApi_GpuResource.offset), command, pParams, size);
}
static void __nvoc_thunk_RsResource_binapiPreDestruct(struct BinaryApi *pResource) {
resPreDestruct((struct RsResource *)(((unsigned char *)pResource) + __nvoc_rtti_BinaryApi_RsResource.offset));
}
static NV_STATUS __nvoc_thunk_RsResource_binapiUnmapFrom(struct BinaryApi *pResource, RS_RES_UNMAP_FROM_PARAMS *pParams) {
return resUnmapFrom((struct RsResource *)(((unsigned char *)pResource) + __nvoc_rtti_BinaryApi_RsResource.offset), pParams);
}
static void __nvoc_thunk_RmResource_binapiControl_Epilogue(struct BinaryApi *pResource, struct CALL_CONTEXT *pCallContext, struct RS_RES_CONTROL_PARAMS_INTERNAL *pParams) {
rmresControl_Epilogue((struct RmResource *)(((unsigned char *)pResource) + __nvoc_rtti_BinaryApi_RmResource.offset), pCallContext, pParams);
}
static NV_STATUS __nvoc_thunk_RsResource_binapiControlLookup(struct BinaryApi *pResource, struct RS_RES_CONTROL_PARAMS_INTERNAL *pParams, const struct NVOC_EXPORTED_METHOD_DEF **ppEntry) {
return resControlLookup((struct RsResource *)(((unsigned char *)pResource) + __nvoc_rtti_BinaryApi_RsResource.offset), pParams, ppEntry);
}
static NV_STATUS __nvoc_thunk_GpuResource_binapiMap(struct BinaryApi *pGpuResource, struct CALL_CONTEXT *pCallContext, struct RS_CPU_MAP_PARAMS *pParams, struct RsCpuMapping *pCpuMapping) {
return gpuresMap((struct GpuResource *)(((unsigned char *)pGpuResource) + __nvoc_rtti_BinaryApi_GpuResource.offset), pCallContext, pParams, pCpuMapping);
}
static NvBool __nvoc_thunk_RmResource_binapiAccessCallback(struct BinaryApi *pResource, struct RsClient *pInvokingClient, void *pAllocParams, RsAccessRight accessRight) {
return rmresAccessCallback((struct RmResource *)(((unsigned char *)pResource) + __nvoc_rtti_BinaryApi_RmResource.offset), pInvokingClient, pAllocParams, accessRight);
}
const struct NVOC_EXPORT_INFO __nvoc_export_info_BinaryApi =
{
/*numEntries=*/ 0,
/*pExportEntries=*/ 0
};
void __nvoc_dtor_GpuResource(GpuResource*);
void __nvoc_dtor_BinaryApi(BinaryApi *pThis) {
__nvoc_dtor_GpuResource(&pThis->__nvoc_base_GpuResource);
PORT_UNREFERENCED_VARIABLE(pThis);
}
void __nvoc_init_dataField_BinaryApi(BinaryApi *pThis) {
PORT_UNREFERENCED_VARIABLE(pThis);
}
NV_STATUS __nvoc_ctor_GpuResource(GpuResource* , struct CALL_CONTEXT *, struct RS_RES_ALLOC_PARAMS_INTERNAL *);
NV_STATUS __nvoc_ctor_BinaryApi(BinaryApi *pThis, struct CALL_CONTEXT * arg_pCallContext, struct RS_RES_ALLOC_PARAMS_INTERNAL * arg_pParams) {
NV_STATUS status = NV_OK;
status = __nvoc_ctor_GpuResource(&pThis->__nvoc_base_GpuResource, arg_pCallContext, arg_pParams);
if (status != NV_OK) goto __nvoc_ctor_BinaryApi_fail_GpuResource;
__nvoc_init_dataField_BinaryApi(pThis);
status = __nvoc_binapiConstruct(pThis, arg_pCallContext, arg_pParams);
if (status != NV_OK) goto __nvoc_ctor_BinaryApi_fail__init;
goto __nvoc_ctor_BinaryApi_exit; // Success
__nvoc_ctor_BinaryApi_fail__init:
__nvoc_dtor_GpuResource(&pThis->__nvoc_base_GpuResource);
__nvoc_ctor_BinaryApi_fail_GpuResource:
__nvoc_ctor_BinaryApi_exit:
return status;
}
static void __nvoc_init_funcTable_BinaryApi_1(BinaryApi *pThis) {
PORT_UNREFERENCED_VARIABLE(pThis);
pThis->__binapiControl__ = &binapiControl_IMPL;
pThis->__nvoc_base_GpuResource.__gpuresControl__ = &__nvoc_thunk_BinaryApi_gpuresControl;
pThis->__binapiShareCallback__ = &__nvoc_thunk_GpuResource_binapiShareCallback;
pThis->__binapiUnmap__ = &__nvoc_thunk_GpuResource_binapiUnmap;
pThis->__binapiGetMemInterMapParams__ = &__nvoc_thunk_RmResource_binapiGetMemInterMapParams;
pThis->__binapiGetMemoryMappingDescriptor__ = &__nvoc_thunk_RmResource_binapiGetMemoryMappingDescriptor;
pThis->__binapiGetMapAddrSpace__ = &__nvoc_thunk_GpuResource_binapiGetMapAddrSpace;
pThis->__binapiGetInternalObjectHandle__ = &__nvoc_thunk_GpuResource_binapiGetInternalObjectHandle;
pThis->__binapiControlFilter__ = &__nvoc_thunk_RsResource_binapiControlFilter;
pThis->__binapiAddAdditionalDependants__ = &__nvoc_thunk_RsResource_binapiAddAdditionalDependants;
pThis->__binapiGetRefCount__ = &__nvoc_thunk_RsResource_binapiGetRefCount;
pThis->__binapiCheckMemInterUnmap__ = &__nvoc_thunk_RmResource_binapiCheckMemInterUnmap;
pThis->__binapiMapTo__ = &__nvoc_thunk_RsResource_binapiMapTo;
pThis->__binapiControl_Prologue__ = &__nvoc_thunk_RmResource_binapiControl_Prologue;
pThis->__binapiGetRegBaseOffsetAndSize__ = &__nvoc_thunk_GpuResource_binapiGetRegBaseOffsetAndSize;
pThis->__binapiCanCopy__ = &__nvoc_thunk_RsResource_binapiCanCopy;
pThis->__binapiInternalControlForward__ = &__nvoc_thunk_GpuResource_binapiInternalControlForward;
pThis->__binapiPreDestruct__ = &__nvoc_thunk_RsResource_binapiPreDestruct;
pThis->__binapiUnmapFrom__ = &__nvoc_thunk_RsResource_binapiUnmapFrom;
pThis->__binapiControl_Epilogue__ = &__nvoc_thunk_RmResource_binapiControl_Epilogue;
pThis->__binapiControlLookup__ = &__nvoc_thunk_RsResource_binapiControlLookup;
pThis->__binapiMap__ = &__nvoc_thunk_GpuResource_binapiMap;
pThis->__binapiAccessCallback__ = &__nvoc_thunk_RmResource_binapiAccessCallback;
}
void __nvoc_init_funcTable_BinaryApi(BinaryApi *pThis) {
__nvoc_init_funcTable_BinaryApi_1(pThis);
}
void __nvoc_init_GpuResource(GpuResource*);
void __nvoc_init_BinaryApi(BinaryApi *pThis) {
pThis->__nvoc_pbase_BinaryApi = pThis;
pThis->__nvoc_pbase_Object = &pThis->__nvoc_base_GpuResource.__nvoc_base_RmResource.__nvoc_base_RsResource.__nvoc_base_Object;
pThis->__nvoc_pbase_RsResource = &pThis->__nvoc_base_GpuResource.__nvoc_base_RmResource.__nvoc_base_RsResource;
pThis->__nvoc_pbase_RmResourceCommon = &pThis->__nvoc_base_GpuResource.__nvoc_base_RmResource.__nvoc_base_RmResourceCommon;
pThis->__nvoc_pbase_RmResource = &pThis->__nvoc_base_GpuResource.__nvoc_base_RmResource;
pThis->__nvoc_pbase_GpuResource = &pThis->__nvoc_base_GpuResource;
__nvoc_init_GpuResource(&pThis->__nvoc_base_GpuResource);
__nvoc_init_funcTable_BinaryApi(pThis);
}
NV_STATUS __nvoc_objCreate_BinaryApi(BinaryApi **ppThis, Dynamic *pParent, NvU32 createFlags, struct CALL_CONTEXT * arg_pCallContext, struct RS_RES_ALLOC_PARAMS_INTERNAL * arg_pParams) {
NV_STATUS status;
Object *pParentObj;
BinaryApi *pThis;
pThis = portMemAllocNonPaged(sizeof(BinaryApi));
if (pThis == NULL) return NV_ERR_NO_MEMORY;
portMemSet(pThis, 0, sizeof(BinaryApi));
__nvoc_initRtti(staticCast(pThis, Dynamic), &__nvoc_class_def_BinaryApi);
if (pParent != NULL && !(createFlags & NVOC_OBJ_CREATE_FLAGS_PARENT_HALSPEC_ONLY))
{
pParentObj = dynamicCast(pParent, Object);
objAddChild(pParentObj, &pThis->__nvoc_base_GpuResource.__nvoc_base_RmResource.__nvoc_base_RsResource.__nvoc_base_Object);
}
else
{
pThis->__nvoc_base_GpuResource.__nvoc_base_RmResource.__nvoc_base_RsResource.__nvoc_base_Object.pParent = NULL;
}
__nvoc_init_BinaryApi(pThis);
status = __nvoc_ctor_BinaryApi(pThis, arg_pCallContext, arg_pParams);
if (status != NV_OK) goto __nvoc_objCreate_BinaryApi_cleanup;
*ppThis = pThis;
return NV_OK;
__nvoc_objCreate_BinaryApi_cleanup:
// do not call destructors here since the constructor already called them
portMemFree(pThis);
return status;
}
NV_STATUS __nvoc_objCreateDynamic_BinaryApi(BinaryApi **ppThis, Dynamic *pParent, NvU32 createFlags, va_list args) {
NV_STATUS status;
struct CALL_CONTEXT * arg_pCallContext = va_arg(args, struct CALL_CONTEXT *);
struct RS_RES_ALLOC_PARAMS_INTERNAL * arg_pParams = va_arg(args, struct RS_RES_ALLOC_PARAMS_INTERNAL *);
status = __nvoc_objCreate_BinaryApi(ppThis, pParent, createFlags, arg_pCallContext, arg_pParams);
return status;
}
#ifdef DEBUG
char __nvoc_class_id_uniqueness_check_0x1c0579 = 1;
#endif
extern const struct NVOC_CLASS_DEF __nvoc_class_def_BinaryApiPrivileged;
extern const struct NVOC_CLASS_DEF __nvoc_class_def_Object;
extern const struct NVOC_CLASS_DEF __nvoc_class_def_RsResource;
extern const struct NVOC_CLASS_DEF __nvoc_class_def_RmResourceCommon;
extern const struct NVOC_CLASS_DEF __nvoc_class_def_RmResource;
extern const struct NVOC_CLASS_DEF __nvoc_class_def_GpuResource;
extern const struct NVOC_CLASS_DEF __nvoc_class_def_BinaryApi;
void __nvoc_init_BinaryApiPrivileged(BinaryApiPrivileged*);
void __nvoc_init_funcTable_BinaryApiPrivileged(BinaryApiPrivileged*);
NV_STATUS __nvoc_ctor_BinaryApiPrivileged(BinaryApiPrivileged*, struct CALL_CONTEXT * arg_pCallContext, struct RS_RES_ALLOC_PARAMS_INTERNAL * arg_pParams);
void __nvoc_init_dataField_BinaryApiPrivileged(BinaryApiPrivileged*);
void __nvoc_dtor_BinaryApiPrivileged(BinaryApiPrivileged*);
extern const struct NVOC_EXPORT_INFO __nvoc_export_info_BinaryApiPrivileged;
static const struct NVOC_RTTI __nvoc_rtti_BinaryApiPrivileged_BinaryApiPrivileged = {
/*pClassDef=*/ &__nvoc_class_def_BinaryApiPrivileged,
/*dtor=*/ (NVOC_DYNAMIC_DTOR) &__nvoc_dtor_BinaryApiPrivileged,
/*offset=*/ 0,
};
static const struct NVOC_RTTI __nvoc_rtti_BinaryApiPrivileged_Object = {
/*pClassDef=*/ &__nvoc_class_def_Object,
/*dtor=*/ &__nvoc_destructFromBase,
/*offset=*/ NV_OFFSETOF(BinaryApiPrivileged, __nvoc_base_BinaryApi.__nvoc_base_GpuResource.__nvoc_base_RmResource.__nvoc_base_RsResource.__nvoc_base_Object),
};
static const struct NVOC_RTTI __nvoc_rtti_BinaryApiPrivileged_RsResource = {
/*pClassDef=*/ &__nvoc_class_def_RsResource,
/*dtor=*/ &__nvoc_destructFromBase,
/*offset=*/ NV_OFFSETOF(BinaryApiPrivileged, __nvoc_base_BinaryApi.__nvoc_base_GpuResource.__nvoc_base_RmResource.__nvoc_base_RsResource),
};
static const struct NVOC_RTTI __nvoc_rtti_BinaryApiPrivileged_RmResourceCommon = {
/*pClassDef=*/ &__nvoc_class_def_RmResourceCommon,
/*dtor=*/ &__nvoc_destructFromBase,
/*offset=*/ NV_OFFSETOF(BinaryApiPrivileged, __nvoc_base_BinaryApi.__nvoc_base_GpuResource.__nvoc_base_RmResource.__nvoc_base_RmResourceCommon),
};
static const struct NVOC_RTTI __nvoc_rtti_BinaryApiPrivileged_RmResource = {
/*pClassDef=*/ &__nvoc_class_def_RmResource,
/*dtor=*/ &__nvoc_destructFromBase,
/*offset=*/ NV_OFFSETOF(BinaryApiPrivileged, __nvoc_base_BinaryApi.__nvoc_base_GpuResource.__nvoc_base_RmResource),
};
static const struct NVOC_RTTI __nvoc_rtti_BinaryApiPrivileged_GpuResource = {
/*pClassDef=*/ &__nvoc_class_def_GpuResource,
/*dtor=*/ &__nvoc_destructFromBase,
/*offset=*/ NV_OFFSETOF(BinaryApiPrivileged, __nvoc_base_BinaryApi.__nvoc_base_GpuResource),
};
static const struct NVOC_RTTI __nvoc_rtti_BinaryApiPrivileged_BinaryApi = {
/*pClassDef=*/ &__nvoc_class_def_BinaryApi,
/*dtor=*/ &__nvoc_destructFromBase,
/*offset=*/ NV_OFFSETOF(BinaryApiPrivileged, __nvoc_base_BinaryApi),
};
static const struct NVOC_CASTINFO __nvoc_castinfo_BinaryApiPrivileged = {
/*numRelatives=*/ 7,
/*relatives=*/ {
&__nvoc_rtti_BinaryApiPrivileged_BinaryApiPrivileged,
&__nvoc_rtti_BinaryApiPrivileged_BinaryApi,
&__nvoc_rtti_BinaryApiPrivileged_GpuResource,
&__nvoc_rtti_BinaryApiPrivileged_RmResource,
&__nvoc_rtti_BinaryApiPrivileged_RmResourceCommon,
&__nvoc_rtti_BinaryApiPrivileged_RsResource,
&__nvoc_rtti_BinaryApiPrivileged_Object,
},
};
const struct NVOC_CLASS_DEF __nvoc_class_def_BinaryApiPrivileged =
{
/*classInfo=*/ {
/*size=*/ sizeof(BinaryApiPrivileged),
/*classId=*/ classId(BinaryApiPrivileged),
/*providerId=*/ &__nvoc_rtti_provider,
#if NV_PRINTF_STRINGS_ALLOWED
/*name=*/ "BinaryApiPrivileged",
#endif
},
/*objCreatefn=*/ (NVOC_DYNAMIC_OBJ_CREATE) &__nvoc_objCreateDynamic_BinaryApiPrivileged,
/*pCastInfo=*/ &__nvoc_castinfo_BinaryApiPrivileged,
/*pExportInfo=*/ &__nvoc_export_info_BinaryApiPrivileged
};
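// Thunks bridging the class hierarchy: the first adjusts a BinaryApi pointer
// down to BinaryApiPrivileged so control calls made through the base reach
// binapiprivControl; the rest adjust a BinaryApiPrivileged pointer up to the
// GpuResource/RmResource/RsResource base and forward to the inherited
// implementation.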
static NV_STATUS __nvoc_thunk_BinaryApiPrivileged_binapiControl(struct BinaryApi *pResource, struct CALL_CONTEXT *pCallContext, struct RS_RES_CONTROL_PARAMS_INTERNAL *pParams) {
return binapiprivControl((struct BinaryApiPrivileged *)(((unsigned char *)pResource) - __nvoc_rtti_BinaryApiPrivileged_BinaryApi.offset), pCallContext, pParams);
}
static NvBool __nvoc_thunk_GpuResource_binapiprivShareCallback(struct BinaryApiPrivileged *pGpuResource, struct RsClient *pInvokingClient, struct RsResourceRef *pParentRef, RS_SHARE_POLICY *pSharePolicy) {
return gpuresShareCallback((struct GpuResource *)(((unsigned char *)pGpuResource) + __nvoc_rtti_BinaryApiPrivileged_GpuResource.offset), pInvokingClient, pParentRef, pSharePolicy);
}
static NV_STATUS __nvoc_thunk_GpuResource_binapiprivUnmap(struct BinaryApiPrivileged *pGpuResource, struct CALL_CONTEXT *pCallContext, struct RsCpuMapping *pCpuMapping) {
return gpuresUnmap((struct GpuResource *)(((unsigned char *)pGpuResource) + __nvoc_rtti_BinaryApiPrivileged_GpuResource.offset), pCallContext, pCpuMapping);
}
static NV_STATUS __nvoc_thunk_RmResource_binapiprivGetMemInterMapParams(struct BinaryApiPrivileged *pRmResource, RMRES_MEM_INTER_MAP_PARAMS *pParams) {
return rmresGetMemInterMapParams((struct RmResource *)(((unsigned char *)pRmResource) + __nvoc_rtti_BinaryApiPrivileged_RmResource.offset), pParams);
}
static NV_STATUS __nvoc_thunk_RmResource_binapiprivGetMemoryMappingDescriptor(struct BinaryApiPrivileged *pRmResource, struct MEMORY_DESCRIPTOR **ppMemDesc) {
return rmresGetMemoryMappingDescriptor((struct RmResource *)(((unsigned char *)pRmResource) + __nvoc_rtti_BinaryApiPrivileged_RmResource.offset), ppMemDesc);
}
static NV_STATUS __nvoc_thunk_GpuResource_binapiprivGetMapAddrSpace(struct BinaryApiPrivileged *pGpuResource, struct CALL_CONTEXT *pCallContext, NvU32 mapFlags, NV_ADDRESS_SPACE *pAddrSpace) {
return gpuresGetMapAddrSpace((struct GpuResource *)(((unsigned char *)pGpuResource) + __nvoc_rtti_BinaryApiPrivileged_GpuResource.offset), pCallContext, mapFlags, pAddrSpace);
}
static NvHandle __nvoc_thunk_GpuResource_binapiprivGetInternalObjectHandle(struct BinaryApiPrivileged *pGpuResource) {
return gpuresGetInternalObjectHandle((struct GpuResource *)(((unsigned char *)pGpuResource) + __nvoc_rtti_BinaryApiPrivileged_GpuResource.offset));
}
static NV_STATUS __nvoc_thunk_RsResource_binapiprivControlFilter(struct BinaryApiPrivileged *pResource, struct CALL_CONTEXT *pCallContext, struct RS_RES_CONTROL_PARAMS_INTERNAL *pParams) {
return resControlFilter((struct RsResource *)(((unsigned char *)pResource) + __nvoc_rtti_BinaryApiPrivileged_RsResource.offset), pCallContext, pParams);
}
static void __nvoc_thunk_RsResource_binapiprivAddAdditionalDependants(struct RsClient *pClient, struct BinaryApiPrivileged *pResource, RsResourceRef *pReference) {
resAddAdditionalDependants(pClient, (struct RsResource *)(((unsigned char *)pResource) + __nvoc_rtti_BinaryApiPrivileged_RsResource.offset), pReference);
}
static NvU32 __nvoc_thunk_RsResource_binapiprivGetRefCount(struct BinaryApiPrivileged *pResource) {
return resGetRefCount((struct RsResource *)(((unsigned char *)pResource) + __nvoc_rtti_BinaryApiPrivileged_RsResource.offset));
}
static NV_STATUS __nvoc_thunk_RmResource_binapiprivCheckMemInterUnmap(struct BinaryApiPrivileged *pRmResource, NvBool bSubdeviceHandleProvided) {
return rmresCheckMemInterUnmap((struct RmResource *)(((unsigned char *)pRmResource) + __nvoc_rtti_BinaryApiPrivileged_RmResource.offset), bSubdeviceHandleProvided);
}
static NV_STATUS __nvoc_thunk_RsResource_binapiprivMapTo(struct BinaryApiPrivileged *pResource, RS_RES_MAP_TO_PARAMS *pParams) {
return resMapTo((struct RsResource *)(((unsigned char *)pResource) + __nvoc_rtti_BinaryApiPrivileged_RsResource.offset), pParams);
}
static NV_STATUS __nvoc_thunk_RmResource_binapiprivControl_Prologue(struct BinaryApiPrivileged *pResource, struct CALL_CONTEXT *pCallContext, struct RS_RES_CONTROL_PARAMS_INTERNAL *pParams) {
return rmresControl_Prologue((struct RmResource *)(((unsigned char *)pResource) + __nvoc_rtti_BinaryApiPrivileged_RmResource.offset), pCallContext, pParams);
}
static NV_STATUS __nvoc_thunk_GpuResource_binapiprivGetRegBaseOffsetAndSize(struct BinaryApiPrivileged *pGpuResource, struct OBJGPU *pGpu, NvU32 *pOffset, NvU32 *pSize) {
return gpuresGetRegBaseOffsetAndSize((struct GpuResource *)(((unsigned char *)pGpuResource) + __nvoc_rtti_BinaryApiPrivileged_GpuResource.offset), pGpu, pOffset, pSize);
}
static NvBool __nvoc_thunk_RsResource_binapiprivCanCopy(struct BinaryApiPrivileged *pResource) {
return resCanCopy((struct RsResource *)(((unsigned char *)pResource) + __nvoc_rtti_BinaryApiPrivileged_RsResource.offset));
}
static NV_STATUS __nvoc_thunk_GpuResource_binapiprivInternalControlForward(struct BinaryApiPrivileged *pGpuResource, NvU32 command, void *pParams, NvU32 size) {
return gpuresInternalControlForward((struct GpuResource *)(((unsigned char *)pGpuResource) + __nvoc_rtti_BinaryApiPrivileged_GpuResource.offset), command, pParams, size);
}
static void __nvoc_thunk_RsResource_binapiprivPreDestruct(struct BinaryApiPrivileged *pResource) {
resPreDestruct((struct RsResource *)(((unsigned char *)pResource) + __nvoc_rtti_BinaryApiPrivileged_RsResource.offset));
}
static NV_STATUS __nvoc_thunk_RsResource_binapiprivUnmapFrom(struct BinaryApiPrivileged *pResource, RS_RES_UNMAP_FROM_PARAMS *pParams) {
return resUnmapFrom((struct RsResource *)(((unsigned char *)pResource) + __nvoc_rtti_BinaryApiPrivileged_RsResource.offset), pParams);
}
static void __nvoc_thunk_RmResource_binapiprivControl_Epilogue(struct BinaryApiPrivileged *pResource, struct CALL_CONTEXT *pCallContext, struct RS_RES_CONTROL_PARAMS_INTERNAL *pParams) {
rmresControl_Epilogue((struct RmResource *)(((unsigned char *)pResource) + __nvoc_rtti_BinaryApiPrivileged_RmResource.offset), pCallContext, pParams);
}
static NV_STATUS __nvoc_thunk_RsResource_binapiprivControlLookup(struct BinaryApiPrivileged *pResource, struct RS_RES_CONTROL_PARAMS_INTERNAL *pParams, const struct NVOC_EXPORTED_METHOD_DEF **ppEntry) {
return resControlLookup((struct RsResource *)(((unsigned char *)pResource) + __nvoc_rtti_BinaryApiPrivileged_RsResource.offset), pParams, ppEntry);
}
static NV_STATUS __nvoc_thunk_GpuResource_binapiprivMap(struct BinaryApiPrivileged *pGpuResource, struct CALL_CONTEXT *pCallContext, struct RS_CPU_MAP_PARAMS *pParams, struct RsCpuMapping *pCpuMapping) {
return gpuresMap((struct GpuResource *)(((unsigned char *)pGpuResource) + __nvoc_rtti_BinaryApiPrivileged_GpuResource.offset), pCallContext, pParams, pCpuMapping);
}
static NvBool __nvoc_thunk_RmResource_binapiprivAccessCallback(struct BinaryApiPrivileged *pResource, struct RsClient *pInvokingClient, void *pAllocParams, RsAccessRight accessRight) {
return rmresAccessCallback((struct RmResource *)(((unsigned char *)pResource) + __nvoc_rtti_BinaryApiPrivileged_RmResource.offset), pInvokingClient, pAllocParams, accessRight);
}
const struct NVOC_EXPORT_INFO __nvoc_export_info_BinaryApiPrivileged =
{
/*numEntries=*/ 0,
/*pExportEntries=*/ 0
};
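// Constructor/destructor chaining: the destructor tears down the BinaryApi
// base; the constructor runs the base constructor first, then initializes the
// data fields, then calls __nvoc_binapiprivConstruct, unwinding the base on
// failure.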
void __nvoc_dtor_BinaryApi(BinaryApi*);
void __nvoc_dtor_BinaryApiPrivileged(BinaryApiPrivileged *pThis) {
__nvoc_dtor_BinaryApi(&pThis->__nvoc_base_BinaryApi);
PORT_UNREFERENCED_VARIABLE(pThis);
}
void __nvoc_init_dataField_BinaryApiPrivileged(BinaryApiPrivileged *pThis) {
PORT_UNREFERENCED_VARIABLE(pThis);
}
NV_STATUS __nvoc_ctor_BinaryApi(BinaryApi* , struct CALL_CONTEXT *, struct RS_RES_ALLOC_PARAMS_INTERNAL *);
NV_STATUS __nvoc_ctor_BinaryApiPrivileged(BinaryApiPrivileged *pThis, struct CALL_CONTEXT * arg_pCallContext, struct RS_RES_ALLOC_PARAMS_INTERNAL * arg_pParams) {
NV_STATUS status = NV_OK;
status = __nvoc_ctor_BinaryApi(&pThis->__nvoc_base_BinaryApi, arg_pCallContext, arg_pParams);
if (status != NV_OK) goto __nvoc_ctor_BinaryApiPrivileged_fail_BinaryApi;
__nvoc_init_dataField_BinaryApiPrivileged(pThis);
status = __nvoc_binapiprivConstruct(pThis, arg_pCallContext, arg_pParams);
if (status != NV_OK) goto __nvoc_ctor_BinaryApiPrivileged_fail__init;
goto __nvoc_ctor_BinaryApiPrivileged_exit; // Success
__nvoc_ctor_BinaryApiPrivileged_fail__init:
__nvoc_dtor_BinaryApi(&pThis->__nvoc_base_BinaryApi);
__nvoc_ctor_BinaryApiPrivileged_fail_BinaryApi:
__nvoc_ctor_BinaryApiPrivileged_exit:
return status;
}
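// Virtual function table setup: binapiprivControl points at this class's own
// implementation, and the base BinaryApi control pointer is rerouted through a
// thunk so calls made on the base also land there; every other entry forwards
// to the inherited GpuResource/RmResource/RsResource behavior via the thunks
// above.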
static void __nvoc_init_funcTable_BinaryApiPrivileged_1(BinaryApiPrivileged *pThis) {
PORT_UNREFERENCED_VARIABLE(pThis);
pThis->__binapiprivControl__ = &binapiprivControl_IMPL;
pThis->__nvoc_base_BinaryApi.__binapiControl__ = &__nvoc_thunk_BinaryApiPrivileged_binapiControl;
pThis->__binapiprivShareCallback__ = &__nvoc_thunk_GpuResource_binapiprivShareCallback;
pThis->__binapiprivUnmap__ = &__nvoc_thunk_GpuResource_binapiprivUnmap;
pThis->__binapiprivGetMemInterMapParams__ = &__nvoc_thunk_RmResource_binapiprivGetMemInterMapParams;
pThis->__binapiprivGetMemoryMappingDescriptor__ = &__nvoc_thunk_RmResource_binapiprivGetMemoryMappingDescriptor;
pThis->__binapiprivGetMapAddrSpace__ = &__nvoc_thunk_GpuResource_binapiprivGetMapAddrSpace;
pThis->__binapiprivGetInternalObjectHandle__ = &__nvoc_thunk_GpuResource_binapiprivGetInternalObjectHandle;
pThis->__binapiprivControlFilter__ = &__nvoc_thunk_RsResource_binapiprivControlFilter;
pThis->__binapiprivAddAdditionalDependants__ = &__nvoc_thunk_RsResource_binapiprivAddAdditionalDependants;
pThis->__binapiprivGetRefCount__ = &__nvoc_thunk_RsResource_binapiprivGetRefCount;
pThis->__binapiprivCheckMemInterUnmap__ = &__nvoc_thunk_RmResource_binapiprivCheckMemInterUnmap;
pThis->__binapiprivMapTo__ = &__nvoc_thunk_RsResource_binapiprivMapTo;
pThis->__binapiprivControl_Prologue__ = &__nvoc_thunk_RmResource_binapiprivControl_Prologue;
pThis->__binapiprivGetRegBaseOffsetAndSize__ = &__nvoc_thunk_GpuResource_binapiprivGetRegBaseOffsetAndSize;
pThis->__binapiprivCanCopy__ = &__nvoc_thunk_RsResource_binapiprivCanCopy;
pThis->__binapiprivInternalControlForward__ = &__nvoc_thunk_GpuResource_binapiprivInternalControlForward;
pThis->__binapiprivPreDestruct__ = &__nvoc_thunk_RsResource_binapiprivPreDestruct;
pThis->__binapiprivUnmapFrom__ = &__nvoc_thunk_RsResource_binapiprivUnmapFrom;
pThis->__binapiprivControl_Epilogue__ = &__nvoc_thunk_RmResource_binapiprivControl_Epilogue;
pThis->__binapiprivControlLookup__ = &__nvoc_thunk_RsResource_binapiprivControlLookup;
pThis->__binapiprivMap__ = &__nvoc_thunk_GpuResource_binapiprivMap;
pThis->__binapiprivAccessCallback__ = &__nvoc_thunk_RmResource_binapiprivAccessCallback;
}
void __nvoc_init_funcTable_BinaryApiPrivileged(BinaryApiPrivileged *pThis) {
__nvoc_init_funcTable_BinaryApiPrivileged_1(pThis);
}
void __nvoc_init_BinaryApi(BinaryApi*);
void __nvoc_init_BinaryApiPrivileged(BinaryApiPrivileged *pThis) {
pThis->__nvoc_pbase_BinaryApiPrivileged = pThis;
pThis->__nvoc_pbase_Object = &pThis->__nvoc_base_BinaryApi.__nvoc_base_GpuResource.__nvoc_base_RmResource.__nvoc_base_RsResource.__nvoc_base_Object;
pThis->__nvoc_pbase_RsResource = &pThis->__nvoc_base_BinaryApi.__nvoc_base_GpuResource.__nvoc_base_RmResource.__nvoc_base_RsResource;
pThis->__nvoc_pbase_RmResourceCommon = &pThis->__nvoc_base_BinaryApi.__nvoc_base_GpuResource.__nvoc_base_RmResource.__nvoc_base_RmResourceCommon;
pThis->__nvoc_pbase_RmResource = &pThis->__nvoc_base_BinaryApi.__nvoc_base_GpuResource.__nvoc_base_RmResource;
pThis->__nvoc_pbase_GpuResource = &pThis->__nvoc_base_BinaryApi.__nvoc_base_GpuResource;
pThis->__nvoc_pbase_BinaryApi = &pThis->__nvoc_base_BinaryApi;
__nvoc_init_BinaryApi(&pThis->__nvoc_base_BinaryApi);
__nvoc_init_funcTable_BinaryApiPrivileged(pThis);
}
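// Object creation: allocate and zero the object, install RTTI, attach it to
// the parent (unless the caller only wants the parent's halspec), then run
// init and the constructor chain. On failure the constructor has already
// unwound, so cleanup only frees the memory.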
NV_STATUS __nvoc_objCreate_BinaryApiPrivileged(BinaryApiPrivileged **ppThis, Dynamic *pParent, NvU32 createFlags, struct CALL_CONTEXT * arg_pCallContext, struct RS_RES_ALLOC_PARAMS_INTERNAL * arg_pParams) {
NV_STATUS status;
Object *pParentObj;
BinaryApiPrivileged *pThis;
pThis = portMemAllocNonPaged(sizeof(BinaryApiPrivileged));
if (pThis == NULL) return NV_ERR_NO_MEMORY;
portMemSet(pThis, 0, sizeof(BinaryApiPrivileged));
__nvoc_initRtti(staticCast(pThis, Dynamic), &__nvoc_class_def_BinaryApiPrivileged);
if (pParent != NULL && !(createFlags & NVOC_OBJ_CREATE_FLAGS_PARENT_HALSPEC_ONLY))
{
pParentObj = dynamicCast(pParent, Object);
objAddChild(pParentObj, &pThis->__nvoc_base_BinaryApi.__nvoc_base_GpuResource.__nvoc_base_RmResource.__nvoc_base_RsResource.__nvoc_base_Object);
}
else
{
pThis->__nvoc_base_BinaryApi.__nvoc_base_GpuResource.__nvoc_base_RmResource.__nvoc_base_RsResource.__nvoc_base_Object.pParent = NULL;
}
__nvoc_init_BinaryApiPrivileged(pThis);
status = __nvoc_ctor_BinaryApiPrivileged(pThis, arg_pCallContext, arg_pParams);
if (status != NV_OK) goto __nvoc_objCreate_BinaryApiPrivileged_cleanup;
*ppThis = pThis;
return NV_OK;
__nvoc_objCreate_BinaryApiPrivileged_cleanup:
// do not call destructors here since the constructor already called them
portMemFree(pThis);
return status;
}
NV_STATUS __nvoc_objCreateDynamic_BinaryApiPrivileged(BinaryApiPrivileged **ppThis, Dynamic *pParent, NvU32 createFlags, va_list args) {
NV_STATUS status;
struct CALL_CONTEXT * arg_pCallContext = va_arg(args, struct CALL_CONTEXT *);
struct RS_RES_ALLOC_PARAMS_INTERNAL * arg_pParams = va_arg(args, struct RS_RES_ALLOC_PARAMS_INTERNAL *);
status = __nvoc_objCreate_BinaryApiPrivileged(ppThis, pParent, createFlags, arg_pCallContext, arg_pParams);
return status;
}



@@ -0,0 +1,416 @@
#ifndef _G_BINARY_API_NVOC_H_
#define _G_BINARY_API_NVOC_H_
#include "nvoc/runtime.h"
#ifdef __cplusplus
extern "C" {
#endif
/*
* SPDX-FileCopyrightText: Copyright (c) 2021-2021 NVIDIA CORPORATION & AFFILIATES. All rights reserved.
* SPDX-License-Identifier: MIT
*
* Permission is hereby granted, free of charge, to any person obtaining a
* copy of this software and associated documentation files (the "Software"),
* to deal in the Software without restriction, including without limitation
* the rights to use, copy, modify, merge, publish, distribute, sublicense,
* and/or sell copies of the Software, and to permit persons to whom the
* Software is furnished to do so, subject to the following conditions:
*
* The above copyright notice and this permission notice shall be included in
* all copies or substantial portions of the Software.
*
* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
* IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
* FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL
* THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
* LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
* FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER
* DEALINGS IN THE SOFTWARE.
*/
#include "g_binary_api_nvoc.h"
#ifndef BINARY_API_H
#define BINARY_API_H
#include "core/core.h"
#include "rmapi/resource.h"
#include "gpu/gpu_resource.h"
#include "resserv/rs_resource.h"
#include "rmapi/control.h"
#ifdef NVOC_BINARY_API_H_PRIVATE_ACCESS_ALLOWED
#define PRIVATE_FIELD(x) x
#else
#define PRIVATE_FIELD(x) NVOC_PRIVATE_FIELD(x)
#endif
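// NVOC class layout for BinaryApi: RTTI pointer, the embedded GpuResource base,
// cached pointers to each ancestor (Object, RsResource, RmResourceCommon,
// RmResource, GpuResource), then one function pointer per virtual method.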
struct BinaryApi {
const struct NVOC_RTTI *__nvoc_rtti;
struct GpuResource __nvoc_base_GpuResource;
struct Object *__nvoc_pbase_Object;
struct RsResource *__nvoc_pbase_RsResource;
struct RmResourceCommon *__nvoc_pbase_RmResourceCommon;
struct RmResource *__nvoc_pbase_RmResource;
struct GpuResource *__nvoc_pbase_GpuResource;
struct BinaryApi *__nvoc_pbase_BinaryApi;
NV_STATUS (*__binapiControl__)(struct BinaryApi *, struct CALL_CONTEXT *, struct RS_RES_CONTROL_PARAMS_INTERNAL *);
NvBool (*__binapiShareCallback__)(struct BinaryApi *, struct RsClient *, struct RsResourceRef *, RS_SHARE_POLICY *);
NV_STATUS (*__binapiUnmap__)(struct BinaryApi *, struct CALL_CONTEXT *, struct RsCpuMapping *);
NV_STATUS (*__binapiGetMemInterMapParams__)(struct BinaryApi *, RMRES_MEM_INTER_MAP_PARAMS *);
NV_STATUS (*__binapiGetMemoryMappingDescriptor__)(struct BinaryApi *, struct MEMORY_DESCRIPTOR **);
NV_STATUS (*__binapiGetMapAddrSpace__)(struct BinaryApi *, struct CALL_CONTEXT *, NvU32, NV_ADDRESS_SPACE *);
NvHandle (*__binapiGetInternalObjectHandle__)(struct BinaryApi *);
NV_STATUS (*__binapiControlFilter__)(struct BinaryApi *, struct CALL_CONTEXT *, struct RS_RES_CONTROL_PARAMS_INTERNAL *);
void (*__binapiAddAdditionalDependants__)(struct RsClient *, struct BinaryApi *, RsResourceRef *);
NvU32 (*__binapiGetRefCount__)(struct BinaryApi *);
NV_STATUS (*__binapiCheckMemInterUnmap__)(struct BinaryApi *, NvBool);
NV_STATUS (*__binapiMapTo__)(struct BinaryApi *, RS_RES_MAP_TO_PARAMS *);
NV_STATUS (*__binapiControl_Prologue__)(struct BinaryApi *, struct CALL_CONTEXT *, struct RS_RES_CONTROL_PARAMS_INTERNAL *);
NV_STATUS (*__binapiGetRegBaseOffsetAndSize__)(struct BinaryApi *, struct OBJGPU *, NvU32 *, NvU32 *);
NvBool (*__binapiCanCopy__)(struct BinaryApi *);
NV_STATUS (*__binapiInternalControlForward__)(struct BinaryApi *, NvU32, void *, NvU32);
void (*__binapiPreDestruct__)(struct BinaryApi *);
NV_STATUS (*__binapiUnmapFrom__)(struct BinaryApi *, RS_RES_UNMAP_FROM_PARAMS *);
void (*__binapiControl_Epilogue__)(struct BinaryApi *, struct CALL_CONTEXT *, struct RS_RES_CONTROL_PARAMS_INTERNAL *);
NV_STATUS (*__binapiControlLookup__)(struct BinaryApi *, struct RS_RES_CONTROL_PARAMS_INTERNAL *, const struct NVOC_EXPORTED_METHOD_DEF **);
NV_STATUS (*__binapiMap__)(struct BinaryApi *, struct CALL_CONTEXT *, struct RS_CPU_MAP_PARAMS *, struct RsCpuMapping *);
NvBool (*__binapiAccessCallback__)(struct BinaryApi *, struct RsClient *, void *, RsAccessRight);
};
#ifndef __NVOC_CLASS_BinaryApi_TYPEDEF__
#define __NVOC_CLASS_BinaryApi_TYPEDEF__
typedef struct BinaryApi BinaryApi;
#endif /* __NVOC_CLASS_BinaryApi_TYPEDEF__ */
#ifndef __nvoc_class_id_BinaryApi
#define __nvoc_class_id_BinaryApi 0xb7a47c
#endif /* __nvoc_class_id_BinaryApi */
extern const struct NVOC_CLASS_DEF __nvoc_class_def_BinaryApi;
#define __staticCast_BinaryApi(pThis) \
((pThis)->__nvoc_pbase_BinaryApi)
#ifdef __nvoc_binary_api_h_disabled
#define __dynamicCast_BinaryApi(pThis) ((BinaryApi*)NULL)
#else //__nvoc_binary_api_h_disabled
#define __dynamicCast_BinaryApi(pThis) \
((BinaryApi*)__nvoc_dynamicCast(staticCast((pThis), Dynamic), classInfo(BinaryApi)))
#endif //__nvoc_binary_api_h_disabled
NV_STATUS __nvoc_objCreateDynamic_BinaryApi(BinaryApi**, Dynamic*, NvU32, va_list);
NV_STATUS __nvoc_objCreate_BinaryApi(BinaryApi**, Dynamic*, NvU32, struct CALL_CONTEXT * arg_pCallContext, struct RS_RES_ALLOC_PARAMS_INTERNAL * arg_pParams);
#define __objCreate_BinaryApi(ppNewObj, pParent, createFlags, arg_pCallContext, arg_pParams) \
__nvoc_objCreate_BinaryApi((ppNewObj), staticCast((pParent), Dynamic), (createFlags), arg_pCallContext, arg_pParams)
#define binapiControl(pResource, pCallContext, pParams) binapiControl_DISPATCH(pResource, pCallContext, pParams)
#define binapiShareCallback(pGpuResource, pInvokingClient, pParentRef, pSharePolicy) binapiShareCallback_DISPATCH(pGpuResource, pInvokingClient, pParentRef, pSharePolicy)
#define binapiUnmap(pGpuResource, pCallContext, pCpuMapping) binapiUnmap_DISPATCH(pGpuResource, pCallContext, pCpuMapping)
#define binapiGetMemInterMapParams(pRmResource, pParams) binapiGetMemInterMapParams_DISPATCH(pRmResource, pParams)
#define binapiGetMemoryMappingDescriptor(pRmResource, ppMemDesc) binapiGetMemoryMappingDescriptor_DISPATCH(pRmResource, ppMemDesc)
#define binapiGetMapAddrSpace(pGpuResource, pCallContext, mapFlags, pAddrSpace) binapiGetMapAddrSpace_DISPATCH(pGpuResource, pCallContext, mapFlags, pAddrSpace)
#define binapiGetInternalObjectHandle(pGpuResource) binapiGetInternalObjectHandle_DISPATCH(pGpuResource)
#define binapiControlFilter(pResource, pCallContext, pParams) binapiControlFilter_DISPATCH(pResource, pCallContext, pParams)
#define binapiAddAdditionalDependants(pClient, pResource, pReference) binapiAddAdditionalDependants_DISPATCH(pClient, pResource, pReference)
#define binapiGetRefCount(pResource) binapiGetRefCount_DISPATCH(pResource)
#define binapiCheckMemInterUnmap(pRmResource, bSubdeviceHandleProvided) binapiCheckMemInterUnmap_DISPATCH(pRmResource, bSubdeviceHandleProvided)
#define binapiMapTo(pResource, pParams) binapiMapTo_DISPATCH(pResource, pParams)
#define binapiControl_Prologue(pResource, pCallContext, pParams) binapiControl_Prologue_DISPATCH(pResource, pCallContext, pParams)
#define binapiGetRegBaseOffsetAndSize(pGpuResource, pGpu, pOffset, pSize) binapiGetRegBaseOffsetAndSize_DISPATCH(pGpuResource, pGpu, pOffset, pSize)
#define binapiCanCopy(pResource) binapiCanCopy_DISPATCH(pResource)
#define binapiInternalControlForward(pGpuResource, command, pParams, size) binapiInternalControlForward_DISPATCH(pGpuResource, command, pParams, size)
#define binapiPreDestruct(pResource) binapiPreDestruct_DISPATCH(pResource)
#define binapiUnmapFrom(pResource, pParams) binapiUnmapFrom_DISPATCH(pResource, pParams)
#define binapiControl_Epilogue(pResource, pCallContext, pParams) binapiControl_Epilogue_DISPATCH(pResource, pCallContext, pParams)
#define binapiControlLookup(pResource, pParams, ppEntry) binapiControlLookup_DISPATCH(pResource, pParams, ppEntry)
#define binapiMap(pGpuResource, pCallContext, pParams, pCpuMapping) binapiMap_DISPATCH(pGpuResource, pCallContext, pParams, pCpuMapping)
#define binapiAccessCallback(pResource, pInvokingClient, pAllocParams, accessRight) binapiAccessCallback_DISPATCH(pResource, pInvokingClient, pAllocParams, accessRight)
NV_STATUS binapiControl_IMPL(struct BinaryApi *pResource, struct CALL_CONTEXT *pCallContext, struct RS_RES_CONTROL_PARAMS_INTERNAL *pParams);
static inline NV_STATUS binapiControl_DISPATCH(struct BinaryApi *pResource, struct CALL_CONTEXT *pCallContext, struct RS_RES_CONTROL_PARAMS_INTERNAL *pParams) {
return pResource->__binapiControl__(pResource, pCallContext, pParams);
}
static inline NvBool binapiShareCallback_DISPATCH(struct BinaryApi *pGpuResource, struct RsClient *pInvokingClient, struct RsResourceRef *pParentRef, RS_SHARE_POLICY *pSharePolicy) {
return pGpuResource->__binapiShareCallback__(pGpuResource, pInvokingClient, pParentRef, pSharePolicy);
}
static inline NV_STATUS binapiUnmap_DISPATCH(struct BinaryApi *pGpuResource, struct CALL_CONTEXT *pCallContext, struct RsCpuMapping *pCpuMapping) {
return pGpuResource->__binapiUnmap__(pGpuResource, pCallContext, pCpuMapping);
}
static inline NV_STATUS binapiGetMemInterMapParams_DISPATCH(struct BinaryApi *pRmResource, RMRES_MEM_INTER_MAP_PARAMS *pParams) {
return pRmResource->__binapiGetMemInterMapParams__(pRmResource, pParams);
}
static inline NV_STATUS binapiGetMemoryMappingDescriptor_DISPATCH(struct BinaryApi *pRmResource, struct MEMORY_DESCRIPTOR **ppMemDesc) {
return pRmResource->__binapiGetMemoryMappingDescriptor__(pRmResource, ppMemDesc);
}
static inline NV_STATUS binapiGetMapAddrSpace_DISPATCH(struct BinaryApi *pGpuResource, struct CALL_CONTEXT *pCallContext, NvU32 mapFlags, NV_ADDRESS_SPACE *pAddrSpace) {
return pGpuResource->__binapiGetMapAddrSpace__(pGpuResource, pCallContext, mapFlags, pAddrSpace);
}
static inline NvHandle binapiGetInternalObjectHandle_DISPATCH(struct BinaryApi *pGpuResource) {
return pGpuResource->__binapiGetInternalObjectHandle__(pGpuResource);
}
static inline NV_STATUS binapiControlFilter_DISPATCH(struct BinaryApi *pResource, struct CALL_CONTEXT *pCallContext, struct RS_RES_CONTROL_PARAMS_INTERNAL *pParams) {
return pResource->__binapiControlFilter__(pResource, pCallContext, pParams);
}
static inline void binapiAddAdditionalDependants_DISPATCH(struct RsClient *pClient, struct BinaryApi *pResource, RsResourceRef *pReference) {
pResource->__binapiAddAdditionalDependants__(pClient, pResource, pReference);
}
static inline NvU32 binapiGetRefCount_DISPATCH(struct BinaryApi *pResource) {
return pResource->__binapiGetRefCount__(pResource);
}
static inline NV_STATUS binapiCheckMemInterUnmap_DISPATCH(struct BinaryApi *pRmResource, NvBool bSubdeviceHandleProvided) {
return pRmResource->__binapiCheckMemInterUnmap__(pRmResource, bSubdeviceHandleProvided);
}
static inline NV_STATUS binapiMapTo_DISPATCH(struct BinaryApi *pResource, RS_RES_MAP_TO_PARAMS *pParams) {
return pResource->__binapiMapTo__(pResource, pParams);
}
static inline NV_STATUS binapiControl_Prologue_DISPATCH(struct BinaryApi *pResource, struct CALL_CONTEXT *pCallContext, struct RS_RES_CONTROL_PARAMS_INTERNAL *pParams) {
return pResource->__binapiControl_Prologue__(pResource, pCallContext, pParams);
}
static inline NV_STATUS binapiGetRegBaseOffsetAndSize_DISPATCH(struct BinaryApi *pGpuResource, struct OBJGPU *pGpu, NvU32 *pOffset, NvU32 *pSize) {
return pGpuResource->__binapiGetRegBaseOffsetAndSize__(pGpuResource, pGpu, pOffset, pSize);
}
static inline NvBool binapiCanCopy_DISPATCH(struct BinaryApi *pResource) {
return pResource->__binapiCanCopy__(pResource);
}
static inline NV_STATUS binapiInternalControlForward_DISPATCH(struct BinaryApi *pGpuResource, NvU32 command, void *pParams, NvU32 size) {
return pGpuResource->__binapiInternalControlForward__(pGpuResource, command, pParams, size);
}
static inline void binapiPreDestruct_DISPATCH(struct BinaryApi *pResource) {
pResource->__binapiPreDestruct__(pResource);
}
static inline NV_STATUS binapiUnmapFrom_DISPATCH(struct BinaryApi *pResource, RS_RES_UNMAP_FROM_PARAMS *pParams) {
return pResource->__binapiUnmapFrom__(pResource, pParams);
}
static inline void binapiControl_Epilogue_DISPATCH(struct BinaryApi *pResource, struct CALL_CONTEXT *pCallContext, struct RS_RES_CONTROL_PARAMS_INTERNAL *pParams) {
pResource->__binapiControl_Epilogue__(pResource, pCallContext, pParams);
}
static inline NV_STATUS binapiControlLookup_DISPATCH(struct BinaryApi *pResource, struct RS_RES_CONTROL_PARAMS_INTERNAL *pParams, const struct NVOC_EXPORTED_METHOD_DEF **ppEntry) {
return pResource->__binapiControlLookup__(pResource, pParams, ppEntry);
}
static inline NV_STATUS binapiMap_DISPATCH(struct BinaryApi *pGpuResource, struct CALL_CONTEXT *pCallContext, struct RS_CPU_MAP_PARAMS *pParams, struct RsCpuMapping *pCpuMapping) {
return pGpuResource->__binapiMap__(pGpuResource, pCallContext, pParams, pCpuMapping);
}
static inline NvBool binapiAccessCallback_DISPATCH(struct BinaryApi *pResource, struct RsClient *pInvokingClient, void *pAllocParams, RsAccessRight accessRight) {
return pResource->__binapiAccessCallback__(pResource, pInvokingClient, pAllocParams, accessRight);
}
NV_STATUS binapiConstruct_IMPL(struct BinaryApi *arg_pResource, struct CALL_CONTEXT *arg_pCallContext, struct RS_RES_ALLOC_PARAMS_INTERNAL *arg_pParams);
#define __nvoc_binapiConstruct(arg_pResource, arg_pCallContext, arg_pParams) binapiConstruct_IMPL(arg_pResource, arg_pCallContext, arg_pParams)
#undef PRIVATE_FIELD
#ifdef NVOC_BINARY_API_H_PRIVATE_ACCESS_ALLOWED
#define PRIVATE_FIELD(x) x
#else
#define PRIVATE_FIELD(x) NVOC_PRIVATE_FIELD(x)
#endif
struct BinaryApiPrivileged {
const struct NVOC_RTTI *__nvoc_rtti;
struct BinaryApi __nvoc_base_BinaryApi;
struct Object *__nvoc_pbase_Object;
struct RsResource *__nvoc_pbase_RsResource;
struct RmResourceCommon *__nvoc_pbase_RmResourceCommon;
struct RmResource *__nvoc_pbase_RmResource;
struct GpuResource *__nvoc_pbase_GpuResource;
struct BinaryApi *__nvoc_pbase_BinaryApi;
struct BinaryApiPrivileged *__nvoc_pbase_BinaryApiPrivileged;
NV_STATUS (*__binapiprivControl__)(struct BinaryApiPrivileged *, struct CALL_CONTEXT *, struct RS_RES_CONTROL_PARAMS_INTERNAL *);
NvBool (*__binapiprivShareCallback__)(struct BinaryApiPrivileged *, struct RsClient *, struct RsResourceRef *, RS_SHARE_POLICY *);
NV_STATUS (*__binapiprivUnmap__)(struct BinaryApiPrivileged *, struct CALL_CONTEXT *, struct RsCpuMapping *);
NV_STATUS (*__binapiprivGetMemInterMapParams__)(struct BinaryApiPrivileged *, RMRES_MEM_INTER_MAP_PARAMS *);
NV_STATUS (*__binapiprivGetMemoryMappingDescriptor__)(struct BinaryApiPrivileged *, struct MEMORY_DESCRIPTOR **);
NV_STATUS (*__binapiprivGetMapAddrSpace__)(struct BinaryApiPrivileged *, struct CALL_CONTEXT *, NvU32, NV_ADDRESS_SPACE *);
NvHandle (*__binapiprivGetInternalObjectHandle__)(struct BinaryApiPrivileged *);
NV_STATUS (*__binapiprivControlFilter__)(struct BinaryApiPrivileged *, struct CALL_CONTEXT *, struct RS_RES_CONTROL_PARAMS_INTERNAL *);
void (*__binapiprivAddAdditionalDependants__)(struct RsClient *, struct BinaryApiPrivileged *, RsResourceRef *);
NvU32 (*__binapiprivGetRefCount__)(struct BinaryApiPrivileged *);
NV_STATUS (*__binapiprivCheckMemInterUnmap__)(struct BinaryApiPrivileged *, NvBool);
NV_STATUS (*__binapiprivMapTo__)(struct BinaryApiPrivileged *, RS_RES_MAP_TO_PARAMS *);
NV_STATUS (*__binapiprivControl_Prologue__)(struct BinaryApiPrivileged *, struct CALL_CONTEXT *, struct RS_RES_CONTROL_PARAMS_INTERNAL *);
NV_STATUS (*__binapiprivGetRegBaseOffsetAndSize__)(struct BinaryApiPrivileged *, struct OBJGPU *, NvU32 *, NvU32 *);
NvBool (*__binapiprivCanCopy__)(struct BinaryApiPrivileged *);
NV_STATUS (*__binapiprivInternalControlForward__)(struct BinaryApiPrivileged *, NvU32, void *, NvU32);
void (*__binapiprivPreDestruct__)(struct BinaryApiPrivileged *);
NV_STATUS (*__binapiprivUnmapFrom__)(struct BinaryApiPrivileged *, RS_RES_UNMAP_FROM_PARAMS *);
void (*__binapiprivControl_Epilogue__)(struct BinaryApiPrivileged *, struct CALL_CONTEXT *, struct RS_RES_CONTROL_PARAMS_INTERNAL *);
NV_STATUS (*__binapiprivControlLookup__)(struct BinaryApiPrivileged *, struct RS_RES_CONTROL_PARAMS_INTERNAL *, const struct NVOC_EXPORTED_METHOD_DEF **);
NV_STATUS (*__binapiprivMap__)(struct BinaryApiPrivileged *, struct CALL_CONTEXT *, struct RS_CPU_MAP_PARAMS *, struct RsCpuMapping *);
NvBool (*__binapiprivAccessCallback__)(struct BinaryApiPrivileged *, struct RsClient *, void *, RsAccessRight);
};
#ifndef __NVOC_CLASS_BinaryApiPrivileged_TYPEDEF__
#define __NVOC_CLASS_BinaryApiPrivileged_TYPEDEF__
typedef struct BinaryApiPrivileged BinaryApiPrivileged;
#endif /* __NVOC_CLASS_BinaryApiPrivileged_TYPEDEF__ */
#ifndef __nvoc_class_id_BinaryApiPrivileged
#define __nvoc_class_id_BinaryApiPrivileged 0x1c0579
#endif /* __nvoc_class_id_BinaryApiPrivileged */
extern const struct NVOC_CLASS_DEF __nvoc_class_def_BinaryApiPrivileged;
#define __staticCast_BinaryApiPrivileged(pThis) \
((pThis)->__nvoc_pbase_BinaryApiPrivileged)
#ifdef __nvoc_binary_api_h_disabled
#define __dynamicCast_BinaryApiPrivileged(pThis) ((BinaryApiPrivileged*)NULL)
#else //__nvoc_binary_api_h_disabled
#define __dynamicCast_BinaryApiPrivileged(pThis) \
((BinaryApiPrivileged*)__nvoc_dynamicCast(staticCast((pThis), Dynamic), classInfo(BinaryApiPrivileged)))
#endif //__nvoc_binary_api_h_disabled
NV_STATUS __nvoc_objCreateDynamic_BinaryApiPrivileged(BinaryApiPrivileged**, Dynamic*, NvU32, va_list);
NV_STATUS __nvoc_objCreate_BinaryApiPrivileged(BinaryApiPrivileged**, Dynamic*, NvU32, struct CALL_CONTEXT * arg_pCallContext, struct RS_RES_ALLOC_PARAMS_INTERNAL * arg_pParams);
#define __objCreate_BinaryApiPrivileged(ppNewObj, pParent, createFlags, arg_pCallContext, arg_pParams) \
__nvoc_objCreate_BinaryApiPrivileged((ppNewObj), staticCast((pParent), Dynamic), (createFlags), arg_pCallContext, arg_pParams)
#define binapiprivControl(pResource, pCallContext, pParams) binapiprivControl_DISPATCH(pResource, pCallContext, pParams)
#define binapiprivShareCallback(pGpuResource, pInvokingClient, pParentRef, pSharePolicy) binapiprivShareCallback_DISPATCH(pGpuResource, pInvokingClient, pParentRef, pSharePolicy)
#define binapiprivUnmap(pGpuResource, pCallContext, pCpuMapping) binapiprivUnmap_DISPATCH(pGpuResource, pCallContext, pCpuMapping)
#define binapiprivGetMemInterMapParams(pRmResource, pParams) binapiprivGetMemInterMapParams_DISPATCH(pRmResource, pParams)
#define binapiprivGetMemoryMappingDescriptor(pRmResource, ppMemDesc) binapiprivGetMemoryMappingDescriptor_DISPATCH(pRmResource, ppMemDesc)
#define binapiprivGetMapAddrSpace(pGpuResource, pCallContext, mapFlags, pAddrSpace) binapiprivGetMapAddrSpace_DISPATCH(pGpuResource, pCallContext, mapFlags, pAddrSpace)
#define binapiprivGetInternalObjectHandle(pGpuResource) binapiprivGetInternalObjectHandle_DISPATCH(pGpuResource)
#define binapiprivControlFilter(pResource, pCallContext, pParams) binapiprivControlFilter_DISPATCH(pResource, pCallContext, pParams)
#define binapiprivAddAdditionalDependants(pClient, pResource, pReference) binapiprivAddAdditionalDependants_DISPATCH(pClient, pResource, pReference)
#define binapiprivGetRefCount(pResource) binapiprivGetRefCount_DISPATCH(pResource)
#define binapiprivCheckMemInterUnmap(pRmResource, bSubdeviceHandleProvided) binapiprivCheckMemInterUnmap_DISPATCH(pRmResource, bSubdeviceHandleProvided)
#define binapiprivMapTo(pResource, pParams) binapiprivMapTo_DISPATCH(pResource, pParams)
#define binapiprivControl_Prologue(pResource, pCallContext, pParams) binapiprivControl_Prologue_DISPATCH(pResource, pCallContext, pParams)
#define binapiprivGetRegBaseOffsetAndSize(pGpuResource, pGpu, pOffset, pSize) binapiprivGetRegBaseOffsetAndSize_DISPATCH(pGpuResource, pGpu, pOffset, pSize)
#define binapiprivCanCopy(pResource) binapiprivCanCopy_DISPATCH(pResource)
#define binapiprivInternalControlForward(pGpuResource, command, pParams, size) binapiprivInternalControlForward_DISPATCH(pGpuResource, command, pParams, size)
#define binapiprivPreDestruct(pResource) binapiprivPreDestruct_DISPATCH(pResource)
#define binapiprivUnmapFrom(pResource, pParams) binapiprivUnmapFrom_DISPATCH(pResource, pParams)
#define binapiprivControl_Epilogue(pResource, pCallContext, pParams) binapiprivControl_Epilogue_DISPATCH(pResource, pCallContext, pParams)
#define binapiprivControlLookup(pResource, pParams, ppEntry) binapiprivControlLookup_DISPATCH(pResource, pParams, ppEntry)
#define binapiprivMap(pGpuResource, pCallContext, pParams, pCpuMapping) binapiprivMap_DISPATCH(pGpuResource, pCallContext, pParams, pCpuMapping)
#define binapiprivAccessCallback(pResource, pInvokingClient, pAllocParams, accessRight) binapiprivAccessCallback_DISPATCH(pResource, pInvokingClient, pAllocParams, accessRight)
NV_STATUS binapiprivControl_IMPL(struct BinaryApiPrivileged *pResource, struct CALL_CONTEXT *pCallContext, struct RS_RES_CONTROL_PARAMS_INTERNAL *pParams);
static inline NV_STATUS binapiprivControl_DISPATCH(struct BinaryApiPrivileged *pResource, struct CALL_CONTEXT *pCallContext, struct RS_RES_CONTROL_PARAMS_INTERNAL *pParams) {
return pResource->__binapiprivControl__(pResource, pCallContext, pParams);
}
static inline NvBool binapiprivShareCallback_DISPATCH(struct BinaryApiPrivileged *pGpuResource, struct RsClient *pInvokingClient, struct RsResourceRef *pParentRef, RS_SHARE_POLICY *pSharePolicy) {
return pGpuResource->__binapiprivShareCallback__(pGpuResource, pInvokingClient, pParentRef, pSharePolicy);
}
static inline NV_STATUS binapiprivUnmap_DISPATCH(struct BinaryApiPrivileged *pGpuResource, struct CALL_CONTEXT *pCallContext, struct RsCpuMapping *pCpuMapping) {
return pGpuResource->__binapiprivUnmap__(pGpuResource, pCallContext, pCpuMapping);
}
static inline NV_STATUS binapiprivGetMemInterMapParams_DISPATCH(struct BinaryApiPrivileged *pRmResource, RMRES_MEM_INTER_MAP_PARAMS *pParams) {
return pRmResource->__binapiprivGetMemInterMapParams__(pRmResource, pParams);
}
static inline NV_STATUS binapiprivGetMemoryMappingDescriptor_DISPATCH(struct BinaryApiPrivileged *pRmResource, struct MEMORY_DESCRIPTOR **ppMemDesc) {
return pRmResource->__binapiprivGetMemoryMappingDescriptor__(pRmResource, ppMemDesc);
}
static inline NV_STATUS binapiprivGetMapAddrSpace_DISPATCH(struct BinaryApiPrivileged *pGpuResource, struct CALL_CONTEXT *pCallContext, NvU32 mapFlags, NV_ADDRESS_SPACE *pAddrSpace) {
return pGpuResource->__binapiprivGetMapAddrSpace__(pGpuResource, pCallContext, mapFlags, pAddrSpace);
}
static inline NvHandle binapiprivGetInternalObjectHandle_DISPATCH(struct BinaryApiPrivileged *pGpuResource) {
return pGpuResource->__binapiprivGetInternalObjectHandle__(pGpuResource);
}
static inline NV_STATUS binapiprivControlFilter_DISPATCH(struct BinaryApiPrivileged *pResource, struct CALL_CONTEXT *pCallContext, struct RS_RES_CONTROL_PARAMS_INTERNAL *pParams) {
return pResource->__binapiprivControlFilter__(pResource, pCallContext, pParams);
}
static inline void binapiprivAddAdditionalDependants_DISPATCH(struct RsClient *pClient, struct BinaryApiPrivileged *pResource, RsResourceRef *pReference) {
pResource->__binapiprivAddAdditionalDependants__(pClient, pResource, pReference);
}
static inline NvU32 binapiprivGetRefCount_DISPATCH(struct BinaryApiPrivileged *pResource) {
return pResource->__binapiprivGetRefCount__(pResource);
}
static inline NV_STATUS binapiprivCheckMemInterUnmap_DISPATCH(struct BinaryApiPrivileged *pRmResource, NvBool bSubdeviceHandleProvided) {
return pRmResource->__binapiprivCheckMemInterUnmap__(pRmResource, bSubdeviceHandleProvided);
}
static inline NV_STATUS binapiprivMapTo_DISPATCH(struct BinaryApiPrivileged *pResource, RS_RES_MAP_TO_PARAMS *pParams) {
return pResource->__binapiprivMapTo__(pResource, pParams);
}
static inline NV_STATUS binapiprivControl_Prologue_DISPATCH(struct BinaryApiPrivileged *pResource, struct CALL_CONTEXT *pCallContext, struct RS_RES_CONTROL_PARAMS_INTERNAL *pParams) {
return pResource->__binapiprivControl_Prologue__(pResource, pCallContext, pParams);
}
static inline NV_STATUS binapiprivGetRegBaseOffsetAndSize_DISPATCH(struct BinaryApiPrivileged *pGpuResource, struct OBJGPU *pGpu, NvU32 *pOffset, NvU32 *pSize) {
return pGpuResource->__binapiprivGetRegBaseOffsetAndSize__(pGpuResource, pGpu, pOffset, pSize);
}
static inline NvBool binapiprivCanCopy_DISPATCH(struct BinaryApiPrivileged *pResource) {
return pResource->__binapiprivCanCopy__(pResource);
}
static inline NV_STATUS binapiprivInternalControlForward_DISPATCH(struct BinaryApiPrivileged *pGpuResource, NvU32 command, void *pParams, NvU32 size) {
return pGpuResource->__binapiprivInternalControlForward__(pGpuResource, command, pParams, size);
}
static inline void binapiprivPreDestruct_DISPATCH(struct BinaryApiPrivileged *pResource) {
pResource->__binapiprivPreDestruct__(pResource);
}
static inline NV_STATUS binapiprivUnmapFrom_DISPATCH(struct BinaryApiPrivileged *pResource, RS_RES_UNMAP_FROM_PARAMS *pParams) {
return pResource->__binapiprivUnmapFrom__(pResource, pParams);
}
static inline void binapiprivControl_Epilogue_DISPATCH(struct BinaryApiPrivileged *pResource, struct CALL_CONTEXT *pCallContext, struct RS_RES_CONTROL_PARAMS_INTERNAL *pParams) {
pResource->__binapiprivControl_Epilogue__(pResource, pCallContext, pParams);
}
static inline NV_STATUS binapiprivControlLookup_DISPATCH(struct BinaryApiPrivileged *pResource, struct RS_RES_CONTROL_PARAMS_INTERNAL *pParams, const struct NVOC_EXPORTED_METHOD_DEF **ppEntry) {
return pResource->__binapiprivControlLookup__(pResource, pParams, ppEntry);
}
static inline NV_STATUS binapiprivMap_DISPATCH(struct BinaryApiPrivileged *pGpuResource, struct CALL_CONTEXT *pCallContext, struct RS_CPU_MAP_PARAMS *pParams, struct RsCpuMapping *pCpuMapping) {
return pGpuResource->__binapiprivMap__(pGpuResource, pCallContext, pParams, pCpuMapping);
}
static inline NvBool binapiprivAccessCallback_DISPATCH(struct BinaryApiPrivileged *pResource, struct RsClient *pInvokingClient, void *pAllocParams, RsAccessRight accessRight) {
return pResource->__binapiprivAccessCallback__(pResource, pInvokingClient, pAllocParams, accessRight);
}
NV_STATUS binapiprivConstruct_IMPL(struct BinaryApiPrivileged *arg_pResource, struct CALL_CONTEXT *arg_pCallContext, struct RS_RES_ALLOC_PARAMS_INTERNAL *arg_pParams);
#define __nvoc_binapiprivConstruct(arg_pResource, arg_pCallContext, arg_pParams) binapiprivConstruct_IMPL(arg_pResource, arg_pCallContext, arg_pParams)
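/*
 * Minimal usage sketch, not part of the generated header: how a caller might
 * allocate a BinaryApiPrivileged through the generated entry points and invoke
 * a virtual method through its function table. The function name and the zero
 * createFlags are illustrative assumptions; real allocations are driven by the
 * resource server, which supplies pParent, pCallContext and pParams.
 */
static inline NV_STATUS __example_binapipriv_alloc(Dynamic *pParent,
                                                   struct CALL_CONTEXT *pCallContext,
                                                   struct RS_RES_ALLOC_PARAMS_INTERNAL *pParams)
{
    BinaryApiPrivileged *pPriv = NULL;
    NV_STATUS status = __nvoc_objCreate_BinaryApiPrivileged(&pPriv, pParent, 0,
                                                            pCallContext, pParams);
    if (status != NV_OK)
        return status;

    /* Dispatches through __binapiprivGetRefCount__, wired up by
     * __nvoc_init_funcTable_BinaryApiPrivileged(). */
    (void)binapiprivGetRefCount(pPriv);
    return NV_OK;
}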
#undef PRIVATE_FIELD
#endif
#ifdef __cplusplus
} // extern "C"
#endif
#endif // _G_BINARY_API_NVOC_H_


@@ -0,0 +1,3 @@
#include "g_chips2halspec_nvoc.h"


@@ -0,0 +1,45 @@
#define NVOC_CHIPS2HALSPEC_H_PRIVATE_ACCESS_ALLOWED
#include "nvoc/runtime.h"
#include "nvoc/rtti.h"
#include "nvtypes.h"
#include "nvport/nvport.h"
#include "nvport/inline/util_valist.h"
#include "utils/nvassert.h"
#include "g_chips2halspec_nvoc.h"
void __nvoc_init_halspec_ChipHal(ChipHal *pChipHal, NvU32 arch, NvU32 impl, NvU32 hidrev)
{
// T234D
if(arch == 0x0 && impl == 0x0 && hidrev == 0x235)
{
pChipHal->__nvoc_HalVarIdx = 80;
}
}
void __nvoc_init_halspec_RmVariantHal(RmVariantHal *pRmVariantHal, RM_RUNTIME_VARIANT rmVariant)
{
// PF_KERNEL_ONLY
if(rmVariant == 0x2)
{
pRmVariantHal->__nvoc_HalVarIdx = 1;
}
}
void __nvoc_init_halspec_DispIpHal(DispIpHal *pDispIpHal, NvU32 ipver)
{
// DISPv0402
if(ipver == 0x4020000)
{
pDispIpHal->__nvoc_HalVarIdx = 12;
}
}
void __nvoc_init_halspec_DpuIpHal(DpuIpHal *pDpuIpHal, NvU32 ipver)
{
// DPUv0000
if(ipver == 0x0)
{
pDpuIpHal->__nvoc_HalVarIdx = 5;
}
}
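/*
 * Minimal usage sketch, not part of the generated source: how these halspec
 * initializers might be driven for the configuration encoded above (T234D
 * silicon, PF_KERNEL_ONLY runtime variant). The function name is an
 * illustrative assumption; the real call sites elsewhere in RM supply the
 * architecture, implementation, hidrev and variant values.
 */
static inline void __example_init_halspecs(void)
{
    ChipHal chipHal;
    RmVariantHal rmVariantHal;

    /* arch 0x0, impl 0x0, hidrev 0x235 selects the T234D variant (index 80). */
    __nvoc_init_halspec_ChipHal(&chipHal, 0x0, 0x0, 0x235);

    /* RM_RUNTIME_VARIANT_PF_KERNEL_ONLY (0x2) selects variant index 1. */
    __nvoc_init_halspec_RmVariantHal(&rmVariantHal, RM_RUNTIME_VARIANT_PF_KERNEL_ONLY);
}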


@@ -0,0 +1,118 @@
#ifndef _G_CHIPS2HALSPEC_NVOC_H_
#define _G_CHIPS2HALSPEC_NVOC_H_
#include "nvoc/runtime.h"
#ifdef __cplusplus
extern "C" {
#endif
#include "g_chips2halspec_nvoc.h"
#ifndef _CHIPS_2_HALSPEC_H_
#define _CHIPS_2_HALSPEC_H_
#include "nvtypes.h"
#include "rmconfig.h"
// Several WARs that are only visible to the NVOC compiler
#define GPUHAL_ARCH(x) NV_PMC_BOOT_0_ARCHITECTURE_##x
#define GPUHAL_IMPL(x) NV_PMC_BOOT_0_IMPLEMENTATION_##x
// Create alias 'group' to provide a concise syntax
#define group variant_group
// Use in a hal block to indicate that the function isn't wired to any enabled chips
#define __disabled__ false
struct ChipHal {
unsigned short __nvoc_HalVarIdx;
};
typedef struct ChipHal ChipHal;
void __nvoc_init_halspec_ChipHal(ChipHal*, NvU32, NvU32, NvU32);
/*
* RM Runtime Variant Halspec
*
* One group of Hal Variants that presents two perspectives:
*
* Operating Environment Perspective: VF / PF / UCODE
* VF | PF | UCODE = true
* VF & PF & UCODE = false
*
* VF : RM is running in VGPU Guest environment. Equals to IS_VIRTUAL(pGpu)
* PF : RM is running in Host/Baremetal in standard PCIE environment
* UCODE : RM is running on microcontroller
*
* Functionality-Based Perspective: KERNEL_ONLY / PHYSICAL_ONLY / MONOLITHIC
* KERNEL_ONLY | PHYSICAL_ONLY | MONOLITHIC = true
* KERNEL_ONLY & PHYSICAL_ONLY & MONOLITHIC = false
*
* KERNEL_ONLY : RM does not own HW. The physical part is offloaded to Ucode.
* PHYSICAL_ONLY : RM owns HW but does not expose services to RM Clients
* MONOLITHIC : RM owns both the interface to the client and the underlying HW.
*
* Note: GSP Client (IS_GSP_CLIENT(pGpu)) maps to "PF_KERNEL_ONLY"
* DCE Client maps to "PF_KERNEL_ONLY & T234D"
*
*
* HAL Variants
* +--------+ +----------------+
* | VF | <-----| VF |--+
* +--------+ +----------------+ | +---------------+
* |--> | KERNEL_ONLY |
* +----------------+ | +---------------+
* +--| PF_KERNEL_ONLY |--+
* +--------+ | +----------------+
* | PF | <--|
* +--------+ | +----------------+ +---------------+
* +--| PF_MONOLITHIC |-----> | MONOLITHIC |
* +----------------+ +---------------+
*
* +--------+ +----------------+ +---------------+
* | UCODE | <-----| UCODE |-----> | PHYSICAL_ONLY |
* +--------+ +----------------+ +---------------+
*
* */
typedef enum _RM_RUNTIME_VARIANT {
RM_RUNTIME_VARIANT_VF = 1,
RM_RUNTIME_VARIANT_PF_KERNEL_ONLY = 2,
RM_RUNTIME_VARIANT_PF_MONOLITHIC = 3,
RM_RUNTIME_VARIANT_UCODE = 4,
} RM_RUNTIME_VARIANT;
struct RmVariantHal {
unsigned short __nvoc_HalVarIdx;
};
typedef struct RmVariantHal RmVariantHal;
void __nvoc_init_halspec_RmVariantHal(RmVariantHal*, RM_RUNTIME_VARIANT);
/* DISP IP versions */
struct DispIpHal {
unsigned short __nvoc_HalVarIdx;
};
typedef struct DispIpHal DispIpHal;
void __nvoc_init_halspec_DispIpHal(DispIpHal*, NvU32);
/* The 'delete' rules for DispIpHal and ChipHal */
// delete DISPv0402 & ~T234D;
// delete ~DISPv0402 & T234D;
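// (i.e. DISPv0402 pairs only with T234D: mismatched chip / display-IP variant
// combinations are removed by the NVOC compiler.)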
/* DPU IP versions */
struct DpuIpHal {
unsigned short __nvoc_HalVarIdx;
};
typedef struct DpuIpHal DpuIpHal;
void __nvoc_init_halspec_DpuIpHal(DpuIpHal*, NvU32);
/* The 'delete' rules for DpuIpHal and ChipHal */
#undef group
#endif /* _CHIPS_2_HALSPEC_H_ */
#ifdef __cplusplus
} // extern "C"
#endif
#endif // _G_CHIPS2HALSPEC_NVOC_H_


@@ -0,0 +1,385 @@
#define NVOC_CLIENT_H_PRIVATE_ACCESS_ALLOWED
#include "nvoc/runtime.h"
#include "nvoc/rtti.h"
#include "nvtypes.h"
#include "nvport/nvport.h"
#include "nvport/inline/util_valist.h"
#include "utils/nvassert.h"
#include "g_client_nvoc.h"
#ifdef DEBUG
char __nvoc_class_id_uniqueness_check_0x21d236 = 1;
#endif
extern const struct NVOC_CLASS_DEF __nvoc_class_def_UserInfo;
extern const struct NVOC_CLASS_DEF __nvoc_class_def_Object;
extern const struct NVOC_CLASS_DEF __nvoc_class_def_RsShared;
void __nvoc_init_UserInfo(UserInfo*);
void __nvoc_init_funcTable_UserInfo(UserInfo*);
NV_STATUS __nvoc_ctor_UserInfo(UserInfo*);
void __nvoc_init_dataField_UserInfo(UserInfo*);
void __nvoc_dtor_UserInfo(UserInfo*);
extern const struct NVOC_EXPORT_INFO __nvoc_export_info_UserInfo;
static const struct NVOC_RTTI __nvoc_rtti_UserInfo_UserInfo = {
/*pClassDef=*/ &__nvoc_class_def_UserInfo,
/*dtor=*/ (NVOC_DYNAMIC_DTOR) &__nvoc_dtor_UserInfo,
/*offset=*/ 0,
};
static const struct NVOC_RTTI __nvoc_rtti_UserInfo_Object = {
/*pClassDef=*/ &__nvoc_class_def_Object,
/*dtor=*/ &__nvoc_destructFromBase,
/*offset=*/ NV_OFFSETOF(UserInfo, __nvoc_base_RsShared.__nvoc_base_Object),
};
static const struct NVOC_RTTI __nvoc_rtti_UserInfo_RsShared = {
/*pClassDef=*/ &__nvoc_class_def_RsShared,
/*dtor=*/ &__nvoc_destructFromBase,
/*offset=*/ NV_OFFSETOF(UserInfo, __nvoc_base_RsShared),
};
static const struct NVOC_CASTINFO __nvoc_castinfo_UserInfo = {
/*numRelatives=*/ 3,
/*relatives=*/ {
&__nvoc_rtti_UserInfo_UserInfo,
&__nvoc_rtti_UserInfo_RsShared,
&__nvoc_rtti_UserInfo_Object,
},
};
const struct NVOC_CLASS_DEF __nvoc_class_def_UserInfo =
{
/*classInfo=*/ {
/*size=*/ sizeof(UserInfo),
/*classId=*/ classId(UserInfo),
/*providerId=*/ &__nvoc_rtti_provider,
#if NV_PRINTF_STRINGS_ALLOWED
/*name=*/ "UserInfo",
#endif
},
/*objCreatefn=*/ (NVOC_DYNAMIC_OBJ_CREATE) &__nvoc_objCreateDynamic_UserInfo,
/*pCastInfo=*/ &__nvoc_castinfo_UserInfo,
/*pExportInfo=*/ &__nvoc_export_info_UserInfo
};
const struct NVOC_EXPORT_INFO __nvoc_export_info_UserInfo =
{
/*numEntries=*/ 0,
/*pExportEntries=*/ 0
};
void __nvoc_dtor_RsShared(RsShared*);
void __nvoc_dtor_UserInfo(UserInfo *pThis) {
__nvoc_userinfoDestruct(pThis);
__nvoc_dtor_RsShared(&pThis->__nvoc_base_RsShared);
PORT_UNREFERENCED_VARIABLE(pThis);
}
void __nvoc_init_dataField_UserInfo(UserInfo *pThis) {
PORT_UNREFERENCED_VARIABLE(pThis);
}
NV_STATUS __nvoc_ctor_RsShared(RsShared* );
NV_STATUS __nvoc_ctor_UserInfo(UserInfo *pThis) {
NV_STATUS status = NV_OK;
status = __nvoc_ctor_RsShared(&pThis->__nvoc_base_RsShared);
if (status != NV_OK) goto __nvoc_ctor_UserInfo_fail_RsShared;
__nvoc_init_dataField_UserInfo(pThis);
status = __nvoc_userinfoConstruct(pThis);
if (status != NV_OK) goto __nvoc_ctor_UserInfo_fail__init;
goto __nvoc_ctor_UserInfo_exit; // Success
__nvoc_ctor_UserInfo_fail__init:
__nvoc_dtor_RsShared(&pThis->__nvoc_base_RsShared);
__nvoc_ctor_UserInfo_fail_RsShared:
__nvoc_ctor_UserInfo_exit:
return status;
}
static void __nvoc_init_funcTable_UserInfo_1(UserInfo *pThis) {
PORT_UNREFERENCED_VARIABLE(pThis);
}
void __nvoc_init_funcTable_UserInfo(UserInfo *pThis) {
__nvoc_init_funcTable_UserInfo_1(pThis);
}
void __nvoc_init_RsShared(RsShared*);
void __nvoc_init_UserInfo(UserInfo *pThis) {
pThis->__nvoc_pbase_UserInfo = pThis;
pThis->__nvoc_pbase_Object = &pThis->__nvoc_base_RsShared.__nvoc_base_Object;
pThis->__nvoc_pbase_RsShared = &pThis->__nvoc_base_RsShared;
__nvoc_init_RsShared(&pThis->__nvoc_base_RsShared);
__nvoc_init_funcTable_UserInfo(pThis);
}
NV_STATUS __nvoc_objCreate_UserInfo(UserInfo **ppThis, Dynamic *pParent, NvU32 createFlags) {
NV_STATUS status;
Object *pParentObj;
UserInfo *pThis;
pThis = portMemAllocNonPaged(sizeof(UserInfo));
if (pThis == NULL) return NV_ERR_NO_MEMORY;
portMemSet(pThis, 0, sizeof(UserInfo));
__nvoc_initRtti(staticCast(pThis, Dynamic), &__nvoc_class_def_UserInfo);
if (pParent != NULL && !(createFlags & NVOC_OBJ_CREATE_FLAGS_PARENT_HALSPEC_ONLY))
{
pParentObj = dynamicCast(pParent, Object);
objAddChild(pParentObj, &pThis->__nvoc_base_RsShared.__nvoc_base_Object);
}
else
{
pThis->__nvoc_base_RsShared.__nvoc_base_Object.pParent = NULL;
}
__nvoc_init_UserInfo(pThis);
status = __nvoc_ctor_UserInfo(pThis);
if (status != NV_OK) goto __nvoc_objCreate_UserInfo_cleanup;
*ppThis = pThis;
return NV_OK;
__nvoc_objCreate_UserInfo_cleanup:
// do not call destructors here since the constructor already called them
portMemFree(pThis);
return status;
}
NV_STATUS __nvoc_objCreateDynamic_UserInfo(UserInfo **ppThis, Dynamic *pParent, NvU32 createFlags, va_list args) {
NV_STATUS status;
status = __nvoc_objCreate_UserInfo(ppThis, pParent, createFlags);
return status;
}
#ifdef DEBUG
char __nvoc_class_id_uniqueness_check_0xb23d83 = 1;
#endif
extern const struct NVOC_CLASS_DEF __nvoc_class_def_RmClient;
extern const struct NVOC_CLASS_DEF __nvoc_class_def_Object;
extern const struct NVOC_CLASS_DEF __nvoc_class_def_RsClient;
void __nvoc_init_RmClient(RmClient*);
void __nvoc_init_funcTable_RmClient(RmClient*);
NV_STATUS __nvoc_ctor_RmClient(RmClient*, struct PORT_MEM_ALLOCATOR * arg_pAllocator, struct RS_RES_ALLOC_PARAMS_INTERNAL * arg_pParams);
void __nvoc_init_dataField_RmClient(RmClient*);
void __nvoc_dtor_RmClient(RmClient*);
extern const struct NVOC_EXPORT_INFO __nvoc_export_info_RmClient;
static const struct NVOC_RTTI __nvoc_rtti_RmClient_RmClient = {
/*pClassDef=*/ &__nvoc_class_def_RmClient,
/*dtor=*/ (NVOC_DYNAMIC_DTOR) &__nvoc_dtor_RmClient,
/*offset=*/ 0,
};
static const struct NVOC_RTTI __nvoc_rtti_RmClient_Object = {
/*pClassDef=*/ &__nvoc_class_def_Object,
/*dtor=*/ &__nvoc_destructFromBase,
/*offset=*/ NV_OFFSETOF(RmClient, __nvoc_base_RsClient.__nvoc_base_Object),
};
static const struct NVOC_RTTI __nvoc_rtti_RmClient_RsClient = {
/*pClassDef=*/ &__nvoc_class_def_RsClient,
/*dtor=*/ &__nvoc_destructFromBase,
/*offset=*/ NV_OFFSETOF(RmClient, __nvoc_base_RsClient),
};
static const struct NVOC_CASTINFO __nvoc_castinfo_RmClient = {
/*numRelatives=*/ 3,
/*relatives=*/ {
&__nvoc_rtti_RmClient_RmClient,
&__nvoc_rtti_RmClient_RsClient,
&__nvoc_rtti_RmClient_Object,
},
};
const struct NVOC_CLASS_DEF __nvoc_class_def_RmClient =
{
/*classInfo=*/ {
/*size=*/ sizeof(RmClient),
/*classId=*/ classId(RmClient),
/*providerId=*/ &__nvoc_rtti_provider,
#if NV_PRINTF_STRINGS_ALLOWED
/*name=*/ "RmClient",
#endif
},
/*objCreatefn=*/ (NVOC_DYNAMIC_OBJ_CREATE) &__nvoc_objCreateDynamic_RmClient,
/*pCastInfo=*/ &__nvoc_castinfo_RmClient,
/*pExportInfo=*/ &__nvoc_export_info_RmClient
};
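// Thunks bridging RmClient and RsClient: the first group adjusts an RsClient
// pointer down to RmClient so the base vtable entries reach the RmClient
// overrides; the second group adjusts an RmClient pointer up to RsClient and
// forwards to the inherited implementation.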
static NV_STATUS __nvoc_thunk_RmClient_clientValidate(struct RsClient *pClient, const API_SECURITY_INFO *pSecInfo) {
return rmclientValidate((struct RmClient *)(((unsigned char *)pClient) - __nvoc_rtti_RmClient_RsClient.offset), pSecInfo);
}
static NV_STATUS __nvoc_thunk_RmClient_clientFreeResource(struct RsClient *pClient, struct RsServer *pServer, struct RS_RES_FREE_PARAMS_INTERNAL *pParams) {
return rmclientFreeResource((struct RmClient *)(((unsigned char *)pClient) - __nvoc_rtti_RmClient_RsClient.offset), pServer, pParams);
}
static NV_STATUS __nvoc_thunk_RmClient_clientInterMap(struct RsClient *pClient, struct RsResourceRef *pMapperRef, struct RsResourceRef *pMappableRef, struct RS_INTER_MAP_PARAMS *pParams) {
return rmclientInterMap((struct RmClient *)(((unsigned char *)pClient) - __nvoc_rtti_RmClient_RsClient.offset), pMapperRef, pMappableRef, pParams);
}
static void __nvoc_thunk_RmClient_clientInterUnmap(struct RsClient *pClient, struct RsResourceRef *pMapperRef, struct RS_INTER_UNMAP_PARAMS *pParams) {
rmclientInterUnmap((struct RmClient *)(((unsigned char *)pClient) - __nvoc_rtti_RmClient_RsClient.offset), pMapperRef, pParams);
}
static NV_STATUS __nvoc_thunk_RmClient_clientPostProcessPendingFreeList(struct RsClient *pClient, struct RsResourceRef **ppFirstLowPriRef) {
return rmclientPostProcessPendingFreeList((struct RmClient *)(((unsigned char *)pClient) - __nvoc_rtti_RmClient_RsClient.offset), ppFirstLowPriRef);
}
static NV_STATUS __nvoc_thunk_RsClient_rmclientDestructResourceRef(struct RmClient *pClient, RsServer *pServer, struct RsResourceRef *pResourceRef) {
return clientDestructResourceRef((struct RsClient *)(((unsigned char *)pClient) + __nvoc_rtti_RmClient_RsClient.offset), pServer, pResourceRef);
}
static NV_STATUS __nvoc_thunk_RsClient_rmclientValidateNewResourceHandle(struct RmClient *pClient, NvHandle hResource, NvBool bRestrict) {
return clientValidateNewResourceHandle((struct RsClient *)(((unsigned char *)pClient) + __nvoc_rtti_RmClient_RsClient.offset), hResource, bRestrict);
}
static NV_STATUS __nvoc_thunk_RsClient_rmclientShareResource(struct RmClient *pClient, struct RsResourceRef *pResourceRef, RS_SHARE_POLICY *pSharePolicy, struct CALL_CONTEXT *pCallContext) {
return clientShareResource((struct RsClient *)(((unsigned char *)pClient) + __nvoc_rtti_RmClient_RsClient.offset), pResourceRef, pSharePolicy, pCallContext);
}
static NV_STATUS __nvoc_thunk_RsClient_rmclientUnmapMemory(struct RmClient *pClient, struct RsResourceRef *pResourceRef, struct RS_LOCK_INFO *pLockInfo, struct RsCpuMapping **ppCpuMapping, API_SECURITY_INFO *pSecInfo) {
return clientUnmapMemory((struct RsClient *)(((unsigned char *)pClient) + __nvoc_rtti_RmClient_RsClient.offset), pResourceRef, pLockInfo, ppCpuMapping, pSecInfo);
}
const struct NVOC_EXPORT_INFO __nvoc_export_info_RmClient =
{
/*numEntries=*/ 0,
/*pExportEntries=*/ 0
};
void __nvoc_dtor_RsClient(RsClient*);
void __nvoc_dtor_RmClient(RmClient *pThis) {
    __nvoc_rmclientDestruct(pThis);
    __nvoc_dtor_RsClient(&pThis->__nvoc_base_RsClient);
    PORT_UNREFERENCED_VARIABLE(pThis);
}

void __nvoc_init_dataField_RmClient(RmClient *pThis) {
    PORT_UNREFERENCED_VARIABLE(pThis);
}

NV_STATUS __nvoc_ctor_RsClient(RsClient* , struct PORT_MEM_ALLOCATOR *, struct RS_RES_ALLOC_PARAMS_INTERNAL *);
NV_STATUS __nvoc_ctor_RmClient(RmClient *pThis, struct PORT_MEM_ALLOCATOR * arg_pAllocator, struct RS_RES_ALLOC_PARAMS_INTERNAL * arg_pParams) {
    NV_STATUS status = NV_OK;
    status = __nvoc_ctor_RsClient(&pThis->__nvoc_base_RsClient, arg_pAllocator, arg_pParams);
    if (status != NV_OK) goto __nvoc_ctor_RmClient_fail_RsClient;
    __nvoc_init_dataField_RmClient(pThis);

    status = __nvoc_rmclientConstruct(pThis, arg_pAllocator, arg_pParams);
    if (status != NV_OK) goto __nvoc_ctor_RmClient_fail__init;
    goto __nvoc_ctor_RmClient_exit; // Success

__nvoc_ctor_RmClient_fail__init:
    __nvoc_dtor_RsClient(&pThis->__nvoc_base_RsClient);
__nvoc_ctor_RmClient_fail_RsClient:
__nvoc_ctor_RmClient_exit:

    return status;
}

static void __nvoc_init_funcTable_RmClient_1(RmClient *pThis) {
    PORT_UNREFERENCED_VARIABLE(pThis);

    pThis->__rmclientValidate__ = &rmclientValidate_IMPL;
    pThis->__rmclientFreeResource__ = &rmclientFreeResource_IMPL;
    pThis->__rmclientInterMap__ = &rmclientInterMap_IMPL;
    pThis->__rmclientInterUnmap__ = &rmclientInterUnmap_IMPL;
    pThis->__rmclientPostProcessPendingFreeList__ = &rmclientPostProcessPendingFreeList_IMPL;
    pThis->__nvoc_base_RsClient.__clientValidate__ = &__nvoc_thunk_RmClient_clientValidate;
    pThis->__nvoc_base_RsClient.__clientFreeResource__ = &__nvoc_thunk_RmClient_clientFreeResource;
    pThis->__nvoc_base_RsClient.__clientInterMap__ = &__nvoc_thunk_RmClient_clientInterMap;
    pThis->__nvoc_base_RsClient.__clientInterUnmap__ = &__nvoc_thunk_RmClient_clientInterUnmap;
    pThis->__nvoc_base_RsClient.__clientPostProcessPendingFreeList__ = &__nvoc_thunk_RmClient_clientPostProcessPendingFreeList;
    pThis->__rmclientDestructResourceRef__ = &__nvoc_thunk_RsClient_rmclientDestructResourceRef;
    pThis->__rmclientValidateNewResourceHandle__ = &__nvoc_thunk_RsClient_rmclientValidateNewResourceHandle;
    pThis->__rmclientShareResource__ = &__nvoc_thunk_RsClient_rmclientShareResource;
    pThis->__rmclientUnmapMemory__ = &__nvoc_thunk_RsClient_rmclientUnmapMemory;
}

void __nvoc_init_funcTable_RmClient(RmClient *pThis) {
    __nvoc_init_funcTable_RmClient_1(pThis);
}

void __nvoc_init_RsClient(RsClient*);
void __nvoc_init_RmClient(RmClient *pThis) {
    pThis->__nvoc_pbase_RmClient = pThis;
    pThis->__nvoc_pbase_Object = &pThis->__nvoc_base_RsClient.__nvoc_base_Object;
    pThis->__nvoc_pbase_RsClient = &pThis->__nvoc_base_RsClient;
    __nvoc_init_RsClient(&pThis->__nvoc_base_RsClient);
    __nvoc_init_funcTable_RmClient(pThis);
}

NV_STATUS __nvoc_objCreate_RmClient(RmClient **ppThis, Dynamic *pParent, NvU32 createFlags, struct PORT_MEM_ALLOCATOR * arg_pAllocator, struct RS_RES_ALLOC_PARAMS_INTERNAL * arg_pParams) {
    NV_STATUS status;
    Object *pParentObj;
    RmClient *pThis;

    pThis = portMemAllocNonPaged(sizeof(RmClient));
    if (pThis == NULL) return NV_ERR_NO_MEMORY;

    portMemSet(pThis, 0, sizeof(RmClient));

    __nvoc_initRtti(staticCast(pThis, Dynamic), &__nvoc_class_def_RmClient);

    if (pParent != NULL && !(createFlags & NVOC_OBJ_CREATE_FLAGS_PARENT_HALSPEC_ONLY))
    {
        pParentObj = dynamicCast(pParent, Object);
        objAddChild(pParentObj, &pThis->__nvoc_base_RsClient.__nvoc_base_Object);
    }
    else
    {
        pThis->__nvoc_base_RsClient.__nvoc_base_Object.pParent = NULL;
    }

    __nvoc_init_RmClient(pThis);
    status = __nvoc_ctor_RmClient(pThis, arg_pAllocator, arg_pParams);
    if (status != NV_OK) goto __nvoc_objCreate_RmClient_cleanup;

    *ppThis = pThis;
    return NV_OK;

__nvoc_objCreate_RmClient_cleanup:
    // do not call destructors here since the constructor already called them
    portMemFree(pThis);
    return status;
}

NV_STATUS __nvoc_objCreateDynamic_RmClient(RmClient **ppThis, Dynamic *pParent, NvU32 createFlags, va_list args) {
    NV_STATUS status;
    struct PORT_MEM_ALLOCATOR * arg_pAllocator = va_arg(args, struct PORT_MEM_ALLOCATOR *);
    struct RS_RES_ALLOC_PARAMS_INTERNAL * arg_pParams = va_arg(args, struct RS_RES_ALLOC_PARAMS_INTERNAL *);

    status = __nvoc_objCreate_RmClient(ppThis, pParent, createFlags, arg_pAllocator, arg_pParams);

    return status;
}

@@ -0,0 +1,323 @@
#ifndef _G_CLIENT_NVOC_H_
#define _G_CLIENT_NVOC_H_
#include "nvoc/runtime.h"
#ifdef __cplusplus
extern "C" {
#endif
/*
* SPDX-FileCopyrightText: Copyright (c) 2016-2020 NVIDIA CORPORATION & AFFILIATES. All rights reserved.
* SPDX-License-Identifier: MIT
*
* Permission is hereby granted, free of charge, to any person obtaining a
* copy of this software and associated documentation files (the "Software"),
* to deal in the Software without restriction, including without limitation
* the rights to use, copy, modify, merge, publish, distribute, sublicense,
* and/or sell copies of the Software, and to permit persons to whom the
* Software is furnished to do so, subject to the following conditions:
*
* The above copyright notice and this permission notice shall be included in
* all copies or substantial portions of the Software.
*
* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
* IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
* FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL
* THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
* LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
* FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER
* DEALINGS IN THE SOFTWARE.
*/
#include "g_client_nvoc.h"
#ifndef _CLIENT_H_
#define _CLIENT_H_
#include "ctrl/ctrl0000/ctrl0000proc.h" // NV_PROC_NAME_MAX_LENGTH
#include "containers/btree.h"
#include "resserv/resserv.h"
#include "nvoc/prelude.h"
#include "resserv/rs_client.h"
#include "rmapi/resource.h"
#include "rmapi/event.h"
#include "nvsecurityinfo.h"
// event information definitions
typedef struct _def_client_system_event_info CLI_SYSTEM_EVENT_INFO, *PCLI_SYSTEM_EVENT_INFO;
/**
* This ref-counted object is shared by all clients that were registered under
* the same user and is used to identify clients from the same user.
*/
#ifdef NVOC_CLIENT_H_PRIVATE_ACCESS_ALLOWED
#define PRIVATE_FIELD(x) x
#else
#define PRIVATE_FIELD(x) NVOC_PRIVATE_FIELD(x)
#endif
struct UserInfo {
const struct NVOC_RTTI *__nvoc_rtti;
struct RsShared __nvoc_base_RsShared;
struct Object *__nvoc_pbase_Object;
struct RsShared *__nvoc_pbase_RsShared;
struct UserInfo *__nvoc_pbase_UserInfo;
PUID_TOKEN pUidToken;
};
#ifndef __NVOC_CLASS_UserInfo_TYPEDEF__
#define __NVOC_CLASS_UserInfo_TYPEDEF__
typedef struct UserInfo UserInfo;
#endif /* __NVOC_CLASS_UserInfo_TYPEDEF__ */
#ifndef __nvoc_class_id_UserInfo
#define __nvoc_class_id_UserInfo 0x21d236
#endif /* __nvoc_class_id_UserInfo */
extern const struct NVOC_CLASS_DEF __nvoc_class_def_UserInfo;
#define __staticCast_UserInfo(pThis) \
((pThis)->__nvoc_pbase_UserInfo)
#ifdef __nvoc_client_h_disabled
#define __dynamicCast_UserInfo(pThis) ((UserInfo*)NULL)
#else //__nvoc_client_h_disabled
#define __dynamicCast_UserInfo(pThis) \
((UserInfo*)__nvoc_dynamicCast(staticCast((pThis), Dynamic), classInfo(UserInfo)))
#endif //__nvoc_client_h_disabled
NV_STATUS __nvoc_objCreateDynamic_UserInfo(UserInfo**, Dynamic*, NvU32, va_list);
NV_STATUS __nvoc_objCreate_UserInfo(UserInfo**, Dynamic*, NvU32);
#define __objCreate_UserInfo(ppNewObj, pParent, createFlags) \
__nvoc_objCreate_UserInfo((ppNewObj), staticCast((pParent), Dynamic), (createFlags))
NV_STATUS userinfoConstruct_IMPL(struct UserInfo *arg_pUserInfo);
#define __nvoc_userinfoConstruct(arg_pUserInfo) userinfoConstruct_IMPL(arg_pUserInfo)
void userinfoDestruct_IMPL(struct UserInfo *pUserInfo);
#define __nvoc_userinfoDestruct(pUserInfo) userinfoDestruct_IMPL(pUserInfo)
#undef PRIVATE_FIELD
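
/*
 * Illustrative sketch (not part of the generated header): allocating the
 * ref-counted, per-user UserInfo shared object described by the UserInfo
 * comment above. exampleAllocUserInfo is a hypothetical helper; reference
 * management and insertion into g_userInfoList are owned by the RM
 * client-registration code and are not shown here.
 */
static inline NV_STATUS exampleAllocUserInfo(UserInfo **ppUserInfo)
{
    // Created without a parent object; 0 means no special create flags.
    return __nvoc_objCreate_UserInfo(ppUserInfo, NULL, 0);
}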
// Flags for RmClient
#define RMAPI_CLIENT_FLAG_RM_INTERNAL_CLIENT 0x00000001
#define RMAPI_CLIENT_FLAG_DELETE_PENDING 0x00000002
// Values for client debugger state
#define RMAPI_CLIENT_DEBUGGER_STATE_NOT_SET 0x00000000
#define RMAPI_CLIENT_DEBUGGER_STATE_COMPUTE_ACTIVE 0x00000001
#define RMAPI_CLIENT_DEBUGGER_STATE_DEBUG_ACTIVE 0x00000002
#ifdef NVOC_CLIENT_H_PRIVATE_ACCESS_ALLOWED
#define PRIVATE_FIELD(x) x
#else
#define PRIVATE_FIELD(x) NVOC_PRIVATE_FIELD(x)
#endif
struct RmClient {
const struct NVOC_RTTI *__nvoc_rtti;
struct RsClient __nvoc_base_RsClient;
struct Object *__nvoc_pbase_Object;
struct RsClient *__nvoc_pbase_RsClient;
struct RmClient *__nvoc_pbase_RmClient;
NV_STATUS (*__rmclientValidate__)(struct RmClient *, const API_SECURITY_INFO *);
NV_STATUS (*__rmclientFreeResource__)(struct RmClient *, struct RsServer *, struct RS_RES_FREE_PARAMS_INTERNAL *);
NV_STATUS (*__rmclientInterMap__)(struct RmClient *, struct RsResourceRef *, struct RsResourceRef *, struct RS_INTER_MAP_PARAMS *);
void (*__rmclientInterUnmap__)(struct RmClient *, struct RsResourceRef *, struct RS_INTER_UNMAP_PARAMS *);
NV_STATUS (*__rmclientPostProcessPendingFreeList__)(struct RmClient *, struct RsResourceRef **);
NV_STATUS (*__rmclientDestructResourceRef__)(struct RmClient *, RsServer *, struct RsResourceRef *);
NV_STATUS (*__rmclientValidateNewResourceHandle__)(struct RmClient *, NvHandle, NvBool);
NV_STATUS (*__rmclientShareResource__)(struct RmClient *, struct RsResourceRef *, RS_SHARE_POLICY *, struct CALL_CONTEXT *);
NV_STATUS (*__rmclientUnmapMemory__)(struct RmClient *, struct RsResourceRef *, struct RS_LOCK_INFO *, struct RsCpuMapping **, API_SECURITY_INFO *);
RS_PRIV_LEVEL cachedPrivilege;
NvBool bIsRootNonPriv;
NvU32 ProcID;
NvU32 SubProcessID;
char SubProcessName[100];
NvBool bIsSubProcessDisabled;
NvU32 Flags;
NvU32 ClientDebuggerState;
void *pOSInfo;
char name[100];
CLI_SYSTEM_EVENT_INFO CliSysEventInfo;
PSECURITY_TOKEN pSecurityToken;
struct UserInfo *pUserInfo;
NvBool bIsClientVirtualMode;
PNODE pCliSyncGpuBoostTree;
};
#ifndef __NVOC_CLASS_RmClient_TYPEDEF__
#define __NVOC_CLASS_RmClient_TYPEDEF__
typedef struct RmClient RmClient;
#endif /* __NVOC_CLASS_RmClient_TYPEDEF__ */
#ifndef __nvoc_class_id_RmClient
#define __nvoc_class_id_RmClient 0xb23d83
#endif /* __nvoc_class_id_RmClient */
extern const struct NVOC_CLASS_DEF __nvoc_class_def_RmClient;
#define __staticCast_RmClient(pThis) \
((pThis)->__nvoc_pbase_RmClient)
#ifdef __nvoc_client_h_disabled
#define __dynamicCast_RmClient(pThis) ((RmClient*)NULL)
#else //__nvoc_client_h_disabled
#define __dynamicCast_RmClient(pThis) \
((RmClient*)__nvoc_dynamicCast(staticCast((pThis), Dynamic), classInfo(RmClient)))
#endif //__nvoc_client_h_disabled
NV_STATUS __nvoc_objCreateDynamic_RmClient(RmClient**, Dynamic*, NvU32, va_list);
NV_STATUS __nvoc_objCreate_RmClient(RmClient**, Dynamic*, NvU32, struct PORT_MEM_ALLOCATOR * arg_pAllocator, struct RS_RES_ALLOC_PARAMS_INTERNAL * arg_pParams);
#define __objCreate_RmClient(ppNewObj, pParent, createFlags, arg_pAllocator, arg_pParams) \
__nvoc_objCreate_RmClient((ppNewObj), staticCast((pParent), Dynamic), (createFlags), arg_pAllocator, arg_pParams)
#define rmclientValidate(pClient, pSecInfo) rmclientValidate_DISPATCH(pClient, pSecInfo)
#define rmclientFreeResource(pClient, pServer, pParams) rmclientFreeResource_DISPATCH(pClient, pServer, pParams)
#define rmclientInterMap(pClient, pMapperRef, pMappableRef, pParams) rmclientInterMap_DISPATCH(pClient, pMapperRef, pMappableRef, pParams)
#define rmclientInterUnmap(pClient, pMapperRef, pParams) rmclientInterUnmap_DISPATCH(pClient, pMapperRef, pParams)
#define rmclientPostProcessPendingFreeList(pClient, ppFirstLowPriRef) rmclientPostProcessPendingFreeList_DISPATCH(pClient, ppFirstLowPriRef)
#define rmclientDestructResourceRef(pClient, pServer, pResourceRef) rmclientDestructResourceRef_DISPATCH(pClient, pServer, pResourceRef)
#define rmclientValidateNewResourceHandle(pClient, hResource, bRestrict) rmclientValidateNewResourceHandle_DISPATCH(pClient, hResource, bRestrict)
#define rmclientShareResource(pClient, pResourceRef, pSharePolicy, pCallContext) rmclientShareResource_DISPATCH(pClient, pResourceRef, pSharePolicy, pCallContext)
#define rmclientUnmapMemory(pClient, pResourceRef, pLockInfo, ppCpuMapping, pSecInfo) rmclientUnmapMemory_DISPATCH(pClient, pResourceRef, pLockInfo, ppCpuMapping, pSecInfo)
NV_STATUS rmclientValidate_IMPL(struct RmClient *pClient, const API_SECURITY_INFO *pSecInfo);
static inline NV_STATUS rmclientValidate_DISPATCH(struct RmClient *pClient, const API_SECURITY_INFO *pSecInfo) {
return pClient->__rmclientValidate__(pClient, pSecInfo);
}
NV_STATUS rmclientFreeResource_IMPL(struct RmClient *pClient, struct RsServer *pServer, struct RS_RES_FREE_PARAMS_INTERNAL *pParams);
static inline NV_STATUS rmclientFreeResource_DISPATCH(struct RmClient *pClient, struct RsServer *pServer, struct RS_RES_FREE_PARAMS_INTERNAL *pParams) {
return pClient->__rmclientFreeResource__(pClient, pServer, pParams);
}
NV_STATUS rmclientInterMap_IMPL(struct RmClient *pClient, struct RsResourceRef *pMapperRef, struct RsResourceRef *pMappableRef, struct RS_INTER_MAP_PARAMS *pParams);
static inline NV_STATUS rmclientInterMap_DISPATCH(struct RmClient *pClient, struct RsResourceRef *pMapperRef, struct RsResourceRef *pMappableRef, struct RS_INTER_MAP_PARAMS *pParams) {
return pClient->__rmclientInterMap__(pClient, pMapperRef, pMappableRef, pParams);
}
void rmclientInterUnmap_IMPL(struct RmClient *pClient, struct RsResourceRef *pMapperRef, struct RS_INTER_UNMAP_PARAMS *pParams);
static inline void rmclientInterUnmap_DISPATCH(struct RmClient *pClient, struct RsResourceRef *pMapperRef, struct RS_INTER_UNMAP_PARAMS *pParams) {
pClient->__rmclientInterUnmap__(pClient, pMapperRef, pParams);
}
NV_STATUS rmclientPostProcessPendingFreeList_IMPL(struct RmClient *pClient, struct RsResourceRef **ppFirstLowPriRef);
static inline NV_STATUS rmclientPostProcessPendingFreeList_DISPATCH(struct RmClient *pClient, struct RsResourceRef **ppFirstLowPriRef) {
return pClient->__rmclientPostProcessPendingFreeList__(pClient, ppFirstLowPriRef);
}
static inline NV_STATUS rmclientDestructResourceRef_DISPATCH(struct RmClient *pClient, RsServer *pServer, struct RsResourceRef *pResourceRef) {
return pClient->__rmclientDestructResourceRef__(pClient, pServer, pResourceRef);
}
static inline NV_STATUS rmclientValidateNewResourceHandle_DISPATCH(struct RmClient *pClient, NvHandle hResource, NvBool bRestrict) {
return pClient->__rmclientValidateNewResourceHandle__(pClient, hResource, bRestrict);
}
static inline NV_STATUS rmclientShareResource_DISPATCH(struct RmClient *pClient, struct RsResourceRef *pResourceRef, RS_SHARE_POLICY *pSharePolicy, struct CALL_CONTEXT *pCallContext) {
return pClient->__rmclientShareResource__(pClient, pResourceRef, pSharePolicy, pCallContext);
}
static inline NV_STATUS rmclientUnmapMemory_DISPATCH(struct RmClient *pClient, struct RsResourceRef *pResourceRef, struct RS_LOCK_INFO *pLockInfo, struct RsCpuMapping **ppCpuMapping, API_SECURITY_INFO *pSecInfo) {
return pClient->__rmclientUnmapMemory__(pClient, pResourceRef, pLockInfo, ppCpuMapping, pSecInfo);
}
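
/*
 * Illustrative sketch (not part of the generated header): virtual calls on an
 * RmClient go through the rmclientXxx() macros above, which expand to the
 * *_DISPATCH wrappers and ultimately to the function pointers installed by
 * __nvoc_init_funcTable_RmClient(). exampleValidateClient is a hypothetical
 * helper; pSecInfo is assumed to describe the calling process.
 */
static inline NV_STATUS exampleValidateClient(struct RmClient *pClient,
                                              const API_SECURITY_INFO *pSecInfo)
{
    // Equivalent to pClient->__rmclientValidate__(pClient, pSecInfo).
    return rmclientValidate(pClient, pSecInfo);
}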
NV_STATUS rmclientConstruct_IMPL(struct RmClient *arg_pClient, struct PORT_MEM_ALLOCATOR *arg_pAllocator, struct RS_RES_ALLOC_PARAMS_INTERNAL *arg_pParams);
#define __nvoc_rmclientConstruct(arg_pClient, arg_pAllocator, arg_pParams) rmclientConstruct_IMPL(arg_pClient, arg_pAllocator, arg_pParams)
void rmclientDestruct_IMPL(struct RmClient *pClient);
#define __nvoc_rmclientDestruct(pClient) rmclientDestruct_IMPL(pClient)
RS_PRIV_LEVEL rmclientGetCachedPrivilege_IMPL(struct RmClient *pClient);
#ifdef __nvoc_client_h_disabled
static inline RS_PRIV_LEVEL rmclientGetCachedPrivilege(struct RmClient *pClient) {
NV_ASSERT_FAILED_PRECOMP("RmClient was disabled!");
RS_PRIV_LEVEL ret;
portMemSet(&ret, 0, sizeof(RS_PRIV_LEVEL));
return ret;
}
#else //__nvoc_client_h_disabled
#define rmclientGetCachedPrivilege(pClient) rmclientGetCachedPrivilege_IMPL(pClient)
#endif //__nvoc_client_h_disabled
NvBool rmclientIsAdmin_IMPL(struct RmClient *pClient, RS_PRIV_LEVEL privLevel);
#ifdef __nvoc_client_h_disabled
static inline NvBool rmclientIsAdmin(struct RmClient *pClient, RS_PRIV_LEVEL privLevel) {
NV_ASSERT_FAILED_PRECOMP("RmClient was disabled!");
return NV_FALSE;
}
#else //__nvoc_client_h_disabled
#define rmclientIsAdmin(pClient, privLevel) rmclientIsAdmin_IMPL(pClient, privLevel)
#endif //__nvoc_client_h_disabled
void rmclientSetClientFlags_IMPL(struct RmClient *pClient, NvU32 clientFlags);
#ifdef __nvoc_client_h_disabled
static inline void rmclientSetClientFlags(struct RmClient *pClient, NvU32 clientFlags) {
NV_ASSERT_FAILED_PRECOMP("RmClient was disabled!");
}
#else //__nvoc_client_h_disabled
#define rmclientSetClientFlags(pClient, clientFlags) rmclientSetClientFlags_IMPL(pClient, clientFlags)
#endif //__nvoc_client_h_disabled
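
/*
 * Illustrative sketch (not part of the generated header): tagging a client
 * with one of the RMAPI_CLIENT_FLAG_* values defined earlier in this header.
 * exampleMarkClientDeletePending is a hypothetical helper; how the flag word
 * is combined with any existing flags is up to rmclientSetClientFlags_IMPL().
 */
static inline void exampleMarkClientDeletePending(struct RmClient *pClient)
{
    rmclientSetClientFlags(pClient, RMAPI_CLIENT_FLAG_DELETE_PENDING);
}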
void *rmclientGetSecurityToken_IMPL(struct RmClient *pClient);
#ifdef __nvoc_client_h_disabled
static inline void *rmclientGetSecurityToken(struct RmClient *pClient) {
NV_ASSERT_FAILED_PRECOMP("RmClient was disabled!");
return NULL;
}
#else //__nvoc_client_h_disabled
#define rmclientGetSecurityToken(pClient) rmclientGetSecurityToken_IMPL(pClient)
#endif //__nvoc_client_h_disabled
NvBool rmclientIsCapableOrAdmin_IMPL(struct RmClient *pClient, NvU32 capability, RS_PRIV_LEVEL privLevel);
#ifdef __nvoc_client_h_disabled
static inline NvBool rmclientIsCapableOrAdmin(struct RmClient *pClient, NvU32 capability, RS_PRIV_LEVEL privLevel) {
NV_ASSERT_FAILED_PRECOMP("RmClient was disabled!");
return NV_FALSE;
}
#else //__nvoc_client_h_disabled
#define rmclientIsCapableOrAdmin(pClient, capability, privLevel) rmclientIsCapableOrAdmin_IMPL(pClient, capability, privLevel)
#endif //__nvoc_client_h_disabled
NvBool rmclientIsCapable_IMPL(struct RmClient *pClient, NvU32 capability);
#ifdef __nvoc_client_h_disabled
static inline NvBool rmclientIsCapable(struct RmClient *pClient, NvU32 capability) {
NV_ASSERT_FAILED_PRECOMP("RmClient was disabled!");
return NV_FALSE;
}
#else //__nvoc_client_h_disabled
#define rmclientIsCapable(pClient, capability) rmclientIsCapable_IMPL(pClient, capability)
#endif //__nvoc_client_h_disabled
#undef PRIVATE_FIELD
MAKE_LIST(RmClientList, RmClient*);
extern RmClientList g_clientListBehindGpusLock;
MAKE_LIST(UserInfoList, UserInfo*);
extern UserInfoList g_userInfoList;
//
// Convenience rmclientXxxByHandle utility functions. Ideally, code operates
// on pClient directly rather than hClient; these are provided for
// compatibility with hClient-heavy code (see the usage sketch after these
// declarations).
//
RS_PRIV_LEVEL rmclientGetCachedPrivilegeByHandle(NvHandle hClient);
NvBool rmclientIsAdminByHandle(NvHandle hClient, RS_PRIV_LEVEL privLevel);
NvBool rmclientSetClientFlagsByHandle(NvHandle hClient, NvU32 clientFlags);
void rmclientPromoteDebuggerStateByHandle(NvHandle hClient, NvU32 newMinimumState);
void *rmclientGetSecurityTokenByHandle(NvHandle hClient);
NV_STATUS rmclientUserClientSecurityCheckByHandle(NvHandle hClient, const API_SECURITY_INFO *pSecInfo);
NvBool rmclientIsCapableOrAdminByHandle(NvHandle hClient, NvU32 capability, RS_PRIV_LEVEL privLevel);
NvBool rmclientIsCapableByHandle(NvHandle hClient, NvU32 capability);
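
/*
 * Illustrative sketch (not part of the generated header): code that only has
 * an hClient can use the ByHandle helpers declared above instead of resolving
 * the RmClient pointer itself. exampleHandleIsAdmin is a hypothetical helper
 * that fetches the cached privilege level and checks it for admin rights.
 */
static inline NvBool exampleHandleIsAdmin(NvHandle hClient)
{
    RS_PRIV_LEVEL privLevel = rmclientGetCachedPrivilegeByHandle(hClient);
    return rmclientIsAdminByHandle(hClient, privLevel);
}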
#endif
#ifdef __cplusplus
} // extern "C"
#endif
#endif // _G_CLIENT_NVOC_H_

File diff suppressed because it is too large.

@@ -0,0 +1,635 @@
#ifndef _G_CLIENT_RESOURCE_NVOC_H_
#define _G_CLIENT_RESOURCE_NVOC_H_
#include "nvoc/runtime.h"
#ifdef __cplusplus
extern "C" {
#endif
/*
* SPDX-FileCopyrightText: Copyright (c) 2016-2022 NVIDIA CORPORATION & AFFILIATES. All rights reserved.
* SPDX-License-Identifier: MIT
*
* Permission is hereby granted, free of charge, to any person obtaining a
* copy of this software and associated documentation files (the "Software"),
* to deal in the Software without restriction, including without limitation
* the rights to use, copy, modify, merge, publish, distribute, sublicense,
* and/or sell copies of the Software, and to permit persons to whom the
* Software is furnished to do so, subject to the following conditions:
*
* The above copyright notice and this permission notice shall be included in
* all copies or substantial portions of the Software.
*
* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
* IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
* FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL
* THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
* LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
* FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER
* DEALINGS IN THE SOFTWARE.
*/
#include "g_client_resource_nvoc.h"
#ifndef _CLIENT_RESOURCE_H_
#define _CLIENT_RESOURCE_H_
#include "resserv/resserv.h"
#include "nvoc/prelude.h"
#include "resserv/rs_client.h"
#include "rmapi/resource.h"
#include "rmapi/event.h"
#include "rmapi/control.h"
#include "ctrl/ctrl0000/ctrl0000gpu.h"
#include "ctrl/ctrl0000/ctrl0000gpuacct.h"
#include "ctrl/ctrl0000/ctrl0000gsync.h"
#include "ctrl/ctrl0000/ctrl0000diag.h"
#include "ctrl/ctrl0000/ctrl0000event.h"
#include "ctrl/ctrl0000/ctrl0000nvd.h"
#include "ctrl/ctrl0000/ctrl0000proc.h"
#include "ctrl/ctrl0000/ctrl0000syncgpuboost.h"
#include "ctrl/ctrl0000/ctrl0000gspc.h"
#include "ctrl/ctrl0000/ctrl0000vgpu.h"
#include "ctrl/ctrl0000/ctrl0000client.h"
/* include appropriate os-specific command header */
#if defined(NV_UNIX) || defined(NV_QNX)
#include "ctrl/ctrl0000/ctrl0000unix.h"
#endif
#ifdef NVOC_CLIENT_RESOURCE_H_PRIVATE_ACCESS_ALLOWED
#define PRIVATE_FIELD(x) x
#else
#define PRIVATE_FIELD(x) NVOC_PRIVATE_FIELD(x)
#endif
struct RmClientResource {
const struct NVOC_RTTI *__nvoc_rtti;
struct RsClientResource __nvoc_base_RsClientResource;
struct RmResourceCommon __nvoc_base_RmResourceCommon;
struct Notifier __nvoc_base_Notifier;
struct Object *__nvoc_pbase_Object;
struct RsResource *__nvoc_pbase_RsResource;
struct RsClientResource *__nvoc_pbase_RsClientResource;
struct RmResourceCommon *__nvoc_pbase_RmResourceCommon;
struct INotifier *__nvoc_pbase_INotifier;
struct Notifier *__nvoc_pbase_Notifier;
struct RmClientResource *__nvoc_pbase_RmClientResource;
NvBool (*__cliresAccessCallback__)(struct RmClientResource *, struct RsClient *, void *, RsAccessRight);
NvBool (*__cliresShareCallback__)(struct RmClientResource *, struct RsClient *, struct RsResourceRef *, RS_SHARE_POLICY *);
NV_STATUS (*__cliresCtrlCmdSystemGetCpuInfo__)(struct RmClientResource *, NV0000_CTRL_SYSTEM_GET_CPU_INFO_PARAMS *);
NV_STATUS (*__cliresCtrlCmdSystemGetFeatures__)(struct RmClientResource *, NV0000_CTRL_SYSTEM_GET_FEATURES_PARAMS *);
NV_STATUS (*__cliresCtrlCmdSystemGetBuildVersionV2__)(struct RmClientResource *, NV0000_CTRL_SYSTEM_GET_BUILD_VERSION_V2_PARAMS *);
NV_STATUS (*__cliresCtrlCmdSystemSetMemorySize__)(struct RmClientResource *, NV0000_CTRL_SYSTEM_SET_MEMORY_SIZE_PARAMS *);
NV_STATUS (*__cliresCtrlCmdSystemGetClassList__)(struct RmClientResource *, NV0000_CTRL_SYSTEM_GET_CLASSLIST_PARAMS *);
NV_STATUS (*__cliresCtrlCmdSystemNotifyEvent__)(struct RmClientResource *, NV0000_CTRL_SYSTEM_NOTIFY_EVENT_PARAMS *);
NV_STATUS (*__cliresCtrlCmdSystemDebugCtrlRmMsg__)(struct RmClientResource *, NV0000_CTRL_SYSTEM_DEBUG_RMMSG_CTRL_PARAMS *);
NV_STATUS (*__cliresCtrlCmdSystemGetPrivilegedStatus__)(struct RmClientResource *, NV0000_CTRL_SYSTEM_GET_PRIVILEGED_STATUS_PARAMS *);
NV_STATUS (*__cliresCtrlCmdSystemGetFabricStatus__)(struct RmClientResource *, NV0000_CTRL_SYSTEM_GET_FABRIC_STATUS_PARAMS *);
NV_STATUS (*__cliresCtrlCmdSystemGetRmInstanceId__)(struct RmClientResource *, NV0000_CTRL_SYSTEM_GET_RM_INSTANCE_ID_PARAMS *);
NV_STATUS (*__cliresCtrlCmdSystemGetClientDatabaseInfo__)(struct RmClientResource *, NV0000_CTRL_SYSTEM_GET_CLIENT_DATABASE_INFO_PARAMS *);
NV_STATUS (*__cliresCtrlCmdClientGetAddrSpaceType__)(struct RmClientResource *, NV0000_CTRL_CLIENT_GET_ADDR_SPACE_TYPE_PARAMS *);
NV_STATUS (*__cliresCtrlCmdClientGetHandleInfo__)(struct RmClientResource *, NV0000_CTRL_CLIENT_GET_HANDLE_INFO_PARAMS *);
NV_STATUS (*__cliresCtrlCmdClientGetAccessRights__)(struct RmClientResource *, NV0000_CTRL_CLIENT_GET_ACCESS_RIGHTS_PARAMS *);
NV_STATUS (*__cliresCtrlCmdClientSetInheritedSharePolicy__)(struct RmClientResource *, NV0000_CTRL_CLIENT_SET_INHERITED_SHARE_POLICY_PARAMS *);
NV_STATUS (*__cliresCtrlCmdClientShareObject__)(struct RmClientResource *, NV0000_CTRL_CLIENT_SHARE_OBJECT_PARAMS *);
NV_STATUS (*__cliresCtrlCmdClientGetChildHandle__)(struct RmClientResource *, NV0000_CTRL_CMD_CLIENT_GET_CHILD_HANDLE_PARAMS *);
NV_STATUS (*__cliresCtrlCmdGpuGetAttachedIds__)(struct RmClientResource *, NV0000_CTRL_GPU_GET_ATTACHED_IDS_PARAMS *);
NV_STATUS (*__cliresCtrlCmdGpuGetIdInfo__)(struct RmClientResource *, NV0000_CTRL_GPU_GET_ID_INFO_PARAMS *);
NV_STATUS (*__cliresCtrlCmdGpuGetIdInfoV2__)(struct RmClientResource *, NV0000_CTRL_GPU_GET_ID_INFO_V2_PARAMS *);
NV_STATUS (*__cliresCtrlCmdGpuGetInitStatus__)(struct RmClientResource *, NV0000_CTRL_GPU_GET_INIT_STATUS_PARAMS *);
NV_STATUS (*__cliresCtrlCmdGpuGetDeviceIds__)(struct RmClientResource *, NV0000_CTRL_GPU_GET_DEVICE_IDS_PARAMS *);
NV_STATUS (*__cliresCtrlCmdGpuGetProbedIds__)(struct RmClientResource *, NV0000_CTRL_GPU_GET_PROBED_IDS_PARAMS *);
NV_STATUS (*__cliresCtrlCmdGpuAttachIds__)(struct RmClientResource *, NV0000_CTRL_GPU_ATTACH_IDS_PARAMS *);
NV_STATUS (*__cliresCtrlCmdGpuDetachIds__)(struct RmClientResource *, NV0000_CTRL_GPU_DETACH_IDS_PARAMS *);
NV_STATUS (*__cliresCtrlCmdGpuGetSvmSize__)(struct RmClientResource *, NV0000_CTRL_GPU_GET_SVM_SIZE_PARAMS *);
NV_STATUS (*__cliresCtrlCmdGpuGetPciInfo__)(struct RmClientResource *, NV0000_CTRL_GPU_GET_PCI_INFO_PARAMS *);
NV_STATUS (*__cliresCtrlCmdGpuGetUuidInfo__)(struct RmClientResource *, NV0000_CTRL_GPU_GET_UUID_INFO_PARAMS *);
NV_STATUS (*__cliresCtrlCmdGpuGetUuidFromGpuId__)(struct RmClientResource *, NV0000_CTRL_GPU_GET_UUID_FROM_GPU_ID_PARAMS *);
NV_STATUS (*__cliresCtrlCmdGpuModifyGpuDrainState__)(struct RmClientResource *, NV0000_CTRL_GPU_MODIFY_DRAIN_STATE_PARAMS *);
NV_STATUS (*__cliresCtrlCmdGpuQueryGpuDrainState__)(struct RmClientResource *, NV0000_CTRL_GPU_QUERY_DRAIN_STATE_PARAMS *);
NV_STATUS (*__cliresCtrlCmdGpuGetMemOpEnable__)(struct RmClientResource *, NV0000_CTRL_GPU_GET_MEMOP_ENABLE_PARAMS *);
NV_STATUS (*__cliresCtrlCmdGpuDisableNvlinkInit__)(struct RmClientResource *, NV0000_CTRL_GPU_DISABLE_NVLINK_INIT_PARAMS *);
NV_STATUS (*__cliresCtrlCmdLegacyConfig__)(struct RmClientResource *, NV0000_CTRL_GPU_LEGACY_CONFIG_PARAMS *);
NV_STATUS (*__cliresCtrlCmdGsyncGetAttachedIds__)(struct RmClientResource *, NV0000_CTRL_GSYNC_GET_ATTACHED_IDS_PARAMS *);
NV_STATUS (*__cliresCtrlCmdGsyncGetIdInfo__)(struct RmClientResource *, NV0000_CTRL_GSYNC_GET_ID_INFO_PARAMS *);
NV_STATUS (*__cliresCtrlCmdEventSetNotification__)(struct RmClientResource *, NV0000_CTRL_EVENT_SET_NOTIFICATION_PARAMS *);
NV_STATUS (*__cliresCtrlCmdEventGetSystemEventStatus__)(struct RmClientResource *, NV0000_CTRL_GET_SYSTEM_EVENT_STATUS_PARAMS *);
NV_STATUS (*__cliresCtrlCmdOsUnixExportObjectToFd__)(struct RmClientResource *, NV0000_CTRL_OS_UNIX_EXPORT_OBJECT_TO_FD_PARAMS *);
NV_STATUS (*__cliresCtrlCmdOsUnixImportObjectFromFd__)(struct RmClientResource *, NV0000_CTRL_OS_UNIX_IMPORT_OBJECT_FROM_FD_PARAMS *);
NV_STATUS (*__cliresCtrlCmdOsUnixGetExportObjectInfo__)(struct RmClientResource *, NV0000_CTRL_OS_UNIX_GET_EXPORT_OBJECT_INFO_PARAMS *);
NV_STATUS (*__cliresCtrlCmdOsUnixCreateExportObjectFd__)(struct RmClientResource *, NV0000_CTRL_OS_UNIX_CREATE_EXPORT_OBJECT_FD_PARAMS *);
NV_STATUS (*__cliresCtrlCmdOsUnixExportObjectsToFd__)(struct RmClientResource *, NV0000_CTRL_OS_UNIX_EXPORT_OBJECTS_TO_FD_PARAMS *);
NV_STATUS (*__cliresCtrlCmdOsUnixImportObjectsFromFd__)(struct RmClientResource *, NV0000_CTRL_OS_UNIX_IMPORT_OBJECTS_FROM_FD_PARAMS *);
NV_STATUS (*__cliresCtrlCmdOsUnixFlushUserCache__)(struct RmClientResource *, NV0000_CTRL_OS_UNIX_FLUSH_USER_CACHE_PARAMS *);
NV_STATUS (*__cliresCtrlCmdSetSubProcessID__)(struct RmClientResource *, NV0000_CTRL_SET_SUB_PROCESS_ID_PARAMS *);
NV_STATUS (*__cliresCtrlCmdDisableSubProcessUserdIsolation__)(struct RmClientResource *, NV0000_CTRL_DISABLE_SUB_PROCESS_USERD_ISOLATION_PARAMS *);
NV_STATUS (*__cliresCtrlCmdSystemSyncExternalFabricMgmt__)(struct RmClientResource *, NV0000_CTRL_CMD_SYSTEM_SYNC_EXTERNAL_FABRIC_MGMT_PARAMS *);
NV_STATUS (*__cliresControl__)(struct RmClientResource *, struct CALL_CONTEXT *, struct RS_RES_CONTROL_PARAMS_INTERNAL *);
NV_STATUS (*__cliresUnmap__)(struct RmClientResource *, struct CALL_CONTEXT *, RsCpuMapping *);
NV_STATUS (*__cliresMapTo__)(struct RmClientResource *, RS_RES_MAP_TO_PARAMS *);
void (*__cliresSetNotificationShare__)(struct RmClientResource *, struct NotifShare *);
NV_STATUS (*__cliresControlFilter__)(struct RmClientResource *, struct CALL_CONTEXT *, struct RS_RES_CONTROL_PARAMS_INTERNAL *);
void (*__cliresAddAdditionalDependants__)(struct RsClient *, struct RmClientResource *, RsResourceRef *);
NvU32 (*__cliresGetRefCount__)(struct RmClientResource *);
NV_STATUS (*__cliresUnregisterEvent__)(struct RmClientResource *, NvHandle, NvHandle, NvHandle, NvHandle);
NvBool (*__cliresCanCopy__)(struct RmClientResource *);
NV_STATUS (*__cliresControl_Prologue__)(struct RmClientResource *, struct CALL_CONTEXT *, struct RS_RES_CONTROL_PARAMS_INTERNAL *);
void (*__cliresPreDestruct__)(struct RmClientResource *);
NV_STATUS (*__cliresUnmapFrom__)(struct RmClientResource *, RS_RES_UNMAP_FROM_PARAMS *);
PEVENTNOTIFICATION *(*__cliresGetNotificationListPtr__)(struct RmClientResource *);
void (*__cliresControl_Epilogue__)(struct RmClientResource *, struct CALL_CONTEXT *, struct RS_RES_CONTROL_PARAMS_INTERNAL *);
struct NotifShare *(*__cliresGetNotificationShare__)(struct RmClientResource *);
NV_STATUS (*__cliresControlLookup__)(struct RmClientResource *, struct RS_RES_CONTROL_PARAMS_INTERNAL *, const struct NVOC_EXPORTED_METHOD_DEF **);
NV_STATUS (*__cliresMap__)(struct RmClientResource *, struct CALL_CONTEXT *, RS_CPU_MAP_PARAMS *, RsCpuMapping *);
NV_STATUS (*__cliresGetOrAllocNotifShare__)(struct RmClientResource *, NvHandle, NvHandle, struct NotifShare **);
};
#ifndef __NVOC_CLASS_RmClientResource_TYPEDEF__
#define __NVOC_CLASS_RmClientResource_TYPEDEF__
typedef struct RmClientResource RmClientResource;
#endif /* __NVOC_CLASS_RmClientResource_TYPEDEF__ */
#ifndef __nvoc_class_id_RmClientResource
#define __nvoc_class_id_RmClientResource 0x37a701
#endif /* __nvoc_class_id_RmClientResource */
extern const struct NVOC_CLASS_DEF __nvoc_class_def_RmClientResource;
#define __staticCast_RmClientResource(pThis) \
((pThis)->__nvoc_pbase_RmClientResource)
#ifdef __nvoc_client_resource_h_disabled
#define __dynamicCast_RmClientResource(pThis) ((RmClientResource*)NULL)
#else //__nvoc_client_resource_h_disabled
#define __dynamicCast_RmClientResource(pThis) \
((RmClientResource*)__nvoc_dynamicCast(staticCast((pThis), Dynamic), classInfo(RmClientResource)))
#endif //__nvoc_client_resource_h_disabled
NV_STATUS __nvoc_objCreateDynamic_RmClientResource(RmClientResource**, Dynamic*, NvU32, va_list);
NV_STATUS __nvoc_objCreate_RmClientResource(RmClientResource**, Dynamic*, NvU32, struct CALL_CONTEXT * arg_pCallContext, struct RS_RES_ALLOC_PARAMS_INTERNAL * arg_pParams);
#define __objCreate_RmClientResource(ppNewObj, pParent, createFlags, arg_pCallContext, arg_pParams) \
__nvoc_objCreate_RmClientResource((ppNewObj), staticCast((pParent), Dynamic), (createFlags), arg_pCallContext, arg_pParams)
#define cliresAccessCallback(pRmCliRes, pInvokingClient, pAllocParams, accessRight) cliresAccessCallback_DISPATCH(pRmCliRes, pInvokingClient, pAllocParams, accessRight)
#define cliresShareCallback(pRmCliRes, pInvokingClient, pParentRef, pSharePolicy) cliresShareCallback_DISPATCH(pRmCliRes, pInvokingClient, pParentRef, pSharePolicy)
#define cliresCtrlCmdSystemGetCpuInfo(pRmCliRes, pCpuInfoParams) cliresCtrlCmdSystemGetCpuInfo_DISPATCH(pRmCliRes, pCpuInfoParams)
#define cliresCtrlCmdSystemGetFeatures(pRmCliRes, pParams) cliresCtrlCmdSystemGetFeatures_DISPATCH(pRmCliRes, pParams)
#define cliresCtrlCmdSystemGetBuildVersionV2(pRmCliRes, pParams) cliresCtrlCmdSystemGetBuildVersionV2_DISPATCH(pRmCliRes, pParams)
#define cliresCtrlCmdSystemSetMemorySize(pRmCliRes, pParams) cliresCtrlCmdSystemSetMemorySize_DISPATCH(pRmCliRes, pParams)
#define cliresCtrlCmdSystemGetClassList(pRmCliRes, pParams) cliresCtrlCmdSystemGetClassList_DISPATCH(pRmCliRes, pParams)
#define cliresCtrlCmdSystemNotifyEvent(pRmCliRes, pParams) cliresCtrlCmdSystemNotifyEvent_DISPATCH(pRmCliRes, pParams)
#define cliresCtrlCmdSystemDebugCtrlRmMsg(pRmCliRes, pParams) cliresCtrlCmdSystemDebugCtrlRmMsg_DISPATCH(pRmCliRes, pParams)
#define cliresCtrlCmdSystemGetPrivilegedStatus(pRmCliRes, pParams) cliresCtrlCmdSystemGetPrivilegedStatus_DISPATCH(pRmCliRes, pParams)
#define cliresCtrlCmdSystemGetFabricStatus(pRmCliRes, pParams) cliresCtrlCmdSystemGetFabricStatus_DISPATCH(pRmCliRes, pParams)
#define cliresCtrlCmdSystemGetRmInstanceId(pRmCliRes, pRmInstanceIdParams) cliresCtrlCmdSystemGetRmInstanceId_DISPATCH(pRmCliRes, pRmInstanceIdParams)
#define cliresCtrlCmdSystemGetClientDatabaseInfo(pRmCliRes, pParams) cliresCtrlCmdSystemGetClientDatabaseInfo_DISPATCH(pRmCliRes, pParams)
#define cliresCtrlCmdClientGetAddrSpaceType(pRmCliRes, pParams) cliresCtrlCmdClientGetAddrSpaceType_DISPATCH(pRmCliRes, pParams)
#define cliresCtrlCmdClientGetHandleInfo(pRmCliRes, pParams) cliresCtrlCmdClientGetHandleInfo_DISPATCH(pRmCliRes, pParams)
#define cliresCtrlCmdClientGetAccessRights(pRmCliRes, pParams) cliresCtrlCmdClientGetAccessRights_DISPATCH(pRmCliRes, pParams)
#define cliresCtrlCmdClientSetInheritedSharePolicy(pRmCliRes, pParams) cliresCtrlCmdClientSetInheritedSharePolicy_DISPATCH(pRmCliRes, pParams)
#define cliresCtrlCmdClientShareObject(pRmCliRes, pParams) cliresCtrlCmdClientShareObject_DISPATCH(pRmCliRes, pParams)
#define cliresCtrlCmdClientGetChildHandle(pRmCliRes, pParams) cliresCtrlCmdClientGetChildHandle_DISPATCH(pRmCliRes, pParams)
#define cliresCtrlCmdGpuGetAttachedIds(pRmCliRes, pGpuAttachedIds) cliresCtrlCmdGpuGetAttachedIds_DISPATCH(pRmCliRes, pGpuAttachedIds)
#define cliresCtrlCmdGpuGetIdInfo(pRmCliRes, pGpuIdInfoParams) cliresCtrlCmdGpuGetIdInfo_DISPATCH(pRmCliRes, pGpuIdInfoParams)
#define cliresCtrlCmdGpuGetIdInfoV2(pRmCliRes, pGpuIdInfoParams) cliresCtrlCmdGpuGetIdInfoV2_DISPATCH(pRmCliRes, pGpuIdInfoParams)
#define cliresCtrlCmdGpuGetInitStatus(pRmCliRes, pGpuInitStatusParams) cliresCtrlCmdGpuGetInitStatus_DISPATCH(pRmCliRes, pGpuInitStatusParams)
#define cliresCtrlCmdGpuGetDeviceIds(pRmCliRes, pDeviceIdsParams) cliresCtrlCmdGpuGetDeviceIds_DISPATCH(pRmCliRes, pDeviceIdsParams)
#define cliresCtrlCmdGpuGetProbedIds(pRmCliRes, pGpuProbedIds) cliresCtrlCmdGpuGetProbedIds_DISPATCH(pRmCliRes, pGpuProbedIds)
#define cliresCtrlCmdGpuAttachIds(pRmCliRes, pGpuAttachIds) cliresCtrlCmdGpuAttachIds_DISPATCH(pRmCliRes, pGpuAttachIds)
#define cliresCtrlCmdGpuDetachIds(pRmCliRes, pGpuDetachIds) cliresCtrlCmdGpuDetachIds_DISPATCH(pRmCliRes, pGpuDetachIds)
#define cliresCtrlCmdGpuGetSvmSize(pRmCliRes, pSvmSizeGetParams) cliresCtrlCmdGpuGetSvmSize_DISPATCH(pRmCliRes, pSvmSizeGetParams)
#define cliresCtrlCmdGpuGetPciInfo(pRmCliRes, pPciInfoParams) cliresCtrlCmdGpuGetPciInfo_DISPATCH(pRmCliRes, pPciInfoParams)
#define cliresCtrlCmdGpuGetUuidInfo(pRmCliRes, pParams) cliresCtrlCmdGpuGetUuidInfo_DISPATCH(pRmCliRes, pParams)
#define cliresCtrlCmdGpuGetUuidFromGpuId(pRmCliRes, pParams) cliresCtrlCmdGpuGetUuidFromGpuId_DISPATCH(pRmCliRes, pParams)
#define cliresCtrlCmdGpuModifyGpuDrainState(pRmCliRes, pParams) cliresCtrlCmdGpuModifyGpuDrainState_DISPATCH(pRmCliRes, pParams)
#define cliresCtrlCmdGpuQueryGpuDrainState(pRmCliRes, pParams) cliresCtrlCmdGpuQueryGpuDrainState_DISPATCH(pRmCliRes, pParams)
#define cliresCtrlCmdGpuGetMemOpEnable(pRmCliRes, pMemOpEnableParams) cliresCtrlCmdGpuGetMemOpEnable_DISPATCH(pRmCliRes, pMemOpEnableParams)
#define cliresCtrlCmdGpuDisableNvlinkInit(pRmCliRes, pParams) cliresCtrlCmdGpuDisableNvlinkInit_DISPATCH(pRmCliRes, pParams)
#define cliresCtrlCmdLegacyConfig(pRmCliRes, pParams) cliresCtrlCmdLegacyConfig_DISPATCH(pRmCliRes, pParams)
#define cliresCtrlCmdGsyncGetAttachedIds(pRmCliRes, pGsyncAttachedIds) cliresCtrlCmdGsyncGetAttachedIds_DISPATCH(pRmCliRes, pGsyncAttachedIds)
#define cliresCtrlCmdGsyncGetIdInfo(pRmCliRes, pGsyncIdInfoParams) cliresCtrlCmdGsyncGetIdInfo_DISPATCH(pRmCliRes, pGsyncIdInfoParams)
#define cliresCtrlCmdEventSetNotification(pRmCliRes, pEventSetNotificationParams) cliresCtrlCmdEventSetNotification_DISPATCH(pRmCliRes, pEventSetNotificationParams)
#define cliresCtrlCmdEventGetSystemEventStatus(pRmCliRes, pSystemEventStatusParams) cliresCtrlCmdEventGetSystemEventStatus_DISPATCH(pRmCliRes, pSystemEventStatusParams)
#define cliresCtrlCmdOsUnixExportObjectToFd(pRmCliRes, pParams) cliresCtrlCmdOsUnixExportObjectToFd_DISPATCH(pRmCliRes, pParams)
#define cliresCtrlCmdOsUnixImportObjectFromFd(pRmCliRes, pParams) cliresCtrlCmdOsUnixImportObjectFromFd_DISPATCH(pRmCliRes, pParams)
#define cliresCtrlCmdOsUnixGetExportObjectInfo(pRmCliRes, pParams) cliresCtrlCmdOsUnixGetExportObjectInfo_DISPATCH(pRmCliRes, pParams)
#define cliresCtrlCmdOsUnixCreateExportObjectFd(pRmCliRes, pParams) cliresCtrlCmdOsUnixCreateExportObjectFd_DISPATCH(pRmCliRes, pParams)
#define cliresCtrlCmdOsUnixExportObjectsToFd(pRmCliRes, pParams) cliresCtrlCmdOsUnixExportObjectsToFd_DISPATCH(pRmCliRes, pParams)
#define cliresCtrlCmdOsUnixImportObjectsFromFd(pRmCliRes, pParams) cliresCtrlCmdOsUnixImportObjectsFromFd_DISPATCH(pRmCliRes, pParams)
#define cliresCtrlCmdOsUnixFlushUserCache(pRmCliRes, pAddressSpaceParams) cliresCtrlCmdOsUnixFlushUserCache_DISPATCH(pRmCliRes, pAddressSpaceParams)
#define cliresCtrlCmdSetSubProcessID(pRmCliRes, pParams) cliresCtrlCmdSetSubProcessID_DISPATCH(pRmCliRes, pParams)
#define cliresCtrlCmdDisableSubProcessUserdIsolation(pRmCliRes, pParams) cliresCtrlCmdDisableSubProcessUserdIsolation_DISPATCH(pRmCliRes, pParams)
#define cliresCtrlCmdSystemSyncExternalFabricMgmt(pRmCliRes, pExtFabricMgmtParams) cliresCtrlCmdSystemSyncExternalFabricMgmt_DISPATCH(pRmCliRes, pExtFabricMgmtParams)
#define cliresControl(pResource, pCallContext, pParams) cliresControl_DISPATCH(pResource, pCallContext, pParams)
#define cliresUnmap(pResource, pCallContext, pCpuMapping) cliresUnmap_DISPATCH(pResource, pCallContext, pCpuMapping)
#define cliresMapTo(pResource, pParams) cliresMapTo_DISPATCH(pResource, pParams)
#define cliresSetNotificationShare(pNotifier, pNotifShare) cliresSetNotificationShare_DISPATCH(pNotifier, pNotifShare)
#define cliresControlFilter(pResource, pCallContext, pParams) cliresControlFilter_DISPATCH(pResource, pCallContext, pParams)
#define cliresAddAdditionalDependants(pClient, pResource, pReference) cliresAddAdditionalDependants_DISPATCH(pClient, pResource, pReference)
#define cliresGetRefCount(pResource) cliresGetRefCount_DISPATCH(pResource)
#define cliresUnregisterEvent(pNotifier, hNotifierClient, hNotifierResource, hEventClient, hEvent) cliresUnregisterEvent_DISPATCH(pNotifier, hNotifierClient, hNotifierResource, hEventClient, hEvent)
#define cliresCanCopy(pResource) cliresCanCopy_DISPATCH(pResource)
#define cliresControl_Prologue(pResource, pCallContext, pParams) cliresControl_Prologue_DISPATCH(pResource, pCallContext, pParams)
#define cliresPreDestruct(pResource) cliresPreDestruct_DISPATCH(pResource)
#define cliresUnmapFrom(pResource, pParams) cliresUnmapFrom_DISPATCH(pResource, pParams)
#define cliresGetNotificationListPtr(pNotifier) cliresGetNotificationListPtr_DISPATCH(pNotifier)
#define cliresControl_Epilogue(pResource, pCallContext, pParams) cliresControl_Epilogue_DISPATCH(pResource, pCallContext, pParams)
#define cliresGetNotificationShare(pNotifier) cliresGetNotificationShare_DISPATCH(pNotifier)
#define cliresControlLookup(pResource, pParams, ppEntry) cliresControlLookup_DISPATCH(pResource, pParams, ppEntry)
#define cliresMap(pResource, pCallContext, pParams, pCpuMapping) cliresMap_DISPATCH(pResource, pCallContext, pParams, pCpuMapping)
#define cliresGetOrAllocNotifShare(pNotifier, hNotifierClient, hNotifierResource, ppNotifShare) cliresGetOrAllocNotifShare_DISPATCH(pNotifier, hNotifierClient, hNotifierResource, ppNotifShare)
NvBool cliresAccessCallback_IMPL(struct RmClientResource *pRmCliRes, struct RsClient *pInvokingClient, void *pAllocParams, RsAccessRight accessRight);
static inline NvBool cliresAccessCallback_DISPATCH(struct RmClientResource *pRmCliRes, struct RsClient *pInvokingClient, void *pAllocParams, RsAccessRight accessRight) {
return pRmCliRes->__cliresAccessCallback__(pRmCliRes, pInvokingClient, pAllocParams, accessRight);
}
NvBool cliresShareCallback_IMPL(struct RmClientResource *pRmCliRes, struct RsClient *pInvokingClient, struct RsResourceRef *pParentRef, RS_SHARE_POLICY *pSharePolicy);
static inline NvBool cliresShareCallback_DISPATCH(struct RmClientResource *pRmCliRes, struct RsClient *pInvokingClient, struct RsResourceRef *pParentRef, RS_SHARE_POLICY *pSharePolicy) {
return pRmCliRes->__cliresShareCallback__(pRmCliRes, pInvokingClient, pParentRef, pSharePolicy);
}
NV_STATUS cliresCtrlCmdSystemGetCpuInfo_IMPL(struct RmClientResource *pRmCliRes, NV0000_CTRL_SYSTEM_GET_CPU_INFO_PARAMS *pCpuInfoParams);
static inline NV_STATUS cliresCtrlCmdSystemGetCpuInfo_DISPATCH(struct RmClientResource *pRmCliRes, NV0000_CTRL_SYSTEM_GET_CPU_INFO_PARAMS *pCpuInfoParams) {
return pRmCliRes->__cliresCtrlCmdSystemGetCpuInfo__(pRmCliRes, pCpuInfoParams);
}
NV_STATUS cliresCtrlCmdSystemGetFeatures_IMPL(struct RmClientResource *pRmCliRes, NV0000_CTRL_SYSTEM_GET_FEATURES_PARAMS *pParams);
static inline NV_STATUS cliresCtrlCmdSystemGetFeatures_DISPATCH(struct RmClientResource *pRmCliRes, NV0000_CTRL_SYSTEM_GET_FEATURES_PARAMS *pParams) {
return pRmCliRes->__cliresCtrlCmdSystemGetFeatures__(pRmCliRes, pParams);
}
NV_STATUS cliresCtrlCmdSystemGetBuildVersionV2_IMPL(struct RmClientResource *pRmCliRes, NV0000_CTRL_SYSTEM_GET_BUILD_VERSION_V2_PARAMS *pParams);
static inline NV_STATUS cliresCtrlCmdSystemGetBuildVersionV2_DISPATCH(struct RmClientResource *pRmCliRes, NV0000_CTRL_SYSTEM_GET_BUILD_VERSION_V2_PARAMS *pParams) {
return pRmCliRes->__cliresCtrlCmdSystemGetBuildVersionV2__(pRmCliRes, pParams);
}
NV_STATUS cliresCtrlCmdSystemSetMemorySize_IMPL(struct RmClientResource *pRmCliRes, NV0000_CTRL_SYSTEM_SET_MEMORY_SIZE_PARAMS *pParams);
static inline NV_STATUS cliresCtrlCmdSystemSetMemorySize_DISPATCH(struct RmClientResource *pRmCliRes, NV0000_CTRL_SYSTEM_SET_MEMORY_SIZE_PARAMS *pParams) {
return pRmCliRes->__cliresCtrlCmdSystemSetMemorySize__(pRmCliRes, pParams);
}
NV_STATUS cliresCtrlCmdSystemGetClassList_IMPL(struct RmClientResource *pRmCliRes, NV0000_CTRL_SYSTEM_GET_CLASSLIST_PARAMS *pParams);
static inline NV_STATUS cliresCtrlCmdSystemGetClassList_DISPATCH(struct RmClientResource *pRmCliRes, NV0000_CTRL_SYSTEM_GET_CLASSLIST_PARAMS *pParams) {
return pRmCliRes->__cliresCtrlCmdSystemGetClassList__(pRmCliRes, pParams);
}
NV_STATUS cliresCtrlCmdSystemNotifyEvent_IMPL(struct RmClientResource *pRmCliRes, NV0000_CTRL_SYSTEM_NOTIFY_EVENT_PARAMS *pParams);
static inline NV_STATUS cliresCtrlCmdSystemNotifyEvent_DISPATCH(struct RmClientResource *pRmCliRes, NV0000_CTRL_SYSTEM_NOTIFY_EVENT_PARAMS *pParams) {
return pRmCliRes->__cliresCtrlCmdSystemNotifyEvent__(pRmCliRes, pParams);
}
NV_STATUS cliresCtrlCmdSystemDebugCtrlRmMsg_IMPL(struct RmClientResource *pRmCliRes, NV0000_CTRL_SYSTEM_DEBUG_RMMSG_CTRL_PARAMS *pParams);
static inline NV_STATUS cliresCtrlCmdSystemDebugCtrlRmMsg_DISPATCH(struct RmClientResource *pRmCliRes, NV0000_CTRL_SYSTEM_DEBUG_RMMSG_CTRL_PARAMS *pParams) {
return pRmCliRes->__cliresCtrlCmdSystemDebugCtrlRmMsg__(pRmCliRes, pParams);
}
NV_STATUS cliresCtrlCmdSystemGetPrivilegedStatus_IMPL(struct RmClientResource *pRmCliRes, NV0000_CTRL_SYSTEM_GET_PRIVILEGED_STATUS_PARAMS *pParams);
static inline NV_STATUS cliresCtrlCmdSystemGetPrivilegedStatus_DISPATCH(struct RmClientResource *pRmCliRes, NV0000_CTRL_SYSTEM_GET_PRIVILEGED_STATUS_PARAMS *pParams) {
return pRmCliRes->__cliresCtrlCmdSystemGetPrivilegedStatus__(pRmCliRes, pParams);
}
NV_STATUS cliresCtrlCmdSystemGetFabricStatus_IMPL(struct RmClientResource *pRmCliRes, NV0000_CTRL_SYSTEM_GET_FABRIC_STATUS_PARAMS *pParams);
static inline NV_STATUS cliresCtrlCmdSystemGetFabricStatus_DISPATCH(struct RmClientResource *pRmCliRes, NV0000_CTRL_SYSTEM_GET_FABRIC_STATUS_PARAMS *pParams) {
return pRmCliRes->__cliresCtrlCmdSystemGetFabricStatus__(pRmCliRes, pParams);
}
NV_STATUS cliresCtrlCmdSystemGetRmInstanceId_IMPL(struct RmClientResource *pRmCliRes, NV0000_CTRL_SYSTEM_GET_RM_INSTANCE_ID_PARAMS *pRmInstanceIdParams);
static inline NV_STATUS cliresCtrlCmdSystemGetRmInstanceId_DISPATCH(struct RmClientResource *pRmCliRes, NV0000_CTRL_SYSTEM_GET_RM_INSTANCE_ID_PARAMS *pRmInstanceIdParams) {
return pRmCliRes->__cliresCtrlCmdSystemGetRmInstanceId__(pRmCliRes, pRmInstanceIdParams);
}
NV_STATUS cliresCtrlCmdSystemGetClientDatabaseInfo_IMPL(struct RmClientResource *pRmCliRes, NV0000_CTRL_SYSTEM_GET_CLIENT_DATABASE_INFO_PARAMS *pParams);
static inline NV_STATUS cliresCtrlCmdSystemGetClientDatabaseInfo_DISPATCH(struct RmClientResource *pRmCliRes, NV0000_CTRL_SYSTEM_GET_CLIENT_DATABASE_INFO_PARAMS *pParams) {
return pRmCliRes->__cliresCtrlCmdSystemGetClientDatabaseInfo__(pRmCliRes, pParams);
}
NV_STATUS cliresCtrlCmdClientGetAddrSpaceType_IMPL(struct RmClientResource *pRmCliRes, NV0000_CTRL_CLIENT_GET_ADDR_SPACE_TYPE_PARAMS *pParams);
static inline NV_STATUS cliresCtrlCmdClientGetAddrSpaceType_DISPATCH(struct RmClientResource *pRmCliRes, NV0000_CTRL_CLIENT_GET_ADDR_SPACE_TYPE_PARAMS *pParams) {
return pRmCliRes->__cliresCtrlCmdClientGetAddrSpaceType__(pRmCliRes, pParams);
}
NV_STATUS cliresCtrlCmdClientGetHandleInfo_IMPL(struct RmClientResource *pRmCliRes, NV0000_CTRL_CLIENT_GET_HANDLE_INFO_PARAMS *pParams);
static inline NV_STATUS cliresCtrlCmdClientGetHandleInfo_DISPATCH(struct RmClientResource *pRmCliRes, NV0000_CTRL_CLIENT_GET_HANDLE_INFO_PARAMS *pParams) {
return pRmCliRes->__cliresCtrlCmdClientGetHandleInfo__(pRmCliRes, pParams);
}
NV_STATUS cliresCtrlCmdClientGetAccessRights_IMPL(struct RmClientResource *pRmCliRes, NV0000_CTRL_CLIENT_GET_ACCESS_RIGHTS_PARAMS *pParams);
static inline NV_STATUS cliresCtrlCmdClientGetAccessRights_DISPATCH(struct RmClientResource *pRmCliRes, NV0000_CTRL_CLIENT_GET_ACCESS_RIGHTS_PARAMS *pParams) {
return pRmCliRes->__cliresCtrlCmdClientGetAccessRights__(pRmCliRes, pParams);
}
NV_STATUS cliresCtrlCmdClientSetInheritedSharePolicy_IMPL(struct RmClientResource *pRmCliRes, NV0000_CTRL_CLIENT_SET_INHERITED_SHARE_POLICY_PARAMS *pParams);
static inline NV_STATUS cliresCtrlCmdClientSetInheritedSharePolicy_DISPATCH(struct RmClientResource *pRmCliRes, NV0000_CTRL_CLIENT_SET_INHERITED_SHARE_POLICY_PARAMS *pParams) {
return pRmCliRes->__cliresCtrlCmdClientSetInheritedSharePolicy__(pRmCliRes, pParams);
}
NV_STATUS cliresCtrlCmdClientShareObject_IMPL(struct RmClientResource *pRmCliRes, NV0000_CTRL_CLIENT_SHARE_OBJECT_PARAMS *pParams);
static inline NV_STATUS cliresCtrlCmdClientShareObject_DISPATCH(struct RmClientResource *pRmCliRes, NV0000_CTRL_CLIENT_SHARE_OBJECT_PARAMS *pParams) {
return pRmCliRes->__cliresCtrlCmdClientShareObject__(pRmCliRes, pParams);
}
NV_STATUS cliresCtrlCmdClientGetChildHandle_IMPL(struct RmClientResource *pRmCliRes, NV0000_CTRL_CMD_CLIENT_GET_CHILD_HANDLE_PARAMS *pParams);
static inline NV_STATUS cliresCtrlCmdClientGetChildHandle_DISPATCH(struct RmClientResource *pRmCliRes, NV0000_CTRL_CMD_CLIENT_GET_CHILD_HANDLE_PARAMS *pParams) {
return pRmCliRes->__cliresCtrlCmdClientGetChildHandle__(pRmCliRes, pParams);
}
NV_STATUS cliresCtrlCmdGpuGetAttachedIds_IMPL(struct RmClientResource *pRmCliRes, NV0000_CTRL_GPU_GET_ATTACHED_IDS_PARAMS *pGpuAttachedIds);
static inline NV_STATUS cliresCtrlCmdGpuGetAttachedIds_DISPATCH(struct RmClientResource *pRmCliRes, NV0000_CTRL_GPU_GET_ATTACHED_IDS_PARAMS *pGpuAttachedIds) {
return pRmCliRes->__cliresCtrlCmdGpuGetAttachedIds__(pRmCliRes, pGpuAttachedIds);
}
NV_STATUS cliresCtrlCmdGpuGetIdInfo_IMPL(struct RmClientResource *pRmCliRes, NV0000_CTRL_GPU_GET_ID_INFO_PARAMS *pGpuIdInfoParams);
static inline NV_STATUS cliresCtrlCmdGpuGetIdInfo_DISPATCH(struct RmClientResource *pRmCliRes, NV0000_CTRL_GPU_GET_ID_INFO_PARAMS *pGpuIdInfoParams) {
return pRmCliRes->__cliresCtrlCmdGpuGetIdInfo__(pRmCliRes, pGpuIdInfoParams);
}
NV_STATUS cliresCtrlCmdGpuGetIdInfoV2_IMPL(struct RmClientResource *pRmCliRes, NV0000_CTRL_GPU_GET_ID_INFO_V2_PARAMS *pGpuIdInfoParams);
static inline NV_STATUS cliresCtrlCmdGpuGetIdInfoV2_DISPATCH(struct RmClientResource *pRmCliRes, NV0000_CTRL_GPU_GET_ID_INFO_V2_PARAMS *pGpuIdInfoParams) {
return pRmCliRes->__cliresCtrlCmdGpuGetIdInfoV2__(pRmCliRes, pGpuIdInfoParams);
}
NV_STATUS cliresCtrlCmdGpuGetInitStatus_IMPL(struct RmClientResource *pRmCliRes, NV0000_CTRL_GPU_GET_INIT_STATUS_PARAMS *pGpuInitStatusParams);
static inline NV_STATUS cliresCtrlCmdGpuGetInitStatus_DISPATCH(struct RmClientResource *pRmCliRes, NV0000_CTRL_GPU_GET_INIT_STATUS_PARAMS *pGpuInitStatusParams) {
return pRmCliRes->__cliresCtrlCmdGpuGetInitStatus__(pRmCliRes, pGpuInitStatusParams);
}
NV_STATUS cliresCtrlCmdGpuGetDeviceIds_IMPL(struct RmClientResource *pRmCliRes, NV0000_CTRL_GPU_GET_DEVICE_IDS_PARAMS *pDeviceIdsParams);
static inline NV_STATUS cliresCtrlCmdGpuGetDeviceIds_DISPATCH(struct RmClientResource *pRmCliRes, NV0000_CTRL_GPU_GET_DEVICE_IDS_PARAMS *pDeviceIdsParams) {
return pRmCliRes->__cliresCtrlCmdGpuGetDeviceIds__(pRmCliRes, pDeviceIdsParams);
}
NV_STATUS cliresCtrlCmdGpuGetProbedIds_IMPL(struct RmClientResource *pRmCliRes, NV0000_CTRL_GPU_GET_PROBED_IDS_PARAMS *pGpuProbedIds);
static inline NV_STATUS cliresCtrlCmdGpuGetProbedIds_DISPATCH(struct RmClientResource *pRmCliRes, NV0000_CTRL_GPU_GET_PROBED_IDS_PARAMS *pGpuProbedIds) {
return pRmCliRes->__cliresCtrlCmdGpuGetProbedIds__(pRmCliRes, pGpuProbedIds);
}
NV_STATUS cliresCtrlCmdGpuAttachIds_IMPL(struct RmClientResource *pRmCliRes, NV0000_CTRL_GPU_ATTACH_IDS_PARAMS *pGpuAttachIds);
static inline NV_STATUS cliresCtrlCmdGpuAttachIds_DISPATCH(struct RmClientResource *pRmCliRes, NV0000_CTRL_GPU_ATTACH_IDS_PARAMS *pGpuAttachIds) {
return pRmCliRes->__cliresCtrlCmdGpuAttachIds__(pRmCliRes, pGpuAttachIds);
}
NV_STATUS cliresCtrlCmdGpuDetachIds_IMPL(struct RmClientResource *pRmCliRes, NV0000_CTRL_GPU_DETACH_IDS_PARAMS *pGpuDetachIds);
static inline NV_STATUS cliresCtrlCmdGpuDetachIds_DISPATCH(struct RmClientResource *pRmCliRes, NV0000_CTRL_GPU_DETACH_IDS_PARAMS *pGpuDetachIds) {
return pRmCliRes->__cliresCtrlCmdGpuDetachIds__(pRmCliRes, pGpuDetachIds);
}
NV_STATUS cliresCtrlCmdGpuGetSvmSize_IMPL(struct RmClientResource *pRmCliRes, NV0000_CTRL_GPU_GET_SVM_SIZE_PARAMS *pSvmSizeGetParams);
static inline NV_STATUS cliresCtrlCmdGpuGetSvmSize_DISPATCH(struct RmClientResource *pRmCliRes, NV0000_CTRL_GPU_GET_SVM_SIZE_PARAMS *pSvmSizeGetParams) {
return pRmCliRes->__cliresCtrlCmdGpuGetSvmSize__(pRmCliRes, pSvmSizeGetParams);
}
NV_STATUS cliresCtrlCmdGpuGetPciInfo_IMPL(struct RmClientResource *pRmCliRes, NV0000_CTRL_GPU_GET_PCI_INFO_PARAMS *pPciInfoParams);
static inline NV_STATUS cliresCtrlCmdGpuGetPciInfo_DISPATCH(struct RmClientResource *pRmCliRes, NV0000_CTRL_GPU_GET_PCI_INFO_PARAMS *pPciInfoParams) {
return pRmCliRes->__cliresCtrlCmdGpuGetPciInfo__(pRmCliRes, pPciInfoParams);
}
NV_STATUS cliresCtrlCmdGpuGetUuidInfo_IMPL(struct RmClientResource *pRmCliRes, NV0000_CTRL_GPU_GET_UUID_INFO_PARAMS *pParams);
static inline NV_STATUS cliresCtrlCmdGpuGetUuidInfo_DISPATCH(struct RmClientResource *pRmCliRes, NV0000_CTRL_GPU_GET_UUID_INFO_PARAMS *pParams) {
return pRmCliRes->__cliresCtrlCmdGpuGetUuidInfo__(pRmCliRes, pParams);
}
NV_STATUS cliresCtrlCmdGpuGetUuidFromGpuId_IMPL(struct RmClientResource *pRmCliRes, NV0000_CTRL_GPU_GET_UUID_FROM_GPU_ID_PARAMS *pParams);
static inline NV_STATUS cliresCtrlCmdGpuGetUuidFromGpuId_DISPATCH(struct RmClientResource *pRmCliRes, NV0000_CTRL_GPU_GET_UUID_FROM_GPU_ID_PARAMS *pParams) {
return pRmCliRes->__cliresCtrlCmdGpuGetUuidFromGpuId__(pRmCliRes, pParams);
}
NV_STATUS cliresCtrlCmdGpuModifyGpuDrainState_IMPL(struct RmClientResource *pRmCliRes, NV0000_CTRL_GPU_MODIFY_DRAIN_STATE_PARAMS *pParams);
static inline NV_STATUS cliresCtrlCmdGpuModifyGpuDrainState_DISPATCH(struct RmClientResource *pRmCliRes, NV0000_CTRL_GPU_MODIFY_DRAIN_STATE_PARAMS *pParams) {
return pRmCliRes->__cliresCtrlCmdGpuModifyGpuDrainState__(pRmCliRes, pParams);
}
NV_STATUS cliresCtrlCmdGpuQueryGpuDrainState_IMPL(struct RmClientResource *pRmCliRes, NV0000_CTRL_GPU_QUERY_DRAIN_STATE_PARAMS *pParams);
static inline NV_STATUS cliresCtrlCmdGpuQueryGpuDrainState_DISPATCH(struct RmClientResource *pRmCliRes, NV0000_CTRL_GPU_QUERY_DRAIN_STATE_PARAMS *pParams) {
return pRmCliRes->__cliresCtrlCmdGpuQueryGpuDrainState__(pRmCliRes, pParams);
}
NV_STATUS cliresCtrlCmdGpuGetMemOpEnable_IMPL(struct RmClientResource *pRmCliRes, NV0000_CTRL_GPU_GET_MEMOP_ENABLE_PARAMS *pMemOpEnableParams);
static inline NV_STATUS cliresCtrlCmdGpuGetMemOpEnable_DISPATCH(struct RmClientResource *pRmCliRes, NV0000_CTRL_GPU_GET_MEMOP_ENABLE_PARAMS *pMemOpEnableParams) {
return pRmCliRes->__cliresCtrlCmdGpuGetMemOpEnable__(pRmCliRes, pMemOpEnableParams);
}
NV_STATUS cliresCtrlCmdGpuDisableNvlinkInit_IMPL(struct RmClientResource *pRmCliRes, NV0000_CTRL_GPU_DISABLE_NVLINK_INIT_PARAMS *pParams);
static inline NV_STATUS cliresCtrlCmdGpuDisableNvlinkInit_DISPATCH(struct RmClientResource *pRmCliRes, NV0000_CTRL_GPU_DISABLE_NVLINK_INIT_PARAMS *pParams) {
return pRmCliRes->__cliresCtrlCmdGpuDisableNvlinkInit__(pRmCliRes, pParams);
}
NV_STATUS cliresCtrlCmdLegacyConfig_IMPL(struct RmClientResource *pRmCliRes, NV0000_CTRL_GPU_LEGACY_CONFIG_PARAMS *pParams);
static inline NV_STATUS cliresCtrlCmdLegacyConfig_DISPATCH(struct RmClientResource *pRmCliRes, NV0000_CTRL_GPU_LEGACY_CONFIG_PARAMS *pParams) {
return pRmCliRes->__cliresCtrlCmdLegacyConfig__(pRmCliRes, pParams);
}
NV_STATUS cliresCtrlCmdGsyncGetAttachedIds_IMPL(struct RmClientResource *pRmCliRes, NV0000_CTRL_GSYNC_GET_ATTACHED_IDS_PARAMS *pGsyncAttachedIds);
static inline NV_STATUS cliresCtrlCmdGsyncGetAttachedIds_DISPATCH(struct RmClientResource *pRmCliRes, NV0000_CTRL_GSYNC_GET_ATTACHED_IDS_PARAMS *pGsyncAttachedIds) {
return pRmCliRes->__cliresCtrlCmdGsyncGetAttachedIds__(pRmCliRes, pGsyncAttachedIds);
}
NV_STATUS cliresCtrlCmdGsyncGetIdInfo_IMPL(struct RmClientResource *pRmCliRes, NV0000_CTRL_GSYNC_GET_ID_INFO_PARAMS *pGsyncIdInfoParams);
static inline NV_STATUS cliresCtrlCmdGsyncGetIdInfo_DISPATCH(struct RmClientResource *pRmCliRes, NV0000_CTRL_GSYNC_GET_ID_INFO_PARAMS *pGsyncIdInfoParams) {
return pRmCliRes->__cliresCtrlCmdGsyncGetIdInfo__(pRmCliRes, pGsyncIdInfoParams);
}
NV_STATUS cliresCtrlCmdEventSetNotification_IMPL(struct RmClientResource *pRmCliRes, NV0000_CTRL_EVENT_SET_NOTIFICATION_PARAMS *pEventSetNotificationParams);
static inline NV_STATUS cliresCtrlCmdEventSetNotification_DISPATCH(struct RmClientResource *pRmCliRes, NV0000_CTRL_EVENT_SET_NOTIFICATION_PARAMS *pEventSetNotificationParams) {
return pRmCliRes->__cliresCtrlCmdEventSetNotification__(pRmCliRes, pEventSetNotificationParams);
}
NV_STATUS cliresCtrlCmdEventGetSystemEventStatus_IMPL(struct RmClientResource *pRmCliRes, NV0000_CTRL_GET_SYSTEM_EVENT_STATUS_PARAMS *pSystemEventStatusParams);
static inline NV_STATUS cliresCtrlCmdEventGetSystemEventStatus_DISPATCH(struct RmClientResource *pRmCliRes, NV0000_CTRL_GET_SYSTEM_EVENT_STATUS_PARAMS *pSystemEventStatusParams) {
return pRmCliRes->__cliresCtrlCmdEventGetSystemEventStatus__(pRmCliRes, pSystemEventStatusParams);
}
NV_STATUS cliresCtrlCmdOsUnixExportObjectToFd_IMPL(struct RmClientResource *pRmCliRes, NV0000_CTRL_OS_UNIX_EXPORT_OBJECT_TO_FD_PARAMS *pParams);
static inline NV_STATUS cliresCtrlCmdOsUnixExportObjectToFd_DISPATCH(struct RmClientResource *pRmCliRes, NV0000_CTRL_OS_UNIX_EXPORT_OBJECT_TO_FD_PARAMS *pParams) {
return pRmCliRes->__cliresCtrlCmdOsUnixExportObjectToFd__(pRmCliRes, pParams);
}
NV_STATUS cliresCtrlCmdOsUnixImportObjectFromFd_IMPL(struct RmClientResource *pRmCliRes, NV0000_CTRL_OS_UNIX_IMPORT_OBJECT_FROM_FD_PARAMS *pParams);
static inline NV_STATUS cliresCtrlCmdOsUnixImportObjectFromFd_DISPATCH(struct RmClientResource *pRmCliRes, NV0000_CTRL_OS_UNIX_IMPORT_OBJECT_FROM_FD_PARAMS *pParams) {
return pRmCliRes->__cliresCtrlCmdOsUnixImportObjectFromFd__(pRmCliRes, pParams);
}
NV_STATUS cliresCtrlCmdOsUnixGetExportObjectInfo_IMPL(struct RmClientResource *pRmCliRes, NV0000_CTRL_OS_UNIX_GET_EXPORT_OBJECT_INFO_PARAMS *pParams);
static inline NV_STATUS cliresCtrlCmdOsUnixGetExportObjectInfo_DISPATCH(struct RmClientResource *pRmCliRes, NV0000_CTRL_OS_UNIX_GET_EXPORT_OBJECT_INFO_PARAMS *pParams) {
return pRmCliRes->__cliresCtrlCmdOsUnixGetExportObjectInfo__(pRmCliRes, pParams);
}
NV_STATUS cliresCtrlCmdOsUnixCreateExportObjectFd_IMPL(struct RmClientResource *pRmCliRes, NV0000_CTRL_OS_UNIX_CREATE_EXPORT_OBJECT_FD_PARAMS *pParams);
static inline NV_STATUS cliresCtrlCmdOsUnixCreateExportObjectFd_DISPATCH(struct RmClientResource *pRmCliRes, NV0000_CTRL_OS_UNIX_CREATE_EXPORT_OBJECT_FD_PARAMS *pParams) {
return pRmCliRes->__cliresCtrlCmdOsUnixCreateExportObjectFd__(pRmCliRes, pParams);
}
NV_STATUS cliresCtrlCmdOsUnixExportObjectsToFd_IMPL(struct RmClientResource *pRmCliRes, NV0000_CTRL_OS_UNIX_EXPORT_OBJECTS_TO_FD_PARAMS *pParams);
static inline NV_STATUS cliresCtrlCmdOsUnixExportObjectsToFd_DISPATCH(struct RmClientResource *pRmCliRes, NV0000_CTRL_OS_UNIX_EXPORT_OBJECTS_TO_FD_PARAMS *pParams) {
return pRmCliRes->__cliresCtrlCmdOsUnixExportObjectsToFd__(pRmCliRes, pParams);
}
NV_STATUS cliresCtrlCmdOsUnixImportObjectsFromFd_IMPL(struct RmClientResource *pRmCliRes, NV0000_CTRL_OS_UNIX_IMPORT_OBJECTS_FROM_FD_PARAMS *pParams);
static inline NV_STATUS cliresCtrlCmdOsUnixImportObjectsFromFd_DISPATCH(struct RmClientResource *pRmCliRes, NV0000_CTRL_OS_UNIX_IMPORT_OBJECTS_FROM_FD_PARAMS *pParams) {
return pRmCliRes->__cliresCtrlCmdOsUnixImportObjectsFromFd__(pRmCliRes, pParams);
}
NV_STATUS cliresCtrlCmdOsUnixFlushUserCache_IMPL(struct RmClientResource *pRmCliRes, NV0000_CTRL_OS_UNIX_FLUSH_USER_CACHE_PARAMS *pAddressSpaceParams);
static inline NV_STATUS cliresCtrlCmdOsUnixFlushUserCache_DISPATCH(struct RmClientResource *pRmCliRes, NV0000_CTRL_OS_UNIX_FLUSH_USER_CACHE_PARAMS *pAddressSpaceParams) {
return pRmCliRes->__cliresCtrlCmdOsUnixFlushUserCache__(pRmCliRes, pAddressSpaceParams);
}
NV_STATUS cliresCtrlCmdSetSubProcessID_IMPL(struct RmClientResource *pRmCliRes, NV0000_CTRL_SET_SUB_PROCESS_ID_PARAMS *pParams);
static inline NV_STATUS cliresCtrlCmdSetSubProcessID_DISPATCH(struct RmClientResource *pRmCliRes, NV0000_CTRL_SET_SUB_PROCESS_ID_PARAMS *pParams) {
return pRmCliRes->__cliresCtrlCmdSetSubProcessID__(pRmCliRes, pParams);
}
NV_STATUS cliresCtrlCmdDisableSubProcessUserdIsolation_IMPL(struct RmClientResource *pRmCliRes, NV0000_CTRL_DISABLE_SUB_PROCESS_USERD_ISOLATION_PARAMS *pParams);
static inline NV_STATUS cliresCtrlCmdDisableSubProcessUserdIsolation_DISPATCH(struct RmClientResource *pRmCliRes, NV0000_CTRL_DISABLE_SUB_PROCESS_USERD_ISOLATION_PARAMS *pParams) {
return pRmCliRes->__cliresCtrlCmdDisableSubProcessUserdIsolation__(pRmCliRes, pParams);
}
NV_STATUS cliresCtrlCmdSystemSyncExternalFabricMgmt_IMPL(struct RmClientResource *pRmCliRes, NV0000_CTRL_CMD_SYSTEM_SYNC_EXTERNAL_FABRIC_MGMT_PARAMS *pExtFabricMgmtParams);
static inline NV_STATUS cliresCtrlCmdSystemSyncExternalFabricMgmt_DISPATCH(struct RmClientResource *pRmCliRes, NV0000_CTRL_CMD_SYSTEM_SYNC_EXTERNAL_FABRIC_MGMT_PARAMS *pExtFabricMgmtParams) {
return pRmCliRes->__cliresCtrlCmdSystemSyncExternalFabricMgmt__(pRmCliRes, pExtFabricMgmtParams);
}
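
/*
 * Illustrative sketch (not part of the generated header): each NV0000 control
 * command handled by RmClientResource is reached through its cliresCtrlCmd*()
 * macro, which resolves to the corresponding _DISPATCH wrapper above.
 * exampleQueryBuildVersion is a hypothetical helper; the params struct is
 * zero-initialized here only for illustration, and real callers fill it in
 * according to the control's contract.
 */
static inline NV_STATUS exampleQueryBuildVersion(struct RmClientResource *pRmCliRes)
{
    NV0000_CTRL_SYSTEM_GET_BUILD_VERSION_V2_PARAMS params;
    portMemSet(&params, 0, sizeof(params));
    return cliresCtrlCmdSystemGetBuildVersionV2(pRmCliRes, &params);
}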
static inline NV_STATUS cliresControl_DISPATCH(struct RmClientResource *pResource, struct CALL_CONTEXT *pCallContext, struct RS_RES_CONTROL_PARAMS_INTERNAL *pParams) {
return pResource->__cliresControl__(pResource, pCallContext, pParams);
}
static inline NV_STATUS cliresUnmap_DISPATCH(struct RmClientResource *pResource, struct CALL_CONTEXT *pCallContext, RsCpuMapping *pCpuMapping) {
return pResource->__cliresUnmap__(pResource, pCallContext, pCpuMapping);
}
static inline NV_STATUS cliresMapTo_DISPATCH(struct RmClientResource *pResource, RS_RES_MAP_TO_PARAMS *pParams) {
return pResource->__cliresMapTo__(pResource, pParams);
}
static inline void cliresSetNotificationShare_DISPATCH(struct RmClientResource *pNotifier, struct NotifShare *pNotifShare) {
pNotifier->__cliresSetNotificationShare__(pNotifier, pNotifShare);
}
static inline NV_STATUS cliresControlFilter_DISPATCH(struct RmClientResource *pResource, struct CALL_CONTEXT *pCallContext, struct RS_RES_CONTROL_PARAMS_INTERNAL *pParams) {
return pResource->__cliresControlFilter__(pResource, pCallContext, pParams);
}
static inline void cliresAddAdditionalDependants_DISPATCH(struct RsClient *pClient, struct RmClientResource *pResource, RsResourceRef *pReference) {
pResource->__cliresAddAdditionalDependants__(pClient, pResource, pReference);
}
static inline NvU32 cliresGetRefCount_DISPATCH(struct RmClientResource *pResource) {
return pResource->__cliresGetRefCount__(pResource);
}
static inline NV_STATUS cliresUnregisterEvent_DISPATCH(struct RmClientResource *pNotifier, NvHandle hNotifierClient, NvHandle hNotifierResource, NvHandle hEventClient, NvHandle hEvent) {
return pNotifier->__cliresUnregisterEvent__(pNotifier, hNotifierClient, hNotifierResource, hEventClient, hEvent);
}
static inline NvBool cliresCanCopy_DISPATCH(struct RmClientResource *pResource) {
return pResource->__cliresCanCopy__(pResource);
}
static inline NV_STATUS cliresControl_Prologue_DISPATCH(struct RmClientResource *pResource, struct CALL_CONTEXT *pCallContext, struct RS_RES_CONTROL_PARAMS_INTERNAL *pParams) {
return pResource->__cliresControl_Prologue__(pResource, pCallContext, pParams);
}
static inline void cliresPreDestruct_DISPATCH(struct RmClientResource *pResource) {
pResource->__cliresPreDestruct__(pResource);
}
static inline NV_STATUS cliresUnmapFrom_DISPATCH(struct RmClientResource *pResource, RS_RES_UNMAP_FROM_PARAMS *pParams) {
return pResource->__cliresUnmapFrom__(pResource, pParams);
}
static inline PEVENTNOTIFICATION *cliresGetNotificationListPtr_DISPATCH(struct RmClientResource *pNotifier) {
return pNotifier->__cliresGetNotificationListPtr__(pNotifier);
}
static inline void cliresControl_Epilogue_DISPATCH(struct RmClientResource *pResource, struct CALL_CONTEXT *pCallContext, struct RS_RES_CONTROL_PARAMS_INTERNAL *pParams) {
pResource->__cliresControl_Epilogue__(pResource, pCallContext, pParams);
}
static inline struct NotifShare *cliresGetNotificationShare_DISPATCH(struct RmClientResource *pNotifier) {
return pNotifier->__cliresGetNotificationShare__(pNotifier);
}
static inline NV_STATUS cliresControlLookup_DISPATCH(struct RmClientResource *pResource, struct RS_RES_CONTROL_PARAMS_INTERNAL *pParams, const struct NVOC_EXPORTED_METHOD_DEF **ppEntry) {
return pResource->__cliresControlLookup__(pResource, pParams, ppEntry);
}
static inline NV_STATUS cliresMap_DISPATCH(struct RmClientResource *pResource, struct CALL_CONTEXT *pCallContext, RS_CPU_MAP_PARAMS *pParams, RsCpuMapping *pCpuMapping) {
return pResource->__cliresMap__(pResource, pCallContext, pParams, pCpuMapping);
}
static inline NV_STATUS cliresGetOrAllocNotifShare_DISPATCH(struct RmClientResource *pNotifier, NvHandle hNotifierClient, NvHandle hNotifierResource, struct NotifShare **ppNotifShare) {
return pNotifier->__cliresGetOrAllocNotifShare__(pNotifier, hNotifierClient, hNotifierResource, ppNotifShare);
}
NV_STATUS cliresConstruct_IMPL(struct RmClientResource *arg_pRmCliRes, struct CALL_CONTEXT *arg_pCallContext, struct RS_RES_ALLOC_PARAMS_INTERNAL *arg_pParams);
#define __nvoc_cliresConstruct(arg_pRmCliRes, arg_pCallContext, arg_pParams) cliresConstruct_IMPL(arg_pRmCliRes, arg_pCallContext, arg_pParams)
void cliresDestruct_IMPL(struct RmClientResource *pRmCliRes);
#define __nvoc_cliresDestruct(pRmCliRes) cliresDestruct_IMPL(pRmCliRes)
#undef PRIVATE_FIELD
NV_STATUS CliGetSystemP2pCaps(NvU32 *gpuIds,
                              NvU32  gpuCount,
                              NvU32 *p2pCaps,
                              NvU32 *p2pOptimalReadCEs,
                              NvU32 *p2pOptimalWriteCEs,
                              NvU8  *p2pCapsStatus,
                              NvU32 *pBusPeerIds);
#endif
#ifdef __cplusplus
} // extern "C"
#endif
#endif // _G_CLIENT_RESOURCE_NVOC_H_
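All of the *_DISPATCH inlines above are the same hand-rolled virtual-call pattern: a per-object function pointer, filled in by the class's __nvoc_init_funcTable_* routine, indirects to the matching *_IMPL. A minimal, self-contained sketch of that pattern follows; the Widget names are illustrative only, not actual NVOC types.

/* Sketch of the NVOC-style _IMPL / _DISPATCH / funcTable pattern. */
#include <stdio.h>

typedef struct Widget Widget;

struct Widget {
    /* per-object "vtable" slot, wired up by the init routine */
    int (*__widgetPing__)(Widget *pThis, int value);
};

/* the _IMPL: concrete implementation */
static int widgetPing_IMPL(Widget *pThis, int value)
{
    (void)pThis;
    return value + 1;
}

/* the _DISPATCH: thin inline that indirects through the slot */
static inline int widgetPing_DISPATCH(Widget *pThis, int value)
{
    return pThis->__widgetPing__(pThis, value);
}

/* the __nvoc_init_funcTable_* equivalent: point slots at the _IMPLs */
static void initFuncTable_Widget(Widget *pThis)
{
    pThis->__widgetPing__ = &widgetPing_IMPL;
}

int main(void)
{
    Widget w;
    initFuncTable_Widget(&w);
    printf("%d\n", widgetPing_DISPATCH(&w, 41)); /* prints 42 */
    return 0;
}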


@@ -0,0 +1,427 @@
#define NVOC_CONTEXT_DMA_H_PRIVATE_ACCESS_ALLOWED
#include "nvoc/runtime.h"
#include "nvoc/rtti.h"
#include "nvtypes.h"
#include "nvport/nvport.h"
#include "nvport/inline/util_valist.h"
#include "utils/nvassert.h"
#include "g_context_dma_nvoc.h"
#ifdef DEBUG
char __nvoc_class_id_uniqueness_check_0x88441b = 1;
#endif
extern const struct NVOC_CLASS_DEF __nvoc_class_def_ContextDma;
extern const struct NVOC_CLASS_DEF __nvoc_class_def_Object;
extern const struct NVOC_CLASS_DEF __nvoc_class_def_RsResource;
extern const struct NVOC_CLASS_DEF __nvoc_class_def_RmResourceCommon;
extern const struct NVOC_CLASS_DEF __nvoc_class_def_RmResource;
extern const struct NVOC_CLASS_DEF __nvoc_class_def_INotifier;
extern const struct NVOC_CLASS_DEF __nvoc_class_def_Notifier;
void __nvoc_init_ContextDma(ContextDma*);
void __nvoc_init_funcTable_ContextDma(ContextDma*);
NV_STATUS __nvoc_ctor_ContextDma(ContextDma*, struct CALL_CONTEXT * arg_pCallContext, struct RS_RES_ALLOC_PARAMS_INTERNAL * arg_pParams);
void __nvoc_init_dataField_ContextDma(ContextDma*);
void __nvoc_dtor_ContextDma(ContextDma*);
extern const struct NVOC_EXPORT_INFO __nvoc_export_info_ContextDma;
static const struct NVOC_RTTI __nvoc_rtti_ContextDma_ContextDma = {
/*pClassDef=*/ &__nvoc_class_def_ContextDma,
/*dtor=*/ (NVOC_DYNAMIC_DTOR) &__nvoc_dtor_ContextDma,
/*offset=*/ 0,
};
static const struct NVOC_RTTI __nvoc_rtti_ContextDma_Object = {
/*pClassDef=*/ &__nvoc_class_def_Object,
/*dtor=*/ &__nvoc_destructFromBase,
/*offset=*/ NV_OFFSETOF(ContextDma, __nvoc_base_RmResource.__nvoc_base_RsResource.__nvoc_base_Object),
};
static const struct NVOC_RTTI __nvoc_rtti_ContextDma_RsResource = {
/*pClassDef=*/ &__nvoc_class_def_RsResource,
/*dtor=*/ &__nvoc_destructFromBase,
/*offset=*/ NV_OFFSETOF(ContextDma, __nvoc_base_RmResource.__nvoc_base_RsResource),
};
static const struct NVOC_RTTI __nvoc_rtti_ContextDma_RmResourceCommon = {
/*pClassDef=*/ &__nvoc_class_def_RmResourceCommon,
/*dtor=*/ &__nvoc_destructFromBase,
/*offset=*/ NV_OFFSETOF(ContextDma, __nvoc_base_RmResource.__nvoc_base_RmResourceCommon),
};
static const struct NVOC_RTTI __nvoc_rtti_ContextDma_RmResource = {
/*pClassDef=*/ &__nvoc_class_def_RmResource,
/*dtor=*/ &__nvoc_destructFromBase,
/*offset=*/ NV_OFFSETOF(ContextDma, __nvoc_base_RmResource),
};
static const struct NVOC_RTTI __nvoc_rtti_ContextDma_INotifier = {
/*pClassDef=*/ &__nvoc_class_def_INotifier,
/*dtor=*/ &__nvoc_destructFromBase,
/*offset=*/ NV_OFFSETOF(ContextDma, __nvoc_base_Notifier.__nvoc_base_INotifier),
};
static const struct NVOC_RTTI __nvoc_rtti_ContextDma_Notifier = {
/*pClassDef=*/ &__nvoc_class_def_Notifier,
/*dtor=*/ &__nvoc_destructFromBase,
/*offset=*/ NV_OFFSETOF(ContextDma, __nvoc_base_Notifier),
};
static const struct NVOC_CASTINFO __nvoc_castinfo_ContextDma = {
/*numRelatives=*/ 7,
/*relatives=*/ {
&__nvoc_rtti_ContextDma_ContextDma,
&__nvoc_rtti_ContextDma_Notifier,
&__nvoc_rtti_ContextDma_INotifier,
&__nvoc_rtti_ContextDma_RmResource,
&__nvoc_rtti_ContextDma_RmResourceCommon,
&__nvoc_rtti_ContextDma_RsResource,
&__nvoc_rtti_ContextDma_Object,
},
};
const struct NVOC_CLASS_DEF __nvoc_class_def_ContextDma =
{
/*classInfo=*/ {
/*size=*/ sizeof(ContextDma),
/*classId=*/ classId(ContextDma),
/*providerId=*/ &__nvoc_rtti_provider,
#if NV_PRINTF_STRINGS_ALLOWED
/*name=*/ "ContextDma",
#endif
},
/*objCreatefn=*/ (NVOC_DYNAMIC_OBJ_CREATE) &__nvoc_objCreateDynamic_ContextDma,
/*pCastInfo=*/ &__nvoc_castinfo_ContextDma,
/*pExportInfo=*/ &__nvoc_export_info_ContextDma
};
static NV_STATUS __nvoc_thunk_ContextDma_resMapTo(struct RsResource *pContextDma, struct RS_RES_MAP_TO_PARAMS *pParams) {
return ctxdmaMapTo((struct ContextDma *)(((unsigned char *)pContextDma) - __nvoc_rtti_ContextDma_RsResource.offset), pParams);
}
static NV_STATUS __nvoc_thunk_ContextDma_resUnmapFrom(struct RsResource *pContextDma, struct RS_RES_UNMAP_FROM_PARAMS *pParams) {
return ctxdmaUnmapFrom((struct ContextDma *)(((unsigned char *)pContextDma) - __nvoc_rtti_ContextDma_RsResource.offset), pParams);
}
static NvBool __nvoc_thunk_RmResource_ctxdmaShareCallback(struct ContextDma *pResource, struct RsClient *pInvokingClient, struct RsResourceRef *pParentRef, RS_SHARE_POLICY *pSharePolicy) {
return rmresShareCallback((struct RmResource *)(((unsigned char *)pResource) + __nvoc_rtti_ContextDma_RmResource.offset), pInvokingClient, pParentRef, pSharePolicy);
}
static NV_STATUS __nvoc_thunk_RmResource_ctxdmaCheckMemInterUnmap(struct ContextDma *pRmResource, NvBool bSubdeviceHandleProvided) {
return rmresCheckMemInterUnmap((struct RmResource *)(((unsigned char *)pRmResource) + __nvoc_rtti_ContextDma_RmResource.offset), bSubdeviceHandleProvided);
}
static NvBool __nvoc_thunk_RmResource_ctxdmaAccessCallback(struct ContextDma *pResource, struct RsClient *pInvokingClient, void *pAllocParams, RsAccessRight accessRight) {
return rmresAccessCallback((struct RmResource *)(((unsigned char *)pResource) + __nvoc_rtti_ContextDma_RmResource.offset), pInvokingClient, pAllocParams, accessRight);
}
static NV_STATUS __nvoc_thunk_RmResource_ctxdmaGetMemInterMapParams(struct ContextDma *pRmResource, RMRES_MEM_INTER_MAP_PARAMS *pParams) {
return rmresGetMemInterMapParams((struct RmResource *)(((unsigned char *)pRmResource) + __nvoc_rtti_ContextDma_RmResource.offset), pParams);
}
static NV_STATUS __nvoc_thunk_RmResource_ctxdmaGetMemoryMappingDescriptor(struct ContextDma *pRmResource, struct MEMORY_DESCRIPTOR **ppMemDesc) {
return rmresGetMemoryMappingDescriptor((struct RmResource *)(((unsigned char *)pRmResource) + __nvoc_rtti_ContextDma_RmResource.offset), ppMemDesc);
}
static void __nvoc_thunk_Notifier_ctxdmaSetNotificationShare(struct ContextDma *pNotifier, struct NotifShare *pNotifShare) {
notifySetNotificationShare((struct Notifier *)(((unsigned char *)pNotifier) + __nvoc_rtti_ContextDma_Notifier.offset), pNotifShare);
}
static NV_STATUS __nvoc_thunk_RsResource_ctxdmaControl(struct ContextDma *pResource, struct CALL_CONTEXT *pCallContext, struct RS_RES_CONTROL_PARAMS_INTERNAL *pParams) {
return resControl((struct RsResource *)(((unsigned char *)pResource) + __nvoc_rtti_ContextDma_RsResource.offset), pCallContext, pParams);
}
static NV_STATUS __nvoc_thunk_RsResource_ctxdmaControlFilter(struct ContextDma *pResource, struct CALL_CONTEXT *pCallContext, struct RS_RES_CONTROL_PARAMS_INTERNAL *pParams) {
return resControlFilter((struct RsResource *)(((unsigned char *)pResource) + __nvoc_rtti_ContextDma_RsResource.offset), pCallContext, pParams);
}
static NvU32 __nvoc_thunk_RsResource_ctxdmaGetRefCount(struct ContextDma *pResource) {
return resGetRefCount((struct RsResource *)(((unsigned char *)pResource) + __nvoc_rtti_ContextDma_RsResource.offset));
}
static NV_STATUS __nvoc_thunk_Notifier_ctxdmaUnregisterEvent(struct ContextDma *pNotifier, NvHandle hNotifierClient, NvHandle hNotifierResource, NvHandle hEventClient, NvHandle hEvent) {
return notifyUnregisterEvent((struct Notifier *)(((unsigned char *)pNotifier) + __nvoc_rtti_ContextDma_Notifier.offset), hNotifierClient, hNotifierResource, hEventClient, hEvent);
}
static NV_STATUS __nvoc_thunk_RsResource_ctxdmaUnmap(struct ContextDma *pResource, struct CALL_CONTEXT *pCallContext, RsCpuMapping *pCpuMapping) {
return resUnmap((struct RsResource *)(((unsigned char *)pResource) + __nvoc_rtti_ContextDma_RsResource.offset), pCallContext, pCpuMapping);
}
static NvBool __nvoc_thunk_RsResource_ctxdmaCanCopy(struct ContextDma *pResource) {
return resCanCopy((struct RsResource *)(((unsigned char *)pResource) + __nvoc_rtti_ContextDma_RsResource.offset));
}
static NV_STATUS __nvoc_thunk_RmResource_ctxdmaControl_Prologue(struct ContextDma *pResource, struct CALL_CONTEXT *pCallContext, struct RS_RES_CONTROL_PARAMS_INTERNAL *pParams) {
return rmresControl_Prologue((struct RmResource *)(((unsigned char *)pResource) + __nvoc_rtti_ContextDma_RmResource.offset), pCallContext, pParams);
}
static void __nvoc_thunk_RsResource_ctxdmaAddAdditionalDependants(struct RsClient *pClient, struct ContextDma *pResource, RsResourceRef *pReference) {
resAddAdditionalDependants(pClient, (struct RsResource *)(((unsigned char *)pResource) + __nvoc_rtti_ContextDma_RsResource.offset), pReference);
}
static void __nvoc_thunk_RsResource_ctxdmaPreDestruct(struct ContextDma *pResource) {
resPreDestruct((struct RsResource *)(((unsigned char *)pResource) + __nvoc_rtti_ContextDma_RsResource.offset));
}
static PEVENTNOTIFICATION *__nvoc_thunk_Notifier_ctxdmaGetNotificationListPtr(struct ContextDma *pNotifier) {
return notifyGetNotificationListPtr((struct Notifier *)(((unsigned char *)pNotifier) + __nvoc_rtti_ContextDma_Notifier.offset));
}
static void __nvoc_thunk_RmResource_ctxdmaControl_Epilogue(struct ContextDma *pResource, struct CALL_CONTEXT *pCallContext, struct RS_RES_CONTROL_PARAMS_INTERNAL *pParams) {
rmresControl_Epilogue((struct RmResource *)(((unsigned char *)pResource) + __nvoc_rtti_ContextDma_RmResource.offset), pCallContext, pParams);
}
static struct NotifShare *__nvoc_thunk_Notifier_ctxdmaGetNotificationShare(struct ContextDma *pNotifier) {
return notifyGetNotificationShare((struct Notifier *)(((unsigned char *)pNotifier) + __nvoc_rtti_ContextDma_Notifier.offset));
}
static NV_STATUS __nvoc_thunk_RsResource_ctxdmaControlLookup(struct ContextDma *pResource, struct RS_RES_CONTROL_PARAMS_INTERNAL *pParams, const struct NVOC_EXPORTED_METHOD_DEF **ppEntry) {
return resControlLookup((struct RsResource *)(((unsigned char *)pResource) + __nvoc_rtti_ContextDma_RsResource.offset), pParams, ppEntry);
}
static NV_STATUS __nvoc_thunk_RsResource_ctxdmaMap(struct ContextDma *pResource, struct CALL_CONTEXT *pCallContext, RS_CPU_MAP_PARAMS *pParams, RsCpuMapping *pCpuMapping) {
return resMap((struct RsResource *)(((unsigned char *)pResource) + __nvoc_rtti_ContextDma_RsResource.offset), pCallContext, pParams, pCpuMapping);
}
static NV_STATUS __nvoc_thunk_Notifier_ctxdmaGetOrAllocNotifShare(struct ContextDma *pNotifier, NvHandle hNotifierClient, NvHandle hNotifierResource, struct NotifShare **ppNotifShare) {
return notifyGetOrAllocNotifShare((struct Notifier *)(((unsigned char *)pNotifier) + __nvoc_rtti_ContextDma_Notifier.offset), hNotifierClient, hNotifierResource, ppNotifShare);
}
#if !defined(NVOC_EXPORTED_METHOD_DISABLED_BY_FLAG)
#define NVOC_EXPORTED_METHOD_DISABLED_BY_FLAG(x) (0)
#endif
static const struct NVOC_EXPORTED_METHOD_DEF __nvoc_exported_method_def_ContextDma[] =
{
{ /* [0] */
#if NVOC_EXPORTED_METHOD_DISABLED_BY_FLAG(0x0u)
/*pFunc=*/ (void (*)(void)) NULL,
#else
/*pFunc=*/ (void (*)(void)) ctxdmaCtrlCmdUpdateContextdma_IMPL,
#endif // NVOC_EXPORTED_METHOD_DISABLED_BY_FLAG(0x0u)
/*flags=*/ 0x0u,
/*accessRight=*/0x0u,
/*methodId=*/ 0x20101u,
/*paramSize=*/ sizeof(NV0002_CTRL_UPDATE_CONTEXTDMA_PARAMS),
/*pClassInfo=*/ &(__nvoc_class_def_ContextDma.classInfo),
#if NV_PRINTF_STRINGS_ALLOWED
/*func=*/ "ctxdmaCtrlCmdUpdateContextdma"
#endif
},
{ /* [1] */
#if NVOC_EXPORTED_METHOD_DISABLED_BY_FLAG(0x10u)
/*pFunc=*/ (void (*)(void)) NULL,
#else
/*pFunc=*/ (void (*)(void)) ctxdmaCtrlCmdBindContextdma_IMPL,
#endif // NVOC_EXPORTED_METHOD_DISABLED_BY_FLAG(0x10u)
/*flags=*/ 0x10u,
/*accessRight=*/0x0u,
/*methodId=*/ 0x20102u,
/*paramSize=*/ sizeof(NV0002_CTRL_BIND_CONTEXTDMA_PARAMS),
/*pClassInfo=*/ &(__nvoc_class_def_ContextDma.classInfo),
#if NV_PRINTF_STRINGS_ALLOWED
/*func=*/ "ctxdmaCtrlCmdBindContextdma"
#endif
},
{ /* [2] */
#if NVOC_EXPORTED_METHOD_DISABLED_BY_FLAG(0x10u)
/*pFunc=*/ (void (*)(void)) NULL,
#else
/*pFunc=*/ (void (*)(void)) ctxdmaCtrlCmdUnbindContextdma_IMPL,
#endif // NVOC_EXPORTED_METHOD_DISABLED_BY_FLAG(0x10u)
/*flags=*/ 0x10u,
/*accessRight=*/0x0u,
/*methodId=*/ 0x20103u,
/*paramSize=*/ sizeof(NV0002_CTRL_UNBIND_CONTEXTDMA_PARAMS),
/*pClassInfo=*/ &(__nvoc_class_def_ContextDma.classInfo),
#if NV_PRINTF_STRINGS_ALLOWED
/*func=*/ "ctxdmaCtrlCmdUnbindContextdma"
#endif
},
};
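/*
 * Each entry above exports one control call of the NV01_CONTEXT_DMA class:
 * pFunc is the handler, methodId is the control command identifier (these
 * values correspond to the NV0002_CTRL_CMD_UPDATE/BIND/UNBIND_CONTEXTDMA
 * commands declared in ctrl/ctrl0002.h), paramSize is the size of that
 * command's parameter structure, and flags feeds the
 * NVOC_EXPORTED_METHOD_DISABLED_BY_FLAG() filter used in the #if blocks.
 */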
const struct NVOC_EXPORT_INFO __nvoc_export_info_ContextDma =
{
/*numEntries=*/ 3,
/*pExportEntries=*/ __nvoc_exported_method_def_ContextDma
};
void __nvoc_dtor_RmResource(RmResource*);
void __nvoc_dtor_Notifier(Notifier*);
void __nvoc_dtor_ContextDma(ContextDma *pThis) {
__nvoc_ctxdmaDestruct(pThis);
__nvoc_dtor_RmResource(&pThis->__nvoc_base_RmResource);
__nvoc_dtor_Notifier(&pThis->__nvoc_base_Notifier);
PORT_UNREFERENCED_VARIABLE(pThis);
}
void __nvoc_init_dataField_ContextDma(ContextDma *pThis) {
PORT_UNREFERENCED_VARIABLE(pThis);
}
NV_STATUS __nvoc_ctor_RmResource(RmResource* , struct CALL_CONTEXT *, struct RS_RES_ALLOC_PARAMS_INTERNAL *);
NV_STATUS __nvoc_ctor_Notifier(Notifier* , struct CALL_CONTEXT *);
NV_STATUS __nvoc_ctor_ContextDma(ContextDma *pThis, struct CALL_CONTEXT * arg_pCallContext, struct RS_RES_ALLOC_PARAMS_INTERNAL * arg_pParams) {
NV_STATUS status = NV_OK;
status = __nvoc_ctor_RmResource(&pThis->__nvoc_base_RmResource, arg_pCallContext, arg_pParams);
if (status != NV_OK) goto __nvoc_ctor_ContextDma_fail_RmResource;
status = __nvoc_ctor_Notifier(&pThis->__nvoc_base_Notifier, arg_pCallContext);
if (status != NV_OK) goto __nvoc_ctor_ContextDma_fail_Notifier;
__nvoc_init_dataField_ContextDma(pThis);
status = __nvoc_ctxdmaConstruct(pThis, arg_pCallContext, arg_pParams);
if (status != NV_OK) goto __nvoc_ctor_ContextDma_fail__init;
goto __nvoc_ctor_ContextDma_exit; // Success
__nvoc_ctor_ContextDma_fail__init:
__nvoc_dtor_Notifier(&pThis->__nvoc_base_Notifier);
__nvoc_ctor_ContextDma_fail_Notifier:
__nvoc_dtor_RmResource(&pThis->__nvoc_base_RmResource);
__nvoc_ctor_ContextDma_fail_RmResource:
__nvoc_ctor_ContextDma_exit:
return status;
}
static void __nvoc_init_funcTable_ContextDma_1(ContextDma *pThis) {
PORT_UNREFERENCED_VARIABLE(pThis);
pThis->__ctxdmaValidate__ = &ctxdmaValidate_IMPL;
pThis->__ctxdmaGetKernelVA__ = &ctxdmaGetKernelVA_IMPL;
pThis->__ctxdmaMapTo__ = &ctxdmaMapTo_IMPL;
pThis->__ctxdmaUnmapFrom__ = &ctxdmaUnmapFrom_IMPL;
#if !NVOC_EXPORTED_METHOD_DISABLED_BY_FLAG(0x0u)
pThis->__ctxdmaCtrlCmdUpdateContextdma__ = &ctxdmaCtrlCmdUpdateContextdma_IMPL;
#endif
#if !NVOC_EXPORTED_METHOD_DISABLED_BY_FLAG(0x10u)
pThis->__ctxdmaCtrlCmdBindContextdma__ = &ctxdmaCtrlCmdBindContextdma_IMPL;
#endif
#if !NVOC_EXPORTED_METHOD_DISABLED_BY_FLAG(0x10u)
pThis->__ctxdmaCtrlCmdUnbindContextdma__ = &ctxdmaCtrlCmdUnbindContextdma_IMPL;
#endif
pThis->__nvoc_base_RmResource.__nvoc_base_RsResource.__resMapTo__ = &__nvoc_thunk_ContextDma_resMapTo;
pThis->__nvoc_base_RmResource.__nvoc_base_RsResource.__resUnmapFrom__ = &__nvoc_thunk_ContextDma_resUnmapFrom;
pThis->__ctxdmaShareCallback__ = &__nvoc_thunk_RmResource_ctxdmaShareCallback;
pThis->__ctxdmaCheckMemInterUnmap__ = &__nvoc_thunk_RmResource_ctxdmaCheckMemInterUnmap;
pThis->__ctxdmaAccessCallback__ = &__nvoc_thunk_RmResource_ctxdmaAccessCallback;
pThis->__ctxdmaGetMemInterMapParams__ = &__nvoc_thunk_RmResource_ctxdmaGetMemInterMapParams;
pThis->__ctxdmaGetMemoryMappingDescriptor__ = &__nvoc_thunk_RmResource_ctxdmaGetMemoryMappingDescriptor;
pThis->__ctxdmaSetNotificationShare__ = &__nvoc_thunk_Notifier_ctxdmaSetNotificationShare;
pThis->__ctxdmaControl__ = &__nvoc_thunk_RsResource_ctxdmaControl;
pThis->__ctxdmaControlFilter__ = &__nvoc_thunk_RsResource_ctxdmaControlFilter;
pThis->__ctxdmaGetRefCount__ = &__nvoc_thunk_RsResource_ctxdmaGetRefCount;
pThis->__ctxdmaUnregisterEvent__ = &__nvoc_thunk_Notifier_ctxdmaUnregisterEvent;
pThis->__ctxdmaUnmap__ = &__nvoc_thunk_RsResource_ctxdmaUnmap;
pThis->__ctxdmaCanCopy__ = &__nvoc_thunk_RsResource_ctxdmaCanCopy;
pThis->__ctxdmaControl_Prologue__ = &__nvoc_thunk_RmResource_ctxdmaControl_Prologue;
pThis->__ctxdmaAddAdditionalDependants__ = &__nvoc_thunk_RsResource_ctxdmaAddAdditionalDependants;
pThis->__ctxdmaPreDestruct__ = &__nvoc_thunk_RsResource_ctxdmaPreDestruct;
pThis->__ctxdmaGetNotificationListPtr__ = &__nvoc_thunk_Notifier_ctxdmaGetNotificationListPtr;
pThis->__ctxdmaControl_Epilogue__ = &__nvoc_thunk_RmResource_ctxdmaControl_Epilogue;
pThis->__ctxdmaGetNotificationShare__ = &__nvoc_thunk_Notifier_ctxdmaGetNotificationShare;
pThis->__ctxdmaControlLookup__ = &__nvoc_thunk_RsResource_ctxdmaControlLookup;
pThis->__ctxdmaMap__ = &__nvoc_thunk_RsResource_ctxdmaMap;
pThis->__ctxdmaGetOrAllocNotifShare__ = &__nvoc_thunk_Notifier_ctxdmaGetOrAllocNotifShare;
}
void __nvoc_init_funcTable_ContextDma(ContextDma *pThis) {
__nvoc_init_funcTable_ContextDma_1(pThis);
}
void __nvoc_init_RmResource(RmResource*);
void __nvoc_init_Notifier(Notifier*);
void __nvoc_init_ContextDma(ContextDma *pThis) {
pThis->__nvoc_pbase_ContextDma = pThis;
pThis->__nvoc_pbase_Object = &pThis->__nvoc_base_RmResource.__nvoc_base_RsResource.__nvoc_base_Object;
pThis->__nvoc_pbase_RsResource = &pThis->__nvoc_base_RmResource.__nvoc_base_RsResource;
pThis->__nvoc_pbase_RmResourceCommon = &pThis->__nvoc_base_RmResource.__nvoc_base_RmResourceCommon;
pThis->__nvoc_pbase_RmResource = &pThis->__nvoc_base_RmResource;
pThis->__nvoc_pbase_INotifier = &pThis->__nvoc_base_Notifier.__nvoc_base_INotifier;
pThis->__nvoc_pbase_Notifier = &pThis->__nvoc_base_Notifier;
__nvoc_init_RmResource(&pThis->__nvoc_base_RmResource);
__nvoc_init_Notifier(&pThis->__nvoc_base_Notifier);
__nvoc_init_funcTable_ContextDma(pThis);
}
NV_STATUS __nvoc_objCreate_ContextDma(ContextDma **ppThis, Dynamic *pParent, NvU32 createFlags, struct CALL_CONTEXT * arg_pCallContext, struct RS_RES_ALLOC_PARAMS_INTERNAL * arg_pParams) {
NV_STATUS status;
Object *pParentObj;
ContextDma *pThis;
pThis = portMemAllocNonPaged(sizeof(ContextDma));
if (pThis == NULL) return NV_ERR_NO_MEMORY;
portMemSet(pThis, 0, sizeof(ContextDma));
__nvoc_initRtti(staticCast(pThis, Dynamic), &__nvoc_class_def_ContextDma);
if (pParent != NULL && !(createFlags & NVOC_OBJ_CREATE_FLAGS_PARENT_HALSPEC_ONLY))
{
pParentObj = dynamicCast(pParent, Object);
objAddChild(pParentObj, &pThis->__nvoc_base_RmResource.__nvoc_base_RsResource.__nvoc_base_Object);
}
else
{
pThis->__nvoc_base_RmResource.__nvoc_base_RsResource.__nvoc_base_Object.pParent = NULL;
}
__nvoc_init_ContextDma(pThis);
status = __nvoc_ctor_ContextDma(pThis, arg_pCallContext, arg_pParams);
if (status != NV_OK) goto __nvoc_objCreate_ContextDma_cleanup;
*ppThis = pThis;
return NV_OK;
__nvoc_objCreate_ContextDma_cleanup:
// do not call destructors here since the constructor already called them
portMemFree(pThis);
return status;
}
NV_STATUS __nvoc_objCreateDynamic_ContextDma(ContextDma **ppThis, Dynamic *pParent, NvU32 createFlags, va_list args) {
NV_STATUS status;
struct CALL_CONTEXT * arg_pCallContext = va_arg(args, struct CALL_CONTEXT *);
struct RS_RES_ALLOC_PARAMS_INTERNAL * arg_pParams = va_arg(args, struct RS_RES_ALLOC_PARAMS_INTERNAL *);
status = __nvoc_objCreate_ContextDma(ppThis, pParent, createFlags, arg_pCallContext, arg_pParams);
return status;
}
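__nvoc_ctor_ContextDma earlier in this file uses the usual goto-based unwinding idiom: if a later base constructor or the class's own construct step fails, every base that was already constructed is destructed in reverse order before the status is returned. A compilable sketch of the same idiom with placeholder names (BaseA, BaseB, Child are illustrative, not RM types):

#include <stdbool.h>

typedef int STATUS;
#define STATUS_OK   0
#define STATUS_FAIL 1

typedef struct { bool ready; } BaseA;
typedef struct { bool ready; } BaseB;
typedef struct { BaseA a; BaseB b; } Child;

static STATUS ctorBaseA(BaseA *p) { p->ready = true;  return STATUS_OK; }
static void   dtorBaseA(BaseA *p) { p->ready = false; }
static STATUS ctorBaseB(BaseB *p) { p->ready = true;  return STATUS_OK; }
static void   dtorBaseB(BaseB *p) { p->ready = false; }
static STATUS childInit(Child *p) { (void)p; return STATUS_OK; }

static STATUS ctorChild(Child *pThis)
{
    STATUS status;

    status = ctorBaseA(&pThis->a);
    if (status != STATUS_OK) goto fail_BaseA;

    status = ctorBaseB(&pThis->b);
    if (status != STATUS_OK) goto fail_BaseB;

    status = childInit(pThis);
    if (status != STATUS_OK) goto fail_init;

    return STATUS_OK;          /* success: nothing to unwind */

fail_init:                     /* unwind in reverse construction order */
    dtorBaseB(&pThis->b);
fail_BaseB:
    dtorBaseA(&pThis->a);
fail_BaseA:
    return status;
}

int main(void)
{
    Child c;
    return (ctorChild(&c) == STATUS_OK) ? 0 : 1;
}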


@@ -0,0 +1,356 @@
#ifndef _G_CONTEXT_DMA_NVOC_H_
#define _G_CONTEXT_DMA_NVOC_H_
#include "nvoc/runtime.h"
#ifdef __cplusplus
extern "C" {
#endif
/*
* SPDX-FileCopyrightText: Copyright (c) 1993-2021 NVIDIA CORPORATION & AFFILIATES. All rights reserved.
* SPDX-License-Identifier: MIT
*
* Permission is hereby granted, free of charge, to any person obtaining a
* copy of this software and associated documentation files (the "Software"),
* to deal in the Software without restriction, including without limitation
* the rights to use, copy, modify, merge, publish, distribute, sublicense,
* and/or sell copies of the Software, and to permit persons to whom the
* Software is furnished to do so, subject to the following conditions:
*
* The above copyright notice and this permission notice shall be included in
* all copies or substantial portions of the Software.
*
* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
* IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
* FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL
* THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
* LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
* FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER
* DEALINGS IN THE SOFTWARE.
*/
#include "g_context_dma_nvoc.h"
#ifndef CONTEXT_DMA_H
#define CONTEXT_DMA_H
#include "core/core.h"
#include "gpu/mem_mgr/mem_desc.h"
#include "rmapi/resource.h"
#include "rmapi/event.h"
#include "ctrl/ctrl0002.h"
#include "rmapi/control.h" // for macro RMCTRL_EXPORT etc.
#include "nvlimits.h"
struct Device;
#ifndef __NVOC_CLASS_Device_TYPEDEF__
#define __NVOC_CLASS_Device_TYPEDEF__
typedef struct Device Device;
#endif /* __NVOC_CLASS_Device_TYPEDEF__ */
#ifndef __nvoc_class_id_Device
#define __nvoc_class_id_Device 0xe0ac20
#endif /* __nvoc_class_id_Device */
struct Memory;
#ifndef __NVOC_CLASS_Memory_TYPEDEF__
#define __NVOC_CLASS_Memory_TYPEDEF__
typedef struct Memory Memory;
#endif /* __NVOC_CLASS_Memory_TYPEDEF__ */
#ifndef __nvoc_class_id_Memory
#define __nvoc_class_id_Memory 0x4789f2
#endif /* __nvoc_class_id_Memory */
/*!
* RM internal class representing NV01_CONTEXT_DMA
*/
#ifdef NVOC_CONTEXT_DMA_H_PRIVATE_ACCESS_ALLOWED
#define PRIVATE_FIELD(x) x
#else
#define PRIVATE_FIELD(x) NVOC_PRIVATE_FIELD(x)
#endif
struct ContextDma {
const struct NVOC_RTTI *__nvoc_rtti;
struct RmResource __nvoc_base_RmResource;
struct Notifier __nvoc_base_Notifier;
struct Object *__nvoc_pbase_Object;
struct RsResource *__nvoc_pbase_RsResource;
struct RmResourceCommon *__nvoc_pbase_RmResourceCommon;
struct RmResource *__nvoc_pbase_RmResource;
struct INotifier *__nvoc_pbase_INotifier;
struct Notifier *__nvoc_pbase_Notifier;
struct ContextDma *__nvoc_pbase_ContextDma;
NV_STATUS (*__ctxdmaValidate__)(struct ContextDma *, NvU64, NvU64);
NV_STATUS (*__ctxdmaGetKernelVA__)(struct ContextDma *, NvU64, NvU64, void **, NvU32);
NV_STATUS (*__ctxdmaMapTo__)(struct ContextDma *, struct RS_RES_MAP_TO_PARAMS *);
NV_STATUS (*__ctxdmaUnmapFrom__)(struct ContextDma *, struct RS_RES_UNMAP_FROM_PARAMS *);
NV_STATUS (*__ctxdmaCtrlCmdUpdateContextdma__)(struct ContextDma *, NV0002_CTRL_UPDATE_CONTEXTDMA_PARAMS *);
NV_STATUS (*__ctxdmaCtrlCmdBindContextdma__)(struct ContextDma *, NV0002_CTRL_BIND_CONTEXTDMA_PARAMS *);
NV_STATUS (*__ctxdmaCtrlCmdUnbindContextdma__)(struct ContextDma *, NV0002_CTRL_UNBIND_CONTEXTDMA_PARAMS *);
NvBool (*__ctxdmaShareCallback__)(struct ContextDma *, struct RsClient *, struct RsResourceRef *, RS_SHARE_POLICY *);
NV_STATUS (*__ctxdmaCheckMemInterUnmap__)(struct ContextDma *, NvBool);
NvBool (*__ctxdmaAccessCallback__)(struct ContextDma *, struct RsClient *, void *, RsAccessRight);
NV_STATUS (*__ctxdmaGetMemInterMapParams__)(struct ContextDma *, RMRES_MEM_INTER_MAP_PARAMS *);
NV_STATUS (*__ctxdmaGetMemoryMappingDescriptor__)(struct ContextDma *, struct MEMORY_DESCRIPTOR **);
void (*__ctxdmaSetNotificationShare__)(struct ContextDma *, struct NotifShare *);
NV_STATUS (*__ctxdmaControl__)(struct ContextDma *, struct CALL_CONTEXT *, struct RS_RES_CONTROL_PARAMS_INTERNAL *);
NV_STATUS (*__ctxdmaControlFilter__)(struct ContextDma *, struct CALL_CONTEXT *, struct RS_RES_CONTROL_PARAMS_INTERNAL *);
NvU32 (*__ctxdmaGetRefCount__)(struct ContextDma *);
NV_STATUS (*__ctxdmaUnregisterEvent__)(struct ContextDma *, NvHandle, NvHandle, NvHandle, NvHandle);
NV_STATUS (*__ctxdmaUnmap__)(struct ContextDma *, struct CALL_CONTEXT *, RsCpuMapping *);
NvBool (*__ctxdmaCanCopy__)(struct ContextDma *);
NV_STATUS (*__ctxdmaControl_Prologue__)(struct ContextDma *, struct CALL_CONTEXT *, struct RS_RES_CONTROL_PARAMS_INTERNAL *);
void (*__ctxdmaAddAdditionalDependants__)(struct RsClient *, struct ContextDma *, RsResourceRef *);
void (*__ctxdmaPreDestruct__)(struct ContextDma *);
PEVENTNOTIFICATION *(*__ctxdmaGetNotificationListPtr__)(struct ContextDma *);
void (*__ctxdmaControl_Epilogue__)(struct ContextDma *, struct CALL_CONTEXT *, struct RS_RES_CONTROL_PARAMS_INTERNAL *);
struct NotifShare *(*__ctxdmaGetNotificationShare__)(struct ContextDma *);
NV_STATUS (*__ctxdmaControlLookup__)(struct ContextDma *, struct RS_RES_CONTROL_PARAMS_INTERNAL *, const struct NVOC_EXPORTED_METHOD_DEF **);
NV_STATUS (*__ctxdmaMap__)(struct ContextDma *, struct CALL_CONTEXT *, RS_CPU_MAP_PARAMS *, RsCpuMapping *);
NV_STATUS (*__ctxdmaGetOrAllocNotifShare__)(struct ContextDma *, NvHandle, NvHandle, struct NotifShare **);
NvU32 Class;
NvU32 Flags;
NvBool bReadOnly;
NvU32 CacheSnoop;
NvU32 Type;
NvU64 Limit;
NV_ADDRESS_SPACE AddressSpace;
NvBool bUnicast;
void *KernelVAddr[8];
void *KernelPriv;
NvU64 FbAperture[8];
NvU64 FbApertureLen[8];
struct Memory *pMemory;
struct MEMORY_DESCRIPTOR *pMemDesc;
NvU32 Instance[8];
NvU32 InstRefCount[8];
struct OBJGPU *pGpu;
struct Device *pDevice;
};
#ifndef __NVOC_CLASS_ContextDma_TYPEDEF__
#define __NVOC_CLASS_ContextDma_TYPEDEF__
typedef struct ContextDma ContextDma;
#endif /* __NVOC_CLASS_ContextDma_TYPEDEF__ */
#ifndef __nvoc_class_id_ContextDma
#define __nvoc_class_id_ContextDma 0x88441b
#endif /* __nvoc_class_id_ContextDma */
extern const struct NVOC_CLASS_DEF __nvoc_class_def_ContextDma;
#define __staticCast_ContextDma(pThis) \
((pThis)->__nvoc_pbase_ContextDma)
#ifdef __nvoc_context_dma_h_disabled
#define __dynamicCast_ContextDma(pThis) ((ContextDma*)NULL)
#else //__nvoc_context_dma_h_disabled
#define __dynamicCast_ContextDma(pThis) \
((ContextDma*)__nvoc_dynamicCast(staticCast((pThis), Dynamic), classInfo(ContextDma)))
#endif //__nvoc_context_dma_h_disabled
NV_STATUS __nvoc_objCreateDynamic_ContextDma(ContextDma**, Dynamic*, NvU32, va_list);
NV_STATUS __nvoc_objCreate_ContextDma(ContextDma**, Dynamic*, NvU32, struct CALL_CONTEXT * arg_pCallContext, struct RS_RES_ALLOC_PARAMS_INTERNAL * arg_pParams);
#define __objCreate_ContextDma(ppNewObj, pParent, createFlags, arg_pCallContext, arg_pParams) \
__nvoc_objCreate_ContextDma((ppNewObj), staticCast((pParent), Dynamic), (createFlags), arg_pCallContext, arg_pParams)
#define ctxdmaValidate(pContextDma, start, len) ctxdmaValidate_DISPATCH(pContextDma, start, len)
#define ctxdmaGetKernelVA(pContextDma, start, len, arg0, VA_idx) ctxdmaGetKernelVA_DISPATCH(pContextDma, start, len, arg0, VA_idx)
#define ctxdmaMapTo(pContextDma, pParams) ctxdmaMapTo_DISPATCH(pContextDma, pParams)
#define ctxdmaUnmapFrom(pContextDma, pParams) ctxdmaUnmapFrom_DISPATCH(pContextDma, pParams)
#define ctxdmaCtrlCmdUpdateContextdma(pContextDma, pUpdateCtxtDmaParams) ctxdmaCtrlCmdUpdateContextdma_DISPATCH(pContextDma, pUpdateCtxtDmaParams)
#define ctxdmaCtrlCmdBindContextdma(pContextDma, pBindCtxtDmaParams) ctxdmaCtrlCmdBindContextdma_DISPATCH(pContextDma, pBindCtxtDmaParams)
#define ctxdmaCtrlCmdUnbindContextdma(pContextDma, pUnbindCtxtDmaParams) ctxdmaCtrlCmdUnbindContextdma_DISPATCH(pContextDma, pUnbindCtxtDmaParams)
#define ctxdmaShareCallback(pResource, pInvokingClient, pParentRef, pSharePolicy) ctxdmaShareCallback_DISPATCH(pResource, pInvokingClient, pParentRef, pSharePolicy)
#define ctxdmaCheckMemInterUnmap(pRmResource, bSubdeviceHandleProvided) ctxdmaCheckMemInterUnmap_DISPATCH(pRmResource, bSubdeviceHandleProvided)
#define ctxdmaAccessCallback(pResource, pInvokingClient, pAllocParams, accessRight) ctxdmaAccessCallback_DISPATCH(pResource, pInvokingClient, pAllocParams, accessRight)
#define ctxdmaGetMemInterMapParams(pRmResource, pParams) ctxdmaGetMemInterMapParams_DISPATCH(pRmResource, pParams)
#define ctxdmaGetMemoryMappingDescriptor(pRmResource, ppMemDesc) ctxdmaGetMemoryMappingDescriptor_DISPATCH(pRmResource, ppMemDesc)
#define ctxdmaSetNotificationShare(pNotifier, pNotifShare) ctxdmaSetNotificationShare_DISPATCH(pNotifier, pNotifShare)
#define ctxdmaControl(pResource, pCallContext, pParams) ctxdmaControl_DISPATCH(pResource, pCallContext, pParams)
#define ctxdmaControlFilter(pResource, pCallContext, pParams) ctxdmaControlFilter_DISPATCH(pResource, pCallContext, pParams)
#define ctxdmaGetRefCount(pResource) ctxdmaGetRefCount_DISPATCH(pResource)
#define ctxdmaUnregisterEvent(pNotifier, hNotifierClient, hNotifierResource, hEventClient, hEvent) ctxdmaUnregisterEvent_DISPATCH(pNotifier, hNotifierClient, hNotifierResource, hEventClient, hEvent)
#define ctxdmaUnmap(pResource, pCallContext, pCpuMapping) ctxdmaUnmap_DISPATCH(pResource, pCallContext, pCpuMapping)
#define ctxdmaCanCopy(pResource) ctxdmaCanCopy_DISPATCH(pResource)
#define ctxdmaControl_Prologue(pResource, pCallContext, pParams) ctxdmaControl_Prologue_DISPATCH(pResource, pCallContext, pParams)
#define ctxdmaAddAdditionalDependants(pClient, pResource, pReference) ctxdmaAddAdditionalDependants_DISPATCH(pClient, pResource, pReference)
#define ctxdmaPreDestruct(pResource) ctxdmaPreDestruct_DISPATCH(pResource)
#define ctxdmaGetNotificationListPtr(pNotifier) ctxdmaGetNotificationListPtr_DISPATCH(pNotifier)
#define ctxdmaControl_Epilogue(pResource, pCallContext, pParams) ctxdmaControl_Epilogue_DISPATCH(pResource, pCallContext, pParams)
#define ctxdmaGetNotificationShare(pNotifier) ctxdmaGetNotificationShare_DISPATCH(pNotifier)
#define ctxdmaControlLookup(pResource, pParams, ppEntry) ctxdmaControlLookup_DISPATCH(pResource, pParams, ppEntry)
#define ctxdmaMap(pResource, pCallContext, pParams, pCpuMapping) ctxdmaMap_DISPATCH(pResource, pCallContext, pParams, pCpuMapping)
#define ctxdmaGetOrAllocNotifShare(pNotifier, hNotifierClient, hNotifierResource, ppNotifShare) ctxdmaGetOrAllocNotifShare_DISPATCH(pNotifier, hNotifierClient, hNotifierResource, ppNotifShare)
NV_STATUS ctxdmaValidate_IMPL(struct ContextDma *pContextDma, NvU64 start, NvU64 len);
static inline NV_STATUS ctxdmaValidate_DISPATCH(struct ContextDma *pContextDma, NvU64 start, NvU64 len) {
return pContextDma->__ctxdmaValidate__(pContextDma, start, len);
}
NV_STATUS ctxdmaGetKernelVA_IMPL(struct ContextDma *pContextDma, NvU64 start, NvU64 len, void **arg0, NvU32 VA_idx);
static inline NV_STATUS ctxdmaGetKernelVA_DISPATCH(struct ContextDma *pContextDma, NvU64 start, NvU64 len, void **arg0, NvU32 VA_idx) {
return pContextDma->__ctxdmaGetKernelVA__(pContextDma, start, len, arg0, VA_idx);
}
NV_STATUS ctxdmaMapTo_IMPL(struct ContextDma *pContextDma, struct RS_RES_MAP_TO_PARAMS *pParams);
static inline NV_STATUS ctxdmaMapTo_DISPATCH(struct ContextDma *pContextDma, struct RS_RES_MAP_TO_PARAMS *pParams) {
return pContextDma->__ctxdmaMapTo__(pContextDma, pParams);
}
NV_STATUS ctxdmaUnmapFrom_IMPL(struct ContextDma *pContextDma, struct RS_RES_UNMAP_FROM_PARAMS *pParams);
static inline NV_STATUS ctxdmaUnmapFrom_DISPATCH(struct ContextDma *pContextDma, struct RS_RES_UNMAP_FROM_PARAMS *pParams) {
return pContextDma->__ctxdmaUnmapFrom__(pContextDma, pParams);
}
NV_STATUS ctxdmaCtrlCmdUpdateContextdma_IMPL(struct ContextDma *pContextDma, NV0002_CTRL_UPDATE_CONTEXTDMA_PARAMS *pUpdateCtxtDmaParams);
static inline NV_STATUS ctxdmaCtrlCmdUpdateContextdma_DISPATCH(struct ContextDma *pContextDma, NV0002_CTRL_UPDATE_CONTEXTDMA_PARAMS *pUpdateCtxtDmaParams) {
return pContextDma->__ctxdmaCtrlCmdUpdateContextdma__(pContextDma, pUpdateCtxtDmaParams);
}
NV_STATUS ctxdmaCtrlCmdBindContextdma_IMPL(struct ContextDma *pContextDma, NV0002_CTRL_BIND_CONTEXTDMA_PARAMS *pBindCtxtDmaParams);
static inline NV_STATUS ctxdmaCtrlCmdBindContextdma_DISPATCH(struct ContextDma *pContextDma, NV0002_CTRL_BIND_CONTEXTDMA_PARAMS *pBindCtxtDmaParams) {
return pContextDma->__ctxdmaCtrlCmdBindContextdma__(pContextDma, pBindCtxtDmaParams);
}
NV_STATUS ctxdmaCtrlCmdUnbindContextdma_IMPL(struct ContextDma *pContextDma, NV0002_CTRL_UNBIND_CONTEXTDMA_PARAMS *pUnbindCtxtDmaParams);
static inline NV_STATUS ctxdmaCtrlCmdUnbindContextdma_DISPATCH(struct ContextDma *pContextDma, NV0002_CTRL_UNBIND_CONTEXTDMA_PARAMS *pUnbindCtxtDmaParams) {
return pContextDma->__ctxdmaCtrlCmdUnbindContextdma__(pContextDma, pUnbindCtxtDmaParams);
}
static inline NvBool ctxdmaShareCallback_DISPATCH(struct ContextDma *pResource, struct RsClient *pInvokingClient, struct RsResourceRef *pParentRef, RS_SHARE_POLICY *pSharePolicy) {
return pResource->__ctxdmaShareCallback__(pResource, pInvokingClient, pParentRef, pSharePolicy);
}
static inline NV_STATUS ctxdmaCheckMemInterUnmap_DISPATCH(struct ContextDma *pRmResource, NvBool bSubdeviceHandleProvided) {
return pRmResource->__ctxdmaCheckMemInterUnmap__(pRmResource, bSubdeviceHandleProvided);
}
static inline NvBool ctxdmaAccessCallback_DISPATCH(struct ContextDma *pResource, struct RsClient *pInvokingClient, void *pAllocParams, RsAccessRight accessRight) {
return pResource->__ctxdmaAccessCallback__(pResource, pInvokingClient, pAllocParams, accessRight);
}
static inline NV_STATUS ctxdmaGetMemInterMapParams_DISPATCH(struct ContextDma *pRmResource, RMRES_MEM_INTER_MAP_PARAMS *pParams) {
return pRmResource->__ctxdmaGetMemInterMapParams__(pRmResource, pParams);
}
static inline NV_STATUS ctxdmaGetMemoryMappingDescriptor_DISPATCH(struct ContextDma *pRmResource, struct MEMORY_DESCRIPTOR **ppMemDesc) {
return pRmResource->__ctxdmaGetMemoryMappingDescriptor__(pRmResource, ppMemDesc);
}
static inline void ctxdmaSetNotificationShare_DISPATCH(struct ContextDma *pNotifier, struct NotifShare *pNotifShare) {
pNotifier->__ctxdmaSetNotificationShare__(pNotifier, pNotifShare);
}
static inline NV_STATUS ctxdmaControl_DISPATCH(struct ContextDma *pResource, struct CALL_CONTEXT *pCallContext, struct RS_RES_CONTROL_PARAMS_INTERNAL *pParams) {
return pResource->__ctxdmaControl__(pResource, pCallContext, pParams);
}
static inline NV_STATUS ctxdmaControlFilter_DISPATCH(struct ContextDma *pResource, struct CALL_CONTEXT *pCallContext, struct RS_RES_CONTROL_PARAMS_INTERNAL *pParams) {
return pResource->__ctxdmaControlFilter__(pResource, pCallContext, pParams);
}
static inline NvU32 ctxdmaGetRefCount_DISPATCH(struct ContextDma *pResource) {
return pResource->__ctxdmaGetRefCount__(pResource);
}
static inline NV_STATUS ctxdmaUnregisterEvent_DISPATCH(struct ContextDma *pNotifier, NvHandle hNotifierClient, NvHandle hNotifierResource, NvHandle hEventClient, NvHandle hEvent) {
return pNotifier->__ctxdmaUnregisterEvent__(pNotifier, hNotifierClient, hNotifierResource, hEventClient, hEvent);
}
static inline NV_STATUS ctxdmaUnmap_DISPATCH(struct ContextDma *pResource, struct CALL_CONTEXT *pCallContext, RsCpuMapping *pCpuMapping) {
return pResource->__ctxdmaUnmap__(pResource, pCallContext, pCpuMapping);
}
static inline NvBool ctxdmaCanCopy_DISPATCH(struct ContextDma *pResource) {
return pResource->__ctxdmaCanCopy__(pResource);
}
static inline NV_STATUS ctxdmaControl_Prologue_DISPATCH(struct ContextDma *pResource, struct CALL_CONTEXT *pCallContext, struct RS_RES_CONTROL_PARAMS_INTERNAL *pParams) {
return pResource->__ctxdmaControl_Prologue__(pResource, pCallContext, pParams);
}
static inline void ctxdmaAddAdditionalDependants_DISPATCH(struct RsClient *pClient, struct ContextDma *pResource, RsResourceRef *pReference) {
pResource->__ctxdmaAddAdditionalDependants__(pClient, pResource, pReference);
}
static inline void ctxdmaPreDestruct_DISPATCH(struct ContextDma *pResource) {
pResource->__ctxdmaPreDestruct__(pResource);
}
static inline PEVENTNOTIFICATION *ctxdmaGetNotificationListPtr_DISPATCH(struct ContextDma *pNotifier) {
return pNotifier->__ctxdmaGetNotificationListPtr__(pNotifier);
}
static inline void ctxdmaControl_Epilogue_DISPATCH(struct ContextDma *pResource, struct CALL_CONTEXT *pCallContext, struct RS_RES_CONTROL_PARAMS_INTERNAL *pParams) {
pResource->__ctxdmaControl_Epilogue__(pResource, pCallContext, pParams);
}
static inline struct NotifShare *ctxdmaGetNotificationShare_DISPATCH(struct ContextDma *pNotifier) {
return pNotifier->__ctxdmaGetNotificationShare__(pNotifier);
}
static inline NV_STATUS ctxdmaControlLookup_DISPATCH(struct ContextDma *pResource, struct RS_RES_CONTROL_PARAMS_INTERNAL *pParams, const struct NVOC_EXPORTED_METHOD_DEF **ppEntry) {
return pResource->__ctxdmaControlLookup__(pResource, pParams, ppEntry);
}
static inline NV_STATUS ctxdmaMap_DISPATCH(struct ContextDma *pResource, struct CALL_CONTEXT *pCallContext, RS_CPU_MAP_PARAMS *pParams, RsCpuMapping *pCpuMapping) {
return pResource->__ctxdmaMap__(pResource, pCallContext, pParams, pCpuMapping);
}
static inline NV_STATUS ctxdmaGetOrAllocNotifShare_DISPATCH(struct ContextDma *pNotifier, NvHandle hNotifierClient, NvHandle hNotifierResource, struct NotifShare **ppNotifShare) {
return pNotifier->__ctxdmaGetOrAllocNotifShare__(pNotifier, hNotifierClient, hNotifierResource, ppNotifShare);
}
NV_STATUS ctxdmaConstruct_IMPL(struct ContextDma *arg_pCtxdma, struct CALL_CONTEXT *arg_pCallContext, struct RS_RES_ALLOC_PARAMS_INTERNAL *arg_pParams);
#define __nvoc_ctxdmaConstruct(arg_pCtxdma, arg_pCallContext, arg_pParams) ctxdmaConstruct_IMPL(arg_pCtxdma, arg_pCallContext, arg_pParams)
void ctxdmaDestruct_IMPL(struct ContextDma *pCtxdma);
#define __nvoc_ctxdmaDestruct(pCtxdma) ctxdmaDestruct_IMPL(pCtxdma)
NvBool ctxdmaIsBound_IMPL(struct ContextDma *pContextDma);
#ifdef __nvoc_context_dma_h_disabled
static inline NvBool ctxdmaIsBound(struct ContextDma *pContextDma) {
NV_ASSERT_FAILED_PRECOMP("ContextDma was disabled!");
return NV_FALSE;
}
#else //__nvoc_context_dma_h_disabled
#define ctxdmaIsBound(pContextDma) ctxdmaIsBound_IMPL(pContextDma)
#endif //__nvoc_context_dma_h_disabled
NV_STATUS ctxdmaGetByHandle_IMPL(struct RsClient *pClient, NvHandle hContextDma, struct ContextDma **arg0);
#define ctxdmaGetByHandle(pClient, hContextDma, arg0) ctxdmaGetByHandle_IMPL(pClient, hContextDma, arg0)
#undef PRIVATE_FIELD
// ****************************************************************************
// Deprecated Definitions
// ****************************************************************************
#if RM_STRICT_CONFIG_EMIT_DEPRECATED_CONTEXT_DMA_DEFINITIONS == 1
/**
* @warning This function is deprecated! Please use ctxdmaGetByHandle.
*/
NV_STATUS CliGetContextDma(NvHandle hClient, NvHandle hContextDma, struct ContextDma **);
#endif
#endif /* CONTEXT_DMA_H */
#ifdef __cplusplus
} // extern "C"
#endif
#endif // _G_CONTEXT_DMA_NVOC_H_
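The __nvoc_thunk_* helpers in the generated .c file above implement base/derived casts for C "multiple inheritance" by adding or subtracting the byte offset of the embedded base subobject (the real code reads those offsets from its NVOC_RTTI tables). A self-contained sketch of that pointer arithmetic, with illustrative Animal/Dog names:

#include <stddef.h>
#include <stdio.h>

typedef struct { int legs; } Animal;            /* "base" */

typedef struct {
    int    id;
    Animal base_Animal;                         /* embedded base subobject */
} Dog;                                          /* "derived" */

/* derived -> base: add the offset of the embedded member */
static Animal *dogToAnimal(Dog *pDog)
{
    return (Animal *)((unsigned char *)pDog + offsetof(Dog, base_Animal));
}

/* base -> derived: subtract the same offset (valid only when the Animal
 * really is the one embedded in a Dog) */
static Dog *animalToDog(Animal *pAnimal)
{
    return (Dog *)((unsigned char *)pAnimal - offsetof(Dog, base_Animal));
}

int main(void)
{
    Dog d = { .id = 7, .base_Animal = { .legs = 4 } };
    Animal *pA = dogToAnimal(&d);
    printf("%d %d\n", pA->legs, animalToDog(pA)->id);  /* prints "4 7" */
    return 0;
}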


@@ -0,0 +1,286 @@
#define NVOC_DCE_CLIENT_H_PRIVATE_ACCESS_ALLOWED
#include "nvoc/runtime.h"
#include "nvoc/rtti.h"
#include "nvtypes.h"
#include "nvport/nvport.h"
#include "nvport/inline/util_valist.h"
#include "utils/nvassert.h"
#include "g_dce_client_nvoc.h"
#ifdef DEBUG
char __nvoc_class_id_uniqueness_check_0x61649c = 1;
#endif
extern const struct NVOC_CLASS_DEF __nvoc_class_def_OBJDCECLIENTRM;
extern const struct NVOC_CLASS_DEF __nvoc_class_def_Object;
extern const struct NVOC_CLASS_DEF __nvoc_class_def_OBJENGSTATE;
void __nvoc_init_OBJDCECLIENTRM(OBJDCECLIENTRM*);
void __nvoc_init_funcTable_OBJDCECLIENTRM(OBJDCECLIENTRM*);
NV_STATUS __nvoc_ctor_OBJDCECLIENTRM(OBJDCECLIENTRM*);
void __nvoc_init_dataField_OBJDCECLIENTRM(OBJDCECLIENTRM*);
void __nvoc_dtor_OBJDCECLIENTRM(OBJDCECLIENTRM*);
extern const struct NVOC_EXPORT_INFO __nvoc_export_info_OBJDCECLIENTRM;
static const struct NVOC_RTTI __nvoc_rtti_OBJDCECLIENTRM_OBJDCECLIENTRM = {
/*pClassDef=*/ &__nvoc_class_def_OBJDCECLIENTRM,
/*dtor=*/ (NVOC_DYNAMIC_DTOR) &__nvoc_dtor_OBJDCECLIENTRM,
/*offset=*/ 0,
};
static const struct NVOC_RTTI __nvoc_rtti_OBJDCECLIENTRM_Object = {
/*pClassDef=*/ &__nvoc_class_def_Object,
/*dtor=*/ &__nvoc_destructFromBase,
/*offset=*/ NV_OFFSETOF(OBJDCECLIENTRM, __nvoc_base_OBJENGSTATE.__nvoc_base_Object),
};
static const struct NVOC_RTTI __nvoc_rtti_OBJDCECLIENTRM_OBJENGSTATE = {
/*pClassDef=*/ &__nvoc_class_def_OBJENGSTATE,
/*dtor=*/ &__nvoc_destructFromBase,
/*offset=*/ NV_OFFSETOF(OBJDCECLIENTRM, __nvoc_base_OBJENGSTATE),
};
static const struct NVOC_CASTINFO __nvoc_castinfo_OBJDCECLIENTRM = {
/*numRelatives=*/ 3,
/*relatives=*/ {
&__nvoc_rtti_OBJDCECLIENTRM_OBJDCECLIENTRM,
&__nvoc_rtti_OBJDCECLIENTRM_OBJENGSTATE,
&__nvoc_rtti_OBJDCECLIENTRM_Object,
},
};
const struct NVOC_CLASS_DEF __nvoc_class_def_OBJDCECLIENTRM =
{
/*classInfo=*/ {
/*size=*/ sizeof(OBJDCECLIENTRM),
/*classId=*/ classId(OBJDCECLIENTRM),
/*providerId=*/ &__nvoc_rtti_provider,
#if NV_PRINTF_STRINGS_ALLOWED
/*name=*/ "OBJDCECLIENTRM",
#endif
},
/*objCreatefn=*/ (NVOC_DYNAMIC_OBJ_CREATE) &__nvoc_objCreateDynamic_OBJDCECLIENTRM,
/*pCastInfo=*/ &__nvoc_castinfo_OBJDCECLIENTRM,
/*pExportInfo=*/ &__nvoc_export_info_OBJDCECLIENTRM
};
static NV_STATUS __nvoc_thunk_OBJDCECLIENTRM_engstateConstructEngine(struct OBJGPU *arg0, struct OBJENGSTATE *arg1, ENGDESCRIPTOR arg2) {
return dceclientConstructEngine(arg0, (struct OBJDCECLIENTRM *)(((unsigned char *)arg1) - __nvoc_rtti_OBJDCECLIENTRM_OBJENGSTATE.offset), arg2);
}
static void __nvoc_thunk_OBJDCECLIENTRM_engstateStateDestroy(struct OBJGPU *arg0, struct OBJENGSTATE *arg1) {
dceclientStateDestroy(arg0, (struct OBJDCECLIENTRM *)(((unsigned char *)arg1) - __nvoc_rtti_OBJDCECLIENTRM_OBJENGSTATE.offset));
}
static NV_STATUS __nvoc_thunk_OBJDCECLIENTRM_engstateStateLoad(struct OBJGPU *arg0, struct OBJENGSTATE *arg1, NvU32 arg2) {
return dceclientStateLoad(arg0, (struct OBJDCECLIENTRM *)(((unsigned char *)arg1) - __nvoc_rtti_OBJDCECLIENTRM_OBJENGSTATE.offset), arg2);
}
static NV_STATUS __nvoc_thunk_OBJDCECLIENTRM_engstateStateUnload(struct OBJGPU *arg0, struct OBJENGSTATE *arg1, NvU32 arg2) {
return dceclientStateUnload(arg0, (struct OBJDCECLIENTRM *)(((unsigned char *)arg1) - __nvoc_rtti_OBJDCECLIENTRM_OBJENGSTATE.offset), arg2);
}
static NV_STATUS __nvoc_thunk_OBJENGSTATE_dceclientReconcileTunableState(POBJGPU pGpu, struct OBJDCECLIENTRM *pEngstate, void *pTunableState) {
return engstateReconcileTunableState(pGpu, (struct OBJENGSTATE *)(((unsigned char *)pEngstate) + __nvoc_rtti_OBJDCECLIENTRM_OBJENGSTATE.offset), pTunableState);
}
static NV_STATUS __nvoc_thunk_OBJENGSTATE_dceclientStateInitLocked(POBJGPU pGpu, struct OBJDCECLIENTRM *pEngstate) {
return engstateStateInitLocked(pGpu, (struct OBJENGSTATE *)(((unsigned char *)pEngstate) + __nvoc_rtti_OBJDCECLIENTRM_OBJENGSTATE.offset));
}
static NV_STATUS __nvoc_thunk_OBJENGSTATE_dceclientStatePreLoad(POBJGPU pGpu, struct OBJDCECLIENTRM *pEngstate, NvU32 arg0) {
return engstateStatePreLoad(pGpu, (struct OBJENGSTATE *)(((unsigned char *)pEngstate) + __nvoc_rtti_OBJDCECLIENTRM_OBJENGSTATE.offset), arg0);
}
static NV_STATUS __nvoc_thunk_OBJENGSTATE_dceclientStatePostUnload(POBJGPU pGpu, struct OBJDCECLIENTRM *pEngstate, NvU32 arg0) {
return engstateStatePostUnload(pGpu, (struct OBJENGSTATE *)(((unsigned char *)pEngstate) + __nvoc_rtti_OBJDCECLIENTRM_OBJENGSTATE.offset), arg0);
}
static NV_STATUS __nvoc_thunk_OBJENGSTATE_dceclientStatePreUnload(POBJGPU pGpu, struct OBJDCECLIENTRM *pEngstate, NvU32 arg0) {
return engstateStatePreUnload(pGpu, (struct OBJENGSTATE *)(((unsigned char *)pEngstate) + __nvoc_rtti_OBJDCECLIENTRM_OBJENGSTATE.offset), arg0);
}
static NV_STATUS __nvoc_thunk_OBJENGSTATE_dceclientStateInitUnlocked(POBJGPU pGpu, struct OBJDCECLIENTRM *pEngstate) {
return engstateStateInitUnlocked(pGpu, (struct OBJENGSTATE *)(((unsigned char *)pEngstate) + __nvoc_rtti_OBJDCECLIENTRM_OBJENGSTATE.offset));
}
static void __nvoc_thunk_OBJENGSTATE_dceclientInitMissing(POBJGPU pGpu, struct OBJDCECLIENTRM *pEngstate) {
engstateInitMissing(pGpu, (struct OBJENGSTATE *)(((unsigned char *)pEngstate) + __nvoc_rtti_OBJDCECLIENTRM_OBJENGSTATE.offset));
}
static NV_STATUS __nvoc_thunk_OBJENGSTATE_dceclientStatePreInitLocked(POBJGPU pGpu, struct OBJDCECLIENTRM *pEngstate) {
return engstateStatePreInitLocked(pGpu, (struct OBJENGSTATE *)(((unsigned char *)pEngstate) + __nvoc_rtti_OBJDCECLIENTRM_OBJENGSTATE.offset));
}
static NV_STATUS __nvoc_thunk_OBJENGSTATE_dceclientStatePreInitUnlocked(POBJGPU pGpu, struct OBJDCECLIENTRM *pEngstate) {
return engstateStatePreInitUnlocked(pGpu, (struct OBJENGSTATE *)(((unsigned char *)pEngstate) + __nvoc_rtti_OBJDCECLIENTRM_OBJENGSTATE.offset));
}
static NV_STATUS __nvoc_thunk_OBJENGSTATE_dceclientGetTunableState(POBJGPU pGpu, struct OBJDCECLIENTRM *pEngstate, void *pTunableState) {
return engstateGetTunableState(pGpu, (struct OBJENGSTATE *)(((unsigned char *)pEngstate) + __nvoc_rtti_OBJDCECLIENTRM_OBJENGSTATE.offset), pTunableState);
}
static NV_STATUS __nvoc_thunk_OBJENGSTATE_dceclientCompareTunableState(POBJGPU pGpu, struct OBJDCECLIENTRM *pEngstate, void *pTunables1, void *pTunables2) {
return engstateCompareTunableState(pGpu, (struct OBJENGSTATE *)(((unsigned char *)pEngstate) + __nvoc_rtti_OBJDCECLIENTRM_OBJENGSTATE.offset), pTunables1, pTunables2);
}
static void __nvoc_thunk_OBJENGSTATE_dceclientFreeTunableState(POBJGPU pGpu, struct OBJDCECLIENTRM *pEngstate, void *pTunableState) {
engstateFreeTunableState(pGpu, (struct OBJENGSTATE *)(((unsigned char *)pEngstate) + __nvoc_rtti_OBJDCECLIENTRM_OBJENGSTATE.offset), pTunableState);
}
static NV_STATUS __nvoc_thunk_OBJENGSTATE_dceclientStatePostLoad(POBJGPU pGpu, struct OBJDCECLIENTRM *pEngstate, NvU32 arg0) {
return engstateStatePostLoad(pGpu, (struct OBJENGSTATE *)(((unsigned char *)pEngstate) + __nvoc_rtti_OBJDCECLIENTRM_OBJENGSTATE.offset), arg0);
}
static NV_STATUS __nvoc_thunk_OBJENGSTATE_dceclientAllocTunableState(POBJGPU pGpu, struct OBJDCECLIENTRM *pEngstate, void **ppTunableState) {
return engstateAllocTunableState(pGpu, (struct OBJENGSTATE *)(((unsigned char *)pEngstate) + __nvoc_rtti_OBJDCECLIENTRM_OBJENGSTATE.offset), ppTunableState);
}
static NV_STATUS __nvoc_thunk_OBJENGSTATE_dceclientSetTunableState(POBJGPU pGpu, struct OBJDCECLIENTRM *pEngstate, void *pTunableState) {
return engstateSetTunableState(pGpu, (struct OBJENGSTATE *)(((unsigned char *)pEngstate) + __nvoc_rtti_OBJDCECLIENTRM_OBJENGSTATE.offset), pTunableState);
}
static NvBool __nvoc_thunk_OBJENGSTATE_dceclientIsPresent(POBJGPU pGpu, struct OBJDCECLIENTRM *pEngstate) {
return engstateIsPresent(pGpu, (struct OBJENGSTATE *)(((unsigned char *)pEngstate) + __nvoc_rtti_OBJDCECLIENTRM_OBJENGSTATE.offset));
}
const struct NVOC_EXPORT_INFO __nvoc_export_info_OBJDCECLIENTRM =
{
/*numEntries=*/ 0,
/*pExportEntries=*/ 0
};
void __nvoc_dtor_OBJENGSTATE(OBJENGSTATE*);
void __nvoc_dtor_OBJDCECLIENTRM(OBJDCECLIENTRM *pThis) {
__nvoc_dtor_OBJENGSTATE(&pThis->__nvoc_base_OBJENGSTATE);
PORT_UNREFERENCED_VARIABLE(pThis);
}
void __nvoc_init_dataField_OBJDCECLIENTRM(OBJDCECLIENTRM *pThis) {
PORT_UNREFERENCED_VARIABLE(pThis);
}
NV_STATUS __nvoc_ctor_OBJENGSTATE(OBJENGSTATE* );
NV_STATUS __nvoc_ctor_OBJDCECLIENTRM(OBJDCECLIENTRM *pThis) {
NV_STATUS status = NV_OK;
status = __nvoc_ctor_OBJENGSTATE(&pThis->__nvoc_base_OBJENGSTATE);
if (status != NV_OK) goto __nvoc_ctor_OBJDCECLIENTRM_fail_OBJENGSTATE;
__nvoc_init_dataField_OBJDCECLIENTRM(pThis);
goto __nvoc_ctor_OBJDCECLIENTRM_exit; // Success
__nvoc_ctor_OBJDCECLIENTRM_fail_OBJENGSTATE:
__nvoc_ctor_OBJDCECLIENTRM_exit:
return status;
}
static void __nvoc_init_funcTable_OBJDCECLIENTRM_1(OBJDCECLIENTRM *pThis) {
PORT_UNREFERENCED_VARIABLE(pThis);
pThis->__dceclientConstructEngine__ = &dceclientConstructEngine_IMPL;
pThis->__dceclientStateDestroy__ = &dceclientStateDestroy_IMPL;
pThis->__dceclientStateLoad__ = &dceclientStateLoad_IMPL;
pThis->__dceclientStateUnload__ = &dceclientStateUnload_IMPL;
pThis->__nvoc_base_OBJENGSTATE.__engstateConstructEngine__ = &__nvoc_thunk_OBJDCECLIENTRM_engstateConstructEngine;
pThis->__nvoc_base_OBJENGSTATE.__engstateStateDestroy__ = &__nvoc_thunk_OBJDCECLIENTRM_engstateStateDestroy;
pThis->__nvoc_base_OBJENGSTATE.__engstateStateLoad__ = &__nvoc_thunk_OBJDCECLIENTRM_engstateStateLoad;
pThis->__nvoc_base_OBJENGSTATE.__engstateStateUnload__ = &__nvoc_thunk_OBJDCECLIENTRM_engstateStateUnload;
pThis->__dceclientReconcileTunableState__ = &__nvoc_thunk_OBJENGSTATE_dceclientReconcileTunableState;
pThis->__dceclientStateInitLocked__ = &__nvoc_thunk_OBJENGSTATE_dceclientStateInitLocked;
pThis->__dceclientStatePreLoad__ = &__nvoc_thunk_OBJENGSTATE_dceclientStatePreLoad;
pThis->__dceclientStatePostUnload__ = &__nvoc_thunk_OBJENGSTATE_dceclientStatePostUnload;
pThis->__dceclientStatePreUnload__ = &__nvoc_thunk_OBJENGSTATE_dceclientStatePreUnload;
pThis->__dceclientStateInitUnlocked__ = &__nvoc_thunk_OBJENGSTATE_dceclientStateInitUnlocked;
pThis->__dceclientInitMissing__ = &__nvoc_thunk_OBJENGSTATE_dceclientInitMissing;
pThis->__dceclientStatePreInitLocked__ = &__nvoc_thunk_OBJENGSTATE_dceclientStatePreInitLocked;
pThis->__dceclientStatePreInitUnlocked__ = &__nvoc_thunk_OBJENGSTATE_dceclientStatePreInitUnlocked;
pThis->__dceclientGetTunableState__ = &__nvoc_thunk_OBJENGSTATE_dceclientGetTunableState;
pThis->__dceclientCompareTunableState__ = &__nvoc_thunk_OBJENGSTATE_dceclientCompareTunableState;
pThis->__dceclientFreeTunableState__ = &__nvoc_thunk_OBJENGSTATE_dceclientFreeTunableState;
pThis->__dceclientStatePostLoad__ = &__nvoc_thunk_OBJENGSTATE_dceclientStatePostLoad;
pThis->__dceclientAllocTunableState__ = &__nvoc_thunk_OBJENGSTATE_dceclientAllocTunableState;
pThis->__dceclientSetTunableState__ = &__nvoc_thunk_OBJENGSTATE_dceclientSetTunableState;
pThis->__dceclientIsPresent__ = &__nvoc_thunk_OBJENGSTATE_dceclientIsPresent;
}
void __nvoc_init_funcTable_OBJDCECLIENTRM(OBJDCECLIENTRM *pThis) {
__nvoc_init_funcTable_OBJDCECLIENTRM_1(pThis);
}
void __nvoc_init_OBJENGSTATE(OBJENGSTATE*);
void __nvoc_init_OBJDCECLIENTRM(OBJDCECLIENTRM *pThis) {
pThis->__nvoc_pbase_OBJDCECLIENTRM = pThis;
pThis->__nvoc_pbase_Object = &pThis->__nvoc_base_OBJENGSTATE.__nvoc_base_Object;
pThis->__nvoc_pbase_OBJENGSTATE = &pThis->__nvoc_base_OBJENGSTATE;
__nvoc_init_OBJENGSTATE(&pThis->__nvoc_base_OBJENGSTATE);
__nvoc_init_funcTable_OBJDCECLIENTRM(pThis);
}
NV_STATUS __nvoc_objCreate_OBJDCECLIENTRM(OBJDCECLIENTRM **ppThis, Dynamic *pParent, NvU32 createFlags) {
NV_STATUS status;
Object *pParentObj;
OBJDCECLIENTRM *pThis;
pThis = portMemAllocNonPaged(sizeof(OBJDCECLIENTRM));
if (pThis == NULL) return NV_ERR_NO_MEMORY;
portMemSet(pThis, 0, sizeof(OBJDCECLIENTRM));
__nvoc_initRtti(staticCast(pThis, Dynamic), &__nvoc_class_def_OBJDCECLIENTRM);
if (pParent != NULL && !(createFlags & NVOC_OBJ_CREATE_FLAGS_PARENT_HALSPEC_ONLY))
{
pParentObj = dynamicCast(pParent, Object);
objAddChild(pParentObj, &pThis->__nvoc_base_OBJENGSTATE.__nvoc_base_Object);
}
else
{
pThis->__nvoc_base_OBJENGSTATE.__nvoc_base_Object.pParent = NULL;
}
__nvoc_init_OBJDCECLIENTRM(pThis);
status = __nvoc_ctor_OBJDCECLIENTRM(pThis);
if (status != NV_OK) goto __nvoc_objCreate_OBJDCECLIENTRM_cleanup;
*ppThis = pThis;
return NV_OK;
__nvoc_objCreate_OBJDCECLIENTRM_cleanup:
// do not call destructors here since the constructor already called them
portMemFree(pThis);
return status;
}
NV_STATUS __nvoc_objCreateDynamic_OBJDCECLIENTRM(OBJDCECLIENTRM **ppThis, Dynamic *pParent, NvU32 createFlags, va_list args) {
NV_STATUS status;
status = __nvoc_objCreate_OBJDCECLIENTRM(ppThis, pParent, createFlags);
return status;
}
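__nvoc_init_funcTable_OBJDCECLIENTRM_1 above wires methods in two directions: the four dceclient*_IMPL overrides are installed into the base OBJENGSTATE slots through __nvoc_thunk_OBJDCECLIENTRM_* (so calls made via the base reach the derived code), while the remaining dceclient* slots forward back into the generic engstate* implementations. A compilable sketch of the override direction, with illustrative Engine/DceEngine names:

#include <stddef.h>
#include <stdio.h>

typedef struct Engine Engine;
struct Engine {
    int (*__engineLoad__)(Engine *pThis, int flags);
};

typedef struct {
    Engine base;                      /* embedded base subobject */
    int    loadedFlags;
} DceEngine;

/* derived implementation */
static int dceLoad_IMPL(DceEngine *pThis, int flags)
{
    pThis->loadedFlags = flags;
    return 0;
}

/* thunk: recover the derived pointer from the base pointer, then call */
static int thunk_engineLoad(Engine *pBase, int flags)
{
    DceEngine *pThis =
        (DceEngine *)((unsigned char *)pBase - offsetof(DceEngine, base));
    return dceLoad_IMPL(pThis, flags);
}

static void initFuncTable_DceEngine(DceEngine *pThis)
{
    /* override: the base slot now points at the derived code */
    pThis->base.__engineLoad__ = &thunk_engineLoad;
}

int main(void)
{
    DceEngine e = { 0 };
    initFuncTable_DceEngine(&e);
    Engine *pBase = &e.base;
    pBase->__engineLoad__(pBase, 0x10);   /* base-side call lands in dceLoad_IMPL */
    printf("%d\n", e.loadedFlags);        /* prints 16 */
    return 0;
}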


@@ -0,0 +1,349 @@
#ifndef _G_DCE_CLIENT_NVOC_H_
#define _G_DCE_CLIENT_NVOC_H_
#include "nvoc/runtime.h"
#ifdef __cplusplus
extern "C" {
#endif
/*
* SPDX-FileCopyrightText: Copyright (c) 2020-2021 NVIDIA CORPORATION & AFFILIATES. All rights reserved.
* SPDX-License-Identifier: MIT
*
* Permission is hereby granted, free of charge, to any person obtaining a
* copy of this software and associated documentation files (the "Software"),
* to deal in the Software without restriction, including without limitation
* the rights to use, copy, modify, merge, publish, distribute, sublicense,
* and/or sell copies of the Software, and to permit persons to whom the
* Software is furnished to do so, subject to the following conditions:
*
* The above copyright notice and this permission notice shall be included in
* all copies or substantial portions of the Software.
*
* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
* IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
* FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL
* THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
* LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
* FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER
* DEALINGS IN THE SOFTWARE.
*/
#include "g_dce_client_nvoc.h"
#ifndef _DCE_CLIENT_H_
#define _DCE_CLIENT_H_
/*!
* @file dce_client.h
* @brief Provides definitions for all DceClient data structures and interfaces.
*/
#include "gpu/eng_state.h"
#include "core/core.h"
#include "objrpc.h"
#include "os/dce_rm_client_ipc.h"
#include "class/cl0000.h"
#include "class/cl0080.h"
#include "class/cl2080.h"
#include "class/cl0073.h"
#include "class/clc372sw.h"
typedef struct
{
NvHandle hClient;
NvHandle hParent;
NvHandle hObject;
NvU32 hClass;
NV0000_ALLOC_PARAMETERS rootAllocParams;
NvBool valid;
} ROOT;
typedef struct
{
NvHandle hClient;
NvHandle hParent;
NvHandle hObject;
NvU32 hClass;
NV0080_ALLOC_PARAMETERS deviceAllocParams;
NvBool valid;
} DEVICE;
typedef struct
{
NvHandle hClient;
NvHandle hParent;
NvHandle hObject;
NvU32 hClass;
NV2080_ALLOC_PARAMETERS subdeviceAllocParams;
NvBool valid;
} SUBDEVICE;
typedef struct
{
NvHandle hClient;
NvHandle hParent;
NvHandle hObject;
NvU32 hClass;
NVOS21_PARAMETERS displayCommonAllocParams;
NvBool valid;
} DISPLAY_COMMON;
typedef struct
{
NvHandle hClient;
NvHandle hParent;
NvHandle hObject;
NvU32 hClass;
NVOS21_PARAMETERS displaySWAllocParams;
NvBool valid;
} DISPLAY_SW;
/*!
* Maximum number of RM clients
*/
#define MAX_RM_CLIENTS 5
/*!
* Temporary alias of DceClient to OBJDCECLIENTRM
*/
#define DceClient OBJDCECLIENTRM
/*!
* Defines the structure used to contain all generic information related to
* the DceClient.
*/
#ifdef NVOC_DCE_CLIENT_H_PRIVATE_ACCESS_ALLOWED
#define PRIVATE_FIELD(x) x
#else
#define PRIVATE_FIELD(x) NVOC_PRIVATE_FIELD(x)
#endif
struct OBJDCECLIENTRM {
const struct NVOC_RTTI *__nvoc_rtti;
struct OBJENGSTATE __nvoc_base_OBJENGSTATE;
struct Object *__nvoc_pbase_Object;
struct OBJENGSTATE *__nvoc_pbase_OBJENGSTATE;
struct OBJDCECLIENTRM *__nvoc_pbase_OBJDCECLIENTRM;
NV_STATUS (*__dceclientConstructEngine__)(struct OBJGPU *, struct OBJDCECLIENTRM *, ENGDESCRIPTOR);
void (*__dceclientStateDestroy__)(struct OBJGPU *, struct OBJDCECLIENTRM *);
NV_STATUS (*__dceclientStateLoad__)(struct OBJGPU *, struct OBJDCECLIENTRM *, NvU32);
NV_STATUS (*__dceclientStateUnload__)(struct OBJGPU *, struct OBJDCECLIENTRM *, NvU32);
NV_STATUS (*__dceclientReconcileTunableState__)(POBJGPU, struct OBJDCECLIENTRM *, void *);
NV_STATUS (*__dceclientStateInitLocked__)(POBJGPU, struct OBJDCECLIENTRM *);
NV_STATUS (*__dceclientStatePreLoad__)(POBJGPU, struct OBJDCECLIENTRM *, NvU32);
NV_STATUS (*__dceclientStatePostUnload__)(POBJGPU, struct OBJDCECLIENTRM *, NvU32);
NV_STATUS (*__dceclientStatePreUnload__)(POBJGPU, struct OBJDCECLIENTRM *, NvU32);
NV_STATUS (*__dceclientStateInitUnlocked__)(POBJGPU, struct OBJDCECLIENTRM *);
void (*__dceclientInitMissing__)(POBJGPU, struct OBJDCECLIENTRM *);
NV_STATUS (*__dceclientStatePreInitLocked__)(POBJGPU, struct OBJDCECLIENTRM *);
NV_STATUS (*__dceclientStatePreInitUnlocked__)(POBJGPU, struct OBJDCECLIENTRM *);
NV_STATUS (*__dceclientGetTunableState__)(POBJGPU, struct OBJDCECLIENTRM *, void *);
NV_STATUS (*__dceclientCompareTunableState__)(POBJGPU, struct OBJDCECLIENTRM *, void *, void *);
void (*__dceclientFreeTunableState__)(POBJGPU, struct OBJDCECLIENTRM *, void *);
NV_STATUS (*__dceclientStatePostLoad__)(POBJGPU, struct OBJDCECLIENTRM *, NvU32);
NV_STATUS (*__dceclientAllocTunableState__)(POBJGPU, struct OBJDCECLIENTRM *, void **);
NV_STATUS (*__dceclientSetTunableState__)(POBJGPU, struct OBJDCECLIENTRM *, void *);
NvBool (*__dceclientIsPresent__)(POBJGPU, struct OBJDCECLIENTRM *);
struct OBJRPC *pRpc;
NvU32 clientId[2];
};
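/*
 * The __dceclient*__ function pointers above are the per-object NVOC dispatch
 * slots populated by __nvoc_init_funcTable_OBJDCECLIENTRM(), bound either to
 * this class's _IMPL functions or to implementations inherited from
 * OBJENGSTATE; pRpc and clientId[] hold the DCE client's RPC/IPC state.
 */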
#ifndef __NVOC_CLASS_OBJDCECLIENTRM_TYPEDEF__
#define __NVOC_CLASS_OBJDCECLIENTRM_TYPEDEF__
typedef struct OBJDCECLIENTRM OBJDCECLIENTRM;
#endif /* __NVOC_CLASS_OBJDCECLIENTRM_TYPEDEF__ */
#ifndef __nvoc_class_id_OBJDCECLIENTRM
#define __nvoc_class_id_OBJDCECLIENTRM 0x61649c
#endif /* __nvoc_class_id_OBJDCECLIENTRM */
extern const struct NVOC_CLASS_DEF __nvoc_class_def_OBJDCECLIENTRM;
#define __staticCast_OBJDCECLIENTRM(pThis) \
((pThis)->__nvoc_pbase_OBJDCECLIENTRM)
#ifdef __nvoc_dce_client_h_disabled
#define __dynamicCast_OBJDCECLIENTRM(pThis) ((OBJDCECLIENTRM*)NULL)
#else //__nvoc_dce_client_h_disabled
#define __dynamicCast_OBJDCECLIENTRM(pThis) \
((OBJDCECLIENTRM*)__nvoc_dynamicCast(staticCast((pThis), Dynamic), classInfo(OBJDCECLIENTRM)))
#endif //__nvoc_dce_client_h_disabled
#define PDB_PROP_DCECLIENT_IS_MISSING_BASE_CAST __nvoc_base_OBJENGSTATE.
#define PDB_PROP_DCECLIENT_IS_MISSING_BASE_NAME PDB_PROP_ENGSTATE_IS_MISSING
NV_STATUS __nvoc_objCreateDynamic_OBJDCECLIENTRM(OBJDCECLIENTRM**, Dynamic*, NvU32, va_list);
NV_STATUS __nvoc_objCreate_OBJDCECLIENTRM(OBJDCECLIENTRM**, Dynamic*, NvU32);
#define __objCreate_OBJDCECLIENTRM(ppNewObj, pParent, createFlags) \
__nvoc_objCreate_OBJDCECLIENTRM((ppNewObj), staticCast((pParent), Dynamic), (createFlags))
#define dceclientConstructEngine(arg0, arg1, arg2) dceclientConstructEngine_DISPATCH(arg0, arg1, arg2)
#define dceclientStateDestroy(arg0, arg1) dceclientStateDestroy_DISPATCH(arg0, arg1)
#define dceclientStateLoad(arg0, arg1, arg2) dceclientStateLoad_DISPATCH(arg0, arg1, arg2)
#define dceclientStateUnload(arg0, arg1, arg2) dceclientStateUnload_DISPATCH(arg0, arg1, arg2)
#define dceclientReconcileTunableState(pGpu, pEngstate, pTunableState) dceclientReconcileTunableState_DISPATCH(pGpu, pEngstate, pTunableState)
#define dceclientStateInitLocked(pGpu, pEngstate) dceclientStateInitLocked_DISPATCH(pGpu, pEngstate)
#define dceclientStatePreLoad(pGpu, pEngstate, arg0) dceclientStatePreLoad_DISPATCH(pGpu, pEngstate, arg0)
#define dceclientStatePostUnload(pGpu, pEngstate, arg0) dceclientStatePostUnload_DISPATCH(pGpu, pEngstate, arg0)
#define dceclientStatePreUnload(pGpu, pEngstate, arg0) dceclientStatePreUnload_DISPATCH(pGpu, pEngstate, arg0)
#define dceclientStateInitUnlocked(pGpu, pEngstate) dceclientStateInitUnlocked_DISPATCH(pGpu, pEngstate)
#define dceclientInitMissing(pGpu, pEngstate) dceclientInitMissing_DISPATCH(pGpu, pEngstate)
#define dceclientStatePreInitLocked(pGpu, pEngstate) dceclientStatePreInitLocked_DISPATCH(pGpu, pEngstate)
#define dceclientStatePreInitUnlocked(pGpu, pEngstate) dceclientStatePreInitUnlocked_DISPATCH(pGpu, pEngstate)
#define dceclientGetTunableState(pGpu, pEngstate, pTunableState) dceclientGetTunableState_DISPATCH(pGpu, pEngstate, pTunableState)
#define dceclientCompareTunableState(pGpu, pEngstate, pTunables1, pTunables2) dceclientCompareTunableState_DISPATCH(pGpu, pEngstate, pTunables1, pTunables2)
#define dceclientFreeTunableState(pGpu, pEngstate, pTunableState) dceclientFreeTunableState_DISPATCH(pGpu, pEngstate, pTunableState)
#define dceclientStatePostLoad(pGpu, pEngstate, arg0) dceclientStatePostLoad_DISPATCH(pGpu, pEngstate, arg0)
#define dceclientAllocTunableState(pGpu, pEngstate, ppTunableState) dceclientAllocTunableState_DISPATCH(pGpu, pEngstate, ppTunableState)
#define dceclientSetTunableState(pGpu, pEngstate, pTunableState) dceclientSetTunableState_DISPATCH(pGpu, pEngstate, pTunableState)
#define dceclientIsPresent(pGpu, pEngstate) dceclientIsPresent_DISPATCH(pGpu, pEngstate)
NV_STATUS dceclientConstructEngine_IMPL(struct OBJGPU *arg0, struct OBJDCECLIENTRM *arg1, ENGDESCRIPTOR arg2);
static inline NV_STATUS dceclientConstructEngine_DISPATCH(struct OBJGPU *arg0, struct OBJDCECLIENTRM *arg1, ENGDESCRIPTOR arg2) {
return arg1->__dceclientConstructEngine__(arg0, arg1, arg2);
}
void dceclientStateDestroy_IMPL(struct OBJGPU *arg0, struct OBJDCECLIENTRM *arg1);
static inline void dceclientStateDestroy_DISPATCH(struct OBJGPU *arg0, struct OBJDCECLIENTRM *arg1) {
arg1->__dceclientStateDestroy__(arg0, arg1);
}
NV_STATUS dceclientStateLoad_IMPL(struct OBJGPU *arg0, struct OBJDCECLIENTRM *arg1, NvU32 arg2);
static inline NV_STATUS dceclientStateLoad_DISPATCH(struct OBJGPU *arg0, struct OBJDCECLIENTRM *arg1, NvU32 arg2) {
return arg1->__dceclientStateLoad__(arg0, arg1, arg2);
}
NV_STATUS dceclientStateUnload_IMPL(struct OBJGPU *arg0, struct OBJDCECLIENTRM *arg1, NvU32 arg2);
static inline NV_STATUS dceclientStateUnload_DISPATCH(struct OBJGPU *arg0, struct OBJDCECLIENTRM *arg1, NvU32 arg2) {
return arg1->__dceclientStateUnload__(arg0, arg1, arg2);
}
static inline NV_STATUS dceclientReconcileTunableState_DISPATCH(POBJGPU pGpu, struct OBJDCECLIENTRM *pEngstate, void *pTunableState) {
return pEngstate->__dceclientReconcileTunableState__(pGpu, pEngstate, pTunableState);
}
static inline NV_STATUS dceclientStateInitLocked_DISPATCH(POBJGPU pGpu, struct OBJDCECLIENTRM *pEngstate) {
return pEngstate->__dceclientStateInitLocked__(pGpu, pEngstate);
}
static inline NV_STATUS dceclientStatePreLoad_DISPATCH(POBJGPU pGpu, struct OBJDCECLIENTRM *pEngstate, NvU32 arg0) {
return pEngstate->__dceclientStatePreLoad__(pGpu, pEngstate, arg0);
}
static inline NV_STATUS dceclientStatePostUnload_DISPATCH(POBJGPU pGpu, struct OBJDCECLIENTRM *pEngstate, NvU32 arg0) {
return pEngstate->__dceclientStatePostUnload__(pGpu, pEngstate, arg0);
}
static inline NV_STATUS dceclientStatePreUnload_DISPATCH(POBJGPU pGpu, struct OBJDCECLIENTRM *pEngstate, NvU32 arg0) {
return pEngstate->__dceclientStatePreUnload__(pGpu, pEngstate, arg0);
}
static inline NV_STATUS dceclientStateInitUnlocked_DISPATCH(POBJGPU pGpu, struct OBJDCECLIENTRM *pEngstate) {
return pEngstate->__dceclientStateInitUnlocked__(pGpu, pEngstate);
}
static inline void dceclientInitMissing_DISPATCH(POBJGPU pGpu, struct OBJDCECLIENTRM *pEngstate) {
pEngstate->__dceclientInitMissing__(pGpu, pEngstate);
}
static inline NV_STATUS dceclientStatePreInitLocked_DISPATCH(POBJGPU pGpu, struct OBJDCECLIENTRM *pEngstate) {
return pEngstate->__dceclientStatePreInitLocked__(pGpu, pEngstate);
}
static inline NV_STATUS dceclientStatePreInitUnlocked_DISPATCH(POBJGPU pGpu, struct OBJDCECLIENTRM *pEngstate) {
return pEngstate->__dceclientStatePreInitUnlocked__(pGpu, pEngstate);
}
static inline NV_STATUS dceclientGetTunableState_DISPATCH(POBJGPU pGpu, struct OBJDCECLIENTRM *pEngstate, void *pTunableState) {
return pEngstate->__dceclientGetTunableState__(pGpu, pEngstate, pTunableState);
}
static inline NV_STATUS dceclientCompareTunableState_DISPATCH(POBJGPU pGpu, struct OBJDCECLIENTRM *pEngstate, void *pTunables1, void *pTunables2) {
return pEngstate->__dceclientCompareTunableState__(pGpu, pEngstate, pTunables1, pTunables2);
}
static inline void dceclientFreeTunableState_DISPATCH(POBJGPU pGpu, struct OBJDCECLIENTRM *pEngstate, void *pTunableState) {
pEngstate->__dceclientFreeTunableState__(pGpu, pEngstate, pTunableState);
}
static inline NV_STATUS dceclientStatePostLoad_DISPATCH(POBJGPU pGpu, struct OBJDCECLIENTRM *pEngstate, NvU32 arg0) {
return pEngstate->__dceclientStatePostLoad__(pGpu, pEngstate, arg0);
}
static inline NV_STATUS dceclientAllocTunableState_DISPATCH(POBJGPU pGpu, struct OBJDCECLIENTRM *pEngstate, void **ppTunableState) {
return pEngstate->__dceclientAllocTunableState__(pGpu, pEngstate, ppTunableState);
}
static inline NV_STATUS dceclientSetTunableState_DISPATCH(POBJGPU pGpu, struct OBJDCECLIENTRM *pEngstate, void *pTunableState) {
return pEngstate->__dceclientSetTunableState__(pGpu, pEngstate, pTunableState);
}
static inline NvBool dceclientIsPresent_DISPATCH(POBJGPU pGpu, struct OBJDCECLIENTRM *pEngstate) {
return pEngstate->__dceclientIsPresent__(pGpu, pEngstate);
}
NV_STATUS dceclientInitRpcInfra_IMPL(struct OBJGPU *arg0, struct OBJDCECLIENTRM *arg1);
#ifdef __nvoc_dce_client_h_disabled
static inline NV_STATUS dceclientInitRpcInfra(struct OBJGPU *arg0, struct OBJDCECLIENTRM *arg1) {
NV_ASSERT_FAILED_PRECOMP("OBJDCECLIENTRM was disabled!");
return NV_ERR_NOT_SUPPORTED;
}
#else //__nvoc_dce_client_h_disabled
#define dceclientInitRpcInfra(arg0, arg1) dceclientInitRpcInfra_IMPL(arg0, arg1)
#endif //__nvoc_dce_client_h_disabled
void dceclientDeinitRpcInfra_IMPL(struct OBJDCECLIENTRM *arg0);
#ifdef __nvoc_dce_client_h_disabled
static inline void dceclientDeinitRpcInfra(struct OBJDCECLIENTRM *arg0) {
NV_ASSERT_FAILED_PRECOMP("OBJDCECLIENTRM was disabled!");
}
#else //__nvoc_dce_client_h_disabled
#define dceclientDeinitRpcInfra(arg0) dceclientDeinitRpcInfra_IMPL(arg0)
#endif //__nvoc_dce_client_h_disabled
NV_STATUS dceclientDceRmInit_IMPL(struct OBJGPU *arg0, struct OBJDCECLIENTRM *arg1, NvBool arg2);
#ifdef __nvoc_dce_client_h_disabled
static inline NV_STATUS dceclientDceRmInit(struct OBJGPU *arg0, struct OBJDCECLIENTRM *arg1, NvBool arg2) {
NV_ASSERT_FAILED_PRECOMP("OBJDCECLIENTRM was disabled!");
return NV_ERR_NOT_SUPPORTED;
}
#else //__nvoc_dce_client_h_disabled
#define dceclientDceRmInit(arg0, arg1, arg2) dceclientDceRmInit_IMPL(arg0, arg1, arg2)
#endif //__nvoc_dce_client_h_disabled
NV_STATUS dceclientSendRpc_IMPL(struct OBJDCECLIENTRM *arg0, void *arg1, NvU32 arg2);
#ifdef __nvoc_dce_client_h_disabled
static inline NV_STATUS dceclientSendRpc(struct OBJDCECLIENTRM *arg0, void *arg1, NvU32 arg2) {
NV_ASSERT_FAILED_PRECOMP("OBJDCECLIENTRM was disabled!");
return NV_ERR_NOT_SUPPORTED;
}
#else //__nvoc_dce_client_h_disabled
#define dceclientSendRpc(arg0, arg1, arg2) dceclientSendRpc_IMPL(arg0, arg1, arg2)
#endif //__nvoc_dce_client_h_disabled
#undef PRIVATE_FIELD
NV_STATUS rpcRmApiControl_dce(RM_API *pRmApi,
NvHandle hClient, NvHandle hObject,
NvU32 cmd, void *pParamStructPtr,
NvU32 paramsSize);
NV_STATUS rpcRmApiAlloc_dce(RM_API *pRmApi,
NvHandle hClient, NvHandle hParent,
NvHandle hObject, NvU32 hClass,
void *pAllocParams);
NV_STATUS rpcRmApiDupObject_dce(RM_API *pRmApi, NvHandle hClient,
NvHandle hParent, NvHandle *phObject, NvHandle hClientSrc,
NvHandle hObjectSrc, NvU32 flags);
NV_STATUS rpcRmApiFree_dce(RM_API *pRmApi, NvHandle hClient, NvHandle hObject);
NV_STATUS rpcDceRmInit_dce(RM_API *pRmApi, NvBool bInit);
#endif
#ifdef __cplusplus
} // extern "C"
#endif
#endif // _G_DCE_CLIENT_NVOC_H_
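
The *_DISPATCH inlines above are how NVOC emulates virtual dispatch in plain C: each public macro (dceclientStateLoad and friends) expands to an inline that calls through the function pointer stored on the object, which the generated func-table init code pointed at either this class's _IMPL or an inherited OBJENGSTATE implementation. A reduced, self-contained sketch of the same mechanism, with hypothetical names:

/* Hypothetical, reduced illustration of the NVOC dispatch pattern. */
typedef struct Engine
{
    int (*__engStateLoad__)(struct Engine *, unsigned flags);  /* per-object "vtable" slot */
} Engine;

static int engStateLoad_IMPL(Engine *pEng, unsigned flags)
{
    (void)pEng; (void)flags;
    return 0;
}

/* Generated init_funcTable step: bind the slot to the implementation. */
static void engInitFuncTable(Engine *pEng)
{
    pEng->__engStateLoad__ = engStateLoad_IMPL;
}

/* Generated _DISPATCH wrapper: what the public engStateLoad() macro would expand to. */
static inline int engStateLoad_DISPATCH(Engine *pEng, unsigned flags)
{
    return pEng->__engStateLoad__(pEng, flags);
}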

@@ -0,0 +1,550 @@
#define NVOC_DEVICE_H_PRIVATE_ACCESS_ALLOWED
#include "nvoc/runtime.h"
#include "nvoc/rtti.h"
#include "nvtypes.h"
#include "nvport/nvport.h"
#include "nvport/inline/util_valist.h"
#include "utils/nvassert.h"
#include "g_device_nvoc.h"
#ifdef DEBUG
char __nvoc_class_id_uniqueness_check_0xe0ac20 = 1;
#endif
extern const struct NVOC_CLASS_DEF __nvoc_class_def_Device;
extern const struct NVOC_CLASS_DEF __nvoc_class_def_Object;
extern const struct NVOC_CLASS_DEF __nvoc_class_def_RsResource;
extern const struct NVOC_CLASS_DEF __nvoc_class_def_RmResourceCommon;
extern const struct NVOC_CLASS_DEF __nvoc_class_def_RmResource;
extern const struct NVOC_CLASS_DEF __nvoc_class_def_GpuResource;
void __nvoc_init_Device(Device*);
void __nvoc_init_funcTable_Device(Device*);
NV_STATUS __nvoc_ctor_Device(Device*, struct CALL_CONTEXT * arg_pCallContext, struct RS_RES_ALLOC_PARAMS_INTERNAL * arg_pParams);
void __nvoc_init_dataField_Device(Device*);
void __nvoc_dtor_Device(Device*);
extern const struct NVOC_EXPORT_INFO __nvoc_export_info_Device;
static const struct NVOC_RTTI __nvoc_rtti_Device_Device = {
/*pClassDef=*/ &__nvoc_class_def_Device,
/*dtor=*/ (NVOC_DYNAMIC_DTOR) &__nvoc_dtor_Device,
/*offset=*/ 0,
};
static const struct NVOC_RTTI __nvoc_rtti_Device_Object = {
/*pClassDef=*/ &__nvoc_class_def_Object,
/*dtor=*/ &__nvoc_destructFromBase,
/*offset=*/ NV_OFFSETOF(Device, __nvoc_base_GpuResource.__nvoc_base_RmResource.__nvoc_base_RsResource.__nvoc_base_Object),
};
static const struct NVOC_RTTI __nvoc_rtti_Device_RsResource = {
/*pClassDef=*/ &__nvoc_class_def_RsResource,
/*dtor=*/ &__nvoc_destructFromBase,
/*offset=*/ NV_OFFSETOF(Device, __nvoc_base_GpuResource.__nvoc_base_RmResource.__nvoc_base_RsResource),
};
static const struct NVOC_RTTI __nvoc_rtti_Device_RmResourceCommon = {
/*pClassDef=*/ &__nvoc_class_def_RmResourceCommon,
/*dtor=*/ &__nvoc_destructFromBase,
/*offset=*/ NV_OFFSETOF(Device, __nvoc_base_GpuResource.__nvoc_base_RmResource.__nvoc_base_RmResourceCommon),
};
static const struct NVOC_RTTI __nvoc_rtti_Device_RmResource = {
/*pClassDef=*/ &__nvoc_class_def_RmResource,
/*dtor=*/ &__nvoc_destructFromBase,
/*offset=*/ NV_OFFSETOF(Device, __nvoc_base_GpuResource.__nvoc_base_RmResource),
};
static const struct NVOC_RTTI __nvoc_rtti_Device_GpuResource = {
/*pClassDef=*/ &__nvoc_class_def_GpuResource,
/*dtor=*/ &__nvoc_destructFromBase,
/*offset=*/ NV_OFFSETOF(Device, __nvoc_base_GpuResource),
};
static const struct NVOC_CASTINFO __nvoc_castinfo_Device = {
/*numRelatives=*/ 6,
/*relatives=*/ {
&__nvoc_rtti_Device_Device,
&__nvoc_rtti_Device_GpuResource,
&__nvoc_rtti_Device_RmResource,
&__nvoc_rtti_Device_RmResourceCommon,
&__nvoc_rtti_Device_RsResource,
&__nvoc_rtti_Device_Object,
},
};
const struct NVOC_CLASS_DEF __nvoc_class_def_Device =
{
/*classInfo=*/ {
/*size=*/ sizeof(Device),
/*classId=*/ classId(Device),
/*providerId=*/ &__nvoc_rtti_provider,
#if NV_PRINTF_STRINGS_ALLOWED
/*name=*/ "Device",
#endif
},
/*objCreatefn=*/ (NVOC_DYNAMIC_OBJ_CREATE) &__nvoc_objCreateDynamic_Device,
/*pCastInfo=*/ &__nvoc_castinfo_Device,
/*pExportInfo=*/ &__nvoc_export_info_Device
};
static NV_STATUS __nvoc_thunk_Device_gpuresControl(struct GpuResource *pResource, struct CALL_CONTEXT *pCallContext, struct RS_RES_CONTROL_PARAMS_INTERNAL *pParams) {
return deviceControl((struct Device *)(((unsigned char *)pResource) - __nvoc_rtti_Device_GpuResource.offset), pCallContext, pParams);
}
static NV_STATUS __nvoc_thunk_Device_gpuresInternalControlForward(struct GpuResource *pDevice, NvU32 command, void *pParams, NvU32 size) {
return deviceInternalControlForward((struct Device *)(((unsigned char *)pDevice) - __nvoc_rtti_Device_GpuResource.offset), command, pParams, size);
}
static NvBool __nvoc_thunk_GpuResource_deviceShareCallback(struct Device *pGpuResource, struct RsClient *pInvokingClient, struct RsResourceRef *pParentRef, RS_SHARE_POLICY *pSharePolicy) {
return gpuresShareCallback((struct GpuResource *)(((unsigned char *)pGpuResource) + __nvoc_rtti_Device_GpuResource.offset), pInvokingClient, pParentRef, pSharePolicy);
}
static NV_STATUS __nvoc_thunk_GpuResource_deviceUnmap(struct Device *pGpuResource, struct CALL_CONTEXT *pCallContext, struct RsCpuMapping *pCpuMapping) {
return gpuresUnmap((struct GpuResource *)(((unsigned char *)pGpuResource) + __nvoc_rtti_Device_GpuResource.offset), pCallContext, pCpuMapping);
}
static NV_STATUS __nvoc_thunk_RmResource_deviceGetMemInterMapParams(struct Device *pRmResource, RMRES_MEM_INTER_MAP_PARAMS *pParams) {
return rmresGetMemInterMapParams((struct RmResource *)(((unsigned char *)pRmResource) + __nvoc_rtti_Device_RmResource.offset), pParams);
}
static NV_STATUS __nvoc_thunk_RmResource_deviceGetMemoryMappingDescriptor(struct Device *pRmResource, struct MEMORY_DESCRIPTOR **ppMemDesc) {
return rmresGetMemoryMappingDescriptor((struct RmResource *)(((unsigned char *)pRmResource) + __nvoc_rtti_Device_RmResource.offset), ppMemDesc);
}
static NV_STATUS __nvoc_thunk_GpuResource_deviceGetMapAddrSpace(struct Device *pGpuResource, struct CALL_CONTEXT *pCallContext, NvU32 mapFlags, NV_ADDRESS_SPACE *pAddrSpace) {
return gpuresGetMapAddrSpace((struct GpuResource *)(((unsigned char *)pGpuResource) + __nvoc_rtti_Device_GpuResource.offset), pCallContext, mapFlags, pAddrSpace);
}
static NvHandle __nvoc_thunk_GpuResource_deviceGetInternalObjectHandle(struct Device *pGpuResource) {
return gpuresGetInternalObjectHandle((struct GpuResource *)(((unsigned char *)pGpuResource) + __nvoc_rtti_Device_GpuResource.offset));
}
static NV_STATUS __nvoc_thunk_RsResource_deviceControlFilter(struct Device *pResource, struct CALL_CONTEXT *pCallContext, struct RS_RES_CONTROL_PARAMS_INTERNAL *pParams) {
return resControlFilter((struct RsResource *)(((unsigned char *)pResource) + __nvoc_rtti_Device_RsResource.offset), pCallContext, pParams);
}
static void __nvoc_thunk_RsResource_deviceAddAdditionalDependants(struct RsClient *pClient, struct Device *pResource, RsResourceRef *pReference) {
resAddAdditionalDependants(pClient, (struct RsResource *)(((unsigned char *)pResource) + __nvoc_rtti_Device_RsResource.offset), pReference);
}
static NvU32 __nvoc_thunk_RsResource_deviceGetRefCount(struct Device *pResource) {
return resGetRefCount((struct RsResource *)(((unsigned char *)pResource) + __nvoc_rtti_Device_RsResource.offset));
}
static NV_STATUS __nvoc_thunk_RmResource_deviceCheckMemInterUnmap(struct Device *pRmResource, NvBool bSubdeviceHandleProvided) {
return rmresCheckMemInterUnmap((struct RmResource *)(((unsigned char *)pRmResource) + __nvoc_rtti_Device_RmResource.offset), bSubdeviceHandleProvided);
}
static NV_STATUS __nvoc_thunk_RsResource_deviceMapTo(struct Device *pResource, RS_RES_MAP_TO_PARAMS *pParams) {
return resMapTo((struct RsResource *)(((unsigned char *)pResource) + __nvoc_rtti_Device_RsResource.offset), pParams);
}
static NV_STATUS __nvoc_thunk_RmResource_deviceControl_Prologue(struct Device *pResource, struct CALL_CONTEXT *pCallContext, struct RS_RES_CONTROL_PARAMS_INTERNAL *pParams) {
return rmresControl_Prologue((struct RmResource *)(((unsigned char *)pResource) + __nvoc_rtti_Device_RmResource.offset), pCallContext, pParams);
}
static NV_STATUS __nvoc_thunk_GpuResource_deviceGetRegBaseOffsetAndSize(struct Device *pGpuResource, struct OBJGPU *pGpu, NvU32 *pOffset, NvU32 *pSize) {
return gpuresGetRegBaseOffsetAndSize((struct GpuResource *)(((unsigned char *)pGpuResource) + __nvoc_rtti_Device_GpuResource.offset), pGpu, pOffset, pSize);
}
static NvBool __nvoc_thunk_RsResource_deviceCanCopy(struct Device *pResource) {
return resCanCopy((struct RsResource *)(((unsigned char *)pResource) + __nvoc_rtti_Device_RsResource.offset));
}
static void __nvoc_thunk_RsResource_devicePreDestruct(struct Device *pResource) {
resPreDestruct((struct RsResource *)(((unsigned char *)pResource) + __nvoc_rtti_Device_RsResource.offset));
}
static NV_STATUS __nvoc_thunk_RsResource_deviceUnmapFrom(struct Device *pResource, RS_RES_UNMAP_FROM_PARAMS *pParams) {
return resUnmapFrom((struct RsResource *)(((unsigned char *)pResource) + __nvoc_rtti_Device_RsResource.offset), pParams);
}
static void __nvoc_thunk_RmResource_deviceControl_Epilogue(struct Device *pResource, struct CALL_CONTEXT *pCallContext, struct RS_RES_CONTROL_PARAMS_INTERNAL *pParams) {
rmresControl_Epilogue((struct RmResource *)(((unsigned char *)pResource) + __nvoc_rtti_Device_RmResource.offset), pCallContext, pParams);
}
static NV_STATUS __nvoc_thunk_RsResource_deviceControlLookup(struct Device *pResource, struct RS_RES_CONTROL_PARAMS_INTERNAL *pParams, const struct NVOC_EXPORTED_METHOD_DEF **ppEntry) {
return resControlLookup((struct RsResource *)(((unsigned char *)pResource) + __nvoc_rtti_Device_RsResource.offset), pParams, ppEntry);
}
static NV_STATUS __nvoc_thunk_GpuResource_deviceMap(struct Device *pGpuResource, struct CALL_CONTEXT *pCallContext, struct RS_CPU_MAP_PARAMS *pParams, struct RsCpuMapping *pCpuMapping) {
return gpuresMap((struct GpuResource *)(((unsigned char *)pGpuResource) + __nvoc_rtti_Device_GpuResource.offset), pCallContext, pParams, pCpuMapping);
}
static NvBool __nvoc_thunk_RmResource_deviceAccessCallback(struct Device *pResource, struct RsClient *pInvokingClient, void *pAllocParams, RsAccessRight accessRight) {
return rmresAccessCallback((struct RmResource *)(((unsigned char *)pResource) + __nvoc_rtti_Device_RmResource.offset), pInvokingClient, pAllocParams, accessRight);
}
#if !defined(NVOC_EXPORTED_METHOD_DISABLED_BY_FLAG)
#define NVOC_EXPORTED_METHOD_DISABLED_BY_FLAG(x) (0)
#endif
static const struct NVOC_EXPORTED_METHOD_DEF __nvoc_exported_method_def_Device[] =
{
{ /* [0] */
#if NVOC_EXPORTED_METHOD_DISABLED_BY_FLAG(0x813u)
/*pFunc=*/ (void (*)(void)) NULL,
#else
/*pFunc=*/ (void (*)(void)) deviceCtrlCmdGpuGetClasslist_IMPL,
#endif // NVOC_EXPORTED_METHOD_DISABLED_BY_FLAG(0x813u)
/*flags=*/ 0x813u,
/*accessRight=*/0x0u,
/*methodId=*/ 0x800201u,
/*paramSize=*/ sizeof(NV0080_CTRL_GPU_GET_CLASSLIST_PARAMS),
/*pClassInfo=*/ &(__nvoc_class_def_Device.classInfo),
#if NV_PRINTF_STRINGS_ALLOWED
/*func=*/ "deviceCtrlCmdGpuGetClasslist"
#endif
},
{ /* [1] */
#if NVOC_EXPORTED_METHOD_DISABLED_BY_FLAG(0x811u)
/*pFunc=*/ (void (*)(void)) NULL,
#else
/*pFunc=*/ (void (*)(void)) deviceCtrlCmdGpuGetNumSubdevices_IMPL,
#endif // NVOC_EXPORTED_METHOD_DISABLED_BY_FLAG(0x811u)
/*flags=*/ 0x811u,
/*accessRight=*/0x0u,
/*methodId=*/ 0x800280u,
/*paramSize=*/ sizeof(NV0080_CTRL_GPU_GET_NUM_SUBDEVICES_PARAMS),
/*pClassInfo=*/ &(__nvoc_class_def_Device.classInfo),
#if NV_PRINTF_STRINGS_ALLOWED
/*func=*/ "deviceCtrlCmdGpuGetNumSubdevices"
#endif
},
{ /* [2] */
#if NVOC_EXPORTED_METHOD_DISABLED_BY_FLAG(0x5u)
/*pFunc=*/ (void (*)(void)) NULL,
#else
/*pFunc=*/ (void (*)(void)) deviceCtrlCmdGpuModifyGpuSwStatePersistence_IMPL,
#endif // NVOC_EXPORTED_METHOD_DISABLED_BY_FLAG(0x5u)
/*flags=*/ 0x5u,
/*accessRight=*/0x0u,
/*methodId=*/ 0x800287u,
/*paramSize=*/ sizeof(NV0080_CTRL_GPU_MODIFY_SW_STATE_PERSISTENCE_PARAMS),
/*pClassInfo=*/ &(__nvoc_class_def_Device.classInfo),
#if NV_PRINTF_STRINGS_ALLOWED
/*func=*/ "deviceCtrlCmdGpuModifyGpuSwStatePersistence"
#endif
},
{ /* [3] */
#if NVOC_EXPORTED_METHOD_DISABLED_BY_FLAG(0x11u)
/*pFunc=*/ (void (*)(void)) NULL,
#else
/*pFunc=*/ (void (*)(void)) deviceCtrlCmdGpuQueryGpuSwStatePersistence_IMPL,
#endif // NVOC_EXPORTED_METHOD_DISABLED_BY_FLAG(0x11u)
/*flags=*/ 0x11u,
/*accessRight=*/0x0u,
/*methodId=*/ 0x800288u,
/*paramSize=*/ sizeof(NV0080_CTRL_GPU_QUERY_SW_STATE_PERSISTENCE_PARAMS),
/*pClassInfo=*/ &(__nvoc_class_def_Device.classInfo),
#if NV_PRINTF_STRINGS_ALLOWED
/*func=*/ "deviceCtrlCmdGpuQueryGpuSwStatePersistence"
#endif
},
{ /* [4] */
#if NVOC_EXPORTED_METHOD_DISABLED_BY_FLAG(0x810u)
/*pFunc=*/ (void (*)(void)) NULL,
#else
/*pFunc=*/ (void (*)(void)) deviceCtrlCmdGpuGetVirtualizationMode_IMPL,
#endif // NVOC_EXPORTED_METHOD_DISABLED_BY_FLAG(0x810u)
/*flags=*/ 0x810u,
/*accessRight=*/0x0u,
/*methodId=*/ 0x800289u,
/*paramSize=*/ sizeof(NV0080_CTRL_GPU_GET_VIRTUALIZATION_MODE_PARAMS),
/*pClassInfo=*/ &(__nvoc_class_def_Device.classInfo),
#if NV_PRINTF_STRINGS_ALLOWED
/*func=*/ "deviceCtrlCmdGpuGetVirtualizationMode"
#endif
},
{ /* [5] */
#if NVOC_EXPORTED_METHOD_DISABLED_BY_FLAG(0x813u)
/*pFunc=*/ (void (*)(void)) NULL,
#else
/*pFunc=*/ (void (*)(void)) deviceCtrlCmdGpuGetClasslistV2_IMPL,
#endif // NVOC_EXPORTED_METHOD_DISABLED_BY_FLAG(0x813u)
/*flags=*/ 0x813u,
/*accessRight=*/0x0u,
/*methodId=*/ 0x800292u,
/*paramSize=*/ sizeof(NV0080_CTRL_GPU_GET_CLASSLIST_V2_PARAMS),
/*pClassInfo=*/ &(__nvoc_class_def_Device.classInfo),
#if NV_PRINTF_STRINGS_ALLOWED
/*func=*/ "deviceCtrlCmdGpuGetClasslistV2"
#endif
},
{ /* [6] */
#if NVOC_EXPORTED_METHOD_DISABLED_BY_FLAG(0x13u)
/*pFunc=*/ (void (*)(void)) NULL,
#else
/*pFunc=*/ (void (*)(void)) deviceCtrlCmdGpuGetFindSubDeviceHandle_IMPL,
#endif // NVOC_EXPORTED_METHOD_DISABLED_BY_FLAG(0x13u)
/*flags=*/ 0x13u,
/*accessRight=*/0x0u,
/*methodId=*/ 0x800293u,
/*paramSize=*/ sizeof(NV0080_CTRL_GPU_FIND_SUBDEVICE_HANDLE_PARAM),
/*pClassInfo=*/ &(__nvoc_class_def_Device.classInfo),
#if NV_PRINTF_STRINGS_ALLOWED
/*func=*/ "deviceCtrlCmdGpuGetFindSubDeviceHandle"
#endif
},
{ /* [7] */
#if NVOC_EXPORTED_METHOD_DISABLED_BY_FLAG(0x211u)
/*pFunc=*/ (void (*)(void)) NULL,
#else
/*pFunc=*/ (void (*)(void)) deviceCtrlCmdGpuGetBrandCaps_IMPL,
#endif // NVOC_EXPORTED_METHOD_DISABLED_BY_FLAG(0x211u)
/*flags=*/ 0x211u,
/*accessRight=*/0x0u,
/*methodId=*/ 0x800294u,
/*paramSize=*/ sizeof(NV0080_CTRL_GPU_GET_BRAND_CAPS_PARAMS),
/*pClassInfo=*/ &(__nvoc_class_def_Device.classInfo),
#if NV_PRINTF_STRINGS_ALLOWED
/*func=*/ "deviceCtrlCmdGpuGetBrandCaps"
#endif
},
{ /* [8] */
#if NVOC_EXPORTED_METHOD_DISABLED_BY_FLAG(0x204u)
/*pFunc=*/ (void (*)(void)) NULL,
#else
/*pFunc=*/ (void (*)(void)) deviceCtrlCmdGpuSetVgpuVfBar1Size_IMPL,
#endif // NVOC_EXPORTED_METHOD_DISABLED_BY_FLAG(0x204u)
/*flags=*/ 0x204u,
/*accessRight=*/0x0u,
/*methodId=*/ 0x800296u,
/*paramSize=*/ sizeof(NV0080_CTRL_GPU_SET_VGPU_VF_BAR1_SIZE_PARAMS),
/*pClassInfo=*/ &(__nvoc_class_def_Device.classInfo),
#if NV_PRINTF_STRINGS_ALLOWED
/*func=*/ "deviceCtrlCmdGpuSetVgpuVfBar1Size"
#endif
},
{ /* [9] */
#if NVOC_EXPORTED_METHOD_DISABLED_BY_FLAG(0x1u)
/*pFunc=*/ (void (*)(void)) NULL,
#else
/*pFunc=*/ (void (*)(void)) deviceCtrlCmdOsUnixVTSwitch_IMPL,
#endif // NVOC_EXPORTED_METHOD_DISABLED_BY_FLAG(0x1u)
/*flags=*/ 0x1u,
/*accessRight=*/0x0u,
/*methodId=*/ 0x801e01u,
/*paramSize=*/ sizeof(NV0080_CTRL_OS_UNIX_VT_SWITCH_PARAMS),
/*pClassInfo=*/ &(__nvoc_class_def_Device.classInfo),
#if NV_PRINTF_STRINGS_ALLOWED
/*func=*/ "deviceCtrlCmdOsUnixVTSwitch"
#endif
},
{ /* [10] */
#if NVOC_EXPORTED_METHOD_DISABLED_BY_FLAG(0x1u)
/*pFunc=*/ (void (*)(void)) NULL,
#else
/*pFunc=*/ (void (*)(void)) deviceCtrlCmdOsUnixVTGetFBInfo_IMPL,
#endif // NVOC_EXPORTED_METHOD_DISABLED_BY_FLAG(0x1u)
/*flags=*/ 0x1u,
/*accessRight=*/0x0u,
/*methodId=*/ 0x801e02u,
/*paramSize=*/ sizeof(NV0080_CTRL_OS_UNIX_VT_GET_FB_INFO_PARAMS),
/*pClassInfo=*/ &(__nvoc_class_def_Device.classInfo),
#if NV_PRINTF_STRINGS_ALLOWED
/*func=*/ "deviceCtrlCmdOsUnixVTGetFBInfo"
#endif
},
};
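/*
 * Each entry above binds an NV0080 control command (methodId, e.g. 0x800201
 * for the classlist query in entry [0]) to its _IMPL handler, parameter-struct
 * size, access right, and RMCTRL flags; the table is published through
 * __nvoc_export_info_Device below so that resource control calls on a Device
 * can be dispatched by methodId lookup.
 */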
const struct NVOC_EXPORT_INFO __nvoc_export_info_Device =
{
/*numEntries=*/ 11,
/*pExportEntries=*/ __nvoc_exported_method_def_Device
};
void __nvoc_dtor_GpuResource(GpuResource*);
void __nvoc_dtor_Device(Device *pThis) {
__nvoc_deviceDestruct(pThis);
__nvoc_dtor_GpuResource(&pThis->__nvoc_base_GpuResource);
PORT_UNREFERENCED_VARIABLE(pThis);
}
void __nvoc_init_dataField_Device(Device *pThis) {
PORT_UNREFERENCED_VARIABLE(pThis);
}
NV_STATUS __nvoc_ctor_GpuResource(GpuResource* , struct CALL_CONTEXT *, struct RS_RES_ALLOC_PARAMS_INTERNAL *);
NV_STATUS __nvoc_ctor_Device(Device *pThis, struct CALL_CONTEXT * arg_pCallContext, struct RS_RES_ALLOC_PARAMS_INTERNAL * arg_pParams) {
NV_STATUS status = NV_OK;
status = __nvoc_ctor_GpuResource(&pThis->__nvoc_base_GpuResource, arg_pCallContext, arg_pParams);
if (status != NV_OK) goto __nvoc_ctor_Device_fail_GpuResource;
__nvoc_init_dataField_Device(pThis);
status = __nvoc_deviceConstruct(pThis, arg_pCallContext, arg_pParams);
if (status != NV_OK) goto __nvoc_ctor_Device_fail__init;
goto __nvoc_ctor_Device_exit; // Success
__nvoc_ctor_Device_fail__init:
__nvoc_dtor_GpuResource(&pThis->__nvoc_base_GpuResource);
__nvoc_ctor_Device_fail_GpuResource:
__nvoc_ctor_Device_exit:
return status;
}
static void __nvoc_init_funcTable_Device_1(Device *pThis) {
PORT_UNREFERENCED_VARIABLE(pThis);
pThis->__deviceControl__ = &deviceControl_IMPL;
pThis->__deviceInternalControlForward__ = &deviceInternalControlForward_IMPL;
#if !NVOC_EXPORTED_METHOD_DISABLED_BY_FLAG(0x813u)
pThis->__deviceCtrlCmdGpuGetClasslist__ = &deviceCtrlCmdGpuGetClasslist_IMPL;
#endif
#if !NVOC_EXPORTED_METHOD_DISABLED_BY_FLAG(0x813u)
pThis->__deviceCtrlCmdGpuGetClasslistV2__ = &deviceCtrlCmdGpuGetClasslistV2_IMPL;
#endif
#if !NVOC_EXPORTED_METHOD_DISABLED_BY_FLAG(0x811u)
pThis->__deviceCtrlCmdGpuGetNumSubdevices__ = &deviceCtrlCmdGpuGetNumSubdevices_IMPL;
#endif
#if !NVOC_EXPORTED_METHOD_DISABLED_BY_FLAG(0x5u)
pThis->__deviceCtrlCmdGpuModifyGpuSwStatePersistence__ = &deviceCtrlCmdGpuModifyGpuSwStatePersistence_IMPL;
#endif
#if !NVOC_EXPORTED_METHOD_DISABLED_BY_FLAG(0x11u)
pThis->__deviceCtrlCmdGpuQueryGpuSwStatePersistence__ = &deviceCtrlCmdGpuQueryGpuSwStatePersistence_IMPL;
#endif
#if !NVOC_EXPORTED_METHOD_DISABLED_BY_FLAG(0x810u)
pThis->__deviceCtrlCmdGpuGetVirtualizationMode__ = &deviceCtrlCmdGpuGetVirtualizationMode_IMPL;
#endif
#if !NVOC_EXPORTED_METHOD_DISABLED_BY_FLAG(0x204u)
pThis->__deviceCtrlCmdGpuSetVgpuVfBar1Size__ = &deviceCtrlCmdGpuSetVgpuVfBar1Size_IMPL;
#endif
#if !NVOC_EXPORTED_METHOD_DISABLED_BY_FLAG(0x211u)
pThis->__deviceCtrlCmdGpuGetBrandCaps__ = &deviceCtrlCmdGpuGetBrandCaps_IMPL;
#endif
#if !NVOC_EXPORTED_METHOD_DISABLED_BY_FLAG(0x13u)
pThis->__deviceCtrlCmdGpuGetFindSubDeviceHandle__ = &deviceCtrlCmdGpuGetFindSubDeviceHandle_IMPL;
#endif
#if !NVOC_EXPORTED_METHOD_DISABLED_BY_FLAG(0x1u)
pThis->__deviceCtrlCmdOsUnixVTSwitch__ = &deviceCtrlCmdOsUnixVTSwitch_IMPL;
#endif
#if !NVOC_EXPORTED_METHOD_DISABLED_BY_FLAG(0x1u)
pThis->__deviceCtrlCmdOsUnixVTGetFBInfo__ = &deviceCtrlCmdOsUnixVTGetFBInfo_IMPL;
#endif
pThis->__nvoc_base_GpuResource.__gpuresControl__ = &__nvoc_thunk_Device_gpuresControl;
pThis->__nvoc_base_GpuResource.__gpuresInternalControlForward__ = &__nvoc_thunk_Device_gpuresInternalControlForward;
pThis->__deviceShareCallback__ = &__nvoc_thunk_GpuResource_deviceShareCallback;
pThis->__deviceUnmap__ = &__nvoc_thunk_GpuResource_deviceUnmap;
pThis->__deviceGetMemInterMapParams__ = &__nvoc_thunk_RmResource_deviceGetMemInterMapParams;
pThis->__deviceGetMemoryMappingDescriptor__ = &__nvoc_thunk_RmResource_deviceGetMemoryMappingDescriptor;
pThis->__deviceGetMapAddrSpace__ = &__nvoc_thunk_GpuResource_deviceGetMapAddrSpace;
pThis->__deviceGetInternalObjectHandle__ = &__nvoc_thunk_GpuResource_deviceGetInternalObjectHandle;
pThis->__deviceControlFilter__ = &__nvoc_thunk_RsResource_deviceControlFilter;
pThis->__deviceAddAdditionalDependants__ = &__nvoc_thunk_RsResource_deviceAddAdditionalDependants;
pThis->__deviceGetRefCount__ = &__nvoc_thunk_RsResource_deviceGetRefCount;
pThis->__deviceCheckMemInterUnmap__ = &__nvoc_thunk_RmResource_deviceCheckMemInterUnmap;
pThis->__deviceMapTo__ = &__nvoc_thunk_RsResource_deviceMapTo;
pThis->__deviceControl_Prologue__ = &__nvoc_thunk_RmResource_deviceControl_Prologue;
pThis->__deviceGetRegBaseOffsetAndSize__ = &__nvoc_thunk_GpuResource_deviceGetRegBaseOffsetAndSize;
pThis->__deviceCanCopy__ = &__nvoc_thunk_RsResource_deviceCanCopy;
pThis->__devicePreDestruct__ = &__nvoc_thunk_RsResource_devicePreDestruct;
pThis->__deviceUnmapFrom__ = &__nvoc_thunk_RsResource_deviceUnmapFrom;
pThis->__deviceControl_Epilogue__ = &__nvoc_thunk_RmResource_deviceControl_Epilogue;
pThis->__deviceControlLookup__ = &__nvoc_thunk_RsResource_deviceControlLookup;
pThis->__deviceMap__ = &__nvoc_thunk_GpuResource_deviceMap;
pThis->__deviceAccessCallback__ = &__nvoc_thunk_RmResource_deviceAccessCallback;
}
void __nvoc_init_funcTable_Device(Device *pThis) {
__nvoc_init_funcTable_Device_1(pThis);
}
void __nvoc_init_GpuResource(GpuResource*);
void __nvoc_init_Device(Device *pThis) {
pThis->__nvoc_pbase_Device = pThis;
pThis->__nvoc_pbase_Object = &pThis->__nvoc_base_GpuResource.__nvoc_base_RmResource.__nvoc_base_RsResource.__nvoc_base_Object;
pThis->__nvoc_pbase_RsResource = &pThis->__nvoc_base_GpuResource.__nvoc_base_RmResource.__nvoc_base_RsResource;
pThis->__nvoc_pbase_RmResourceCommon = &pThis->__nvoc_base_GpuResource.__nvoc_base_RmResource.__nvoc_base_RmResourceCommon;
pThis->__nvoc_pbase_RmResource = &pThis->__nvoc_base_GpuResource.__nvoc_base_RmResource;
pThis->__nvoc_pbase_GpuResource = &pThis->__nvoc_base_GpuResource;
__nvoc_init_GpuResource(&pThis->__nvoc_base_GpuResource);
__nvoc_init_funcTable_Device(pThis);
}
NV_STATUS __nvoc_objCreate_Device(Device **ppThis, Dynamic *pParent, NvU32 createFlags, struct CALL_CONTEXT * arg_pCallContext, struct RS_RES_ALLOC_PARAMS_INTERNAL * arg_pParams) {
NV_STATUS status;
Object *pParentObj;
Device *pThis;
pThis = portMemAllocNonPaged(sizeof(Device));
if (pThis == NULL) return NV_ERR_NO_MEMORY;
portMemSet(pThis, 0, sizeof(Device));
__nvoc_initRtti(staticCast(pThis, Dynamic), &__nvoc_class_def_Device);
if (pParent != NULL && !(createFlags & NVOC_OBJ_CREATE_FLAGS_PARENT_HALSPEC_ONLY))
{
pParentObj = dynamicCast(pParent, Object);
objAddChild(pParentObj, &pThis->__nvoc_base_GpuResource.__nvoc_base_RmResource.__nvoc_base_RsResource.__nvoc_base_Object);
}
else
{
pThis->__nvoc_base_GpuResource.__nvoc_base_RmResource.__nvoc_base_RsResource.__nvoc_base_Object.pParent = NULL;
}
__nvoc_init_Device(pThis);
status = __nvoc_ctor_Device(pThis, arg_pCallContext, arg_pParams);
if (status != NV_OK) goto __nvoc_objCreate_Device_cleanup;
*ppThis = pThis;
return NV_OK;
__nvoc_objCreate_Device_cleanup:
// do not call destructors here since the constructor already called them
portMemFree(pThis);
return status;
}
NV_STATUS __nvoc_objCreateDynamic_Device(Device **ppThis, Dynamic *pParent, NvU32 createFlags, va_list args) {
NV_STATUS status;
struct CALL_CONTEXT * arg_pCallContext = va_arg(args, struct CALL_CONTEXT *);
struct RS_RES_ALLOC_PARAMS_INTERNAL * arg_pParams = va_arg(args, struct RS_RES_ALLOC_PARAMS_INTERNAL *);
status = __nvoc_objCreate_Device(ppThis, pParent, createFlags, arg_pCallContext, arg_pParams);
return status;
}
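
The __nvoc_thunk_* functions in this file bridge between Device and the classes it derives from: a thunk that receives a base-class pointer subtracts the NV_OFFSETOF of the embedded base to recover the Device, while a thunk forwarding to a base implementation adds the same offset. A minimal standalone sketch of that pointer adjustment, using made-up struct names:

#include <stddef.h>

/* Hypothetical derived type embedding its base, mirroring Device/GpuResource. */
typedef struct Base    { int baseState; }        Base;
typedef struct Derived { int extra; Base base; } Derived;

/* Derived -> Base (upcast): add the offset of the embedded base. */
static Base *toBase(Derived *pDerived)
{
    return (Base *)((unsigned char *)pDerived + offsetof(Derived, base));
}

/* Base -> Derived (downcast): subtract the same offset, as the generated thunks do. */
static Derived *toDerived(Base *pBase)
{
    return (Derived *)((unsigned char *)pBase - offsetof(Derived, base));
}

This is why every thunk in the generated file is written in terms of __nvoc_rtti_Device_<Base>.offset rather than a plain cast: the adjustment depends on where the base is embedded inside Device.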

@@ -0,0 +1,466 @@
#ifndef _G_DEVICE_NVOC_H_
#define _G_DEVICE_NVOC_H_
#include "nvoc/runtime.h"
#ifdef __cplusplus
extern "C" {
#endif
/*
* SPDX-FileCopyrightText: Copyright (c) 1993-2022 NVIDIA CORPORATION & AFFILIATES. All rights reserved.
* SPDX-License-Identifier: MIT
*
* Permission is hereby granted, free of charge, to any person obtaining a
* copy of this software and associated documentation files (the "Software"),
* to deal in the Software without restriction, including without limitation
* the rights to use, copy, modify, merge, publish, distribute, sublicense,
* and/or sell copies of the Software, and to permit persons to whom the
* Software is furnished to do so, subject to the following conditions:
*
* The above copyright notice and this permission notice shall be included in
* all copies or substantial portions of the Software.
*
* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
* IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
* FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL
* THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
* LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
* FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER
* DEALINGS IN THE SOFTWARE.
*/
#include "g_device_nvoc.h"
#ifndef _DEVICE_H_
#define _DEVICE_H_
#include "core/core.h"
#include "resserv/resserv.h"
#include "nvoc/prelude.h"
#include "nvoc/utility.h"
#include "resserv/rs_resource.h"
#include "rmapi/control.h"
#include "containers/btree.h"
#include "gpu/gpu_resource.h"
#include "mem_mgr/vaspace.h"
#include "ctrl/ctrl0080.h" // rmcontrol params
// Forward declaration
struct HOST_VGPU_DEVICE;
struct OBJVASPACE;
#ifndef __NVOC_CLASS_OBJVASPACE_TYPEDEF__
#define __NVOC_CLASS_OBJVASPACE_TYPEDEF__
typedef struct OBJVASPACE OBJVASPACE;
#endif /* __NVOC_CLASS_OBJVASPACE_TYPEDEF__ */
#ifndef __nvoc_class_id_OBJVASPACE
#define __nvoc_class_id_OBJVASPACE 0x6c347f
#endif /* __nvoc_class_id_OBJVASPACE */
// TODO: Remove this after adding KERNEL_HOST_VGPU_DEVICE
typedef struct HOST_VGPU_DEVICE KERNEL_HOST_VGPU_DEVICE;
/**
* A device consists of one or more GPUs. Devices provide broadcast
* semantics; that is, operations involving a device are applied to all GPUs
* in the device.
*/
#ifdef NVOC_DEVICE_H_PRIVATE_ACCESS_ALLOWED
#define PRIVATE_FIELD(x) x
#else
#define PRIVATE_FIELD(x) NVOC_PRIVATE_FIELD(x)
#endif
struct Device {
const struct NVOC_RTTI *__nvoc_rtti;
struct GpuResource __nvoc_base_GpuResource;
struct Object *__nvoc_pbase_Object;
struct RsResource *__nvoc_pbase_RsResource;
struct RmResourceCommon *__nvoc_pbase_RmResourceCommon;
struct RmResource *__nvoc_pbase_RmResource;
struct GpuResource *__nvoc_pbase_GpuResource;
struct Device *__nvoc_pbase_Device;
NV_STATUS (*__deviceControl__)(struct Device *, struct CALL_CONTEXT *, struct RS_RES_CONTROL_PARAMS_INTERNAL *);
NV_STATUS (*__deviceInternalControlForward__)(struct Device *, NvU32, void *, NvU32);
NV_STATUS (*__deviceCtrlCmdGpuGetClasslist__)(struct Device *, NV0080_CTRL_GPU_GET_CLASSLIST_PARAMS *);
NV_STATUS (*__deviceCtrlCmdGpuGetClasslistV2__)(struct Device *, NV0080_CTRL_GPU_GET_CLASSLIST_V2_PARAMS *);
NV_STATUS (*__deviceCtrlCmdGpuGetNumSubdevices__)(struct Device *, NV0080_CTRL_GPU_GET_NUM_SUBDEVICES_PARAMS *);
NV_STATUS (*__deviceCtrlCmdGpuModifyGpuSwStatePersistence__)(struct Device *, NV0080_CTRL_GPU_MODIFY_SW_STATE_PERSISTENCE_PARAMS *);
NV_STATUS (*__deviceCtrlCmdGpuQueryGpuSwStatePersistence__)(struct Device *, NV0080_CTRL_GPU_QUERY_SW_STATE_PERSISTENCE_PARAMS *);
NV_STATUS (*__deviceCtrlCmdGpuGetVirtualizationMode__)(struct Device *, NV0080_CTRL_GPU_GET_VIRTUALIZATION_MODE_PARAMS *);
NV_STATUS (*__deviceCtrlCmdGpuSetVgpuVfBar1Size__)(struct Device *, NV0080_CTRL_GPU_SET_VGPU_VF_BAR1_SIZE_PARAMS *);
NV_STATUS (*__deviceCtrlCmdGpuGetBrandCaps__)(struct Device *, NV0080_CTRL_GPU_GET_BRAND_CAPS_PARAMS *);
NV_STATUS (*__deviceCtrlCmdGpuGetFindSubDeviceHandle__)(struct Device *, NV0080_CTRL_GPU_FIND_SUBDEVICE_HANDLE_PARAM *);
NV_STATUS (*__deviceCtrlCmdOsUnixVTSwitch__)(struct Device *, NV0080_CTRL_OS_UNIX_VT_SWITCH_PARAMS *);
NV_STATUS (*__deviceCtrlCmdOsUnixVTGetFBInfo__)(struct Device *, NV0080_CTRL_OS_UNIX_VT_GET_FB_INFO_PARAMS *);
NvBool (*__deviceShareCallback__)(struct Device *, struct RsClient *, struct RsResourceRef *, RS_SHARE_POLICY *);
NV_STATUS (*__deviceUnmap__)(struct Device *, struct CALL_CONTEXT *, struct RsCpuMapping *);
NV_STATUS (*__deviceGetMemInterMapParams__)(struct Device *, RMRES_MEM_INTER_MAP_PARAMS *);
NV_STATUS (*__deviceGetMemoryMappingDescriptor__)(struct Device *, struct MEMORY_DESCRIPTOR **);
NV_STATUS (*__deviceGetMapAddrSpace__)(struct Device *, struct CALL_CONTEXT *, NvU32, NV_ADDRESS_SPACE *);
NvHandle (*__deviceGetInternalObjectHandle__)(struct Device *);
NV_STATUS (*__deviceControlFilter__)(struct Device *, struct CALL_CONTEXT *, struct RS_RES_CONTROL_PARAMS_INTERNAL *);
void (*__deviceAddAdditionalDependants__)(struct RsClient *, struct Device *, RsResourceRef *);
NvU32 (*__deviceGetRefCount__)(struct Device *);
NV_STATUS (*__deviceCheckMemInterUnmap__)(struct Device *, NvBool);
NV_STATUS (*__deviceMapTo__)(struct Device *, RS_RES_MAP_TO_PARAMS *);
NV_STATUS (*__deviceControl_Prologue__)(struct Device *, struct CALL_CONTEXT *, struct RS_RES_CONTROL_PARAMS_INTERNAL *);
NV_STATUS (*__deviceGetRegBaseOffsetAndSize__)(struct Device *, struct OBJGPU *, NvU32 *, NvU32 *);
NvBool (*__deviceCanCopy__)(struct Device *);
void (*__devicePreDestruct__)(struct Device *);
NV_STATUS (*__deviceUnmapFrom__)(struct Device *, RS_RES_UNMAP_FROM_PARAMS *);
void (*__deviceControl_Epilogue__)(struct Device *, struct CALL_CONTEXT *, struct RS_RES_CONTROL_PARAMS_INTERNAL *);
NV_STATUS (*__deviceControlLookup__)(struct Device *, struct RS_RES_CONTROL_PARAMS_INTERNAL *, const struct NVOC_EXPORTED_METHOD_DEF **);
NV_STATUS (*__deviceMap__)(struct Device *, struct CALL_CONTEXT *, struct RS_CPU_MAP_PARAMS *, struct RsCpuMapping *);
NvBool (*__deviceAccessCallback__)(struct Device *, struct RsClient *, void *, RsAccessRight);
NvU32 deviceInst;
NvU32 PerfReqCnt;
PNODE DevMemoryTable;
NvBool bSliGpuBoostSyncActivate;
NvBool bPerfOptpActive;
NvU32 nPerfOptpRefCnt;
NvU32 nCudaLimitRefCnt;
struct OBJVASPACE *pVASpace;
NvHandle hClientShare;
NvHandle hTargetClient;
NvHandle hTargetDevice;
NvU32 deviceAllocFlags;
NvU32 deviceInternalAllocFlags;
NvU64 vaStartInternal;
NvU64 vaLimitInternal;
NvU64 vaSize;
NvU32 vaMode;
struct HOST_VGPU_DEVICE *pHostVgpuDevice;
KERNEL_HOST_VGPU_DEVICE *pKernelHostVgpuDevice;
};
#ifndef __NVOC_CLASS_Device_TYPEDEF__
#define __NVOC_CLASS_Device_TYPEDEF__
typedef struct Device Device;
#endif /* __NVOC_CLASS_Device_TYPEDEF__ */
#ifndef __nvoc_class_id_Device
#define __nvoc_class_id_Device 0xe0ac20
#endif /* __nvoc_class_id_Device */
extern const struct NVOC_CLASS_DEF __nvoc_class_def_Device;
#define __staticCast_Device(pThis) \
((pThis)->__nvoc_pbase_Device)
#ifdef __nvoc_device_h_disabled
#define __dynamicCast_Device(pThis) ((Device*)NULL)
#else //__nvoc_device_h_disabled
#define __dynamicCast_Device(pThis) \
((Device*)__nvoc_dynamicCast(staticCast((pThis), Dynamic), classInfo(Device)))
#endif //__nvoc_device_h_disabled
NV_STATUS __nvoc_objCreateDynamic_Device(Device**, Dynamic*, NvU32, va_list);
NV_STATUS __nvoc_objCreate_Device(Device**, Dynamic*, NvU32, struct CALL_CONTEXT * arg_pCallContext, struct RS_RES_ALLOC_PARAMS_INTERNAL * arg_pParams);
#define __objCreate_Device(ppNewObj, pParent, createFlags, arg_pCallContext, arg_pParams) \
__nvoc_objCreate_Device((ppNewObj), staticCast((pParent), Dynamic), (createFlags), arg_pCallContext, arg_pParams)
#define deviceControl(pResource, pCallContext, pParams) deviceControl_DISPATCH(pResource, pCallContext, pParams)
#define deviceInternalControlForward(pDevice, command, pParams, size) deviceInternalControlForward_DISPATCH(pDevice, command, pParams, size)
#define deviceCtrlCmdGpuGetClasslist(pDevice, pClassListParams) deviceCtrlCmdGpuGetClasslist_DISPATCH(pDevice, pClassListParams)
#define deviceCtrlCmdGpuGetClasslistV2(pDevice, pParams) deviceCtrlCmdGpuGetClasslistV2_DISPATCH(pDevice, pParams)
#define deviceCtrlCmdGpuGetNumSubdevices(pDevice, pSubDeviceCountParams) deviceCtrlCmdGpuGetNumSubdevices_DISPATCH(pDevice, pSubDeviceCountParams)
#define deviceCtrlCmdGpuModifyGpuSwStatePersistence(pDevice, pParams) deviceCtrlCmdGpuModifyGpuSwStatePersistence_DISPATCH(pDevice, pParams)
#define deviceCtrlCmdGpuQueryGpuSwStatePersistence(pDevice, pParams) deviceCtrlCmdGpuQueryGpuSwStatePersistence_DISPATCH(pDevice, pParams)
#define deviceCtrlCmdGpuGetVirtualizationMode(pDevice, pParams) deviceCtrlCmdGpuGetVirtualizationMode_DISPATCH(pDevice, pParams)
#define deviceCtrlCmdGpuSetVgpuVfBar1Size(pDevice, pParams) deviceCtrlCmdGpuSetVgpuVfBar1Size_DISPATCH(pDevice, pParams)
#define deviceCtrlCmdGpuGetBrandCaps(pDevice, pParams) deviceCtrlCmdGpuGetBrandCaps_DISPATCH(pDevice, pParams)
#define deviceCtrlCmdGpuGetFindSubDeviceHandle(pDevice, pParams) deviceCtrlCmdGpuGetFindSubDeviceHandle_DISPATCH(pDevice, pParams)
#define deviceCtrlCmdOsUnixVTSwitch(pDevice, pParams) deviceCtrlCmdOsUnixVTSwitch_DISPATCH(pDevice, pParams)
#define deviceCtrlCmdOsUnixVTGetFBInfo(pDevice, pParams) deviceCtrlCmdOsUnixVTGetFBInfo_DISPATCH(pDevice, pParams)
#define deviceShareCallback(pGpuResource, pInvokingClient, pParentRef, pSharePolicy) deviceShareCallback_DISPATCH(pGpuResource, pInvokingClient, pParentRef, pSharePolicy)
#define deviceUnmap(pGpuResource, pCallContext, pCpuMapping) deviceUnmap_DISPATCH(pGpuResource, pCallContext, pCpuMapping)
#define deviceGetMemInterMapParams(pRmResource, pParams) deviceGetMemInterMapParams_DISPATCH(pRmResource, pParams)
#define deviceGetMemoryMappingDescriptor(pRmResource, ppMemDesc) deviceGetMemoryMappingDescriptor_DISPATCH(pRmResource, ppMemDesc)
#define deviceGetMapAddrSpace(pGpuResource, pCallContext, mapFlags, pAddrSpace) deviceGetMapAddrSpace_DISPATCH(pGpuResource, pCallContext, mapFlags, pAddrSpace)
#define deviceGetInternalObjectHandle(pGpuResource) deviceGetInternalObjectHandle_DISPATCH(pGpuResource)
#define deviceControlFilter(pResource, pCallContext, pParams) deviceControlFilter_DISPATCH(pResource, pCallContext, pParams)
#define deviceAddAdditionalDependants(pClient, pResource, pReference) deviceAddAdditionalDependants_DISPATCH(pClient, pResource, pReference)
#define deviceGetRefCount(pResource) deviceGetRefCount_DISPATCH(pResource)
#define deviceCheckMemInterUnmap(pRmResource, bSubdeviceHandleProvided) deviceCheckMemInterUnmap_DISPATCH(pRmResource, bSubdeviceHandleProvided)
#define deviceMapTo(pResource, pParams) deviceMapTo_DISPATCH(pResource, pParams)
#define deviceControl_Prologue(pResource, pCallContext, pParams) deviceControl_Prologue_DISPATCH(pResource, pCallContext, pParams)
#define deviceGetRegBaseOffsetAndSize(pGpuResource, pGpu, pOffset, pSize) deviceGetRegBaseOffsetAndSize_DISPATCH(pGpuResource, pGpu, pOffset, pSize)
#define deviceCanCopy(pResource) deviceCanCopy_DISPATCH(pResource)
#define devicePreDestruct(pResource) devicePreDestruct_DISPATCH(pResource)
#define deviceUnmapFrom(pResource, pParams) deviceUnmapFrom_DISPATCH(pResource, pParams)
#define deviceControl_Epilogue(pResource, pCallContext, pParams) deviceControl_Epilogue_DISPATCH(pResource, pCallContext, pParams)
#define deviceControlLookup(pResource, pParams, ppEntry) deviceControlLookup_DISPATCH(pResource, pParams, ppEntry)
#define deviceMap(pGpuResource, pCallContext, pParams, pCpuMapping) deviceMap_DISPATCH(pGpuResource, pCallContext, pParams, pCpuMapping)
#define deviceAccessCallback(pResource, pInvokingClient, pAllocParams, accessRight) deviceAccessCallback_DISPATCH(pResource, pInvokingClient, pAllocParams, accessRight)
NV_STATUS deviceControl_IMPL(struct Device *pResource, struct CALL_CONTEXT *pCallContext, struct RS_RES_CONTROL_PARAMS_INTERNAL *pParams);
static inline NV_STATUS deviceControl_DISPATCH(struct Device *pResource, struct CALL_CONTEXT *pCallContext, struct RS_RES_CONTROL_PARAMS_INTERNAL *pParams) {
return pResource->__deviceControl__(pResource, pCallContext, pParams);
}
NV_STATUS deviceInternalControlForward_IMPL(struct Device *pDevice, NvU32 command, void *pParams, NvU32 size);
static inline NV_STATUS deviceInternalControlForward_DISPATCH(struct Device *pDevice, NvU32 command, void *pParams, NvU32 size) {
return pDevice->__deviceInternalControlForward__(pDevice, command, pParams, size);
}
NV_STATUS deviceCtrlCmdGpuGetClasslist_IMPL(struct Device *pDevice, NV0080_CTRL_GPU_GET_CLASSLIST_PARAMS *pClassListParams);
static inline NV_STATUS deviceCtrlCmdGpuGetClasslist_DISPATCH(struct Device *pDevice, NV0080_CTRL_GPU_GET_CLASSLIST_PARAMS *pClassListParams) {
return pDevice->__deviceCtrlCmdGpuGetClasslist__(pDevice, pClassListParams);
}
NV_STATUS deviceCtrlCmdGpuGetClasslistV2_IMPL(struct Device *pDevice, NV0080_CTRL_GPU_GET_CLASSLIST_V2_PARAMS *pParams);
static inline NV_STATUS deviceCtrlCmdGpuGetClasslistV2_DISPATCH(struct Device *pDevice, NV0080_CTRL_GPU_GET_CLASSLIST_V2_PARAMS *pParams) {
return pDevice->__deviceCtrlCmdGpuGetClasslistV2__(pDevice, pParams);
}
NV_STATUS deviceCtrlCmdGpuGetNumSubdevices_IMPL(struct Device *pDevice, NV0080_CTRL_GPU_GET_NUM_SUBDEVICES_PARAMS *pSubDeviceCountParams);
static inline NV_STATUS deviceCtrlCmdGpuGetNumSubdevices_DISPATCH(struct Device *pDevice, NV0080_CTRL_GPU_GET_NUM_SUBDEVICES_PARAMS *pSubDeviceCountParams) {
return pDevice->__deviceCtrlCmdGpuGetNumSubdevices__(pDevice, pSubDeviceCountParams);
}
NV_STATUS deviceCtrlCmdGpuModifyGpuSwStatePersistence_IMPL(struct Device *pDevice, NV0080_CTRL_GPU_MODIFY_SW_STATE_PERSISTENCE_PARAMS *pParams);
static inline NV_STATUS deviceCtrlCmdGpuModifyGpuSwStatePersistence_DISPATCH(struct Device *pDevice, NV0080_CTRL_GPU_MODIFY_SW_STATE_PERSISTENCE_PARAMS *pParams) {
return pDevice->__deviceCtrlCmdGpuModifyGpuSwStatePersistence__(pDevice, pParams);
}
NV_STATUS deviceCtrlCmdGpuQueryGpuSwStatePersistence_IMPL(struct Device *pDevice, NV0080_CTRL_GPU_QUERY_SW_STATE_PERSISTENCE_PARAMS *pParams);
static inline NV_STATUS deviceCtrlCmdGpuQueryGpuSwStatePersistence_DISPATCH(struct Device *pDevice, NV0080_CTRL_GPU_QUERY_SW_STATE_PERSISTENCE_PARAMS *pParams) {
return pDevice->__deviceCtrlCmdGpuQueryGpuSwStatePersistence__(pDevice, pParams);
}
NV_STATUS deviceCtrlCmdGpuGetVirtualizationMode_IMPL(struct Device *pDevice, NV0080_CTRL_GPU_GET_VIRTUALIZATION_MODE_PARAMS *pParams);
static inline NV_STATUS deviceCtrlCmdGpuGetVirtualizationMode_DISPATCH(struct Device *pDevice, NV0080_CTRL_GPU_GET_VIRTUALIZATION_MODE_PARAMS *pParams) {
return pDevice->__deviceCtrlCmdGpuGetVirtualizationMode__(pDevice, pParams);
}
NV_STATUS deviceCtrlCmdGpuSetVgpuVfBar1Size_IMPL(struct Device *pDevice, NV0080_CTRL_GPU_SET_VGPU_VF_BAR1_SIZE_PARAMS *pParams);
static inline NV_STATUS deviceCtrlCmdGpuSetVgpuVfBar1Size_DISPATCH(struct Device *pDevice, NV0080_CTRL_GPU_SET_VGPU_VF_BAR1_SIZE_PARAMS *pParams) {
return pDevice->__deviceCtrlCmdGpuSetVgpuVfBar1Size__(pDevice, pParams);
}
NV_STATUS deviceCtrlCmdGpuGetBrandCaps_IMPL(struct Device *pDevice, NV0080_CTRL_GPU_GET_BRAND_CAPS_PARAMS *pParams);
static inline NV_STATUS deviceCtrlCmdGpuGetBrandCaps_DISPATCH(struct Device *pDevice, NV0080_CTRL_GPU_GET_BRAND_CAPS_PARAMS *pParams) {
return pDevice->__deviceCtrlCmdGpuGetBrandCaps__(pDevice, pParams);
}
NV_STATUS deviceCtrlCmdGpuGetFindSubDeviceHandle_IMPL(struct Device *pDevice, NV0080_CTRL_GPU_FIND_SUBDEVICE_HANDLE_PARAM *pParams);
static inline NV_STATUS deviceCtrlCmdGpuGetFindSubDeviceHandle_DISPATCH(struct Device *pDevice, NV0080_CTRL_GPU_FIND_SUBDEVICE_HANDLE_PARAM *pParams) {
return pDevice->__deviceCtrlCmdGpuGetFindSubDeviceHandle__(pDevice, pParams);
}
NV_STATUS deviceCtrlCmdOsUnixVTSwitch_IMPL(struct Device *pDevice, NV0080_CTRL_OS_UNIX_VT_SWITCH_PARAMS *pParams);
static inline NV_STATUS deviceCtrlCmdOsUnixVTSwitch_DISPATCH(struct Device *pDevice, NV0080_CTRL_OS_UNIX_VT_SWITCH_PARAMS *pParams) {
return pDevice->__deviceCtrlCmdOsUnixVTSwitch__(pDevice, pParams);
}
NV_STATUS deviceCtrlCmdOsUnixVTGetFBInfo_IMPL(struct Device *pDevice, NV0080_CTRL_OS_UNIX_VT_GET_FB_INFO_PARAMS *pParams);
static inline NV_STATUS deviceCtrlCmdOsUnixVTGetFBInfo_DISPATCH(struct Device *pDevice, NV0080_CTRL_OS_UNIX_VT_GET_FB_INFO_PARAMS *pParams) {
return pDevice->__deviceCtrlCmdOsUnixVTGetFBInfo__(pDevice, pParams);
}
static inline NvBool deviceShareCallback_DISPATCH(struct Device *pGpuResource, struct RsClient *pInvokingClient, struct RsResourceRef *pParentRef, RS_SHARE_POLICY *pSharePolicy) {
return pGpuResource->__deviceShareCallback__(pGpuResource, pInvokingClient, pParentRef, pSharePolicy);
}
static inline NV_STATUS deviceUnmap_DISPATCH(struct Device *pGpuResource, struct CALL_CONTEXT *pCallContext, struct RsCpuMapping *pCpuMapping) {
return pGpuResource->__deviceUnmap__(pGpuResource, pCallContext, pCpuMapping);
}
static inline NV_STATUS deviceGetMemInterMapParams_DISPATCH(struct Device *pRmResource, RMRES_MEM_INTER_MAP_PARAMS *pParams) {
return pRmResource->__deviceGetMemInterMapParams__(pRmResource, pParams);
}
static inline NV_STATUS deviceGetMemoryMappingDescriptor_DISPATCH(struct Device *pRmResource, struct MEMORY_DESCRIPTOR **ppMemDesc) {
return pRmResource->__deviceGetMemoryMappingDescriptor__(pRmResource, ppMemDesc);
}
static inline NV_STATUS deviceGetMapAddrSpace_DISPATCH(struct Device *pGpuResource, struct CALL_CONTEXT *pCallContext, NvU32 mapFlags, NV_ADDRESS_SPACE *pAddrSpace) {
return pGpuResource->__deviceGetMapAddrSpace__(pGpuResource, pCallContext, mapFlags, pAddrSpace);
}
static inline NvHandle deviceGetInternalObjectHandle_DISPATCH(struct Device *pGpuResource) {
return pGpuResource->__deviceGetInternalObjectHandle__(pGpuResource);
}
static inline NV_STATUS deviceControlFilter_DISPATCH(struct Device *pResource, struct CALL_CONTEXT *pCallContext, struct RS_RES_CONTROL_PARAMS_INTERNAL *pParams) {
return pResource->__deviceControlFilter__(pResource, pCallContext, pParams);
}
static inline void deviceAddAdditionalDependants_DISPATCH(struct RsClient *pClient, struct Device *pResource, RsResourceRef *pReference) {
pResource->__deviceAddAdditionalDependants__(pClient, pResource, pReference);
}
static inline NvU32 deviceGetRefCount_DISPATCH(struct Device *pResource) {
return pResource->__deviceGetRefCount__(pResource);
}
static inline NV_STATUS deviceCheckMemInterUnmap_DISPATCH(struct Device *pRmResource, NvBool bSubdeviceHandleProvided) {
return pRmResource->__deviceCheckMemInterUnmap__(pRmResource, bSubdeviceHandleProvided);
}
static inline NV_STATUS deviceMapTo_DISPATCH(struct Device *pResource, RS_RES_MAP_TO_PARAMS *pParams) {
return pResource->__deviceMapTo__(pResource, pParams);
}
static inline NV_STATUS deviceControl_Prologue_DISPATCH(struct Device *pResource, struct CALL_CONTEXT *pCallContext, struct RS_RES_CONTROL_PARAMS_INTERNAL *pParams) {
return pResource->__deviceControl_Prologue__(pResource, pCallContext, pParams);
}
static inline NV_STATUS deviceGetRegBaseOffsetAndSize_DISPATCH(struct Device *pGpuResource, struct OBJGPU *pGpu, NvU32 *pOffset, NvU32 *pSize) {
return pGpuResource->__deviceGetRegBaseOffsetAndSize__(pGpuResource, pGpu, pOffset, pSize);
}
static inline NvBool deviceCanCopy_DISPATCH(struct Device *pResource) {
return pResource->__deviceCanCopy__(pResource);
}
static inline void devicePreDestruct_DISPATCH(struct Device *pResource) {
pResource->__devicePreDestruct__(pResource);
}
static inline NV_STATUS deviceUnmapFrom_DISPATCH(struct Device *pResource, RS_RES_UNMAP_FROM_PARAMS *pParams) {
return pResource->__deviceUnmapFrom__(pResource, pParams);
}
static inline void deviceControl_Epilogue_DISPATCH(struct Device *pResource, struct CALL_CONTEXT *pCallContext, struct RS_RES_CONTROL_PARAMS_INTERNAL *pParams) {
pResource->__deviceControl_Epilogue__(pResource, pCallContext, pParams);
}
static inline NV_STATUS deviceControlLookup_DISPATCH(struct Device *pResource, struct RS_RES_CONTROL_PARAMS_INTERNAL *pParams, const struct NVOC_EXPORTED_METHOD_DEF **ppEntry) {
return pResource->__deviceControlLookup__(pResource, pParams, ppEntry);
}
static inline NV_STATUS deviceMap_DISPATCH(struct Device *pGpuResource, struct CALL_CONTEXT *pCallContext, struct RS_CPU_MAP_PARAMS *pParams, struct RsCpuMapping *pCpuMapping) {
return pGpuResource->__deviceMap__(pGpuResource, pCallContext, pParams, pCpuMapping);
}
static inline NvBool deviceAccessCallback_DISPATCH(struct Device *pResource, struct RsClient *pInvokingClient, void *pAllocParams, RsAccessRight accessRight) {
return pResource->__deviceAccessCallback__(pResource, pInvokingClient, pAllocParams, accessRight);
}
static inline NV_STATUS deviceSetDefaultVASpace(struct Device *pDevice, NvHandle hVASpace) {
return NV_OK;
}
NV_STATUS deviceConstruct_IMPL(struct Device *arg_pResource, struct CALL_CONTEXT *arg_pCallContext, struct RS_RES_ALLOC_PARAMS_INTERNAL *arg_pParams);
#define __nvoc_deviceConstruct(arg_pResource, arg_pCallContext, arg_pParams) deviceConstruct_IMPL(arg_pResource, arg_pCallContext, arg_pParams)
void deviceDestruct_IMPL(struct Device *pResource);
#define __nvoc_deviceDestruct(pResource) deviceDestruct_IMPL(pResource)
NV_STATUS deviceInit_IMPL(struct Device *pDevice, struct CALL_CONTEXT *pCallContext, NvHandle hClient, NvHandle hDevice, NvU32 deviceInst, NvHandle hClientShare, NvHandle hTargetClient, NvHandle hTargetDevice, NvU64 vaSize, NvU64 vaStartInternal, NvU64 vaLimitInternal, NvU32 allocFlags, NvU32 vaMode);
#ifdef __nvoc_device_h_disabled
static inline NV_STATUS deviceInit(struct Device *pDevice, struct CALL_CONTEXT *pCallContext, NvHandle hClient, NvHandle hDevice, NvU32 deviceInst, NvHandle hClientShare, NvHandle hTargetClient, NvHandle hTargetDevice, NvU64 vaSize, NvU64 vaStartInternal, NvU64 vaLimitInternal, NvU32 allocFlags, NvU32 vaMode) {
NV_ASSERT_FAILED_PRECOMP("Device was disabled!");
return NV_ERR_NOT_SUPPORTED;
}
#else //__nvoc_device_h_disabled
#define deviceInit(pDevice, pCallContext, hClient, hDevice, deviceInst, hClientShare, hTargetClient, hTargetDevice, vaSize, vaStartInternal, vaLimitInternal, allocFlags, vaMode) deviceInit_IMPL(pDevice, pCallContext, hClient, hDevice, deviceInst, hClientShare, hTargetClient, hTargetDevice, vaSize, vaStartInternal, vaLimitInternal, allocFlags, vaMode)
#endif //__nvoc_device_h_disabled
NV_STATUS deviceGetByHandle_IMPL(struct RsClient *pClient, NvHandle hDevice, struct Device **ppDevice);
#define deviceGetByHandle(pClient, hDevice, ppDevice) deviceGetByHandle_IMPL(pClient, hDevice, ppDevice)
NV_STATUS deviceGetByInstance_IMPL(struct RsClient *pClient, NvU32 deviceInstance, struct Device **ppDevice);
#define deviceGetByInstance(pClient, deviceInstance, ppDevice) deviceGetByInstance_IMPL(pClient, deviceInstance, ppDevice)
NV_STATUS deviceGetByGpu_IMPL(struct RsClient *pClient, struct OBJGPU *pGpu, NvBool bAnyInGroup, struct Device **ppDevice);
#define deviceGetByGpu(pClient, pGpu, bAnyInGroup, ppDevice) deviceGetByGpu_IMPL(pClient, pGpu, bAnyInGroup, ppDevice)
NV_STATUS deviceGetDefaultVASpace_IMPL(struct Device *pDevice, struct OBJVASPACE **ppVAS);
#ifdef __nvoc_device_h_disabled
static inline NV_STATUS deviceGetDefaultVASpace(struct Device *pDevice, struct OBJVASPACE **ppVAS) {
NV_ASSERT_FAILED_PRECOMP("Device was disabled!");
return NV_ERR_NOT_SUPPORTED;
}
#else //__nvoc_device_h_disabled
#define deviceGetDefaultVASpace(pDevice, ppVAS) deviceGetDefaultVASpace_IMPL(pDevice, ppVAS)
#endif //__nvoc_device_h_disabled
NV_STATUS deviceSetClientShare_IMPL(struct Device *pDevice, NvHandle hClientShare, NvU64 vaSize, NvU64 vaStartInternal, NvU64 vaLimitInternal, NvU32 deviceAllocFlags);
#ifdef __nvoc_device_h_disabled
static inline NV_STATUS deviceSetClientShare(struct Device *pDevice, NvHandle hClientShare, NvU64 vaSize, NvU64 vaStartInternal, NvU64 vaLimitInternal, NvU32 deviceAllocFlags) {
NV_ASSERT_FAILED_PRECOMP("Device was disabled!");
return NV_ERR_NOT_SUPPORTED;
}
#else //__nvoc_device_h_disabled
#define deviceSetClientShare(pDevice, hClientShare, vaSize, vaStartInternal, vaLimitInternal, deviceAllocFlags) deviceSetClientShare_IMPL(pDevice, hClientShare, vaSize, vaStartInternal, vaLimitInternal, deviceAllocFlags)
#endif //__nvoc_device_h_disabled
void deviceRemoveFromClientShare_IMPL(struct Device *pDevice);
#ifdef __nvoc_device_h_disabled
static inline void deviceRemoveFromClientShare(struct Device *pDevice) {
NV_ASSERT_FAILED_PRECOMP("Device was disabled!");
}
#else //__nvoc_device_h_disabled
#define deviceRemoveFromClientShare(pDevice) deviceRemoveFromClientShare_IMPL(pDevice)
#endif //__nvoc_device_h_disabled
#undef PRIVATE_FIELD
// ****************************************************************************
// Deprecated Definitions
// ****************************************************************************
/**
* WARNING: This function is deprecated! Please use deviceGetByHandle.
*/
struct Device *CliGetDeviceInfo(NvHandle, NvHandle);
/**
* WARNING: This function is deprecated and use is *strongly* discouraged
* (especially for new code!)
*
* From its name, CliSetGpuContext looks like a simple accessor, but it also
* modifies the SLI BC threadstate (it calls GPU_RES_SET_THREAD_BC_STATE),
* which can be dangerous if the caller does not manage that state carefully.
*
* Instead of using this routine, please use deviceGetByHandle then call
* GPU_RES_GET_GPU, GPU_RES_GET_GPUGRP, GPU_RES_SET_THREAD_BC_STATE as needed.
*
* Note that GPU_RES_GET_GPU can return a pGpu for a pDevice, a pSubdevice,
* the base pResource type, and any resource that inherits from GpuResource.
* That is, instead of using CliSetGpuContext or CliSetSubDeviceContext,
* please use the following pattern to look up the pGpu:
*
* OBJGPU *pGpu = GPU_RES_GET_GPU(pResource or pResourceRef->pResource)
*
* To set the threadstate, please use:
*
* GPU_RES_SET_THREAD_BC_STATE(pResource or pResourceRef->pResource);
*/
NV_STATUS CliSetGpuContext(NvHandle, NvHandle, OBJGPU **, struct OBJGPUGRP **);
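/*
 * Illustrative sketch only (not part of the original header): one possible way
 * to apply the deviceGetByHandle + GPU_RES_GET_GPU pattern recommended above.
 * The helper name and error handling below are hypothetical; only
 * deviceGetByHandle, GPU_RES_GET_GPU, and GPU_RES_SET_THREAD_BC_STATE are
 * taken from this header's own documentation.
 */
#if 0 /* example only, not compiled */
static NV_STATUS exampleLookupGpuByDeviceHandle(struct RsClient *pClient, NvHandle hDevice, OBJGPU **ppGpu)
{
    struct Device *pDevice = NULL;
    NV_STATUS status = deviceGetByHandle(pClient, hDevice, &pDevice);
    if (status != NV_OK)
        return status;

    /* GPU_RES_GET_GPU works on pDevice (or any resource derived from GpuResource) */
    *ppGpu = GPU_RES_GET_GPU(pDevice);

    /* Set the SLI BC threadstate explicitly, rather than relying on
     * CliSetGpuContext to do it as a side effect. */
    GPU_RES_SET_THREAD_BC_STATE(pDevice);
    return NV_OK;
}
#endif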
/**
* WARNING: This function is deprecated! Please use gpuGetByRef()
*/
OBJGPU *CliGetGpuFromContext(RsResourceRef *pContextRef, NvBool *pbBroadcast);
/**
* WARNING: This function is deprecated! Please use gpuGetByHandle()
*/
OBJGPU *CliGetGpuFromHandle(NvHandle hClient, NvHandle hResource, NvBool *pbBroadcast);
#endif
#ifdef __cplusplus
} // extern "C"
#endif
#endif // _G_DEVICE_NVOC_H_


@@ -0,0 +1,329 @@
#define NVOC_DISP_CAPABILITIES_H_PRIVATE_ACCESS_ALLOWED
#include "nvoc/runtime.h"
#include "nvoc/rtti.h"
#include "nvtypes.h"
#include "nvport/nvport.h"
#include "nvport/inline/util_valist.h"
#include "utils/nvassert.h"
#include "g_disp_capabilities_nvoc.h"
#ifdef DEBUG
char __nvoc_class_id_uniqueness_check_0x99db3e = 1;
#endif
extern const struct NVOC_CLASS_DEF __nvoc_class_def_DispCapabilities;
extern const struct NVOC_CLASS_DEF __nvoc_class_def_Object;
extern const struct NVOC_CLASS_DEF __nvoc_class_def_RsResource;
extern const struct NVOC_CLASS_DEF __nvoc_class_def_RmResourceCommon;
extern const struct NVOC_CLASS_DEF __nvoc_class_def_RmResource;
extern const struct NVOC_CLASS_DEF __nvoc_class_def_GpuResource;
void __nvoc_init_DispCapabilities(DispCapabilities*);
void __nvoc_init_funcTable_DispCapabilities(DispCapabilities*);
NV_STATUS __nvoc_ctor_DispCapabilities(DispCapabilities*, struct CALL_CONTEXT * arg_pCallContext, struct RS_RES_ALLOC_PARAMS_INTERNAL * arg_pParams);
void __nvoc_init_dataField_DispCapabilities(DispCapabilities*);
void __nvoc_dtor_DispCapabilities(DispCapabilities*);
extern const struct NVOC_EXPORT_INFO __nvoc_export_info_DispCapabilities;
static const struct NVOC_RTTI __nvoc_rtti_DispCapabilities_DispCapabilities = {
/*pClassDef=*/ &__nvoc_class_def_DispCapabilities,
/*dtor=*/ (NVOC_DYNAMIC_DTOR) &__nvoc_dtor_DispCapabilities,
/*offset=*/ 0,
};
static const struct NVOC_RTTI __nvoc_rtti_DispCapabilities_Object = {
/*pClassDef=*/ &__nvoc_class_def_Object,
/*dtor=*/ &__nvoc_destructFromBase,
/*offset=*/ NV_OFFSETOF(DispCapabilities, __nvoc_base_GpuResource.__nvoc_base_RmResource.__nvoc_base_RsResource.__nvoc_base_Object),
};
static const struct NVOC_RTTI __nvoc_rtti_DispCapabilities_RsResource = {
/*pClassDef=*/ &__nvoc_class_def_RsResource,
/*dtor=*/ &__nvoc_destructFromBase,
/*offset=*/ NV_OFFSETOF(DispCapabilities, __nvoc_base_GpuResource.__nvoc_base_RmResource.__nvoc_base_RsResource),
};
static const struct NVOC_RTTI __nvoc_rtti_DispCapabilities_RmResourceCommon = {
/*pClassDef=*/ &__nvoc_class_def_RmResourceCommon,
/*dtor=*/ &__nvoc_destructFromBase,
/*offset=*/ NV_OFFSETOF(DispCapabilities, __nvoc_base_GpuResource.__nvoc_base_RmResource.__nvoc_base_RmResourceCommon),
};
static const struct NVOC_RTTI __nvoc_rtti_DispCapabilities_RmResource = {
/*pClassDef=*/ &__nvoc_class_def_RmResource,
/*dtor=*/ &__nvoc_destructFromBase,
/*offset=*/ NV_OFFSETOF(DispCapabilities, __nvoc_base_GpuResource.__nvoc_base_RmResource),
};
static const struct NVOC_RTTI __nvoc_rtti_DispCapabilities_GpuResource = {
/*pClassDef=*/ &__nvoc_class_def_GpuResource,
/*dtor=*/ &__nvoc_destructFromBase,
/*offset=*/ NV_OFFSETOF(DispCapabilities, __nvoc_base_GpuResource),
};
static const struct NVOC_CASTINFO __nvoc_castinfo_DispCapabilities = {
/*numRelatives=*/ 6,
/*relatives=*/ {
&__nvoc_rtti_DispCapabilities_DispCapabilities,
&__nvoc_rtti_DispCapabilities_GpuResource,
&__nvoc_rtti_DispCapabilities_RmResource,
&__nvoc_rtti_DispCapabilities_RmResourceCommon,
&__nvoc_rtti_DispCapabilities_RsResource,
&__nvoc_rtti_DispCapabilities_Object,
},
};
const struct NVOC_CLASS_DEF __nvoc_class_def_DispCapabilities =
{
/*classInfo=*/ {
/*size=*/ sizeof(DispCapabilities),
/*classId=*/ classId(DispCapabilities),
/*providerId=*/ &__nvoc_rtti_provider,
#if NV_PRINTF_STRINGS_ALLOWED
/*name=*/ "DispCapabilities",
#endif
},
/*objCreatefn=*/ (NVOC_DYNAMIC_OBJ_CREATE) &__nvoc_objCreateDynamic_DispCapabilities,
/*pCastInfo=*/ &__nvoc_castinfo_DispCapabilities,
/*pExportInfo=*/ &__nvoc_export_info_DispCapabilities
};
static NV_STATUS __nvoc_thunk_DispCapabilities_gpuresGetRegBaseOffsetAndSize(struct GpuResource *pDispCapabilities, struct OBJGPU *pGpu, NvU32 *pOffset, NvU32 *pSize) {
return dispcapGetRegBaseOffsetAndSize((struct DispCapabilities *)(((unsigned char *)pDispCapabilities) - __nvoc_rtti_DispCapabilities_GpuResource.offset), pGpu, pOffset, pSize);
}
static NvBool __nvoc_thunk_GpuResource_dispcapShareCallback(struct DispCapabilities *pGpuResource, struct RsClient *pInvokingClient, struct RsResourceRef *pParentRef, RS_SHARE_POLICY *pSharePolicy) {
return gpuresShareCallback((struct GpuResource *)(((unsigned char *)pGpuResource) + __nvoc_rtti_DispCapabilities_GpuResource.offset), pInvokingClient, pParentRef, pSharePolicy);
}
static NV_STATUS __nvoc_thunk_GpuResource_dispcapControl(struct DispCapabilities *pGpuResource, struct CALL_CONTEXT *pCallContext, struct RS_RES_CONTROL_PARAMS_INTERNAL *pParams) {
return gpuresControl((struct GpuResource *)(((unsigned char *)pGpuResource) + __nvoc_rtti_DispCapabilities_GpuResource.offset), pCallContext, pParams);
}
static NV_STATUS __nvoc_thunk_GpuResource_dispcapUnmap(struct DispCapabilities *pGpuResource, struct CALL_CONTEXT *pCallContext, struct RsCpuMapping *pCpuMapping) {
return gpuresUnmap((struct GpuResource *)(((unsigned char *)pGpuResource) + __nvoc_rtti_DispCapabilities_GpuResource.offset), pCallContext, pCpuMapping);
}
static NV_STATUS __nvoc_thunk_RmResource_dispcapGetMemInterMapParams(struct DispCapabilities *pRmResource, RMRES_MEM_INTER_MAP_PARAMS *pParams) {
return rmresGetMemInterMapParams((struct RmResource *)(((unsigned char *)pRmResource) + __nvoc_rtti_DispCapabilities_RmResource.offset), pParams);
}
static NV_STATUS __nvoc_thunk_RmResource_dispcapGetMemoryMappingDescriptor(struct DispCapabilities *pRmResource, struct MEMORY_DESCRIPTOR **ppMemDesc) {
return rmresGetMemoryMappingDescriptor((struct RmResource *)(((unsigned char *)pRmResource) + __nvoc_rtti_DispCapabilities_RmResource.offset), ppMemDesc);
}
static NV_STATUS __nvoc_thunk_GpuResource_dispcapGetMapAddrSpace(struct DispCapabilities *pGpuResource, struct CALL_CONTEXT *pCallContext, NvU32 mapFlags, NV_ADDRESS_SPACE *pAddrSpace) {
return gpuresGetMapAddrSpace((struct GpuResource *)(((unsigned char *)pGpuResource) + __nvoc_rtti_DispCapabilities_GpuResource.offset), pCallContext, mapFlags, pAddrSpace);
}
static NvHandle __nvoc_thunk_GpuResource_dispcapGetInternalObjectHandle(struct DispCapabilities *pGpuResource) {
return gpuresGetInternalObjectHandle((struct GpuResource *)(((unsigned char *)pGpuResource) + __nvoc_rtti_DispCapabilities_GpuResource.offset));
}
static NV_STATUS __nvoc_thunk_RsResource_dispcapControlFilter(struct DispCapabilities *pResource, struct CALL_CONTEXT *pCallContext, struct RS_RES_CONTROL_PARAMS_INTERNAL *pParams) {
return resControlFilter((struct RsResource *)(((unsigned char *)pResource) + __nvoc_rtti_DispCapabilities_RsResource.offset), pCallContext, pParams);
}
static void __nvoc_thunk_RsResource_dispcapAddAdditionalDependants(struct RsClient *pClient, struct DispCapabilities *pResource, RsResourceRef *pReference) {
resAddAdditionalDependants(pClient, (struct RsResource *)(((unsigned char *)pResource) + __nvoc_rtti_DispCapabilities_RsResource.offset), pReference);
}
static NvU32 __nvoc_thunk_RsResource_dispcapGetRefCount(struct DispCapabilities *pResource) {
return resGetRefCount((struct RsResource *)(((unsigned char *)pResource) + __nvoc_rtti_DispCapabilities_RsResource.offset));
}
static NV_STATUS __nvoc_thunk_RmResource_dispcapCheckMemInterUnmap(struct DispCapabilities *pRmResource, NvBool bSubdeviceHandleProvided) {
return rmresCheckMemInterUnmap((struct RmResource *)(((unsigned char *)pRmResource) + __nvoc_rtti_DispCapabilities_RmResource.offset), bSubdeviceHandleProvided);
}
static NV_STATUS __nvoc_thunk_RsResource_dispcapMapTo(struct DispCapabilities *pResource, RS_RES_MAP_TO_PARAMS *pParams) {
return resMapTo((struct RsResource *)(((unsigned char *)pResource) + __nvoc_rtti_DispCapabilities_RsResource.offset), pParams);
}
static NV_STATUS __nvoc_thunk_RmResource_dispcapControl_Prologue(struct DispCapabilities *pResource, struct CALL_CONTEXT *pCallContext, struct RS_RES_CONTROL_PARAMS_INTERNAL *pParams) {
return rmresControl_Prologue((struct RmResource *)(((unsigned char *)pResource) + __nvoc_rtti_DispCapabilities_RmResource.offset), pCallContext, pParams);
}
static NvBool __nvoc_thunk_RsResource_dispcapCanCopy(struct DispCapabilities *pResource) {
return resCanCopy((struct RsResource *)(((unsigned char *)pResource) + __nvoc_rtti_DispCapabilities_RsResource.offset));
}
static NV_STATUS __nvoc_thunk_GpuResource_dispcapInternalControlForward(struct DispCapabilities *pGpuResource, NvU32 command, void *pParams, NvU32 size) {
return gpuresInternalControlForward((struct GpuResource *)(((unsigned char *)pGpuResource) + __nvoc_rtti_DispCapabilities_GpuResource.offset), command, pParams, size);
}
static void __nvoc_thunk_RsResource_dispcapPreDestruct(struct DispCapabilities *pResource) {
resPreDestruct((struct RsResource *)(((unsigned char *)pResource) + __nvoc_rtti_DispCapabilities_RsResource.offset));
}
static NV_STATUS __nvoc_thunk_RsResource_dispcapUnmapFrom(struct DispCapabilities *pResource, RS_RES_UNMAP_FROM_PARAMS *pParams) {
return resUnmapFrom((struct RsResource *)(((unsigned char *)pResource) + __nvoc_rtti_DispCapabilities_RsResource.offset), pParams);
}
static void __nvoc_thunk_RmResource_dispcapControl_Epilogue(struct DispCapabilities *pResource, struct CALL_CONTEXT *pCallContext, struct RS_RES_CONTROL_PARAMS_INTERNAL *pParams) {
rmresControl_Epilogue((struct RmResource *)(((unsigned char *)pResource) + __nvoc_rtti_DispCapabilities_RmResource.offset), pCallContext, pParams);
}
static NV_STATUS __nvoc_thunk_RsResource_dispcapControlLookup(struct DispCapabilities *pResource, struct RS_RES_CONTROL_PARAMS_INTERNAL *pParams, const struct NVOC_EXPORTED_METHOD_DEF **ppEntry) {
return resControlLookup((struct RsResource *)(((unsigned char *)pResource) + __nvoc_rtti_DispCapabilities_RsResource.offset), pParams, ppEntry);
}
static NV_STATUS __nvoc_thunk_GpuResource_dispcapMap(struct DispCapabilities *pGpuResource, struct CALL_CONTEXT *pCallContext, struct RS_CPU_MAP_PARAMS *pParams, struct RsCpuMapping *pCpuMapping) {
return gpuresMap((struct GpuResource *)(((unsigned char *)pGpuResource) + __nvoc_rtti_DispCapabilities_GpuResource.offset), pCallContext, pParams, pCpuMapping);
}
static NvBool __nvoc_thunk_RmResource_dispcapAccessCallback(struct DispCapabilities *pResource, struct RsClient *pInvokingClient, void *pAllocParams, RsAccessRight accessRight) {
return rmresAccessCallback((struct RmResource *)(((unsigned char *)pResource) + __nvoc_rtti_DispCapabilities_RmResource.offset), pInvokingClient, pAllocParams, accessRight);
}
const struct NVOC_EXPORT_INFO __nvoc_export_info_DispCapabilities =
{
/*numEntries=*/ 0,
/*pExportEntries=*/ 0
};
void __nvoc_dtor_GpuResource(GpuResource*);
void __nvoc_dtor_DispCapabilities(DispCapabilities *pThis) {
__nvoc_dtor_GpuResource(&pThis->__nvoc_base_GpuResource);
PORT_UNREFERENCED_VARIABLE(pThis);
}
void __nvoc_init_dataField_DispCapabilities(DispCapabilities *pThis) {
PORT_UNREFERENCED_VARIABLE(pThis);
}
NV_STATUS __nvoc_ctor_GpuResource(GpuResource* , struct CALL_CONTEXT *, struct RS_RES_ALLOC_PARAMS_INTERNAL *);
NV_STATUS __nvoc_ctor_DispCapabilities(DispCapabilities *pThis, struct CALL_CONTEXT * arg_pCallContext, struct RS_RES_ALLOC_PARAMS_INTERNAL * arg_pParams) {
NV_STATUS status = NV_OK;
status = __nvoc_ctor_GpuResource(&pThis->__nvoc_base_GpuResource, arg_pCallContext, arg_pParams);
if (status != NV_OK) goto __nvoc_ctor_DispCapabilities_fail_GpuResource;
__nvoc_init_dataField_DispCapabilities(pThis);
status = __nvoc_dispcapConstruct(pThis, arg_pCallContext, arg_pParams);
if (status != NV_OK) goto __nvoc_ctor_DispCapabilities_fail__init;
goto __nvoc_ctor_DispCapabilities_exit; // Success
__nvoc_ctor_DispCapabilities_fail__init:
__nvoc_dtor_GpuResource(&pThis->__nvoc_base_GpuResource);
__nvoc_ctor_DispCapabilities_fail_GpuResource:
__nvoc_ctor_DispCapabilities_exit:
return status;
}
static void __nvoc_init_funcTable_DispCapabilities_1(DispCapabilities *pThis) {
PORT_UNREFERENCED_VARIABLE(pThis);
pThis->__dispcapGetRegBaseOffsetAndSize__ = &dispcapGetRegBaseOffsetAndSize_IMPL;
pThis->__nvoc_base_GpuResource.__gpuresGetRegBaseOffsetAndSize__ = &__nvoc_thunk_DispCapabilities_gpuresGetRegBaseOffsetAndSize;
pThis->__dispcapShareCallback__ = &__nvoc_thunk_GpuResource_dispcapShareCallback;
pThis->__dispcapControl__ = &__nvoc_thunk_GpuResource_dispcapControl;
pThis->__dispcapUnmap__ = &__nvoc_thunk_GpuResource_dispcapUnmap;
pThis->__dispcapGetMemInterMapParams__ = &__nvoc_thunk_RmResource_dispcapGetMemInterMapParams;
pThis->__dispcapGetMemoryMappingDescriptor__ = &__nvoc_thunk_RmResource_dispcapGetMemoryMappingDescriptor;
pThis->__dispcapGetMapAddrSpace__ = &__nvoc_thunk_GpuResource_dispcapGetMapAddrSpace;
pThis->__dispcapGetInternalObjectHandle__ = &__nvoc_thunk_GpuResource_dispcapGetInternalObjectHandle;
pThis->__dispcapControlFilter__ = &__nvoc_thunk_RsResource_dispcapControlFilter;
pThis->__dispcapAddAdditionalDependants__ = &__nvoc_thunk_RsResource_dispcapAddAdditionalDependants;
pThis->__dispcapGetRefCount__ = &__nvoc_thunk_RsResource_dispcapGetRefCount;
pThis->__dispcapCheckMemInterUnmap__ = &__nvoc_thunk_RmResource_dispcapCheckMemInterUnmap;
pThis->__dispcapMapTo__ = &__nvoc_thunk_RsResource_dispcapMapTo;
pThis->__dispcapControl_Prologue__ = &__nvoc_thunk_RmResource_dispcapControl_Prologue;
pThis->__dispcapCanCopy__ = &__nvoc_thunk_RsResource_dispcapCanCopy;
pThis->__dispcapInternalControlForward__ = &__nvoc_thunk_GpuResource_dispcapInternalControlForward;
pThis->__dispcapPreDestruct__ = &__nvoc_thunk_RsResource_dispcapPreDestruct;
pThis->__dispcapUnmapFrom__ = &__nvoc_thunk_RsResource_dispcapUnmapFrom;
pThis->__dispcapControl_Epilogue__ = &__nvoc_thunk_RmResource_dispcapControl_Epilogue;
pThis->__dispcapControlLookup__ = &__nvoc_thunk_RsResource_dispcapControlLookup;
pThis->__dispcapMap__ = &__nvoc_thunk_GpuResource_dispcapMap;
pThis->__dispcapAccessCallback__ = &__nvoc_thunk_RmResource_dispcapAccessCallback;
}
void __nvoc_init_funcTable_DispCapabilities(DispCapabilities *pThis) {
__nvoc_init_funcTable_DispCapabilities_1(pThis);
}
void __nvoc_init_GpuResource(GpuResource*);
void __nvoc_init_DispCapabilities(DispCapabilities *pThis) {
pThis->__nvoc_pbase_DispCapabilities = pThis;
pThis->__nvoc_pbase_Object = &pThis->__nvoc_base_GpuResource.__nvoc_base_RmResource.__nvoc_base_RsResource.__nvoc_base_Object;
pThis->__nvoc_pbase_RsResource = &pThis->__nvoc_base_GpuResource.__nvoc_base_RmResource.__nvoc_base_RsResource;
pThis->__nvoc_pbase_RmResourceCommon = &pThis->__nvoc_base_GpuResource.__nvoc_base_RmResource.__nvoc_base_RmResourceCommon;
pThis->__nvoc_pbase_RmResource = &pThis->__nvoc_base_GpuResource.__nvoc_base_RmResource;
pThis->__nvoc_pbase_GpuResource = &pThis->__nvoc_base_GpuResource;
__nvoc_init_GpuResource(&pThis->__nvoc_base_GpuResource);
__nvoc_init_funcTable_DispCapabilities(pThis);
}
NV_STATUS __nvoc_objCreate_DispCapabilities(DispCapabilities **ppThis, Dynamic *pParent, NvU32 createFlags, struct CALL_CONTEXT * arg_pCallContext, struct RS_RES_ALLOC_PARAMS_INTERNAL * arg_pParams) {
NV_STATUS status;
Object *pParentObj;
DispCapabilities *pThis;
pThis = portMemAllocNonPaged(sizeof(DispCapabilities));
if (pThis == NULL) return NV_ERR_NO_MEMORY;
portMemSet(pThis, 0, sizeof(DispCapabilities));
__nvoc_initRtti(staticCast(pThis, Dynamic), &__nvoc_class_def_DispCapabilities);
if (pParent != NULL && !(createFlags & NVOC_OBJ_CREATE_FLAGS_PARENT_HALSPEC_ONLY))
{
pParentObj = dynamicCast(pParent, Object);
objAddChild(pParentObj, &pThis->__nvoc_base_GpuResource.__nvoc_base_RmResource.__nvoc_base_RsResource.__nvoc_base_Object);
}
else
{
pThis->__nvoc_base_GpuResource.__nvoc_base_RmResource.__nvoc_base_RsResource.__nvoc_base_Object.pParent = NULL;
}
__nvoc_init_DispCapabilities(pThis);
status = __nvoc_ctor_DispCapabilities(pThis, arg_pCallContext, arg_pParams);
if (status != NV_OK) goto __nvoc_objCreate_DispCapabilities_cleanup;
*ppThis = pThis;
return NV_OK;
__nvoc_objCreate_DispCapabilities_cleanup:
// do not call destructors here since the constructor already called them
portMemFree(pThis);
return status;
}
NV_STATUS __nvoc_objCreateDynamic_DispCapabilities(DispCapabilities **ppThis, Dynamic *pParent, NvU32 createFlags, va_list args) {
NV_STATUS status;
struct CALL_CONTEXT * arg_pCallContext = va_arg(args, struct CALL_CONTEXT *);
struct RS_RES_ALLOC_PARAMS_INTERNAL * arg_pParams = va_arg(args, struct RS_RES_ALLOC_PARAMS_INTERNAL *);
status = __nvoc_objCreate_DispCapabilities(ppThis, pParent, createFlags, arg_pCallContext, arg_pParams);
return status;
}


@@ -0,0 +1,239 @@
#ifndef _G_DISP_CAPABILITIES_NVOC_H_
#define _G_DISP_CAPABILITIES_NVOC_H_
#include "nvoc/runtime.h"
#ifdef __cplusplus
extern "C" {
#endif
/*
* SPDX-FileCopyrightText: Copyright (c) 1993-2021 NVIDIA CORPORATION & AFFILIATES. All rights reserved.
* SPDX-License-Identifier: MIT
*
* Permission is hereby granted, free of charge, to any person obtaining a
* copy of this software and associated documentation files (the "Software"),
* to deal in the Software without restriction, including without limitation
* the rights to use, copy, modify, merge, publish, distribute, sublicense,
* and/or sell copies of the Software, and to permit persons to whom the
* Software is furnished to do so, subject to the following conditions:
*
* The above copyright notice and this permission notice shall be included in
* all copies or substantial portions of the Software.
*
* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
* IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
* FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL
* THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
* LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
* FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER
* DEALINGS IN THE SOFTWARE.
*/
/******************************************************************************
*
* Description:
* This file contains functions managing the DispCapabilities class.
*
******************************************************************************/
#include "g_disp_capabilities_nvoc.h"
#ifndef DISP_CAPABILITIES_H
#define DISP_CAPABILITIES_H
#include "gpu/gpu_resource.h"
/*!
* RM internal class representing NVXXXX_DISP_CAPABILITIES
*/
#ifdef NVOC_DISP_CAPABILITIES_H_PRIVATE_ACCESS_ALLOWED
#define PRIVATE_FIELD(x) x
#else
#define PRIVATE_FIELD(x) NVOC_PRIVATE_FIELD(x)
#endif
struct DispCapabilities {
const struct NVOC_RTTI *__nvoc_rtti;
struct GpuResource __nvoc_base_GpuResource;
struct Object *__nvoc_pbase_Object;
struct RsResource *__nvoc_pbase_RsResource;
struct RmResourceCommon *__nvoc_pbase_RmResourceCommon;
struct RmResource *__nvoc_pbase_RmResource;
struct GpuResource *__nvoc_pbase_GpuResource;
struct DispCapabilities *__nvoc_pbase_DispCapabilities;
NV_STATUS (*__dispcapGetRegBaseOffsetAndSize__)(struct DispCapabilities *, struct OBJGPU *, NvU32 *, NvU32 *);
NvBool (*__dispcapShareCallback__)(struct DispCapabilities *, struct RsClient *, struct RsResourceRef *, RS_SHARE_POLICY *);
NV_STATUS (*__dispcapControl__)(struct DispCapabilities *, struct CALL_CONTEXT *, struct RS_RES_CONTROL_PARAMS_INTERNAL *);
NV_STATUS (*__dispcapUnmap__)(struct DispCapabilities *, struct CALL_CONTEXT *, struct RsCpuMapping *);
NV_STATUS (*__dispcapGetMemInterMapParams__)(struct DispCapabilities *, RMRES_MEM_INTER_MAP_PARAMS *);
NV_STATUS (*__dispcapGetMemoryMappingDescriptor__)(struct DispCapabilities *, struct MEMORY_DESCRIPTOR **);
NV_STATUS (*__dispcapGetMapAddrSpace__)(struct DispCapabilities *, struct CALL_CONTEXT *, NvU32, NV_ADDRESS_SPACE *);
NvHandle (*__dispcapGetInternalObjectHandle__)(struct DispCapabilities *);
NV_STATUS (*__dispcapControlFilter__)(struct DispCapabilities *, struct CALL_CONTEXT *, struct RS_RES_CONTROL_PARAMS_INTERNAL *);
void (*__dispcapAddAdditionalDependants__)(struct RsClient *, struct DispCapabilities *, RsResourceRef *);
NvU32 (*__dispcapGetRefCount__)(struct DispCapabilities *);
NV_STATUS (*__dispcapCheckMemInterUnmap__)(struct DispCapabilities *, NvBool);
NV_STATUS (*__dispcapMapTo__)(struct DispCapabilities *, RS_RES_MAP_TO_PARAMS *);
NV_STATUS (*__dispcapControl_Prologue__)(struct DispCapabilities *, struct CALL_CONTEXT *, struct RS_RES_CONTROL_PARAMS_INTERNAL *);
NvBool (*__dispcapCanCopy__)(struct DispCapabilities *);
NV_STATUS (*__dispcapInternalControlForward__)(struct DispCapabilities *, NvU32, void *, NvU32);
void (*__dispcapPreDestruct__)(struct DispCapabilities *);
NV_STATUS (*__dispcapUnmapFrom__)(struct DispCapabilities *, RS_RES_UNMAP_FROM_PARAMS *);
void (*__dispcapControl_Epilogue__)(struct DispCapabilities *, struct CALL_CONTEXT *, struct RS_RES_CONTROL_PARAMS_INTERNAL *);
NV_STATUS (*__dispcapControlLookup__)(struct DispCapabilities *, struct RS_RES_CONTROL_PARAMS_INTERNAL *, const struct NVOC_EXPORTED_METHOD_DEF **);
NV_STATUS (*__dispcapMap__)(struct DispCapabilities *, struct CALL_CONTEXT *, struct RS_CPU_MAP_PARAMS *, struct RsCpuMapping *);
NvBool (*__dispcapAccessCallback__)(struct DispCapabilities *, struct RsClient *, void *, RsAccessRight);
NvU32 ControlOffset;
NvU32 ControlLength;
};
#ifndef __NVOC_CLASS_DispCapabilities_TYPEDEF__
#define __NVOC_CLASS_DispCapabilities_TYPEDEF__
typedef struct DispCapabilities DispCapabilities;
#endif /* __NVOC_CLASS_DispCapabilities_TYPEDEF__ */
#ifndef __nvoc_class_id_DispCapabilities
#define __nvoc_class_id_DispCapabilities 0x99db3e
#endif /* __nvoc_class_id_DispCapabilities */
extern const struct NVOC_CLASS_DEF __nvoc_class_def_DispCapabilities;
#define __staticCast_DispCapabilities(pThis) \
((pThis)->__nvoc_pbase_DispCapabilities)
#ifdef __nvoc_disp_capabilities_h_disabled
#define __dynamicCast_DispCapabilities(pThis) ((DispCapabilities*)NULL)
#else //__nvoc_disp_capabilities_h_disabled
#define __dynamicCast_DispCapabilities(pThis) \
((DispCapabilities*)__nvoc_dynamicCast(staticCast((pThis), Dynamic), classInfo(DispCapabilities)))
#endif //__nvoc_disp_capabilities_h_disabled
NV_STATUS __nvoc_objCreateDynamic_DispCapabilities(DispCapabilities**, Dynamic*, NvU32, va_list);
NV_STATUS __nvoc_objCreate_DispCapabilities(DispCapabilities**, Dynamic*, NvU32, struct CALL_CONTEXT * arg_pCallContext, struct RS_RES_ALLOC_PARAMS_INTERNAL * arg_pParams);
#define __objCreate_DispCapabilities(ppNewObj, pParent, createFlags, arg_pCallContext, arg_pParams) \
__nvoc_objCreate_DispCapabilities((ppNewObj), staticCast((pParent), Dynamic), (createFlags), arg_pCallContext, arg_pParams)
#define dispcapGetRegBaseOffsetAndSize(pDispCapabilities, pGpu, pOffset, pSize) dispcapGetRegBaseOffsetAndSize_DISPATCH(pDispCapabilities, pGpu, pOffset, pSize)
#define dispcapShareCallback(pGpuResource, pInvokingClient, pParentRef, pSharePolicy) dispcapShareCallback_DISPATCH(pGpuResource, pInvokingClient, pParentRef, pSharePolicy)
#define dispcapControl(pGpuResource, pCallContext, pParams) dispcapControl_DISPATCH(pGpuResource, pCallContext, pParams)
#define dispcapUnmap(pGpuResource, pCallContext, pCpuMapping) dispcapUnmap_DISPATCH(pGpuResource, pCallContext, pCpuMapping)
#define dispcapGetMemInterMapParams(pRmResource, pParams) dispcapGetMemInterMapParams_DISPATCH(pRmResource, pParams)
#define dispcapGetMemoryMappingDescriptor(pRmResource, ppMemDesc) dispcapGetMemoryMappingDescriptor_DISPATCH(pRmResource, ppMemDesc)
#define dispcapGetMapAddrSpace(pGpuResource, pCallContext, mapFlags, pAddrSpace) dispcapGetMapAddrSpace_DISPATCH(pGpuResource, pCallContext, mapFlags, pAddrSpace)
#define dispcapGetInternalObjectHandle(pGpuResource) dispcapGetInternalObjectHandle_DISPATCH(pGpuResource)
#define dispcapControlFilter(pResource, pCallContext, pParams) dispcapControlFilter_DISPATCH(pResource, pCallContext, pParams)
#define dispcapAddAdditionalDependants(pClient, pResource, pReference) dispcapAddAdditionalDependants_DISPATCH(pClient, pResource, pReference)
#define dispcapGetRefCount(pResource) dispcapGetRefCount_DISPATCH(pResource)
#define dispcapCheckMemInterUnmap(pRmResource, bSubdeviceHandleProvided) dispcapCheckMemInterUnmap_DISPATCH(pRmResource, bSubdeviceHandleProvided)
#define dispcapMapTo(pResource, pParams) dispcapMapTo_DISPATCH(pResource, pParams)
#define dispcapControl_Prologue(pResource, pCallContext, pParams) dispcapControl_Prologue_DISPATCH(pResource, pCallContext, pParams)
#define dispcapCanCopy(pResource) dispcapCanCopy_DISPATCH(pResource)
#define dispcapInternalControlForward(pGpuResource, command, pParams, size) dispcapInternalControlForward_DISPATCH(pGpuResource, command, pParams, size)
#define dispcapPreDestruct(pResource) dispcapPreDestruct_DISPATCH(pResource)
#define dispcapUnmapFrom(pResource, pParams) dispcapUnmapFrom_DISPATCH(pResource, pParams)
#define dispcapControl_Epilogue(pResource, pCallContext, pParams) dispcapControl_Epilogue_DISPATCH(pResource, pCallContext, pParams)
#define dispcapControlLookup(pResource, pParams, ppEntry) dispcapControlLookup_DISPATCH(pResource, pParams, ppEntry)
#define dispcapMap(pGpuResource, pCallContext, pParams, pCpuMapping) dispcapMap_DISPATCH(pGpuResource, pCallContext, pParams, pCpuMapping)
#define dispcapAccessCallback(pResource, pInvokingClient, pAllocParams, accessRight) dispcapAccessCallback_DISPATCH(pResource, pInvokingClient, pAllocParams, accessRight)
NV_STATUS dispcapGetRegBaseOffsetAndSize_IMPL(struct DispCapabilities *pDispCapabilities, struct OBJGPU *pGpu, NvU32 *pOffset, NvU32 *pSize);
static inline NV_STATUS dispcapGetRegBaseOffsetAndSize_DISPATCH(struct DispCapabilities *pDispCapabilities, struct OBJGPU *pGpu, NvU32 *pOffset, NvU32 *pSize) {
return pDispCapabilities->__dispcapGetRegBaseOffsetAndSize__(pDispCapabilities, pGpu, pOffset, pSize);
}
static inline NvBool dispcapShareCallback_DISPATCH(struct DispCapabilities *pGpuResource, struct RsClient *pInvokingClient, struct RsResourceRef *pParentRef, RS_SHARE_POLICY *pSharePolicy) {
return pGpuResource->__dispcapShareCallback__(pGpuResource, pInvokingClient, pParentRef, pSharePolicy);
}
static inline NV_STATUS dispcapControl_DISPATCH(struct DispCapabilities *pGpuResource, struct CALL_CONTEXT *pCallContext, struct RS_RES_CONTROL_PARAMS_INTERNAL *pParams) {
return pGpuResource->__dispcapControl__(pGpuResource, pCallContext, pParams);
}
static inline NV_STATUS dispcapUnmap_DISPATCH(struct DispCapabilities *pGpuResource, struct CALL_CONTEXT *pCallContext, struct RsCpuMapping *pCpuMapping) {
return pGpuResource->__dispcapUnmap__(pGpuResource, pCallContext, pCpuMapping);
}
static inline NV_STATUS dispcapGetMemInterMapParams_DISPATCH(struct DispCapabilities *pRmResource, RMRES_MEM_INTER_MAP_PARAMS *pParams) {
return pRmResource->__dispcapGetMemInterMapParams__(pRmResource, pParams);
}
static inline NV_STATUS dispcapGetMemoryMappingDescriptor_DISPATCH(struct DispCapabilities *pRmResource, struct MEMORY_DESCRIPTOR **ppMemDesc) {
return pRmResource->__dispcapGetMemoryMappingDescriptor__(pRmResource, ppMemDesc);
}
static inline NV_STATUS dispcapGetMapAddrSpace_DISPATCH(struct DispCapabilities *pGpuResource, struct CALL_CONTEXT *pCallContext, NvU32 mapFlags, NV_ADDRESS_SPACE *pAddrSpace) {
return pGpuResource->__dispcapGetMapAddrSpace__(pGpuResource, pCallContext, mapFlags, pAddrSpace);
}
static inline NvHandle dispcapGetInternalObjectHandle_DISPATCH(struct DispCapabilities *pGpuResource) {
return pGpuResource->__dispcapGetInternalObjectHandle__(pGpuResource);
}
static inline NV_STATUS dispcapControlFilter_DISPATCH(struct DispCapabilities *pResource, struct CALL_CONTEXT *pCallContext, struct RS_RES_CONTROL_PARAMS_INTERNAL *pParams) {
return pResource->__dispcapControlFilter__(pResource, pCallContext, pParams);
}
static inline void dispcapAddAdditionalDependants_DISPATCH(struct RsClient *pClient, struct DispCapabilities *pResource, RsResourceRef *pReference) {
pResource->__dispcapAddAdditionalDependants__(pClient, pResource, pReference);
}
static inline NvU32 dispcapGetRefCount_DISPATCH(struct DispCapabilities *pResource) {
return pResource->__dispcapGetRefCount__(pResource);
}
static inline NV_STATUS dispcapCheckMemInterUnmap_DISPATCH(struct DispCapabilities *pRmResource, NvBool bSubdeviceHandleProvided) {
return pRmResource->__dispcapCheckMemInterUnmap__(pRmResource, bSubdeviceHandleProvided);
}
static inline NV_STATUS dispcapMapTo_DISPATCH(struct DispCapabilities *pResource, RS_RES_MAP_TO_PARAMS *pParams) {
return pResource->__dispcapMapTo__(pResource, pParams);
}
static inline NV_STATUS dispcapControl_Prologue_DISPATCH(struct DispCapabilities *pResource, struct CALL_CONTEXT *pCallContext, struct RS_RES_CONTROL_PARAMS_INTERNAL *pParams) {
return pResource->__dispcapControl_Prologue__(pResource, pCallContext, pParams);
}
static inline NvBool dispcapCanCopy_DISPATCH(struct DispCapabilities *pResource) {
return pResource->__dispcapCanCopy__(pResource);
}
static inline NV_STATUS dispcapInternalControlForward_DISPATCH(struct DispCapabilities *pGpuResource, NvU32 command, void *pParams, NvU32 size) {
return pGpuResource->__dispcapInternalControlForward__(pGpuResource, command, pParams, size);
}
static inline void dispcapPreDestruct_DISPATCH(struct DispCapabilities *pResource) {
pResource->__dispcapPreDestruct__(pResource);
}
static inline NV_STATUS dispcapUnmapFrom_DISPATCH(struct DispCapabilities *pResource, RS_RES_UNMAP_FROM_PARAMS *pParams) {
return pResource->__dispcapUnmapFrom__(pResource, pParams);
}
static inline void dispcapControl_Epilogue_DISPATCH(struct DispCapabilities *pResource, struct CALL_CONTEXT *pCallContext, struct RS_RES_CONTROL_PARAMS_INTERNAL *pParams) {
pResource->__dispcapControl_Epilogue__(pResource, pCallContext, pParams);
}
static inline NV_STATUS dispcapControlLookup_DISPATCH(struct DispCapabilities *pResource, struct RS_RES_CONTROL_PARAMS_INTERNAL *pParams, const struct NVOC_EXPORTED_METHOD_DEF **ppEntry) {
return pResource->__dispcapControlLookup__(pResource, pParams, ppEntry);
}
static inline NV_STATUS dispcapMap_DISPATCH(struct DispCapabilities *pGpuResource, struct CALL_CONTEXT *pCallContext, struct RS_CPU_MAP_PARAMS *pParams, struct RsCpuMapping *pCpuMapping) {
return pGpuResource->__dispcapMap__(pGpuResource, pCallContext, pParams, pCpuMapping);
}
static inline NvBool dispcapAccessCallback_DISPATCH(struct DispCapabilities *pResource, struct RsClient *pInvokingClient, void *pAllocParams, RsAccessRight accessRight) {
return pResource->__dispcapAccessCallback__(pResource, pInvokingClient, pAllocParams, accessRight);
}
NV_STATUS dispcapConstruct_IMPL(struct DispCapabilities *arg_pDispCapabilities, struct CALL_CONTEXT *arg_pCallContext, struct RS_RES_ALLOC_PARAMS_INTERNAL *arg_pParams);
#define __nvoc_dispcapConstruct(arg_pDispCapabilities, arg_pCallContext, arg_pParams) dispcapConstruct_IMPL(arg_pDispCapabilities, arg_pCallContext, arg_pParams)
#undef PRIVATE_FIELD
#endif // DISP_CAPABILITIES_H
#ifdef __cplusplus
} // extern "C"
#endif
#endif // _G_DISP_CAPABILITIES_NVOC_H_


File diff suppressed because it is too large


@@ -0,0 +1,776 @@
#ifndef _G_DISP_CHANNEL_NVOC_H_
#define _G_DISP_CHANNEL_NVOC_H_
#include "nvoc/runtime.h"
#ifdef __cplusplus
extern "C" {
#endif
/*
* SPDX-FileCopyrightText: Copyright (c) 1993-2021 NVIDIA CORPORATION & AFFILIATES. All rights reserved.
* SPDX-License-Identifier: MIT
*
* Permission is hereby granted, free of charge, to any person obtaining a
* copy of this software and associated documentation files (the "Software"),
* to deal in the Software without restriction, including without limitation
* the rights to use, copy, modify, merge, publish, distribute, sublicense,
* and/or sell copies of the Software, and to permit persons to whom the
* Software is furnished to do so, subject to the following conditions:
*
* The above copyright notice and this permission notice shall be included in
* all copies or substantial portions of the Software.
*
* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
* IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
* FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL
* THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
* LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
* FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER
* DEALINGS IN THE SOFTWARE.
*/
/******************************************************************************
*
* Description:
* This file contains functions managing DispChannel and its derived classes.
*
******************************************************************************/
#include "g_disp_channel_nvoc.h"
#ifndef DISP_CHANNEL_H
#define DISP_CHANNEL_H
#include "gpu/gpu_resource.h"
#include "rmapi/event.h"
struct ContextDma;
#ifndef __NVOC_CLASS_ContextDma_TYPEDEF__
#define __NVOC_CLASS_ContextDma_TYPEDEF__
typedef struct ContextDma ContextDma;
#endif /* __NVOC_CLASS_ContextDma_TYPEDEF__ */
#ifndef __nvoc_class_id_ContextDma
#define __nvoc_class_id_ContextDma 0x88441b
#endif /* __nvoc_class_id_ContextDma */
struct DispObject;
#ifndef __NVOC_CLASS_DispObject_TYPEDEF__
#define __NVOC_CLASS_DispObject_TYPEDEF__
typedef struct DispObject DispObject;
#endif /* __NVOC_CLASS_DispObject_TYPEDEF__ */
#ifndef __nvoc_class_id_DispObject
#define __nvoc_class_id_DispObject 0x999839
#endif /* __nvoc_class_id_DispObject */
/*!
* Base class for display channels
*/
#ifdef NVOC_DISP_CHANNEL_H_PRIVATE_ACCESS_ALLOWED
#define PRIVATE_FIELD(x) x
#else
#define PRIVATE_FIELD(x) NVOC_PRIVATE_FIELD(x)
#endif
struct DispChannel {
const struct NVOC_RTTI *__nvoc_rtti;
struct GpuResource __nvoc_base_GpuResource;
struct Notifier __nvoc_base_Notifier;
struct Object *__nvoc_pbase_Object;
struct RsResource *__nvoc_pbase_RsResource;
struct RmResourceCommon *__nvoc_pbase_RmResourceCommon;
struct RmResource *__nvoc_pbase_RmResource;
struct GpuResource *__nvoc_pbase_GpuResource;
struct INotifier *__nvoc_pbase_INotifier;
struct Notifier *__nvoc_pbase_Notifier;
struct DispChannel *__nvoc_pbase_DispChannel;
NV_STATUS (*__dispchnGetRegBaseOffsetAndSize__)(struct DispChannel *, struct OBJGPU *, NvU32 *, NvU32 *);
NvBool (*__dispchnShareCallback__)(struct DispChannel *, struct RsClient *, struct RsResourceRef *, RS_SHARE_POLICY *);
NV_STATUS (*__dispchnMapTo__)(struct DispChannel *, RS_RES_MAP_TO_PARAMS *);
NV_STATUS (*__dispchnGetOrAllocNotifShare__)(struct DispChannel *, NvHandle, NvHandle, struct NotifShare **);
NV_STATUS (*__dispchnCheckMemInterUnmap__)(struct DispChannel *, NvBool);
NV_STATUS (*__dispchnGetMapAddrSpace__)(struct DispChannel *, struct CALL_CONTEXT *, NvU32, NV_ADDRESS_SPACE *);
void (*__dispchnSetNotificationShare__)(struct DispChannel *, struct NotifShare *);
NvU32 (*__dispchnGetRefCount__)(struct DispChannel *);
void (*__dispchnAddAdditionalDependants__)(struct RsClient *, struct DispChannel *, RsResourceRef *);
NV_STATUS (*__dispchnControl_Prologue__)(struct DispChannel *, struct CALL_CONTEXT *, struct RS_RES_CONTROL_PARAMS_INTERNAL *);
NV_STATUS (*__dispchnInternalControlForward__)(struct DispChannel *, NvU32, void *, NvU32);
NV_STATUS (*__dispchnUnmapFrom__)(struct DispChannel *, RS_RES_UNMAP_FROM_PARAMS *);
void (*__dispchnControl_Epilogue__)(struct DispChannel *, struct CALL_CONTEXT *, struct RS_RES_CONTROL_PARAMS_INTERNAL *);
NV_STATUS (*__dispchnControlLookup__)(struct DispChannel *, struct RS_RES_CONTROL_PARAMS_INTERNAL *, const struct NVOC_EXPORTED_METHOD_DEF **);
NvHandle (*__dispchnGetInternalObjectHandle__)(struct DispChannel *);
NV_STATUS (*__dispchnControl__)(struct DispChannel *, struct CALL_CONTEXT *, struct RS_RES_CONTROL_PARAMS_INTERNAL *);
NV_STATUS (*__dispchnUnmap__)(struct DispChannel *, struct CALL_CONTEXT *, struct RsCpuMapping *);
NV_STATUS (*__dispchnGetMemInterMapParams__)(struct DispChannel *, RMRES_MEM_INTER_MAP_PARAMS *);
NV_STATUS (*__dispchnGetMemoryMappingDescriptor__)(struct DispChannel *, struct MEMORY_DESCRIPTOR **);
NV_STATUS (*__dispchnControlFilter__)(struct DispChannel *, struct CALL_CONTEXT *, struct RS_RES_CONTROL_PARAMS_INTERNAL *);
NV_STATUS (*__dispchnUnregisterEvent__)(struct DispChannel *, NvHandle, NvHandle, NvHandle, NvHandle);
NvBool (*__dispchnCanCopy__)(struct DispChannel *);
void (*__dispchnPreDestruct__)(struct DispChannel *);
PEVENTNOTIFICATION *(*__dispchnGetNotificationListPtr__)(struct DispChannel *);
struct NotifShare *(*__dispchnGetNotificationShare__)(struct DispChannel *);
NV_STATUS (*__dispchnMap__)(struct DispChannel *, struct CALL_CONTEXT *, struct RS_CPU_MAP_PARAMS *, struct RsCpuMapping *);
NvBool (*__dispchnAccessCallback__)(struct DispChannel *, struct RsClient *, void *, RsAccessRight);
struct DispObject *pDispObject;
NvU32 DispClass;
NvU32 InstanceNumber;
NvP64 pControl;
NvP64 pPriv;
NvU32 ControlOffset;
NvU32 ControlLength;
NvBool bIsDma;
};
#ifndef __NVOC_CLASS_DispChannel_TYPEDEF__
#define __NVOC_CLASS_DispChannel_TYPEDEF__
typedef struct DispChannel DispChannel;
#endif /* __NVOC_CLASS_DispChannel_TYPEDEF__ */
#ifndef __nvoc_class_id_DispChannel
#define __nvoc_class_id_DispChannel 0xbd2ff3
#endif /* __nvoc_class_id_DispChannel */
extern const struct NVOC_CLASS_DEF __nvoc_class_def_DispChannel;
#define __staticCast_DispChannel(pThis) \
((pThis)->__nvoc_pbase_DispChannel)
#ifdef __nvoc_disp_channel_h_disabled
#define __dynamicCast_DispChannel(pThis) ((DispChannel*)NULL)
#else //__nvoc_disp_channel_h_disabled
#define __dynamicCast_DispChannel(pThis) \
((DispChannel*)__nvoc_dynamicCast(staticCast((pThis), Dynamic), classInfo(DispChannel)))
#endif //__nvoc_disp_channel_h_disabled
NV_STATUS __nvoc_objCreateDynamic_DispChannel(DispChannel**, Dynamic*, NvU32, va_list);
NV_STATUS __nvoc_objCreate_DispChannel(DispChannel**, Dynamic*, NvU32, struct CALL_CONTEXT * arg_pCallContext, struct RS_RES_ALLOC_PARAMS_INTERNAL * arg_pParams, NvU32 arg_isDma);
#define __objCreate_DispChannel(ppNewObj, pParent, createFlags, arg_pCallContext, arg_pParams, arg_isDma) \
__nvoc_objCreate_DispChannel((ppNewObj), staticCast((pParent), Dynamic), (createFlags), arg_pCallContext, arg_pParams, arg_isDma)
#define dispchnGetRegBaseOffsetAndSize(pDispChannel, pGpu, pOffset, pSize) dispchnGetRegBaseOffsetAndSize_DISPATCH(pDispChannel, pGpu, pOffset, pSize)
#define dispchnShareCallback(pGpuResource, pInvokingClient, pParentRef, pSharePolicy) dispchnShareCallback_DISPATCH(pGpuResource, pInvokingClient, pParentRef, pSharePolicy)
#define dispchnMapTo(pResource, pParams) dispchnMapTo_DISPATCH(pResource, pParams)
#define dispchnGetOrAllocNotifShare(pNotifier, hNotifierClient, hNotifierResource, ppNotifShare) dispchnGetOrAllocNotifShare_DISPATCH(pNotifier, hNotifierClient, hNotifierResource, ppNotifShare)
#define dispchnCheckMemInterUnmap(pRmResource, bSubdeviceHandleProvided) dispchnCheckMemInterUnmap_DISPATCH(pRmResource, bSubdeviceHandleProvided)
#define dispchnGetMapAddrSpace(pGpuResource, pCallContext, mapFlags, pAddrSpace) dispchnGetMapAddrSpace_DISPATCH(pGpuResource, pCallContext, mapFlags, pAddrSpace)
#define dispchnSetNotificationShare(pNotifier, pNotifShare) dispchnSetNotificationShare_DISPATCH(pNotifier, pNotifShare)
#define dispchnGetRefCount(pResource) dispchnGetRefCount_DISPATCH(pResource)
#define dispchnAddAdditionalDependants(pClient, pResource, pReference) dispchnAddAdditionalDependants_DISPATCH(pClient, pResource, pReference)
#define dispchnControl_Prologue(pResource, pCallContext, pParams) dispchnControl_Prologue_DISPATCH(pResource, pCallContext, pParams)
#define dispchnInternalControlForward(pGpuResource, command, pParams, size) dispchnInternalControlForward_DISPATCH(pGpuResource, command, pParams, size)
#define dispchnUnmapFrom(pResource, pParams) dispchnUnmapFrom_DISPATCH(pResource, pParams)
#define dispchnControl_Epilogue(pResource, pCallContext, pParams) dispchnControl_Epilogue_DISPATCH(pResource, pCallContext, pParams)
#define dispchnControlLookup(pResource, pParams, ppEntry) dispchnControlLookup_DISPATCH(pResource, pParams, ppEntry)
#define dispchnGetInternalObjectHandle(pGpuResource) dispchnGetInternalObjectHandle_DISPATCH(pGpuResource)
#define dispchnControl(pGpuResource, pCallContext, pParams) dispchnControl_DISPATCH(pGpuResource, pCallContext, pParams)
#define dispchnUnmap(pGpuResource, pCallContext, pCpuMapping) dispchnUnmap_DISPATCH(pGpuResource, pCallContext, pCpuMapping)
#define dispchnGetMemInterMapParams(pRmResource, pParams) dispchnGetMemInterMapParams_DISPATCH(pRmResource, pParams)
#define dispchnGetMemoryMappingDescriptor(pRmResource, ppMemDesc) dispchnGetMemoryMappingDescriptor_DISPATCH(pRmResource, ppMemDesc)
#define dispchnControlFilter(pResource, pCallContext, pParams) dispchnControlFilter_DISPATCH(pResource, pCallContext, pParams)
#define dispchnUnregisterEvent(pNotifier, hNotifierClient, hNotifierResource, hEventClient, hEvent) dispchnUnregisterEvent_DISPATCH(pNotifier, hNotifierClient, hNotifierResource, hEventClient, hEvent)
#define dispchnCanCopy(pResource) dispchnCanCopy_DISPATCH(pResource)
#define dispchnPreDestruct(pResource) dispchnPreDestruct_DISPATCH(pResource)
#define dispchnGetNotificationListPtr(pNotifier) dispchnGetNotificationListPtr_DISPATCH(pNotifier)
#define dispchnGetNotificationShare(pNotifier) dispchnGetNotificationShare_DISPATCH(pNotifier)
#define dispchnMap(pGpuResource, pCallContext, pParams, pCpuMapping) dispchnMap_DISPATCH(pGpuResource, pCallContext, pParams, pCpuMapping)
#define dispchnAccessCallback(pResource, pInvokingClient, pAllocParams, accessRight) dispchnAccessCallback_DISPATCH(pResource, pInvokingClient, pAllocParams, accessRight)
NV_STATUS dispchnGetRegBaseOffsetAndSize_IMPL(struct DispChannel *pDispChannel, struct OBJGPU *pGpu, NvU32 *pOffset, NvU32 *pSize);
static inline NV_STATUS dispchnGetRegBaseOffsetAndSize_DISPATCH(struct DispChannel *pDispChannel, struct OBJGPU *pGpu, NvU32 *pOffset, NvU32 *pSize) {
return pDispChannel->__dispchnGetRegBaseOffsetAndSize__(pDispChannel, pGpu, pOffset, pSize);
}
static inline NvBool dispchnShareCallback_DISPATCH(struct DispChannel *pGpuResource, struct RsClient *pInvokingClient, struct RsResourceRef *pParentRef, RS_SHARE_POLICY *pSharePolicy) {
return pGpuResource->__dispchnShareCallback__(pGpuResource, pInvokingClient, pParentRef, pSharePolicy);
}
static inline NV_STATUS dispchnMapTo_DISPATCH(struct DispChannel *pResource, RS_RES_MAP_TO_PARAMS *pParams) {
return pResource->__dispchnMapTo__(pResource, pParams);
}
static inline NV_STATUS dispchnGetOrAllocNotifShare_DISPATCH(struct DispChannel *pNotifier, NvHandle hNotifierClient, NvHandle hNotifierResource, struct NotifShare **ppNotifShare) {
return pNotifier->__dispchnGetOrAllocNotifShare__(pNotifier, hNotifierClient, hNotifierResource, ppNotifShare);
}
static inline NV_STATUS dispchnCheckMemInterUnmap_DISPATCH(struct DispChannel *pRmResource, NvBool bSubdeviceHandleProvided) {
return pRmResource->__dispchnCheckMemInterUnmap__(pRmResource, bSubdeviceHandleProvided);
}
static inline NV_STATUS dispchnGetMapAddrSpace_DISPATCH(struct DispChannel *pGpuResource, struct CALL_CONTEXT *pCallContext, NvU32 mapFlags, NV_ADDRESS_SPACE *pAddrSpace) {
return pGpuResource->__dispchnGetMapAddrSpace__(pGpuResource, pCallContext, mapFlags, pAddrSpace);
}
static inline void dispchnSetNotificationShare_DISPATCH(struct DispChannel *pNotifier, struct NotifShare *pNotifShare) {
pNotifier->__dispchnSetNotificationShare__(pNotifier, pNotifShare);
}
static inline NvU32 dispchnGetRefCount_DISPATCH(struct DispChannel *pResource) {
return pResource->__dispchnGetRefCount__(pResource);
}
static inline void dispchnAddAdditionalDependants_DISPATCH(struct RsClient *pClient, struct DispChannel *pResource, RsResourceRef *pReference) {
pResource->__dispchnAddAdditionalDependants__(pClient, pResource, pReference);
}
static inline NV_STATUS dispchnControl_Prologue_DISPATCH(struct DispChannel *pResource, struct CALL_CONTEXT *pCallContext, struct RS_RES_CONTROL_PARAMS_INTERNAL *pParams) {
return pResource->__dispchnControl_Prologue__(pResource, pCallContext, pParams);
}
static inline NV_STATUS dispchnInternalControlForward_DISPATCH(struct DispChannel *pGpuResource, NvU32 command, void *pParams, NvU32 size) {
return pGpuResource->__dispchnInternalControlForward__(pGpuResource, command, pParams, size);
}
static inline NV_STATUS dispchnUnmapFrom_DISPATCH(struct DispChannel *pResource, RS_RES_UNMAP_FROM_PARAMS *pParams) {
return pResource->__dispchnUnmapFrom__(pResource, pParams);
}
static inline void dispchnControl_Epilogue_DISPATCH(struct DispChannel *pResource, struct CALL_CONTEXT *pCallContext, struct RS_RES_CONTROL_PARAMS_INTERNAL *pParams) {
pResource->__dispchnControl_Epilogue__(pResource, pCallContext, pParams);
}
static inline NV_STATUS dispchnControlLookup_DISPATCH(struct DispChannel *pResource, struct RS_RES_CONTROL_PARAMS_INTERNAL *pParams, const struct NVOC_EXPORTED_METHOD_DEF **ppEntry) {
return pResource->__dispchnControlLookup__(pResource, pParams, ppEntry);
}
static inline NvHandle dispchnGetInternalObjectHandle_DISPATCH(struct DispChannel *pGpuResource) {
return pGpuResource->__dispchnGetInternalObjectHandle__(pGpuResource);
}
static inline NV_STATUS dispchnControl_DISPATCH(struct DispChannel *pGpuResource, struct CALL_CONTEXT *pCallContext, struct RS_RES_CONTROL_PARAMS_INTERNAL *pParams) {
return pGpuResource->__dispchnControl__(pGpuResource, pCallContext, pParams);
}
static inline NV_STATUS dispchnUnmap_DISPATCH(struct DispChannel *pGpuResource, struct CALL_CONTEXT *pCallContext, struct RsCpuMapping *pCpuMapping) {
return pGpuResource->__dispchnUnmap__(pGpuResource, pCallContext, pCpuMapping);
}
static inline NV_STATUS dispchnGetMemInterMapParams_DISPATCH(struct DispChannel *pRmResource, RMRES_MEM_INTER_MAP_PARAMS *pParams) {
return pRmResource->__dispchnGetMemInterMapParams__(pRmResource, pParams);
}
static inline NV_STATUS dispchnGetMemoryMappingDescriptor_DISPATCH(struct DispChannel *pRmResource, struct MEMORY_DESCRIPTOR **ppMemDesc) {
return pRmResource->__dispchnGetMemoryMappingDescriptor__(pRmResource, ppMemDesc);
}
static inline NV_STATUS dispchnControlFilter_DISPATCH(struct DispChannel *pResource, struct CALL_CONTEXT *pCallContext, struct RS_RES_CONTROL_PARAMS_INTERNAL *pParams) {
return pResource->__dispchnControlFilter__(pResource, pCallContext, pParams);
}
static inline NV_STATUS dispchnUnregisterEvent_DISPATCH(struct DispChannel *pNotifier, NvHandle hNotifierClient, NvHandle hNotifierResource, NvHandle hEventClient, NvHandle hEvent) {
return pNotifier->__dispchnUnregisterEvent__(pNotifier, hNotifierClient, hNotifierResource, hEventClient, hEvent);
}
static inline NvBool dispchnCanCopy_DISPATCH(struct DispChannel *pResource) {
return pResource->__dispchnCanCopy__(pResource);
}
static inline void dispchnPreDestruct_DISPATCH(struct DispChannel *pResource) {
pResource->__dispchnPreDestruct__(pResource);
}
static inline PEVENTNOTIFICATION *dispchnGetNotificationListPtr_DISPATCH(struct DispChannel *pNotifier) {
return pNotifier->__dispchnGetNotificationListPtr__(pNotifier);
}
static inline struct NotifShare *dispchnGetNotificationShare_DISPATCH(struct DispChannel *pNotifier) {
return pNotifier->__dispchnGetNotificationShare__(pNotifier);
}
static inline NV_STATUS dispchnMap_DISPATCH(struct DispChannel *pGpuResource, struct CALL_CONTEXT *pCallContext, struct RS_CPU_MAP_PARAMS *pParams, struct RsCpuMapping *pCpuMapping) {
return pGpuResource->__dispchnMap__(pGpuResource, pCallContext, pParams, pCpuMapping);
}
static inline NvBool dispchnAccessCallback_DISPATCH(struct DispChannel *pResource, struct RsClient *pInvokingClient, void *pAllocParams, RsAccessRight accessRight) {
return pResource->__dispchnAccessCallback__(pResource, pInvokingClient, pAllocParams, accessRight);
}
NV_STATUS dispchnConstruct_IMPL(struct DispChannel *arg_pDispChannel, struct CALL_CONTEXT *arg_pCallContext, struct RS_RES_ALLOC_PARAMS_INTERNAL *arg_pParams, NvU32 arg_isDma);
#define __nvoc_dispchnConstruct(arg_pDispChannel, arg_pCallContext, arg_pParams, arg_isDma) dispchnConstruct_IMPL(arg_pDispChannel, arg_pCallContext, arg_pParams, arg_isDma)
void dispchnDestruct_IMPL(struct DispChannel *pDispChannel);
#define __nvoc_dispchnDestruct(pDispChannel) dispchnDestruct_IMPL(pDispChannel)
void dispchnSetRegBaseOffsetAndSize_IMPL(struct DispChannel *pDispChannel, struct OBJGPU *pGpu);
#ifdef __nvoc_disp_channel_h_disabled
static inline void dispchnSetRegBaseOffsetAndSize(struct DispChannel *pDispChannel, struct OBJGPU *pGpu) {
NV_ASSERT_FAILED_PRECOMP("DispChannel was disabled!");
}
#else //__nvoc_disp_channel_h_disabled
#define dispchnSetRegBaseOffsetAndSize(pDispChannel, pGpu) dispchnSetRegBaseOffsetAndSize_IMPL(pDispChannel, pGpu)
#endif //__nvoc_disp_channel_h_disabled
NV_STATUS dispchnGrabChannel_IMPL(struct DispChannel *pDispChannel, NvHandle hClient, NvHandle hParent, NvHandle hChannel, NvU32 hClass, void *pAllocParms);
#ifdef __nvoc_disp_channel_h_disabled
static inline NV_STATUS dispchnGrabChannel(struct DispChannel *pDispChannel, NvHandle hClient, NvHandle hParent, NvHandle hChannel, NvU32 hClass, void *pAllocParms) {
NV_ASSERT_FAILED_PRECOMP("DispChannel was disabled!");
return NV_ERR_NOT_SUPPORTED;
}
#else //__nvoc_disp_channel_h_disabled
#define dispchnGrabChannel(pDispChannel, hClient, hParent, hChannel, hClass, pAllocParms) dispchnGrabChannel_IMPL(pDispChannel, hClient, hParent, hChannel, hClass, pAllocParms)
#endif //__nvoc_disp_channel_h_disabled
NV_STATUS dispchnBindCtx_IMPL(struct OBJGPU *pGpu, struct ContextDma *pContextDma, NvHandle hDispChannel);
#define dispchnBindCtx(pGpu, pContextDma, hDispChannel) dispchnBindCtx_IMPL(pGpu, pContextDma, hDispChannel)
NV_STATUS dispchnUnbindCtx_IMPL(struct OBJGPU *pGpu, struct ContextDma *pContextDma, NvHandle hDispChannel);
#define dispchnUnbindCtx(pGpu, pContextDma, hDispChannel) dispchnUnbindCtx_IMPL(pGpu, pContextDma, hDispChannel)
void dispchnUnbindCtxFromAllChannels_IMPL(struct OBJGPU *pGpu, struct ContextDma *pContextDma);
#define dispchnUnbindCtxFromAllChannels(pGpu, pContextDma) dispchnUnbindCtxFromAllChannels_IMPL(pGpu, pContextDma)
void dispchnUnbindAllCtx_IMPL(struct OBJGPU *pGpu, struct DispChannel *pDispChannel);
#ifdef __nvoc_disp_channel_h_disabled
static inline void dispchnUnbindAllCtx(struct OBJGPU *pGpu, struct DispChannel *pDispChannel) {
NV_ASSERT_FAILED_PRECOMP("DispChannel was disabled!");
}
#else //__nvoc_disp_channel_h_disabled
#define dispchnUnbindAllCtx(pGpu, pDispChannel) dispchnUnbindAllCtx_IMPL(pGpu, pDispChannel)
#endif //__nvoc_disp_channel_h_disabled
NV_STATUS dispchnGetByHandle_IMPL(struct RsClient *pClient, NvHandle hDisplayChannel, struct DispChannel **ppDispChannel);
#define dispchnGetByHandle(pClient, hDisplayChannel, ppDispChannel) dispchnGetByHandle_IMPL(pClient, hDisplayChannel, ppDispChannel)
#undef PRIVATE_FIELD
/*!
* RM internal class representing XXX_XXX_CHANNEL_PIO
*/
#ifdef NVOC_DISP_CHANNEL_H_PRIVATE_ACCESS_ALLOWED
#define PRIVATE_FIELD(x) x
#else
#define PRIVATE_FIELD(x) NVOC_PRIVATE_FIELD(x)
#endif
struct DispChannelPio {
const struct NVOC_RTTI *__nvoc_rtti;
struct DispChannel __nvoc_base_DispChannel;
struct Object *__nvoc_pbase_Object;
struct RsResource *__nvoc_pbase_RsResource;
struct RmResourceCommon *__nvoc_pbase_RmResourceCommon;
struct RmResource *__nvoc_pbase_RmResource;
struct GpuResource *__nvoc_pbase_GpuResource;
struct INotifier *__nvoc_pbase_INotifier;
struct Notifier *__nvoc_pbase_Notifier;
struct DispChannel *__nvoc_pbase_DispChannel;
struct DispChannelPio *__nvoc_pbase_DispChannelPio;
NvBool (*__dispchnpioShareCallback__)(struct DispChannelPio *, struct RsClient *, struct RsResourceRef *, RS_SHARE_POLICY *);
NV_STATUS (*__dispchnpioMapTo__)(struct DispChannelPio *, RS_RES_MAP_TO_PARAMS *);
NV_STATUS (*__dispchnpioGetOrAllocNotifShare__)(struct DispChannelPio *, NvHandle, NvHandle, struct NotifShare **);
NV_STATUS (*__dispchnpioCheckMemInterUnmap__)(struct DispChannelPio *, NvBool);
NV_STATUS (*__dispchnpioGetMapAddrSpace__)(struct DispChannelPio *, struct CALL_CONTEXT *, NvU32, NV_ADDRESS_SPACE *);
void (*__dispchnpioSetNotificationShare__)(struct DispChannelPio *, struct NotifShare *);
NvU32 (*__dispchnpioGetRefCount__)(struct DispChannelPio *);
void (*__dispchnpioAddAdditionalDependants__)(struct RsClient *, struct DispChannelPio *, RsResourceRef *);
NV_STATUS (*__dispchnpioControl_Prologue__)(struct DispChannelPio *, struct CALL_CONTEXT *, struct RS_RES_CONTROL_PARAMS_INTERNAL *);
NV_STATUS (*__dispchnpioGetRegBaseOffsetAndSize__)(struct DispChannelPio *, struct OBJGPU *, NvU32 *, NvU32 *);
NV_STATUS (*__dispchnpioInternalControlForward__)(struct DispChannelPio *, NvU32, void *, NvU32);
NV_STATUS (*__dispchnpioUnmapFrom__)(struct DispChannelPio *, RS_RES_UNMAP_FROM_PARAMS *);
void (*__dispchnpioControl_Epilogue__)(struct DispChannelPio *, struct CALL_CONTEXT *, struct RS_RES_CONTROL_PARAMS_INTERNAL *);
NV_STATUS (*__dispchnpioControlLookup__)(struct DispChannelPio *, struct RS_RES_CONTROL_PARAMS_INTERNAL *, const struct NVOC_EXPORTED_METHOD_DEF **);
NvHandle (*__dispchnpioGetInternalObjectHandle__)(struct DispChannelPio *);
NV_STATUS (*__dispchnpioControl__)(struct DispChannelPio *, struct CALL_CONTEXT *, struct RS_RES_CONTROL_PARAMS_INTERNAL *);
NV_STATUS (*__dispchnpioUnmap__)(struct DispChannelPio *, struct CALL_CONTEXT *, struct RsCpuMapping *);
NV_STATUS (*__dispchnpioGetMemInterMapParams__)(struct DispChannelPio *, RMRES_MEM_INTER_MAP_PARAMS *);
NV_STATUS (*__dispchnpioGetMemoryMappingDescriptor__)(struct DispChannelPio *, struct MEMORY_DESCRIPTOR **);
NV_STATUS (*__dispchnpioControlFilter__)(struct DispChannelPio *, struct CALL_CONTEXT *, struct RS_RES_CONTROL_PARAMS_INTERNAL *);
NV_STATUS (*__dispchnpioUnregisterEvent__)(struct DispChannelPio *, NvHandle, NvHandle, NvHandle, NvHandle);
NvBool (*__dispchnpioCanCopy__)(struct DispChannelPio *);
void (*__dispchnpioPreDestruct__)(struct DispChannelPio *);
PEVENTNOTIFICATION *(*__dispchnpioGetNotificationListPtr__)(struct DispChannelPio *);
struct NotifShare *(*__dispchnpioGetNotificationShare__)(struct DispChannelPio *);
NV_STATUS (*__dispchnpioMap__)(struct DispChannelPio *, struct CALL_CONTEXT *, struct RS_CPU_MAP_PARAMS *, struct RsCpuMapping *);
NvBool (*__dispchnpioAccessCallback__)(struct DispChannelPio *, struct RsClient *, void *, RsAccessRight);
};
#ifndef __NVOC_CLASS_DispChannelPio_TYPEDEF__
#define __NVOC_CLASS_DispChannelPio_TYPEDEF__
typedef struct DispChannelPio DispChannelPio;
#endif /* __NVOC_CLASS_DispChannelPio_TYPEDEF__ */
#ifndef __nvoc_class_id_DispChannelPio
#define __nvoc_class_id_DispChannelPio 0x10dec3
#endif /* __nvoc_class_id_DispChannelPio */
extern const struct NVOC_CLASS_DEF __nvoc_class_def_DispChannelPio;
#define __staticCast_DispChannelPio(pThis) \
((pThis)->__nvoc_pbase_DispChannelPio)
#ifdef __nvoc_disp_channel_h_disabled
#define __dynamicCast_DispChannelPio(pThis) ((DispChannelPio*)NULL)
#else //__nvoc_disp_channel_h_disabled
#define __dynamicCast_DispChannelPio(pThis) \
((DispChannelPio*)__nvoc_dynamicCast(staticCast((pThis), Dynamic), classInfo(DispChannelPio)))
#endif //__nvoc_disp_channel_h_disabled
NV_STATUS __nvoc_objCreateDynamic_DispChannelPio(DispChannelPio**, Dynamic*, NvU32, va_list);
NV_STATUS __nvoc_objCreate_DispChannelPio(DispChannelPio**, Dynamic*, NvU32, struct CALL_CONTEXT * arg_pCallContext, struct RS_RES_ALLOC_PARAMS_INTERNAL * arg_pParams);
#define __objCreate_DispChannelPio(ppNewObj, pParent, createFlags, arg_pCallContext, arg_pParams) \
__nvoc_objCreate_DispChannelPio((ppNewObj), staticCast((pParent), Dynamic), (createFlags), arg_pCallContext, arg_pParams)
#define dispchnpioShareCallback(pGpuResource, pInvokingClient, pParentRef, pSharePolicy) dispchnpioShareCallback_DISPATCH(pGpuResource, pInvokingClient, pParentRef, pSharePolicy)
#define dispchnpioMapTo(pResource, pParams) dispchnpioMapTo_DISPATCH(pResource, pParams)
#define dispchnpioGetOrAllocNotifShare(pNotifier, hNotifierClient, hNotifierResource, ppNotifShare) dispchnpioGetOrAllocNotifShare_DISPATCH(pNotifier, hNotifierClient, hNotifierResource, ppNotifShare)
#define dispchnpioCheckMemInterUnmap(pRmResource, bSubdeviceHandleProvided) dispchnpioCheckMemInterUnmap_DISPATCH(pRmResource, bSubdeviceHandleProvided)
#define dispchnpioGetMapAddrSpace(pGpuResource, pCallContext, mapFlags, pAddrSpace) dispchnpioGetMapAddrSpace_DISPATCH(pGpuResource, pCallContext, mapFlags, pAddrSpace)
#define dispchnpioSetNotificationShare(pNotifier, pNotifShare) dispchnpioSetNotificationShare_DISPATCH(pNotifier, pNotifShare)
#define dispchnpioGetRefCount(pResource) dispchnpioGetRefCount_DISPATCH(pResource)
#define dispchnpioAddAdditionalDependants(pClient, pResource, pReference) dispchnpioAddAdditionalDependants_DISPATCH(pClient, pResource, pReference)
#define dispchnpioControl_Prologue(pResource, pCallContext, pParams) dispchnpioControl_Prologue_DISPATCH(pResource, pCallContext, pParams)
#define dispchnpioGetRegBaseOffsetAndSize(pDispChannel, pGpu, pOffset, pSize) dispchnpioGetRegBaseOffsetAndSize_DISPATCH(pDispChannel, pGpu, pOffset, pSize)
#define dispchnpioInternalControlForward(pGpuResource, command, pParams, size) dispchnpioInternalControlForward_DISPATCH(pGpuResource, command, pParams, size)
#define dispchnpioUnmapFrom(pResource, pParams) dispchnpioUnmapFrom_DISPATCH(pResource, pParams)
#define dispchnpioControl_Epilogue(pResource, pCallContext, pParams) dispchnpioControl_Epilogue_DISPATCH(pResource, pCallContext, pParams)
#define dispchnpioControlLookup(pResource, pParams, ppEntry) dispchnpioControlLookup_DISPATCH(pResource, pParams, ppEntry)
#define dispchnpioGetInternalObjectHandle(pGpuResource) dispchnpioGetInternalObjectHandle_DISPATCH(pGpuResource)
#define dispchnpioControl(pGpuResource, pCallContext, pParams) dispchnpioControl_DISPATCH(pGpuResource, pCallContext, pParams)
#define dispchnpioUnmap(pGpuResource, pCallContext, pCpuMapping) dispchnpioUnmap_DISPATCH(pGpuResource, pCallContext, pCpuMapping)
#define dispchnpioGetMemInterMapParams(pRmResource, pParams) dispchnpioGetMemInterMapParams_DISPATCH(pRmResource, pParams)
#define dispchnpioGetMemoryMappingDescriptor(pRmResource, ppMemDesc) dispchnpioGetMemoryMappingDescriptor_DISPATCH(pRmResource, ppMemDesc)
#define dispchnpioControlFilter(pResource, pCallContext, pParams) dispchnpioControlFilter_DISPATCH(pResource, pCallContext, pParams)
#define dispchnpioUnregisterEvent(pNotifier, hNotifierClient, hNotifierResource, hEventClient, hEvent) dispchnpioUnregisterEvent_DISPATCH(pNotifier, hNotifierClient, hNotifierResource, hEventClient, hEvent)
#define dispchnpioCanCopy(pResource) dispchnpioCanCopy_DISPATCH(pResource)
#define dispchnpioPreDestruct(pResource) dispchnpioPreDestruct_DISPATCH(pResource)
#define dispchnpioGetNotificationListPtr(pNotifier) dispchnpioGetNotificationListPtr_DISPATCH(pNotifier)
#define dispchnpioGetNotificationShare(pNotifier) dispchnpioGetNotificationShare_DISPATCH(pNotifier)
#define dispchnpioMap(pGpuResource, pCallContext, pParams, pCpuMapping) dispchnpioMap_DISPATCH(pGpuResource, pCallContext, pParams, pCpuMapping)
#define dispchnpioAccessCallback(pResource, pInvokingClient, pAllocParams, accessRight) dispchnpioAccessCallback_DISPATCH(pResource, pInvokingClient, pAllocParams, accessRight)
static inline NvBool dispchnpioShareCallback_DISPATCH(struct DispChannelPio *pGpuResource, struct RsClient *pInvokingClient, struct RsResourceRef *pParentRef, RS_SHARE_POLICY *pSharePolicy) {
return pGpuResource->__dispchnpioShareCallback__(pGpuResource, pInvokingClient, pParentRef, pSharePolicy);
}
static inline NV_STATUS dispchnpioMapTo_DISPATCH(struct DispChannelPio *pResource, RS_RES_MAP_TO_PARAMS *pParams) {
return pResource->__dispchnpioMapTo__(pResource, pParams);
}
static inline NV_STATUS dispchnpioGetOrAllocNotifShare_DISPATCH(struct DispChannelPio *pNotifier, NvHandle hNotifierClient, NvHandle hNotifierResource, struct NotifShare **ppNotifShare) {
return pNotifier->__dispchnpioGetOrAllocNotifShare__(pNotifier, hNotifierClient, hNotifierResource, ppNotifShare);
}
static inline NV_STATUS dispchnpioCheckMemInterUnmap_DISPATCH(struct DispChannelPio *pRmResource, NvBool bSubdeviceHandleProvided) {
return pRmResource->__dispchnpioCheckMemInterUnmap__(pRmResource, bSubdeviceHandleProvided);
}
static inline NV_STATUS dispchnpioGetMapAddrSpace_DISPATCH(struct DispChannelPio *pGpuResource, struct CALL_CONTEXT *pCallContext, NvU32 mapFlags, NV_ADDRESS_SPACE *pAddrSpace) {
return pGpuResource->__dispchnpioGetMapAddrSpace__(pGpuResource, pCallContext, mapFlags, pAddrSpace);
}
static inline void dispchnpioSetNotificationShare_DISPATCH(struct DispChannelPio *pNotifier, struct NotifShare *pNotifShare) {
pNotifier->__dispchnpioSetNotificationShare__(pNotifier, pNotifShare);
}
static inline NvU32 dispchnpioGetRefCount_DISPATCH(struct DispChannelPio *pResource) {
return pResource->__dispchnpioGetRefCount__(pResource);
}
static inline void dispchnpioAddAdditionalDependants_DISPATCH(struct RsClient *pClient, struct DispChannelPio *pResource, RsResourceRef *pReference) {
pResource->__dispchnpioAddAdditionalDependants__(pClient, pResource, pReference);
}
static inline NV_STATUS dispchnpioControl_Prologue_DISPATCH(struct DispChannelPio *pResource, struct CALL_CONTEXT *pCallContext, struct RS_RES_CONTROL_PARAMS_INTERNAL *pParams) {
return pResource->__dispchnpioControl_Prologue__(pResource, pCallContext, pParams);
}
static inline NV_STATUS dispchnpioGetRegBaseOffsetAndSize_DISPATCH(struct DispChannelPio *pDispChannel, struct OBJGPU *pGpu, NvU32 *pOffset, NvU32 *pSize) {
return pDispChannel->__dispchnpioGetRegBaseOffsetAndSize__(pDispChannel, pGpu, pOffset, pSize);
}
static inline NV_STATUS dispchnpioInternalControlForward_DISPATCH(struct DispChannelPio *pGpuResource, NvU32 command, void *pParams, NvU32 size) {
return pGpuResource->__dispchnpioInternalControlForward__(pGpuResource, command, pParams, size);
}
static inline NV_STATUS dispchnpioUnmapFrom_DISPATCH(struct DispChannelPio *pResource, RS_RES_UNMAP_FROM_PARAMS *pParams) {
return pResource->__dispchnpioUnmapFrom__(pResource, pParams);
}
static inline void dispchnpioControl_Epilogue_DISPATCH(struct DispChannelPio *pResource, struct CALL_CONTEXT *pCallContext, struct RS_RES_CONTROL_PARAMS_INTERNAL *pParams) {
pResource->__dispchnpioControl_Epilogue__(pResource, pCallContext, pParams);
}
static inline NV_STATUS dispchnpioControlLookup_DISPATCH(struct DispChannelPio *pResource, struct RS_RES_CONTROL_PARAMS_INTERNAL *pParams, const struct NVOC_EXPORTED_METHOD_DEF **ppEntry) {
return pResource->__dispchnpioControlLookup__(pResource, pParams, ppEntry);
}
static inline NvHandle dispchnpioGetInternalObjectHandle_DISPATCH(struct DispChannelPio *pGpuResource) {
return pGpuResource->__dispchnpioGetInternalObjectHandle__(pGpuResource);
}
static inline NV_STATUS dispchnpioControl_DISPATCH(struct DispChannelPio *pGpuResource, struct CALL_CONTEXT *pCallContext, struct RS_RES_CONTROL_PARAMS_INTERNAL *pParams) {
return pGpuResource->__dispchnpioControl__(pGpuResource, pCallContext, pParams);
}
static inline NV_STATUS dispchnpioUnmap_DISPATCH(struct DispChannelPio *pGpuResource, struct CALL_CONTEXT *pCallContext, struct RsCpuMapping *pCpuMapping) {
return pGpuResource->__dispchnpioUnmap__(pGpuResource, pCallContext, pCpuMapping);
}
static inline NV_STATUS dispchnpioGetMemInterMapParams_DISPATCH(struct DispChannelPio *pRmResource, RMRES_MEM_INTER_MAP_PARAMS *pParams) {
return pRmResource->__dispchnpioGetMemInterMapParams__(pRmResource, pParams);
}
static inline NV_STATUS dispchnpioGetMemoryMappingDescriptor_DISPATCH(struct DispChannelPio *pRmResource, struct MEMORY_DESCRIPTOR **ppMemDesc) {
return pRmResource->__dispchnpioGetMemoryMappingDescriptor__(pRmResource, ppMemDesc);
}
static inline NV_STATUS dispchnpioControlFilter_DISPATCH(struct DispChannelPio *pResource, struct CALL_CONTEXT *pCallContext, struct RS_RES_CONTROL_PARAMS_INTERNAL *pParams) {
return pResource->__dispchnpioControlFilter__(pResource, pCallContext, pParams);
}
static inline NV_STATUS dispchnpioUnregisterEvent_DISPATCH(struct DispChannelPio *pNotifier, NvHandle hNotifierClient, NvHandle hNotifierResource, NvHandle hEventClient, NvHandle hEvent) {
return pNotifier->__dispchnpioUnregisterEvent__(pNotifier, hNotifierClient, hNotifierResource, hEventClient, hEvent);
}
static inline NvBool dispchnpioCanCopy_DISPATCH(struct DispChannelPio *pResource) {
return pResource->__dispchnpioCanCopy__(pResource);
}
static inline void dispchnpioPreDestruct_DISPATCH(struct DispChannelPio *pResource) {
pResource->__dispchnpioPreDestruct__(pResource);
}
static inline PEVENTNOTIFICATION *dispchnpioGetNotificationListPtr_DISPATCH(struct DispChannelPio *pNotifier) {
return pNotifier->__dispchnpioGetNotificationListPtr__(pNotifier);
}
static inline struct NotifShare *dispchnpioGetNotificationShare_DISPATCH(struct DispChannelPio *pNotifier) {
return pNotifier->__dispchnpioGetNotificationShare__(pNotifier);
}
static inline NV_STATUS dispchnpioMap_DISPATCH(struct DispChannelPio *pGpuResource, struct CALL_CONTEXT *pCallContext, struct RS_CPU_MAP_PARAMS *pParams, struct RsCpuMapping *pCpuMapping) {
return pGpuResource->__dispchnpioMap__(pGpuResource, pCallContext, pParams, pCpuMapping);
}
static inline NvBool dispchnpioAccessCallback_DISPATCH(struct DispChannelPio *pResource, struct RsClient *pInvokingClient, void *pAllocParams, RsAccessRight accessRight) {
return pResource->__dispchnpioAccessCallback__(pResource, pInvokingClient, pAllocParams, accessRight);
}
NV_STATUS dispchnpioConstruct_IMPL(struct DispChannelPio *arg_pDispChannelPio, struct CALL_CONTEXT *arg_pCallContext, struct RS_RES_ALLOC_PARAMS_INTERNAL *arg_pParams);
#define __nvoc_dispchnpioConstruct(arg_pDispChannelPio, arg_pCallContext, arg_pParams) dispchnpioConstruct_IMPL(arg_pDispChannelPio, arg_pCallContext, arg_pParams)
#undef PRIVATE_FIELD
/*!
* RM internal class representing XXX_XXX_CHANNEL_DMA
*/
#ifdef NVOC_DISP_CHANNEL_H_PRIVATE_ACCESS_ALLOWED
#define PRIVATE_FIELD(x) x
#else
#define PRIVATE_FIELD(x) NVOC_PRIVATE_FIELD(x)
#endif
struct DispChannelDma {
const struct NVOC_RTTI *__nvoc_rtti;
struct DispChannel __nvoc_base_DispChannel;
struct Object *__nvoc_pbase_Object;
struct RsResource *__nvoc_pbase_RsResource;
struct RmResourceCommon *__nvoc_pbase_RmResourceCommon;
struct RmResource *__nvoc_pbase_RmResource;
struct GpuResource *__nvoc_pbase_GpuResource;
struct INotifier *__nvoc_pbase_INotifier;
struct Notifier *__nvoc_pbase_Notifier;
struct DispChannel *__nvoc_pbase_DispChannel;
struct DispChannelDma *__nvoc_pbase_DispChannelDma;
NvBool (*__dispchndmaShareCallback__)(struct DispChannelDma *, struct RsClient *, struct RsResourceRef *, RS_SHARE_POLICY *);
NV_STATUS (*__dispchndmaMapTo__)(struct DispChannelDma *, RS_RES_MAP_TO_PARAMS *);
NV_STATUS (*__dispchndmaGetOrAllocNotifShare__)(struct DispChannelDma *, NvHandle, NvHandle, struct NotifShare **);
NV_STATUS (*__dispchndmaCheckMemInterUnmap__)(struct DispChannelDma *, NvBool);
NV_STATUS (*__dispchndmaGetMapAddrSpace__)(struct DispChannelDma *, struct CALL_CONTEXT *, NvU32, NV_ADDRESS_SPACE *);
void (*__dispchndmaSetNotificationShare__)(struct DispChannelDma *, struct NotifShare *);
NvU32 (*__dispchndmaGetRefCount__)(struct DispChannelDma *);
void (*__dispchndmaAddAdditionalDependants__)(struct RsClient *, struct DispChannelDma *, RsResourceRef *);
NV_STATUS (*__dispchndmaControl_Prologue__)(struct DispChannelDma *, struct CALL_CONTEXT *, struct RS_RES_CONTROL_PARAMS_INTERNAL *);
NV_STATUS (*__dispchndmaGetRegBaseOffsetAndSize__)(struct DispChannelDma *, struct OBJGPU *, NvU32 *, NvU32 *);
NV_STATUS (*__dispchndmaInternalControlForward__)(struct DispChannelDma *, NvU32, void *, NvU32);
NV_STATUS (*__dispchndmaUnmapFrom__)(struct DispChannelDma *, RS_RES_UNMAP_FROM_PARAMS *);
void (*__dispchndmaControl_Epilogue__)(struct DispChannelDma *, struct CALL_CONTEXT *, struct RS_RES_CONTROL_PARAMS_INTERNAL *);
NV_STATUS (*__dispchndmaControlLookup__)(struct DispChannelDma *, struct RS_RES_CONTROL_PARAMS_INTERNAL *, const struct NVOC_EXPORTED_METHOD_DEF **);
NvHandle (*__dispchndmaGetInternalObjectHandle__)(struct DispChannelDma *);
NV_STATUS (*__dispchndmaControl__)(struct DispChannelDma *, struct CALL_CONTEXT *, struct RS_RES_CONTROL_PARAMS_INTERNAL *);
NV_STATUS (*__dispchndmaUnmap__)(struct DispChannelDma *, struct CALL_CONTEXT *, struct RsCpuMapping *);
NV_STATUS (*__dispchndmaGetMemInterMapParams__)(struct DispChannelDma *, RMRES_MEM_INTER_MAP_PARAMS *);
NV_STATUS (*__dispchndmaGetMemoryMappingDescriptor__)(struct DispChannelDma *, struct MEMORY_DESCRIPTOR **);
NV_STATUS (*__dispchndmaControlFilter__)(struct DispChannelDma *, struct CALL_CONTEXT *, struct RS_RES_CONTROL_PARAMS_INTERNAL *);
NV_STATUS (*__dispchndmaUnregisterEvent__)(struct DispChannelDma *, NvHandle, NvHandle, NvHandle, NvHandle);
NvBool (*__dispchndmaCanCopy__)(struct DispChannelDma *);
void (*__dispchndmaPreDestruct__)(struct DispChannelDma *);
PEVENTNOTIFICATION *(*__dispchndmaGetNotificationListPtr__)(struct DispChannelDma *);
struct NotifShare *(*__dispchndmaGetNotificationShare__)(struct DispChannelDma *);
NV_STATUS (*__dispchndmaMap__)(struct DispChannelDma *, struct CALL_CONTEXT *, struct RS_CPU_MAP_PARAMS *, struct RsCpuMapping *);
NvBool (*__dispchndmaAccessCallback__)(struct DispChannelDma *, struct RsClient *, void *, RsAccessRight);
};
#ifndef __NVOC_CLASS_DispChannelDma_TYPEDEF__
#define __NVOC_CLASS_DispChannelDma_TYPEDEF__
typedef struct DispChannelDma DispChannelDma;
#endif /* __NVOC_CLASS_DispChannelDma_TYPEDEF__ */
#ifndef __nvoc_class_id_DispChannelDma
#define __nvoc_class_id_DispChannelDma 0xfe3d2e
#endif /* __nvoc_class_id_DispChannelDma */
extern const struct NVOC_CLASS_DEF __nvoc_class_def_DispChannelDma;
#define __staticCast_DispChannelDma(pThis) \
((pThis)->__nvoc_pbase_DispChannelDma)
#ifdef __nvoc_disp_channel_h_disabled
#define __dynamicCast_DispChannelDma(pThis) ((DispChannelDma*)NULL)
#else //__nvoc_disp_channel_h_disabled
#define __dynamicCast_DispChannelDma(pThis) \
((DispChannelDma*)__nvoc_dynamicCast(staticCast((pThis), Dynamic), classInfo(DispChannelDma)))
#endif //__nvoc_disp_channel_h_disabled
NV_STATUS __nvoc_objCreateDynamic_DispChannelDma(DispChannelDma**, Dynamic*, NvU32, va_list);
NV_STATUS __nvoc_objCreate_DispChannelDma(DispChannelDma**, Dynamic*, NvU32, struct CALL_CONTEXT * arg_pCallContext, struct RS_RES_ALLOC_PARAMS_INTERNAL * arg_pParams);
#define __objCreate_DispChannelDma(ppNewObj, pParent, createFlags, arg_pCallContext, arg_pParams) \
__nvoc_objCreate_DispChannelDma((ppNewObj), staticCast((pParent), Dynamic), (createFlags), arg_pCallContext, arg_pParams)
#define dispchndmaShareCallback(pGpuResource, pInvokingClient, pParentRef, pSharePolicy) dispchndmaShareCallback_DISPATCH(pGpuResource, pInvokingClient, pParentRef, pSharePolicy)
#define dispchndmaMapTo(pResource, pParams) dispchndmaMapTo_DISPATCH(pResource, pParams)
#define dispchndmaGetOrAllocNotifShare(pNotifier, hNotifierClient, hNotifierResource, ppNotifShare) dispchndmaGetOrAllocNotifShare_DISPATCH(pNotifier, hNotifierClient, hNotifierResource, ppNotifShare)
#define dispchndmaCheckMemInterUnmap(pRmResource, bSubdeviceHandleProvided) dispchndmaCheckMemInterUnmap_DISPATCH(pRmResource, bSubdeviceHandleProvided)
#define dispchndmaGetMapAddrSpace(pGpuResource, pCallContext, mapFlags, pAddrSpace) dispchndmaGetMapAddrSpace_DISPATCH(pGpuResource, pCallContext, mapFlags, pAddrSpace)
#define dispchndmaSetNotificationShare(pNotifier, pNotifShare) dispchndmaSetNotificationShare_DISPATCH(pNotifier, pNotifShare)
#define dispchndmaGetRefCount(pResource) dispchndmaGetRefCount_DISPATCH(pResource)
#define dispchndmaAddAdditionalDependants(pClient, pResource, pReference) dispchndmaAddAdditionalDependants_DISPATCH(pClient, pResource, pReference)
#define dispchndmaControl_Prologue(pResource, pCallContext, pParams) dispchndmaControl_Prologue_DISPATCH(pResource, pCallContext, pParams)
#define dispchndmaGetRegBaseOffsetAndSize(pDispChannel, pGpu, pOffset, pSize) dispchndmaGetRegBaseOffsetAndSize_DISPATCH(pDispChannel, pGpu, pOffset, pSize)
#define dispchndmaInternalControlForward(pGpuResource, command, pParams, size) dispchndmaInternalControlForward_DISPATCH(pGpuResource, command, pParams, size)
#define dispchndmaUnmapFrom(pResource, pParams) dispchndmaUnmapFrom_DISPATCH(pResource, pParams)
#define dispchndmaControl_Epilogue(pResource, pCallContext, pParams) dispchndmaControl_Epilogue_DISPATCH(pResource, pCallContext, pParams)
#define dispchndmaControlLookup(pResource, pParams, ppEntry) dispchndmaControlLookup_DISPATCH(pResource, pParams, ppEntry)
#define dispchndmaGetInternalObjectHandle(pGpuResource) dispchndmaGetInternalObjectHandle_DISPATCH(pGpuResource)
#define dispchndmaControl(pGpuResource, pCallContext, pParams) dispchndmaControl_DISPATCH(pGpuResource, pCallContext, pParams)
#define dispchndmaUnmap(pGpuResource, pCallContext, pCpuMapping) dispchndmaUnmap_DISPATCH(pGpuResource, pCallContext, pCpuMapping)
#define dispchndmaGetMemInterMapParams(pRmResource, pParams) dispchndmaGetMemInterMapParams_DISPATCH(pRmResource, pParams)
#define dispchndmaGetMemoryMappingDescriptor(pRmResource, ppMemDesc) dispchndmaGetMemoryMappingDescriptor_DISPATCH(pRmResource, ppMemDesc)
#define dispchndmaControlFilter(pResource, pCallContext, pParams) dispchndmaControlFilter_DISPATCH(pResource, pCallContext, pParams)
#define dispchndmaUnregisterEvent(pNotifier, hNotifierClient, hNotifierResource, hEventClient, hEvent) dispchndmaUnregisterEvent_DISPATCH(pNotifier, hNotifierClient, hNotifierResource, hEventClient, hEvent)
#define dispchndmaCanCopy(pResource) dispchndmaCanCopy_DISPATCH(pResource)
#define dispchndmaPreDestruct(pResource) dispchndmaPreDestruct_DISPATCH(pResource)
#define dispchndmaGetNotificationListPtr(pNotifier) dispchndmaGetNotificationListPtr_DISPATCH(pNotifier)
#define dispchndmaGetNotificationShare(pNotifier) dispchndmaGetNotificationShare_DISPATCH(pNotifier)
#define dispchndmaMap(pGpuResource, pCallContext, pParams, pCpuMapping) dispchndmaMap_DISPATCH(pGpuResource, pCallContext, pParams, pCpuMapping)
#define dispchndmaAccessCallback(pResource, pInvokingClient, pAllocParams, accessRight) dispchndmaAccessCallback_DISPATCH(pResource, pInvokingClient, pAllocParams, accessRight)
static inline NvBool dispchndmaShareCallback_DISPATCH(struct DispChannelDma *pGpuResource, struct RsClient *pInvokingClient, struct RsResourceRef *pParentRef, RS_SHARE_POLICY *pSharePolicy) {
return pGpuResource->__dispchndmaShareCallback__(pGpuResource, pInvokingClient, pParentRef, pSharePolicy);
}
static inline NV_STATUS dispchndmaMapTo_DISPATCH(struct DispChannelDma *pResource, RS_RES_MAP_TO_PARAMS *pParams) {
return pResource->__dispchndmaMapTo__(pResource, pParams);
}
static inline NV_STATUS dispchndmaGetOrAllocNotifShare_DISPATCH(struct DispChannelDma *pNotifier, NvHandle hNotifierClient, NvHandle hNotifierResource, struct NotifShare **ppNotifShare) {
return pNotifier->__dispchndmaGetOrAllocNotifShare__(pNotifier, hNotifierClient, hNotifierResource, ppNotifShare);
}
static inline NV_STATUS dispchndmaCheckMemInterUnmap_DISPATCH(struct DispChannelDma *pRmResource, NvBool bSubdeviceHandleProvided) {
return pRmResource->__dispchndmaCheckMemInterUnmap__(pRmResource, bSubdeviceHandleProvided);
}
static inline NV_STATUS dispchndmaGetMapAddrSpace_DISPATCH(struct DispChannelDma *pGpuResource, struct CALL_CONTEXT *pCallContext, NvU32 mapFlags, NV_ADDRESS_SPACE *pAddrSpace) {
return pGpuResource->__dispchndmaGetMapAddrSpace__(pGpuResource, pCallContext, mapFlags, pAddrSpace);
}
static inline void dispchndmaSetNotificationShare_DISPATCH(struct DispChannelDma *pNotifier, struct NotifShare *pNotifShare) {
pNotifier->__dispchndmaSetNotificationShare__(pNotifier, pNotifShare);
}
static inline NvU32 dispchndmaGetRefCount_DISPATCH(struct DispChannelDma *pResource) {
return pResource->__dispchndmaGetRefCount__(pResource);
}
static inline void dispchndmaAddAdditionalDependants_DISPATCH(struct RsClient *pClient, struct DispChannelDma *pResource, RsResourceRef *pReference) {
pResource->__dispchndmaAddAdditionalDependants__(pClient, pResource, pReference);
}
static inline NV_STATUS dispchndmaControl_Prologue_DISPATCH(struct DispChannelDma *pResource, struct CALL_CONTEXT *pCallContext, struct RS_RES_CONTROL_PARAMS_INTERNAL *pParams) {
return pResource->__dispchndmaControl_Prologue__(pResource, pCallContext, pParams);
}
static inline NV_STATUS dispchndmaGetRegBaseOffsetAndSize_DISPATCH(struct DispChannelDma *pDispChannel, struct OBJGPU *pGpu, NvU32 *pOffset, NvU32 *pSize) {
return pDispChannel->__dispchndmaGetRegBaseOffsetAndSize__(pDispChannel, pGpu, pOffset, pSize);
}
static inline NV_STATUS dispchndmaInternalControlForward_DISPATCH(struct DispChannelDma *pGpuResource, NvU32 command, void *pParams, NvU32 size) {
return pGpuResource->__dispchndmaInternalControlForward__(pGpuResource, command, pParams, size);
}
static inline NV_STATUS dispchndmaUnmapFrom_DISPATCH(struct DispChannelDma *pResource, RS_RES_UNMAP_FROM_PARAMS *pParams) {
return pResource->__dispchndmaUnmapFrom__(pResource, pParams);
}
static inline void dispchndmaControl_Epilogue_DISPATCH(struct DispChannelDma *pResource, struct CALL_CONTEXT *pCallContext, struct RS_RES_CONTROL_PARAMS_INTERNAL *pParams) {
pResource->__dispchndmaControl_Epilogue__(pResource, pCallContext, pParams);
}
static inline NV_STATUS dispchndmaControlLookup_DISPATCH(struct DispChannelDma *pResource, struct RS_RES_CONTROL_PARAMS_INTERNAL *pParams, const struct NVOC_EXPORTED_METHOD_DEF **ppEntry) {
return pResource->__dispchndmaControlLookup__(pResource, pParams, ppEntry);
}
static inline NvHandle dispchndmaGetInternalObjectHandle_DISPATCH(struct DispChannelDma *pGpuResource) {
return pGpuResource->__dispchndmaGetInternalObjectHandle__(pGpuResource);
}
static inline NV_STATUS dispchndmaControl_DISPATCH(struct DispChannelDma *pGpuResource, struct CALL_CONTEXT *pCallContext, struct RS_RES_CONTROL_PARAMS_INTERNAL *pParams) {
return pGpuResource->__dispchndmaControl__(pGpuResource, pCallContext, pParams);
}
static inline NV_STATUS dispchndmaUnmap_DISPATCH(struct DispChannelDma *pGpuResource, struct CALL_CONTEXT *pCallContext, struct RsCpuMapping *pCpuMapping) {
return pGpuResource->__dispchndmaUnmap__(pGpuResource, pCallContext, pCpuMapping);
}
static inline NV_STATUS dispchndmaGetMemInterMapParams_DISPATCH(struct DispChannelDma *pRmResource, RMRES_MEM_INTER_MAP_PARAMS *pParams) {
return pRmResource->__dispchndmaGetMemInterMapParams__(pRmResource, pParams);
}
static inline NV_STATUS dispchndmaGetMemoryMappingDescriptor_DISPATCH(struct DispChannelDma *pRmResource, struct MEMORY_DESCRIPTOR **ppMemDesc) {
return pRmResource->__dispchndmaGetMemoryMappingDescriptor__(pRmResource, ppMemDesc);
}
static inline NV_STATUS dispchndmaControlFilter_DISPATCH(struct DispChannelDma *pResource, struct CALL_CONTEXT *pCallContext, struct RS_RES_CONTROL_PARAMS_INTERNAL *pParams) {
return pResource->__dispchndmaControlFilter__(pResource, pCallContext, pParams);
}
static inline NV_STATUS dispchndmaUnregisterEvent_DISPATCH(struct DispChannelDma *pNotifier, NvHandle hNotifierClient, NvHandle hNotifierResource, NvHandle hEventClient, NvHandle hEvent) {
return pNotifier->__dispchndmaUnregisterEvent__(pNotifier, hNotifierClient, hNotifierResource, hEventClient, hEvent);
}
static inline NvBool dispchndmaCanCopy_DISPATCH(struct DispChannelDma *pResource) {
return pResource->__dispchndmaCanCopy__(pResource);
}
static inline void dispchndmaPreDestruct_DISPATCH(struct DispChannelDma *pResource) {
pResource->__dispchndmaPreDestruct__(pResource);
}
static inline PEVENTNOTIFICATION *dispchndmaGetNotificationListPtr_DISPATCH(struct DispChannelDma *pNotifier) {
return pNotifier->__dispchndmaGetNotificationListPtr__(pNotifier);
}
static inline struct NotifShare *dispchndmaGetNotificationShare_DISPATCH(struct DispChannelDma *pNotifier) {
return pNotifier->__dispchndmaGetNotificationShare__(pNotifier);
}
static inline NV_STATUS dispchndmaMap_DISPATCH(struct DispChannelDma *pGpuResource, struct CALL_CONTEXT *pCallContext, struct RS_CPU_MAP_PARAMS *pParams, struct RsCpuMapping *pCpuMapping) {
return pGpuResource->__dispchndmaMap__(pGpuResource, pCallContext, pParams, pCpuMapping);
}
static inline NvBool dispchndmaAccessCallback_DISPATCH(struct DispChannelDma *pResource, struct RsClient *pInvokingClient, void *pAllocParams, RsAccessRight accessRight) {
return pResource->__dispchndmaAccessCallback__(pResource, pInvokingClient, pAllocParams, accessRight);
}
NV_STATUS dispchndmaConstruct_IMPL(struct DispChannelDma *arg_pDispChannelDma, struct CALL_CONTEXT *arg_pCallContext, struct RS_RES_ALLOC_PARAMS_INTERNAL *arg_pParams);
#define __nvoc_dispchndmaConstruct(arg_pDispChannelDma, arg_pCallContext, arg_pParams) dispchndmaConstruct_IMPL(arg_pDispChannelDma, arg_pCallContext, arg_pParams)
#undef PRIVATE_FIELD
#endif // DISP_CHANNEL_H
#ifdef __cplusplus
} // extern "C"
#endif
#endif // _G_DISP_CHANNEL_NVOC_H_
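
The generated header above follows one repeating dispatch convention: each class embeds a table of per-instance function pointers named __<prefix><Method>__, the public <prefix><Method>() name is a macro that expands to <prefix><Method>_DISPATCH(), and the _DISPATCH inline simply calls through the stored pointer. Below is a minimal, self-contained sketch of that pattern for readers unfamiliar with NVOC; Widget, widgetControl and the other identifiers are hypothetical and are not part of the NVIDIA headers.

/*
 * Sketch only: mirrors the macro -> _DISPATCH -> function-pointer chain
 * seen in DispChannelPio/DispChannelDma above. All names are illustrative.
 */
#include <stdio.h>

typedef int WIDGET_STATUS;
#define WIDGET_OK 0

struct Widget {
    /* Per-instance virtual method slot, filled in by class init code. */
    WIDGET_STATUS (*__widgetControl__)(struct Widget *, unsigned cmd);
};

/* The _DISPATCH helper just forwards through the stored pointer. */
static inline WIDGET_STATUS widgetControl_DISPATCH(struct Widget *pThis, unsigned cmd) {
    return pThis->__widgetControl__(pThis, cmd);
}

/* The public short name is a macro alias for the dispatcher. */
#define widgetControl(pThis, cmd) widgetControl_DISPATCH(pThis, cmd)

/* A concrete implementation that init code can install in the slot. */
static WIDGET_STATUS widgetControl_IMPL(struct Widget *pThis, unsigned cmd) {
    (void)pThis;
    printf("control cmd=0x%x\n", cmd);
    return WIDGET_OK;
}

int main(void) {
    struct Widget w = { .__widgetControl__ = widgetControl_IMPL };
    return widgetControl(&w, 0x101) == WIDGET_OK ? 0 : 1;
}

Calling through a per-instance pointer is what lets derived classes such as DispChannelPio and DispChannelDma override inherited behavior while call sites keep using the short macro name.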

@@ -0,0 +1,169 @@
#define NVOC_DISP_INST_MEM_H_PRIVATE_ACCESS_ALLOWED
#include "nvoc/runtime.h"
#include "nvoc/rtti.h"
#include "nvtypes.h"
#include "nvport/nvport.h"
#include "nvport/inline/util_valist.h"
#include "utils/nvassert.h"
#include "g_disp_inst_mem_nvoc.h"
#ifdef DEBUG
char __nvoc_class_id_uniqueness_check_0x8223e2 = 1;
#endif
extern const struct NVOC_CLASS_DEF __nvoc_class_def_DisplayInstanceMemory;
extern const struct NVOC_CLASS_DEF __nvoc_class_def_Object;
void __nvoc_init_DisplayInstanceMemory(DisplayInstanceMemory*, RmHalspecOwner* );
void __nvoc_init_funcTable_DisplayInstanceMemory(DisplayInstanceMemory*, RmHalspecOwner* );
NV_STATUS __nvoc_ctor_DisplayInstanceMemory(DisplayInstanceMemory*, RmHalspecOwner* );
void __nvoc_init_dataField_DisplayInstanceMemory(DisplayInstanceMemory*, RmHalspecOwner* );
void __nvoc_dtor_DisplayInstanceMemory(DisplayInstanceMemory*);
extern const struct NVOC_EXPORT_INFO __nvoc_export_info_DisplayInstanceMemory;
static const struct NVOC_RTTI __nvoc_rtti_DisplayInstanceMemory_DisplayInstanceMemory = {
/*pClassDef=*/ &__nvoc_class_def_DisplayInstanceMemory,
/*dtor=*/ (NVOC_DYNAMIC_DTOR) &__nvoc_dtor_DisplayInstanceMemory,
/*offset=*/ 0,
};
static const struct NVOC_RTTI __nvoc_rtti_DisplayInstanceMemory_Object = {
/*pClassDef=*/ &__nvoc_class_def_Object,
/*dtor=*/ &__nvoc_destructFromBase,
/*offset=*/ NV_OFFSETOF(DisplayInstanceMemory, __nvoc_base_Object),
};
static const struct NVOC_CASTINFO __nvoc_castinfo_DisplayInstanceMemory = {
/*numRelatives=*/ 2,
/*relatives=*/ {
&__nvoc_rtti_DisplayInstanceMemory_DisplayInstanceMemory,
&__nvoc_rtti_DisplayInstanceMemory_Object,
},
};
const struct NVOC_CLASS_DEF __nvoc_class_def_DisplayInstanceMemory =
{
/*classInfo=*/ {
/*size=*/ sizeof(DisplayInstanceMemory),
/*classId=*/ classId(DisplayInstanceMemory),
/*providerId=*/ &__nvoc_rtti_provider,
#if NV_PRINTF_STRINGS_ALLOWED
/*name=*/ "DisplayInstanceMemory",
#endif
},
/*objCreatefn=*/ (NVOC_DYNAMIC_OBJ_CREATE) &__nvoc_objCreateDynamic_DisplayInstanceMemory,
/*pCastInfo=*/ &__nvoc_castinfo_DisplayInstanceMemory,
/*pExportInfo=*/ &__nvoc_export_info_DisplayInstanceMemory
};
const struct NVOC_EXPORT_INFO __nvoc_export_info_DisplayInstanceMemory =
{
/*numEntries=*/ 0,
/*pExportEntries=*/ 0
};
void __nvoc_dtor_Object(Object*);
void __nvoc_dtor_DisplayInstanceMemory(DisplayInstanceMemory *pThis) {
__nvoc_instmemDestruct(pThis);
__nvoc_dtor_Object(&pThis->__nvoc_base_Object);
PORT_UNREFERENCED_VARIABLE(pThis);
}
void __nvoc_init_dataField_DisplayInstanceMemory(DisplayInstanceMemory *pThis, RmHalspecOwner *pRmhalspecowner) {
DispIpHal *dispIpHal = &pRmhalspecowner->dispIpHal;
const unsigned long dispIpHal_HalVarIdx = (unsigned long)dispIpHal->__nvoc_HalVarIdx;
PORT_UNREFERENCED_VARIABLE(pThis);
PORT_UNREFERENCED_VARIABLE(pRmhalspecowner);
PORT_UNREFERENCED_VARIABLE(dispIpHal);
PORT_UNREFERENCED_VARIABLE(dispIpHal_HalVarIdx);
}
NV_STATUS __nvoc_ctor_Object(Object* );
NV_STATUS __nvoc_ctor_DisplayInstanceMemory(DisplayInstanceMemory *pThis, RmHalspecOwner *pRmhalspecowner) {
NV_STATUS status = NV_OK;
status = __nvoc_ctor_Object(&pThis->__nvoc_base_Object);
if (status != NV_OK) goto __nvoc_ctor_DisplayInstanceMemory_fail_Object;
__nvoc_init_dataField_DisplayInstanceMemory(pThis, pRmhalspecowner);
status = __nvoc_instmemConstruct(pThis);
if (status != NV_OK) goto __nvoc_ctor_DisplayInstanceMemory_fail__init;
goto __nvoc_ctor_DisplayInstanceMemory_exit; // Success
__nvoc_ctor_DisplayInstanceMemory_fail__init:
__nvoc_dtor_Object(&pThis->__nvoc_base_Object);
__nvoc_ctor_DisplayInstanceMemory_fail_Object:
__nvoc_ctor_DisplayInstanceMemory_exit:
return status;
}
static void __nvoc_init_funcTable_DisplayInstanceMemory_1(DisplayInstanceMemory *pThis, RmHalspecOwner *pRmhalspecowner) {
DispIpHal *dispIpHal = &pRmhalspecowner->dispIpHal;
const unsigned long dispIpHal_HalVarIdx = (unsigned long)dispIpHal->__nvoc_HalVarIdx;
PORT_UNREFERENCED_VARIABLE(pThis);
PORT_UNREFERENCED_VARIABLE(pRmhalspecowner);
PORT_UNREFERENCED_VARIABLE(dispIpHal);
PORT_UNREFERENCED_VARIABLE(dispIpHal_HalVarIdx);
}
void __nvoc_init_funcTable_DisplayInstanceMemory(DisplayInstanceMemory *pThis, RmHalspecOwner *pRmhalspecowner) {
__nvoc_init_funcTable_DisplayInstanceMemory_1(pThis, pRmhalspecowner);
}
void __nvoc_init_Object(Object*);
void __nvoc_init_DisplayInstanceMemory(DisplayInstanceMemory *pThis, RmHalspecOwner *pRmhalspecowner) {
pThis->__nvoc_pbase_DisplayInstanceMemory = pThis;
pThis->__nvoc_pbase_Object = &pThis->__nvoc_base_Object;
__nvoc_init_Object(&pThis->__nvoc_base_Object);
__nvoc_init_funcTable_DisplayInstanceMemory(pThis, pRmhalspecowner);
}
NV_STATUS __nvoc_objCreate_DisplayInstanceMemory(DisplayInstanceMemory **ppThis, Dynamic *pParent, NvU32 createFlags) {
NV_STATUS status;
Object *pParentObj;
DisplayInstanceMemory *pThis;
RmHalspecOwner *pRmhalspecowner;
pThis = portMemAllocNonPaged(sizeof(DisplayInstanceMemory));
if (pThis == NULL) return NV_ERR_NO_MEMORY;
portMemSet(pThis, 0, sizeof(DisplayInstanceMemory));
__nvoc_initRtti(staticCast(pThis, Dynamic), &__nvoc_class_def_DisplayInstanceMemory);
if (pParent != NULL && !(createFlags & NVOC_OBJ_CREATE_FLAGS_PARENT_HALSPEC_ONLY))
{
pParentObj = dynamicCast(pParent, Object);
objAddChild(pParentObj, &pThis->__nvoc_base_Object);
}
else
{
pThis->__nvoc_base_Object.pParent = NULL;
}
if ((pRmhalspecowner = dynamicCast(pParent, RmHalspecOwner)) == NULL)
pRmhalspecowner = objFindAncestorOfType(RmHalspecOwner, pParent);
NV_ASSERT_OR_RETURN(pRmhalspecowner != NULL, NV_ERR_INVALID_ARGUMENT);
__nvoc_init_DisplayInstanceMemory(pThis, pRmhalspecowner);
status = __nvoc_ctor_DisplayInstanceMemory(pThis, pRmhalspecowner);
if (status != NV_OK) goto __nvoc_objCreate_DisplayInstanceMemory_cleanup;
*ppThis = pThis;
return NV_OK;
__nvoc_objCreate_DisplayInstanceMemory_cleanup:
// do not call destructors here since the constructor already called them
portMemFree(pThis);
return status;
}
NV_STATUS __nvoc_objCreateDynamic_DisplayInstanceMemory(DisplayInstanceMemory **ppThis, Dynamic *pParent, NvU32 createFlags, va_list args) {
NV_STATUS status;
status = __nvoc_objCreate_DisplayInstanceMemory(ppThis, pParent, createFlags);
return status;
}
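
The constructor above (__nvoc_ctor_DisplayInstanceMemory) shows the generated ordering: construct the base Object, initialize data fields, run the class's own construct hook, and on failure jump backwards through labels so only the parts that were already built get torn down. A minimal sketch of that goto-based unwinding follows; Thing, baseCtor and the other names are hypothetical, chosen only to illustrate the shape of the generated code.

/*
 * Sketch only: goto-based construct/unwind ordering, as in the generated
 * constructor above. Names are illustrative, not the RM API.
 */
#include <stdio.h>
#include <stdlib.h>

typedef int STATUS;
#define STATUS_OK  0
#define STATUS_ERR 1

struct Base  { int baseReady; };
struct Thing { struct Base base; char *pBuffer; };

static STATUS baseCtor(struct Base *b) { b->baseReady = 1; return STATUS_OK; }
static void   baseDtor(struct Base *b) { b->baseReady = 0; }

static STATUS thingCtor(struct Thing *t) {
    STATUS status;

    /* Construct the base first, mirroring __nvoc_ctor_Object(). */
    status = baseCtor(&t->base);
    if (status != STATUS_OK) goto fail_base;

    /* Then the derived class's own construction step. */
    t->pBuffer = malloc(64);
    if (t->pBuffer == NULL) { status = STATUS_ERR; goto fail_self; }

    return STATUS_OK;        /* Success path skips every cleanup label. */

fail_self:
    baseDtor(&t->base);      /* Unwind only what was already built. */
fail_base:
    return status;
}

int main(void) {
    struct Thing t;
    if (thingCtor(&t) != STATUS_OK) return 1;
    free(t.pBuffer);
    return 0;
}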

@@ -0,0 +1,358 @@
#ifndef _G_DISP_INST_MEM_NVOC_H_
#define _G_DISP_INST_MEM_NVOC_H_
#include "nvoc/runtime.h"
#ifdef __cplusplus
extern "C" {
#endif
/*
* SPDX-FileCopyrightText: Copyright (c) 1993-2021 NVIDIA CORPORATION & AFFILIATES. All rights reserved.
* SPDX-License-Identifier: MIT
*
* Permission is hereby granted, free of charge, to any person obtaining a
* copy of this software and associated documentation files (the "Software"),
* to deal in the Software without restriction, including without limitation
* the rights to use, copy, modify, merge, publish, distribute, sublicense,
* and/or sell copies of the Software, and to permit persons to whom the
* Software is furnished to do so, subject to the following conditions:
*
* The above copyright notice and this permission notice shall be included in
* all copies or substantial portions of the Software.
*
* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
* IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
* FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL
* THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
* LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
* FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER
* DEALINGS IN THE SOFTWARE.
*/
#include "g_disp_inst_mem_nvoc.h"
#ifndef DISPLAY_INSTANCE_MEMORY_H
#define DISPLAY_INSTANCE_MEMORY_H
/* ------------------------ Includes --------------------------------------- */
#include "nvtypes.h"
#include "nvoc/utility.h"
#include "gpu/disp/kern_disp.h"
#include "gpu/mem_mgr/virt_mem_allocator_common.h"
#include "gpu/mem_mgr/mem_desc.h"
/* ------------------------ Forward Declaration ---------------------------- */
typedef struct OBJEHEAP OBJEHEAP;
struct DispChannel;
#ifndef __NVOC_CLASS_DispChannel_TYPEDEF__
#define __NVOC_CLASS_DispChannel_TYPEDEF__
typedef struct DispChannel DispChannel;
#endif /* __NVOC_CLASS_DispChannel_TYPEDEF__ */
#ifndef __nvoc_class_id_DispChannel
#define __nvoc_class_id_DispChannel 0xbd2ff3
#endif /* __nvoc_class_id_DispChannel */
struct ContextDma;
#ifndef __NVOC_CLASS_ContextDma_TYPEDEF__
#define __NVOC_CLASS_ContextDma_TYPEDEF__
typedef struct ContextDma ContextDma;
#endif /* __NVOC_CLASS_ContextDma_TYPEDEF__ */
#ifndef __nvoc_class_id_ContextDma
#define __nvoc_class_id_ContextDma 0x88441b
#endif /* __nvoc_class_id_ContextDma */
/* ------------------------ Macros & Defines ------------------------------- */
#define KERNEL_DISPLAY_GET_INST_MEM(p) ((p)->pInst)
#define DISP_INST_MEM_ALIGN 0x10000
/* ------------------------ Types definitions ------------------------------ */
/*!
* A software hash table entry
*/
typedef struct
{
struct ContextDma *pContextDma;
struct DispChannel *pDispChannel;
} SW_HASH_TABLE_ENTRY;
#ifdef NVOC_DISP_INST_MEM_H_PRIVATE_ACCESS_ALLOWED
#define PRIVATE_FIELD(x) x
#else
#define PRIVATE_FIELD(x) NVOC_PRIVATE_FIELD(x)
#endif
struct DisplayInstanceMemory {
const struct NVOC_RTTI *__nvoc_rtti;
struct Object __nvoc_base_Object;
struct Object *__nvoc_pbase_Object;
struct DisplayInstanceMemory *__nvoc_pbase_DisplayInstanceMemory;
NV_ADDRESS_SPACE instMemAddrSpace;
NvU32 instMemAttr;
NvU64 instMemBase;
NvU32 instMemSize;
MEMORY_DESCRIPTOR *pAllocedInstMemDesc;
MEMORY_DESCRIPTOR *pInstMemDesc;
void *pInstMem;
NvU32 nHashTableEntries;
NvU32 hashTableBaseAddr;
SW_HASH_TABLE_ENTRY *pHashTable;
OBJEHEAP *pInstHeap;
};
#ifndef __NVOC_CLASS_DisplayInstanceMemory_TYPEDEF__
#define __NVOC_CLASS_DisplayInstanceMemory_TYPEDEF__
typedef struct DisplayInstanceMemory DisplayInstanceMemory;
#endif /* __NVOC_CLASS_DisplayInstanceMemory_TYPEDEF__ */
#ifndef __nvoc_class_id_DisplayInstanceMemory
#define __nvoc_class_id_DisplayInstanceMemory 0x8223e2
#endif /* __nvoc_class_id_DisplayInstanceMemory */
extern const struct NVOC_CLASS_DEF __nvoc_class_def_DisplayInstanceMemory;
#define __staticCast_DisplayInstanceMemory(pThis) \
((pThis)->__nvoc_pbase_DisplayInstanceMemory)
#ifdef __nvoc_disp_inst_mem_h_disabled
#define __dynamicCast_DisplayInstanceMemory(pThis) ((DisplayInstanceMemory*)NULL)
#else //__nvoc_disp_inst_mem_h_disabled
#define __dynamicCast_DisplayInstanceMemory(pThis) \
((DisplayInstanceMemory*)__nvoc_dynamicCast(staticCast((pThis), Dynamic), classInfo(DisplayInstanceMemory)))
#endif //__nvoc_disp_inst_mem_h_disabled
NV_STATUS __nvoc_objCreateDynamic_DisplayInstanceMemory(DisplayInstanceMemory**, Dynamic*, NvU32, va_list);
NV_STATUS __nvoc_objCreate_DisplayInstanceMemory(DisplayInstanceMemory**, Dynamic*, NvU32);
#define __objCreate_DisplayInstanceMemory(ppNewObj, pParent, createFlags) \
__nvoc_objCreate_DisplayInstanceMemory((ppNewObj), staticCast((pParent), Dynamic), (createFlags))
void instmemGetSize_v03_00(OBJGPU *pGpu, struct DisplayInstanceMemory *pInstMem, NvU32 *pTotalInstMemSize, NvU32 *pHashTableSize);
#ifdef __nvoc_disp_inst_mem_h_disabled
static inline void instmemGetSize(OBJGPU *pGpu, struct DisplayInstanceMemory *pInstMem, NvU32 *pTotalInstMemSize, NvU32 *pHashTableSize) {
NV_ASSERT_FAILED_PRECOMP("DisplayInstanceMemory was disabled!");
}
#else //__nvoc_disp_inst_mem_h_disabled
#define instmemGetSize(pGpu, pInstMem, pTotalInstMemSize, pHashTableSize) instmemGetSize_v03_00(pGpu, pInstMem, pTotalInstMemSize, pHashTableSize)
#endif //__nvoc_disp_inst_mem_h_disabled
#define instmemGetSize_HAL(pGpu, pInstMem, pTotalInstMemSize, pHashTableSize) instmemGetSize(pGpu, pInstMem, pTotalInstMemSize, pHashTableSize)
NvU32 instmemGetHashTableBaseAddr_v03_00(OBJGPU *pGpu, struct DisplayInstanceMemory *pInstMem);
#ifdef __nvoc_disp_inst_mem_h_disabled
static inline NvU32 instmemGetHashTableBaseAddr(OBJGPU *pGpu, struct DisplayInstanceMemory *pInstMem) {
NV_ASSERT_FAILED_PRECOMP("DisplayInstanceMemory was disabled!");
return 0;
}
#else //__nvoc_disp_inst_mem_h_disabled
#define instmemGetHashTableBaseAddr(pGpu, pInstMem) instmemGetHashTableBaseAddr_v03_00(pGpu, pInstMem)
#endif //__nvoc_disp_inst_mem_h_disabled
#define instmemGetHashTableBaseAddr_HAL(pGpu, pInstMem) instmemGetHashTableBaseAddr(pGpu, pInstMem)
NvBool instmemIsValid_v03_00(OBJGPU *pGpu, struct DisplayInstanceMemory *pInstMem, NvU32 offset);
#ifdef __nvoc_disp_inst_mem_h_disabled
static inline NvBool instmemIsValid(OBJGPU *pGpu, struct DisplayInstanceMemory *pInstMem, NvU32 offset) {
NV_ASSERT_FAILED_PRECOMP("DisplayInstanceMemory was disabled!");
return NV_FALSE;
}
#else //__nvoc_disp_inst_mem_h_disabled
#define instmemIsValid(pGpu, pInstMem, offset) instmemIsValid_v03_00(pGpu, pInstMem, offset)
#endif //__nvoc_disp_inst_mem_h_disabled
#define instmemIsValid_HAL(pGpu, pInstMem, offset) instmemIsValid(pGpu, pInstMem, offset)
NvU32 instmemGenerateHashTableData_v03_00(OBJGPU *pGpu, struct DisplayInstanceMemory *pInstMem, NvU32 hClient, NvU32 offset, NvU32 dispChannelNum);
#ifdef __nvoc_disp_inst_mem_h_disabled
static inline NvU32 instmemGenerateHashTableData(OBJGPU *pGpu, struct DisplayInstanceMemory *pInstMem, NvU32 hClient, NvU32 offset, NvU32 dispChannelNum) {
NV_ASSERT_FAILED_PRECOMP("DisplayInstanceMemory was disabled!");
return 0;
}
#else //__nvoc_disp_inst_mem_h_disabled
#define instmemGenerateHashTableData(pGpu, pInstMem, hClient, offset, dispChannelNum) instmemGenerateHashTableData_v03_00(pGpu, pInstMem, hClient, offset, dispChannelNum)
#endif //__nvoc_disp_inst_mem_h_disabled
#define instmemGenerateHashTableData_HAL(pGpu, pInstMem, hClient, offset, dispChannelNum) instmemGenerateHashTableData(pGpu, pInstMem, hClient, offset, dispChannelNum)
NV_STATUS instmemHashFunc_v03_00(OBJGPU *pGpu, struct DisplayInstanceMemory *pInstMem, NvHandle hClient, NvHandle hContextDma, NvU32 dispChannelNum, NvU32 *result);
#ifdef __nvoc_disp_inst_mem_h_disabled
static inline NV_STATUS instmemHashFunc(OBJGPU *pGpu, struct DisplayInstanceMemory *pInstMem, NvHandle hClient, NvHandle hContextDma, NvU32 dispChannelNum, NvU32 *result) {
NV_ASSERT_FAILED_PRECOMP("DisplayInstanceMemory was disabled!");
return NV_ERR_NOT_SUPPORTED;
}
#else //__nvoc_disp_inst_mem_h_disabled
#define instmemHashFunc(pGpu, pInstMem, hClient, hContextDma, dispChannelNum, result) instmemHashFunc_v03_00(pGpu, pInstMem, hClient, hContextDma, dispChannelNum, result)
#endif //__nvoc_disp_inst_mem_h_disabled
#define instmemHashFunc_HAL(pGpu, pInstMem, hClient, hContextDma, dispChannelNum, result) instmemHashFunc(pGpu, pInstMem, hClient, hContextDma, dispChannelNum, result)
NV_STATUS instmemCommitContextDma_v03_00(OBJGPU *pGpu, struct DisplayInstanceMemory *pInstMem, struct ContextDma *pContextDma);
#ifdef __nvoc_disp_inst_mem_h_disabled
static inline NV_STATUS instmemCommitContextDma(OBJGPU *pGpu, struct DisplayInstanceMemory *pInstMem, struct ContextDma *pContextDma) {
NV_ASSERT_FAILED_PRECOMP("DisplayInstanceMemory was disabled!");
return NV_ERR_NOT_SUPPORTED;
}
#else //__nvoc_disp_inst_mem_h_disabled
#define instmemCommitContextDma(pGpu, pInstMem, pContextDma) instmemCommitContextDma_v03_00(pGpu, pInstMem, pContextDma)
#endif //__nvoc_disp_inst_mem_h_disabled
#define instmemCommitContextDma_HAL(pGpu, pInstMem, pContextDma) instmemCommitContextDma(pGpu, pInstMem, pContextDma)
static inline void instmemDecommitContextDma_b3696a(OBJGPU *pGpu, struct DisplayInstanceMemory *pInstMem, struct ContextDma *pContextDma) {
return;
}
#ifdef __nvoc_disp_inst_mem_h_disabled
static inline void instmemDecommitContextDma(OBJGPU *pGpu, struct DisplayInstanceMemory *pInstMem, struct ContextDma *pContextDma) {
NV_ASSERT_FAILED_PRECOMP("DisplayInstanceMemory was disabled!");
}
#else //__nvoc_disp_inst_mem_h_disabled
#define instmemDecommitContextDma(pGpu, pInstMem, pContextDma) instmemDecommitContextDma_b3696a(pGpu, pInstMem, pContextDma)
#endif //__nvoc_disp_inst_mem_h_disabled
#define instmemDecommitContextDma_HAL(pGpu, pInstMem, pContextDma) instmemDecommitContextDma(pGpu, pInstMem, pContextDma)
NV_STATUS instmemUpdateContextDma_v03_00(OBJGPU *pGpu, struct DisplayInstanceMemory *pInstMem, struct ContextDma *pContextDma, NvU64 *pNewAddress, NvU64 *pNewLimit, NvHandle hMemory, NvU32 comprInfo);
#ifdef __nvoc_disp_inst_mem_h_disabled
static inline NV_STATUS instmemUpdateContextDma(OBJGPU *pGpu, struct DisplayInstanceMemory *pInstMem, struct ContextDma *pContextDma, NvU64 *pNewAddress, NvU64 *pNewLimit, NvHandle hMemory, NvU32 comprInfo) {
NV_ASSERT_FAILED_PRECOMP("DisplayInstanceMemory was disabled!");
return NV_ERR_NOT_SUPPORTED;
}
#else //__nvoc_disp_inst_mem_h_disabled
#define instmemUpdateContextDma(pGpu, pInstMem, pContextDma, pNewAddress, pNewLimit, hMemory, comprInfo) instmemUpdateContextDma_v03_00(pGpu, pInstMem, pContextDma, pNewAddress, pNewLimit, hMemory, comprInfo)
#endif //__nvoc_disp_inst_mem_h_disabled
#define instmemUpdateContextDma_HAL(pGpu, pInstMem, pContextDma, pNewAddress, pNewLimit, hMemory, comprInfo) instmemUpdateContextDma(pGpu, pInstMem, pContextDma, pNewAddress, pNewLimit, hMemory, comprInfo)
NV_STATUS instmemConstruct_IMPL(struct DisplayInstanceMemory *arg_pInstMem);
#define __nvoc_instmemConstruct(arg_pInstMem) instmemConstruct_IMPL(arg_pInstMem)
void instmemDestruct_IMPL(struct DisplayInstanceMemory *pInstMem);
#define __nvoc_instmemDestruct(pInstMem) instmemDestruct_IMPL(pInstMem)
NV_STATUS instmemStateInitLocked_IMPL(OBJGPU *pGpu, struct DisplayInstanceMemory *pInstMem);
#ifdef __nvoc_disp_inst_mem_h_disabled
static inline NV_STATUS instmemStateInitLocked(OBJGPU *pGpu, struct DisplayInstanceMemory *pInstMem) {
NV_ASSERT_FAILED_PRECOMP("DisplayInstanceMemory was disabled!");
return NV_ERR_NOT_SUPPORTED;
}
#else //__nvoc_disp_inst_mem_h_disabled
#define instmemStateInitLocked(pGpu, pInstMem) instmemStateInitLocked_IMPL(pGpu, pInstMem)
#endif //__nvoc_disp_inst_mem_h_disabled
void instmemStateDestroy_IMPL(OBJGPU *pGpu, struct DisplayInstanceMemory *pInstMem);
#ifdef __nvoc_disp_inst_mem_h_disabled
static inline void instmemStateDestroy(OBJGPU *pGpu, struct DisplayInstanceMemory *pInstMem) {
NV_ASSERT_FAILED_PRECOMP("DisplayInstanceMemory was disabled!");
}
#else //__nvoc_disp_inst_mem_h_disabled
#define instmemStateDestroy(pGpu, pInstMem) instmemStateDestroy_IMPL(pGpu, pInstMem)
#endif //__nvoc_disp_inst_mem_h_disabled
NV_STATUS instmemStateLoad_IMPL(OBJGPU *pGpu, struct DisplayInstanceMemory *pInstMem, NvU32 flags);
#ifdef __nvoc_disp_inst_mem_h_disabled
static inline NV_STATUS instmemStateLoad(OBJGPU *pGpu, struct DisplayInstanceMemory *pInstMem, NvU32 flags) {
NV_ASSERT_FAILED_PRECOMP("DisplayInstanceMemory was disabled!");
return NV_ERR_NOT_SUPPORTED;
}
#else //__nvoc_disp_inst_mem_h_disabled
#define instmemStateLoad(pGpu, pInstMem, flags) instmemStateLoad_IMPL(pGpu, pInstMem, flags)
#endif //__nvoc_disp_inst_mem_h_disabled
NV_STATUS instmemStateUnload_IMPL(OBJGPU *pGpu, struct DisplayInstanceMemory *pInstMem, NvU32 flags);
#ifdef __nvoc_disp_inst_mem_h_disabled
static inline NV_STATUS instmemStateUnload(OBJGPU *pGpu, struct DisplayInstanceMemory *pInstMem, NvU32 flags) {
NV_ASSERT_FAILED_PRECOMP("DisplayInstanceMemory was disabled!");
return NV_ERR_NOT_SUPPORTED;
}
#else //__nvoc_disp_inst_mem_h_disabled
#define instmemStateUnload(pGpu, pInstMem, flags) instmemStateUnload_IMPL(pGpu, pInstMem, flags)
#endif //__nvoc_disp_inst_mem_h_disabled
void instmemSetMemory_IMPL(OBJGPU *pGpu, struct DisplayInstanceMemory *pInstMem, NV_ADDRESS_SPACE dispInstMemAddrSpace, NvU32 dispInstMemAttr, NvU64 dispInstMemBase, NvU32 dispInstMemSize);
#ifdef __nvoc_disp_inst_mem_h_disabled
static inline void instmemSetMemory(OBJGPU *pGpu, struct DisplayInstanceMemory *pInstMem, NV_ADDRESS_SPACE dispInstMemAddrSpace, NvU32 dispInstMemAttr, NvU64 dispInstMemBase, NvU32 dispInstMemSize) {
NV_ASSERT_FAILED_PRECOMP("DisplayInstanceMemory was disabled!");
}
#else //__nvoc_disp_inst_mem_h_disabled
#define instmemSetMemory(pGpu, pInstMem, dispInstMemAddrSpace, dispInstMemAttr, dispInstMemBase, dispInstMemSize) instmemSetMemory_IMPL(pGpu, pInstMem, dispInstMemAddrSpace, dispInstMemAttr, dispInstMemBase, dispInstMemSize)
#endif //__nvoc_disp_inst_mem_h_disabled
NV_STATUS instmemBindContextDma_IMPL(OBJGPU *pGpu, struct DisplayInstanceMemory *pInstMem, struct ContextDma *pContextDma, struct DispChannel *pDispChannel);
#ifdef __nvoc_disp_inst_mem_h_disabled
static inline NV_STATUS instmemBindContextDma(OBJGPU *pGpu, struct DisplayInstanceMemory *pInstMem, struct ContextDma *pContextDma, struct DispChannel *pDispChannel) {
NV_ASSERT_FAILED_PRECOMP("DisplayInstanceMemory was disabled!");
return NV_ERR_NOT_SUPPORTED;
}
#else //__nvoc_disp_inst_mem_h_disabled
#define instmemBindContextDma(pGpu, pInstMem, pContextDma, pDispChannel) instmemBindContextDma_IMPL(pGpu, pInstMem, pContextDma, pDispChannel)
#endif //__nvoc_disp_inst_mem_h_disabled
NV_STATUS instmemUnbindContextDma_IMPL(OBJGPU *pGpu, struct DisplayInstanceMemory *pInstMem, struct ContextDma *pContextDma, struct DispChannel *pDispChannel);
#ifdef __nvoc_disp_inst_mem_h_disabled
static inline NV_STATUS instmemUnbindContextDma(OBJGPU *pGpu, struct DisplayInstanceMemory *pInstMem, struct ContextDma *pContextDma, struct DispChannel *pDispChannel) {
NV_ASSERT_FAILED_PRECOMP("DisplayInstanceMemory was disabled!");
return NV_ERR_NOT_SUPPORTED;
}
#else //__nvoc_disp_inst_mem_h_disabled
#define instmemUnbindContextDma(pGpu, pInstMem, pContextDma, pDispChannel) instmemUnbindContextDma_IMPL(pGpu, pInstMem, pContextDma, pDispChannel)
#endif //__nvoc_disp_inst_mem_h_disabled
void instmemUnbindContextDmaFromAllChannels_IMPL(OBJGPU *pGpu, struct DisplayInstanceMemory *pInstMem, struct ContextDma *pContextDma);
#ifdef __nvoc_disp_inst_mem_h_disabled
static inline void instmemUnbindContextDmaFromAllChannels(OBJGPU *pGpu, struct DisplayInstanceMemory *pInstMem, struct ContextDma *pContextDma) {
NV_ASSERT_FAILED_PRECOMP("DisplayInstanceMemory was disabled!");
}
#else //__nvoc_disp_inst_mem_h_disabled
#define instmemUnbindContextDmaFromAllChannels(pGpu, pInstMem, pContextDma) instmemUnbindContextDmaFromAllChannels_IMPL(pGpu, pInstMem, pContextDma)
#endif //__nvoc_disp_inst_mem_h_disabled
void instmemUnbindDispChannelContextDmas_IMPL(OBJGPU *pGpu, struct DisplayInstanceMemory *pInstMem, struct DispChannel *pDispChannel);
#ifdef __nvoc_disp_inst_mem_h_disabled
static inline void instmemUnbindDispChannelContextDmas(OBJGPU *pGpu, struct DisplayInstanceMemory *pInstMem, struct DispChannel *pDispChannel) {
NV_ASSERT_FAILED_PRECOMP("DisplayInstanceMemory was disabled!");
}
#else //__nvoc_disp_inst_mem_h_disabled
#define instmemUnbindDispChannelContextDmas(pGpu, pInstMem, pDispChannel) instmemUnbindDispChannelContextDmas_IMPL(pGpu, pInstMem, pDispChannel)
#endif //__nvoc_disp_inst_mem_h_disabled
NV_STATUS instmemReserveContextDma_IMPL(OBJGPU *pGpu, struct DisplayInstanceMemory *pInstMem, NvU32 *offset);
#ifdef __nvoc_disp_inst_mem_h_disabled
static inline NV_STATUS instmemReserveContextDma(OBJGPU *pGpu, struct DisplayInstanceMemory *pInstMem, NvU32 *offset) {
NV_ASSERT_FAILED_PRECOMP("DisplayInstanceMemory was disabled!");
return NV_ERR_NOT_SUPPORTED;
}
#else //__nvoc_disp_inst_mem_h_disabled
#define instmemReserveContextDma(pGpu, pInstMem, offset) instmemReserveContextDma_IMPL(pGpu, pInstMem, offset)
#endif //__nvoc_disp_inst_mem_h_disabled
NV_STATUS instmemFreeContextDma_IMPL(OBJGPU *pGpu, struct DisplayInstanceMemory *pInstMem, NvU32 offset);
#ifdef __nvoc_disp_inst_mem_h_disabled
static inline NV_STATUS instmemFreeContextDma(OBJGPU *pGpu, struct DisplayInstanceMemory *pInstMem, NvU32 offset) {
NV_ASSERT_FAILED_PRECOMP("DisplayInstanceMemory was disabled!");
return NV_ERR_NOT_SUPPORTED;
}
#else //__nvoc_disp_inst_mem_h_disabled
#define instmemFreeContextDma(pGpu, pInstMem, offset) instmemFreeContextDma_IMPL(pGpu, pInstMem, offset)
#endif //__nvoc_disp_inst_mem_h_disabled
#undef PRIVATE_FIELD
#endif // DISPLAY_INSTANCE_MEMORY_H
#ifdef __cplusplus
} // extern "C"
#endif
#endif // _G_DISP_INST_MEM_NVOC_H_
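
Most entry points in this header come in three spellings: a versioned or _IMPL implementation, a short name that either forwards to it or, when __nvoc_disp_inst_mem_h_disabled is defined, collapses into an assert-and-fail stub returning NV_ERR_NOT_SUPPORTED, and a _HAL alias for the short name. A minimal sketch of that compile-time switch follows, under hypothetical names (MYLIB_FOO_DISABLED, fooQuerySize) that are not part of the NVIDIA code.

/*
 * Sketch only: the "disabled class" stub pattern used throughout the
 * header above. All identifiers here are hypothetical.
 */
#include <assert.h>
#include <stdio.h>

typedef int STATUS;
#define STATUS_OK            0
#define STATUS_NOT_SUPPORTED 56

/* Versioned implementation that normally backs the public name. */
static STATUS fooQuerySize_v03_00(unsigned *pSize) {
    *pSize = 4096;
    return STATUS_OK;
}

#ifdef MYLIB_FOO_DISABLED
/* When the class is compiled out, call sites still compile, but they
 * assert in debug builds and report "not supported" at run time. */
static inline STATUS fooQuerySize(unsigned *pSize) {
    (void)pSize;
    assert(!"Foo was disabled!");
    return STATUS_NOT_SUPPORTED;
}
#else
/* Otherwise the short name forwards straight to the versioned impl. */
#define fooQuerySize(pSize) fooQuerySize_v03_00(pSize)
#endif

/* The _HAL alias keeps HAL-style call sites working either way. */
#define fooQuerySize_HAL(pSize) fooQuerySize(pSize)

int main(void) {
    unsigned size = 0;
    STATUS status = fooQuerySize_HAL(&size);
    printf("status=%d size=%u\n", status, size);
    return status == STATUS_OK ? 0 : 1;
}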

File diff suppressed because it is too large

File diff suppressed because it is too large

@@ -0,0 +1,329 @@
#define NVOC_DISP_SF_USER_H_PRIVATE_ACCESS_ALLOWED
#include "nvoc/runtime.h"
#include "nvoc/rtti.h"
#include "nvtypes.h"
#include "nvport/nvport.h"
#include "nvport/inline/util_valist.h"
#include "utils/nvassert.h"
#include "g_disp_sf_user_nvoc.h"
#ifdef DEBUG
char __nvoc_class_id_uniqueness_check_0xba7439 = 1;
#endif
extern const struct NVOC_CLASS_DEF __nvoc_class_def_DispSfUser;
extern const struct NVOC_CLASS_DEF __nvoc_class_def_Object;
extern const struct NVOC_CLASS_DEF __nvoc_class_def_RsResource;
extern const struct NVOC_CLASS_DEF __nvoc_class_def_RmResourceCommon;
extern const struct NVOC_CLASS_DEF __nvoc_class_def_RmResource;
extern const struct NVOC_CLASS_DEF __nvoc_class_def_GpuResource;
void __nvoc_init_DispSfUser(DispSfUser*);
void __nvoc_init_funcTable_DispSfUser(DispSfUser*);
NV_STATUS __nvoc_ctor_DispSfUser(DispSfUser*, struct CALL_CONTEXT * arg_pCallContext, struct RS_RES_ALLOC_PARAMS_INTERNAL * arg_pParams);
void __nvoc_init_dataField_DispSfUser(DispSfUser*);
void __nvoc_dtor_DispSfUser(DispSfUser*);
extern const struct NVOC_EXPORT_INFO __nvoc_export_info_DispSfUser;
static const struct NVOC_RTTI __nvoc_rtti_DispSfUser_DispSfUser = {
/*pClassDef=*/ &__nvoc_class_def_DispSfUser,
/*dtor=*/ (NVOC_DYNAMIC_DTOR) &__nvoc_dtor_DispSfUser,
/*offset=*/ 0,
};
static const struct NVOC_RTTI __nvoc_rtti_DispSfUser_Object = {
/*pClassDef=*/ &__nvoc_class_def_Object,
/*dtor=*/ &__nvoc_destructFromBase,
/*offset=*/ NV_OFFSETOF(DispSfUser, __nvoc_base_GpuResource.__nvoc_base_RmResource.__nvoc_base_RsResource.__nvoc_base_Object),
};
static const struct NVOC_RTTI __nvoc_rtti_DispSfUser_RsResource = {
/*pClassDef=*/ &__nvoc_class_def_RsResource,
/*dtor=*/ &__nvoc_destructFromBase,
/*offset=*/ NV_OFFSETOF(DispSfUser, __nvoc_base_GpuResource.__nvoc_base_RmResource.__nvoc_base_RsResource),
};
static const struct NVOC_RTTI __nvoc_rtti_DispSfUser_RmResourceCommon = {
/*pClassDef=*/ &__nvoc_class_def_RmResourceCommon,
/*dtor=*/ &__nvoc_destructFromBase,
/*offset=*/ NV_OFFSETOF(DispSfUser, __nvoc_base_GpuResource.__nvoc_base_RmResource.__nvoc_base_RmResourceCommon),
};
static const struct NVOC_RTTI __nvoc_rtti_DispSfUser_RmResource = {
/*pClassDef=*/ &__nvoc_class_def_RmResource,
/*dtor=*/ &__nvoc_destructFromBase,
/*offset=*/ NV_OFFSETOF(DispSfUser, __nvoc_base_GpuResource.__nvoc_base_RmResource),
};
static const struct NVOC_RTTI __nvoc_rtti_DispSfUser_GpuResource = {
/*pClassDef=*/ &__nvoc_class_def_GpuResource,
/*dtor=*/ &__nvoc_destructFromBase,
/*offset=*/ NV_OFFSETOF(DispSfUser, __nvoc_base_GpuResource),
};
static const struct NVOC_CASTINFO __nvoc_castinfo_DispSfUser = {
/*numRelatives=*/ 6,
/*relatives=*/ {
&__nvoc_rtti_DispSfUser_DispSfUser,
&__nvoc_rtti_DispSfUser_GpuResource,
&__nvoc_rtti_DispSfUser_RmResource,
&__nvoc_rtti_DispSfUser_RmResourceCommon,
&__nvoc_rtti_DispSfUser_RsResource,
&__nvoc_rtti_DispSfUser_Object,
},
};
const struct NVOC_CLASS_DEF __nvoc_class_def_DispSfUser =
{
/*classInfo=*/ {
/*size=*/ sizeof(DispSfUser),
/*classId=*/ classId(DispSfUser),
/*providerId=*/ &__nvoc_rtti_provider,
#if NV_PRINTF_STRINGS_ALLOWED
/*name=*/ "DispSfUser",
#endif
},
/*objCreatefn=*/ (NVOC_DYNAMIC_OBJ_CREATE) &__nvoc_objCreateDynamic_DispSfUser,
/*pCastInfo=*/ &__nvoc_castinfo_DispSfUser,
/*pExportInfo=*/ &__nvoc_export_info_DispSfUser
};
static NV_STATUS __nvoc_thunk_DispSfUser_gpuresGetRegBaseOffsetAndSize(struct GpuResource *pDispSfUser, struct OBJGPU *pGpu, NvU32 *pOffset, NvU32 *pSize) {
return dispsfGetRegBaseOffsetAndSize((struct DispSfUser *)(((unsigned char *)pDispSfUser) - __nvoc_rtti_DispSfUser_GpuResource.offset), pGpu, pOffset, pSize);
}
static NvBool __nvoc_thunk_GpuResource_dispsfShareCallback(struct DispSfUser *pGpuResource, struct RsClient *pInvokingClient, struct RsResourceRef *pParentRef, RS_SHARE_POLICY *pSharePolicy) {
return gpuresShareCallback((struct GpuResource *)(((unsigned char *)pGpuResource) + __nvoc_rtti_DispSfUser_GpuResource.offset), pInvokingClient, pParentRef, pSharePolicy);
}
static NV_STATUS __nvoc_thunk_GpuResource_dispsfControl(struct DispSfUser *pGpuResource, struct CALL_CONTEXT *pCallContext, struct RS_RES_CONTROL_PARAMS_INTERNAL *pParams) {
return gpuresControl((struct GpuResource *)(((unsigned char *)pGpuResource) + __nvoc_rtti_DispSfUser_GpuResource.offset), pCallContext, pParams);
}
static NV_STATUS __nvoc_thunk_GpuResource_dispsfUnmap(struct DispSfUser *pGpuResource, struct CALL_CONTEXT *pCallContext, struct RsCpuMapping *pCpuMapping) {
return gpuresUnmap((struct GpuResource *)(((unsigned char *)pGpuResource) + __nvoc_rtti_DispSfUser_GpuResource.offset), pCallContext, pCpuMapping);
}
static NV_STATUS __nvoc_thunk_RmResource_dispsfGetMemInterMapParams(struct DispSfUser *pRmResource, RMRES_MEM_INTER_MAP_PARAMS *pParams) {
return rmresGetMemInterMapParams((struct RmResource *)(((unsigned char *)pRmResource) + __nvoc_rtti_DispSfUser_RmResource.offset), pParams);
}
static NV_STATUS __nvoc_thunk_RmResource_dispsfGetMemoryMappingDescriptor(struct DispSfUser *pRmResource, struct MEMORY_DESCRIPTOR **ppMemDesc) {
return rmresGetMemoryMappingDescriptor((struct RmResource *)(((unsigned char *)pRmResource) + __nvoc_rtti_DispSfUser_RmResource.offset), ppMemDesc);
}
static NV_STATUS __nvoc_thunk_GpuResource_dispsfGetMapAddrSpace(struct DispSfUser *pGpuResource, struct CALL_CONTEXT *pCallContext, NvU32 mapFlags, NV_ADDRESS_SPACE *pAddrSpace) {
return gpuresGetMapAddrSpace((struct GpuResource *)(((unsigned char *)pGpuResource) + __nvoc_rtti_DispSfUser_GpuResource.offset), pCallContext, mapFlags, pAddrSpace);
}
static NvHandle __nvoc_thunk_GpuResource_dispsfGetInternalObjectHandle(struct DispSfUser *pGpuResource) {
return gpuresGetInternalObjectHandle((struct GpuResource *)(((unsigned char *)pGpuResource) + __nvoc_rtti_DispSfUser_GpuResource.offset));
}
static NV_STATUS __nvoc_thunk_RsResource_dispsfControlFilter(struct DispSfUser *pResource, struct CALL_CONTEXT *pCallContext, struct RS_RES_CONTROL_PARAMS_INTERNAL *pParams) {
return resControlFilter((struct RsResource *)(((unsigned char *)pResource) + __nvoc_rtti_DispSfUser_RsResource.offset), pCallContext, pParams);
}
static void __nvoc_thunk_RsResource_dispsfAddAdditionalDependants(struct RsClient *pClient, struct DispSfUser *pResource, RsResourceRef *pReference) {
resAddAdditionalDependants(pClient, (struct RsResource *)(((unsigned char *)pResource) + __nvoc_rtti_DispSfUser_RsResource.offset), pReference);
}
static NvU32 __nvoc_thunk_RsResource_dispsfGetRefCount(struct DispSfUser *pResource) {
return resGetRefCount((struct RsResource *)(((unsigned char *)pResource) + __nvoc_rtti_DispSfUser_RsResource.offset));
}
static NV_STATUS __nvoc_thunk_RmResource_dispsfCheckMemInterUnmap(struct DispSfUser *pRmResource, NvBool bSubdeviceHandleProvided) {
return rmresCheckMemInterUnmap((struct RmResource *)(((unsigned char *)pRmResource) + __nvoc_rtti_DispSfUser_RmResource.offset), bSubdeviceHandleProvided);
}
static NV_STATUS __nvoc_thunk_RsResource_dispsfMapTo(struct DispSfUser *pResource, RS_RES_MAP_TO_PARAMS *pParams) {
return resMapTo((struct RsResource *)(((unsigned char *)pResource) + __nvoc_rtti_DispSfUser_RsResource.offset), pParams);
}
static NV_STATUS __nvoc_thunk_RmResource_dispsfControl_Prologue(struct DispSfUser *pResource, struct CALL_CONTEXT *pCallContext, struct RS_RES_CONTROL_PARAMS_INTERNAL *pParams) {
return rmresControl_Prologue((struct RmResource *)(((unsigned char *)pResource) + __nvoc_rtti_DispSfUser_RmResource.offset), pCallContext, pParams);
}
static NvBool __nvoc_thunk_RsResource_dispsfCanCopy(struct DispSfUser *pResource) {
return resCanCopy((struct RsResource *)(((unsigned char *)pResource) + __nvoc_rtti_DispSfUser_RsResource.offset));
}
static NV_STATUS __nvoc_thunk_GpuResource_dispsfInternalControlForward(struct DispSfUser *pGpuResource, NvU32 command, void *pParams, NvU32 size) {
return gpuresInternalControlForward((struct GpuResource *)(((unsigned char *)pGpuResource) + __nvoc_rtti_DispSfUser_GpuResource.offset), command, pParams, size);
}
static void __nvoc_thunk_RsResource_dispsfPreDestruct(struct DispSfUser *pResource) {
resPreDestruct((struct RsResource *)(((unsigned char *)pResource) + __nvoc_rtti_DispSfUser_RsResource.offset));
}
static NV_STATUS __nvoc_thunk_RsResource_dispsfUnmapFrom(struct DispSfUser *pResource, RS_RES_UNMAP_FROM_PARAMS *pParams) {
return resUnmapFrom((struct RsResource *)(((unsigned char *)pResource) + __nvoc_rtti_DispSfUser_RsResource.offset), pParams);
}
static void __nvoc_thunk_RmResource_dispsfControl_Epilogue(struct DispSfUser *pResource, struct CALL_CONTEXT *pCallContext, struct RS_RES_CONTROL_PARAMS_INTERNAL *pParams) {
rmresControl_Epilogue((struct RmResource *)(((unsigned char *)pResource) + __nvoc_rtti_DispSfUser_RmResource.offset), pCallContext, pParams);
}
static NV_STATUS __nvoc_thunk_RsResource_dispsfControlLookup(struct DispSfUser *pResource, struct RS_RES_CONTROL_PARAMS_INTERNAL *pParams, const struct NVOC_EXPORTED_METHOD_DEF **ppEntry) {
return resControlLookup((struct RsResource *)(((unsigned char *)pResource) + __nvoc_rtti_DispSfUser_RsResource.offset), pParams, ppEntry);
}
static NV_STATUS __nvoc_thunk_GpuResource_dispsfMap(struct DispSfUser *pGpuResource, struct CALL_CONTEXT *pCallContext, struct RS_CPU_MAP_PARAMS *pParams, struct RsCpuMapping *pCpuMapping) {
return gpuresMap((struct GpuResource *)(((unsigned char *)pGpuResource) + __nvoc_rtti_DispSfUser_GpuResource.offset), pCallContext, pParams, pCpuMapping);
}
static NvBool __nvoc_thunk_RmResource_dispsfAccessCallback(struct DispSfUser *pResource, struct RsClient *pInvokingClient, void *pAllocParams, RsAccessRight accessRight) {
return rmresAccessCallback((struct RmResource *)(((unsigned char *)pResource) + __nvoc_rtti_DispSfUser_RmResource.offset), pInvokingClient, pAllocParams, accessRight);
}
const struct NVOC_EXPORT_INFO __nvoc_export_info_DispSfUser =
{
/*numEntries=*/ 0,
/*pExportEntries=*/ 0
};
void __nvoc_dtor_GpuResource(GpuResource*);
void __nvoc_dtor_DispSfUser(DispSfUser *pThis) {
__nvoc_dtor_GpuResource(&pThis->__nvoc_base_GpuResource);
PORT_UNREFERENCED_VARIABLE(pThis);
}
void __nvoc_init_dataField_DispSfUser(DispSfUser *pThis) {
PORT_UNREFERENCED_VARIABLE(pThis);
}
NV_STATUS __nvoc_ctor_GpuResource(GpuResource* , struct CALL_CONTEXT *, struct RS_RES_ALLOC_PARAMS_INTERNAL *);
NV_STATUS __nvoc_ctor_DispSfUser(DispSfUser *pThis, struct CALL_CONTEXT * arg_pCallContext, struct RS_RES_ALLOC_PARAMS_INTERNAL * arg_pParams) {
NV_STATUS status = NV_OK;
status = __nvoc_ctor_GpuResource(&pThis->__nvoc_base_GpuResource, arg_pCallContext, arg_pParams);
if (status != NV_OK) goto __nvoc_ctor_DispSfUser_fail_GpuResource;
__nvoc_init_dataField_DispSfUser(pThis);
status = __nvoc_dispsfConstruct(pThis, arg_pCallContext, arg_pParams);
if (status != NV_OK) goto __nvoc_ctor_DispSfUser_fail__init;
goto __nvoc_ctor_DispSfUser_exit; // Success
__nvoc_ctor_DispSfUser_fail__init:
__nvoc_dtor_GpuResource(&pThis->__nvoc_base_GpuResource);
__nvoc_ctor_DispSfUser_fail_GpuResource:
__nvoc_ctor_DispSfUser_exit:
return status;
}
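/*
 * Function-table setup: the register-base query is bound to this class's
 * own _IMPL, while every other virtual slot is bound to a thunk into the
 * inherited GpuResource, RmResource or RsResource implementation.
 */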
static void __nvoc_init_funcTable_DispSfUser_1(DispSfUser *pThis) {
PORT_UNREFERENCED_VARIABLE(pThis);
pThis->__dispsfGetRegBaseOffsetAndSize__ = &dispsfGetRegBaseOffsetAndSize_IMPL;
pThis->__nvoc_base_GpuResource.__gpuresGetRegBaseOffsetAndSize__ = &__nvoc_thunk_DispSfUser_gpuresGetRegBaseOffsetAndSize;
pThis->__dispsfShareCallback__ = &__nvoc_thunk_GpuResource_dispsfShareCallback;
pThis->__dispsfControl__ = &__nvoc_thunk_GpuResource_dispsfControl;
pThis->__dispsfUnmap__ = &__nvoc_thunk_GpuResource_dispsfUnmap;
pThis->__dispsfGetMemInterMapParams__ = &__nvoc_thunk_RmResource_dispsfGetMemInterMapParams;
pThis->__dispsfGetMemoryMappingDescriptor__ = &__nvoc_thunk_RmResource_dispsfGetMemoryMappingDescriptor;
pThis->__dispsfGetMapAddrSpace__ = &__nvoc_thunk_GpuResource_dispsfGetMapAddrSpace;
pThis->__dispsfGetInternalObjectHandle__ = &__nvoc_thunk_GpuResource_dispsfGetInternalObjectHandle;
pThis->__dispsfControlFilter__ = &__nvoc_thunk_RsResource_dispsfControlFilter;
pThis->__dispsfAddAdditionalDependants__ = &__nvoc_thunk_RsResource_dispsfAddAdditionalDependants;
pThis->__dispsfGetRefCount__ = &__nvoc_thunk_RsResource_dispsfGetRefCount;
pThis->__dispsfCheckMemInterUnmap__ = &__nvoc_thunk_RmResource_dispsfCheckMemInterUnmap;
pThis->__dispsfMapTo__ = &__nvoc_thunk_RsResource_dispsfMapTo;
pThis->__dispsfControl_Prologue__ = &__nvoc_thunk_RmResource_dispsfControl_Prologue;
pThis->__dispsfCanCopy__ = &__nvoc_thunk_RsResource_dispsfCanCopy;
pThis->__dispsfInternalControlForward__ = &__nvoc_thunk_GpuResource_dispsfInternalControlForward;
pThis->__dispsfPreDestruct__ = &__nvoc_thunk_RsResource_dispsfPreDestruct;
pThis->__dispsfUnmapFrom__ = &__nvoc_thunk_RsResource_dispsfUnmapFrom;
pThis->__dispsfControl_Epilogue__ = &__nvoc_thunk_RmResource_dispsfControl_Epilogue;
pThis->__dispsfControlLookup__ = &__nvoc_thunk_RsResource_dispsfControlLookup;
pThis->__dispsfMap__ = &__nvoc_thunk_GpuResource_dispsfMap;
pThis->__dispsfAccessCallback__ = &__nvoc_thunk_RmResource_dispsfAccessCallback;
}
void __nvoc_init_funcTable_DispSfUser(DispSfUser *pThis) {
__nvoc_init_funcTable_DispSfUser_1(pThis);
}
void __nvoc_init_GpuResource(GpuResource*);
void __nvoc_init_DispSfUser(DispSfUser *pThis) {
pThis->__nvoc_pbase_DispSfUser = pThis;
pThis->__nvoc_pbase_Object = &pThis->__nvoc_base_GpuResource.__nvoc_base_RmResource.__nvoc_base_RsResource.__nvoc_base_Object;
pThis->__nvoc_pbase_RsResource = &pThis->__nvoc_base_GpuResource.__nvoc_base_RmResource.__nvoc_base_RsResource;
pThis->__nvoc_pbase_RmResourceCommon = &pThis->__nvoc_base_GpuResource.__nvoc_base_RmResource.__nvoc_base_RmResourceCommon;
pThis->__nvoc_pbase_RmResource = &pThis->__nvoc_base_GpuResource.__nvoc_base_RmResource;
pThis->__nvoc_pbase_GpuResource = &pThis->__nvoc_base_GpuResource;
__nvoc_init_GpuResource(&pThis->__nvoc_base_GpuResource);
__nvoc_init_funcTable_DispSfUser(pThis);
}
NV_STATUS __nvoc_objCreate_DispSfUser(DispSfUser **ppThis, Dynamic *pParent, NvU32 createFlags, struct CALL_CONTEXT * arg_pCallContext, struct RS_RES_ALLOC_PARAMS_INTERNAL * arg_pParams) {
NV_STATUS status;
Object *pParentObj;
DispSfUser *pThis;
pThis = portMemAllocNonPaged(sizeof(DispSfUser));
if (pThis == NULL) return NV_ERR_NO_MEMORY;
portMemSet(pThis, 0, sizeof(DispSfUser));
__nvoc_initRtti(staticCast(pThis, Dynamic), &__nvoc_class_def_DispSfUser);
if (pParent != NULL && !(createFlags & NVOC_OBJ_CREATE_FLAGS_PARENT_HALSPEC_ONLY))
{
pParentObj = dynamicCast(pParent, Object);
objAddChild(pParentObj, &pThis->__nvoc_base_GpuResource.__nvoc_base_RmResource.__nvoc_base_RsResource.__nvoc_base_Object);
}
else
{
pThis->__nvoc_base_GpuResource.__nvoc_base_RmResource.__nvoc_base_RsResource.__nvoc_base_Object.pParent = NULL;
}
__nvoc_init_DispSfUser(pThis);
status = __nvoc_ctor_DispSfUser(pThis, arg_pCallContext, arg_pParams);
if (status != NV_OK) goto __nvoc_objCreate_DispSfUser_cleanup;
*ppThis = pThis;
return NV_OK;
__nvoc_objCreate_DispSfUser_cleanup:
// do not call destructors here since the constructor already called them
portMemFree(pThis);
return status;
}
NV_STATUS __nvoc_objCreateDynamic_DispSfUser(DispSfUser **ppThis, Dynamic *pParent, NvU32 createFlags, va_list args) {
NV_STATUS status;
struct CALL_CONTEXT * arg_pCallContext = va_arg(args, struct CALL_CONTEXT *);
struct RS_RES_ALLOC_PARAMS_INTERNAL * arg_pParams = va_arg(args, struct RS_RES_ALLOC_PARAMS_INTERNAL *);
status = __nvoc_objCreate_DispSfUser(ppThis, pParent, createFlags, arg_pCallContext, arg_pParams);
return status;
}

@@ -0,0 +1,239 @@
#ifndef _G_DISP_SF_USER_NVOC_H_
#define _G_DISP_SF_USER_NVOC_H_
#include "nvoc/runtime.h"
#ifdef __cplusplus
extern "C" {
#endif
/*
* SPDX-FileCopyrightText: Copyright (c) 1993-2021 NVIDIA CORPORATION & AFFILIATES. All rights reserved.
* SPDX-License-Identifier: MIT
*
* Permission is hereby granted, free of charge, to any person obtaining a
* copy of this software and associated documentation files (the "Software"),
* to deal in the Software without restriction, including without limitation
* the rights to use, copy, modify, merge, publish, distribute, sublicense,
* and/or sell copies of the Software, and to permit persons to whom the
* Software is furnished to do so, subject to the following conditions:
*
* The above copyright notice and this permission notice shall be included in
* all copies or substantial portions of the Software.
*
* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
* IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
* FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL
* THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
* LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
* FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER
* DEALINGS IN THE SOFTWARE.
*/
/******************************************************************************
*
* Description:
* This file contains functions managing the DispSfUser class.
*
******************************************************************************/
#include "g_disp_sf_user_nvoc.h"
#ifndef DISP_SF_USER_H
#define DISP_SF_USER_H
#include "gpu/gpu_resource.h"
/*!
* RM internal class representing NVXXXX_DISP_SF_USER
*/
#ifdef NVOC_DISP_SF_USER_H_PRIVATE_ACCESS_ALLOWED
#define PRIVATE_FIELD(x) x
#else
#define PRIVATE_FIELD(x) NVOC_PRIVATE_FIELD(x)
#endif
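/*
 * Run-time layout of a DispSfUser object: the GpuResource base is embedded
 * at offset zero, the __nvoc_pbase_* pointers cache each ancestor
 * sub-object for staticCast(), the function pointers form the per-object
 * virtual table filled in by __nvoc_init_funcTable_DispSfUser(), and
 * ControlOffset/ControlLength are the data members this class adds.
 */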
struct DispSfUser {
const struct NVOC_RTTI *__nvoc_rtti;
struct GpuResource __nvoc_base_GpuResource;
struct Object *__nvoc_pbase_Object;
struct RsResource *__nvoc_pbase_RsResource;
struct RmResourceCommon *__nvoc_pbase_RmResourceCommon;
struct RmResource *__nvoc_pbase_RmResource;
struct GpuResource *__nvoc_pbase_GpuResource;
struct DispSfUser *__nvoc_pbase_DispSfUser;
NV_STATUS (*__dispsfGetRegBaseOffsetAndSize__)(struct DispSfUser *, struct OBJGPU *, NvU32 *, NvU32 *);
NvBool (*__dispsfShareCallback__)(struct DispSfUser *, struct RsClient *, struct RsResourceRef *, RS_SHARE_POLICY *);
NV_STATUS (*__dispsfControl__)(struct DispSfUser *, struct CALL_CONTEXT *, struct RS_RES_CONTROL_PARAMS_INTERNAL *);
NV_STATUS (*__dispsfUnmap__)(struct DispSfUser *, struct CALL_CONTEXT *, struct RsCpuMapping *);
NV_STATUS (*__dispsfGetMemInterMapParams__)(struct DispSfUser *, RMRES_MEM_INTER_MAP_PARAMS *);
NV_STATUS (*__dispsfGetMemoryMappingDescriptor__)(struct DispSfUser *, struct MEMORY_DESCRIPTOR **);
NV_STATUS (*__dispsfGetMapAddrSpace__)(struct DispSfUser *, struct CALL_CONTEXT *, NvU32, NV_ADDRESS_SPACE *);
NvHandle (*__dispsfGetInternalObjectHandle__)(struct DispSfUser *);
NV_STATUS (*__dispsfControlFilter__)(struct DispSfUser *, struct CALL_CONTEXT *, struct RS_RES_CONTROL_PARAMS_INTERNAL *);
void (*__dispsfAddAdditionalDependants__)(struct RsClient *, struct DispSfUser *, RsResourceRef *);
NvU32 (*__dispsfGetRefCount__)(struct DispSfUser *);
NV_STATUS (*__dispsfCheckMemInterUnmap__)(struct DispSfUser *, NvBool);
NV_STATUS (*__dispsfMapTo__)(struct DispSfUser *, RS_RES_MAP_TO_PARAMS *);
NV_STATUS (*__dispsfControl_Prologue__)(struct DispSfUser *, struct CALL_CONTEXT *, struct RS_RES_CONTROL_PARAMS_INTERNAL *);
NvBool (*__dispsfCanCopy__)(struct DispSfUser *);
NV_STATUS (*__dispsfInternalControlForward__)(struct DispSfUser *, NvU32, void *, NvU32);
void (*__dispsfPreDestruct__)(struct DispSfUser *);
NV_STATUS (*__dispsfUnmapFrom__)(struct DispSfUser *, RS_RES_UNMAP_FROM_PARAMS *);
void (*__dispsfControl_Epilogue__)(struct DispSfUser *, struct CALL_CONTEXT *, struct RS_RES_CONTROL_PARAMS_INTERNAL *);
NV_STATUS (*__dispsfControlLookup__)(struct DispSfUser *, struct RS_RES_CONTROL_PARAMS_INTERNAL *, const struct NVOC_EXPORTED_METHOD_DEF **);
NV_STATUS (*__dispsfMap__)(struct DispSfUser *, struct CALL_CONTEXT *, struct RS_CPU_MAP_PARAMS *, struct RsCpuMapping *);
NvBool (*__dispsfAccessCallback__)(struct DispSfUser *, struct RsClient *, void *, RsAccessRight);
NvU32 ControlOffset;
NvU32 ControlLength;
};
#ifndef __NVOC_CLASS_DispSfUser_TYPEDEF__
#define __NVOC_CLASS_DispSfUser_TYPEDEF__
typedef struct DispSfUser DispSfUser;
#endif /* __NVOC_CLASS_DispSfUser_TYPEDEF__ */
#ifndef __nvoc_class_id_DispSfUser
#define __nvoc_class_id_DispSfUser 0xba7439
#endif /* __nvoc_class_id_DispSfUser */
extern const struct NVOC_CLASS_DEF __nvoc_class_def_DispSfUser;
#define __staticCast_DispSfUser(pThis) \
((pThis)->__nvoc_pbase_DispSfUser)
#ifdef __nvoc_disp_sf_user_h_disabled
#define __dynamicCast_DispSfUser(pThis) ((DispSfUser*)NULL)
#else //__nvoc_disp_sf_user_h_disabled
#define __dynamicCast_DispSfUser(pThis) \
((DispSfUser*)__nvoc_dynamicCast(staticCast((pThis), Dynamic), classInfo(DispSfUser)))
#endif //__nvoc_disp_sf_user_h_disabled
NV_STATUS __nvoc_objCreateDynamic_DispSfUser(DispSfUser**, Dynamic*, NvU32, va_list);
NV_STATUS __nvoc_objCreate_DispSfUser(DispSfUser**, Dynamic*, NvU32, struct CALL_CONTEXT * arg_pCallContext, struct RS_RES_ALLOC_PARAMS_INTERNAL * arg_pParams);
#define __objCreate_DispSfUser(ppNewObj, pParent, createFlags, arg_pCallContext, arg_pParams) \
__nvoc_objCreate_DispSfUser((ppNewObj), staticCast((pParent), Dynamic), (createFlags), arg_pCallContext, arg_pParams)
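/*
 * Illustrative only (not part of the generated header): with hypothetical
 * caller-side variables pParent, pCallContext and pParams, an instance
 * would typically be created through the wrapper macro, e.g.
 *
 *     DispSfUser *pDispSf = NULL;
 *     NV_STATUS   status  = __objCreate_DispSfUser(&pDispSf, pParent,
 *                                                  0, pCallContext, pParams);
 *
 * which allocates the object, initializes its RTTI and function table, and
 * runs the constructor chain shown in the generated source earlier in this
 * diff.
 */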
#define dispsfGetRegBaseOffsetAndSize(pDispSfUser, pGpu, pOffset, pSize) dispsfGetRegBaseOffsetAndSize_DISPATCH(pDispSfUser, pGpu, pOffset, pSize)
#define dispsfShareCallback(pGpuResource, pInvokingClient, pParentRef, pSharePolicy) dispsfShareCallback_DISPATCH(pGpuResource, pInvokingClient, pParentRef, pSharePolicy)
#define dispsfControl(pGpuResource, pCallContext, pParams) dispsfControl_DISPATCH(pGpuResource, pCallContext, pParams)
#define dispsfUnmap(pGpuResource, pCallContext, pCpuMapping) dispsfUnmap_DISPATCH(pGpuResource, pCallContext, pCpuMapping)
#define dispsfGetMemInterMapParams(pRmResource, pParams) dispsfGetMemInterMapParams_DISPATCH(pRmResource, pParams)
#define dispsfGetMemoryMappingDescriptor(pRmResource, ppMemDesc) dispsfGetMemoryMappingDescriptor_DISPATCH(pRmResource, ppMemDesc)
#define dispsfGetMapAddrSpace(pGpuResource, pCallContext, mapFlags, pAddrSpace) dispsfGetMapAddrSpace_DISPATCH(pGpuResource, pCallContext, mapFlags, pAddrSpace)
#define dispsfGetInternalObjectHandle(pGpuResource) dispsfGetInternalObjectHandle_DISPATCH(pGpuResource)
#define dispsfControlFilter(pResource, pCallContext, pParams) dispsfControlFilter_DISPATCH(pResource, pCallContext, pParams)
#define dispsfAddAdditionalDependants(pClient, pResource, pReference) dispsfAddAdditionalDependants_DISPATCH(pClient, pResource, pReference)
#define dispsfGetRefCount(pResource) dispsfGetRefCount_DISPATCH(pResource)
#define dispsfCheckMemInterUnmap(pRmResource, bSubdeviceHandleProvided) dispsfCheckMemInterUnmap_DISPATCH(pRmResource, bSubdeviceHandleProvided)
#define dispsfMapTo(pResource, pParams) dispsfMapTo_DISPATCH(pResource, pParams)
#define dispsfControl_Prologue(pResource, pCallContext, pParams) dispsfControl_Prologue_DISPATCH(pResource, pCallContext, pParams)
#define dispsfCanCopy(pResource) dispsfCanCopy_DISPATCH(pResource)
#define dispsfInternalControlForward(pGpuResource, command, pParams, size) dispsfInternalControlForward_DISPATCH(pGpuResource, command, pParams, size)
#define dispsfPreDestruct(pResource) dispsfPreDestruct_DISPATCH(pResource)
#define dispsfUnmapFrom(pResource, pParams) dispsfUnmapFrom_DISPATCH(pResource, pParams)
#define dispsfControl_Epilogue(pResource, pCallContext, pParams) dispsfControl_Epilogue_DISPATCH(pResource, pCallContext, pParams)
#define dispsfControlLookup(pResource, pParams, ppEntry) dispsfControlLookup_DISPATCH(pResource, pParams, ppEntry)
#define dispsfMap(pGpuResource, pCallContext, pParams, pCpuMapping) dispsfMap_DISPATCH(pGpuResource, pCallContext, pParams, pCpuMapping)
#define dispsfAccessCallback(pResource, pInvokingClient, pAllocParams, accessRight) dispsfAccessCallback_DISPATCH(pResource, pInvokingClient, pAllocParams, accessRight)
NV_STATUS dispsfGetRegBaseOffsetAndSize_IMPL(struct DispSfUser *pDispSfUser, struct OBJGPU *pGpu, NvU32 *pOffset, NvU32 *pSize);
static inline NV_STATUS dispsfGetRegBaseOffsetAndSize_DISPATCH(struct DispSfUser *pDispSfUser, struct OBJGPU *pGpu, NvU32 *pOffset, NvU32 *pSize) {
return pDispSfUser->__dispsfGetRegBaseOffsetAndSize__(pDispSfUser, pGpu, pOffset, pSize);
}
static inline NvBool dispsfShareCallback_DISPATCH(struct DispSfUser *pGpuResource, struct RsClient *pInvokingClient, struct RsResourceRef *pParentRef, RS_SHARE_POLICY *pSharePolicy) {
return pGpuResource->__dispsfShareCallback__(pGpuResource, pInvokingClient, pParentRef, pSharePolicy);
}
static inline NV_STATUS dispsfControl_DISPATCH(struct DispSfUser *pGpuResource, struct CALL_CONTEXT *pCallContext, struct RS_RES_CONTROL_PARAMS_INTERNAL *pParams) {
return pGpuResource->__dispsfControl__(pGpuResource, pCallContext, pParams);
}
static inline NV_STATUS dispsfUnmap_DISPATCH(struct DispSfUser *pGpuResource, struct CALL_CONTEXT *pCallContext, struct RsCpuMapping *pCpuMapping) {
return pGpuResource->__dispsfUnmap__(pGpuResource, pCallContext, pCpuMapping);
}
static inline NV_STATUS dispsfGetMemInterMapParams_DISPATCH(struct DispSfUser *pRmResource, RMRES_MEM_INTER_MAP_PARAMS *pParams) {
return pRmResource->__dispsfGetMemInterMapParams__(pRmResource, pParams);
}
static inline NV_STATUS dispsfGetMemoryMappingDescriptor_DISPATCH(struct DispSfUser *pRmResource, struct MEMORY_DESCRIPTOR **ppMemDesc) {
return pRmResource->__dispsfGetMemoryMappingDescriptor__(pRmResource, ppMemDesc);
}
static inline NV_STATUS dispsfGetMapAddrSpace_DISPATCH(struct DispSfUser *pGpuResource, struct CALL_CONTEXT *pCallContext, NvU32 mapFlags, NV_ADDRESS_SPACE *pAddrSpace) {
return pGpuResource->__dispsfGetMapAddrSpace__(pGpuResource, pCallContext, mapFlags, pAddrSpace);
}
static inline NvHandle dispsfGetInternalObjectHandle_DISPATCH(struct DispSfUser *pGpuResource) {
return pGpuResource->__dispsfGetInternalObjectHandle__(pGpuResource);
}
static inline NV_STATUS dispsfControlFilter_DISPATCH(struct DispSfUser *pResource, struct CALL_CONTEXT *pCallContext, struct RS_RES_CONTROL_PARAMS_INTERNAL *pParams) {
return pResource->__dispsfControlFilter__(pResource, pCallContext, pParams);
}
static inline void dispsfAddAdditionalDependants_DISPATCH(struct RsClient *pClient, struct DispSfUser *pResource, RsResourceRef *pReference) {
pResource->__dispsfAddAdditionalDependants__(pClient, pResource, pReference);
}
static inline NvU32 dispsfGetRefCount_DISPATCH(struct DispSfUser *pResource) {
return pResource->__dispsfGetRefCount__(pResource);
}
static inline NV_STATUS dispsfCheckMemInterUnmap_DISPATCH(struct DispSfUser *pRmResource, NvBool bSubdeviceHandleProvided) {
return pRmResource->__dispsfCheckMemInterUnmap__(pRmResource, bSubdeviceHandleProvided);
}
static inline NV_STATUS dispsfMapTo_DISPATCH(struct DispSfUser *pResource, RS_RES_MAP_TO_PARAMS *pParams) {
return pResource->__dispsfMapTo__(pResource, pParams);
}
static inline NV_STATUS dispsfControl_Prologue_DISPATCH(struct DispSfUser *pResource, struct CALL_CONTEXT *pCallContext, struct RS_RES_CONTROL_PARAMS_INTERNAL *pParams) {
return pResource->__dispsfControl_Prologue__(pResource, pCallContext, pParams);
}
static inline NvBool dispsfCanCopy_DISPATCH(struct DispSfUser *pResource) {
return pResource->__dispsfCanCopy__(pResource);
}
static inline NV_STATUS dispsfInternalControlForward_DISPATCH(struct DispSfUser *pGpuResource, NvU32 command, void *pParams, NvU32 size) {
return pGpuResource->__dispsfInternalControlForward__(pGpuResource, command, pParams, size);
}
static inline void dispsfPreDestruct_DISPATCH(struct DispSfUser *pResource) {
pResource->__dispsfPreDestruct__(pResource);
}
static inline NV_STATUS dispsfUnmapFrom_DISPATCH(struct DispSfUser *pResource, RS_RES_UNMAP_FROM_PARAMS *pParams) {
return pResource->__dispsfUnmapFrom__(pResource, pParams);
}
static inline void dispsfControl_Epilogue_DISPATCH(struct DispSfUser *pResource, struct CALL_CONTEXT *pCallContext, struct RS_RES_CONTROL_PARAMS_INTERNAL *pParams) {
pResource->__dispsfControl_Epilogue__(pResource, pCallContext, pParams);
}
static inline NV_STATUS dispsfControlLookup_DISPATCH(struct DispSfUser *pResource, struct RS_RES_CONTROL_PARAMS_INTERNAL *pParams, const struct NVOC_EXPORTED_METHOD_DEF **ppEntry) {
return pResource->__dispsfControlLookup__(pResource, pParams, ppEntry);
}
static inline NV_STATUS dispsfMap_DISPATCH(struct DispSfUser *pGpuResource, struct CALL_CONTEXT *pCallContext, struct RS_CPU_MAP_PARAMS *pParams, struct RsCpuMapping *pCpuMapping) {
return pGpuResource->__dispsfMap__(pGpuResource, pCallContext, pParams, pCpuMapping);
}
static inline NvBool dispsfAccessCallback_DISPATCH(struct DispSfUser *pResource, struct RsClient *pInvokingClient, void *pAllocParams, RsAccessRight accessRight) {
return pResource->__dispsfAccessCallback__(pResource, pInvokingClient, pAllocParams, accessRight);
}
NV_STATUS dispsfConstruct_IMPL(struct DispSfUser *arg_pDispSfUser, struct CALL_CONTEXT *arg_pCallContext, struct RS_RES_ALLOC_PARAMS_INTERNAL *arg_pParams);
#define __nvoc_dispsfConstruct(arg_pDispSfUser, arg_pCallContext, arg_pParams) dispsfConstruct_IMPL(arg_pDispSfUser, arg_pCallContext, arg_pParams)
#undef PRIVATE_FIELD
#endif // DISP_SF_USER_H
#ifdef __cplusplus
} // extern "C"
#endif
#endif // _G_DISP_SF_USER_NVOC_H_

(File diff suppressed because it is too large.)

@@ -0,0 +1,189 @@
#define NVOC_ENG_STATE_H_PRIVATE_ACCESS_ALLOWED
#include "nvoc/runtime.h"
#include "nvoc/rtti.h"
#include "nvtypes.h"
#include "nvport/nvport.h"
#include "nvport/inline/util_valist.h"
#include "utils/nvassert.h"
#include "g_eng_state_nvoc.h"
#ifdef DEBUG
char __nvoc_class_id_uniqueness_check_0x7a7ed6 = 1;
#endif
extern const struct NVOC_CLASS_DEF __nvoc_class_def_OBJENGSTATE;
extern const struct NVOC_CLASS_DEF __nvoc_class_def_Object;
void __nvoc_init_OBJENGSTATE(OBJENGSTATE*);
void __nvoc_init_funcTable_OBJENGSTATE(OBJENGSTATE*);
NV_STATUS __nvoc_ctor_OBJENGSTATE(OBJENGSTATE*);
void __nvoc_init_dataField_OBJENGSTATE(OBJENGSTATE*);
void __nvoc_dtor_OBJENGSTATE(OBJENGSTATE*);
extern const struct NVOC_EXPORT_INFO __nvoc_export_info_OBJENGSTATE;
static const struct NVOC_RTTI __nvoc_rtti_OBJENGSTATE_OBJENGSTATE = {
/*pClassDef=*/ &__nvoc_class_def_OBJENGSTATE,
/*dtor=*/ (NVOC_DYNAMIC_DTOR) &__nvoc_dtor_OBJENGSTATE,
/*offset=*/ 0,
};
static const struct NVOC_RTTI __nvoc_rtti_OBJENGSTATE_Object = {
/*pClassDef=*/ &__nvoc_class_def_Object,
/*dtor=*/ &__nvoc_destructFromBase,
/*offset=*/ NV_OFFSETOF(OBJENGSTATE, __nvoc_base_Object),
};
static const struct NVOC_CASTINFO __nvoc_castinfo_OBJENGSTATE = {
/*numRelatives=*/ 2,
/*relatives=*/ {
&__nvoc_rtti_OBJENGSTATE_OBJENGSTATE,
&__nvoc_rtti_OBJENGSTATE_Object,
},
};
const struct NVOC_CLASS_DEF __nvoc_class_def_OBJENGSTATE =
{
/*classInfo=*/ {
/*size=*/ sizeof(OBJENGSTATE),
/*classId=*/ classId(OBJENGSTATE),
/*providerId=*/ &__nvoc_rtti_provider,
#if NV_PRINTF_STRINGS_ALLOWED
/*name=*/ "OBJENGSTATE",
#endif
},
/*objCreatefn=*/ (NVOC_DYNAMIC_OBJ_CREATE) &__nvoc_objCreateDynamic_OBJENGSTATE,
/*pCastInfo=*/ &__nvoc_castinfo_OBJENGSTATE,
/*pExportInfo=*/ &__nvoc_export_info_OBJENGSTATE
};
const struct NVOC_EXPORT_INFO __nvoc_export_info_OBJENGSTATE =
{
/*numEntries=*/ 0,
/*pExportEntries=*/ 0
};
void __nvoc_dtor_Object(Object*);
void __nvoc_dtor_OBJENGSTATE(OBJENGSTATE *pThis) {
__nvoc_engstateDestruct(pThis);
__nvoc_dtor_Object(&pThis->__nvoc_base_Object);
PORT_UNREFERENCED_VARIABLE(pThis);
}
void __nvoc_init_dataField_OBJENGSTATE(OBJENGSTATE *pThis) {
PORT_UNREFERENCED_VARIABLE(pThis);
}
NV_STATUS __nvoc_ctor_Object(Object* );
NV_STATUS __nvoc_ctor_OBJENGSTATE(OBJENGSTATE *pThis) {
NV_STATUS status = NV_OK;
status = __nvoc_ctor_Object(&pThis->__nvoc_base_Object);
if (status != NV_OK) goto __nvoc_ctor_OBJENGSTATE_fail_Object;
__nvoc_init_dataField_OBJENGSTATE(pThis);
goto __nvoc_ctor_OBJENGSTATE_exit; // Success
__nvoc_ctor_OBJENGSTATE_fail_Object:
__nvoc_ctor_OBJENGSTATE_exit:
return status;
}
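/*
 * OBJENGSTATE is the common base for per-engine state tracking: the table
 * below installs the default engstate*_IMPL handler for every state
 * transition (construct, the pre-init/init pair, the load path, the
 * mirrored unload path, destroy) and for the tunable-state hooks; derived
 * engine classes replace individual slots in their own generated init code.
 */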
static void __nvoc_init_funcTable_OBJENGSTATE_1(OBJENGSTATE *pThis) {
PORT_UNREFERENCED_VARIABLE(pThis);
pThis->__engstateConstructEngine__ = &engstateConstructEngine_IMPL;
pThis->__engstateInitMissing__ = &engstateInitMissing_IMPL;
pThis->__engstateStatePreInitLocked__ = &engstateStatePreInitLocked_IMPL;
pThis->__engstateStatePreInitUnlocked__ = &engstateStatePreInitUnlocked_IMPL;
pThis->__engstateStateInitLocked__ = &engstateStateInitLocked_IMPL;
pThis->__engstateStateInitUnlocked__ = &engstateStateInitUnlocked_IMPL;
pThis->__engstateStatePreLoad__ = &engstateStatePreLoad_IMPL;
pThis->__engstateStateLoad__ = &engstateStateLoad_IMPL;
pThis->__engstateStatePostLoad__ = &engstateStatePostLoad_IMPL;
pThis->__engstateStatePreUnload__ = &engstateStatePreUnload_IMPL;
pThis->__engstateStateUnload__ = &engstateStateUnload_IMPL;
pThis->__engstateStatePostUnload__ = &engstateStatePostUnload_IMPL;
pThis->__engstateStateDestroy__ = &engstateStateDestroy_IMPL;
pThis->__engstateAllocTunableState__ = &engstateAllocTunableState_IMPL;
pThis->__engstateFreeTunableState__ = &engstateFreeTunableState_IMPL;
pThis->__engstateGetTunableState__ = &engstateGetTunableState_IMPL;
pThis->__engstateSetTunableState__ = &engstateSetTunableState_IMPL;
pThis->__engstateReconcileTunableState__ = &engstateReconcileTunableState_IMPL;
pThis->__engstateCompareTunableState__ = &engstateCompareTunableState_IMPL;
pThis->__engstateIsPresent__ = &engstateIsPresent_IMPL;
}
void __nvoc_init_funcTable_OBJENGSTATE(OBJENGSTATE *pThis) {
__nvoc_init_funcTable_OBJENGSTATE_1(pThis);
}
void __nvoc_init_Object(Object*);
void __nvoc_init_OBJENGSTATE(OBJENGSTATE *pThis) {
pThis->__nvoc_pbase_OBJENGSTATE = pThis;
pThis->__nvoc_pbase_Object = &pThis->__nvoc_base_Object;
__nvoc_init_Object(&pThis->__nvoc_base_Object);
__nvoc_init_funcTable_OBJENGSTATE(pThis);
}
NV_STATUS __nvoc_objCreate_OBJENGSTATE(OBJENGSTATE **ppThis, Dynamic *pParent, NvU32 createFlags) {
NV_STATUS status;
Object *pParentObj;
OBJENGSTATE *pThis;
pThis = portMemAllocNonPaged(sizeof(OBJENGSTATE));
if (pThis == NULL) return NV_ERR_NO_MEMORY;
portMemSet(pThis, 0, sizeof(OBJENGSTATE));
__nvoc_initRtti(staticCast(pThis, Dynamic), &__nvoc_class_def_OBJENGSTATE);
if (pParent != NULL && !(createFlags & NVOC_OBJ_CREATE_FLAGS_PARENT_HALSPEC_ONLY))
{
pParentObj = dynamicCast(pParent, Object);
objAddChild(pParentObj, &pThis->__nvoc_base_Object);
}
else
{
pThis->__nvoc_base_Object.pParent = NULL;
}
__nvoc_init_OBJENGSTATE(pThis);
status = __nvoc_ctor_OBJENGSTATE(pThis);
if (status != NV_OK) goto __nvoc_objCreate_OBJENGSTATE_cleanup;
*ppThis = pThis;
return NV_OK;
__nvoc_objCreate_OBJENGSTATE_cleanup:
// do not call destructors here since the constructor already called them
portMemFree(pThis);
return status;
}
NV_STATUS __nvoc_objCreateDynamic_OBJENGSTATE(OBJENGSTATE **ppThis, Dynamic *pParent, NvU32 createFlags, va_list args) {
NV_STATUS status;
status = __nvoc_objCreate_OBJENGSTATE(ppThis, pParent, createFlags);
return status;
}

@@ -0,0 +1,385 @@
#ifndef _G_ENG_STATE_NVOC_H_
#define _G_ENG_STATE_NVOC_H_
#include "nvoc/runtime.h"
#ifdef __cplusplus
extern "C" {
#endif
/*
* SPDX-FileCopyrightText: Copyright (c) 1993-2021 NVIDIA CORPORATION & AFFILIATES. All rights reserved.
* SPDX-License-Identifier: MIT
*
* Permission is hereby granted, free of charge, to any person obtaining a
* copy of this software and associated documentation files (the "Software"),
* to deal in the Software without restriction, including without limitation
* the rights to use, copy, modify, merge, publish, distribute, sublicense,
* and/or sell copies of the Software, and to permit persons to whom the
* Software is furnished to do so, subject to the following conditions:
*
* The above copyright notice and this permission notice shall be included in
* all copies or substantial portions of the Software.
*
* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
* IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
* FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL
* THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
* LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
* FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER
* DEALINGS IN THE SOFTWARE.
*/
#include "g_eng_state_nvoc.h"
#ifndef _ENG_STATE_H_
#define _ENG_STATE_H_
/*!
* @file eng_state.h
* @brief Provides definitions for all OBJENGSTATE data structures and interfaces.
*/
#include "core/core.h"
#include "gpu/eng_desc.h"
typedef enum ENGSTATE_STATE
{
ENGSTATE_STATE_UNDEFINED = 0,
ENGSTATE_STATE_CONSTRUCT,
ENGSTATE_STATE_PRE_INIT,
ENGSTATE_STATE_INIT,
ENGSTATE_STATE_PRE_LOAD,
ENGSTATE_STATE_LOAD,
ENGSTATE_STATE_POST_LOAD,
ENGSTATE_STATE_PRE_UNLOAD,
ENGSTATE_STATE_UNLOAD,
ENGSTATE_STATE_POST_UNLOAD,
ENGSTATE_STATE_DESTROY,
ENGSTATE_STATE_COUNT // Keep this last
} ENGSTATE_STATE;
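/*
 * ENGSTATE_STATE enumerates the phases an engine walks through across GPU
 * state init/load/unload; ENGSTATE_STATE_COUNT (11) matches the size of the
 * per-state stats array in struct OBJENGSTATE below.
 */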
// Stats data stored for every state transition
typedef struct ENGSTATE_STATS
{
NvS32 memoryAllocCount;
NvS32 memoryAllocSize;
NvU32 transitionTimeUs;
} ENGSTATE_STATS;
// Temporary transition data, not stored
typedef struct ENGSTATE_TRANSITION_DATA
{
NvS64 memoryAllocCount;
NvS64 memoryAllocSize;
NvU64 transitionStartTimeNs;
} ENGSTATE_TRANSITION_DATA;
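/*
 * ENGSTATE_STATS persists per-state accounting (allocation deltas and
 * transition time), while ENGSTATE_TRANSITION_DATA is the scratch snapshot
 * taken at the start of a transition; the engstateLogStateTransitionPre/Post
 * helpers declared later in this header presumably pair the two.
 */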
typedef struct OBJENGSTATE *POBJENGSTATE;
#define ENG_GET_FIFO(p) (engstateGetFifo(staticCast((p), OBJENGSTATE)))
#define ENG_GET_ENG_DESC(p) (staticCast((p), OBJENGSTATE)->engDesc)
/*!
* Defines the structure used to contain all generic information related to
* the OBJENGSTATE.
*/
#ifdef NVOC_ENG_STATE_H_PRIVATE_ACCESS_ALLOWED
#define PRIVATE_FIELD(x) x
#else
#define PRIVATE_FIELD(x) NVOC_PRIVATE_FIELD(x)
#endif
struct OBJENGSTATE {
const struct NVOC_RTTI *__nvoc_rtti;
struct Object __nvoc_base_Object;
struct Object *__nvoc_pbase_Object;
struct OBJENGSTATE *__nvoc_pbase_OBJENGSTATE;
NV_STATUS (*__engstateConstructEngine__)(POBJGPU, POBJENGSTATE, ENGDESCRIPTOR);
void (*__engstateInitMissing__)(POBJGPU, POBJENGSTATE);
NV_STATUS (*__engstateStatePreInitLocked__)(POBJGPU, POBJENGSTATE);
NV_STATUS (*__engstateStatePreInitUnlocked__)(POBJGPU, POBJENGSTATE);
NV_STATUS (*__engstateStateInitLocked__)(POBJGPU, POBJENGSTATE);
NV_STATUS (*__engstateStateInitUnlocked__)(POBJGPU, POBJENGSTATE);
NV_STATUS (*__engstateStatePreLoad__)(POBJGPU, POBJENGSTATE, NvU32);
NV_STATUS (*__engstateStateLoad__)(POBJGPU, POBJENGSTATE, NvU32);
NV_STATUS (*__engstateStatePostLoad__)(POBJGPU, POBJENGSTATE, NvU32);
NV_STATUS (*__engstateStatePreUnload__)(POBJGPU, POBJENGSTATE, NvU32);
NV_STATUS (*__engstateStateUnload__)(POBJGPU, POBJENGSTATE, NvU32);
NV_STATUS (*__engstateStatePostUnload__)(POBJGPU, POBJENGSTATE, NvU32);
void (*__engstateStateDestroy__)(POBJGPU, POBJENGSTATE);
NV_STATUS (*__engstateAllocTunableState__)(POBJGPU, POBJENGSTATE, void **);
void (*__engstateFreeTunableState__)(POBJGPU, POBJENGSTATE, void *);
NV_STATUS (*__engstateGetTunableState__)(POBJGPU, POBJENGSTATE, void *);
NV_STATUS (*__engstateSetTunableState__)(POBJGPU, POBJENGSTATE, void *);
NV_STATUS (*__engstateReconcileTunableState__)(POBJGPU, POBJENGSTATE, void *);
NV_STATUS (*__engstateCompareTunableState__)(POBJGPU, POBJENGSTATE, void *, void *);
NvBool (*__engstateIsPresent__)(POBJGPU, POBJENGSTATE);
NvBool PDB_PROP_ENGSTATE_IS_MISSING;
ENGDESCRIPTOR engDesc;
void *pOriginalTunableState;
struct OBJGPU *pGpu;
ENGSTATE_STATE currentState;
ENGSTATE_STATS stats[11];
char name[100];
};
#ifndef __NVOC_CLASS_OBJENGSTATE_TYPEDEF__
#define __NVOC_CLASS_OBJENGSTATE_TYPEDEF__
typedef struct OBJENGSTATE OBJENGSTATE;
#endif /* __NVOC_CLASS_OBJENGSTATE_TYPEDEF__ */
#ifndef __nvoc_class_id_OBJENGSTATE
#define __nvoc_class_id_OBJENGSTATE 0x7a7ed6
#endif /* __nvoc_class_id_OBJENGSTATE */
extern const struct NVOC_CLASS_DEF __nvoc_class_def_OBJENGSTATE;
#define __staticCast_OBJENGSTATE(pThis) \
((pThis)->__nvoc_pbase_OBJENGSTATE)
#ifdef __nvoc_eng_state_h_disabled
#define __dynamicCast_OBJENGSTATE(pThis) ((OBJENGSTATE*)NULL)
#else //__nvoc_eng_state_h_disabled
#define __dynamicCast_OBJENGSTATE(pThis) \
((OBJENGSTATE*)__nvoc_dynamicCast(staticCast((pThis), Dynamic), classInfo(OBJENGSTATE)))
#endif //__nvoc_eng_state_h_disabled
#define PDB_PROP_ENGSTATE_IS_MISSING_BASE_CAST
#define PDB_PROP_ENGSTATE_IS_MISSING_BASE_NAME PDB_PROP_ENGSTATE_IS_MISSING
NV_STATUS __nvoc_objCreateDynamic_OBJENGSTATE(OBJENGSTATE**, Dynamic*, NvU32, va_list);
NV_STATUS __nvoc_objCreate_OBJENGSTATE(OBJENGSTATE**, Dynamic*, NvU32);
#define __objCreate_OBJENGSTATE(ppNewObj, pParent, createFlags) \
__nvoc_objCreate_OBJENGSTATE((ppNewObj), staticCast((pParent), Dynamic), (createFlags))
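/*
 * The engstate*() macros below are DISPATCH wrappers: each call is routed
 * through the per-object function table populated by the generated source
 * above, so an engine subclass's override is invoked without the caller
 * knowing the concrete class.
 */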
#define engstateConstructEngine(pGpu, pEngstate, arg0) engstateConstructEngine_DISPATCH(pGpu, pEngstate, arg0)
#define engstateInitMissing(pGpu, pEngstate) engstateInitMissing_DISPATCH(pGpu, pEngstate)
#define engstateStatePreInitLocked(pGpu, pEngstate) engstateStatePreInitLocked_DISPATCH(pGpu, pEngstate)
#define engstateStatePreInitUnlocked(pGpu, pEngstate) engstateStatePreInitUnlocked_DISPATCH(pGpu, pEngstate)
#define engstateStateInitLocked(pGpu, pEngstate) engstateStateInitLocked_DISPATCH(pGpu, pEngstate)
#define engstateStateInitUnlocked(pGpu, pEngstate) engstateStateInitUnlocked_DISPATCH(pGpu, pEngstate)
#define engstateStatePreLoad(pGpu, pEngstate, arg0) engstateStatePreLoad_DISPATCH(pGpu, pEngstate, arg0)
#define engstateStateLoad(pGpu, pEngstate, arg0) engstateStateLoad_DISPATCH(pGpu, pEngstate, arg0)
#define engstateStatePostLoad(pGpu, pEngstate, arg0) engstateStatePostLoad_DISPATCH(pGpu, pEngstate, arg0)
#define engstateStatePreUnload(pGpu, pEngstate, arg0) engstateStatePreUnload_DISPATCH(pGpu, pEngstate, arg0)
#define engstateStateUnload(pGpu, pEngstate, arg0) engstateStateUnload_DISPATCH(pGpu, pEngstate, arg0)
#define engstateStatePostUnload(pGpu, pEngstate, arg0) engstateStatePostUnload_DISPATCH(pGpu, pEngstate, arg0)
#define engstateStateDestroy(pGpu, pEngstate) engstateStateDestroy_DISPATCH(pGpu, pEngstate)
#define engstateAllocTunableState(pGpu, pEngstate, ppTunableState) engstateAllocTunableState_DISPATCH(pGpu, pEngstate, ppTunableState)
#define engstateFreeTunableState(pGpu, pEngstate, pTunableState) engstateFreeTunableState_DISPATCH(pGpu, pEngstate, pTunableState)
#define engstateGetTunableState(pGpu, pEngstate, pTunableState) engstateGetTunableState_DISPATCH(pGpu, pEngstate, pTunableState)
#define engstateSetTunableState(pGpu, pEngstate, pTunableState) engstateSetTunableState_DISPATCH(pGpu, pEngstate, pTunableState)
#define engstateReconcileTunableState(pGpu, pEngstate, pTunableState) engstateReconcileTunableState_DISPATCH(pGpu, pEngstate, pTunableState)
#define engstateCompareTunableState(pGpu, pEngstate, pTunables1, pTunables2) engstateCompareTunableState_DISPATCH(pGpu, pEngstate, pTunables1, pTunables2)
#define engstateIsPresent(pGpu, pEngstate) engstateIsPresent_DISPATCH(pGpu, pEngstate)
NV_STATUS engstateConstructEngine_IMPL(POBJGPU pGpu, POBJENGSTATE pEngstate, ENGDESCRIPTOR arg0);
static inline NV_STATUS engstateConstructEngine_DISPATCH(POBJGPU pGpu, POBJENGSTATE pEngstate, ENGDESCRIPTOR arg0) {
return pEngstate->__engstateConstructEngine__(pGpu, pEngstate, arg0);
}
void engstateInitMissing_IMPL(POBJGPU pGpu, POBJENGSTATE pEngstate);
static inline void engstateInitMissing_DISPATCH(POBJGPU pGpu, POBJENGSTATE pEngstate) {
pEngstate->__engstateInitMissing__(pGpu, pEngstate);
}
NV_STATUS engstateStatePreInitLocked_IMPL(POBJGPU pGpu, POBJENGSTATE pEngstate);
static inline NV_STATUS engstateStatePreInitLocked_DISPATCH(POBJGPU pGpu, POBJENGSTATE pEngstate) {
return pEngstate->__engstateStatePreInitLocked__(pGpu, pEngstate);
}
NV_STATUS engstateStatePreInitUnlocked_IMPL(POBJGPU pGpu, POBJENGSTATE pEngstate);
static inline NV_STATUS engstateStatePreInitUnlocked_DISPATCH(POBJGPU pGpu, POBJENGSTATE pEngstate) {
return pEngstate->__engstateStatePreInitUnlocked__(pGpu, pEngstate);
}
NV_STATUS engstateStateInitLocked_IMPL(POBJGPU pGpu, POBJENGSTATE pEngstate);
static inline NV_STATUS engstateStateInitLocked_DISPATCH(POBJGPU pGpu, POBJENGSTATE pEngstate) {
return pEngstate->__engstateStateInitLocked__(pGpu, pEngstate);
}
NV_STATUS engstateStateInitUnlocked_IMPL(POBJGPU pGpu, POBJENGSTATE pEngstate);
static inline NV_STATUS engstateStateInitUnlocked_DISPATCH(POBJGPU pGpu, POBJENGSTATE pEngstate) {
return pEngstate->__engstateStateInitUnlocked__(pGpu, pEngstate);
}
NV_STATUS engstateStatePreLoad_IMPL(POBJGPU pGpu, POBJENGSTATE pEngstate, NvU32 arg0);
static inline NV_STATUS engstateStatePreLoad_DISPATCH(POBJGPU pGpu, POBJENGSTATE pEngstate, NvU32 arg0) {
return pEngstate->__engstateStatePreLoad__(pGpu, pEngstate, arg0);
}
NV_STATUS engstateStateLoad_IMPL(POBJGPU pGpu, POBJENGSTATE pEngstate, NvU32 arg0);
static inline NV_STATUS engstateStateLoad_DISPATCH(POBJGPU pGpu, POBJENGSTATE pEngstate, NvU32 arg0) {
return pEngstate->__engstateStateLoad__(pGpu, pEngstate, arg0);
}
NV_STATUS engstateStatePostLoad_IMPL(POBJGPU pGpu, POBJENGSTATE pEngstate, NvU32 arg0);
static inline NV_STATUS engstateStatePostLoad_DISPATCH(POBJGPU pGpu, POBJENGSTATE pEngstate, NvU32 arg0) {
return pEngstate->__engstateStatePostLoad__(pGpu, pEngstate, arg0);
}
NV_STATUS engstateStatePreUnload_IMPL(POBJGPU pGpu, POBJENGSTATE pEngstate, NvU32 arg0);
static inline NV_STATUS engstateStatePreUnload_DISPATCH(POBJGPU pGpu, POBJENGSTATE pEngstate, NvU32 arg0) {
return pEngstate->__engstateStatePreUnload__(pGpu, pEngstate, arg0);
}
NV_STATUS engstateStateUnload_IMPL(POBJGPU pGpu, POBJENGSTATE pEngstate, NvU32 arg0);
static inline NV_STATUS engstateStateUnload_DISPATCH(POBJGPU pGpu, POBJENGSTATE pEngstate, NvU32 arg0) {
return pEngstate->__engstateStateUnload__(pGpu, pEngstate, arg0);
}
NV_STATUS engstateStatePostUnload_IMPL(POBJGPU pGpu, POBJENGSTATE pEngstate, NvU32 arg0);
static inline NV_STATUS engstateStatePostUnload_DISPATCH(POBJGPU pGpu, POBJENGSTATE pEngstate, NvU32 arg0) {
return pEngstate->__engstateStatePostUnload__(pGpu, pEngstate, arg0);
}
void engstateStateDestroy_IMPL(POBJGPU pGpu, POBJENGSTATE pEngstate);
static inline void engstateStateDestroy_DISPATCH(POBJGPU pGpu, POBJENGSTATE pEngstate) {
pEngstate->__engstateStateDestroy__(pGpu, pEngstate);
}
NV_STATUS engstateAllocTunableState_IMPL(POBJGPU pGpu, POBJENGSTATE pEngstate, void **ppTunableState);
static inline NV_STATUS engstateAllocTunableState_DISPATCH(POBJGPU pGpu, POBJENGSTATE pEngstate, void **ppTunableState) {
return pEngstate->__engstateAllocTunableState__(pGpu, pEngstate, ppTunableState);
}
void engstateFreeTunableState_IMPL(POBJGPU pGpu, POBJENGSTATE pEngstate, void *pTunableState);
static inline void engstateFreeTunableState_DISPATCH(POBJGPU pGpu, POBJENGSTATE pEngstate, void *pTunableState) {
pEngstate->__engstateFreeTunableState__(pGpu, pEngstate, pTunableState);
}
NV_STATUS engstateGetTunableState_IMPL(POBJGPU pGpu, POBJENGSTATE pEngstate, void *pTunableState);
static inline NV_STATUS engstateGetTunableState_DISPATCH(POBJGPU pGpu, POBJENGSTATE pEngstate, void *pTunableState) {
return pEngstate->__engstateGetTunableState__(pGpu, pEngstate, pTunableState);
}
NV_STATUS engstateSetTunableState_IMPL(POBJGPU pGpu, POBJENGSTATE pEngstate, void *pTunableState);
static inline NV_STATUS engstateSetTunableState_DISPATCH(POBJGPU pGpu, POBJENGSTATE pEngstate, void *pTunableState) {
return pEngstate->__engstateSetTunableState__(pGpu, pEngstate, pTunableState);
}
NV_STATUS engstateReconcileTunableState_IMPL(POBJGPU pGpu, POBJENGSTATE pEngstate, void *pTunableState);
static inline NV_STATUS engstateReconcileTunableState_DISPATCH(POBJGPU pGpu, POBJENGSTATE pEngstate, void *pTunableState) {
return pEngstate->__engstateReconcileTunableState__(pGpu, pEngstate, pTunableState);
}
NV_STATUS engstateCompareTunableState_IMPL(POBJGPU pGpu, POBJENGSTATE pEngstate, void *pTunables1, void *pTunables2);
static inline NV_STATUS engstateCompareTunableState_DISPATCH(POBJGPU pGpu, POBJENGSTATE pEngstate, void *pTunables1, void *pTunables2) {
return pEngstate->__engstateCompareTunableState__(pGpu, pEngstate, pTunables1, pTunables2);
}
NvBool engstateIsPresent_IMPL(POBJGPU pGpu, POBJENGSTATE pEngstate);
static inline NvBool engstateIsPresent_DISPATCH(POBJGPU pGpu, POBJENGSTATE pEngstate) {
return pEngstate->__engstateIsPresent__(pGpu, pEngstate);
}
NV_STATUS engstateConstructBase_IMPL(struct OBJENGSTATE *arg0, struct OBJGPU *arg1, ENGDESCRIPTOR arg2);
#ifdef __nvoc_eng_state_h_disabled
static inline NV_STATUS engstateConstructBase(struct OBJENGSTATE *arg0, struct OBJGPU *arg1, ENGDESCRIPTOR arg2) {
NV_ASSERT_FAILED_PRECOMP("OBJENGSTATE was disabled!");
return NV_ERR_NOT_SUPPORTED;
}
#else //__nvoc_eng_state_h_disabled
#define engstateConstructBase(arg0, arg1, arg2) engstateConstructBase_IMPL(arg0, arg1, arg2)
#endif //__nvoc_eng_state_h_disabled
void engstateLogStateTransitionPre_IMPL(struct OBJENGSTATE *arg0, ENGSTATE_STATE arg1, ENGSTATE_TRANSITION_DATA *arg2);
#ifdef __nvoc_eng_state_h_disabled
static inline void engstateLogStateTransitionPre(struct OBJENGSTATE *arg0, ENGSTATE_STATE arg1, ENGSTATE_TRANSITION_DATA *arg2) {
NV_ASSERT_FAILED_PRECOMP("OBJENGSTATE was disabled!");
}
#else //__nvoc_eng_state_h_disabled
#define engstateLogStateTransitionPre(arg0, arg1, arg2) engstateLogStateTransitionPre_IMPL(arg0, arg1, arg2)
#endif //__nvoc_eng_state_h_disabled
void engstateLogStateTransitionPost_IMPL(struct OBJENGSTATE *arg0, ENGSTATE_STATE arg1, ENGSTATE_TRANSITION_DATA *arg2);
#ifdef __nvoc_eng_state_h_disabled
static inline void engstateLogStateTransitionPost(struct OBJENGSTATE *arg0, ENGSTATE_STATE arg1, ENGSTATE_TRANSITION_DATA *arg2) {
NV_ASSERT_FAILED_PRECOMP("OBJENGSTATE was disabled!");
}
#else //__nvoc_eng_state_h_disabled
#define engstateLogStateTransitionPost(arg0, arg1, arg2) engstateLogStateTransitionPost_IMPL(arg0, arg1, arg2)
#endif //__nvoc_eng_state_h_disabled
const char *engstateGetName_IMPL(struct OBJENGSTATE *arg0);
#ifdef __nvoc_eng_state_h_disabled
static inline const char *engstateGetName(struct OBJENGSTATE *arg0) {
NV_ASSERT_FAILED_PRECOMP("OBJENGSTATE was disabled!");
return NULL;
}
#else //__nvoc_eng_state_h_disabled
#define engstateGetName(arg0) engstateGetName_IMPL(arg0)
#endif //__nvoc_eng_state_h_disabled
void engstateDestruct_IMPL(POBJENGSTATE pEngstate);
#define __nvoc_engstateDestruct(pEngstate) engstateDestruct_IMPL(pEngstate)
NV_STATUS engstateStatePreInit_IMPL(POBJGPU pGpu, POBJENGSTATE pEngstate);
#ifdef __nvoc_eng_state_h_disabled
static inline NV_STATUS engstateStatePreInit(POBJGPU pGpu, POBJENGSTATE pEngstate) {
NV_ASSERT_FAILED_PRECOMP("OBJENGSTATE was disabled!");
return NV_ERR_NOT_SUPPORTED;
}
#else //__nvoc_eng_state_h_disabled
#define engstateStatePreInit(pGpu, pEngstate) engstateStatePreInit_IMPL(pGpu, pEngstate)
#endif //__nvoc_eng_state_h_disabled
NV_STATUS engstateStateInit_IMPL(POBJGPU pGpu, POBJENGSTATE pEngstate);
#ifdef __nvoc_eng_state_h_disabled
static inline NV_STATUS engstateStateInit(POBJGPU pGpu, POBJENGSTATE pEngstate) {
NV_ASSERT_FAILED_PRECOMP("OBJENGSTATE was disabled!");
return NV_ERR_NOT_SUPPORTED;
}
#else //__nvoc_eng_state_h_disabled
#define engstateStateInit(pGpu, pEngstate) engstateStateInit_IMPL(pGpu, pEngstate)
#endif //__nvoc_eng_state_h_disabled
ENGDESCRIPTOR engstateGetDescriptor_IMPL(POBJENGSTATE pEngstate);
#ifdef __nvoc_eng_state_h_disabled
static inline ENGDESCRIPTOR engstateGetDescriptor(POBJENGSTATE pEngstate) {
NV_ASSERT_FAILED_PRECOMP("OBJENGSTATE was disabled!");
ENGDESCRIPTOR ret;
portMemSet(&ret, 0, sizeof(ENGDESCRIPTOR));
return ret;
}
#else //__nvoc_eng_state_h_disabled
#define engstateGetDescriptor(pEngstate) engstateGetDescriptor_IMPL(pEngstate)
#endif //__nvoc_eng_state_h_disabled
struct OBJFIFO *engstateGetFifo_IMPL(POBJENGSTATE pEngstate);
#ifdef __nvoc_eng_state_h_disabled
static inline struct OBJFIFO *engstateGetFifo(POBJENGSTATE pEngstate) {
NV_ASSERT_FAILED_PRECOMP("OBJENGSTATE was disabled!");
return NULL;
}
#else //__nvoc_eng_state_h_disabled
#define engstateGetFifo(pEngstate) engstateGetFifo_IMPL(pEngstate)
#endif //__nvoc_eng_state_h_disabled
#undef PRIVATE_FIELD
#endif // _ENG_STATE_H_
#ifdef __cplusplus
} // extern "C"
#endif
#endif // _G_ENG_STATE_NVOC_H_

@@ -0,0 +1,379 @@
#define NVOC_EVENT_BUFFER_H_PRIVATE_ACCESS_ALLOWED
#include "nvoc/runtime.h"
#include "nvoc/rtti.h"
#include "nvtypes.h"
#include "nvport/nvport.h"
#include "nvport/inline/util_valist.h"
#include "utils/nvassert.h"
#include "g_event_buffer_nvoc.h"
#ifdef DEBUG
char __nvoc_class_id_uniqueness_check_0x63502b = 1;
#endif
extern const struct NVOC_CLASS_DEF __nvoc_class_def_EventBuffer;
extern const struct NVOC_CLASS_DEF __nvoc_class_def_Object;
extern const struct NVOC_CLASS_DEF __nvoc_class_def_RsResource;
extern const struct NVOC_CLASS_DEF __nvoc_class_def_RmResourceCommon;
extern const struct NVOC_CLASS_DEF __nvoc_class_def_RmResource;
void __nvoc_init_EventBuffer(EventBuffer*);
void __nvoc_init_funcTable_EventBuffer(EventBuffer*);
NV_STATUS __nvoc_ctor_EventBuffer(EventBuffer*, struct CALL_CONTEXT * arg_pCallContext, struct RS_RES_ALLOC_PARAMS_INTERNAL * arg_pParams);
void __nvoc_init_dataField_EventBuffer(EventBuffer*);
void __nvoc_dtor_EventBuffer(EventBuffer*);
extern const struct NVOC_EXPORT_INFO __nvoc_export_info_EventBuffer;
static const struct NVOC_RTTI __nvoc_rtti_EventBuffer_EventBuffer = {
/*pClassDef=*/ &__nvoc_class_def_EventBuffer,
/*dtor=*/ (NVOC_DYNAMIC_DTOR) &__nvoc_dtor_EventBuffer,
/*offset=*/ 0,
};
static const struct NVOC_RTTI __nvoc_rtti_EventBuffer_Object = {
/*pClassDef=*/ &__nvoc_class_def_Object,
/*dtor=*/ &__nvoc_destructFromBase,
/*offset=*/ NV_OFFSETOF(EventBuffer, __nvoc_base_RmResource.__nvoc_base_RsResource.__nvoc_base_Object),
};
static const struct NVOC_RTTI __nvoc_rtti_EventBuffer_RsResource = {
/*pClassDef=*/ &__nvoc_class_def_RsResource,
/*dtor=*/ &__nvoc_destructFromBase,
/*offset=*/ NV_OFFSETOF(EventBuffer, __nvoc_base_RmResource.__nvoc_base_RsResource),
};
static const struct NVOC_RTTI __nvoc_rtti_EventBuffer_RmResourceCommon = {
/*pClassDef=*/ &__nvoc_class_def_RmResourceCommon,
/*dtor=*/ &__nvoc_destructFromBase,
/*offset=*/ NV_OFFSETOF(EventBuffer, __nvoc_base_RmResource.__nvoc_base_RmResourceCommon),
};
static const struct NVOC_RTTI __nvoc_rtti_EventBuffer_RmResource = {
/*pClassDef=*/ &__nvoc_class_def_RmResource,
/*dtor=*/ &__nvoc_destructFromBase,
/*offset=*/ NV_OFFSETOF(EventBuffer, __nvoc_base_RmResource),
};
static const struct NVOC_CASTINFO __nvoc_castinfo_EventBuffer = {
/*numRelatives=*/ 5,
/*relatives=*/ {
&__nvoc_rtti_EventBuffer_EventBuffer,
&__nvoc_rtti_EventBuffer_RmResource,
&__nvoc_rtti_EventBuffer_RmResourceCommon,
&__nvoc_rtti_EventBuffer_RsResource,
&__nvoc_rtti_EventBuffer_Object,
},
};
const struct NVOC_CLASS_DEF __nvoc_class_def_EventBuffer =
{
/*classInfo=*/ {
/*size=*/ sizeof(EventBuffer),
/*classId=*/ classId(EventBuffer),
/*providerId=*/ &__nvoc_rtti_provider,
#if NV_PRINTF_STRINGS_ALLOWED
/*name=*/ "EventBuffer",
#endif
},
/*objCreatefn=*/ (NVOC_DYNAMIC_OBJ_CREATE) &__nvoc_objCreateDynamic_EventBuffer,
/*pCastInfo=*/ &__nvoc_castinfo_EventBuffer,
/*pExportInfo=*/ &__nvoc_export_info_EventBuffer
};
static NvBool __nvoc_thunk_RmResource_eventbufferShareCallback(struct EventBuffer *pResource, struct RsClient *pInvokingClient, struct RsResourceRef *pParentRef, RS_SHARE_POLICY *pSharePolicy) {
return rmresShareCallback((struct RmResource *)(((unsigned char *)pResource) + __nvoc_rtti_EventBuffer_RmResource.offset), pInvokingClient, pParentRef, pSharePolicy);
}
static NV_STATUS __nvoc_thunk_RmResource_eventbufferCheckMemInterUnmap(struct EventBuffer *pRmResource, NvBool bSubdeviceHandleProvided) {
return rmresCheckMemInterUnmap((struct RmResource *)(((unsigned char *)pRmResource) + __nvoc_rtti_EventBuffer_RmResource.offset), bSubdeviceHandleProvided);
}
static NV_STATUS __nvoc_thunk_RsResource_eventbufferControl(struct EventBuffer *pResource, struct CALL_CONTEXT *pCallContext, struct RS_RES_CONTROL_PARAMS_INTERNAL *pParams) {
return resControl((struct RsResource *)(((unsigned char *)pResource) + __nvoc_rtti_EventBuffer_RsResource.offset), pCallContext, pParams);
}
static NV_STATUS __nvoc_thunk_RmResource_eventbufferGetMemInterMapParams(struct EventBuffer *pRmResource, RMRES_MEM_INTER_MAP_PARAMS *pParams) {
return rmresGetMemInterMapParams((struct RmResource *)(((unsigned char *)pRmResource) + __nvoc_rtti_EventBuffer_RmResource.offset), pParams);
}
static NV_STATUS __nvoc_thunk_RmResource_eventbufferGetMemoryMappingDescriptor(struct EventBuffer *pRmResource, struct MEMORY_DESCRIPTOR **ppMemDesc) {
return rmresGetMemoryMappingDescriptor((struct RmResource *)(((unsigned char *)pRmResource) + __nvoc_rtti_EventBuffer_RmResource.offset), ppMemDesc);
}
static NvU32 __nvoc_thunk_RsResource_eventbufferGetRefCount(struct EventBuffer *pResource) {
return resGetRefCount((struct RsResource *)(((unsigned char *)pResource) + __nvoc_rtti_EventBuffer_RsResource.offset));
}
static NV_STATUS __nvoc_thunk_RsResource_eventbufferControlFilter(struct EventBuffer *pResource, struct CALL_CONTEXT *pCallContext, struct RS_RES_CONTROL_PARAMS_INTERNAL *pParams) {
return resControlFilter((struct RsResource *)(((unsigned char *)pResource) + __nvoc_rtti_EventBuffer_RsResource.offset), pCallContext, pParams);
}
static void __nvoc_thunk_RsResource_eventbufferAddAdditionalDependants(struct RsClient *pClient, struct EventBuffer *pResource, RsResourceRef *pReference) {
resAddAdditionalDependants(pClient, (struct RsResource *)(((unsigned char *)pResource) + __nvoc_rtti_EventBuffer_RsResource.offset), pReference);
}
static NV_STATUS __nvoc_thunk_RsResource_eventbufferUnmap(struct EventBuffer *pResource, struct CALL_CONTEXT *pCallContext, RsCpuMapping *pCpuMapping) {
return resUnmap((struct RsResource *)(((unsigned char *)pResource) + __nvoc_rtti_EventBuffer_RsResource.offset), pCallContext, pCpuMapping);
}
static NV_STATUS __nvoc_thunk_RmResource_eventbufferControl_Prologue(struct EventBuffer *pResource, struct CALL_CONTEXT *pCallContext, struct RS_RES_CONTROL_PARAMS_INTERNAL *pParams) {
return rmresControl_Prologue((struct RmResource *)(((unsigned char *)pResource) + __nvoc_rtti_EventBuffer_RmResource.offset), pCallContext, pParams);
}
static NvBool __nvoc_thunk_RsResource_eventbufferCanCopy(struct EventBuffer *pResource) {
return resCanCopy((struct RsResource *)(((unsigned char *)pResource) + __nvoc_rtti_EventBuffer_RsResource.offset));
}
static NV_STATUS __nvoc_thunk_RsResource_eventbufferMapTo(struct EventBuffer *pResource, RS_RES_MAP_TO_PARAMS *pParams) {
return resMapTo((struct RsResource *)(((unsigned char *)pResource) + __nvoc_rtti_EventBuffer_RsResource.offset), pParams);
}
static void __nvoc_thunk_RsResource_eventbufferPreDestruct(struct EventBuffer *pResource) {
resPreDestruct((struct RsResource *)(((unsigned char *)pResource) + __nvoc_rtti_EventBuffer_RsResource.offset));
}
static NV_STATUS __nvoc_thunk_RsResource_eventbufferUnmapFrom(struct EventBuffer *pResource, RS_RES_UNMAP_FROM_PARAMS *pParams) {
return resUnmapFrom((struct RsResource *)(((unsigned char *)pResource) + __nvoc_rtti_EventBuffer_RsResource.offset), pParams);
}
static void __nvoc_thunk_RmResource_eventbufferControl_Epilogue(struct EventBuffer *pResource, struct CALL_CONTEXT *pCallContext, struct RS_RES_CONTROL_PARAMS_INTERNAL *pParams) {
rmresControl_Epilogue((struct RmResource *)(((unsigned char *)pResource) + __nvoc_rtti_EventBuffer_RmResource.offset), pCallContext, pParams);
}
static NV_STATUS __nvoc_thunk_RsResource_eventbufferControlLookup(struct EventBuffer *pResource, struct RS_RES_CONTROL_PARAMS_INTERNAL *pParams, const struct NVOC_EXPORTED_METHOD_DEF **ppEntry) {
return resControlLookup((struct RsResource *)(((unsigned char *)pResource) + __nvoc_rtti_EventBuffer_RsResource.offset), pParams, ppEntry);
}
static NV_STATUS __nvoc_thunk_RsResource_eventbufferMap(struct EventBuffer *pResource, struct CALL_CONTEXT *pCallContext, RS_CPU_MAP_PARAMS *pParams, RsCpuMapping *pCpuMapping) {
return resMap((struct RsResource *)(((unsigned char *)pResource) + __nvoc_rtti_EventBuffer_RsResource.offset), pCallContext, pParams, pCpuMapping);
}
static NvBool __nvoc_thunk_RmResource_eventbufferAccessCallback(struct EventBuffer *pResource, struct RsClient *pInvokingClient, void *pAllocParams, RsAccessRight accessRight) {
return rmresAccessCallback((struct RmResource *)(((unsigned char *)pResource) + __nvoc_rtti_EventBuffer_RmResource.offset), pInvokingClient, pAllocParams, accessRight);
}
#if !defined(NVOC_EXPORTED_METHOD_DISABLED_BY_FLAG)
#define NVOC_EXPORTED_METHOD_DISABLED_BY_FLAG(x) (0)
#endif
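/*
 * Exported-method table for EventBuffer: each entry binds an RM control
 * command ID (0x90cd01xx) to its handler along with the export flags,
 * required access rights and parameter-struct size used when the control
 * call is validated and dispatched; an entry's function pointer is compiled
 * out to NULL when its flag is disabled at build time.
 */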
static const struct NVOC_EXPORTED_METHOD_DEF __nvoc_exported_method_def_EventBuffer[] =
{
{ /* [0] */
#if NVOC_EXPORTED_METHOD_DISABLED_BY_FLAG(0x11u)
/*pFunc=*/ (void (*)(void)) NULL,
#else
/*pFunc=*/ (void (*)(void)) eventbuffertBufferCtrlCmdEnableEvent_IMPL,
#endif // NVOC_EXPORTED_METHOD_DISABLED_BY_FLAG(0x11u)
/*flags=*/ 0x11u,
/*accessRight=*/0x0u,
/*methodId=*/ 0x90cd0101u,
/*paramSize=*/ sizeof(NV_EVENT_BUFFER_CTRL_CMD_ENABLE_EVENTS_PARAMS),
/*pClassInfo=*/ &(__nvoc_class_def_EventBuffer.classInfo),
#if NV_PRINTF_STRINGS_ALLOWED
/*func=*/ "eventbuffertBufferCtrlCmdEnableEvent"
#endif
},
{ /* [1] */
#if NVOC_EXPORTED_METHOD_DISABLED_BY_FLAG(0x11u)
/*pFunc=*/ (void (*)(void)) NULL,
#else
/*pFunc=*/ (void (*)(void)) eventbuffertBufferCtrlCmdUpdateGet_IMPL,
#endif // NVOC_EXPORTED_METHOD_DISABLED_BY_FLAG(0x11u)
/*flags=*/ 0x11u,
/*accessRight=*/0x0u,
/*methodId=*/ 0x90cd0102u,
/*paramSize=*/ sizeof(NV_EVENT_BUFFER_CTRL_CMD_UPDATE_GET_PARAMS),
/*pClassInfo=*/ &(__nvoc_class_def_EventBuffer.classInfo),
#if NV_PRINTF_STRINGS_ALLOWED
/*func=*/ "eventbuffertBufferCtrlCmdUpdateGet"
#endif
},
{ /* [2] */
#if NVOC_EXPORTED_METHOD_DISABLED_BY_FLAG(0x10u)
/*pFunc=*/ (void (*)(void)) NULL,
#else
/*pFunc=*/ (void (*)(void)) eventbuffertBufferCtrlCmdFlush_IMPL,
#endif // NVOC_EXPORTED_METHOD_DISABLED_BY_FLAG(0x10u)
/*flags=*/ 0x10u,
/*accessRight=*/0x0u,
/*methodId=*/ 0x90cd0104u,
/*paramSize=*/ 0,
/*pClassInfo=*/ &(__nvoc_class_def_EventBuffer.classInfo),
#if NV_PRINTF_STRINGS_ALLOWED
/*func=*/ "eventbuffertBufferCtrlCmdFlush"
#endif
},
{ /* [3] */
#if NVOC_EXPORTED_METHOD_DISABLED_BY_FLAG(0x10u)
/*pFunc=*/ (void (*)(void)) NULL,
#else
/*pFunc=*/ (void (*)(void)) eventbuffertBufferCtrlCmdPostTelemetryEvent_IMPL,
#endif // NVOC_EXPORTED_METHOD_DISABLED_BY_FLAG(0x10u)
/*flags=*/ 0x10u,
/*accessRight=*/0x0u,
/*methodId=*/ 0x90cd0105u,
/*paramSize=*/ sizeof(NV_EVENT_BUFFER_CTRL_CMD_POST_TELEMETRY_EVENT_PARAMS),
/*pClassInfo=*/ &(__nvoc_class_def_EventBuffer.classInfo),
#if NV_PRINTF_STRINGS_ALLOWED
/*func=*/ "eventbuffertBufferCtrlCmdPostTelemetryEvent"
#endif
},
};
const struct NVOC_EXPORT_INFO __nvoc_export_info_EventBuffer =
{
/*numEntries=*/ 4,
/*pExportEntries=*/ __nvoc_exported_method_def_EventBuffer
};
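/*
 * Illustrative sketch (not part of the generated file): how a control command id such
 * as 0x90cd0102 (NV_EVENT_BUFFER_CTRL_CMD_UPDATE_GET) could be resolved against the
 * export table above.  Field names are assumed to match the initializer labels
 * (numEntries, pExportEntries, methodId); the real lookup goes through
 * resControlLookup()/eventbufferControlLookup().
 */
static inline const struct NVOC_EXPORTED_METHOD_DEF *exampleFindEventBufferMethod(NvU32 methodId)
{
    NvU32 i;
    for (i = 0; i < __nvoc_export_info_EventBuffer.numEntries; i++)
    {
        const struct NVOC_EXPORTED_METHOD_DEF *pEntry = &__nvoc_export_info_EventBuffer.pExportEntries[i];
        if (pEntry->methodId == methodId)
            return pEntry;
    }
    return NULL; /* command id is not exported by EventBuffer */
}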
void __nvoc_dtor_RmResource(RmResource*);
void __nvoc_dtor_EventBuffer(EventBuffer *pThis) {
__nvoc_eventbufferDestruct(pThis);
__nvoc_dtor_RmResource(&pThis->__nvoc_base_RmResource);
PORT_UNREFERENCED_VARIABLE(pThis);
}
void __nvoc_init_dataField_EventBuffer(EventBuffer *pThis) {
PORT_UNREFERENCED_VARIABLE(pThis);
}
NV_STATUS __nvoc_ctor_RmResource(RmResource* , struct CALL_CONTEXT *, struct RS_RES_ALLOC_PARAMS_INTERNAL *);
NV_STATUS __nvoc_ctor_EventBuffer(EventBuffer *pThis, struct CALL_CONTEXT * arg_pCallContext, struct RS_RES_ALLOC_PARAMS_INTERNAL * arg_pParams) {
NV_STATUS status = NV_OK;
status = __nvoc_ctor_RmResource(&pThis->__nvoc_base_RmResource, arg_pCallContext, arg_pParams);
if (status != NV_OK) goto __nvoc_ctor_EventBuffer_fail_RmResource;
__nvoc_init_dataField_EventBuffer(pThis);
status = __nvoc_eventbufferConstruct(pThis, arg_pCallContext, arg_pParams);
if (status != NV_OK) goto __nvoc_ctor_EventBuffer_fail__init;
goto __nvoc_ctor_EventBuffer_exit; // Success
__nvoc_ctor_EventBuffer_fail__init:
__nvoc_dtor_RmResource(&pThis->__nvoc_base_RmResource);
__nvoc_ctor_EventBuffer_fail_RmResource:
__nvoc_ctor_EventBuffer_exit:
return status;
}
static void __nvoc_init_funcTable_EventBuffer_1(EventBuffer *pThis) {
PORT_UNREFERENCED_VARIABLE(pThis);
#if !NVOC_EXPORTED_METHOD_DISABLED_BY_FLAG(0x11u)
pThis->__eventbuffertBufferCtrlCmdEnableEvent__ = &eventbuffertBufferCtrlCmdEnableEvent_IMPL;
#endif
#if !NVOC_EXPORTED_METHOD_DISABLED_BY_FLAG(0x11u)
pThis->__eventbuffertBufferCtrlCmdUpdateGet__ = &eventbuffertBufferCtrlCmdUpdateGet_IMPL;
#endif
#if !NVOC_EXPORTED_METHOD_DISABLED_BY_FLAG(0x10u)
pThis->__eventbuffertBufferCtrlCmdFlush__ = &eventbuffertBufferCtrlCmdFlush_IMPL;
#endif
#if !NVOC_EXPORTED_METHOD_DISABLED_BY_FLAG(0x10u)
pThis->__eventbuffertBufferCtrlCmdPostTelemetryEvent__ = &eventbuffertBufferCtrlCmdPostTelemetryEvent_IMPL;
#endif
pThis->__eventbufferShareCallback__ = &__nvoc_thunk_RmResource_eventbufferShareCallback;
pThis->__eventbufferCheckMemInterUnmap__ = &__nvoc_thunk_RmResource_eventbufferCheckMemInterUnmap;
pThis->__eventbufferControl__ = &__nvoc_thunk_RsResource_eventbufferControl;
pThis->__eventbufferGetMemInterMapParams__ = &__nvoc_thunk_RmResource_eventbufferGetMemInterMapParams;
pThis->__eventbufferGetMemoryMappingDescriptor__ = &__nvoc_thunk_RmResource_eventbufferGetMemoryMappingDescriptor;
pThis->__eventbufferGetRefCount__ = &__nvoc_thunk_RsResource_eventbufferGetRefCount;
pThis->__eventbufferControlFilter__ = &__nvoc_thunk_RsResource_eventbufferControlFilter;
pThis->__eventbufferAddAdditionalDependants__ = &__nvoc_thunk_RsResource_eventbufferAddAdditionalDependants;
pThis->__eventbufferUnmap__ = &__nvoc_thunk_RsResource_eventbufferUnmap;
pThis->__eventbufferControl_Prologue__ = &__nvoc_thunk_RmResource_eventbufferControl_Prologue;
pThis->__eventbufferCanCopy__ = &__nvoc_thunk_RsResource_eventbufferCanCopy;
pThis->__eventbufferMapTo__ = &__nvoc_thunk_RsResource_eventbufferMapTo;
pThis->__eventbufferPreDestruct__ = &__nvoc_thunk_RsResource_eventbufferPreDestruct;
pThis->__eventbufferUnmapFrom__ = &__nvoc_thunk_RsResource_eventbufferUnmapFrom;
pThis->__eventbufferControl_Epilogue__ = &__nvoc_thunk_RmResource_eventbufferControl_Epilogue;
pThis->__eventbufferControlLookup__ = &__nvoc_thunk_RsResource_eventbufferControlLookup;
pThis->__eventbufferMap__ = &__nvoc_thunk_RsResource_eventbufferMap;
pThis->__eventbufferAccessCallback__ = &__nvoc_thunk_RmResource_eventbufferAccessCallback;
}
void __nvoc_init_funcTable_EventBuffer(EventBuffer *pThis) {
__nvoc_init_funcTable_EventBuffer_1(pThis);
}
void __nvoc_init_RmResource(RmResource*);
void __nvoc_init_EventBuffer(EventBuffer *pThis) {
pThis->__nvoc_pbase_EventBuffer = pThis;
pThis->__nvoc_pbase_Object = &pThis->__nvoc_base_RmResource.__nvoc_base_RsResource.__nvoc_base_Object;
pThis->__nvoc_pbase_RsResource = &pThis->__nvoc_base_RmResource.__nvoc_base_RsResource;
pThis->__nvoc_pbase_RmResourceCommon = &pThis->__nvoc_base_RmResource.__nvoc_base_RmResourceCommon;
pThis->__nvoc_pbase_RmResource = &pThis->__nvoc_base_RmResource;
__nvoc_init_RmResource(&pThis->__nvoc_base_RmResource);
__nvoc_init_funcTable_EventBuffer(pThis);
}
NV_STATUS __nvoc_objCreate_EventBuffer(EventBuffer **ppThis, Dynamic *pParent, NvU32 createFlags, struct CALL_CONTEXT * arg_pCallContext, struct RS_RES_ALLOC_PARAMS_INTERNAL * arg_pParams) {
NV_STATUS status;
Object *pParentObj;
EventBuffer *pThis;
pThis = portMemAllocNonPaged(sizeof(EventBuffer));
if (pThis == NULL) return NV_ERR_NO_MEMORY;
portMemSet(pThis, 0, sizeof(EventBuffer));
__nvoc_initRtti(staticCast(pThis, Dynamic), &__nvoc_class_def_EventBuffer);
if (pParent != NULL && !(createFlags & NVOC_OBJ_CREATE_FLAGS_PARENT_HALSPEC_ONLY))
{
pParentObj = dynamicCast(pParent, Object);
objAddChild(pParentObj, &pThis->__nvoc_base_RmResource.__nvoc_base_RsResource.__nvoc_base_Object);
}
else
{
pThis->__nvoc_base_RmResource.__nvoc_base_RsResource.__nvoc_base_Object.pParent = NULL;
}
__nvoc_init_EventBuffer(pThis);
status = __nvoc_ctor_EventBuffer(pThis, arg_pCallContext, arg_pParams);
if (status != NV_OK) goto __nvoc_objCreate_EventBuffer_cleanup;
*ppThis = pThis;
return NV_OK;
__nvoc_objCreate_EventBuffer_cleanup:
// do not call destructors here since the constructor already called them
portMemFree(pThis);
return status;
}
NV_STATUS __nvoc_objCreateDynamic_EventBuffer(EventBuffer **ppThis, Dynamic *pParent, NvU32 createFlags, va_list args) {
NV_STATUS status;
struct CALL_CONTEXT * arg_pCallContext = va_arg(args, struct CALL_CONTEXT *);
struct RS_RES_ALLOC_PARAMS_INTERNAL * arg_pParams = va_arg(args, struct RS_RES_ALLOC_PARAMS_INTERNAL *);
status = __nvoc_objCreate_EventBuffer(ppThis, pParent, createFlags, arg_pCallContext, arg_pParams);
return status;
}
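/*
 * Note (not generated code): callers that allocate this class through
 * __nvoc_class_def_EventBuffer.objCreatefn must pass exactly a CALL_CONTEXT* followed
 * by an RS_RES_ALLOC_PARAMS_INTERNAL* as the variadic arguments, matching the va_arg
 * order unpacked in __nvoc_objCreateDynamic_EventBuffer() above.
 */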

@@ -0,0 +1,288 @@
#ifndef _G_EVENT_BUFFER_NVOC_H_
#define _G_EVENT_BUFFER_NVOC_H_
#include "nvoc/runtime.h"
#ifdef __cplusplus
extern "C" {
#endif
/*
* SPDX-FileCopyrightText: Copyright (c) 2016-2021 NVIDIA CORPORATION & AFFILIATES. All rights reserved.
* SPDX-License-Identifier: MIT
*
* Permission is hereby granted, free of charge, to any person obtaining a
* copy of this software and associated documentation files (the "Software"),
* to deal in the Software without restriction, including without limitation
* the rights to use, copy, modify, merge, publish, distribute, sublicense,
* and/or sell copies of the Software, and to permit persons to whom the
* Software is furnished to do so, subject to the following conditions:
*
* The above copyright notice and this permission notice shall be included in
* all copies or substantial portions of the Software.
*
* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
* IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
* FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL
* THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
* LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
* FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER
* DEALINGS IN THE SOFTWARE.
*/
#include "g_event_buffer_nvoc.h"
#ifndef _EVENT_BUFFER_H_
#define _EVENT_BUFFER_H_
#include "resserv/resserv.h"
#include "nvoc/prelude.h"
#include "rmapi/event.h"
#include "rmapi/resource.h"
#include "ctrl/ctrl90cd.h"
#include "eventbufferproducer.h"
struct Memory;
#ifndef __NVOC_CLASS_Memory_TYPEDEF__
#define __NVOC_CLASS_Memory_TYPEDEF__
typedef struct Memory Memory;
#endif /* __NVOC_CLASS_Memory_TYPEDEF__ */
#ifndef __nvoc_class_id_Memory
#define __nvoc_class_id_Memory 0x4789f2
#endif /* __nvoc_class_id_Memory */
typedef struct
{
//
// Addr: user RO address
// Priv: return cookie to be passed to unmap
//
NvP64 headerAddr;
NvP64 headerPriv;
NvP64 recordBuffAddr;
NvP64 recordBuffPriv;
NvP64 vardataBuffAddr;
NvP64 vardataBuffPriv;
} EVENT_BUFFER_MAP_INFO;
// This class shares buffers between kernel and usermode
#ifdef NVOC_EVENT_BUFFER_H_PRIVATE_ACCESS_ALLOWED
#define PRIVATE_FIELD(x) x
#else
#define PRIVATE_FIELD(x) NVOC_PRIVATE_FIELD(x)
#endif
struct EventBuffer {
const struct NVOC_RTTI *__nvoc_rtti;
struct RmResource __nvoc_base_RmResource;
struct Object *__nvoc_pbase_Object;
struct RsResource *__nvoc_pbase_RsResource;
struct RmResourceCommon *__nvoc_pbase_RmResourceCommon;
struct RmResource *__nvoc_pbase_RmResource;
struct EventBuffer *__nvoc_pbase_EventBuffer;
NV_STATUS (*__eventbuffertBufferCtrlCmdEnableEvent__)(struct EventBuffer *, NV_EVENT_BUFFER_CTRL_CMD_ENABLE_EVENTS_PARAMS *);
NV_STATUS (*__eventbuffertBufferCtrlCmdUpdateGet__)(struct EventBuffer *, NV_EVENT_BUFFER_CTRL_CMD_UPDATE_GET_PARAMS *);
NV_STATUS (*__eventbuffertBufferCtrlCmdFlush__)(struct EventBuffer *);
NV_STATUS (*__eventbuffertBufferCtrlCmdPostTelemetryEvent__)(struct EventBuffer *, NV_EVENT_BUFFER_CTRL_CMD_POST_TELEMETRY_EVENT_PARAMS *);
NvBool (*__eventbufferShareCallback__)(struct EventBuffer *, struct RsClient *, struct RsResourceRef *, RS_SHARE_POLICY *);
NV_STATUS (*__eventbufferCheckMemInterUnmap__)(struct EventBuffer *, NvBool);
NV_STATUS (*__eventbufferControl__)(struct EventBuffer *, struct CALL_CONTEXT *, struct RS_RES_CONTROL_PARAMS_INTERNAL *);
NV_STATUS (*__eventbufferGetMemInterMapParams__)(struct EventBuffer *, RMRES_MEM_INTER_MAP_PARAMS *);
NV_STATUS (*__eventbufferGetMemoryMappingDescriptor__)(struct EventBuffer *, struct MEMORY_DESCRIPTOR **);
NvU32 (*__eventbufferGetRefCount__)(struct EventBuffer *);
NV_STATUS (*__eventbufferControlFilter__)(struct EventBuffer *, struct CALL_CONTEXT *, struct RS_RES_CONTROL_PARAMS_INTERNAL *);
void (*__eventbufferAddAdditionalDependants__)(struct RsClient *, struct EventBuffer *, RsResourceRef *);
NV_STATUS (*__eventbufferUnmap__)(struct EventBuffer *, struct CALL_CONTEXT *, RsCpuMapping *);
NV_STATUS (*__eventbufferControl_Prologue__)(struct EventBuffer *, struct CALL_CONTEXT *, struct RS_RES_CONTROL_PARAMS_INTERNAL *);
NvBool (*__eventbufferCanCopy__)(struct EventBuffer *);
NV_STATUS (*__eventbufferMapTo__)(struct EventBuffer *, RS_RES_MAP_TO_PARAMS *);
void (*__eventbufferPreDestruct__)(struct EventBuffer *);
NV_STATUS (*__eventbufferUnmapFrom__)(struct EventBuffer *, RS_RES_UNMAP_FROM_PARAMS *);
void (*__eventbufferControl_Epilogue__)(struct EventBuffer *, struct CALL_CONTEXT *, struct RS_RES_CONTROL_PARAMS_INTERNAL *);
NV_STATUS (*__eventbufferControlLookup__)(struct EventBuffer *, struct RS_RES_CONTROL_PARAMS_INTERNAL *, const struct NVOC_EXPORTED_METHOD_DEF **);
NV_STATUS (*__eventbufferMap__)(struct EventBuffer *, struct CALL_CONTEXT *, RS_CPU_MAP_PARAMS *, RsCpuMapping *);
NvBool (*__eventbufferAccessCallback__)(struct EventBuffer *, struct RsClient *, void *, RsAccessRight);
struct MEMORY_DESCRIPTOR *pHeaderDesc;
struct MEMORY_DESCRIPTOR *pRecordBufDesc;
struct MEMORY_DESCRIPTOR *pVardataBufDesc;
NvHandle hSubDevice;
NvU32 subDeviceInst;
EVENT_BUFFER_MAP_INFO kernelMapInfo;
EVENT_BUFFER_MAP_INFO clientMapInfo;
NvHandle hClient;
NvU16 seqNo;
NvBool bNotifyPending;
PEVENTNOTIFICATION pListeners;
EVENT_BUFFER_PRODUCER_INFO producerInfo;
struct Memory *pHeader;
struct Memory *pRecord;
struct Memory *pVardata;
NvHandle hInternalClient;
NvHandle hInternalDevice;
NvHandle hInternalSubdevice;
NvHandle hInternalHeader;
NvHandle hInternalBuffer;
};
#ifndef __NVOC_CLASS_EventBuffer_TYPEDEF__
#define __NVOC_CLASS_EventBuffer_TYPEDEF__
typedef struct EventBuffer EventBuffer;
#endif /* __NVOC_CLASS_EventBuffer_TYPEDEF__ */
#ifndef __nvoc_class_id_EventBuffer
#define __nvoc_class_id_EventBuffer 0x63502b
#endif /* __nvoc_class_id_EventBuffer */
extern const struct NVOC_CLASS_DEF __nvoc_class_def_EventBuffer;
#define __staticCast_EventBuffer(pThis) \
((pThis)->__nvoc_pbase_EventBuffer)
#ifdef __nvoc_event_buffer_h_disabled
#define __dynamicCast_EventBuffer(pThis) ((EventBuffer*)NULL)
#else //__nvoc_event_buffer_h_disabled
#define __dynamicCast_EventBuffer(pThis) \
((EventBuffer*)__nvoc_dynamicCast(staticCast((pThis), Dynamic), classInfo(EventBuffer)))
#endif //__nvoc_event_buffer_h_disabled
NV_STATUS __nvoc_objCreateDynamic_EventBuffer(EventBuffer**, Dynamic*, NvU32, va_list);
NV_STATUS __nvoc_objCreate_EventBuffer(EventBuffer**, Dynamic*, NvU32, struct CALL_CONTEXT * arg_pCallContext, struct RS_RES_ALLOC_PARAMS_INTERNAL * arg_pParams);
#define __objCreate_EventBuffer(ppNewObj, pParent, createFlags, arg_pCallContext, arg_pParams) \
__nvoc_objCreate_EventBuffer((ppNewObj), staticCast((pParent), Dynamic), (createFlags), arg_pCallContext, arg_pParams)
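/*
 * Illustrative usage sketch (not generated code): allocating an EventBuffer once a
 * parent Dynamic*, CALL_CONTEXT and alloc params are available from the resource
 * server; the createFlags value of 0 is an assumption for the example.
 */
static inline NV_STATUS exampleAllocEventBuffer(Dynamic *pParent,
                                                struct CALL_CONTEXT *pCallContext,
                                                struct RS_RES_ALLOC_PARAMS_INTERNAL *pParams,
                                                EventBuffer **ppEventBuffer)
{
    /* Equivalent to __objCreate_EventBuffer() when pParent is already a Dynamic*. */
    return __nvoc_objCreate_EventBuffer(ppEventBuffer, pParent, 0, pCallContext, pParams);
}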
#define eventbuffertBufferCtrlCmdEnableEvent(pEventBuffer, pEnableParams) eventbuffertBufferCtrlCmdEnableEvent_DISPATCH(pEventBuffer, pEnableParams)
#define eventbuffertBufferCtrlCmdUpdateGet(pEventBuffer, pUpdateParams) eventbuffertBufferCtrlCmdUpdateGet_DISPATCH(pEventBuffer, pUpdateParams)
#define eventbuffertBufferCtrlCmdFlush(pEventBuffer) eventbuffertBufferCtrlCmdFlush_DISPATCH(pEventBuffer)
#define eventbuffertBufferCtrlCmdPostTelemetryEvent(pEventBuffer, pPostTelemetryEvent) eventbuffertBufferCtrlCmdPostTelemetryEvent_DISPATCH(pEventBuffer, pPostTelemetryEvent)
#define eventbufferShareCallback(pResource, pInvokingClient, pParentRef, pSharePolicy) eventbufferShareCallback_DISPATCH(pResource, pInvokingClient, pParentRef, pSharePolicy)
#define eventbufferCheckMemInterUnmap(pRmResource, bSubdeviceHandleProvided) eventbufferCheckMemInterUnmap_DISPATCH(pRmResource, bSubdeviceHandleProvided)
#define eventbufferControl(pResource, pCallContext, pParams) eventbufferControl_DISPATCH(pResource, pCallContext, pParams)
#define eventbufferGetMemInterMapParams(pRmResource, pParams) eventbufferGetMemInterMapParams_DISPATCH(pRmResource, pParams)
#define eventbufferGetMemoryMappingDescriptor(pRmResource, ppMemDesc) eventbufferGetMemoryMappingDescriptor_DISPATCH(pRmResource, ppMemDesc)
#define eventbufferGetRefCount(pResource) eventbufferGetRefCount_DISPATCH(pResource)
#define eventbufferControlFilter(pResource, pCallContext, pParams) eventbufferControlFilter_DISPATCH(pResource, pCallContext, pParams)
#define eventbufferAddAdditionalDependants(pClient, pResource, pReference) eventbufferAddAdditionalDependants_DISPATCH(pClient, pResource, pReference)
#define eventbufferUnmap(pResource, pCallContext, pCpuMapping) eventbufferUnmap_DISPATCH(pResource, pCallContext, pCpuMapping)
#define eventbufferControl_Prologue(pResource, pCallContext, pParams) eventbufferControl_Prologue_DISPATCH(pResource, pCallContext, pParams)
#define eventbufferCanCopy(pResource) eventbufferCanCopy_DISPATCH(pResource)
#define eventbufferMapTo(pResource, pParams) eventbufferMapTo_DISPATCH(pResource, pParams)
#define eventbufferPreDestruct(pResource) eventbufferPreDestruct_DISPATCH(pResource)
#define eventbufferUnmapFrom(pResource, pParams) eventbufferUnmapFrom_DISPATCH(pResource, pParams)
#define eventbufferControl_Epilogue(pResource, pCallContext, pParams) eventbufferControl_Epilogue_DISPATCH(pResource, pCallContext, pParams)
#define eventbufferControlLookup(pResource, pParams, ppEntry) eventbufferControlLookup_DISPATCH(pResource, pParams, ppEntry)
#define eventbufferMap(pResource, pCallContext, pParams, pCpuMapping) eventbufferMap_DISPATCH(pResource, pCallContext, pParams, pCpuMapping)
#define eventbufferAccessCallback(pResource, pInvokingClient, pAllocParams, accessRight) eventbufferAccessCallback_DISPATCH(pResource, pInvokingClient, pAllocParams, accessRight)
NV_STATUS eventbuffertBufferCtrlCmdEnableEvent_IMPL(struct EventBuffer *pEventBuffer, NV_EVENT_BUFFER_CTRL_CMD_ENABLE_EVENTS_PARAMS *pEnableParams);
static inline NV_STATUS eventbuffertBufferCtrlCmdEnableEvent_DISPATCH(struct EventBuffer *pEventBuffer, NV_EVENT_BUFFER_CTRL_CMD_ENABLE_EVENTS_PARAMS *pEnableParams) {
return pEventBuffer->__eventbuffertBufferCtrlCmdEnableEvent__(pEventBuffer, pEnableParams);
}
NV_STATUS eventbuffertBufferCtrlCmdUpdateGet_IMPL(struct EventBuffer *pEventBuffer, NV_EVENT_BUFFER_CTRL_CMD_UPDATE_GET_PARAMS *pUpdateParams);
static inline NV_STATUS eventbuffertBufferCtrlCmdUpdateGet_DISPATCH(struct EventBuffer *pEventBuffer, NV_EVENT_BUFFER_CTRL_CMD_UPDATE_GET_PARAMS *pUpdateParams) {
return pEventBuffer->__eventbuffertBufferCtrlCmdUpdateGet__(pEventBuffer, pUpdateParams);
}
NV_STATUS eventbuffertBufferCtrlCmdFlush_IMPL(struct EventBuffer *pEventBuffer);
static inline NV_STATUS eventbuffertBufferCtrlCmdFlush_DISPATCH(struct EventBuffer *pEventBuffer) {
return pEventBuffer->__eventbuffertBufferCtrlCmdFlush__(pEventBuffer);
}
NV_STATUS eventbuffertBufferCtrlCmdPostTelemetryEvent_IMPL(struct EventBuffer *pEventBuffer, NV_EVENT_BUFFER_CTRL_CMD_POST_TELEMETRY_EVENT_PARAMS *pPostTelemetryEvent);
static inline NV_STATUS eventbuffertBufferCtrlCmdPostTelemetryEvent_DISPATCH(struct EventBuffer *pEventBuffer, NV_EVENT_BUFFER_CTRL_CMD_POST_TELEMETRY_EVENT_PARAMS *pPostTelemetryEvent) {
return pEventBuffer->__eventbuffertBufferCtrlCmdPostTelemetryEvent__(pEventBuffer, pPostTelemetryEvent);
}
static inline NvBool eventbufferShareCallback_DISPATCH(struct EventBuffer *pResource, struct RsClient *pInvokingClient, struct RsResourceRef *pParentRef, RS_SHARE_POLICY *pSharePolicy) {
return pResource->__eventbufferShareCallback__(pResource, pInvokingClient, pParentRef, pSharePolicy);
}
static inline NV_STATUS eventbufferCheckMemInterUnmap_DISPATCH(struct EventBuffer *pRmResource, NvBool bSubdeviceHandleProvided) {
return pRmResource->__eventbufferCheckMemInterUnmap__(pRmResource, bSubdeviceHandleProvided);
}
static inline NV_STATUS eventbufferControl_DISPATCH(struct EventBuffer *pResource, struct CALL_CONTEXT *pCallContext, struct RS_RES_CONTROL_PARAMS_INTERNAL *pParams) {
return pResource->__eventbufferControl__(pResource, pCallContext, pParams);
}
static inline NV_STATUS eventbufferGetMemInterMapParams_DISPATCH(struct EventBuffer *pRmResource, RMRES_MEM_INTER_MAP_PARAMS *pParams) {
return pRmResource->__eventbufferGetMemInterMapParams__(pRmResource, pParams);
}
static inline NV_STATUS eventbufferGetMemoryMappingDescriptor_DISPATCH(struct EventBuffer *pRmResource, struct MEMORY_DESCRIPTOR **ppMemDesc) {
return pRmResource->__eventbufferGetMemoryMappingDescriptor__(pRmResource, ppMemDesc);
}
static inline NvU32 eventbufferGetRefCount_DISPATCH(struct EventBuffer *pResource) {
return pResource->__eventbufferGetRefCount__(pResource);
}
static inline NV_STATUS eventbufferControlFilter_DISPATCH(struct EventBuffer *pResource, struct CALL_CONTEXT *pCallContext, struct RS_RES_CONTROL_PARAMS_INTERNAL *pParams) {
return pResource->__eventbufferControlFilter__(pResource, pCallContext, pParams);
}
static inline void eventbufferAddAdditionalDependants_DISPATCH(struct RsClient *pClient, struct EventBuffer *pResource, RsResourceRef *pReference) {
pResource->__eventbufferAddAdditionalDependants__(pClient, pResource, pReference);
}
static inline NV_STATUS eventbufferUnmap_DISPATCH(struct EventBuffer *pResource, struct CALL_CONTEXT *pCallContext, RsCpuMapping *pCpuMapping) {
return pResource->__eventbufferUnmap__(pResource, pCallContext, pCpuMapping);
}
static inline NV_STATUS eventbufferControl_Prologue_DISPATCH(struct EventBuffer *pResource, struct CALL_CONTEXT *pCallContext, struct RS_RES_CONTROL_PARAMS_INTERNAL *pParams) {
return pResource->__eventbufferControl_Prologue__(pResource, pCallContext, pParams);
}
static inline NvBool eventbufferCanCopy_DISPATCH(struct EventBuffer *pResource) {
return pResource->__eventbufferCanCopy__(pResource);
}
static inline NV_STATUS eventbufferMapTo_DISPATCH(struct EventBuffer *pResource, RS_RES_MAP_TO_PARAMS *pParams) {
return pResource->__eventbufferMapTo__(pResource, pParams);
}
static inline void eventbufferPreDestruct_DISPATCH(struct EventBuffer *pResource) {
pResource->__eventbufferPreDestruct__(pResource);
}
static inline NV_STATUS eventbufferUnmapFrom_DISPATCH(struct EventBuffer *pResource, RS_RES_UNMAP_FROM_PARAMS *pParams) {
return pResource->__eventbufferUnmapFrom__(pResource, pParams);
}
static inline void eventbufferControl_Epilogue_DISPATCH(struct EventBuffer *pResource, struct CALL_CONTEXT *pCallContext, struct RS_RES_CONTROL_PARAMS_INTERNAL *pParams) {
pResource->__eventbufferControl_Epilogue__(pResource, pCallContext, pParams);
}
static inline NV_STATUS eventbufferControlLookup_DISPATCH(struct EventBuffer *pResource, struct RS_RES_CONTROL_PARAMS_INTERNAL *pParams, const struct NVOC_EXPORTED_METHOD_DEF **ppEntry) {
return pResource->__eventbufferControlLookup__(pResource, pParams, ppEntry);
}
static inline NV_STATUS eventbufferMap_DISPATCH(struct EventBuffer *pResource, struct CALL_CONTEXT *pCallContext, RS_CPU_MAP_PARAMS *pParams, RsCpuMapping *pCpuMapping) {
return pResource->__eventbufferMap__(pResource, pCallContext, pParams, pCpuMapping);
}
static inline NvBool eventbufferAccessCallback_DISPATCH(struct EventBuffer *pResource, struct RsClient *pInvokingClient, void *pAllocParams, RsAccessRight accessRight) {
return pResource->__eventbufferAccessCallback__(pResource, pInvokingClient, pAllocParams, accessRight);
}
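/*
 * Illustrative call-chain sketch (not generated code): every virtual above is reached
 * through the per-object function table filled in by __nvoc_init_funcTable_EventBuffer_1(),
 * e.g. macro -> *_DISPATCH -> pEventBuffer->__eventbuffertBufferCtrlCmdFlush__ -> *_IMPL.
 */
static inline NV_STATUS exampleFlushEventBuffer(struct EventBuffer *pEventBuffer)
{
    return eventbuffertBufferCtrlCmdFlush(pEventBuffer);
}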
NV_STATUS eventbufferConstruct_IMPL(struct EventBuffer *arg_pEventBuffer, struct CALL_CONTEXT *arg_pCallContext, struct RS_RES_ALLOC_PARAMS_INTERNAL *arg_pParams);
#define __nvoc_eventbufferConstruct(arg_pEventBuffer, arg_pCallContext, arg_pParams) eventbufferConstruct_IMPL(arg_pEventBuffer, arg_pCallContext, arg_pParams)
void eventbufferDestruct_IMPL(struct EventBuffer *pEventBuffer);
#define __nvoc_eventbufferDestruct(pEventBuffer) eventbufferDestruct_IMPL(pEventBuffer)
#undef PRIVATE_FIELD
NV_STATUS eventBufferAdd(struct EventBuffer *pEventBuffer, void* pEventData, NvU32 recordType, NvBool* bNotify, NvP64 *pHandle);
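/*
 * Illustrative usage sketch (not generated code): assumptions are that pRecord points
 * at a record of the layout registered for recordType and that *bNotify is an output
 * reporting whether listeners should be signalled (inferred from bNotifyPending and
 * pListeners above).
 */
static inline NV_STATUS exampleAddRecord(struct EventBuffer *pEventBuffer, void *pRecord, NvU32 recordType)
{
    NvBool bNotify = NV_FALSE;
    NvP64 notificationHandle = 0;
    NV_STATUS status = eventBufferAdd(pEventBuffer, pRecord, recordType, &bNotify, &notificationHandle);
    /* A caller would signal the registered listeners here when bNotify is set. */
    return status;
}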
#endif
#ifdef __cplusplus
} // extern "C"
#endif
#endif // _G_EVENT_BUFFER_NVOC_H_

@@ -0,0 +1,692 @@
#define NVOC_EVENT_H_PRIVATE_ACCESS_ALLOWED
#include "nvoc/runtime.h"
#include "nvoc/rtti.h"
#include "nvtypes.h"
#include "nvport/nvport.h"
#include "nvport/inline/util_valist.h"
#include "utils/nvassert.h"
#include "g_event_nvoc.h"
#ifdef DEBUG
char __nvoc_class_id_uniqueness_check_0xd5f150 = 1;
#endif
extern const struct NVOC_CLASS_DEF __nvoc_class_def_NotifShare;
extern const struct NVOC_CLASS_DEF __nvoc_class_def_Object;
extern const struct NVOC_CLASS_DEF __nvoc_class_def_RsShared;
void __nvoc_init_NotifShare(NotifShare*);
void __nvoc_init_funcTable_NotifShare(NotifShare*);
NV_STATUS __nvoc_ctor_NotifShare(NotifShare*);
void __nvoc_init_dataField_NotifShare(NotifShare*);
void __nvoc_dtor_NotifShare(NotifShare*);
extern const struct NVOC_EXPORT_INFO __nvoc_export_info_NotifShare;
static const struct NVOC_RTTI __nvoc_rtti_NotifShare_NotifShare = {
/*pClassDef=*/ &__nvoc_class_def_NotifShare,
/*dtor=*/ (NVOC_DYNAMIC_DTOR) &__nvoc_dtor_NotifShare,
/*offset=*/ 0,
};
static const struct NVOC_RTTI __nvoc_rtti_NotifShare_Object = {
/*pClassDef=*/ &__nvoc_class_def_Object,
/*dtor=*/ &__nvoc_destructFromBase,
/*offset=*/ NV_OFFSETOF(NotifShare, __nvoc_base_RsShared.__nvoc_base_Object),
};
static const struct NVOC_RTTI __nvoc_rtti_NotifShare_RsShared = {
/*pClassDef=*/ &__nvoc_class_def_RsShared,
/*dtor=*/ &__nvoc_destructFromBase,
/*offset=*/ NV_OFFSETOF(NotifShare, __nvoc_base_RsShared),
};
static const struct NVOC_CASTINFO __nvoc_castinfo_NotifShare = {
/*numRelatives=*/ 3,
/*relatives=*/ {
&__nvoc_rtti_NotifShare_NotifShare,
&__nvoc_rtti_NotifShare_RsShared,
&__nvoc_rtti_NotifShare_Object,
},
};
const struct NVOC_CLASS_DEF __nvoc_class_def_NotifShare =
{
/*classInfo=*/ {
/*size=*/ sizeof(NotifShare),
/*classId=*/ classId(NotifShare),
/*providerId=*/ &__nvoc_rtti_provider,
#if NV_PRINTF_STRINGS_ALLOWED
/*name=*/ "NotifShare",
#endif
},
/*objCreatefn=*/ (NVOC_DYNAMIC_OBJ_CREATE) &__nvoc_objCreateDynamic_NotifShare,
/*pCastInfo=*/ &__nvoc_castinfo_NotifShare,
/*pExportInfo=*/ &__nvoc_export_info_NotifShare
};
const struct NVOC_EXPORT_INFO __nvoc_export_info_NotifShare =
{
/*numEntries=*/ 0,
/*pExportEntries=*/ 0
};
void __nvoc_dtor_RsShared(RsShared*);
void __nvoc_dtor_NotifShare(NotifShare *pThis) {
__nvoc_shrnotifDestruct(pThis);
__nvoc_dtor_RsShared(&pThis->__nvoc_base_RsShared);
PORT_UNREFERENCED_VARIABLE(pThis);
}
void __nvoc_init_dataField_NotifShare(NotifShare *pThis) {
PORT_UNREFERENCED_VARIABLE(pThis);
}
NV_STATUS __nvoc_ctor_RsShared(RsShared* );
NV_STATUS __nvoc_ctor_NotifShare(NotifShare *pThis) {
NV_STATUS status = NV_OK;
status = __nvoc_ctor_RsShared(&pThis->__nvoc_base_RsShared);
if (status != NV_OK) goto __nvoc_ctor_NotifShare_fail_RsShared;
__nvoc_init_dataField_NotifShare(pThis);
status = __nvoc_shrnotifConstruct(pThis);
if (status != NV_OK) goto __nvoc_ctor_NotifShare_fail__init;
goto __nvoc_ctor_NotifShare_exit; // Success
__nvoc_ctor_NotifShare_fail__init:
__nvoc_dtor_RsShared(&pThis->__nvoc_base_RsShared);
__nvoc_ctor_NotifShare_fail_RsShared:
__nvoc_ctor_NotifShare_exit:
return status;
}
static void __nvoc_init_funcTable_NotifShare_1(NotifShare *pThis) {
PORT_UNREFERENCED_VARIABLE(pThis);
}
void __nvoc_init_funcTable_NotifShare(NotifShare *pThis) {
__nvoc_init_funcTable_NotifShare_1(pThis);
}
void __nvoc_init_RsShared(RsShared*);
void __nvoc_init_NotifShare(NotifShare *pThis) {
pThis->__nvoc_pbase_NotifShare = pThis;
pThis->__nvoc_pbase_Object = &pThis->__nvoc_base_RsShared.__nvoc_base_Object;
pThis->__nvoc_pbase_RsShared = &pThis->__nvoc_base_RsShared;
__nvoc_init_RsShared(&pThis->__nvoc_base_RsShared);
__nvoc_init_funcTable_NotifShare(pThis);
}
NV_STATUS __nvoc_objCreate_NotifShare(NotifShare **ppThis, Dynamic *pParent, NvU32 createFlags) {
NV_STATUS status;
Object *pParentObj;
NotifShare *pThis;
pThis = portMemAllocNonPaged(sizeof(NotifShare));
if (pThis == NULL) return NV_ERR_NO_MEMORY;
portMemSet(pThis, 0, sizeof(NotifShare));
__nvoc_initRtti(staticCast(pThis, Dynamic), &__nvoc_class_def_NotifShare);
if (pParent != NULL && !(createFlags & NVOC_OBJ_CREATE_FLAGS_PARENT_HALSPEC_ONLY))
{
pParentObj = dynamicCast(pParent, Object);
objAddChild(pParentObj, &pThis->__nvoc_base_RsShared.__nvoc_base_Object);
}
else
{
pThis->__nvoc_base_RsShared.__nvoc_base_Object.pParent = NULL;
}
__nvoc_init_NotifShare(pThis);
status = __nvoc_ctor_NotifShare(pThis);
if (status != NV_OK) goto __nvoc_objCreate_NotifShare_cleanup;
*ppThis = pThis;
return NV_OK;
__nvoc_objCreate_NotifShare_cleanup:
// do not call destructors here since the constructor already called them
portMemFree(pThis);
return status;
}
NV_STATUS __nvoc_objCreateDynamic_NotifShare(NotifShare **ppThis, Dynamic *pParent, NvU32 createFlags, va_list args) {
NV_STATUS status;
status = __nvoc_objCreate_NotifShare(ppThis, pParent, createFlags);
return status;
}
#ifdef DEBUG
char __nvoc_class_id_uniqueness_check_0xa4ecfc = 1;
#endif
extern const struct NVOC_CLASS_DEF __nvoc_class_def_Event;
extern const struct NVOC_CLASS_DEF __nvoc_class_def_Object;
extern const struct NVOC_CLASS_DEF __nvoc_class_def_RsResource;
extern const struct NVOC_CLASS_DEF __nvoc_class_def_RmResourceCommon;
extern const struct NVOC_CLASS_DEF __nvoc_class_def_RmResource;
void __nvoc_init_Event(Event*);
void __nvoc_init_funcTable_Event(Event*);
NV_STATUS __nvoc_ctor_Event(Event*, struct CALL_CONTEXT * arg_pCallContext, struct RS_RES_ALLOC_PARAMS_INTERNAL * arg_pParams);
void __nvoc_init_dataField_Event(Event*);
void __nvoc_dtor_Event(Event*);
extern const struct NVOC_EXPORT_INFO __nvoc_export_info_Event;
static const struct NVOC_RTTI __nvoc_rtti_Event_Event = {
/*pClassDef=*/ &__nvoc_class_def_Event,
/*dtor=*/ (NVOC_DYNAMIC_DTOR) &__nvoc_dtor_Event,
/*offset=*/ 0,
};
static const struct NVOC_RTTI __nvoc_rtti_Event_Object = {
/*pClassDef=*/ &__nvoc_class_def_Object,
/*dtor=*/ &__nvoc_destructFromBase,
/*offset=*/ NV_OFFSETOF(Event, __nvoc_base_RmResource.__nvoc_base_RsResource.__nvoc_base_Object),
};
static const struct NVOC_RTTI __nvoc_rtti_Event_RsResource = {
/*pClassDef=*/ &__nvoc_class_def_RsResource,
/*dtor=*/ &__nvoc_destructFromBase,
/*offset=*/ NV_OFFSETOF(Event, __nvoc_base_RmResource.__nvoc_base_RsResource),
};
static const struct NVOC_RTTI __nvoc_rtti_Event_RmResourceCommon = {
/*pClassDef=*/ &__nvoc_class_def_RmResourceCommon,
/*dtor=*/ &__nvoc_destructFromBase,
/*offset=*/ NV_OFFSETOF(Event, __nvoc_base_RmResource.__nvoc_base_RmResourceCommon),
};
static const struct NVOC_RTTI __nvoc_rtti_Event_RmResource = {
/*pClassDef=*/ &__nvoc_class_def_RmResource,
/*dtor=*/ &__nvoc_destructFromBase,
/*offset=*/ NV_OFFSETOF(Event, __nvoc_base_RmResource),
};
static const struct NVOC_CASTINFO __nvoc_castinfo_Event = {
/*numRelatives=*/ 5,
/*relatives=*/ {
&__nvoc_rtti_Event_Event,
&__nvoc_rtti_Event_RmResource,
&__nvoc_rtti_Event_RmResourceCommon,
&__nvoc_rtti_Event_RsResource,
&__nvoc_rtti_Event_Object,
},
};
const struct NVOC_CLASS_DEF __nvoc_class_def_Event =
{
/*classInfo=*/ {
/*size=*/ sizeof(Event),
/*classId=*/ classId(Event),
/*providerId=*/ &__nvoc_rtti_provider,
#if NV_PRINTF_STRINGS_ALLOWED
/*name=*/ "Event",
#endif
},
/*objCreatefn=*/ (NVOC_DYNAMIC_OBJ_CREATE) &__nvoc_objCreateDynamic_Event,
/*pCastInfo=*/ &__nvoc_castinfo_Event,
/*pExportInfo=*/ &__nvoc_export_info_Event
};
static NvBool __nvoc_thunk_RmResource_eventShareCallback(struct Event *pResource, struct RsClient *pInvokingClient, struct RsResourceRef *pParentRef, RS_SHARE_POLICY *pSharePolicy) {
return rmresShareCallback((struct RmResource *)(((unsigned char *)pResource) + __nvoc_rtti_Event_RmResource.offset), pInvokingClient, pParentRef, pSharePolicy);
}
static NV_STATUS __nvoc_thunk_RmResource_eventCheckMemInterUnmap(struct Event *pRmResource, NvBool bSubdeviceHandleProvided) {
return rmresCheckMemInterUnmap((struct RmResource *)(((unsigned char *)pRmResource) + __nvoc_rtti_Event_RmResource.offset), bSubdeviceHandleProvided);
}
static NV_STATUS __nvoc_thunk_RsResource_eventControl(struct Event *pResource, struct CALL_CONTEXT *pCallContext, struct RS_RES_CONTROL_PARAMS_INTERNAL *pParams) {
return resControl((struct RsResource *)(((unsigned char *)pResource) + __nvoc_rtti_Event_RsResource.offset), pCallContext, pParams);
}
static NV_STATUS __nvoc_thunk_RmResource_eventGetMemInterMapParams(struct Event *pRmResource, RMRES_MEM_INTER_MAP_PARAMS *pParams) {
return rmresGetMemInterMapParams((struct RmResource *)(((unsigned char *)pRmResource) + __nvoc_rtti_Event_RmResource.offset), pParams);
}
static NV_STATUS __nvoc_thunk_RmResource_eventGetMemoryMappingDescriptor(struct Event *pRmResource, struct MEMORY_DESCRIPTOR **ppMemDesc) {
return rmresGetMemoryMappingDescriptor((struct RmResource *)(((unsigned char *)pRmResource) + __nvoc_rtti_Event_RmResource.offset), ppMemDesc);
}
static NvU32 __nvoc_thunk_RsResource_eventGetRefCount(struct Event *pResource) {
return resGetRefCount((struct RsResource *)(((unsigned char *)pResource) + __nvoc_rtti_Event_RsResource.offset));
}
static NV_STATUS __nvoc_thunk_RsResource_eventControlFilter(struct Event *pResource, struct CALL_CONTEXT *pCallContext, struct RS_RES_CONTROL_PARAMS_INTERNAL *pParams) {
return resControlFilter((struct RsResource *)(((unsigned char *)pResource) + __nvoc_rtti_Event_RsResource.offset), pCallContext, pParams);
}
static void __nvoc_thunk_RsResource_eventAddAdditionalDependants(struct RsClient *pClient, struct Event *pResource, RsResourceRef *pReference) {
resAddAdditionalDependants(pClient, (struct RsResource *)(((unsigned char *)pResource) + __nvoc_rtti_Event_RsResource.offset), pReference);
}
static NV_STATUS __nvoc_thunk_RsResource_eventUnmap(struct Event *pResource, struct CALL_CONTEXT *pCallContext, RsCpuMapping *pCpuMapping) {
return resUnmap((struct RsResource *)(((unsigned char *)pResource) + __nvoc_rtti_Event_RsResource.offset), pCallContext, pCpuMapping);
}
static NV_STATUS __nvoc_thunk_RmResource_eventControl_Prologue(struct Event *pResource, struct CALL_CONTEXT *pCallContext, struct RS_RES_CONTROL_PARAMS_INTERNAL *pParams) {
return rmresControl_Prologue((struct RmResource *)(((unsigned char *)pResource) + __nvoc_rtti_Event_RmResource.offset), pCallContext, pParams);
}
static NvBool __nvoc_thunk_RsResource_eventCanCopy(struct Event *pResource) {
return resCanCopy((struct RsResource *)(((unsigned char *)pResource) + __nvoc_rtti_Event_RsResource.offset));
}
static NV_STATUS __nvoc_thunk_RsResource_eventMapTo(struct Event *pResource, RS_RES_MAP_TO_PARAMS *pParams) {
return resMapTo((struct RsResource *)(((unsigned char *)pResource) + __nvoc_rtti_Event_RsResource.offset), pParams);
}
static void __nvoc_thunk_RsResource_eventPreDestruct(struct Event *pResource) {
resPreDestruct((struct RsResource *)(((unsigned char *)pResource) + __nvoc_rtti_Event_RsResource.offset));
}
static NV_STATUS __nvoc_thunk_RsResource_eventUnmapFrom(struct Event *pResource, RS_RES_UNMAP_FROM_PARAMS *pParams) {
return resUnmapFrom((struct RsResource *)(((unsigned char *)pResource) + __nvoc_rtti_Event_RsResource.offset), pParams);
}
static void __nvoc_thunk_RmResource_eventControl_Epilogue(struct Event *pResource, struct CALL_CONTEXT *pCallContext, struct RS_RES_CONTROL_PARAMS_INTERNAL *pParams) {
rmresControl_Epilogue((struct RmResource *)(((unsigned char *)pResource) + __nvoc_rtti_Event_RmResource.offset), pCallContext, pParams);
}
static NV_STATUS __nvoc_thunk_RsResource_eventControlLookup(struct Event *pResource, struct RS_RES_CONTROL_PARAMS_INTERNAL *pParams, const struct NVOC_EXPORTED_METHOD_DEF **ppEntry) {
return resControlLookup((struct RsResource *)(((unsigned char *)pResource) + __nvoc_rtti_Event_RsResource.offset), pParams, ppEntry);
}
static NV_STATUS __nvoc_thunk_RsResource_eventMap(struct Event *pResource, struct CALL_CONTEXT *pCallContext, RS_CPU_MAP_PARAMS *pParams, RsCpuMapping *pCpuMapping) {
return resMap((struct RsResource *)(((unsigned char *)pResource) + __nvoc_rtti_Event_RsResource.offset), pCallContext, pParams, pCpuMapping);
}
static NvBool __nvoc_thunk_RmResource_eventAccessCallback(struct Event *pResource, struct RsClient *pInvokingClient, void *pAllocParams, RsAccessRight accessRight) {
return rmresAccessCallback((struct RmResource *)(((unsigned char *)pResource) + __nvoc_rtti_Event_RmResource.offset), pInvokingClient, pAllocParams, accessRight);
}
const struct NVOC_EXPORT_INFO __nvoc_export_info_Event =
{
/*numEntries=*/ 0,
/*pExportEntries=*/ 0
};
void __nvoc_dtor_RmResource(RmResource*);
void __nvoc_dtor_Event(Event *pThis) {
__nvoc_eventDestruct(pThis);
__nvoc_dtor_RmResource(&pThis->__nvoc_base_RmResource);
PORT_UNREFERENCED_VARIABLE(pThis);
}
void __nvoc_init_dataField_Event(Event *pThis) {
PORT_UNREFERENCED_VARIABLE(pThis);
}
NV_STATUS __nvoc_ctor_RmResource(RmResource* , struct CALL_CONTEXT *, struct RS_RES_ALLOC_PARAMS_INTERNAL *);
NV_STATUS __nvoc_ctor_Event(Event *pThis, struct CALL_CONTEXT * arg_pCallContext, struct RS_RES_ALLOC_PARAMS_INTERNAL * arg_pParams) {
NV_STATUS status = NV_OK;
status = __nvoc_ctor_RmResource(&pThis->__nvoc_base_RmResource, arg_pCallContext, arg_pParams);
if (status != NV_OK) goto __nvoc_ctor_Event_fail_RmResource;
__nvoc_init_dataField_Event(pThis);
status = __nvoc_eventConstruct(pThis, arg_pCallContext, arg_pParams);
if (status != NV_OK) goto __nvoc_ctor_Event_fail__init;
goto __nvoc_ctor_Event_exit; // Success
__nvoc_ctor_Event_fail__init:
__nvoc_dtor_RmResource(&pThis->__nvoc_base_RmResource);
__nvoc_ctor_Event_fail_RmResource:
__nvoc_ctor_Event_exit:
return status;
}
static void __nvoc_init_funcTable_Event_1(Event *pThis) {
PORT_UNREFERENCED_VARIABLE(pThis);
pThis->__eventShareCallback__ = &__nvoc_thunk_RmResource_eventShareCallback;
pThis->__eventCheckMemInterUnmap__ = &__nvoc_thunk_RmResource_eventCheckMemInterUnmap;
pThis->__eventControl__ = &__nvoc_thunk_RsResource_eventControl;
pThis->__eventGetMemInterMapParams__ = &__nvoc_thunk_RmResource_eventGetMemInterMapParams;
pThis->__eventGetMemoryMappingDescriptor__ = &__nvoc_thunk_RmResource_eventGetMemoryMappingDescriptor;
pThis->__eventGetRefCount__ = &__nvoc_thunk_RsResource_eventGetRefCount;
pThis->__eventControlFilter__ = &__nvoc_thunk_RsResource_eventControlFilter;
pThis->__eventAddAdditionalDependants__ = &__nvoc_thunk_RsResource_eventAddAdditionalDependants;
pThis->__eventUnmap__ = &__nvoc_thunk_RsResource_eventUnmap;
pThis->__eventControl_Prologue__ = &__nvoc_thunk_RmResource_eventControl_Prologue;
pThis->__eventCanCopy__ = &__nvoc_thunk_RsResource_eventCanCopy;
pThis->__eventMapTo__ = &__nvoc_thunk_RsResource_eventMapTo;
pThis->__eventPreDestruct__ = &__nvoc_thunk_RsResource_eventPreDestruct;
pThis->__eventUnmapFrom__ = &__nvoc_thunk_RsResource_eventUnmapFrom;
pThis->__eventControl_Epilogue__ = &__nvoc_thunk_RmResource_eventControl_Epilogue;
pThis->__eventControlLookup__ = &__nvoc_thunk_RsResource_eventControlLookup;
pThis->__eventMap__ = &__nvoc_thunk_RsResource_eventMap;
pThis->__eventAccessCallback__ = &__nvoc_thunk_RmResource_eventAccessCallback;
}
void __nvoc_init_funcTable_Event(Event *pThis) {
__nvoc_init_funcTable_Event_1(pThis);
}
void __nvoc_init_RmResource(RmResource*);
void __nvoc_init_Event(Event *pThis) {
pThis->__nvoc_pbase_Event = pThis;
pThis->__nvoc_pbase_Object = &pThis->__nvoc_base_RmResource.__nvoc_base_RsResource.__nvoc_base_Object;
pThis->__nvoc_pbase_RsResource = &pThis->__nvoc_base_RmResource.__nvoc_base_RsResource;
pThis->__nvoc_pbase_RmResourceCommon = &pThis->__nvoc_base_RmResource.__nvoc_base_RmResourceCommon;
pThis->__nvoc_pbase_RmResource = &pThis->__nvoc_base_RmResource;
__nvoc_init_RmResource(&pThis->__nvoc_base_RmResource);
__nvoc_init_funcTable_Event(pThis);
}
NV_STATUS __nvoc_objCreate_Event(Event **ppThis, Dynamic *pParent, NvU32 createFlags, struct CALL_CONTEXT * arg_pCallContext, struct RS_RES_ALLOC_PARAMS_INTERNAL * arg_pParams) {
NV_STATUS status;
Object *pParentObj;
Event *pThis;
pThis = portMemAllocNonPaged(sizeof(Event));
if (pThis == NULL) return NV_ERR_NO_MEMORY;
portMemSet(pThis, 0, sizeof(Event));
__nvoc_initRtti(staticCast(pThis, Dynamic), &__nvoc_class_def_Event);
if (pParent != NULL && !(createFlags & NVOC_OBJ_CREATE_FLAGS_PARENT_HALSPEC_ONLY))
{
pParentObj = dynamicCast(pParent, Object);
objAddChild(pParentObj, &pThis->__nvoc_base_RmResource.__nvoc_base_RsResource.__nvoc_base_Object);
}
else
{
pThis->__nvoc_base_RmResource.__nvoc_base_RsResource.__nvoc_base_Object.pParent = NULL;
}
__nvoc_init_Event(pThis);
status = __nvoc_ctor_Event(pThis, arg_pCallContext, arg_pParams);
if (status != NV_OK) goto __nvoc_objCreate_Event_cleanup;
*ppThis = pThis;
return NV_OK;
__nvoc_objCreate_Event_cleanup:
// do not call destructors here since the constructor already called them
portMemFree(pThis);
return status;
}
NV_STATUS __nvoc_objCreateDynamic_Event(Event **ppThis, Dynamic *pParent, NvU32 createFlags, va_list args) {
NV_STATUS status;
struct CALL_CONTEXT * arg_pCallContext = va_arg(args, struct CALL_CONTEXT *);
struct RS_RES_ALLOC_PARAMS_INTERNAL * arg_pParams = va_arg(args, struct RS_RES_ALLOC_PARAMS_INTERNAL *);
status = __nvoc_objCreate_Event(ppThis, pParent, createFlags, arg_pCallContext, arg_pParams);
return status;
}
#ifdef DEBUG
char __nvoc_class_id_uniqueness_check_0xf8f965 = 1;
#endif
extern const struct NVOC_CLASS_DEF __nvoc_class_def_INotifier;
void __nvoc_init_INotifier(INotifier*);
void __nvoc_init_funcTable_INotifier(INotifier*);
NV_STATUS __nvoc_ctor_INotifier(INotifier*, struct CALL_CONTEXT * arg_pCallContext);
void __nvoc_init_dataField_INotifier(INotifier*);
void __nvoc_dtor_INotifier(INotifier*);
extern const struct NVOC_EXPORT_INFO __nvoc_export_info_INotifier;
static const struct NVOC_RTTI __nvoc_rtti_INotifier_INotifier = {
/*pClassDef=*/ &__nvoc_class_def_INotifier,
/*dtor=*/ (NVOC_DYNAMIC_DTOR) &__nvoc_dtor_INotifier,
/*offset=*/ 0,
};
static const struct NVOC_CASTINFO __nvoc_castinfo_INotifier = {
/*numRelatives=*/ 1,
/*relatives=*/ {
&__nvoc_rtti_INotifier_INotifier,
},
};
// Not instantiable because it's not derived from class "Object"
// Not instantiable because it's an abstract class with the following pure virtual functions:
// inotifyGetNotificationListPtr
// inotifySetNotificationShare
// inotifyGetNotificationShare
// inotifyUnregisterEvent
// inotifyGetOrAllocNotifShare
const struct NVOC_CLASS_DEF __nvoc_class_def_INotifier =
{
/*classInfo=*/ {
/*size=*/ sizeof(INotifier),
/*classId=*/ classId(INotifier),
/*providerId=*/ &__nvoc_rtti_provider,
#if NV_PRINTF_STRINGS_ALLOWED
/*name=*/ "INotifier",
#endif
},
/*objCreatefn=*/ (NVOC_DYNAMIC_OBJ_CREATE) NULL,
/*pCastInfo=*/ &__nvoc_castinfo_INotifier,
/*pExportInfo=*/ &__nvoc_export_info_INotifier
};
const struct NVOC_EXPORT_INFO __nvoc_export_info_INotifier =
{
/*numEntries=*/ 0,
/*pExportEntries=*/ 0
};
void __nvoc_dtor_INotifier(INotifier *pThis) {
__nvoc_inotifyDestruct(pThis);
PORT_UNREFERENCED_VARIABLE(pThis);
}
void __nvoc_init_dataField_INotifier(INotifier *pThis) {
PORT_UNREFERENCED_VARIABLE(pThis);
}
NV_STATUS __nvoc_ctor_INotifier(INotifier *pThis, struct CALL_CONTEXT * arg_pCallContext) {
NV_STATUS status = NV_OK;
__nvoc_init_dataField_INotifier(pThis);
status = __nvoc_inotifyConstruct(pThis, arg_pCallContext);
if (status != NV_OK) goto __nvoc_ctor_INotifier_fail__init;
goto __nvoc_ctor_INotifier_exit; // Success
__nvoc_ctor_INotifier_fail__init:
__nvoc_ctor_INotifier_exit:
return status;
}
static void __nvoc_init_funcTable_INotifier_1(INotifier *pThis) {
PORT_UNREFERENCED_VARIABLE(pThis);
pThis->__inotifyGetNotificationListPtr__ = NULL;
pThis->__inotifySetNotificationShare__ = NULL;
pThis->__inotifyGetNotificationShare__ = NULL;
pThis->__inotifyUnregisterEvent__ = NULL;
pThis->__inotifyGetOrAllocNotifShare__ = NULL;
}
void __nvoc_init_funcTable_INotifier(INotifier *pThis) {
__nvoc_init_funcTable_INotifier_1(pThis);
}
void __nvoc_init_INotifier(INotifier *pThis) {
pThis->__nvoc_pbase_INotifier = pThis;
__nvoc_init_funcTable_INotifier(pThis);
}
#ifdef DEBUG
char __nvoc_class_id_uniqueness_check_0xa8683b = 1;
#endif
extern const struct NVOC_CLASS_DEF __nvoc_class_def_Notifier;
extern const struct NVOC_CLASS_DEF __nvoc_class_def_INotifier;
void __nvoc_init_Notifier(Notifier*);
void __nvoc_init_funcTable_Notifier(Notifier*);
NV_STATUS __nvoc_ctor_Notifier(Notifier*, struct CALL_CONTEXT * arg_pCallContext);
void __nvoc_init_dataField_Notifier(Notifier*);
void __nvoc_dtor_Notifier(Notifier*);
extern const struct NVOC_EXPORT_INFO __nvoc_export_info_Notifier;
static const struct NVOC_RTTI __nvoc_rtti_Notifier_Notifier = {
/*pClassDef=*/ &__nvoc_class_def_Notifier,
/*dtor=*/ (NVOC_DYNAMIC_DTOR) &__nvoc_dtor_Notifier,
/*offset=*/ 0,
};
static const struct NVOC_RTTI __nvoc_rtti_Notifier_INotifier = {
/*pClassDef=*/ &__nvoc_class_def_INotifier,
/*dtor=*/ &__nvoc_destructFromBase,
/*offset=*/ NV_OFFSETOF(Notifier, __nvoc_base_INotifier),
};
static const struct NVOC_CASTINFO __nvoc_castinfo_Notifier = {
/*numRelatives=*/ 2,
/*relatives=*/ {
&__nvoc_rtti_Notifier_Notifier,
&__nvoc_rtti_Notifier_INotifier,
},
};
// Not instantiable because it's not derived from class "Object"
const struct NVOC_CLASS_DEF __nvoc_class_def_Notifier =
{
/*classInfo=*/ {
/*size=*/ sizeof(Notifier),
/*classId=*/ classId(Notifier),
/*providerId=*/ &__nvoc_rtti_provider,
#if NV_PRINTF_STRINGS_ALLOWED
/*name=*/ "Notifier",
#endif
},
/*objCreatefn=*/ (NVOC_DYNAMIC_OBJ_CREATE) NULL,
/*pCastInfo=*/ &__nvoc_castinfo_Notifier,
/*pExportInfo=*/ &__nvoc_export_info_Notifier
};
static PEVENTNOTIFICATION *__nvoc_thunk_Notifier_inotifyGetNotificationListPtr(struct INotifier *pNotifier) {
return notifyGetNotificationListPtr((struct Notifier *)(((unsigned char *)pNotifier) - __nvoc_rtti_Notifier_INotifier.offset));
}
static struct NotifShare *__nvoc_thunk_Notifier_inotifyGetNotificationShare(struct INotifier *pNotifier) {
return notifyGetNotificationShare((struct Notifier *)(((unsigned char *)pNotifier) - __nvoc_rtti_Notifier_INotifier.offset));
}
static void __nvoc_thunk_Notifier_inotifySetNotificationShare(struct INotifier *pNotifier, struct NotifShare *pNotifShare) {
notifySetNotificationShare((struct Notifier *)(((unsigned char *)pNotifier) - __nvoc_rtti_Notifier_INotifier.offset), pNotifShare);
}
static NV_STATUS __nvoc_thunk_Notifier_inotifyUnregisterEvent(struct INotifier *pNotifier, NvHandle hNotifierClient, NvHandle hNotifierResource, NvHandle hEventClient, NvHandle hEvent) {
return notifyUnregisterEvent((struct Notifier *)(((unsigned char *)pNotifier) - __nvoc_rtti_Notifier_INotifier.offset), hNotifierClient, hNotifierResource, hEventClient, hEvent);
}
static NV_STATUS __nvoc_thunk_Notifier_inotifyGetOrAllocNotifShare(struct INotifier *pNotifier, NvHandle hNotifierClient, NvHandle hNotifierResource, struct NotifShare **ppNotifShare) {
return notifyGetOrAllocNotifShare((struct Notifier *)(((unsigned char *)pNotifier) - __nvoc_rtti_Notifier_INotifier.offset), hNotifierClient, hNotifierResource, ppNotifShare);
}
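/*
 * Illustrative sketch (not part of the generated file): the interface thunks above
 * recover the containing Notifier from an INotifier pointer by subtracting the member
 * offset, i.e. a container-of conversion.
 */
static inline struct Notifier *exampleNotifierFromINotifier(struct INotifier *pINotifier)
{
    return (struct Notifier *)(((unsigned char *)pINotifier) - __nvoc_rtti_Notifier_INotifier.offset);
}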
const struct NVOC_EXPORT_INFO __nvoc_export_info_Notifier =
{
/*numEntries=*/ 0,
/*pExportEntries=*/ 0
};
void __nvoc_dtor_INotifier(INotifier*);
void __nvoc_dtor_Notifier(Notifier *pThis) {
__nvoc_notifyDestruct(pThis);
__nvoc_dtor_INotifier(&pThis->__nvoc_base_INotifier);
PORT_UNREFERENCED_VARIABLE(pThis);
}
void __nvoc_init_dataField_Notifier(Notifier *pThis) {
PORT_UNREFERENCED_VARIABLE(pThis);
}
NV_STATUS __nvoc_ctor_INotifier(INotifier* , struct CALL_CONTEXT *);
NV_STATUS __nvoc_ctor_Notifier(Notifier *pThis, struct CALL_CONTEXT * arg_pCallContext) {
NV_STATUS status = NV_OK;
status = __nvoc_ctor_INotifier(&pThis->__nvoc_base_INotifier, arg_pCallContext);
if (status != NV_OK) goto __nvoc_ctor_Notifier_fail_INotifier;
__nvoc_init_dataField_Notifier(pThis);
status = __nvoc_notifyConstruct(pThis, arg_pCallContext);
if (status != NV_OK) goto __nvoc_ctor_Notifier_fail__init;
goto __nvoc_ctor_Notifier_exit; // Success
__nvoc_ctor_Notifier_fail__init:
__nvoc_dtor_INotifier(&pThis->__nvoc_base_INotifier);
__nvoc_ctor_Notifier_fail_INotifier:
__nvoc_ctor_Notifier_exit:
return status;
}
static void __nvoc_init_funcTable_Notifier_1(Notifier *pThis) {
PORT_UNREFERENCED_VARIABLE(pThis);
pThis->__notifyGetNotificationListPtr__ = &notifyGetNotificationListPtr_IMPL;
pThis->__notifyGetNotificationShare__ = &notifyGetNotificationShare_IMPL;
pThis->__notifySetNotificationShare__ = &notifySetNotificationShare_IMPL;
pThis->__notifyUnregisterEvent__ = &notifyUnregisterEvent_IMPL;
pThis->__notifyGetOrAllocNotifShare__ = &notifyGetOrAllocNotifShare_IMPL;
pThis->__nvoc_base_INotifier.__inotifyGetNotificationListPtr__ = &__nvoc_thunk_Notifier_inotifyGetNotificationListPtr;
pThis->__nvoc_base_INotifier.__inotifyGetNotificationShare__ = &__nvoc_thunk_Notifier_inotifyGetNotificationShare;
pThis->__nvoc_base_INotifier.__inotifySetNotificationShare__ = &__nvoc_thunk_Notifier_inotifySetNotificationShare;
pThis->__nvoc_base_INotifier.__inotifyUnregisterEvent__ = &__nvoc_thunk_Notifier_inotifyUnregisterEvent;
pThis->__nvoc_base_INotifier.__inotifyGetOrAllocNotifShare__ = &__nvoc_thunk_Notifier_inotifyGetOrAllocNotifShare;
}
void __nvoc_init_funcTable_Notifier(Notifier *pThis) {
__nvoc_init_funcTable_Notifier_1(pThis);
}
void __nvoc_init_INotifier(INotifier*);
void __nvoc_init_Notifier(Notifier *pThis) {
pThis->__nvoc_pbase_Notifier = pThis;
pThis->__nvoc_pbase_INotifier = &pThis->__nvoc_base_INotifier;
__nvoc_init_INotifier(&pThis->__nvoc_base_INotifier);
__nvoc_init_funcTable_Notifier(pThis);
}

@@ -0,0 +1,529 @@
#ifndef _G_EVENT_NVOC_H_
#define _G_EVENT_NVOC_H_
#include "nvoc/runtime.h"
#ifdef __cplusplus
extern "C" {
#endif
/*
* SPDX-FileCopyrightText: Copyright (c) 1993-2019 NVIDIA CORPORATION & AFFILIATES. All rights reserved.
* SPDX-License-Identifier: MIT
*
* Permission is hereby granted, free of charge, to any person obtaining a
* copy of this software and associated documentation files (the "Software"),
* to deal in the Software without restriction, including without limitation
* the rights to use, copy, modify, merge, publish, distribute, sublicense,
* and/or sell copies of the Software, and to permit persons to whom the
* Software is furnished to do so, subject to the following conditions:
*
* The above copyright notice and this permission notice shall be included in
* all copies or substantial portions of the Software.
*
* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
* IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
* FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL
* THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
* LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
* FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER
* DEALINGS IN THE SOFTWARE.
*/
#include "g_event_nvoc.h"
#ifndef _EVENT_H_
#define _EVENT_H_
#include "class/cl0000.h" // NV0000_NOTIFIERS_MAXCOUNT
#include "resserv/resserv.h"
#include "nvoc/prelude.h"
#include "resserv/rs_server.h"
#include "rmapi/resource.h"
typedef struct _def_system_event_queue SYSTEM_EVENTS_QUEUE;
struct EVENTNOTIFICATION
{
NvHandle hEventClient;
NvHandle hEvent;
NvU32 subdeviceInst;
NvU32 NotifyIndex; // NVnnnn_NOTIFIERS_xyz
NvU32 NotifyType; // Event class. NV01_EVENT_OS_EVENT for example.
NvBool bUserOsEventHandle; // Event was allocated from user app.
NvBool bBroadcastEvent; // Wait for all subdevices before sending event.
NvBool bClientRM; // Event was allocated from client RM.
NvBool bSubdeviceSpecificEvent; // SubdeviceSpecificValue is valid.
NvU32 SubdeviceSpecificValue; // NV0005_NOTIFY_INDEX_SUBDEVICE
NvBool bEventDataRequired; // nv_post_event allocates memory for Data.
NvBool bNonStallIntrEvent;
NvU32 NotifyTriggerCount; // Used with bBroadcastEvent.
NvP64 Data;
struct EVENTNOTIFICATION *Next;
};
typedef struct EVENTNOTIFICATION EVENTNOTIFICATION, *PEVENTNOTIFICATION;
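/*
 * Illustrative sketch (not generated code): EVENTNOTIFICATION entries form a singly
 * linked list through the Next member (see pEventList in NotifShare below); a walk
 * over the listeners registered for one notify index might look like this.
 */
static inline NvU32 exampleCountListenersForIndex(PEVENTNOTIFICATION pList, NvU32 notifyIndex)
{
    NvU32 count = 0;
    for (; pList != NULL; pList = pList->Next)
    {
        if (pList->NotifyIndex == notifyIndex)
            count++;
    }
    return count;
}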
struct INotifier;
#ifndef __NVOC_CLASS_INotifier_TYPEDEF__
#define __NVOC_CLASS_INotifier_TYPEDEF__
typedef struct INotifier INotifier;
#endif /* __NVOC_CLASS_INotifier_TYPEDEF__ */
#ifndef __nvoc_class_id_INotifier
#define __nvoc_class_id_INotifier 0xf8f965
#endif /* __nvoc_class_id_INotifier */
#define NV_SYSTEM_EVENT_QUEUE_SIZE 16
struct _def_system_event_queue
{
NvU32 Head;
NvU32 Tail;
struct event_queue
{
NvU32 event;
NvU32 status;
} EventQueue[NV_SYSTEM_EVENT_QUEUE_SIZE];
};
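/*
 * Illustrative sketch (not generated code): Head/Tail index a fixed ring of
 * NV_SYSTEM_EVENT_QUEUE_SIZE entries.  The full/empty convention below (full when
 * advancing Tail would meet Head) is an assumption for the example; the real producer
 * policy may differ.
 */
static inline NvBool exampleSystemEventEnqueue(SYSTEM_EVENTS_QUEUE *pQueue, NvU32 event, NvU32 status)
{
    NvU32 nextTail = (pQueue->Tail + 1) % NV_SYSTEM_EVENT_QUEUE_SIZE;
    if (nextTail == pQueue->Head)
        return NV_FALSE; /* queue full */
    pQueue->EventQueue[pQueue->Tail].event = event;
    pQueue->EventQueue[pQueue->Tail].status = status;
    pQueue->Tail = nextTail;
    return NV_TRUE;
}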
struct _def_client_system_event_info
{
SYSTEM_EVENTS_QUEUE systemEventsQueue;
NvU32 notifyActions[NV0000_NOTIFIERS_MAXCOUNT];
};
/**
* This class represents data that is shared between one notifier and any
* events that are registered with the notifier.
*
* Instances of this class are ref-counted and will be kept alive until
* the notifier and all of its events have been freed.
*/
#ifdef NVOC_EVENT_H_PRIVATE_ACCESS_ALLOWED
#define PRIVATE_FIELD(x) x
#else
#define PRIVATE_FIELD(x) NVOC_PRIVATE_FIELD(x)
#endif
struct NotifShare {
const struct NVOC_RTTI *__nvoc_rtti;
struct RsShared __nvoc_base_RsShared;
struct Object *__nvoc_pbase_Object;
struct RsShared *__nvoc_pbase_RsShared;
struct NotifShare *__nvoc_pbase_NotifShare;
struct INotifier *pNotifier;
NvHandle hNotifierClient;
NvHandle hNotifierResource;
EVENTNOTIFICATION *pEventList;
};
#ifndef __NVOC_CLASS_NotifShare_TYPEDEF__
#define __NVOC_CLASS_NotifShare_TYPEDEF__
typedef struct NotifShare NotifShare;
#endif /* __NVOC_CLASS_NotifShare_TYPEDEF__ */
#ifndef __nvoc_class_id_NotifShare
#define __nvoc_class_id_NotifShare 0xd5f150
#endif /* __nvoc_class_id_NotifShare */
extern const struct NVOC_CLASS_DEF __nvoc_class_def_NotifShare;
#define __staticCast_NotifShare(pThis) \
((pThis)->__nvoc_pbase_NotifShare)
#ifdef __nvoc_event_h_disabled
#define __dynamicCast_NotifShare(pThis) ((NotifShare*)NULL)
#else //__nvoc_event_h_disabled
#define __dynamicCast_NotifShare(pThis) \
((NotifShare*)__nvoc_dynamicCast(staticCast((pThis), Dynamic), classInfo(NotifShare)))
#endif //__nvoc_event_h_disabled
NV_STATUS __nvoc_objCreateDynamic_NotifShare(NotifShare**, Dynamic*, NvU32, va_list);
NV_STATUS __nvoc_objCreate_NotifShare(NotifShare**, Dynamic*, NvU32);
#define __objCreate_NotifShare(ppNewObj, pParent, createFlags) \
__nvoc_objCreate_NotifShare((ppNewObj), staticCast((pParent), Dynamic), (createFlags))
NV_STATUS shrnotifConstruct_IMPL(struct NotifShare *arg_pNotifShare);
#define __nvoc_shrnotifConstruct(arg_pNotifShare) shrnotifConstruct_IMPL(arg_pNotifShare)
void shrnotifDestruct_IMPL(struct NotifShare *pNotifShare);
#define __nvoc_shrnotifDestruct(pNotifShare) shrnotifDestruct_IMPL(pNotifShare)
#undef PRIVATE_FIELD
/**
* This class represents event notification consumers
*/
#ifdef NVOC_EVENT_H_PRIVATE_ACCESS_ALLOWED
#define PRIVATE_FIELD(x) x
#else
#define PRIVATE_FIELD(x) NVOC_PRIVATE_FIELD(x)
#endif
struct Event {
const struct NVOC_RTTI *__nvoc_rtti;
struct RmResource __nvoc_base_RmResource;
struct Object *__nvoc_pbase_Object;
struct RsResource *__nvoc_pbase_RsResource;
struct RmResourceCommon *__nvoc_pbase_RmResourceCommon;
struct RmResource *__nvoc_pbase_RmResource;
struct Event *__nvoc_pbase_Event;
NvBool (*__eventShareCallback__)(struct Event *, struct RsClient *, struct RsResourceRef *, RS_SHARE_POLICY *);
NV_STATUS (*__eventCheckMemInterUnmap__)(struct Event *, NvBool);
NV_STATUS (*__eventControl__)(struct Event *, struct CALL_CONTEXT *, struct RS_RES_CONTROL_PARAMS_INTERNAL *);
NV_STATUS (*__eventGetMemInterMapParams__)(struct Event *, RMRES_MEM_INTER_MAP_PARAMS *);
NV_STATUS (*__eventGetMemoryMappingDescriptor__)(struct Event *, struct MEMORY_DESCRIPTOR **);
NvU32 (*__eventGetRefCount__)(struct Event *);
NV_STATUS (*__eventControlFilter__)(struct Event *, struct CALL_CONTEXT *, struct RS_RES_CONTROL_PARAMS_INTERNAL *);
void (*__eventAddAdditionalDependants__)(struct RsClient *, struct Event *, RsResourceRef *);
NV_STATUS (*__eventUnmap__)(struct Event *, struct CALL_CONTEXT *, RsCpuMapping *);
NV_STATUS (*__eventControl_Prologue__)(struct Event *, struct CALL_CONTEXT *, struct RS_RES_CONTROL_PARAMS_INTERNAL *);
NvBool (*__eventCanCopy__)(struct Event *);
NV_STATUS (*__eventMapTo__)(struct Event *, RS_RES_MAP_TO_PARAMS *);
void (*__eventPreDestruct__)(struct Event *);
NV_STATUS (*__eventUnmapFrom__)(struct Event *, RS_RES_UNMAP_FROM_PARAMS *);
void (*__eventControl_Epilogue__)(struct Event *, struct CALL_CONTEXT *, struct RS_RES_CONTROL_PARAMS_INTERNAL *);
NV_STATUS (*__eventControlLookup__)(struct Event *, struct RS_RES_CONTROL_PARAMS_INTERNAL *, const struct NVOC_EXPORTED_METHOD_DEF **);
NV_STATUS (*__eventMap__)(struct Event *, struct CALL_CONTEXT *, RS_CPU_MAP_PARAMS *, RsCpuMapping *);
NvBool (*__eventAccessCallback__)(struct Event *, struct RsClient *, void *, RsAccessRight);
struct NotifShare *pNotifierShare;
NvHandle hNotifierClient;
NvHandle hNotifierResource;
NvHandle hEvent;
};
#ifndef __NVOC_CLASS_Event_TYPEDEF__
#define __NVOC_CLASS_Event_TYPEDEF__
typedef struct Event Event;
#endif /* __NVOC_CLASS_Event_TYPEDEF__ */
#ifndef __nvoc_class_id_Event
#define __nvoc_class_id_Event 0xa4ecfc
#endif /* __nvoc_class_id_Event */
extern const struct NVOC_CLASS_DEF __nvoc_class_def_Event;
#define __staticCast_Event(pThis) \
((pThis)->__nvoc_pbase_Event)
#ifdef __nvoc_event_h_disabled
#define __dynamicCast_Event(pThis) ((Event*)NULL)
#else //__nvoc_event_h_disabled
#define __dynamicCast_Event(pThis) \
((Event*)__nvoc_dynamicCast(staticCast((pThis), Dynamic), classInfo(Event)))
#endif //__nvoc_event_h_disabled
NV_STATUS __nvoc_objCreateDynamic_Event(Event**, Dynamic*, NvU32, va_list);
NV_STATUS __nvoc_objCreate_Event(Event**, Dynamic*, NvU32, struct CALL_CONTEXT * arg_pCallContext, struct RS_RES_ALLOC_PARAMS_INTERNAL * arg_pParams);
#define __objCreate_Event(ppNewObj, pParent, createFlags, arg_pCallContext, arg_pParams) \
__nvoc_objCreate_Event((ppNewObj), staticCast((pParent), Dynamic), (createFlags), arg_pCallContext, arg_pParams)
#define eventShareCallback(pResource, pInvokingClient, pParentRef, pSharePolicy) eventShareCallback_DISPATCH(pResource, pInvokingClient, pParentRef, pSharePolicy)
#define eventCheckMemInterUnmap(pRmResource, bSubdeviceHandleProvided) eventCheckMemInterUnmap_DISPATCH(pRmResource, bSubdeviceHandleProvided)
#define eventControl(pResource, pCallContext, pParams) eventControl_DISPATCH(pResource, pCallContext, pParams)
#define eventGetMemInterMapParams(pRmResource, pParams) eventGetMemInterMapParams_DISPATCH(pRmResource, pParams)
#define eventGetMemoryMappingDescriptor(pRmResource, ppMemDesc) eventGetMemoryMappingDescriptor_DISPATCH(pRmResource, ppMemDesc)
#define eventGetRefCount(pResource) eventGetRefCount_DISPATCH(pResource)
#define eventControlFilter(pResource, pCallContext, pParams) eventControlFilter_DISPATCH(pResource, pCallContext, pParams)
#define eventAddAdditionalDependants(pClient, pResource, pReference) eventAddAdditionalDependants_DISPATCH(pClient, pResource, pReference)
#define eventUnmap(pResource, pCallContext, pCpuMapping) eventUnmap_DISPATCH(pResource, pCallContext, pCpuMapping)
#define eventControl_Prologue(pResource, pCallContext, pParams) eventControl_Prologue_DISPATCH(pResource, pCallContext, pParams)
#define eventCanCopy(pResource) eventCanCopy_DISPATCH(pResource)
#define eventMapTo(pResource, pParams) eventMapTo_DISPATCH(pResource, pParams)
#define eventPreDestruct(pResource) eventPreDestruct_DISPATCH(pResource)
#define eventUnmapFrom(pResource, pParams) eventUnmapFrom_DISPATCH(pResource, pParams)
#define eventControl_Epilogue(pResource, pCallContext, pParams) eventControl_Epilogue_DISPATCH(pResource, pCallContext, pParams)
#define eventControlLookup(pResource, pParams, ppEntry) eventControlLookup_DISPATCH(pResource, pParams, ppEntry)
#define eventMap(pResource, pCallContext, pParams, pCpuMapping) eventMap_DISPATCH(pResource, pCallContext, pParams, pCpuMapping)
#define eventAccessCallback(pResource, pInvokingClient, pAllocParams, accessRight) eventAccessCallback_DISPATCH(pResource, pInvokingClient, pAllocParams, accessRight)
static inline NvBool eventShareCallback_DISPATCH(struct Event *pResource, struct RsClient *pInvokingClient, struct RsResourceRef *pParentRef, RS_SHARE_POLICY *pSharePolicy) {
return pResource->__eventShareCallback__(pResource, pInvokingClient, pParentRef, pSharePolicy);
}
static inline NV_STATUS eventCheckMemInterUnmap_DISPATCH(struct Event *pRmResource, NvBool bSubdeviceHandleProvided) {
return pRmResource->__eventCheckMemInterUnmap__(pRmResource, bSubdeviceHandleProvided);
}
static inline NV_STATUS eventControl_DISPATCH(struct Event *pResource, struct CALL_CONTEXT *pCallContext, struct RS_RES_CONTROL_PARAMS_INTERNAL *pParams) {
return pResource->__eventControl__(pResource, pCallContext, pParams);
}
static inline NV_STATUS eventGetMemInterMapParams_DISPATCH(struct Event *pRmResource, RMRES_MEM_INTER_MAP_PARAMS *pParams) {
return pRmResource->__eventGetMemInterMapParams__(pRmResource, pParams);
}
static inline NV_STATUS eventGetMemoryMappingDescriptor_DISPATCH(struct Event *pRmResource, struct MEMORY_DESCRIPTOR **ppMemDesc) {
return pRmResource->__eventGetMemoryMappingDescriptor__(pRmResource, ppMemDesc);
}
static inline NvU32 eventGetRefCount_DISPATCH(struct Event *pResource) {
return pResource->__eventGetRefCount__(pResource);
}
static inline NV_STATUS eventControlFilter_DISPATCH(struct Event *pResource, struct CALL_CONTEXT *pCallContext, struct RS_RES_CONTROL_PARAMS_INTERNAL *pParams) {
return pResource->__eventControlFilter__(pResource, pCallContext, pParams);
}
static inline void eventAddAdditionalDependants_DISPATCH(struct RsClient *pClient, struct Event *pResource, RsResourceRef *pReference) {
pResource->__eventAddAdditionalDependants__(pClient, pResource, pReference);
}
static inline NV_STATUS eventUnmap_DISPATCH(struct Event *pResource, struct CALL_CONTEXT *pCallContext, RsCpuMapping *pCpuMapping) {
return pResource->__eventUnmap__(pResource, pCallContext, pCpuMapping);
}
static inline NV_STATUS eventControl_Prologue_DISPATCH(struct Event *pResource, struct CALL_CONTEXT *pCallContext, struct RS_RES_CONTROL_PARAMS_INTERNAL *pParams) {
return pResource->__eventControl_Prologue__(pResource, pCallContext, pParams);
}
static inline NvBool eventCanCopy_DISPATCH(struct Event *pResource) {
return pResource->__eventCanCopy__(pResource);
}
static inline NV_STATUS eventMapTo_DISPATCH(struct Event *pResource, RS_RES_MAP_TO_PARAMS *pParams) {
return pResource->__eventMapTo__(pResource, pParams);
}
static inline void eventPreDestruct_DISPATCH(struct Event *pResource) {
pResource->__eventPreDestruct__(pResource);
}
static inline NV_STATUS eventUnmapFrom_DISPATCH(struct Event *pResource, RS_RES_UNMAP_FROM_PARAMS *pParams) {
return pResource->__eventUnmapFrom__(pResource, pParams);
}
static inline void eventControl_Epilogue_DISPATCH(struct Event *pResource, struct CALL_CONTEXT *pCallContext, struct RS_RES_CONTROL_PARAMS_INTERNAL *pParams) {
pResource->__eventControl_Epilogue__(pResource, pCallContext, pParams);
}
static inline NV_STATUS eventControlLookup_DISPATCH(struct Event *pResource, struct RS_RES_CONTROL_PARAMS_INTERNAL *pParams, const struct NVOC_EXPORTED_METHOD_DEF **ppEntry) {
return pResource->__eventControlLookup__(pResource, pParams, ppEntry);
}
static inline NV_STATUS eventMap_DISPATCH(struct Event *pResource, struct CALL_CONTEXT *pCallContext, RS_CPU_MAP_PARAMS *pParams, RsCpuMapping *pCpuMapping) {
return pResource->__eventMap__(pResource, pCallContext, pParams, pCpuMapping);
}
static inline NvBool eventAccessCallback_DISPATCH(struct Event *pResource, struct RsClient *pInvokingClient, void *pAllocParams, RsAccessRight accessRight) {
return pResource->__eventAccessCallback__(pResource, pInvokingClient, pAllocParams, accessRight);
}
NV_STATUS eventConstruct_IMPL(struct Event *arg_pEvent, struct CALL_CONTEXT *arg_pCallContext, struct RS_RES_ALLOC_PARAMS_INTERNAL *arg_pParams);
#define __nvoc_eventConstruct(arg_pEvent, arg_pCallContext, arg_pParams) eventConstruct_IMPL(arg_pEvent, arg_pCallContext, arg_pParams)
void eventDestruct_IMPL(struct Event *pEvent);
#define __nvoc_eventDestruct(pEvent) eventDestruct_IMPL(pEvent)
NV_STATUS eventInit_IMPL(struct Event *pEvent, struct CALL_CONTEXT *pCallContext, NvHandle hNotifierClient, NvHandle hNotifierResource, PEVENTNOTIFICATION **pppEventNotification);
#ifdef __nvoc_event_h_disabled
static inline NV_STATUS eventInit(struct Event *pEvent, struct CALL_CONTEXT *pCallContext, NvHandle hNotifierClient, NvHandle hNotifierResource, PEVENTNOTIFICATION **pppEventNotification) {
NV_ASSERT_FAILED_PRECOMP("Event was disabled!");
return NV_ERR_NOT_SUPPORTED;
}
#else //__nvoc_event_h_disabled
#define eventInit(pEvent, pCallContext, hNotifierClient, hNotifierResource, pppEventNotification) eventInit_IMPL(pEvent, pCallContext, hNotifierClient, hNotifierResource, pppEventNotification)
#endif //__nvoc_event_h_disabled
#undef PRIVATE_FIELD
/**
* Mix-in interface for resources that send notifications to events
*/
#ifdef NVOC_EVENT_H_PRIVATE_ACCESS_ALLOWED
#define PRIVATE_FIELD(x) x
#else
#define PRIVATE_FIELD(x) NVOC_PRIVATE_FIELD(x)
#endif
struct INotifier {
const struct NVOC_RTTI *__nvoc_rtti;
struct INotifier *__nvoc_pbase_INotifier;
PEVENTNOTIFICATION *(*__inotifyGetNotificationListPtr__)(struct INotifier *);
void (*__inotifySetNotificationShare__)(struct INotifier *, struct NotifShare *);
struct NotifShare *(*__inotifyGetNotificationShare__)(struct INotifier *);
NV_STATUS (*__inotifyUnregisterEvent__)(struct INotifier *, NvHandle, NvHandle, NvHandle, NvHandle);
NV_STATUS (*__inotifyGetOrAllocNotifShare__)(struct INotifier *, NvHandle, NvHandle, struct NotifShare **);
};
#ifndef __NVOC_CLASS_INotifier_TYPEDEF__
#define __NVOC_CLASS_INotifier_TYPEDEF__
typedef struct INotifier INotifier;
#endif /* __NVOC_CLASS_INotifier_TYPEDEF__ */
#ifndef __nvoc_class_id_INotifier
#define __nvoc_class_id_INotifier 0xf8f965
#endif /* __nvoc_class_id_INotifier */
extern const struct NVOC_CLASS_DEF __nvoc_class_def_INotifier;
#define __staticCast_INotifier(pThis) \
((pThis)->__nvoc_pbase_INotifier)
#ifdef __nvoc_event_h_disabled
#define __dynamicCast_INotifier(pThis) ((INotifier*)NULL)
#else //__nvoc_event_h_disabled
#define __dynamicCast_INotifier(pThis) \
((INotifier*)__nvoc_dynamicCast(staticCast((pThis), Dynamic), classInfo(INotifier)))
#endif //__nvoc_event_h_disabled
NV_STATUS __nvoc_objCreateDynamic_INotifier(INotifier**, Dynamic*, NvU32, va_list);
NV_STATUS __nvoc_objCreate_INotifier(INotifier**, Dynamic*, NvU32, struct CALL_CONTEXT * arg_pCallContext);
#define __objCreate_INotifier(ppNewObj, pParent, createFlags, arg_pCallContext) \
__nvoc_objCreate_INotifier((ppNewObj), staticCast((pParent), Dynamic), (createFlags), arg_pCallContext)
#define inotifyGetNotificationListPtr(pNotifier) inotifyGetNotificationListPtr_DISPATCH(pNotifier)
#define inotifySetNotificationShare(pNotifier, pNotifShare) inotifySetNotificationShare_DISPATCH(pNotifier, pNotifShare)
#define inotifyGetNotificationShare(pNotifier) inotifyGetNotificationShare_DISPATCH(pNotifier)
#define inotifyUnregisterEvent(pNotifier, hNotifierClient, hNotifierResource, hEventClient, hEvent) inotifyUnregisterEvent_DISPATCH(pNotifier, hNotifierClient, hNotifierResource, hEventClient, hEvent)
#define inotifyGetOrAllocNotifShare(pNotifier, hNotifierClient, hNotifierResource, ppNotifShare) inotifyGetOrAllocNotifShare_DISPATCH(pNotifier, hNotifierClient, hNotifierResource, ppNotifShare)
static inline PEVENTNOTIFICATION *inotifyGetNotificationListPtr_DISPATCH(struct INotifier *pNotifier) {
return pNotifier->__inotifyGetNotificationListPtr__(pNotifier);
}
static inline void inotifySetNotificationShare_DISPATCH(struct INotifier *pNotifier, struct NotifShare *pNotifShare) {
pNotifier->__inotifySetNotificationShare__(pNotifier, pNotifShare);
}
static inline struct NotifShare *inotifyGetNotificationShare_DISPATCH(struct INotifier *pNotifier) {
return pNotifier->__inotifyGetNotificationShare__(pNotifier);
}
static inline NV_STATUS inotifyUnregisterEvent_DISPATCH(struct INotifier *pNotifier, NvHandle hNotifierClient, NvHandle hNotifierResource, NvHandle hEventClient, NvHandle hEvent) {
return pNotifier->__inotifyUnregisterEvent__(pNotifier, hNotifierClient, hNotifierResource, hEventClient, hEvent);
}
static inline NV_STATUS inotifyGetOrAllocNotifShare_DISPATCH(struct INotifier *pNotifier, NvHandle hNotifierClient, NvHandle hNotifierResource, struct NotifShare **ppNotifShare) {
return pNotifier->__inotifyGetOrAllocNotifShare__(pNotifier, hNotifierClient, hNotifierResource, ppNotifShare);
}
NV_STATUS inotifyConstruct_IMPL(struct INotifier *arg_pNotifier, struct CALL_CONTEXT *arg_pCallContext);
#define __nvoc_inotifyConstruct(arg_pNotifier, arg_pCallContext) inotifyConstruct_IMPL(arg_pNotifier, arg_pCallContext)
void inotifyDestruct_IMPL(struct INotifier *pNotifier);
#define __nvoc_inotifyDestruct(pNotifier) inotifyDestruct_IMPL(pNotifier)
PEVENTNOTIFICATION inotifyGetNotificationList_IMPL(struct INotifier *pNotifier);
#ifdef __nvoc_event_h_disabled
static inline PEVENTNOTIFICATION inotifyGetNotificationList(struct INotifier *pNotifier) {
NV_ASSERT_FAILED_PRECOMP("INotifier was disabled!");
return NULL;
}
#else //__nvoc_event_h_disabled
#define inotifyGetNotificationList(pNotifier) inotifyGetNotificationList_IMPL(pNotifier)
#endif //__nvoc_event_h_disabled
#undef PRIVATE_FIELD
/**
* Basic implementation for event notification mix-in
*/
#ifdef NVOC_EVENT_H_PRIVATE_ACCESS_ALLOWED
#define PRIVATE_FIELD(x) x
#else
#define PRIVATE_FIELD(x) NVOC_PRIVATE_FIELD(x)
#endif
struct Notifier {
const struct NVOC_RTTI *__nvoc_rtti;
struct INotifier __nvoc_base_INotifier;
struct INotifier *__nvoc_pbase_INotifier;
struct Notifier *__nvoc_pbase_Notifier;
PEVENTNOTIFICATION *(*__notifyGetNotificationListPtr__)(struct Notifier *);
struct NotifShare *(*__notifyGetNotificationShare__)(struct Notifier *);
void (*__notifySetNotificationShare__)(struct Notifier *, struct NotifShare *);
NV_STATUS (*__notifyUnregisterEvent__)(struct Notifier *, NvHandle, NvHandle, NvHandle, NvHandle);
NV_STATUS (*__notifyGetOrAllocNotifShare__)(struct Notifier *, NvHandle, NvHandle, struct NotifShare **);
struct NotifShare *pNotifierShare;
};
#ifndef __NVOC_CLASS_Notifier_TYPEDEF__
#define __NVOC_CLASS_Notifier_TYPEDEF__
typedef struct Notifier Notifier;
#endif /* __NVOC_CLASS_Notifier_TYPEDEF__ */
#ifndef __nvoc_class_id_Notifier
#define __nvoc_class_id_Notifier 0xa8683b
#endif /* __nvoc_class_id_Notifier */
extern const struct NVOC_CLASS_DEF __nvoc_class_def_Notifier;
#define __staticCast_Notifier(pThis) \
((pThis)->__nvoc_pbase_Notifier)
#ifdef __nvoc_event_h_disabled
#define __dynamicCast_Notifier(pThis) ((Notifier*)NULL)
#else //__nvoc_event_h_disabled
#define __dynamicCast_Notifier(pThis) \
((Notifier*)__nvoc_dynamicCast(staticCast((pThis), Dynamic), classInfo(Notifier)))
#endif //__nvoc_event_h_disabled
NV_STATUS __nvoc_objCreateDynamic_Notifier(Notifier**, Dynamic*, NvU32, va_list);
NV_STATUS __nvoc_objCreate_Notifier(Notifier**, Dynamic*, NvU32, struct CALL_CONTEXT * arg_pCallContext);
#define __objCreate_Notifier(ppNewObj, pParent, createFlags, arg_pCallContext) \
__nvoc_objCreate_Notifier((ppNewObj), staticCast((pParent), Dynamic), (createFlags), arg_pCallContext)
#define notifyGetNotificationListPtr(pNotifier) notifyGetNotificationListPtr_DISPATCH(pNotifier)
#define notifyGetNotificationShare(pNotifier) notifyGetNotificationShare_DISPATCH(pNotifier)
#define notifySetNotificationShare(pNotifier, pNotifShare) notifySetNotificationShare_DISPATCH(pNotifier, pNotifShare)
#define notifyUnregisterEvent(pNotifier, hNotifierClient, hNotifierResource, hEventClient, hEvent) notifyUnregisterEvent_DISPATCH(pNotifier, hNotifierClient, hNotifierResource, hEventClient, hEvent)
#define notifyGetOrAllocNotifShare(pNotifier, hNotifierClient, hNotifierResource, ppNotifShare) notifyGetOrAllocNotifShare_DISPATCH(pNotifier, hNotifierClient, hNotifierResource, ppNotifShare)
PEVENTNOTIFICATION *notifyGetNotificationListPtr_IMPL(struct Notifier *pNotifier);
static inline PEVENTNOTIFICATION *notifyGetNotificationListPtr_DISPATCH(struct Notifier *pNotifier) {
return pNotifier->__notifyGetNotificationListPtr__(pNotifier);
}
struct NotifShare *notifyGetNotificationShare_IMPL(struct Notifier *pNotifier);
static inline struct NotifShare *notifyGetNotificationShare_DISPATCH(struct Notifier *pNotifier) {
return pNotifier->__notifyGetNotificationShare__(pNotifier);
}
void notifySetNotificationShare_IMPL(struct Notifier *pNotifier, struct NotifShare *pNotifShare);
static inline void notifySetNotificationShare_DISPATCH(struct Notifier *pNotifier, struct NotifShare *pNotifShare) {
pNotifier->__notifySetNotificationShare__(pNotifier, pNotifShare);
}
NV_STATUS notifyUnregisterEvent_IMPL(struct Notifier *pNotifier, NvHandle hNotifierClient, NvHandle hNotifierResource, NvHandle hEventClient, NvHandle hEvent);
static inline NV_STATUS notifyUnregisterEvent_DISPATCH(struct Notifier *pNotifier, NvHandle hNotifierClient, NvHandle hNotifierResource, NvHandle hEventClient, NvHandle hEvent) {
return pNotifier->__notifyUnregisterEvent__(pNotifier, hNotifierClient, hNotifierResource, hEventClient, hEvent);
}
NV_STATUS notifyGetOrAllocNotifShare_IMPL(struct Notifier *pNotifier, NvHandle hNotifierClient, NvHandle hNotifierResource, struct NotifShare **ppNotifShare);
static inline NV_STATUS notifyGetOrAllocNotifShare_DISPATCH(struct Notifier *pNotifier, NvHandle hNotifierClient, NvHandle hNotifierResource, struct NotifShare **ppNotifShare) {
return pNotifier->__notifyGetOrAllocNotifShare__(pNotifier, hNotifierClient, hNotifierResource, ppNotifShare);
}
NV_STATUS notifyConstruct_IMPL(struct Notifier *arg_pNotifier, struct CALL_CONTEXT *arg_pCallContext);
#define __nvoc_notifyConstruct(arg_pNotifier, arg_pCallContext) notifyConstruct_IMPL(arg_pNotifier, arg_pCallContext)
void notifyDestruct_IMPL(struct Notifier *pNotifier);
#define __nvoc_notifyDestruct(pNotifier) notifyDestruct_IMPL(pNotifier)
#undef PRIVATE_FIELD
void CliAddSystemEvent(NvU32, NvU32);
NvBool CliDelObjectEvents(NvHandle hClient, NvHandle hObject);
NvBool CliGetEventInfo(NvHandle hClient, NvHandle hEvent, struct Event **ppEvent);
NV_STATUS CliGetEventNotificationList(NvHandle hClient, NvHandle hObject,
struct INotifier **ppNotifier,
PEVENTNOTIFICATION **pppEventNotification);
NV_STATUS registerEventNotification(PEVENTNOTIFICATION*, NvHandle, NvHandle, NvHandle, NvU32, NvU32, NvP64, NvBool);
NV_STATUS unregisterEventNotification(PEVENTNOTIFICATION*, NvHandle, NvHandle, NvHandle);
NV_STATUS unregisterEventNotificationWithData(PEVENTNOTIFICATION *, NvHandle, NvHandle, NvHandle, NvBool, NvP64);
NV_STATUS bindEventNotificationToSubdevice(PEVENTNOTIFICATION, NvHandle, NvU32);
NV_STATUS engineNonStallIntrNotify(OBJGPU *, NvU32);
NV_STATUS notifyEvents(OBJGPU*, EVENTNOTIFICATION*, NvU32, NvU32, NvU32, NV_STATUS, NvU32);
NV_STATUS engineNonStallIntrNotifyEvent(OBJGPU *, NvU32, NvHandle);
#endif // _EVENT_H_
#ifdef __cplusplus
} // extern "C"
#endif
#endif // _G_EVENT_NVOC_H_
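The header above is NVOC-generated: each eventXxx()/inotifyXxx() macro expands to a _DISPATCH inline that simply forwards through a per-object function pointer, which the generated __nvoc_init_funcTable_* routine for the concrete class installs at initialization time. A minimal caller-side sketch of the INotifier mix-in is shown below; exampleDumpNotifier and its argument are illustrative assumptions rather than part of the file.

static void exampleDumpNotifier(struct INotifier *pNotifier)
{
    /* Both calls resolve through the function pointers installed for the
     * concrete class (e.g. Notifier) when the object was initialized. */
    PEVENTNOTIFICATION *ppEventList = inotifyGetNotificationListPtr(pNotifier);
    struct NotifShare  *pShare      = inotifyGetNotificationShare(pNotifier);

    /* This list is the one consumed by registerEventNotification() and
     * notifyEvents() declared near the end of the header. */
    (void)ppEventList;
    (void)pShare;
}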

@@ -0,0 +1,334 @@
#define NVOC_GENERIC_ENGINE_H_PRIVATE_ACCESS_ALLOWED
#include "nvoc/runtime.h"
#include "nvoc/rtti.h"
#include "nvtypes.h"
#include "nvport/nvport.h"
#include "nvport/inline/util_valist.h"
#include "utils/nvassert.h"
#include "g_generic_engine_nvoc.h"
#ifdef DEBUG
char __nvoc_class_id_uniqueness_check_0x4bc329 = 1;
#endif
extern const struct NVOC_CLASS_DEF __nvoc_class_def_GenericEngineApi;
extern const struct NVOC_CLASS_DEF __nvoc_class_def_Object;
extern const struct NVOC_CLASS_DEF __nvoc_class_def_RsResource;
extern const struct NVOC_CLASS_DEF __nvoc_class_def_RmResourceCommon;
extern const struct NVOC_CLASS_DEF __nvoc_class_def_RmResource;
extern const struct NVOC_CLASS_DEF __nvoc_class_def_GpuResource;
void __nvoc_init_GenericEngineApi(GenericEngineApi*);
void __nvoc_init_funcTable_GenericEngineApi(GenericEngineApi*);
NV_STATUS __nvoc_ctor_GenericEngineApi(GenericEngineApi*, struct CALL_CONTEXT * arg_pCallContext, struct RS_RES_ALLOC_PARAMS_INTERNAL * arg_pParams);
void __nvoc_init_dataField_GenericEngineApi(GenericEngineApi*);
void __nvoc_dtor_GenericEngineApi(GenericEngineApi*);
extern const struct NVOC_EXPORT_INFO __nvoc_export_info_GenericEngineApi;
static const struct NVOC_RTTI __nvoc_rtti_GenericEngineApi_GenericEngineApi = {
/*pClassDef=*/ &__nvoc_class_def_GenericEngineApi,
/*dtor=*/ (NVOC_DYNAMIC_DTOR) &__nvoc_dtor_GenericEngineApi,
/*offset=*/ 0,
};
static const struct NVOC_RTTI __nvoc_rtti_GenericEngineApi_Object = {
/*pClassDef=*/ &__nvoc_class_def_Object,
/*dtor=*/ &__nvoc_destructFromBase,
/*offset=*/ NV_OFFSETOF(GenericEngineApi, __nvoc_base_GpuResource.__nvoc_base_RmResource.__nvoc_base_RsResource.__nvoc_base_Object),
};
static const struct NVOC_RTTI __nvoc_rtti_GenericEngineApi_RsResource = {
/*pClassDef=*/ &__nvoc_class_def_RsResource,
/*dtor=*/ &__nvoc_destructFromBase,
/*offset=*/ NV_OFFSETOF(GenericEngineApi, __nvoc_base_GpuResource.__nvoc_base_RmResource.__nvoc_base_RsResource),
};
static const struct NVOC_RTTI __nvoc_rtti_GenericEngineApi_RmResourceCommon = {
/*pClassDef=*/ &__nvoc_class_def_RmResourceCommon,
/*dtor=*/ &__nvoc_destructFromBase,
/*offset=*/ NV_OFFSETOF(GenericEngineApi, __nvoc_base_GpuResource.__nvoc_base_RmResource.__nvoc_base_RmResourceCommon),
};
static const struct NVOC_RTTI __nvoc_rtti_GenericEngineApi_RmResource = {
/*pClassDef=*/ &__nvoc_class_def_RmResource,
/*dtor=*/ &__nvoc_destructFromBase,
/*offset=*/ NV_OFFSETOF(GenericEngineApi, __nvoc_base_GpuResource.__nvoc_base_RmResource),
};
static const struct NVOC_RTTI __nvoc_rtti_GenericEngineApi_GpuResource = {
/*pClassDef=*/ &__nvoc_class_def_GpuResource,
/*dtor=*/ &__nvoc_destructFromBase,
/*offset=*/ NV_OFFSETOF(GenericEngineApi, __nvoc_base_GpuResource),
};
static const struct NVOC_CASTINFO __nvoc_castinfo_GenericEngineApi = {
/*numRelatives=*/ 6,
/*relatives=*/ {
&__nvoc_rtti_GenericEngineApi_GenericEngineApi,
&__nvoc_rtti_GenericEngineApi_GpuResource,
&__nvoc_rtti_GenericEngineApi_RmResource,
&__nvoc_rtti_GenericEngineApi_RmResourceCommon,
&__nvoc_rtti_GenericEngineApi_RsResource,
&__nvoc_rtti_GenericEngineApi_Object,
},
};
const struct NVOC_CLASS_DEF __nvoc_class_def_GenericEngineApi =
{
/*classInfo=*/ {
/*size=*/ sizeof(GenericEngineApi),
/*classId=*/ classId(GenericEngineApi),
/*providerId=*/ &__nvoc_rtti_provider,
#if NV_PRINTF_STRINGS_ALLOWED
/*name=*/ "GenericEngineApi",
#endif
},
/*objCreatefn=*/ (NVOC_DYNAMIC_OBJ_CREATE) &__nvoc_objCreateDynamic_GenericEngineApi,
/*pCastInfo=*/ &__nvoc_castinfo_GenericEngineApi,
/*pExportInfo=*/ &__nvoc_export_info_GenericEngineApi
};
static NV_STATUS __nvoc_thunk_GenericEngineApi_gpuresMap(struct GpuResource *pGenericEngineApi, struct CALL_CONTEXT *pCallContext, struct RS_CPU_MAP_PARAMS *pParams, struct RsCpuMapping *pCpuMapping) {
return genapiMap((struct GenericEngineApi *)(((unsigned char *)pGenericEngineApi) - __nvoc_rtti_GenericEngineApi_GpuResource.offset), pCallContext, pParams, pCpuMapping);
}
static NV_STATUS __nvoc_thunk_GenericEngineApi_gpuresGetMapAddrSpace(struct GpuResource *pGenericEngineApi, struct CALL_CONTEXT *pCallContext, NvU32 mapFlags, NV_ADDRESS_SPACE *pAddrSpace) {
return genapiGetMapAddrSpace((struct GenericEngineApi *)(((unsigned char *)pGenericEngineApi) - __nvoc_rtti_GenericEngineApi_GpuResource.offset), pCallContext, mapFlags, pAddrSpace);
}
static NV_STATUS __nvoc_thunk_GenericEngineApi_gpuresControl(struct GpuResource *pGenericEngineApi, struct CALL_CONTEXT *pCallContext, struct RS_RES_CONTROL_PARAMS_INTERNAL *pParams) {
return genapiControl((struct GenericEngineApi *)(((unsigned char *)pGenericEngineApi) - __nvoc_rtti_GenericEngineApi_GpuResource.offset), pCallContext, pParams);
}
static NvBool __nvoc_thunk_GpuResource_genapiShareCallback(struct GenericEngineApi *pGpuResource, struct RsClient *pInvokingClient, struct RsResourceRef *pParentRef, RS_SHARE_POLICY *pSharePolicy) {
return gpuresShareCallback((struct GpuResource *)(((unsigned char *)pGpuResource) + __nvoc_rtti_GenericEngineApi_GpuResource.offset), pInvokingClient, pParentRef, pSharePolicy);
}
static NV_STATUS __nvoc_thunk_GpuResource_genapiUnmap(struct GenericEngineApi *pGpuResource, struct CALL_CONTEXT *pCallContext, struct RsCpuMapping *pCpuMapping) {
return gpuresUnmap((struct GpuResource *)(((unsigned char *)pGpuResource) + __nvoc_rtti_GenericEngineApi_GpuResource.offset), pCallContext, pCpuMapping);
}
static NV_STATUS __nvoc_thunk_RmResource_genapiGetMemInterMapParams(struct GenericEngineApi *pRmResource, RMRES_MEM_INTER_MAP_PARAMS *pParams) {
return rmresGetMemInterMapParams((struct RmResource *)(((unsigned char *)pRmResource) + __nvoc_rtti_GenericEngineApi_RmResource.offset), pParams);
}
static NV_STATUS __nvoc_thunk_RmResource_genapiGetMemoryMappingDescriptor(struct GenericEngineApi *pRmResource, struct MEMORY_DESCRIPTOR **ppMemDesc) {
return rmresGetMemoryMappingDescriptor((struct RmResource *)(((unsigned char *)pRmResource) + __nvoc_rtti_GenericEngineApi_RmResource.offset), ppMemDesc);
}
static NvHandle __nvoc_thunk_GpuResource_genapiGetInternalObjectHandle(struct GenericEngineApi *pGpuResource) {
return gpuresGetInternalObjectHandle((struct GpuResource *)(((unsigned char *)pGpuResource) + __nvoc_rtti_GenericEngineApi_GpuResource.offset));
}
static NV_STATUS __nvoc_thunk_RsResource_genapiControlFilter(struct GenericEngineApi *pResource, struct CALL_CONTEXT *pCallContext, struct RS_RES_CONTROL_PARAMS_INTERNAL *pParams) {
return resControlFilter((struct RsResource *)(((unsigned char *)pResource) + __nvoc_rtti_GenericEngineApi_RsResource.offset), pCallContext, pParams);
}
static void __nvoc_thunk_RsResource_genapiAddAdditionalDependants(struct RsClient *pClient, struct GenericEngineApi *pResource, RsResourceRef *pReference) {
resAddAdditionalDependants(pClient, (struct RsResource *)(((unsigned char *)pResource) + __nvoc_rtti_GenericEngineApi_RsResource.offset), pReference);
}
static NvU32 __nvoc_thunk_RsResource_genapiGetRefCount(struct GenericEngineApi *pResource) {
return resGetRefCount((struct RsResource *)(((unsigned char *)pResource) + __nvoc_rtti_GenericEngineApi_RsResource.offset));
}
static NV_STATUS __nvoc_thunk_RmResource_genapiCheckMemInterUnmap(struct GenericEngineApi *pRmResource, NvBool bSubdeviceHandleProvided) {
return rmresCheckMemInterUnmap((struct RmResource *)(((unsigned char *)pRmResource) + __nvoc_rtti_GenericEngineApi_RmResource.offset), bSubdeviceHandleProvided);
}
static NV_STATUS __nvoc_thunk_RsResource_genapiMapTo(struct GenericEngineApi *pResource, RS_RES_MAP_TO_PARAMS *pParams) {
return resMapTo((struct RsResource *)(((unsigned char *)pResource) + __nvoc_rtti_GenericEngineApi_RsResource.offset), pParams);
}
static NV_STATUS __nvoc_thunk_RmResource_genapiControl_Prologue(struct GenericEngineApi *pResource, struct CALL_CONTEXT *pCallContext, struct RS_RES_CONTROL_PARAMS_INTERNAL *pParams) {
return rmresControl_Prologue((struct RmResource *)(((unsigned char *)pResource) + __nvoc_rtti_GenericEngineApi_RmResource.offset), pCallContext, pParams);
}
static NV_STATUS __nvoc_thunk_GpuResource_genapiGetRegBaseOffsetAndSize(struct GenericEngineApi *pGpuResource, struct OBJGPU *pGpu, NvU32 *pOffset, NvU32 *pSize) {
return gpuresGetRegBaseOffsetAndSize((struct GpuResource *)(((unsigned char *)pGpuResource) + __nvoc_rtti_GenericEngineApi_GpuResource.offset), pGpu, pOffset, pSize);
}
static NvBool __nvoc_thunk_RsResource_genapiCanCopy(struct GenericEngineApi *pResource) {
return resCanCopy((struct RsResource *)(((unsigned char *)pResource) + __nvoc_rtti_GenericEngineApi_RsResource.offset));
}
static NV_STATUS __nvoc_thunk_GpuResource_genapiInternalControlForward(struct GenericEngineApi *pGpuResource, NvU32 command, void *pParams, NvU32 size) {
return gpuresInternalControlForward((struct GpuResource *)(((unsigned char *)pGpuResource) + __nvoc_rtti_GenericEngineApi_GpuResource.offset), command, pParams, size);
}
static void __nvoc_thunk_RsResource_genapiPreDestruct(struct GenericEngineApi *pResource) {
resPreDestruct((struct RsResource *)(((unsigned char *)pResource) + __nvoc_rtti_GenericEngineApi_RsResource.offset));
}
static NV_STATUS __nvoc_thunk_RsResource_genapiUnmapFrom(struct GenericEngineApi *pResource, RS_RES_UNMAP_FROM_PARAMS *pParams) {
return resUnmapFrom((struct RsResource *)(((unsigned char *)pResource) + __nvoc_rtti_GenericEngineApi_RsResource.offset), pParams);
}
static void __nvoc_thunk_RmResource_genapiControl_Epilogue(struct GenericEngineApi *pResource, struct CALL_CONTEXT *pCallContext, struct RS_RES_CONTROL_PARAMS_INTERNAL *pParams) {
rmresControl_Epilogue((struct RmResource *)(((unsigned char *)pResource) + __nvoc_rtti_GenericEngineApi_RmResource.offset), pCallContext, pParams);
}
static NV_STATUS __nvoc_thunk_RsResource_genapiControlLookup(struct GenericEngineApi *pResource, struct RS_RES_CONTROL_PARAMS_INTERNAL *pParams, const struct NVOC_EXPORTED_METHOD_DEF **ppEntry) {
return resControlLookup((struct RsResource *)(((unsigned char *)pResource) + __nvoc_rtti_GenericEngineApi_RsResource.offset), pParams, ppEntry);
}
static NvBool __nvoc_thunk_RmResource_genapiAccessCallback(struct GenericEngineApi *pResource, struct RsClient *pInvokingClient, void *pAllocParams, RsAccessRight accessRight) {
return rmresAccessCallback((struct RmResource *)(((unsigned char *)pResource) + __nvoc_rtti_GenericEngineApi_RmResource.offset), pInvokingClient, pAllocParams, accessRight);
}
const struct NVOC_EXPORT_INFO __nvoc_export_info_GenericEngineApi =
{
/*numEntries=*/ 0,
/*pExportEntries=*/ 0
};
void __nvoc_dtor_GpuResource(GpuResource*);
void __nvoc_dtor_GenericEngineApi(GenericEngineApi *pThis) {
__nvoc_genapiDestruct(pThis);
__nvoc_dtor_GpuResource(&pThis->__nvoc_base_GpuResource);
PORT_UNREFERENCED_VARIABLE(pThis);
}
void __nvoc_init_dataField_GenericEngineApi(GenericEngineApi *pThis) {
PORT_UNREFERENCED_VARIABLE(pThis);
}
NV_STATUS __nvoc_ctor_GpuResource(GpuResource* , struct CALL_CONTEXT *, struct RS_RES_ALLOC_PARAMS_INTERNAL *);
NV_STATUS __nvoc_ctor_GenericEngineApi(GenericEngineApi *pThis, struct CALL_CONTEXT * arg_pCallContext, struct RS_RES_ALLOC_PARAMS_INTERNAL * arg_pParams) {
NV_STATUS status = NV_OK;
status = __nvoc_ctor_GpuResource(&pThis->__nvoc_base_GpuResource, arg_pCallContext, arg_pParams);
if (status != NV_OK) goto __nvoc_ctor_GenericEngineApi_fail_GpuResource;
__nvoc_init_dataField_GenericEngineApi(pThis);
status = __nvoc_genapiConstruct(pThis, arg_pCallContext, arg_pParams);
if (status != NV_OK) goto __nvoc_ctor_GenericEngineApi_fail__init;
goto __nvoc_ctor_GenericEngineApi_exit; // Success
__nvoc_ctor_GenericEngineApi_fail__init:
__nvoc_dtor_GpuResource(&pThis->__nvoc_base_GpuResource);
__nvoc_ctor_GenericEngineApi_fail_GpuResource:
__nvoc_ctor_GenericEngineApi_exit:
return status;
}
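/*
 * Construction order above: the GpuResource base is constructed first, the
 * data fields are initialized next, and the leaf constructor
 * (__nvoc_genapiConstruct) runs last. Each failure label unwinds only the
 * bases that were successfully constructed before the failure occurred.
 */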
static void __nvoc_init_funcTable_GenericEngineApi_1(GenericEngineApi *pThis) {
PORT_UNREFERENCED_VARIABLE(pThis);
pThis->__genapiMap__ = &genapiMap_IMPL;
pThis->__genapiGetMapAddrSpace__ = &genapiGetMapAddrSpace_IMPL;
pThis->__genapiControl__ = &genapiControl_IMPL;
pThis->__nvoc_base_GpuResource.__gpuresMap__ = &__nvoc_thunk_GenericEngineApi_gpuresMap;
pThis->__nvoc_base_GpuResource.__gpuresGetMapAddrSpace__ = &__nvoc_thunk_GenericEngineApi_gpuresGetMapAddrSpace;
pThis->__nvoc_base_GpuResource.__gpuresControl__ = &__nvoc_thunk_GenericEngineApi_gpuresControl;
pThis->__genapiShareCallback__ = &__nvoc_thunk_GpuResource_genapiShareCallback;
pThis->__genapiUnmap__ = &__nvoc_thunk_GpuResource_genapiUnmap;
pThis->__genapiGetMemInterMapParams__ = &__nvoc_thunk_RmResource_genapiGetMemInterMapParams;
pThis->__genapiGetMemoryMappingDescriptor__ = &__nvoc_thunk_RmResource_genapiGetMemoryMappingDescriptor;
pThis->__genapiGetInternalObjectHandle__ = &__nvoc_thunk_GpuResource_genapiGetInternalObjectHandle;
pThis->__genapiControlFilter__ = &__nvoc_thunk_RsResource_genapiControlFilter;
pThis->__genapiAddAdditionalDependants__ = &__nvoc_thunk_RsResource_genapiAddAdditionalDependants;
pThis->__genapiGetRefCount__ = &__nvoc_thunk_RsResource_genapiGetRefCount;
pThis->__genapiCheckMemInterUnmap__ = &__nvoc_thunk_RmResource_genapiCheckMemInterUnmap;
pThis->__genapiMapTo__ = &__nvoc_thunk_RsResource_genapiMapTo;
pThis->__genapiControl_Prologue__ = &__nvoc_thunk_RmResource_genapiControl_Prologue;
pThis->__genapiGetRegBaseOffsetAndSize__ = &__nvoc_thunk_GpuResource_genapiGetRegBaseOffsetAndSize;
pThis->__genapiCanCopy__ = &__nvoc_thunk_RsResource_genapiCanCopy;
pThis->__genapiInternalControlForward__ = &__nvoc_thunk_GpuResource_genapiInternalControlForward;
pThis->__genapiPreDestruct__ = &__nvoc_thunk_RsResource_genapiPreDestruct;
pThis->__genapiUnmapFrom__ = &__nvoc_thunk_RsResource_genapiUnmapFrom;
pThis->__genapiControl_Epilogue__ = &__nvoc_thunk_RmResource_genapiControl_Epilogue;
pThis->__genapiControlLookup__ = &__nvoc_thunk_RsResource_genapiControlLookup;
pThis->__genapiAccessCallback__ = &__nvoc_thunk_RmResource_genapiAccessCallback;
}
void __nvoc_init_funcTable_GenericEngineApi(GenericEngineApi *pThis) {
__nvoc_init_funcTable_GenericEngineApi_1(pThis);
}
void __nvoc_init_GpuResource(GpuResource*);
void __nvoc_init_GenericEngineApi(GenericEngineApi *pThis) {
pThis->__nvoc_pbase_GenericEngineApi = pThis;
pThis->__nvoc_pbase_Object = &pThis->__nvoc_base_GpuResource.__nvoc_base_RmResource.__nvoc_base_RsResource.__nvoc_base_Object;
pThis->__nvoc_pbase_RsResource = &pThis->__nvoc_base_GpuResource.__nvoc_base_RmResource.__nvoc_base_RsResource;
pThis->__nvoc_pbase_RmResourceCommon = &pThis->__nvoc_base_GpuResource.__nvoc_base_RmResource.__nvoc_base_RmResourceCommon;
pThis->__nvoc_pbase_RmResource = &pThis->__nvoc_base_GpuResource.__nvoc_base_RmResource;
pThis->__nvoc_pbase_GpuResource = &pThis->__nvoc_base_GpuResource;
__nvoc_init_GpuResource(&pThis->__nvoc_base_GpuResource);
__nvoc_init_funcTable_GenericEngineApi(pThis);
}
NV_STATUS __nvoc_objCreate_GenericEngineApi(GenericEngineApi **ppThis, Dynamic *pParent, NvU32 createFlags, struct CALL_CONTEXT * arg_pCallContext, struct RS_RES_ALLOC_PARAMS_INTERNAL * arg_pParams) {
NV_STATUS status;
Object *pParentObj;
GenericEngineApi *pThis;
pThis = portMemAllocNonPaged(sizeof(GenericEngineApi));
if (pThis == NULL) return NV_ERR_NO_MEMORY;
portMemSet(pThis, 0, sizeof(GenericEngineApi));
__nvoc_initRtti(staticCast(pThis, Dynamic), &__nvoc_class_def_GenericEngineApi);
if (pParent != NULL && !(createFlags & NVOC_OBJ_CREATE_FLAGS_PARENT_HALSPEC_ONLY))
{
pParentObj = dynamicCast(pParent, Object);
objAddChild(pParentObj, &pThis->__nvoc_base_GpuResource.__nvoc_base_RmResource.__nvoc_base_RsResource.__nvoc_base_Object);
}
else
{
pThis->__nvoc_base_GpuResource.__nvoc_base_RmResource.__nvoc_base_RsResource.__nvoc_base_Object.pParent = NULL;
}
__nvoc_init_GenericEngineApi(pThis);
status = __nvoc_ctor_GenericEngineApi(pThis, arg_pCallContext, arg_pParams);
if (status != NV_OK) goto __nvoc_objCreate_GenericEngineApi_cleanup;
*ppThis = pThis;
return NV_OK;
__nvoc_objCreate_GenericEngineApi_cleanup:
// do not call destructors here since the constructor already called them
portMemFree(pThis);
return status;
}
NV_STATUS __nvoc_objCreateDynamic_GenericEngineApi(GenericEngineApi **ppThis, Dynamic *pParent, NvU32 createFlags, va_list args) {
NV_STATUS status;
struct CALL_CONTEXT * arg_pCallContext = va_arg(args, struct CALL_CONTEXT *);
struct RS_RES_ALLOC_PARAMS_INTERNAL * arg_pParams = va_arg(args, struct RS_RES_ALLOC_PARAMS_INTERNAL *);
status = __nvoc_objCreate_GenericEngineApi(ppThis, pParent, createFlags, arg_pCallContext, arg_pParams);
return status;
}
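The generated thunks in this file convert between base-class and derived-class pointers by adding or subtracting the byte offset recorded in the matching NVOC_RTTI entry (populated with NV_OFFSETOF above). A standalone sketch of that offset arithmetic follows, using made-up type names and the standard offsetof from <stddef.h>.

#include <stddef.h>

struct ExampleBase    { int baseData; };
struct ExampleDerived { const void *pRtti; struct ExampleBase base; };

/* Mirrors the generated down-cast:
 * (Derived *)((unsigned char *)pBase - NV_OFFSETOF(Derived, base)) */
static struct ExampleDerived *exampleDownCast(struct ExampleBase *pBase)
{
    return (struct ExampleDerived *)((unsigned char *)pBase -
                                     offsetof(struct ExampleDerived, base));
}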

@@ -0,0 +1,237 @@
#ifndef _G_GENERIC_ENGINE_NVOC_H_
#define _G_GENERIC_ENGINE_NVOC_H_
#include "nvoc/runtime.h"
#ifdef __cplusplus
extern "C" {
#endif
/*
* SPDX-FileCopyrightText: Copyright (c) 2016-2022 NVIDIA CORPORATION & AFFILIATES. All rights reserved.
* SPDX-License-Identifier: MIT
*
* Permission is hereby granted, free of charge, to any person obtaining a
* copy of this software and associated documentation files (the "Software"),
* to deal in the Software without restriction, including without limitation
* the rights to use, copy, modify, merge, publish, distribute, sublicense,
* and/or sell copies of the Software, and to permit persons to whom the
* Software is furnished to do so, subject to the following conditions:
*
* The above copyright notice and this permission notice shall be included in
* all copies or substantial portions of the Software.
*
* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
* IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
* FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL
* THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
* LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
* FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER
* DEALINGS IN THE SOFTWARE.
*/
#include "g_generic_engine_nvoc.h"
#ifndef _GENERICENGINEAPI_H_
#define _GENERICENGINEAPI_H_
#include "gpu/gpu_resource.h"
/*!
* RM internal class providing a generic engine API to RM clients (e.g.,
* GF100_SUBDEVICE_GRAPHICS and GF100_SUBDEVICE_FB). These classes are primarily
* used to expose BAR0 mappings and controls.
*/
#ifdef NVOC_GENERIC_ENGINE_H_PRIVATE_ACCESS_ALLOWED
#define PRIVATE_FIELD(x) x
#else
#define PRIVATE_FIELD(x) NVOC_PRIVATE_FIELD(x)
#endif
struct GenericEngineApi {
const struct NVOC_RTTI *__nvoc_rtti;
struct GpuResource __nvoc_base_GpuResource;
struct Object *__nvoc_pbase_Object;
struct RsResource *__nvoc_pbase_RsResource;
struct RmResourceCommon *__nvoc_pbase_RmResourceCommon;
struct RmResource *__nvoc_pbase_RmResource;
struct GpuResource *__nvoc_pbase_GpuResource;
struct GenericEngineApi *__nvoc_pbase_GenericEngineApi;
NV_STATUS (*__genapiMap__)(struct GenericEngineApi *, struct CALL_CONTEXT *, struct RS_CPU_MAP_PARAMS *, struct RsCpuMapping *);
NV_STATUS (*__genapiGetMapAddrSpace__)(struct GenericEngineApi *, struct CALL_CONTEXT *, NvU32, NV_ADDRESS_SPACE *);
NV_STATUS (*__genapiControl__)(struct GenericEngineApi *, struct CALL_CONTEXT *, struct RS_RES_CONTROL_PARAMS_INTERNAL *);
NvBool (*__genapiShareCallback__)(struct GenericEngineApi *, struct RsClient *, struct RsResourceRef *, RS_SHARE_POLICY *);
NV_STATUS (*__genapiUnmap__)(struct GenericEngineApi *, struct CALL_CONTEXT *, struct RsCpuMapping *);
NV_STATUS (*__genapiGetMemInterMapParams__)(struct GenericEngineApi *, RMRES_MEM_INTER_MAP_PARAMS *);
NV_STATUS (*__genapiGetMemoryMappingDescriptor__)(struct GenericEngineApi *, struct MEMORY_DESCRIPTOR **);
NvHandle (*__genapiGetInternalObjectHandle__)(struct GenericEngineApi *);
NV_STATUS (*__genapiControlFilter__)(struct GenericEngineApi *, struct CALL_CONTEXT *, struct RS_RES_CONTROL_PARAMS_INTERNAL *);
void (*__genapiAddAdditionalDependants__)(struct RsClient *, struct GenericEngineApi *, RsResourceRef *);
NvU32 (*__genapiGetRefCount__)(struct GenericEngineApi *);
NV_STATUS (*__genapiCheckMemInterUnmap__)(struct GenericEngineApi *, NvBool);
NV_STATUS (*__genapiMapTo__)(struct GenericEngineApi *, RS_RES_MAP_TO_PARAMS *);
NV_STATUS (*__genapiControl_Prologue__)(struct GenericEngineApi *, struct CALL_CONTEXT *, struct RS_RES_CONTROL_PARAMS_INTERNAL *);
NV_STATUS (*__genapiGetRegBaseOffsetAndSize__)(struct GenericEngineApi *, struct OBJGPU *, NvU32 *, NvU32 *);
NvBool (*__genapiCanCopy__)(struct GenericEngineApi *);
NV_STATUS (*__genapiInternalControlForward__)(struct GenericEngineApi *, NvU32, void *, NvU32);
void (*__genapiPreDestruct__)(struct GenericEngineApi *);
NV_STATUS (*__genapiUnmapFrom__)(struct GenericEngineApi *, RS_RES_UNMAP_FROM_PARAMS *);
void (*__genapiControl_Epilogue__)(struct GenericEngineApi *, struct CALL_CONTEXT *, struct RS_RES_CONTROL_PARAMS_INTERNAL *);
NV_STATUS (*__genapiControlLookup__)(struct GenericEngineApi *, struct RS_RES_CONTROL_PARAMS_INTERNAL *, const struct NVOC_EXPORTED_METHOD_DEF **);
NvBool (*__genapiAccessCallback__)(struct GenericEngineApi *, struct RsClient *, void *, RsAccessRight);
};
#ifndef __NVOC_CLASS_GenericEngineApi_TYPEDEF__
#define __NVOC_CLASS_GenericEngineApi_TYPEDEF__
typedef struct GenericEngineApi GenericEngineApi;
#endif /* __NVOC_CLASS_GenericEngineApi_TYPEDEF__ */
#ifndef __nvoc_class_id_GenericEngineApi
#define __nvoc_class_id_GenericEngineApi 0x4bc329
#endif /* __nvoc_class_id_GenericEngineApi */
extern const struct NVOC_CLASS_DEF __nvoc_class_def_GenericEngineApi;
#define __staticCast_GenericEngineApi(pThis) \
((pThis)->__nvoc_pbase_GenericEngineApi)
#ifdef __nvoc_generic_engine_h_disabled
#define __dynamicCast_GenericEngineApi(pThis) ((GenericEngineApi*)NULL)
#else //__nvoc_generic_engine_h_disabled
#define __dynamicCast_GenericEngineApi(pThis) \
((GenericEngineApi*)__nvoc_dynamicCast(staticCast((pThis), Dynamic), classInfo(GenericEngineApi)))
#endif //__nvoc_generic_engine_h_disabled
NV_STATUS __nvoc_objCreateDynamic_GenericEngineApi(GenericEngineApi**, Dynamic*, NvU32, va_list);
NV_STATUS __nvoc_objCreate_GenericEngineApi(GenericEngineApi**, Dynamic*, NvU32, struct CALL_CONTEXT * arg_pCallContext, struct RS_RES_ALLOC_PARAMS_INTERNAL * arg_pParams);
#define __objCreate_GenericEngineApi(ppNewObj, pParent, createFlags, arg_pCallContext, arg_pParams) \
__nvoc_objCreate_GenericEngineApi((ppNewObj), staticCast((pParent), Dynamic), (createFlags), arg_pCallContext, arg_pParams)
#define genapiMap(pGenericEngineApi, pCallContext, pParams, pCpuMapping) genapiMap_DISPATCH(pGenericEngineApi, pCallContext, pParams, pCpuMapping)
#define genapiGetMapAddrSpace(pGenericEngineApi, pCallContext, mapFlags, pAddrSpace) genapiGetMapAddrSpace_DISPATCH(pGenericEngineApi, pCallContext, mapFlags, pAddrSpace)
#define genapiControl(pGenericEngineApi, pCallContext, pParams) genapiControl_DISPATCH(pGenericEngineApi, pCallContext, pParams)
#define genapiShareCallback(pGpuResource, pInvokingClient, pParentRef, pSharePolicy) genapiShareCallback_DISPATCH(pGpuResource, pInvokingClient, pParentRef, pSharePolicy)
#define genapiUnmap(pGpuResource, pCallContext, pCpuMapping) genapiUnmap_DISPATCH(pGpuResource, pCallContext, pCpuMapping)
#define genapiGetMemInterMapParams(pRmResource, pParams) genapiGetMemInterMapParams_DISPATCH(pRmResource, pParams)
#define genapiGetMemoryMappingDescriptor(pRmResource, ppMemDesc) genapiGetMemoryMappingDescriptor_DISPATCH(pRmResource, ppMemDesc)
#define genapiGetInternalObjectHandle(pGpuResource) genapiGetInternalObjectHandle_DISPATCH(pGpuResource)
#define genapiControlFilter(pResource, pCallContext, pParams) genapiControlFilter_DISPATCH(pResource, pCallContext, pParams)
#define genapiAddAdditionalDependants(pClient, pResource, pReference) genapiAddAdditionalDependants_DISPATCH(pClient, pResource, pReference)
#define genapiGetRefCount(pResource) genapiGetRefCount_DISPATCH(pResource)
#define genapiCheckMemInterUnmap(pRmResource, bSubdeviceHandleProvided) genapiCheckMemInterUnmap_DISPATCH(pRmResource, bSubdeviceHandleProvided)
#define genapiMapTo(pResource, pParams) genapiMapTo_DISPATCH(pResource, pParams)
#define genapiControl_Prologue(pResource, pCallContext, pParams) genapiControl_Prologue_DISPATCH(pResource, pCallContext, pParams)
#define genapiGetRegBaseOffsetAndSize(pGpuResource, pGpu, pOffset, pSize) genapiGetRegBaseOffsetAndSize_DISPATCH(pGpuResource, pGpu, pOffset, pSize)
#define genapiCanCopy(pResource) genapiCanCopy_DISPATCH(pResource)
#define genapiInternalControlForward(pGpuResource, command, pParams, size) genapiInternalControlForward_DISPATCH(pGpuResource, command, pParams, size)
#define genapiPreDestruct(pResource) genapiPreDestruct_DISPATCH(pResource)
#define genapiUnmapFrom(pResource, pParams) genapiUnmapFrom_DISPATCH(pResource, pParams)
#define genapiControl_Epilogue(pResource, pCallContext, pParams) genapiControl_Epilogue_DISPATCH(pResource, pCallContext, pParams)
#define genapiControlLookup(pResource, pParams, ppEntry) genapiControlLookup_DISPATCH(pResource, pParams, ppEntry)
#define genapiAccessCallback(pResource, pInvokingClient, pAllocParams, accessRight) genapiAccessCallback_DISPATCH(pResource, pInvokingClient, pAllocParams, accessRight)
NV_STATUS genapiMap_IMPL(struct GenericEngineApi *pGenericEngineApi, struct CALL_CONTEXT *pCallContext, struct RS_CPU_MAP_PARAMS *pParams, struct RsCpuMapping *pCpuMapping);
static inline NV_STATUS genapiMap_DISPATCH(struct GenericEngineApi *pGenericEngineApi, struct CALL_CONTEXT *pCallContext, struct RS_CPU_MAP_PARAMS *pParams, struct RsCpuMapping *pCpuMapping) {
return pGenericEngineApi->__genapiMap__(pGenericEngineApi, pCallContext, pParams, pCpuMapping);
}
NV_STATUS genapiGetMapAddrSpace_IMPL(struct GenericEngineApi *pGenericEngineApi, struct CALL_CONTEXT *pCallContext, NvU32 mapFlags, NV_ADDRESS_SPACE *pAddrSpace);
static inline NV_STATUS genapiGetMapAddrSpace_DISPATCH(struct GenericEngineApi *pGenericEngineApi, struct CALL_CONTEXT *pCallContext, NvU32 mapFlags, NV_ADDRESS_SPACE *pAddrSpace) {
return pGenericEngineApi->__genapiGetMapAddrSpace__(pGenericEngineApi, pCallContext, mapFlags, pAddrSpace);
}
NV_STATUS genapiControl_IMPL(struct GenericEngineApi *pGenericEngineApi, struct CALL_CONTEXT *pCallContext, struct RS_RES_CONTROL_PARAMS_INTERNAL *pParams);
static inline NV_STATUS genapiControl_DISPATCH(struct GenericEngineApi *pGenericEngineApi, struct CALL_CONTEXT *pCallContext, struct RS_RES_CONTROL_PARAMS_INTERNAL *pParams) {
return pGenericEngineApi->__genapiControl__(pGenericEngineApi, pCallContext, pParams);
}
static inline NvBool genapiShareCallback_DISPATCH(struct GenericEngineApi *pGpuResource, struct RsClient *pInvokingClient, struct RsResourceRef *pParentRef, RS_SHARE_POLICY *pSharePolicy) {
return pGpuResource->__genapiShareCallback__(pGpuResource, pInvokingClient, pParentRef, pSharePolicy);
}
static inline NV_STATUS genapiUnmap_DISPATCH(struct GenericEngineApi *pGpuResource, struct CALL_CONTEXT *pCallContext, struct RsCpuMapping *pCpuMapping) {
return pGpuResource->__genapiUnmap__(pGpuResource, pCallContext, pCpuMapping);
}
static inline NV_STATUS genapiGetMemInterMapParams_DISPATCH(struct GenericEngineApi *pRmResource, RMRES_MEM_INTER_MAP_PARAMS *pParams) {
return pRmResource->__genapiGetMemInterMapParams__(pRmResource, pParams);
}
static inline NV_STATUS genapiGetMemoryMappingDescriptor_DISPATCH(struct GenericEngineApi *pRmResource, struct MEMORY_DESCRIPTOR **ppMemDesc) {
return pRmResource->__genapiGetMemoryMappingDescriptor__(pRmResource, ppMemDesc);
}
static inline NvHandle genapiGetInternalObjectHandle_DISPATCH(struct GenericEngineApi *pGpuResource) {
return pGpuResource->__genapiGetInternalObjectHandle__(pGpuResource);
}
static inline NV_STATUS genapiControlFilter_DISPATCH(struct GenericEngineApi *pResource, struct CALL_CONTEXT *pCallContext, struct RS_RES_CONTROL_PARAMS_INTERNAL *pParams) {
return pResource->__genapiControlFilter__(pResource, pCallContext, pParams);
}
static inline void genapiAddAdditionalDependants_DISPATCH(struct RsClient *pClient, struct GenericEngineApi *pResource, RsResourceRef *pReference) {
pResource->__genapiAddAdditionalDependants__(pClient, pResource, pReference);
}
static inline NvU32 genapiGetRefCount_DISPATCH(struct GenericEngineApi *pResource) {
return pResource->__genapiGetRefCount__(pResource);
}
static inline NV_STATUS genapiCheckMemInterUnmap_DISPATCH(struct GenericEngineApi *pRmResource, NvBool bSubdeviceHandleProvided) {
return pRmResource->__genapiCheckMemInterUnmap__(pRmResource, bSubdeviceHandleProvided);
}
static inline NV_STATUS genapiMapTo_DISPATCH(struct GenericEngineApi *pResource, RS_RES_MAP_TO_PARAMS *pParams) {
return pResource->__genapiMapTo__(pResource, pParams);
}
static inline NV_STATUS genapiControl_Prologue_DISPATCH(struct GenericEngineApi *pResource, struct CALL_CONTEXT *pCallContext, struct RS_RES_CONTROL_PARAMS_INTERNAL *pParams) {
return pResource->__genapiControl_Prologue__(pResource, pCallContext, pParams);
}
static inline NV_STATUS genapiGetRegBaseOffsetAndSize_DISPATCH(struct GenericEngineApi *pGpuResource, struct OBJGPU *pGpu, NvU32 *pOffset, NvU32 *pSize) {
return pGpuResource->__genapiGetRegBaseOffsetAndSize__(pGpuResource, pGpu, pOffset, pSize);
}
static inline NvBool genapiCanCopy_DISPATCH(struct GenericEngineApi *pResource) {
return pResource->__genapiCanCopy__(pResource);
}
static inline NV_STATUS genapiInternalControlForward_DISPATCH(struct GenericEngineApi *pGpuResource, NvU32 command, void *pParams, NvU32 size) {
return pGpuResource->__genapiInternalControlForward__(pGpuResource, command, pParams, size);
}
static inline void genapiPreDestruct_DISPATCH(struct GenericEngineApi *pResource) {
pResource->__genapiPreDestruct__(pResource);
}
static inline NV_STATUS genapiUnmapFrom_DISPATCH(struct GenericEngineApi *pResource, RS_RES_UNMAP_FROM_PARAMS *pParams) {
return pResource->__genapiUnmapFrom__(pResource, pParams);
}
static inline void genapiControl_Epilogue_DISPATCH(struct GenericEngineApi *pResource, struct CALL_CONTEXT *pCallContext, struct RS_RES_CONTROL_PARAMS_INTERNAL *pParams) {
pResource->__genapiControl_Epilogue__(pResource, pCallContext, pParams);
}
static inline NV_STATUS genapiControlLookup_DISPATCH(struct GenericEngineApi *pResource, struct RS_RES_CONTROL_PARAMS_INTERNAL *pParams, const struct NVOC_EXPORTED_METHOD_DEF **ppEntry) {
return pResource->__genapiControlLookup__(pResource, pParams, ppEntry);
}
static inline NvBool genapiAccessCallback_DISPATCH(struct GenericEngineApi *pResource, struct RsClient *pInvokingClient, void *pAllocParams, RsAccessRight accessRight) {
return pResource->__genapiAccessCallback__(pResource, pInvokingClient, pAllocParams, accessRight);
}
NV_STATUS genapiConstruct_IMPL(struct GenericEngineApi *arg_pGenericEngineApi, struct CALL_CONTEXT *arg_pCallContext, struct RS_RES_ALLOC_PARAMS_INTERNAL *arg_pParams);
#define __nvoc_genapiConstruct(arg_pGenericEngineApi, arg_pCallContext, arg_pParams) genapiConstruct_IMPL(arg_pGenericEngineApi, arg_pCallContext, arg_pParams)
void genapiDestruct_IMPL(struct GenericEngineApi *pGenericEngineApi);
#define __nvoc_genapiDestruct(pGenericEngineApi) genapiDestruct_IMPL(pGenericEngineApi)
#undef PRIVATE_FIELD
#endif // _GENERICENGINEAPI_H_
#ifdef __cplusplus
} // extern "C"
#endif
#endif // _G_GENERIC_ENGINE_NVOC_H_
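A hedged allocation sketch for the class declared above; the wrapper function, its parent pointer, and the allocation parameters are assumptions supplied by the caller's allocation path rather than anything defined in this header.

static NV_STATUS exampleCreateGenericEngine(Dynamic *pParent,
                                            struct CALL_CONTEXT *pCallContext,
                                            struct RS_RES_ALLOC_PARAMS_INTERNAL *pAllocParams,
                                            GenericEngineApi **ppGenApi)
{
    /* The __objCreate_GenericEngineApi() macro adds a staticCast of the parent;
     * calling the underlying function directly keeps the sketch simple. */
    NV_STATUS status = __nvoc_objCreate_GenericEngineApi(ppGenApi, pParent,
                                                         0 /* createFlags */,
                                                         pCallContext, pAllocParams);

    /* On success, genapiControl()/genapiMap() dispatch through the function
     * table installed by __nvoc_init_funcTable_GenericEngineApi(). */
    return status;
}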

@@ -0,0 +1,59 @@
/*
* SPDX-FileCopyrightText: Copyright (c) 2021-2022 NVIDIA CORPORATION & AFFILIATES. All rights reserved.
* SPDX-License-Identifier: MIT
*
* Permission is hereby granted, free of charge, to any person obtaining a
* copy of this software and associated documentation files (the "Software"),
* to deal in the Software without restriction, including without limitation
* the rights to use, copy, modify, merge, publish, distribute, sublicense,
* and/or sell copies of the Software, and to permit persons to whom the
* Software is furnished to do so, subject to the following conditions:
*
* The above copyright notice and this permission notice shall be included in
* all copies or substantial portions of the Software.
*
* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
* IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
* FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL
* THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
* LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
* FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER
* DEALINGS IN THE SOFTWARE.
*/
#include <core/core.h>
#include <gpu/gpu.h>
#include <gpu/eng_desc.h>
#include <g_allclasses.h>
const CLASSDESCRIPTOR *
gpuGetClassDescriptorList_T234D(POBJGPU pGpu, NvU32 *pNumClassDescriptors)
{
static const CLASSDESCRIPTOR halT234DClassDescriptorList[] = {
{ GF100_HDACODEC, ENG_HDACODEC },
{ NV01_MEMORY_SYNCPOINT, ENG_DMA },
{ NV04_DISPLAY_COMMON, ENG_KERNEL_DISPLAY },
{ NVC372_DISPLAY_SW, ENG_KERNEL_DISPLAY },
{ NVC670_DISPLAY, ENG_KERNEL_DISPLAY },
{ NVC671_DISP_SF_USER, ENG_KERNEL_DISPLAY },
{ NVC673_DISP_CAPABILITIES, ENG_KERNEL_DISPLAY },
{ NVC67A_CURSOR_IMM_CHANNEL_PIO, ENG_KERNEL_DISPLAY },
{ NVC67B_WINDOW_IMM_CHANNEL_DMA, ENG_KERNEL_DISPLAY },
{ NVC67D_CORE_CHANNEL_DMA, ENG_KERNEL_DISPLAY },
{ NVC67E_WINDOW_CHANNEL_DMA, ENG_KERNEL_DISPLAY },
{ NVC77F_ANY_CHANNEL_DMA, ENG_KERNEL_DISPLAY },
};
#define HALT234D_NUM_CLASS_DESCS (sizeof(halT234DClassDescriptorList) / sizeof(CLASSDESCRIPTOR))
#define HALT234D_NUM_CLASSES 16
ct_assert(NV0080_CTRL_GPU_CLASSLIST_MAX_SIZE >= HALT234D_NUM_CLASSES);
*pNumClassDescriptors = HALT234D_NUM_CLASS_DESCS;
return halT234DClassDescriptorList;
}
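A hypothetical consumer of the HAL routine above; it relies only on the returned table pointer and element count, and pGpu is assumed to be a valid POBJGPU.

static void exampleWalkT234DClassList(POBJGPU pGpu)
{
    NvU32 numClassDescs = 0;
    const CLASSDESCRIPTOR *pDescs =
        gpuGetClassDescriptorList_T234D(pGpu, &numClassDescs);

    for (NvU32 i = 0; i < numClassDescs; i++)
    {
        /* Each entry pairs an exported class number with the engine descriptor
         * that services it (e.g. NVC670_DISPLAY with ENG_KERNEL_DISPLAY). */
        (void)pDescs[i];
    }
}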

@@ -0,0 +1,154 @@
#define NVOC_GPU_DB_H_PRIVATE_ACCESS_ALLOWED
#include "nvoc/runtime.h"
#include "nvoc/rtti.h"
#include "nvtypes.h"
#include "nvport/nvport.h"
#include "nvport/inline/util_valist.h"
#include "utils/nvassert.h"
#include "g_gpu_db_nvoc.h"
#ifdef DEBUG
char __nvoc_class_id_uniqueness_check_0xcdd250 = 1;
#endif
extern const struct NVOC_CLASS_DEF __nvoc_class_def_GpuDb;
extern const struct NVOC_CLASS_DEF __nvoc_class_def_Object;
void __nvoc_init_GpuDb(GpuDb*);
void __nvoc_init_funcTable_GpuDb(GpuDb*);
NV_STATUS __nvoc_ctor_GpuDb(GpuDb*);
void __nvoc_init_dataField_GpuDb(GpuDb*);
void __nvoc_dtor_GpuDb(GpuDb*);
extern const struct NVOC_EXPORT_INFO __nvoc_export_info_GpuDb;
static const struct NVOC_RTTI __nvoc_rtti_GpuDb_GpuDb = {
/*pClassDef=*/ &__nvoc_class_def_GpuDb,
/*dtor=*/ (NVOC_DYNAMIC_DTOR) &__nvoc_dtor_GpuDb,
/*offset=*/ 0,
};
static const struct NVOC_RTTI __nvoc_rtti_GpuDb_Object = {
/*pClassDef=*/ &__nvoc_class_def_Object,
/*dtor=*/ &__nvoc_destructFromBase,
/*offset=*/ NV_OFFSETOF(GpuDb, __nvoc_base_Object),
};
static const struct NVOC_CASTINFO __nvoc_castinfo_GpuDb = {
/*numRelatives=*/ 2,
/*relatives=*/ {
&__nvoc_rtti_GpuDb_GpuDb,
&__nvoc_rtti_GpuDb_Object,
},
};
const struct NVOC_CLASS_DEF __nvoc_class_def_GpuDb =
{
/*classInfo=*/ {
/*size=*/ sizeof(GpuDb),
/*classId=*/ classId(GpuDb),
/*providerId=*/ &__nvoc_rtti_provider,
#if NV_PRINTF_STRINGS_ALLOWED
/*name=*/ "GpuDb",
#endif
},
/*objCreatefn=*/ (NVOC_DYNAMIC_OBJ_CREATE) &__nvoc_objCreateDynamic_GpuDb,
/*pCastInfo=*/ &__nvoc_castinfo_GpuDb,
/*pExportInfo=*/ &__nvoc_export_info_GpuDb
};
const struct NVOC_EXPORT_INFO __nvoc_export_info_GpuDb =
{
/*numEntries=*/ 0,
/*pExportEntries=*/ 0
};
void __nvoc_dtor_Object(Object*);
void __nvoc_dtor_GpuDb(GpuDb *pThis) {
__nvoc_gpudbDestruct(pThis);
__nvoc_dtor_Object(&pThis->__nvoc_base_Object);
PORT_UNREFERENCED_VARIABLE(pThis);
}
void __nvoc_init_dataField_GpuDb(GpuDb *pThis) {
PORT_UNREFERENCED_VARIABLE(pThis);
}
NV_STATUS __nvoc_ctor_Object(Object* );
NV_STATUS __nvoc_ctor_GpuDb(GpuDb *pThis) {
NV_STATUS status = NV_OK;
status = __nvoc_ctor_Object(&pThis->__nvoc_base_Object);
if (status != NV_OK) goto __nvoc_ctor_GpuDb_fail_Object;
__nvoc_init_dataField_GpuDb(pThis);
status = __nvoc_gpudbConstruct(pThis);
if (status != NV_OK) goto __nvoc_ctor_GpuDb_fail__init;
goto __nvoc_ctor_GpuDb_exit; // Success
__nvoc_ctor_GpuDb_fail__init:
__nvoc_dtor_Object(&pThis->__nvoc_base_Object);
__nvoc_ctor_GpuDb_fail_Object:
__nvoc_ctor_GpuDb_exit:
return status;
}
static void __nvoc_init_funcTable_GpuDb_1(GpuDb *pThis) {
PORT_UNREFERENCED_VARIABLE(pThis);
}
void __nvoc_init_funcTable_GpuDb(GpuDb *pThis) {
__nvoc_init_funcTable_GpuDb_1(pThis);
}
void __nvoc_init_Object(Object*);
void __nvoc_init_GpuDb(GpuDb *pThis) {
pThis->__nvoc_pbase_GpuDb = pThis;
pThis->__nvoc_pbase_Object = &pThis->__nvoc_base_Object;
__nvoc_init_Object(&pThis->__nvoc_base_Object);
__nvoc_init_funcTable_GpuDb(pThis);
}
NV_STATUS __nvoc_objCreate_GpuDb(GpuDb **ppThis, Dynamic *pParent, NvU32 createFlags) {
NV_STATUS status;
Object *pParentObj;
GpuDb *pThis;
pThis = portMemAllocNonPaged(sizeof(GpuDb));
if (pThis == NULL) return NV_ERR_NO_MEMORY;
portMemSet(pThis, 0, sizeof(GpuDb));
__nvoc_initRtti(staticCast(pThis, Dynamic), &__nvoc_class_def_GpuDb);
if (pParent != NULL && !(createFlags & NVOC_OBJ_CREATE_FLAGS_PARENT_HALSPEC_ONLY))
{
pParentObj = dynamicCast(pParent, Object);
objAddChild(pParentObj, &pThis->__nvoc_base_Object);
}
else
{
pThis->__nvoc_base_Object.pParent = NULL;
}
__nvoc_init_GpuDb(pThis);
status = __nvoc_ctor_GpuDb(pThis);
if (status != NV_OK) goto __nvoc_objCreate_GpuDb_cleanup;
*ppThis = pThis;
return NV_OK;
__nvoc_objCreate_GpuDb_cleanup:
// do not call destructors here since the constructor already called them
portMemFree(pThis);
return status;
}
NV_STATUS __nvoc_objCreateDynamic_GpuDb(GpuDb **ppThis, Dynamic *pParent, NvU32 createFlags, va_list args) {
NV_STATUS status;
status = __nvoc_objCreate_GpuDb(ppThis, pParent, createFlags);
return status;
}
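Unlike GenericEngineApi, GpuDb's constructor takes no allocation parameters, so __nvoc_objCreateDynamic_GpuDb ignores the va_list entirely and forwards to __nvoc_objCreate_GpuDb. A one-line creation sketch (the wrapper function and its parent pointer are assumptions):

static NV_STATUS exampleCreateGpuDb(Dynamic *pParent, GpuDb **ppGpuDb)
{
    return __nvoc_objCreate_GpuDb(ppGpuDb, pParent, 0 /* createFlags */);
}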

@@ -0,0 +1,154 @@
#ifndef _G_GPU_DB_NVOC_H_
#define _G_GPU_DB_NVOC_H_
#include "nvoc/runtime.h"
#ifdef __cplusplus
extern "C" {
#endif
/*
* SPDX-FileCopyrightText: Copyright (c) 2019-2021 NVIDIA CORPORATION & AFFILIATES. All rights reserved.
* SPDX-License-Identifier: MIT
*
* Permission is hereby granted, free of charge, to any person obtaining a
* copy of this software and associated documentation files (the "Software"),
* to deal in the Software without restriction, including without limitation
* the rights to use, copy, modify, merge, publish, distribute, sublicense,
* and/or sell copies of the Software, and to permit persons to whom the
* Software is furnished to do so, subject to the following conditions:
*
* The above copyright notice and this permission notice shall be included in
* all copies or substantial portions of the Software.
*
* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
* IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
* FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL
* THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
* LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
* FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER
* DEALINGS IN THE SOFTWARE.
*/
#include "g_gpu_db_nvoc.h"
#ifndef GPU_DB_H
#define GPU_DB_H
#include "core/core.h"
#include "containers/list.h"
#include "gpu/gpu_uuid.h"
typedef struct NBADDR NBADDR;
// ****************************************************************************
// Type Definitions
// ****************************************************************************
//
// The GPU database object is used to encapsulate the GPUINFO
//
/*!
* @brief Compute policy data for a GPU
* Saved policy information for a GPU that can be retrieved later
*/
typedef struct GPU_COMPUTE_POLICY_INFO
{
//
// Timeslice config for channels/TSGs on a runlist. The timeslice configs
// are restricted to four levels: default, short, medium, and long.
//
NvU32 timeslice;
// Future policies to be added here
} GPU_COMPUTE_POLICY_INFO;
typedef struct
{
NvU32 domain;
NvU8 bus;
NvU8 device;
NvU8 function;
NvBool bValid;
} PCI_PORT_INFO;
#define GPUDB_CLK_PROP_TOP_POLS_COUNT 1
/*!
* @brief Clock Propagation Topology Policies control data
*/
typedef struct
{
NvU8 chosenIdx[GPUDB_CLK_PROP_TOP_POLS_COUNT];
} GPU_CLK_PROP_TOP_POLS_CONTROL;
typedef struct
{
NvU8 uuid[RM_SHA1_GID_SIZE];
PCI_PORT_INFO pciPortInfo;
PCI_PORT_INFO upstreamPciPortInfo;
GPU_COMPUTE_POLICY_INFO policyInfo;
NvBool bShutdownState;
GPU_CLK_PROP_TOP_POLS_CONTROL clkPropTopPolsControl;
} GPU_INFO_LIST_NODE, *PGPU_INFO_LIST_NODE;
MAKE_LIST(GpuInfoList, GPU_INFO_LIST_NODE);
#ifdef NVOC_GPU_DB_H_PRIVATE_ACCESS_ALLOWED
#define PRIVATE_FIELD(x) x
#else
#define PRIVATE_FIELD(x) NVOC_PRIVATE_FIELD(x)
#endif
struct GpuDb {
const struct NVOC_RTTI *__nvoc_rtti;
struct Object __nvoc_base_Object;
struct Object *__nvoc_pbase_Object;
struct GpuDb *__nvoc_pbase_GpuDb;
GpuInfoList gpuList;
PORT_MUTEX *pLock;
};
#ifndef __NVOC_CLASS_GpuDb_TYPEDEF__
#define __NVOC_CLASS_GpuDb_TYPEDEF__
typedef struct GpuDb GpuDb;
#endif /* __NVOC_CLASS_GpuDb_TYPEDEF__ */
#ifndef __nvoc_class_id_GpuDb
#define __nvoc_class_id_GpuDb 0xcdd250
#endif /* __nvoc_class_id_GpuDb */
extern const struct NVOC_CLASS_DEF __nvoc_class_def_GpuDb;
#define __staticCast_GpuDb(pThis) \
((pThis)->__nvoc_pbase_GpuDb)
#ifdef __nvoc_gpu_db_h_disabled
#define __dynamicCast_GpuDb(pThis) ((GpuDb*)NULL)
#else //__nvoc_gpu_db_h_disabled
#define __dynamicCast_GpuDb(pThis) \
((GpuDb*)__nvoc_dynamicCast(staticCast((pThis), Dynamic), classInfo(GpuDb)))
#endif //__nvoc_gpu_db_h_disabled
NV_STATUS __nvoc_objCreateDynamic_GpuDb(GpuDb**, Dynamic*, NvU32, va_list);
NV_STATUS __nvoc_objCreate_GpuDb(GpuDb**, Dynamic*, NvU32);
#define __objCreate_GpuDb(ppNewObj, pParent, createFlags) \
__nvoc_objCreate_GpuDb((ppNewObj), staticCast((pParent), Dynamic), (createFlags))
NV_STATUS gpudbConstruct_IMPL(struct GpuDb *arg_pGpuDb);
#define __nvoc_gpudbConstruct(arg_pGpuDb) gpudbConstruct_IMPL(arg_pGpuDb)
void gpudbDestruct_IMPL(struct GpuDb *pGpuDb);
#define __nvoc_gpudbDestruct(pGpuDb) gpudbDestruct_IMPL(pGpuDb)
#undef PRIVATE_FIELD
NV_STATUS gpudbRegisterGpu(const NvU8 *pUuid, const NBADDR *pUpstreamPortPciInfo, NvU64 pciInfo);
NV_STATUS gpudbSetGpuComputePolicyConfig(const NvU8 *uuid, NvU32 policyType, GPU_COMPUTE_POLICY_INFO *policyInfo);
NV_STATUS gpudbGetGpuComputePolicyConfigs(const NvU8 *uuid, GPU_COMPUTE_POLICY_INFO *policyInfo);
NV_STATUS gpudbSetClockPoliciesControl(const NvU8 *uuid, GPU_CLK_PROP_TOP_POLS_CONTROL *pControl);
NV_STATUS gpudbGetClockPoliciesControl(const NvU8 *uuid, GPU_CLK_PROP_TOP_POLS_CONTROL *pControl);
NV_STATUS gpudbSetShutdownState(const NvU8 *pUuid);
#endif // GPU_DB_H
#ifdef __cplusplus
} // extern "C"
#endif
#endif // _G_GPU_DB_NVOC_H_
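
A hedged usage sketch of the creation and cast helpers declared above; the caller function, the parent object, and the createFlags value (0) are assumptions of this sketch, not part of the change.

#include "g_gpu_db_nvoc.h"

// Illustrative sketch only -- not part of the change above. Creates a GpuDb
// under an existing NVOC parent and exercises the generated cast helper.
static NV_STATUS exampleCreateGpuDb(Dynamic *pParent, GpuDb **ppGpuDb)
{
    // Allocates the object, wires up RTTI, links it under pParent, and runs
    // __nvoc_ctor_GpuDb(); on failure the partially built object is freed
    // inside __nvoc_objCreate_GpuDb() and *ppGpuDb is left unset.
    NV_STATUS status = __nvoc_objCreate_GpuDb(ppGpuDb, pParent, 0);
    if (status != NV_OK)
        return status;

    // Purely demonstrative: the dynamic cast walks the RTTI cast info and
    // returns NULL only if the object is not a GpuDb, which cannot happen
    // on this path.
    if (__dynamicCast_GpuDb(*ppGpuDb) == NULL)
        return NV_ERR_INVALID_STATE;

    return NV_OK;
}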


@@ -0,0 +1,148 @@
#define NVOC_GPU_GROUP_H_PRIVATE_ACCESS_ALLOWED
#include "nvoc/runtime.h"
#include "nvoc/rtti.h"
#include "nvtypes.h"
#include "nvport/nvport.h"
#include "nvport/inline/util_valist.h"
#include "utils/nvassert.h"
#include "g_gpu_group_nvoc.h"
#ifdef DEBUG
char __nvoc_class_id_uniqueness_check_0xe40531 = 1;
#endif
extern const struct NVOC_CLASS_DEF __nvoc_class_def_OBJGPUGRP;
extern const struct NVOC_CLASS_DEF __nvoc_class_def_Object;
void __nvoc_init_OBJGPUGRP(OBJGPUGRP*);
void __nvoc_init_funcTable_OBJGPUGRP(OBJGPUGRP*);
NV_STATUS __nvoc_ctor_OBJGPUGRP(OBJGPUGRP*);
void __nvoc_init_dataField_OBJGPUGRP(OBJGPUGRP*);
void __nvoc_dtor_OBJGPUGRP(OBJGPUGRP*);
extern const struct NVOC_EXPORT_INFO __nvoc_export_info_OBJGPUGRP;
static const struct NVOC_RTTI __nvoc_rtti_OBJGPUGRP_OBJGPUGRP = {
/*pClassDef=*/ &__nvoc_class_def_OBJGPUGRP,
/*dtor=*/ (NVOC_DYNAMIC_DTOR) &__nvoc_dtor_OBJGPUGRP,
/*offset=*/ 0,
};
static const struct NVOC_RTTI __nvoc_rtti_OBJGPUGRP_Object = {
/*pClassDef=*/ &__nvoc_class_def_Object,
/*dtor=*/ &__nvoc_destructFromBase,
/*offset=*/ NV_OFFSETOF(OBJGPUGRP, __nvoc_base_Object),
};
static const struct NVOC_CASTINFO __nvoc_castinfo_OBJGPUGRP = {
/*numRelatives=*/ 2,
/*relatives=*/ {
&__nvoc_rtti_OBJGPUGRP_OBJGPUGRP,
&__nvoc_rtti_OBJGPUGRP_Object,
},
};
const struct NVOC_CLASS_DEF __nvoc_class_def_OBJGPUGRP =
{
/*classInfo=*/ {
/*size=*/ sizeof(OBJGPUGRP),
/*classId=*/ classId(OBJGPUGRP),
/*providerId=*/ &__nvoc_rtti_provider,
#if NV_PRINTF_STRINGS_ALLOWED
/*name=*/ "OBJGPUGRP",
#endif
},
/*objCreatefn=*/ (NVOC_DYNAMIC_OBJ_CREATE) &__nvoc_objCreateDynamic_OBJGPUGRP,
/*pCastInfo=*/ &__nvoc_castinfo_OBJGPUGRP,
/*pExportInfo=*/ &__nvoc_export_info_OBJGPUGRP
};
const struct NVOC_EXPORT_INFO __nvoc_export_info_OBJGPUGRP =
{
/*numEntries=*/ 0,
/*pExportEntries=*/ 0
};
void __nvoc_dtor_Object(Object*);
void __nvoc_dtor_OBJGPUGRP(OBJGPUGRP *pThis) {
__nvoc_dtor_Object(&pThis->__nvoc_base_Object);
PORT_UNREFERENCED_VARIABLE(pThis);
}
void __nvoc_init_dataField_OBJGPUGRP(OBJGPUGRP *pThis) {
PORT_UNREFERENCED_VARIABLE(pThis);
}
NV_STATUS __nvoc_ctor_Object(Object* );
NV_STATUS __nvoc_ctor_OBJGPUGRP(OBJGPUGRP *pThis) {
NV_STATUS status = NV_OK;
status = __nvoc_ctor_Object(&pThis->__nvoc_base_Object);
if (status != NV_OK) goto __nvoc_ctor_OBJGPUGRP_fail_Object;
__nvoc_init_dataField_OBJGPUGRP(pThis);
goto __nvoc_ctor_OBJGPUGRP_exit; // Success
__nvoc_ctor_OBJGPUGRP_fail_Object:
__nvoc_ctor_OBJGPUGRP_exit:
return status;
}
static void __nvoc_init_funcTable_OBJGPUGRP_1(OBJGPUGRP *pThis) {
PORT_UNREFERENCED_VARIABLE(pThis);
}
void __nvoc_init_funcTable_OBJGPUGRP(OBJGPUGRP *pThis) {
__nvoc_init_funcTable_OBJGPUGRP_1(pThis);
}
void __nvoc_init_Object(Object*);
void __nvoc_init_OBJGPUGRP(OBJGPUGRP *pThis) {
pThis->__nvoc_pbase_OBJGPUGRP = pThis;
pThis->__nvoc_pbase_Object = &pThis->__nvoc_base_Object;
__nvoc_init_Object(&pThis->__nvoc_base_Object);
__nvoc_init_funcTable_OBJGPUGRP(pThis);
}
NV_STATUS __nvoc_objCreate_OBJGPUGRP(OBJGPUGRP **ppThis, Dynamic *pParent, NvU32 createFlags) {
NV_STATUS status;
Object *pParentObj;
OBJGPUGRP *pThis;
pThis = portMemAllocNonPaged(sizeof(OBJGPUGRP));
if (pThis == NULL) return NV_ERR_NO_MEMORY;
portMemSet(pThis, 0, sizeof(OBJGPUGRP));
__nvoc_initRtti(staticCast(pThis, Dynamic), &__nvoc_class_def_OBJGPUGRP);
if (pParent != NULL && !(createFlags & NVOC_OBJ_CREATE_FLAGS_PARENT_HALSPEC_ONLY))
{
pParentObj = dynamicCast(pParent, Object);
objAddChild(pParentObj, &pThis->__nvoc_base_Object);
}
else
{
pThis->__nvoc_base_Object.pParent = NULL;
}
__nvoc_init_OBJGPUGRP(pThis);
status = __nvoc_ctor_OBJGPUGRP(pThis);
if (status != NV_OK) goto __nvoc_objCreate_OBJGPUGRP_cleanup;
*ppThis = pThis;
return NV_OK;
__nvoc_objCreate_OBJGPUGRP_cleanup:
// do not call destructors here since the constructor already called them
portMemFree(pThis);
return status;
}
NV_STATUS __nvoc_objCreateDynamic_OBJGPUGRP(OBJGPUGRP **ppThis, Dynamic *pParent, NvU32 createFlags, va_list args) {
NV_STATUS status;
status = __nvoc_objCreate_OBJGPUGRP(ppThis, pParent, createFlags);
return status;
}


@@ -0,0 +1,308 @@
#ifndef _G_GPU_GROUP_NVOC_H_
#define _G_GPU_GROUP_NVOC_H_
#include "nvoc/runtime.h"
#ifdef __cplusplus
extern "C" {
#endif
/*
* SPDX-FileCopyrightText: Copyright (c) 2014-2020 NVIDIA CORPORATION & AFFILIATES. All rights reserved.
* SPDX-License-Identifier: MIT
*
* Permission is hereby granted, free of charge, to any person obtaining a
* copy of this software and associated documentation files (the "Software"),
* to deal in the Software without restriction, including without limitation
* the rights to use, copy, modify, merge, publish, distribute, sublicense,
* and/or sell copies of the Software, and to permit persons to whom the
* Software is furnished to do so, subject to the following conditions:
*
* The above copyright notice and this permission notice shall be included in
* all copies or substantial portions of the Software.
*
* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
* IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
* FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL
* THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
* LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
* FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER
* DEALINGS IN THE SOFTWARE.
*/
#include "g_gpu_group_nvoc.h"
#ifndef GPU_GROUP_H
#define GPU_GROUP_H
/**************** Resource Manager Defines and Structures ******************\
* *
* Defines and structures used for GPUGRP Object. *
* *
\***************************************************************************/
#include "core/core.h"
#include "nvoc/object.h"
#include "nvlimits.h"
struct OBJVASPACE;
struct OBJGPU;
/*!
* @brief Specialization of @ref FOR_EACH_INDEX_IN_MASK for looping
* over each GPU in an instance bitmask and processing the GPU in
* unicast mode.
*
* @note This macro is constructed to handle 'continue' and 'break'
* statements but not 'return.' Do NOT return directly from the loop -
* use status variable and 'break' to safely abort.
*
* @param[in] maskWidth bit-width of the mask (allowed: 8, 16, 32, 64)
* @param[in,out] pGpu Local GPU variable to use.
* @param[in] mask GPU instance bitmask.
*/
#define FOR_EACH_GPU_IN_MASK_UC(maskWidth, pSys, pGpu, mask) \
{ \
NvU32 gpuInstance; \
NvBool bOrigBcState = NV_FALSE; \
NvBool bEntryBcState = NV_FALSE; \
OBJGPU *pEntryGpu = pGpu; \
pGpu = NULL; \
if (pEntryGpu != NULL) \
{ \
bEntryBcState = gpumgrGetBcEnabledStatus(pEntryGpu); \
} \
FOR_EACH_INDEX_IN_MASK(maskWidth, gpuInstance, mask) \
{ \
if (NULL != pGpu) /* continue */ \
{ \
gpumgrSetBcEnabledStatus(pGpu, bOrigBcState); \
} \
pGpu = gpumgrGetGpu(gpuInstance); \
if (pGpu == NULL) \
{ /* We should never hit this assert */ \
NV_ASSERT(0); /* But it occurs very rarely */ \
continue; /* It needs to be debugged */ \
} \
bOrigBcState = gpumgrGetBcEnabledStatus(pGpu); \
gpumgrSetBcEnabledStatus(pGpu, NV_FALSE); \

#define FOR_EACH_GPU_IN_MASK_UC_END \
} \
FOR_EACH_INDEX_IN_MASK_END \
if (NULL != pGpu) /* break */ \
{ \
gpumgrSetBcEnabledStatus(pGpu, bOrigBcState); \
pGpu = NULL; \
} \
if (pEntryGpu != NULL) \
{ \
NV_ASSERT(bEntryBcState == gpumgrGetBcEnabledStatus(pEntryGpu));\
pGpu = pEntryGpu; \
} \
}
typedef struct _def_vid_link_node
{
/*!
* GPU instance for this node
*/
NvU32 gpuInstance;
/*!
* DrPort that receives data from Child GPU
*/
NvU32 ParentDrPort;
/*!
* DrPort that sources data to a Parent GPU
*/
NvU32 ChildDrPort;
} SLILINKNODE;
typedef struct OBJGPUGRP *POBJGPUGRP;
#ifndef __NVOC_CLASS_OBJGPUGRP_TYPEDEF__
#define __NVOC_CLASS_OBJGPUGRP_TYPEDEF__
typedef struct OBJGPUGRP OBJGPUGRP;
#endif /* __NVOC_CLASS_OBJGPUGRP_TYPEDEF__ */
#ifndef __nvoc_class_id_OBJGPUGRP
#define __nvoc_class_id_OBJGPUGRP 0xe40531
#endif /* __nvoc_class_id_OBJGPUGRP */
#ifdef NVOC_GPU_GROUP_H_PRIVATE_ACCESS_ALLOWED
#define PRIVATE_FIELD(x) x
#else
#define PRIVATE_FIELD(x) NVOC_PRIVATE_FIELD(x)
#endif
struct OBJGPUGRP {
const struct NVOC_RTTI *__nvoc_rtti;
struct Object __nvoc_base_Object;
struct Object *__nvoc_pbase_Object;
struct OBJGPUGRP *__nvoc_pbase_OBJGPUGRP;
NvU32 gpuMask;
NvU32 gpuSliLinkMask;
NvU32 linkingGpuMask;
NvU32 attachedGpuMaskAtLinking;
SLILINKNODE SliLinkOrder[8];
NvU32 ConnectionCount;
NvU32 flags;
NvU32 displayFlags;
NvBool bcEnabled;
struct OBJGPU *parentGpu;
struct OBJVASPACE *pGlobalVASpace;
};
#ifndef __NVOC_CLASS_OBJGPUGRP_TYPEDEF__
#define __NVOC_CLASS_OBJGPUGRP_TYPEDEF__
typedef struct OBJGPUGRP OBJGPUGRP;
#endif /* __NVOC_CLASS_OBJGPUGRP_TYPEDEF__ */
#ifndef __nvoc_class_id_OBJGPUGRP
#define __nvoc_class_id_OBJGPUGRP 0xe40531
#endif /* __nvoc_class_id_OBJGPUGRP */
extern const struct NVOC_CLASS_DEF __nvoc_class_def_OBJGPUGRP;
#define __staticCast_OBJGPUGRP(pThis) \
((pThis)->__nvoc_pbase_OBJGPUGRP)
#ifdef __nvoc_gpu_group_h_disabled
#define __dynamicCast_OBJGPUGRP(pThis) ((OBJGPUGRP*)NULL)
#else //__nvoc_gpu_group_h_disabled
#define __dynamicCast_OBJGPUGRP(pThis) \
((OBJGPUGRP*)__nvoc_dynamicCast(staticCast((pThis), Dynamic), classInfo(OBJGPUGRP)))
#endif //__nvoc_gpu_group_h_disabled
NV_STATUS __nvoc_objCreateDynamic_OBJGPUGRP(OBJGPUGRP**, Dynamic*, NvU32, va_list);
NV_STATUS __nvoc_objCreate_OBJGPUGRP(OBJGPUGRP**, Dynamic*, NvU32);
#define __objCreate_OBJGPUGRP(ppNewObj, pParent, createFlags) \
__nvoc_objCreate_OBJGPUGRP((ppNewObj), staticCast((pParent), Dynamic), (createFlags))
NV_STATUS gpugrpCreate_IMPL(struct OBJGPUGRP *pGpuGrp, NvU32 gpuMask);
#ifdef __nvoc_gpu_group_h_disabled
static inline NV_STATUS gpugrpCreate(struct OBJGPUGRP *pGpuGrp, NvU32 gpuMask) {
NV_ASSERT_FAILED_PRECOMP("OBJGPUGRP was disabled!");
return NV_ERR_NOT_SUPPORTED;
}
#else //__nvoc_gpu_group_h_disabled
#define gpugrpCreate(pGpuGrp, gpuMask) gpugrpCreate_IMPL(pGpuGrp, gpuMask)
#endif //__nvoc_gpu_group_h_disabled
NV_STATUS gpugrpDestroy_IMPL(struct OBJGPUGRP *pGpuGrp);
#ifdef __nvoc_gpu_group_h_disabled
static inline NV_STATUS gpugrpDestroy(struct OBJGPUGRP *pGpuGrp) {
NV_ASSERT_FAILED_PRECOMP("OBJGPUGRP was disabled!");
return NV_ERR_NOT_SUPPORTED;
}
#else //__nvoc_gpu_group_h_disabled
#define gpugrpDestroy(pGpuGrp) gpugrpDestroy_IMPL(pGpuGrp)
#endif //__nvoc_gpu_group_h_disabled
NvU32 gpugrpGetGpuMask_IMPL(struct OBJGPUGRP *pGpuGrp);
#ifdef __nvoc_gpu_group_h_disabled
static inline NvU32 gpugrpGetGpuMask(struct OBJGPUGRP *pGpuGrp) {
NV_ASSERT_FAILED_PRECOMP("OBJGPUGRP was disabled!");
return 0;
}
#else //__nvoc_gpu_group_h_disabled
#define gpugrpGetGpuMask(pGpuGrp) gpugrpGetGpuMask_IMPL(pGpuGrp)
#endif //__nvoc_gpu_group_h_disabled
void gpugrpSetGpuMask_IMPL(struct OBJGPUGRP *pGpuGrp, NvU32 gpuMask);
#ifdef __nvoc_gpu_group_h_disabled
static inline void gpugrpSetGpuMask(struct OBJGPUGRP *pGpuGrp, NvU32 gpuMask) {
NV_ASSERT_FAILED_PRECOMP("OBJGPUGRP was disabled!");
}
#else //__nvoc_gpu_group_h_disabled
#define gpugrpSetGpuMask(pGpuGrp, gpuMask) gpugrpSetGpuMask_IMPL(pGpuGrp, gpuMask)
#endif //__nvoc_gpu_group_h_disabled
NvBool gpugrpGetBcEnabledState_IMPL(struct OBJGPUGRP *pGpuGrp);
#ifdef __nvoc_gpu_group_h_disabled
static inline NvBool gpugrpGetBcEnabledState(struct OBJGPUGRP *pGpuGrp) {
NV_ASSERT_FAILED_PRECOMP("OBJGPUGRP was disabled!");
return NV_FALSE;
}
#else //__nvoc_gpu_group_h_disabled
#define gpugrpGetBcEnabledState(pGpuGrp) gpugrpGetBcEnabledState_IMPL(pGpuGrp)
#endif //__nvoc_gpu_group_h_disabled
void gpugrpSetBcEnabledState_IMPL(struct OBJGPUGRP *pGpuGrp, NvBool bcState);
#ifdef __nvoc_gpu_group_h_disabled
static inline void gpugrpSetBcEnabledState(struct OBJGPUGRP *pGpuGrp, NvBool bcState) {
NV_ASSERT_FAILED_PRECOMP("OBJGPUGRP was disabled!");
}
#else //__nvoc_gpu_group_h_disabled
#define gpugrpSetBcEnabledState(pGpuGrp, bcState) gpugrpSetBcEnabledState_IMPL(pGpuGrp, bcState)
#endif //__nvoc_gpu_group_h_disabled
void gpugrpSetParentGpu_IMPL(struct OBJGPUGRP *pGpuGrp, struct OBJGPU *pParentGpu);
#ifdef __nvoc_gpu_group_h_disabled
static inline void gpugrpSetParentGpu(struct OBJGPUGRP *pGpuGrp, struct OBJGPU *pParentGpu) {
NV_ASSERT_FAILED_PRECOMP("OBJGPUGRP was disabled!");
}
#else //__nvoc_gpu_group_h_disabled
#define gpugrpSetParentGpu(pGpuGrp, pParentGpu) gpugrpSetParentGpu_IMPL(pGpuGrp, pParentGpu)
#endif //__nvoc_gpu_group_h_disabled
struct OBJGPU *gpugrpGetParentGpu_IMPL(struct OBJGPUGRP *pGpuGrp);
#ifdef __nvoc_gpu_group_h_disabled
static inline struct OBJGPU *gpugrpGetParentGpu(struct OBJGPUGRP *pGpuGrp) {
NV_ASSERT_FAILED_PRECOMP("OBJGPUGRP was disabled!");
return NULL;
}
#else //__nvoc_gpu_group_h_disabled
#define gpugrpGetParentGpu(pGpuGrp) gpugrpGetParentGpu_IMPL(pGpuGrp)
#endif //__nvoc_gpu_group_h_disabled
NV_STATUS gpugrpCreateGlobalVASpace_IMPL(struct OBJGPUGRP *pGpuGrp, struct OBJGPU *pGpu, NvU32 vaspaceClass, NvU64 vaStart, NvU64 vaEnd, NvU32 vaspaceFlags, struct OBJVASPACE **ppGlobalVAS);
#ifdef __nvoc_gpu_group_h_disabled
static inline NV_STATUS gpugrpCreateGlobalVASpace(struct OBJGPUGRP *pGpuGrp, struct OBJGPU *pGpu, NvU32 vaspaceClass, NvU64 vaStart, NvU64 vaEnd, NvU32 vaspaceFlags, struct OBJVASPACE **ppGlobalVAS) {
NV_ASSERT_FAILED_PRECOMP("OBJGPUGRP was disabled!");
return NV_ERR_NOT_SUPPORTED;
}
#else //__nvoc_gpu_group_h_disabled
#define gpugrpCreateGlobalVASpace(pGpuGrp, pGpu, vaspaceClass, vaStart, vaEnd, vaspaceFlags, ppGlobalVAS) gpugrpCreateGlobalVASpace_IMPL(pGpuGrp, pGpu, vaspaceClass, vaStart, vaEnd, vaspaceFlags, ppGlobalVAS)
#endif //__nvoc_gpu_group_h_disabled
NV_STATUS gpugrpDestroyGlobalVASpace_IMPL(struct OBJGPUGRP *pGpuGrp, struct OBJGPU *pGpu);
#ifdef __nvoc_gpu_group_h_disabled
static inline NV_STATUS gpugrpDestroyGlobalVASpace(struct OBJGPUGRP *pGpuGrp, struct OBJGPU *pGpu) {
NV_ASSERT_FAILED_PRECOMP("OBJGPUGRP was disabled!");
return NV_ERR_NOT_SUPPORTED;
}
#else //__nvoc_gpu_group_h_disabled
#define gpugrpDestroyGlobalVASpace(pGpuGrp, pGpu) gpugrpDestroyGlobalVASpace_IMPL(pGpuGrp, pGpu)
#endif //__nvoc_gpu_group_h_disabled
NV_STATUS gpugrpGetGlobalVASpace_IMPL(struct OBJGPUGRP *pGpuGrp, struct OBJVASPACE **ppGlobalVAS);
#ifdef __nvoc_gpu_group_h_disabled
static inline NV_STATUS gpugrpGetGlobalVASpace(struct OBJGPUGRP *pGpuGrp, struct OBJVASPACE **ppGlobalVAS) {
NV_ASSERT_FAILED_PRECOMP("OBJGPUGRP was disabled!");
return NV_ERR_NOT_SUPPORTED;
}
#else //__nvoc_gpu_group_h_disabled
#define gpugrpGetGlobalVASpace(pGpuGrp, ppGlobalVAS) gpugrpGetGlobalVASpace_IMPL(pGpuGrp, ppGlobalVAS)
#endif //__nvoc_gpu_group_h_disabled
NV_STATUS gpugrpGetGpuFromSubDeviceInstance_IMPL(struct OBJGPUGRP *pGpuGrp, NvU32 subDeviceInst, struct OBJGPU **ppGpu);
#ifdef __nvoc_gpu_group_h_disabled
static inline NV_STATUS gpugrpGetGpuFromSubDeviceInstance(struct OBJGPUGRP *pGpuGrp, NvU32 subDeviceInst, struct OBJGPU **ppGpu) {
NV_ASSERT_FAILED_PRECOMP("OBJGPUGRP was disabled!");
return NV_ERR_NOT_SUPPORTED;
}
#else //__nvoc_gpu_group_h_disabled
#define gpugrpGetGpuFromSubDeviceInstance(pGpuGrp, subDeviceInst, ppGpu) gpugrpGetGpuFromSubDeviceInstance_IMPL(pGpuGrp, subDeviceInst, ppGpu)
#endif //__nvoc_gpu_group_h_disabled
#undef PRIVATE_FIELD
#endif // GPU_GROUP_H
#ifdef __cplusplus
} // extern "C"
#endif
#endif // _G_GPU_GROUP_NVOC_H_
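
A hedged usage sketch of the FOR_EACH_GPU_IN_MASK_UC / FOR_EACH_GPU_IN_MASK_UC_END loop macros defined above; the function name is hypothetical, and the pSys argument is part of the macro signature but unused by the body shown, so it is treated as opaque here.

#include "g_gpu_group_nvoc.h"   // loop macros and gpugrp* declarations shown above
#include "g_gpu_mgr_nvoc.h"     // gpumgr* helpers referenced by the macro body

// Illustrative sketch only -- not part of the change above. Counts the GPUs
// in a group in unicast mode. Per the @note on the macro, abort the loop with
// 'break', never 'return'.
static NvU32 exampleCountGpusInGroup(void *pSys, struct OBJGPUGRP *pGpuGrp)
{
    struct OBJGPU *pGpu = NULL;   // loop cursor required by the macro
    NvU32 count = 0;
    NvU32 gpuMask = gpugrpGetGpuMask(pGpuGrp);

    FOR_EACH_GPU_IN_MASK_UC(32, pSys, pGpu, gpuMask)
    {
        // pGpu now refers to one GPU from the mask, with broadcast disabled
        // for the duration of this iteration.
        count++;
    }
    FOR_EACH_GPU_IN_MASK_UC_END

    return count;
}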


@@ -0,0 +1,97 @@
#define NVOC_GPU_HALSPEC_H_PRIVATE_ACCESS_ALLOWED
#include "nvoc/runtime.h"
#include "nvoc/rtti.h"
#include "nvtypes.h"
#include "nvport/nvport.h"
#include "nvport/inline/util_valist.h"
#include "utils/nvassert.h"
#include "g_gpu_halspec_nvoc.h"
#ifdef DEBUG
char __nvoc_class_id_uniqueness_check_0x34a6d6 = 1;
#endif
extern const struct NVOC_CLASS_DEF __nvoc_class_def_RmHalspecOwner;
void __nvoc_init_RmHalspecOwner(RmHalspecOwner*,
NvU32 ChipHal_arch, NvU32 ChipHal_impl, NvU32 ChipHal_hidrev,
RM_RUNTIME_VARIANT RmVariantHal_rmVariant,
NvU32 DispIpHal_ipver);
void __nvoc_init_funcTable_RmHalspecOwner(RmHalspecOwner*);
NV_STATUS __nvoc_ctor_RmHalspecOwner(RmHalspecOwner*);
void __nvoc_init_dataField_RmHalspecOwner(RmHalspecOwner*);
void __nvoc_dtor_RmHalspecOwner(RmHalspecOwner*);
extern const struct NVOC_EXPORT_INFO __nvoc_export_info_RmHalspecOwner;
static const struct NVOC_RTTI __nvoc_rtti_RmHalspecOwner_RmHalspecOwner = {
/*pClassDef=*/ &__nvoc_class_def_RmHalspecOwner,
/*dtor=*/ (NVOC_DYNAMIC_DTOR) &__nvoc_dtor_RmHalspecOwner,
/*offset=*/ 0,
};
static const struct NVOC_CASTINFO __nvoc_castinfo_RmHalspecOwner = {
/*numRelatives=*/ 1,
/*relatives=*/ {
&__nvoc_rtti_RmHalspecOwner_RmHalspecOwner,
},
};
// Not instantiable because it's not derived from class "Object"
const struct NVOC_CLASS_DEF __nvoc_class_def_RmHalspecOwner =
{
/*classInfo=*/ {
/*size=*/ sizeof(RmHalspecOwner),
/*classId=*/ classId(RmHalspecOwner),
/*providerId=*/ &__nvoc_rtti_provider,
#if NV_PRINTF_STRINGS_ALLOWED
/*name=*/ "RmHalspecOwner",
#endif
},
/*objCreatefn=*/ (NVOC_DYNAMIC_OBJ_CREATE) NULL,
/*pCastInfo=*/ &__nvoc_castinfo_RmHalspecOwner,
/*pExportInfo=*/ &__nvoc_export_info_RmHalspecOwner
};
const struct NVOC_EXPORT_INFO __nvoc_export_info_RmHalspecOwner =
{
/*numEntries=*/ 0,
/*pExportEntries=*/ 0
};
void __nvoc_dtor_RmHalspecOwner(RmHalspecOwner *pThis) {
PORT_UNREFERENCED_VARIABLE(pThis);
}
void __nvoc_init_dataField_RmHalspecOwner(RmHalspecOwner *pThis) {
PORT_UNREFERENCED_VARIABLE(pThis);
}
NV_STATUS __nvoc_ctor_RmHalspecOwner(RmHalspecOwner *pThis) {
NV_STATUS status = NV_OK;
__nvoc_init_dataField_RmHalspecOwner(pThis);
goto __nvoc_ctor_RmHalspecOwner_exit; // Success
__nvoc_ctor_RmHalspecOwner_exit:
return status;
}
static void __nvoc_init_funcTable_RmHalspecOwner_1(RmHalspecOwner *pThis) {
PORT_UNREFERENCED_VARIABLE(pThis);
}
void __nvoc_init_funcTable_RmHalspecOwner(RmHalspecOwner *pThis) {
__nvoc_init_funcTable_RmHalspecOwner_1(pThis);
}
void __nvoc_init_RmHalspecOwner(RmHalspecOwner *pThis,
NvU32 ChipHal_arch, NvU32 ChipHal_impl, NvU32 ChipHal_hidrev,
RM_RUNTIME_VARIANT RmVariantHal_rmVariant,
NvU32 DispIpHal_ipver) {
pThis->__nvoc_pbase_RmHalspecOwner = pThis;
__nvoc_init_halspec_ChipHal(&pThis->chipHal, ChipHal_arch, ChipHal_impl, ChipHal_hidrev);
__nvoc_init_halspec_RmVariantHal(&pThis->rmVariantHal, RmVariantHal_rmVariant);
__nvoc_init_halspec_DispIpHal(&pThis->dispIpHal, DispIpHal_ipver);
__nvoc_init_funcTable_RmHalspecOwner(pThis);
}


@@ -0,0 +1,91 @@
#ifndef _G_GPU_HALSPEC_NVOC_H_
#define _G_GPU_HALSPEC_NVOC_H_
#include "nvoc/runtime.h"
#ifdef __cplusplus
extern "C" {
#endif
/*
* SPDX-FileCopyrightText: Copyright (c) 2021 NVIDIA CORPORATION & AFFILIATES. All rights reserved.
* SPDX-License-Identifier: MIT
*
* Permission is hereby granted, free of charge, to any person obtaining a
* copy of this software and associated documentation files (the "Software"),
* to deal in the Software without restriction, including without limitation
* the rights to use, copy, modify, merge, publish, distribute, sublicense,
* and/or sell copies of the Software, and to permit persons to whom the
* Software is furnished to do so, subject to the following conditions:
*
* The above copyright notice and this permission notice shall be included in
* all copies or substantial portions of the Software.
*
* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
* IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
* FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL
* THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
* LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
* FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER
* DEALINGS IN THE SOFTWARE.
*/
#include "g_gpu_halspec_nvoc.h"
#ifndef GPU_HALSPEC_H
#define GPU_HALSPEC_H
#include "g_chips2halspec.h" // NVOC halspec, generated by rmconfig.pl
#ifdef NVOC_GPU_HALSPEC_H_PRIVATE_ACCESS_ALLOWED
#define PRIVATE_FIELD(x) x
#else
#define PRIVATE_FIELD(x) NVOC_PRIVATE_FIELD(x)
#endif
struct RmHalspecOwner {
const struct NVOC_RTTI *__nvoc_rtti;
struct RmHalspecOwner *__nvoc_pbase_RmHalspecOwner;
struct ChipHal chipHal;
struct RmVariantHal rmVariantHal;
struct DispIpHal dispIpHal;
};
#ifndef __NVOC_CLASS_RmHalspecOwner_TYPEDEF__
#define __NVOC_CLASS_RmHalspecOwner_TYPEDEF__
typedef struct RmHalspecOwner RmHalspecOwner;
#endif /* __NVOC_CLASS_RmHalspecOwner_TYPEDEF__ */
#ifndef __nvoc_class_id_RmHalspecOwner
#define __nvoc_class_id_RmHalspecOwner 0x34a6d6
#endif /* __nvoc_class_id_RmHalspecOwner */
extern const struct NVOC_CLASS_DEF __nvoc_class_def_RmHalspecOwner;
#define __staticCast_RmHalspecOwner(pThis) \
((pThis)->__nvoc_pbase_RmHalspecOwner)
#ifdef __nvoc_gpu_halspec_h_disabled
#define __dynamicCast_RmHalspecOwner(pThis) ((RmHalspecOwner*)NULL)
#else //__nvoc_gpu_halspec_h_disabled
#define __dynamicCast_RmHalspecOwner(pThis) \
((RmHalspecOwner*)__nvoc_dynamicCast(staticCast((pThis), Dynamic), classInfo(RmHalspecOwner)))
#endif //__nvoc_gpu_halspec_h_disabled
NV_STATUS __nvoc_objCreateDynamic_RmHalspecOwner(RmHalspecOwner**, Dynamic*, NvU32, va_list);
NV_STATUS __nvoc_objCreate_RmHalspecOwner(RmHalspecOwner**, Dynamic*, NvU32,
NvU32 ChipHal_arch, NvU32 ChipHal_impl, NvU32 ChipHal_hidrev,
RM_RUNTIME_VARIANT RmVariantHal_rmVariant,
NvU32 DispIpHal_ipver);
#define __objCreate_RmHalspecOwner(ppNewObj, pParent, createFlags, ChipHal_arch, ChipHal_impl, ChipHal_hidrev, RmVariantHal_rmVariant, DispIpHal_ipver) \
__nvoc_objCreate_RmHalspecOwner((ppNewObj), staticCast((pParent), Dynamic), (createFlags), ChipHal_arch, ChipHal_impl, ChipHal_hidrev, RmVariantHal_rmVariant, DispIpHal_ipver)
#undef PRIVATE_FIELD
#endif // GPU_HALSPEC_H
#ifdef __cplusplus
} // extern "C"
#endif
#endif // _G_GPU_HALSPEC_NVOC_H_
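
RmHalspecOwner has no objCreate path of its own (its objCreatefn is NULL in the class definition above); a containing object embeds it and forwards the chip identity it already knows into the generated initializer. A minimal sketch of that call, with the helper name hypothetical and every value supplied by the caller rather than assumed here:

#include "g_gpu_halspec_nvoc.h"   // RmHalspecOwner and initializer shown above

// Illustrative sketch only -- not part of the change above.
static void exampleInitHalspecOwner(RmHalspecOwner    *pOwner,
                                    NvU32              chipArch,
                                    NvU32              chipImpl,
                                    NvU32              chipHidrev,
                                    RM_RUNTIME_VARIANT rmVariant,
                                    NvU32              dispIpVer)
{
    // Populates chipHal, rmVariantHal, and dispIpHal so that HAL-dispatched
    // code in the owning object can select the matching implementation.
    __nvoc_init_RmHalspecOwner(pOwner, chipArch, chipImpl, chipHidrev,
                               rmVariant, dispIpVer);
}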


@@ -0,0 +1,322 @@
#define NVOC_GPU_MGMT_API_H_PRIVATE_ACCESS_ALLOWED
#include "nvoc/runtime.h"
#include "nvoc/rtti.h"
#include "nvtypes.h"
#include "nvport/nvport.h"
#include "nvport/inline/util_valist.h"
#include "utils/nvassert.h"
#include "g_gpu_mgmt_api_nvoc.h"
#ifdef DEBUG
char __nvoc_class_id_uniqueness_check_0x376305 = 1;
#endif
extern const struct NVOC_CLASS_DEF __nvoc_class_def_GpuManagementApi;
extern const struct NVOC_CLASS_DEF __nvoc_class_def_Object;
extern const struct NVOC_CLASS_DEF __nvoc_class_def_RsResource;
extern const struct NVOC_CLASS_DEF __nvoc_class_def_RmResourceCommon;
extern const struct NVOC_CLASS_DEF __nvoc_class_def_RmResource;
void __nvoc_init_GpuManagementApi(GpuManagementApi*);
void __nvoc_init_funcTable_GpuManagementApi(GpuManagementApi*);
NV_STATUS __nvoc_ctor_GpuManagementApi(GpuManagementApi*, struct CALL_CONTEXT * arg_pCallContext, struct RS_RES_ALLOC_PARAMS_INTERNAL * arg_pParams);
void __nvoc_init_dataField_GpuManagementApi(GpuManagementApi*);
void __nvoc_dtor_GpuManagementApi(GpuManagementApi*);
extern const struct NVOC_EXPORT_INFO __nvoc_export_info_GpuManagementApi;
static const struct NVOC_RTTI __nvoc_rtti_GpuManagementApi_GpuManagementApi = {
/*pClassDef=*/ &__nvoc_class_def_GpuManagementApi,
/*dtor=*/ (NVOC_DYNAMIC_DTOR) &__nvoc_dtor_GpuManagementApi,
/*offset=*/ 0,
};
static const struct NVOC_RTTI __nvoc_rtti_GpuManagementApi_Object = {
/*pClassDef=*/ &__nvoc_class_def_Object,
/*dtor=*/ &__nvoc_destructFromBase,
/*offset=*/ NV_OFFSETOF(GpuManagementApi, __nvoc_base_RmResource.__nvoc_base_RsResource.__nvoc_base_Object),
};
static const struct NVOC_RTTI __nvoc_rtti_GpuManagementApi_RsResource = {
/*pClassDef=*/ &__nvoc_class_def_RsResource,
/*dtor=*/ &__nvoc_destructFromBase,
/*offset=*/ NV_OFFSETOF(GpuManagementApi, __nvoc_base_RmResource.__nvoc_base_RsResource),
};
static const struct NVOC_RTTI __nvoc_rtti_GpuManagementApi_RmResourceCommon = {
/*pClassDef=*/ &__nvoc_class_def_RmResourceCommon,
/*dtor=*/ &__nvoc_destructFromBase,
/*offset=*/ NV_OFFSETOF(GpuManagementApi, __nvoc_base_RmResource.__nvoc_base_RmResourceCommon),
};
static const struct NVOC_RTTI __nvoc_rtti_GpuManagementApi_RmResource = {
/*pClassDef=*/ &__nvoc_class_def_RmResource,
/*dtor=*/ &__nvoc_destructFromBase,
/*offset=*/ NV_OFFSETOF(GpuManagementApi, __nvoc_base_RmResource),
};
static const struct NVOC_CASTINFO __nvoc_castinfo_GpuManagementApi = {
/*numRelatives=*/ 5,
/*relatives=*/ {
&__nvoc_rtti_GpuManagementApi_GpuManagementApi,
&__nvoc_rtti_GpuManagementApi_RmResource,
&__nvoc_rtti_GpuManagementApi_RmResourceCommon,
&__nvoc_rtti_GpuManagementApi_RsResource,
&__nvoc_rtti_GpuManagementApi_Object,
},
};
const struct NVOC_CLASS_DEF __nvoc_class_def_GpuManagementApi =
{
/*classInfo=*/ {
/*size=*/ sizeof(GpuManagementApi),
/*classId=*/ classId(GpuManagementApi),
/*providerId=*/ &__nvoc_rtti_provider,
#if NV_PRINTF_STRINGS_ALLOWED
/*name=*/ "GpuManagementApi",
#endif
},
/*objCreatefn=*/ (NVOC_DYNAMIC_OBJ_CREATE) &__nvoc_objCreateDynamic_GpuManagementApi,
/*pCastInfo=*/ &__nvoc_castinfo_GpuManagementApi,
/*pExportInfo=*/ &__nvoc_export_info_GpuManagementApi
};
static NvBool __nvoc_thunk_RmResource_gpumgmtapiShareCallback(struct GpuManagementApi *pResource, struct RsClient *pInvokingClient, struct RsResourceRef *pParentRef, RS_SHARE_POLICY *pSharePolicy) {
return rmresShareCallback((struct RmResource *)(((unsigned char *)pResource) + __nvoc_rtti_GpuManagementApi_RmResource.offset), pInvokingClient, pParentRef, pSharePolicy);
}
static NV_STATUS __nvoc_thunk_RmResource_gpumgmtapiCheckMemInterUnmap(struct GpuManagementApi *pRmResource, NvBool bSubdeviceHandleProvided) {
return rmresCheckMemInterUnmap((struct RmResource *)(((unsigned char *)pRmResource) + __nvoc_rtti_GpuManagementApi_RmResource.offset), bSubdeviceHandleProvided);
}
static NV_STATUS __nvoc_thunk_RsResource_gpumgmtapiControl(struct GpuManagementApi *pResource, struct CALL_CONTEXT *pCallContext, struct RS_RES_CONTROL_PARAMS_INTERNAL *pParams) {
return resControl((struct RsResource *)(((unsigned char *)pResource) + __nvoc_rtti_GpuManagementApi_RsResource.offset), pCallContext, pParams);
}
static NV_STATUS __nvoc_thunk_RmResource_gpumgmtapiGetMemInterMapParams(struct GpuManagementApi *pRmResource, RMRES_MEM_INTER_MAP_PARAMS *pParams) {
return rmresGetMemInterMapParams((struct RmResource *)(((unsigned char *)pRmResource) + __nvoc_rtti_GpuManagementApi_RmResource.offset), pParams);
}
static NV_STATUS __nvoc_thunk_RmResource_gpumgmtapiGetMemoryMappingDescriptor(struct GpuManagementApi *pRmResource, struct MEMORY_DESCRIPTOR **ppMemDesc) {
return rmresGetMemoryMappingDescriptor((struct RmResource *)(((unsigned char *)pRmResource) + __nvoc_rtti_GpuManagementApi_RmResource.offset), ppMemDesc);
}
static NvU32 __nvoc_thunk_RsResource_gpumgmtapiGetRefCount(struct GpuManagementApi *pResource) {
return resGetRefCount((struct RsResource *)(((unsigned char *)pResource) + __nvoc_rtti_GpuManagementApi_RsResource.offset));
}
static NV_STATUS __nvoc_thunk_RsResource_gpumgmtapiControlFilter(struct GpuManagementApi *pResource, struct CALL_CONTEXT *pCallContext, struct RS_RES_CONTROL_PARAMS_INTERNAL *pParams) {
return resControlFilter((struct RsResource *)(((unsigned char *)pResource) + __nvoc_rtti_GpuManagementApi_RsResource.offset), pCallContext, pParams);
}
static void __nvoc_thunk_RsResource_gpumgmtapiAddAdditionalDependants(struct RsClient *pClient, struct GpuManagementApi *pResource, RsResourceRef *pReference) {
resAddAdditionalDependants(pClient, (struct RsResource *)(((unsigned char *)pResource) + __nvoc_rtti_GpuManagementApi_RsResource.offset), pReference);
}
static NV_STATUS __nvoc_thunk_RsResource_gpumgmtapiUnmap(struct GpuManagementApi *pResource, struct CALL_CONTEXT *pCallContext, RsCpuMapping *pCpuMapping) {
return resUnmap((struct RsResource *)(((unsigned char *)pResource) + __nvoc_rtti_GpuManagementApi_RsResource.offset), pCallContext, pCpuMapping);
}
static NV_STATUS __nvoc_thunk_RmResource_gpumgmtapiControl_Prologue(struct GpuManagementApi *pResource, struct CALL_CONTEXT *pCallContext, struct RS_RES_CONTROL_PARAMS_INTERNAL *pParams) {
return rmresControl_Prologue((struct RmResource *)(((unsigned char *)pResource) + __nvoc_rtti_GpuManagementApi_RmResource.offset), pCallContext, pParams);
}
static NvBool __nvoc_thunk_RsResource_gpumgmtapiCanCopy(struct GpuManagementApi *pResource) {
return resCanCopy((struct RsResource *)(((unsigned char *)pResource) + __nvoc_rtti_GpuManagementApi_RsResource.offset));
}
static NV_STATUS __nvoc_thunk_RsResource_gpumgmtapiMapTo(struct GpuManagementApi *pResource, RS_RES_MAP_TO_PARAMS *pParams) {
return resMapTo((struct RsResource *)(((unsigned char *)pResource) + __nvoc_rtti_GpuManagementApi_RsResource.offset), pParams);
}
static void __nvoc_thunk_RsResource_gpumgmtapiPreDestruct(struct GpuManagementApi *pResource) {
resPreDestruct((struct RsResource *)(((unsigned char *)pResource) + __nvoc_rtti_GpuManagementApi_RsResource.offset));
}
static NV_STATUS __nvoc_thunk_RsResource_gpumgmtapiUnmapFrom(struct GpuManagementApi *pResource, RS_RES_UNMAP_FROM_PARAMS *pParams) {
return resUnmapFrom((struct RsResource *)(((unsigned char *)pResource) + __nvoc_rtti_GpuManagementApi_RsResource.offset), pParams);
}
static void __nvoc_thunk_RmResource_gpumgmtapiControl_Epilogue(struct GpuManagementApi *pResource, struct CALL_CONTEXT *pCallContext, struct RS_RES_CONTROL_PARAMS_INTERNAL *pParams) {
rmresControl_Epilogue((struct RmResource *)(((unsigned char *)pResource) + __nvoc_rtti_GpuManagementApi_RmResource.offset), pCallContext, pParams);
}
static NV_STATUS __nvoc_thunk_RsResource_gpumgmtapiControlLookup(struct GpuManagementApi *pResource, struct RS_RES_CONTROL_PARAMS_INTERNAL *pParams, const struct NVOC_EXPORTED_METHOD_DEF **ppEntry) {
return resControlLookup((struct RsResource *)(((unsigned char *)pResource) + __nvoc_rtti_GpuManagementApi_RsResource.offset), pParams, ppEntry);
}
static NV_STATUS __nvoc_thunk_RsResource_gpumgmtapiMap(struct GpuManagementApi *pResource, struct CALL_CONTEXT *pCallContext, RS_CPU_MAP_PARAMS *pParams, RsCpuMapping *pCpuMapping) {
return resMap((struct RsResource *)(((unsigned char *)pResource) + __nvoc_rtti_GpuManagementApi_RsResource.offset), pCallContext, pParams, pCpuMapping);
}
static NvBool __nvoc_thunk_RmResource_gpumgmtapiAccessCallback(struct GpuManagementApi *pResource, struct RsClient *pInvokingClient, void *pAllocParams, RsAccessRight accessRight) {
return rmresAccessCallback((struct RmResource *)(((unsigned char *)pResource) + __nvoc_rtti_GpuManagementApi_RmResource.offset), pInvokingClient, pAllocParams, accessRight);
}
#if !defined(NVOC_EXPORTED_METHOD_DISABLED_BY_FLAG)
#define NVOC_EXPORTED_METHOD_DISABLED_BY_FLAG(x) (0)
#endif
static const struct NVOC_EXPORTED_METHOD_DEF __nvoc_exported_method_def_GpuManagementApi[] =
{
{ /* [0] */
#if NVOC_EXPORTED_METHOD_DISABLED_BY_FLAG(0x7u)
/*pFunc=*/ (void (*)(void)) NULL,
#else
/*pFunc=*/ (void (*)(void)) gpumgmtapiCtrlCmdSetShutdownState_IMPL,
#endif // NVOC_EXPORTED_METHOD_DISABLED_BY_FLAG(0x7u)
/*flags=*/ 0x7u,
/*accessRight=*/0x0u,
/*methodId=*/ 0x200101u,
/*paramSize=*/ sizeof(NV0020_CTRL_GPU_MGMT_SET_SHUTDOWN_STATE_PARAMS),
/*pClassInfo=*/ &(__nvoc_class_def_GpuManagementApi.classInfo),
#if NV_PRINTF_STRINGS_ALLOWED
/*func=*/ "gpumgmtapiCtrlCmdSetShutdownState"
#endif
},
};
const struct NVOC_EXPORT_INFO __nvoc_export_info_GpuManagementApi =
{
/*numEntries=*/ 1,
/*pExportEntries=*/ __nvoc_exported_method_def_GpuManagementApi
};
void __nvoc_dtor_RmResource(RmResource*);
void __nvoc_dtor_GpuManagementApi(GpuManagementApi *pThis) {
__nvoc_gpumgmtapiDestruct(pThis);
__nvoc_dtor_RmResource(&pThis->__nvoc_base_RmResource);
PORT_UNREFERENCED_VARIABLE(pThis);
}
void __nvoc_init_dataField_GpuManagementApi(GpuManagementApi *pThis) {
PORT_UNREFERENCED_VARIABLE(pThis);
}
NV_STATUS __nvoc_ctor_RmResource(RmResource* , struct CALL_CONTEXT *, struct RS_RES_ALLOC_PARAMS_INTERNAL *);
NV_STATUS __nvoc_ctor_GpuManagementApi(GpuManagementApi *pThis, struct CALL_CONTEXT * arg_pCallContext, struct RS_RES_ALLOC_PARAMS_INTERNAL * arg_pParams) {
NV_STATUS status = NV_OK;
status = __nvoc_ctor_RmResource(&pThis->__nvoc_base_RmResource, arg_pCallContext, arg_pParams);
if (status != NV_OK) goto __nvoc_ctor_GpuManagementApi_fail_RmResource;
__nvoc_init_dataField_GpuManagementApi(pThis);
status = __nvoc_gpumgmtapiConstruct(pThis, arg_pCallContext, arg_pParams);
if (status != NV_OK) goto __nvoc_ctor_GpuManagementApi_fail__init;
goto __nvoc_ctor_GpuManagementApi_exit; // Success
__nvoc_ctor_GpuManagementApi_fail__init:
__nvoc_dtor_RmResource(&pThis->__nvoc_base_RmResource);
__nvoc_ctor_GpuManagementApi_fail_RmResource:
__nvoc_ctor_GpuManagementApi_exit:
return status;
}
static void __nvoc_init_funcTable_GpuManagementApi_1(GpuManagementApi *pThis) {
PORT_UNREFERENCED_VARIABLE(pThis);
#if !NVOC_EXPORTED_METHOD_DISABLED_BY_FLAG(0x7u)
pThis->__gpumgmtapiCtrlCmdSetShutdownState__ = &gpumgmtapiCtrlCmdSetShutdownState_IMPL;
#endif
pThis->__gpumgmtapiShareCallback__ = &__nvoc_thunk_RmResource_gpumgmtapiShareCallback;
pThis->__gpumgmtapiCheckMemInterUnmap__ = &__nvoc_thunk_RmResource_gpumgmtapiCheckMemInterUnmap;
pThis->__gpumgmtapiControl__ = &__nvoc_thunk_RsResource_gpumgmtapiControl;
pThis->__gpumgmtapiGetMemInterMapParams__ = &__nvoc_thunk_RmResource_gpumgmtapiGetMemInterMapParams;
pThis->__gpumgmtapiGetMemoryMappingDescriptor__ = &__nvoc_thunk_RmResource_gpumgmtapiGetMemoryMappingDescriptor;
pThis->__gpumgmtapiGetRefCount__ = &__nvoc_thunk_RsResource_gpumgmtapiGetRefCount;
pThis->__gpumgmtapiControlFilter__ = &__nvoc_thunk_RsResource_gpumgmtapiControlFilter;
pThis->__gpumgmtapiAddAdditionalDependants__ = &__nvoc_thunk_RsResource_gpumgmtapiAddAdditionalDependants;
pThis->__gpumgmtapiUnmap__ = &__nvoc_thunk_RsResource_gpumgmtapiUnmap;
pThis->__gpumgmtapiControl_Prologue__ = &__nvoc_thunk_RmResource_gpumgmtapiControl_Prologue;
pThis->__gpumgmtapiCanCopy__ = &__nvoc_thunk_RsResource_gpumgmtapiCanCopy;
pThis->__gpumgmtapiMapTo__ = &__nvoc_thunk_RsResource_gpumgmtapiMapTo;
pThis->__gpumgmtapiPreDestruct__ = &__nvoc_thunk_RsResource_gpumgmtapiPreDestruct;
pThis->__gpumgmtapiUnmapFrom__ = &__nvoc_thunk_RsResource_gpumgmtapiUnmapFrom;
pThis->__gpumgmtapiControl_Epilogue__ = &__nvoc_thunk_RmResource_gpumgmtapiControl_Epilogue;
pThis->__gpumgmtapiControlLookup__ = &__nvoc_thunk_RsResource_gpumgmtapiControlLookup;
pThis->__gpumgmtapiMap__ = &__nvoc_thunk_RsResource_gpumgmtapiMap;
pThis->__gpumgmtapiAccessCallback__ = &__nvoc_thunk_RmResource_gpumgmtapiAccessCallback;
}
void __nvoc_init_funcTable_GpuManagementApi(GpuManagementApi *pThis) {
__nvoc_init_funcTable_GpuManagementApi_1(pThis);
}
void __nvoc_init_RmResource(RmResource*);
void __nvoc_init_GpuManagementApi(GpuManagementApi *pThis) {
pThis->__nvoc_pbase_GpuManagementApi = pThis;
pThis->__nvoc_pbase_Object = &pThis->__nvoc_base_RmResource.__nvoc_base_RsResource.__nvoc_base_Object;
pThis->__nvoc_pbase_RsResource = &pThis->__nvoc_base_RmResource.__nvoc_base_RsResource;
pThis->__nvoc_pbase_RmResourceCommon = &pThis->__nvoc_base_RmResource.__nvoc_base_RmResourceCommon;
pThis->__nvoc_pbase_RmResource = &pThis->__nvoc_base_RmResource;
__nvoc_init_RmResource(&pThis->__nvoc_base_RmResource);
__nvoc_init_funcTable_GpuManagementApi(pThis);
}
NV_STATUS __nvoc_objCreate_GpuManagementApi(GpuManagementApi **ppThis, Dynamic *pParent, NvU32 createFlags, struct CALL_CONTEXT * arg_pCallContext, struct RS_RES_ALLOC_PARAMS_INTERNAL * arg_pParams) {
NV_STATUS status;
Object *pParentObj;
GpuManagementApi *pThis;
pThis = portMemAllocNonPaged(sizeof(GpuManagementApi));
if (pThis == NULL) return NV_ERR_NO_MEMORY;
portMemSet(pThis, 0, sizeof(GpuManagementApi));
__nvoc_initRtti(staticCast(pThis, Dynamic), &__nvoc_class_def_GpuManagementApi);
if (pParent != NULL && !(createFlags & NVOC_OBJ_CREATE_FLAGS_PARENT_HALSPEC_ONLY))
{
pParentObj = dynamicCast(pParent, Object);
objAddChild(pParentObj, &pThis->__nvoc_base_RmResource.__nvoc_base_RsResource.__nvoc_base_Object);
}
else
{
pThis->__nvoc_base_RmResource.__nvoc_base_RsResource.__nvoc_base_Object.pParent = NULL;
}
__nvoc_init_GpuManagementApi(pThis);
status = __nvoc_ctor_GpuManagementApi(pThis, arg_pCallContext, arg_pParams);
if (status != NV_OK) goto __nvoc_objCreate_GpuManagementApi_cleanup;
*ppThis = pThis;
return NV_OK;
__nvoc_objCreate_GpuManagementApi_cleanup:
// do not call destructors here since the constructor already called them
portMemFree(pThis);
return status;
}
NV_STATUS __nvoc_objCreateDynamic_GpuManagementApi(GpuManagementApi **ppThis, Dynamic *pParent, NvU32 createFlags, va_list args) {
NV_STATUS status;
struct CALL_CONTEXT * arg_pCallContext = va_arg(args, struct CALL_CONTEXT *);
struct RS_RES_ALLOC_PARAMS_INTERNAL * arg_pParams = va_arg(args, struct RS_RES_ALLOC_PARAMS_INTERNAL *);
status = __nvoc_objCreate_GpuManagementApi(ppThis, pParent, createFlags, arg_pCallContext, arg_pParams);
return status;
}
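
Each __nvoc_thunk_* function above recovers the embedded base-class sub-object by adding the byte offset recorded in the corresponding NVOC_RTTI entry to the derived pointer. A self-contained toy model of that layout trick, with purely illustrative type and function names (not RM code):

#include <stddef.h>
#include <stdio.h>

/* Toy model: the derived class embeds its base as a member, and a thunk
 * converts a derived pointer into a base pointer using the recorded offset. */
typedef struct Base    { int baseField; } Base;
typedef struct Derived { int tag; Base base; } Derived;

/* Analogous to the offset stored in a __nvoc_rtti_* entry. */
static const size_t derivedToBaseOffset = offsetof(Derived, base);

static int baseGetField(Base *pBase) { return pBase->baseField; }

/* Analogous to a generated __nvoc_thunk_* function. */
static int derivedGetField_thunk(Derived *pDerived)
{
    return baseGetField((Base *)((unsigned char *)pDerived + derivedToBaseOffset));
}

int main(void)
{
    Derived d = { .tag = 1, .base = { .baseField = 42 } };
    printf("%d\n", derivedGetField_thunk(&d)); /* prints 42 */
    return 0;
}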


@@ -0,0 +1,221 @@
#ifndef _G_GPU_MGMT_API_NVOC_H_
#define _G_GPU_MGMT_API_NVOC_H_
#include "nvoc/runtime.h"
#ifdef __cplusplus
extern "C" {
#endif
/*
* SPDX-FileCopyrightText: Copyright (c) 2020-2021 NVIDIA CORPORATION & AFFILIATES. All rights reserved.
* SPDX-License-Identifier: MIT
*
* Permission is hereby granted, free of charge, to any person obtaining a
* copy of this software and associated documentation files (the "Software"),
* to deal in the Software without restriction, including without limitation
* the rights to use, copy, modify, merge, publish, distribute, sublicense,
* and/or sell copies of the Software, and to permit persons to whom the
* Software is furnished to do so, subject to the following conditions:
*
* The above copyright notice and this permission notice shall be included in
* all copies or substantial portions of the Software.
*
* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
* IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
* FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL
* THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
* LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
* FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER
* DEALINGS IN THE SOFTWARE.
*/
#include "g_gpu_mgmt_api_nvoc.h"
#ifndef GPU_MGMT_API_H
#define GPU_MGMT_API_H
#include "rmapi/resource.h"
#include "ctrl/ctrl0020.h"
// ****************************************************************************
// Type Definitions
// ****************************************************************************
//
// GpuManagementApi class information
//
// This is a global GPU class that helps route IOCTLs to probed
// and persistent GPU state
//
#ifdef NVOC_GPU_MGMT_API_H_PRIVATE_ACCESS_ALLOWED
#define PRIVATE_FIELD(x) x
#else
#define PRIVATE_FIELD(x) NVOC_PRIVATE_FIELD(x)
#endif
struct GpuManagementApi {
const struct NVOC_RTTI *__nvoc_rtti;
struct RmResource __nvoc_base_RmResource;
struct Object *__nvoc_pbase_Object;
struct RsResource *__nvoc_pbase_RsResource;
struct RmResourceCommon *__nvoc_pbase_RmResourceCommon;
struct RmResource *__nvoc_pbase_RmResource;
struct GpuManagementApi *__nvoc_pbase_GpuManagementApi;
NV_STATUS (*__gpumgmtapiCtrlCmdSetShutdownState__)(struct GpuManagementApi *, NV0020_CTRL_GPU_MGMT_SET_SHUTDOWN_STATE_PARAMS *);
NvBool (*__gpumgmtapiShareCallback__)(struct GpuManagementApi *, struct RsClient *, struct RsResourceRef *, RS_SHARE_POLICY *);
NV_STATUS (*__gpumgmtapiCheckMemInterUnmap__)(struct GpuManagementApi *, NvBool);
NV_STATUS (*__gpumgmtapiControl__)(struct GpuManagementApi *, struct CALL_CONTEXT *, struct RS_RES_CONTROL_PARAMS_INTERNAL *);
NV_STATUS (*__gpumgmtapiGetMemInterMapParams__)(struct GpuManagementApi *, RMRES_MEM_INTER_MAP_PARAMS *);
NV_STATUS (*__gpumgmtapiGetMemoryMappingDescriptor__)(struct GpuManagementApi *, struct MEMORY_DESCRIPTOR **);
NvU32 (*__gpumgmtapiGetRefCount__)(struct GpuManagementApi *);
NV_STATUS (*__gpumgmtapiControlFilter__)(struct GpuManagementApi *, struct CALL_CONTEXT *, struct RS_RES_CONTROL_PARAMS_INTERNAL *);
void (*__gpumgmtapiAddAdditionalDependants__)(struct RsClient *, struct GpuManagementApi *, RsResourceRef *);
NV_STATUS (*__gpumgmtapiUnmap__)(struct GpuManagementApi *, struct CALL_CONTEXT *, RsCpuMapping *);
NV_STATUS (*__gpumgmtapiControl_Prologue__)(struct GpuManagementApi *, struct CALL_CONTEXT *, struct RS_RES_CONTROL_PARAMS_INTERNAL *);
NvBool (*__gpumgmtapiCanCopy__)(struct GpuManagementApi *);
NV_STATUS (*__gpumgmtapiMapTo__)(struct GpuManagementApi *, RS_RES_MAP_TO_PARAMS *);
void (*__gpumgmtapiPreDestruct__)(struct GpuManagementApi *);
NV_STATUS (*__gpumgmtapiUnmapFrom__)(struct GpuManagementApi *, RS_RES_UNMAP_FROM_PARAMS *);
void (*__gpumgmtapiControl_Epilogue__)(struct GpuManagementApi *, struct CALL_CONTEXT *, struct RS_RES_CONTROL_PARAMS_INTERNAL *);
NV_STATUS (*__gpumgmtapiControlLookup__)(struct GpuManagementApi *, struct RS_RES_CONTROL_PARAMS_INTERNAL *, const struct NVOC_EXPORTED_METHOD_DEF **);
NV_STATUS (*__gpumgmtapiMap__)(struct GpuManagementApi *, struct CALL_CONTEXT *, RS_CPU_MAP_PARAMS *, RsCpuMapping *);
NvBool (*__gpumgmtapiAccessCallback__)(struct GpuManagementApi *, struct RsClient *, void *, RsAccessRight);
};
#ifndef __NVOC_CLASS_GpuManagementApi_TYPEDEF__
#define __NVOC_CLASS_GpuManagementApi_TYPEDEF__
typedef struct GpuManagementApi GpuManagementApi;
#endif /* __NVOC_CLASS_GpuManagementApi_TYPEDEF__ */
#ifndef __nvoc_class_id_GpuManagementApi
#define __nvoc_class_id_GpuManagementApi 0x376305
#endif /* __nvoc_class_id_GpuManagementApi */
extern const struct NVOC_CLASS_DEF __nvoc_class_def_GpuManagementApi;
#define __staticCast_GpuManagementApi(pThis) \
((pThis)->__nvoc_pbase_GpuManagementApi)
#ifdef __nvoc_gpu_mgmt_api_h_disabled
#define __dynamicCast_GpuManagementApi(pThis) ((GpuManagementApi*)NULL)
#else //__nvoc_gpu_mgmt_api_h_disabled
#define __dynamicCast_GpuManagementApi(pThis) \
((GpuManagementApi*)__nvoc_dynamicCast(staticCast((pThis), Dynamic), classInfo(GpuManagementApi)))
#endif //__nvoc_gpu_mgmt_api_h_disabled
NV_STATUS __nvoc_objCreateDynamic_GpuManagementApi(GpuManagementApi**, Dynamic*, NvU32, va_list);
NV_STATUS __nvoc_objCreate_GpuManagementApi(GpuManagementApi**, Dynamic*, NvU32, struct CALL_CONTEXT * arg_pCallContext, struct RS_RES_ALLOC_PARAMS_INTERNAL * arg_pParams);
#define __objCreate_GpuManagementApi(ppNewObj, pParent, createFlags, arg_pCallContext, arg_pParams) \
__nvoc_objCreate_GpuManagementApi((ppNewObj), staticCast((pParent), Dynamic), (createFlags), arg_pCallContext, arg_pParams)
#define gpumgmtapiCtrlCmdSetShutdownState(pGpuMgmt, pParams) gpumgmtapiCtrlCmdSetShutdownState_DISPATCH(pGpuMgmt, pParams)
#define gpumgmtapiShareCallback(pResource, pInvokingClient, pParentRef, pSharePolicy) gpumgmtapiShareCallback_DISPATCH(pResource, pInvokingClient, pParentRef, pSharePolicy)
#define gpumgmtapiCheckMemInterUnmap(pRmResource, bSubdeviceHandleProvided) gpumgmtapiCheckMemInterUnmap_DISPATCH(pRmResource, bSubdeviceHandleProvided)
#define gpumgmtapiControl(pResource, pCallContext, pParams) gpumgmtapiControl_DISPATCH(pResource, pCallContext, pParams)
#define gpumgmtapiGetMemInterMapParams(pRmResource, pParams) gpumgmtapiGetMemInterMapParams_DISPATCH(pRmResource, pParams)
#define gpumgmtapiGetMemoryMappingDescriptor(pRmResource, ppMemDesc) gpumgmtapiGetMemoryMappingDescriptor_DISPATCH(pRmResource, ppMemDesc)
#define gpumgmtapiGetRefCount(pResource) gpumgmtapiGetRefCount_DISPATCH(pResource)
#define gpumgmtapiControlFilter(pResource, pCallContext, pParams) gpumgmtapiControlFilter_DISPATCH(pResource, pCallContext, pParams)
#define gpumgmtapiAddAdditionalDependants(pClient, pResource, pReference) gpumgmtapiAddAdditionalDependants_DISPATCH(pClient, pResource, pReference)
#define gpumgmtapiUnmap(pResource, pCallContext, pCpuMapping) gpumgmtapiUnmap_DISPATCH(pResource, pCallContext, pCpuMapping)
#define gpumgmtapiControl_Prologue(pResource, pCallContext, pParams) gpumgmtapiControl_Prologue_DISPATCH(pResource, pCallContext, pParams)
#define gpumgmtapiCanCopy(pResource) gpumgmtapiCanCopy_DISPATCH(pResource)
#define gpumgmtapiMapTo(pResource, pParams) gpumgmtapiMapTo_DISPATCH(pResource, pParams)
#define gpumgmtapiPreDestruct(pResource) gpumgmtapiPreDestruct_DISPATCH(pResource)
#define gpumgmtapiUnmapFrom(pResource, pParams) gpumgmtapiUnmapFrom_DISPATCH(pResource, pParams)
#define gpumgmtapiControl_Epilogue(pResource, pCallContext, pParams) gpumgmtapiControl_Epilogue_DISPATCH(pResource, pCallContext, pParams)
#define gpumgmtapiControlLookup(pResource, pParams, ppEntry) gpumgmtapiControlLookup_DISPATCH(pResource, pParams, ppEntry)
#define gpumgmtapiMap(pResource, pCallContext, pParams, pCpuMapping) gpumgmtapiMap_DISPATCH(pResource, pCallContext, pParams, pCpuMapping)
#define gpumgmtapiAccessCallback(pResource, pInvokingClient, pAllocParams, accessRight) gpumgmtapiAccessCallback_DISPATCH(pResource, pInvokingClient, pAllocParams, accessRight)
NV_STATUS gpumgmtapiCtrlCmdSetShutdownState_IMPL(struct GpuManagementApi *pGpuMgmt, NV0020_CTRL_GPU_MGMT_SET_SHUTDOWN_STATE_PARAMS *pParams);
static inline NV_STATUS gpumgmtapiCtrlCmdSetShutdownState_DISPATCH(struct GpuManagementApi *pGpuMgmt, NV0020_CTRL_GPU_MGMT_SET_SHUTDOWN_STATE_PARAMS *pParams) {
return pGpuMgmt->__gpumgmtapiCtrlCmdSetShutdownState__(pGpuMgmt, pParams);
}
static inline NvBool gpumgmtapiShareCallback_DISPATCH(struct GpuManagementApi *pResource, struct RsClient *pInvokingClient, struct RsResourceRef *pParentRef, RS_SHARE_POLICY *pSharePolicy) {
return pResource->__gpumgmtapiShareCallback__(pResource, pInvokingClient, pParentRef, pSharePolicy);
}
static inline NV_STATUS gpumgmtapiCheckMemInterUnmap_DISPATCH(struct GpuManagementApi *pRmResource, NvBool bSubdeviceHandleProvided) {
return pRmResource->__gpumgmtapiCheckMemInterUnmap__(pRmResource, bSubdeviceHandleProvided);
}
static inline NV_STATUS gpumgmtapiControl_DISPATCH(struct GpuManagementApi *pResource, struct CALL_CONTEXT *pCallContext, struct RS_RES_CONTROL_PARAMS_INTERNAL *pParams) {
return pResource->__gpumgmtapiControl__(pResource, pCallContext, pParams);
}
static inline NV_STATUS gpumgmtapiGetMemInterMapParams_DISPATCH(struct GpuManagementApi *pRmResource, RMRES_MEM_INTER_MAP_PARAMS *pParams) {
return pRmResource->__gpumgmtapiGetMemInterMapParams__(pRmResource, pParams);
}
static inline NV_STATUS gpumgmtapiGetMemoryMappingDescriptor_DISPATCH(struct GpuManagementApi *pRmResource, struct MEMORY_DESCRIPTOR **ppMemDesc) {
return pRmResource->__gpumgmtapiGetMemoryMappingDescriptor__(pRmResource, ppMemDesc);
}
static inline NvU32 gpumgmtapiGetRefCount_DISPATCH(struct GpuManagementApi *pResource) {
return pResource->__gpumgmtapiGetRefCount__(pResource);
}
static inline NV_STATUS gpumgmtapiControlFilter_DISPATCH(struct GpuManagementApi *pResource, struct CALL_CONTEXT *pCallContext, struct RS_RES_CONTROL_PARAMS_INTERNAL *pParams) {
return pResource->__gpumgmtapiControlFilter__(pResource, pCallContext, pParams);
}
static inline void gpumgmtapiAddAdditionalDependants_DISPATCH(struct RsClient *pClient, struct GpuManagementApi *pResource, RsResourceRef *pReference) {
pResource->__gpumgmtapiAddAdditionalDependants__(pClient, pResource, pReference);
}
static inline NV_STATUS gpumgmtapiUnmap_DISPATCH(struct GpuManagementApi *pResource, struct CALL_CONTEXT *pCallContext, RsCpuMapping *pCpuMapping) {
return pResource->__gpumgmtapiUnmap__(pResource, pCallContext, pCpuMapping);
}
static inline NV_STATUS gpumgmtapiControl_Prologue_DISPATCH(struct GpuManagementApi *pResource, struct CALL_CONTEXT *pCallContext, struct RS_RES_CONTROL_PARAMS_INTERNAL *pParams) {
return pResource->__gpumgmtapiControl_Prologue__(pResource, pCallContext, pParams);
}
static inline NvBool gpumgmtapiCanCopy_DISPATCH(struct GpuManagementApi *pResource) {
return pResource->__gpumgmtapiCanCopy__(pResource);
}
static inline NV_STATUS gpumgmtapiMapTo_DISPATCH(struct GpuManagementApi *pResource, RS_RES_MAP_TO_PARAMS *pParams) {
return pResource->__gpumgmtapiMapTo__(pResource, pParams);
}
static inline void gpumgmtapiPreDestruct_DISPATCH(struct GpuManagementApi *pResource) {
pResource->__gpumgmtapiPreDestruct__(pResource);
}
static inline NV_STATUS gpumgmtapiUnmapFrom_DISPATCH(struct GpuManagementApi *pResource, RS_RES_UNMAP_FROM_PARAMS *pParams) {
return pResource->__gpumgmtapiUnmapFrom__(pResource, pParams);
}
static inline void gpumgmtapiControl_Epilogue_DISPATCH(struct GpuManagementApi *pResource, struct CALL_CONTEXT *pCallContext, struct RS_RES_CONTROL_PARAMS_INTERNAL *pParams) {
pResource->__gpumgmtapiControl_Epilogue__(pResource, pCallContext, pParams);
}
static inline NV_STATUS gpumgmtapiControlLookup_DISPATCH(struct GpuManagementApi *pResource, struct RS_RES_CONTROL_PARAMS_INTERNAL *pParams, const struct NVOC_EXPORTED_METHOD_DEF **ppEntry) {
return pResource->__gpumgmtapiControlLookup__(pResource, pParams, ppEntry);
}
static inline NV_STATUS gpumgmtapiMap_DISPATCH(struct GpuManagementApi *pResource, struct CALL_CONTEXT *pCallContext, RS_CPU_MAP_PARAMS *pParams, RsCpuMapping *pCpuMapping) {
return pResource->__gpumgmtapiMap__(pResource, pCallContext, pParams, pCpuMapping);
}
static inline NvBool gpumgmtapiAccessCallback_DISPATCH(struct GpuManagementApi *pResource, struct RsClient *pInvokingClient, void *pAllocParams, RsAccessRight accessRight) {
return pResource->__gpumgmtapiAccessCallback__(pResource, pInvokingClient, pAllocParams, accessRight);
}
NV_STATUS gpumgmtapiConstruct_IMPL(struct GpuManagementApi *arg_pGpuMgmt, struct CALL_CONTEXT *arg_pCallContext, struct RS_RES_ALLOC_PARAMS_INTERNAL *arg_pParams);
#define __nvoc_gpumgmtapiConstruct(arg_pGpuMgmt, arg_pCallContext, arg_pParams) gpumgmtapiConstruct_IMPL(arg_pGpuMgmt, arg_pCallContext, arg_pParams)
void gpumgmtapiDestruct_IMPL(struct GpuManagementApi *pGpuMgmt);
#define __nvoc_gpumgmtapiDestruct(pGpuMgmt) gpumgmtapiDestruct_IMPL(pGpuMgmt)
#undef PRIVATE_FIELD
#endif // GPU_MGMT_API_H
#ifdef __cplusplus
} // extern "C"
#endif
#endif // _G_GPU_MGMT_API_NVOC_H_
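
The control macros above resolve to the _DISPATCH inlines, which call through the function pointers installed by __nvoc_init_funcTable_GpuManagementApi(); external clients reach the same implementation through the exported-method table (methodId 0x200101). A hedged sketch of an internal caller; the helper name and the origin of the object and parameter block are assumptions of this sketch.

#include "g_gpu_mgmt_api_nvoc.h"   // declarations shown above

// Illustrative sketch only -- not part of the change above. Assumes an already
// constructed GpuManagementApi object and a filled-in control parameter block
// obtained through the usual resource-server allocation/control path.
static NV_STATUS exampleSetShutdownState(struct GpuManagementApi *pGpuMgmt,
                                         NV0020_CTRL_GPU_MGMT_SET_SHUTDOWN_STATE_PARAMS *pParams)
{
    // Macro -> gpumgmtapiCtrlCmdSetShutdownState_DISPATCH() -> vtable entry,
    // which __nvoc_init_funcTable_GpuManagementApi_1() points at
    // gpumgmtapiCtrlCmdSetShutdownState_IMPL() unless disabled by flag.
    return gpumgmtapiCtrlCmdSetShutdownState(pGpuMgmt, pParams);
}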


@@ -0,0 +1,154 @@
#define NVOC_GPU_MGR_H_PRIVATE_ACCESS_ALLOWED
#include "nvoc/runtime.h"
#include "nvoc/rtti.h"
#include "nvtypes.h"
#include "nvport/nvport.h"
#include "nvport/inline/util_valist.h"
#include "utils/nvassert.h"
#include "g_gpu_mgr_nvoc.h"
#ifdef DEBUG
char __nvoc_class_id_uniqueness_check_0xcf1b25 = 1;
#endif
extern const struct NVOC_CLASS_DEF __nvoc_class_def_OBJGPUMGR;
extern const struct NVOC_CLASS_DEF __nvoc_class_def_Object;
void __nvoc_init_OBJGPUMGR(OBJGPUMGR*);
void __nvoc_init_funcTable_OBJGPUMGR(OBJGPUMGR*);
NV_STATUS __nvoc_ctor_OBJGPUMGR(OBJGPUMGR*);
void __nvoc_init_dataField_OBJGPUMGR(OBJGPUMGR*);
void __nvoc_dtor_OBJGPUMGR(OBJGPUMGR*);
extern const struct NVOC_EXPORT_INFO __nvoc_export_info_OBJGPUMGR;
static const struct NVOC_RTTI __nvoc_rtti_OBJGPUMGR_OBJGPUMGR = {
/*pClassDef=*/ &__nvoc_class_def_OBJGPUMGR,
/*dtor=*/ (NVOC_DYNAMIC_DTOR) &__nvoc_dtor_OBJGPUMGR,
/*offset=*/ 0,
};
static const struct NVOC_RTTI __nvoc_rtti_OBJGPUMGR_Object = {
/*pClassDef=*/ &__nvoc_class_def_Object,
/*dtor=*/ &__nvoc_destructFromBase,
/*offset=*/ NV_OFFSETOF(OBJGPUMGR, __nvoc_base_Object),
};
static const struct NVOC_CASTINFO __nvoc_castinfo_OBJGPUMGR = {
/*numRelatives=*/ 2,
/*relatives=*/ {
&__nvoc_rtti_OBJGPUMGR_OBJGPUMGR,
&__nvoc_rtti_OBJGPUMGR_Object,
},
};
const struct NVOC_CLASS_DEF __nvoc_class_def_OBJGPUMGR =
{
/*classInfo=*/ {
/*size=*/ sizeof(OBJGPUMGR),
/*classId=*/ classId(OBJGPUMGR),
/*providerId=*/ &__nvoc_rtti_provider,
#if NV_PRINTF_STRINGS_ALLOWED
/*name=*/ "OBJGPUMGR",
#endif
},
/*objCreatefn=*/ (NVOC_DYNAMIC_OBJ_CREATE) &__nvoc_objCreateDynamic_OBJGPUMGR,
/*pCastInfo=*/ &__nvoc_castinfo_OBJGPUMGR,
/*pExportInfo=*/ &__nvoc_export_info_OBJGPUMGR
};
const struct NVOC_EXPORT_INFO __nvoc_export_info_OBJGPUMGR =
{
/*numEntries=*/ 0,
/*pExportEntries=*/ 0
};
void __nvoc_dtor_Object(Object*);
void __nvoc_dtor_OBJGPUMGR(OBJGPUMGR *pThis) {
__nvoc_gpumgrDestruct(pThis);
__nvoc_dtor_Object(&pThis->__nvoc_base_Object);
PORT_UNREFERENCED_VARIABLE(pThis);
}
void __nvoc_init_dataField_OBJGPUMGR(OBJGPUMGR *pThis) {
PORT_UNREFERENCED_VARIABLE(pThis);
}
NV_STATUS __nvoc_ctor_Object(Object* );
NV_STATUS __nvoc_ctor_OBJGPUMGR(OBJGPUMGR *pThis) {
NV_STATUS status = NV_OK;
status = __nvoc_ctor_Object(&pThis->__nvoc_base_Object);
if (status != NV_OK) goto __nvoc_ctor_OBJGPUMGR_fail_Object;
__nvoc_init_dataField_OBJGPUMGR(pThis);
status = __nvoc_gpumgrConstruct(pThis);
if (status != NV_OK) goto __nvoc_ctor_OBJGPUMGR_fail__init;
goto __nvoc_ctor_OBJGPUMGR_exit; // Success
__nvoc_ctor_OBJGPUMGR_fail__init:
__nvoc_dtor_Object(&pThis->__nvoc_base_Object);
__nvoc_ctor_OBJGPUMGR_fail_Object:
__nvoc_ctor_OBJGPUMGR_exit:
return status;
}
static void __nvoc_init_funcTable_OBJGPUMGR_1(OBJGPUMGR *pThis) {
PORT_UNREFERENCED_VARIABLE(pThis);
}
void __nvoc_init_funcTable_OBJGPUMGR(OBJGPUMGR *pThis) {
__nvoc_init_funcTable_OBJGPUMGR_1(pThis);
}
void __nvoc_init_Object(Object*);
void __nvoc_init_OBJGPUMGR(OBJGPUMGR *pThis) {
pThis->__nvoc_pbase_OBJGPUMGR = pThis;
pThis->__nvoc_pbase_Object = &pThis->__nvoc_base_Object;
__nvoc_init_Object(&pThis->__nvoc_base_Object);
__nvoc_init_funcTable_OBJGPUMGR(pThis);
}
NV_STATUS __nvoc_objCreate_OBJGPUMGR(OBJGPUMGR **ppThis, Dynamic *pParent, NvU32 createFlags) {
NV_STATUS status;
Object *pParentObj;
OBJGPUMGR *pThis;
pThis = portMemAllocNonPaged(sizeof(OBJGPUMGR));
if (pThis == NULL) return NV_ERR_NO_MEMORY;
portMemSet(pThis, 0, sizeof(OBJGPUMGR));
__nvoc_initRtti(staticCast(pThis, Dynamic), &__nvoc_class_def_OBJGPUMGR);
if (pParent != NULL && !(createFlags & NVOC_OBJ_CREATE_FLAGS_PARENT_HALSPEC_ONLY))
{
pParentObj = dynamicCast(pParent, Object);
objAddChild(pParentObj, &pThis->__nvoc_base_Object);
}
else
{
pThis->__nvoc_base_Object.pParent = NULL;
}
__nvoc_init_OBJGPUMGR(pThis);
status = __nvoc_ctor_OBJGPUMGR(pThis);
if (status != NV_OK) goto __nvoc_objCreate_OBJGPUMGR_cleanup;
*ppThis = pThis;
return NV_OK;
__nvoc_objCreate_OBJGPUMGR_cleanup:
// do not call destructors here since the constructor already called them
portMemFree(pThis);
return status;
}
NV_STATUS __nvoc_objCreateDynamic_OBJGPUMGR(OBJGPUMGR **ppThis, Dynamic *pParent, NvU32 createFlags, va_list args) {
NV_STATUS status;
status = __nvoc_objCreate_OBJGPUMGR(ppThis, pParent, createFlags);
return status;
}


@@ -0,0 +1,425 @@
#ifndef _G_GPU_MGR_NVOC_H_
#define _G_GPU_MGR_NVOC_H_
#include "nvoc/runtime.h"
#ifdef __cplusplus
extern "C" {
#endif
/*
* SPDX-FileCopyrightText: Copyright (c) 2005-2022 NVIDIA CORPORATION & AFFILIATES. All rights reserved.
* SPDX-License-Identifier: MIT
*
* Permission is hereby granted, free of charge, to any person obtaining a
* copy of this software and associated documentation files (the "Software"),
* to deal in the Software without restriction, including without limitation
* the rights to use, copy, modify, merge, publish, distribute, sublicense,
* and/or sell copies of the Software, and to permit persons to whom the
* Software is furnished to do so, subject to the following conditions:
*
* The above copyright notice and this permission notice shall be included in
* all copies or substantial portions of the Software.
*
* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
* IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
* FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL
* THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
* LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
* FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER
* DEALINGS IN THE SOFTWARE.
*/
#include "g_gpu_mgr_nvoc.h"
#ifndef _GPUMGR_H_
#define _GPUMGR_H_
//
// GPU Manager Defines and Structures
//
struct OBJGPU;
#include "core/core.h"
#include "core/system.h"
#include "nvlimits.h"
#include "gpu_mgr/gpu_group.h"
#include "gpu/gpu_uuid.h"
#include "gpu/gpu_device_mapping.h"
#include "gpu/gpu_access.h"
#include "ctrl/ctrl0000/ctrl0000gpu.h"
#include "ctrl/ctrl2080/ctrl2080ce.h"
#include "ctrl/ctrl2080/ctrl2080internal.h"
#include "nvoc/utility.h"
#include "nv_firmware_types.h"
#include "class/cl2080.h" // NV2080_ENGINE_TYPE_*
#include "utils/nvbitvector.h"
TYPEDEF_BITVECTOR(MC_ENGINE_BITVECTOR);
#define GPUMGR_MAX_GPU_INSTANCES 8
#define GPUMGR_MAX_COMPUTE_INSTANCES 8
MAKE_BITVECTOR(ENGTYPE_BIT_VECTOR, NV2080_ENGINE_TYPE_LAST);
typedef ENGTYPE_BIT_VECTOR *PENGTYPE_BIT_VECTOR;
//
// Terminology:
// GPU -> entity sitting on the bus
// Device -> broadcast semantics; maps to one or more GPUs
// Subdevice -> unicast semantics; maps to a single GPU
//
////////////////////////////////////////////////////////////////////////////////
// DO NOT ADD NEW STUBS HERE //
////////////////////////////////////////////////////////////////////////////////
#define gpumgrGetGpuLinkCount(deviceInstance) ((NvU32) 0)
#define gpumgrGetSliLinkOutputMaskFromGpu(pGpu) ((NvU32) 0)
#define gpumgrGetVidLinkOutputMaskFromGpu(pGpu) ((NvU32) 0)
#define gpumgrGetSliLinkOrderCount(pGpu) ((NvU32) 0)
#define gpumgrGetSliLinkConnectionCount(pGpu) ((NvU32) 0)
#define gpumgrGetSLIConfig(gpuInstance, onlyWithSliLink) ((NvU32) 0)
#define gpumgrDisableVidLink(pGpu, head, max_dr_port)
#define gpumgrGetGpuVidLinkMaxPixelClock(pGpu, pMaxPclkMhz) (NV_ERR_NOT_SUPPORTED)
#define gpumgrPinsetToPinsetTableIndex(pinset, pPinsetIndex) (NV_ERR_NOT_SUPPORTED)
#define gpumgrGetBcEnabledStatus(g) (NV_FALSE)
#define gpumgrGetBcEnabledStatusEx(g, t) (NV_FALSE)
#define gpumgrSetBcEnabledStatus(g, b) do { NvBool b2 = b; (void)b2; } while (0)
#define gpumgrSLILoopReentrancy(pGpu, l, r, i, pFuncStr)
#define gpumgrSLILoopReentrancyPop(pGpu) ((NvU32)0)
#define gpumgrSLILoopReentrancyPush(pGpu, sliLoopReentrancy) do { NvU32 x = sliLoopReentrancy; (void)x; } while(0)
typedef struct
{
NvU32 gpuId;
NvU64 gpuDomainBusDevice;
NvBool bInitAttempted;
NvBool bDrainState; // no new client connections to this GPU
NvBool bRemoveIdle; // remove this GPU once it's idle (detached)
NvBool bExcluded; // this gpu is marked as excluded; do not use
NvBool bUuidValid; // cached uuid is valid
NvBool bSkipHwNvlinkDisable; // skip HW register configuration for disabled links
NvU32 initDisabledNvlinksMask;
NV_STATUS initStatus;
NvU8 uuid[RM_SHA1_GID_SIZE];
OS_RM_CAPS *pOsRmCaps; // "Opaque" pointer to os-specific capabilities
} PROBEDGPU;
#define NV_DEVICE_DISPLAY_FLAGS_AFR_FRAME_FLIPS 11:4
#define NV_DEVICE_DISPLAY_FLAGS_AFR_FRAME_TIME 12:12
#define NV_DEVICE_DISPLAY_FLAGS_AFR_FRAME_TIME_INVALID 0x0000000
#define NV_DEVICE_DISPLAY_FLAGS_AFR_FRAME_TIME_VALID 0x0000001
/*!
* Structure for tracking resources allocated for saving primary GPU's VBIOS
* state. This is used for TDR/fullchip reset recovery. The GPU object gets
* destroyed, so the data belongs here.
*/
typedef struct _def_gpumgr_save_vbios_state
{
RmPhysAddr vgaWorkspaceVidMemBase; //<! Base address of the VGA workspace
struct MEMORY_DESCRIPTOR *pSaveToMemDesc; //<! Where VGA workspace is saved to
void *pSaveRegsOpaque; //<! Saved values of VGA registers
} GPUMGRSAVEVBIOSSTATE, *PGPUMGRSAVEVBIOSSTATE;
//
// types of bridges supported.
// These defines are indices for the types of bridges supported.
// Preference for a given bridge type is determined by the lower value index.
// I.e., video link has the lower value index, so if both NvLink and video link
// are detected, the video link will be used.
//
#define SLI_MAX_BRIDGE_TYPES 2
#define SLI_BT_VIDLINK 0
#define SLI_BT_NVLINK 1
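//
// Illustrative sketch (not part of this header) of the "lower index wins"
// preference rule described above. The helper name and bridgeDetectedMask
// are hypothetical; bit i of the mask is assumed set when bridge type i
// (SLI_BT_*) was detected.
//
static NV_INLINE NvU8 examplePickBridgeType(NvU32 bridgeDetectedMask)
{
    NvU8 bt;
    for (bt = 0; bt < SLI_MAX_BRIDGE_TYPES; bt++)
    {
        // SLI_BT_VIDLINK (0) is checked before SLI_BT_NVLINK (1), so video
        // link wins when both are detected.
        if (bridgeDetectedMask & NVBIT(bt))
            return bt;
    }
    return SLI_MAX_BRIDGE_TYPES; // nothing detected
}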
typedef struct NVLINK_TOPOLOGY_PARAMS
{
NvU32 sysmemLinks;
NvU32 maxLinksPerPeer;
NvBool bSymmetric;
// Pascal only
NvU32 numLinks;
// Volta +
NvU32 numPeers;
NvBool bSwitchConfig;
// Ampere +
NvU32 pceAvailableMaskPerHshub[NV2080_CTRL_CE_MAX_HSHUBS];
NvU32 fbhubPceMask;
NvU32 maxPceLceMap[NV2080_CTRL_MAX_PCES];
NvU32 maxGrceConfig[NV2080_CTRL_MAX_GRCES];
NvU32 maxExposeCeMask;
NvU32 maxTopoIdx; // For table configs only; not applicable for algorithm
} NVLINK_TOPOLOGY_PARAMS, *PNVLINK_TOPOLOGY_PARAMS;
typedef struct _def_gpu_nvlink_topology_info
{
NvBool valid;
NvU64 DomainBusDevice;
NVLINK_TOPOLOGY_PARAMS params;
} NVLINK_TOPOLOGY_INFO, *PNVLINK_TOPOLOGY_INFO;
typedef struct
{
OBJGPU *pGpu;
NvU32 gpuInstance;
} GPU_HANDLE_ID;
#ifdef NVOC_GPU_MGR_H_PRIVATE_ACCESS_ALLOWED
#define PRIVATE_FIELD(x) x
#else
#define PRIVATE_FIELD(x) NVOC_PRIVATE_FIELD(x)
#endif
struct GPUMGR_SAVE_MIG_INSTANCE_TOPOLOGY;
struct OBJGPUMGR {
const struct NVOC_RTTI *__nvoc_rtti;
struct Object __nvoc_base_Object;
struct Object *__nvoc_pbase_Object;
struct OBJGPUMGR *__nvoc_pbase_OBJGPUMGR;
PROBEDGPU probedGpus[32];
void *probedGpusLock;
NvU32 gpuAttachCount;
NvU32 gpuAttachMask;
NvU32 persistentSwStateGpuMask;
NvU32 deviceCount;
struct OBJGPUGRP *pGpuGrpTable[32];
NvU32 gpuInstMaskTable[32];
NvU8 gpuBridgeType;
GPUMGRSAVEVBIOSSTATE primaryVbiosState;
NvU8 powerDisconnectedGpuCount;
NvU8 powerDisconnectedGpuBus[32];
GPU_HANDLE_ID gpuHandleIDList[32];
NvU32 numGpuHandles;
};
#ifndef __NVOC_CLASS_OBJGPUMGR_TYPEDEF__
#define __NVOC_CLASS_OBJGPUMGR_TYPEDEF__
typedef struct OBJGPUMGR OBJGPUMGR;
#endif /* __NVOC_CLASS_OBJGPUMGR_TYPEDEF__ */
#ifndef __nvoc_class_id_OBJGPUMGR
#define __nvoc_class_id_OBJGPUMGR 0xcf1b25
#endif /* __nvoc_class_id_OBJGPUMGR */
extern const struct NVOC_CLASS_DEF __nvoc_class_def_OBJGPUMGR;
#define __staticCast_OBJGPUMGR(pThis) \
((pThis)->__nvoc_pbase_OBJGPUMGR)
#ifdef __nvoc_gpu_mgr_h_disabled
#define __dynamicCast_OBJGPUMGR(pThis) ((OBJGPUMGR*)NULL)
#else //__nvoc_gpu_mgr_h_disabled
#define __dynamicCast_OBJGPUMGR(pThis) \
((OBJGPUMGR*)__nvoc_dynamicCast(staticCast((pThis), Dynamic), classInfo(OBJGPUMGR)))
#endif //__nvoc_gpu_mgr_h_disabled
NV_STATUS __nvoc_objCreateDynamic_OBJGPUMGR(OBJGPUMGR**, Dynamic*, NvU32, va_list);
NV_STATUS __nvoc_objCreate_OBJGPUMGR(OBJGPUMGR**, Dynamic*, NvU32);
#define __objCreate_OBJGPUMGR(ppNewObj, pParent, createFlags) \
__nvoc_objCreate_OBJGPUMGR((ppNewObj), staticCast((pParent), Dynamic), (createFlags))
static inline void gpumgrAddSystemNvlinkTopo(NvU64 DomainBusDevice) {
return;
}
static inline NvBool gpumgrGetSystemNvlinkTopo(NvU64 DomainBusDevice, struct NVLINK_TOPOLOGY_PARAMS *pTopoParams) {
return ((NvBool)(0 != 0));
}
static inline void gpumgrUpdateSystemNvlinkTopo(NvU64 DomainBusDevice, struct NVLINK_TOPOLOGY_PARAMS *pTopoParams) {
return;
}
static inline NV_STATUS gpumgrSetGpuInitDisabledNvlinks(NvU32 gpuId, NvU32 mask, NvBool bSkipHwNvlinkDisable) {
return NV_ERR_NOT_SUPPORTED;
}
static inline NV_STATUS gpumgrGetGpuInitDisabledNvlinks(NvU32 gpuId, NvU32 *pMask, NvBool *pbSkipHwNvlinkDisable) {
return NV_ERR_NOT_SUPPORTED;
}
static inline NvBool gpumgrCheckIndirectPeer(struct OBJGPU *pGpu, struct OBJGPU *pRemoteGpu) {
return ((NvBool)(0 != 0));
}
static inline void gpumgrAddSystemMIGInstanceTopo(NvU64 domainBusDevice) {
return;
}
static inline NvBool gpumgrGetSystemMIGInstanceTopo(NvU64 domainBusDevice, struct GPUMGR_SAVE_MIG_INSTANCE_TOPOLOGY **ppTopoParams) {
return ((NvBool)(0 != 0));
}
static inline NvBool gpumgrIsSystemMIGEnabled(NvU64 domainBusDevice) {
return ((NvBool)(0 != 0));
}
static inline void gpumgrSetSystemMIGEnabled(NvU64 domainBusDevice, NvBool bMIGEnabled) {
return;
}
static inline void gpumgrUnregisterRmCapsForMIGGI(NvU64 gpuDomainBusDevice) {
return;
}
static inline void gpumgrUpdateBoardId(struct OBJGPU *arg0) {
return;
}
static inline void gpumgrServiceInterrupts(NvU32 arg0, MC_ENGINE_BITVECTOR *arg1, NvBool arg2) {
return;
}
NV_STATUS gpumgrConstruct_IMPL(struct OBJGPUMGR *arg_);
#define __nvoc_gpumgrConstruct(arg_) gpumgrConstruct_IMPL(arg_)
void gpumgrDestruct_IMPL(struct OBJGPUMGR *arg0);
#define __nvoc_gpumgrDestruct(arg0) gpumgrDestruct_IMPL(arg0)
#undef PRIVATE_FIELD
typedef struct {
NvBool specified; // Set this flag when using this struct
NvBool bIsIGPU; // Set this flag for iGPU
DEVICE_MAPPING deviceMapping[DEVICE_INDEX_MAX]; // Register Aperture mapping
NvU32 socChipId0; // Chip ID used for HAL binding
NvU32 iovaspaceId; // SMMU client ID
} SOCGPUATTACHARG;
//
// Packages up system/bus state for attach process.
//
typedef struct GPUATTACHARG
{
GPUHWREG *regBaseAddr;
GPUHWREG *fbBaseAddr;
GPUHWREG *instBaseAddr;
RmPhysAddr devPhysAddr;
RmPhysAddr fbPhysAddr;
RmPhysAddr instPhysAddr;
RmPhysAddr ioPhysAddr;
NvU64 nvDomainBusDeviceFunc;
NvU32 regLength;
NvU64 fbLength;
NvU32 instLength;
NvU32 intLine;
void *pOsAttachArg;
NvBool bIsSOC;
NvU32 socDeviceCount;
DEVICE_MAPPING socDeviceMappings[GPU_MAX_DEVICE_MAPPINGS];
NvU32 socId;
NvU32 socSubId;
NvU32 socChipId0;
NvU32 iovaspaceId;
NvBool bRequestFwClientRm;
//
// The SOC-specific fields above are legacy fields that were added for
// ARCH MODS iGPU verification. There is a plan to deprecate these fields as
// part of an effort to clean up the existing iGPU code in RM.
//
// Starting with T234D+, the SOCGPUATTACHARG field below will be used to
// pass the required attach info for a single SOC device from the RM OS
// layer to core RM.
//
SOCGPUATTACHARG socDeviceArgs;
} GPUATTACHARG;
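//
// Illustrative sketch (hypothetical helper, hypothetical values) of how an OS
// layer might populate a GPUATTACHARG for a discrete GPU before calling
// gpumgrAttachGpu(); the real OS layer derives these fields from PCI probing.
//
static NV_INLINE void exampleFillAttachArg(GPUATTACHARG *pAttachArg,
                                           RmPhysAddr regPhysAddr, NvU32 regSize,
                                           RmPhysAddr fbPhysAddr, NvU64 fbSize,
                                           NvU64 domainBusDeviceFunc, NvU32 irqLine)
{
    portMemSet(pAttachArg, 0, sizeof(*pAttachArg));
    pAttachArg->devPhysAddr           = regPhysAddr;
    pAttachArg->regLength             = regSize;
    pAttachArg->fbPhysAddr            = fbPhysAddr;
    pAttachArg->fbLength              = fbSize;
    pAttachArg->nvDomainBusDeviceFunc = domainBusDeviceFunc;
    pAttachArg->intLine               = irqLine;
    pAttachArg->bIsSOC                = NV_FALSE;   // SOC/iGPU attach uses socDeviceArgs instead
}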
NV_STATUS gpumgrGetGpuAttachInfo(NvU32 *pGpuCnt, NvU32 *pGpuMask);
NV_STATUS gpumgrGetProbedGpuIds(NV0000_CTRL_GPU_GET_PROBED_IDS_PARAMS *);
NV_STATUS gpumgrGetProbedGpuDomainBusDevice(NvU32 gpuId, NvU64 *gpuDomainBusDevice);
NV_STATUS gpumgrGetAttachedGpuIds(NV0000_CTRL_GPU_GET_ATTACHED_IDS_PARAMS *);
NV_STATUS gpumgrGetGpuIdInfo(NV0000_CTRL_GPU_GET_ID_INFO_PARAMS *);
NV_STATUS gpumgrGetGpuIdInfoV2(NV0000_CTRL_GPU_GET_ID_INFO_V2_PARAMS *);
void gpumgrSetGpuId(OBJGPU*, NvU32 gpuId);
NV_STATUS gpumgrGetGpuInitStatus(NV0000_CTRL_GPU_GET_INIT_STATUS_PARAMS *);
void gpumgrSetGpuInitStatus(NvU32 gpuId, NV_STATUS status);
OBJGPU* gpumgrGetGpuFromId(NvU32 gpuId);
OBJGPU* gpumgrGetGpuFromUuid(const NvU8 *pGpuUuid, NvU32 flags);
OBJGPU* gpumgrGetGpuFromBusInfo(NvU32 domain, NvU8 bus, NvU8 device);
NvU32 gpumgrGetDefaultPrimaryGpu(NvU32 gpuMask);
NV_STATUS gpumgrAllocGpuInstance(NvU32 *pDeviceInstance);
NV_STATUS gpumgrRegisterGpuId(NvU32 gpuId, NvU64 gpuDomainBusDevice);
NV_STATUS gpumgrUnregisterGpuId(NvU32 gpuId);
NV_STATUS gpumgrExcludeGpuId(NvU32 gpuId);
NV_STATUS gpumgrSetUuid(NvU32 gpuId, NvU8 *uuid);
NV_STATUS gpumgrGetGpuUuidInfo(NvU32 gpuId, NvU8 **ppUuidStr, NvU32 *pUuidStrLen, NvU32 uuidFlags);
NV_STATUS gpumgrAttachGpu(NvU32 deviceInstance, GPUATTACHARG *);
NV_STATUS gpumgrDetachGpu(NvU32 deviceInstance);
OBJGPU* gpumgrGetNextGpu(NvU32 gpuMask, NvU32 *pStartIndex);
NV_STATUS gpumgrStatePreInitGpu(OBJGPU*);
NV_STATUS gpumgrStateInitGpu(OBJGPU*);
NV_STATUS gpumgrStateLoadGpu(OBJGPU*, NvU32);
NV_STATUS gpumgrAllocDeviceInstance(NvU32 *pDeviceInstance);
NV_STATUS gpumgrCreateDevice(NvU32 *pDeviceInstance, NvU32 gpuMask, NvU32 *pGpuIdsOrdinal);
NV_STATUS gpumgrDestroyDevice(NvU32 deviceInstance);
NvU32 gpumgrGetDeviceInstanceMask(void);
NvU32 gpumgrGetDeviceGpuMask(NvU32 deviceInstance);
NV_STATUS gpumgrIsDeviceInstanceValid(NvU32 deviceInstance);
NvU32 gpumgrGetPrimaryForDevice(NvU32 deviceInstance);
NvBool gpumgrIsSubDeviceInstanceValid(NvU32 subDeviceInstance);
NvBool gpumgrIsDeviceEnabled(NvU32 deviceInstance);
NvU32 gpumgrGetGpuMask(OBJGPU *pGpu);
OBJGPU* gpumgrGetGpu(NvU32 deviceInstance);
OBJGPU* gpumgrGetSomeGpu(void);
NvU32 gpumgrGetSubDeviceCount(NvU32 gpuMask);
NvU32 gpumgrGetSubDeviceCountFromGpu(OBJGPU *pGpu);
NvU32 gpumgrGetSubDeviceMaxValuePlus1(OBJGPU *pGpu);
NvU32 gpumgrGetSubDeviceInstanceFromGpu(OBJGPU *pGpu);
OBJGPU* gpumgrGetParentGPU(OBJGPU *pGpu);
void gpumgrSetParentGPU(OBJGPU *pGpu, OBJGPU *pParentGpu);
NvBool gpumgrIsGpuDisplayParent(OBJGPU*);
OBJGPU* gpumgrGetDisplayParent(OBJGPU*);
NV_STATUS gpumgrGetGpuLockAndDrPorts(OBJGPU*, OBJGPU*, NvU32 *, NvU32 *);
NV_STATUS gpumgrGetBootPrimary(OBJGPU **ppGpu);
OBJGPU* gpumgrGetMGpu(void);
RmPhysAddr gpumgrGetGpuPhysFbAddr(OBJGPU*);
OBJGPU* gpumgrGetGpuFromSubDeviceInst(NvU32, NvU32);
NV_STATUS gpumgrAddDeviceInstanceToGpus(NvU32 gpuMask);
NV_STATUS gpumgrRemoveDeviceInstanceFromGpus(NvU32 gpuMask);
NV_STATUS gpumgrConstructGpuGrpObject(struct OBJGPUMGR *pGpuMgr, NvU32 gpuMask, struct OBJGPUGRP **ppGpuGrp);
struct OBJGPUGRP* gpumgrGetGpuGrpFromGpu(OBJGPU *pGpu);
struct OBJGPUGRP* gpumgrGetGpuGrpFromInstance(NvU32 gpugrpInstance);
NV_STATUS gpumgrModifyGpuDrainState(NvU32 gpuId, NvBool bEnable, NvBool bRemove, NvBool bLinkDisable);
NV_STATUS gpumgrQueryGpuDrainState(NvU32 gpuId, NvBool *pBEnable, NvBool *pBRemove);
NvBool gpumgrIsGpuPointerValid(OBJGPU *pGpu);
NvU32 gpumgrGetGrpMaskFromGpuInst(NvU32 gpuInst);
void gpumgrAddDeviceMaskToGpuInstTable(NvU32 gpuMask);
void gpumgrClearDeviceMaskFromGpuInstTable(NvU32 gpuMask);
NvBool gpumgrSetGpuAcquire(OBJGPU *pGpu);
void gpumgrSetGpuRelease(void);
NvU8 gpumgrGetGpuBridgeType(void);
//
// gpumgrIsSubDeviceCountOne
//
static NV_INLINE NvBool
gpumgrIsSubDeviceCountOne(NvU32 gpuMask)
{
//
// A fast version of gpumgrGetSubDeviceCount(gpuMask) == 1.
// Make sure it returns NV_FALSE for gpuMask == 0, just like gpumgrGetSubDeviceCount(0).
//
return gpuMask != 0 && (gpuMask&(gpuMask-1)) == 0;
}
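//
// For illustration, a few worked values for the check above:
//   gpuMask = 0x08: 0x08 & 0x07 == 0x00 -> exactly one subdevice -> NV_TRUE
//   gpuMask = 0x0A: 0x0A & 0x09 == 0x08 -> more than one         -> NV_FALSE
//   gpuMask = 0x00: rejected by the first test                   -> NV_FALSE
//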
//
// gpumgrIsParentGPU
//
static NV_INLINE NvBool
gpumgrIsParentGPU(OBJGPU *pGpu)
{
return gpumgrGetParentGPU(pGpu) == pGpu;
}
#endif // _GPUMGR_H_
#ifdef __cplusplus
} // extern "C"
#endif
#endif // _G_GPU_MGR_NVOC_H_


@@ -0,0 +1,444 @@
#define NVOC_GPU_H_PRIVATE_ACCESS_ALLOWED
#include "nvoc/runtime.h"
#include "nvoc/rtti.h"
#include "nvtypes.h"
#include "nvport/nvport.h"
#include "nvport/inline/util_valist.h"
#include "utils/nvassert.h"
#include "g_gpu_nvoc.h"
#ifdef DEBUG
char __nvoc_class_id_uniqueness_check_0x7ef3cb = 1;
#endif
extern const struct NVOC_CLASS_DEF __nvoc_class_def_OBJGPU;
extern const struct NVOC_CLASS_DEF __nvoc_class_def_Object;
extern const struct NVOC_CLASS_DEF __nvoc_class_def_RmHalspecOwner;
extern const struct NVOC_CLASS_DEF __nvoc_class_def_OBJTRACEABLE;
void __nvoc_init_OBJGPU(OBJGPU*,
NvU32 ChipHal_arch, NvU32 ChipHal_impl, NvU32 ChipHal_hidrev,
RM_RUNTIME_VARIANT RmVariantHal_rmVariant,
NvU32 DispIpHal_ipver);
void __nvoc_init_funcTable_OBJGPU(OBJGPU*);
NV_STATUS __nvoc_ctor_OBJGPU(OBJGPU*, NvU32 arg_gpuInstance);
void __nvoc_init_dataField_OBJGPU(OBJGPU*);
void __nvoc_dtor_OBJGPU(OBJGPU*);
extern const struct NVOC_EXPORT_INFO __nvoc_export_info_OBJGPU;
static const struct NVOC_RTTI __nvoc_rtti_OBJGPU_OBJGPU = {
/*pClassDef=*/ &__nvoc_class_def_OBJGPU,
/*dtor=*/ (NVOC_DYNAMIC_DTOR) &__nvoc_dtor_OBJGPU,
/*offset=*/ 0,
};
static const struct NVOC_RTTI __nvoc_rtti_OBJGPU_Object = {
/*pClassDef=*/ &__nvoc_class_def_Object,
/*dtor=*/ &__nvoc_destructFromBase,
/*offset=*/ NV_OFFSETOF(OBJGPU, __nvoc_base_Object),
};
static const struct NVOC_RTTI __nvoc_rtti_OBJGPU_RmHalspecOwner = {
/*pClassDef=*/ &__nvoc_class_def_RmHalspecOwner,
/*dtor=*/ &__nvoc_destructFromBase,
/*offset=*/ NV_OFFSETOF(OBJGPU, __nvoc_base_RmHalspecOwner),
};
static const struct NVOC_RTTI __nvoc_rtti_OBJGPU_OBJTRACEABLE = {
/*pClassDef=*/ &__nvoc_class_def_OBJTRACEABLE,
/*dtor=*/ &__nvoc_destructFromBase,
/*offset=*/ NV_OFFSETOF(OBJGPU, __nvoc_base_OBJTRACEABLE),
};
static const struct NVOC_CASTINFO __nvoc_castinfo_OBJGPU = {
/*numRelatives=*/ 4,
/*relatives=*/ {
&__nvoc_rtti_OBJGPU_OBJGPU,
&__nvoc_rtti_OBJGPU_OBJTRACEABLE,
&__nvoc_rtti_OBJGPU_RmHalspecOwner,
&__nvoc_rtti_OBJGPU_Object,
},
};
const struct NVOC_CLASS_DEF __nvoc_class_def_OBJGPU =
{
/*classInfo=*/ {
/*size=*/ sizeof(OBJGPU),
/*classId=*/ classId(OBJGPU),
/*providerId=*/ &__nvoc_rtti_provider,
#if NV_PRINTF_STRINGS_ALLOWED
/*name=*/ "OBJGPU",
#endif
},
/*objCreatefn=*/ (NVOC_DYNAMIC_OBJ_CREATE) &__nvoc_objCreateDynamic_OBJGPU,
/*pCastInfo=*/ &__nvoc_castinfo_OBJGPU,
/*pExportInfo=*/ &__nvoc_export_info_OBJGPU
};
const struct NVOC_EXPORT_INFO __nvoc_export_info_OBJGPU =
{
/*numEntries=*/ 0,
/*pExportEntries=*/ 0
};
void __nvoc_dtor_Object(Object*);
void __nvoc_dtor_RmHalspecOwner(RmHalspecOwner*);
void __nvoc_dtor_OBJTRACEABLE(OBJTRACEABLE*);
void __nvoc_dtor_OBJGPU(OBJGPU *pThis) {
__nvoc_gpuDestruct(pThis);
__nvoc_dtor_Object(&pThis->__nvoc_base_Object);
__nvoc_dtor_RmHalspecOwner(&pThis->__nvoc_base_RmHalspecOwner);
__nvoc_dtor_OBJTRACEABLE(&pThis->__nvoc_base_OBJTRACEABLE);
PORT_UNREFERENCED_VARIABLE(pThis);
}
void __nvoc_init_dataField_OBJGPU(OBJGPU *pThis) {
ChipHal *chipHal = &staticCast(pThis, RmHalspecOwner)->chipHal;
const unsigned long chipHal_HalVarIdx = (unsigned long)chipHal->__nvoc_HalVarIdx;
RmVariantHal *rmVariantHal = &staticCast(pThis, RmHalspecOwner)->rmVariantHal;
const unsigned long rmVariantHal_HalVarIdx = (unsigned long)rmVariantHal->__nvoc_HalVarIdx;
PORT_UNREFERENCED_VARIABLE(pThis);
PORT_UNREFERENCED_VARIABLE(chipHal);
PORT_UNREFERENCED_VARIABLE(chipHal_HalVarIdx);
PORT_UNREFERENCED_VARIABLE(rmVariantHal);
PORT_UNREFERENCED_VARIABLE(rmVariantHal_HalVarIdx);
pThis->setProperty(pThis, PDB_PROP_GPU_IS_CONNECTED, ((NvBool)(0 == 0)));
// NVOC Property Hal field -- PDB_PROP_GPU_TEGRA_SOC_NVDISPLAY
if (( ((chipHal_HalVarIdx >> 5) == 2UL) && ((1UL << (chipHal_HalVarIdx & 0x1f)) & 0x00010000UL) )) /* ChipHal: T234D */
{
pThis->setProperty(pThis, PDB_PROP_GPU_TEGRA_SOC_NVDISPLAY, ((NvBool)(0 == 0)));
}
// default
else
{
pThis->setProperty(pThis, PDB_PROP_GPU_TEGRA_SOC_NVDISPLAY, ((NvBool)(0 != 0)));
}
// NVOC Property Hal field -- PDB_PROP_GPU_TEGRA_SOC_IGPU
if (0)
{
}
// default
else
{
pThis->setProperty(pThis, PDB_PROP_GPU_TEGRA_SOC_IGPU, ((NvBool)(0 != 0)));
}
// NVOC Property Hal field -- PDB_PROP_GPU_ATS_SUPPORTED
if (0)
{
}
// default
else
{
pThis->setProperty(pThis, PDB_PROP_GPU_ATS_SUPPORTED, ((NvBool)(0 != 0)));
}
// NVOC Property Hal field -- PDB_PROP_GPU_IS_UEFI
if (( ((chipHal_HalVarIdx >> 5) == 2UL) && ((1UL << (chipHal_HalVarIdx & 0x1f)) & 0x00010000UL) )) /* ChipHal: T234D */
{
pThis->setProperty(pThis, PDB_PROP_GPU_IS_UEFI, ((NvBool)(0 == 0)));
}
// default
else
{
pThis->setProperty(pThis, PDB_PROP_GPU_IS_UEFI, ((NvBool)(0 != 0)));
}
// NVOC Property Hal field -- PDB_PROP_GPU_ZERO_FB
if (0)
{
}
// default
else
{
pThis->setProperty(pThis, PDB_PROP_GPU_ZERO_FB, ((NvBool)(0 != 0)));
}
// NVOC Property Hal field -- PDB_PROP_GPU_CAN_OPTIMIZE_COMPUTE_USE_CASE
if (0)
{
}
// default
else
{
pThis->setProperty(pThis, PDB_PROP_GPU_CAN_OPTIMIZE_COMPUTE_USE_CASE, ((NvBool)(0 != 0)));
}
// NVOC Property Hal field -- PDB_PROP_GPU_MIG_SUPPORTED
if (0)
{
}
// default
else
{
pThis->setProperty(pThis, PDB_PROP_GPU_MIG_SUPPORTED, ((NvBool)(0 != 0)));
}
// NVOC Property Hal field -- PDB_PROP_GPU_VC_CAPABILITY_SUPPORTED
if (0)
{
}
// default
else
{
pThis->setProperty(pThis, PDB_PROP_GPU_VC_CAPABILITY_SUPPORTED, ((NvBool)(0 != 0)));
}
// NVOC Property Hal field -- PDB_PROP_GPU_RESETLESS_MIG_SUPPORTED
if (0)
{
}
// default
else
{
pThis->setProperty(pThis, PDB_PROP_GPU_RESETLESS_MIG_SUPPORTED, ((NvBool)(0 != 0)));
}
// NVOC Property Hal field -- PDB_PROP_GPU_IS_COT_ENABLED
if (0)
{
}
// default
else
{
pThis->setProperty(pThis, PDB_PROP_GPU_IS_COT_ENABLED, ((NvBool)(0 != 0)));
}
pThis->boardId = ~0;
pThis->deviceInstance = 32;
// Hal field -- isVirtual
if (0)
{
}
else if (( ((rmVariantHal_HalVarIdx >> 5) == 0UL) && ((1UL << (rmVariantHal_HalVarIdx & 0x1f)) & 0x00000002UL) )) /* RmVariantHal: PF_KERNEL_ONLY */
{
pThis->isVirtual = ((NvBool)(0 != 0));
}
// Hal field -- isGspClient
if (( ((rmVariantHal_HalVarIdx >> 5) == 0UL) && ((1UL << (rmVariantHal_HalVarIdx & 0x1f)) & 0x00000002UL) )) /* RmVariantHal: PF_KERNEL_ONLY */
{
pThis->isGspClient = ((NvBool)(0 == 0));
}
else if (0)
{
}
pThis->bIsDebugModeEnabled = ((NvBool)(0 != 0));
pThis->numOfMclkLockRequests = 0U;
pThis->bUseRegisterAccessMap = !(0);
pThis->boardInfo = ((void *)0);
// Hal field -- bUnifiedMemorySpaceEnabled
if (( ((chipHal_HalVarIdx >> 5) == 2UL) && ((1UL << (chipHal_HalVarIdx & 0x1f)) & 0x00010000UL) )) /* ChipHal: T234D */
{
pThis->bUnifiedMemorySpaceEnabled = ((NvBool)(0 == 0));
}
// default
else
{
pThis->bUnifiedMemorySpaceEnabled = ((NvBool)(0 != 0));
}
// Hal field -- bWarBug200577889SriovHeavyEnabled
pThis->bWarBug200577889SriovHeavyEnabled = ((NvBool)(0 != 0));
// Hal field -- bNeed4kPageIsolation
if (0)
{
}
// default
else
{
pThis->bNeed4kPageIsolation = ((NvBool)(0 != 0));
}
// Hal field -- bInstLoc47bitPaWar
if (0)
{
}
// default
else
{
pThis->bInstLoc47bitPaWar = ((NvBool)(0 != 0));
}
// Hal field -- bIsBarPteInSysmemSupported
if (( ((chipHal_HalVarIdx >> 5) == 2UL) && ((1UL << (chipHal_HalVarIdx & 0x1f)) & 0x00010000UL) )) /* ChipHal: T234D */
{
pThis->bIsBarPteInSysmemSupported = ((NvBool)(0 == 0));
}
// default
else
{
pThis->bIsBarPteInSysmemSupported = ((NvBool)(0 != 0));
}
// Hal field -- bClientRmAllocatedCtxBuffer
if (0)
{
}
// default
else
{
pThis->bClientRmAllocatedCtxBuffer = ((NvBool)(0 != 0));
}
// Hal field -- bVidmemPreservationBrokenBug3172217
if (0)
{
}
// default
else
{
pThis->bVidmemPreservationBrokenBug3172217 = ((NvBool)(0 != 0));
}
// Hal field -- bInstanceMemoryAlwaysCached
if (( ((chipHal_HalVarIdx >> 5) == 2UL) && ((1UL << (chipHal_HalVarIdx & 0x1f)) & 0x00010000UL) )) /* ChipHal: T234D */
{
pThis->bInstanceMemoryAlwaysCached = ((NvBool)(0 == 0));
}
// default
else
{
pThis->bInstanceMemoryAlwaysCached = ((NvBool)(0 != 0));
}
pThis->bIsGeforce = ((NvBool)(0 == 0));
// Hal field -- bComputePolicyTimesliceSupported
if (0)
{
}
// default
else
{
pThis->bComputePolicyTimesliceSupported = ((NvBool)(0 != 0));
}
}
NV_STATUS __nvoc_ctor_Object(Object* );
NV_STATUS __nvoc_ctor_RmHalspecOwner(RmHalspecOwner* );
NV_STATUS __nvoc_ctor_OBJTRACEABLE(OBJTRACEABLE* );
NV_STATUS __nvoc_ctor_OBJGPU(OBJGPU *pThis, NvU32 arg_gpuInstance) {
NV_STATUS status = NV_OK;
status = __nvoc_ctor_Object(&pThis->__nvoc_base_Object);
if (status != NV_OK) goto __nvoc_ctor_OBJGPU_fail_Object;
status = __nvoc_ctor_RmHalspecOwner(&pThis->__nvoc_base_RmHalspecOwner);
if (status != NV_OK) goto __nvoc_ctor_OBJGPU_fail_RmHalspecOwner;
status = __nvoc_ctor_OBJTRACEABLE(&pThis->__nvoc_base_OBJTRACEABLE);
if (status != NV_OK) goto __nvoc_ctor_OBJGPU_fail_OBJTRACEABLE;
__nvoc_init_dataField_OBJGPU(pThis);
status = __nvoc_gpuConstruct(pThis, arg_gpuInstance);
if (status != NV_OK) goto __nvoc_ctor_OBJGPU_fail__init;
goto __nvoc_ctor_OBJGPU_exit; // Success
__nvoc_ctor_OBJGPU_fail__init:
__nvoc_dtor_OBJTRACEABLE(&pThis->__nvoc_base_OBJTRACEABLE);
__nvoc_ctor_OBJGPU_fail_OBJTRACEABLE:
__nvoc_dtor_RmHalspecOwner(&pThis->__nvoc_base_RmHalspecOwner);
__nvoc_ctor_OBJGPU_fail_RmHalspecOwner:
__nvoc_dtor_Object(&pThis->__nvoc_base_Object);
__nvoc_ctor_OBJGPU_fail_Object:
__nvoc_ctor_OBJGPU_exit:
return status;
}
static void __nvoc_init_funcTable_OBJGPU_1(OBJGPU *pThis) {
ChipHal *chipHal = &staticCast(pThis, RmHalspecOwner)->chipHal;
const unsigned long chipHal_HalVarIdx = (unsigned long)chipHal->__nvoc_HalVarIdx;
RmVariantHal *rmVariantHal = &staticCast(pThis, RmHalspecOwner)->rmVariantHal;
const unsigned long rmVariantHal_HalVarIdx = (unsigned long)rmVariantHal->__nvoc_HalVarIdx;
PORT_UNREFERENCED_VARIABLE(pThis);
PORT_UNREFERENCED_VARIABLE(chipHal);
PORT_UNREFERENCED_VARIABLE(chipHal_HalVarIdx);
PORT_UNREFERENCED_VARIABLE(rmVariantHal);
PORT_UNREFERENCED_VARIABLE(rmVariantHal_HalVarIdx);
}
void __nvoc_init_funcTable_OBJGPU(OBJGPU *pThis) {
__nvoc_init_funcTable_OBJGPU_1(pThis);
}
void __nvoc_init_Object(Object*);
void __nvoc_init_RmHalspecOwner(RmHalspecOwner*, NvU32, NvU32, NvU32, RM_RUNTIME_VARIANT, NvU32);
void __nvoc_init_OBJTRACEABLE(OBJTRACEABLE*);
void __nvoc_init_OBJGPU(OBJGPU *pThis,
NvU32 ChipHal_arch, NvU32 ChipHal_impl, NvU32 ChipHal_hidrev,
RM_RUNTIME_VARIANT RmVariantHal_rmVariant,
NvU32 DispIpHal_ipver) {
pThis->__nvoc_pbase_OBJGPU = pThis;
pThis->__nvoc_pbase_Object = &pThis->__nvoc_base_Object;
pThis->__nvoc_pbase_RmHalspecOwner = &pThis->__nvoc_base_RmHalspecOwner;
pThis->__nvoc_pbase_OBJTRACEABLE = &pThis->__nvoc_base_OBJTRACEABLE;
__nvoc_init_Object(&pThis->__nvoc_base_Object);
__nvoc_init_RmHalspecOwner(&pThis->__nvoc_base_RmHalspecOwner, ChipHal_arch, ChipHal_impl, ChipHal_hidrev, RmVariantHal_rmVariant, DispIpHal_ipver);
__nvoc_init_OBJTRACEABLE(&pThis->__nvoc_base_OBJTRACEABLE);
__nvoc_init_funcTable_OBJGPU(pThis);
}
NV_STATUS __nvoc_objCreate_OBJGPU(OBJGPU **ppThis, Dynamic *pParent, NvU32 createFlags,
NvU32 ChipHal_arch, NvU32 ChipHal_impl, NvU32 ChipHal_hidrev,
RM_RUNTIME_VARIANT RmVariantHal_rmVariant,
NvU32 DispIpHal_ipver, NvU32 arg_gpuInstance) {
NV_STATUS status;
Object *pParentObj;
OBJGPU *pThis;
pThis = portMemAllocNonPaged(sizeof(OBJGPU));
if (pThis == NULL) return NV_ERR_NO_MEMORY;
portMemSet(pThis, 0, sizeof(OBJGPU));
__nvoc_initRtti(staticCast(pThis, Dynamic), &__nvoc_class_def_OBJGPU);
if (pParent != NULL && !(createFlags & NVOC_OBJ_CREATE_FLAGS_PARENT_HALSPEC_ONLY))
{
pParentObj = dynamicCast(pParent, Object);
objAddChild(pParentObj, &pThis->__nvoc_base_Object);
}
else
{
pThis->__nvoc_base_Object.pParent = NULL;
}
__nvoc_init_OBJGPU(pThis, ChipHal_arch, ChipHal_impl, ChipHal_hidrev, RmVariantHal_rmVariant, DispIpHal_ipver);
status = __nvoc_ctor_OBJGPU(pThis, arg_gpuInstance);
if (status != NV_OK) goto __nvoc_objCreate_OBJGPU_cleanup;
*ppThis = pThis;
return NV_OK;
__nvoc_objCreate_OBJGPU_cleanup:
// do not call destructors here since the constructor already called them
portMemFree(pThis);
return status;
}
NV_STATUS __nvoc_objCreateDynamic_OBJGPU(OBJGPU **ppThis, Dynamic *pParent, NvU32 createFlags, va_list args) {
NV_STATUS status;
NvU32 ChipHal_arch = va_arg(args, NvU32);
NvU32 ChipHal_impl = va_arg(args, NvU32);
NvU32 ChipHal_hidrev = va_arg(args, NvU32);
RM_RUNTIME_VARIANT RmVariantHal_rmVariant = va_arg(args, RM_RUNTIME_VARIANT);
NvU32 DispIpHal_ipver = va_arg(args, NvU32);
NvU32 arg_gpuInstance = va_arg(args, NvU32);
status = __nvoc_objCreate_OBJGPU(ppThis, pParent, createFlags, ChipHal_arch, ChipHal_impl, ChipHal_hidrev, RmVariantHal_rmVariant, DispIpHal_ipver, arg_gpuInstance);
return status;
}
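//
// For reference only: a hedged sketch of a direct call to the generated
// OBJGPU constructor above. The helper name is hypothetical; the point is
// that the halspec arguments must be supplied in the same order the
// va_arg() calls above consume them: arch, impl, hidrev, rmVariant,
// dispIpVer, then the gpuInstance constructor argument.
//
static NV_STATUS exampleCreateGpu(Dynamic *pParent, NvU32 arch, NvU32 impl,
                                  NvU32 hidrev, RM_RUNTIME_VARIANT rmVariant,
                                  NvU32 dispIpVer, NvU32 gpuInstance,
                                  OBJGPU **ppGpu)
{
    return __nvoc_objCreate_OBJGPU(ppGpu, pParent, 0 /* createFlags */,
                                   arch, impl, hidrev, rmVariant,
                                   dispIpVer, gpuInstance);
}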

File diff suppressed because it is too large

@@ -0,0 +1,309 @@
#define NVOC_GPU_RESOURCE_H_PRIVATE_ACCESS_ALLOWED
#include "nvoc/runtime.h"
#include "nvoc/rtti.h"
#include "nvtypes.h"
#include "nvport/nvport.h"
#include "nvport/inline/util_valist.h"
#include "utils/nvassert.h"
#include "g_gpu_resource_nvoc.h"
#ifdef DEBUG
char __nvoc_class_id_uniqueness_check_0x5d5d9f = 1;
#endif
extern const struct NVOC_CLASS_DEF __nvoc_class_def_GpuResource;
extern const struct NVOC_CLASS_DEF __nvoc_class_def_Object;
extern const struct NVOC_CLASS_DEF __nvoc_class_def_RsResource;
extern const struct NVOC_CLASS_DEF __nvoc_class_def_RmResourceCommon;
extern const struct NVOC_CLASS_DEF __nvoc_class_def_RmResource;
void __nvoc_init_GpuResource(GpuResource*);
void __nvoc_init_funcTable_GpuResource(GpuResource*);
NV_STATUS __nvoc_ctor_GpuResource(GpuResource*, struct CALL_CONTEXT * arg_pCallContext, struct RS_RES_ALLOC_PARAMS_INTERNAL * arg_pParams);
void __nvoc_init_dataField_GpuResource(GpuResource*);
void __nvoc_dtor_GpuResource(GpuResource*);
extern const struct NVOC_EXPORT_INFO __nvoc_export_info_GpuResource;
static const struct NVOC_RTTI __nvoc_rtti_GpuResource_GpuResource = {
/*pClassDef=*/ &__nvoc_class_def_GpuResource,
/*dtor=*/ (NVOC_DYNAMIC_DTOR) &__nvoc_dtor_GpuResource,
/*offset=*/ 0,
};
static const struct NVOC_RTTI __nvoc_rtti_GpuResource_Object = {
/*pClassDef=*/ &__nvoc_class_def_Object,
/*dtor=*/ &__nvoc_destructFromBase,
/*offset=*/ NV_OFFSETOF(GpuResource, __nvoc_base_RmResource.__nvoc_base_RsResource.__nvoc_base_Object),
};
static const struct NVOC_RTTI __nvoc_rtti_GpuResource_RsResource = {
/*pClassDef=*/ &__nvoc_class_def_RsResource,
/*dtor=*/ &__nvoc_destructFromBase,
/*offset=*/ NV_OFFSETOF(GpuResource, __nvoc_base_RmResource.__nvoc_base_RsResource),
};
static const struct NVOC_RTTI __nvoc_rtti_GpuResource_RmResourceCommon = {
/*pClassDef=*/ &__nvoc_class_def_RmResourceCommon,
/*dtor=*/ &__nvoc_destructFromBase,
/*offset=*/ NV_OFFSETOF(GpuResource, __nvoc_base_RmResource.__nvoc_base_RmResourceCommon),
};
static const struct NVOC_RTTI __nvoc_rtti_GpuResource_RmResource = {
/*pClassDef=*/ &__nvoc_class_def_RmResource,
/*dtor=*/ &__nvoc_destructFromBase,
/*offset=*/ NV_OFFSETOF(GpuResource, __nvoc_base_RmResource),
};
static const struct NVOC_CASTINFO __nvoc_castinfo_GpuResource = {
/*numRelatives=*/ 5,
/*relatives=*/ {
&__nvoc_rtti_GpuResource_GpuResource,
&__nvoc_rtti_GpuResource_RmResource,
&__nvoc_rtti_GpuResource_RmResourceCommon,
&__nvoc_rtti_GpuResource_RsResource,
&__nvoc_rtti_GpuResource_Object,
},
};
const struct NVOC_CLASS_DEF __nvoc_class_def_GpuResource =
{
/*classInfo=*/ {
/*size=*/ sizeof(GpuResource),
/*classId=*/ classId(GpuResource),
/*providerId=*/ &__nvoc_rtti_provider,
#if NV_PRINTF_STRINGS_ALLOWED
/*name=*/ "GpuResource",
#endif
},
/*objCreatefn=*/ (NVOC_DYNAMIC_OBJ_CREATE) &__nvoc_objCreateDynamic_GpuResource,
/*pCastInfo=*/ &__nvoc_castinfo_GpuResource,
/*pExportInfo=*/ &__nvoc_export_info_GpuResource
};
static NV_STATUS __nvoc_thunk_GpuResource_resControl(struct RsResource *pGpuResource, struct CALL_CONTEXT *pCallContext, struct RS_RES_CONTROL_PARAMS_INTERNAL *pParams) {
return gpuresControl((struct GpuResource *)(((unsigned char *)pGpuResource) - __nvoc_rtti_GpuResource_RsResource.offset), pCallContext, pParams);
}
static NV_STATUS __nvoc_thunk_GpuResource_resMap(struct RsResource *pGpuResource, struct CALL_CONTEXT *pCallContext, struct RS_CPU_MAP_PARAMS *pParams, struct RsCpuMapping *pCpuMapping) {
return gpuresMap((struct GpuResource *)(((unsigned char *)pGpuResource) - __nvoc_rtti_GpuResource_RsResource.offset), pCallContext, pParams, pCpuMapping);
}
static NV_STATUS __nvoc_thunk_GpuResource_resUnmap(struct RsResource *pGpuResource, struct CALL_CONTEXT *pCallContext, struct RsCpuMapping *pCpuMapping) {
return gpuresUnmap((struct GpuResource *)(((unsigned char *)pGpuResource) - __nvoc_rtti_GpuResource_RsResource.offset), pCallContext, pCpuMapping);
}
static NvBool __nvoc_thunk_GpuResource_rmresShareCallback(struct RmResource *pGpuResource, struct RsClient *pInvokingClient, struct RsResourceRef *pParentRef, RS_SHARE_POLICY *pSharePolicy) {
return gpuresShareCallback((struct GpuResource *)(((unsigned char *)pGpuResource) - __nvoc_rtti_GpuResource_RmResource.offset), pInvokingClient, pParentRef, pSharePolicy);
}
static NV_STATUS __nvoc_thunk_RmResource_gpuresCheckMemInterUnmap(struct GpuResource *pRmResource, NvBool bSubdeviceHandleProvided) {
return rmresCheckMemInterUnmap((struct RmResource *)(((unsigned char *)pRmResource) + __nvoc_rtti_GpuResource_RmResource.offset), bSubdeviceHandleProvided);
}
static NV_STATUS __nvoc_thunk_RmResource_gpuresGetMemInterMapParams(struct GpuResource *pRmResource, RMRES_MEM_INTER_MAP_PARAMS *pParams) {
return rmresGetMemInterMapParams((struct RmResource *)(((unsigned char *)pRmResource) + __nvoc_rtti_GpuResource_RmResource.offset), pParams);
}
static NV_STATUS __nvoc_thunk_RmResource_gpuresGetMemoryMappingDescriptor(struct GpuResource *pRmResource, struct MEMORY_DESCRIPTOR **ppMemDesc) {
return rmresGetMemoryMappingDescriptor((struct RmResource *)(((unsigned char *)pRmResource) + __nvoc_rtti_GpuResource_RmResource.offset), ppMemDesc);
}
static NvU32 __nvoc_thunk_RsResource_gpuresGetRefCount(struct GpuResource *pResource) {
return resGetRefCount((struct RsResource *)(((unsigned char *)pResource) + __nvoc_rtti_GpuResource_RsResource.offset));
}
static NV_STATUS __nvoc_thunk_RsResource_gpuresControlFilter(struct GpuResource *pResource, struct CALL_CONTEXT *pCallContext, struct RS_RES_CONTROL_PARAMS_INTERNAL *pParams) {
return resControlFilter((struct RsResource *)(((unsigned char *)pResource) + __nvoc_rtti_GpuResource_RsResource.offset), pCallContext, pParams);
}
static void __nvoc_thunk_RsResource_gpuresAddAdditionalDependants(struct RsClient *pClient, struct GpuResource *pResource, RsResourceRef *pReference) {
resAddAdditionalDependants(pClient, (struct RsResource *)(((unsigned char *)pResource) + __nvoc_rtti_GpuResource_RsResource.offset), pReference);
}
static NV_STATUS __nvoc_thunk_RmResource_gpuresControl_Prologue(struct GpuResource *pResource, struct CALL_CONTEXT *pCallContext, struct RS_RES_CONTROL_PARAMS_INTERNAL *pParams) {
return rmresControl_Prologue((struct RmResource *)(((unsigned char *)pResource) + __nvoc_rtti_GpuResource_RmResource.offset), pCallContext, pParams);
}
static NvBool __nvoc_thunk_RsResource_gpuresCanCopy(struct GpuResource *pResource) {
return resCanCopy((struct RsResource *)(((unsigned char *)pResource) + __nvoc_rtti_GpuResource_RsResource.offset));
}
static NV_STATUS __nvoc_thunk_RsResource_gpuresMapTo(struct GpuResource *pResource, RS_RES_MAP_TO_PARAMS *pParams) {
return resMapTo((struct RsResource *)(((unsigned char *)pResource) + __nvoc_rtti_GpuResource_RsResource.offset), pParams);
}
static void __nvoc_thunk_RsResource_gpuresPreDestruct(struct GpuResource *pResource) {
resPreDestruct((struct RsResource *)(((unsigned char *)pResource) + __nvoc_rtti_GpuResource_RsResource.offset));
}
static NV_STATUS __nvoc_thunk_RsResource_gpuresUnmapFrom(struct GpuResource *pResource, RS_RES_UNMAP_FROM_PARAMS *pParams) {
return resUnmapFrom((struct RsResource *)(((unsigned char *)pResource) + __nvoc_rtti_GpuResource_RsResource.offset), pParams);
}
static void __nvoc_thunk_RmResource_gpuresControl_Epilogue(struct GpuResource *pResource, struct CALL_CONTEXT *pCallContext, struct RS_RES_CONTROL_PARAMS_INTERNAL *pParams) {
rmresControl_Epilogue((struct RmResource *)(((unsigned char *)pResource) + __nvoc_rtti_GpuResource_RmResource.offset), pCallContext, pParams);
}
static NV_STATUS __nvoc_thunk_RsResource_gpuresControlLookup(struct GpuResource *pResource, struct RS_RES_CONTROL_PARAMS_INTERNAL *pParams, const struct NVOC_EXPORTED_METHOD_DEF **ppEntry) {
return resControlLookup((struct RsResource *)(((unsigned char *)pResource) + __nvoc_rtti_GpuResource_RsResource.offset), pParams, ppEntry);
}
static NvBool __nvoc_thunk_RmResource_gpuresAccessCallback(struct GpuResource *pResource, struct RsClient *pInvokingClient, void *pAllocParams, RsAccessRight accessRight) {
return rmresAccessCallback((struct RmResource *)(((unsigned char *)pResource) + __nvoc_rtti_GpuResource_RmResource.offset), pInvokingClient, pAllocParams, accessRight);
}
const struct NVOC_EXPORT_INFO __nvoc_export_info_GpuResource =
{
/*numEntries=*/ 0,
/*pExportEntries=*/ 0
};
void __nvoc_dtor_RmResource(RmResource*);
void __nvoc_dtor_GpuResource(GpuResource *pThis) {
__nvoc_dtor_RmResource(&pThis->__nvoc_base_RmResource);
PORT_UNREFERENCED_VARIABLE(pThis);
}
void __nvoc_init_dataField_GpuResource(GpuResource *pThis) {
PORT_UNREFERENCED_VARIABLE(pThis);
}
NV_STATUS __nvoc_ctor_RmResource(RmResource* , struct CALL_CONTEXT *, struct RS_RES_ALLOC_PARAMS_INTERNAL *);
NV_STATUS __nvoc_ctor_GpuResource(GpuResource *pThis, struct CALL_CONTEXT * arg_pCallContext, struct RS_RES_ALLOC_PARAMS_INTERNAL * arg_pParams) {
NV_STATUS status = NV_OK;
status = __nvoc_ctor_RmResource(&pThis->__nvoc_base_RmResource, arg_pCallContext, arg_pParams);
if (status != NV_OK) goto __nvoc_ctor_GpuResource_fail_RmResource;
__nvoc_init_dataField_GpuResource(pThis);
status = __nvoc_gpuresConstruct(pThis, arg_pCallContext, arg_pParams);
if (status != NV_OK) goto __nvoc_ctor_GpuResource_fail__init;
goto __nvoc_ctor_GpuResource_exit; // Success
__nvoc_ctor_GpuResource_fail__init:
__nvoc_dtor_RmResource(&pThis->__nvoc_base_RmResource);
__nvoc_ctor_GpuResource_fail_RmResource:
__nvoc_ctor_GpuResource_exit:
return status;
}
static void __nvoc_init_funcTable_GpuResource_1(GpuResource *pThis) {
PORT_UNREFERENCED_VARIABLE(pThis);
pThis->__gpuresControl__ = &gpuresControl_IMPL;
pThis->__gpuresMap__ = &gpuresMap_IMPL;
pThis->__gpuresUnmap__ = &gpuresUnmap_IMPL;
pThis->__gpuresShareCallback__ = &gpuresShareCallback_IMPL;
pThis->__gpuresGetRegBaseOffsetAndSize__ = &gpuresGetRegBaseOffsetAndSize_IMPL;
pThis->__gpuresGetMapAddrSpace__ = &gpuresGetMapAddrSpace_IMPL;
pThis->__gpuresInternalControlForward__ = &gpuresInternalControlForward_IMPL;
pThis->__gpuresGetInternalObjectHandle__ = &gpuresGetInternalObjectHandle_IMPL;
pThis->__nvoc_base_RmResource.__nvoc_base_RsResource.__resControl__ = &__nvoc_thunk_GpuResource_resControl;
pThis->__nvoc_base_RmResource.__nvoc_base_RsResource.__resMap__ = &__nvoc_thunk_GpuResource_resMap;
pThis->__nvoc_base_RmResource.__nvoc_base_RsResource.__resUnmap__ = &__nvoc_thunk_GpuResource_resUnmap;
pThis->__nvoc_base_RmResource.__rmresShareCallback__ = &__nvoc_thunk_GpuResource_rmresShareCallback;
pThis->__gpuresCheckMemInterUnmap__ = &__nvoc_thunk_RmResource_gpuresCheckMemInterUnmap;
pThis->__gpuresGetMemInterMapParams__ = &__nvoc_thunk_RmResource_gpuresGetMemInterMapParams;
pThis->__gpuresGetMemoryMappingDescriptor__ = &__nvoc_thunk_RmResource_gpuresGetMemoryMappingDescriptor;
pThis->__gpuresGetRefCount__ = &__nvoc_thunk_RsResource_gpuresGetRefCount;
pThis->__gpuresControlFilter__ = &__nvoc_thunk_RsResource_gpuresControlFilter;
pThis->__gpuresAddAdditionalDependants__ = &__nvoc_thunk_RsResource_gpuresAddAdditionalDependants;
pThis->__gpuresControl_Prologue__ = &__nvoc_thunk_RmResource_gpuresControl_Prologue;
pThis->__gpuresCanCopy__ = &__nvoc_thunk_RsResource_gpuresCanCopy;
pThis->__gpuresMapTo__ = &__nvoc_thunk_RsResource_gpuresMapTo;
pThis->__gpuresPreDestruct__ = &__nvoc_thunk_RsResource_gpuresPreDestruct;
pThis->__gpuresUnmapFrom__ = &__nvoc_thunk_RsResource_gpuresUnmapFrom;
pThis->__gpuresControl_Epilogue__ = &__nvoc_thunk_RmResource_gpuresControl_Epilogue;
pThis->__gpuresControlLookup__ = &__nvoc_thunk_RsResource_gpuresControlLookup;
pThis->__gpuresAccessCallback__ = &__nvoc_thunk_RmResource_gpuresAccessCallback;
}
void __nvoc_init_funcTable_GpuResource(GpuResource *pThis) {
__nvoc_init_funcTable_GpuResource_1(pThis);
}
void __nvoc_init_RmResource(RmResource*);
void __nvoc_init_GpuResource(GpuResource *pThis) {
pThis->__nvoc_pbase_GpuResource = pThis;
pThis->__nvoc_pbase_Object = &pThis->__nvoc_base_RmResource.__nvoc_base_RsResource.__nvoc_base_Object;
pThis->__nvoc_pbase_RsResource = &pThis->__nvoc_base_RmResource.__nvoc_base_RsResource;
pThis->__nvoc_pbase_RmResourceCommon = &pThis->__nvoc_base_RmResource.__nvoc_base_RmResourceCommon;
pThis->__nvoc_pbase_RmResource = &pThis->__nvoc_base_RmResource;
__nvoc_init_RmResource(&pThis->__nvoc_base_RmResource);
__nvoc_init_funcTable_GpuResource(pThis);
}
NV_STATUS __nvoc_objCreate_GpuResource(GpuResource **ppThis, Dynamic *pParent, NvU32 createFlags, struct CALL_CONTEXT * arg_pCallContext, struct RS_RES_ALLOC_PARAMS_INTERNAL * arg_pParams) {
NV_STATUS status;
Object *pParentObj;
GpuResource *pThis;
pThis = portMemAllocNonPaged(sizeof(GpuResource));
if (pThis == NULL) return NV_ERR_NO_MEMORY;
portMemSet(pThis, 0, sizeof(GpuResource));
__nvoc_initRtti(staticCast(pThis, Dynamic), &__nvoc_class_def_GpuResource);
if (pParent != NULL && !(createFlags & NVOC_OBJ_CREATE_FLAGS_PARENT_HALSPEC_ONLY))
{
pParentObj = dynamicCast(pParent, Object);
objAddChild(pParentObj, &pThis->__nvoc_base_RmResource.__nvoc_base_RsResource.__nvoc_base_Object);
}
else
{
pThis->__nvoc_base_RmResource.__nvoc_base_RsResource.__nvoc_base_Object.pParent = NULL;
}
__nvoc_init_GpuResource(pThis);
status = __nvoc_ctor_GpuResource(pThis, arg_pCallContext, arg_pParams);
if (status != NV_OK) goto __nvoc_objCreate_GpuResource_cleanup;
*ppThis = pThis;
return NV_OK;
__nvoc_objCreate_GpuResource_cleanup:
// do not call destructors here since the constructor already called them
portMemFree(pThis);
return status;
}
NV_STATUS __nvoc_objCreateDynamic_GpuResource(GpuResource **ppThis, Dynamic *pParent, NvU32 createFlags, va_list args) {
NV_STATUS status;
struct CALL_CONTEXT * arg_pCallContext = va_arg(args, struct CALL_CONTEXT *);
struct RS_RES_ALLOC_PARAMS_INTERNAL * arg_pParams = va_arg(args, struct RS_RES_ALLOC_PARAMS_INTERNAL *);
status = __nvoc_objCreate_GpuResource(ppThis, pParent, createFlags, arg_pCallContext, arg_pParams);
return status;
}
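//
// Illustrative note (hypothetical helper) on the pointer adjustment performed
// by the base-class thunks above: the embedded RsResource lives at a fixed
// offset inside GpuResource, and each thunk subtracts that same RTTI offset
// to recover the derived object before forwarding the call.
//
static void exampleShowThunkAdjustment(GpuResource *pGpuRes)
{
    struct RsResource *pRes =
        &pGpuRes->__nvoc_base_RmResource.__nvoc_base_RsResource;

    // A virtual call through the base vtable slot, e.g.
    //     pRes->__resControl__(pRes, pCallContext, pParams);
    // enters __nvoc_thunk_GpuResource_resControl(), which computes
    //     (GpuResource *)((unsigned char *)pRes - __nvoc_rtti_GpuResource_RsResource.offset)
    // == pGpuRes and then forwards to gpuresControl().
    (void)pRes;
}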


@@ -0,0 +1,329 @@
#ifndef _G_GPU_RESOURCE_NVOC_H_
#define _G_GPU_RESOURCE_NVOC_H_
#include "nvoc/runtime.h"
#ifdef __cplusplus
extern "C" {
#endif
/*
* SPDX-FileCopyrightText: Copyright (c) 2016-2021 NVIDIA CORPORATION & AFFILIATES. All rights reserved.
* SPDX-License-Identifier: MIT
*
* Permission is hereby granted, free of charge, to any person obtaining a
* copy of this software and associated documentation files (the "Software"),
* to deal in the Software without restriction, including without limitation
* the rights to use, copy, modify, merge, publish, distribute, sublicense,
* and/or sell copies of the Software, and to permit persons to whom the
* Software is furnished to do so, subject to the following conditions:
*
* The above copyright notice and this permission notice shall be included in
* all copies or substantial portions of the Software.
*
* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
* IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
* FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL
* THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
* LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
* FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER
* DEALINGS IN THE SOFTWARE.
*/
#include "g_gpu_resource_nvoc.h"
#ifndef _GPURESOURCE_H_
#define _GPURESOURCE_H_
#include "core/core.h"
#include "gpu/mem_mgr/mem_desc.h"
#include "rmapi/resource.h"
struct OBJGPU;
#ifndef __NVOC_CLASS_OBJGPU_TYPEDEF__
#define __NVOC_CLASS_OBJGPU_TYPEDEF__
typedef struct OBJGPU OBJGPU;
#endif /* __NVOC_CLASS_OBJGPU_TYPEDEF__ */
#ifndef __nvoc_class_id_OBJGPU
#define __nvoc_class_id_OBJGPU 0x7ef3cb
#endif /* __nvoc_class_id_OBJGPU */
struct Device;
#ifndef __NVOC_CLASS_Device_TYPEDEF__
#define __NVOC_CLASS_Device_TYPEDEF__
typedef struct Device Device;
#endif /* __NVOC_CLASS_Device_TYPEDEF__ */
#ifndef __nvoc_class_id_Device
#define __nvoc_class_id_Device 0xe0ac20
#endif /* __nvoc_class_id_Device */
struct Subdevice;
#ifndef __NVOC_CLASS_Subdevice_TYPEDEF__
#define __NVOC_CLASS_Subdevice_TYPEDEF__
typedef struct Subdevice Subdevice;
#endif /* __NVOC_CLASS_Subdevice_TYPEDEF__ */
#ifndef __nvoc_class_id_Subdevice
#define __nvoc_class_id_Subdevice 0x4b01b3
#endif /* __nvoc_class_id_Subdevice */
#define GPU_RES_GET_GPU(pRes) staticCastNoPtrCheck((pRes), GpuResource)->pGpu
#define GPU_RES_GET_GPUGRP(pRes) staticCastNoPtrCheck((pRes), GpuResource)->pGpuGrp
#define GPU_RES_GET_DEVICE(pRes) staticCastNoPtrCheck((pRes), GpuResource)->pDevice
#define GPU_RES_GET_SUBDEVICE(pRes) staticCastNoPtrCheck((pRes), GpuResource)->pSubdevice
#define GPU_RES_SET_THREAD_BC_STATE(pRes) do { \
gpuSetThreadBcState(staticCastNoPtrCheck((pRes), GpuResource)->pGpu, \
staticCastNoPtrCheck((pRes), GpuResource)->bBcResource); \
} while(0)
/*!
* Abstract base class for common CPU mapping operations
*/
#ifdef NVOC_GPU_RESOURCE_H_PRIVATE_ACCESS_ALLOWED
#define PRIVATE_FIELD(x) x
#else
#define PRIVATE_FIELD(x) NVOC_PRIVATE_FIELD(x)
#endif
struct GpuResource {
const struct NVOC_RTTI *__nvoc_rtti;
struct RmResource __nvoc_base_RmResource;
struct Object *__nvoc_pbase_Object;
struct RsResource *__nvoc_pbase_RsResource;
struct RmResourceCommon *__nvoc_pbase_RmResourceCommon;
struct RmResource *__nvoc_pbase_RmResource;
struct GpuResource *__nvoc_pbase_GpuResource;
NV_STATUS (*__gpuresControl__)(struct GpuResource *, struct CALL_CONTEXT *, struct RS_RES_CONTROL_PARAMS_INTERNAL *);
NV_STATUS (*__gpuresMap__)(struct GpuResource *, struct CALL_CONTEXT *, struct RS_CPU_MAP_PARAMS *, struct RsCpuMapping *);
NV_STATUS (*__gpuresUnmap__)(struct GpuResource *, struct CALL_CONTEXT *, struct RsCpuMapping *);
NvBool (*__gpuresShareCallback__)(struct GpuResource *, struct RsClient *, struct RsResourceRef *, RS_SHARE_POLICY *);
NV_STATUS (*__gpuresGetRegBaseOffsetAndSize__)(struct GpuResource *, struct OBJGPU *, NvU32 *, NvU32 *);
NV_STATUS (*__gpuresGetMapAddrSpace__)(struct GpuResource *, struct CALL_CONTEXT *, NvU32, NV_ADDRESS_SPACE *);
NV_STATUS (*__gpuresInternalControlForward__)(struct GpuResource *, NvU32, void *, NvU32);
NvHandle (*__gpuresGetInternalObjectHandle__)(struct GpuResource *);
NV_STATUS (*__gpuresCheckMemInterUnmap__)(struct GpuResource *, NvBool);
NV_STATUS (*__gpuresGetMemInterMapParams__)(struct GpuResource *, RMRES_MEM_INTER_MAP_PARAMS *);
NV_STATUS (*__gpuresGetMemoryMappingDescriptor__)(struct GpuResource *, struct MEMORY_DESCRIPTOR **);
NvU32 (*__gpuresGetRefCount__)(struct GpuResource *);
NV_STATUS (*__gpuresControlFilter__)(struct GpuResource *, struct CALL_CONTEXT *, struct RS_RES_CONTROL_PARAMS_INTERNAL *);
void (*__gpuresAddAdditionalDependants__)(struct RsClient *, struct GpuResource *, RsResourceRef *);
NV_STATUS (*__gpuresControl_Prologue__)(struct GpuResource *, struct CALL_CONTEXT *, struct RS_RES_CONTROL_PARAMS_INTERNAL *);
NvBool (*__gpuresCanCopy__)(struct GpuResource *);
NV_STATUS (*__gpuresMapTo__)(struct GpuResource *, RS_RES_MAP_TO_PARAMS *);
void (*__gpuresPreDestruct__)(struct GpuResource *);
NV_STATUS (*__gpuresUnmapFrom__)(struct GpuResource *, RS_RES_UNMAP_FROM_PARAMS *);
void (*__gpuresControl_Epilogue__)(struct GpuResource *, struct CALL_CONTEXT *, struct RS_RES_CONTROL_PARAMS_INTERNAL *);
NV_STATUS (*__gpuresControlLookup__)(struct GpuResource *, struct RS_RES_CONTROL_PARAMS_INTERNAL *, const struct NVOC_EXPORTED_METHOD_DEF **);
NvBool (*__gpuresAccessCallback__)(struct GpuResource *, struct RsClient *, void *, RsAccessRight);
struct OBJGPUGRP *pGpuGrp;
struct OBJGPU *pGpu;
struct Device *pDevice;
struct Subdevice *pSubdevice;
NvBool bBcResource;
};
#ifndef __NVOC_CLASS_GpuResource_TYPEDEF__
#define __NVOC_CLASS_GpuResource_TYPEDEF__
typedef struct GpuResource GpuResource;
#endif /* __NVOC_CLASS_GpuResource_TYPEDEF__ */
#ifndef __nvoc_class_id_GpuResource
#define __nvoc_class_id_GpuResource 0x5d5d9f
#endif /* __nvoc_class_id_GpuResource */
extern const struct NVOC_CLASS_DEF __nvoc_class_def_GpuResource;
#define __staticCast_GpuResource(pThis) \
((pThis)->__nvoc_pbase_GpuResource)
#ifdef __nvoc_gpu_resource_h_disabled
#define __dynamicCast_GpuResource(pThis) ((GpuResource*)NULL)
#else //__nvoc_gpu_resource_h_disabled
#define __dynamicCast_GpuResource(pThis) \
((GpuResource*)__nvoc_dynamicCast(staticCast((pThis), Dynamic), classInfo(GpuResource)))
#endif //__nvoc_gpu_resource_h_disabled
NV_STATUS __nvoc_objCreateDynamic_GpuResource(GpuResource**, Dynamic*, NvU32, va_list);
NV_STATUS __nvoc_objCreate_GpuResource(GpuResource**, Dynamic*, NvU32, struct CALL_CONTEXT * arg_pCallContext, struct RS_RES_ALLOC_PARAMS_INTERNAL * arg_pParams);
#define __objCreate_GpuResource(ppNewObj, pParent, createFlags, arg_pCallContext, arg_pParams) \
__nvoc_objCreate_GpuResource((ppNewObj), staticCast((pParent), Dynamic), (createFlags), arg_pCallContext, arg_pParams)
#define gpuresControl(pGpuResource, pCallContext, pParams) gpuresControl_DISPATCH(pGpuResource, pCallContext, pParams)
#define gpuresMap(pGpuResource, pCallContext, pParams, pCpuMapping) gpuresMap_DISPATCH(pGpuResource, pCallContext, pParams, pCpuMapping)
#define gpuresUnmap(pGpuResource, pCallContext, pCpuMapping) gpuresUnmap_DISPATCH(pGpuResource, pCallContext, pCpuMapping)
#define gpuresShareCallback(pGpuResource, pInvokingClient, pParentRef, pSharePolicy) gpuresShareCallback_DISPATCH(pGpuResource, pInvokingClient, pParentRef, pSharePolicy)
#define gpuresGetRegBaseOffsetAndSize(pGpuResource, pGpu, pOffset, pSize) gpuresGetRegBaseOffsetAndSize_DISPATCH(pGpuResource, pGpu, pOffset, pSize)
#define gpuresGetMapAddrSpace(pGpuResource, pCallContext, mapFlags, pAddrSpace) gpuresGetMapAddrSpace_DISPATCH(pGpuResource, pCallContext, mapFlags, pAddrSpace)
#define gpuresInternalControlForward(pGpuResource, command, pParams, size) gpuresInternalControlForward_DISPATCH(pGpuResource, command, pParams, size)
#define gpuresGetInternalObjectHandle(pGpuResource) gpuresGetInternalObjectHandle_DISPATCH(pGpuResource)
#define gpuresCheckMemInterUnmap(pRmResource, bSubdeviceHandleProvided) gpuresCheckMemInterUnmap_DISPATCH(pRmResource, bSubdeviceHandleProvided)
#define gpuresGetMemInterMapParams(pRmResource, pParams) gpuresGetMemInterMapParams_DISPATCH(pRmResource, pParams)
#define gpuresGetMemoryMappingDescriptor(pRmResource, ppMemDesc) gpuresGetMemoryMappingDescriptor_DISPATCH(pRmResource, ppMemDesc)
#define gpuresGetRefCount(pResource) gpuresGetRefCount_DISPATCH(pResource)
#define gpuresControlFilter(pResource, pCallContext, pParams) gpuresControlFilter_DISPATCH(pResource, pCallContext, pParams)
#define gpuresAddAdditionalDependants(pClient, pResource, pReference) gpuresAddAdditionalDependants_DISPATCH(pClient, pResource, pReference)
#define gpuresControl_Prologue(pResource, pCallContext, pParams) gpuresControl_Prologue_DISPATCH(pResource, pCallContext, pParams)
#define gpuresCanCopy(pResource) gpuresCanCopy_DISPATCH(pResource)
#define gpuresMapTo(pResource, pParams) gpuresMapTo_DISPATCH(pResource, pParams)
#define gpuresPreDestruct(pResource) gpuresPreDestruct_DISPATCH(pResource)
#define gpuresUnmapFrom(pResource, pParams) gpuresUnmapFrom_DISPATCH(pResource, pParams)
#define gpuresControl_Epilogue(pResource, pCallContext, pParams) gpuresControl_Epilogue_DISPATCH(pResource, pCallContext, pParams)
#define gpuresControlLookup(pResource, pParams, ppEntry) gpuresControlLookup_DISPATCH(pResource, pParams, ppEntry)
#define gpuresAccessCallback(pResource, pInvokingClient, pAllocParams, accessRight) gpuresAccessCallback_DISPATCH(pResource, pInvokingClient, pAllocParams, accessRight)
NV_STATUS gpuresControl_IMPL(struct GpuResource *pGpuResource, struct CALL_CONTEXT *pCallContext, struct RS_RES_CONTROL_PARAMS_INTERNAL *pParams);
static inline NV_STATUS gpuresControl_DISPATCH(struct GpuResource *pGpuResource, struct CALL_CONTEXT *pCallContext, struct RS_RES_CONTROL_PARAMS_INTERNAL *pParams) {
return pGpuResource->__gpuresControl__(pGpuResource, pCallContext, pParams);
}
NV_STATUS gpuresMap_IMPL(struct GpuResource *pGpuResource, struct CALL_CONTEXT *pCallContext, struct RS_CPU_MAP_PARAMS *pParams, struct RsCpuMapping *pCpuMapping);
static inline NV_STATUS gpuresMap_DISPATCH(struct GpuResource *pGpuResource, struct CALL_CONTEXT *pCallContext, struct RS_CPU_MAP_PARAMS *pParams, struct RsCpuMapping *pCpuMapping) {
return pGpuResource->__gpuresMap__(pGpuResource, pCallContext, pParams, pCpuMapping);
}
NV_STATUS gpuresUnmap_IMPL(struct GpuResource *pGpuResource, struct CALL_CONTEXT *pCallContext, struct RsCpuMapping *pCpuMapping);
static inline NV_STATUS gpuresUnmap_DISPATCH(struct GpuResource *pGpuResource, struct CALL_CONTEXT *pCallContext, struct RsCpuMapping *pCpuMapping) {
return pGpuResource->__gpuresUnmap__(pGpuResource, pCallContext, pCpuMapping);
}
NvBool gpuresShareCallback_IMPL(struct GpuResource *pGpuResource, struct RsClient *pInvokingClient, struct RsResourceRef *pParentRef, RS_SHARE_POLICY *pSharePolicy);
static inline NvBool gpuresShareCallback_DISPATCH(struct GpuResource *pGpuResource, struct RsClient *pInvokingClient, struct RsResourceRef *pParentRef, RS_SHARE_POLICY *pSharePolicy) {
return pGpuResource->__gpuresShareCallback__(pGpuResource, pInvokingClient, pParentRef, pSharePolicy);
}
NV_STATUS gpuresGetRegBaseOffsetAndSize_IMPL(struct GpuResource *pGpuResource, struct OBJGPU *pGpu, NvU32 *pOffset, NvU32 *pSize);
static inline NV_STATUS gpuresGetRegBaseOffsetAndSize_DISPATCH(struct GpuResource *pGpuResource, struct OBJGPU *pGpu, NvU32 *pOffset, NvU32 *pSize) {
return pGpuResource->__gpuresGetRegBaseOffsetAndSize__(pGpuResource, pGpu, pOffset, pSize);
}
NV_STATUS gpuresGetMapAddrSpace_IMPL(struct GpuResource *pGpuResource, struct CALL_CONTEXT *pCallContext, NvU32 mapFlags, NV_ADDRESS_SPACE *pAddrSpace);
static inline NV_STATUS gpuresGetMapAddrSpace_DISPATCH(struct GpuResource *pGpuResource, struct CALL_CONTEXT *pCallContext, NvU32 mapFlags, NV_ADDRESS_SPACE *pAddrSpace) {
return pGpuResource->__gpuresGetMapAddrSpace__(pGpuResource, pCallContext, mapFlags, pAddrSpace);
}
NV_STATUS gpuresInternalControlForward_IMPL(struct GpuResource *pGpuResource, NvU32 command, void *pParams, NvU32 size);
static inline NV_STATUS gpuresInternalControlForward_DISPATCH(struct GpuResource *pGpuResource, NvU32 command, void *pParams, NvU32 size) {
return pGpuResource->__gpuresInternalControlForward__(pGpuResource, command, pParams, size);
}
NvHandle gpuresGetInternalObjectHandle_IMPL(struct GpuResource *pGpuResource);
static inline NvHandle gpuresGetInternalObjectHandle_DISPATCH(struct GpuResource *pGpuResource) {
return pGpuResource->__gpuresGetInternalObjectHandle__(pGpuResource);
}
static inline NV_STATUS gpuresCheckMemInterUnmap_DISPATCH(struct GpuResource *pRmResource, NvBool bSubdeviceHandleProvided) {
return pRmResource->__gpuresCheckMemInterUnmap__(pRmResource, bSubdeviceHandleProvided);
}
static inline NV_STATUS gpuresGetMemInterMapParams_DISPATCH(struct GpuResource *pRmResource, RMRES_MEM_INTER_MAP_PARAMS *pParams) {
return pRmResource->__gpuresGetMemInterMapParams__(pRmResource, pParams);
}
static inline NV_STATUS gpuresGetMemoryMappingDescriptor_DISPATCH(struct GpuResource *pRmResource, struct MEMORY_DESCRIPTOR **ppMemDesc) {
return pRmResource->__gpuresGetMemoryMappingDescriptor__(pRmResource, ppMemDesc);
}
static inline NvU32 gpuresGetRefCount_DISPATCH(struct GpuResource *pResource) {
return pResource->__gpuresGetRefCount__(pResource);
}
static inline NV_STATUS gpuresControlFilter_DISPATCH(struct GpuResource *pResource, struct CALL_CONTEXT *pCallContext, struct RS_RES_CONTROL_PARAMS_INTERNAL *pParams) {
return pResource->__gpuresControlFilter__(pResource, pCallContext, pParams);
}
static inline void gpuresAddAdditionalDependants_DISPATCH(struct RsClient *pClient, struct GpuResource *pResource, RsResourceRef *pReference) {
pResource->__gpuresAddAdditionalDependants__(pClient, pResource, pReference);
}
static inline NV_STATUS gpuresControl_Prologue_DISPATCH(struct GpuResource *pResource, struct CALL_CONTEXT *pCallContext, struct RS_RES_CONTROL_PARAMS_INTERNAL *pParams) {
return pResource->__gpuresControl_Prologue__(pResource, pCallContext, pParams);
}
static inline NvBool gpuresCanCopy_DISPATCH(struct GpuResource *pResource) {
return pResource->__gpuresCanCopy__(pResource);
}
static inline NV_STATUS gpuresMapTo_DISPATCH(struct GpuResource *pResource, RS_RES_MAP_TO_PARAMS *pParams) {
return pResource->__gpuresMapTo__(pResource, pParams);
}
static inline void gpuresPreDestruct_DISPATCH(struct GpuResource *pResource) {
pResource->__gpuresPreDestruct__(pResource);
}
static inline NV_STATUS gpuresUnmapFrom_DISPATCH(struct GpuResource *pResource, RS_RES_UNMAP_FROM_PARAMS *pParams) {
return pResource->__gpuresUnmapFrom__(pResource, pParams);
}
static inline void gpuresControl_Epilogue_DISPATCH(struct GpuResource *pResource, struct CALL_CONTEXT *pCallContext, struct RS_RES_CONTROL_PARAMS_INTERNAL *pParams) {
pResource->__gpuresControl_Epilogue__(pResource, pCallContext, pParams);
}
static inline NV_STATUS gpuresControlLookup_DISPATCH(struct GpuResource *pResource, struct RS_RES_CONTROL_PARAMS_INTERNAL *pParams, const struct NVOC_EXPORTED_METHOD_DEF **ppEntry) {
return pResource->__gpuresControlLookup__(pResource, pParams, ppEntry);
}
static inline NvBool gpuresAccessCallback_DISPATCH(struct GpuResource *pResource, struct RsClient *pInvokingClient, void *pAllocParams, RsAccessRight accessRight) {
return pResource->__gpuresAccessCallback__(pResource, pInvokingClient, pAllocParams, accessRight);
}
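//
// For reference only: a minimal sketch (hypothetical caller) of the dispatch
// macros defined above. Each macro expands to a call through the per-object
// function pointer installed by __nvoc_init_funcTable_GpuResource_1(), so a
// subclass can override individual entries.
//
static inline NV_STATUS exampleQueryRegWindow(struct GpuResource *pGpuRes, struct OBJGPU *pGpu)
{
    NvU32 offset = 0;
    NvU32 size   = 0;

    // Expands to pGpuRes->__gpuresGetRegBaseOffsetAndSize__(pGpuRes, pGpu, &offset, &size)
    return gpuresGetRegBaseOffsetAndSize(pGpuRes, pGpu, &offset, &size);
}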
NV_STATUS gpuresConstruct_IMPL(struct GpuResource *arg_pGpuResource, struct CALL_CONTEXT *arg_pCallContext, struct RS_RES_ALLOC_PARAMS_INTERNAL *arg_pParams);
#define __nvoc_gpuresConstruct(arg_pGpuResource, arg_pCallContext, arg_pParams) gpuresConstruct_IMPL(arg_pGpuResource, arg_pCallContext, arg_pParams)
NV_STATUS gpuresCopyConstruct_IMPL(struct GpuResource *pGpuResource, struct CALL_CONTEXT *pCallContext, struct RS_RES_ALLOC_PARAMS_INTERNAL *pParams);
#ifdef __nvoc_gpu_resource_h_disabled
static inline NV_STATUS gpuresCopyConstruct(struct GpuResource *pGpuResource, struct CALL_CONTEXT *pCallContext, struct RS_RES_ALLOC_PARAMS_INTERNAL *pParams) {
NV_ASSERT_FAILED_PRECOMP("GpuResource was disabled!");
return NV_ERR_NOT_SUPPORTED;
}
#else //__nvoc_gpu_resource_h_disabled
#define gpuresCopyConstruct(pGpuResource, pCallContext, pParams) gpuresCopyConstruct_IMPL(pGpuResource, pCallContext, pParams)
#endif //__nvoc_gpu_resource_h_disabled
void gpuresSetGpu_IMPL(struct GpuResource *pGpuResource, struct OBJGPU *pGpu, NvBool bBcResource);
#ifdef __nvoc_gpu_resource_h_disabled
static inline void gpuresSetGpu(struct GpuResource *pGpuResource, struct OBJGPU *pGpu, NvBool bBcResource) {
NV_ASSERT_FAILED_PRECOMP("GpuResource was disabled!");
}
#else //__nvoc_gpu_resource_h_disabled
#define gpuresSetGpu(pGpuResource, pGpu, bBcResource) gpuresSetGpu_IMPL(pGpuResource, pGpu, bBcResource)
#endif //__nvoc_gpu_resource_h_disabled
void gpuresControlSetup_IMPL(struct RS_RES_CONTROL_PARAMS_INTERNAL *pParams, struct GpuResource *pGpuResource);
#ifdef __nvoc_gpu_resource_h_disabled
static inline void gpuresControlSetup(struct RS_RES_CONTROL_PARAMS_INTERNAL *pParams, struct GpuResource *pGpuResource) {
NV_ASSERT_FAILED_PRECOMP("GpuResource was disabled!");
}
#else //__nvoc_gpu_resource_h_disabled
#define gpuresControlSetup(pParams, pGpuResource) gpuresControlSetup_IMPL(pParams, pGpuResource)
#endif //__nvoc_gpu_resource_h_disabled
NV_STATUS gpuresGetByHandle_IMPL(struct RsClient *pClient, NvHandle hResource, struct GpuResource **ppGpuResource);
#define gpuresGetByHandle(pClient, hResource, ppGpuResource) gpuresGetByHandle_IMPL(pClient, hResource, ppGpuResource)
NV_STATUS gpuresGetByDeviceOrSubdeviceHandle_IMPL(struct RsClient *pClient, NvHandle hResource, struct GpuResource **ppGpuResource);
#define gpuresGetByDeviceOrSubdeviceHandle(pClient, hResource, ppGpuResource) gpuresGetByDeviceOrSubdeviceHandle_IMPL(pClient, hResource, ppGpuResource)
#undef PRIVATE_FIELD
#endif // _GPURESOURCE_H_
#ifdef __cplusplus
} // extern "C"
#endif
#endif // _G_GPU_RESOURCE_NVOC_H_

@@ -0,0 +1,142 @@
// This file is automatically generated by rmconfig - DO NOT EDIT!
//
// HAL support for use in HAL setup
//
// Profile: devel-soc-disp-dce-client
// Template: templates/gt_hal.h
//
#ifndef _G_RMCFG_HAL_H_
#define _G_RMCFG_HAL_H_
typedef struct DISP_HAL_IFACES *PDISP_HAL_IFACES;
typedef struct DPU_HAL_IFACES *PDPU_HAL_IFACES;
typedef struct GPIO_HAL_IFACES *PGPIO_HAL_IFACES;
typedef struct RPC_HAL_IFACES *PRPC_HAL_IFACES;
typedef struct RPCSTRUCTURECOPY_HAL_IFACES *PRPCSTRUCTURECOPY_HAL_IFACES;
//
// per-GPU list of function ptrs to setup iface for each engine
//
typedef struct {
} HAL_IFACE_SETUP, *PHAL_IFACE_SETUP;
//
// IP_VERSIONS support
//
typedef struct IGRP_IP_VERSIONS_TABLE_INFO IGRP_IP_VERSIONS_TABLE_INFO;
// generic form of Head_iGrp_ipVersions_getInfo typedef
typedef NV_STATUS IGrp_ipVersions_getInfo(IGRP_IP_VERSIONS_TABLE_INFO *);
typedef void IGrp_ipVersions_install(IGRP_IP_VERSIONS_TABLE_INFO *);
typedef NV_STATUS IGrp_ipVersions_wrapup(IGRP_IP_VERSIONS_TABLE_INFO *);
// a single inclusive version range
typedef struct {
NvU32 v0;
NvU32 v1;
} IGRP_IP_VERSION_RANGE;
typedef struct {
const IGRP_IP_VERSION_RANGE *pRanges;
NvU32 numRanges;
IGrp_ipVersions_install *ifacesInstallFn;
} IGRP_IP_VERSIONS_ENTRY;
struct IGRP_IP_VERSIONS_TABLE_INFO {
POBJGPU pGpu;
Dynamic *pDynamic; // eg: pBiff
const IGRP_IP_VERSIONS_ENTRY *pTable;
NvU32 numEntries;
IGrp_ipVersions_wrapup *ifacesWrapupFn; // overrides and asserts
};
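//
// Illustrative sketch (hypothetical names, not from this header): an engine
// HAL would typically describe the IP versions it supports with a ranges
// table shaped like the one below, built from the IGRP_IP_VERSION_RANGE and
// IGRP_IP_VERSIONS_ENTRY types above.
//
//     static const IGRP_IP_VERSION_RANGE dispIpRanges[] = {
//         { 0x04020000, 0x0402FFFF },                      // inclusive v0..v1 range
//     };
//     static const IGRP_IP_VERSIONS_ENTRY dispIpEntries[] = {
//         { dispIpRanges, 1, dispIfacesInstall_v04_02 },   // hypothetical install callback
//     };
//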
// HAL_IMPLEMENTATION enum
typedef enum
{
HAL_IMPL_GF100,
HAL_IMPL_GF100B,
HAL_IMPL_GF104,
HAL_IMPL_GF104B,
HAL_IMPL_GF106,
HAL_IMPL_GF106B,
HAL_IMPL_GF108,
HAL_IMPL_GF110D,
HAL_IMPL_GF110,
HAL_IMPL_GF117,
HAL_IMPL_GF118,
HAL_IMPL_GF119,
HAL_IMPL_GF110F,
HAL_IMPL_GF110F2,
HAL_IMPL_GF110F3,
HAL_IMPL_GK104,
HAL_IMPL_GK106,
HAL_IMPL_GK107,
HAL_IMPL_GK20A,
HAL_IMPL_GK110,
HAL_IMPL_GK110B,
HAL_IMPL_GK110C,
HAL_IMPL_GK208,
HAL_IMPL_GK208S,
HAL_IMPL_GM107,
HAL_IMPL_GM108,
HAL_IMPL_GM200,
HAL_IMPL_GM204,
HAL_IMPL_GM206,
HAL_IMPL_GP100,
HAL_IMPL_GP102,
HAL_IMPL_GP104,
HAL_IMPL_GP106,
HAL_IMPL_GP107,
HAL_IMPL_GP108,
HAL_IMPL_GV100,
HAL_IMPL_GV11B,
HAL_IMPL_TU102,
HAL_IMPL_TU104,
HAL_IMPL_TU106,
HAL_IMPL_TU116,
HAL_IMPL_TU117,
HAL_IMPL_GA100,
HAL_IMPL_GA102,
HAL_IMPL_GA103,
HAL_IMPL_GA104,
HAL_IMPL_GA106,
HAL_IMPL_GA107,
HAL_IMPL_GA10B,
HAL_IMPL_GA102F,
HAL_IMPL_T001_FERMI_NOT_EXIST,
HAL_IMPL_T124,
HAL_IMPL_T132,
HAL_IMPL_T210,
HAL_IMPL_T186,
HAL_IMPL_T194,
HAL_IMPL_T002_TURING_NOT_EXIST,
HAL_IMPL_T234,
HAL_IMPL_T234D,
HAL_IMPL_AMODEL,
HAL_IMPL_MAXIMUM, // NOTE: this symbol must be at the end of the enum list.
// It is used to allocate arrays and control loop iterations.
} HAL_IMPLEMENTATION;
//
// HAL implementation names for debug & logging use
//
#define HAL_IMPL_NAME_LIST \
{ HAL_IMPL_T234D, "T234D" }
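//
// Illustrative sketch (hypothetical consumer, not from this header): debug or
// logging code would typically expand HAL_IMPL_NAME_LIST into an id-to-name
// table, e.g.:
//
//     static const struct { HAL_IMPLEMENTATION impl; const char *name; }
//         halImplNames[] = { HAL_IMPL_NAME_LIST };
//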
#endif // _G_RMCFG_HAL_H_

@@ -0,0 +1,94 @@
// This file is automatically generated by rmconfig - DO NOT EDIT!
//
// Hal registration entry points.
//
// Profile: devel-soc-disp-dce-client
// Template: templates/gt_hal_archimpl.h
//
// Chips: T234D
//
#ifndef _G_RMCFG_HAL_ARCHIMPL_H_
#define _G_RMCFG_HAL_ARCHIMPL_H_
#include "g_hal.h"
// The OpenRM Tegra build uses a different include path.
// The following lines refer to the same file.
// TODO: merge them
#include "nv_ref.h"
//
// CHIPID array Implementation
//
const struct ChipID
{
NvU32 arch;
NvU32 impl;
NvU32 hidrev;
} chipID[] = {
{ 0x0, 0x0, 0x0 } , // GF100 (disabled)
{ 0x0, 0x0, 0x0 } , // GF100B (disabled)
{ 0x0, 0x0, 0x0 } , // GF104 (disabled)
{ 0x0, 0x0, 0x0 } , // GF104B (disabled)
{ 0x0, 0x0, 0x0 } , // GF106 (disabled)
{ 0x0, 0x0, 0x0 } , // GF106B (disabled)
{ 0x0, 0x0, 0x0 } , // GF108 (disabled)
{ 0x0, 0x0, 0x0 } , // GF110D (disabled)
{ 0x0, 0x0, 0x0 } , // GF110 (disabled)
{ 0x0, 0x0, 0x0 } , // GF117 (disabled)
{ 0x0, 0x0, 0x0 } , // GF118 (disabled)
{ 0x0, 0x0, 0x0 } , // GF119 (disabled)
{ 0x0, 0x0, 0x0 } , // GF110F (disabled)
{ 0x0, 0x0, 0x0 } , // GF110F2 (disabled)
{ 0x0, 0x0, 0x0 } , // GF110F3 (disabled)
{ 0x0, 0x0, 0x0 } , // GK104 (disabled)
{ 0x0, 0x0, 0x0 } , // GK106 (disabled)
{ 0x0, 0x0, 0x0 } , // GK107 (disabled)
{ 0x0, 0x0, 0x0 } , // GK20A (disabled)
{ 0x0, 0x0, 0x0 } , // GK110 (disabled)
{ 0x0, 0x0, 0x0 } , // GK110B (disabled)
{ 0x0, 0x0, 0x0 } , // GK110C (disabled)
{ 0x0, 0x0, 0x0 } , // GK208 (disabled)
{ 0x0, 0x0, 0x0 } , // GK208S (disabled)
{ 0x0, 0x0, 0x0 } , // GM107 (disabled)
{ 0x0, 0x0, 0x0 } , // GM108 (disabled)
{ 0x0, 0x0, 0x0 } , // GM200 (disabled)
{ 0x0, 0x0, 0x0 } , // GM204 (disabled)
{ 0x0, 0x0, 0x0 } , // GM206 (disabled)
{ 0x0, 0x0, 0x0 } , // GP100 (disabled)
{ 0x0, 0x0, 0x0 } , // GP102 (disabled)
{ 0x0, 0x0, 0x0 } , // GP104 (disabled)
{ 0x0, 0x0, 0x0 } , // GP106 (disabled)
{ 0x0, 0x0, 0x0 } , // GP107 (disabled)
{ 0x0, 0x0, 0x0 } , // GP108 (disabled)
{ 0x0, 0x0, 0x0 } , // GV100 (disabled)
{ 0x0, 0x0, 0x0 } , // GV11B (disabled)
{ 0x0, 0x0, 0x0 } , // TU102 (disabled)
{ 0x0, 0x0, 0x0 } , // TU104 (disabled)
{ 0x0, 0x0, 0x0 } , // TU106 (disabled)
{ 0x0, 0x0, 0x0 } , // TU116 (disabled)
{ 0x0, 0x0, 0x0 } , // TU117 (disabled)
{ 0x0, 0x0, 0x0 } , // GA100 (disabled)
{ 0x0, 0x0, 0x0 } , // GA102 (disabled)
{ 0x0, 0x0, 0x0 } , // GA103 (disabled)
{ 0x0, 0x0, 0x0 } , // GA104 (disabled)
{ 0x0, 0x0, 0x0 } , // GA106 (disabled)
{ 0x0, 0x0, 0x0 } , // GA107 (disabled)
{ 0x0, 0x0, 0x0 } , // GA10B (disabled)
{ 0x0, 0x0, 0x0 } , // GA102F (disabled)
{ 0x0, 0x0, 0x0 } , // T001_FERMI_NOT_EXIST (disabled)
{ 0x0, 0x0, 0x0 } , // T124 (disabled)
{ 0x0, 0x0, 0x0 } , // T132 (disabled)
{ 0x0, 0x0, 0x0 } , // T210 (disabled)
{ 0x0, 0x0, 0x0 } , // T186 (disabled)
{ 0x0, 0x0, 0x0 } , // T194 (disabled)
{ 0x0, 0x0, 0x0 } , // T002_TURING_NOT_EXIST (disabled)
{ 0x0, 0x0, 0x0 } , // T234 (disabled)
{ 0x0, 0x0, 0x235 } , // T234D
{ 0x0, 0x0, 0x0 } , // AMODEL (disabled)
};
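//
// Illustrative note: chipID[] is laid out in the same chip order as the
// HAL_IMPLEMENTATION enum, so a lookup such as
//
//     NvU32 hidrev = chipID[HAL_IMPL_T234D].hidrev;   // 0x235 for T234D
//
// returns 0x235 for T234D and zeros for every disabled chip.
//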
#endif // _G_RMCFG_HAL_ARCHIMPL_H_

@@ -0,0 +1,154 @@
#define NVOC_HAL_MGR_H_PRIVATE_ACCESS_ALLOWED
#include "nvoc/runtime.h"
#include "nvoc/rtti.h"
#include "nvtypes.h"
#include "nvport/nvport.h"
#include "nvport/inline/util_valist.h"
#include "utils/nvassert.h"
#include "g_hal_mgr_nvoc.h"
#ifdef DEBUG
char __nvoc_class_id_uniqueness_check_0xbf26de = 1;
#endif
extern const struct NVOC_CLASS_DEF __nvoc_class_def_OBJHALMGR;
extern const struct NVOC_CLASS_DEF __nvoc_class_def_Object;
void __nvoc_init_OBJHALMGR(OBJHALMGR*);
void __nvoc_init_funcTable_OBJHALMGR(OBJHALMGR*);
NV_STATUS __nvoc_ctor_OBJHALMGR(OBJHALMGR*);
void __nvoc_init_dataField_OBJHALMGR(OBJHALMGR*);
void __nvoc_dtor_OBJHALMGR(OBJHALMGR*);
extern const struct NVOC_EXPORT_INFO __nvoc_export_info_OBJHALMGR;
static const struct NVOC_RTTI __nvoc_rtti_OBJHALMGR_OBJHALMGR = {
/*pClassDef=*/ &__nvoc_class_def_OBJHALMGR,
/*dtor=*/ (NVOC_DYNAMIC_DTOR) &__nvoc_dtor_OBJHALMGR,
/*offset=*/ 0,
};
static const struct NVOC_RTTI __nvoc_rtti_OBJHALMGR_Object = {
/*pClassDef=*/ &__nvoc_class_def_Object,
/*dtor=*/ &__nvoc_destructFromBase,
/*offset=*/ NV_OFFSETOF(OBJHALMGR, __nvoc_base_Object),
};
static const struct NVOC_CASTINFO __nvoc_castinfo_OBJHALMGR = {
/*numRelatives=*/ 2,
/*relatives=*/ {
&__nvoc_rtti_OBJHALMGR_OBJHALMGR,
&__nvoc_rtti_OBJHALMGR_Object,
},
};
const struct NVOC_CLASS_DEF __nvoc_class_def_OBJHALMGR =
{
/*classInfo=*/ {
/*size=*/ sizeof(OBJHALMGR),
/*classId=*/ classId(OBJHALMGR),
/*providerId=*/ &__nvoc_rtti_provider,
#if NV_PRINTF_STRINGS_ALLOWED
/*name=*/ "OBJHALMGR",
#endif
},
/*objCreatefn=*/ (NVOC_DYNAMIC_OBJ_CREATE) &__nvoc_objCreateDynamic_OBJHALMGR,
/*pCastInfo=*/ &__nvoc_castinfo_OBJHALMGR,
/*pExportInfo=*/ &__nvoc_export_info_OBJHALMGR
};
const struct NVOC_EXPORT_INFO __nvoc_export_info_OBJHALMGR =
{
/*numEntries=*/ 0,
/*pExportEntries=*/ 0
};
void __nvoc_dtor_Object(Object*);
void __nvoc_dtor_OBJHALMGR(OBJHALMGR *pThis) {
__nvoc_halmgrDestruct(pThis);
__nvoc_dtor_Object(&pThis->__nvoc_base_Object);
PORT_UNREFERENCED_VARIABLE(pThis);
}
void __nvoc_init_dataField_OBJHALMGR(OBJHALMGR *pThis) {
PORT_UNREFERENCED_VARIABLE(pThis);
}
NV_STATUS __nvoc_ctor_Object(Object* );
NV_STATUS __nvoc_ctor_OBJHALMGR(OBJHALMGR *pThis) {
NV_STATUS status = NV_OK;
status = __nvoc_ctor_Object(&pThis->__nvoc_base_Object);
if (status != NV_OK) goto __nvoc_ctor_OBJHALMGR_fail_Object;
__nvoc_init_dataField_OBJHALMGR(pThis);
status = __nvoc_halmgrConstruct(pThis);
if (status != NV_OK) goto __nvoc_ctor_OBJHALMGR_fail__init;
goto __nvoc_ctor_OBJHALMGR_exit; // Success
__nvoc_ctor_OBJHALMGR_fail__init:
__nvoc_dtor_Object(&pThis->__nvoc_base_Object);
__nvoc_ctor_OBJHALMGR_fail_Object:
__nvoc_ctor_OBJHALMGR_exit:
return status;
}
static void __nvoc_init_funcTable_OBJHALMGR_1(OBJHALMGR *pThis) {
PORT_UNREFERENCED_VARIABLE(pThis);
}
void __nvoc_init_funcTable_OBJHALMGR(OBJHALMGR *pThis) {
__nvoc_init_funcTable_OBJHALMGR_1(pThis);
}
void __nvoc_init_Object(Object*);
void __nvoc_init_OBJHALMGR(OBJHALMGR *pThis) {
pThis->__nvoc_pbase_OBJHALMGR = pThis;
pThis->__nvoc_pbase_Object = &pThis->__nvoc_base_Object;
__nvoc_init_Object(&pThis->__nvoc_base_Object);
__nvoc_init_funcTable_OBJHALMGR(pThis);
}
NV_STATUS __nvoc_objCreate_OBJHALMGR(OBJHALMGR **ppThis, Dynamic *pParent, NvU32 createFlags) {
NV_STATUS status;
Object *pParentObj;
OBJHALMGR *pThis;
pThis = portMemAllocNonPaged(sizeof(OBJHALMGR));
if (pThis == NULL) return NV_ERR_NO_MEMORY;
portMemSet(pThis, 0, sizeof(OBJHALMGR));
__nvoc_initRtti(staticCast(pThis, Dynamic), &__nvoc_class_def_OBJHALMGR);
if (pParent != NULL && !(createFlags & NVOC_OBJ_CREATE_FLAGS_PARENT_HALSPEC_ONLY))
{
pParentObj = dynamicCast(pParent, Object);
objAddChild(pParentObj, &pThis->__nvoc_base_Object);
}
else
{
pThis->__nvoc_base_Object.pParent = NULL;
}
__nvoc_init_OBJHALMGR(pThis);
status = __nvoc_ctor_OBJHALMGR(pThis);
if (status != NV_OK) goto __nvoc_objCreate_OBJHALMGR_cleanup;
*ppThis = pThis;
return NV_OK;
__nvoc_objCreate_OBJHALMGR_cleanup:
// do not call destructors here since the constructor already called them
portMemFree(pThis);
return status;
}
NV_STATUS __nvoc_objCreateDynamic_OBJHALMGR(OBJHALMGR **ppThis, Dynamic *pParent, NvU32 createFlags, va_list args) {
NV_STATUS status;
status = __nvoc_objCreate_OBJHALMGR(ppThis, pParent, createFlags);
return status;
}

@@ -0,0 +1,139 @@
#ifndef _G_HAL_MGR_NVOC_H_
#define _G_HAL_MGR_NVOC_H_
#include "nvoc/runtime.h"
#ifdef __cplusplus
extern "C" {
#endif
/*
* SPDX-FileCopyrightText: Copyright (c) 2019 NVIDIA CORPORATION & AFFILIATES. All rights reserved.
* SPDX-License-Identifier: MIT
*
* Permission is hereby granted, free of charge, to any person obtaining a
* copy of this software and associated documentation files (the "Software"),
* to deal in the Software without restriction, including without limitation
* the rights to use, copy, modify, merge, publish, distribute, sublicense,
* and/or sell copies of the Software, and to permit persons to whom the
* Software is furnished to do so, subject to the following conditions:
*
* The above copyright notice and this permission notice shall be included in
* all copies or substantial portions of the Software.
*
* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
* IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
* FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL
* THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
* LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
* FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER
* DEALINGS IN THE SOFTWARE.
*/
#include "g_hal_mgr_nvoc.h"
#ifndef _HAL_MGR_H_
#define _HAL_MGR_H_
#include "core/core.h"
#include "core/info_block.h"
#include "core/hal.h"
#define HALMGR_GET_HAL(p, halid) halmgrGetHal((p), halid)
typedef struct OBJHALMGR *POBJHALMGR;
#ifndef __NVOC_CLASS_OBJHALMGR_TYPEDEF__
#define __NVOC_CLASS_OBJHALMGR_TYPEDEF__
typedef struct OBJHALMGR OBJHALMGR;
#endif /* __NVOC_CLASS_OBJHALMGR_TYPEDEF__ */
#ifndef __nvoc_class_id_OBJHALMGR
#define __nvoc_class_id_OBJHALMGR 0xbf26de
#endif /* __nvoc_class_id_OBJHALMGR */
#ifdef NVOC_HAL_MGR_H_PRIVATE_ACCESS_ALLOWED
#define PRIVATE_FIELD(x) x
#else
#define PRIVATE_FIELD(x) NVOC_PRIVATE_FIELD(x)
#endif
struct OBJHALMGR {
const struct NVOC_RTTI *__nvoc_rtti;
struct Object __nvoc_base_Object;
struct Object *__nvoc_pbase_Object;
struct OBJHALMGR *__nvoc_pbase_OBJHALMGR;
struct OBJHAL *pHalList[60];
};
#ifndef __NVOC_CLASS_OBJHALMGR_TYPEDEF__
#define __NVOC_CLASS_OBJHALMGR_TYPEDEF__
typedef struct OBJHALMGR OBJHALMGR;
#endif /* __NVOC_CLASS_OBJHALMGR_TYPEDEF__ */
#ifndef __nvoc_class_id_OBJHALMGR
#define __nvoc_class_id_OBJHALMGR 0xbf26de
#endif /* __nvoc_class_id_OBJHALMGR */
extern const struct NVOC_CLASS_DEF __nvoc_class_def_OBJHALMGR;
#define __staticCast_OBJHALMGR(pThis) \
((pThis)->__nvoc_pbase_OBJHALMGR)
#ifdef __nvoc_hal_mgr_h_disabled
#define __dynamicCast_OBJHALMGR(pThis) ((OBJHALMGR*)NULL)
#else //__nvoc_hal_mgr_h_disabled
#define __dynamicCast_OBJHALMGR(pThis) \
((OBJHALMGR*)__nvoc_dynamicCast(staticCast((pThis), Dynamic), classInfo(OBJHALMGR)))
#endif //__nvoc_hal_mgr_h_disabled
NV_STATUS __nvoc_objCreateDynamic_OBJHALMGR(OBJHALMGR**, Dynamic*, NvU32, va_list);
NV_STATUS __nvoc_objCreate_OBJHALMGR(OBJHALMGR**, Dynamic*, NvU32);
#define __objCreate_OBJHALMGR(ppNewObj, pParent, createFlags) \
__nvoc_objCreate_OBJHALMGR((ppNewObj), staticCast((pParent), Dynamic), (createFlags))
NV_STATUS halmgrConstruct_IMPL(struct OBJHALMGR *arg_);
#define __nvoc_halmgrConstruct(arg_) halmgrConstruct_IMPL(arg_)
void halmgrDestruct_IMPL(struct OBJHALMGR *arg0);
#define __nvoc_halmgrDestruct(arg0) halmgrDestruct_IMPL(arg0)
NV_STATUS halmgrCreateHal_IMPL(struct OBJHALMGR *arg0, NvU32 arg1);
#ifdef __nvoc_hal_mgr_h_disabled
static inline NV_STATUS halmgrCreateHal(struct OBJHALMGR *arg0, NvU32 arg1) {
NV_ASSERT_FAILED_PRECOMP("OBJHALMGR was disabled!");
return NV_ERR_NOT_SUPPORTED;
}
#else //__nvoc_hal_mgr_h_disabled
#define halmgrCreateHal(arg0, arg1) halmgrCreateHal_IMPL(arg0, arg1)
#endif //__nvoc_hal_mgr_h_disabled
NV_STATUS halmgrGetHalForGpu_IMPL(struct OBJHALMGR *arg0, NvU32 arg1, NvU32 arg2, NvU32 *arg3);
#ifdef __nvoc_hal_mgr_h_disabled
static inline NV_STATUS halmgrGetHalForGpu(struct OBJHALMGR *arg0, NvU32 arg1, NvU32 arg2, NvU32 *arg3) {
NV_ASSERT_FAILED_PRECOMP("OBJHALMGR was disabled!");
return NV_ERR_NOT_SUPPORTED;
}
#else //__nvoc_hal_mgr_h_disabled
#define halmgrGetHalForGpu(arg0, arg1, arg2, arg3) halmgrGetHalForGpu_IMPL(arg0, arg1, arg2, arg3)
#endif //__nvoc_hal_mgr_h_disabled
struct OBJHAL *halmgrGetHal_IMPL(struct OBJHALMGR *arg0, NvU32 arg1);
#ifdef __nvoc_hal_mgr_h_disabled
static inline struct OBJHAL *halmgrGetHal(struct OBJHALMGR *arg0, NvU32 arg1) {
NV_ASSERT_FAILED_PRECOMP("OBJHALMGR was disabled!");
return NULL;
}
#else //__nvoc_hal_mgr_h_disabled
#define halmgrGetHal(arg0, arg1) halmgrGetHal_IMPL(arg0, arg1)
#endif //__nvoc_hal_mgr_h_disabled
#undef PRIVATE_FIELD
#endif
#ifdef __cplusplus
} // extern "C"
#endif
#endif // _G_HAL_MGR_NVOC_H_

@@ -0,0 +1,148 @@
#define NVOC_HAL_H_PRIVATE_ACCESS_ALLOWED
#include "nvoc/runtime.h"
#include "nvoc/rtti.h"
#include "nvtypes.h"
#include "nvport/nvport.h"
#include "nvport/inline/util_valist.h"
#include "utils/nvassert.h"
#include "g_hal_nvoc.h"
#ifdef DEBUG
char __nvoc_class_id_uniqueness_check_0xe803b6 = 1;
#endif
extern const struct NVOC_CLASS_DEF __nvoc_class_def_OBJHAL;
extern const struct NVOC_CLASS_DEF __nvoc_class_def_Object;
void __nvoc_init_OBJHAL(OBJHAL*);
void __nvoc_init_funcTable_OBJHAL(OBJHAL*);
NV_STATUS __nvoc_ctor_OBJHAL(OBJHAL*);
void __nvoc_init_dataField_OBJHAL(OBJHAL*);
void __nvoc_dtor_OBJHAL(OBJHAL*);
extern const struct NVOC_EXPORT_INFO __nvoc_export_info_OBJHAL;
static const struct NVOC_RTTI __nvoc_rtti_OBJHAL_OBJHAL = {
/*pClassDef=*/ &__nvoc_class_def_OBJHAL,
/*dtor=*/ (NVOC_DYNAMIC_DTOR) &__nvoc_dtor_OBJHAL,
/*offset=*/ 0,
};
static const struct NVOC_RTTI __nvoc_rtti_OBJHAL_Object = {
/*pClassDef=*/ &__nvoc_class_def_Object,
/*dtor=*/ &__nvoc_destructFromBase,
/*offset=*/ NV_OFFSETOF(OBJHAL, __nvoc_base_Object),
};
static const struct NVOC_CASTINFO __nvoc_castinfo_OBJHAL = {
/*numRelatives=*/ 2,
/*relatives=*/ {
&__nvoc_rtti_OBJHAL_OBJHAL,
&__nvoc_rtti_OBJHAL_Object,
},
};
const struct NVOC_CLASS_DEF __nvoc_class_def_OBJHAL =
{
/*classInfo=*/ {
/*size=*/ sizeof(OBJHAL),
/*classId=*/ classId(OBJHAL),
/*providerId=*/ &__nvoc_rtti_provider,
#if NV_PRINTF_STRINGS_ALLOWED
/*name=*/ "OBJHAL",
#endif
},
/*objCreatefn=*/ (NVOC_DYNAMIC_OBJ_CREATE) &__nvoc_objCreateDynamic_OBJHAL,
/*pCastInfo=*/ &__nvoc_castinfo_OBJHAL,
/*pExportInfo=*/ &__nvoc_export_info_OBJHAL
};
const struct NVOC_EXPORT_INFO __nvoc_export_info_OBJHAL =
{
/*numEntries=*/ 0,
/*pExportEntries=*/ 0
};
void __nvoc_dtor_Object(Object*);
void __nvoc_dtor_OBJHAL(OBJHAL *pThis) {
__nvoc_dtor_Object(&pThis->__nvoc_base_Object);
PORT_UNREFERENCED_VARIABLE(pThis);
}
void __nvoc_init_dataField_OBJHAL(OBJHAL *pThis) {
PORT_UNREFERENCED_VARIABLE(pThis);
}
NV_STATUS __nvoc_ctor_Object(Object* );
NV_STATUS __nvoc_ctor_OBJHAL(OBJHAL *pThis) {
NV_STATUS status = NV_OK;
status = __nvoc_ctor_Object(&pThis->__nvoc_base_Object);
if (status != NV_OK) goto __nvoc_ctor_OBJHAL_fail_Object;
__nvoc_init_dataField_OBJHAL(pThis);
goto __nvoc_ctor_OBJHAL_exit; // Success
__nvoc_ctor_OBJHAL_fail_Object:
__nvoc_ctor_OBJHAL_exit:
return status;
}
static void __nvoc_init_funcTable_OBJHAL_1(OBJHAL *pThis) {
PORT_UNREFERENCED_VARIABLE(pThis);
}
void __nvoc_init_funcTable_OBJHAL(OBJHAL *pThis) {
__nvoc_init_funcTable_OBJHAL_1(pThis);
}
void __nvoc_init_Object(Object*);
void __nvoc_init_OBJHAL(OBJHAL *pThis) {
pThis->__nvoc_pbase_OBJHAL = pThis;
pThis->__nvoc_pbase_Object = &pThis->__nvoc_base_Object;
__nvoc_init_Object(&pThis->__nvoc_base_Object);
__nvoc_init_funcTable_OBJHAL(pThis);
}
NV_STATUS __nvoc_objCreate_OBJHAL(OBJHAL **ppThis, Dynamic *pParent, NvU32 createFlags) {
NV_STATUS status;
Object *pParentObj;
OBJHAL *pThis;
pThis = portMemAllocNonPaged(sizeof(OBJHAL));
if (pThis == NULL) return NV_ERR_NO_MEMORY;
portMemSet(pThis, 0, sizeof(OBJHAL));
__nvoc_initRtti(staticCast(pThis, Dynamic), &__nvoc_class_def_OBJHAL);
if (pParent != NULL && !(createFlags & NVOC_OBJ_CREATE_FLAGS_PARENT_HALSPEC_ONLY))
{
pParentObj = dynamicCast(pParent, Object);
objAddChild(pParentObj, &pThis->__nvoc_base_Object);
}
else
{
pThis->__nvoc_base_Object.pParent = NULL;
}
__nvoc_init_OBJHAL(pThis);
status = __nvoc_ctor_OBJHAL(pThis);
if (status != NV_OK) goto __nvoc_objCreate_OBJHAL_cleanup;
*ppThis = pThis;
return NV_OK;
__nvoc_objCreate_OBJHAL_cleanup:
// do not call destructors here since the constructor already called them
portMemFree(pThis);
return status;
}
NV_STATUS __nvoc_objCreateDynamic_OBJHAL(OBJHAL **ppThis, Dynamic *pParent, NvU32 createFlags, va_list args) {
NV_STATUS status;
status = __nvoc_objCreate_OBJHAL(ppThis, pParent, createFlags);
return status;
}

@@ -0,0 +1,146 @@
#ifndef _G_HAL_NVOC_H_
#define _G_HAL_NVOC_H_
#include "nvoc/runtime.h"
#ifdef __cplusplus
extern "C" {
#endif
/*
* SPDX-FileCopyrightText: Copyright (c) 1993-2021 NVIDIA CORPORATION & AFFILIATES. All rights reserved.
* SPDX-License-Identifier: MIT
*
* Permission is hereby granted, free of charge, to any person obtaining a
* copy of this software and associated documentation files (the "Software"),
* to deal in the Software without restriction, including without limitation
* the rights to use, copy, modify, merge, publish, distribute, sublicense,
* and/or sell copies of the Software, and to permit persons to whom the
* Software is furnished to do so, subject to the following conditions:
*
* The above copyright notice and this permission notice shall be included in
* all copies or substantial portions of the Software.
*
* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
* IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
* FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL
* THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
* LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
* FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER
* DEALINGS IN THE SOFTWARE.
*/
#include "g_hal_nvoc.h"
#ifndef _OBJHAL_H_
#define _OBJHAL_H_
/**************** Resource Manager Defines and Structures ******************\
* *
* Module: hal.h *
* Defines and structures used for the HAL Object. *
* *
\***************************************************************************/
#include "core/core.h"
#include "core/info_block.h"
//
// HAL Info Block Id:
//
// 31 7 0
// .-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+
// | 24 bits | 8 bits |
// .-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+
// Info ID # Impl
//
// Impl: The hal implementation
// Info ID number: unique id for a particular info type
//
#define MKHALINFOID(impl,infoId) (((infoId & 0xffffff) << 8) | (impl & 0xff))
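//
// Illustrative sketch (hypothetical values): MKHALINFOID packs the 24-bit info
// ID into bits 31:8 and the 8-bit HAL implementation into bits 7:0, so the two
// fields can be recovered with simple shifts and masks:
//
//     NvU32 id     = MKHALINFOID(HAL_IMPL_T234D, 0x2); // 0x2 is a made-up info ID
//     NvU32 impl   = id & 0xff;                        // HAL_IMPL_T234D
//     NvU32 infoId = (id >> 8) & 0xffffff;             // 0x2
//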
typedef struct MODULEDESCRIPTOR MODULEDESCRIPTOR, *PMODULEDESCRIPTOR;
struct MODULEDESCRIPTOR {
// (rmconfig) per-obj function ptr to init hal interfaces
const HAL_IFACE_SETUP *pHalSetIfaces;
};
typedef struct OBJHAL *POBJHAL;
#ifndef __NVOC_CLASS_OBJHAL_TYPEDEF__
#define __NVOC_CLASS_OBJHAL_TYPEDEF__
typedef struct OBJHAL OBJHAL;
#endif /* __NVOC_CLASS_OBJHAL_TYPEDEF__ */
#ifndef __nvoc_class_id_OBJHAL
#define __nvoc_class_id_OBJHAL 0xe803b6
#endif /* __nvoc_class_id_OBJHAL */
#ifdef NVOC_HAL_H_PRIVATE_ACCESS_ALLOWED
#define PRIVATE_FIELD(x) x
#else
#define PRIVATE_FIELD(x) NVOC_PRIVATE_FIELD(x)
#endif
struct OBJHAL {
const struct NVOC_RTTI *__nvoc_rtti;
struct Object __nvoc_base_Object;
struct Object *__nvoc_pbase_Object;
struct OBJHAL *__nvoc_pbase_OBJHAL;
struct MODULEDESCRIPTOR moduleDescriptor;
};
#ifndef __NVOC_CLASS_OBJHAL_TYPEDEF__
#define __NVOC_CLASS_OBJHAL_TYPEDEF__
typedef struct OBJHAL OBJHAL;
#endif /* __NVOC_CLASS_OBJHAL_TYPEDEF__ */
#ifndef __nvoc_class_id_OBJHAL
#define __nvoc_class_id_OBJHAL 0xe803b6
#endif /* __nvoc_class_id_OBJHAL */
extern const struct NVOC_CLASS_DEF __nvoc_class_def_OBJHAL;
#define __staticCast_OBJHAL(pThis) \
((pThis)->__nvoc_pbase_OBJHAL)
#ifdef __nvoc_hal_h_disabled
#define __dynamicCast_OBJHAL(pThis) ((OBJHAL*)NULL)
#else //__nvoc_hal_h_disabled
#define __dynamicCast_OBJHAL(pThis) \
((OBJHAL*)__nvoc_dynamicCast(staticCast((pThis), Dynamic), classInfo(OBJHAL)))
#endif //__nvoc_hal_h_disabled
NV_STATUS __nvoc_objCreateDynamic_OBJHAL(OBJHAL**, Dynamic*, NvU32, va_list);
NV_STATUS __nvoc_objCreate_OBJHAL(OBJHAL**, Dynamic*, NvU32);
#define __objCreate_OBJHAL(ppNewObj, pParent, createFlags) \
__nvoc_objCreate_OBJHAL((ppNewObj), staticCast((pParent), Dynamic), (createFlags))
PMODULEDESCRIPTOR objhalGetModuleDescriptor_IMPL(struct OBJHAL *pHal);
#ifdef __nvoc_hal_h_disabled
static inline PMODULEDESCRIPTOR objhalGetModuleDescriptor(struct OBJHAL *pHal) {
NV_ASSERT_FAILED_PRECOMP("OBJHAL was disabled!");
return NULL;
}
#else //__nvoc_hal_h_disabled
#define objhalGetModuleDescriptor(pHal) objhalGetModuleDescriptor_IMPL(pHal)
#endif //__nvoc_hal_h_disabled
#undef PRIVATE_FIELD
//--------------------------------------------------------------------
// RM routines.
//--------------------------------------------------------------------
NV_STATUS ipVersionsSetupHal(struct OBJGPU *, void *pDynamic, IGrp_ipVersions_getInfo getInfoFn);
#endif // _OBJHAL_H_
#ifdef __cplusplus
} // extern "C"
#endif
#endif // _G_HAL_NVOC_H_

@@ -0,0 +1,66 @@
// This file is automatically generated by rmconfig - DO NOT EDIT!
//
// Private HAL support for halgen.
//
// Profile: devel-soc-disp-dce-client
// Template: templates/gt_hal_private.h
//
// Chips: T234D
//
//
// This file is included in several .c files for per-chip HAL registration and
// per-engine HAL function assignment. The macros RMCFG_ENGINE_SETUP and
// RMCFG_HAL_SETUP_xxx are used to provide different content for those .c files.
//
#ifndef _G_RMCFG_HAL_PRIVATE_H_
#define _G_RMCFG_HAL_PRIVATE_H_
#include "g_hal.h"
// establish the per-chip RMCFG_HAL_SETUP_chip #defines as needed.
#if defined(RMCFG_ENGINE_SETUP)
// setup all enabled chip families
#if defined(RMCFG_HAL_SETUP_ALL)
# define RMCFG_HAL_SETUP_T23XD 1
#endif // RMCFG_HAL_SETUP_ALL
//
// setup all enabled chips in each enabled family
//
#if defined(RMCFG_HAL_SETUP_T23XD)
# define RMCFG_HAL_SETUP_T234D 1
#endif // T23XD
#endif // RMCFG_ENGINE_SETUP
// pull in private headers for each engine
//
// per-GPU structure with an interface init function for each engine
//
// registerHalModule function declaration
NV_STATUS registerHalModule(NvU32, const HAL_IFACE_SETUP *);
#if defined(RMCFG_HAL_SETUP_T234D)
static const HAL_IFACE_SETUP halIface_T234D = {
};
NV_STATUS registerHalModule_T234D(void)
{
return registerHalModule(HAL_IMPL_T234D, &halIface_T234D);
}
#endif // T23XD or T234D
#endif // _G_RMCFG_HAL_PRIVATE_H_

@@ -0,0 +1,51 @@
// This file is automatically generated by rmconfig - DO NOT EDIT!
//
// Hal registration entry points.
//
// Profile: devel-soc-disp-dce-client
// Template: templates/gt_hal_register.h
//
// Chips: T234D
//
#ifndef _G_RMCFG_HAL_REGISTER_H_
#define _G_RMCFG_HAL_REGISTER_H_
//
// per-family HAL registration entry points
//
NV_STATUS registerHalModule_T234D(void);
static NV_STATUS NV_INLINE REGISTER_T23XD_HALS(void)
{
NV_STATUS rmStatus;
rmStatus = registerHalModule_T234D();
if (rmStatus != NV_OK)
return rmStatus;
return NV_OK;
}
//
// This routine can be used by platform dependent code to
// enable all HAL modules.
//
static NV_STATUS NV_INLINE REGISTER_ALL_HALS(void)
{
NV_STATUS rmStatus;
rmStatus = REGISTER_T23XD_HALS();
if (rmStatus != NV_OK)
{
return rmStatus;
}
return NV_OK;
}
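//
// Illustrative sketch (hypothetical caller name): platform-dependent init code
// would typically invoke the helper once during RM startup and propagate any
// failure, e.g.:
//
//     NV_STATUS rmPlatformRegisterHals(void)
//     {
//         return REGISTER_ALL_HALS();   // registers the T234D HAL via REGISTER_T23XD_HALS()
//     }
//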
#endif // _G_RMCFG_HAL_REGISTER_H_

@@ -0,0 +1,327 @@
#define NVOC_HDA_CODEC_API_H_PRIVATE_ACCESS_ALLOWED
#include "nvoc/runtime.h"
#include "nvoc/rtti.h"
#include "nvtypes.h"
#include "nvport/nvport.h"
#include "nvport/inline/util_valist.h"
#include "utils/nvassert.h"
#include "g_hda_codec_api_nvoc.h"
#ifdef DEBUG
char __nvoc_class_id_uniqueness_check_0xf59a20 = 1;
#endif
extern const struct NVOC_CLASS_DEF __nvoc_class_def_Hdacodec;
extern const struct NVOC_CLASS_DEF __nvoc_class_def_Object;
extern const struct NVOC_CLASS_DEF __nvoc_class_def_RsResource;
extern const struct NVOC_CLASS_DEF __nvoc_class_def_RmResourceCommon;
extern const struct NVOC_CLASS_DEF __nvoc_class_def_RmResource;
extern const struct NVOC_CLASS_DEF __nvoc_class_def_GpuResource;
void __nvoc_init_Hdacodec(Hdacodec*);
void __nvoc_init_funcTable_Hdacodec(Hdacodec*);
NV_STATUS __nvoc_ctor_Hdacodec(Hdacodec*, struct CALL_CONTEXT * arg_pCallContext, struct RS_RES_ALLOC_PARAMS_INTERNAL * arg_pParams);
void __nvoc_init_dataField_Hdacodec(Hdacodec*);
void __nvoc_dtor_Hdacodec(Hdacodec*);
extern const struct NVOC_EXPORT_INFO __nvoc_export_info_Hdacodec;
static const struct NVOC_RTTI __nvoc_rtti_Hdacodec_Hdacodec = {
/*pClassDef=*/ &__nvoc_class_def_Hdacodec,
/*dtor=*/ (NVOC_DYNAMIC_DTOR) &__nvoc_dtor_Hdacodec,
/*offset=*/ 0,
};
static const struct NVOC_RTTI __nvoc_rtti_Hdacodec_Object = {
/*pClassDef=*/ &__nvoc_class_def_Object,
/*dtor=*/ &__nvoc_destructFromBase,
/*offset=*/ NV_OFFSETOF(Hdacodec, __nvoc_base_GpuResource.__nvoc_base_RmResource.__nvoc_base_RsResource.__nvoc_base_Object),
};
static const struct NVOC_RTTI __nvoc_rtti_Hdacodec_RsResource = {
/*pClassDef=*/ &__nvoc_class_def_RsResource,
/*dtor=*/ &__nvoc_destructFromBase,
/*offset=*/ NV_OFFSETOF(Hdacodec, __nvoc_base_GpuResource.__nvoc_base_RmResource.__nvoc_base_RsResource),
};
static const struct NVOC_RTTI __nvoc_rtti_Hdacodec_RmResourceCommon = {
/*pClassDef=*/ &__nvoc_class_def_RmResourceCommon,
/*dtor=*/ &__nvoc_destructFromBase,
/*offset=*/ NV_OFFSETOF(Hdacodec, __nvoc_base_GpuResource.__nvoc_base_RmResource.__nvoc_base_RmResourceCommon),
};
static const struct NVOC_RTTI __nvoc_rtti_Hdacodec_RmResource = {
/*pClassDef=*/ &__nvoc_class_def_RmResource,
/*dtor=*/ &__nvoc_destructFromBase,
/*offset=*/ NV_OFFSETOF(Hdacodec, __nvoc_base_GpuResource.__nvoc_base_RmResource),
};
static const struct NVOC_RTTI __nvoc_rtti_Hdacodec_GpuResource = {
/*pClassDef=*/ &__nvoc_class_def_GpuResource,
/*dtor=*/ &__nvoc_destructFromBase,
/*offset=*/ NV_OFFSETOF(Hdacodec, __nvoc_base_GpuResource),
};
static const struct NVOC_CASTINFO __nvoc_castinfo_Hdacodec = {
/*numRelatives=*/ 6,
/*relatives=*/ {
&__nvoc_rtti_Hdacodec_Hdacodec,
&__nvoc_rtti_Hdacodec_GpuResource,
&__nvoc_rtti_Hdacodec_RmResource,
&__nvoc_rtti_Hdacodec_RmResourceCommon,
&__nvoc_rtti_Hdacodec_RsResource,
&__nvoc_rtti_Hdacodec_Object,
},
};
const struct NVOC_CLASS_DEF __nvoc_class_def_Hdacodec =
{
/*classInfo=*/ {
/*size=*/ sizeof(Hdacodec),
/*classId=*/ classId(Hdacodec),
/*providerId=*/ &__nvoc_rtti_provider,
#if NV_PRINTF_STRINGS_ALLOWED
/*name=*/ "Hdacodec",
#endif
},
/*objCreatefn=*/ (NVOC_DYNAMIC_OBJ_CREATE) &__nvoc_objCreateDynamic_Hdacodec,
/*pCastInfo=*/ &__nvoc_castinfo_Hdacodec,
/*pExportInfo=*/ &__nvoc_export_info_Hdacodec
};
static NvBool __nvoc_thunk_GpuResource_hdacodecShareCallback(struct Hdacodec *pGpuResource, struct RsClient *pInvokingClient, struct RsResourceRef *pParentRef, RS_SHARE_POLICY *pSharePolicy) {
return gpuresShareCallback((struct GpuResource *)(((unsigned char *)pGpuResource) + __nvoc_rtti_Hdacodec_GpuResource.offset), pInvokingClient, pParentRef, pSharePolicy);
}
static NV_STATUS __nvoc_thunk_GpuResource_hdacodecControl(struct Hdacodec *pGpuResource, struct CALL_CONTEXT *pCallContext, struct RS_RES_CONTROL_PARAMS_INTERNAL *pParams) {
return gpuresControl((struct GpuResource *)(((unsigned char *)pGpuResource) + __nvoc_rtti_Hdacodec_GpuResource.offset), pCallContext, pParams);
}
static NV_STATUS __nvoc_thunk_GpuResource_hdacodecUnmap(struct Hdacodec *pGpuResource, struct CALL_CONTEXT *pCallContext, struct RsCpuMapping *pCpuMapping) {
return gpuresUnmap((struct GpuResource *)(((unsigned char *)pGpuResource) + __nvoc_rtti_Hdacodec_GpuResource.offset), pCallContext, pCpuMapping);
}
static NV_STATUS __nvoc_thunk_RmResource_hdacodecGetMemInterMapParams(struct Hdacodec *pRmResource, RMRES_MEM_INTER_MAP_PARAMS *pParams) {
return rmresGetMemInterMapParams((struct RmResource *)(((unsigned char *)pRmResource) + __nvoc_rtti_Hdacodec_RmResource.offset), pParams);
}
static NV_STATUS __nvoc_thunk_RmResource_hdacodecGetMemoryMappingDescriptor(struct Hdacodec *pRmResource, struct MEMORY_DESCRIPTOR **ppMemDesc) {
return rmresGetMemoryMappingDescriptor((struct RmResource *)(((unsigned char *)pRmResource) + __nvoc_rtti_Hdacodec_RmResource.offset), ppMemDesc);
}
static NV_STATUS __nvoc_thunk_GpuResource_hdacodecGetMapAddrSpace(struct Hdacodec *pGpuResource, struct CALL_CONTEXT *pCallContext, NvU32 mapFlags, NV_ADDRESS_SPACE *pAddrSpace) {
return gpuresGetMapAddrSpace((struct GpuResource *)(((unsigned char *)pGpuResource) + __nvoc_rtti_Hdacodec_GpuResource.offset), pCallContext, mapFlags, pAddrSpace);
}
static NvHandle __nvoc_thunk_GpuResource_hdacodecGetInternalObjectHandle(struct Hdacodec *pGpuResource) {
return gpuresGetInternalObjectHandle((struct GpuResource *)(((unsigned char *)pGpuResource) + __nvoc_rtti_Hdacodec_GpuResource.offset));
}
static NV_STATUS __nvoc_thunk_RsResource_hdacodecControlFilter(struct Hdacodec *pResource, struct CALL_CONTEXT *pCallContext, struct RS_RES_CONTROL_PARAMS_INTERNAL *pParams) {
return resControlFilter((struct RsResource *)(((unsigned char *)pResource) + __nvoc_rtti_Hdacodec_RsResource.offset), pCallContext, pParams);
}
static void __nvoc_thunk_RsResource_hdacodecAddAdditionalDependants(struct RsClient *pClient, struct Hdacodec *pResource, RsResourceRef *pReference) {
resAddAdditionalDependants(pClient, (struct RsResource *)(((unsigned char *)pResource) + __nvoc_rtti_Hdacodec_RsResource.offset), pReference);
}
static NvU32 __nvoc_thunk_RsResource_hdacodecGetRefCount(struct Hdacodec *pResource) {
return resGetRefCount((struct RsResource *)(((unsigned char *)pResource) + __nvoc_rtti_Hdacodec_RsResource.offset));
}
static NV_STATUS __nvoc_thunk_RmResource_hdacodecCheckMemInterUnmap(struct Hdacodec *pRmResource, NvBool bSubdeviceHandleProvided) {
return rmresCheckMemInterUnmap((struct RmResource *)(((unsigned char *)pRmResource) + __nvoc_rtti_Hdacodec_RmResource.offset), bSubdeviceHandleProvided);
}
static NV_STATUS __nvoc_thunk_RsResource_hdacodecMapTo(struct Hdacodec *pResource, RS_RES_MAP_TO_PARAMS *pParams) {
return resMapTo((struct RsResource *)(((unsigned char *)pResource) + __nvoc_rtti_Hdacodec_RsResource.offset), pParams);
}
static NV_STATUS __nvoc_thunk_RmResource_hdacodecControl_Prologue(struct Hdacodec *pResource, struct CALL_CONTEXT *pCallContext, struct RS_RES_CONTROL_PARAMS_INTERNAL *pParams) {
return rmresControl_Prologue((struct RmResource *)(((unsigned char *)pResource) + __nvoc_rtti_Hdacodec_RmResource.offset), pCallContext, pParams);
}
static NV_STATUS __nvoc_thunk_GpuResource_hdacodecGetRegBaseOffsetAndSize(struct Hdacodec *pGpuResource, struct OBJGPU *pGpu, NvU32 *pOffset, NvU32 *pSize) {
return gpuresGetRegBaseOffsetAndSize((struct GpuResource *)(((unsigned char *)pGpuResource) + __nvoc_rtti_Hdacodec_GpuResource.offset), pGpu, pOffset, pSize);
}
static NvBool __nvoc_thunk_RsResource_hdacodecCanCopy(struct Hdacodec *pResource) {
return resCanCopy((struct RsResource *)(((unsigned char *)pResource) + __nvoc_rtti_Hdacodec_RsResource.offset));
}
static NV_STATUS __nvoc_thunk_GpuResource_hdacodecInternalControlForward(struct Hdacodec *pGpuResource, NvU32 command, void *pParams, NvU32 size) {
return gpuresInternalControlForward((struct GpuResource *)(((unsigned char *)pGpuResource) + __nvoc_rtti_Hdacodec_GpuResource.offset), command, pParams, size);
}
static void __nvoc_thunk_RsResource_hdacodecPreDestruct(struct Hdacodec *pResource) {
resPreDestruct((struct RsResource *)(((unsigned char *)pResource) + __nvoc_rtti_Hdacodec_RsResource.offset));
}
static NV_STATUS __nvoc_thunk_RsResource_hdacodecUnmapFrom(struct Hdacodec *pResource, RS_RES_UNMAP_FROM_PARAMS *pParams) {
return resUnmapFrom((struct RsResource *)(((unsigned char *)pResource) + __nvoc_rtti_Hdacodec_RsResource.offset), pParams);
}
static void __nvoc_thunk_RmResource_hdacodecControl_Epilogue(struct Hdacodec *pResource, struct CALL_CONTEXT *pCallContext, struct RS_RES_CONTROL_PARAMS_INTERNAL *pParams) {
rmresControl_Epilogue((struct RmResource *)(((unsigned char *)pResource) + __nvoc_rtti_Hdacodec_RmResource.offset), pCallContext, pParams);
}
static NV_STATUS __nvoc_thunk_RsResource_hdacodecControlLookup(struct Hdacodec *pResource, struct RS_RES_CONTROL_PARAMS_INTERNAL *pParams, const struct NVOC_EXPORTED_METHOD_DEF **ppEntry) {
return resControlLookup((struct RsResource *)(((unsigned char *)pResource) + __nvoc_rtti_Hdacodec_RsResource.offset), pParams, ppEntry);
}
static NV_STATUS __nvoc_thunk_GpuResource_hdacodecMap(struct Hdacodec *pGpuResource, struct CALL_CONTEXT *pCallContext, struct RS_CPU_MAP_PARAMS *pParams, struct RsCpuMapping *pCpuMapping) {
return gpuresMap((struct GpuResource *)(((unsigned char *)pGpuResource) + __nvoc_rtti_Hdacodec_GpuResource.offset), pCallContext, pParams, pCpuMapping);
}
static NvBool __nvoc_thunk_RmResource_hdacodecAccessCallback(struct Hdacodec *pResource, struct RsClient *pInvokingClient, void *pAllocParams, RsAccessRight accessRight) {
return rmresAccessCallback((struct RmResource *)(((unsigned char *)pResource) + __nvoc_rtti_Hdacodec_RmResource.offset), pInvokingClient, pAllocParams, accessRight);
}
const struct NVOC_EXPORT_INFO __nvoc_export_info_Hdacodec =
{
/*numEntries=*/ 0,
/*pExportEntries=*/ 0
};
void __nvoc_dtor_GpuResource(GpuResource*);
void __nvoc_dtor_Hdacodec(Hdacodec *pThis) {
__nvoc_dtor_GpuResource(&pThis->__nvoc_base_GpuResource);
PORT_UNREFERENCED_VARIABLE(pThis);
}
void __nvoc_init_dataField_Hdacodec(Hdacodec *pThis) {
PORT_UNREFERENCED_VARIABLE(pThis);
}
NV_STATUS __nvoc_ctor_GpuResource(GpuResource* , struct CALL_CONTEXT *, struct RS_RES_ALLOC_PARAMS_INTERNAL *);
NV_STATUS __nvoc_ctor_Hdacodec(Hdacodec *pThis, struct CALL_CONTEXT * arg_pCallContext, struct RS_RES_ALLOC_PARAMS_INTERNAL * arg_pParams) {
NV_STATUS status = NV_OK;
status = __nvoc_ctor_GpuResource(&pThis->__nvoc_base_GpuResource, arg_pCallContext, arg_pParams);
if (status != NV_OK) goto __nvoc_ctor_Hdacodec_fail_GpuResource;
__nvoc_init_dataField_Hdacodec(pThis);
status = __nvoc_hdacodecConstruct(pThis, arg_pCallContext, arg_pParams);
if (status != NV_OK) goto __nvoc_ctor_Hdacodec_fail__init;
goto __nvoc_ctor_Hdacodec_exit; // Success
__nvoc_ctor_Hdacodec_fail__init:
__nvoc_dtor_GpuResource(&pThis->__nvoc_base_GpuResource);
__nvoc_ctor_Hdacodec_fail_GpuResource:
__nvoc_ctor_Hdacodec_exit:
return status;
}
static void __nvoc_init_funcTable_Hdacodec_1(Hdacodec *pThis) {
PORT_UNREFERENCED_VARIABLE(pThis);
pThis->__hdacodecShareCallback__ = &__nvoc_thunk_GpuResource_hdacodecShareCallback;
pThis->__hdacodecControl__ = &__nvoc_thunk_GpuResource_hdacodecControl;
pThis->__hdacodecUnmap__ = &__nvoc_thunk_GpuResource_hdacodecUnmap;
pThis->__hdacodecGetMemInterMapParams__ = &__nvoc_thunk_RmResource_hdacodecGetMemInterMapParams;
pThis->__hdacodecGetMemoryMappingDescriptor__ = &__nvoc_thunk_RmResource_hdacodecGetMemoryMappingDescriptor;
pThis->__hdacodecGetMapAddrSpace__ = &__nvoc_thunk_GpuResource_hdacodecGetMapAddrSpace;
pThis->__hdacodecGetInternalObjectHandle__ = &__nvoc_thunk_GpuResource_hdacodecGetInternalObjectHandle;
pThis->__hdacodecControlFilter__ = &__nvoc_thunk_RsResource_hdacodecControlFilter;
pThis->__hdacodecAddAdditionalDependants__ = &__nvoc_thunk_RsResource_hdacodecAddAdditionalDependants;
pThis->__hdacodecGetRefCount__ = &__nvoc_thunk_RsResource_hdacodecGetRefCount;
pThis->__hdacodecCheckMemInterUnmap__ = &__nvoc_thunk_RmResource_hdacodecCheckMemInterUnmap;
pThis->__hdacodecMapTo__ = &__nvoc_thunk_RsResource_hdacodecMapTo;
pThis->__hdacodecControl_Prologue__ = &__nvoc_thunk_RmResource_hdacodecControl_Prologue;
pThis->__hdacodecGetRegBaseOffsetAndSize__ = &__nvoc_thunk_GpuResource_hdacodecGetRegBaseOffsetAndSize;
pThis->__hdacodecCanCopy__ = &__nvoc_thunk_RsResource_hdacodecCanCopy;
pThis->__hdacodecInternalControlForward__ = &__nvoc_thunk_GpuResource_hdacodecInternalControlForward;
pThis->__hdacodecPreDestruct__ = &__nvoc_thunk_RsResource_hdacodecPreDestruct;
pThis->__hdacodecUnmapFrom__ = &__nvoc_thunk_RsResource_hdacodecUnmapFrom;
pThis->__hdacodecControl_Epilogue__ = &__nvoc_thunk_RmResource_hdacodecControl_Epilogue;
pThis->__hdacodecControlLookup__ = &__nvoc_thunk_RsResource_hdacodecControlLookup;
pThis->__hdacodecMap__ = &__nvoc_thunk_GpuResource_hdacodecMap;
pThis->__hdacodecAccessCallback__ = &__nvoc_thunk_RmResource_hdacodecAccessCallback;
}
void __nvoc_init_funcTable_Hdacodec(Hdacodec *pThis) {
__nvoc_init_funcTable_Hdacodec_1(pThis);
}
void __nvoc_init_GpuResource(GpuResource*);
void __nvoc_init_Hdacodec(Hdacodec *pThis) {
pThis->__nvoc_pbase_Hdacodec = pThis;
pThis->__nvoc_pbase_Object = &pThis->__nvoc_base_GpuResource.__nvoc_base_RmResource.__nvoc_base_RsResource.__nvoc_base_Object;
pThis->__nvoc_pbase_RsResource = &pThis->__nvoc_base_GpuResource.__nvoc_base_RmResource.__nvoc_base_RsResource;
pThis->__nvoc_pbase_RmResourceCommon = &pThis->__nvoc_base_GpuResource.__nvoc_base_RmResource.__nvoc_base_RmResourceCommon;
pThis->__nvoc_pbase_RmResource = &pThis->__nvoc_base_GpuResource.__nvoc_base_RmResource;
pThis->__nvoc_pbase_GpuResource = &pThis->__nvoc_base_GpuResource;
__nvoc_init_GpuResource(&pThis->__nvoc_base_GpuResource);
__nvoc_init_funcTable_Hdacodec(pThis);
}
NV_STATUS __nvoc_objCreate_Hdacodec(Hdacodec **ppThis, Dynamic *pParent, NvU32 createFlags, struct CALL_CONTEXT * arg_pCallContext, struct RS_RES_ALLOC_PARAMS_INTERNAL * arg_pParams) {
NV_STATUS status;
Object *pParentObj;
Hdacodec *pThis;
pThis = portMemAllocNonPaged(sizeof(Hdacodec));
if (pThis == NULL) return NV_ERR_NO_MEMORY;
portMemSet(pThis, 0, sizeof(Hdacodec));
__nvoc_initRtti(staticCast(pThis, Dynamic), &__nvoc_class_def_Hdacodec);
if (pParent != NULL && !(createFlags & NVOC_OBJ_CREATE_FLAGS_PARENT_HALSPEC_ONLY))
{
pParentObj = dynamicCast(pParent, Object);
objAddChild(pParentObj, &pThis->__nvoc_base_GpuResource.__nvoc_base_RmResource.__nvoc_base_RsResource.__nvoc_base_Object);
}
else
{
pThis->__nvoc_base_GpuResource.__nvoc_base_RmResource.__nvoc_base_RsResource.__nvoc_base_Object.pParent = NULL;
}
__nvoc_init_Hdacodec(pThis);
status = __nvoc_ctor_Hdacodec(pThis, arg_pCallContext, arg_pParams);
if (status != NV_OK) goto __nvoc_objCreate_Hdacodec_cleanup;
*ppThis = pThis;
return NV_OK;
__nvoc_objCreate_Hdacodec_cleanup:
// do not call destructors here since the constructor already called them
portMemFree(pThis);
return status;
}
NV_STATUS __nvoc_objCreateDynamic_Hdacodec(Hdacodec **ppThis, Dynamic *pParent, NvU32 createFlags, va_list args) {
NV_STATUS status;
struct CALL_CONTEXT * arg_pCallContext = va_arg(args, struct CALL_CONTEXT *);
struct RS_RES_ALLOC_PARAMS_INTERNAL * arg_pParams = va_arg(args, struct RS_RES_ALLOC_PARAMS_INTERNAL *);
status = __nvoc_objCreate_Hdacodec(ppThis, pParent, createFlags, arg_pCallContext, arg_pParams);
return status;
}

@@ -0,0 +1,229 @@
#ifndef _G_HDA_CODEC_API_NVOC_H_
#define _G_HDA_CODEC_API_NVOC_H_
#include "nvoc/runtime.h"
#ifdef __cplusplus
extern "C" {
#endif
/*
* SPDX-FileCopyrightText: Copyright (c) 2016-2021 NVIDIA CORPORATION & AFFILIATES. All rights reserved.
* SPDX-License-Identifier: MIT
*
* Permission is hereby granted, free of charge, to any person obtaining a
* copy of this software and associated documentation files (the "Software"),
* to deal in the Software without restriction, including without limitation
* the rights to use, copy, modify, merge, publish, distribute, sublicense,
* and/or sell copies of the Software, and to permit persons to whom the
* Software is furnished to do so, subject to the following conditions:
*
* The above copyright notice and this permission notice shall be included in
* all copies or substantial portions of the Software.
*
* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
* IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
* FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL
* THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
* LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
* FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER
* DEALINGS IN THE SOFTWARE.
*/
#include "g_hda_codec_api_nvoc.h"
#ifndef HDA_CODEC_API_H
#define HDA_CODEC_API_H
#include "resserv/resserv.h"
#include "nvoc/prelude.h"
#include "resserv/rs_resource.h"
#include "ctrl/ctrl90ec.h"
#include "gpu/gpu_resource.h"
#ifdef NVOC_HDA_CODEC_API_H_PRIVATE_ACCESS_ALLOWED
#define PRIVATE_FIELD(x) x
#else
#define PRIVATE_FIELD(x) NVOC_PRIVATE_FIELD(x)
#endif
struct Hdacodec {
const struct NVOC_RTTI *__nvoc_rtti;
struct GpuResource __nvoc_base_GpuResource;
struct Object *__nvoc_pbase_Object;
struct RsResource *__nvoc_pbase_RsResource;
struct RmResourceCommon *__nvoc_pbase_RmResourceCommon;
struct RmResource *__nvoc_pbase_RmResource;
struct GpuResource *__nvoc_pbase_GpuResource;
struct Hdacodec *__nvoc_pbase_Hdacodec;
NvBool (*__hdacodecShareCallback__)(struct Hdacodec *, struct RsClient *, struct RsResourceRef *, RS_SHARE_POLICY *);
NV_STATUS (*__hdacodecControl__)(struct Hdacodec *, struct CALL_CONTEXT *, struct RS_RES_CONTROL_PARAMS_INTERNAL *);
NV_STATUS (*__hdacodecUnmap__)(struct Hdacodec *, struct CALL_CONTEXT *, struct RsCpuMapping *);
NV_STATUS (*__hdacodecGetMemInterMapParams__)(struct Hdacodec *, RMRES_MEM_INTER_MAP_PARAMS *);
NV_STATUS (*__hdacodecGetMemoryMappingDescriptor__)(struct Hdacodec *, struct MEMORY_DESCRIPTOR **);
NV_STATUS (*__hdacodecGetMapAddrSpace__)(struct Hdacodec *, struct CALL_CONTEXT *, NvU32, NV_ADDRESS_SPACE *);
NvHandle (*__hdacodecGetInternalObjectHandle__)(struct Hdacodec *);
NV_STATUS (*__hdacodecControlFilter__)(struct Hdacodec *, struct CALL_CONTEXT *, struct RS_RES_CONTROL_PARAMS_INTERNAL *);
void (*__hdacodecAddAdditionalDependants__)(struct RsClient *, struct Hdacodec *, RsResourceRef *);
NvU32 (*__hdacodecGetRefCount__)(struct Hdacodec *);
NV_STATUS (*__hdacodecCheckMemInterUnmap__)(struct Hdacodec *, NvBool);
NV_STATUS (*__hdacodecMapTo__)(struct Hdacodec *, RS_RES_MAP_TO_PARAMS *);
NV_STATUS (*__hdacodecControl_Prologue__)(struct Hdacodec *, struct CALL_CONTEXT *, struct RS_RES_CONTROL_PARAMS_INTERNAL *);
NV_STATUS (*__hdacodecGetRegBaseOffsetAndSize__)(struct Hdacodec *, struct OBJGPU *, NvU32 *, NvU32 *);
NvBool (*__hdacodecCanCopy__)(struct Hdacodec *);
NV_STATUS (*__hdacodecInternalControlForward__)(struct Hdacodec *, NvU32, void *, NvU32);
void (*__hdacodecPreDestruct__)(struct Hdacodec *);
NV_STATUS (*__hdacodecUnmapFrom__)(struct Hdacodec *, RS_RES_UNMAP_FROM_PARAMS *);
void (*__hdacodecControl_Epilogue__)(struct Hdacodec *, struct CALL_CONTEXT *, struct RS_RES_CONTROL_PARAMS_INTERNAL *);
NV_STATUS (*__hdacodecControlLookup__)(struct Hdacodec *, struct RS_RES_CONTROL_PARAMS_INTERNAL *, const struct NVOC_EXPORTED_METHOD_DEF **);
NV_STATUS (*__hdacodecMap__)(struct Hdacodec *, struct CALL_CONTEXT *, struct RS_CPU_MAP_PARAMS *, struct RsCpuMapping *);
NvBool (*__hdacodecAccessCallback__)(struct Hdacodec *, struct RsClient *, void *, RsAccessRight);
};
#ifndef __NVOC_CLASS_Hdacodec_TYPEDEF__
#define __NVOC_CLASS_Hdacodec_TYPEDEF__
typedef struct Hdacodec Hdacodec;
#endif /* __NVOC_CLASS_Hdacodec_TYPEDEF__ */
#ifndef __nvoc_class_id_Hdacodec
#define __nvoc_class_id_Hdacodec 0xf59a20
#endif /* __nvoc_class_id_Hdacodec */
extern const struct NVOC_CLASS_DEF __nvoc_class_def_Hdacodec;
#define __staticCast_Hdacodec(pThis) \
((pThis)->__nvoc_pbase_Hdacodec)
#ifdef __nvoc_hda_codec_api_h_disabled
#define __dynamicCast_Hdacodec(pThis) ((Hdacodec*)NULL)
#else //__nvoc_hda_codec_api_h_disabled
#define __dynamicCast_Hdacodec(pThis) \
((Hdacodec*)__nvoc_dynamicCast(staticCast((pThis), Dynamic), classInfo(Hdacodec)))
#endif //__nvoc_hda_codec_api_h_disabled
NV_STATUS __nvoc_objCreateDynamic_Hdacodec(Hdacodec**, Dynamic*, NvU32, va_list);
NV_STATUS __nvoc_objCreate_Hdacodec(Hdacodec**, Dynamic*, NvU32, struct CALL_CONTEXT * arg_pCallContext, struct RS_RES_ALLOC_PARAMS_INTERNAL * arg_pParams);
#define __objCreate_Hdacodec(ppNewObj, pParent, createFlags, arg_pCallContext, arg_pParams) \
__nvoc_objCreate_Hdacodec((ppNewObj), staticCast((pParent), Dynamic), (createFlags), arg_pCallContext, arg_pParams)
#define hdacodecShareCallback(pGpuResource, pInvokingClient, pParentRef, pSharePolicy) hdacodecShareCallback_DISPATCH(pGpuResource, pInvokingClient, pParentRef, pSharePolicy)
#define hdacodecControl(pGpuResource, pCallContext, pParams) hdacodecControl_DISPATCH(pGpuResource, pCallContext, pParams)
#define hdacodecUnmap(pGpuResource, pCallContext, pCpuMapping) hdacodecUnmap_DISPATCH(pGpuResource, pCallContext, pCpuMapping)
#define hdacodecGetMemInterMapParams(pRmResource, pParams) hdacodecGetMemInterMapParams_DISPATCH(pRmResource, pParams)
#define hdacodecGetMemoryMappingDescriptor(pRmResource, ppMemDesc) hdacodecGetMemoryMappingDescriptor_DISPATCH(pRmResource, ppMemDesc)
#define hdacodecGetMapAddrSpace(pGpuResource, pCallContext, mapFlags, pAddrSpace) hdacodecGetMapAddrSpace_DISPATCH(pGpuResource, pCallContext, mapFlags, pAddrSpace)
#define hdacodecGetInternalObjectHandle(pGpuResource) hdacodecGetInternalObjectHandle_DISPATCH(pGpuResource)
#define hdacodecControlFilter(pResource, pCallContext, pParams) hdacodecControlFilter_DISPATCH(pResource, pCallContext, pParams)
#define hdacodecAddAdditionalDependants(pClient, pResource, pReference) hdacodecAddAdditionalDependants_DISPATCH(pClient, pResource, pReference)
#define hdacodecGetRefCount(pResource) hdacodecGetRefCount_DISPATCH(pResource)
#define hdacodecCheckMemInterUnmap(pRmResource, bSubdeviceHandleProvided) hdacodecCheckMemInterUnmap_DISPATCH(pRmResource, bSubdeviceHandleProvided)
#define hdacodecMapTo(pResource, pParams) hdacodecMapTo_DISPATCH(pResource, pParams)
#define hdacodecControl_Prologue(pResource, pCallContext, pParams) hdacodecControl_Prologue_DISPATCH(pResource, pCallContext, pParams)
#define hdacodecGetRegBaseOffsetAndSize(pGpuResource, pGpu, pOffset, pSize) hdacodecGetRegBaseOffsetAndSize_DISPATCH(pGpuResource, pGpu, pOffset, pSize)
#define hdacodecCanCopy(pResource) hdacodecCanCopy_DISPATCH(pResource)
#define hdacodecInternalControlForward(pGpuResource, command, pParams, size) hdacodecInternalControlForward_DISPATCH(pGpuResource, command, pParams, size)
#define hdacodecPreDestruct(pResource) hdacodecPreDestruct_DISPATCH(pResource)
#define hdacodecUnmapFrom(pResource, pParams) hdacodecUnmapFrom_DISPATCH(pResource, pParams)
#define hdacodecControl_Epilogue(pResource, pCallContext, pParams) hdacodecControl_Epilogue_DISPATCH(pResource, pCallContext, pParams)
#define hdacodecControlLookup(pResource, pParams, ppEntry) hdacodecControlLookup_DISPATCH(pResource, pParams, ppEntry)
#define hdacodecMap(pGpuResource, pCallContext, pParams, pCpuMapping) hdacodecMap_DISPATCH(pGpuResource, pCallContext, pParams, pCpuMapping)
#define hdacodecAccessCallback(pResource, pInvokingClient, pAllocParams, accessRight) hdacodecAccessCallback_DISPATCH(pResource, pInvokingClient, pAllocParams, accessRight)
static inline NvBool hdacodecShareCallback_DISPATCH(struct Hdacodec *pGpuResource, struct RsClient *pInvokingClient, struct RsResourceRef *pParentRef, RS_SHARE_POLICY *pSharePolicy) {
return pGpuResource->__hdacodecShareCallback__(pGpuResource, pInvokingClient, pParentRef, pSharePolicy);
}
static inline NV_STATUS hdacodecControl_DISPATCH(struct Hdacodec *pGpuResource, struct CALL_CONTEXT *pCallContext, struct RS_RES_CONTROL_PARAMS_INTERNAL *pParams) {
return pGpuResource->__hdacodecControl__(pGpuResource, pCallContext, pParams);
}
static inline NV_STATUS hdacodecUnmap_DISPATCH(struct Hdacodec *pGpuResource, struct CALL_CONTEXT *pCallContext, struct RsCpuMapping *pCpuMapping) {
return pGpuResource->__hdacodecUnmap__(pGpuResource, pCallContext, pCpuMapping);
}
static inline NV_STATUS hdacodecGetMemInterMapParams_DISPATCH(struct Hdacodec *pRmResource, RMRES_MEM_INTER_MAP_PARAMS *pParams) {
return pRmResource->__hdacodecGetMemInterMapParams__(pRmResource, pParams);
}
static inline NV_STATUS hdacodecGetMemoryMappingDescriptor_DISPATCH(struct Hdacodec *pRmResource, struct MEMORY_DESCRIPTOR **ppMemDesc) {
return pRmResource->__hdacodecGetMemoryMappingDescriptor__(pRmResource, ppMemDesc);
}
static inline NV_STATUS hdacodecGetMapAddrSpace_DISPATCH(struct Hdacodec *pGpuResource, struct CALL_CONTEXT *pCallContext, NvU32 mapFlags, NV_ADDRESS_SPACE *pAddrSpace) {
return pGpuResource->__hdacodecGetMapAddrSpace__(pGpuResource, pCallContext, mapFlags, pAddrSpace);
}
static inline NvHandle hdacodecGetInternalObjectHandle_DISPATCH(struct Hdacodec *pGpuResource) {
return pGpuResource->__hdacodecGetInternalObjectHandle__(pGpuResource);
}
static inline NV_STATUS hdacodecControlFilter_DISPATCH(struct Hdacodec *pResource, struct CALL_CONTEXT *pCallContext, struct RS_RES_CONTROL_PARAMS_INTERNAL *pParams) {
return pResource->__hdacodecControlFilter__(pResource, pCallContext, pParams);
}
static inline void hdacodecAddAdditionalDependants_DISPATCH(struct RsClient *pClient, struct Hdacodec *pResource, RsResourceRef *pReference) {
pResource->__hdacodecAddAdditionalDependants__(pClient, pResource, pReference);
}
static inline NvU32 hdacodecGetRefCount_DISPATCH(struct Hdacodec *pResource) {
return pResource->__hdacodecGetRefCount__(pResource);
}
static inline NV_STATUS hdacodecCheckMemInterUnmap_DISPATCH(struct Hdacodec *pRmResource, NvBool bSubdeviceHandleProvided) {
return pRmResource->__hdacodecCheckMemInterUnmap__(pRmResource, bSubdeviceHandleProvided);
}
static inline NV_STATUS hdacodecMapTo_DISPATCH(struct Hdacodec *pResource, RS_RES_MAP_TO_PARAMS *pParams) {
return pResource->__hdacodecMapTo__(pResource, pParams);
}
static inline NV_STATUS hdacodecControl_Prologue_DISPATCH(struct Hdacodec *pResource, struct CALL_CONTEXT *pCallContext, struct RS_RES_CONTROL_PARAMS_INTERNAL *pParams) {
return pResource->__hdacodecControl_Prologue__(pResource, pCallContext, pParams);
}
static inline NV_STATUS hdacodecGetRegBaseOffsetAndSize_DISPATCH(struct Hdacodec *pGpuResource, struct OBJGPU *pGpu, NvU32 *pOffset, NvU32 *pSize) {
return pGpuResource->__hdacodecGetRegBaseOffsetAndSize__(pGpuResource, pGpu, pOffset, pSize);
}
static inline NvBool hdacodecCanCopy_DISPATCH(struct Hdacodec *pResource) {
return pResource->__hdacodecCanCopy__(pResource);
}
static inline NV_STATUS hdacodecInternalControlForward_DISPATCH(struct Hdacodec *pGpuResource, NvU32 command, void *pParams, NvU32 size) {
return pGpuResource->__hdacodecInternalControlForward__(pGpuResource, command, pParams, size);
}
static inline void hdacodecPreDestruct_DISPATCH(struct Hdacodec *pResource) {
pResource->__hdacodecPreDestruct__(pResource);
}
static inline NV_STATUS hdacodecUnmapFrom_DISPATCH(struct Hdacodec *pResource, RS_RES_UNMAP_FROM_PARAMS *pParams) {
return pResource->__hdacodecUnmapFrom__(pResource, pParams);
}
static inline void hdacodecControl_Epilogue_DISPATCH(struct Hdacodec *pResource, struct CALL_CONTEXT *pCallContext, struct RS_RES_CONTROL_PARAMS_INTERNAL *pParams) {
pResource->__hdacodecControl_Epilogue__(pResource, pCallContext, pParams);
}
static inline NV_STATUS hdacodecControlLookup_DISPATCH(struct Hdacodec *pResource, struct RS_RES_CONTROL_PARAMS_INTERNAL *pParams, const struct NVOC_EXPORTED_METHOD_DEF **ppEntry) {
return pResource->__hdacodecControlLookup__(pResource, pParams, ppEntry);
}
static inline NV_STATUS hdacodecMap_DISPATCH(struct Hdacodec *pGpuResource, struct CALL_CONTEXT *pCallContext, struct RS_CPU_MAP_PARAMS *pParams, struct RsCpuMapping *pCpuMapping) {
return pGpuResource->__hdacodecMap__(pGpuResource, pCallContext, pParams, pCpuMapping);
}
static inline NvBool hdacodecAccessCallback_DISPATCH(struct Hdacodec *pResource, struct RsClient *pInvokingClient, void *pAllocParams, RsAccessRight accessRight) {
return pResource->__hdacodecAccessCallback__(pResource, pInvokingClient, pAllocParams, accessRight);
}
NV_STATUS hdacodecConstruct_IMPL(struct Hdacodec *arg_pHdacodecApi, struct CALL_CONTEXT *arg_pCallContext, struct RS_RES_ALLOC_PARAMS_INTERNAL *arg_pParams);
#define __nvoc_hdacodecConstruct(arg_pHdacodecApi, arg_pCallContext, arg_pParams) hdacodecConstruct_IMPL(arg_pHdacodecApi, arg_pCallContext, arg_pParams)
#undef PRIVATE_FIELD
#endif
#ifdef __cplusplus
} // extern "C"
#endif
#endif // _G_HDA_CODEC_API_NVOC_H_

@@ -0,0 +1,151 @@
#ifndef _G_HYPERVISOR_NVOC_H_
#define _G_HYPERVISOR_NVOC_H_
#include "nvoc/runtime.h"
#ifdef __cplusplus
extern "C" {
#endif
/*
* SPDX-FileCopyrightText: Copyright (c) 2014-2021 NVIDIA CORPORATION & AFFILIATES. All rights reserved.
* SPDX-License-Identifier: MIT
*
* Permission is hereby granted, free of charge, to any person obtaining a
* copy of this software and associated documentation files (the "Software"),
* to deal in the Software without restriction, including without limitation
* the rights to use, copy, modify, merge, publish, distribute, sublicense,
* and/or sell copies of the Software, and to permit persons to whom the
* Software is furnished to do so, subject to the following conditions:
*
* The above copyright notice and this permission notice shall be included in
* all copies or substantial portions of the Software.
*
* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
* IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
* FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL
* THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
* LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
* FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER
* DEALINGS IN THE SOFTWARE.
*/
#include "g_hypervisor_nvoc.h"
#ifndef HYPERVISOR_H
#define HYPERVISOR_H
/**************** Resource Manager Defines and Structures ******************\
*                                                                           *
* Module: hypervisor.h                                                      *
*       Defines and structures used for the hypervisor object.              *
\***************************************************************************/
#include "core/core.h"
#include "nvoc/utility.h"
#include "nv-hypervisor.h"
#include "mem_mgr/mem.h"
/* ------------------------ Forward Declarations ---------------------------- */
struct OBJOS;
#ifndef __NVOC_CLASS_OBJOS_TYPEDEF__
#define __NVOC_CLASS_OBJOS_TYPEDEF__
typedef struct OBJOS OBJOS;
#endif /* __NVOC_CLASS_OBJOS_TYPEDEF__ */
#ifndef __nvoc_class_id_OBJOS
#define __nvoc_class_id_OBJOS 0xaa1d70
#endif /* __nvoc_class_id_OBJOS */
typedef struct OBJHYPERVISOR *POBJHYPERVISOR;
#ifndef __NVOC_CLASS_OBJHYPERVISOR_TYPEDEF__
#define __NVOC_CLASS_OBJHYPERVISOR_TYPEDEF__
typedef struct OBJHYPERVISOR OBJHYPERVISOR;
#endif /* __NVOC_CLASS_OBJHYPERVISOR_TYPEDEF__ */
#ifndef __nvoc_class_id_OBJHYPERVISOR
#define __nvoc_class_id_OBJHYPERVISOR 0x33c1ba
#endif /* __nvoc_class_id_OBJHYPERVISOR */
typedef struct HOST_VGPU_DEVICE HOST_VGPU_DEVICE;
#ifdef NVOC_HYPERVISOR_H_PRIVATE_ACCESS_ALLOWED
#define PRIVATE_FIELD(x) x
#else
#define PRIVATE_FIELD(x) NVOC_PRIVATE_FIELD(x)
#endif
struct OBJHYPERVISOR {
const struct NVOC_RTTI *__nvoc_rtti;
struct Object __nvoc_base_Object;
struct Object *__nvoc_pbase_Object;
struct OBJHYPERVISOR *__nvoc_pbase_OBJHYPERVISOR;
NvBool bDetected;
NvBool bIsHVMGuest;
HYPERVISOR_TYPE type;
NvBool bIsHypervHost;
NvBool bIsHypervVgpuSupported;
};
#ifndef __NVOC_CLASS_OBJHYPERVISOR_TYPEDEF__
#define __NVOC_CLASS_OBJHYPERVISOR_TYPEDEF__
typedef struct OBJHYPERVISOR OBJHYPERVISOR;
#endif /* __NVOC_CLASS_OBJHYPERVISOR_TYPEDEF__ */
#ifndef __nvoc_class_id_OBJHYPERVISOR
#define __nvoc_class_id_OBJHYPERVISOR 0x33c1ba
#endif /* __nvoc_class_id_OBJHYPERVISOR */
extern const struct NVOC_CLASS_DEF __nvoc_class_def_OBJHYPERVISOR;
#define __staticCast_OBJHYPERVISOR(pThis) \
((pThis)->__nvoc_pbase_OBJHYPERVISOR)
#ifdef __nvoc_hypervisor_h_disabled
#define __dynamicCast_OBJHYPERVISOR(pThis) ((OBJHYPERVISOR*)NULL)
#else //__nvoc_hypervisor_h_disabled
#define __dynamicCast_OBJHYPERVISOR(pThis) \
((OBJHYPERVISOR*)__nvoc_dynamicCast(staticCast((pThis), Dynamic), classInfo(OBJHYPERVISOR)))
#endif //__nvoc_hypervisor_h_disabled
NV_STATUS __nvoc_objCreateDynamic_OBJHYPERVISOR(OBJHYPERVISOR**, Dynamic*, NvU32, va_list);
NV_STATUS __nvoc_objCreate_OBJHYPERVISOR(OBJHYPERVISOR**, Dynamic*, NvU32);
#define __objCreate_OBJHYPERVISOR(ppNewObj, pParent, createFlags) \
__nvoc_objCreate_OBJHYPERVISOR((ppNewObj), staticCast((pParent), Dynamic), (createFlags))
static inline NvBool hypervisorIsVgxHyper_491d52(void) {
return ((NvBool)(0 != 0));
}
#define hypervisorIsVgxHyper() hypervisorIsVgxHyper_491d52()
#define hypervisorIsVgxHyper_HAL() hypervisorIsVgxHyper()
static inline NvBool hypervisorCheckForAdminAccess(NvHandle hClient, NvU32 rmCtrlId) {
return ((NvBool)(0 != 0));
}
static inline NvBool hypervisorCheckForObjectAccess(NvHandle hClient) {
return ((NvBool)(0 != 0));
}
static inline NvBool hypervisorCheckForGspOffloadAccess(NvU32 rmCtrlId) {
return ((NvBool)(0 != 0));
}
static inline NvBool hypervisorIsType(HYPERVISOR_TYPE hyperType) {
return ((NvBool)(0 != 0));
}
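//
// All of the inline stubs above return ((NvBool)(0 != 0)), i.e. a constant
// NV_FALSE, so in this kernel-open build the hypervisor queries presumably
// compile away entirely. A hypothetical caller such as
//
//     if (hypervisorIsVgxHyper())      // always NV_FALSE here
//         return NV_ERR_NOT_SUPPORTED;
//
// therefore reduces to dead code that the compiler can drop.
//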
#undef PRIVATE_FIELD
#endif // HYPERVISOR_H
#ifdef __cplusplus
} // extern "C"
#endif
#endif // _G_HYPERVISOR_NVOC_H_

@@ -0,0 +1,235 @@
#define NVOC_IO_VASPACE_H_PRIVATE_ACCESS_ALLOWED
#include "nvoc/runtime.h"
#include "nvoc/rtti.h"
#include "nvtypes.h"
#include "nvport/nvport.h"
#include "nvport/inline/util_valist.h"
#include "utils/nvassert.h"
#include "g_io_vaspace_nvoc.h"
#ifdef DEBUG
char __nvoc_class_id_uniqueness_check_0x28ed9c = 1;
#endif
extern const struct NVOC_CLASS_DEF __nvoc_class_def_OBJIOVASPACE;
extern const struct NVOC_CLASS_DEF __nvoc_class_def_Object;
extern const struct NVOC_CLASS_DEF __nvoc_class_def_OBJVASPACE;
void __nvoc_init_OBJIOVASPACE(OBJIOVASPACE*);
void __nvoc_init_funcTable_OBJIOVASPACE(OBJIOVASPACE*);
NV_STATUS __nvoc_ctor_OBJIOVASPACE(OBJIOVASPACE*);
void __nvoc_init_dataField_OBJIOVASPACE(OBJIOVASPACE*);
void __nvoc_dtor_OBJIOVASPACE(OBJIOVASPACE*);
extern const struct NVOC_EXPORT_INFO __nvoc_export_info_OBJIOVASPACE;
static const struct NVOC_RTTI __nvoc_rtti_OBJIOVASPACE_OBJIOVASPACE = {
/*pClassDef=*/ &__nvoc_class_def_OBJIOVASPACE,
/*dtor=*/ (NVOC_DYNAMIC_DTOR) &__nvoc_dtor_OBJIOVASPACE,
/*offset=*/ 0,
};
static const struct NVOC_RTTI __nvoc_rtti_OBJIOVASPACE_Object = {
/*pClassDef=*/ &__nvoc_class_def_Object,
/*dtor=*/ &__nvoc_destructFromBase,
/*offset=*/ NV_OFFSETOF(OBJIOVASPACE, __nvoc_base_OBJVASPACE.__nvoc_base_Object),
};
static const struct NVOC_RTTI __nvoc_rtti_OBJIOVASPACE_OBJVASPACE = {
/*pClassDef=*/ &__nvoc_class_def_OBJVASPACE,
/*dtor=*/ &__nvoc_destructFromBase,
/*offset=*/ NV_OFFSETOF(OBJIOVASPACE, __nvoc_base_OBJVASPACE),
};
static const struct NVOC_CASTINFO __nvoc_castinfo_OBJIOVASPACE = {
/*numRelatives=*/ 3,
/*relatives=*/ {
&__nvoc_rtti_OBJIOVASPACE_OBJIOVASPACE,
&__nvoc_rtti_OBJIOVASPACE_OBJVASPACE,
&__nvoc_rtti_OBJIOVASPACE_Object,
},
};
const struct NVOC_CLASS_DEF __nvoc_class_def_OBJIOVASPACE =
{
/*classInfo=*/ {
/*size=*/ sizeof(OBJIOVASPACE),
/*classId=*/ classId(OBJIOVASPACE),
/*providerId=*/ &__nvoc_rtti_provider,
#if NV_PRINTF_STRINGS_ALLOWED
/*name=*/ "OBJIOVASPACE",
#endif
},
/*objCreatefn=*/ (NVOC_DYNAMIC_OBJ_CREATE) &__nvoc_objCreateDynamic_OBJIOVASPACE,
/*pCastInfo=*/ &__nvoc_castinfo_OBJIOVASPACE,
/*pExportInfo=*/ &__nvoc_export_info_OBJIOVASPACE
};
static NV_STATUS __nvoc_thunk_OBJIOVASPACE_vaspaceConstruct_(struct OBJVASPACE *pVAS, NvU32 classId, NvU32 vaspaceId, NvU64 vaStart, NvU64 vaLimit, NvU64 vaStartInternal, NvU64 vaLimitInternal, NvU32 flags) {
return iovaspaceConstruct_((struct OBJIOVASPACE *)(((unsigned char *)pVAS) - __nvoc_rtti_OBJIOVASPACE_OBJVASPACE.offset), classId, vaspaceId, vaStart, vaLimit, vaStartInternal, vaLimitInternal, flags);
}
static NV_STATUS __nvoc_thunk_OBJIOVASPACE_vaspaceAlloc(struct OBJVASPACE *pVAS, NvU64 size, NvU64 align, NvU64 rangeLo, NvU64 rangeHi, NvU64 pageSizeLockMask, VAS_ALLOC_FLAGS flags, NvU64 *pAddr) {
return iovaspaceAlloc((struct OBJIOVASPACE *)(((unsigned char *)pVAS) - __nvoc_rtti_OBJIOVASPACE_OBJVASPACE.offset), size, align, rangeLo, rangeHi, pageSizeLockMask, flags, pAddr);
}
static NV_STATUS __nvoc_thunk_OBJIOVASPACE_vaspaceFree(struct OBJVASPACE *pVAS, NvU64 vAddr) {
return iovaspaceFree((struct OBJIOVASPACE *)(((unsigned char *)pVAS) - __nvoc_rtti_OBJIOVASPACE_OBJVASPACE.offset), vAddr);
}
static NV_STATUS __nvoc_thunk_OBJIOVASPACE_vaspaceApplyDefaultAlignment(struct OBJVASPACE *pVAS, const FB_ALLOC_INFO *pAllocInfo, NvU64 *pAlign, NvU64 *pSize, NvU64 *pPageSizeLockMask) {
return iovaspaceApplyDefaultAlignment((struct OBJIOVASPACE *)(((unsigned char *)pVAS) - __nvoc_rtti_OBJIOVASPACE_OBJVASPACE.offset), pAllocInfo, pAlign, pSize, pPageSizeLockMask);
}
static NV_STATUS __nvoc_thunk_OBJIOVASPACE_vaspaceIncAllocRefCnt(struct OBJVASPACE *pVAS, NvU64 vAddr) {
return iovaspaceIncAllocRefCnt((struct OBJIOVASPACE *)(((unsigned char *)pVAS) - __nvoc_rtti_OBJIOVASPACE_OBJVASPACE.offset), vAddr);
}
static NvU64 __nvoc_thunk_OBJIOVASPACE_vaspaceGetVaStart(struct OBJVASPACE *pVAS) {
return iovaspaceGetVaStart((struct OBJIOVASPACE *)(((unsigned char *)pVAS) - __nvoc_rtti_OBJIOVASPACE_OBJVASPACE.offset));
}
static NvU64 __nvoc_thunk_OBJIOVASPACE_vaspaceGetVaLimit(struct OBJVASPACE *pVAS) {
return iovaspaceGetVaLimit((struct OBJIOVASPACE *)(((unsigned char *)pVAS) - __nvoc_rtti_OBJIOVASPACE_OBJVASPACE.offset));
}
static NV_STATUS __nvoc_thunk_OBJIOVASPACE_vaspaceGetVasInfo(struct OBJVASPACE *pVAS, NV0080_CTRL_DMA_ADV_SCHED_GET_VA_CAPS_PARAMS *pParams) {
return iovaspaceGetVasInfo((struct OBJIOVASPACE *)(((unsigned char *)pVAS) - __nvoc_rtti_OBJIOVASPACE_OBJVASPACE.offset), pParams);
}
static NvBool __nvoc_thunk_OBJVASPACE_iovaspaceIsInternalVaRestricted(struct OBJIOVASPACE *pVAS) {
return vaspaceIsInternalVaRestricted((struct OBJVASPACE *)(((unsigned char *)pVAS) + __nvoc_rtti_OBJIOVASPACE_OBJVASPACE.offset));
}
static NvU32 __nvoc_thunk_OBJVASPACE_iovaspaceGetFlags(struct OBJIOVASPACE *pVAS) {
return vaspaceGetFlags((struct OBJVASPACE *)(((unsigned char *)pVAS) + __nvoc_rtti_OBJIOVASPACE_OBJVASPACE.offset));
}
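//
// The thunks above implement NVOC virtual dispatch across the class
// hierarchy: a call arriving on a base-class OBJVASPACE pointer is converted
// to the derived OBJIOVASPACE pointer by subtracting the base offset recorded
// in the RTTI table, while calls forwarded from the derived class to a
// base-class default add that same offset. The pointer arithmetic, assuming
// pVAS points at the embedded OBJVASPACE:
//
//     OBJIOVASPACE *pIOVAS = (OBJIOVASPACE *)((unsigned char *)pVAS -
//         __nvoc_rtti_OBJIOVASPACE_OBJVASPACE.offset);
//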
const struct NVOC_EXPORT_INFO __nvoc_export_info_OBJIOVASPACE =
{
/*numEntries=*/ 0,
/*pExportEntries=*/ 0
};
void __nvoc_dtor_OBJVASPACE(OBJVASPACE*);
void __nvoc_dtor_OBJIOVASPACE(OBJIOVASPACE *pThis) {
__nvoc_iovaspaceDestruct(pThis);
__nvoc_dtor_OBJVASPACE(&pThis->__nvoc_base_OBJVASPACE);
PORT_UNREFERENCED_VARIABLE(pThis);
}
void __nvoc_init_dataField_OBJIOVASPACE(OBJIOVASPACE *pThis) {
PORT_UNREFERENCED_VARIABLE(pThis);
}
NV_STATUS __nvoc_ctor_OBJVASPACE(OBJVASPACE* );
NV_STATUS __nvoc_ctor_OBJIOVASPACE(OBJIOVASPACE *pThis) {
NV_STATUS status = NV_OK;
status = __nvoc_ctor_OBJVASPACE(&pThis->__nvoc_base_OBJVASPACE);
if (status != NV_OK) goto __nvoc_ctor_OBJIOVASPACE_fail_OBJVASPACE;
__nvoc_init_dataField_OBJIOVASPACE(pThis);
goto __nvoc_ctor_OBJIOVASPACE_exit; // Success
__nvoc_ctor_OBJIOVASPACE_fail_OBJVASPACE:
__nvoc_ctor_OBJIOVASPACE_exit:
return status;
}
static void __nvoc_init_funcTable_OBJIOVASPACE_1(OBJIOVASPACE *pThis) {
PORT_UNREFERENCED_VARIABLE(pThis);
pThis->__iovaspaceConstruct___ = &iovaspaceConstruct__IMPL;
pThis->__iovaspaceAlloc__ = &iovaspaceAlloc_IMPL;
pThis->__iovaspaceFree__ = &iovaspaceFree_IMPL;
pThis->__iovaspaceApplyDefaultAlignment__ = &iovaspaceApplyDefaultAlignment_IMPL;
pThis->__iovaspaceIncAllocRefCnt__ = &iovaspaceIncAllocRefCnt_IMPL;
pThis->__iovaspaceGetVaStart__ = &iovaspaceGetVaStart_IMPL;
pThis->__iovaspaceGetVaLimit__ = &iovaspaceGetVaLimit_IMPL;
pThis->__iovaspaceGetVasInfo__ = &iovaspaceGetVasInfo_IMPL;
pThis->__nvoc_base_OBJVASPACE.__vaspaceConstruct___ = &__nvoc_thunk_OBJIOVASPACE_vaspaceConstruct_;
pThis->__nvoc_base_OBJVASPACE.__vaspaceAlloc__ = &__nvoc_thunk_OBJIOVASPACE_vaspaceAlloc;
pThis->__nvoc_base_OBJVASPACE.__vaspaceFree__ = &__nvoc_thunk_OBJIOVASPACE_vaspaceFree;
pThis->__nvoc_base_OBJVASPACE.__vaspaceApplyDefaultAlignment__ = &__nvoc_thunk_OBJIOVASPACE_vaspaceApplyDefaultAlignment;
pThis->__nvoc_base_OBJVASPACE.__vaspaceIncAllocRefCnt__ = &__nvoc_thunk_OBJIOVASPACE_vaspaceIncAllocRefCnt;
pThis->__nvoc_base_OBJVASPACE.__vaspaceGetVaStart__ = &__nvoc_thunk_OBJIOVASPACE_vaspaceGetVaStart;
pThis->__nvoc_base_OBJVASPACE.__vaspaceGetVaLimit__ = &__nvoc_thunk_OBJIOVASPACE_vaspaceGetVaLimit;
pThis->__nvoc_base_OBJVASPACE.__vaspaceGetVasInfo__ = &__nvoc_thunk_OBJIOVASPACE_vaspaceGetVasInfo;
pThis->__iovaspaceIsInternalVaRestricted__ = &__nvoc_thunk_OBJVASPACE_iovaspaceIsInternalVaRestricted;
pThis->__iovaspaceGetFlags__ = &__nvoc_thunk_OBJVASPACE_iovaspaceGetFlags;
}
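//
// Note that the wiring above runs in both directions: the OBJIOVASPACE
// implementations are installed into the base OBJVASPACE vtable through the
// __nvoc_thunk_OBJIOVASPACE_* wrappers (so callers holding a base-class
// pointer reach the derived code), while the base-class defaults
// vaspaceIsInternalVaRestricted and vaspaceGetFlags are surfaced back into
// the derived vtable through the __nvoc_thunk_OBJVASPACE_* wrappers.
//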
void __nvoc_init_funcTable_OBJIOVASPACE(OBJIOVASPACE *pThis) {
__nvoc_init_funcTable_OBJIOVASPACE_1(pThis);
}
void __nvoc_init_OBJVASPACE(OBJVASPACE*);
void __nvoc_init_OBJIOVASPACE(OBJIOVASPACE *pThis) {
pThis->__nvoc_pbase_OBJIOVASPACE = pThis;
pThis->__nvoc_pbase_Object = &pThis->__nvoc_base_OBJVASPACE.__nvoc_base_Object;
pThis->__nvoc_pbase_OBJVASPACE = &pThis->__nvoc_base_OBJVASPACE;
__nvoc_init_OBJVASPACE(&pThis->__nvoc_base_OBJVASPACE);
__nvoc_init_funcTable_OBJIOVASPACE(pThis);
}
NV_STATUS __nvoc_objCreate_OBJIOVASPACE(OBJIOVASPACE **ppThis, Dynamic *pParent, NvU32 createFlags) {
NV_STATUS status;
Object *pParentObj;
OBJIOVASPACE *pThis;
pThis = portMemAllocNonPaged(sizeof(OBJIOVASPACE));
if (pThis == NULL) return NV_ERR_NO_MEMORY;
portMemSet(pThis, 0, sizeof(OBJIOVASPACE));
__nvoc_initRtti(staticCast(pThis, Dynamic), &__nvoc_class_def_OBJIOVASPACE);
if (pParent != NULL && !(createFlags & NVOC_OBJ_CREATE_FLAGS_PARENT_HALSPEC_ONLY))
{
pParentObj = dynamicCast(pParent, Object);
objAddChild(pParentObj, &pThis->__nvoc_base_OBJVASPACE.__nvoc_base_Object);
}
else
{
pThis->__nvoc_base_OBJVASPACE.__nvoc_base_Object.pParent = NULL;
}
__nvoc_init_OBJIOVASPACE(pThis);
status = __nvoc_ctor_OBJIOVASPACE(pThis);
if (status != NV_OK) goto __nvoc_objCreate_OBJIOVASPACE_cleanup;
*ppThis = pThis;
return NV_OK;
__nvoc_objCreate_OBJIOVASPACE_cleanup:
// do not call destructors here since the constructor already called them
portMemFree(pThis);
return status;
}
NV_STATUS __nvoc_objCreateDynamic_OBJIOVASPACE(OBJIOVASPACE **ppThis, Dynamic *pParent, NvU32 createFlags, va_list args) {
NV_STATUS status;
status = __nvoc_objCreate_OBJIOVASPACE(ppThis, pParent, createFlags);
return status;
}

@@ -0,0 +1,303 @@
#ifndef _G_IO_VASPACE_NVOC_H_
#define _G_IO_VASPACE_NVOC_H_
#include "nvoc/runtime.h"
#ifdef __cplusplus
extern "C" {
#endif
/*
* SPDX-FileCopyrightText: Copyright (c) 2013-2021 NVIDIA CORPORATION & AFFILIATES. All rights reserved.
* SPDX-License-Identifier: MIT
*
* Permission is hereby granted, free of charge, to any person obtaining a
* copy of this software and associated documentation files (the "Software"),
* to deal in the Software without restriction, including without limitation
* the rights to use, copy, modify, merge, publish, distribute, sublicense,
* and/or sell copies of the Software, and to permit persons to whom the
* Software is furnished to do so, subject to the following conditions:
*
* The above copyright notice and this permission notice shall be included in
* all copies or substantial portions of the Software.
*
* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
* IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
* FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL
* THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
* LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
* FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER
* DEALINGS IN THE SOFTWARE.
*/
#include "g_io_vaspace_nvoc.h"
#ifndef _IOVASPACE_H_
#define _IOVASPACE_H_
/**************** Resource Manager Defines and Structures ******************\
*                                                                           *
* Module: IOVASPACE.H                                                       *
*       Defines and structures used for IOMMU Virtual Address Space Object. *
\***************************************************************************/
#include "mem_mgr/vaspace.h" // base class object header
#define NV_IOVA_DOMAIN_NONE (~(NvU32)0)
typedef struct OBJIOVASPACE *POBJIOVASPACE;
#ifndef __NVOC_CLASS_OBJIOVASPACE_TYPEDEF__
#define __NVOC_CLASS_OBJIOVASPACE_TYPEDEF__
typedef struct OBJIOVASPACE OBJIOVASPACE;
#endif /* __NVOC_CLASS_OBJIOVASPACE_TYPEDEF__ */
#ifndef __nvoc_class_id_OBJIOVASPACE
#define __nvoc_class_id_OBJIOVASPACE 0x28ed9c
#endif /* __nvoc_class_id_OBJIOVASPACE */
typedef struct IOVAMAPPING IOVAMAPPING;
typedef struct IOVAMAPPING *PIOVAMAPPING;
// Opaque pointer for the OS layer to use
typedef struct OS_IOVA_MAPPING_DATA *POS_IOVA_MAPPING_DATA;
struct IOVAMAPPING
{
NvU32 iovaspaceId;
//
// Refcount of the mapping.
//
// Each iovaspaceAcquireMapping() call increments the refcount, and each
// iovaspaceReleaseMapping() call decrements it. Additionally, submappings
// increment the refcount of their root mapping on creation and only
// decrement it when they are destroyed.
//
// Mappings are destroyed when their refcount reaches 0.
//
// Notably a mapping can be destroyed regardless of its refcount with
// iovaspaceDestroyMapping(). Destroying a root mapping destroys all of its
// submappings as well.
//
NvU32 refcount;
PMEMORY_DESCRIPTOR pPhysMemDesc;
//
// Maintain a hierarchy of IOVA mappings. The "root" mapping will generally
// be tied to the root memory descriptor. That mapping can have submappings
// within the same IOVA space that correspond to submemory descriptors of
// the root memory descriptor.
//
// Also, the root memory descriptor may have multiple IOVA mappings (up to
// one per IOVA space), so those need to be tracked in association directly
// with the root memory descriptor.
//
// The memory descriptor (root or submemory) always points to a single IOVA
// mapping. For root memory descriptors, that mapping is the head of a list
// in which each mapping covers a unique IOVA space. For submemory
// descriptors, there can only be one IOVA mapping, corresponding to the
// IOVA space of the pGpu associated with the submemory descriptor.
//
union
{
struct IOVAMAPPING *pParent;
struct IOVAMAPPING *pChildren;
} link;
//
// For root mappings, this points to the next root mapping for the same
// parent physical memory descriptor (e.g., a root mapping for a different
// IOVA space).
//
// For submappings, this instead points to the next submapping of the
// parent root mapping, since a submemory descriptor may only have a single
// IOVA mapping (which is a submapping of an IOVA mapping on the root
// memory descriptor).
//
struct IOVAMAPPING *pNext;
// OS data associated with this mapping. Core RM doesn't touch this.
POS_IOVA_MAPPING_DATA pOsData;
//
// If the memory is contiguous, this array consists of one element.
// If the memory is discontiguous, this array is actually larger and has
// one entry for each physical page in pPhysMemDesc. As a result, this
// structure must be allocated from the heap.
//
RmPhysAddr iovaArray[1];
// WARNING: DO NOT place anything behind the IOVA array!
};
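//
// Because iovaArray[] is a variable-length tail, an IOVAMAPPING that covers a
// discontiguous allocation must be heap-allocated with room for one
// RmPhysAddr per physical page. A sketch of the sizing arithmetic, where
// pageCount is a hypothetical page count rather than a field of this
// structure:
//
//     NvU64 allocSize = sizeof(IOVAMAPPING) +
//                       (pageCount - 1) * sizeof(RmPhysAddr);
//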
/*!
* Virtual address space for a system's IOMMU translation.
*/
#ifdef NVOC_IO_VASPACE_H_PRIVATE_ACCESS_ALLOWED
#define PRIVATE_FIELD(x) x
#else
#define PRIVATE_FIELD(x) NVOC_PRIVATE_FIELD(x)
#endif
struct OBJIOVASPACE {
const struct NVOC_RTTI *__nvoc_rtti;
struct OBJVASPACE __nvoc_base_OBJVASPACE;
struct Object *__nvoc_pbase_Object;
struct OBJVASPACE *__nvoc_pbase_OBJVASPACE;
struct OBJIOVASPACE *__nvoc_pbase_OBJIOVASPACE;
NV_STATUS (*__iovaspaceConstruct___)(struct OBJIOVASPACE *, NvU32, NvU32, NvU64, NvU64, NvU64, NvU64, NvU32);
NV_STATUS (*__iovaspaceAlloc__)(struct OBJIOVASPACE *, NvU64, NvU64, NvU64, NvU64, NvU64, VAS_ALLOC_FLAGS, NvU64 *);
NV_STATUS (*__iovaspaceFree__)(struct OBJIOVASPACE *, NvU64);
NV_STATUS (*__iovaspaceApplyDefaultAlignment__)(struct OBJIOVASPACE *, const FB_ALLOC_INFO *, NvU64 *, NvU64 *, NvU64 *);
NV_STATUS (*__iovaspaceIncAllocRefCnt__)(struct OBJIOVASPACE *, NvU64);
NvU64 (*__iovaspaceGetVaStart__)(struct OBJIOVASPACE *);
NvU64 (*__iovaspaceGetVaLimit__)(struct OBJIOVASPACE *);
NV_STATUS (*__iovaspaceGetVasInfo__)(struct OBJIOVASPACE *, NV0080_CTRL_DMA_ADV_SCHED_GET_VA_CAPS_PARAMS *);
NvBool (*__iovaspaceIsInternalVaRestricted__)(struct OBJIOVASPACE *);
NvU32 (*__iovaspaceGetFlags__)(struct OBJIOVASPACE *);
NvU64 mappingCount;
};
#ifndef __NVOC_CLASS_OBJIOVASPACE_TYPEDEF__
#define __NVOC_CLASS_OBJIOVASPACE_TYPEDEF__
typedef struct OBJIOVASPACE OBJIOVASPACE;
#endif /* __NVOC_CLASS_OBJIOVASPACE_TYPEDEF__ */
#ifndef __nvoc_class_id_OBJIOVASPACE
#define __nvoc_class_id_OBJIOVASPACE 0x28ed9c
#endif /* __nvoc_class_id_OBJIOVASPACE */
extern const struct NVOC_CLASS_DEF __nvoc_class_def_OBJIOVASPACE;
#define __staticCast_OBJIOVASPACE(pThis) \
((pThis)->__nvoc_pbase_OBJIOVASPACE)
#ifdef __nvoc_io_vaspace_h_disabled
#define __dynamicCast_OBJIOVASPACE(pThis) ((OBJIOVASPACE*)NULL)
#else //__nvoc_io_vaspace_h_disabled
#define __dynamicCast_OBJIOVASPACE(pThis) \
((OBJIOVASPACE*)__nvoc_dynamicCast(staticCast((pThis), Dynamic), classInfo(OBJIOVASPACE)))
#endif //__nvoc_io_vaspace_h_disabled
NV_STATUS __nvoc_objCreateDynamic_OBJIOVASPACE(OBJIOVASPACE**, Dynamic*, NvU32, va_list);
NV_STATUS __nvoc_objCreate_OBJIOVASPACE(OBJIOVASPACE**, Dynamic*, NvU32);
#define __objCreate_OBJIOVASPACE(ppNewObj, pParent, createFlags) \
__nvoc_objCreate_OBJIOVASPACE((ppNewObj), staticCast((pParent), Dynamic), (createFlags))
#define iovaspaceConstruct_(pVAS, classId, vaspaceId, vaStart, vaLimit, vaStartInternal, vaLimitInternal, flags) iovaspaceConstruct__DISPATCH(pVAS, classId, vaspaceId, vaStart, vaLimit, vaStartInternal, vaLimitInternal, flags)
#define iovaspaceAlloc(pVAS, size, align, rangeLo, rangeHi, pageSizeLockMask, flags, pAddr) iovaspaceAlloc_DISPATCH(pVAS, size, align, rangeLo, rangeHi, pageSizeLockMask, flags, pAddr)
#define iovaspaceFree(pVAS, vAddr) iovaspaceFree_DISPATCH(pVAS, vAddr)
#define iovaspaceApplyDefaultAlignment(pVAS, pAllocInfo, pAlign, pSize, pPageSizeLockMask) iovaspaceApplyDefaultAlignment_DISPATCH(pVAS, pAllocInfo, pAlign, pSize, pPageSizeLockMask)
#define iovaspaceIncAllocRefCnt(pVAS, vAddr) iovaspaceIncAllocRefCnt_DISPATCH(pVAS, vAddr)
#define iovaspaceGetVaStart(pVAS) iovaspaceGetVaStart_DISPATCH(pVAS)
#define iovaspaceGetVaLimit(pVAS) iovaspaceGetVaLimit_DISPATCH(pVAS)
#define iovaspaceGetVasInfo(pVAS, pParams) iovaspaceGetVasInfo_DISPATCH(pVAS, pParams)
#define iovaspaceIsInternalVaRestricted(pVAS) iovaspaceIsInternalVaRestricted_DISPATCH(pVAS)
#define iovaspaceGetFlags(pVAS) iovaspaceGetFlags_DISPATCH(pVAS)
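//
// The macros above route every iovaspace* call through its _DISPATCH inline,
// which simply invokes the per-object function pointer installed by
// __nvoc_init_funcTable_OBJIOVASPACE(). For example, a hypothetical caller
// holding an OBJIOVASPACE pointer would write
//
//     status = iovaspaceAlloc(pIOVAS, size, align, rangeLo, rangeHi,
//                             pageSizeLockMask, flags, &vAddr);
//
// which resolves to pIOVAS->__iovaspaceAlloc__(...) at run time.
//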
NV_STATUS iovaspaceConstruct__IMPL(struct OBJIOVASPACE *pVAS, NvU32 classId, NvU32 vaspaceId, NvU64 vaStart, NvU64 vaLimit, NvU64 vaStartInternal, NvU64 vaLimitInternal, NvU32 flags);
static inline NV_STATUS iovaspaceConstruct__DISPATCH(struct OBJIOVASPACE *pVAS, NvU32 classId, NvU32 vaspaceId, NvU64 vaStart, NvU64 vaLimit, NvU64 vaStartInternal, NvU64 vaLimitInternal, NvU32 flags) {
return pVAS->__iovaspaceConstruct___(pVAS, classId, vaspaceId, vaStart, vaLimit, vaStartInternal, vaLimitInternal, flags);
}
NV_STATUS iovaspaceAlloc_IMPL(struct OBJIOVASPACE *pVAS, NvU64 size, NvU64 align, NvU64 rangeLo, NvU64 rangeHi, NvU64 pageSizeLockMask, VAS_ALLOC_FLAGS flags, NvU64 *pAddr);
static inline NV_STATUS iovaspaceAlloc_DISPATCH(struct OBJIOVASPACE *pVAS, NvU64 size, NvU64 align, NvU64 rangeLo, NvU64 rangeHi, NvU64 pageSizeLockMask, VAS_ALLOC_FLAGS flags, NvU64 *pAddr) {
return pVAS->__iovaspaceAlloc__(pVAS, size, align, rangeLo, rangeHi, pageSizeLockMask, flags, pAddr);
}
NV_STATUS iovaspaceFree_IMPL(struct OBJIOVASPACE *pVAS, NvU64 vAddr);
static inline NV_STATUS iovaspaceFree_DISPATCH(struct OBJIOVASPACE *pVAS, NvU64 vAddr) {
return pVAS->__iovaspaceFree__(pVAS, vAddr);
}
NV_STATUS iovaspaceApplyDefaultAlignment_IMPL(struct OBJIOVASPACE *pVAS, const FB_ALLOC_INFO *pAllocInfo, NvU64 *pAlign, NvU64 *pSize, NvU64 *pPageSizeLockMask);
static inline NV_STATUS iovaspaceApplyDefaultAlignment_DISPATCH(struct OBJIOVASPACE *pVAS, const FB_ALLOC_INFO *pAllocInfo, NvU64 *pAlign, NvU64 *pSize, NvU64 *pPageSizeLockMask) {
return pVAS->__iovaspaceApplyDefaultAlignment__(pVAS, pAllocInfo, pAlign, pSize, pPageSizeLockMask);
}
NV_STATUS iovaspaceIncAllocRefCnt_IMPL(struct OBJIOVASPACE *pVAS, NvU64 vAddr);
static inline NV_STATUS iovaspaceIncAllocRefCnt_DISPATCH(struct OBJIOVASPACE *pVAS, NvU64 vAddr) {
return pVAS->__iovaspaceIncAllocRefCnt__(pVAS, vAddr);
}
NvU64 iovaspaceGetVaStart_IMPL(struct OBJIOVASPACE *pVAS);
static inline NvU64 iovaspaceGetVaStart_DISPATCH(struct OBJIOVASPACE *pVAS) {
return pVAS->__iovaspaceGetVaStart__(pVAS);
}
NvU64 iovaspaceGetVaLimit_IMPL(struct OBJIOVASPACE *pVAS);
static inline NvU64 iovaspaceGetVaLimit_DISPATCH(struct OBJIOVASPACE *pVAS) {
return pVAS->__iovaspaceGetVaLimit__(pVAS);
}
NV_STATUS iovaspaceGetVasInfo_IMPL(struct OBJIOVASPACE *pVAS, NV0080_CTRL_DMA_ADV_SCHED_GET_VA_CAPS_PARAMS *pParams);
static inline NV_STATUS iovaspaceGetVasInfo_DISPATCH(struct OBJIOVASPACE *pVAS, NV0080_CTRL_DMA_ADV_SCHED_GET_VA_CAPS_PARAMS *pParams) {
return pVAS->__iovaspaceGetVasInfo__(pVAS, pParams);
}
static inline NvBool iovaspaceIsInternalVaRestricted_DISPATCH(struct OBJIOVASPACE *pVAS) {
return pVAS->__iovaspaceIsInternalVaRestricted__(pVAS);
}
static inline NvU32 iovaspaceGetFlags_DISPATCH(struct OBJIOVASPACE *pVAS) {
return pVAS->__iovaspaceGetFlags__(pVAS);
}
void iovaspaceDestruct_IMPL(struct OBJIOVASPACE *pIOVAS);
#define __nvoc_iovaspaceDestruct(pIOVAS) iovaspaceDestruct_IMPL(pIOVAS)
NV_STATUS iovaspaceAcquireMapping_IMPL(struct OBJIOVASPACE *pIOVAS, PMEMORY_DESCRIPTOR pIovaMapping);
#ifdef __nvoc_io_vaspace_h_disabled
static inline NV_STATUS iovaspaceAcquireMapping(struct OBJIOVASPACE *pIOVAS, PMEMORY_DESCRIPTOR pIovaMapping) {
NV_ASSERT_FAILED_PRECOMP("OBJIOVASPACE was disabled!");
return NV_ERR_NOT_SUPPORTED;
}
#else //__nvoc_io_vaspace_h_disabled
#define iovaspaceAcquireMapping(pIOVAS, pIovaMapping) iovaspaceAcquireMapping_IMPL(pIOVAS, pIovaMapping)
#endif //__nvoc_io_vaspace_h_disabled
void iovaspaceReleaseMapping_IMPL(struct OBJIOVASPACE *pIOVAS, PIOVAMAPPING pIovaMapping);
#ifdef __nvoc_io_vaspace_h_disabled
static inline void iovaspaceReleaseMapping(struct OBJIOVASPACE *pIOVAS, PIOVAMAPPING pIovaMapping) {
NV_ASSERT_FAILED_PRECOMP("OBJIOVASPACE was disabled!");
}
#else //__nvoc_io_vaspace_h_disabled
#define iovaspaceReleaseMapping(pIOVAS, pIovaMapping) iovaspaceReleaseMapping_IMPL(pIOVAS, pIovaMapping)
#endif //__nvoc_io_vaspace_h_disabled
void iovaspaceDestroyMapping_IMPL(struct OBJIOVASPACE *pIOVAS, PIOVAMAPPING pIovaMapping);
#ifdef __nvoc_io_vaspace_h_disabled
static inline void iovaspaceDestroyMapping(struct OBJIOVASPACE *pIOVAS, PIOVAMAPPING pIovaMapping) {
NV_ASSERT_FAILED_PRECOMP("OBJIOVASPACE was disabled!");
}
#else //__nvoc_io_vaspace_h_disabled
#define iovaspaceDestroyMapping(pIOVAS, pIovaMapping) iovaspaceDestroyMapping_IMPL(pIOVAS, pIovaMapping)
#endif //__nvoc_io_vaspace_h_disabled
#undef PRIVATE_FIELD
struct OBJIOVASPACE* iovaspaceFromId(NvU32 iovaspaceId);
struct OBJIOVASPACE* iovaspaceFromMapping(PIOVAMAPPING pIovaMapping);
//
// Helper that looks up the IOVAS from the mapping and then calls
// iovaspaceDestroyMapping().
//
void iovaMappingDestroy(PIOVAMAPPING pIovaMapping);
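//
// Typical lifetime, as implied by the refcount comments on IOVAMAPPING above
// (hypothetical sketch; how the resulting PIOVAMAPPING is looked up is
// outside the scope of this header):
//
//     NV_STATUS status = iovaspaceAcquireMapping(pIOVAS, pMemDesc); // refcount++
//     // ... program the device using the mapping's iovaArray[] ...
//     iovaspaceReleaseMapping(pIOVAS, pIovaMapping);                // refcount--
//
// Alternatively, iovaspaceDestroyMapping(pIOVAS, pIovaMapping) tears the
// mapping down regardless of its refcount.
//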
#endif // _IOVASPACE_H_
#ifdef __cplusplus
} // extern "C"
#endif
#endif // _G_IO_VASPACE_NVOC_H_

@@ -0,0 +1,47 @@
#ifndef _G_JOURNAL_NVOC_H_
#define _G_JOURNAL_NVOC_H_
#include "nvoc/runtime.h"
#ifdef __cplusplus
extern "C" {
#endif
/*
* SPDX-FileCopyrightText: Copyright (c) 2005-2022 NVIDIA CORPORATION & AFFILIATES. All rights reserved.
* SPDX-License-Identifier: MIT
*
* Permission is hereby granted, free of charge, to any person obtaining a
* copy of this software and associated documentation files (the "Software"),
* to deal in the Software without restriction, including without limitation
* the rights to use, copy, modify, merge, publish, distribute, sublicense,
* and/or sell copies of the Software, and to permit persons to whom the
* Software is furnished to do so, subject to the following conditions:
*
* The above copyright notice and this permission notice shall be included in
* all copies or substantial portions of the Software.
*
* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
* IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
* FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL
* THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
* LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
* FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER
* DEALINGS IN THE SOFTWARE.
*/
#include "g_journal_nvoc.h"
#ifndef _JOURNAL_H_
#define _JOURNAL_H_
//
// Journal object defines and Structures
//
#include "kernel/core/core.h"
#endif // _JOURNAL_H_
#ifdef __cplusplus
} // extern "C"
#endif
#endif // _G_JOURNAL_NVOC_H_

@@ -0,0 +1,346 @@
#define NVOC_KERN_DISP_H_PRIVATE_ACCESS_ALLOWED
#include "nvoc/runtime.h"
#include "nvoc/rtti.h"
#include "nvtypes.h"
#include "nvport/nvport.h"
#include "nvport/inline/util_valist.h"
#include "utils/nvassert.h"
#include "g_kern_disp_nvoc.h"
#ifdef DEBUG
char __nvoc_class_id_uniqueness_check_0x55952e = 1;
#endif
extern const struct NVOC_CLASS_DEF __nvoc_class_def_KernelDisplay;
extern const struct NVOC_CLASS_DEF __nvoc_class_def_Object;
extern const struct NVOC_CLASS_DEF __nvoc_class_def_OBJENGSTATE;
void __nvoc_init_KernelDisplay(KernelDisplay*, RmHalspecOwner* );
void __nvoc_init_funcTable_KernelDisplay(KernelDisplay*, RmHalspecOwner* );
NV_STATUS __nvoc_ctor_KernelDisplay(KernelDisplay*, RmHalspecOwner* );
void __nvoc_init_dataField_KernelDisplay(KernelDisplay*, RmHalspecOwner* );
void __nvoc_dtor_KernelDisplay(KernelDisplay*);
extern const struct NVOC_EXPORT_INFO __nvoc_export_info_KernelDisplay;
static const struct NVOC_RTTI __nvoc_rtti_KernelDisplay_KernelDisplay = {
/*pClassDef=*/ &__nvoc_class_def_KernelDisplay,
/*dtor=*/ (NVOC_DYNAMIC_DTOR) &__nvoc_dtor_KernelDisplay,
/*offset=*/ 0,
};
static const struct NVOC_RTTI __nvoc_rtti_KernelDisplay_Object = {
/*pClassDef=*/ &__nvoc_class_def_Object,
/*dtor=*/ &__nvoc_destructFromBase,
/*offset=*/ NV_OFFSETOF(KernelDisplay, __nvoc_base_OBJENGSTATE.__nvoc_base_Object),
};
static const struct NVOC_RTTI __nvoc_rtti_KernelDisplay_OBJENGSTATE = {
/*pClassDef=*/ &__nvoc_class_def_OBJENGSTATE,
/*dtor=*/ &__nvoc_destructFromBase,
/*offset=*/ NV_OFFSETOF(KernelDisplay, __nvoc_base_OBJENGSTATE),
};
static const struct NVOC_CASTINFO __nvoc_castinfo_KernelDisplay = {
/*numRelatives=*/ 3,
/*relatives=*/ {
&__nvoc_rtti_KernelDisplay_KernelDisplay,
&__nvoc_rtti_KernelDisplay_OBJENGSTATE,
&__nvoc_rtti_KernelDisplay_Object,
},
};
const struct NVOC_CLASS_DEF __nvoc_class_def_KernelDisplay =
{
/*classInfo=*/ {
/*size=*/ sizeof(KernelDisplay),
/*classId=*/ classId(KernelDisplay),
/*providerId=*/ &__nvoc_rtti_provider,
#if NV_PRINTF_STRINGS_ALLOWED
/*name=*/ "KernelDisplay",
#endif
},
/*objCreatefn=*/ (NVOC_DYNAMIC_OBJ_CREATE) &__nvoc_objCreateDynamic_KernelDisplay,
/*pCastInfo=*/ &__nvoc_castinfo_KernelDisplay,
/*pExportInfo=*/ &__nvoc_export_info_KernelDisplay
};
static NV_STATUS __nvoc_thunk_KernelDisplay_engstateConstructEngine(struct OBJGPU *pGpu, struct OBJENGSTATE *pKernelDisplay, ENGDESCRIPTOR engDesc) {
return kdispConstructEngine(pGpu, (struct KernelDisplay *)(((unsigned char *)pKernelDisplay) - __nvoc_rtti_KernelDisplay_OBJENGSTATE.offset), engDesc);
}
static NV_STATUS __nvoc_thunk_KernelDisplay_engstateStatePreInitLocked(struct OBJGPU *pGpu, struct OBJENGSTATE *pKernelDisplay) {
return kdispStatePreInitLocked(pGpu, (struct KernelDisplay *)(((unsigned char *)pKernelDisplay) - __nvoc_rtti_KernelDisplay_OBJENGSTATE.offset));
}
static NV_STATUS __nvoc_thunk_KernelDisplay_engstateStateInitLocked(struct OBJGPU *pGpu, struct OBJENGSTATE *pKernelDisplay) {
return kdispStateInitLocked(pGpu, (struct KernelDisplay *)(((unsigned char *)pKernelDisplay) - __nvoc_rtti_KernelDisplay_OBJENGSTATE.offset));
}
static void __nvoc_thunk_KernelDisplay_engstateStateDestroy(struct OBJGPU *pGpu, struct OBJENGSTATE *pKernelDisplay) {
kdispStateDestroy(pGpu, (struct KernelDisplay *)(((unsigned char *)pKernelDisplay) - __nvoc_rtti_KernelDisplay_OBJENGSTATE.offset));
}
static NV_STATUS __nvoc_thunk_KernelDisplay_engstateStateLoad(struct OBJGPU *pGpu, struct OBJENGSTATE *pKernelDisplay, NvU32 flags) {
return kdispStateLoad(pGpu, (struct KernelDisplay *)(((unsigned char *)pKernelDisplay) - __nvoc_rtti_KernelDisplay_OBJENGSTATE.offset), flags);
}
static NV_STATUS __nvoc_thunk_KernelDisplay_engstateStateUnload(struct OBJGPU *pGpu, struct OBJENGSTATE *pKernelDisplay, NvU32 flags) {
return kdispStateUnload(pGpu, (struct KernelDisplay *)(((unsigned char *)pKernelDisplay) - __nvoc_rtti_KernelDisplay_OBJENGSTATE.offset), flags);
}
static NV_STATUS __nvoc_thunk_OBJENGSTATE_kdispReconcileTunableState(POBJGPU pGpu, struct KernelDisplay *pEngstate, void *pTunableState) {
return engstateReconcileTunableState(pGpu, (struct OBJENGSTATE *)(((unsigned char *)pEngstate) + __nvoc_rtti_KernelDisplay_OBJENGSTATE.offset), pTunableState);
}
static NV_STATUS __nvoc_thunk_OBJENGSTATE_kdispStatePreLoad(POBJGPU pGpu, struct KernelDisplay *pEngstate, NvU32 arg0) {
return engstateStatePreLoad(pGpu, (struct OBJENGSTATE *)(((unsigned char *)pEngstate) + __nvoc_rtti_KernelDisplay_OBJENGSTATE.offset), arg0);
}
static NV_STATUS __nvoc_thunk_OBJENGSTATE_kdispStatePostUnload(POBJGPU pGpu, struct KernelDisplay *pEngstate, NvU32 arg0) {
return engstateStatePostUnload(pGpu, (struct OBJENGSTATE *)(((unsigned char *)pEngstate) + __nvoc_rtti_KernelDisplay_OBJENGSTATE.offset), arg0);
}
static NV_STATUS __nvoc_thunk_OBJENGSTATE_kdispStatePreUnload(POBJGPU pGpu, struct KernelDisplay *pEngstate, NvU32 arg0) {
return engstateStatePreUnload(pGpu, (struct OBJENGSTATE *)(((unsigned char *)pEngstate) + __nvoc_rtti_KernelDisplay_OBJENGSTATE.offset), arg0);
}
static NV_STATUS __nvoc_thunk_OBJENGSTATE_kdispStateInitUnlocked(POBJGPU pGpu, struct KernelDisplay *pEngstate) {
return engstateStateInitUnlocked(pGpu, (struct OBJENGSTATE *)(((unsigned char *)pEngstate) + __nvoc_rtti_KernelDisplay_OBJENGSTATE.offset));
}
static void __nvoc_thunk_OBJENGSTATE_kdispInitMissing(POBJGPU pGpu, struct KernelDisplay *pEngstate) {
engstateInitMissing(pGpu, (struct OBJENGSTATE *)(((unsigned char *)pEngstate) + __nvoc_rtti_KernelDisplay_OBJENGSTATE.offset));
}
static NV_STATUS __nvoc_thunk_OBJENGSTATE_kdispStatePreInitUnlocked(POBJGPU pGpu, struct KernelDisplay *pEngstate) {
return engstateStatePreInitUnlocked(pGpu, (struct OBJENGSTATE *)(((unsigned char *)pEngstate) + __nvoc_rtti_KernelDisplay_OBJENGSTATE.offset));
}
static NV_STATUS __nvoc_thunk_OBJENGSTATE_kdispGetTunableState(POBJGPU pGpu, struct KernelDisplay *pEngstate, void *pTunableState) {
return engstateGetTunableState(pGpu, (struct OBJENGSTATE *)(((unsigned char *)pEngstate) + __nvoc_rtti_KernelDisplay_OBJENGSTATE.offset), pTunableState);
}
static NV_STATUS __nvoc_thunk_OBJENGSTATE_kdispCompareTunableState(POBJGPU pGpu, struct KernelDisplay *pEngstate, void *pTunables1, void *pTunables2) {
return engstateCompareTunableState(pGpu, (struct OBJENGSTATE *)(((unsigned char *)pEngstate) + __nvoc_rtti_KernelDisplay_OBJENGSTATE.offset), pTunables1, pTunables2);
}
static void __nvoc_thunk_OBJENGSTATE_kdispFreeTunableState(POBJGPU pGpu, struct KernelDisplay *pEngstate, void *pTunableState) {
engstateFreeTunableState(pGpu, (struct OBJENGSTATE *)(((unsigned char *)pEngstate) + __nvoc_rtti_KernelDisplay_OBJENGSTATE.offset), pTunableState);
}
static NV_STATUS __nvoc_thunk_OBJENGSTATE_kdispStatePostLoad(POBJGPU pGpu, struct KernelDisplay *pEngstate, NvU32 arg0) {
return engstateStatePostLoad(pGpu, (struct OBJENGSTATE *)(((unsigned char *)pEngstate) + __nvoc_rtti_KernelDisplay_OBJENGSTATE.offset), arg0);
}
static NV_STATUS __nvoc_thunk_OBJENGSTATE_kdispAllocTunableState(POBJGPU pGpu, struct KernelDisplay *pEngstate, void **ppTunableState) {
return engstateAllocTunableState(pGpu, (struct OBJENGSTATE *)(((unsigned char *)pEngstate) + __nvoc_rtti_KernelDisplay_OBJENGSTATE.offset), ppTunableState);
}
static NV_STATUS __nvoc_thunk_OBJENGSTATE_kdispSetTunableState(POBJGPU pGpu, struct KernelDisplay *pEngstate, void *pTunableState) {
return engstateSetTunableState(pGpu, (struct OBJENGSTATE *)(((unsigned char *)pEngstate) + __nvoc_rtti_KernelDisplay_OBJENGSTATE.offset), pTunableState);
}
static NvBool __nvoc_thunk_OBJENGSTATE_kdispIsPresent(POBJGPU pGpu, struct KernelDisplay *pEngstate) {
return engstateIsPresent(pGpu, (struct OBJENGSTATE *)(((unsigned char *)pEngstate) + __nvoc_rtti_KernelDisplay_OBJENGSTATE.offset));
}
const struct NVOC_EXPORT_INFO __nvoc_export_info_KernelDisplay =
{
/*numEntries=*/ 0,
/*pExportEntries=*/ 0
};
void __nvoc_dtor_OBJENGSTATE(OBJENGSTATE*);
void __nvoc_dtor_KernelDisplay(KernelDisplay *pThis) {
__nvoc_kdispDestruct(pThis);
__nvoc_dtor_OBJENGSTATE(&pThis->__nvoc_base_OBJENGSTATE);
PORT_UNREFERENCED_VARIABLE(pThis);
}
void __nvoc_init_dataField_KernelDisplay(KernelDisplay *pThis, RmHalspecOwner *pRmhalspecowner) {
RmVariantHal *rmVariantHal = &pRmhalspecowner->rmVariantHal;
const unsigned long rmVariantHal_HalVarIdx = (unsigned long)rmVariantHal->__nvoc_HalVarIdx;
ChipHal *chipHal = &pRmhalspecowner->chipHal;
const unsigned long chipHal_HalVarIdx = (unsigned long)chipHal->__nvoc_HalVarIdx;
DispIpHal *dispIpHal = &pRmhalspecowner->dispIpHal;
const unsigned long dispIpHal_HalVarIdx = (unsigned long)dispIpHal->__nvoc_HalVarIdx;
PORT_UNREFERENCED_VARIABLE(pThis);
PORT_UNREFERENCED_VARIABLE(pRmhalspecowner);
PORT_UNREFERENCED_VARIABLE(rmVariantHal);
PORT_UNREFERENCED_VARIABLE(rmVariantHal_HalVarIdx);
PORT_UNREFERENCED_VARIABLE(chipHal);
PORT_UNREFERENCED_VARIABLE(chipHal_HalVarIdx);
PORT_UNREFERENCED_VARIABLE(dispIpHal);
PORT_UNREFERENCED_VARIABLE(dispIpHal_HalVarIdx);
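//
// The halspec checks below treat each __nvoc_HalVarIdx as a position in a
// bitmap: (idx >> 5) selects a 32-bit word and (1UL << (idx & 0x1f)) selects
// the bit within that word. For instance, the ChipHal test for T234D matches
// word 2, bit 16, i.e. variant index (2 * 32) + 16 = 80.
//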
// NVOC Property Hal field -- PDB_PROP_KDISP_IS_MISSING
if (0)
{
}
else if (( ((rmVariantHal_HalVarIdx >> 5) == 0UL) && ((1UL << (rmVariantHal_HalVarIdx & 0x1f)) & 0x00000002UL) )) /* RmVariantHal: PF_KERNEL_ONLY */
{
pThis->setProperty(pThis, PDB_PROP_KDISP_IS_MISSING, ((NvBool)(0 != 0)));
}
// NVOC Property Hal field -- PDB_PROP_KDISP_IMP_ENABLE
if (( ((chipHal_HalVarIdx >> 5) == 2UL) && ((1UL << (chipHal_HalVarIdx & 0x1f)) & 0x00010000UL) )) /* ChipHal: T234D */
{
pThis->setProperty(pThis, PDB_PROP_KDISP_IMP_ENABLE, ((NvBool)(0 == 0)));
}
// default
else
{
pThis->setProperty(pThis, PDB_PROP_KDISP_IMP_ENABLE, ((NvBool)(0 != 0)));
}
pThis->pStaticInfo = ((void *)0);
pThis->bWarPurgeSatellitesOnCoreFree = ((NvBool)(0 != 0));
}
NV_STATUS __nvoc_ctor_OBJENGSTATE(OBJENGSTATE* );
NV_STATUS __nvoc_ctor_KernelDisplay(KernelDisplay *pThis, RmHalspecOwner *pRmhalspecowner) {
NV_STATUS status = NV_OK;
status = __nvoc_ctor_OBJENGSTATE(&pThis->__nvoc_base_OBJENGSTATE);
if (status != NV_OK) goto __nvoc_ctor_KernelDisplay_fail_OBJENGSTATE;
__nvoc_init_dataField_KernelDisplay(pThis, pRmhalspecowner);
goto __nvoc_ctor_KernelDisplay_exit; // Success
__nvoc_ctor_KernelDisplay_fail_OBJENGSTATE:
__nvoc_ctor_KernelDisplay_exit:
return status;
}
static void __nvoc_init_funcTable_KernelDisplay_1(KernelDisplay *pThis, RmHalspecOwner *pRmhalspecowner) {
RmVariantHal *rmVariantHal = &pRmhalspecowner->rmVariantHal;
const unsigned long rmVariantHal_HalVarIdx = (unsigned long)rmVariantHal->__nvoc_HalVarIdx;
ChipHal *chipHal = &pRmhalspecowner->chipHal;
const unsigned long chipHal_HalVarIdx = (unsigned long)chipHal->__nvoc_HalVarIdx;
DispIpHal *dispIpHal = &pRmhalspecowner->dispIpHal;
const unsigned long dispIpHal_HalVarIdx = (unsigned long)dispIpHal->__nvoc_HalVarIdx;
PORT_UNREFERENCED_VARIABLE(pThis);
PORT_UNREFERENCED_VARIABLE(pRmhalspecowner);
PORT_UNREFERENCED_VARIABLE(rmVariantHal);
PORT_UNREFERENCED_VARIABLE(rmVariantHal_HalVarIdx);
PORT_UNREFERENCED_VARIABLE(chipHal);
PORT_UNREFERENCED_VARIABLE(chipHal_HalVarIdx);
PORT_UNREFERENCED_VARIABLE(dispIpHal);
PORT_UNREFERENCED_VARIABLE(dispIpHal_HalVarIdx);
pThis->__kdispConstructEngine__ = &kdispConstructEngine_IMPL;
pThis->__kdispStatePreInitLocked__ = &kdispStatePreInitLocked_IMPL;
pThis->__kdispStateInitLocked__ = &kdispStateInitLocked_IMPL;
pThis->__kdispStateDestroy__ = &kdispStateDestroy_IMPL;
pThis->__kdispStateLoad__ = &kdispStateLoad_IMPL;
pThis->__kdispStateUnload__ = &kdispStateUnload_IMPL;
pThis->__nvoc_base_OBJENGSTATE.__engstateConstructEngine__ = &__nvoc_thunk_KernelDisplay_engstateConstructEngine;
pThis->__nvoc_base_OBJENGSTATE.__engstateStatePreInitLocked__ = &__nvoc_thunk_KernelDisplay_engstateStatePreInitLocked;
pThis->__nvoc_base_OBJENGSTATE.__engstateStateInitLocked__ = &__nvoc_thunk_KernelDisplay_engstateStateInitLocked;
pThis->__nvoc_base_OBJENGSTATE.__engstateStateDestroy__ = &__nvoc_thunk_KernelDisplay_engstateStateDestroy;
pThis->__nvoc_base_OBJENGSTATE.__engstateStateLoad__ = &__nvoc_thunk_KernelDisplay_engstateStateLoad;
pThis->__nvoc_base_OBJENGSTATE.__engstateStateUnload__ = &__nvoc_thunk_KernelDisplay_engstateStateUnload;
pThis->__kdispReconcileTunableState__ = &__nvoc_thunk_OBJENGSTATE_kdispReconcileTunableState;
pThis->__kdispStatePreLoad__ = &__nvoc_thunk_OBJENGSTATE_kdispStatePreLoad;
pThis->__kdispStatePostUnload__ = &__nvoc_thunk_OBJENGSTATE_kdispStatePostUnload;
pThis->__kdispStatePreUnload__ = &__nvoc_thunk_OBJENGSTATE_kdispStatePreUnload;
pThis->__kdispStateInitUnlocked__ = &__nvoc_thunk_OBJENGSTATE_kdispStateInitUnlocked;
pThis->__kdispInitMissing__ = &__nvoc_thunk_OBJENGSTATE_kdispInitMissing;
pThis->__kdispStatePreInitUnlocked__ = &__nvoc_thunk_OBJENGSTATE_kdispStatePreInitUnlocked;
pThis->__kdispGetTunableState__ = &__nvoc_thunk_OBJENGSTATE_kdispGetTunableState;
pThis->__kdispCompareTunableState__ = &__nvoc_thunk_OBJENGSTATE_kdispCompareTunableState;
pThis->__kdispFreeTunableState__ = &__nvoc_thunk_OBJENGSTATE_kdispFreeTunableState;
pThis->__kdispStatePostLoad__ = &__nvoc_thunk_OBJENGSTATE_kdispStatePostLoad;
pThis->__kdispAllocTunableState__ = &__nvoc_thunk_OBJENGSTATE_kdispAllocTunableState;
pThis->__kdispSetTunableState__ = &__nvoc_thunk_OBJENGSTATE_kdispSetTunableState;
pThis->__kdispIsPresent__ = &__nvoc_thunk_OBJENGSTATE_kdispIsPresent;
}
void __nvoc_init_funcTable_KernelDisplay(KernelDisplay *pThis, RmHalspecOwner *pRmhalspecowner) {
__nvoc_init_funcTable_KernelDisplay_1(pThis, pRmhalspecowner);
}
void __nvoc_init_OBJENGSTATE(OBJENGSTATE*);
void __nvoc_init_KernelDisplay(KernelDisplay *pThis, RmHalspecOwner *pRmhalspecowner) {
pThis->__nvoc_pbase_KernelDisplay = pThis;
pThis->__nvoc_pbase_Object = &pThis->__nvoc_base_OBJENGSTATE.__nvoc_base_Object;
pThis->__nvoc_pbase_OBJENGSTATE = &pThis->__nvoc_base_OBJENGSTATE;
__nvoc_init_OBJENGSTATE(&pThis->__nvoc_base_OBJENGSTATE);
__nvoc_init_funcTable_KernelDisplay(pThis, pRmhalspecowner);
}
NV_STATUS __nvoc_objCreate_KernelDisplay(KernelDisplay **ppThis, Dynamic *pParent, NvU32 createFlags) {
NV_STATUS status;
Object *pParentObj;
KernelDisplay *pThis;
RmHalspecOwner *pRmhalspecowner;
pThis = portMemAllocNonPaged(sizeof(KernelDisplay));
if (pThis == NULL) return NV_ERR_NO_MEMORY;
portMemSet(pThis, 0, sizeof(KernelDisplay));
__nvoc_initRtti(staticCast(pThis, Dynamic), &__nvoc_class_def_KernelDisplay);
if (pParent != NULL && !(createFlags & NVOC_OBJ_CREATE_FLAGS_PARENT_HALSPEC_ONLY))
{
pParentObj = dynamicCast(pParent, Object);
objAddChild(pParentObj, &pThis->__nvoc_base_OBJENGSTATE.__nvoc_base_Object);
}
else
{
pThis->__nvoc_base_OBJENGSTATE.__nvoc_base_Object.pParent = NULL;
}
if ((pRmhalspecowner = dynamicCast(pParent, RmHalspecOwner)) == NULL)
pRmhalspecowner = objFindAncestorOfType(RmHalspecOwner, pParent);
NV_ASSERT_OR_RETURN(pRmhalspecowner != NULL, NV_ERR_INVALID_ARGUMENT);
__nvoc_init_KernelDisplay(pThis, pRmhalspecowner);
status = __nvoc_ctor_KernelDisplay(pThis, pRmhalspecowner);
if (status != NV_OK) goto __nvoc_objCreate_KernelDisplay_cleanup;
*ppThis = pThis;
return NV_OK;
__nvoc_objCreate_KernelDisplay_cleanup:
// do not call destructors here since the constructor already called them
portMemFree(pThis);
return status;
}
NV_STATUS __nvoc_objCreateDynamic_KernelDisplay(KernelDisplay **ppThis, Dynamic *pParent, NvU32 createFlags, va_list args) {
NV_STATUS status;
status = __nvoc_objCreate_KernelDisplay(ppThis, pParent, createFlags);
return status;
}

@@ -0,0 +1,642 @@
#ifndef _G_KERN_DISP_NVOC_H_
#define _G_KERN_DISP_NVOC_H_
#include "nvoc/runtime.h"
#ifdef __cplusplus
extern "C" {
#endif
/*
* SPDX-FileCopyrightText: Copyright (c) 2020-2022 NVIDIA CORPORATION & AFFILIATES. All rights reserved.
* SPDX-License-Identifier: MIT
*
* Permission is hereby granted, free of charge, to any person obtaining a
* copy of this software and associated documentation files (the "Software"),
* to deal in the Software without restriction, including without limitation
* the rights to use, copy, modify, merge, publish, distribute, sublicense,
* and/or sell copies of the Software, and to permit persons to whom the
* Software is furnished to do so, subject to the following conditions:
*
* The above copyright notice and this permission notice shall be included in
* all copies or substantial portions of the Software.
*
* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
* IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
* FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL
* THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
* LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
* FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER
* DEALINGS IN THE SOFTWARE.
*/
#include "g_kern_disp_nvoc.h"
#ifndef KERN_DISP_H
#define KERN_DISP_H
/******************************************************************************
*
* Kernel Display module header
* This file contains functions managing display on CPU RM
*
******************************************************************************/
#include "gpu/eng_state.h"
#include "gpu/gpu_halspec.h"
#include "gpu/disp/kern_disp_type.h"
#include "gpu/disp/kern_disp_max.h"
#include "gpu/mem_mgr/context_dma.h"
#include "gpu/disp/vblank_callback/vblank.h"
#include "ctrl/ctrl2080/ctrl2080internal.h"
typedef NV2080_CTRL_INTERNAL_DISPLAY_GET_STATIC_INFO_PARAMS KernelDisplayStaticInfo;
struct DispChannel;
#ifndef __NVOC_CLASS_DispChannel_TYPEDEF__
#define __NVOC_CLASS_DispChannel_TYPEDEF__
typedef struct DispChannel DispChannel;
#endif /* __NVOC_CLASS_DispChannel_TYPEDEF__ */
#ifndef __nvoc_class_id_DispChannel
#define __nvoc_class_id_DispChannel 0xbd2ff3
#endif /* __nvoc_class_id_DispChannel */
struct RgLineCallback;
#ifndef __NVOC_CLASS_RgLineCallback_TYPEDEF__
#define __NVOC_CLASS_RgLineCallback_TYPEDEF__
typedef struct RgLineCallback RgLineCallback;
#endif /* __NVOC_CLASS_RgLineCallback_TYPEDEF__ */
#ifndef __nvoc_class_id_RgLineCallback
#define __nvoc_class_id_RgLineCallback 0xa3ff1c
#endif /* __nvoc_class_id_RgLineCallback */
#define KDISP_GET_HEAD(pKernelDisplay, headID) (RMCFG_MODULE_KERNEL_HEAD ? kdispGetHead(pKernelDisplay, headID) : NULL)
/*!
* KernelDisplay is a logical abstraction of the GPU Display Engine. The
* public API of the Display Engine is exposed through this object, and any
* interfaces which do not manage the underlying Display hardware can be
* managed by this object.
*/
#ifdef NVOC_KERN_DISP_H_PRIVATE_ACCESS_ALLOWED
#define PRIVATE_FIELD(x) x
#else
#define PRIVATE_FIELD(x) NVOC_PRIVATE_FIELD(x)
#endif
struct KernelDisplay {
const struct NVOC_RTTI *__nvoc_rtti;
struct OBJENGSTATE __nvoc_base_OBJENGSTATE;
struct Object *__nvoc_pbase_Object;
struct OBJENGSTATE *__nvoc_pbase_OBJENGSTATE;
struct KernelDisplay *__nvoc_pbase_KernelDisplay;
NV_STATUS (*__kdispConstructEngine__)(struct OBJGPU *, struct KernelDisplay *, ENGDESCRIPTOR);
NV_STATUS (*__kdispStatePreInitLocked__)(struct OBJGPU *, struct KernelDisplay *);
NV_STATUS (*__kdispStateInitLocked__)(struct OBJGPU *, struct KernelDisplay *);
void (*__kdispStateDestroy__)(struct OBJGPU *, struct KernelDisplay *);
NV_STATUS (*__kdispStateLoad__)(struct OBJGPU *, struct KernelDisplay *, NvU32);
NV_STATUS (*__kdispStateUnload__)(struct OBJGPU *, struct KernelDisplay *, NvU32);
NV_STATUS (*__kdispReconcileTunableState__)(POBJGPU, struct KernelDisplay *, void *);
NV_STATUS (*__kdispStatePreLoad__)(POBJGPU, struct KernelDisplay *, NvU32);
NV_STATUS (*__kdispStatePostUnload__)(POBJGPU, struct KernelDisplay *, NvU32);
NV_STATUS (*__kdispStatePreUnload__)(POBJGPU, struct KernelDisplay *, NvU32);
NV_STATUS (*__kdispStateInitUnlocked__)(POBJGPU, struct KernelDisplay *);
void (*__kdispInitMissing__)(POBJGPU, struct KernelDisplay *);
NV_STATUS (*__kdispStatePreInitUnlocked__)(POBJGPU, struct KernelDisplay *);
NV_STATUS (*__kdispGetTunableState__)(POBJGPU, struct KernelDisplay *, void *);
NV_STATUS (*__kdispCompareTunableState__)(POBJGPU, struct KernelDisplay *, void *, void *);
void (*__kdispFreeTunableState__)(POBJGPU, struct KernelDisplay *, void *);
NV_STATUS (*__kdispStatePostLoad__)(POBJGPU, struct KernelDisplay *, NvU32);
NV_STATUS (*__kdispAllocTunableState__)(POBJGPU, struct KernelDisplay *, void **);
NV_STATUS (*__kdispSetTunableState__)(POBJGPU, struct KernelDisplay *, void *);
NvBool (*__kdispIsPresent__)(POBJGPU, struct KernelDisplay *);
NvBool PDB_PROP_KDISP_IMP_ENABLE;
struct DisplayInstanceMemory *pInst;
struct KernelHead *pKernelHead[4];
const KernelDisplayStaticInfo *pStaticInfo;
NvBool bWarPurgeSatellitesOnCoreFree;
struct RgLineCallback *rgLineCallbackPerHead[4][2];
NvU32 isrVblankHeads;
};
#ifndef __NVOC_CLASS_KernelDisplay_TYPEDEF__
#define __NVOC_CLASS_KernelDisplay_TYPEDEF__
typedef struct KernelDisplay KernelDisplay;
#endif /* __NVOC_CLASS_KernelDisplay_TYPEDEF__ */
#ifndef __nvoc_class_id_KernelDisplay
#define __nvoc_class_id_KernelDisplay 0x55952e
#endif /* __nvoc_class_id_KernelDisplay */
extern const struct NVOC_CLASS_DEF __nvoc_class_def_KernelDisplay;
#define __staticCast_KernelDisplay(pThis) \
((pThis)->__nvoc_pbase_KernelDisplay)
#ifdef __nvoc_kern_disp_h_disabled
#define __dynamicCast_KernelDisplay(pThis) ((KernelDisplay*)NULL)
#else //__nvoc_kern_disp_h_disabled
#define __dynamicCast_KernelDisplay(pThis) \
((KernelDisplay*)__nvoc_dynamicCast(staticCast((pThis), Dynamic), classInfo(KernelDisplay)))
#endif //__nvoc_kern_disp_h_disabled
#define PDB_PROP_KDISP_IS_MISSING_BASE_CAST __nvoc_base_OBJENGSTATE.
#define PDB_PROP_KDISP_IS_MISSING_BASE_NAME PDB_PROP_ENGSTATE_IS_MISSING
#define PDB_PROP_KDISP_IMP_ENABLE_BASE_CAST
#define PDB_PROP_KDISP_IMP_ENABLE_BASE_NAME PDB_PROP_KDISP_IMP_ENABLE
NV_STATUS __nvoc_objCreateDynamic_KernelDisplay(KernelDisplay**, Dynamic*, NvU32, va_list);
NV_STATUS __nvoc_objCreate_KernelDisplay(KernelDisplay**, Dynamic*, NvU32);
#define __objCreate_KernelDisplay(ppNewObj, pParent, createFlags) \
__nvoc_objCreate_KernelDisplay((ppNewObj), staticCast((pParent), Dynamic), (createFlags))
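//
// Hypothetical construction sketch using the wrapper above; pGpu is assumed
// to be an NVOC parent whose ancestry includes the RmHalspecOwner required
// by __nvoc_objCreate_KernelDisplay():
//
//     KernelDisplay *pKernelDisplay = NULL;
//     NV_STATUS status = __objCreate_KernelDisplay(&pKernelDisplay, pGpu, 0);
//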
#define kdispConstructEngine(pGpu, pKernelDisplay, engDesc) kdispConstructEngine_DISPATCH(pGpu, pKernelDisplay, engDesc)
#define kdispStatePreInitLocked(pGpu, pKernelDisplay) kdispStatePreInitLocked_DISPATCH(pGpu, pKernelDisplay)
#define kdispStateInitLocked(pGpu, pKernelDisplay) kdispStateInitLocked_DISPATCH(pGpu, pKernelDisplay)
#define kdispStateDestroy(pGpu, pKernelDisplay) kdispStateDestroy_DISPATCH(pGpu, pKernelDisplay)
#define kdispStateLoad(pGpu, pKernelDisplay, flags) kdispStateLoad_DISPATCH(pGpu, pKernelDisplay, flags)
#define kdispStateUnload(pGpu, pKernelDisplay, flags) kdispStateUnload_DISPATCH(pGpu, pKernelDisplay, flags)
#define kdispReconcileTunableState(pGpu, pEngstate, pTunableState) kdispReconcileTunableState_DISPATCH(pGpu, pEngstate, pTunableState)
#define kdispStatePreLoad(pGpu, pEngstate, arg0) kdispStatePreLoad_DISPATCH(pGpu, pEngstate, arg0)
#define kdispStatePostUnload(pGpu, pEngstate, arg0) kdispStatePostUnload_DISPATCH(pGpu, pEngstate, arg0)
#define kdispStatePreUnload(pGpu, pEngstate, arg0) kdispStatePreUnload_DISPATCH(pGpu, pEngstate, arg0)
#define kdispStateInitUnlocked(pGpu, pEngstate) kdispStateInitUnlocked_DISPATCH(pGpu, pEngstate)
#define kdispInitMissing(pGpu, pEngstate) kdispInitMissing_DISPATCH(pGpu, pEngstate)
#define kdispStatePreInitUnlocked(pGpu, pEngstate) kdispStatePreInitUnlocked_DISPATCH(pGpu, pEngstate)
#define kdispGetTunableState(pGpu, pEngstate, pTunableState) kdispGetTunableState_DISPATCH(pGpu, pEngstate, pTunableState)
#define kdispCompareTunableState(pGpu, pEngstate, pTunables1, pTunables2) kdispCompareTunableState_DISPATCH(pGpu, pEngstate, pTunables1, pTunables2)
#define kdispFreeTunableState(pGpu, pEngstate, pTunableState) kdispFreeTunableState_DISPATCH(pGpu, pEngstate, pTunableState)
#define kdispStatePostLoad(pGpu, pEngstate, arg0) kdispStatePostLoad_DISPATCH(pGpu, pEngstate, arg0)
#define kdispAllocTunableState(pGpu, pEngstate, ppTunableState) kdispAllocTunableState_DISPATCH(pGpu, pEngstate, ppTunableState)
#define kdispSetTunableState(pGpu, pEngstate, pTunableState) kdispSetTunableState_DISPATCH(pGpu, pEngstate, pTunableState)
#define kdispIsPresent(pGpu, pEngstate) kdispIsPresent_DISPATCH(pGpu, pEngstate)
NV_STATUS kdispConstructInstMem_IMPL(struct KernelDisplay *pKernelDisplay);
#ifdef __nvoc_kern_disp_h_disabled
static inline NV_STATUS kdispConstructInstMem(struct KernelDisplay *pKernelDisplay) {
NV_ASSERT_FAILED_PRECOMP("KernelDisplay was disabled!");
return NV_ERR_NOT_SUPPORTED;
}
#else //__nvoc_kern_disp_h_disabled
#define kdispConstructInstMem(pKernelDisplay) kdispConstructInstMem_IMPL(pKernelDisplay)
#endif //__nvoc_kern_disp_h_disabled
#define kdispConstructInstMem_HAL(pKernelDisplay) kdispConstructInstMem(pKernelDisplay)
void kdispDestructInstMem_IMPL(struct KernelDisplay *pKernelDisplay);
#ifdef __nvoc_kern_disp_h_disabled
static inline void kdispDestructInstMem(struct KernelDisplay *pKernelDisplay) {
NV_ASSERT_FAILED_PRECOMP("KernelDisplay was disabled!");
}
#else //__nvoc_kern_disp_h_disabled
#define kdispDestructInstMem(pKernelDisplay) kdispDestructInstMem_IMPL(pKernelDisplay)
#endif //__nvoc_kern_disp_h_disabled
#define kdispDestructInstMem_HAL(pKernelDisplay) kdispDestructInstMem(pKernelDisplay)
NV_STATUS kdispSelectClass_v03_00_KERNEL(struct OBJGPU *pGpu, struct KernelDisplay *pKernelDisplay, NvU32 swClass);
#ifdef __nvoc_kern_disp_h_disabled
static inline NV_STATUS kdispSelectClass(struct OBJGPU *pGpu, struct KernelDisplay *pKernelDisplay, NvU32 swClass) {
NV_ASSERT_FAILED_PRECOMP("KernelDisplay was disabled!");
return NV_ERR_NOT_SUPPORTED;
}
#else //__nvoc_kern_disp_h_disabled
#define kdispSelectClass(pGpu, pKernelDisplay, swClass) kdispSelectClass_v03_00_KERNEL(pGpu, pKernelDisplay, swClass)
#endif //__nvoc_kern_disp_h_disabled
#define kdispSelectClass_HAL(pGpu, pKernelDisplay, swClass) kdispSelectClass(pGpu, pKernelDisplay, swClass)
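//
// The block above shows the recurring per-method pattern in this header: the
// selected implementation (here kdispSelectClass_v03_00_KERNEL) is declared,
// a stub returning NV_ERR_NOT_SUPPORTED is substituted when the class is
// compiled out via __nvoc_kern_disp_h_disabled, and the _HAL alias binds the
// generic name to whichever variant this build selects.
//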
NvS32 kdispGetBaseOffset_v04_02(struct OBJGPU *pGpu, struct KernelDisplay *pKernelDisplay);
#ifdef __nvoc_kern_disp_h_disabled
static inline NvS32 kdispGetBaseOffset(struct OBJGPU *pGpu, struct KernelDisplay *pKernelDisplay) {
NV_ASSERT_FAILED_PRECOMP("KernelDisplay was disabled!");
return 0;
}
#else //__nvoc_kern_disp_h_disabled
#define kdispGetBaseOffset(pGpu, pKernelDisplay) kdispGetBaseOffset_v04_02(pGpu, pKernelDisplay)
#endif //__nvoc_kern_disp_h_disabled
#define kdispGetBaseOffset_HAL(pGpu, pKernelDisplay) kdispGetBaseOffset(pGpu, pKernelDisplay)
NV_STATUS kdispGetChannelNum_v03_00(struct KernelDisplay *pKernelDisplay, DISPCHNCLASS channelClass, NvU32 channelInstance, NvU32 *pChannelNum);
#ifdef __nvoc_kern_disp_h_disabled
static inline NV_STATUS kdispGetChannelNum(struct KernelDisplay *pKernelDisplay, DISPCHNCLASS channelClass, NvU32 channelInstance, NvU32 *pChannelNum) {
NV_ASSERT_FAILED_PRECOMP("KernelDisplay was disabled!");
return NV_ERR_NOT_SUPPORTED;
}
#else //__nvoc_kern_disp_h_disabled
#define kdispGetChannelNum(pKernelDisplay, channelClass, channelInstance, pChannelNum) kdispGetChannelNum_v03_00(pKernelDisplay, channelClass, channelInstance, pChannelNum)
#endif //__nvoc_kern_disp_h_disabled
#define kdispGetChannelNum_HAL(pKernelDisplay, channelClass, channelInstance, pChannelNum) kdispGetChannelNum(pKernelDisplay, channelClass, channelInstance, pChannelNum)
void kdispGetDisplayCapsBaseAndSize_v03_00(struct OBJGPU *pGpu, struct KernelDisplay *pKernelDisplay, NvU32 *pOffset, NvU32 *pSize);
#ifdef __nvoc_kern_disp_h_disabled
static inline void kdispGetDisplayCapsBaseAndSize(struct OBJGPU *pGpu, struct KernelDisplay *pKernelDisplay, NvU32 *pOffset, NvU32 *pSize) {
NV_ASSERT_FAILED_PRECOMP("KernelDisplay was disabled!");
}
#else //__nvoc_kern_disp_h_disabled
#define kdispGetDisplayCapsBaseAndSize(pGpu, pKernelDisplay, pOffset, pSize) kdispGetDisplayCapsBaseAndSize_v03_00(pGpu, pKernelDisplay, pOffset, pSize)
#endif //__nvoc_kern_disp_h_disabled
#define kdispGetDisplayCapsBaseAndSize_HAL(pGpu, pKernelDisplay, pOffset, pSize) kdispGetDisplayCapsBaseAndSize(pGpu, pKernelDisplay, pOffset, pSize)
void kdispGetDisplaySfUserBaseAndSize_v03_00(struct OBJGPU *pGpu, struct KernelDisplay *pKernelDisplay, NvU32 *pOffset, NvU32 *pSize);
#ifdef __nvoc_kern_disp_h_disabled
static inline void kdispGetDisplaySfUserBaseAndSize(struct OBJGPU *pGpu, struct KernelDisplay *pKernelDisplay, NvU32 *pOffset, NvU32 *pSize) {
NV_ASSERT_FAILED_PRECOMP("KernelDisplay was disabled!");
}
#else //__nvoc_kern_disp_h_disabled
#define kdispGetDisplaySfUserBaseAndSize(pGpu, pKernelDisplay, pOffset, pSize) kdispGetDisplaySfUserBaseAndSize_v03_00(pGpu, pKernelDisplay, pOffset, pSize)
#endif //__nvoc_kern_disp_h_disabled
#define kdispGetDisplaySfUserBaseAndSize_HAL(pGpu, pKernelDisplay, pOffset, pSize) kdispGetDisplaySfUserBaseAndSize(pGpu, pKernelDisplay, pOffset, pSize)
NV_STATUS kdispGetDisplayChannelUserBaseAndSize_v03_00(struct OBJGPU *pGpu, struct KernelDisplay *pKernelDisplay, DISPCHNCLASS channelClass, NvU32 channelInstance, NvU32 *pOffset, NvU32 *pSize);
#ifdef __nvoc_kern_disp_h_disabled
static inline NV_STATUS kdispGetDisplayChannelUserBaseAndSize(struct OBJGPU *pGpu, struct KernelDisplay *pKernelDisplay, DISPCHNCLASS channelClass, NvU32 channelInstance, NvU32 *pOffset, NvU32 *pSize) {
NV_ASSERT_FAILED_PRECOMP("KernelDisplay was disabled!");
return NV_ERR_NOT_SUPPORTED;
}
#else //__nvoc_kern_disp_h_disabled
#define kdispGetDisplayChannelUserBaseAndSize(pGpu, pKernelDisplay, channelClass, channelInstance, pOffset, pSize) kdispGetDisplayChannelUserBaseAndSize_v03_00(pGpu, pKernelDisplay, channelClass, channelInstance, pOffset, pSize)
#endif //__nvoc_kern_disp_h_disabled
#define kdispGetDisplayChannelUserBaseAndSize_HAL(pGpu, pKernelDisplay, channelClass, channelInstance, pOffset, pSize) kdispGetDisplayChannelUserBaseAndSize(pGpu, pKernelDisplay, channelClass, channelInstance, pOffset, pSize)
NV_STATUS kdispImportImpData_IMPL(struct KernelDisplay *pKernelDisplay);
#ifdef __nvoc_kern_disp_h_disabled
static inline NV_STATUS kdispImportImpData(struct KernelDisplay *pKernelDisplay) {
NV_ASSERT_FAILED_PRECOMP("KernelDisplay was disabled!");
return NV_ERR_NOT_SUPPORTED;
}
#else //__nvoc_kern_disp_h_disabled
#define kdispImportImpData(pKernelDisplay) kdispImportImpData_IMPL(pKernelDisplay)
#endif //__nvoc_kern_disp_h_disabled
#define kdispImportImpData_HAL(pKernelDisplay) kdispImportImpData(pKernelDisplay)
NV_STATUS kdispArbAndAllocDisplayBandwidth_v04_02(struct OBJGPU *pGpu, struct KernelDisplay *pKernelDisplay, enum DISPLAY_ICC_BW_CLIENT iccBwClient, NvU32 minRequiredIsoBandwidthKBPS, NvU32 minRequiredFloorBandwidthKBPS);
#ifdef __nvoc_kern_disp_h_disabled
static inline NV_STATUS kdispArbAndAllocDisplayBandwidth(struct OBJGPU *pGpu, struct KernelDisplay *pKernelDisplay, enum DISPLAY_ICC_BW_CLIENT iccBwClient, NvU32 minRequiredIsoBandwidthKBPS, NvU32 minRequiredFloorBandwidthKBPS) {
NV_ASSERT_FAILED_PRECOMP("KernelDisplay was disabled!");
return NV_ERR_NOT_SUPPORTED;
}
#else //__nvoc_kern_disp_h_disabled
#define kdispArbAndAllocDisplayBandwidth(pGpu, pKernelDisplay, iccBwClient, minRequiredIsoBandwidthKBPS, minRequiredFloorBandwidthKBPS) kdispArbAndAllocDisplayBandwidth_v04_02(pGpu, pKernelDisplay, iccBwClient, minRequiredIsoBandwidthKBPS, minRequiredFloorBandwidthKBPS)
#endif //__nvoc_kern_disp_h_disabled
#define kdispArbAndAllocDisplayBandwidth_HAL(pGpu, pKernelDisplay, iccBwClient, minRequiredIsoBandwidthKBPS, minRequiredFloorBandwidthKBPS) kdispArbAndAllocDisplayBandwidth(pGpu, pKernelDisplay, iccBwClient, minRequiredIsoBandwidthKBPS, minRequiredFloorBandwidthKBPS)
NV_STATUS kdispSetPushBufferParamsToPhysical_IMPL(struct OBJGPU *pGpu, struct KernelDisplay *pKernelDisplay, struct DispChannel *pDispChannel, NvHandle hObjectBuffer, struct ContextDma *pBufferContextDma, NvU32 hClass, NvU32 channelInstance, DISPCHNCLASS internalDispChnClass);
#ifdef __nvoc_kern_disp_h_disabled
static inline NV_STATUS kdispSetPushBufferParamsToPhysical(struct OBJGPU *pGpu, struct KernelDisplay *pKernelDisplay, struct DispChannel *pDispChannel, NvHandle hObjectBuffer, struct ContextDma *pBufferContextDma, NvU32 hClass, NvU32 channelInstance, DISPCHNCLASS internalDispChnClass) {
NV_ASSERT_FAILED_PRECOMP("KernelDisplay was disabled!");
return NV_ERR_NOT_SUPPORTED;
}
#else //__nvoc_kern_disp_h_disabled
#define kdispSetPushBufferParamsToPhysical(pGpu, pKernelDisplay, pDispChannel, hObjectBuffer, pBufferContextDma, hClass, channelInstance, internalDispChnClass) kdispSetPushBufferParamsToPhysical_IMPL(pGpu, pKernelDisplay, pDispChannel, hObjectBuffer, pBufferContextDma, hClass, channelInstance, internalDispChnClass)
#endif //__nvoc_kern_disp_h_disabled
#define kdispSetPushBufferParamsToPhysical_HAL(pGpu, pKernelDisplay, pDispChannel, hObjectBuffer, pBufferContextDma, hClass, channelInstance, internalDispChnClass) kdispSetPushBufferParamsToPhysical(pGpu, pKernelDisplay, pDispChannel, hObjectBuffer, pBufferContextDma, hClass, channelInstance, internalDispChnClass)
static inline NV_STATUS kdispAcquireDispChannelHw_56cd7a(struct KernelDisplay *pKernelDisplay, struct DispChannel *pDispChannel, NvU32 channelInstance, NvHandle hObjectBuffer, NvU32 initialGetPutOffset, NvBool allowGrabWithinSameClient, NvBool connectPbAtGrab) {
return NV_OK;
}
#ifdef __nvoc_kern_disp_h_disabled
static inline NV_STATUS kdispAcquireDispChannelHw(struct KernelDisplay *pKernelDisplay, struct DispChannel *pDispChannel, NvU32 channelInstance, NvHandle hObjectBuffer, NvU32 initialGetPutOffset, NvBool allowGrabWithinSameClient, NvBool connectPbAtGrab) {
NV_ASSERT_FAILED_PRECOMP("KernelDisplay was disabled!");
return NV_ERR_NOT_SUPPORTED;
}
#else //__nvoc_kern_disp_h_disabled
#define kdispAcquireDispChannelHw(pKernelDisplay, pDispChannel, channelInstance, hObjectBuffer, initialGetPutOffset, allowGrabWithinSameClient, connectPbAtGrab) kdispAcquireDispChannelHw_56cd7a(pKernelDisplay, pDispChannel, channelInstance, hObjectBuffer, initialGetPutOffset, allowGrabWithinSameClient, connectPbAtGrab)
#endif //__nvoc_kern_disp_h_disabled
#define kdispAcquireDispChannelHw_HAL(pKernelDisplay, pDispChannel, channelInstance, hObjectBuffer, initialGetPutOffset, allowGrabWithinSameClient, connectPbAtGrab) kdispAcquireDispChannelHw(pKernelDisplay, pDispChannel, channelInstance, hObjectBuffer, initialGetPutOffset, allowGrabWithinSameClient, connectPbAtGrab)
static inline NV_STATUS kdispReleaseDispChannelHw_56cd7a(struct KernelDisplay *pKernelDisplay, struct DispChannel *pDispChannel) {
return NV_OK;
}
#ifdef __nvoc_kern_disp_h_disabled
static inline NV_STATUS kdispReleaseDispChannelHw(struct KernelDisplay *pKernelDisplay, struct DispChannel *pDispChannel) {
NV_ASSERT_FAILED_PRECOMP("KernelDisplay was disabled!");
return NV_ERR_NOT_SUPPORTED;
}
#else //__nvoc_kern_disp_h_disabled
#define kdispReleaseDispChannelHw(pKernelDisplay, pDispChannel) kdispReleaseDispChannelHw_56cd7a(pKernelDisplay, pDispChannel)
#endif //__nvoc_kern_disp_h_disabled
#define kdispReleaseDispChannelHw_HAL(pKernelDisplay, pDispChannel) kdispReleaseDispChannelHw(pKernelDisplay, pDispChannel)
NV_STATUS kdispMapDispChannel_IMPL(struct KernelDisplay *pKernelDisplay, struct DispChannel *pDispChannel);
#ifdef __nvoc_kern_disp_h_disabled
static inline NV_STATUS kdispMapDispChannel(struct KernelDisplay *pKernelDisplay, struct DispChannel *pDispChannel) {
NV_ASSERT_FAILED_PRECOMP("KernelDisplay was disabled!");
return NV_ERR_NOT_SUPPORTED;
}
#else //__nvoc_kern_disp_h_disabled
#define kdispMapDispChannel(pKernelDisplay, pDispChannel) kdispMapDispChannel_IMPL(pKernelDisplay, pDispChannel)
#endif //__nvoc_kern_disp_h_disabled
#define kdispMapDispChannel_HAL(pKernelDisplay, pDispChannel) kdispMapDispChannel(pKernelDisplay, pDispChannel)
void kdispUnbindUnmapDispChannel_IMPL(struct KernelDisplay *pKernelDisplay, struct DispChannel *pDispChannel);
#ifdef __nvoc_kern_disp_h_disabled
static inline void kdispUnbindUnmapDispChannel(struct KernelDisplay *pKernelDisplay, struct DispChannel *pDispChannel) {
NV_ASSERT_FAILED_PRECOMP("KernelDisplay was disabled!");
}
#else //__nvoc_kern_disp_h_disabled
#define kdispUnbindUnmapDispChannel(pKernelDisplay, pDispChannel) kdispUnbindUnmapDispChannel_IMPL(pKernelDisplay, pDispChannel)
#endif //__nvoc_kern_disp_h_disabled
#define kdispUnbindUnmapDispChannel_HAL(pKernelDisplay, pDispChannel) kdispUnbindUnmapDispChannel(pKernelDisplay, pDispChannel)
NV_STATUS kdispRegisterRgLineCallback_IMPL(struct KernelDisplay *pKernelDisplay, struct RgLineCallback *pRgLineCallback, NvU32 head, NvU32 rgIntrLine, NvBool bEnable);
#ifdef __nvoc_kern_disp_h_disabled
static inline NV_STATUS kdispRegisterRgLineCallback(struct KernelDisplay *pKernelDisplay, struct RgLineCallback *pRgLineCallback, NvU32 head, NvU32 rgIntrLine, NvBool bEnable) {
NV_ASSERT_FAILED_PRECOMP("KernelDisplay was disabled!");
return NV_ERR_NOT_SUPPORTED;
}
#else //__nvoc_kern_disp_h_disabled
#define kdispRegisterRgLineCallback(pKernelDisplay, pRgLineCallback, head, rgIntrLine, bEnable) kdispRegisterRgLineCallback_IMPL(pKernelDisplay, pRgLineCallback, head, rgIntrLine, bEnable)
#endif //__nvoc_kern_disp_h_disabled
#define kdispRegisterRgLineCallback_HAL(pKernelDisplay, pRgLineCallback, head, rgIntrLine, bEnable) kdispRegisterRgLineCallback(pKernelDisplay, pRgLineCallback, head, rgIntrLine, bEnable)
void kdispInvokeRgLineCallback_KERNEL(struct KernelDisplay *pKernelDisplay, NvU32 head, NvU32 rgIntrLine, NvBool bIsIrqlIsr);
#ifdef __nvoc_kern_disp_h_disabled
static inline void kdispInvokeRgLineCallback(struct KernelDisplay *pKernelDisplay, NvU32 head, NvU32 rgIntrLine, NvBool bIsIrqlIsr) {
NV_ASSERT_FAILED_PRECOMP("KernelDisplay was disabled!");
}
#else //__nvoc_kern_disp_h_disabled
#define kdispInvokeRgLineCallback(pKernelDisplay, head, rgIntrLine, bIsIrqlIsr) kdispInvokeRgLineCallback_KERNEL(pKernelDisplay, head, rgIntrLine, bIsIrqlIsr)
#endif //__nvoc_kern_disp_h_disabled
#define kdispInvokeRgLineCallback_HAL(pKernelDisplay, head, rgIntrLine, bIsIrqlIsr) kdispInvokeRgLineCallback(pKernelDisplay, head, rgIntrLine, bIsIrqlIsr)
void kdispServiceVblank_KERNEL(struct OBJGPU *pGpu, struct KernelDisplay *pKernelDisplay, NvU32 arg0, NvU32 arg1, THREAD_STATE_NODE *arg2);
#ifdef __nvoc_kern_disp_h_disabled
static inline void kdispServiceVblank(struct OBJGPU *pGpu, struct KernelDisplay *pKernelDisplay, NvU32 arg0, NvU32 arg1, THREAD_STATE_NODE *arg2) {
NV_ASSERT_FAILED_PRECOMP("KernelDisplay was disabled!");
}
#else //__nvoc_kern_disp_h_disabled
#define kdispServiceVblank(pGpu, pKernelDisplay, arg0, arg1, arg2) kdispServiceVblank_KERNEL(pGpu, pKernelDisplay, arg0, arg1, arg2)
#endif //__nvoc_kern_disp_h_disabled
#define kdispServiceVblank_HAL(pGpu, pKernelDisplay, arg0, arg1, arg2) kdispServiceVblank(pGpu, pKernelDisplay, arg0, arg1, arg2)
NvU32 kdispReadPendingVblank_KERNEL(struct OBJGPU *pGpu, struct KernelDisplay *pKernelDisplay, THREAD_STATE_NODE *arg0);
#ifdef __nvoc_kern_disp_h_disabled
static inline NvU32 kdispReadPendingVblank(struct OBJGPU *pGpu, struct KernelDisplay *pKernelDisplay, THREAD_STATE_NODE *arg0) {
NV_ASSERT_FAILED_PRECOMP("KernelDisplay was disabled!");
return 0;
}
#else //__nvoc_kern_disp_h_disabled
#define kdispReadPendingVblank(pGpu, pKernelDisplay, arg0) kdispReadPendingVblank_KERNEL(pGpu, pKernelDisplay, arg0)
#endif //__nvoc_kern_disp_h_disabled
#define kdispReadPendingVblank_HAL(pGpu, pKernelDisplay, arg0) kdispReadPendingVblank(pGpu, pKernelDisplay, arg0)
static inline NvBool kdispGetVgaWorkspaceBase_ceaee8(struct OBJGPU *pGpu, struct KernelDisplay *pKernelDisplay, NvU64 *pOffset) {
NV_ASSERT_PRECOMP(0);
return ((NvBool)(0 != 0));
}
#ifdef __nvoc_kern_disp_h_disabled
static inline NvBool kdispGetVgaWorkspaceBase(struct OBJGPU *pGpu, struct KernelDisplay *pKernelDisplay, NvU64 *pOffset) {
NV_ASSERT_FAILED_PRECOMP("KernelDisplay was disabled!");
return NV_FALSE;
}
#else //__nvoc_kern_disp_h_disabled
#define kdispGetVgaWorkspaceBase(pGpu, pKernelDisplay, pOffset) kdispGetVgaWorkspaceBase_ceaee8(pGpu, pKernelDisplay, pOffset)
#endif //__nvoc_kern_disp_h_disabled
#define kdispGetVgaWorkspaceBase_HAL(pGpu, pKernelDisplay, pOffset) kdispGetVgaWorkspaceBase(pGpu, pKernelDisplay, pOffset)
void kdispInvokeDisplayModesetCallback_KERNEL(struct KernelDisplay *pKernelDisplay, NvBool bModesetStart, NvU32 minRequiredIsoBandwidthKBPS, NvU32 minRequiredFloorBandwidthKBPS);
#ifdef __nvoc_kern_disp_h_disabled
static inline void kdispInvokeDisplayModesetCallback(struct KernelDisplay *pKernelDisplay, NvBool bModesetStart, NvU32 minRequiredIsoBandwidthKBPS, NvU32 minRequiredFloorBandwidthKBPS) {
NV_ASSERT_FAILED_PRECOMP("KernelDisplay was disabled!");
}
#else //__nvoc_kern_disp_h_disabled
#define kdispInvokeDisplayModesetCallback(pKernelDisplay, bModesetStart, minRequiredIsoBandwidthKBPS, minRequiredFloorBandwidthKBPS) kdispInvokeDisplayModesetCallback_KERNEL(pKernelDisplay, bModesetStart, minRequiredIsoBandwidthKBPS, minRequiredFloorBandwidthKBPS)
#endif //__nvoc_kern_disp_h_disabled
#define kdispInvokeDisplayModesetCallback_HAL(pKernelDisplay, bModesetStart, minRequiredIsoBandwidthKBPS, minRequiredFloorBandwidthKBPS) kdispInvokeDisplayModesetCallback(pKernelDisplay, bModesetStart, minRequiredIsoBandwidthKBPS, minRequiredFloorBandwidthKBPS)
NV_STATUS kdispConstructEngine_IMPL(struct OBJGPU *pGpu, struct KernelDisplay *pKernelDisplay, ENGDESCRIPTOR engDesc);
static inline NV_STATUS kdispConstructEngine_DISPATCH(struct OBJGPU *pGpu, struct KernelDisplay *pKernelDisplay, ENGDESCRIPTOR engDesc) {
return pKernelDisplay->__kdispConstructEngine__(pGpu, pKernelDisplay, engDesc);
}
NV_STATUS kdispStatePreInitLocked_IMPL(struct OBJGPU *pGpu, struct KernelDisplay *pKernelDisplay);
static inline NV_STATUS kdispStatePreInitLocked_DISPATCH(struct OBJGPU *pGpu, struct KernelDisplay *pKernelDisplay) {
return pKernelDisplay->__kdispStatePreInitLocked__(pGpu, pKernelDisplay);
}
NV_STATUS kdispStateInitLocked_IMPL(struct OBJGPU *pGpu, struct KernelDisplay *pKernelDisplay);
static inline NV_STATUS kdispStateInitLocked_DISPATCH(struct OBJGPU *pGpu, struct KernelDisplay *pKernelDisplay) {
return pKernelDisplay->__kdispStateInitLocked__(pGpu, pKernelDisplay);
}
void kdispStateDestroy_IMPL(struct OBJGPU *pGpu, struct KernelDisplay *pKernelDisplay);
static inline void kdispStateDestroy_DISPATCH(struct OBJGPU *pGpu, struct KernelDisplay *pKernelDisplay) {
pKernelDisplay->__kdispStateDestroy__(pGpu, pKernelDisplay);
}
NV_STATUS kdispStateLoad_IMPL(struct OBJGPU *pGpu, struct KernelDisplay *pKernelDisplay, NvU32 flags);
static inline NV_STATUS kdispStateLoad_DISPATCH(struct OBJGPU *pGpu, struct KernelDisplay *pKernelDisplay, NvU32 flags) {
return pKernelDisplay->__kdispStateLoad__(pGpu, pKernelDisplay, flags);
}
NV_STATUS kdispStateUnload_IMPL(struct OBJGPU *pGpu, struct KernelDisplay *pKernelDisplay, NvU32 flags);
static inline NV_STATUS kdispStateUnload_DISPATCH(struct OBJGPU *pGpu, struct KernelDisplay *pKernelDisplay, NvU32 flags) {
return pKernelDisplay->__kdispStateUnload__(pGpu, pKernelDisplay, flags);
}
static inline NV_STATUS kdispReconcileTunableState_DISPATCH(POBJGPU pGpu, struct KernelDisplay *pEngstate, void *pTunableState) {
return pEngstate->__kdispReconcileTunableState__(pGpu, pEngstate, pTunableState);
}
static inline NV_STATUS kdispStatePreLoad_DISPATCH(POBJGPU pGpu, struct KernelDisplay *pEngstate, NvU32 arg0) {
return pEngstate->__kdispStatePreLoad__(pGpu, pEngstate, arg0);
}
static inline NV_STATUS kdispStatePostUnload_DISPATCH(POBJGPU pGpu, struct KernelDisplay *pEngstate, NvU32 arg0) {
return pEngstate->__kdispStatePostUnload__(pGpu, pEngstate, arg0);
}
static inline NV_STATUS kdispStatePreUnload_DISPATCH(POBJGPU pGpu, struct KernelDisplay *pEngstate, NvU32 arg0) {
return pEngstate->__kdispStatePreUnload__(pGpu, pEngstate, arg0);
}
static inline NV_STATUS kdispStateInitUnlocked_DISPATCH(POBJGPU pGpu, struct KernelDisplay *pEngstate) {
return pEngstate->__kdispStateInitUnlocked__(pGpu, pEngstate);
}
static inline void kdispInitMissing_DISPATCH(POBJGPU pGpu, struct KernelDisplay *pEngstate) {
pEngstate->__kdispInitMissing__(pGpu, pEngstate);
}
static inline NV_STATUS kdispStatePreInitUnlocked_DISPATCH(POBJGPU pGpu, struct KernelDisplay *pEngstate) {
return pEngstate->__kdispStatePreInitUnlocked__(pGpu, pEngstate);
}
static inline NV_STATUS kdispGetTunableState_DISPATCH(POBJGPU pGpu, struct KernelDisplay *pEngstate, void *pTunableState) {
return pEngstate->__kdispGetTunableState__(pGpu, pEngstate, pTunableState);
}
static inline NV_STATUS kdispCompareTunableState_DISPATCH(POBJGPU pGpu, struct KernelDisplay *pEngstate, void *pTunables1, void *pTunables2) {
return pEngstate->__kdispCompareTunableState__(pGpu, pEngstate, pTunables1, pTunables2);
}
static inline void kdispFreeTunableState_DISPATCH(POBJGPU pGpu, struct KernelDisplay *pEngstate, void *pTunableState) {
pEngstate->__kdispFreeTunableState__(pGpu, pEngstate, pTunableState);
}
static inline NV_STATUS kdispStatePostLoad_DISPATCH(POBJGPU pGpu, struct KernelDisplay *pEngstate, NvU32 arg0) {
return pEngstate->__kdispStatePostLoad__(pGpu, pEngstate, arg0);
}
static inline NV_STATUS kdispAllocTunableState_DISPATCH(POBJGPU pGpu, struct KernelDisplay *pEngstate, void **ppTunableState) {
return pEngstate->__kdispAllocTunableState__(pGpu, pEngstate, ppTunableState);
}
static inline NV_STATUS kdispSetTunableState_DISPATCH(POBJGPU pGpu, struct KernelDisplay *pEngstate, void *pTunableState) {
return pEngstate->__kdispSetTunableState__(pGpu, pEngstate, pTunableState);
}
static inline NvBool kdispIsPresent_DISPATCH(POBJGPU pGpu, struct KernelDisplay *pEngstate) {
return pEngstate->__kdispIsPresent__(pGpu, pEngstate);
}
void kdispDestruct_IMPL(struct KernelDisplay *pKernelDisplay);
#define __nvoc_kdispDestruct(pKernelDisplay) kdispDestruct_IMPL(pKernelDisplay)
NV_STATUS kdispConstructKhead_IMPL(struct KernelDisplay *pKernelDisplay);
#ifdef __nvoc_kern_disp_h_disabled
static inline NV_STATUS kdispConstructKhead(struct KernelDisplay *pKernelDisplay) {
NV_ASSERT_FAILED_PRECOMP("KernelDisplay was disabled!");
return NV_ERR_NOT_SUPPORTED;
}
#else //__nvoc_kern_disp_h_disabled
#define kdispConstructKhead(pKernelDisplay) kdispConstructKhead_IMPL(pKernelDisplay)
#endif //__nvoc_kern_disp_h_disabled
void kdispDestructKhead_IMPL(struct KernelDisplay *pKernelDisplay);
#ifdef __nvoc_kern_disp_h_disabled
static inline void kdispDestructKhead(struct KernelDisplay *pKernelDisplay) {
NV_ASSERT_FAILED_PRECOMP("KernelDisplay was disabled!");
}
#else //__nvoc_kern_disp_h_disabled
#define kdispDestructKhead(pKernelDisplay) kdispDestructKhead_IMPL(pKernelDisplay)
#endif //__nvoc_kern_disp_h_disabled
NV_STATUS kdispGetIntChnClsForHwCls_IMPL(struct KernelDisplay *pKernelDisplay, NvU32 hwClass, DISPCHNCLASS *pDispChnClass);
#ifdef __nvoc_kern_disp_h_disabled
static inline NV_STATUS kdispGetIntChnClsForHwCls(struct KernelDisplay *pKernelDisplay, NvU32 hwClass, DISPCHNCLASS *pDispChnClass) {
NV_ASSERT_FAILED_PRECOMP("KernelDisplay was disabled!");
return NV_ERR_NOT_SUPPORTED;
}
#else //__nvoc_kern_disp_h_disabled
#define kdispGetIntChnClsForHwCls(pKernelDisplay, hwClass, pDispChnClass) kdispGetIntChnClsForHwCls_IMPL(pKernelDisplay, hwClass, pDispChnClass)
#endif //__nvoc_kern_disp_h_disabled
void kdispNotifyEvent_IMPL(struct OBJGPU *pGpu, struct KernelDisplay *pKernelDisplay, NvU32 notifyIndex, void *pNotifyParams, NvU32 notifyParamsSize, NvV32 info32, NvV16 info16);
#ifdef __nvoc_kern_disp_h_disabled
static inline void kdispNotifyEvent(struct OBJGPU *pGpu, struct KernelDisplay *pKernelDisplay, NvU32 notifyIndex, void *pNotifyParams, NvU32 notifyParamsSize, NvV32 info32, NvV16 info16) {
NV_ASSERT_FAILED_PRECOMP("KernelDisplay was disabled!");
}
#else //__nvoc_kern_disp_h_disabled
#define kdispNotifyEvent(pGpu, pKernelDisplay, notifyIndex, pNotifyParams, notifyParamsSize, info32, info16) kdispNotifyEvent_IMPL(pGpu, pKernelDisplay, notifyIndex, pNotifyParams, notifyParamsSize, info32, info16)
#endif //__nvoc_kern_disp_h_disabled
void kdispSetWarPurgeSatellitesOnCoreFree_IMPL(struct KernelDisplay *pKernelDisplay, NvBool value);
#ifdef __nvoc_kern_disp_h_disabled
static inline void kdispSetWarPurgeSatellitesOnCoreFree(struct KernelDisplay *pKernelDisplay, NvBool value) {
NV_ASSERT_FAILED_PRECOMP("KernelDisplay was disabled!");
}
#else //__nvoc_kern_disp_h_disabled
#define kdispSetWarPurgeSatellitesOnCoreFree(pKernelDisplay, value) kdispSetWarPurgeSatellitesOnCoreFree_IMPL(pKernelDisplay, value)
#endif //__nvoc_kern_disp_h_disabled
#undef PRIVATE_FIELD
void
dispdeviceFillVgaSavedDisplayState( struct OBJGPU *pGpu,
NvU64 vgaAddr,
NvU8 vgaMemType,
NvBool vgaValid,
NvU64 workspaceAddr,
NvU8 workspaceMemType,
NvBool workspaceValid,
NvBool baseValid,
NvBool workspaceBaseValid
);
static NV_INLINE struct KernelHead*
kdispGetHead
(
struct KernelDisplay *pKernelDisplay,
NvU32 head
)
{
if (head >= OBJ_MAX_HEADS)
{
return NULL;
}
return pKernelDisplay->pKernelHead[head];
}
static NV_INLINE NvU32
kdispGetNumHeads(struct KernelDisplay *pKernelDisplay)
{
NV_ASSERT(pKernelDisplay->pStaticInfo != NULL);
return pKernelDisplay->pStaticInfo->numHeads;
}
static NV_INLINE NvU32
kdispGetIsPrimaryVga(struct KernelDisplay *pKernelDisplay)
{
NV_ASSERT(pKernelDisplay->pStaticInfo != NULL);
return pKernelDisplay->pStaticInfo->bPrimaryVga;
}
#endif // KERN_DISP_H
#ifdef __cplusplus
} // extern "C"
#endif
#endif // _G_KERN_DISP_NVOC_H_
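
/*
 * Usage sketch (hypothetical helper, not part of the header above): the
 * static inline accessors kdispGetNumHeads() and kdispGetHead() defined at
 * the end of this header are the intended way to walk heads without
 * reaching into KernelDisplay internals.  kdispCountAttachedHeads() is an
 * assumed example name; everything else it uses comes from this header.
 */
static NV_INLINE NvU32
kdispCountAttachedHeads(struct KernelDisplay *pKernelDisplay)
{
    NvU32 head;
    NvU32 count = 0;

    /* kdispGetNumHeads() asserts that pStaticInfo is already populated. */
    for (head = 0; head < kdispGetNumHeads(pKernelDisplay); head++)
    {
        /* kdispGetHead() returns NULL for out-of-range indices. */
        if (kdispGetHead(pKernelDisplay, head) != NULL)
        {
            count++;
        }
    }
    return count;
}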


@@ -0,0 +1,176 @@
#define NVOC_KERNEL_HEAD_H_PRIVATE_ACCESS_ALLOWED
#include "nvoc/runtime.h"
#include "nvoc/rtti.h"
#include "nvtypes.h"
#include "nvport/nvport.h"
#include "nvport/inline/util_valist.h"
#include "utils/nvassert.h"
#include "g_kernel_head_nvoc.h"
#ifdef DEBUG
char __nvoc_class_id_uniqueness_check_0x0145e6 = 1;
#endif
extern const struct NVOC_CLASS_DEF __nvoc_class_def_KernelHead;
extern const struct NVOC_CLASS_DEF __nvoc_class_def_Object;
void __nvoc_init_KernelHead(KernelHead*, RmHalspecOwner* );
void __nvoc_init_funcTable_KernelHead(KernelHead*, RmHalspecOwner* );
NV_STATUS __nvoc_ctor_KernelHead(KernelHead*, RmHalspecOwner* );
void __nvoc_init_dataField_KernelHead(KernelHead*, RmHalspecOwner* );
void __nvoc_dtor_KernelHead(KernelHead*);
extern const struct NVOC_EXPORT_INFO __nvoc_export_info_KernelHead;
static const struct NVOC_RTTI __nvoc_rtti_KernelHead_KernelHead = {
/*pClassDef=*/ &__nvoc_class_def_KernelHead,
/*dtor=*/ (NVOC_DYNAMIC_DTOR) &__nvoc_dtor_KernelHead,
/*offset=*/ 0,
};
static const struct NVOC_RTTI __nvoc_rtti_KernelHead_Object = {
/*pClassDef=*/ &__nvoc_class_def_Object,
/*dtor=*/ &__nvoc_destructFromBase,
/*offset=*/ NV_OFFSETOF(KernelHead, __nvoc_base_Object),
};
static const struct NVOC_CASTINFO __nvoc_castinfo_KernelHead = {
/*numRelatives=*/ 2,
/*relatives=*/ {
&__nvoc_rtti_KernelHead_KernelHead,
&__nvoc_rtti_KernelHead_Object,
},
};
const struct NVOC_CLASS_DEF __nvoc_class_def_KernelHead =
{
/*classInfo=*/ {
/*size=*/ sizeof(KernelHead),
/*classId=*/ classId(KernelHead),
/*providerId=*/ &__nvoc_rtti_provider,
#if NV_PRINTF_STRINGS_ALLOWED
/*name=*/ "KernelHead",
#endif
},
/*objCreatefn=*/ (NVOC_DYNAMIC_OBJ_CREATE) &__nvoc_objCreateDynamic_KernelHead,
/*pCastInfo=*/ &__nvoc_castinfo_KernelHead,
/*pExportInfo=*/ &__nvoc_export_info_KernelHead
};
const struct NVOC_EXPORT_INFO __nvoc_export_info_KernelHead =
{
/*numEntries=*/ 0,
/*pExportEntries=*/ 0
};
void __nvoc_dtor_Object(Object*);
void __nvoc_dtor_KernelHead(KernelHead *pThis) {
__nvoc_dtor_Object(&pThis->__nvoc_base_Object);
PORT_UNREFERENCED_VARIABLE(pThis);
}
void __nvoc_init_dataField_KernelHead(KernelHead *pThis, RmHalspecOwner *pRmhalspecowner) {
RmVariantHal *rmVariantHal = &pRmhalspecowner->rmVariantHal;
const unsigned long rmVariantHal_HalVarIdx = (unsigned long)rmVariantHal->__nvoc_HalVarIdx;
ChipHal *chipHal = &pRmhalspecowner->chipHal;
const unsigned long chipHal_HalVarIdx = (unsigned long)chipHal->__nvoc_HalVarIdx;
PORT_UNREFERENCED_VARIABLE(pThis);
PORT_UNREFERENCED_VARIABLE(pRmhalspecowner);
PORT_UNREFERENCED_VARIABLE(rmVariantHal);
PORT_UNREFERENCED_VARIABLE(rmVariantHal_HalVarIdx);
PORT_UNREFERENCED_VARIABLE(chipHal);
PORT_UNREFERENCED_VARIABLE(chipHal_HalVarIdx);
}
NV_STATUS __nvoc_ctor_Object(Object* );
NV_STATUS __nvoc_ctor_KernelHead(KernelHead *pThis, RmHalspecOwner *pRmhalspecowner) {
NV_STATUS status = NV_OK;
status = __nvoc_ctor_Object(&pThis->__nvoc_base_Object);
if (status != NV_OK) goto __nvoc_ctor_KernelHead_fail_Object;
__nvoc_init_dataField_KernelHead(pThis, pRmhalspecowner);
status = __nvoc_kheadConstruct(pThis);
if (status != NV_OK) goto __nvoc_ctor_KernelHead_fail__init;
goto __nvoc_ctor_KernelHead_exit; // Success
__nvoc_ctor_KernelHead_fail__init:
__nvoc_dtor_Object(&pThis->__nvoc_base_Object);
__nvoc_ctor_KernelHead_fail_Object:
__nvoc_ctor_KernelHead_exit:
return status;
}
static void __nvoc_init_funcTable_KernelHead_1(KernelHead *pThis, RmHalspecOwner *pRmhalspecowner) {
RmVariantHal *rmVariantHal = &pRmhalspecowner->rmVariantHal;
const unsigned long rmVariantHal_HalVarIdx = (unsigned long)rmVariantHal->__nvoc_HalVarIdx;
ChipHal *chipHal = &pRmhalspecowner->chipHal;
const unsigned long chipHal_HalVarIdx = (unsigned long)chipHal->__nvoc_HalVarIdx;
PORT_UNREFERENCED_VARIABLE(pThis);
PORT_UNREFERENCED_VARIABLE(pRmhalspecowner);
PORT_UNREFERENCED_VARIABLE(rmVariantHal);
PORT_UNREFERENCED_VARIABLE(rmVariantHal_HalVarIdx);
PORT_UNREFERENCED_VARIABLE(chipHal);
PORT_UNREFERENCED_VARIABLE(chipHal_HalVarIdx);
}
void __nvoc_init_funcTable_KernelHead(KernelHead *pThis, RmHalspecOwner *pRmhalspecowner) {
__nvoc_init_funcTable_KernelHead_1(pThis, pRmhalspecowner);
}
void __nvoc_init_Object(Object*);
void __nvoc_init_KernelHead(KernelHead *pThis, RmHalspecOwner *pRmhalspecowner) {
pThis->__nvoc_pbase_KernelHead = pThis;
pThis->__nvoc_pbase_Object = &pThis->__nvoc_base_Object;
__nvoc_init_Object(&pThis->__nvoc_base_Object);
__nvoc_init_funcTable_KernelHead(pThis, pRmhalspecowner);
}
NV_STATUS __nvoc_objCreate_KernelHead(KernelHead **ppThis, Dynamic *pParent, NvU32 createFlags) {
NV_STATUS status;
Object *pParentObj;
KernelHead *pThis;
RmHalspecOwner *pRmhalspecowner;
pThis = portMemAllocNonPaged(sizeof(KernelHead));
if (pThis == NULL) return NV_ERR_NO_MEMORY;
portMemSet(pThis, 0, sizeof(KernelHead));
__nvoc_initRtti(staticCast(pThis, Dynamic), &__nvoc_class_def_KernelHead);
if (pParent != NULL && !(createFlags & NVOC_OBJ_CREATE_FLAGS_PARENT_HALSPEC_ONLY))
{
pParentObj = dynamicCast(pParent, Object);
objAddChild(pParentObj, &pThis->__nvoc_base_Object);
}
else
{
pThis->__nvoc_base_Object.pParent = NULL;
}
if ((pRmhalspecowner = dynamicCast(pParent, RmHalspecOwner)) == NULL)
pRmhalspecowner = objFindAncestorOfType(RmHalspecOwner, pParent);
NV_ASSERT_OR_RETURN(pRmhalspecowner != NULL, NV_ERR_INVALID_ARGUMENT);
__nvoc_init_KernelHead(pThis, pRmhalspecowner);
status = __nvoc_ctor_KernelHead(pThis, pRmhalspecowner);
if (status != NV_OK) goto __nvoc_objCreate_KernelHead_cleanup;
*ppThis = pThis;
return NV_OK;
__nvoc_objCreate_KernelHead_cleanup:
// do not call destructors here since the constructor already called them
portMemFree(pThis);
return status;
}
NV_STATUS __nvoc_objCreateDynamic_KernelHead(KernelHead **ppThis, Dynamic *pParent, NvU32 createFlags, va_list args) {
NV_STATUS status;
status = __nvoc_objCreate_KernelHead(ppThis, pParent, createFlags);
return status;
}
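
/*
 * Creation-flow sketch (hypothetical caller, assumed names): a KernelHead
 * is brought up through __nvoc_objCreate_KernelHead() above, which
 * allocates and zeroes the object, initializes its RTTI, parents it under
 * pParent (unless NVOC_OBJ_CREATE_FLAGS_PARENT_HALSPEC_ONLY is passed),
 * locates the RmHalspecOwner ancestor, and then runs the constructor
 * chain.  Creation fails with NV_ERR_INVALID_ARGUMENT if no RmHalspecOwner
 * can be found above pParent.
 */
static NV_STATUS
exampleCreateKernelHead(Dynamic *pParent, KernelHead **ppKernelHead)
{
    /* createFlags = 0: attach the new head as a regular child of pParent. */
    return __nvoc_objCreate_KernelHead(ppKernelHead, pParent, 0);
}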


@@ -0,0 +1,354 @@
#ifndef _G_KERNEL_HEAD_NVOC_H_
#define _G_KERNEL_HEAD_NVOC_H_
#include "nvoc/runtime.h"
#ifdef __cplusplus
extern "C" {
#endif
/*
* SPDX-FileCopyrightText: Copyright (c) 2021 NVIDIA CORPORATION & AFFILIATES. All rights reserved.
* SPDX-License-Identifier: MIT
*
* Permission is hereby granted, free of charge, to any person obtaining a
* copy of this software and associated documentation files (the "Software"),
* to deal in the Software without restriction, including without limitation
* the rights to use, copy, modify, merge, publish, distribute, sublicense,
* and/or sell copies of the Software, and to permit persons to whom the
* Software is furnished to do so, subject to the following conditions:
*
* The above copyright notice and this permission notice shall be included in
* all copies or substantial portions of the Software.
*
* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
* IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
* FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL
* THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
* LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
* FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER
* DEALINGS IN THE SOFTWARE.
*/
/**************************** Kernelhead Routines **************************\
* *
* Kernel head object function Definitions. *
* *
\***************************************************************************/
#include "g_kernel_head_nvoc.h"
#ifndef KERNEL_HEAD_H
#define KERNEL_HEAD_H
/* ------------------------ Includes --------------------------------------- */
#include "gpu/disp/vblank_callback/vblank.h"
#include "gpu/gpu_halspec.h"
/* ------------------------ Types definitions ------------------------------ */
enum
{
headIntr_none = 0,
headIntr_vblank = NVBIT(0),
};
/* ------------------------ Macros & Defines ------------------------------- */
#ifdef NVOC_KERNEL_HEAD_H_PRIVATE_ACCESS_ALLOWED
#define PRIVATE_FIELD(x) x
#else
#define PRIVATE_FIELD(x) NVOC_PRIVATE_FIELD(x)
#endif
struct __nvoc_inner_struc_KernelHead_1__ {
struct {
NvU32 Total;
NvU32 LowLatency;
NvU32 NormLatency;
} Counters;
struct {
VBLANKCALLBACK *pListLL;
VBLANKCALLBACK *pListNL;
} Callback;
NvU32 IntrState;
};
struct KernelHead {
const struct NVOC_RTTI *__nvoc_rtti;
struct Object __nvoc_base_Object;
struct Object *__nvoc_pbase_Object;
struct KernelHead *__nvoc_pbase_KernelHead;
struct __nvoc_inner_struc_KernelHead_1__ Vblank;
NvU32 PublicId;
};
#ifndef __NVOC_CLASS_KernelHead_TYPEDEF__
#define __NVOC_CLASS_KernelHead_TYPEDEF__
typedef struct KernelHead KernelHead;
#endif /* __NVOC_CLASS_KernelHead_TYPEDEF__ */
#ifndef __nvoc_class_id_KernelHead
#define __nvoc_class_id_KernelHead 0x0145e6
#endif /* __nvoc_class_id_KernelHead */
extern const struct NVOC_CLASS_DEF __nvoc_class_def_KernelHead;
#define __staticCast_KernelHead(pThis) \
((pThis)->__nvoc_pbase_KernelHead)
#ifdef __nvoc_kernel_head_h_disabled
#define __dynamicCast_KernelHead(pThis) ((KernelHead*)NULL)
#else //__nvoc_kernel_head_h_disabled
#define __dynamicCast_KernelHead(pThis) \
((KernelHead*)__nvoc_dynamicCast(staticCast((pThis), Dynamic), classInfo(KernelHead)))
#endif //__nvoc_kernel_head_h_disabled
NV_STATUS __nvoc_objCreateDynamic_KernelHead(KernelHead**, Dynamic*, NvU32, va_list);
NV_STATUS __nvoc_objCreate_KernelHead(KernelHead**, Dynamic*, NvU32);
#define __objCreate_KernelHead(ppNewObj, pParent, createFlags) \
__nvoc_objCreate_KernelHead((ppNewObj), staticCast((pParent), Dynamic), (createFlags))
NvU32 kheadGetVblankTotalCounter_IMPL(struct KernelHead *pKernelHead);
#ifdef __nvoc_kernel_head_h_disabled
static inline NvU32 kheadGetVblankTotalCounter(struct KernelHead *pKernelHead) {
NV_ASSERT_FAILED_PRECOMP("KernelHead was disabled!");
return 0;
}
#else //__nvoc_kernel_head_h_disabled
#define kheadGetVblankTotalCounter(pKernelHead) kheadGetVblankTotalCounter_IMPL(pKernelHead)
#endif //__nvoc_kernel_head_h_disabled
#define kheadGetVblankTotalCounter_HAL(pKernelHead) kheadGetVblankTotalCounter(pKernelHead)
void kheadSetVblankTotalCounter_IMPL(struct KernelHead *pKernelHead, NvU32 arg0);
#ifdef __nvoc_kernel_head_h_disabled
static inline void kheadSetVblankTotalCounter(struct KernelHead *pKernelHead, NvU32 arg0) {
NV_ASSERT_FAILED_PRECOMP("KernelHead was disabled!");
}
#else //__nvoc_kernel_head_h_disabled
#define kheadSetVblankTotalCounter(pKernelHead, arg0) kheadSetVblankTotalCounter_IMPL(pKernelHead, arg0)
#endif //__nvoc_kernel_head_h_disabled
#define kheadSetVblankTotalCounter_HAL(pKernelHead, arg0) kheadSetVblankTotalCounter(pKernelHead, arg0)
NvU32 kheadGetVblankLowLatencyCounter_IMPL(struct KernelHead *pKernelHead);
#ifdef __nvoc_kernel_head_h_disabled
static inline NvU32 kheadGetVblankLowLatencyCounter(struct KernelHead *pKernelHead) {
NV_ASSERT_FAILED_PRECOMP("KernelHead was disabled!");
return 0;
}
#else //__nvoc_kernel_head_h_disabled
#define kheadGetVblankLowLatencyCounter(pKernelHead) kheadGetVblankLowLatencyCounter_IMPL(pKernelHead)
#endif //__nvoc_kernel_head_h_disabled
#define kheadGetVblankLowLatencyCounter_HAL(pKernelHead) kheadGetVblankLowLatencyCounter(pKernelHead)
void kheadSetVblankLowLatencyCounter_IMPL(struct KernelHead *pKernelHead, NvU32 arg0);
#ifdef __nvoc_kernel_head_h_disabled
static inline void kheadSetVblankLowLatencyCounter(struct KernelHead *pKernelHead, NvU32 arg0) {
NV_ASSERT_FAILED_PRECOMP("KernelHead was disabled!");
}
#else //__nvoc_kernel_head_h_disabled
#define kheadSetVblankLowLatencyCounter(pKernelHead, arg0) kheadSetVblankLowLatencyCounter_IMPL(pKernelHead, arg0)
#endif //__nvoc_kernel_head_h_disabled
#define kheadSetVblankLowLatencyCounter_HAL(pKernelHead, arg0) kheadSetVblankLowLatencyCounter(pKernelHead, arg0)
static inline NvU32 kheadGetVblankNormLatencyCounter_46f6a7(struct KernelHead *pKernelHead) {
return NV_ERR_NOT_SUPPORTED;
}
#ifdef __nvoc_kernel_head_h_disabled
static inline NvU32 kheadGetVblankNormLatencyCounter(struct KernelHead *pKernelHead) {
NV_ASSERT_FAILED_PRECOMP("KernelHead was disabled!");
return 0;
}
#else //__nvoc_kernel_head_h_disabled
#define kheadGetVblankNormLatencyCounter(pKernelHead) kheadGetVblankNormLatencyCounter_46f6a7(pKernelHead)
#endif //__nvoc_kernel_head_h_disabled
#define kheadGetVblankNormLatencyCounter_HAL(pKernelHead) kheadGetVblankNormLatencyCounter(pKernelHead)
static inline void kheadSetVblankNormLatencyCounter_b3696a(struct KernelHead *pKernelHead, NvU32 arg0) {
return;
}
#ifdef __nvoc_kernel_head_h_disabled
static inline void kheadSetVblankNormLatencyCounter(struct KernelHead *pKernelHead, NvU32 arg0) {
NV_ASSERT_FAILED_PRECOMP("KernelHead was disabled!");
}
#else //__nvoc_kernel_head_h_disabled
#define kheadSetVblankNormLatencyCounter(pKernelHead, arg0) kheadSetVblankNormLatencyCounter_b3696a(pKernelHead, arg0)
#endif //__nvoc_kernel_head_h_disabled
#define kheadSetVblankNormLatencyCounter_HAL(pKernelHead, arg0) kheadSetVblankNormLatencyCounter(pKernelHead, arg0)
static inline NvBool kheadReadVblankIntrEnable_491d52(struct OBJGPU *pGpu, struct KernelHead *pKernelHead) {
return ((NvBool)(0 != 0));
}
#ifdef __nvoc_kernel_head_h_disabled
static inline NvBool kheadReadVblankIntrEnable(struct OBJGPU *pGpu, struct KernelHead *pKernelHead) {
NV_ASSERT_FAILED_PRECOMP("KernelHead was disabled!");
return NV_FALSE;
}
#else //__nvoc_kernel_head_h_disabled
#define kheadReadVblankIntrEnable(pGpu, pKernelHead) kheadReadVblankIntrEnable_491d52(pGpu, pKernelHead)
#endif //__nvoc_kernel_head_h_disabled
#define kheadReadVblankIntrEnable_HAL(pGpu, pKernelHead) kheadReadVblankIntrEnable(pGpu, pKernelHead)
static inline NvBool kheadGetDisplayInitialized_491d52(struct OBJGPU *pGpu, struct KernelHead *pKernelHead) {
return ((NvBool)(0 != 0));
}
#ifdef __nvoc_kernel_head_h_disabled
static inline NvBool kheadGetDisplayInitialized(struct OBJGPU *pGpu, struct KernelHead *pKernelHead) {
NV_ASSERT_FAILED_PRECOMP("KernelHead was disabled!");
return NV_FALSE;
}
#else //__nvoc_kernel_head_h_disabled
#define kheadGetDisplayInitialized(pGpu, pKernelHead) kheadGetDisplayInitialized_491d52(pGpu, pKernelHead)
#endif //__nvoc_kernel_head_h_disabled
#define kheadGetDisplayInitialized_HAL(pGpu, pKernelHead) kheadGetDisplayInitialized(pGpu, pKernelHead)
static inline void kheadWriteVblankIntrEnable_b3696a(struct OBJGPU *pGpu, struct KernelHead *pKernelHead, NvBool arg0) {
return;
}
#ifdef __nvoc_kernel_head_h_disabled
static inline void kheadWriteVblankIntrEnable(struct OBJGPU *pGpu, struct KernelHead *pKernelHead, NvBool arg0) {
NV_ASSERT_FAILED_PRECOMP("KernelHead was disabled!");
}
#else //__nvoc_kernel_head_h_disabled
#define kheadWriteVblankIntrEnable(pGpu, pKernelHead, arg0) kheadWriteVblankIntrEnable_b3696a(pGpu, pKernelHead, arg0)
#endif //__nvoc_kernel_head_h_disabled
#define kheadWriteVblankIntrEnable_HAL(pGpu, pKernelHead, arg0) kheadWriteVblankIntrEnable(pGpu, pKernelHead, arg0)
static inline void kheadProcessVblankCallbacks_e426af(struct OBJGPU *pGpu, struct KernelHead *pKernelHead, NvU32 arg0) {
NV_ASSERT_PRECOMP(0);
return;
}
#ifdef __nvoc_kernel_head_h_disabled
static inline void kheadProcessVblankCallbacks(struct OBJGPU *pGpu, struct KernelHead *pKernelHead, NvU32 arg0) {
NV_ASSERT_FAILED_PRECOMP("KernelHead was disabled!");
}
#else //__nvoc_kernel_head_h_disabled
#define kheadProcessVblankCallbacks(pGpu, pKernelHead, arg0) kheadProcessVblankCallbacks_e426af(pGpu, pKernelHead, arg0)
#endif //__nvoc_kernel_head_h_disabled
#define kheadProcessVblankCallbacks_HAL(pGpu, pKernelHead, arg0) kheadProcessVblankCallbacks(pGpu, pKernelHead, arg0)
static inline void kheadResetPendingVblank_e426af(struct OBJGPU *pGpu, struct KernelHead *pKhead, THREAD_STATE_NODE *arg0) {
NV_ASSERT_PRECOMP(0);
return;
}
#ifdef __nvoc_kernel_head_h_disabled
static inline void kheadResetPendingVblank(struct OBJGPU *pGpu, struct KernelHead *pKhead, THREAD_STATE_NODE *arg0) {
NV_ASSERT_FAILED_PRECOMP("KernelHead was disabled!");
}
#else //__nvoc_kernel_head_h_disabled
#define kheadResetPendingVblank(pGpu, pKhead, arg0) kheadResetPendingVblank_e426af(pGpu, pKhead, arg0)
#endif //__nvoc_kernel_head_h_disabled
#define kheadResetPendingVblank_HAL(pGpu, pKhead, arg0) kheadResetPendingVblank(pGpu, pKhead, arg0)
static inline void kheadResetPendingVblankForKernel_e426af(struct OBJGPU *pGpu, struct KernelHead *pKhead, THREAD_STATE_NODE *arg0) {
NV_ASSERT_PRECOMP(0);
return;
}
#ifdef __nvoc_kernel_head_h_disabled
static inline void kheadResetPendingVblankForKernel(struct OBJGPU *pGpu, struct KernelHead *pKhead, THREAD_STATE_NODE *arg0) {
NV_ASSERT_FAILED_PRECOMP("KernelHead was disabled!");
}
#else //__nvoc_kernel_head_h_disabled
#define kheadResetPendingVblankForKernel(pGpu, pKhead, arg0) kheadResetPendingVblankForKernel_e426af(pGpu, pKhead, arg0)
#endif //__nvoc_kernel_head_h_disabled
#define kheadResetPendingVblankForKernel_HAL(pGpu, pKhead, arg0) kheadResetPendingVblankForKernel(pGpu, pKhead, arg0)
static inline NvU32 kheadReadPendingVblank_92bfc3(struct OBJGPU *pGpu, struct KernelHead *pKernelHead, NvU32 intr) {
NV_ASSERT_PRECOMP(0);
return NV_ERR_NOT_SUPPORTED;
}
#ifdef __nvoc_kernel_head_h_disabled
static inline NvU32 kheadReadPendingVblank(struct OBJGPU *pGpu, struct KernelHead *pKernelHead, NvU32 intr) {
NV_ASSERT_FAILED_PRECOMP("KernelHead was disabled!");
return 0;
}
#else //__nvoc_kernel_head_h_disabled
#define kheadReadPendingVblank(pGpu, pKernelHead, intr) kheadReadPendingVblank_92bfc3(pGpu, pKernelHead, intr)
#endif //__nvoc_kernel_head_h_disabled
#define kheadReadPendingVblank_HAL(pGpu, pKernelHead, intr) kheadReadPendingVblank(pGpu, pKernelHead, intr)
NV_STATUS kheadConstruct_IMPL(struct KernelHead *arg_pKernelHead);
#define __nvoc_kheadConstruct(arg_pKernelHead) kheadConstruct_IMPL(arg_pKernelHead)
void kheadAddVblankCallback_IMPL(struct OBJGPU *pGpu, struct KernelHead *pKernelHead, VBLANKCALLBACK *arg0);
#ifdef __nvoc_kernel_head_h_disabled
static inline void kheadAddVblankCallback(struct OBJGPU *pGpu, struct KernelHead *pKernelHead, VBLANKCALLBACK *arg0) {
NV_ASSERT_FAILED_PRECOMP("KernelHead was disabled!");
}
#else //__nvoc_kernel_head_h_disabled
#define kheadAddVblankCallback(pGpu, pKernelHead, arg0) kheadAddVblankCallback_IMPL(pGpu, pKernelHead, arg0)
#endif //__nvoc_kernel_head_h_disabled
void kheadDeleteVblankCallback_IMPL(struct OBJGPU *pGpu, struct KernelHead *pKernelHead, VBLANKCALLBACK *arg0);
#ifdef __nvoc_kernel_head_h_disabled
static inline void kheadDeleteVblankCallback(struct OBJGPU *pGpu, struct KernelHead *pKernelHead, VBLANKCALLBACK *arg0) {
NV_ASSERT_FAILED_PRECOMP("KernelHead was disabled!");
}
#else //__nvoc_kernel_head_h_disabled
#define kheadDeleteVblankCallback(pGpu, pKernelHead, arg0) kheadDeleteVblankCallback_IMPL(pGpu, pKernelHead, arg0)
#endif //__nvoc_kernel_head_h_disabled
NvU32 kheadCheckVblankCallbacksQueued_IMPL(struct OBJGPU *pGpu, struct KernelHead *pKernelHead, NvU32 arg0, NvU32 *arg1);
#ifdef __nvoc_kernel_head_h_disabled
static inline NvU32 kheadCheckVblankCallbacksQueued(struct OBJGPU *pGpu, struct KernelHead *pKernelHead, NvU32 arg0, NvU32 *arg1) {
NV_ASSERT_FAILED_PRECOMP("KernelHead was disabled!");
return 0;
}
#else //__nvoc_kernel_head_h_disabled
#define kheadCheckVblankCallbacksQueued(pGpu, pKernelHead, arg0, arg1) kheadCheckVblankCallbacksQueued_IMPL(pGpu, pKernelHead, arg0, arg1)
#endif //__nvoc_kernel_head_h_disabled
NvU32 kheadReadVblankIntrState_IMPL(struct OBJGPU *pGpu, struct KernelHead *pKernelHead);
#ifdef __nvoc_kernel_head_h_disabled
static inline NvU32 kheadReadVblankIntrState(struct OBJGPU *pGpu, struct KernelHead *pKernelHead) {
NV_ASSERT_FAILED_PRECOMP("KernelHead was disabled!");
return 0;
}
#else //__nvoc_kernel_head_h_disabled
#define kheadReadVblankIntrState(pGpu, pKernelHead) kheadReadVblankIntrState_IMPL(pGpu, pKernelHead)
#endif //__nvoc_kernel_head_h_disabled
void kheadWriteVblankIntrState_IMPL(struct OBJGPU *pGpu, struct KernelHead *pKernelHead, NvU32 arg0);
#ifdef __nvoc_kernel_head_h_disabled
static inline void kheadWriteVblankIntrState(struct OBJGPU *pGpu, struct KernelHead *pKernelHead, NvU32 arg0) {
NV_ASSERT_FAILED_PRECOMP("KernelHead was disabled!");
}
#else //__nvoc_kernel_head_h_disabled
#define kheadWriteVblankIntrState(pGpu, pKernelHead, arg0) kheadWriteVblankIntrState_IMPL(pGpu, pKernelHead, arg0)
#endif //__nvoc_kernel_head_h_disabled
#undef PRIVATE_FIELD
void kheadProcessVblankCallbacks_IMPL(struct OBJGPU *pGpu, struct KernelHead *pKernelHead, NvU32 state);
#endif // KERNEL_HEAD_H
#ifdef __cplusplus
} // extern "C"
#endif
#endif // _G_KERNEL_HEAD_NVOC_H_
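
/*
 * Usage sketch (hypothetical helper, assuming __nvoc_kernel_head_h_disabled
 * is not defined so the real _IMPL accessors are selected instead of the
 * stubs): the vblank counters are read and written only through the
 * accessor pairs declared in this header; the _HAL wrappers currently
 * resolve to the same accessors and exist so chip-specific variants can be
 * routed in later.
 */
static inline void
exampleBumpVblankTotalCounter(struct KernelHead *pKernelHead)
{
    NvU32 total = kheadGetVblankTotalCounter_HAL(pKernelHead);

    kheadSetVblankTotalCounter_HAL(pKernelHead, total + 1);
}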


File diff suppressed because it is too large


@@ -0,0 +1,428 @@
#define NVOC_MEM_MGR_H_PRIVATE_ACCESS_ALLOWED
#include "nvoc/runtime.h"
#include "nvoc/rtti.h"
#include "nvtypes.h"
#include "nvport/nvport.h"
#include "nvport/inline/util_valist.h"
#include "utils/nvassert.h"
#include "g_mem_mgr_nvoc.h"
#ifdef DEBUG
char __nvoc_class_id_uniqueness_check_0x22ad47 = 1;
#endif
extern const struct NVOC_CLASS_DEF __nvoc_class_def_MemoryManager;
extern const struct NVOC_CLASS_DEF __nvoc_class_def_Object;
extern const struct NVOC_CLASS_DEF __nvoc_class_def_OBJENGSTATE;
void __nvoc_init_MemoryManager(MemoryManager*, RmHalspecOwner* );
void __nvoc_init_funcTable_MemoryManager(MemoryManager*, RmHalspecOwner* );
NV_STATUS __nvoc_ctor_MemoryManager(MemoryManager*, RmHalspecOwner* );
void __nvoc_init_dataField_MemoryManager(MemoryManager*, RmHalspecOwner* );
void __nvoc_dtor_MemoryManager(MemoryManager*);
extern const struct NVOC_EXPORT_INFO __nvoc_export_info_MemoryManager;
static const struct NVOC_RTTI __nvoc_rtti_MemoryManager_MemoryManager = {
/*pClassDef=*/ &__nvoc_class_def_MemoryManager,
/*dtor=*/ (NVOC_DYNAMIC_DTOR) &__nvoc_dtor_MemoryManager,
/*offset=*/ 0,
};
static const struct NVOC_RTTI __nvoc_rtti_MemoryManager_Object = {
/*pClassDef=*/ &__nvoc_class_def_Object,
/*dtor=*/ &__nvoc_destructFromBase,
/*offset=*/ NV_OFFSETOF(MemoryManager, __nvoc_base_OBJENGSTATE.__nvoc_base_Object),
};
static const struct NVOC_RTTI __nvoc_rtti_MemoryManager_OBJENGSTATE = {
/*pClassDef=*/ &__nvoc_class_def_OBJENGSTATE,
/*dtor=*/ &__nvoc_destructFromBase,
/*offset=*/ NV_OFFSETOF(MemoryManager, __nvoc_base_OBJENGSTATE),
};
static const struct NVOC_CASTINFO __nvoc_castinfo_MemoryManager = {
/*numRelatives=*/ 3,
/*relatives=*/ {
&__nvoc_rtti_MemoryManager_MemoryManager,
&__nvoc_rtti_MemoryManager_OBJENGSTATE,
&__nvoc_rtti_MemoryManager_Object,
},
};
const struct NVOC_CLASS_DEF __nvoc_class_def_MemoryManager =
{
/*classInfo=*/ {
/*size=*/ sizeof(MemoryManager),
/*classId=*/ classId(MemoryManager),
/*providerId=*/ &__nvoc_rtti_provider,
#if NV_PRINTF_STRINGS_ALLOWED
/*name=*/ "MemoryManager",
#endif
},
/*objCreatefn=*/ (NVOC_DYNAMIC_OBJ_CREATE) &__nvoc_objCreateDynamic_MemoryManager,
/*pCastInfo=*/ &__nvoc_castinfo_MemoryManager,
/*pExportInfo=*/ &__nvoc_export_info_MemoryManager
};
static NV_STATUS __nvoc_thunk_OBJENGSTATE_memmgrReconcileTunableState(POBJGPU pGpu, struct MemoryManager *pEngstate, void *pTunableState) {
return engstateReconcileTunableState(pGpu, (struct OBJENGSTATE *)(((unsigned char *)pEngstate) + __nvoc_rtti_MemoryManager_OBJENGSTATE.offset), pTunableState);
}
static NV_STATUS __nvoc_thunk_OBJENGSTATE_memmgrStateLoad(POBJGPU pGpu, struct MemoryManager *pEngstate, NvU32 arg0) {
return engstateStateLoad(pGpu, (struct OBJENGSTATE *)(((unsigned char *)pEngstate) + __nvoc_rtti_MemoryManager_OBJENGSTATE.offset), arg0);
}
static NV_STATUS __nvoc_thunk_OBJENGSTATE_memmgrStateUnload(POBJGPU pGpu, struct MemoryManager *pEngstate, NvU32 arg0) {
return engstateStateUnload(pGpu, (struct OBJENGSTATE *)(((unsigned char *)pEngstate) + __nvoc_rtti_MemoryManager_OBJENGSTATE.offset), arg0);
}
static NV_STATUS __nvoc_thunk_OBJENGSTATE_memmgrStateInitLocked(POBJGPU pGpu, struct MemoryManager *pEngstate) {
return engstateStateInitLocked(pGpu, (struct OBJENGSTATE *)(((unsigned char *)pEngstate) + __nvoc_rtti_MemoryManager_OBJENGSTATE.offset));
}
static NV_STATUS __nvoc_thunk_OBJENGSTATE_memmgrStatePreLoad(POBJGPU pGpu, struct MemoryManager *pEngstate, NvU32 arg0) {
return engstateStatePreLoad(pGpu, (struct OBJENGSTATE *)(((unsigned char *)pEngstate) + __nvoc_rtti_MemoryManager_OBJENGSTATE.offset), arg0);
}
static NV_STATUS __nvoc_thunk_OBJENGSTATE_memmgrStatePostUnload(POBJGPU pGpu, struct MemoryManager *pEngstate, NvU32 arg0) {
return engstateStatePostUnload(pGpu, (struct OBJENGSTATE *)(((unsigned char *)pEngstate) + __nvoc_rtti_MemoryManager_OBJENGSTATE.offset), arg0);
}
static void __nvoc_thunk_OBJENGSTATE_memmgrStateDestroy(POBJGPU pGpu, struct MemoryManager *pEngstate) {
engstateStateDestroy(pGpu, (struct OBJENGSTATE *)(((unsigned char *)pEngstate) + __nvoc_rtti_MemoryManager_OBJENGSTATE.offset));
}
static NV_STATUS __nvoc_thunk_OBJENGSTATE_memmgrStatePreUnload(POBJGPU pGpu, struct MemoryManager *pEngstate, NvU32 arg0) {
return engstateStatePreUnload(pGpu, (struct OBJENGSTATE *)(((unsigned char *)pEngstate) + __nvoc_rtti_MemoryManager_OBJENGSTATE.offset), arg0);
}
static NV_STATUS __nvoc_thunk_OBJENGSTATE_memmgrStateInitUnlocked(POBJGPU pGpu, struct MemoryManager *pEngstate) {
return engstateStateInitUnlocked(pGpu, (struct OBJENGSTATE *)(((unsigned char *)pEngstate) + __nvoc_rtti_MemoryManager_OBJENGSTATE.offset));
}
static void __nvoc_thunk_OBJENGSTATE_memmgrInitMissing(POBJGPU pGpu, struct MemoryManager *pEngstate) {
engstateInitMissing(pGpu, (struct OBJENGSTATE *)(((unsigned char *)pEngstate) + __nvoc_rtti_MemoryManager_OBJENGSTATE.offset));
}
static NV_STATUS __nvoc_thunk_OBJENGSTATE_memmgrStatePreInitLocked(POBJGPU pGpu, struct MemoryManager *pEngstate) {
return engstateStatePreInitLocked(pGpu, (struct OBJENGSTATE *)(((unsigned char *)pEngstate) + __nvoc_rtti_MemoryManager_OBJENGSTATE.offset));
}
static NV_STATUS __nvoc_thunk_OBJENGSTATE_memmgrStatePreInitUnlocked(POBJGPU pGpu, struct MemoryManager *pEngstate) {
return engstateStatePreInitUnlocked(pGpu, (struct OBJENGSTATE *)(((unsigned char *)pEngstate) + __nvoc_rtti_MemoryManager_OBJENGSTATE.offset));
}
static NV_STATUS __nvoc_thunk_OBJENGSTATE_memmgrGetTunableState(POBJGPU pGpu, struct MemoryManager *pEngstate, void *pTunableState) {
return engstateGetTunableState(pGpu, (struct OBJENGSTATE *)(((unsigned char *)pEngstate) + __nvoc_rtti_MemoryManager_OBJENGSTATE.offset), pTunableState);
}
static NV_STATUS __nvoc_thunk_OBJENGSTATE_memmgrCompareTunableState(POBJGPU pGpu, struct MemoryManager *pEngstate, void *pTunables1, void *pTunables2) {
return engstateCompareTunableState(pGpu, (struct OBJENGSTATE *)(((unsigned char *)pEngstate) + __nvoc_rtti_MemoryManager_OBJENGSTATE.offset), pTunables1, pTunables2);
}
static void __nvoc_thunk_OBJENGSTATE_memmgrFreeTunableState(POBJGPU pGpu, struct MemoryManager *pEngstate, void *pTunableState) {
engstateFreeTunableState(pGpu, (struct OBJENGSTATE *)(((unsigned char *)pEngstate) + __nvoc_rtti_MemoryManager_OBJENGSTATE.offset), pTunableState);
}
static NV_STATUS __nvoc_thunk_OBJENGSTATE_memmgrStatePostLoad(POBJGPU pGpu, struct MemoryManager *pEngstate, NvU32 arg0) {
return engstateStatePostLoad(pGpu, (struct OBJENGSTATE *)(((unsigned char *)pEngstate) + __nvoc_rtti_MemoryManager_OBJENGSTATE.offset), arg0);
}
static NV_STATUS __nvoc_thunk_OBJENGSTATE_memmgrAllocTunableState(POBJGPU pGpu, struct MemoryManager *pEngstate, void **ppTunableState) {
return engstateAllocTunableState(pGpu, (struct OBJENGSTATE *)(((unsigned char *)pEngstate) + __nvoc_rtti_MemoryManager_OBJENGSTATE.offset), ppTunableState);
}
static NV_STATUS __nvoc_thunk_OBJENGSTATE_memmgrSetTunableState(POBJGPU pGpu, struct MemoryManager *pEngstate, void *pTunableState) {
return engstateSetTunableState(pGpu, (struct OBJENGSTATE *)(((unsigned char *)pEngstate) + __nvoc_rtti_MemoryManager_OBJENGSTATE.offset), pTunableState);
}
static NV_STATUS __nvoc_thunk_OBJENGSTATE_memmgrConstructEngine(POBJGPU pGpu, struct MemoryManager *pEngstate, ENGDESCRIPTOR arg0) {
return engstateConstructEngine(pGpu, (struct OBJENGSTATE *)(((unsigned char *)pEngstate) + __nvoc_rtti_MemoryManager_OBJENGSTATE.offset), arg0);
}
static NvBool __nvoc_thunk_OBJENGSTATE_memmgrIsPresent(POBJGPU pGpu, struct MemoryManager *pEngstate) {
return engstateIsPresent(pGpu, (struct OBJENGSTATE *)(((unsigned char *)pEngstate) + __nvoc_rtti_MemoryManager_OBJENGSTATE.offset));
}
const struct NVOC_EXPORT_INFO __nvoc_export_info_MemoryManager =
{
/*numEntries=*/ 0,
/*pExportEntries=*/ 0
};
void __nvoc_dtor_OBJENGSTATE(OBJENGSTATE*);
void __nvoc_dtor_MemoryManager(MemoryManager *pThis) {
__nvoc_dtor_OBJENGSTATE(&pThis->__nvoc_base_OBJENGSTATE);
PORT_UNREFERENCED_VARIABLE(pThis);
}
void __nvoc_init_dataField_MemoryManager(MemoryManager *pThis, RmHalspecOwner *pRmhalspecowner) {
ChipHal *chipHal = &pRmhalspecowner->chipHal;
const unsigned long chipHal_HalVarIdx = (unsigned long)chipHal->__nvoc_HalVarIdx;
RmVariantHal *rmVariantHal = &pRmhalspecowner->rmVariantHal;
const unsigned long rmVariantHal_HalVarIdx = (unsigned long)rmVariantHal->__nvoc_HalVarIdx;
PORT_UNREFERENCED_VARIABLE(pThis);
PORT_UNREFERENCED_VARIABLE(pRmhalspecowner);
PORT_UNREFERENCED_VARIABLE(chipHal);
PORT_UNREFERENCED_VARIABLE(chipHal_HalVarIdx);
PORT_UNREFERENCED_VARIABLE(rmVariantHal);
PORT_UNREFERENCED_VARIABLE(rmVariantHal_HalVarIdx);
// Hal field -- bFbRegionsSupported
if (0)
{
}
// default
else
{
pThis->bFbRegionsSupported = ((NvBool)(0 != 0));
}
// Hal field -- bPmaEnabled
if (0)
{
}
// default
else
{
pThis->bPmaEnabled = ((NvBool)(0 != 0));
}
// Hal field -- bClientPageTablesPmaManaged
if (( ((chipHal_HalVarIdx >> 5) == 2UL) && ((1UL << (chipHal_HalVarIdx & 0x1f)) & 0x00010000UL) )) /* ChipHal: T234D */
{
pThis->bClientPageTablesPmaManaged = ((NvBool)(0 == 0));
}
// default
else
{
pThis->bClientPageTablesPmaManaged = ((NvBool)(0 != 0));
}
// Hal field -- bScanoutSysmem
if (( ((chipHal_HalVarIdx >> 5) == 2UL) && ((1UL << (chipHal_HalVarIdx & 0x1f)) & 0x00010000UL) )) /* ChipHal: T234D */
{
pThis->bScanoutSysmem = ((NvBool)(0 == 0));
}
// default
else
{
pThis->bScanoutSysmem = ((NvBool)(0 != 0));
}
// Hal field -- bDisallowSplitLowerMemory
if (( ((chipHal_HalVarIdx >> 5) == 2UL) && ((1UL << (chipHal_HalVarIdx & 0x1f)) & 0x00010000UL) )) /* ChipHal: T234D */
{
pThis->bDisallowSplitLowerMemory = ((NvBool)(0 == 0));
}
// default
else
{
pThis->bDisallowSplitLowerMemory = ((NvBool)(0 != 0));
}
// Hal field -- bSmallPageCompression
if (0)
{
}
// default
else
{
pThis->bSmallPageCompression = ((NvBool)(0 != 0));
}
// Hal field -- bSysmemCompressionSupportDef
if (( ((chipHal_HalVarIdx >> 5) == 2UL) && ((1UL << (chipHal_HalVarIdx & 0x1f)) & 0x00010000UL) )) /* ChipHal: T234D */
{
pThis->bSysmemCompressionSupportDef = ((NvBool)(0 == 0));
}
// default
else
{
pThis->bSysmemCompressionSupportDef = ((NvBool)(0 != 0));
}
// Hal field -- bBug2301372IncreaseRmReserveMemoryWar
if (0)
{
}
// default
else
{
pThis->bBug2301372IncreaseRmReserveMemoryWar = ((NvBool)(0 != 0));
}
pThis->bEnableDynamicPageOfflining = ((NvBool)(0 != 0));
// Hal field -- bVgpuPmaSupport
if (0)
{
}
// default
else
{
pThis->bVgpuPmaSupport = ((NvBool)(0 != 0));
}
// Hal field -- bAllowNoncontiguousAllocation
if (( ((chipHal_HalVarIdx >> 5) == 2UL) && ((1UL << (chipHal_HalVarIdx & 0x1f)) & 0x00010000UL) )) /* ChipHal: T234D */
{
pThis->bAllowNoncontiguousAllocation = ((NvBool)(0 == 0));
}
// default
else
{
pThis->bAllowNoncontiguousAllocation = ((NvBool)(0 != 0));
}
// Hal field -- bScrubOnFreeEnabled
if (0)
{
}
// default
else
{
pThis->bScrubOnFreeEnabled = ((NvBool)(0 != 0));
}
// Hal field -- bFastScrubberEnabled
if (0)
{
}
// default
else
{
pThis->bFastScrubberEnabled = ((NvBool)(0 != 0));
}
}
NV_STATUS __nvoc_ctor_OBJENGSTATE(OBJENGSTATE* );
NV_STATUS __nvoc_ctor_MemoryManager(MemoryManager *pThis, RmHalspecOwner *pRmhalspecowner) {
NV_STATUS status = NV_OK;
status = __nvoc_ctor_OBJENGSTATE(&pThis->__nvoc_base_OBJENGSTATE);
if (status != NV_OK) goto __nvoc_ctor_MemoryManager_fail_OBJENGSTATE;
__nvoc_init_dataField_MemoryManager(pThis, pRmhalspecowner);
goto __nvoc_ctor_MemoryManager_exit; // Success
__nvoc_ctor_MemoryManager_fail_OBJENGSTATE:
__nvoc_ctor_MemoryManager_exit:
return status;
}
static void __nvoc_init_funcTable_MemoryManager_1(MemoryManager *pThis, RmHalspecOwner *pRmhalspecowner) {
ChipHal *chipHal = &pRmhalspecowner->chipHal;
const unsigned long chipHal_HalVarIdx = (unsigned long)chipHal->__nvoc_HalVarIdx;
RmVariantHal *rmVariantHal = &pRmhalspecowner->rmVariantHal;
const unsigned long rmVariantHal_HalVarIdx = (unsigned long)rmVariantHal->__nvoc_HalVarIdx;
PORT_UNREFERENCED_VARIABLE(pThis);
PORT_UNREFERENCED_VARIABLE(pRmhalspecowner);
PORT_UNREFERENCED_VARIABLE(chipHal);
PORT_UNREFERENCED_VARIABLE(chipHal_HalVarIdx);
PORT_UNREFERENCED_VARIABLE(rmVariantHal);
PORT_UNREFERENCED_VARIABLE(rmVariantHal_HalVarIdx);
pThis->__memmgrReconcileTunableState__ = &__nvoc_thunk_OBJENGSTATE_memmgrReconcileTunableState;
pThis->__memmgrStateLoad__ = &__nvoc_thunk_OBJENGSTATE_memmgrStateLoad;
pThis->__memmgrStateUnload__ = &__nvoc_thunk_OBJENGSTATE_memmgrStateUnload;
pThis->__memmgrStateInitLocked__ = &__nvoc_thunk_OBJENGSTATE_memmgrStateInitLocked;
pThis->__memmgrStatePreLoad__ = &__nvoc_thunk_OBJENGSTATE_memmgrStatePreLoad;
pThis->__memmgrStatePostUnload__ = &__nvoc_thunk_OBJENGSTATE_memmgrStatePostUnload;
pThis->__memmgrStateDestroy__ = &__nvoc_thunk_OBJENGSTATE_memmgrStateDestroy;
pThis->__memmgrStatePreUnload__ = &__nvoc_thunk_OBJENGSTATE_memmgrStatePreUnload;
pThis->__memmgrStateInitUnlocked__ = &__nvoc_thunk_OBJENGSTATE_memmgrStateInitUnlocked;
pThis->__memmgrInitMissing__ = &__nvoc_thunk_OBJENGSTATE_memmgrInitMissing;
pThis->__memmgrStatePreInitLocked__ = &__nvoc_thunk_OBJENGSTATE_memmgrStatePreInitLocked;
pThis->__memmgrStatePreInitUnlocked__ = &__nvoc_thunk_OBJENGSTATE_memmgrStatePreInitUnlocked;
pThis->__memmgrGetTunableState__ = &__nvoc_thunk_OBJENGSTATE_memmgrGetTunableState;
pThis->__memmgrCompareTunableState__ = &__nvoc_thunk_OBJENGSTATE_memmgrCompareTunableState;
pThis->__memmgrFreeTunableState__ = &__nvoc_thunk_OBJENGSTATE_memmgrFreeTunableState;
pThis->__memmgrStatePostLoad__ = &__nvoc_thunk_OBJENGSTATE_memmgrStatePostLoad;
pThis->__memmgrAllocTunableState__ = &__nvoc_thunk_OBJENGSTATE_memmgrAllocTunableState;
pThis->__memmgrSetTunableState__ = &__nvoc_thunk_OBJENGSTATE_memmgrSetTunableState;
pThis->__memmgrConstructEngine__ = &__nvoc_thunk_OBJENGSTATE_memmgrConstructEngine;
pThis->__memmgrIsPresent__ = &__nvoc_thunk_OBJENGSTATE_memmgrIsPresent;
}
void __nvoc_init_funcTable_MemoryManager(MemoryManager *pThis, RmHalspecOwner *pRmhalspecowner) {
__nvoc_init_funcTable_MemoryManager_1(pThis, pRmhalspecowner);
}
void __nvoc_init_OBJENGSTATE(OBJENGSTATE*);
void __nvoc_init_MemoryManager(MemoryManager *pThis, RmHalspecOwner *pRmhalspecowner) {
pThis->__nvoc_pbase_MemoryManager = pThis;
pThis->__nvoc_pbase_Object = &pThis->__nvoc_base_OBJENGSTATE.__nvoc_base_Object;
pThis->__nvoc_pbase_OBJENGSTATE = &pThis->__nvoc_base_OBJENGSTATE;
__nvoc_init_OBJENGSTATE(&pThis->__nvoc_base_OBJENGSTATE);
__nvoc_init_funcTable_MemoryManager(pThis, pRmhalspecowner);
}
NV_STATUS __nvoc_objCreate_MemoryManager(MemoryManager **ppThis, Dynamic *pParent, NvU32 createFlags) {
NV_STATUS status;
Object *pParentObj;
MemoryManager *pThis;
RmHalspecOwner *pRmhalspecowner;
pThis = portMemAllocNonPaged(sizeof(MemoryManager));
if (pThis == NULL) return NV_ERR_NO_MEMORY;
portMemSet(pThis, 0, sizeof(MemoryManager));
__nvoc_initRtti(staticCast(pThis, Dynamic), &__nvoc_class_def_MemoryManager);
if (pParent != NULL && !(createFlags & NVOC_OBJ_CREATE_FLAGS_PARENT_HALSPEC_ONLY))
{
pParentObj = dynamicCast(pParent, Object);
objAddChild(pParentObj, &pThis->__nvoc_base_OBJENGSTATE.__nvoc_base_Object);
}
else
{
pThis->__nvoc_base_OBJENGSTATE.__nvoc_base_Object.pParent = NULL;
}
if ((pRmhalspecowner = dynamicCast(pParent, RmHalspecOwner)) == NULL)
pRmhalspecowner = objFindAncestorOfType(RmHalspecOwner, pParent);
NV_ASSERT_OR_RETURN(pRmhalspecowner != NULL, NV_ERR_INVALID_ARGUMENT);
__nvoc_init_MemoryManager(pThis, pRmhalspecowner);
status = __nvoc_ctor_MemoryManager(pThis, pRmhalspecowner);
if (status != NV_OK) goto __nvoc_objCreate_MemoryManager_cleanup;
*ppThis = pThis;
return NV_OK;
__nvoc_objCreate_MemoryManager_cleanup:
// do not call destructors here since the constructor already called them
portMemFree(pThis);
return status;
}
NV_STATUS __nvoc_objCreateDynamic_MemoryManager(MemoryManager **ppThis, Dynamic *pParent, NvU32 createFlags, va_list args) {
NV_STATUS status;
status = __nvoc_objCreate_MemoryManager(ppThis, pParent, createFlags);
return status;
}
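
/*
 * Note on the thunk pattern above (illustrative helper, assumed name): each
 * __nvoc_thunk_OBJENGSTATE_memmgr*() function converts the MemoryManager
 * pointer to its embedded OBJENGSTATE base using the byte offset recorded
 * in __nvoc_rtti_MemoryManager_OBJENGSTATE, then forwards to the generic
 * engstate entry point.  The pointer adjustment is equivalent to taking the
 * address of the embedded base member directly:
 */
static struct OBJENGSTATE *
exampleMemmgrToEngstate(struct MemoryManager *pMemoryManager)
{
    /* Same result as &pMemoryManager->__nvoc_base_OBJENGSTATE, computed the
     * way the generated thunks compute it. */
    return (struct OBJENGSTATE *)(((unsigned char *)pMemoryManager) +
                                  __nvoc_rtti_MemoryManager_OBJENGSTATE.offset);
}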


File diff suppressed because it is too large


@@ -0,0 +1,312 @@
#define NVOC_MEM_H_PRIVATE_ACCESS_ALLOWED
#include "nvoc/runtime.h"
#include "nvoc/rtti.h"
#include "nvtypes.h"
#include "nvport/nvport.h"
#include "nvport/inline/util_valist.h"
#include "utils/nvassert.h"
#include "g_mem_nvoc.h"
#ifdef DEBUG
char __nvoc_class_id_uniqueness_check_0x4789f2 = 1;
#endif
extern const struct NVOC_CLASS_DEF __nvoc_class_def_Memory;
extern const struct NVOC_CLASS_DEF __nvoc_class_def_Object;
extern const struct NVOC_CLASS_DEF __nvoc_class_def_RsResource;
extern const struct NVOC_CLASS_DEF __nvoc_class_def_RmResourceCommon;
extern const struct NVOC_CLASS_DEF __nvoc_class_def_RmResource;
void __nvoc_init_Memory(Memory*);
void __nvoc_init_funcTable_Memory(Memory*);
NV_STATUS __nvoc_ctor_Memory(Memory*, CALL_CONTEXT * arg_pCallContext, struct RS_RES_ALLOC_PARAMS_INTERNAL * arg_pParams);
void __nvoc_init_dataField_Memory(Memory*);
void __nvoc_dtor_Memory(Memory*);
extern const struct NVOC_EXPORT_INFO __nvoc_export_info_Memory;
static const struct NVOC_RTTI __nvoc_rtti_Memory_Memory = {
/*pClassDef=*/ &__nvoc_class_def_Memory,
/*dtor=*/ (NVOC_DYNAMIC_DTOR) &__nvoc_dtor_Memory,
/*offset=*/ 0,
};
static const struct NVOC_RTTI __nvoc_rtti_Memory_Object = {
/*pClassDef=*/ &__nvoc_class_def_Object,
/*dtor=*/ &__nvoc_destructFromBase,
/*offset=*/ NV_OFFSETOF(Memory, __nvoc_base_RmResource.__nvoc_base_RsResource.__nvoc_base_Object),
};
static const struct NVOC_RTTI __nvoc_rtti_Memory_RsResource = {
/*pClassDef=*/ &__nvoc_class_def_RsResource,
/*dtor=*/ &__nvoc_destructFromBase,
/*offset=*/ NV_OFFSETOF(Memory, __nvoc_base_RmResource.__nvoc_base_RsResource),
};
static const struct NVOC_RTTI __nvoc_rtti_Memory_RmResourceCommon = {
/*pClassDef=*/ &__nvoc_class_def_RmResourceCommon,
/*dtor=*/ &__nvoc_destructFromBase,
/*offset=*/ NV_OFFSETOF(Memory, __nvoc_base_RmResource.__nvoc_base_RmResourceCommon),
};
static const struct NVOC_RTTI __nvoc_rtti_Memory_RmResource = {
/*pClassDef=*/ &__nvoc_class_def_RmResource,
/*dtor=*/ &__nvoc_destructFromBase,
/*offset=*/ NV_OFFSETOF(Memory, __nvoc_base_RmResource),
};
static const struct NVOC_CASTINFO __nvoc_castinfo_Memory = {
/*numRelatives=*/ 5,
/*relatives=*/ {
&__nvoc_rtti_Memory_Memory,
&__nvoc_rtti_Memory_RmResource,
&__nvoc_rtti_Memory_RmResourceCommon,
&__nvoc_rtti_Memory_RsResource,
&__nvoc_rtti_Memory_Object,
},
};
const struct NVOC_CLASS_DEF __nvoc_class_def_Memory =
{
/*classInfo=*/ {
/*size=*/ sizeof(Memory),
/*classId=*/ classId(Memory),
/*providerId=*/ &__nvoc_rtti_provider,
#if NV_PRINTF_STRINGS_ALLOWED
/*name=*/ "Memory",
#endif
},
/*objCreatefn=*/ (NVOC_DYNAMIC_OBJ_CREATE) &__nvoc_objCreateDynamic_Memory,
/*pCastInfo=*/ &__nvoc_castinfo_Memory,
/*pExportInfo=*/ &__nvoc_export_info_Memory
};
static NV_STATUS __nvoc_thunk_Memory_resControl(struct RsResource *pMemory, CALL_CONTEXT *pCallContext, struct RS_RES_CONTROL_PARAMS_INTERNAL *pParams) {
return memControl((struct Memory *)(((unsigned char *)pMemory) - __nvoc_rtti_Memory_RsResource.offset), pCallContext, pParams);
}
static NV_STATUS __nvoc_thunk_Memory_resMap(struct RsResource *pMemory, CALL_CONTEXT *pCallContext, struct RS_CPU_MAP_PARAMS *pParams, RsCpuMapping *pCpuMapping) {
return memMap((struct Memory *)(((unsigned char *)pMemory) - __nvoc_rtti_Memory_RsResource.offset), pCallContext, pParams, pCpuMapping);
}
static NV_STATUS __nvoc_thunk_Memory_resUnmap(struct RsResource *pMemory, CALL_CONTEXT *pCallContext, RsCpuMapping *pCpuMapping) {
return memUnmap((struct Memory *)(((unsigned char *)pMemory) - __nvoc_rtti_Memory_RsResource.offset), pCallContext, pCpuMapping);
}
static NV_STATUS __nvoc_thunk_Memory_rmresGetMemInterMapParams(struct RmResource *pMemory, RMRES_MEM_INTER_MAP_PARAMS *pParams) {
return memGetMemInterMapParams((struct Memory *)(((unsigned char *)pMemory) - __nvoc_rtti_Memory_RmResource.offset), pParams);
}
static NV_STATUS __nvoc_thunk_Memory_rmresCheckMemInterUnmap(struct RmResource *pMemory, NvBool bSubdeviceHandleProvided) {
return memCheckMemInterUnmap((struct Memory *)(((unsigned char *)pMemory) - __nvoc_rtti_Memory_RmResource.offset), bSubdeviceHandleProvided);
}
static NV_STATUS __nvoc_thunk_Memory_rmresGetMemoryMappingDescriptor(struct RmResource *pMemory, MEMORY_DESCRIPTOR **ppMemDesc) {
return memGetMemoryMappingDescriptor((struct Memory *)(((unsigned char *)pMemory) - __nvoc_rtti_Memory_RmResource.offset), ppMemDesc);
}
static NvBool __nvoc_thunk_RmResource_memShareCallback(struct Memory *pResource, struct RsClient *pInvokingClient, struct RsResourceRef *pParentRef, RS_SHARE_POLICY *pSharePolicy) {
return rmresShareCallback((struct RmResource *)(((unsigned char *)pResource) + __nvoc_rtti_Memory_RmResource.offset), pInvokingClient, pParentRef, pSharePolicy);
}
static NvU32 __nvoc_thunk_RsResource_memGetRefCount(struct Memory *pResource) {
return resGetRefCount((struct RsResource *)(((unsigned char *)pResource) + __nvoc_rtti_Memory_RsResource.offset));
}
static NV_STATUS __nvoc_thunk_RsResource_memControlFilter(struct Memory *pResource, struct CALL_CONTEXT *pCallContext, struct RS_RES_CONTROL_PARAMS_INTERNAL *pParams) {
return resControlFilter((struct RsResource *)(((unsigned char *)pResource) + __nvoc_rtti_Memory_RsResource.offset), pCallContext, pParams);
}
static void __nvoc_thunk_RsResource_memAddAdditionalDependants(struct RsClient *pClient, struct Memory *pResource, RsResourceRef *pReference) {
resAddAdditionalDependants(pClient, (struct RsResource *)(((unsigned char *)pResource) + __nvoc_rtti_Memory_RsResource.offset), pReference);
}
static NV_STATUS __nvoc_thunk_RmResource_memControl_Prologue(struct Memory *pResource, CALL_CONTEXT *pCallContext, struct RS_RES_CONTROL_PARAMS_INTERNAL *pParams) {
return rmresControl_Prologue((struct RmResource *)(((unsigned char *)pResource) + __nvoc_rtti_Memory_RmResource.offset), pCallContext, pParams);
}
static NvBool __nvoc_thunk_RsResource_memCanCopy(struct Memory *pResource) {
return resCanCopy((struct RsResource *)(((unsigned char *)pResource) + __nvoc_rtti_Memory_RsResource.offset));
}
static NV_STATUS __nvoc_thunk_RsResource_memMapTo(struct Memory *pResource, RS_RES_MAP_TO_PARAMS *pParams) {
return resMapTo((struct RsResource *)(((unsigned char *)pResource) + __nvoc_rtti_Memory_RsResource.offset), pParams);
}
static void __nvoc_thunk_RsResource_memPreDestruct(struct Memory *pResource) {
resPreDestruct((struct RsResource *)(((unsigned char *)pResource) + __nvoc_rtti_Memory_RsResource.offset));
}
static NV_STATUS __nvoc_thunk_RsResource_memUnmapFrom(struct Memory *pResource, RS_RES_UNMAP_FROM_PARAMS *pParams) {
return resUnmapFrom((struct RsResource *)(((unsigned char *)pResource) + __nvoc_rtti_Memory_RsResource.offset), pParams);
}
static void __nvoc_thunk_RmResource_memControl_Epilogue(struct Memory *pResource, CALL_CONTEXT *pCallContext, struct RS_RES_CONTROL_PARAMS_INTERNAL *pParams) {
rmresControl_Epilogue((struct RmResource *)(((unsigned char *)pResource) + __nvoc_rtti_Memory_RmResource.offset), pCallContext, pParams);
}
static NV_STATUS __nvoc_thunk_RsResource_memControlLookup(struct Memory *pResource, struct RS_RES_CONTROL_PARAMS_INTERNAL *pParams, const struct NVOC_EXPORTED_METHOD_DEF **ppEntry) {
return resControlLookup((struct RsResource *)(((unsigned char *)pResource) + __nvoc_rtti_Memory_RsResource.offset), pParams, ppEntry);
}
static NvBool __nvoc_thunk_RmResource_memAccessCallback(struct Memory *pResource, struct RsClient *pInvokingClient, void *pAllocParams, RsAccessRight accessRight) {
return rmresAccessCallback((struct RmResource *)(((unsigned char *)pResource) + __nvoc_rtti_Memory_RmResource.offset), pInvokingClient, pAllocParams, accessRight);
}
const struct NVOC_EXPORT_INFO __nvoc_export_info_Memory =
{
/*numEntries=*/ 0,
/*pExportEntries=*/ 0
};
void __nvoc_dtor_RmResource(RmResource*);
void __nvoc_dtor_Memory(Memory *pThis) {
__nvoc_memDestruct(pThis);
__nvoc_dtor_RmResource(&pThis->__nvoc_base_RmResource);
PORT_UNREFERENCED_VARIABLE(pThis);
}
void __nvoc_init_dataField_Memory(Memory *pThis) {
PORT_UNREFERENCED_VARIABLE(pThis);
}
NV_STATUS __nvoc_ctor_RmResource(RmResource* , CALL_CONTEXT *, struct RS_RES_ALLOC_PARAMS_INTERNAL *);
NV_STATUS __nvoc_ctor_Memory(Memory *pThis, CALL_CONTEXT * arg_pCallContext, struct RS_RES_ALLOC_PARAMS_INTERNAL * arg_pParams) {
NV_STATUS status = NV_OK;
status = __nvoc_ctor_RmResource(&pThis->__nvoc_base_RmResource, arg_pCallContext, arg_pParams);
if (status != NV_OK) goto __nvoc_ctor_Memory_fail_RmResource;
__nvoc_init_dataField_Memory(pThis);
status = __nvoc_memConstruct(pThis, arg_pCallContext, arg_pParams);
if (status != NV_OK) goto __nvoc_ctor_Memory_fail__init;
goto __nvoc_ctor_Memory_exit; // Success
__nvoc_ctor_Memory_fail__init:
__nvoc_dtor_RmResource(&pThis->__nvoc_base_RmResource);
__nvoc_ctor_Memory_fail_RmResource:
__nvoc_ctor_Memory_exit:
return status;
}
static void __nvoc_init_funcTable_Memory_1(Memory *pThis) {
PORT_UNREFERENCED_VARIABLE(pThis);
pThis->__memGetMapAddrSpace__ = &memGetMapAddrSpace_IMPL;
pThis->__memControl__ = &memControl_IMPL;
pThis->__memMap__ = &memMap_IMPL;
pThis->__memUnmap__ = &memUnmap_IMPL;
pThis->__memGetMemInterMapParams__ = &memGetMemInterMapParams_IMPL;
pThis->__memCheckMemInterUnmap__ = &memCheckMemInterUnmap_ac1694;
pThis->__memGetMemoryMappingDescriptor__ = &memGetMemoryMappingDescriptor_IMPL;
pThis->__memCheckCopyPermissions__ = &memCheckCopyPermissions_ac1694;
pThis->__memIsReady__ = &memIsReady_IMPL;
pThis->__nvoc_base_RmResource.__nvoc_base_RsResource.__resControl__ = &__nvoc_thunk_Memory_resControl;
pThis->__nvoc_base_RmResource.__nvoc_base_RsResource.__resMap__ = &__nvoc_thunk_Memory_resMap;
pThis->__nvoc_base_RmResource.__nvoc_base_RsResource.__resUnmap__ = &__nvoc_thunk_Memory_resUnmap;
pThis->__nvoc_base_RmResource.__rmresGetMemInterMapParams__ = &__nvoc_thunk_Memory_rmresGetMemInterMapParams;
pThis->__nvoc_base_RmResource.__rmresCheckMemInterUnmap__ = &__nvoc_thunk_Memory_rmresCheckMemInterUnmap;
pThis->__nvoc_base_RmResource.__rmresGetMemoryMappingDescriptor__ = &__nvoc_thunk_Memory_rmresGetMemoryMappingDescriptor;
pThis->__memShareCallback__ = &__nvoc_thunk_RmResource_memShareCallback;
pThis->__memGetRefCount__ = &__nvoc_thunk_RsResource_memGetRefCount;
pThis->__memControlFilter__ = &__nvoc_thunk_RsResource_memControlFilter;
pThis->__memAddAdditionalDependants__ = &__nvoc_thunk_RsResource_memAddAdditionalDependants;
pThis->__memControl_Prologue__ = &__nvoc_thunk_RmResource_memControl_Prologue;
pThis->__memCanCopy__ = &__nvoc_thunk_RsResource_memCanCopy;
pThis->__memMapTo__ = &__nvoc_thunk_RsResource_memMapTo;
pThis->__memPreDestruct__ = &__nvoc_thunk_RsResource_memPreDestruct;
pThis->__memUnmapFrom__ = &__nvoc_thunk_RsResource_memUnmapFrom;
pThis->__memControl_Epilogue__ = &__nvoc_thunk_RmResource_memControl_Epilogue;
pThis->__memControlLookup__ = &__nvoc_thunk_RsResource_memControlLookup;
pThis->__memAccessCallback__ = &__nvoc_thunk_RmResource_memAccessCallback;
}
void __nvoc_init_funcTable_Memory(Memory *pThis) {
__nvoc_init_funcTable_Memory_1(pThis);
}
void __nvoc_init_RmResource(RmResource*);
void __nvoc_init_Memory(Memory *pThis) {
pThis->__nvoc_pbase_Memory = pThis;
pThis->__nvoc_pbase_Object = &pThis->__nvoc_base_RmResource.__nvoc_base_RsResource.__nvoc_base_Object;
pThis->__nvoc_pbase_RsResource = &pThis->__nvoc_base_RmResource.__nvoc_base_RsResource;
pThis->__nvoc_pbase_RmResourceCommon = &pThis->__nvoc_base_RmResource.__nvoc_base_RmResourceCommon;
pThis->__nvoc_pbase_RmResource = &pThis->__nvoc_base_RmResource;
__nvoc_init_RmResource(&pThis->__nvoc_base_RmResource);
__nvoc_init_funcTable_Memory(pThis);
}
NV_STATUS __nvoc_objCreate_Memory(Memory **ppThis, Dynamic *pParent, NvU32 createFlags, CALL_CONTEXT * arg_pCallContext, struct RS_RES_ALLOC_PARAMS_INTERNAL * arg_pParams) {
NV_STATUS status;
Object *pParentObj;
Memory *pThis;
pThis = portMemAllocNonPaged(sizeof(Memory));
if (pThis == NULL) return NV_ERR_NO_MEMORY;
portMemSet(pThis, 0, sizeof(Memory));
__nvoc_initRtti(staticCast(pThis, Dynamic), &__nvoc_class_def_Memory);
if (pParent != NULL && !(createFlags & NVOC_OBJ_CREATE_FLAGS_PARENT_HALSPEC_ONLY))
{
pParentObj = dynamicCast(pParent, Object);
objAddChild(pParentObj, &pThis->__nvoc_base_RmResource.__nvoc_base_RsResource.__nvoc_base_Object);
}
else
{
pThis->__nvoc_base_RmResource.__nvoc_base_RsResource.__nvoc_base_Object.pParent = NULL;
}
__nvoc_init_Memory(pThis);
status = __nvoc_ctor_Memory(pThis, arg_pCallContext, arg_pParams);
if (status != NV_OK) goto __nvoc_objCreate_Memory_cleanup;
*ppThis = pThis;
return NV_OK;
__nvoc_objCreate_Memory_cleanup:
// do not call destructors here since the constructor already called them
portMemFree(pThis);
return status;
}
NV_STATUS __nvoc_objCreateDynamic_Memory(Memory **ppThis, Dynamic *pParent, NvU32 createFlags, va_list args) {
NV_STATUS status;
CALL_CONTEXT * arg_pCallContext = va_arg(args, CALL_CONTEXT *);
struct RS_RES_ALLOC_PARAMS_INTERNAL * arg_pParams = va_arg(args, struct RS_RES_ALLOC_PARAMS_INTERNAL *);
status = __nvoc_objCreate_Memory(ppThis, pParent, createFlags, arg_pCallContext, arg_pParams);
return status;
}


@@ -0,0 +1,417 @@
#ifndef _G_MEM_NVOC_H_
#define _G_MEM_NVOC_H_
#include "nvoc/runtime.h"
#ifdef __cplusplus
extern "C" {
#endif
/*
* SPDX-FileCopyrightText: Copyright (c) 1993-2021 NVIDIA CORPORATION & AFFILIATES. All rights reserved.
* SPDX-License-Identifier: MIT
*
* Permission is hereby granted, free of charge, to any person obtaining a
* copy of this software and associated documentation files (the "Software"),
* to deal in the Software without restriction, including without limitation
* the rights to use, copy, modify, merge, publish, distribute, sublicense,
* and/or sell copies of the Software, and to permit persons to whom the
* Software is furnished to do so, subject to the following conditions:
*
* The above copyright notice and this permission notice shall be included in
* all copies or substantial portions of the Software.
*
* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
* IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
* FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL
* THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
* LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
* FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER
* DEALINGS IN THE SOFTWARE.
*/
#include "g_mem_nvoc.h"
#ifndef _MEMORY_API_H_
#define _MEMORY_API_H_
#include "core/core.h"
#include "resserv/rs_resource.h"
#include "rmapi/rmapi.h"
#include "rmapi/resource.h"
#include "containers/btree.h"
#include "ctrl/ctrl0041.h"
struct Device;
#ifndef __NVOC_CLASS_Device_TYPEDEF__
#define __NVOC_CLASS_Device_TYPEDEF__
typedef struct Device Device;
#endif /* __NVOC_CLASS_Device_TYPEDEF__ */
#ifndef __nvoc_class_id_Device
#define __nvoc_class_id_Device 0xe0ac20
#endif /* __nvoc_class_id_Device */
struct Subdevice;
#ifndef __NVOC_CLASS_Subdevice_TYPEDEF__
#define __NVOC_CLASS_Subdevice_TYPEDEF__
typedef struct Subdevice Subdevice;
#endif /* __NVOC_CLASS_Subdevice_TYPEDEF__ */
#ifndef __nvoc_class_id_Subdevice
#define __nvoc_class_id_Subdevice 0x4b01b3
#endif /* __nvoc_class_id_Subdevice */
struct RsClient;
#ifndef __NVOC_CLASS_RsClient_TYPEDEF__
#define __NVOC_CLASS_RsClient_TYPEDEF__
typedef struct RsClient RsClient;
#endif /* __NVOC_CLASS_RsClient_TYPEDEF__ */
#ifndef __nvoc_class_id_RsClient
#define __nvoc_class_id_RsClient 0x8f87e5
#endif /* __nvoc_class_id_RsClient */
struct Heap;
#ifndef __NVOC_CLASS_Heap_TYPEDEF__
#define __NVOC_CLASS_Heap_TYPEDEF__
typedef struct Heap Heap;
#endif /* __NVOC_CLASS_Heap_TYPEDEF__ */
#ifndef __nvoc_class_id_Heap
#define __nvoc_class_id_Heap 0x556e9a
#endif /* __nvoc_class_id_Heap */
struct OBJGPU;
#ifndef __NVOC_CLASS_OBJGPU_TYPEDEF__
#define __NVOC_CLASS_OBJGPU_TYPEDEF__
typedef struct OBJGPU OBJGPU;
#endif /* __NVOC_CLASS_OBJGPU_TYPEDEF__ */
#ifndef __nvoc_class_id_OBJGPU
#define __nvoc_class_id_OBJGPU 0x7ef3cb
#endif /* __nvoc_class_id_OBJGPU */
typedef struct MEMORY_DESCRIPTOR MEMORY_DESCRIPTOR;
typedef struct PmuMapping PmuMapping;
typedef struct HWRESOURCE_INFO HWRESOURCE_INFO;
//
// vGPU non-stall interrupt info
//
typedef struct _def_client_vgpu_ns_intr
{
NvU32 nsSemValue; // Non stall interrupt semaphore value
NvU32 nsSemOffset; // Non stall interrupt semaphore offset. Currently it is always 0.
NvBool isSemaMemValidationEnabled; // Enables checking for a change in the non-stall interrupt
// semaphore value while generating the event
NvU64 guestDomainId; // guest ID that we need to use to inject interrupt
NvU64 guestMSIAddr; // MSI address allocated by guest OS
NvU32 guestMSIData; // MSI data value set by guest OS
void *pVgpuVfioRef; // Reference to vgpu device in nvidia-vgpu-vfio module
void *pVmBusHostChannel; // VmBus host channel used to communicate the event to the guest
void *pEventDpc; // DPC event to pass the interrupt
} VGPU_NS_INTR;
typedef struct
{
struct Memory *pNext;
struct Memory *pPrev;
} memCircularListItem;
/*!
* RM internal class representing NV01_MEMORY_XXX
*
* @note Memory cannot be a GpuResource because NoDeviceMemory
* subclass is not allocated under a device.
*/
#ifdef NVOC_MEM_H_PRIVATE_ACCESS_ALLOWED
#define PRIVATE_FIELD(x) x
#else
#define PRIVATE_FIELD(x) NVOC_PRIVATE_FIELD(x)
#endif
struct Memory {
const struct NVOC_RTTI *__nvoc_rtti;
struct RmResource __nvoc_base_RmResource;
struct Object *__nvoc_pbase_Object;
struct RsResource *__nvoc_pbase_RsResource;
struct RmResourceCommon *__nvoc_pbase_RmResourceCommon;
struct RmResource *__nvoc_pbase_RmResource;
struct Memory *__nvoc_pbase_Memory;
NV_STATUS (*__memGetMapAddrSpace__)(struct Memory *, CALL_CONTEXT *, NvU32, NV_ADDRESS_SPACE *);
NV_STATUS (*__memControl__)(struct Memory *, CALL_CONTEXT *, struct RS_RES_CONTROL_PARAMS_INTERNAL *);
NV_STATUS (*__memMap__)(struct Memory *, CALL_CONTEXT *, struct RS_CPU_MAP_PARAMS *, RsCpuMapping *);
NV_STATUS (*__memUnmap__)(struct Memory *, CALL_CONTEXT *, RsCpuMapping *);
NV_STATUS (*__memGetMemInterMapParams__)(struct Memory *, RMRES_MEM_INTER_MAP_PARAMS *);
NV_STATUS (*__memCheckMemInterUnmap__)(struct Memory *, NvBool);
NV_STATUS (*__memGetMemoryMappingDescriptor__)(struct Memory *, MEMORY_DESCRIPTOR **);
NV_STATUS (*__memCheckCopyPermissions__)(struct Memory *, struct OBJGPU *, NvHandle);
NV_STATUS (*__memIsReady__)(struct Memory *);
NvBool (*__memShareCallback__)(struct Memory *, struct RsClient *, struct RsResourceRef *, RS_SHARE_POLICY *);
NvU32 (*__memGetRefCount__)(struct Memory *);
NV_STATUS (*__memControlFilter__)(struct Memory *, struct CALL_CONTEXT *, struct RS_RES_CONTROL_PARAMS_INTERNAL *);
void (*__memAddAdditionalDependants__)(struct RsClient *, struct Memory *, RsResourceRef *);
NV_STATUS (*__memControl_Prologue__)(struct Memory *, CALL_CONTEXT *, struct RS_RES_CONTROL_PARAMS_INTERNAL *);
NvBool (*__memCanCopy__)(struct Memory *);
NV_STATUS (*__memMapTo__)(struct Memory *, RS_RES_MAP_TO_PARAMS *);
void (*__memPreDestruct__)(struct Memory *);
NV_STATUS (*__memUnmapFrom__)(struct Memory *, RS_RES_UNMAP_FROM_PARAMS *);
void (*__memControl_Epilogue__)(struct Memory *, CALL_CONTEXT *, struct RS_RES_CONTROL_PARAMS_INTERNAL *);
NV_STATUS (*__memControlLookup__)(struct Memory *, struct RS_RES_CONTROL_PARAMS_INTERNAL *, const struct NVOC_EXPORTED_METHOD_DEF **);
NvBool (*__memAccessCallback__)(struct Memory *, struct RsClient *, void *, RsAccessRight);
NvBool bConstructed;
struct Device *pDevice;
struct Subdevice *pSubDevice;
struct OBJGPU *pGpu;
NvBool bBcResource;
NvU32 categoryClassId;
NvU64 Length;
NvU32 HeapOwner;
NvU32 RefCount;
struct Heap *pHeap;
MEMORY_DESCRIPTOR *pMemDesc;
NvBool isMemDescOwner;
memCircularListItem dupListItem;
NvP64 KernelVAddr;
NvP64 KernelMapPriv;
PmuMapping *pPmuMappingList;
NODE Node;
NvU32 Attr;
NvU32 Attr2;
NvU32 Pitch;
NvU32 Type;
NvU32 Flags;
NvU32 tag;
NvU64 osDeviceHandle;
HWRESOURCE_INFO *pHwResource;
NvBool bRpcAlloc;
VGPU_NS_INTR vgpuNsIntr;
};
#ifndef __NVOC_CLASS_Memory_TYPEDEF__
#define __NVOC_CLASS_Memory_TYPEDEF__
typedef struct Memory Memory;
#endif /* __NVOC_CLASS_Memory_TYPEDEF__ */
#ifndef __nvoc_class_id_Memory
#define __nvoc_class_id_Memory 0x4789f2
#endif /* __nvoc_class_id_Memory */
extern const struct NVOC_CLASS_DEF __nvoc_class_def_Memory;
#define __staticCast_Memory(pThis) \
((pThis)->__nvoc_pbase_Memory)
#ifdef __nvoc_mem_h_disabled
#define __dynamicCast_Memory(pThis) ((Memory*)NULL)
#else //__nvoc_mem_h_disabled
#define __dynamicCast_Memory(pThis) \
((Memory*)__nvoc_dynamicCast(staticCast((pThis), Dynamic), classInfo(Memory)))
#endif //__nvoc_mem_h_disabled
NV_STATUS __nvoc_objCreateDynamic_Memory(Memory**, Dynamic*, NvU32, va_list);
NV_STATUS __nvoc_objCreate_Memory(Memory**, Dynamic*, NvU32, CALL_CONTEXT * arg_pCallContext, struct RS_RES_ALLOC_PARAMS_INTERNAL * arg_pParams);
#define __objCreate_Memory(ppNewObj, pParent, createFlags, arg_pCallContext, arg_pParams) \
__nvoc_objCreate_Memory((ppNewObj), staticCast((pParent), Dynamic), (createFlags), arg_pCallContext, arg_pParams)
#define memGetMapAddrSpace(pMemory, pCallContext, mapFlags, pAddrSpace) memGetMapAddrSpace_DISPATCH(pMemory, pCallContext, mapFlags, pAddrSpace)
#define memControl(pMemory, pCallContext, pParams) memControl_DISPATCH(pMemory, pCallContext, pParams)
#define memMap(pMemory, pCallContext, pParams, pCpuMapping) memMap_DISPATCH(pMemory, pCallContext, pParams, pCpuMapping)
#define memUnmap(pMemory, pCallContext, pCpuMapping) memUnmap_DISPATCH(pMemory, pCallContext, pCpuMapping)
#define memGetMemInterMapParams(pMemory, pParams) memGetMemInterMapParams_DISPATCH(pMemory, pParams)
#define memCheckMemInterUnmap(pMemory, bSubdeviceHandleProvided) memCheckMemInterUnmap_DISPATCH(pMemory, bSubdeviceHandleProvided)
#define memGetMemoryMappingDescriptor(pMemory, ppMemDesc) memGetMemoryMappingDescriptor_DISPATCH(pMemory, ppMemDesc)
#define memCheckCopyPermissions(pMemory, pDstGpu, hDstClientNvBool) memCheckCopyPermissions_DISPATCH(pMemory, pDstGpu, hDstClientNvBool)
#define memIsReady(pMemory) memIsReady_DISPATCH(pMemory)
#define memShareCallback(pResource, pInvokingClient, pParentRef, pSharePolicy) memShareCallback_DISPATCH(pResource, pInvokingClient, pParentRef, pSharePolicy)
#define memGetRefCount(pResource) memGetRefCount_DISPATCH(pResource)
#define memControlFilter(pResource, pCallContext, pParams) memControlFilter_DISPATCH(pResource, pCallContext, pParams)
#define memAddAdditionalDependants(pClient, pResource, pReference) memAddAdditionalDependants_DISPATCH(pClient, pResource, pReference)
#define memControl_Prologue(pResource, pCallContext, pParams) memControl_Prologue_DISPATCH(pResource, pCallContext, pParams)
#define memCanCopy(pResource) memCanCopy_DISPATCH(pResource)
#define memMapTo(pResource, pParams) memMapTo_DISPATCH(pResource, pParams)
#define memPreDestruct(pResource) memPreDestruct_DISPATCH(pResource)
#define memUnmapFrom(pResource, pParams) memUnmapFrom_DISPATCH(pResource, pParams)
#define memControl_Epilogue(pResource, pCallContext, pParams) memControl_Epilogue_DISPATCH(pResource, pCallContext, pParams)
#define memControlLookup(pResource, pParams, ppEntry) memControlLookup_DISPATCH(pResource, pParams, ppEntry)
#define memAccessCallback(pResource, pInvokingClient, pAllocParams, accessRight) memAccessCallback_DISPATCH(pResource, pInvokingClient, pAllocParams, accessRight)
NV_STATUS memGetMapAddrSpace_IMPL(struct Memory *pMemory, CALL_CONTEXT *pCallContext, NvU32 mapFlags, NV_ADDRESS_SPACE *pAddrSpace);
static inline NV_STATUS memGetMapAddrSpace_DISPATCH(struct Memory *pMemory, CALL_CONTEXT *pCallContext, NvU32 mapFlags, NV_ADDRESS_SPACE *pAddrSpace) {
return pMemory->__memGetMapAddrSpace__(pMemory, pCallContext, mapFlags, pAddrSpace);
}
NV_STATUS memControl_IMPL(struct Memory *pMemory, CALL_CONTEXT *pCallContext, struct RS_RES_CONTROL_PARAMS_INTERNAL *pParams);
static inline NV_STATUS memControl_DISPATCH(struct Memory *pMemory, CALL_CONTEXT *pCallContext, struct RS_RES_CONTROL_PARAMS_INTERNAL *pParams) {
return pMemory->__memControl__(pMemory, pCallContext, pParams);
}
NV_STATUS memMap_IMPL(struct Memory *pMemory, CALL_CONTEXT *pCallContext, struct RS_CPU_MAP_PARAMS *pParams, RsCpuMapping *pCpuMapping);
static inline NV_STATUS memMap_DISPATCH(struct Memory *pMemory, CALL_CONTEXT *pCallContext, struct RS_CPU_MAP_PARAMS *pParams, RsCpuMapping *pCpuMapping) {
return pMemory->__memMap__(pMemory, pCallContext, pParams, pCpuMapping);
}
NV_STATUS memUnmap_IMPL(struct Memory *pMemory, CALL_CONTEXT *pCallContext, RsCpuMapping *pCpuMapping);
static inline NV_STATUS memUnmap_DISPATCH(struct Memory *pMemory, CALL_CONTEXT *pCallContext, RsCpuMapping *pCpuMapping) {
return pMemory->__memUnmap__(pMemory, pCallContext, pCpuMapping);
}
NV_STATUS memGetMemInterMapParams_IMPL(struct Memory *pMemory, RMRES_MEM_INTER_MAP_PARAMS *pParams);
static inline NV_STATUS memGetMemInterMapParams_DISPATCH(struct Memory *pMemory, RMRES_MEM_INTER_MAP_PARAMS *pParams) {
return pMemory->__memGetMemInterMapParams__(pMemory, pParams);
}
static inline NV_STATUS memCheckMemInterUnmap_ac1694(struct Memory *pMemory, NvBool bSubdeviceHandleProvided) {
return NV_OK;
}
static inline NV_STATUS memCheckMemInterUnmap_DISPATCH(struct Memory *pMemory, NvBool bSubdeviceHandleProvided) {
return pMemory->__memCheckMemInterUnmap__(pMemory, bSubdeviceHandleProvided);
}
NV_STATUS memGetMemoryMappingDescriptor_IMPL(struct Memory *pMemory, MEMORY_DESCRIPTOR **ppMemDesc);
static inline NV_STATUS memGetMemoryMappingDescriptor_DISPATCH(struct Memory *pMemory, MEMORY_DESCRIPTOR **ppMemDesc) {
return pMemory->__memGetMemoryMappingDescriptor__(pMemory, ppMemDesc);
}
static inline NV_STATUS memCheckCopyPermissions_ac1694(struct Memory *pMemory, struct OBJGPU *pDstGpu, NvHandle hDstClientNvBool) {
return NV_OK;
}
static inline NV_STATUS memCheckCopyPermissions_DISPATCH(struct Memory *pMemory, struct OBJGPU *pDstGpu, NvHandle hDstClientNvBool) {
return pMemory->__memCheckCopyPermissions__(pMemory, pDstGpu, hDstClientNvBool);
}
NV_STATUS memIsReady_IMPL(struct Memory *pMemory);
static inline NV_STATUS memIsReady_DISPATCH(struct Memory *pMemory) {
return pMemory->__memIsReady__(pMemory);
}
static inline NvBool memShareCallback_DISPATCH(struct Memory *pResource, struct RsClient *pInvokingClient, struct RsResourceRef *pParentRef, RS_SHARE_POLICY *pSharePolicy) {
return pResource->__memShareCallback__(pResource, pInvokingClient, pParentRef, pSharePolicy);
}
static inline NvU32 memGetRefCount_DISPATCH(struct Memory *pResource) {
return pResource->__memGetRefCount__(pResource);
}
static inline NV_STATUS memControlFilter_DISPATCH(struct Memory *pResource, struct CALL_CONTEXT *pCallContext, struct RS_RES_CONTROL_PARAMS_INTERNAL *pParams) {
return pResource->__memControlFilter__(pResource, pCallContext, pParams);
}
static inline void memAddAdditionalDependants_DISPATCH(struct RsClient *pClient, struct Memory *pResource, RsResourceRef *pReference) {
pResource->__memAddAdditionalDependants__(pClient, pResource, pReference);
}
static inline NV_STATUS memControl_Prologue_DISPATCH(struct Memory *pResource, CALL_CONTEXT *pCallContext, struct RS_RES_CONTROL_PARAMS_INTERNAL *pParams) {
return pResource->__memControl_Prologue__(pResource, pCallContext, pParams);
}
static inline NvBool memCanCopy_DISPATCH(struct Memory *pResource) {
return pResource->__memCanCopy__(pResource);
}
static inline NV_STATUS memMapTo_DISPATCH(struct Memory *pResource, RS_RES_MAP_TO_PARAMS *pParams) {
return pResource->__memMapTo__(pResource, pParams);
}
static inline void memPreDestruct_DISPATCH(struct Memory *pResource) {
pResource->__memPreDestruct__(pResource);
}
static inline NV_STATUS memUnmapFrom_DISPATCH(struct Memory *pResource, RS_RES_UNMAP_FROM_PARAMS *pParams) {
return pResource->__memUnmapFrom__(pResource, pParams);
}
static inline void memControl_Epilogue_DISPATCH(struct Memory *pResource, CALL_CONTEXT *pCallContext, struct RS_RES_CONTROL_PARAMS_INTERNAL *pParams) {
pResource->__memControl_Epilogue__(pResource, pCallContext, pParams);
}
static inline NV_STATUS memControlLookup_DISPATCH(struct Memory *pResource, struct RS_RES_CONTROL_PARAMS_INTERNAL *pParams, const struct NVOC_EXPORTED_METHOD_DEF **ppEntry) {
return pResource->__memControlLookup__(pResource, pParams, ppEntry);
}
static inline NvBool memAccessCallback_DISPATCH(struct Memory *pResource, struct RsClient *pInvokingClient, void *pAllocParams, RsAccessRight accessRight) {
return pResource->__memAccessCallback__(pResource, pInvokingClient, pAllocParams, accessRight);
}
NV_STATUS memConstruct_IMPL(struct Memory *arg_pMemory, CALL_CONTEXT *arg_pCallContext, struct RS_RES_ALLOC_PARAMS_INTERNAL *arg_pParams);
#define __nvoc_memConstruct(arg_pMemory, arg_pCallContext, arg_pParams) memConstruct_IMPL(arg_pMemory, arg_pCallContext, arg_pParams)
NV_STATUS memCopyConstruct_IMPL(struct Memory *pMemory, CALL_CONTEXT *pCallContext, struct RS_RES_ALLOC_PARAMS_INTERNAL *pParams);
#ifdef __nvoc_mem_h_disabled
static inline NV_STATUS memCopyConstruct(struct Memory *pMemory, CALL_CONTEXT *pCallContext, struct RS_RES_ALLOC_PARAMS_INTERNAL *pParams) {
NV_ASSERT_FAILED_PRECOMP("Memory was disabled!");
return NV_ERR_NOT_SUPPORTED;
}
#else //__nvoc_mem_h_disabled
#define memCopyConstruct(pMemory, pCallContext, pParams) memCopyConstruct_IMPL(pMemory, pCallContext, pParams)
#endif //__nvoc_mem_h_disabled
void memDestruct_IMPL(struct Memory *pMemory);
#define __nvoc_memDestruct(pMemory) memDestruct_IMPL(pMemory)
NV_STATUS memConstructCommon_IMPL(struct Memory *pMemory, NvU32 categoryClassId, NvU32 flags, MEMORY_DESCRIPTOR *pMemDesc, NvU32 heapOwner, struct Heap *pHeap, NvU32 attr, NvU32 attr2, NvU32 Pitch, NvU32 type, NvU32 tag, HWRESOURCE_INFO *pHwResource);
#ifdef __nvoc_mem_h_disabled
static inline NV_STATUS memConstructCommon(struct Memory *pMemory, NvU32 categoryClassId, NvU32 flags, MEMORY_DESCRIPTOR *pMemDesc, NvU32 heapOwner, struct Heap *pHeap, NvU32 attr, NvU32 attr2, NvU32 Pitch, NvU32 type, NvU32 tag, HWRESOURCE_INFO *pHwResource) {
NV_ASSERT_FAILED_PRECOMP("Memory was disabled!");
return NV_ERR_NOT_SUPPORTED;
}
#else //__nvoc_mem_h_disabled
#define memConstructCommon(pMemory, categoryClassId, flags, pMemDesc, heapOwner, pHeap, attr, attr2, Pitch, type, tag, pHwResource) memConstructCommon_IMPL(pMemory, categoryClassId, flags, pMemDesc, heapOwner, pHeap, attr, attr2, Pitch, type, tag, pHwResource)
#endif //__nvoc_mem_h_disabled
void memDestructCommon_IMPL(struct Memory *pMemory);
#ifdef __nvoc_mem_h_disabled
static inline void memDestructCommon(struct Memory *pMemory) {
NV_ASSERT_FAILED_PRECOMP("Memory was disabled!");
}
#else //__nvoc_mem_h_disabled
#define memDestructCommon(pMemory) memDestructCommon_IMPL(pMemory)
#endif //__nvoc_mem_h_disabled
NV_STATUS memCreateMemDesc_IMPL(struct OBJGPU *pGpu, MEMORY_DESCRIPTOR **ppMemDesc, NV_ADDRESS_SPACE addrSpace, NvU64 FBOffset, NvU64 length, NvU32 attr, NvU32 attr2);
#define memCreateMemDesc(pGpu, ppMemDesc, addrSpace, FBOffset, length, attr, attr2) memCreateMemDesc_IMPL(pGpu, ppMemDesc, addrSpace, FBOffset, length, attr, attr2)
NV_STATUS memCreateKernelMapping_IMPL(struct Memory *pMemory, NvU32 Protect, NvBool bClear);
#ifdef __nvoc_mem_h_disabled
static inline NV_STATUS memCreateKernelMapping(struct Memory *pMemory, NvU32 Protect, NvBool bClear) {
NV_ASSERT_FAILED_PRECOMP("Memory was disabled!");
return NV_ERR_NOT_SUPPORTED;
}
#else //__nvoc_mem_h_disabled
#define memCreateKernelMapping(pMemory, Protect, bClear) memCreateKernelMapping_IMPL(pMemory, Protect, bClear)
#endif //__nvoc_mem_h_disabled
NV_STATUS memGetByHandle_IMPL(struct RsClient *pClient, NvHandle hMemory, struct Memory **ppMemory);
#define memGetByHandle(pClient, hMemory, ppMemory) memGetByHandle_IMPL(pClient, hMemory, ppMemory)
NV_STATUS memGetByHandleAndDevice_IMPL(struct RsClient *pClient, NvHandle hMemory, NvHandle hDevice, struct Memory **ppMemory);
#define memGetByHandleAndDevice(pClient, hMemory, hDevice, ppMemory) memGetByHandleAndDevice_IMPL(pClient, hMemory, hDevice, ppMemory)
NV_STATUS memGetByHandleAndGroupedGpu_IMPL(struct RsClient *pClient, NvHandle hMemory, struct OBJGPU *pGpu, struct Memory **ppMemory);
#define memGetByHandleAndGroupedGpu(pClient, hMemory, pGpu, ppMemory) memGetByHandleAndGroupedGpu_IMPL(pClient, hMemory, pGpu, ppMemory)
#undef PRIVATE_FIELD
#endif
#ifdef __cplusplus
} // extern "C"
#endif
#endif // _G_MEM_NVOC_H_
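
The header above routes every virtual call through the per-object function-pointer table: the public memXxx() macros expand to the corresponding _DISPATCH inline wrappers, which simply indirect through the slot that __nvoc_init_funcTable_Memory() filled in. A minimal sketch of a caller, assuming pClient and hMemory come from the surrounding RM code path (they are not defined in this diff):

// Hypothetical caller: resolve an NV01_MEMORY_* handle owned by pClient to
// its Memory object, then use the dispatch macros declared above.
struct Memory *pMemory = NULL;
MEMORY_DESCRIPTOR *pMemDesc = NULL;
NV_STATUS status;

status = memGetByHandle(pClient, hMemory, &pMemory);
if (status != NV_OK)
    return status;

// memIsReady() expands to memIsReady_DISPATCH(), which indirects through
// pMemory->__memIsReady__; a subclass may have overridden that slot.
status = memIsReady(pMemory);
if (status != NV_OK)
    return status;

return memGetMemoryMappingDescriptor(pMemory, &pMemDesc);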


@@ -0,0 +1,402 @@
#ifndef _G_NV_DEBUG_DUMP_NVOC_H_
#define _G_NV_DEBUG_DUMP_NVOC_H_
#include "nvoc/runtime.h"
#ifdef __cplusplus
extern "C" {
#endif
/*
* SPDX-FileCopyrightText: Copyright (c) 1993-2020 NVIDIA CORPORATION & AFFILIATES. All rights reserved.
* SPDX-License-Identifier: MIT
*
* Permission is hereby granted, free of charge, to any person obtaining a
* copy of this software and associated documentation files (the "Software"),
* to deal in the Software without restriction, including without limitation
* the rights to use, copy, modify, merge, publish, distribute, sublicense,
* and/or sell copies of the Software, and to permit persons to whom the
* Software is furnished to do so, subject to the following conditions:
*
* The above copyright notice and this permission notice shall be included in
* all copies or substantial portions of the Software.
*
* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
* IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
* FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL
* THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
* LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
* FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER
* DEALINGS IN THE SOFTWARE.
*/
#include "g_nv_debug_dump_nvoc.h"
#ifndef _NV_DEBUG_DUMP_H_
#define _NV_DEBUG_DUMP_H_
#include "gpu/eng_state.h"
#include "gpu/mem_mgr/mem_desc.h"
#include "core/info_block.h"
#include "lib/protobuf/prb.h"
#include "rmapi/control.h"
#include "gpu/gpu.h"
// OS Independent Error Types
typedef enum
{
NVD_SKIP_ZERO,
NVD_GPU_HUNG,
NVD_FAILURE_TO_RECOVER,
NVD_MACHINE_CHECK,
NVD_POWERUP_FAILURE,
NVD_CPU_EXCEPTION,
NVD_EXTERNALLY_GENERATED,
NVD_GPU_GENERATED,
} NVD_ERROR_TYPE;
#define NV_NVD_ERROR_CODE_MAJOR 31:16
#define NV_NVD_ERROR_CODE_MINOR 15:0
#define NVD_ERROR_CODE(Major, Minor) \
(DRF_NUM(_NVD, _ERROR_CODE, _MAJOR, Major) | \
DRF_NUM(_NVD, _ERROR_CODE, _MINOR, Minor))
#define NVD_ENGINE_FLAGS_PRIORITY 1:0
#define NVD_ENGINE_FLAGS_PRIORITY_LOW 0x00000000
#define NVD_ENGINE_FLAGS_PRIORITY_MED 0x00000001
#define NVD_ENGINE_FLAGS_PRIORITY_HIGH 0x00000002
#define NVD_ENGINE_FLAGS_PRIORITY_CRITICAL 0x00000003
/*
* NVD_ENGINE_FLAGS_SOURCE
*
* CPU - Always run on CPU, even if running as GSP-RM client.
* GSP - Run on GSP for GSP-RM client, otherwise run on CPU.
* BOTH - Engine dump is split between GSP-RM and CPU. Run both.
*/
#define NVD_ENGINE_FLAGS_SOURCE 3:2
#define NVD_ENGINE_FLAGS_SOURCE_CPU 0x00000001
#define NVD_ENGINE_FLAGS_SOURCE_GSP 0x00000002
#define NVD_ENGINE_FLAGS_SOURCE_BOTH 0x00000003
#define NV_NVD_ENGINE_STEP_MAJOR 31:16
#define NV_NVD_ENGINE_STEP_MINOR 15:0
#define NVD_ENGINE_STEP(Major, Minor) \
(DRF_NUM(_NVD, _ENGINE_STEP, _MAJOR, Major) | \
DRF_NUM(_NVD, _ENGINE_STEP, _MINOR, Minor))
typedef enum
{
NVD_FIRST_ENGINE = 0,
NVD_LAST_ENGINE = 0xFF,
} NVD_WHICH_ENGINE;
typedef struct _def_nvd_debug_buffer {
NvU32 tag;
MEMORY_DESCRIPTOR *pMemDesc;
struct _def_nvd_debug_buffer *pNext;
} NVD_DEBUG_BUFFER;
// Enumeration of Dump Types (Journal Entry, OCA dump, or API requested dump)
typedef enum
{
NVD_DUMP_TYPE_JOURNAL, // Very small records only. Total for
// whole Journal is 4K (including overhead),
// actual amount of raw data stored is less.
NVD_DUMP_TYPE_OCA, // Assume 8K - 512 K total
NVD_DUMP_TYPE_API, // Mini Dump >512K
} NVD_DUMP_TYPE;
// Enumeration of Sizes returned by nvDumpGetDumpBufferSizeEnum
typedef enum
{
NVD_DUMP_SIZE_JOURNAL_WRITE, // Very small records only.
NVD_DUMP_SIZE_SMALL, // Assume 8K - 512 K total
NVD_DUMP_SIZE_MEDIUM, // Mini Dump >512K
NVD_DUMP_SIZE_LARGE // Megs of space
} NVD_DUMP_SIZE;
//
// NV Dump State
//
// State passed into all dump routines.
//
typedef struct _def_nvd_state NVD_STATE;
struct _def_nvd_state
{
NvBool bDumpInProcess; // Currently creating dump.
NvBool bRMLock; // Acquired the RM lock.
NvBool bGpuAccessible; // OK to read priv registers on GPU.
NvU32 bugCheckCode; // Raw OS bugcheck code.
NvU32 internalCode; // OS Independent error code.
NvU32 initialbufferSize; // Size of buffer passed in.
NVD_DUMP_TYPE nvDumpType; // Type of DUMP.
};
NVD_DUMP_SIZE nvDumpGetDumpBufferSizeEnum( NVD_STATE *pNvDumpState );
typedef NV_STATUS NvdDumpEngineFunc(struct OBJGPU *pGpu, PRB_ENCODER *pPrbEnc, NVD_STATE *pNvDumpState, void *pvData);
typedef struct _def_nvd_engine_callback {
NvdDumpEngineFunc *pDumpEngineFunc; // Callback function.
NvU32 engDesc; // Indicates which engine this is.
NvU32 flags; // See NVD_ENGINE_FLAGS above.
void *pvData; // Opaque pointer to data passed to callback function.
struct _def_nvd_engine_callback *pNext; // Next Engine
} NVD_ENGINE_CALLBACK;
#ifdef NVOC_NV_DEBUG_DUMP_H_PRIVATE_ACCESS_ALLOWED
#define PRIVATE_FIELD(x) x
#else
#define PRIVATE_FIELD(x) NVOC_PRIVATE_FIELD(x)
#endif
struct NvDebugDump {
const struct NVOC_RTTI *__nvoc_rtti;
struct OBJENGSTATE __nvoc_base_OBJENGSTATE;
struct Object *__nvoc_pbase_Object;
struct OBJENGSTATE *__nvoc_pbase_OBJENGSTATE;
struct NvDebugDump *__nvoc_pbase_NvDebugDump;
NV_STATUS (*__nvdConstructEngine__)(struct OBJGPU *, struct NvDebugDump *, ENGDESCRIPTOR);
NV_STATUS (*__nvdStateInitLocked__)(struct OBJGPU *, struct NvDebugDump *);
NV_STATUS (*__nvdReconcileTunableState__)(POBJGPU, struct NvDebugDump *, void *);
NV_STATUS (*__nvdStateLoad__)(POBJGPU, struct NvDebugDump *, NvU32);
NV_STATUS (*__nvdStateUnload__)(POBJGPU, struct NvDebugDump *, NvU32);
NV_STATUS (*__nvdStatePreLoad__)(POBJGPU, struct NvDebugDump *, NvU32);
NV_STATUS (*__nvdStatePostUnload__)(POBJGPU, struct NvDebugDump *, NvU32);
void (*__nvdStateDestroy__)(POBJGPU, struct NvDebugDump *);
NV_STATUS (*__nvdStatePreUnload__)(POBJGPU, struct NvDebugDump *, NvU32);
NV_STATUS (*__nvdStateInitUnlocked__)(POBJGPU, struct NvDebugDump *);
void (*__nvdInitMissing__)(POBJGPU, struct NvDebugDump *);
NV_STATUS (*__nvdStatePreInitLocked__)(POBJGPU, struct NvDebugDump *);
NV_STATUS (*__nvdStatePreInitUnlocked__)(POBJGPU, struct NvDebugDump *);
NV_STATUS (*__nvdGetTunableState__)(POBJGPU, struct NvDebugDump *, void *);
NV_STATUS (*__nvdCompareTunableState__)(POBJGPU, struct NvDebugDump *, void *, void *);
void (*__nvdFreeTunableState__)(POBJGPU, struct NvDebugDump *, void *);
NV_STATUS (*__nvdStatePostLoad__)(POBJGPU, struct NvDebugDump *, NvU32);
NV_STATUS (*__nvdAllocTunableState__)(POBJGPU, struct NvDebugDump *, void **);
NV_STATUS (*__nvdSetTunableState__)(POBJGPU, struct NvDebugDump *, void *);
NvBool (*__nvdIsPresent__)(POBJGPU, struct NvDebugDump *);
NVD_DEBUG_BUFFER *pHeadDebugBuffer;
NVD_ENGINE_CALLBACK *pCallbacks;
};
#ifndef __NVOC_CLASS_NvDebugDump_TYPEDEF__
#define __NVOC_CLASS_NvDebugDump_TYPEDEF__
typedef struct NvDebugDump NvDebugDump;
#endif /* __NVOC_CLASS_NvDebugDump_TYPEDEF__ */
#ifndef __nvoc_class_id_NvDebugDump
#define __nvoc_class_id_NvDebugDump 0x7e80a2
#endif /* __nvoc_class_id_NvDebugDump */
extern const struct NVOC_CLASS_DEF __nvoc_class_def_NvDebugDump;
#define __staticCast_NvDebugDump(pThis) \
((pThis)->__nvoc_pbase_NvDebugDump)
#ifdef __nvoc_nv_debug_dump_h_disabled
#define __dynamicCast_NvDebugDump(pThis) ((NvDebugDump*)NULL)
#else //__nvoc_nv_debug_dump_h_disabled
#define __dynamicCast_NvDebugDump(pThis) \
((NvDebugDump*)__nvoc_dynamicCast(staticCast((pThis), Dynamic), classInfo(NvDebugDump)))
#endif //__nvoc_nv_debug_dump_h_disabled
#define PDB_PROP_NVD_IS_MISSING_BASE_CAST __nvoc_base_OBJENGSTATE.
#define PDB_PROP_NVD_IS_MISSING_BASE_NAME PDB_PROP_ENGSTATE_IS_MISSING
NV_STATUS __nvoc_objCreateDynamic_NvDebugDump(NvDebugDump**, Dynamic*, NvU32, va_list);
NV_STATUS __nvoc_objCreate_NvDebugDump(NvDebugDump**, Dynamic*, NvU32);
#define __objCreate_NvDebugDump(ppNewObj, pParent, createFlags) \
__nvoc_objCreate_NvDebugDump((ppNewObj), staticCast((pParent), Dynamic), (createFlags))
#define nvdConstructEngine(pGpu, pNvd, arg0) nvdConstructEngine_DISPATCH(pGpu, pNvd, arg0)
#define nvdStateInitLocked(pGpu, pNvd) nvdStateInitLocked_DISPATCH(pGpu, pNvd)
#define nvdReconcileTunableState(pGpu, pEngstate, pTunableState) nvdReconcileTunableState_DISPATCH(pGpu, pEngstate, pTunableState)
#define nvdStateLoad(pGpu, pEngstate, arg0) nvdStateLoad_DISPATCH(pGpu, pEngstate, arg0)
#define nvdStateUnload(pGpu, pEngstate, arg0) nvdStateUnload_DISPATCH(pGpu, pEngstate, arg0)
#define nvdStatePreLoad(pGpu, pEngstate, arg0) nvdStatePreLoad_DISPATCH(pGpu, pEngstate, arg0)
#define nvdStatePostUnload(pGpu, pEngstate, arg0) nvdStatePostUnload_DISPATCH(pGpu, pEngstate, arg0)
#define nvdStateDestroy(pGpu, pEngstate) nvdStateDestroy_DISPATCH(pGpu, pEngstate)
#define nvdStatePreUnload(pGpu, pEngstate, arg0) nvdStatePreUnload_DISPATCH(pGpu, pEngstate, arg0)
#define nvdStateInitUnlocked(pGpu, pEngstate) nvdStateInitUnlocked_DISPATCH(pGpu, pEngstate)
#define nvdInitMissing(pGpu, pEngstate) nvdInitMissing_DISPATCH(pGpu, pEngstate)
#define nvdStatePreInitLocked(pGpu, pEngstate) nvdStatePreInitLocked_DISPATCH(pGpu, pEngstate)
#define nvdStatePreInitUnlocked(pGpu, pEngstate) nvdStatePreInitUnlocked_DISPATCH(pGpu, pEngstate)
#define nvdGetTunableState(pGpu, pEngstate, pTunableState) nvdGetTunableState_DISPATCH(pGpu, pEngstate, pTunableState)
#define nvdCompareTunableState(pGpu, pEngstate, pTunables1, pTunables2) nvdCompareTunableState_DISPATCH(pGpu, pEngstate, pTunables1, pTunables2)
#define nvdFreeTunableState(pGpu, pEngstate, pTunableState) nvdFreeTunableState_DISPATCH(pGpu, pEngstate, pTunableState)
#define nvdStatePostLoad(pGpu, pEngstate, arg0) nvdStatePostLoad_DISPATCH(pGpu, pEngstate, arg0)
#define nvdAllocTunableState(pGpu, pEngstate, ppTunableState) nvdAllocTunableState_DISPATCH(pGpu, pEngstate, ppTunableState)
#define nvdSetTunableState(pGpu, pEngstate, pTunableState) nvdSetTunableState_DISPATCH(pGpu, pEngstate, pTunableState)
#define nvdIsPresent(pGpu, pEngstate) nvdIsPresent_DISPATCH(pGpu, pEngstate)
NV_STATUS nvdConstructEngine_IMPL(struct OBJGPU *pGpu, struct NvDebugDump *pNvd, ENGDESCRIPTOR arg0);
static inline NV_STATUS nvdConstructEngine_DISPATCH(struct OBJGPU *pGpu, struct NvDebugDump *pNvd, ENGDESCRIPTOR arg0) {
return pNvd->__nvdConstructEngine__(pGpu, pNvd, arg0);
}
NV_STATUS nvdStateInitLocked_IMPL(struct OBJGPU *pGpu, struct NvDebugDump *pNvd);
static inline NV_STATUS nvdStateInitLocked_DISPATCH(struct OBJGPU *pGpu, struct NvDebugDump *pNvd) {
return pNvd->__nvdStateInitLocked__(pGpu, pNvd);
}
static inline NV_STATUS nvdReconcileTunableState_DISPATCH(POBJGPU pGpu, struct NvDebugDump *pEngstate, void *pTunableState) {
return pEngstate->__nvdReconcileTunableState__(pGpu, pEngstate, pTunableState);
}
static inline NV_STATUS nvdStateLoad_DISPATCH(POBJGPU pGpu, struct NvDebugDump *pEngstate, NvU32 arg0) {
return pEngstate->__nvdStateLoad__(pGpu, pEngstate, arg0);
}
static inline NV_STATUS nvdStateUnload_DISPATCH(POBJGPU pGpu, struct NvDebugDump *pEngstate, NvU32 arg0) {
return pEngstate->__nvdStateUnload__(pGpu, pEngstate, arg0);
}
static inline NV_STATUS nvdStatePreLoad_DISPATCH(POBJGPU pGpu, struct NvDebugDump *pEngstate, NvU32 arg0) {
return pEngstate->__nvdStatePreLoad__(pGpu, pEngstate, arg0);
}
static inline NV_STATUS nvdStatePostUnload_DISPATCH(POBJGPU pGpu, struct NvDebugDump *pEngstate, NvU32 arg0) {
return pEngstate->__nvdStatePostUnload__(pGpu, pEngstate, arg0);
}
static inline void nvdStateDestroy_DISPATCH(POBJGPU pGpu, struct NvDebugDump *pEngstate) {
pEngstate->__nvdStateDestroy__(pGpu, pEngstate);
}
static inline NV_STATUS nvdStatePreUnload_DISPATCH(POBJGPU pGpu, struct NvDebugDump *pEngstate, NvU32 arg0) {
return pEngstate->__nvdStatePreUnload__(pGpu, pEngstate, arg0);
}
static inline NV_STATUS nvdStateInitUnlocked_DISPATCH(POBJGPU pGpu, struct NvDebugDump *pEngstate) {
return pEngstate->__nvdStateInitUnlocked__(pGpu, pEngstate);
}
static inline void nvdInitMissing_DISPATCH(POBJGPU pGpu, struct NvDebugDump *pEngstate) {
pEngstate->__nvdInitMissing__(pGpu, pEngstate);
}
static inline NV_STATUS nvdStatePreInitLocked_DISPATCH(POBJGPU pGpu, struct NvDebugDump *pEngstate) {
return pEngstate->__nvdStatePreInitLocked__(pGpu, pEngstate);
}
static inline NV_STATUS nvdStatePreInitUnlocked_DISPATCH(POBJGPU pGpu, struct NvDebugDump *pEngstate) {
return pEngstate->__nvdStatePreInitUnlocked__(pGpu, pEngstate);
}
static inline NV_STATUS nvdGetTunableState_DISPATCH(POBJGPU pGpu, struct NvDebugDump *pEngstate, void *pTunableState) {
return pEngstate->__nvdGetTunableState__(pGpu, pEngstate, pTunableState);
}
static inline NV_STATUS nvdCompareTunableState_DISPATCH(POBJGPU pGpu, struct NvDebugDump *pEngstate, void *pTunables1, void *pTunables2) {
return pEngstate->__nvdCompareTunableState__(pGpu, pEngstate, pTunables1, pTunables2);
}
static inline void nvdFreeTunableState_DISPATCH(POBJGPU pGpu, struct NvDebugDump *pEngstate, void *pTunableState) {
pEngstate->__nvdFreeTunableState__(pGpu, pEngstate, pTunableState);
}
static inline NV_STATUS nvdStatePostLoad_DISPATCH(POBJGPU pGpu, struct NvDebugDump *pEngstate, NvU32 arg0) {
return pEngstate->__nvdStatePostLoad__(pGpu, pEngstate, arg0);
}
static inline NV_STATUS nvdAllocTunableState_DISPATCH(POBJGPU pGpu, struct NvDebugDump *pEngstate, void **ppTunableState) {
return pEngstate->__nvdAllocTunableState__(pGpu, pEngstate, ppTunableState);
}
static inline NV_STATUS nvdSetTunableState_DISPATCH(POBJGPU pGpu, struct NvDebugDump *pEngstate, void *pTunableState) {
return pEngstate->__nvdSetTunableState__(pGpu, pEngstate, pTunableState);
}
static inline NvBool nvdIsPresent_DISPATCH(POBJGPU pGpu, struct NvDebugDump *pEngstate) {
return pEngstate->__nvdIsPresent__(pGpu, pEngstate);
}
void nvdDestruct_IMPL(struct NvDebugDump *pNvd);
#define __nvoc_nvdDestruct(pNvd) nvdDestruct_IMPL(pNvd)
NV_STATUS nvdAllocDebugBuffer_IMPL(struct OBJGPU *pGpu, struct NvDebugDump *pNvd, NvU32 arg0, NvU32 *arg1, MEMORY_DESCRIPTOR **arg2);
#ifdef __nvoc_nv_debug_dump_h_disabled
static inline NV_STATUS nvdAllocDebugBuffer(struct OBJGPU *pGpu, struct NvDebugDump *pNvd, NvU32 arg0, NvU32 *arg1, MEMORY_DESCRIPTOR **arg2) {
NV_ASSERT_FAILED_PRECOMP("NvDebugDump was disabled!");
return NV_ERR_NOT_SUPPORTED;
}
#else //__nvoc_nv_debug_dump_h_disabled
#define nvdAllocDebugBuffer(pGpu, pNvd, arg0, arg1, arg2) nvdAllocDebugBuffer_IMPL(pGpu, pNvd, arg0, arg1, arg2)
#endif //__nvoc_nv_debug_dump_h_disabled
NV_STATUS nvdFreeDebugBuffer_IMPL(struct OBJGPU *pGpu, struct NvDebugDump *pNvd, MEMORY_DESCRIPTOR *arg0);
#ifdef __nvoc_nv_debug_dump_h_disabled
static inline NV_STATUS nvdFreeDebugBuffer(struct OBJGPU *pGpu, struct NvDebugDump *pNvd, MEMORY_DESCRIPTOR *arg0) {
NV_ASSERT_FAILED_PRECOMP("NvDebugDump was disabled!");
return NV_ERR_NOT_SUPPORTED;
}
#else //__nvoc_nv_debug_dump_h_disabled
#define nvdFreeDebugBuffer(pGpu, pNvd, arg0) nvdFreeDebugBuffer_IMPL(pGpu, pNvd, arg0)
#endif //__nvoc_nv_debug_dump_h_disabled
NV_STATUS nvdEngineSignUp_IMPL(struct OBJGPU *pGpu, struct NvDebugDump *pNvd, NvdDumpEngineFunc *arg0, NvU32 engDesc, NvU32 flags, void *arg1);
#ifdef __nvoc_nv_debug_dump_h_disabled
static inline NV_STATUS nvdEngineSignUp(struct OBJGPU *pGpu, struct NvDebugDump *pNvd, NvdDumpEngineFunc *arg0, NvU32 engDesc, NvU32 flags, void *arg1) {
NV_ASSERT_FAILED_PRECOMP("NvDebugDump was disabled!");
return NV_ERR_NOT_SUPPORTED;
}
#else //__nvoc_nv_debug_dump_h_disabled
#define nvdEngineSignUp(pGpu, pNvd, arg0, engDesc, flags, arg1) nvdEngineSignUp_IMPL(pGpu, pNvd, arg0, engDesc, flags, arg1)
#endif //__nvoc_nv_debug_dump_h_disabled
NV_STATUS nvdEngineRelease_IMPL(struct OBJGPU *pGpu, struct NvDebugDump *pNvd);
#ifdef __nvoc_nv_debug_dump_h_disabled
static inline NV_STATUS nvdEngineRelease(struct OBJGPU *pGpu, struct NvDebugDump *pNvd) {
NV_ASSERT_FAILED_PRECOMP("NvDebugDump was disabled!");
return NV_ERR_NOT_SUPPORTED;
}
#else //__nvoc_nv_debug_dump_h_disabled
#define nvdEngineRelease(pGpu, pNvd) nvdEngineRelease_IMPL(pGpu, pNvd)
#endif //__nvoc_nv_debug_dump_h_disabled
NV_STATUS nvdDoEngineDump_IMPL(struct OBJGPU *pGpu, struct NvDebugDump *pNvd, PRB_ENCODER *pPrbEnc, NVD_STATE *pNvDumpState, NvU32 arg0);
#ifdef __nvoc_nv_debug_dump_h_disabled
static inline NV_STATUS nvdDoEngineDump(struct OBJGPU *pGpu, struct NvDebugDump *pNvd, PRB_ENCODER *pPrbEnc, NVD_STATE *pNvDumpState, NvU32 arg0) {
NV_ASSERT_FAILED_PRECOMP("NvDebugDump was disabled!");
return NV_ERR_NOT_SUPPORTED;
}
#else //__nvoc_nv_debug_dump_h_disabled
#define nvdDoEngineDump(pGpu, pNvd, pPrbEnc, pNvDumpState, arg0) nvdDoEngineDump_IMPL(pGpu, pNvd, pPrbEnc, pNvDumpState, arg0)
#endif //__nvoc_nv_debug_dump_h_disabled
NV_STATUS nvdDumpAllEngines_IMPL(struct OBJGPU *pGpu, struct NvDebugDump *pNvd, PRB_ENCODER *pPrbEnc, NVD_STATE *pNvDumpState);
#ifdef __nvoc_nv_debug_dump_h_disabled
static inline NV_STATUS nvdDumpAllEngines(struct OBJGPU *pGpu, struct NvDebugDump *pNvd, PRB_ENCODER *pPrbEnc, NVD_STATE *pNvDumpState) {
NV_ASSERT_FAILED_PRECOMP("NvDebugDump was disabled!");
return NV_ERR_NOT_SUPPORTED;
}
#else //__nvoc_nv_debug_dump_h_disabled
#define nvdDumpAllEngines(pGpu, pNvd, pPrbEnc, pNvDumpState) nvdDumpAllEngines_IMPL(pGpu, pNvd, pPrbEnc, pNvDumpState)
#endif //__nvoc_nv_debug_dump_h_disabled
NV_STATUS nvdFindEngine_IMPL(struct OBJGPU *pGpu, struct NvDebugDump *pNvd, NvU32 engDesc, NVD_ENGINE_CALLBACK **ppEngineCallback);
#ifdef __nvoc_nv_debug_dump_h_disabled
static inline NV_STATUS nvdFindEngine(struct OBJGPU *pGpu, struct NvDebugDump *pNvd, NvU32 engDesc, NVD_ENGINE_CALLBACK **ppEngineCallback) {
NV_ASSERT_FAILED_PRECOMP("NvDebugDump was disabled!");
return NV_ERR_NOT_SUPPORTED;
}
#else //__nvoc_nv_debug_dump_h_disabled
#define nvdFindEngine(pGpu, pNvd, engDesc, ppEngineCallback) nvdFindEngine_IMPL(pGpu, pNvd, engDesc, ppEngineCallback)
#endif //__nvoc_nv_debug_dump_h_disabled
#undef PRIVATE_FIELD
#endif // _NV_DEBUG_DUMP_H_
#ifdef __cplusplus
} // extern "C"
#endif
#endif // _G_NV_DEBUG_DUMP_NVOC_H_
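
The registration interface declared above is a simple linked-list pattern: each engine hands NvDebugDump a NvdDumpEngineFunc plus a flags word describing its dump priority (bits 1:0) and where it should run (bits 3:2). A hedged sketch of what a call site might look like; the engine descriptor, the pvData pointer, the status variable, and the use of the REF_DEF() field helper from nvmisc.h are assumptions rather than content of this diff:

// Hypothetical dump callback; the signature matches the NvdDumpEngineFunc
// typedef above. pvData is whatever pointer was passed at sign-up time.
static NV_STATUS
_myEngineDumpFunc(struct OBJGPU *pGpu, PRB_ENCODER *pPrbEnc,
                  NVD_STATE *pNvDumpState, void *pvData)
{
    // Encode engine state into pPrbEnc, keeping the output small when
    // pNvDumpState->nvDumpType is NVD_DUMP_TYPE_JOURNAL.
    return NV_OK;
}

// Registration sketch. PRIORITY occupies bits 1:0 and SOURCE bits 3:2 of
// the flags word, so field helpers are assumed to place the values.
status = nvdEngineSignUp(pGpu, pNvd,
                         _myEngineDumpFunc,
                         myEngDesc /* illustrative engine descriptor */,
                         REF_DEF(NVD_ENGINE_FLAGS_PRIORITY, _MED) |
                         REF_DEF(NVD_ENGINE_FLAGS_SOURCE, _CPU),
                         pMyEngineState /* pvData */);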

File diff suppressed because it is too large

@@ -0,0 +1,28 @@
// This file is automatically generated by rmconfig - DO NOT EDIT!
//
// NVOC Header State: This file is used to select a different code path for disabled NVH
//
// Profile: devel-soc-disp-dce-client
// Template: templates/gt_nvh_state.h
//
#ifndef _G_NVH_STATE_H_
#define _G_NVH_STATE_H_
//
// __nvoc_nvh_state_guard
// This macro define is used to check whether this header is included before
// NVOC headers. The usage:
// #ifndef __nvoc_nvh_state_guard
// #error "NVH state guard header is not included prior to this NVOC header"
// #endif
//
#define __nvoc_nvh_state_guard
//
// List of disabled NVOC headers
//
#endif // _G_NVH_STATE_H_


@@ -0,0 +1,130 @@
#define NVOC_OBJECT_H_PRIVATE_ACCESS_ALLOWED
#include "nvoc/runtime.h"
#include "nvoc/rtti.h"
#include "nvtypes.h"
#include "nvport/nvport.h"
#include "nvport/inline/util_valist.h"
#include "utils/nvassert.h"
#include "g_object_nvoc.h"
#ifdef DEBUG
char __nvoc_class_id_uniqueness_check_0x497031 = 1;
#endif
extern const struct NVOC_CLASS_DEF __nvoc_class_def_Object;
void __nvoc_init_Object(Object*);
void __nvoc_init_funcTable_Object(Object*);
NV_STATUS __nvoc_ctor_Object(Object*);
void __nvoc_init_dataField_Object(Object*);
void __nvoc_dtor_Object(Object*);
extern const struct NVOC_EXPORT_INFO __nvoc_export_info_Object;
static const struct NVOC_RTTI __nvoc_rtti_Object_Object = {
/*pClassDef=*/ &__nvoc_class_def_Object,
/*dtor=*/ (NVOC_DYNAMIC_DTOR) &__nvoc_dtor_Object,
/*offset=*/ 0,
};
static const struct NVOC_CASTINFO __nvoc_castinfo_Object = {
/*numRelatives=*/ 1,
/*relatives=*/ {
&__nvoc_rtti_Object_Object,
},
};
const struct NVOC_CLASS_DEF __nvoc_class_def_Object =
{
/*classInfo=*/ {
/*size=*/ sizeof(Object),
/*classId=*/ classId(Object),
/*providerId=*/ &__nvoc_rtti_provider,
#if NV_PRINTF_STRINGS_ALLOWED
/*name=*/ "Object",
#endif
},
/*objCreatefn=*/ (NVOC_DYNAMIC_OBJ_CREATE) &__nvoc_objCreateDynamic_Object,
/*pCastInfo=*/ &__nvoc_castinfo_Object,
/*pExportInfo=*/ &__nvoc_export_info_Object
};
const struct NVOC_EXPORT_INFO __nvoc_export_info_Object =
{
/*numEntries=*/ 0,
/*pExportEntries=*/ 0
};
void __nvoc_dtor_Object(Object *pThis) {
PORT_UNREFERENCED_VARIABLE(pThis);
}
void __nvoc_init_dataField_Object(Object *pThis) {
PORT_UNREFERENCED_VARIABLE(pThis);
}
NV_STATUS __nvoc_ctor_Object(Object *pThis) {
NV_STATUS status = NV_OK;
__nvoc_init_dataField_Object(pThis);
goto __nvoc_ctor_Object_exit; // Success
__nvoc_ctor_Object_exit:
return status;
}
static void __nvoc_init_funcTable_Object_1(Object *pThis) {
PORT_UNREFERENCED_VARIABLE(pThis);
}
void __nvoc_init_funcTable_Object(Object *pThis) {
__nvoc_init_funcTable_Object_1(pThis);
}
void __nvoc_init_Object(Object *pThis) {
pThis->__nvoc_pbase_Object = pThis;
__nvoc_init_funcTable_Object(pThis);
}
NV_STATUS __nvoc_objCreate_Object(Object **ppThis, Dynamic *pParent, NvU32 createFlags) {
NV_STATUS status;
Object *pParentObj;
Object *pThis;
pThis = portMemAllocNonPaged(sizeof(Object));
if (pThis == NULL) return NV_ERR_NO_MEMORY;
portMemSet(pThis, 0, sizeof(Object));
__nvoc_initRtti(staticCast(pThis, Dynamic), &__nvoc_class_def_Object);
if (pParent != NULL && !(createFlags & NVOC_OBJ_CREATE_FLAGS_PARENT_HALSPEC_ONLY))
{
pParentObj = dynamicCast(pParent, Object);
objAddChild(pParentObj, pThis);
}
else
{
pThis->pParent = NULL;
}
__nvoc_init_Object(pThis);
status = __nvoc_ctor_Object(pThis);
if (status != NV_OK) goto __nvoc_objCreate_Object_cleanup;
*ppThis = pThis;
return NV_OK;
__nvoc_objCreate_Object_cleanup:
// do not call destructors here since the constructor already called them
portMemFree(pThis);
return status;
}
NV_STATUS __nvoc_objCreateDynamic_Object(Object **ppThis, Dynamic *pParent, NvU32 createFlags, va_list args) {
NV_STATUS status;
status = __nvoc_objCreate_Object(ppThis, pParent, createFlags);
return status;
}


@@ -0,0 +1,187 @@
#ifndef _G_OBJECT_NVOC_H_
#define _G_OBJECT_NVOC_H_
#include "nvoc/runtime.h"
#ifdef __cplusplus
extern "C" {
#endif
/*
* SPDX-FileCopyrightText: Copyright (c) 2015-2021 NVIDIA CORPORATION & AFFILIATES. All rights reserved.
* SPDX-License-Identifier: MIT
*
* Permission is hereby granted, free of charge, to any person obtaining a
* copy of this software and associated documentation files (the "Software"),
* to deal in the Software without restriction, including without limitation
* the rights to use, copy, modify, merge, publish, distribute, sublicense,
* and/or sell copies of the Software, and to permit persons to whom the
* Software is furnished to do so, subject to the following conditions:
*
* The above copyright notice and this permission notice shall be included in
* all copies or substantial portions of the Software.
*
* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
* IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
* FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL
* THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
* LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
* FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER
* DEALINGS IN THE SOFTWARE.
*/
/*
* This file is part of the NVOC runtime.
*/
#include "g_object_nvoc.h"
#ifndef _NVOC_OBJECT_H_
#define _NVOC_OBJECT_H_
#include "nvtypes.h"
#include "nvstatus.h"
#include "nvoc/prelude.h"
struct Object;
#ifndef __NVOC_CLASS_Object_TYPEDEF__
#define __NVOC_CLASS_Object_TYPEDEF__
typedef struct Object Object;
#endif /* __NVOC_CLASS_Object_TYPEDEF__ */
#ifndef __nvoc_class_id_Object
#define __nvoc_class_id_Object 0x497031
#endif /* __nvoc_class_id_Object */
struct NVOC_CLASS_INFO;
/*!
* Tracks the head of an object's child list, and the next object in its
* parent's child list.
*/
struct NVOC_CHILD_TREE
{
struct Object *pChild;
struct Object *pSibling;
};
//! The base class of all instantiable NVOC objects.
#ifdef NVOC_OBJECT_H_PRIVATE_ACCESS_ALLOWED
#define PRIVATE_FIELD(x) x
#else
#define PRIVATE_FIELD(x) NVOC_PRIVATE_FIELD(x)
#endif
struct Object {
const struct NVOC_RTTI *__nvoc_rtti;
struct Object *__nvoc_pbase_Object;
struct Object *pParent;
struct NVOC_CHILD_TREE childTree;
NvU32 ipVersion;
};
#ifndef __NVOC_CLASS_Object_TYPEDEF__
#define __NVOC_CLASS_Object_TYPEDEF__
typedef struct Object Object;
#endif /* __NVOC_CLASS_Object_TYPEDEF__ */
#ifndef __nvoc_class_id_Object
#define __nvoc_class_id_Object 0x497031
#endif /* __nvoc_class_id_Object */
extern const struct NVOC_CLASS_DEF __nvoc_class_def_Object;
#define __staticCast_Object(pThis) \
((pThis)->__nvoc_pbase_Object)
#ifdef __nvoc_object_h_disabled
#define __dynamicCast_Object(pThis) ((Object*)NULL)
#else //__nvoc_object_h_disabled
#define __dynamicCast_Object(pThis) \
((Object*)__nvoc_dynamicCast(staticCast((pThis), Dynamic), classInfo(Object)))
#endif //__nvoc_object_h_disabled
NV_STATUS __nvoc_objCreateDynamic_Object(Object**, Dynamic*, NvU32, va_list);
NV_STATUS __nvoc_objCreate_Object(Object**, Dynamic*, NvU32);
#define __objCreate_Object(ppNewObj, pParent, createFlags) \
__nvoc_objCreate_Object((ppNewObj), staticCast((pParent), Dynamic), (createFlags))
void objAddChild_IMPL(struct Object *pObj, struct Object *pChild);
#ifdef __nvoc_object_h_disabled
static inline void objAddChild(struct Object *pObj, struct Object *pChild) {
NV_ASSERT_FAILED_PRECOMP("Object was disabled!");
}
#else //__nvoc_object_h_disabled
#define objAddChild(pObj, pChild) objAddChild_IMPL(pObj, pChild)
#endif //__nvoc_object_h_disabled
void objRemoveChild_IMPL(struct Object *pObj, struct Object *pChild);
#ifdef __nvoc_object_h_disabled
static inline void objRemoveChild(struct Object *pObj, struct Object *pChild) {
NV_ASSERT_FAILED_PRECOMP("Object was disabled!");
}
#else //__nvoc_object_h_disabled
#define objRemoveChild(pObj, pChild) objRemoveChild_IMPL(pObj, pChild)
#endif //__nvoc_object_h_disabled
struct Object *objGetChild_IMPL(struct Object *pObj);
#ifdef __nvoc_object_h_disabled
static inline struct Object *objGetChild(struct Object *pObj) {
NV_ASSERT_FAILED_PRECOMP("Object was disabled!");
return NULL;
}
#else //__nvoc_object_h_disabled
#define objGetChild(pObj) objGetChild_IMPL(pObj)
#endif //__nvoc_object_h_disabled
struct Object *objGetSibling_IMPL(struct Object *pObj);
#ifdef __nvoc_object_h_disabled
static inline struct Object *objGetSibling(struct Object *pObj) {
NV_ASSERT_FAILED_PRECOMP("Object was disabled!");
return NULL;
}
#else //__nvoc_object_h_disabled
#define objGetSibling(pObj) objGetSibling_IMPL(pObj)
#endif //__nvoc_object_h_disabled
struct Object *objGetDirectParent_IMPL(struct Object *pObj);
#ifdef __nvoc_object_h_disabled
static inline struct Object *objGetDirectParent(struct Object *pObj) {
NV_ASSERT_FAILED_PRECOMP("Object was disabled!");
return NULL;
}
#else //__nvoc_object_h_disabled
#define objGetDirectParent(pObj) objGetDirectParent_IMPL(pObj)
#endif //__nvoc_object_h_disabled
#undef PRIVATE_FIELD
//
// IP versioning definitions are temporary until NVOC halspec support is
// finished.
//
// IP_VERSION format as defined by the hardware engines.
// A _MAJOR value of 0 means the object has no version number.
//
#define NV_ODB_IP_VER_DEV 7:0 /* R-IVF */
#define NV_ODB_IP_VER_ECO 15:8 /* R-IVF */
#define NV_ODB_IP_VER_MINOR 23:16 /* R-IVF */
#define NV_ODB_IP_VER_MAJOR 31:24 /* R-IVF */
#define IPVersion(pObj) staticCast((pObj), Object)->ipVersion
#define IsIPVersionValid(pObj) (DRF_VAL(_ODB, _IP_VER, _MAJOR, IPVersion(pObj)) != 0)
#define IsIPVersionOrLater(pObj, v0) (IPVersion(pObj) >= (v0))
// v0 .. v1 inclusive
#define IsIPVersionInRange(pObj, v0, v1) ((IPVersion(pObj) >= (v0)) && (IPVersion(pObj) <= (v1)))
#endif
#ifdef __cplusplus
} // extern "C"
#endif
#endif // _G_OBJECT_NVOC_H_
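The generated Object header above is the root of the NVOC hierarchy: NVOC_CHILD_TREE links each object to its first child and next sibling, and the IP-version macros decode the ipVersion field. A minimal sketch of walking that child list follows, assuming the usual NVOC runtime support (staticCast, DRF_VAL from nvmisc.h) is in scope; countChildrenInIpRange is a hypothetical helper, not part of the source.

/* Illustrative only -- not part of the generated header. */
static NvU32 countChildrenInIpRange(struct Object *pParent, NvU32 verMin, NvU32 verMax)
{
    NvU32 count = 0;
    struct Object *pChild;

    /* Walk the child list: head via objGetChild(), then follow sibling links. */
    for (pChild = objGetChild(pParent); pChild != NULL; pChild = objGetSibling(pChild))
    {
        /* Count only children reporting a valid IP version inside [verMin, verMax]. */
        if (IsIPVersionValid(pChild) && IsIPVersionInRange(pChild, verMin, verMax))
        {
            count++;
        }
    }
    return count;
}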


@@ -0,0 +1,357 @@
#define NVOC_OBJTMR_H_PRIVATE_ACCESS_ALLOWED
#include "nvoc/runtime.h"
#include "nvoc/rtti.h"
#include "nvtypes.h"
#include "nvport/nvport.h"
#include "nvport/inline/util_valist.h"
#include "utils/nvassert.h"
#include "g_objtmr_nvoc.h"
#ifdef DEBUG
char __nvoc_class_id_uniqueness_check_0x9ddede = 1;
#endif
extern const struct NVOC_CLASS_DEF __nvoc_class_def_OBJTMR;
extern const struct NVOC_CLASS_DEF __nvoc_class_def_Object;
extern const struct NVOC_CLASS_DEF __nvoc_class_def_OBJENGSTATE;
void __nvoc_init_OBJTMR(OBJTMR*, RmHalspecOwner* );
void __nvoc_init_funcTable_OBJTMR(OBJTMR*, RmHalspecOwner* );
NV_STATUS __nvoc_ctor_OBJTMR(OBJTMR*, RmHalspecOwner* );
void __nvoc_init_dataField_OBJTMR(OBJTMR*, RmHalspecOwner* );
void __nvoc_dtor_OBJTMR(OBJTMR*);
extern const struct NVOC_EXPORT_INFO __nvoc_export_info_OBJTMR;
static const struct NVOC_RTTI __nvoc_rtti_OBJTMR_OBJTMR = {
/*pClassDef=*/ &__nvoc_class_def_OBJTMR,
/*dtor=*/ (NVOC_DYNAMIC_DTOR) &__nvoc_dtor_OBJTMR,
/*offset=*/ 0,
};
static const struct NVOC_RTTI __nvoc_rtti_OBJTMR_Object = {
/*pClassDef=*/ &__nvoc_class_def_Object,
/*dtor=*/ &__nvoc_destructFromBase,
/*offset=*/ NV_OFFSETOF(OBJTMR, __nvoc_base_OBJENGSTATE.__nvoc_base_Object),
};
static const struct NVOC_RTTI __nvoc_rtti_OBJTMR_OBJENGSTATE = {
/*pClassDef=*/ &__nvoc_class_def_OBJENGSTATE,
/*dtor=*/ &__nvoc_destructFromBase,
/*offset=*/ NV_OFFSETOF(OBJTMR, __nvoc_base_OBJENGSTATE),
};
static const struct NVOC_CASTINFO __nvoc_castinfo_OBJTMR = {
/*numRelatives=*/ 3,
/*relatives=*/ {
&__nvoc_rtti_OBJTMR_OBJTMR,
&__nvoc_rtti_OBJTMR_OBJENGSTATE,
&__nvoc_rtti_OBJTMR_Object,
},
};
const struct NVOC_CLASS_DEF __nvoc_class_def_OBJTMR =
{
/*classInfo=*/ {
/*size=*/ sizeof(OBJTMR),
/*classId=*/ classId(OBJTMR),
/*providerId=*/ &__nvoc_rtti_provider,
#if NV_PRINTF_STRINGS_ALLOWED
/*name=*/ "OBJTMR",
#endif
},
/*objCreatefn=*/ (NVOC_DYNAMIC_OBJ_CREATE) &__nvoc_objCreateDynamic_OBJTMR,
/*pCastInfo=*/ &__nvoc_castinfo_OBJTMR,
/*pExportInfo=*/ &__nvoc_export_info_OBJTMR
};
static NV_STATUS __nvoc_thunk_OBJTMR_engstateConstructEngine(struct OBJGPU *pGpu, struct OBJENGSTATE *pTmr, ENGDESCRIPTOR arg0) {
return tmrConstructEngine(pGpu, (struct OBJTMR *)(((unsigned char *)pTmr) - __nvoc_rtti_OBJTMR_OBJENGSTATE.offset), arg0);
}
static NV_STATUS __nvoc_thunk_OBJTMR_engstateStateInitLocked(struct OBJGPU *pGpu, struct OBJENGSTATE *pTmr) {
return tmrStateInitLocked(pGpu, (struct OBJTMR *)(((unsigned char *)pTmr) - __nvoc_rtti_OBJTMR_OBJENGSTATE.offset));
}
static NV_STATUS __nvoc_thunk_OBJTMR_engstateStateLoad(struct OBJGPU *pGpu, struct OBJENGSTATE *pTmr, NvU32 arg0) {
return tmrStateLoad(pGpu, (struct OBJTMR *)(((unsigned char *)pTmr) - __nvoc_rtti_OBJTMR_OBJENGSTATE.offset), arg0);
}
static NV_STATUS __nvoc_thunk_OBJTMR_engstateStateUnload(struct OBJGPU *pGpu, struct OBJENGSTATE *pTmr, NvU32 arg0) {
return tmrStateUnload(pGpu, (struct OBJTMR *)(((unsigned char *)pTmr) - __nvoc_rtti_OBJTMR_OBJENGSTATE.offset), arg0);
}
static void __nvoc_thunk_OBJTMR_engstateStateDestroy(struct OBJGPU *pGpu, struct OBJENGSTATE *pTmr) {
tmrStateDestroy(pGpu, (struct OBJTMR *)(((unsigned char *)pTmr) - __nvoc_rtti_OBJTMR_OBJENGSTATE.offset));
}
static NV_STATUS __nvoc_thunk_OBJENGSTATE_tmrReconcileTunableState(POBJGPU pGpu, struct OBJTMR *pEngstate, void *pTunableState) {
return engstateReconcileTunableState(pGpu, (struct OBJENGSTATE *)(((unsigned char *)pEngstate) + __nvoc_rtti_OBJTMR_OBJENGSTATE.offset), pTunableState);
}
static NV_STATUS __nvoc_thunk_OBJENGSTATE_tmrStatePreLoad(POBJGPU pGpu, struct OBJTMR *pEngstate, NvU32 arg0) {
return engstateStatePreLoad(pGpu, (struct OBJENGSTATE *)(((unsigned char *)pEngstate) + __nvoc_rtti_OBJTMR_OBJENGSTATE.offset), arg0);
}
static NV_STATUS __nvoc_thunk_OBJENGSTATE_tmrStatePostUnload(POBJGPU pGpu, struct OBJTMR *pEngstate, NvU32 arg0) {
return engstateStatePostUnload(pGpu, (struct OBJENGSTATE *)(((unsigned char *)pEngstate) + __nvoc_rtti_OBJTMR_OBJENGSTATE.offset), arg0);
}
static NV_STATUS __nvoc_thunk_OBJENGSTATE_tmrStatePreUnload(POBJGPU pGpu, struct OBJTMR *pEngstate, NvU32 arg0) {
return engstateStatePreUnload(pGpu, (struct OBJENGSTATE *)(((unsigned char *)pEngstate) + __nvoc_rtti_OBJTMR_OBJENGSTATE.offset), arg0);
}
static NV_STATUS __nvoc_thunk_OBJENGSTATE_tmrStateInitUnlocked(POBJGPU pGpu, struct OBJTMR *pEngstate) {
return engstateStateInitUnlocked(pGpu, (struct OBJENGSTATE *)(((unsigned char *)pEngstate) + __nvoc_rtti_OBJTMR_OBJENGSTATE.offset));
}
static void __nvoc_thunk_OBJENGSTATE_tmrInitMissing(POBJGPU pGpu, struct OBJTMR *pEngstate) {
engstateInitMissing(pGpu, (struct OBJENGSTATE *)(((unsigned char *)pEngstate) + __nvoc_rtti_OBJTMR_OBJENGSTATE.offset));
}
static NV_STATUS __nvoc_thunk_OBJENGSTATE_tmrStatePreInitLocked(POBJGPU pGpu, struct OBJTMR *pEngstate) {
return engstateStatePreInitLocked(pGpu, (struct OBJENGSTATE *)(((unsigned char *)pEngstate) + __nvoc_rtti_OBJTMR_OBJENGSTATE.offset));
}
static NV_STATUS __nvoc_thunk_OBJENGSTATE_tmrStatePreInitUnlocked(POBJGPU pGpu, struct OBJTMR *pEngstate) {
return engstateStatePreInitUnlocked(pGpu, (struct OBJENGSTATE *)(((unsigned char *)pEngstate) + __nvoc_rtti_OBJTMR_OBJENGSTATE.offset));
}
static NV_STATUS __nvoc_thunk_OBJENGSTATE_tmrGetTunableState(POBJGPU pGpu, struct OBJTMR *pEngstate, void *pTunableState) {
return engstateGetTunableState(pGpu, (struct OBJENGSTATE *)(((unsigned char *)pEngstate) + __nvoc_rtti_OBJTMR_OBJENGSTATE.offset), pTunableState);
}
static NV_STATUS __nvoc_thunk_OBJENGSTATE_tmrCompareTunableState(POBJGPU pGpu, struct OBJTMR *pEngstate, void *pTunables1, void *pTunables2) {
return engstateCompareTunableState(pGpu, (struct OBJENGSTATE *)(((unsigned char *)pEngstate) + __nvoc_rtti_OBJTMR_OBJENGSTATE.offset), pTunables1, pTunables2);
}
static void __nvoc_thunk_OBJENGSTATE_tmrFreeTunableState(POBJGPU pGpu, struct OBJTMR *pEngstate, void *pTunableState) {
engstateFreeTunableState(pGpu, (struct OBJENGSTATE *)(((unsigned char *)pEngstate) + __nvoc_rtti_OBJTMR_OBJENGSTATE.offset), pTunableState);
}
static NV_STATUS __nvoc_thunk_OBJENGSTATE_tmrStatePostLoad(POBJGPU pGpu, struct OBJTMR *pEngstate, NvU32 arg0) {
return engstateStatePostLoad(pGpu, (struct OBJENGSTATE *)(((unsigned char *)pEngstate) + __nvoc_rtti_OBJTMR_OBJENGSTATE.offset), arg0);
}
static NV_STATUS __nvoc_thunk_OBJENGSTATE_tmrAllocTunableState(POBJGPU pGpu, struct OBJTMR *pEngstate, void **ppTunableState) {
return engstateAllocTunableState(pGpu, (struct OBJENGSTATE *)(((unsigned char *)pEngstate) + __nvoc_rtti_OBJTMR_OBJENGSTATE.offset), ppTunableState);
}
static NV_STATUS __nvoc_thunk_OBJENGSTATE_tmrSetTunableState(POBJGPU pGpu, struct OBJTMR *pEngstate, void *pTunableState) {
return engstateSetTunableState(pGpu, (struct OBJENGSTATE *)(((unsigned char *)pEngstate) + __nvoc_rtti_OBJTMR_OBJENGSTATE.offset), pTunableState);
}
static NvBool __nvoc_thunk_OBJENGSTATE_tmrIsPresent(POBJGPU pGpu, struct OBJTMR *pEngstate) {
return engstateIsPresent(pGpu, (struct OBJENGSTATE *)(((unsigned char *)pEngstate) + __nvoc_rtti_OBJTMR_OBJENGSTATE.offset));
}
const struct NVOC_EXPORT_INFO __nvoc_export_info_OBJTMR =
{
/*numEntries=*/ 0,
/*pExportEntries=*/ 0
};
void __nvoc_dtor_OBJENGSTATE(OBJENGSTATE*);
void __nvoc_dtor_OBJTMR(OBJTMR *pThis) {
__nvoc_tmrDestruct(pThis);
__nvoc_dtor_OBJENGSTATE(&pThis->__nvoc_base_OBJENGSTATE);
PORT_UNREFERENCED_VARIABLE(pThis);
}
void __nvoc_init_dataField_OBJTMR(OBJTMR *pThis, RmHalspecOwner *pRmhalspecowner) {
ChipHal *chipHal = &pRmhalspecowner->chipHal;
const unsigned long chipHal_HalVarIdx = (unsigned long)chipHal->__nvoc_HalVarIdx;
RmVariantHal *rmVariantHal = &pRmhalspecowner->rmVariantHal;
const unsigned long rmVariantHal_HalVarIdx = (unsigned long)rmVariantHal->__nvoc_HalVarIdx;
PORT_UNREFERENCED_VARIABLE(pThis);
PORT_UNREFERENCED_VARIABLE(pRmhalspecowner);
PORT_UNREFERENCED_VARIABLE(chipHal);
PORT_UNREFERENCED_VARIABLE(chipHal_HalVarIdx);
PORT_UNREFERENCED_VARIABLE(rmVariantHal);
PORT_UNREFERENCED_VARIABLE(rmVariantHal_HalVarIdx);
// NVOC Property Hal field -- PDB_PROP_TMR_USE_COUNTDOWN_TIMER_FOR_RM_CALLBACKS
if (( ((chipHal_HalVarIdx >> 5) == 2UL) && ((1UL << (chipHal_HalVarIdx & 0x1f)) & 0x00010000UL) )) /* ChipHal: T234D */
{
pThis->setProperty(pThis, PDB_PROP_TMR_USE_COUNTDOWN_TIMER_FOR_RM_CALLBACKS, ((NvBool)(0 == 0)));
}
// default
else
{
pThis->setProperty(pThis, PDB_PROP_TMR_USE_COUNTDOWN_TIMER_FOR_RM_CALLBACKS, ((NvBool)(0 != 0)));
}
// NVOC Property Hal field -- PDB_PROP_TMR_ALARM_INTR_REMOVED_FROM_PMC_TREE
if (( ((chipHal_HalVarIdx >> 5) == 2UL) && ((1UL << (chipHal_HalVarIdx & 0x1f)) & 0x00010000UL) )) /* ChipHal: T234D */
{
pThis->setProperty(pThis, PDB_PROP_TMR_ALARM_INTR_REMOVED_FROM_PMC_TREE, ((NvBool)(0 == 0)));
}
// default
else
{
pThis->setProperty(pThis, PDB_PROP_TMR_ALARM_INTR_REMOVED_FROM_PMC_TREE, ((NvBool)(0 != 0)));
}
// NVOC Property Hal field -- PDB_PROP_TMR_USE_OS_TIMER_FOR_CALLBACKS
if (( ((chipHal_HalVarIdx >> 5) == 2UL) && ((1UL << (chipHal_HalVarIdx & 0x1f)) & 0x00010000UL) )) /* ChipHal: T234D */
{
pThis->setProperty(pThis, PDB_PROP_TMR_USE_OS_TIMER_FOR_CALLBACKS, ((NvBool)(0 == 0)));
}
// default
else
{
pThis->setProperty(pThis, PDB_PROP_TMR_USE_OS_TIMER_FOR_CALLBACKS, ((NvBool)(0 != 0)));
}
pThis->setProperty(pThis, PDB_PROP_TMR_USE_PTIMER_FOR_OSTIMER_CALLBACKS, (0));
pThis->setProperty(pThis, PDB_PROP_TMR_USE_POLLING_FOR_CALLBACKS, (0));
// NVOC Property Hal field -- PDB_PROP_TMR_USE_SECOND_COUNTDOWN_TIMER_FOR_SWRL
if (0)
{
}
// default
else
{
pThis->setProperty(pThis, PDB_PROP_TMR_USE_SECOND_COUNTDOWN_TIMER_FOR_SWRL, ((NvBool)(0 != 0)));
}
}
NV_STATUS __nvoc_ctor_OBJENGSTATE(OBJENGSTATE* );
NV_STATUS __nvoc_ctor_OBJTMR(OBJTMR *pThis, RmHalspecOwner *pRmhalspecowner) {
NV_STATUS status = NV_OK;
status = __nvoc_ctor_OBJENGSTATE(&pThis->__nvoc_base_OBJENGSTATE);
if (status != NV_OK) goto __nvoc_ctor_OBJTMR_fail_OBJENGSTATE;
__nvoc_init_dataField_OBJTMR(pThis, pRmhalspecowner);
goto __nvoc_ctor_OBJTMR_exit; // Success
__nvoc_ctor_OBJTMR_fail_OBJENGSTATE:
__nvoc_ctor_OBJTMR_exit:
return status;
}
static void __nvoc_init_funcTable_OBJTMR_1(OBJTMR *pThis, RmHalspecOwner *pRmhalspecowner) {
ChipHal *chipHal = &pRmhalspecowner->chipHal;
const unsigned long chipHal_HalVarIdx = (unsigned long)chipHal->__nvoc_HalVarIdx;
RmVariantHal *rmVariantHal = &pRmhalspecowner->rmVariantHal;
const unsigned long rmVariantHal_HalVarIdx = (unsigned long)rmVariantHal->__nvoc_HalVarIdx;
PORT_UNREFERENCED_VARIABLE(pThis);
PORT_UNREFERENCED_VARIABLE(pRmhalspecowner);
PORT_UNREFERENCED_VARIABLE(chipHal);
PORT_UNREFERENCED_VARIABLE(chipHal_HalVarIdx);
PORT_UNREFERENCED_VARIABLE(rmVariantHal);
PORT_UNREFERENCED_VARIABLE(rmVariantHal_HalVarIdx);
pThis->__tmrConstructEngine__ = &tmrConstructEngine_IMPL;
pThis->__tmrStateInitLocked__ = &tmrStateInitLocked_IMPL;
pThis->__tmrStateLoad__ = &tmrStateLoad_IMPL;
pThis->__tmrStateUnload__ = &tmrStateUnload_IMPL;
pThis->__tmrStateDestroy__ = &tmrStateDestroy_IMPL;
pThis->__nvoc_base_OBJENGSTATE.__engstateConstructEngine__ = &__nvoc_thunk_OBJTMR_engstateConstructEngine;
pThis->__nvoc_base_OBJENGSTATE.__engstateStateInitLocked__ = &__nvoc_thunk_OBJTMR_engstateStateInitLocked;
pThis->__nvoc_base_OBJENGSTATE.__engstateStateLoad__ = &__nvoc_thunk_OBJTMR_engstateStateLoad;
pThis->__nvoc_base_OBJENGSTATE.__engstateStateUnload__ = &__nvoc_thunk_OBJTMR_engstateStateUnload;
pThis->__nvoc_base_OBJENGSTATE.__engstateStateDestroy__ = &__nvoc_thunk_OBJTMR_engstateStateDestroy;
pThis->__tmrReconcileTunableState__ = &__nvoc_thunk_OBJENGSTATE_tmrReconcileTunableState;
pThis->__tmrStatePreLoad__ = &__nvoc_thunk_OBJENGSTATE_tmrStatePreLoad;
pThis->__tmrStatePostUnload__ = &__nvoc_thunk_OBJENGSTATE_tmrStatePostUnload;
pThis->__tmrStatePreUnload__ = &__nvoc_thunk_OBJENGSTATE_tmrStatePreUnload;
pThis->__tmrStateInitUnlocked__ = &__nvoc_thunk_OBJENGSTATE_tmrStateInitUnlocked;
pThis->__tmrInitMissing__ = &__nvoc_thunk_OBJENGSTATE_tmrInitMissing;
pThis->__tmrStatePreInitLocked__ = &__nvoc_thunk_OBJENGSTATE_tmrStatePreInitLocked;
pThis->__tmrStatePreInitUnlocked__ = &__nvoc_thunk_OBJENGSTATE_tmrStatePreInitUnlocked;
pThis->__tmrGetTunableState__ = &__nvoc_thunk_OBJENGSTATE_tmrGetTunableState;
pThis->__tmrCompareTunableState__ = &__nvoc_thunk_OBJENGSTATE_tmrCompareTunableState;
pThis->__tmrFreeTunableState__ = &__nvoc_thunk_OBJENGSTATE_tmrFreeTunableState;
pThis->__tmrStatePostLoad__ = &__nvoc_thunk_OBJENGSTATE_tmrStatePostLoad;
pThis->__tmrAllocTunableState__ = &__nvoc_thunk_OBJENGSTATE_tmrAllocTunableState;
pThis->__tmrSetTunableState__ = &__nvoc_thunk_OBJENGSTATE_tmrSetTunableState;
pThis->__tmrIsPresent__ = &__nvoc_thunk_OBJENGSTATE_tmrIsPresent;
}
void __nvoc_init_funcTable_OBJTMR(OBJTMR *pThis, RmHalspecOwner *pRmhalspecowner) {
__nvoc_init_funcTable_OBJTMR_1(pThis, pRmhalspecowner);
}
void __nvoc_init_OBJENGSTATE(OBJENGSTATE*);
void __nvoc_init_OBJTMR(OBJTMR *pThis, RmHalspecOwner *pRmhalspecowner) {
pThis->__nvoc_pbase_OBJTMR = pThis;
pThis->__nvoc_pbase_Object = &pThis->__nvoc_base_OBJENGSTATE.__nvoc_base_Object;
pThis->__nvoc_pbase_OBJENGSTATE = &pThis->__nvoc_base_OBJENGSTATE;
__nvoc_init_OBJENGSTATE(&pThis->__nvoc_base_OBJENGSTATE);
__nvoc_init_funcTable_OBJTMR(pThis, pRmhalspecowner);
}
NV_STATUS __nvoc_objCreate_OBJTMR(OBJTMR **ppThis, Dynamic *pParent, NvU32 createFlags) {
NV_STATUS status;
Object *pParentObj;
OBJTMR *pThis;
RmHalspecOwner *pRmhalspecowner;
pThis = portMemAllocNonPaged(sizeof(OBJTMR));
if (pThis == NULL) return NV_ERR_NO_MEMORY;
portMemSet(pThis, 0, sizeof(OBJTMR));
__nvoc_initRtti(staticCast(pThis, Dynamic), &__nvoc_class_def_OBJTMR);
if (pParent != NULL && !(createFlags & NVOC_OBJ_CREATE_FLAGS_PARENT_HALSPEC_ONLY))
{
pParentObj = dynamicCast(pParent, Object);
objAddChild(pParentObj, &pThis->__nvoc_base_OBJENGSTATE.__nvoc_base_Object);
}
else
{
pThis->__nvoc_base_OBJENGSTATE.__nvoc_base_Object.pParent = NULL;
}
if ((pRmhalspecowner = dynamicCast(pParent, RmHalspecOwner)) == NULL)
pRmhalspecowner = objFindAncestorOfType(RmHalspecOwner, pParent);
NV_ASSERT_OR_RETURN(pRmhalspecowner != NULL, NV_ERR_INVALID_ARGUMENT);
__nvoc_init_OBJTMR(pThis, pRmhalspecowner);
status = __nvoc_ctor_OBJTMR(pThis, pRmhalspecowner);
if (status != NV_OK) goto __nvoc_objCreate_OBJTMR_cleanup;
*ppThis = pThis;
return NV_OK;
__nvoc_objCreate_OBJTMR_cleanup:
// do not call destructors here since the constructor already called them
portMemFree(pThis);
return status;
}
NV_STATUS __nvoc_objCreateDynamic_OBJTMR(OBJTMR **ppThis, Dynamic *pParent, NvU32 createFlags, va_list args) {
NV_STATUS status;
status = __nvoc_objCreate_OBJTMR(ppThis, pParent, createFlags);
return status;
}
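Two patterns in the generated timer code above are worth noting: the thunks convert between OBJENGSTATE* and OBJTMR* by adding or subtracting the recorded base-class offset, and the ChipHal checks in __nvoc_init_dataField_OBJTMR are bitvector membership tests over 32-bit words. A small standalone sketch of that bit test follows; isVariantEnabled is a hypothetical helper for illustration only.

/*
 * Illustrative only: the halspec checks above are 32-bit-word bitmask tests.
 * (chipHal_HalVarIdx >> 5) == 2UL selects the third 32-bit word of the variant
 * bitvector, and (1UL << (chipHal_HalVarIdx & 0x1f)) & 0x00010000UL tests bit 16
 * of that word, i.e. overall variant index 80 -- the T234D entry per the
 * generated comment.
 */
static int isVariantEnabled(unsigned long varIdx, unsigned long wordIdx, unsigned long wordMask)
{
    return ((varIdx >> 5) == wordIdx) &&
           (((1UL << (varIdx & 0x1f)) & wordMask) != 0UL);
}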


File diff suppressed because it is too large


@@ -0,0 +1,86 @@
// This file is automatically generated by rmconfig - DO NOT EDIT!
//
// Profile: devel-soc-disp-dce-client
// Template: templates/gt_odb.h
//
#ifndef _G_ODB_H_
#define _G_ODB_H_
#define OBJECT_BASE_DEFINITION(ENG) __##ENG##_OBJECT_BASE_DEFINITION
#ifndef __NVOC_CLASS_OBJGPIO_TYPEDEF__
#define __NVOC_CLASS_OBJGPIO_TYPEDEF__
typedef struct OBJGPIO OBJGPIO;
#endif /* __NVOC_CLASS_OBJGPIO_TYPEDEF__ */
typedef struct OBJGPIO *POBJGPIO;
#ifndef __NVOC_CLASS_OBJRPC_TYPEDEF__
#define __NVOC_CLASS_OBJRPC_TYPEDEF__
typedef struct OBJRPC OBJRPC;
#endif /* __NVOC_CLASS_OBJRPC_TYPEDEF__ */
typedef struct OBJRPC *POBJRPC;
#ifndef __NVOC_CLASS_OBJRPCSTRUCTURECOPY_TYPEDEF__
#define __NVOC_CLASS_OBJRPCSTRUCTURECOPY_TYPEDEF__
typedef struct OBJRPCSTRUCTURECOPY OBJRPCSTRUCTURECOPY;
#endif /* __NVOC_CLASS_OBJRPCSTRUCTURECOPY_TYPEDEF__ */
typedef struct OBJRPCSTRUCTURECOPY *POBJRPCSTRUCTURECOPY;
#if NV_PRINTF_STRINGS_ALLOWED
#define odbGetClassName(p) (objGetClassInfo((p))->name)
#endif
// TODO : temporary hack, to delete
#ifndef __NVOC_CLASS_OBJGPU_TYPEDEF__
#define __NVOC_CLASS_OBJGPU_TYPEDEF__
typedef struct OBJGPU OBJGPU;
#endif /* __NVOC_CLASS_OBJGPU_TYPEDEF__ */
typedef struct OBJGPU *POBJGPU;
#ifndef __NVOC_CLASS_OBJDISP_TYPEDEF__
#define __NVOC_CLASS_OBJDISP_TYPEDEF__
typedef struct OBJDISP OBJDISP;
#endif /* __NVOC_CLASS_OBJDISP_TYPEDEF__ */
typedef struct OBJDISP *POBJDISP;
//
// #define staticCast(pObj, TYPE) ((pObj)? __staticCast_##TYPE((pObj)) : NULL)
//
#define __staticCast_OBJGPIO(pObj) ((pObj)->__iom_pbase_OBJGPIO)
#define __staticCast_OBJRPC(pObj) ((pObj)->__iom_pbase_OBJRPC)
#define __staticCast_OBJRPCSTRUCTURECOPY(pObj) ((pObj)->__iom_pbase_OBJRPCSTRUCTURECOPY)
//
// #define dynamicCast(pObj, TYPE) (__dynamicCast_##TYPE((pObj)))
//
#define __dynamicCast_OBJGPIO(pObj) NULL
#define __dynamicCast_OBJRPC(pObj) NULL
#define __dynamicCast_OBJRPCSTRUCTURECOPY(pObj) NULL
#define PDB_PROP_GPIO_DCB_ENTRIES_PARSED_BASE_CAST
#define PDB_PROP_GPIO_DCB_ENTRIES_PARSED_BASE_NAME pdb.PDB_PROP_GPIO_DCB_ENTRIES_PARSED
#define PDB_PROP_GPIO_ENTRY_ORIGIN_DCB_GAT_BASE_CAST
#define PDB_PROP_GPIO_ENTRY_ORIGIN_DCB_GAT_BASE_NAME pdb.PDB_PROP_GPIO_ENTRY_ORIGIN_DCB_GAT
#define PDB_PROP_GPIO_EPC_HWSLOW_FC7E081B_BASE_CAST
#define PDB_PROP_GPIO_EPC_HWSLOW_FC7E081B_BASE_NAME pdb.PDB_PROP_GPIO_EPC_HWSLOW_FC7E081B
#define PDB_PROP_GPIO_FORCE_FAST_LVDS_MUX_SWITCH_BASE_CAST
#define PDB_PROP_GPIO_FORCE_FAST_LVDS_MUX_SWITCH_BASE_NAME pdb.PDB_PROP_GPIO_FORCE_FAST_LVDS_MUX_SWITCH
#define PDB_PROP_GPIO_IS_MISSING_BASE_CAST __nvoc_pbase_OBJENGSTATE->
#define PDB_PROP_GPIO_IS_MISSING_BASE_NAME PDB_PROP_ENGSTATE_IS_MISSING
#define PDB_PROP_GPIO_RM_PMU_GPIO_SYNC_ENABLED_DEF_BASE_CAST
#define PDB_PROP_GPIO_RM_PMU_GPIO_SYNC_ENABLED_DEF_BASE_NAME pdb.PDB_PROP_GPIO_RM_PMU_GPIO_SYNC_ENABLED_DEF
#endif // _G_ODB_H_


@@ -0,0 +1,323 @@
#define NVOC_OS_DESC_MEM_H_PRIVATE_ACCESS_ALLOWED
#include "nvoc/runtime.h"
#include "nvoc/rtti.h"
#include "nvtypes.h"
#include "nvport/nvport.h"
#include "nvport/inline/util_valist.h"
#include "utils/nvassert.h"
#include "g_os_desc_mem_nvoc.h"
#ifdef DEBUG
char __nvoc_class_id_uniqueness_check_0xb3dacd = 1;
#endif
extern const struct NVOC_CLASS_DEF __nvoc_class_def_OsDescMemory;
extern const struct NVOC_CLASS_DEF __nvoc_class_def_Object;
extern const struct NVOC_CLASS_DEF __nvoc_class_def_RsResource;
extern const struct NVOC_CLASS_DEF __nvoc_class_def_RmResourceCommon;
extern const struct NVOC_CLASS_DEF __nvoc_class_def_RmResource;
extern const struct NVOC_CLASS_DEF __nvoc_class_def_Memory;
void __nvoc_init_OsDescMemory(OsDescMemory*);
void __nvoc_init_funcTable_OsDescMemory(OsDescMemory*);
NV_STATUS __nvoc_ctor_OsDescMemory(OsDescMemory*, CALL_CONTEXT * arg_pCallContext, struct RS_RES_ALLOC_PARAMS_INTERNAL * arg_pParams);
void __nvoc_init_dataField_OsDescMemory(OsDescMemory*);
void __nvoc_dtor_OsDescMemory(OsDescMemory*);
extern const struct NVOC_EXPORT_INFO __nvoc_export_info_OsDescMemory;
static const struct NVOC_RTTI __nvoc_rtti_OsDescMemory_OsDescMemory = {
/*pClassDef=*/ &__nvoc_class_def_OsDescMemory,
/*dtor=*/ (NVOC_DYNAMIC_DTOR) &__nvoc_dtor_OsDescMemory,
/*offset=*/ 0,
};
static const struct NVOC_RTTI __nvoc_rtti_OsDescMemory_Object = {
/*pClassDef=*/ &__nvoc_class_def_Object,
/*dtor=*/ &__nvoc_destructFromBase,
/*offset=*/ NV_OFFSETOF(OsDescMemory, __nvoc_base_Memory.__nvoc_base_RmResource.__nvoc_base_RsResource.__nvoc_base_Object),
};
static const struct NVOC_RTTI __nvoc_rtti_OsDescMemory_RsResource = {
/*pClassDef=*/ &__nvoc_class_def_RsResource,
/*dtor=*/ &__nvoc_destructFromBase,
/*offset=*/ NV_OFFSETOF(OsDescMemory, __nvoc_base_Memory.__nvoc_base_RmResource.__nvoc_base_RsResource),
};
static const struct NVOC_RTTI __nvoc_rtti_OsDescMemory_RmResourceCommon = {
/*pClassDef=*/ &__nvoc_class_def_RmResourceCommon,
/*dtor=*/ &__nvoc_destructFromBase,
/*offset=*/ NV_OFFSETOF(OsDescMemory, __nvoc_base_Memory.__nvoc_base_RmResource.__nvoc_base_RmResourceCommon),
};
static const struct NVOC_RTTI __nvoc_rtti_OsDescMemory_RmResource = {
/*pClassDef=*/ &__nvoc_class_def_RmResource,
/*dtor=*/ &__nvoc_destructFromBase,
/*offset=*/ NV_OFFSETOF(OsDescMemory, __nvoc_base_Memory.__nvoc_base_RmResource),
};
static const struct NVOC_RTTI __nvoc_rtti_OsDescMemory_Memory = {
/*pClassDef=*/ &__nvoc_class_def_Memory,
/*dtor=*/ &__nvoc_destructFromBase,
/*offset=*/ NV_OFFSETOF(OsDescMemory, __nvoc_base_Memory),
};
static const struct NVOC_CASTINFO __nvoc_castinfo_OsDescMemory = {
/*numRelatives=*/ 6,
/*relatives=*/ {
&__nvoc_rtti_OsDescMemory_OsDescMemory,
&__nvoc_rtti_OsDescMemory_Memory,
&__nvoc_rtti_OsDescMemory_RmResource,
&__nvoc_rtti_OsDescMemory_RmResourceCommon,
&__nvoc_rtti_OsDescMemory_RsResource,
&__nvoc_rtti_OsDescMemory_Object,
},
};
const struct NVOC_CLASS_DEF __nvoc_class_def_OsDescMemory =
{
/*classInfo=*/ {
/*size=*/ sizeof(OsDescMemory),
/*classId=*/ classId(OsDescMemory),
/*providerId=*/ &__nvoc_rtti_provider,
#if NV_PRINTF_STRINGS_ALLOWED
/*name=*/ "OsDescMemory",
#endif
},
/*objCreatefn=*/ (NVOC_DYNAMIC_OBJ_CREATE) &__nvoc_objCreateDynamic_OsDescMemory,
/*pCastInfo=*/ &__nvoc_castinfo_OsDescMemory,
/*pExportInfo=*/ &__nvoc_export_info_OsDescMemory
};
static NvBool __nvoc_thunk_OsDescMemory_resCanCopy(struct RsResource *pOsDescMemory) {
return osdescCanCopy((struct OsDescMemory *)(((unsigned char *)pOsDescMemory) - __nvoc_rtti_OsDescMemory_RsResource.offset));
}
static NV_STATUS __nvoc_thunk_Memory_osdescCheckMemInterUnmap(struct OsDescMemory *pMemory, NvBool bSubdeviceHandleProvided) {
return memCheckMemInterUnmap((struct Memory *)(((unsigned char *)pMemory) + __nvoc_rtti_OsDescMemory_Memory.offset), bSubdeviceHandleProvided);
}
static NV_STATUS __nvoc_thunk_Memory_osdescControl(struct OsDescMemory *pMemory, CALL_CONTEXT *pCallContext, struct RS_RES_CONTROL_PARAMS_INTERNAL *pParams) {
return memControl((struct Memory *)(((unsigned char *)pMemory) + __nvoc_rtti_OsDescMemory_Memory.offset), pCallContext, pParams);
}
static NV_STATUS __nvoc_thunk_Memory_osdescUnmap(struct OsDescMemory *pMemory, CALL_CONTEXT *pCallContext, RsCpuMapping *pCpuMapping) {
return memUnmap((struct Memory *)(((unsigned char *)pMemory) + __nvoc_rtti_OsDescMemory_Memory.offset), pCallContext, pCpuMapping);
}
static NV_STATUS __nvoc_thunk_Memory_osdescGetMemInterMapParams(struct OsDescMemory *pMemory, RMRES_MEM_INTER_MAP_PARAMS *pParams) {
return memGetMemInterMapParams((struct Memory *)(((unsigned char *)pMemory) + __nvoc_rtti_OsDescMemory_Memory.offset), pParams);
}
static NV_STATUS __nvoc_thunk_Memory_osdescGetMemoryMappingDescriptor(struct OsDescMemory *pMemory, MEMORY_DESCRIPTOR **ppMemDesc) {
return memGetMemoryMappingDescriptor((struct Memory *)(((unsigned char *)pMemory) + __nvoc_rtti_OsDescMemory_Memory.offset), ppMemDesc);
}
static NV_STATUS __nvoc_thunk_Memory_osdescGetMapAddrSpace(struct OsDescMemory *pMemory, CALL_CONTEXT *pCallContext, NvU32 mapFlags, NV_ADDRESS_SPACE *pAddrSpace) {
return memGetMapAddrSpace((struct Memory *)(((unsigned char *)pMemory) + __nvoc_rtti_OsDescMemory_Memory.offset), pCallContext, mapFlags, pAddrSpace);
}
static NvBool __nvoc_thunk_RmResource_osdescShareCallback(struct OsDescMemory *pResource, struct RsClient *pInvokingClient, struct RsResourceRef *pParentRef, RS_SHARE_POLICY *pSharePolicy) {
return rmresShareCallback((struct RmResource *)(((unsigned char *)pResource) + __nvoc_rtti_OsDescMemory_RmResource.offset), pInvokingClient, pParentRef, pSharePolicy);
}
static NV_STATUS __nvoc_thunk_RsResource_osdescControlFilter(struct OsDescMemory *pResource, struct CALL_CONTEXT *pCallContext, struct RS_RES_CONTROL_PARAMS_INTERNAL *pParams) {
return resControlFilter((struct RsResource *)(((unsigned char *)pResource) + __nvoc_rtti_OsDescMemory_RsResource.offset), pCallContext, pParams);
}
static void __nvoc_thunk_RsResource_osdescAddAdditionalDependants(struct RsClient *pClient, struct OsDescMemory *pResource, RsResourceRef *pReference) {
resAddAdditionalDependants(pClient, (struct RsResource *)(((unsigned char *)pResource) + __nvoc_rtti_OsDescMemory_RsResource.offset), pReference);
}
static NvU32 __nvoc_thunk_RsResource_osdescGetRefCount(struct OsDescMemory *pResource) {
return resGetRefCount((struct RsResource *)(((unsigned char *)pResource) + __nvoc_rtti_OsDescMemory_RsResource.offset));
}
static NV_STATUS __nvoc_thunk_RsResource_osdescMapTo(struct OsDescMemory *pResource, RS_RES_MAP_TO_PARAMS *pParams) {
return resMapTo((struct RsResource *)(((unsigned char *)pResource) + __nvoc_rtti_OsDescMemory_RsResource.offset), pParams);
}
static NV_STATUS __nvoc_thunk_RmResource_osdescControl_Prologue(struct OsDescMemory *pResource, CALL_CONTEXT *pCallContext, struct RS_RES_CONTROL_PARAMS_INTERNAL *pParams) {
return rmresControl_Prologue((struct RmResource *)(((unsigned char *)pResource) + __nvoc_rtti_OsDescMemory_RmResource.offset), pCallContext, pParams);
}
static NV_STATUS __nvoc_thunk_Memory_osdescIsReady(struct OsDescMemory *pMemory) {
return memIsReady((struct Memory *)(((unsigned char *)pMemory) + __nvoc_rtti_OsDescMemory_Memory.offset));
}
static NV_STATUS __nvoc_thunk_Memory_osdescCheckCopyPermissions(struct OsDescMemory *pMemory, struct OBJGPU *pDstGpu, NvHandle hDstClientNvBool) {
return memCheckCopyPermissions((struct Memory *)(((unsigned char *)pMemory) + __nvoc_rtti_OsDescMemory_Memory.offset), pDstGpu, hDstClientNvBool);
}
static void __nvoc_thunk_RsResource_osdescPreDestruct(struct OsDescMemory *pResource) {
resPreDestruct((struct RsResource *)(((unsigned char *)pResource) + __nvoc_rtti_OsDescMemory_RsResource.offset));
}
static NV_STATUS __nvoc_thunk_RsResource_osdescUnmapFrom(struct OsDescMemory *pResource, RS_RES_UNMAP_FROM_PARAMS *pParams) {
return resUnmapFrom((struct RsResource *)(((unsigned char *)pResource) + __nvoc_rtti_OsDescMemory_RsResource.offset), pParams);
}
static void __nvoc_thunk_RmResource_osdescControl_Epilogue(struct OsDescMemory *pResource, CALL_CONTEXT *pCallContext, struct RS_RES_CONTROL_PARAMS_INTERNAL *pParams) {
rmresControl_Epilogue((struct RmResource *)(((unsigned char *)pResource) + __nvoc_rtti_OsDescMemory_RmResource.offset), pCallContext, pParams);
}
static NV_STATUS __nvoc_thunk_RsResource_osdescControlLookup(struct OsDescMemory *pResource, struct RS_RES_CONTROL_PARAMS_INTERNAL *pParams, const struct NVOC_EXPORTED_METHOD_DEF **ppEntry) {
return resControlLookup((struct RsResource *)(((unsigned char *)pResource) + __nvoc_rtti_OsDescMemory_RsResource.offset), pParams, ppEntry);
}
static NV_STATUS __nvoc_thunk_Memory_osdescMap(struct OsDescMemory *pMemory, CALL_CONTEXT *pCallContext, struct RS_CPU_MAP_PARAMS *pParams, RsCpuMapping *pCpuMapping) {
return memMap((struct Memory *)(((unsigned char *)pMemory) + __nvoc_rtti_OsDescMemory_Memory.offset), pCallContext, pParams, pCpuMapping);
}
static NvBool __nvoc_thunk_RmResource_osdescAccessCallback(struct OsDescMemory *pResource, struct RsClient *pInvokingClient, void *pAllocParams, RsAccessRight accessRight) {
return rmresAccessCallback((struct RmResource *)(((unsigned char *)pResource) + __nvoc_rtti_OsDescMemory_RmResource.offset), pInvokingClient, pAllocParams, accessRight);
}
const struct NVOC_EXPORT_INFO __nvoc_export_info_OsDescMemory =
{
/*numEntries=*/ 0,
/*pExportEntries=*/ 0
};
void __nvoc_dtor_Memory(Memory*);
void __nvoc_dtor_OsDescMemory(OsDescMemory *pThis) {
__nvoc_dtor_Memory(&pThis->__nvoc_base_Memory);
PORT_UNREFERENCED_VARIABLE(pThis);
}
void __nvoc_init_dataField_OsDescMemory(OsDescMemory *pThis) {
PORT_UNREFERENCED_VARIABLE(pThis);
}
NV_STATUS __nvoc_ctor_Memory(Memory* , CALL_CONTEXT *, struct RS_RES_ALLOC_PARAMS_INTERNAL *);
NV_STATUS __nvoc_ctor_OsDescMemory(OsDescMemory *pThis, CALL_CONTEXT * arg_pCallContext, struct RS_RES_ALLOC_PARAMS_INTERNAL * arg_pParams) {
NV_STATUS status = NV_OK;
status = __nvoc_ctor_Memory(&pThis->__nvoc_base_Memory, arg_pCallContext, arg_pParams);
if (status != NV_OK) goto __nvoc_ctor_OsDescMemory_fail_Memory;
__nvoc_init_dataField_OsDescMemory(pThis);
status = __nvoc_osdescConstruct(pThis, arg_pCallContext, arg_pParams);
if (status != NV_OK) goto __nvoc_ctor_OsDescMemory_fail__init;
goto __nvoc_ctor_OsDescMemory_exit; // Success
__nvoc_ctor_OsDescMemory_fail__init:
__nvoc_dtor_Memory(&pThis->__nvoc_base_Memory);
__nvoc_ctor_OsDescMemory_fail_Memory:
__nvoc_ctor_OsDescMemory_exit:
return status;
}
static void __nvoc_init_funcTable_OsDescMemory_1(OsDescMemory *pThis) {
PORT_UNREFERENCED_VARIABLE(pThis);
pThis->__osdescCanCopy__ = &osdescCanCopy_IMPL;
pThis->__nvoc_base_Memory.__nvoc_base_RmResource.__nvoc_base_RsResource.__resCanCopy__ = &__nvoc_thunk_OsDescMemory_resCanCopy;
pThis->__osdescCheckMemInterUnmap__ = &__nvoc_thunk_Memory_osdescCheckMemInterUnmap;
pThis->__osdescControl__ = &__nvoc_thunk_Memory_osdescControl;
pThis->__osdescUnmap__ = &__nvoc_thunk_Memory_osdescUnmap;
pThis->__osdescGetMemInterMapParams__ = &__nvoc_thunk_Memory_osdescGetMemInterMapParams;
pThis->__osdescGetMemoryMappingDescriptor__ = &__nvoc_thunk_Memory_osdescGetMemoryMappingDescriptor;
pThis->__osdescGetMapAddrSpace__ = &__nvoc_thunk_Memory_osdescGetMapAddrSpace;
pThis->__osdescShareCallback__ = &__nvoc_thunk_RmResource_osdescShareCallback;
pThis->__osdescControlFilter__ = &__nvoc_thunk_RsResource_osdescControlFilter;
pThis->__osdescAddAdditionalDependants__ = &__nvoc_thunk_RsResource_osdescAddAdditionalDependants;
pThis->__osdescGetRefCount__ = &__nvoc_thunk_RsResource_osdescGetRefCount;
pThis->__osdescMapTo__ = &__nvoc_thunk_RsResource_osdescMapTo;
pThis->__osdescControl_Prologue__ = &__nvoc_thunk_RmResource_osdescControl_Prologue;
pThis->__osdescIsReady__ = &__nvoc_thunk_Memory_osdescIsReady;
pThis->__osdescCheckCopyPermissions__ = &__nvoc_thunk_Memory_osdescCheckCopyPermissions;
pThis->__osdescPreDestruct__ = &__nvoc_thunk_RsResource_osdescPreDestruct;
pThis->__osdescUnmapFrom__ = &__nvoc_thunk_RsResource_osdescUnmapFrom;
pThis->__osdescControl_Epilogue__ = &__nvoc_thunk_RmResource_osdescControl_Epilogue;
pThis->__osdescControlLookup__ = &__nvoc_thunk_RsResource_osdescControlLookup;
pThis->__osdescMap__ = &__nvoc_thunk_Memory_osdescMap;
pThis->__osdescAccessCallback__ = &__nvoc_thunk_RmResource_osdescAccessCallback;
}
void __nvoc_init_funcTable_OsDescMemory(OsDescMemory *pThis) {
__nvoc_init_funcTable_OsDescMemory_1(pThis);
}
void __nvoc_init_Memory(Memory*);
void __nvoc_init_OsDescMemory(OsDescMemory *pThis) {
pThis->__nvoc_pbase_OsDescMemory = pThis;
pThis->__nvoc_pbase_Object = &pThis->__nvoc_base_Memory.__nvoc_base_RmResource.__nvoc_base_RsResource.__nvoc_base_Object;
pThis->__nvoc_pbase_RsResource = &pThis->__nvoc_base_Memory.__nvoc_base_RmResource.__nvoc_base_RsResource;
pThis->__nvoc_pbase_RmResourceCommon = &pThis->__nvoc_base_Memory.__nvoc_base_RmResource.__nvoc_base_RmResourceCommon;
pThis->__nvoc_pbase_RmResource = &pThis->__nvoc_base_Memory.__nvoc_base_RmResource;
pThis->__nvoc_pbase_Memory = &pThis->__nvoc_base_Memory;
__nvoc_init_Memory(&pThis->__nvoc_base_Memory);
__nvoc_init_funcTable_OsDescMemory(pThis);
}
NV_STATUS __nvoc_objCreate_OsDescMemory(OsDescMemory **ppThis, Dynamic *pParent, NvU32 createFlags, CALL_CONTEXT * arg_pCallContext, struct RS_RES_ALLOC_PARAMS_INTERNAL * arg_pParams) {
NV_STATUS status;
Object *pParentObj;
OsDescMemory *pThis;
pThis = portMemAllocNonPaged(sizeof(OsDescMemory));
if (pThis == NULL) return NV_ERR_NO_MEMORY;
portMemSet(pThis, 0, sizeof(OsDescMemory));
__nvoc_initRtti(staticCast(pThis, Dynamic), &__nvoc_class_def_OsDescMemory);
if (pParent != NULL && !(createFlags & NVOC_OBJ_CREATE_FLAGS_PARENT_HALSPEC_ONLY))
{
pParentObj = dynamicCast(pParent, Object);
objAddChild(pParentObj, &pThis->__nvoc_base_Memory.__nvoc_base_RmResource.__nvoc_base_RsResource.__nvoc_base_Object);
}
else
{
pThis->__nvoc_base_Memory.__nvoc_base_RmResource.__nvoc_base_RsResource.__nvoc_base_Object.pParent = NULL;
}
__nvoc_init_OsDescMemory(pThis);
status = __nvoc_ctor_OsDescMemory(pThis, arg_pCallContext, arg_pParams);
if (status != NV_OK) goto __nvoc_objCreate_OsDescMemory_cleanup;
*ppThis = pThis;
return NV_OK;
__nvoc_objCreate_OsDescMemory_cleanup:
// do not call destructors here since the constructor already called them
portMemFree(pThis);
return status;
}
NV_STATUS __nvoc_objCreateDynamic_OsDescMemory(OsDescMemory **ppThis, Dynamic *pParent, NvU32 createFlags, va_list args) {
NV_STATUS status;
CALL_CONTEXT * arg_pCallContext = va_arg(args, CALL_CONTEXT *);
struct RS_RES_ALLOC_PARAMS_INTERNAL * arg_pParams = va_arg(args, struct RS_RES_ALLOC_PARAMS_INTERNAL *);
status = __nvoc_objCreate_OsDescMemory(ppThis, pParent, createFlags, arg_pCallContext, arg_pParams);
return status;
}
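The dynamic-create entry point above pulls its typed constructor arguments out of a va_list in declaration order. A hedged sketch of how a variadic wrapper could pack and forward those arguments; createOsDescMemoryByArgs is hypothetical and not part of the source, and va_list support (stdarg.h or nvport's util_valist.h) is assumed to be in scope.

/* Illustrative only -- hypothetical wrapper, not part of the generated source. */
static NV_STATUS createOsDescMemoryByArgs(OsDescMemory **ppThis, Dynamic *pParent,
                                          NvU32 createFlags, ...)
{
    va_list args;
    NV_STATUS status;

    va_start(args, createFlags);
    /* Arguments must be packed in the order __nvoc_objCreateDynamic_OsDescMemory
     * reads them back: CALL_CONTEXT *, then struct RS_RES_ALLOC_PARAMS_INTERNAL *. */
    status = __nvoc_objCreateDynamic_OsDescMemory(ppThis, pParent, createFlags, args);
    va_end(args);
    return status;
}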


@@ -0,0 +1,224 @@
#ifndef _G_OS_DESC_MEM_NVOC_H_
#define _G_OS_DESC_MEM_NVOC_H_
#include "nvoc/runtime.h"
#ifdef __cplusplus
extern "C" {
#endif
/*
* SPDX-FileCopyrightText: Copyright (c) 1993-2021 NVIDIA CORPORATION & AFFILIATES. All rights reserved.
* SPDX-License-Identifier: MIT
*
* Permission is hereby granted, free of charge, to any person obtaining a
* copy of this software and associated documentation files (the "Software"),
* to deal in the Software without restriction, including without limitation
* the rights to use, copy, modify, merge, publish, distribute, sublicense,
* and/or sell copies of the Software, and to permit persons to whom the
* Software is furnished to do so, subject to the following conditions:
*
* The above copyright notice and this permission notice shall be included in
* all copies or substantial portions of the Software.
*
* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
* IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
* FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL
* THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
* LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
* FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER
* DEALINGS IN THE SOFTWARE.
*/
#include "g_os_desc_mem_nvoc.h"
#ifndef _OS_DESC_MEMORY_H_
#define _OS_DESC_MEMORY_H_
#include "mem_mgr/mem.h"
/*!
* Bind memory allocated through os descriptor
*/
#ifdef NVOC_OS_DESC_MEM_H_PRIVATE_ACCESS_ALLOWED
#define PRIVATE_FIELD(x) x
#else
#define PRIVATE_FIELD(x) NVOC_PRIVATE_FIELD(x)
#endif
struct OsDescMemory {
const struct NVOC_RTTI *__nvoc_rtti;
struct Memory __nvoc_base_Memory;
struct Object *__nvoc_pbase_Object;
struct RsResource *__nvoc_pbase_RsResource;
struct RmResourceCommon *__nvoc_pbase_RmResourceCommon;
struct RmResource *__nvoc_pbase_RmResource;
struct Memory *__nvoc_pbase_Memory;
struct OsDescMemory *__nvoc_pbase_OsDescMemory;
NvBool (*__osdescCanCopy__)(struct OsDescMemory *);
NV_STATUS (*__osdescCheckMemInterUnmap__)(struct OsDescMemory *, NvBool);
NV_STATUS (*__osdescControl__)(struct OsDescMemory *, CALL_CONTEXT *, struct RS_RES_CONTROL_PARAMS_INTERNAL *);
NV_STATUS (*__osdescUnmap__)(struct OsDescMemory *, CALL_CONTEXT *, RsCpuMapping *);
NV_STATUS (*__osdescGetMemInterMapParams__)(struct OsDescMemory *, RMRES_MEM_INTER_MAP_PARAMS *);
NV_STATUS (*__osdescGetMemoryMappingDescriptor__)(struct OsDescMemory *, MEMORY_DESCRIPTOR **);
NV_STATUS (*__osdescGetMapAddrSpace__)(struct OsDescMemory *, CALL_CONTEXT *, NvU32, NV_ADDRESS_SPACE *);
NvBool (*__osdescShareCallback__)(struct OsDescMemory *, struct RsClient *, struct RsResourceRef *, RS_SHARE_POLICY *);
NV_STATUS (*__osdescControlFilter__)(struct OsDescMemory *, struct CALL_CONTEXT *, struct RS_RES_CONTROL_PARAMS_INTERNAL *);
void (*__osdescAddAdditionalDependants__)(struct RsClient *, struct OsDescMemory *, RsResourceRef *);
NvU32 (*__osdescGetRefCount__)(struct OsDescMemory *);
NV_STATUS (*__osdescMapTo__)(struct OsDescMemory *, RS_RES_MAP_TO_PARAMS *);
NV_STATUS (*__osdescControl_Prologue__)(struct OsDescMemory *, CALL_CONTEXT *, struct RS_RES_CONTROL_PARAMS_INTERNAL *);
NV_STATUS (*__osdescIsReady__)(struct OsDescMemory *);
NV_STATUS (*__osdescCheckCopyPermissions__)(struct OsDescMemory *, struct OBJGPU *, NvHandle);
void (*__osdescPreDestruct__)(struct OsDescMemory *);
NV_STATUS (*__osdescUnmapFrom__)(struct OsDescMemory *, RS_RES_UNMAP_FROM_PARAMS *);
void (*__osdescControl_Epilogue__)(struct OsDescMemory *, CALL_CONTEXT *, struct RS_RES_CONTROL_PARAMS_INTERNAL *);
NV_STATUS (*__osdescControlLookup__)(struct OsDescMemory *, struct RS_RES_CONTROL_PARAMS_INTERNAL *, const struct NVOC_EXPORTED_METHOD_DEF **);
NV_STATUS (*__osdescMap__)(struct OsDescMemory *, CALL_CONTEXT *, struct RS_CPU_MAP_PARAMS *, RsCpuMapping *);
NvBool (*__osdescAccessCallback__)(struct OsDescMemory *, struct RsClient *, void *, RsAccessRight);
};
#ifndef __NVOC_CLASS_OsDescMemory_TYPEDEF__
#define __NVOC_CLASS_OsDescMemory_TYPEDEF__
typedef struct OsDescMemory OsDescMemory;
#endif /* __NVOC_CLASS_OsDescMemory_TYPEDEF__ */
#ifndef __nvoc_class_id_OsDescMemory
#define __nvoc_class_id_OsDescMemory 0xb3dacd
#endif /* __nvoc_class_id_OsDescMemory */
extern const struct NVOC_CLASS_DEF __nvoc_class_def_OsDescMemory;
#define __staticCast_OsDescMemory(pThis) \
((pThis)->__nvoc_pbase_OsDescMemory)
#ifdef __nvoc_os_desc_mem_h_disabled
#define __dynamicCast_OsDescMemory(pThis) ((OsDescMemory*)NULL)
#else //__nvoc_os_desc_mem_h_disabled
#define __dynamicCast_OsDescMemory(pThis) \
((OsDescMemory*)__nvoc_dynamicCast(staticCast((pThis), Dynamic), classInfo(OsDescMemory)))
#endif //__nvoc_os_desc_mem_h_disabled
NV_STATUS __nvoc_objCreateDynamic_OsDescMemory(OsDescMemory**, Dynamic*, NvU32, va_list);
NV_STATUS __nvoc_objCreate_OsDescMemory(OsDescMemory**, Dynamic*, NvU32, CALL_CONTEXT * arg_pCallContext, struct RS_RES_ALLOC_PARAMS_INTERNAL * arg_pParams);
#define __objCreate_OsDescMemory(ppNewObj, pParent, createFlags, arg_pCallContext, arg_pParams) \
__nvoc_objCreate_OsDescMemory((ppNewObj), staticCast((pParent), Dynamic), (createFlags), arg_pCallContext, arg_pParams)
#define osdescCanCopy(pOsDescMemory) osdescCanCopy_DISPATCH(pOsDescMemory)
#define osdescCheckMemInterUnmap(pMemory, bSubdeviceHandleProvided) osdescCheckMemInterUnmap_DISPATCH(pMemory, bSubdeviceHandleProvided)
#define osdescControl(pMemory, pCallContext, pParams) osdescControl_DISPATCH(pMemory, pCallContext, pParams)
#define osdescUnmap(pMemory, pCallContext, pCpuMapping) osdescUnmap_DISPATCH(pMemory, pCallContext, pCpuMapping)
#define osdescGetMemInterMapParams(pMemory, pParams) osdescGetMemInterMapParams_DISPATCH(pMemory, pParams)
#define osdescGetMemoryMappingDescriptor(pMemory, ppMemDesc) osdescGetMemoryMappingDescriptor_DISPATCH(pMemory, ppMemDesc)
#define osdescGetMapAddrSpace(pMemory, pCallContext, mapFlags, pAddrSpace) osdescGetMapAddrSpace_DISPATCH(pMemory, pCallContext, mapFlags, pAddrSpace)
#define osdescShareCallback(pResource, pInvokingClient, pParentRef, pSharePolicy) osdescShareCallback_DISPATCH(pResource, pInvokingClient, pParentRef, pSharePolicy)
#define osdescControlFilter(pResource, pCallContext, pParams) osdescControlFilter_DISPATCH(pResource, pCallContext, pParams)
#define osdescAddAdditionalDependants(pClient, pResource, pReference) osdescAddAdditionalDependants_DISPATCH(pClient, pResource, pReference)
#define osdescGetRefCount(pResource) osdescGetRefCount_DISPATCH(pResource)
#define osdescMapTo(pResource, pParams) osdescMapTo_DISPATCH(pResource, pParams)
#define osdescControl_Prologue(pResource, pCallContext, pParams) osdescControl_Prologue_DISPATCH(pResource, pCallContext, pParams)
#define osdescIsReady(pMemory) osdescIsReady_DISPATCH(pMemory)
#define osdescCheckCopyPermissions(pMemory, pDstGpu, hDstClientNvBool) osdescCheckCopyPermissions_DISPATCH(pMemory, pDstGpu, hDstClientNvBool)
#define osdescPreDestruct(pResource) osdescPreDestruct_DISPATCH(pResource)
#define osdescUnmapFrom(pResource, pParams) osdescUnmapFrom_DISPATCH(pResource, pParams)
#define osdescControl_Epilogue(pResource, pCallContext, pParams) osdescControl_Epilogue_DISPATCH(pResource, pCallContext, pParams)
#define osdescControlLookup(pResource, pParams, ppEntry) osdescControlLookup_DISPATCH(pResource, pParams, ppEntry)
#define osdescMap(pMemory, pCallContext, pParams, pCpuMapping) osdescMap_DISPATCH(pMemory, pCallContext, pParams, pCpuMapping)
#define osdescAccessCallback(pResource, pInvokingClient, pAllocParams, accessRight) osdescAccessCallback_DISPATCH(pResource, pInvokingClient, pAllocParams, accessRight)
NvBool osdescCanCopy_IMPL(struct OsDescMemory *pOsDescMemory);
static inline NvBool osdescCanCopy_DISPATCH(struct OsDescMemory *pOsDescMemory) {
return pOsDescMemory->__osdescCanCopy__(pOsDescMemory);
}
static inline NV_STATUS osdescCheckMemInterUnmap_DISPATCH(struct OsDescMemory *pMemory, NvBool bSubdeviceHandleProvided) {
return pMemory->__osdescCheckMemInterUnmap__(pMemory, bSubdeviceHandleProvided);
}
static inline NV_STATUS osdescControl_DISPATCH(struct OsDescMemory *pMemory, CALL_CONTEXT *pCallContext, struct RS_RES_CONTROL_PARAMS_INTERNAL *pParams) {
return pMemory->__osdescControl__(pMemory, pCallContext, pParams);
}
static inline NV_STATUS osdescUnmap_DISPATCH(struct OsDescMemory *pMemory, CALL_CONTEXT *pCallContext, RsCpuMapping *pCpuMapping) {
return pMemory->__osdescUnmap__(pMemory, pCallContext, pCpuMapping);
}
static inline NV_STATUS osdescGetMemInterMapParams_DISPATCH(struct OsDescMemory *pMemory, RMRES_MEM_INTER_MAP_PARAMS *pParams) {
return pMemory->__osdescGetMemInterMapParams__(pMemory, pParams);
}
static inline NV_STATUS osdescGetMemoryMappingDescriptor_DISPATCH(struct OsDescMemory *pMemory, MEMORY_DESCRIPTOR **ppMemDesc) {
return pMemory->__osdescGetMemoryMappingDescriptor__(pMemory, ppMemDesc);
}
static inline NV_STATUS osdescGetMapAddrSpace_DISPATCH(struct OsDescMemory *pMemory, CALL_CONTEXT *pCallContext, NvU32 mapFlags, NV_ADDRESS_SPACE *pAddrSpace) {
return pMemory->__osdescGetMapAddrSpace__(pMemory, pCallContext, mapFlags, pAddrSpace);
}
static inline NvBool osdescShareCallback_DISPATCH(struct OsDescMemory *pResource, struct RsClient *pInvokingClient, struct RsResourceRef *pParentRef, RS_SHARE_POLICY *pSharePolicy) {
return pResource->__osdescShareCallback__(pResource, pInvokingClient, pParentRef, pSharePolicy);
}
static inline NV_STATUS osdescControlFilter_DISPATCH(struct OsDescMemory *pResource, struct CALL_CONTEXT *pCallContext, struct RS_RES_CONTROL_PARAMS_INTERNAL *pParams) {
return pResource->__osdescControlFilter__(pResource, pCallContext, pParams);
}
static inline void osdescAddAdditionalDependants_DISPATCH(struct RsClient *pClient, struct OsDescMemory *pResource, RsResourceRef *pReference) {
pResource->__osdescAddAdditionalDependants__(pClient, pResource, pReference);
}
static inline NvU32 osdescGetRefCount_DISPATCH(struct OsDescMemory *pResource) {
return pResource->__osdescGetRefCount__(pResource);
}
static inline NV_STATUS osdescMapTo_DISPATCH(struct OsDescMemory *pResource, RS_RES_MAP_TO_PARAMS *pParams) {
return pResource->__osdescMapTo__(pResource, pParams);
}
static inline NV_STATUS osdescControl_Prologue_DISPATCH(struct OsDescMemory *pResource, CALL_CONTEXT *pCallContext, struct RS_RES_CONTROL_PARAMS_INTERNAL *pParams) {
return pResource->__osdescControl_Prologue__(pResource, pCallContext, pParams);
}
static inline NV_STATUS osdescIsReady_DISPATCH(struct OsDescMemory *pMemory) {
return pMemory->__osdescIsReady__(pMemory);
}
static inline NV_STATUS osdescCheckCopyPermissions_DISPATCH(struct OsDescMemory *pMemory, struct OBJGPU *pDstGpu, NvHandle hDstClientNvBool) {
return pMemory->__osdescCheckCopyPermissions__(pMemory, pDstGpu, hDstClientNvBool);
}
static inline void osdescPreDestruct_DISPATCH(struct OsDescMemory *pResource) {
pResource->__osdescPreDestruct__(pResource);
}
static inline NV_STATUS osdescUnmapFrom_DISPATCH(struct OsDescMemory *pResource, RS_RES_UNMAP_FROM_PARAMS *pParams) {
return pResource->__osdescUnmapFrom__(pResource, pParams);
}
static inline void osdescControl_Epilogue_DISPATCH(struct OsDescMemory *pResource, CALL_CONTEXT *pCallContext, struct RS_RES_CONTROL_PARAMS_INTERNAL *pParams) {
pResource->__osdescControl_Epilogue__(pResource, pCallContext, pParams);
}
static inline NV_STATUS osdescControlLookup_DISPATCH(struct OsDescMemory *pResource, struct RS_RES_CONTROL_PARAMS_INTERNAL *pParams, const struct NVOC_EXPORTED_METHOD_DEF **ppEntry) {
return pResource->__osdescControlLookup__(pResource, pParams, ppEntry);
}
static inline NV_STATUS osdescMap_DISPATCH(struct OsDescMemory *pMemory, CALL_CONTEXT *pCallContext, struct RS_CPU_MAP_PARAMS *pParams, RsCpuMapping *pCpuMapping) {
return pMemory->__osdescMap__(pMemory, pCallContext, pParams, pCpuMapping);
}
static inline NvBool osdescAccessCallback_DISPATCH(struct OsDescMemory *pResource, struct RsClient *pInvokingClient, void *pAllocParams, RsAccessRight accessRight) {
return pResource->__osdescAccessCallback__(pResource, pInvokingClient, pAllocParams, accessRight);
}
NV_STATUS osdescConstruct_IMPL(struct OsDescMemory *arg_pOsDescMemory, CALL_CONTEXT *arg_pCallContext, struct RS_RES_ALLOC_PARAMS_INTERNAL *arg_pParams);
#define __nvoc_osdescConstruct(arg_pOsDescMemory, arg_pCallContext, arg_pParams) osdescConstruct_IMPL(arg_pOsDescMemory, arg_pCallContext, arg_pParams)
#undef PRIVATE_FIELD
#endif
#ifdef __cplusplus
} // extern "C"
#endif
#endif // _G_OS_DESC_MEM_NVOC_H_
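Everything exported by this header is dispatched through per-object function pointers: each osdescX() macro expands to osdescX_DISPATCH(), which calls the pointer installed by __nvoc_init_funcTable_OsDescMemory(). A minimal sketch of one such virtual call, assuming pOsDesc is a fully constructed OsDescMemory; canDuplicate is a hypothetical helper.

/* Illustrative only -- not part of the generated header. */
static NvBool canDuplicate(struct OsDescMemory *pOsDesc)
{
    /* Expands to osdescCanCopy_DISPATCH(pOsDesc), which invokes the
     * __osdescCanCopy__ pointer installed by __nvoc_init_funcTable_OsDescMemory(). */
    return osdescCanCopy(pOsDesc);
}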


@@ -0,0 +1,10 @@
// This file is automatically generated by rmconfig - DO NOT EDIT!
//
// Profile: devel-soc-disp-dce-client
// Template: templates/gt_eng_empty.h
//
// The file is added to smooth the NVOC migration. After converting a module to
// an NVOC class, stale generated headers in the output directory cause
// incremental build failures. This file ensures the content of the old header
// is removed.
//


@@ -0,0 +1,149 @@
#define NVOC_OS_H_PRIVATE_ACCESS_ALLOWED
#include "nvoc/runtime.h"
#include "nvoc/rtti.h"
#include "nvtypes.h"
#include "nvport/nvport.h"
#include "nvport/inline/util_valist.h"
#include "utils/nvassert.h"
#include "g_os_nvoc.h"
#ifdef DEBUG
char __nvoc_class_id_uniqueness_check_0xaa1d70 = 1;
#endif
extern const struct NVOC_CLASS_DEF __nvoc_class_def_OBJOS;
extern const struct NVOC_CLASS_DEF __nvoc_class_def_Object;
void __nvoc_init_OBJOS(OBJOS*);
void __nvoc_init_funcTable_OBJOS(OBJOS*);
NV_STATUS __nvoc_ctor_OBJOS(OBJOS*);
void __nvoc_init_dataField_OBJOS(OBJOS*);
void __nvoc_dtor_OBJOS(OBJOS*);
extern const struct NVOC_EXPORT_INFO __nvoc_export_info_OBJOS;
static const struct NVOC_RTTI __nvoc_rtti_OBJOS_OBJOS = {
/*pClassDef=*/ &__nvoc_class_def_OBJOS,
/*dtor=*/ (NVOC_DYNAMIC_DTOR) &__nvoc_dtor_OBJOS,
/*offset=*/ 0,
};
static const struct NVOC_RTTI __nvoc_rtti_OBJOS_Object = {
/*pClassDef=*/ &__nvoc_class_def_Object,
/*dtor=*/ &__nvoc_destructFromBase,
/*offset=*/ NV_OFFSETOF(OBJOS, __nvoc_base_Object),
};
static const struct NVOC_CASTINFO __nvoc_castinfo_OBJOS = {
/*numRelatives=*/ 2,
/*relatives=*/ {
&__nvoc_rtti_OBJOS_OBJOS,
&__nvoc_rtti_OBJOS_Object,
},
};
const struct NVOC_CLASS_DEF __nvoc_class_def_OBJOS =
{
/*classInfo=*/ {
/*size=*/ sizeof(OBJOS),
/*classId=*/ classId(OBJOS),
/*providerId=*/ &__nvoc_rtti_provider,
#if NV_PRINTF_STRINGS_ALLOWED
/*name=*/ "OBJOS",
#endif
},
/*objCreatefn=*/ (NVOC_DYNAMIC_OBJ_CREATE) &__nvoc_objCreateDynamic_OBJOS,
/*pCastInfo=*/ &__nvoc_castinfo_OBJOS,
/*pExportInfo=*/ &__nvoc_export_info_OBJOS
};
const struct NVOC_EXPORT_INFO __nvoc_export_info_OBJOS =
{
/*numEntries=*/ 0,
/*pExportEntries=*/ 0
};
void __nvoc_dtor_Object(Object*);
void __nvoc_dtor_OBJOS(OBJOS *pThis) {
__nvoc_dtor_Object(&pThis->__nvoc_base_Object);
PORT_UNREFERENCED_VARIABLE(pThis);
}
void __nvoc_init_dataField_OBJOS(OBJOS *pThis) {
PORT_UNREFERENCED_VARIABLE(pThis);
pThis->setProperty(pThis, PDB_PROP_OS_SUPPORTS_DISPLAY_REMAPPER, !(1));
}
NV_STATUS __nvoc_ctor_Object(Object* );
NV_STATUS __nvoc_ctor_OBJOS(OBJOS *pThis) {
NV_STATUS status = NV_OK;
status = __nvoc_ctor_Object(&pThis->__nvoc_base_Object);
if (status != NV_OK) goto __nvoc_ctor_OBJOS_fail_Object;
__nvoc_init_dataField_OBJOS(pThis);
goto __nvoc_ctor_OBJOS_exit; // Success
__nvoc_ctor_OBJOS_fail_Object:
__nvoc_ctor_OBJOS_exit:
return status;
}
static void __nvoc_init_funcTable_OBJOS_1(OBJOS *pThis) {
PORT_UNREFERENCED_VARIABLE(pThis);
}
void __nvoc_init_funcTable_OBJOS(OBJOS *pThis) {
__nvoc_init_funcTable_OBJOS_1(pThis);
}
void __nvoc_init_Object(Object*);
void __nvoc_init_OBJOS(OBJOS *pThis) {
pThis->__nvoc_pbase_OBJOS = pThis;
pThis->__nvoc_pbase_Object = &pThis->__nvoc_base_Object;
__nvoc_init_Object(&pThis->__nvoc_base_Object);
__nvoc_init_funcTable_OBJOS(pThis);
}
NV_STATUS __nvoc_objCreate_OBJOS(OBJOS **ppThis, Dynamic *pParent, NvU32 createFlags) {
NV_STATUS status;
Object *pParentObj;
OBJOS *pThis;
pThis = portMemAllocNonPaged(sizeof(OBJOS));
if (pThis == NULL) return NV_ERR_NO_MEMORY;
portMemSet(pThis, 0, sizeof(OBJOS));
__nvoc_initRtti(staticCast(pThis, Dynamic), &__nvoc_class_def_OBJOS);
if (pParent != NULL && !(createFlags & NVOC_OBJ_CREATE_FLAGS_PARENT_HALSPEC_ONLY))
{
pParentObj = dynamicCast(pParent, Object);
objAddChild(pParentObj, &pThis->__nvoc_base_Object);
}
else
{
pThis->__nvoc_base_Object.pParent = NULL;
}
__nvoc_init_OBJOS(pThis);
status = __nvoc_ctor_OBJOS(pThis);
if (status != NV_OK) goto __nvoc_objCreate_OBJOS_cleanup;
*ppThis = pThis;
return NV_OK;
__nvoc_objCreate_OBJOS_cleanup:
// do not call destructors here since the constructor already called them
portMemFree(pThis);
return status;
}
NV_STATUS __nvoc_objCreateDynamic_OBJOS(OBJOS **ppThis, Dynamic *pParent, NvU32 createFlags, va_list args) {
NV_STATUS status;
status = __nvoc_objCreate_OBJOS(ppThis, pParent, createFlags);
return status;
}


File diff suppressed because it is too large


@@ -0,0 +1,10 @@
// This file is automatically generated by rmconfig - DO NOT EDIT!
//
// Profile: devel-soc-disp-dce-client
// Template: templates/gt_eng_empty.h
//
// The file is added to smooth the NVOC migration. After converting a module to
// an NVOC class, stale generated headers in the output directory cause
// incremental build failures. This file ensures the content of the old header
// is removed.
//


@@ -0,0 +1,155 @@
#define NVOC_PREREQ_TRACKER_H_PRIVATE_ACCESS_ALLOWED
#include "nvoc/runtime.h"
#include "nvoc/rtti.h"
#include "nvtypes.h"
#include "nvport/nvport.h"
#include "nvport/inline/util_valist.h"
#include "utils/nvassert.h"
#include "g_prereq_tracker_nvoc.h"
#ifdef DEBUG
char __nvoc_class_id_uniqueness_check_0x0e171b = 1;
#endif
extern const struct NVOC_CLASS_DEF __nvoc_class_def_PrereqTracker;
extern const struct NVOC_CLASS_DEF __nvoc_class_def_Object;
void __nvoc_init_PrereqTracker(PrereqTracker*);
void __nvoc_init_funcTable_PrereqTracker(PrereqTracker*);
NV_STATUS __nvoc_ctor_PrereqTracker(PrereqTracker*, struct OBJGPU * arg_pParent);
void __nvoc_init_dataField_PrereqTracker(PrereqTracker*);
void __nvoc_dtor_PrereqTracker(PrereqTracker*);
extern const struct NVOC_EXPORT_INFO __nvoc_export_info_PrereqTracker;
static const struct NVOC_RTTI __nvoc_rtti_PrereqTracker_PrereqTracker = {
/*pClassDef=*/ &__nvoc_class_def_PrereqTracker,
/*dtor=*/ (NVOC_DYNAMIC_DTOR) &__nvoc_dtor_PrereqTracker,
/*offset=*/ 0,
};
static const struct NVOC_RTTI __nvoc_rtti_PrereqTracker_Object = {
/*pClassDef=*/ &__nvoc_class_def_Object,
/*dtor=*/ &__nvoc_destructFromBase,
/*offset=*/ NV_OFFSETOF(PrereqTracker, __nvoc_base_Object),
};
static const struct NVOC_CASTINFO __nvoc_castinfo_PrereqTracker = {
/*numRelatives=*/ 2,
/*relatives=*/ {
&__nvoc_rtti_PrereqTracker_PrereqTracker,
&__nvoc_rtti_PrereqTracker_Object,
},
};
const struct NVOC_CLASS_DEF __nvoc_class_def_PrereqTracker =
{
/*classInfo=*/ {
/*size=*/ sizeof(PrereqTracker),
/*classId=*/ classId(PrereqTracker),
/*providerId=*/ &__nvoc_rtti_provider,
#if NV_PRINTF_STRINGS_ALLOWED
/*name=*/ "PrereqTracker",
#endif
},
/*objCreatefn=*/ (NVOC_DYNAMIC_OBJ_CREATE) &__nvoc_objCreateDynamic_PrereqTracker,
/*pCastInfo=*/ &__nvoc_castinfo_PrereqTracker,
/*pExportInfo=*/ &__nvoc_export_info_PrereqTracker
};
const struct NVOC_EXPORT_INFO __nvoc_export_info_PrereqTracker =
{
/*numEntries=*/ 0,
/*pExportEntries=*/ 0
};
void __nvoc_dtor_Object(Object*);
void __nvoc_dtor_PrereqTracker(PrereqTracker *pThis) {
__nvoc_prereqDestruct(pThis);
__nvoc_dtor_Object(&pThis->__nvoc_base_Object);
PORT_UNREFERENCED_VARIABLE(pThis);
}
void __nvoc_init_dataField_PrereqTracker(PrereqTracker *pThis) {
PORT_UNREFERENCED_VARIABLE(pThis);
}
NV_STATUS __nvoc_ctor_Object(Object* );
NV_STATUS __nvoc_ctor_PrereqTracker(PrereqTracker *pThis, struct OBJGPU * arg_pParent) {
NV_STATUS status = NV_OK;
status = __nvoc_ctor_Object(&pThis->__nvoc_base_Object);
if (status != NV_OK) goto __nvoc_ctor_PrereqTracker_fail_Object;
__nvoc_init_dataField_PrereqTracker(pThis);
status = __nvoc_prereqConstruct(pThis, arg_pParent);
if (status != NV_OK) goto __nvoc_ctor_PrereqTracker_fail__init;
goto __nvoc_ctor_PrereqTracker_exit; // Success
__nvoc_ctor_PrereqTracker_fail__init:
__nvoc_dtor_Object(&pThis->__nvoc_base_Object);
__nvoc_ctor_PrereqTracker_fail_Object:
__nvoc_ctor_PrereqTracker_exit:
return status;
}
static void __nvoc_init_funcTable_PrereqTracker_1(PrereqTracker *pThis) {
PORT_UNREFERENCED_VARIABLE(pThis);
}
void __nvoc_init_funcTable_PrereqTracker(PrereqTracker *pThis) {
__nvoc_init_funcTable_PrereqTracker_1(pThis);
}
void __nvoc_init_Object(Object*);
void __nvoc_init_PrereqTracker(PrereqTracker *pThis) {
pThis->__nvoc_pbase_PrereqTracker = pThis;
pThis->__nvoc_pbase_Object = &pThis->__nvoc_base_Object;
__nvoc_init_Object(&pThis->__nvoc_base_Object);
__nvoc_init_funcTable_PrereqTracker(pThis);
}
NV_STATUS __nvoc_objCreate_PrereqTracker(PrereqTracker **ppThis, Dynamic *pParent, NvU32 createFlags, struct OBJGPU * arg_pParent) {
NV_STATUS status;
Object *pParentObj;
PrereqTracker *pThis;
pThis = portMemAllocNonPaged(sizeof(PrereqTracker));
if (pThis == NULL) return NV_ERR_NO_MEMORY;
portMemSet(pThis, 0, sizeof(PrereqTracker));
__nvoc_initRtti(staticCast(pThis, Dynamic), &__nvoc_class_def_PrereqTracker);
if (pParent != NULL && !(createFlags & NVOC_OBJ_CREATE_FLAGS_PARENT_HALSPEC_ONLY))
{
pParentObj = dynamicCast(pParent, Object);
objAddChild(pParentObj, &pThis->__nvoc_base_Object);
}
else
{
pThis->__nvoc_base_Object.pParent = NULL;
}
__nvoc_init_PrereqTracker(pThis);
status = __nvoc_ctor_PrereqTracker(pThis, arg_pParent);
if (status != NV_OK) goto __nvoc_objCreate_PrereqTracker_cleanup;
*ppThis = pThis;
return NV_OK;
__nvoc_objCreate_PrereqTracker_cleanup:
// do not call destructors here since the constructor already called them
portMemFree(pThis);
return status;
}
NV_STATUS __nvoc_objCreateDynamic_PrereqTracker(PrereqTracker **ppThis, Dynamic *pParent, NvU32 createFlags, va_list args) {
NV_STATUS status;
struct OBJGPU * arg_pParent = va_arg(args, struct OBJGPU *);
status = __nvoc_objCreate_PrereqTracker(ppThis, pParent, createFlags, arg_pParent);
return status;
}
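The generated constructor above (like the OBJTMR and OsDescMemory constructors earlier) unwinds partially built state with reverse-order goto labels. A generic sketch of the same idiom follows, with hypothetical initA/initB/cleanupA stand-ins.

/* Illustrative only -- generic sketch of the constructor unwinding pattern. */
static NV_STATUS initA(void)    { return NV_OK; }
static NV_STATUS initB(void)    { return NV_OK; }
static void      cleanupA(void) { }

static NV_STATUS constructTwoParts(void)
{
    NV_STATUS status;

    status = initA();
    if (status != NV_OK) goto fail_A;

    status = initB();
    if (status != NV_OK) goto fail_B;

    return NV_OK;       /* success: nothing to unwind */

fail_B:
    cleanupA();         /* undo only the step that succeeded, in reverse order */
fail_A:
    return status;
}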


@@ -0,0 +1,254 @@
#ifndef _G_PREREQ_TRACKER_NVOC_H_
#define _G_PREREQ_TRACKER_NVOC_H_
#include "nvoc/runtime.h"
#ifdef __cplusplus
extern "C" {
#endif
/*
* SPDX-FileCopyrightText: Copyright (c) 2015-2021 NVIDIA CORPORATION & AFFILIATES. All rights reserved.
* SPDX-License-Identifier: MIT
*
* Permission is hereby granted, free of charge, to any person obtaining a
* copy of this software and associated documentation files (the "Software"),
* to deal in the Software without restriction, including without limitation
* the rights to use, copy, modify, merge, publish, distribute, sublicense,
* and/or sell copies of the Software, and to permit persons to whom the
* Software is furnished to do so, subject to the following conditions:
*
* The above copyright notice and this permission notice shall be included in
* all copies or substantial portions of the Software.
*
* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
* IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
* FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL
* THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
* LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
* FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER
* DEALINGS IN THE SOFTWARE.
*/
/*!
* @file prereq_tracker.h
* @brief Holds interfaces and data structures required by the prerequisite
* tracking feature/code.
*
* Code depending on multiple other features should use prereqComposeEntry() to create
* a prerequisite tracking structure with a provided bitVector of all necessary
* dependencies, which will arm the prereq to start watching those dependencies.
* Once those dependencies are fulfilled, they should issue prereqSatisfy()
* (one by one). This common code broadcasts each notification to all
* prerequisite tracking structures and, once all respective dependencies of a
* structure are satisfied, issues the registered callback.
* Similarly, dependencies should issue prereqRetract() before they change
* their state; common code broadcasts that to all tracking structures and,
* if all dependencies for that prereq were previously satisfied, issues the
* callback again with bSatisfied=false.
*
* @note The feature is designed to prevent creating new prerequisites once
* dependencies start issuing Satisfy()/Retract() notifications.
* Therefore, compose (prereqComposeEntry()) all prerequisites during
* stateInit() and allow code to issue Satisfy()/Retract() only in
* stateLoad() or later.
*/
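/*
 * A minimal usage sketch (added for illustration; not part of the generated
 * header, and it references names declared later in this file). The PREREQ
 * IDs and the feature callback are hypothetical, and the bit-vector helpers
 * bitVectorClrAll()/bitVectorSet() are assumed from utils/nvbitvector.h.
 */
#if 0
static NV_STATUS
exampleFeatureCallback(struct OBJGPU *pGpu, NvBool bSatisfied)
{
    // Fired once when every requested PREREQ_ID is satisfied
    // (bSatisfied == NV_TRUE) and again when any of them is retracted
    // (bSatisfied == NV_FALSE).
    return NV_OK;
}

static NV_STATUS
exampleFeatureStateInit(struct PrereqTracker *pTracker)
{
    PREREQ_ID_BIT_VECTOR depends;
    PREREQ_ENTRY        *pEntry;

    // Collect the dependencies this feature needs (IDs are hypothetical) ...
    bitVectorClrAll(&depends);
    bitVectorSet(&depends, PREREQ_ID_EXAMPLE_A);
    bitVectorSet(&depends, PREREQ_ID_EXAMPLE_B);

    // ... then compose (and thereby arm) the tracking entry during stateInit().
    return prereqComposeEntry(pTracker, exampleFeatureCallback, &depends, &pEntry);
}

// From stateLoad() onwards, the dependencies report their own transitions:
//     prereqSatisfy(pTracker, PREREQ_ID_EXAMPLE_A);
//     prereqRetract(pTracker, PREREQ_ID_EXAMPLE_A);
#endif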
#include "g_prereq_tracker_nvoc.h"
#ifndef __PREREQUISITE_TRACKER_H__
#define __PREREQUISITE_TRACKER_H__
/* ------------------------ Includes ---------------------------------------- */
#include "containers/list.h"
#include "utils/nvbitvector.h"
#include "nvoc/object.h"
/* ------------------------ Macros ------------------------------------------ */
#define PREREQ_ID_VECTOR_SIZE 64
/*!
* Checks whether all dependencies of the given prerequisite tracking
* structure have been satisfied.
*
* @param[in] _pPrereq PREREQ_ENTRY pointer
*
* @return boolean indicating whether the prerequisite has been satisfied.
*/
#define PREREQ_IS_SATISFIED(_pPrereq) \
((_pPrereq)->countRequested == (_pPrereq)->countSatisfied)
/* ------------------------ Datatypes --------------------------------------- */
struct OBJGPU;
#ifndef __NVOC_CLASS_OBJGPU_TYPEDEF__
#define __NVOC_CLASS_OBJGPU_TYPEDEF__
typedef struct OBJGPU OBJGPU;
#endif /* __NVOC_CLASS_OBJGPU_TYPEDEF__ */
#ifndef __nvoc_class_id_OBJGPU
#define __nvoc_class_id_OBJGPU 0x7ef3cb
#endif /* __nvoc_class_id_OBJGPU */
/*!
* @brief Callback prototype.
*
* @param[in] pGpu OBJGPU pointer
* @param[in] bSatisfied
* Indicates if dependencies were just satisfied or about to be retracted.
*
* @return NV_OK if callback successfully executed
* @return status failure specific error code
*/
typedef NV_STATUS GpuPrereqCallback(struct OBJGPU *pGpu, NvBool bSatisfied);
typedef NvU16 PREREQ_ID;
/*!
* Bit vector for storing the prereq IDs required by a prereq tracking entry.
* Limited to the size defined above; set it to the largest size required by users.
*/
MAKE_BITVECTOR(PREREQ_ID_BIT_VECTOR, PREREQ_ID_VECTOR_SIZE);
/*!
* An individual prerequisite tracking entry structure.
*/
typedef struct
{
/*!
* Mask of the dependencies (prerequisites that have to be satisfied before
* the callback can be issued).
*/
PREREQ_ID_BIT_VECTOR requested;
/*!
* Counter of all dependencies (prerequisites) tracked by this structure.
*/
NvS32 countRequested;
/*!
* Counter of currently satisfied dependencies (prerequisites) tracked by
* this structure. Once equal to @ref countRequested, callback can be issued.
*/
NvS32 countSatisfied;
/*!
* Boolean indicating that the given PREREQ_ENTRY is armed and ready to fire @ref
* callback whenever all PREREQ_IDs specified in @ref requested are satisfied.
*
* This bit is set during @ref prereqComposeEntry_IMPL(), which will also do an
* initial satisfaction check of all @ref requested PREREQ_IDs
* and fire the @ref callback if necessary.
*/
NvBool bArmed;
/*!
* @copydoc GpuPrereqCallback
*/
GpuPrereqCallback *callback;
} PREREQ_ENTRY;
MAKE_LIST(PrereqList, PREREQ_ENTRY);
/*!
* Holds common prerequisite tracking information.
*/
#ifdef NVOC_PREREQ_TRACKER_H_PRIVATE_ACCESS_ALLOWED
#define PRIVATE_FIELD(x) x
#else
#define PRIVATE_FIELD(x) NVOC_PRIVATE_FIELD(x)
#endif
struct PrereqTracker {
const struct NVOC_RTTI *__nvoc_rtti;
struct Object __nvoc_base_Object;
struct Object *__nvoc_pbase_Object;
struct PrereqTracker *__nvoc_pbase_PrereqTracker;
union PREREQ_ID_BIT_VECTOR satisfied;
NvBool bInitialized;
PrereqList prereqList;
struct OBJGPU *pParent;
};
#ifndef __NVOC_CLASS_PrereqTracker_TYPEDEF__
#define __NVOC_CLASS_PrereqTracker_TYPEDEF__
typedef struct PrereqTracker PrereqTracker;
#endif /* __NVOC_CLASS_PrereqTracker_TYPEDEF__ */
#ifndef __nvoc_class_id_PrereqTracker
#define __nvoc_class_id_PrereqTracker 0x0e171b
#endif /* __nvoc_class_id_PrereqTracker */
extern const struct NVOC_CLASS_DEF __nvoc_class_def_PrereqTracker;
#define __staticCast_PrereqTracker(pThis) \
((pThis)->__nvoc_pbase_PrereqTracker)
#ifdef __nvoc_prereq_tracker_h_disabled
#define __dynamicCast_PrereqTracker(pThis) ((PrereqTracker*)NULL)
#else //__nvoc_prereq_tracker_h_disabled
#define __dynamicCast_PrereqTracker(pThis) \
((PrereqTracker*)__nvoc_dynamicCast(staticCast((pThis), Dynamic), classInfo(PrereqTracker)))
#endif //__nvoc_prereq_tracker_h_disabled
NV_STATUS __nvoc_objCreateDynamic_PrereqTracker(PrereqTracker**, Dynamic*, NvU32, va_list);
NV_STATUS __nvoc_objCreate_PrereqTracker(PrereqTracker**, Dynamic*, NvU32, struct OBJGPU * arg_pParent);
#define __objCreate_PrereqTracker(ppNewObj, pParent, createFlags, arg_pParent) \
__nvoc_objCreate_PrereqTracker((ppNewObj), staticCast((pParent), Dynamic), (createFlags), arg_pParent)
NV_STATUS prereqConstruct_IMPL(struct PrereqTracker *arg_pTracker, struct OBJGPU *arg_pParent);
#define __nvoc_prereqConstruct(arg_pTracker, arg_pParent) prereqConstruct_IMPL(arg_pTracker, arg_pParent)
void prereqDestruct_IMPL(struct PrereqTracker *pTracker);
#define __nvoc_prereqDestruct(pTracker) prereqDestruct_IMPL(pTracker)
NV_STATUS prereqSatisfy_IMPL(struct PrereqTracker *pTracker, PREREQ_ID prereqId);
#ifdef __nvoc_prereq_tracker_h_disabled
static inline NV_STATUS prereqSatisfy(struct PrereqTracker *pTracker, PREREQ_ID prereqId) {
NV_ASSERT_FAILED_PRECOMP("PrereqTracker was disabled!");
return NV_ERR_NOT_SUPPORTED;
}
#else //__nvoc_prereq_tracker_h_disabled
#define prereqSatisfy(pTracker, prereqId) prereqSatisfy_IMPL(pTracker, prereqId)
#endif //__nvoc_prereq_tracker_h_disabled
NV_STATUS prereqRetract_IMPL(struct PrereqTracker *pTracker, PREREQ_ID prereqId);
#ifdef __nvoc_prereq_tracker_h_disabled
static inline NV_STATUS prereqRetract(struct PrereqTracker *pTracker, PREREQ_ID prereqId) {
NV_ASSERT_FAILED_PRECOMP("PrereqTracker was disabled!");
return NV_ERR_NOT_SUPPORTED;
}
#else //__nvoc_prereq_tracker_h_disabled
#define prereqRetract(pTracker, prereqId) prereqRetract_IMPL(pTracker, prereqId)
#endif //__nvoc_prereq_tracker_h_disabled
NvBool prereqIdIsSatisfied_IMPL(struct PrereqTracker *pTracker, PREREQ_ID prereqId);
#ifdef __nvoc_prereq_tracker_h_disabled
static inline NvBool prereqIdIsSatisfied(struct PrereqTracker *pTracker, PREREQ_ID prereqId) {
NV_ASSERT_FAILED_PRECOMP("PrereqTracker was disabled!");
return NV_FALSE;
}
#else //__nvoc_prereq_tracker_h_disabled
#define prereqIdIsSatisfied(pTracker, prereqId) prereqIdIsSatisfied_IMPL(pTracker, prereqId)
#endif //__nvoc_prereq_tracker_h_disabled
NV_STATUS prereqComposeEntry_IMPL(struct PrereqTracker *pTracker, GpuPrereqCallback *callback, union PREREQ_ID_BIT_VECTOR *pDepends, PREREQ_ENTRY **ppPrereq);
#ifdef __nvoc_prereq_tracker_h_disabled
static inline NV_STATUS prereqComposeEntry(struct PrereqTracker *pTracker, GpuPrereqCallback *callback, union PREREQ_ID_BIT_VECTOR *pDepends, PREREQ_ENTRY **ppPrereq) {
NV_ASSERT_FAILED_PRECOMP("PrereqTracker was disabled!");
return NV_ERR_NOT_SUPPORTED;
}
#else //__nvoc_prereq_tracker_h_disabled
#define prereqComposeEntry(pTracker, callback, pDepends, ppPrereq) prereqComposeEntry_IMPL(pTracker, callback, pDepends, ppPrereq)
#endif //__nvoc_prereq_tracker_h_disabled
#undef PRIVATE_FIELD
#endif // __PREREQUISITE_TRACKER_H__
#ifdef __cplusplus
} // extern "C"
#endif
#endif // _G_PREREQ_TRACKER_NVOC_H_


@@ -0,0 +1,183 @@
#ifndef _G_REF_COUNT_NVOC_H_
#define _G_REF_COUNT_NVOC_H_
#include "nvoc/runtime.h"
#ifdef __cplusplus
extern "C" {
#endif
/*
* SPDX-FileCopyrightText: Copyright (c) 2013-2020 NVIDIA CORPORATION & AFFILIATES. All rights reserved.
* SPDX-License-Identifier: MIT
*
* Permission is hereby granted, free of charge, to any person obtaining a
* copy of this software and associated documentation files (the "Software"),
* to deal in the Software without restriction, including without limitation
* the rights to use, copy, modify, merge, publish, distribute, sublicense,
* and/or sell copies of the Software, and to permit persons to whom the
* Software is furnished to do so, subject to the following conditions:
*
* The above copyright notice and this permission notice shall be included in
* all copies or substantial portions of the Software.
*
* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
* IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
* FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL
* THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
* LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
* FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER
* DEALINGS IN THE SOFTWARE.
*/
#include "g_ref_count_nvoc.h"
#ifndef REF_COUNT_H
#define REF_COUNT_H
/****************** Resource Manager Defines and Structures *****************\
* *
* Defines and structures used for the Reference-Counting Object. *
* *
\****************************************************************************/
#include "containers/map.h"
#include "nvoc/object.h"
#define NV_REQUESTER_INIT NV_U64_MIN
#define NV_REQUESTER_RM NV_U64_MAX
#define NV_REQUESTER_CLIENT_OBJECT(c,o) (((NvU64)(c) << 32) | o)
typedef enum
{
REFCNT_STATE_DEFAULT = 0,
REFCNT_STATE_ENABLED,
REFCNT_STATE_DISABLED,
REFCNT_STATE_ERROR,
} REFCNT_STATE;
typedef struct
{
NvU32 numReferences;
} REFCNT_REQUESTER_ENTRY, *PREFCNT_REQUESTER_ENTRY;
MAKE_MAP(REFCNT_REQUESTER_ENTRY_MAP, REFCNT_REQUESTER_ENTRY);
typedef struct OBJREFCNT *POBJREFCNT;
#ifndef __NVOC_CLASS_OBJREFCNT_TYPEDEF__
#define __NVOC_CLASS_OBJREFCNT_TYPEDEF__
typedef struct OBJREFCNT OBJREFCNT;
#endif /* __NVOC_CLASS_OBJREFCNT_TYPEDEF__ */
#ifndef __nvoc_class_id_OBJREFCNT
#define __nvoc_class_id_OBJREFCNT 0xf89281
#endif /* __nvoc_class_id_OBJREFCNT */
//
// XXX-IOM:
// These callback types are good candidates to be replaced with IOM
// functionality, where small derived classes can be created on a 'callback'
// base interface, should that become more practical (currently, adding any
// kind of class still requires a non-trivial amount of boilerplate to wire
// up).
//
typedef NV_STATUS RefcntStateChangeCallback(POBJREFCNT, Dynamic *,
REFCNT_STATE, REFCNT_STATE);
typedef void RefcntResetCallback(POBJREFCNT, Dynamic *, NvU64);
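/*
 * An illustrative sketch (added for clarity; not part of the generated header,
 * and it uses declarations that appear later in this file): callbacks matching
 * the typedefs above and object creation through the generated
 * __objCreate_OBJREFCNT macro. The state-parameter order, the tag value and
 * the callback bodies are assumptions made for the example.
 */
#if 0
static NV_STATUS
exampleStateChange(POBJREFCNT pRefcnt, Dynamic *pParent,
                   REFCNT_STATE oldState, REFCNT_STATE newState)
{
    // Invoked when the tracked reference count moves the object between states.
    return NV_OK;
}

static void
exampleReset(POBJREFCNT pRefcnt, Dynamic *pParent, NvU64 requesterId)
{
    // Invoked when a requester's outstanding references are forcibly dropped.
    // Requester ids are built with NV_REQUESTER_CLIENT_OBJECT(hClient, hObject)
    // or the NV_REQUESTER_INIT/NV_REQUESTER_RM values defined above.
}

static NV_STATUS
exampleCreateRefcnt(Dynamic *pParent, POBJREFCNT *ppRefcnt)
{
    // 0x1234 is an arbitrary example tag.
    return __objCreate_OBJREFCNT(ppRefcnt, pParent, 0 /* createFlags */,
                                 pParent, 0x1234,
                                 exampleStateChange, exampleReset);
}
#endif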
#ifdef NVOC_REF_COUNT_H_PRIVATE_ACCESS_ALLOWED
#define PRIVATE_FIELD(x) x
#else
#define PRIVATE_FIELD(x) NVOC_PRIVATE_FIELD(x)
#endif
struct OBJREFCNT {
const struct NVOC_RTTI *__nvoc_rtti;
struct Object __nvoc_base_Object;
struct Object *__nvoc_pbase_Object;
struct OBJREFCNT *__nvoc_pbase_OBJREFCNT;
NvBool PDB_PROP_REFCNT_ALLOW_RECURSIVE_REQUESTS;
Dynamic *pParent;
NvU32 tag;
REFCNT_REQUESTER_ENTRY_MAP requesterTree;
REFCNT_STATE state;
NvU32 count;
RefcntStateChangeCallback *refcntStateChangeCallback;
RefcntResetCallback *refcntResetCallback;
};
#ifndef __NVOC_CLASS_OBJREFCNT_TYPEDEF__
#define __NVOC_CLASS_OBJREFCNT_TYPEDEF__
typedef struct OBJREFCNT OBJREFCNT;
#endif /* __NVOC_CLASS_OBJREFCNT_TYPEDEF__ */
#ifndef __nvoc_class_id_OBJREFCNT
#define __nvoc_class_id_OBJREFCNT 0xf89281
#endif /* __nvoc_class_id_OBJREFCNT */
extern const struct NVOC_CLASS_DEF __nvoc_class_def_OBJREFCNT;
#define __staticCast_OBJREFCNT(pThis) \
((pThis)->__nvoc_pbase_OBJREFCNT)
#ifdef __nvoc_ref_count_h_disabled
#define __dynamicCast_OBJREFCNT(pThis) ((OBJREFCNT*)NULL)
#else //__nvoc_ref_count_h_disabled
#define __dynamicCast_OBJREFCNT(pThis) \
((OBJREFCNT*)__nvoc_dynamicCast(staticCast((pThis), Dynamic), classInfo(OBJREFCNT)))
#endif //__nvoc_ref_count_h_disabled
#define PDB_PROP_REFCNT_ALLOW_RECURSIVE_REQUESTS_BASE_CAST
#define PDB_PROP_REFCNT_ALLOW_RECURSIVE_REQUESTS_BASE_NAME PDB_PROP_REFCNT_ALLOW_RECURSIVE_REQUESTS
NV_STATUS __nvoc_objCreateDynamic_OBJREFCNT(OBJREFCNT**, Dynamic*, NvU32, va_list);
NV_STATUS __nvoc_objCreate_OBJREFCNT(OBJREFCNT**, Dynamic*, NvU32, Dynamic * arg_pParent, NvU32 arg_tag, RefcntStateChangeCallback * arg_pStateChangeCallback, RefcntResetCallback * arg_pResetCallback);
#define __objCreate_OBJREFCNT(ppNewObj, pParent, createFlags, arg_pParent, arg_tag, arg_pStateChangeCallback, arg_pResetCallback) \
__nvoc_objCreate_OBJREFCNT((ppNewObj), staticCast((pParent), Dynamic), (createFlags), arg_pParent, arg_tag, arg_pStateChangeCallback, arg_pResetCallback)
NV_STATUS refcntConstruct_IMPL(POBJREFCNT arg_pRefcnt, Dynamic *arg_pParent, NvU32 arg_tag, RefcntStateChangeCallback *arg_pStateChangeCallback, RefcntResetCallback *arg_pResetCallback);
#define __nvoc_refcntConstruct(arg_pRefcnt, arg_pParent, arg_tag, arg_pStateChangeCallback, arg_pResetCallback) refcntConstruct_IMPL(arg_pRefcnt, arg_pParent, arg_tag, arg_pStateChangeCallback, arg_pResetCallback)
void refcntDestruct_IMPL(POBJREFCNT pRefcnt);
#define __nvoc_refcntDestruct(pRefcnt) refcntDestruct_IMPL(pRefcnt)
NV_STATUS refcntRequestReference_IMPL(POBJREFCNT pRefcnt, NvU64 arg0, NvU32 arg1, NvBool arg2);
#ifdef __nvoc_ref_count_h_disabled
static inline NV_STATUS refcntRequestReference(POBJREFCNT pRefcnt, NvU64 arg0, NvU32 arg1, NvBool arg2) {
NV_ASSERT_FAILED_PRECOMP("OBJREFCNT was disabled!");
return NV_ERR_NOT_SUPPORTED;
}
#else //__nvoc_ref_count_h_disabled
#define refcntRequestReference(pRefcnt, arg0, arg1, arg2) refcntRequestReference_IMPL(pRefcnt, arg0, arg1, arg2)
#endif //__nvoc_ref_count_h_disabled
NV_STATUS refcntReleaseReferences_IMPL(POBJREFCNT pRefcnt, NvU64 arg0, NvBool arg1);
#ifdef __nvoc_ref_count_h_disabled
static inline NV_STATUS refcntReleaseReferences(POBJREFCNT pRefcnt, NvU64 arg0, NvBool arg1) {
NV_ASSERT_FAILED_PRECOMP("OBJREFCNT was disabled!");
return NV_ERR_NOT_SUPPORTED;
}
#else //__nvoc_ref_count_h_disabled
#define refcntReleaseReferences(pRefcnt, arg0, arg1) refcntReleaseReferences_IMPL(pRefcnt, arg0, arg1)
#endif //__nvoc_ref_count_h_disabled
NV_STATUS refcntReset_IMPL(POBJREFCNT pRefcnt, NvBool arg0);
#ifdef __nvoc_ref_count_h_disabled
static inline NV_STATUS refcntReset(POBJREFCNT pRefcnt, NvBool arg0) {
NV_ASSERT_FAILED_PRECOMP("OBJREFCNT was disabled!");
return NV_ERR_NOT_SUPPORTED;
}
#else //__nvoc_ref_count_h_disabled
#define refcntReset(pRefcnt, arg0) refcntReset_IMPL(pRefcnt, arg0)
#endif //__nvoc_ref_count_h_disabled
#undef PRIVATE_FIELD
#endif // REF_COUNT_H
#ifdef __cplusplus
} // extern "C"
#endif
#endif // _G_REF_COUNT_NVOC_H_


File diff suppressed because it is too large


@@ -0,0 +1,371 @@
#define NVOC_RESOURCE_H_PRIVATE_ACCESS_ALLOWED
#include "nvoc/runtime.h"
#include "nvoc/rtti.h"
#include "nvtypes.h"
#include "nvport/nvport.h"
#include "nvport/inline/util_valist.h"
#include "utils/nvassert.h"
#include "g_resource_nvoc.h"
#ifdef DEBUG
char __nvoc_class_id_uniqueness_check_0x8ef259 = 1;
#endif
extern const struct NVOC_CLASS_DEF __nvoc_class_def_RmResourceCommon;
void __nvoc_init_RmResourceCommon(RmResourceCommon*);
void __nvoc_init_funcTable_RmResourceCommon(RmResourceCommon*);
NV_STATUS __nvoc_ctor_RmResourceCommon(RmResourceCommon*);
void __nvoc_init_dataField_RmResourceCommon(RmResourceCommon*);
void __nvoc_dtor_RmResourceCommon(RmResourceCommon*);
extern const struct NVOC_EXPORT_INFO __nvoc_export_info_RmResourceCommon;
static const struct NVOC_RTTI __nvoc_rtti_RmResourceCommon_RmResourceCommon = {
/*pClassDef=*/ &__nvoc_class_def_RmResourceCommon,
/*dtor=*/ (NVOC_DYNAMIC_DTOR) &__nvoc_dtor_RmResourceCommon,
/*offset=*/ 0,
};
static const struct NVOC_CASTINFO __nvoc_castinfo_RmResourceCommon = {
/*numRelatives=*/ 1,
/*relatives=*/ {
&__nvoc_rtti_RmResourceCommon_RmResourceCommon,
},
};
// Not instantiable because it's not derived from class "Object"
const struct NVOC_CLASS_DEF __nvoc_class_def_RmResourceCommon =
{
/*classInfo=*/ {
/*size=*/ sizeof(RmResourceCommon),
/*classId=*/ classId(RmResourceCommon),
/*providerId=*/ &__nvoc_rtti_provider,
#if NV_PRINTF_STRINGS_ALLOWED
/*name=*/ "RmResourceCommon",
#endif
},
/*objCreatefn=*/ (NVOC_DYNAMIC_OBJ_CREATE) NULL,
/*pCastInfo=*/ &__nvoc_castinfo_RmResourceCommon,
/*pExportInfo=*/ &__nvoc_export_info_RmResourceCommon
};
const struct NVOC_EXPORT_INFO __nvoc_export_info_RmResourceCommon =
{
/*numEntries=*/ 0,
/*pExportEntries=*/ 0
};
void __nvoc_dtor_RmResourceCommon(RmResourceCommon *pThis) {
PORT_UNREFERENCED_VARIABLE(pThis);
}
void __nvoc_init_dataField_RmResourceCommon(RmResourceCommon *pThis) {
PORT_UNREFERENCED_VARIABLE(pThis);
}
NV_STATUS __nvoc_ctor_RmResourceCommon(RmResourceCommon *pThis) {
NV_STATUS status = NV_OK;
__nvoc_init_dataField_RmResourceCommon(pThis);
status = __nvoc_rmrescmnConstruct(pThis);
if (status != NV_OK) goto __nvoc_ctor_RmResourceCommon_fail__init;
goto __nvoc_ctor_RmResourceCommon_exit; // Success
__nvoc_ctor_RmResourceCommon_fail__init:
__nvoc_ctor_RmResourceCommon_exit:
return status;
}
static void __nvoc_init_funcTable_RmResourceCommon_1(RmResourceCommon *pThis) {
PORT_UNREFERENCED_VARIABLE(pThis);
}
void __nvoc_init_funcTable_RmResourceCommon(RmResourceCommon *pThis) {
__nvoc_init_funcTable_RmResourceCommon_1(pThis);
}
void __nvoc_init_RmResourceCommon(RmResourceCommon *pThis) {
pThis->__nvoc_pbase_RmResourceCommon = pThis;
__nvoc_init_funcTable_RmResourceCommon(pThis);
}
#ifdef DEBUG
char __nvoc_class_id_uniqueness_check_0x03610d = 1;
#endif
extern const struct NVOC_CLASS_DEF __nvoc_class_def_RmResource;
extern const struct NVOC_CLASS_DEF __nvoc_class_def_Object;
extern const struct NVOC_CLASS_DEF __nvoc_class_def_RsResource;
extern const struct NVOC_CLASS_DEF __nvoc_class_def_RmResourceCommon;
void __nvoc_init_RmResource(RmResource*);
void __nvoc_init_funcTable_RmResource(RmResource*);
NV_STATUS __nvoc_ctor_RmResource(RmResource*, struct CALL_CONTEXT * arg_pCallContext, struct RS_RES_ALLOC_PARAMS_INTERNAL * arg_pParams);
void __nvoc_init_dataField_RmResource(RmResource*);
void __nvoc_dtor_RmResource(RmResource*);
extern const struct NVOC_EXPORT_INFO __nvoc_export_info_RmResource;
static const struct NVOC_RTTI __nvoc_rtti_RmResource_RmResource = {
/*pClassDef=*/ &__nvoc_class_def_RmResource,
/*dtor=*/ (NVOC_DYNAMIC_DTOR) &__nvoc_dtor_RmResource,
/*offset=*/ 0,
};
static const struct NVOC_RTTI __nvoc_rtti_RmResource_Object = {
/*pClassDef=*/ &__nvoc_class_def_Object,
/*dtor=*/ &__nvoc_destructFromBase,
/*offset=*/ NV_OFFSETOF(RmResource, __nvoc_base_RsResource.__nvoc_base_Object),
};
static const struct NVOC_RTTI __nvoc_rtti_RmResource_RsResource = {
/*pClassDef=*/ &__nvoc_class_def_RsResource,
/*dtor=*/ &__nvoc_destructFromBase,
/*offset=*/ NV_OFFSETOF(RmResource, __nvoc_base_RsResource),
};
static const struct NVOC_RTTI __nvoc_rtti_RmResource_RmResourceCommon = {
/*pClassDef=*/ &__nvoc_class_def_RmResourceCommon,
/*dtor=*/ &__nvoc_destructFromBase,
/*offset=*/ NV_OFFSETOF(RmResource, __nvoc_base_RmResourceCommon),
};
static const struct NVOC_CASTINFO __nvoc_castinfo_RmResource = {
/*numRelatives=*/ 4,
/*relatives=*/ {
&__nvoc_rtti_RmResource_RmResource,
&__nvoc_rtti_RmResource_RmResourceCommon,
&__nvoc_rtti_RmResource_RsResource,
&__nvoc_rtti_RmResource_Object,
},
};
const struct NVOC_CLASS_DEF __nvoc_class_def_RmResource =
{
/*classInfo=*/ {
/*size=*/ sizeof(RmResource),
/*classId=*/ classId(RmResource),
/*providerId=*/ &__nvoc_rtti_provider,
#if NV_PRINTF_STRINGS_ALLOWED
/*name=*/ "RmResource",
#endif
},
/*objCreatefn=*/ (NVOC_DYNAMIC_OBJ_CREATE) &__nvoc_objCreateDynamic_RmResource,
/*pCastInfo=*/ &__nvoc_castinfo_RmResource,
/*pExportInfo=*/ &__nvoc_export_info_RmResource
};
static NvBool __nvoc_thunk_RmResource_resAccessCallback(struct RsResource *pResource, struct RsClient *pInvokingClient, void *pAllocParams, RsAccessRight accessRight) {
return rmresAccessCallback((struct RmResource *)(((unsigned char *)pResource) - __nvoc_rtti_RmResource_RsResource.offset), pInvokingClient, pAllocParams, accessRight);
}
static NvBool __nvoc_thunk_RmResource_resShareCallback(struct RsResource *pResource, struct RsClient *pInvokingClient, struct RsResourceRef *pParentRef, RS_SHARE_POLICY *pSharePolicy) {
return rmresShareCallback((struct RmResource *)(((unsigned char *)pResource) - __nvoc_rtti_RmResource_RsResource.offset), pInvokingClient, pParentRef, pSharePolicy);
}
static NV_STATUS __nvoc_thunk_RmResource_resControl_Prologue(struct RsResource *pResource, struct CALL_CONTEXT *pCallContext, struct RS_RES_CONTROL_PARAMS_INTERNAL *pParams) {
return rmresControl_Prologue((struct RmResource *)(((unsigned char *)pResource) - __nvoc_rtti_RmResource_RsResource.offset), pCallContext, pParams);
}
static void __nvoc_thunk_RmResource_resControl_Epilogue(struct RsResource *pResource, struct CALL_CONTEXT *pCallContext, struct RS_RES_CONTROL_PARAMS_INTERNAL *pParams) {
rmresControl_Epilogue((struct RmResource *)(((unsigned char *)pResource) - __nvoc_rtti_RmResource_RsResource.offset), pCallContext, pParams);
}
static NV_STATUS __nvoc_thunk_RsResource_rmresControl(struct RmResource *pResource, struct CALL_CONTEXT *pCallContext, struct RS_RES_CONTROL_PARAMS_INTERNAL *pParams) {
return resControl((struct RsResource *)(((unsigned char *)pResource) + __nvoc_rtti_RmResource_RsResource.offset), pCallContext, pParams);
}
static NV_STATUS __nvoc_thunk_RsResource_rmresUnmap(struct RmResource *pResource, struct CALL_CONTEXT *pCallContext, RsCpuMapping *pCpuMapping) {
return resUnmap((struct RsResource *)(((unsigned char *)pResource) + __nvoc_rtti_RmResource_RsResource.offset), pCallContext, pCpuMapping);
}
static NV_STATUS __nvoc_thunk_RsResource_rmresMapTo(struct RmResource *pResource, RS_RES_MAP_TO_PARAMS *pParams) {
return resMapTo((struct RsResource *)(((unsigned char *)pResource) + __nvoc_rtti_RmResource_RsResource.offset), pParams);
}
static NvU32 __nvoc_thunk_RsResource_rmresGetRefCount(struct RmResource *pResource) {
return resGetRefCount((struct RsResource *)(((unsigned char *)pResource) + __nvoc_rtti_RmResource_RsResource.offset));
}
static NV_STATUS __nvoc_thunk_RsResource_rmresControlFilter(struct RmResource *pResource, struct CALL_CONTEXT *pCallContext, struct RS_RES_CONTROL_PARAMS_INTERNAL *pParams) {
return resControlFilter((struct RsResource *)(((unsigned char *)pResource) + __nvoc_rtti_RmResource_RsResource.offset), pCallContext, pParams);
}
static void __nvoc_thunk_RsResource_rmresAddAdditionalDependants(struct RsClient *pClient, struct RmResource *pResource, RsResourceRef *pReference) {
resAddAdditionalDependants(pClient, (struct RsResource *)(((unsigned char *)pResource) + __nvoc_rtti_RmResource_RsResource.offset), pReference);
}
static NvBool __nvoc_thunk_RsResource_rmresCanCopy(struct RmResource *pResource) {
return resCanCopy((struct RsResource *)(((unsigned char *)pResource) + __nvoc_rtti_RmResource_RsResource.offset));
}
static void __nvoc_thunk_RsResource_rmresPreDestruct(struct RmResource *pResource) {
resPreDestruct((struct RsResource *)(((unsigned char *)pResource) + __nvoc_rtti_RmResource_RsResource.offset));
}
static NV_STATUS __nvoc_thunk_RsResource_rmresUnmapFrom(struct RmResource *pResource, RS_RES_UNMAP_FROM_PARAMS *pParams) {
return resUnmapFrom((struct RsResource *)(((unsigned char *)pResource) + __nvoc_rtti_RmResource_RsResource.offset), pParams);
}
static NV_STATUS __nvoc_thunk_RsResource_rmresControlLookup(struct RmResource *pResource, struct RS_RES_CONTROL_PARAMS_INTERNAL *pParams, const struct NVOC_EXPORTED_METHOD_DEF **ppEntry) {
return resControlLookup((struct RsResource *)(((unsigned char *)pResource) + __nvoc_rtti_RmResource_RsResource.offset), pParams, ppEntry);
}
static NV_STATUS __nvoc_thunk_RsResource_rmresMap(struct RmResource *pResource, struct CALL_CONTEXT *pCallContext, RS_CPU_MAP_PARAMS *pParams, RsCpuMapping *pCpuMapping) {
return resMap((struct RsResource *)(((unsigned char *)pResource) + __nvoc_rtti_RmResource_RsResource.offset), pCallContext, pParams, pCpuMapping);
}
const struct NVOC_EXPORT_INFO __nvoc_export_info_RmResource =
{
/*numEntries=*/ 0,
/*pExportEntries=*/ 0
};
void __nvoc_dtor_RsResource(RsResource*);
void __nvoc_dtor_RmResourceCommon(RmResourceCommon*);
void __nvoc_dtor_RmResource(RmResource *pThis) {
__nvoc_dtor_RsResource(&pThis->__nvoc_base_RsResource);
__nvoc_dtor_RmResourceCommon(&pThis->__nvoc_base_RmResourceCommon);
PORT_UNREFERENCED_VARIABLE(pThis);
}
void __nvoc_init_dataField_RmResource(RmResource *pThis) {
PORT_UNREFERENCED_VARIABLE(pThis);
}
NV_STATUS __nvoc_ctor_RsResource(RsResource* , struct CALL_CONTEXT *, struct RS_RES_ALLOC_PARAMS_INTERNAL *);
NV_STATUS __nvoc_ctor_RmResourceCommon(RmResourceCommon* );
NV_STATUS __nvoc_ctor_RmResource(RmResource *pThis, struct CALL_CONTEXT * arg_pCallContext, struct RS_RES_ALLOC_PARAMS_INTERNAL * arg_pParams) {
NV_STATUS status = NV_OK;
status = __nvoc_ctor_RsResource(&pThis->__nvoc_base_RsResource, arg_pCallContext, arg_pParams);
if (status != NV_OK) goto __nvoc_ctor_RmResource_fail_RsResource;
status = __nvoc_ctor_RmResourceCommon(&pThis->__nvoc_base_RmResourceCommon);
if (status != NV_OK) goto __nvoc_ctor_RmResource_fail_RmResourceCommon;
__nvoc_init_dataField_RmResource(pThis);
status = __nvoc_rmresConstruct(pThis, arg_pCallContext, arg_pParams);
if (status != NV_OK) goto __nvoc_ctor_RmResource_fail__init;
goto __nvoc_ctor_RmResource_exit; // Success
__nvoc_ctor_RmResource_fail__init:
__nvoc_dtor_RmResourceCommon(&pThis->__nvoc_base_RmResourceCommon);
__nvoc_ctor_RmResource_fail_RmResourceCommon:
__nvoc_dtor_RsResource(&pThis->__nvoc_base_RsResource);
__nvoc_ctor_RmResource_fail_RsResource:
__nvoc_ctor_RmResource_exit:
return status;
}
static void __nvoc_init_funcTable_RmResource_1(RmResource *pThis) {
PORT_UNREFERENCED_VARIABLE(pThis);
pThis->__rmresAccessCallback__ = &rmresAccessCallback_IMPL;
pThis->__rmresShareCallback__ = &rmresShareCallback_IMPL;
pThis->__rmresGetMemInterMapParams__ = &rmresGetMemInterMapParams_IMPL;
pThis->__rmresCheckMemInterUnmap__ = &rmresCheckMemInterUnmap_IMPL;
pThis->__rmresGetMemoryMappingDescriptor__ = &rmresGetMemoryMappingDescriptor_IMPL;
pThis->__rmresControl_Prologue__ = &rmresControl_Prologue_IMPL;
pThis->__rmresControl_Epilogue__ = &rmresControl_Epilogue_IMPL;
pThis->__nvoc_base_RsResource.__resAccessCallback__ = &__nvoc_thunk_RmResource_resAccessCallback;
pThis->__nvoc_base_RsResource.__resShareCallback__ = &__nvoc_thunk_RmResource_resShareCallback;
pThis->__nvoc_base_RsResource.__resControl_Prologue__ = &__nvoc_thunk_RmResource_resControl_Prologue;
pThis->__nvoc_base_RsResource.__resControl_Epilogue__ = &__nvoc_thunk_RmResource_resControl_Epilogue;
pThis->__rmresControl__ = &__nvoc_thunk_RsResource_rmresControl;
pThis->__rmresUnmap__ = &__nvoc_thunk_RsResource_rmresUnmap;
pThis->__rmresMapTo__ = &__nvoc_thunk_RsResource_rmresMapTo;
pThis->__rmresGetRefCount__ = &__nvoc_thunk_RsResource_rmresGetRefCount;
pThis->__rmresControlFilter__ = &__nvoc_thunk_RsResource_rmresControlFilter;
pThis->__rmresAddAdditionalDependants__ = &__nvoc_thunk_RsResource_rmresAddAdditionalDependants;
pThis->__rmresCanCopy__ = &__nvoc_thunk_RsResource_rmresCanCopy;
pThis->__rmresPreDestruct__ = &__nvoc_thunk_RsResource_rmresPreDestruct;
pThis->__rmresUnmapFrom__ = &__nvoc_thunk_RsResource_rmresUnmapFrom;
pThis->__rmresControlLookup__ = &__nvoc_thunk_RsResource_rmresControlLookup;
pThis->__rmresMap__ = &__nvoc_thunk_RsResource_rmresMap;
}
void __nvoc_init_funcTable_RmResource(RmResource *pThis) {
__nvoc_init_funcTable_RmResource_1(pThis);
}
void __nvoc_init_RsResource(RsResource*);
void __nvoc_init_RmResourceCommon(RmResourceCommon*);
void __nvoc_init_RmResource(RmResource *pThis) {
pThis->__nvoc_pbase_RmResource = pThis;
pThis->__nvoc_pbase_Object = &pThis->__nvoc_base_RsResource.__nvoc_base_Object;
pThis->__nvoc_pbase_RsResource = &pThis->__nvoc_base_RsResource;
pThis->__nvoc_pbase_RmResourceCommon = &pThis->__nvoc_base_RmResourceCommon;
__nvoc_init_RsResource(&pThis->__nvoc_base_RsResource);
__nvoc_init_RmResourceCommon(&pThis->__nvoc_base_RmResourceCommon);
__nvoc_init_funcTable_RmResource(pThis);
}
NV_STATUS __nvoc_objCreate_RmResource(RmResource **ppThis, Dynamic *pParent, NvU32 createFlags, struct CALL_CONTEXT * arg_pCallContext, struct RS_RES_ALLOC_PARAMS_INTERNAL * arg_pParams) {
NV_STATUS status;
Object *pParentObj;
RmResource *pThis;
pThis = portMemAllocNonPaged(sizeof(RmResource));
if (pThis == NULL) return NV_ERR_NO_MEMORY;
portMemSet(pThis, 0, sizeof(RmResource));
__nvoc_initRtti(staticCast(pThis, Dynamic), &__nvoc_class_def_RmResource);
if (pParent != NULL && !(createFlags & NVOC_OBJ_CREATE_FLAGS_PARENT_HALSPEC_ONLY))
{
pParentObj = dynamicCast(pParent, Object);
objAddChild(pParentObj, &pThis->__nvoc_base_RsResource.__nvoc_base_Object);
}
else
{
pThis->__nvoc_base_RsResource.__nvoc_base_Object.pParent = NULL;
}
__nvoc_init_RmResource(pThis);
status = __nvoc_ctor_RmResource(pThis, arg_pCallContext, arg_pParams);
if (status != NV_OK) goto __nvoc_objCreate_RmResource_cleanup;
*ppThis = pThis;
return NV_OK;
__nvoc_objCreate_RmResource_cleanup:
// do not call destructors here since the constructor already called them
portMemFree(pThis);
return status;
}
NV_STATUS __nvoc_objCreateDynamic_RmResource(RmResource **ppThis, Dynamic *pParent, NvU32 createFlags, va_list args) {
NV_STATUS status;
struct CALL_CONTEXT * arg_pCallContext = va_arg(args, struct CALL_CONTEXT *);
struct RS_RES_ALLOC_PARAMS_INTERNAL * arg_pParams = va_arg(args, struct RS_RES_ALLOC_PARAMS_INTERNAL *);
status = __nvoc_objCreate_RmResource(ppThis, pParent, createFlags, arg_pCallContext, arg_pParams);
return status;
}


@@ -0,0 +1,355 @@
#ifndef _G_RESOURCE_NVOC_H_
#define _G_RESOURCE_NVOC_H_
#include "nvoc/runtime.h"
#ifdef __cplusplus
extern "C" {
#endif
/*
* SPDX-FileCopyrightText: Copyright (c) 2018-2021 NVIDIA CORPORATION & AFFILIATES. All rights reserved.
* SPDX-License-Identifier: MIT
*
* Permission is hereby granted, free of charge, to any person obtaining a
* copy of this software and associated documentation files (the "Software"),
* to deal in the Software without restriction, including without limitation
* the rights to use, copy, modify, merge, publish, distribute, sublicense,
* and/or sell copies of the Software, and to permit persons to whom the
* Software is furnished to do so, subject to the following conditions:
*
* The above copyright notice and this permission notice shall be included in
* all copies or substantial portions of the Software.
*
* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
* IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
* FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL
* THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
* LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
* FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER
* DEALINGS IN THE SOFTWARE.
*/
#include "g_resource_nvoc.h"
#ifndef _RESOURCE_H_
#define _RESOURCE_H_
#include "core/core.h"
#include "resserv/rs_resource.h"
#include "rmapi/control.h"
/* Forward declarations */
struct MEMORY_DESCRIPTOR;
struct OBJVASPACE;
struct RMRES_MEM_INTER_MAP_PARAMS
{
/// [in]
OBJGPU *pGpu;
RsResourceRef *pMemoryRef;
NvBool bSubdeviceHandleProvided;
/// [out]
OBJGPU *pSrcGpu;
struct MEMORY_DESCRIPTOR *pSrcMemDesc;
NvHandle hMemoryDevice;
NvBool bDmaMapNeeded;
// This flag will be set when this is FLA mapping
NvBool bFlaMapping;
};
struct RS_RES_MAP_TO_PARAMS
{
OBJGPU *pGpu; ///< [in]
OBJGPU *pSrcGpu; ///< [in]
struct MEMORY_DESCRIPTOR *pSrcMemDesc; ///< [in]
struct MEMORY_DESCRIPTOR **ppMemDesc; ///< [out]
RsResourceRef *pMemoryRef; ///< [in]
NvHandle hBroadcastDevice; ///< [in]
NvHandle hMemoryDevice; ///< [in]
NvU32 gpuMask; ///< [in]
NvU64 offset; ///< [in]
NvU64 length; ///< [in]
NvU32 flags; ///< [in]
NvU64 *pDmaOffset; ///< [inout]
NvBool bSubdeviceHandleProvided; ///< [in]
NvBool bDmaMapNeeded; ///< [in]
NvBool bFlaMapping; ///< [in]
};
struct RS_RES_UNMAP_FROM_PARAMS
{
OBJGPU *pGpu; ///< [in]
NvHandle hMemory; ///< [in]
NvHandle hBroadcastDevice; ///< [in]
NvU32 gpuMask; ///< [in]
NvU32 flags; ///< [in]
NvU64 dmaOffset; ///< [in]
struct MEMORY_DESCRIPTOR *pMemDesc; ///< [in]
NvBool bSubdeviceHandleProvided; ///< [in]
};
struct RS_INTER_MAP_PRIVATE
{
OBJGPU *pGpu;
OBJGPU *pSrcGpu;
struct MEMORY_DESCRIPTOR *pSrcMemDesc;
NvHandle hBroadcastDevice;
NvHandle hMemoryDevice;
NvU32 gpuMask;
NvBool bSubdeviceHandleProvided;
NvBool bDmaMapNeeded;
NvBool bFlaMapping;
};
struct RS_INTER_UNMAP_PRIVATE
{
OBJGPU *pGpu;
NvHandle hBroadcastDevice;
NvU32 gpuMask;
NvBool bSubdeviceHandleProvided;
NvBool bcState;
NvBool bAllocated; ///< This struct has been allocated and must be freed
};
struct RS_CPU_MAPPING_PRIVATE
{
NvU64 gpuAddress;
NvU64 gpuMapLength;
OBJGPU *pGpu;
NvP64 pPriv;
NvU32 protect;
NvBool bKernel;
};
typedef struct RMRES_MEM_INTER_MAP_PARAMS RMRES_MEM_INTER_MAP_PARAMS;
/*!
* All RsResource subclasses in RM must inherit from this class
*/
#ifdef NVOC_RESOURCE_H_PRIVATE_ACCESS_ALLOWED
#define PRIVATE_FIELD(x) x
#else
#define PRIVATE_FIELD(x) NVOC_PRIVATE_FIELD(x)
#endif
struct RmResourceCommon {
const struct NVOC_RTTI *__nvoc_rtti;
struct RmResourceCommon *__nvoc_pbase_RmResourceCommon;
};
#ifndef __NVOC_CLASS_RmResourceCommon_TYPEDEF__
#define __NVOC_CLASS_RmResourceCommon_TYPEDEF__
typedef struct RmResourceCommon RmResourceCommon;
#endif /* __NVOC_CLASS_RmResourceCommon_TYPEDEF__ */
#ifndef __nvoc_class_id_RmResourceCommon
#define __nvoc_class_id_RmResourceCommon 0x8ef259
#endif /* __nvoc_class_id_RmResourceCommon */
extern const struct NVOC_CLASS_DEF __nvoc_class_def_RmResourceCommon;
#define __staticCast_RmResourceCommon(pThis) \
((pThis)->__nvoc_pbase_RmResourceCommon)
#ifdef __nvoc_resource_h_disabled
#define __dynamicCast_RmResourceCommon(pThis) ((RmResourceCommon*)NULL)
#else //__nvoc_resource_h_disabled
#define __dynamicCast_RmResourceCommon(pThis) \
((RmResourceCommon*)__nvoc_dynamicCast(staticCast((pThis), Dynamic), classInfo(RmResourceCommon)))
#endif //__nvoc_resource_h_disabled
NV_STATUS __nvoc_objCreateDynamic_RmResourceCommon(RmResourceCommon**, Dynamic*, NvU32, va_list);
NV_STATUS __nvoc_objCreate_RmResourceCommon(RmResourceCommon**, Dynamic*, NvU32);
#define __objCreate_RmResourceCommon(ppNewObj, pParent, createFlags) \
__nvoc_objCreate_RmResourceCommon((ppNewObj), staticCast((pParent), Dynamic), (createFlags))
NV_STATUS rmrescmnConstruct_IMPL(struct RmResourceCommon *arg_pResourceCommon);
#define __nvoc_rmrescmnConstruct(arg_pResourceCommon) rmrescmnConstruct_IMPL(arg_pResourceCommon)
#undef PRIVATE_FIELD
/*!
* Utility base class for all RsResource subclasses in RM. It doesn't have to
* be used, but if it isn't, RmResourceCommon must be inherited manually.
*/
#ifdef NVOC_RESOURCE_H_PRIVATE_ACCESS_ALLOWED
#define PRIVATE_FIELD(x) x
#else
#define PRIVATE_FIELD(x) NVOC_PRIVATE_FIELD(x)
#endif
struct RmResource {
const struct NVOC_RTTI *__nvoc_rtti;
struct RsResource __nvoc_base_RsResource;
struct RmResourceCommon __nvoc_base_RmResourceCommon;
struct Object *__nvoc_pbase_Object;
struct RsResource *__nvoc_pbase_RsResource;
struct RmResourceCommon *__nvoc_pbase_RmResourceCommon;
struct RmResource *__nvoc_pbase_RmResource;
NvBool (*__rmresAccessCallback__)(struct RmResource *, struct RsClient *, void *, RsAccessRight);
NvBool (*__rmresShareCallback__)(struct RmResource *, struct RsClient *, struct RsResourceRef *, RS_SHARE_POLICY *);
NV_STATUS (*__rmresGetMemInterMapParams__)(struct RmResource *, RMRES_MEM_INTER_MAP_PARAMS *);
NV_STATUS (*__rmresCheckMemInterUnmap__)(struct RmResource *, NvBool);
NV_STATUS (*__rmresGetMemoryMappingDescriptor__)(struct RmResource *, struct MEMORY_DESCRIPTOR **);
NV_STATUS (*__rmresControl_Prologue__)(struct RmResource *, struct CALL_CONTEXT *, struct RS_RES_CONTROL_PARAMS_INTERNAL *);
void (*__rmresControl_Epilogue__)(struct RmResource *, struct CALL_CONTEXT *, struct RS_RES_CONTROL_PARAMS_INTERNAL *);
NV_STATUS (*__rmresControl__)(struct RmResource *, struct CALL_CONTEXT *, struct RS_RES_CONTROL_PARAMS_INTERNAL *);
NV_STATUS (*__rmresUnmap__)(struct RmResource *, struct CALL_CONTEXT *, RsCpuMapping *);
NV_STATUS (*__rmresMapTo__)(struct RmResource *, RS_RES_MAP_TO_PARAMS *);
NvU32 (*__rmresGetRefCount__)(struct RmResource *);
NV_STATUS (*__rmresControlFilter__)(struct RmResource *, struct CALL_CONTEXT *, struct RS_RES_CONTROL_PARAMS_INTERNAL *);
void (*__rmresAddAdditionalDependants__)(struct RsClient *, struct RmResource *, RsResourceRef *);
NvBool (*__rmresCanCopy__)(struct RmResource *);
void (*__rmresPreDestruct__)(struct RmResource *);
NV_STATUS (*__rmresUnmapFrom__)(struct RmResource *, RS_RES_UNMAP_FROM_PARAMS *);
NV_STATUS (*__rmresControlLookup__)(struct RmResource *, struct RS_RES_CONTROL_PARAMS_INTERNAL *, const struct NVOC_EXPORTED_METHOD_DEF **);
NV_STATUS (*__rmresMap__)(struct RmResource *, struct CALL_CONTEXT *, RS_CPU_MAP_PARAMS *, RsCpuMapping *);
NvU32 rpcGpuInstance;
NvBool bRpcFree;
};
#ifndef __NVOC_CLASS_RmResource_TYPEDEF__
#define __NVOC_CLASS_RmResource_TYPEDEF__
typedef struct RmResource RmResource;
#endif /* __NVOC_CLASS_RmResource_TYPEDEF__ */
#ifndef __nvoc_class_id_RmResource
#define __nvoc_class_id_RmResource 0x03610d
#endif /* __nvoc_class_id_RmResource */
extern const struct NVOC_CLASS_DEF __nvoc_class_def_RmResource;
#define __staticCast_RmResource(pThis) \
((pThis)->__nvoc_pbase_RmResource)
#ifdef __nvoc_resource_h_disabled
#define __dynamicCast_RmResource(pThis) ((RmResource*)NULL)
#else //__nvoc_resource_h_disabled
#define __dynamicCast_RmResource(pThis) \
((RmResource*)__nvoc_dynamicCast(staticCast((pThis), Dynamic), classInfo(RmResource)))
#endif //__nvoc_resource_h_disabled
NV_STATUS __nvoc_objCreateDynamic_RmResource(RmResource**, Dynamic*, NvU32, va_list);
NV_STATUS __nvoc_objCreate_RmResource(RmResource**, Dynamic*, NvU32, struct CALL_CONTEXT * arg_pCallContext, struct RS_RES_ALLOC_PARAMS_INTERNAL * arg_pParams);
#define __objCreate_RmResource(ppNewObj, pParent, createFlags, arg_pCallContext, arg_pParams) \
__nvoc_objCreate_RmResource((ppNewObj), staticCast((pParent), Dynamic), (createFlags), arg_pCallContext, arg_pParams)
#define rmresAccessCallback(pResource, pInvokingClient, pAllocParams, accessRight) rmresAccessCallback_DISPATCH(pResource, pInvokingClient, pAllocParams, accessRight)
#define rmresShareCallback(pResource, pInvokingClient, pParentRef, pSharePolicy) rmresShareCallback_DISPATCH(pResource, pInvokingClient, pParentRef, pSharePolicy)
#define rmresGetMemInterMapParams(pRmResource, pParams) rmresGetMemInterMapParams_DISPATCH(pRmResource, pParams)
#define rmresCheckMemInterUnmap(pRmResource, bSubdeviceHandleProvided) rmresCheckMemInterUnmap_DISPATCH(pRmResource, bSubdeviceHandleProvided)
#define rmresGetMemoryMappingDescriptor(pRmResource, ppMemDesc) rmresGetMemoryMappingDescriptor_DISPATCH(pRmResource, ppMemDesc)
#define rmresControl_Prologue(pResource, pCallContext, pParams) rmresControl_Prologue_DISPATCH(pResource, pCallContext, pParams)
#define rmresControl_Epilogue(pResource, pCallContext, pParams) rmresControl_Epilogue_DISPATCH(pResource, pCallContext, pParams)
#define rmresControl(pResource, pCallContext, pParams) rmresControl_DISPATCH(pResource, pCallContext, pParams)
#define rmresUnmap(pResource, pCallContext, pCpuMapping) rmresUnmap_DISPATCH(pResource, pCallContext, pCpuMapping)
#define rmresMapTo(pResource, pParams) rmresMapTo_DISPATCH(pResource, pParams)
#define rmresGetRefCount(pResource) rmresGetRefCount_DISPATCH(pResource)
#define rmresControlFilter(pResource, pCallContext, pParams) rmresControlFilter_DISPATCH(pResource, pCallContext, pParams)
#define rmresAddAdditionalDependants(pClient, pResource, pReference) rmresAddAdditionalDependants_DISPATCH(pClient, pResource, pReference)
#define rmresCanCopy(pResource) rmresCanCopy_DISPATCH(pResource)
#define rmresPreDestruct(pResource) rmresPreDestruct_DISPATCH(pResource)
#define rmresUnmapFrom(pResource, pParams) rmresUnmapFrom_DISPATCH(pResource, pParams)
#define rmresControlLookup(pResource, pParams, ppEntry) rmresControlLookup_DISPATCH(pResource, pParams, ppEntry)
#define rmresMap(pResource, pCallContext, pParams, pCpuMapping) rmresMap_DISPATCH(pResource, pCallContext, pParams, pCpuMapping)
NvBool rmresAccessCallback_IMPL(struct RmResource *pResource, struct RsClient *pInvokingClient, void *pAllocParams, RsAccessRight accessRight);
static inline NvBool rmresAccessCallback_DISPATCH(struct RmResource *pResource, struct RsClient *pInvokingClient, void *pAllocParams, RsAccessRight accessRight) {
return pResource->__rmresAccessCallback__(pResource, pInvokingClient, pAllocParams, accessRight);
}
NvBool rmresShareCallback_IMPL(struct RmResource *pResource, struct RsClient *pInvokingClient, struct RsResourceRef *pParentRef, RS_SHARE_POLICY *pSharePolicy);
static inline NvBool rmresShareCallback_DISPATCH(struct RmResource *pResource, struct RsClient *pInvokingClient, struct RsResourceRef *pParentRef, RS_SHARE_POLICY *pSharePolicy) {
return pResource->__rmresShareCallback__(pResource, pInvokingClient, pParentRef, pSharePolicy);
}
NV_STATUS rmresGetMemInterMapParams_IMPL(struct RmResource *pRmResource, RMRES_MEM_INTER_MAP_PARAMS *pParams);
static inline NV_STATUS rmresGetMemInterMapParams_DISPATCH(struct RmResource *pRmResource, RMRES_MEM_INTER_MAP_PARAMS *pParams) {
return pRmResource->__rmresGetMemInterMapParams__(pRmResource, pParams);
}
NV_STATUS rmresCheckMemInterUnmap_IMPL(struct RmResource *pRmResource, NvBool bSubdeviceHandleProvided);
static inline NV_STATUS rmresCheckMemInterUnmap_DISPATCH(struct RmResource *pRmResource, NvBool bSubdeviceHandleProvided) {
return pRmResource->__rmresCheckMemInterUnmap__(pRmResource, bSubdeviceHandleProvided);
}
NV_STATUS rmresGetMemoryMappingDescriptor_IMPL(struct RmResource *pRmResource, struct MEMORY_DESCRIPTOR **ppMemDesc);
static inline NV_STATUS rmresGetMemoryMappingDescriptor_DISPATCH(struct RmResource *pRmResource, struct MEMORY_DESCRIPTOR **ppMemDesc) {
return pRmResource->__rmresGetMemoryMappingDescriptor__(pRmResource, ppMemDesc);
}
NV_STATUS rmresControl_Prologue_IMPL(struct RmResource *pResource, struct CALL_CONTEXT *pCallContext, struct RS_RES_CONTROL_PARAMS_INTERNAL *pParams);
static inline NV_STATUS rmresControl_Prologue_DISPATCH(struct RmResource *pResource, struct CALL_CONTEXT *pCallContext, struct RS_RES_CONTROL_PARAMS_INTERNAL *pParams) {
return pResource->__rmresControl_Prologue__(pResource, pCallContext, pParams);
}
void rmresControl_Epilogue_IMPL(struct RmResource *pResource, struct CALL_CONTEXT *pCallContext, struct RS_RES_CONTROL_PARAMS_INTERNAL *pParams);
static inline void rmresControl_Epilogue_DISPATCH(struct RmResource *pResource, struct CALL_CONTEXT *pCallContext, struct RS_RES_CONTROL_PARAMS_INTERNAL *pParams) {
pResource->__rmresControl_Epilogue__(pResource, pCallContext, pParams);
}
static inline NV_STATUS rmresControl_DISPATCH(struct RmResource *pResource, struct CALL_CONTEXT *pCallContext, struct RS_RES_CONTROL_PARAMS_INTERNAL *pParams) {
return pResource->__rmresControl__(pResource, pCallContext, pParams);
}
static inline NV_STATUS rmresUnmap_DISPATCH(struct RmResource *pResource, struct CALL_CONTEXT *pCallContext, RsCpuMapping *pCpuMapping) {
return pResource->__rmresUnmap__(pResource, pCallContext, pCpuMapping);
}
static inline NV_STATUS rmresMapTo_DISPATCH(struct RmResource *pResource, RS_RES_MAP_TO_PARAMS *pParams) {
return pResource->__rmresMapTo__(pResource, pParams);
}
static inline NvU32 rmresGetRefCount_DISPATCH(struct RmResource *pResource) {
return pResource->__rmresGetRefCount__(pResource);
}
static inline NV_STATUS rmresControlFilter_DISPATCH(struct RmResource *pResource, struct CALL_CONTEXT *pCallContext, struct RS_RES_CONTROL_PARAMS_INTERNAL *pParams) {
return pResource->__rmresControlFilter__(pResource, pCallContext, pParams);
}
static inline void rmresAddAdditionalDependants_DISPATCH(struct RsClient *pClient, struct RmResource *pResource, RsResourceRef *pReference) {
pResource->__rmresAddAdditionalDependants__(pClient, pResource, pReference);
}
static inline NvBool rmresCanCopy_DISPATCH(struct RmResource *pResource) {
return pResource->__rmresCanCopy__(pResource);
}
static inline void rmresPreDestruct_DISPATCH(struct RmResource *pResource) {
pResource->__rmresPreDestruct__(pResource);
}
static inline NV_STATUS rmresUnmapFrom_DISPATCH(struct RmResource *pResource, RS_RES_UNMAP_FROM_PARAMS *pParams) {
return pResource->__rmresUnmapFrom__(pResource, pParams);
}
static inline NV_STATUS rmresControlLookup_DISPATCH(struct RmResource *pResource, struct RS_RES_CONTROL_PARAMS_INTERNAL *pParams, const struct NVOC_EXPORTED_METHOD_DEF **ppEntry) {
return pResource->__rmresControlLookup__(pResource, pParams, ppEntry);
}
static inline NV_STATUS rmresMap_DISPATCH(struct RmResource *pResource, struct CALL_CONTEXT *pCallContext, RS_CPU_MAP_PARAMS *pParams, RsCpuMapping *pCpuMapping) {
return pResource->__rmresMap__(pResource, pCallContext, pParams, pCpuMapping);
}
NV_STATUS rmresConstruct_IMPL(struct RmResource *arg_pResource, struct CALL_CONTEXT *arg_pCallContext, struct RS_RES_ALLOC_PARAMS_INTERNAL *arg_pParams);
#define __nvoc_rmresConstruct(arg_pResource, arg_pCallContext, arg_pParams) rmresConstruct_IMPL(arg_pResource, arg_pCallContext, arg_pParams)
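/*
 * Illustrative note (not part of the generated header): calls made through the
 * rmres* macros above dispatch via the per-object function pointers that
 * __nvoc_init_funcTable_RmResource() installs. pRes is assumed to point at a
 * constructed RmResource (or one of its subclasses).
 */
#if 0
static void exampleQuery(struct RmResource *pRes)
{
    NvU32  refCount = rmresGetRefCount(pRes); // -> pRes->__rmresGetRefCount__()
    NvBool bCanCopy = rmresCanCopy(pRes);     // -> pRes->__rmresCanCopy__()
    (void)refCount;
    (void)bCanCopy;
}
#endif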
#undef PRIVATE_FIELD
#endif // _RESOURCE_H_
#ifdef __cplusplus
} // extern "C"
#endif
#endif // _G_RESOURCE_NVOC_H_


@@ -0,0 +1,418 @@
#ifndef _G_RESSERV_NVOC_H_
#define _G_RESSERV_NVOC_H_
#include "nvoc/runtime.h"
#ifdef __cplusplus
extern "C" {
#endif
/*
* SPDX-FileCopyrightText: Copyright (c) 2015-2021 NVIDIA CORPORATION & AFFILIATES. All rights reserved.
* SPDX-License-Identifier: MIT
*
* Permission is hereby granted, free of charge, to any person obtaining a
* copy of this software and associated documentation files (the "Software"),
* to deal in the Software without restriction, including without limitation
* the rights to use, copy, modify, merge, publish, distribute, sublicense,
* and/or sell copies of the Software, and to permit persons to whom the
* Software is furnished to do so, subject to the following conditions:
*
* The above copyright notice and this permission notice shall be included in
* all copies or substantial portions of the Software.
*
* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
* IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
* FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL
* THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
* LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
* FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER
* DEALINGS IN THE SOFTWARE.
*/
#include "g_resserv_nvoc.h"
#ifndef _RESSERV_H_
#define _RESSERV_H_
#include "nvoc/object.h"
#include "containers/list.h"
#include "containers/map.h"
#include "containers/multimap.h"
#include "nvtypes.h"
#include "nvstatus.h"
#include "nvos.h"
#include "nvsecurityinfo.h"
#include "rs_access.h"
#if LOCK_VAL_ENABLED
#include "lockval/lockval.h"
#endif
#ifdef __cplusplus
extern "C" {
#endif
#if (RS_STANDALONE)
#include <stddef.h>
#ifndef NV_PRINTF
extern int g_debugLevel;
#define NV_PRINTF(level, format, ...) if (g_debugLevel) { printf(format, ##__VA_ARGS__); }
#endif
#include "utils/nvprintf.h"
#endif
//
// Forward declarations
//
typedef struct RsServer RsServer;
typedef struct RsDomain RsDomain;
typedef struct CLIENT_ENTRY CLIENT_ENTRY;
typedef struct RsResourceDep RsResourceDep;
typedef struct RsResourceRef RsResourceRef;
typedef struct RsInterMapping RsInterMapping;
typedef struct RsCpuMapping RsCpuMapping;
// RS-TODO INTERNAL and EXTERNAL params should be different structures
typedef struct RS_CLIENT_FREE_PARAMS_INTERNAL RS_CLIENT_FREE_PARAMS_INTERNAL;
typedef struct RS_CLIENT_FREE_PARAMS_INTERNAL RS_CLIENT_FREE_PARAMS;
typedef struct RS_RES_ALLOC_PARAMS_INTERNAL RS_RES_ALLOC_PARAMS_INTERNAL;
typedef struct RS_RES_ALLOC_PARAMS_INTERNAL RS_RES_ALLOC_PARAMS;
typedef struct RS_RES_DUP_PARAMS_INTERNAL RS_RES_DUP_PARAMS_INTERNAL;
typedef struct RS_RES_DUP_PARAMS_INTERNAL RS_RES_DUP_PARAMS;
typedef struct RS_RES_SHARE_PARAMS_INTERNAL RS_RES_SHARE_PARAMS_INTERNAL;
typedef struct RS_RES_SHARE_PARAMS_INTERNAL RS_RES_SHARE_PARAMS;
typedef struct RS_RES_ALLOC_PARAMS_INTERNAL RS_CLIENT_ALLOC_PARAMS_INTERNAL;
typedef struct RS_RES_ALLOC_PARAMS_INTERNAL RS_CLIENT_ALLOC_PARAMS;
typedef struct RS_RES_FREE_PARAMS_INTERNAL RS_RES_FREE_PARAMS_INTERNAL;
typedef struct RS_RES_FREE_PARAMS_INTERNAL RS_RES_FREE_PARAMS;
typedef struct RS_RES_CONTROL_PARAMS_INTERNAL RS_RES_CONTROL_PARAMS_INTERNAL;
typedef struct RS_RES_CONTROL_PARAMS_INTERNAL RS_RES_CONTROL_PARAMS;
typedef struct RS_RES_CONTROL_PARAMS_INTERNAL RS_LEGACY_CONTROL_PARAMS;
typedef struct RS_LEGACY_ALLOC_PARAMS RS_LEGACY_ALLOC_PARAMS;
typedef struct RS_LEGACY_FREE_PARAMS RS_LEGACY_FREE_PARAMS;
typedef struct RS_CPU_MAP_PARAMS RS_CPU_MAP_PARAMS;
typedef struct RS_CPU_UNMAP_PARAMS RS_CPU_UNMAP_PARAMS;
typedef struct RS_INTER_MAP_PARAMS RS_INTER_MAP_PARAMS;
typedef struct RS_INTER_UNMAP_PARAMS RS_INTER_UNMAP_PARAMS;
// Forward declarations for structs defined by user
typedef struct RS_RES_MAP_TO_PARAMS RS_RES_MAP_TO_PARAMS;
typedef struct RS_RES_UNMAP_FROM_PARAMS RS_RES_UNMAP_FROM_PARAMS;
typedef struct RS_INTER_MAP_PRIVATE RS_INTER_MAP_PRIVATE;
typedef struct RS_INTER_UNMAP_PRIVATE RS_INTER_UNMAP_PRIVATE;
typedef struct RS_CPU_MAPPING_PRIVATE RS_CPU_MAPPING_PRIVATE;
typedef struct RS_CPU_MAPPING_BACK_REF RS_CPU_MAPPING_BACK_REF;
typedef struct RS_INTER_MAPPING_BACK_REF RS_INTER_MAPPING_BACK_REF;
typedef struct RS_FREE_STACK RS_FREE_STACK;
typedef struct CALL_CONTEXT CALL_CONTEXT;
typedef struct ACCESS_CONTROL ACCESS_CONTROL;
typedef struct RS_ITERATOR RS_ITERATOR;
typedef struct RS_ORDERED_ITERATOR RS_ORDERED_ITERATOR;
typedef struct RS_SHARE_ITERATOR RS_SHARE_ITERATOR;
typedef struct API_STATE API_STATE;
typedef struct RS_LOCK_INFO RS_LOCK_INFO;
typedef struct RS_CONTROL_COOKIE RS_CONTROL_COOKIE;
typedef NV_STATUS RsCtrlFunc(struct RS_RES_CONTROL_PARAMS_INTERNAL*);
struct RsClient;
#ifndef __NVOC_CLASS_RsClient_TYPEDEF__
#define __NVOC_CLASS_RsClient_TYPEDEF__
typedef struct RsClient RsClient;
#endif /* __NVOC_CLASS_RsClient_TYPEDEF__ */
#ifndef __nvoc_class_id_RsClient
#define __nvoc_class_id_RsClient 0x8f87e5
#endif /* __nvoc_class_id_RsClient */
struct RsResource;
#ifndef __NVOC_CLASS_RsResource_TYPEDEF__
#define __NVOC_CLASS_RsResource_TYPEDEF__
typedef struct RsResource RsResource;
#endif /* __NVOC_CLASS_RsResource_TYPEDEF__ */
#ifndef __nvoc_class_id_RsResource
#define __nvoc_class_id_RsResource 0xd551cb
#endif /* __nvoc_class_id_RsResource */
struct RsShared;
#ifndef __NVOC_CLASS_RsShared_TYPEDEF__
#define __NVOC_CLASS_RsShared_TYPEDEF__
typedef struct RsShared RsShared;
#endif /* __NVOC_CLASS_RsShared_TYPEDEF__ */
#ifndef __nvoc_class_id_RsShared
#define __nvoc_class_id_RsShared 0x830542
#endif /* __nvoc_class_id_RsShared */
MAKE_LIST(RsResourceRefList, RsResourceRef*);
MAKE_LIST(RsResourceList, RsResource*);
MAKE_LIST(RsHandleList, NvHandle);
MAKE_LIST(RsClientList, CLIENT_ENTRY*);
MAKE_LIST(RsShareList, RS_SHARE_POLICY);
MAKE_MULTIMAP(RsIndex, RsResourceRef*);
typedef NV_STATUS (*CtrlImpl_t)(struct RsClient*, struct RsResource*, void*);
typedef void *PUID_TOKEN;
//
// Defines
//
/// Domain handles must start at this base value
#define RS_DOMAIN_HANDLE_BASE 0xD0D00000
/// Client handles must start at this base value
#define RS_CLIENT_HANDLE_BASE 0xC1D00000
///
/// Internal client handles must start at either of these two base values
///
#define RS_CLIENT_INTERNAL_HANDLE_BASE 0xC1E00000
#define RS_CLIENT_INTERNAL_HANDLE_BASE_EX 0xC1F00000
//
// Print a warning if any client's resource count exceeds this
// threshold. Unless this was intentional, this is likely a client bug.
//
#define RS_CLIENT_RESOURCE_WARNING_THRESHOLD 100000
/// 0xFFFF max client handles.
#define RS_CLIENT_HANDLE_BUCKET_COUNT 0x400 // 1024
#define RS_CLIENT_HANDLE_BUCKET_MASK 0x3FF
/// The default maximum number of domains a resource server can allocate
#define RS_MAX_DOMAINS_DEFAULT 4096
/// The maximum length of a line of ancestry for resource references
#define RS_MAX_RESOURCE_DEPTH 6
/// RS_LOCK_FLAGS
#define RS_LOCK_FLAGS_NO_TOP_LOCK NVBIT(0)
#define RS_LOCK_FLAGS_NO_CLIENT_LOCK NVBIT(1)
#define RS_LOCK_FLAGS_NO_CUSTOM_LOCK_1 NVBIT(2)
#define RS_LOCK_FLAGS_NO_CUSTOM_LOCK_2 NVBIT(3)
#define RS_LOCK_FLAGS_NO_CUSTOM_LOCK_3 NVBIT(4)
#define RS_LOCK_FLAGS_NO_DEPENDANT_SESSION_LOCK NVBIT(5)
#define RS_LOCK_FLAGS_FREE_SESSION_LOCK NVBIT(6)
/// RS_LOCK_STATE
#define RS_LOCK_STATE_TOP_LOCK_ACQUIRED NVBIT(0)
#define RS_LOCK_STATE_CUSTOM_LOCK_1_ACQUIRED NVBIT(1)
#define RS_LOCK_STATE_CUSTOM_LOCK_2_ACQUIRED NVBIT(2)
#define RS_LOCK_STATE_CUSTOM_LOCK_3_ACQUIRED NVBIT(3)
#define RS_LOCK_STATE_ALLOW_RECURSIVE_RES_LOCK NVBIT(6)
#define RS_LOCK_STATE_CLIENT_LOCK_ACQUIRED NVBIT(7)
#define RS_LOCK_STATE_SESSION_LOCK_ACQUIRED NVBIT(8)
/// RS_LOCK_RELEASE
#define RS_LOCK_RELEASE_TOP_LOCK NVBIT(0)
#define RS_LOCK_RELEASE_CLIENT_LOCK NVBIT(1)
#define RS_LOCK_RELEASE_CUSTOM_LOCK_1 NVBIT(2)
#define RS_LOCK_RELEASE_CUSTOM_LOCK_2 NVBIT(3)
#define RS_LOCK_RELEASE_CUSTOM_LOCK_3 NVBIT(4)
#define RS_LOCK_RELEASE_SESSION_LOCK NVBIT(5)
/// API enumerations used for locking knobs
typedef enum
{
RS_LOCK_CLIENT =0,
RS_LOCK_TOP =1,
RS_LOCK_RESOURCE =2,
RS_LOCK_CUSTOM_3 =3,
} RS_LOCK_ENUM;
typedef enum
{
RS_API_ALLOC_CLIENT = 0,
RS_API_ALLOC_RESOURCE = 1,
RS_API_FREE_RESOURCE = 2,
RS_API_MAP = 3,
RS_API_UNMAP = 4,
RS_API_INTER_MAP = 5,
RS_API_INTER_UNMAP = 6,
RS_API_COPY = 7,
RS_API_SHARE = 8,
RS_API_CTRL = 9,
RS_API_MAX,
} RS_API_ENUM;
NV_STATUS indexAdd(RsIndex *pIndex, NvU32 index, RsResourceRef *pResourceRef);
NV_STATUS indexRemove(RsIndex *pIndex, NvU32 index, RsResourceRef *pResourceRef);
//
// Externs
//
/**
* NVOC wrapper for constructing resources of a given type
*
* @param[in] pAllocator Allocator for the resource object
* @param[in] pCallContext Caller context passed to resource constructor
* @param[inout] pParams Resource allocation parameters
* @param[out] ppResource New resource object
*/
extern NV_STATUS resservResourceFactory(PORT_MEM_ALLOCATOR *pAllocator, CALL_CONTEXT *pCallContext,
RS_RES_ALLOC_PARAMS_INTERNAL *pParams, struct RsResource **ppResource);
/**
* NVOC wrapper for constructing an application-specific client.
*/
extern NV_STATUS resservClientFactory(PORT_MEM_ALLOCATOR *pAllocator, RS_RES_ALLOC_PARAMS_INTERNAL *pParams, struct RsClient **ppRsClient);
/**
* Validate the UID/PID security token of the current user against a client's security token.
*
* This will be obsolete after phase 1.
*
* @param[in] pClientToken
* @param[in] pCurrentToken
*
* @returns NV_OK if the current user's security token matches the client's security token
*/
extern NV_STATUS osValidateClientTokens(PSECURITY_TOKEN pClientToken, PSECURITY_TOKEN pCurrentToken);
/**
* Get the security token of the current user for the UID/PID security model.
*
* This will be obsolete after phase 1.
*/
extern PSECURITY_TOKEN osGetSecurityToken(void);
/**
* TLS entry id for call contexts. All servers will use the same id.
*/
#define TLS_ENTRY_ID_RESSERV_CALL_CONTEXT TLS_ENTRY_ID_RESSERV_1
//
// Structs
//
struct RS_FREE_STACK
{
RS_FREE_STACK *pPrev;
RsResourceRef *pResourceRef;
};
struct CALL_CONTEXT
{
RsServer *pServer; ///< The resource server instance that owns the client
struct RsClient *pClient; ///< Client that was the target of the call
RsResourceRef *pResourceRef; ///< Reference that was the target of the call
RsResourceRef *pContextRef; ///< Reference that may be used to provide more context [optional]
RS_LOCK_INFO *pLockInfo; ///< Saved locking context information for the call
API_SECURITY_INFO secInfo;
RS_RES_CONTROL_PARAMS_INTERNAL *pControlParams; ///< parameters of the call [optional]
};
typedef enum {
RS_ITERATE_CHILDREN, ///< Iterate over a RsResourceRef's children
RS_ITERATE_DESCENDANTS, ///< Iterate over a RsResourceRef's children, grandchildren, etc. (unspecified order)
RS_ITERATE_CACHED, ///< Iterate over a RsResourceRef's cache
RS_ITERATE_DEPENDANTS, ///< Iterate over a RsResourceRef's dependants
} RS_ITER_TYPE;
typedef enum
{
LOCK_ACCESS_READ,
LOCK_ACCESS_WRITE,
} LOCK_ACCESS_TYPE;
/**
* Access control information. This information will be filled out by the user
* of the Resource Server when allocating a client or resource.
*/
struct ACCESS_CONTROL
{
/**
* The privilege level of this access control
*/
RS_PRIV_LEVEL privilegeLevel;
/**
* Opaque pointer for storing a security token
*/
PSECURITY_TOKEN pSecurityToken;
};
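/*
 * Illustrative sketch only -- not part of the generated header and kept under
 * #if 0 so it never affects the build.  It fills out ACCESS_CONTROL for an
 * ordinary user-level caller; RS_PRIV_LEVEL_USER is assumed to be one of the
 * RS_PRIV_LEVEL enumerators, and pToken would come from osGetSecurityToken()
 * or an equivalent platform call.
 */
#if 0
static void exampleFillAccessControl(ACCESS_CONTROL *pAccess, PSECURITY_TOKEN pToken)
{
    pAccess->privilegeLevel = RS_PRIV_LEVEL_USER; /* assumed enumerator */
    pAccess->pSecurityToken = pToken;
}
#endif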
//
// Utility wrappers for locking validator
//
#if LOCK_VAL_ENABLED
#define RS_LOCK_VALIDATOR_INIT(lock, lockClass, inst) \
do { NV_ASSERT_OK(lockvalLockInit((lock), (lockClass), (inst))); } while(0)
#define RS_RWLOCK_ACQUIRE_READ(lock, validator) do \
{ \
NV_ASSERT_OK(lockvalPreAcquire((validator))); \
portSyncRwLockAcquireRead((lock)); \
lockvalPostAcquire((validator), LOCK_VAL_RLOCK); \
} while(0)
#define RS_RWLOCK_ACQUIRE_WRITE(lock, validator) do \
{ \
NV_ASSERT_OK(lockvalPreAcquire((validator))); \
portSyncRwLockAcquireWrite((lock)); \
lockvalPostAcquire((validator), LOCK_VAL_WLOCK); \
} while(0)
#define RS_RWLOCK_RELEASE_READ_EXT(lock, validator, bOutOfOrder) do \
{ \
void *pLockValTlsEntry, *pReleasedLockNode; \
if (bOutOfOrder) \
NV_ASSERT_OK(lockvalReleaseOutOfOrder((validator), LOCK_VAL_RLOCK, &pLockValTlsEntry, &pReleasedLockNode)); \
else \
NV_ASSERT_OK(lockvalRelease((validator), LOCK_VAL_RLOCK, &pLockValTlsEntry, &pReleasedLockNode)); \
portSyncRwLockReleaseRead((lock)); \
lockvalMemoryRelease(pLockValTlsEntry, pReleasedLockNode); \
} while(0)
#define RS_RWLOCK_RELEASE_WRITE_EXT(lock, validator, bOutOfOrder) do \
{ \
void *pLockValTlsEntry, *pReleasedLockNode; \
if (bOutOfOrder) \
NV_ASSERT_OK(lockvalReleaseOutOfOrder((validator), LOCK_VAL_WLOCK, &pLockValTlsEntry, &pReleasedLockNode)); \
else \
NV_ASSERT_OK(lockvalRelease((validator), LOCK_VAL_WLOCK, &pLockValTlsEntry, &pReleasedLockNode)); \
portSyncRwLockReleaseWrite((lock)); \
lockvalMemoryRelease(pLockValTlsEntry, pReleasedLockNode); \
} while(0)
#else
#define RS_LOCK_VALIDATOR_INIT(lock, lockClass, inst)
#define RS_RWLOCK_ACQUIRE_READ(lock, validator) do { portSyncRwLockAcquireRead((lock)); } while(0)
#define RS_RWLOCK_ACQUIRE_WRITE(lock, validator) do { portSyncRwLockAcquireWrite((lock)); } while(0)
#define RS_RWLOCK_RELEASE_READ_EXT(lock, validator, bOutOfOrder) do { portSyncRwLockReleaseRead((lock)); } while(0)
#define RS_RWLOCK_RELEASE_WRITE_EXT(lock, validator, bOutOfOrder) do { portSyncRwLockReleaseWrite((lock)); } while(0)
#endif
#define RS_RWLOCK_RELEASE_READ(lock, validator) RS_RWLOCK_RELEASE_READ_EXT(lock, validator, NV_FALSE)
#define RS_RWLOCK_RELEASE_WRITE(lock, validator) RS_RWLOCK_RELEASE_WRITE_EXT(lock, validator, NV_FALSE)
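/*
 * Illustrative sketch only -- not part of the generated header and kept under
 * #if 0 so it never affects the build.  With LOCK_VAL_ENABLED == 0 the
 * wrappers above reduce to plain portSyncRwLock calls and the validator
 * argument is ignored, so NULL is passed here; pLock is a hypothetical
 * PORT_RWLOCK owned and created by the caller.
 */
#if 0
static void exampleReadSideCriticalSection(PORT_RWLOCK *pLock)
{
    RS_RWLOCK_ACQUIRE_READ(pLock, NULL);   /* portSyncRwLockAcquireRead(pLock) */
    /* ... read shared resource-server state here ... */
    RS_RWLOCK_RELEASE_READ(pLock, NULL);   /* portSyncRwLockReleaseRead(pLock) */
}
#endif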
#ifdef __cplusplus
}
#endif
#endif
#ifdef __cplusplus
} // extern "C"
#endif
#endif // _G_RESSERV_NVOC_H_


@@ -0,0 +1,695 @@
// This file is automatically generated by rmconfig - DO NOT EDIT!
//
// private rmconfig generated #defines such as IsG84(),
// RMCFG_FEATURE_ENABLED_STATUS(), etc.
//
// Only for use within resman.
//
// Profile: devel-soc-disp-dce-client
// Template: templates/gt_rmconfig_private.h
//
// Chips: T234D
//
#ifndef _G_RMCFG_PRIVATE_H_
#define _G_RMCFG_PRIVATE_H_
//
// CHIP identity macros such as IsGK104()
//
// GF10X
#define IsGF100(pGpu) ((0) && (pGpu))
#define IsGF100orBetter(pGpu) ((0) && (pGpu))
#define IsGF100B(pGpu) ((0) && (pGpu))
#define IsGF100BorBetter(pGpu) ((0) && (pGpu))
#define IsGF104(pGpu) ((0) && (pGpu))
#define IsGF104orBetter(pGpu) ((0) && (pGpu))
#define IsGF104B(pGpu) ((0) && (pGpu))
#define IsGF104BorBetter(pGpu) ((0) && (pGpu))
#define IsGF106(pGpu) ((0) && (pGpu))
#define IsGF106orBetter(pGpu) ((0) && (pGpu))
#define IsGF106B(pGpu) ((0) && (pGpu))
#define IsGF106BorBetter(pGpu) ((0) && (pGpu))
#define IsGF108(pGpu) ((0) && (pGpu))
#define IsGF108orBetter(pGpu) ((0) && (pGpu))
// Any GF10X chip?
#define IsGF10X(pGpu) (0 && (pGpu))
#define IsGF10XorBetter(pGpu) (0 && (pGpu))
// GF11X
#define IsGF110D(pGpu) ((0) && (pGpu))
#define IsGF110DorBetter(pGpu) ((0) && (pGpu))
#define IsGF110(pGpu) ((0) && (pGpu))
#define IsGF110orBetter(pGpu) ((0) && (pGpu))
#define IsGF117(pGpu) ((0) && (pGpu))
#define IsGF117orBetter(pGpu) ((0) && (pGpu))
#define IsGF117MaskRevA01(pGpu) ((0) && (pGpu))
#define IsGF118(pGpu) ((0) && (pGpu))
#define IsGF118orBetter(pGpu) ((0) && (pGpu))
#define IsGF119(pGpu) ((0) && (pGpu))
#define IsGF119orBetter(pGpu) ((0) && (pGpu))
#define IsGF119MaskRevA01(pGpu) ((0) && (pGpu))
// Any GF11X chip?
#define IsGF11X(pGpu) (0 && (pGpu))
#define IsGF11XorBetter(pGpu) (0 && (pGpu))
// GF10XF
#define IsGF110F(pGpu) ((0) && (pGpu))
#define IsGF110ForBetter(pGpu) ((0) && (pGpu))
#define IsGF110F2(pGpu) ((0) && (pGpu))
#define IsGF110F2orBetter(pGpu) ((0) && (pGpu))
#define IsGF110F3(pGpu) ((0) && (pGpu))
#define IsGF110F3orBetter(pGpu) ((0) && (pGpu))
// Any GF10XF chip?
#define IsGF10XF(pGpu) (0 && (pGpu))
#define IsGF10XForBetter(pGpu) (0 && (pGpu))
// GK10X
#define IsGK104(pGpu) ((0) && (pGpu))
#define IsGK104orBetter(pGpu) ((0) && (pGpu))
#define IsGK104MaskRevA01(pGpu) ((0) && (pGpu))
#define IsGK106(pGpu) ((0) && (pGpu))
#define IsGK106orBetter(pGpu) ((0) && (pGpu))
#define IsGK107(pGpu) ((0) && (pGpu))
#define IsGK107orBetter(pGpu) ((0) && (pGpu))
#define IsGK107MaskRevA01(pGpu) ((0) && (pGpu))
#define IsGK20A(pGpu) ((0) && (pGpu))
#define IsGK20AorBetter(pGpu) ((0) && (pGpu))
// Any GK10X chip?
#define IsGK10X(pGpu) (0 && (pGpu))
#define IsGK10XorBetter(pGpu) (0 && (pGpu))
// GK11X
#define IsGK110(pGpu) ((0) && (pGpu))
#define IsGK110orBetter(pGpu) ((0) && (pGpu))
#define IsGK110B(pGpu) ((0) && (pGpu))
#define IsGK110BorBetter(pGpu) ((0) && (pGpu))
#define IsGK110C(pGpu) ((0) && (pGpu))
#define IsGK110CorBetter(pGpu) ((0) && (pGpu))
// Any GK11X chip?
#define IsGK11X(pGpu) (0 && (pGpu))
#define IsGK11XorBetter(pGpu) (0 && (pGpu))
// GK20X
#define IsGK208(pGpu) ((0) && (pGpu))
#define IsGK208orBetter(pGpu) ((0) && (pGpu))
#define IsGK208S(pGpu) ((0) && (pGpu))
#define IsGK208SorBetter(pGpu) ((0) && (pGpu))
// Any GK20X chip?
#define IsGK20X(pGpu) (0 && (pGpu))
#define IsGK20XorBetter(pGpu) (0 && (pGpu))
// GM10X
#define IsGM107(pGpu) ((0) && (pGpu))
#define IsGM107orBetter(pGpu) ((0) && (pGpu))
#define IsGM107MaskRevA01(pGpu) ((0) && (pGpu))
#define IsGM108(pGpu) ((0) && (pGpu))
#define IsGM108orBetter(pGpu) ((0) && (pGpu))
#define IsGM108MaskRevA01(pGpu) ((0) && (pGpu))
// Any GM10X chip?
#define IsGM10X(pGpu) (0 && (pGpu))
#define IsGM10XorBetter(pGpu) (0 && (pGpu))
// GM20X
#define IsGM200(pGpu) ((0) && (pGpu))
#define IsGM200orBetter(pGpu) ((0) && (pGpu))
#define IsGM204(pGpu) ((0) && (pGpu))
#define IsGM204orBetter(pGpu) ((0) && (pGpu))
#define IsGM206(pGpu) ((0) && (pGpu))
#define IsGM206orBetter(pGpu) ((0) && (pGpu))
// Any GM20X chip?
#define IsGM20X(pGpu) (0 && (pGpu))
#define IsGM20XorBetter(pGpu) (0 && (pGpu))
// GP10X
#define IsGP100(pGpu) ((0) && (pGpu))
#define IsGP100orBetter(pGpu) ((0) && (pGpu))
#define IsGP102(pGpu) ((0) && (pGpu))
#define IsGP102orBetter(pGpu) ((0) && (pGpu))
#define IsGP104(pGpu) ((0) && (pGpu))
#define IsGP104orBetter(pGpu) ((0) && (pGpu))
#define IsGP106(pGpu) ((0) && (pGpu))
#define IsGP106orBetter(pGpu) ((0) && (pGpu))
#define IsGP107(pGpu) ((0) && (pGpu))
#define IsGP107orBetter(pGpu) ((0) && (pGpu))
#define IsGP108(pGpu) ((0) && (pGpu))
#define IsGP108orBetter(pGpu) ((0) && (pGpu))
// Any GP10X chip?
#define IsGP10X(pGpu) (0 && (pGpu))
#define IsGP10XorBetter(pGpu) (0 && (pGpu))
// GV10X
#define IsGV100(pGpu) ((0) && (pGpu))
#define IsGV100orBetter(pGpu) ((0) && (pGpu))
// Any GV10X chip?
#define IsGV10X(pGpu) (0 && (pGpu))
#define IsGV10XorBetter(pGpu) (0 && (pGpu))
// GV11X
#define IsGV11B(pGpu) ((0) && (pGpu))
#define IsGV11BorBetter(pGpu) ((0) && (pGpu))
// Any GV11X chip?
#define IsGV11X(pGpu) (0 && (pGpu))
#define IsGV11XorBetter(pGpu) (0 && (pGpu))
// TU10X
#define IsTU102(pGpu) ((0) && (pGpu))
#define IsTU102orBetter(pGpu) ((0) && (pGpu))
#define IsTU104(pGpu) ((0) && (pGpu))
#define IsTU104orBetter(pGpu) ((0) && (pGpu))
#define IsTU106(pGpu) ((0) && (pGpu))
#define IsTU106orBetter(pGpu) ((0) && (pGpu))
#define IsTU116(pGpu) ((0) && (pGpu))
#define IsTU116orBetter(pGpu) ((0) && (pGpu))
#define IsTU117(pGpu) ((0) && (pGpu))
#define IsTU117orBetter(pGpu) ((0) && (pGpu))
// Any TU10X chip?
#define IsTU10X(pGpu) (0 && (pGpu))
#define IsTU10XorBetter(pGpu) (0 && (pGpu))
// GA10X
#define IsGA100(pGpu) ((0) && (pGpu))
#define IsGA100orBetter(pGpu) ((0) && (pGpu))
#define IsGA102(pGpu) ((0) && (pGpu))
#define IsGA102orBetter(pGpu) ((0) && (pGpu))
#define IsGA103(pGpu) ((0) && (pGpu))
#define IsGA103orBetter(pGpu) ((0) && (pGpu))
#define IsGA104(pGpu) ((0) && (pGpu))
#define IsGA104orBetter(pGpu) ((0) && (pGpu))
#define IsGA106(pGpu) ((0) && (pGpu))
#define IsGA106orBetter(pGpu) ((0) && (pGpu))
#define IsGA107(pGpu) ((0) && (pGpu))
#define IsGA107orBetter(pGpu) ((0) && (pGpu))
#define IsGA10B(pGpu) ((0) && (pGpu))
#define IsGA10BorBetter(pGpu) ((0) && (pGpu))
// Any GA10X chip?
#define IsGA10X(pGpu) (0 && (pGpu))
#define IsGA10XorBetter(pGpu) (0 && (pGpu))
// GA10XF
#define IsGA102F(pGpu) ((0) && (pGpu))
#define IsGA102ForBetter(pGpu) ((0) && (pGpu))
// Any GA10XF chip?
#define IsGA10XF(pGpu) (0 && (pGpu))
#define IsGA10XForBetter(pGpu) (0 && (pGpu))
// T12X
#define IsT001_FERMI_NOT_EXIST(pGpu) ((0) && (pGpu))
#define IsT001_FERMI_NOT_EXISTorBetter(pGpu) ((0) && (pGpu))
#define IsT124(pGpu) ((0) && (pGpu))
#define IsT124orBetter(pGpu) ((0) && (pGpu))
// Any T12X chip?
#define IsT12X(pGpu) (0 && (pGpu))
#define IsT12XorBetter(pGpu) (0 && (pGpu))
// T13X
#define IsT132(pGpu) ((0) && (pGpu))
#define IsT132orBetter(pGpu) ((0) && (pGpu))
// Any T13X chip?
#define IsT13X(pGpu) (0 && (pGpu))
#define IsT13XorBetter(pGpu) (0 && (pGpu))
// T21X
#define IsT210(pGpu) ((0) && (pGpu))
#define IsT210orBetter(pGpu) ((0) && (pGpu))
// Any T21X chip?
#define IsT21X(pGpu) (0 && (pGpu))
#define IsT21XorBetter(pGpu) (0 && (pGpu))
// T18X
#define IsT186(pGpu) ((0) && (pGpu))
#define IsT186orBetter(pGpu) ((0) && (pGpu))
// Any T18X chip?
#define IsT18X(pGpu) (0 && (pGpu))
#define IsT18XorBetter(pGpu) (0 && (pGpu))
// T19X
#define IsT194(pGpu) ((0) && (pGpu))
#define IsT194orBetter(pGpu) ((0) && (pGpu))
#define IsT002_TURING_NOT_EXIST(pGpu) ((0) && (pGpu))
#define IsT002_TURING_NOT_EXISTorBetter(pGpu) ((0) && (pGpu))
// Any T19X chip?
#define IsT19X(pGpu) (0 && (pGpu))
#define IsT19XorBetter(pGpu) (0 && (pGpu))
// T23XG
#define IsT234(pGpu) ((0) && (pGpu))
#define IsT234orBetter(pGpu) ((0) && (pGpu))
// Any T23XG chip?
#define IsT23XG(pGpu) (0 && (pGpu))
#define IsT23XGorBetter(pGpu) (0 && (pGpu))
// T23XD
#define IsT234D(pGpu) ((1) && (pGpu))
#define IsT234DorBetter(pGpu) ((1) && (pGpu))
// Any T23XD chip?
#define IsT23XD(pGpu) (1 || (pGpu))
#define IsT23XDorBetter(pGpu) (1 || (pGpu))
// SIMS
#define IsAMODEL(pGpu) ((0) && (pGpu))
#define IsAMODELorBetter(pGpu) ((0) && (pGpu))
// Any SIMS chip?
#define IsSIMS(pGpu) (0 && (pGpu))
#define IsSIMSorBetter(pGpu) (0 && (pGpu))
// Any CLASSIC_GPUS chip?
#define IsCLASSIC_GPUS(pGpu) (0 && (pGpu))
#define IsCLASSIC_GPUSorBetter(pGpu) (0 && (pGpu))
// Any dFERMI chip?
#define IsdFERMI(pGpu) (0 && (pGpu))
#define IsdFERMIorBetter(pGpu) (0 && (pGpu))
// Any FERMI chip?
#define IsFERMI(pGpu) (IsFERMI_CLASSIC_GPUS(pGpu) || IsFERMI_TEGRA_BIG_GPUS(pGpu))
#define IsFERMIorBetter(pGpu) (IsFERMI_CLASSIC_GPUSorBetter(pGpu) || IsFERMI_TEGRA_BIG_GPUSorBetter(pGpu))
// Any FERMI_CLASSIC_GPUS chip?
#define IsFERMI_CLASSIC_GPUS(pGpu) (0 && (pGpu))
#define IsFERMI_CLASSIC_GPUSorBetter(pGpu) (0 && (pGpu))
// Any DISPLAYLESS chip?
#define IsDISPLAYLESS(pGpu) (0 && (pGpu))
// Any dKEPLER chip?
#define IsdKEPLER(pGpu) (0 && (pGpu))
#define IsdKEPLERorBetter(pGpu) (0 && (pGpu))
// Any KEPLER chip?
#define IsKEPLER(pGpu) (IsKEPLER_CLASSIC_GPUS(pGpu) || IsKEPLER_TEGRA_BIG_GPUS(pGpu))
#define IsKEPLERorBetter(pGpu) (IsKEPLER_CLASSIC_GPUSorBetter(pGpu) || IsKEPLER_TEGRA_BIG_GPUSorBetter(pGpu))
// Any KEPLER_CLASSIC_GPUS chip?
#define IsKEPLER_CLASSIC_GPUS(pGpu) (0 && (pGpu))
#define IsKEPLER_CLASSIC_GPUSorBetter(pGpu) (0 && (pGpu))
// Any dMAXWELL chip?
#define IsdMAXWELL(pGpu) (0 && (pGpu))
#define IsdMAXWELLorBetter(pGpu) (0 && (pGpu))
// Any MAXWELL chip?
#define IsMAXWELL(pGpu) (IsMAXWELL_CLASSIC_GPUS(pGpu) || IsMAXWELL_TEGRA_BIG_GPUS(pGpu))
#define IsMAXWELLorBetter(pGpu) (IsMAXWELL_CLASSIC_GPUSorBetter(pGpu) || IsMAXWELL_TEGRA_BIG_GPUSorBetter(pGpu))
// Any MAXWELL_CLASSIC_GPUS chip?
#define IsMAXWELL_CLASSIC_GPUS(pGpu) (0 && (pGpu))
#define IsMAXWELL_CLASSIC_GPUSorBetter(pGpu) (0 && (pGpu))
// Any dPASCAL chip?
#define IsdPASCAL(pGpu) (0 && (pGpu))
#define IsdPASCALorBetter(pGpu) (0 && (pGpu))
// Any PASCAL chip?
#define IsPASCAL(pGpu) (IsPASCAL_CLASSIC_GPUS(pGpu) || IsPASCAL_TEGRA_BIG_GPUS(pGpu))
#define IsPASCALorBetter(pGpu) (IsPASCAL_CLASSIC_GPUSorBetter(pGpu) || IsPASCAL_TEGRA_BIG_GPUSorBetter(pGpu))
// Any PASCAL_CLASSIC_GPUS chip?
#define IsPASCAL_CLASSIC_GPUS(pGpu) (0 && (pGpu))
#define IsPASCAL_CLASSIC_GPUSorBetter(pGpu) (0 && (pGpu))
// Any dVOLTA chip?
#define IsdVOLTA(pGpu) (0 && (pGpu))
#define IsdVOLTAorBetter(pGpu) (0 && (pGpu))
// Any VOLTA chip?
#define IsVOLTA(pGpu) (IsVOLTA_CLASSIC_GPUS(pGpu) || IsVOLTA_TEGRA_BIG_GPUS(pGpu))
#define IsVOLTAorBetter(pGpu) (IsVOLTA_CLASSIC_GPUSorBetter(pGpu) || IsVOLTA_TEGRA_BIG_GPUSorBetter(pGpu))
// Any VOLTA_CLASSIC_GPUS chip?
#define IsVOLTA_CLASSIC_GPUS(pGpu) (0 && (pGpu))
#define IsVOLTA_CLASSIC_GPUSorBetter(pGpu) (0 && (pGpu))
// Any dTURING chip?
#define IsdTURING(pGpu) (0 && (pGpu))
#define IsdTURINGorBetter(pGpu) (0 && (pGpu))
// Any TURING chip?
#define IsTURING(pGpu) (IsTURING_CLASSIC_GPUS(pGpu) || IsTURING_TEGRA_BIG_GPUS(pGpu))
#define IsTURINGorBetter(pGpu) (IsTURING_CLASSIC_GPUSorBetter(pGpu) || IsTURING_TEGRA_BIG_GPUSorBetter(pGpu))
// Any TURING_CLASSIC_GPUS chip?
#define IsTURING_CLASSIC_GPUS(pGpu) (0 && (pGpu))
#define IsTURING_CLASSIC_GPUSorBetter(pGpu) (0 && (pGpu))
// Any dAMPERE chip?
#define IsdAMPERE(pGpu) (0 && (pGpu))
#define IsdAMPEREorBetter(pGpu) (0 && (pGpu))
// Any AMPERE chip?
#define IsAMPERE(pGpu) (IsAMPERE_CLASSIC_GPUS(pGpu) || IsAMPERE_TEGRA_BIG_GPUS(pGpu))
#define IsAMPEREorBetter(pGpu) (IsAMPERE_CLASSIC_GPUSorBetter(pGpu) || IsAMPERE_TEGRA_BIG_GPUSorBetter(pGpu))
// Any AMPERE_CLASSIC_GPUS chip?
#define IsAMPERE_CLASSIC_GPUS(pGpu) (0 && (pGpu))
#define IsAMPERE_CLASSIC_GPUSorBetter(pGpu) (0 && (pGpu))
// Any TEGRA_DGPU_AMPERE chip?
#define IsTEGRA_DGPU_AMPERE(pGpu) (0 && (pGpu))
// Any TEGRA_DGPU chip?
#define IsTEGRA_DGPU(pGpu) (0 && (pGpu))
// Any DFPGA chip?
#define IsDFPGA(pGpu) (0 && (pGpu))
// Any TEGRA_BIG_GPUS chip?
#define IsTEGRA_BIG_GPUS(pGpu) (0 && (pGpu))
#define IsTEGRA_BIG_GPUSorBetter(pGpu) (0 && (pGpu))
// Any FERMI_TEGRA_BIG_GPUS chip?
#define IsFERMI_TEGRA_BIG_GPUS(pGpu) (0 && (pGpu))
#define IsFERMI_TEGRA_BIG_GPUSorBetter(pGpu) (0 && (pGpu))
// Any TEGRA chip?
#define IsTEGRA(pGpu) (1 || (pGpu))
#define IsTEGRAorBetter(pGpu) (1 || (pGpu))
// Any TEGRA_TEGRA_BIG_GPUS chip?
#define IsTEGRA_TEGRA_BIG_GPUS(pGpu) (0 && (pGpu))
#define IsTEGRA_TEGRA_BIG_GPUSorBetter(pGpu) (0 && (pGpu))
// Any tKEPLER chip?
#define IstKEPLER(pGpu) (0 && (pGpu))
#define IstKEPLERorBetter(pGpu) (0 && (pGpu))
// Any KEPLER_TEGRA_BIG_GPUS chip?
#define IsKEPLER_TEGRA_BIG_GPUS(pGpu) (0 && (pGpu))
#define IsKEPLER_TEGRA_BIG_GPUSorBetter(pGpu) (0 && (pGpu))
// Any tMAXWELL chip?
#define IstMAXWELL(pGpu) (0 && (pGpu))
#define IstMAXWELLorBetter(pGpu) (0 && (pGpu))
// Any MAXWELL_TEGRA_BIG_GPUS chip?
#define IsMAXWELL_TEGRA_BIG_GPUS(pGpu) (0 && (pGpu))
#define IsMAXWELL_TEGRA_BIG_GPUSorBetter(pGpu) (0 && (pGpu))
// Any tPASCAL chip?
#define IstPASCAL(pGpu) (0 && (pGpu))
#define IstPASCALorBetter(pGpu) (0 && (pGpu))
// Any PASCAL_TEGRA_BIG_GPUS chip?
#define IsPASCAL_TEGRA_BIG_GPUS(pGpu) (0 && (pGpu))
#define IsPASCAL_TEGRA_BIG_GPUSorBetter(pGpu) (0 && (pGpu))
// Any tVOLTA chip?
#define IstVOLTA(pGpu) (0 && (pGpu))
#define IstVOLTAorBetter(pGpu) (0 && (pGpu))
// Any VOLTA_TEGRA_BIG_GPUS chip?
#define IsVOLTA_TEGRA_BIG_GPUS(pGpu) (0 && (pGpu))
#define IsVOLTA_TEGRA_BIG_GPUSorBetter(pGpu) (0 && (pGpu))
// Any TURING_TEGRA_BIG_GPUS chip?
#define IsTURING_TEGRA_BIG_GPUS(pGpu) (0 && (pGpu))
#define IsTURING_TEGRA_BIG_GPUSorBetter(pGpu) (0 && (pGpu))
// Any T23X chip?
#define IsT23X(pGpu) (1 || (pGpu))
#define IsT23XorBetter(pGpu) (1 || (pGpu))
// Any T23X_TEGRA_BIG_GPUS chip?
#define IsT23X_TEGRA_BIG_GPUS(pGpu) (0 && (pGpu))
#define IsT23X_TEGRA_BIG_GPUSorBetter(pGpu) (0 && (pGpu))
// Any tAMPERE chip?
#define IstAMPERE(pGpu) (0 && (pGpu))
#define IstAMPEREorBetter(pGpu) (0 && (pGpu))
// Any AMPERE_TEGRA_BIG_GPUS chip?
#define IsAMPERE_TEGRA_BIG_GPUS(pGpu) (0 && (pGpu))
#define IsAMPERE_TEGRA_BIG_GPUSorBetter(pGpu) (0 && (pGpu))
// Any TEGRA_NVDISP_GPUS chip?
#define IsTEGRA_NVDISP_GPUS(pGpu) (1 || (pGpu))
#define IsTEGRA_NVDISP_GPUSorBetter(pGpu) (1 || (pGpu))
// Any T23X_TEGRA_NVDISP_GPUS chip?
#define IsT23X_TEGRA_NVDISP_GPUS(pGpu) (1 || (pGpu))
#define IsT23X_TEGRA_NVDISP_GPUSorBetter(pGpu) (1 || (pGpu))
// Any TEGRA_TEGRA_NVDISP_GPUS chip?
#define IsTEGRA_TEGRA_NVDISP_GPUS(pGpu) (1 || (pGpu))
#define IsTEGRA_TEGRA_NVDISP_GPUSorBetter(pGpu) (1 || (pGpu))
// Any SIMULATION_GPUS chip?
#define IsSIMULATION_GPUS(pGpu) (0 && (pGpu))
#define IsSIMULATION_GPUSorBetter(pGpu) (0 && (pGpu))
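/*
 * Illustrative sketch only -- not part of the generated header and kept under
 * #if 0 so it never affects the build.  In this devel-soc-disp-dce-client
 * profile only T234D is enabled, so IsT234D(pGpu) compiles down to a check of
 * pGpu alone, while every other identity macro folds to 0 and the compiler
 * drops the dead branch.
 */
#if 0
static void exampleChipSpecificPath(OBJGPU *pGpu)
{
    if (IsT234D(pGpu))
    {
        /* T234D display path */
    }
    else if (IsGA102(pGpu))
    {
        /* statically 0 in this profile -- eliminated at compile time */
    }
}
#endif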
//
// Enable/disable printing of entity names (class, engine, etc.)
//
#define RMCFG_ENTITY_NAME(entity) ""
//
// Macros to help with enabling or disabling code based on whether
// a feature (or chip or engine or ...) is enabled or not.
// Also have RMCFG_CHIP_ENABLED(), RMCFG_FEATURE_ENABLED(), etc
// from rmconfig.h.
//
// NOTE: these definitions are "flat" (ie they don't use some more general
// RMCFG_ENABLED(CHIP,X) form because the pre-processor would re-evaluate
// the expansion of the item (chip, feature, class, api). For classes,
// at least, this is a problem since we would end up with class number
// instead of its name...
// hack: MSVC is not C99 compliant
// CHIP's
#define RMCFG_CHIP_ENABLED_OR_BAIL(W) \
do { \
if ( ! RMCFG_CHIP_##W) \
{ \
NV_PRINTF(LEVEL_ERROR, "CHIP" RMCFG_ENTITY_NAME(#W) " not enabled, bailing\n"); \
return NV_ERR_NOT_SUPPORTED; \
} \
} while(0)
#define RMCFG_CHIP_ENABLED_OR_ASSERT_AND_BAIL(W) \
do { \
if ( ! RMCFG_CHIP_##W) \
{ \
NV_PRINTF(LEVEL_ERROR, "CHIP" RMCFG_ENTITY_NAME(#W) " not enabled, assert and bail\n"); \
NV_ASSERT_PRECOMP(RMCFG_CHIP_##W); \
return NV_ERR_NOT_SUPPORTED; \
} \
} while(0)
// FEATURE's
#define RMCFG_FEATURE_ENABLED_OR_BAIL(W) \
do { \
if ( ! RMCFG_FEATURE_##W) \
{ \
NV_PRINTF(LEVEL_ERROR, "FEATURE" RMCFG_ENTITY_NAME(#W) " not enabled, bailing\n"); \
return NV_ERR_NOT_SUPPORTED; \
} \
} while(0)
#define RMCFG_FEATURE_ENABLED_OR_ASSERT_AND_BAIL(W) \
do { \
if ( ! RMCFG_FEATURE_##W) \
{ \
NV_PRINTF(LEVEL_ERROR, "FEATURE" RMCFG_ENTITY_NAME(#W) " not enabled, assert and bail\n"); \
NV_ASSERT_PRECOMP(RMCFG_FEATURE_##W); \
return NV_ERR_NOT_SUPPORTED; \
} \
} while(0)
#define RMCFG_FEATURE_PLATFORM(P) (RMCFG_FEATURE_PLATFORM_##P)
// MODULE's
#define RMCFG_MODULE_ENABLED_OR_BAIL(W) \
do { \
if ( ! RMCFG_MODULE_##W) \
{ \
NV_PRINTF(LEVEL_ERROR, "MODULE" RMCFG_ENTITY_NAME(#W) " not enabled, bailing\n"); \
return NV_ERR_NOT_SUPPORTED; \
} \
} while(0)
#define RMCFG_MODULE_ENABLED_OR_ASSERT_AND_BAIL(W) \
do { \
if ( ! RMCFG_MODULE_##W) \
{ \
NV_PRINTF(LEVEL_ERROR, "MODULE" RMCFG_ENTITY_NAME(#W) " not enabled, assert and bail\n"); \
NV_ASSERT_PRECOMP(RMCFG_MODULE_##W); \
return NV_ERR_NOT_SUPPORTED; \
} \
} while(0)
// CLASS's
#define RMCFG_CLASS_ENABLED_OR_BAIL(W) \
do { \
if ( ! RMCFG_CLASS_##W) \
{ \
NV_PRINTF(LEVEL_ERROR, "CLASS" RMCFG_ENTITY_NAME(#W) " not enabled, bailing\n"); \
return NV_ERR_NOT_SUPPORTED; \
} \
} while(0)
#define RMCFG_CLASS_ENABLED_OR_ASSERT_AND_BAIL(W) \
do { \
if ( ! RMCFG_CLASS_##W) \
{ \
NV_PRINTF(LEVEL_ERROR, "CLASS" RMCFG_ENTITY_NAME(#W) " not enabled, assert and bail\n"); \
NV_ASSERT_PRECOMP(RMCFG_CLASS_##W); \
return NV_ERR_NOT_SUPPORTED; \
} \
} while(0)
// API's
#define RMCFG_API_ENABLED_OR_BAIL(W) \
do { \
if ( ! RMCFG_API_##W) \
{ \
NV_PRINTF(LEVEL_ERROR, "API" RMCFG_ENTITY_NAME(#W) " not enabled, bailing\n"); \
return NV_ERR_NOT_SUPPORTED; \
} \
} while(0)
#define RMCFG_API_ENABLED_OR_ASSERT_AND_BAIL(W) \
do { \
if ( ! RMCFG_API_##W) \
{ \
NV_PRINTF(LEVEL_ERROR, "API" RMCFG_ENTITY_NAME(#W) " not enabled, assert and bail\n"); \
NV_ASSERT_PRECOMP(RMCFG_API_##W); \
return NV_ERR_NOT_SUPPORTED; \
} \
} while(0)
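/*
 * Illustrative sketch only -- not part of the generated header and kept under
 * #if 0 so it never affects the build.  The *_ENABLED_OR_BAIL macros expand to
 * an early "return NV_ERR_NOT_SUPPORTED" when the named item was compiled out
 * of this profile, so they may only appear in functions returning NV_STATUS.
 * FOO is a hypothetical rmconfig module name used purely for illustration.
 */
#if 0
static NV_STATUS exampleModuleEntryPoint(void)
{
    RMCFG_MODULE_ENABLED_OR_BAIL(FOO);   /* checks RMCFG_MODULE_FOO */
    /* ... module-specific work ... */
    return NV_OK;
}
#endif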
// ARCH test
#define RMCFG_IS_ARCH(arch) RMCFG_FEATURE_ARCH_##arch
#endif // _G_RMCFG_PRIVATE_H_


@@ -0,0 +1,32 @@
// This file is automatically generated by rmconfig - DO NOT EDIT!
//
// rmconfig runtime support that will be part of "core" resman.
//
// Profile: devel-soc-disp-dce-client
// Template: templates/gt_rmconfig_util.c
//
// Chips: T234D
//
#include "gpu/gpu.h"
#include "nvoc/runtime.h"
#include "nvoc/rtti.h"
// NVOC RTTI provider for IOM objects
const NVOC_RTTI_PROVIDER __iom_rtti_provider = { 0 };
//
// helper functions for IsCHIP() et al.
// These help to reduce code size for runtime IsCHIP() and IsCHIPALIAS() invocations
//
// NVOC class ID uniqueness checks
#ifdef DEBUG
char __nvoc_class_id_uniqueness_check_0x05c7b5 = 1; /* OBJGPIO */
char __nvoc_class_id_uniqueness_check_0x1ab16a = 1; /* OBJRPC */
char __nvoc_class_id_uniqueness_check_0xd4dff8 = 1; /* OBJRPCSTRUCTURECOPY */
#endif


@@ -0,0 +1,23 @@
// This file is automatically generated by rmconfig - DO NOT EDIT!
//
// Prototypes for rmconfig utility functions such as _IsGK104(), etc.
//
// Only for use within resman.
//
// Profile: devel-soc-disp-dce-client
// Template: templates/gt_rmconfig_util.h
//
// Chips: T234D
//
#ifndef _G_RMCFG_UTIL_H_
#define _G_RMCFG_UTIL_H_
//
// Any needed prototypes for helper functions for IsCHIP(), eg rmcfg_IsGK104()
// These cannot be put in rmconfig_private.h as they need the OBJ typedefs.
//
#endif // _G_RMCFG_UTIL_H_


@@ -0,0 +1,68 @@
/*
* SPDX-FileCopyrightText: Copyright (c) 2008-2022 NVIDIA CORPORATION & AFFILIATES. All rights reserved.
* SPDX-License-Identifier: MIT
*
* Permission is hereby granted, free of charge, to any person obtaining a
* copy of this software and associated documentation files (the "Software"),
* to deal in the Software without restriction, including without limitation
* the rights to use, copy, modify, merge, publish, distribute, sublicense,
* and/or sell copies of the Software, and to permit persons to whom the
* Software is furnished to do so, subject to the following conditions:
*
* The above copyright notice and this permission notice shall be included in
* all copies or substantial portions of the Software.
*
* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
* IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
* FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL
* THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
* LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
* FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER
* DEALINGS IN THE SOFTWARE.
*/
/*
* WARNING: This is an autogenerated file. DO NOT EDIT.
* This file is generated using below files:
* template file: kernel/inc/vgpu/gt_rpc-message.h
* definition file: kernel/inc/vgpu/rpc-message-header.def
*/
#ifdef RPC_MESSAGE_STRUCTURES
typedef union rpc_message_rpc_union_field_v03_00
{
NvU32 spare;
NvU32 cpuRmGfid;
} rpc_message_rpc_union_field_v03_00;
typedef rpc_message_rpc_union_field_v03_00 rpc_message_rpc_union_field_v;
typedef struct rpc_message_header_v03_00
{
NvU32 header_version;
NvU32 signature;
NvU32 length;
NvU32 function;
NvU32 rpc_result;
NvU32 rpc_result_private;
NvU32 sequence;
rpc_message_rpc_union_field_v u;
rpc_generic_union rpc_message_data[];
} rpc_message_header_v03_00;
typedef rpc_message_header_v03_00 rpc_message_header_v;
#endif
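/*
 * Illustrative sketch only -- not part of the generated header and kept under
 * #if 0 so it never affects the build.  Because rpc_message_data[] is a
 * flexible array member, the total buffer size for a message carrying
 * payloadSize bytes of RPC data is the fixed header size plus the payload;
 * the length field presumably records this total.
 */
#if 0
static NvU32 exampleRpcMessageLength(NvU32 payloadSize)
{
    return (NvU32)sizeof(rpc_message_header_v03_00) + payloadSize;
}
#endif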
#ifdef RPC_MESSAGE_GENERIC_UNION
// This is a generic union that will be used for communication between the vmioplugin and guest RM.
typedef union rpc_message_generic_union {
rpc_message_rpc_union_field_v03_00 rpc_union_field_v03_00;
rpc_message_rpc_union_field_v rpc_union_field_v;
rpc_message_header_v03_00 header_v03_00;
rpc_message_header_v header_v;
} rpc_message_generic_union;
#endif


@@ -0,0 +1,216 @@
/*
* SPDX-FileCopyrightText: Copyright (c) 2008-2022 NVIDIA CORPORATION & AFFILIATES. All rights reserved.
* SPDX-License-Identifier: MIT
*
* Permission is hereby granted, free of charge, to any person obtaining a
* copy of this software and associated documentation files (the "Software"),
* to deal in the Software without restriction, including without limitation
* the rights to use, copy, modify, merge, publish, distribute, sublicense,
* and/or sell copies of the Software, and to permit persons to whom the
* Software is furnished to do so, subject to the following conditions:
*
* The above copyright notice and this permission notice shall be included in
* all copies or substantial portions of the Software.
*
* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
* IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
* FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL
* THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
* LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
* FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER
* DEALINGS IN THE SOFTWARE.
*/
/*
* WARNING: This is an autogenerated file. DO NOT EDIT.
* This file is generated using below files:
* template file: kernel/inc/vgpu/gt_rpc-structures.h
* definition file: kernel/inc/vgpu/rpc-structures.def
*/
#ifdef RPC_STRUCTURES
// These structures will be used for communication between the vmioplugin and guest RM.
#define SDK_STRUCTURES
#include "g_sdk-structures.h"
#undef SDK_STRUCTURES
typedef struct rpc_free_v03_00
{
NVOS00_PARAMETERS_v03_00 params;
} rpc_free_v03_00;
typedef rpc_free_v03_00 rpc_free_v;
typedef struct rpc_dup_object_v03_00
{
NVOS55_PARAMETERS_v03_00 params;
} rpc_dup_object_v03_00;
typedef rpc_dup_object_v03_00 rpc_dup_object_v;
typedef struct rpc_gsp_rm_alloc_v03_00
{
NvHandle hClient;
NvHandle hParent;
NvHandle hObject;
NvU32 hClass;
NvU32 status;
NvU32 paramsSize;
NvU8 params[];
} rpc_gsp_rm_alloc_v03_00;
typedef rpc_gsp_rm_alloc_v03_00 rpc_gsp_rm_alloc_v;
typedef struct rpc_gsp_rm_control_v03_00
{
NvHandle hClient;
NvHandle hObject;
NvU32 cmd;
NvU32 status;
NvU32 paramsSize;
NvBool serialized;
NvU8 reserved[3];
NvU8 params[];
} rpc_gsp_rm_control_v03_00;
typedef rpc_gsp_rm_control_v03_00 rpc_gsp_rm_control_v;
typedef struct rpc_post_event_v17_00
{
NvHandle hClient;
NvHandle hEvent;
NvU32 notifyIndex;
NvU32 data;
NvU32 status;
NvU32 eventDataSize;
NvBool bNotifyList;
NvU8 eventData[];
} rpc_post_event_v17_00;
typedef rpc_post_event_v17_00 rpc_post_event_v;
typedef struct rpc_rg_line_intr_v17_00
{
NvU32 head;
NvU32 rgIntr;
} rpc_rg_line_intr_v17_00;
typedef rpc_rg_line_intr_v17_00 rpc_rg_line_intr_v;
typedef struct rpc_display_modeset_v01_00
{
NvBool bModesetStart;
NvU32 minRequiredIsoBandwidthKBPS;
NvU32 minRequiredFloorBandwidthKBPS;
} rpc_display_modeset_v01_00;
typedef rpc_display_modeset_v01_00 rpc_display_modeset_v;
typedef struct rpc_dce_rm_init_v01_00
{
NvBool bInit;
} rpc_dce_rm_init_v01_00;
typedef rpc_dce_rm_init_v01_00 rpc_dce_rm_init_v;
#endif
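/*
 * Illustrative sketch only -- not part of the generated file and kept under
 * #if 0 so it never affects the build.  Several of the structures above end in
 * a flexible params[]/eventData[] array sized by an explicit byte count, so
 * the wire size of, for example, a GSP RM control RPC is the fixed struct
 * plus paramsSize bytes.
 */
#if 0
static NvU32 exampleGspRmControlWireSize(const rpc_gsp_rm_control_v03_00 *pCtrl)
{
    return (NvU32)sizeof(*pCtrl) + pCtrl->paramsSize;
}
#endif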
#ifdef RPC_DEBUG_PRINT_FUNCTIONS
// These are definitions for versioned functions. These will be used for RPC logging in the vmioplugin.
#define SDK_DEBUG_PRINT_FUNCTIONS
#include "g_sdk-structures.h"
#undef SDK_DEBUG_PRINT_FUNCTIONS
#ifndef SKIP_PRINT_rpc_free_v03_00
vmiopd_mdesc_t *rpcdebugFree_v03_00(void)
{
return &vmiopd_mdesc_t_rpc_free_v03_00;
}
#endif
#ifndef SKIP_PRINT_rpc_dup_object_v03_00
vmiopd_mdesc_t *rpcdebugDupObject_v03_00(void)
{
return &vmiopd_mdesc_t_rpc_dup_object_v03_00;
}
#endif
#ifndef SKIP_PRINT_rpc_gsp_rm_alloc_v03_00
vmiopd_mdesc_t *rpcdebugGspRmAlloc_v03_00(void)
{
return &vmiopd_mdesc_t_rpc_gsp_rm_alloc_v03_00;
}
#endif
#ifndef SKIP_PRINT_rpc_gsp_rm_control_v03_00
vmiopd_mdesc_t *rpcdebugGspRmControl_v03_00(void)
{
return &vmiopd_mdesc_t_rpc_gsp_rm_control_v03_00;
}
#endif
#ifndef SKIP_PRINT_rpc_post_event_v17_00
vmiopd_mdesc_t *rpcdebugPostEvent_v17_00(void)
{
return &vmiopd_mdesc_t_rpc_post_event_v17_00;
}
#endif
#ifndef SKIP_PRINT_rpc_rg_line_intr_v17_00
vmiopd_mdesc_t *rpcdebugRgLineIntr_v17_00(void)
{
return &vmiopd_mdesc_t_rpc_rg_line_intr_v17_00;
}
#endif
#ifndef SKIP_PRINT_rpc_display_modeset_v01_00
vmiopd_mdesc_t *rpcdebugDisplayModeset_v01_00(void)
{
return &vmiopd_mdesc_t_rpc_display_modeset_v01_00;
}
#endif
#ifndef SKIP_PRINT_rpc_dce_rm_init_v01_00
vmiopd_mdesc_t *rpcdebugDceRmInit_v01_00(void)
{
return &vmiopd_mdesc_t_rpc_dce_rm_init_v01_00;
}
#endif
#endif
#ifdef RPC_GENERIC_UNION
// This is a generic union that will be used for communication between the vmioplugin and guest RM.
typedef union rpc_generic_union {
rpc_free_v03_00 free_v03_00;
rpc_free_v free_v;
rpc_dup_object_v03_00 dup_object_v03_00;
rpc_dup_object_v dup_object_v;
rpc_gsp_rm_alloc_v03_00 gsp_rm_alloc_v03_00;
rpc_gsp_rm_alloc_v gsp_rm_alloc_v;
rpc_gsp_rm_control_v03_00 gsp_rm_control_v03_00;
rpc_gsp_rm_control_v gsp_rm_control_v;
rpc_post_event_v17_00 post_event_v17_00;
rpc_post_event_v post_event_v;
rpc_rg_line_intr_v17_00 rg_line_intr_v17_00;
rpc_rg_line_intr_v rg_line_intr_v;
rpc_display_modeset_v01_00 display_modeset_v01_00;
rpc_display_modeset_v display_modeset_v;
rpc_dce_rm_init_v01_00 dce_rm_init_v01_00;
rpc_dce_rm_init_v dce_rm_init_v;
} rpc_generic_union;
#endif
#ifdef RPC_ARRAY_LENGTH_FUNCTIONS
#define SDK_ARRAY_LENGTH_FUNCTIONS
#include "g_sdk-structures.h"
#undef SDK_ARRAY_LENGTH_FUNCTIONS
#endif
#ifdef AUTOGENERATE_RPC_MIN_SUPPORTED_VERSION_INFORMATION
#define NV_VGPU_GRIDSW_VERSION_MIN_SUPPORTED_INTERNAL_MAJOR 0x18
#define NV_VGPU_GRIDSW_VERSION_MIN_SUPPORTED_INTERNAL_MINOR 0x00
#endif


@@ -0,0 +1,421 @@
#define NVOC_RS_CLIENT_H_PRIVATE_ACCESS_ALLOWED
#include "nvoc/runtime.h"
#include "nvoc/rtti.h"
#include "nvtypes.h"
#include "nvport/nvport.h"
#include "nvport/inline/util_valist.h"
#include "utils/nvassert.h"
#include "g_rs_client_nvoc.h"
#ifdef DEBUG
char __nvoc_class_id_uniqueness_check_0x8f87e5 = 1;
#endif
extern const struct NVOC_CLASS_DEF __nvoc_class_def_RsClient;
extern const struct NVOC_CLASS_DEF __nvoc_class_def_Object;
void __nvoc_init_RsClient(RsClient*);
void __nvoc_init_funcTable_RsClient(RsClient*);
NV_STATUS __nvoc_ctor_RsClient(RsClient*, struct PORT_MEM_ALLOCATOR * arg_pAllocator, struct RS_RES_ALLOC_PARAMS_INTERNAL * arg_pParams);
void __nvoc_init_dataField_RsClient(RsClient*);
void __nvoc_dtor_RsClient(RsClient*);
extern const struct NVOC_EXPORT_INFO __nvoc_export_info_RsClient;
static const struct NVOC_RTTI __nvoc_rtti_RsClient_RsClient = {
/*pClassDef=*/ &__nvoc_class_def_RsClient,
/*dtor=*/ (NVOC_DYNAMIC_DTOR) &__nvoc_dtor_RsClient,
/*offset=*/ 0,
};
static const struct NVOC_RTTI __nvoc_rtti_RsClient_Object = {
/*pClassDef=*/ &__nvoc_class_def_Object,
/*dtor=*/ &__nvoc_destructFromBase,
/*offset=*/ NV_OFFSETOF(RsClient, __nvoc_base_Object),
};
static const struct NVOC_CASTINFO __nvoc_castinfo_RsClient = {
/*numRelatives=*/ 2,
/*relatives=*/ {
&__nvoc_rtti_RsClient_RsClient,
&__nvoc_rtti_RsClient_Object,
},
};
const struct NVOC_CLASS_DEF __nvoc_class_def_RsClient =
{
/*classInfo=*/ {
/*size=*/ sizeof(RsClient),
/*classId=*/ classId(RsClient),
/*providerId=*/ &__nvoc_rtti_provider,
#if NV_PRINTF_STRINGS_ALLOWED
/*name=*/ "RsClient",
#endif
},
/*objCreatefn=*/ (NVOC_DYNAMIC_OBJ_CREATE) &__nvoc_objCreateDynamic_RsClient,
/*pCastInfo=*/ &__nvoc_castinfo_RsClient,
/*pExportInfo=*/ &__nvoc_export_info_RsClient
};
const struct NVOC_EXPORT_INFO __nvoc_export_info_RsClient =
{
/*numEntries=*/ 0,
/*pExportEntries=*/ 0
};
void __nvoc_dtor_Object(Object*);
void __nvoc_dtor_RsClient(RsClient *pThis) {
__nvoc_clientDestruct(pThis);
__nvoc_dtor_Object(&pThis->__nvoc_base_Object);
PORT_UNREFERENCED_VARIABLE(pThis);
}
void __nvoc_init_dataField_RsClient(RsClient *pThis) {
PORT_UNREFERENCED_VARIABLE(pThis);
}
NV_STATUS __nvoc_ctor_Object(Object* );
NV_STATUS __nvoc_ctor_RsClient(RsClient *pThis, struct PORT_MEM_ALLOCATOR * arg_pAllocator, struct RS_RES_ALLOC_PARAMS_INTERNAL * arg_pParams) {
NV_STATUS status = NV_OK;
status = __nvoc_ctor_Object(&pThis->__nvoc_base_Object);
if (status != NV_OK) goto __nvoc_ctor_RsClient_fail_Object;
__nvoc_init_dataField_RsClient(pThis);
status = __nvoc_clientConstruct(pThis, arg_pAllocator, arg_pParams);
if (status != NV_OK) goto __nvoc_ctor_RsClient_fail__init;
goto __nvoc_ctor_RsClient_exit; // Success
__nvoc_ctor_RsClient_fail__init:
__nvoc_dtor_Object(&pThis->__nvoc_base_Object);
__nvoc_ctor_RsClient_fail_Object:
__nvoc_ctor_RsClient_exit:
return status;
}
static void __nvoc_init_funcTable_RsClient_1(RsClient *pThis) {
PORT_UNREFERENCED_VARIABLE(pThis);
pThis->__clientValidate__ = &clientValidate_IMPL;
pThis->__clientFreeResource__ = &clientFreeResource_IMPL;
pThis->__clientDestructResourceRef__ = &clientDestructResourceRef_IMPL;
pThis->__clientUnmapMemory__ = &clientUnmapMemory_IMPL;
pThis->__clientInterMap__ = &clientInterMap_IMPL;
pThis->__clientInterUnmap__ = &clientInterUnmap_IMPL;
pThis->__clientValidateNewResourceHandle__ = &clientValidateNewResourceHandle_IMPL;
pThis->__clientPostProcessPendingFreeList__ = &clientPostProcessPendingFreeList_IMPL;
pThis->__clientShareResource__ = &clientShareResource_IMPL;
}
void __nvoc_init_funcTable_RsClient(RsClient *pThis) {
__nvoc_init_funcTable_RsClient_1(pThis);
}
void __nvoc_init_Object(Object*);
void __nvoc_init_RsClient(RsClient *pThis) {
pThis->__nvoc_pbase_RsClient = pThis;
pThis->__nvoc_pbase_Object = &pThis->__nvoc_base_Object;
__nvoc_init_Object(&pThis->__nvoc_base_Object);
__nvoc_init_funcTable_RsClient(pThis);
}
NV_STATUS __nvoc_objCreate_RsClient(RsClient **ppThis, Dynamic *pParent, NvU32 createFlags, struct PORT_MEM_ALLOCATOR * arg_pAllocator, struct RS_RES_ALLOC_PARAMS_INTERNAL * arg_pParams) {
NV_STATUS status;
Object *pParentObj;
RsClient *pThis;
pThis = portMemAllocNonPaged(sizeof(RsClient));
if (pThis == NULL) return NV_ERR_NO_MEMORY;
portMemSet(pThis, 0, sizeof(RsClient));
__nvoc_initRtti(staticCast(pThis, Dynamic), &__nvoc_class_def_RsClient);
if (pParent != NULL && !(createFlags & NVOC_OBJ_CREATE_FLAGS_PARENT_HALSPEC_ONLY))
{
pParentObj = dynamicCast(pParent, Object);
objAddChild(pParentObj, &pThis->__nvoc_base_Object);
}
else
{
pThis->__nvoc_base_Object.pParent = NULL;
}
__nvoc_init_RsClient(pThis);
status = __nvoc_ctor_RsClient(pThis, arg_pAllocator, arg_pParams);
if (status != NV_OK) goto __nvoc_objCreate_RsClient_cleanup;
*ppThis = pThis;
return NV_OK;
__nvoc_objCreate_RsClient_cleanup:
// do not call destructors here since the constructor already called them
portMemFree(pThis);
return status;
}
NV_STATUS __nvoc_objCreateDynamic_RsClient(RsClient **ppThis, Dynamic *pParent, NvU32 createFlags, va_list args) {
NV_STATUS status;
struct PORT_MEM_ALLOCATOR * arg_pAllocator = va_arg(args, struct PORT_MEM_ALLOCATOR *);
struct RS_RES_ALLOC_PARAMS_INTERNAL * arg_pParams = va_arg(args, struct RS_RES_ALLOC_PARAMS_INTERNAL *);
status = __nvoc_objCreate_RsClient(ppThis, pParent, createFlags, arg_pAllocator, arg_pParams);
return status;
}
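/*
 * Illustrative sketch only -- not part of the generated file and kept under
 * #if 0 so it never affects the build.  It shows the calling convention of the
 * NVOC creation helper emitted above: the caller supplies an optional parent,
 * create flags, and the constructor arguments, and receives a fully
 * initialized RsClient whose function table was populated by
 * __nvoc_init_RsClient().
 */
#if 0
static NV_STATUS exampleCreateClient(struct PORT_MEM_ALLOCATOR *pAllocator,
                                     struct RS_RES_ALLOC_PARAMS_INTERNAL *pParams,
                                     RsClient **ppClient)
{
    /* No NVOC parent object and no special create flags in this sketch. */
    return __nvoc_objCreate_RsClient(ppClient, NULL, 0, pAllocator, pParams);
}
#endif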
#ifdef DEBUG
char __nvoc_class_id_uniqueness_check_0x083442 = 1;
#endif
extern const struct NVOC_CLASS_DEF __nvoc_class_def_RsClientResource;
extern const struct NVOC_CLASS_DEF __nvoc_class_def_Object;
extern const struct NVOC_CLASS_DEF __nvoc_class_def_RsResource;
void __nvoc_init_RsClientResource(RsClientResource*);
void __nvoc_init_funcTable_RsClientResource(RsClientResource*);
NV_STATUS __nvoc_ctor_RsClientResource(RsClientResource*, struct CALL_CONTEXT * arg_pCallContext, struct RS_RES_ALLOC_PARAMS_INTERNAL * arg_pParams);
void __nvoc_init_dataField_RsClientResource(RsClientResource*);
void __nvoc_dtor_RsClientResource(RsClientResource*);
extern const struct NVOC_EXPORT_INFO __nvoc_export_info_RsClientResource;
static const struct NVOC_RTTI __nvoc_rtti_RsClientResource_RsClientResource = {
/*pClassDef=*/ &__nvoc_class_def_RsClientResource,
/*dtor=*/ (NVOC_DYNAMIC_DTOR) &__nvoc_dtor_RsClientResource,
/*offset=*/ 0,
};
static const struct NVOC_RTTI __nvoc_rtti_RsClientResource_Object = {
/*pClassDef=*/ &__nvoc_class_def_Object,
/*dtor=*/ &__nvoc_destructFromBase,
/*offset=*/ NV_OFFSETOF(RsClientResource, __nvoc_base_RsResource.__nvoc_base_Object),
};
static const struct NVOC_RTTI __nvoc_rtti_RsClientResource_RsResource = {
/*pClassDef=*/ &__nvoc_class_def_RsResource,
/*dtor=*/ &__nvoc_destructFromBase,
/*offset=*/ NV_OFFSETOF(RsClientResource, __nvoc_base_RsResource),
};
static const struct NVOC_CASTINFO __nvoc_castinfo_RsClientResource = {
/*numRelatives=*/ 3,
/*relatives=*/ {
&__nvoc_rtti_RsClientResource_RsClientResource,
&__nvoc_rtti_RsClientResource_RsResource,
&__nvoc_rtti_RsClientResource_Object,
},
};
const struct NVOC_CLASS_DEF __nvoc_class_def_RsClientResource =
{
/*classInfo=*/ {
/*size=*/ sizeof(RsClientResource),
/*classId=*/ classId(RsClientResource),
/*providerId=*/ &__nvoc_rtti_provider,
#if NV_PRINTF_STRINGS_ALLOWED
/*name=*/ "RsClientResource",
#endif
},
/*objCreatefn=*/ (NVOC_DYNAMIC_OBJ_CREATE) &__nvoc_objCreateDynamic_RsClientResource,
/*pCastInfo=*/ &__nvoc_castinfo_RsClientResource,
/*pExportInfo=*/ &__nvoc_export_info_RsClientResource
};
static NvBool __nvoc_thunk_RsResource_clientresShareCallback(struct RsClientResource *pResource, struct RsClient *pInvokingClient, RsResourceRef *pParentRef, RS_SHARE_POLICY *pSharePolicy) {
return resShareCallback((struct RsResource *)(((unsigned char *)pResource) + __nvoc_rtti_RsClientResource_RsResource.offset), pInvokingClient, pParentRef, pSharePolicy);
}
static NV_STATUS __nvoc_thunk_RsResource_clientresControl(struct RsClientResource *pResource, struct CALL_CONTEXT *pCallContext, struct RS_RES_CONTROL_PARAMS_INTERNAL *pParams) {
return resControl((struct RsResource *)(((unsigned char *)pResource) + __nvoc_rtti_RsClientResource_RsResource.offset), pCallContext, pParams);
}
static NV_STATUS __nvoc_thunk_RsResource_clientresUnmap(struct RsClientResource *pResource, struct CALL_CONTEXT *pCallContext, RsCpuMapping *pCpuMapping) {
return resUnmap((struct RsResource *)(((unsigned char *)pResource) + __nvoc_rtti_RsClientResource_RsResource.offset), pCallContext, pCpuMapping);
}
static NV_STATUS __nvoc_thunk_RsResource_clientresMapTo(struct RsClientResource *pResource, RS_RES_MAP_TO_PARAMS *pParams) {
return resMapTo((struct RsResource *)(((unsigned char *)pResource) + __nvoc_rtti_RsClientResource_RsResource.offset), pParams);
}
static NvU32 __nvoc_thunk_RsResource_clientresGetRefCount(struct RsClientResource *pResource) {
return resGetRefCount((struct RsResource *)(((unsigned char *)pResource) + __nvoc_rtti_RsClientResource_RsResource.offset));
}
static NV_STATUS __nvoc_thunk_RsResource_clientresControlFilter(struct RsClientResource *pResource, struct CALL_CONTEXT *pCallContext, struct RS_RES_CONTROL_PARAMS_INTERNAL *pParams) {
return resControlFilter((struct RsResource *)(((unsigned char *)pResource) + __nvoc_rtti_RsClientResource_RsResource.offset), pCallContext, pParams);
}
static void __nvoc_thunk_RsResource_clientresAddAdditionalDependants(struct RsClient *pClient, struct RsClientResource *pResource, RsResourceRef *pReference) {
resAddAdditionalDependants(pClient, (struct RsResource *)(((unsigned char *)pResource) + __nvoc_rtti_RsClientResource_RsResource.offset), pReference);
}
static NvBool __nvoc_thunk_RsResource_clientresCanCopy(struct RsClientResource *pResource) {
return resCanCopy((struct RsResource *)(((unsigned char *)pResource) + __nvoc_rtti_RsClientResource_RsResource.offset));
}
static NV_STATUS __nvoc_thunk_RsResource_clientresControl_Prologue(struct RsClientResource *pResource, struct CALL_CONTEXT *pCallContext, struct RS_RES_CONTROL_PARAMS_INTERNAL *pParams) {
return resControl_Prologue((struct RsResource *)(((unsigned char *)pResource) + __nvoc_rtti_RsClientResource_RsResource.offset), pCallContext, pParams);
}
static void __nvoc_thunk_RsResource_clientresPreDestruct(struct RsClientResource *pResource) {
resPreDestruct((struct RsResource *)(((unsigned char *)pResource) + __nvoc_rtti_RsClientResource_RsResource.offset));
}
static NV_STATUS __nvoc_thunk_RsResource_clientresUnmapFrom(struct RsClientResource *pResource, RS_RES_UNMAP_FROM_PARAMS *pParams) {
return resUnmapFrom((struct RsResource *)(((unsigned char *)pResource) + __nvoc_rtti_RsClientResource_RsResource.offset), pParams);
}
static void __nvoc_thunk_RsResource_clientresControl_Epilogue(struct RsClientResource *pResource, struct CALL_CONTEXT *pCallContext, struct RS_RES_CONTROL_PARAMS_INTERNAL *pParams) {
resControl_Epilogue((struct RsResource *)(((unsigned char *)pResource) + __nvoc_rtti_RsClientResource_RsResource.offset), pCallContext, pParams);
}
static NV_STATUS __nvoc_thunk_RsResource_clientresControlLookup(struct RsClientResource *pResource, struct RS_RES_CONTROL_PARAMS_INTERNAL *pParams, const struct NVOC_EXPORTED_METHOD_DEF **ppEntry) {
return resControlLookup((struct RsResource *)(((unsigned char *)pResource) + __nvoc_rtti_RsClientResource_RsResource.offset), pParams, ppEntry);
}
static NV_STATUS __nvoc_thunk_RsResource_clientresMap(struct RsClientResource *pResource, struct CALL_CONTEXT *pCallContext, RS_CPU_MAP_PARAMS *pParams, RsCpuMapping *pCpuMapping) {
return resMap((struct RsResource *)(((unsigned char *)pResource) + __nvoc_rtti_RsClientResource_RsResource.offset), pCallContext, pParams, pCpuMapping);
}
static NvBool __nvoc_thunk_RsResource_clientresAccessCallback(struct RsClientResource *pResource, struct RsClient *pInvokingClient, void *pAllocParams, RsAccessRight accessRight) {
return resAccessCallback((struct RsResource *)(((unsigned char *)pResource) + __nvoc_rtti_RsClientResource_RsResource.offset), pInvokingClient, pAllocParams, accessRight);
}
const struct NVOC_EXPORT_INFO __nvoc_export_info_RsClientResource =
{
/*numEntries=*/ 0,
/*pExportEntries=*/ 0
};
void __nvoc_dtor_RsResource(RsResource*);
void __nvoc_dtor_RsClientResource(RsClientResource *pThis) {
__nvoc_clientresDestruct(pThis);
__nvoc_dtor_RsResource(&pThis->__nvoc_base_RsResource);
PORT_UNREFERENCED_VARIABLE(pThis);
}
void __nvoc_init_dataField_RsClientResource(RsClientResource *pThis) {
PORT_UNREFERENCED_VARIABLE(pThis);
}
NV_STATUS __nvoc_ctor_RsResource(RsResource* , struct CALL_CONTEXT *, struct RS_RES_ALLOC_PARAMS_INTERNAL *);
NV_STATUS __nvoc_ctor_RsClientResource(RsClientResource *pThis, struct CALL_CONTEXT * arg_pCallContext, struct RS_RES_ALLOC_PARAMS_INTERNAL * arg_pParams) {
NV_STATUS status = NV_OK;
status = __nvoc_ctor_RsResource(&pThis->__nvoc_base_RsResource, arg_pCallContext, arg_pParams);
if (status != NV_OK) goto __nvoc_ctor_RsClientResource_fail_RsResource;
__nvoc_init_dataField_RsClientResource(pThis);
status = __nvoc_clientresConstruct(pThis, arg_pCallContext, arg_pParams);
if (status != NV_OK) goto __nvoc_ctor_RsClientResource_fail__init;
goto __nvoc_ctor_RsClientResource_exit; // Success
__nvoc_ctor_RsClientResource_fail__init:
__nvoc_dtor_RsResource(&pThis->__nvoc_base_RsResource);
__nvoc_ctor_RsClientResource_fail_RsResource:
__nvoc_ctor_RsClientResource_exit:
return status;
}
static void __nvoc_init_funcTable_RsClientResource_1(RsClientResource *pThis) {
PORT_UNREFERENCED_VARIABLE(pThis);
pThis->__clientresShareCallback__ = &__nvoc_thunk_RsResource_clientresShareCallback;
pThis->__clientresControl__ = &__nvoc_thunk_RsResource_clientresControl;
pThis->__clientresUnmap__ = &__nvoc_thunk_RsResource_clientresUnmap;
pThis->__clientresMapTo__ = &__nvoc_thunk_RsResource_clientresMapTo;
pThis->__clientresGetRefCount__ = &__nvoc_thunk_RsResource_clientresGetRefCount;
pThis->__clientresControlFilter__ = &__nvoc_thunk_RsResource_clientresControlFilter;
pThis->__clientresAddAdditionalDependants__ = &__nvoc_thunk_RsResource_clientresAddAdditionalDependants;
pThis->__clientresCanCopy__ = &__nvoc_thunk_RsResource_clientresCanCopy;
pThis->__clientresControl_Prologue__ = &__nvoc_thunk_RsResource_clientresControl_Prologue;
pThis->__clientresPreDestruct__ = &__nvoc_thunk_RsResource_clientresPreDestruct;
pThis->__clientresUnmapFrom__ = &__nvoc_thunk_RsResource_clientresUnmapFrom;
pThis->__clientresControl_Epilogue__ = &__nvoc_thunk_RsResource_clientresControl_Epilogue;
pThis->__clientresControlLookup__ = &__nvoc_thunk_RsResource_clientresControlLookup;
pThis->__clientresMap__ = &__nvoc_thunk_RsResource_clientresMap;
pThis->__clientresAccessCallback__ = &__nvoc_thunk_RsResource_clientresAccessCallback;
}
void __nvoc_init_funcTable_RsClientResource(RsClientResource *pThis) {
__nvoc_init_funcTable_RsClientResource_1(pThis);
}
void __nvoc_init_RsResource(RsResource*);
void __nvoc_init_RsClientResource(RsClientResource *pThis) {
pThis->__nvoc_pbase_RsClientResource = pThis;
pThis->__nvoc_pbase_Object = &pThis->__nvoc_base_RsResource.__nvoc_base_Object;
pThis->__nvoc_pbase_RsResource = &pThis->__nvoc_base_RsResource;
__nvoc_init_RsResource(&pThis->__nvoc_base_RsResource);
__nvoc_init_funcTable_RsClientResource(pThis);
}
NV_STATUS __nvoc_objCreate_RsClientResource(RsClientResource **ppThis, Dynamic *pParent, NvU32 createFlags, struct CALL_CONTEXT * arg_pCallContext, struct RS_RES_ALLOC_PARAMS_INTERNAL * arg_pParams) {
NV_STATUS status;
Object *pParentObj;
RsClientResource *pThis;
pThis = portMemAllocNonPaged(sizeof(RsClientResource));
if (pThis == NULL) return NV_ERR_NO_MEMORY;
portMemSet(pThis, 0, sizeof(RsClientResource));
__nvoc_initRtti(staticCast(pThis, Dynamic), &__nvoc_class_def_RsClientResource);
if (pParent != NULL && !(createFlags & NVOC_OBJ_CREATE_FLAGS_PARENT_HALSPEC_ONLY))
{
pParentObj = dynamicCast(pParent, Object);
objAddChild(pParentObj, &pThis->__nvoc_base_RsResource.__nvoc_base_Object);
}
else
{
pThis->__nvoc_base_RsResource.__nvoc_base_Object.pParent = NULL;
}
__nvoc_init_RsClientResource(pThis);
status = __nvoc_ctor_RsClientResource(pThis, arg_pCallContext, arg_pParams);
if (status != NV_OK) goto __nvoc_objCreate_RsClientResource_cleanup;
*ppThis = pThis;
return NV_OK;
__nvoc_objCreate_RsClientResource_cleanup:
// do not call destructors here since the constructor already called them
portMemFree(pThis);
return status;
}
NV_STATUS __nvoc_objCreateDynamic_RsClientResource(RsClientResource **ppThis, Dynamic *pParent, NvU32 createFlags, va_list args) {
NV_STATUS status;
struct CALL_CONTEXT * arg_pCallContext = va_arg(args, struct CALL_CONTEXT *);
struct RS_RES_ALLOC_PARAMS_INTERNAL * arg_pParams = va_arg(args, struct RS_RES_ALLOC_PARAMS_INTERNAL *);
status = __nvoc_objCreate_RsClientResource(ppThis, pParent, createFlags, arg_pCallContext, arg_pParams);
return status;
}

Some files were not shown because too many files have changed in this diff.