
Searched refs:vk_device (Results 1 – 25 of 73) sorted by relevance


/hardware/google/gfxstream/guest/mesa/src/vulkan/runtime/
vk_sync.h
35 struct vk_device;
168 VkResult (*init)(struct vk_device *device,
176 void (*finish)(struct vk_device *device,
183 VkResult (*signal)(struct vk_device *device,
188 VkResult (*get_value)(struct vk_device *device,
193 VkResult (*reset)(struct vk_device *device,
204 VkResult (*move)(struct vk_device *device,
220 VkResult (*wait)(struct vk_device *device,
233 VkResult (*wait_many)(struct vk_device *device,
245 VkResult (*import_opaque_fd)(struct vk_device *device,
[all …]
vk_device.h
94 struct vk_device {
166 VkResult (*check_status)(struct vk_device *device);
186 VkResult (*create_sync_for_memory)(struct vk_device *device,
265 VK_DEFINE_HANDLE_CASTS(vk_device, base, VkDevice,
288 vk_device_init(struct vk_device *device,
295 vk_device_set_drm_fd(struct vk_device *device, int drm_fd) in vk_device_set_drm_fd()
305 vk_device_finish(struct vk_device *device);
316 void vk_device_enable_threaded_submit(struct vk_device *device);
319 vk_device_supports_threaded_submit(const struct vk_device *device) in vk_device_supports_threaded_submit()
325 VkResult vk_device_flush(struct vk_device *device);
[all …]
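The vk_device.h matches above cover the runtime's base device object. As a rough, hedged sketch of how a driver usually layers on it (the drv_device/drv_device_init names are hypothetical, and the vk_device_init() parameter order is assumed from the declaration at vk_device.h:288, not copied from the truncated line above):

#include "vk_device.h"
#include "vk_physical_device.h"

/* Hypothetical driver device that embeds struct vk_device as its first
 * member so the VK_DEFINE_HANDLE_CASTS()-generated casts keep working. */
struct drv_device {
   struct vk_device vk;
   int drm_fd;                      /* hypothetical driver-private state */
};

static VkResult
drv_device_init(struct drv_device *dev,
                struct vk_physical_device *pdev,
                const struct vk_device_dispatch_table *dispatch,
                const VkDeviceCreateInfo *pCreateInfo,
                const VkAllocationCallbacks *alloc)
{
   /* Assumed parameter order for vk_device_init() (vk_device.h:288). */
   VkResult result = vk_device_init(&dev->vk, pdev, dispatch,
                                    pCreateInfo, alloc);
   if (result != VK_SUCCESS)
      return result;

   /* Optional: hand a DRM fd to the runtime (vk_device.h:295) so the
    * common vk_drm_syncobj code can drive it. */
   vk_device_set_drm_fd(&dev->vk, dev->drm_fd);
   return VK_SUCCESS;
}

static void
drv_device_cleanup(struct drv_device *dev)
{
   vk_device_finish(&dev->vk);      /* vk_device.h:305 */
}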
vk_object.h
39 struct vk_device;
58 struct vk_device *device;
76 void vk_object_base_init(struct vk_device *device,
207 vk_object_alloc(struct vk_device *device,
213 vk_object_zalloc(struct vk_device *device,
221 vk_object_multialloc(struct vk_device *device,
227 vk_object_multizalloc(struct vk_device *device,
233 vk_object_free(struct vk_device *device,
247 vk_private_data_slot_create(struct vk_device *device,
252 vk_private_data_slot_destroy(struct vk_device *device,
[all …]
vk_object.c
34 vk_object_base_init(struct vk_device *device, in vk_object_base_init()
58 struct vk_device *device = base->device; in vk_object_base_recycle()
65 vk_object_alloc(struct vk_device *device, in vk_object_alloc()
81 vk_object_zalloc(struct vk_device *device, in vk_object_zalloc()
97 vk_object_multialloc(struct vk_device *device, in vk_object_multialloc()
113 vk_object_multizalloc(struct vk_device *device, in vk_object_multizalloc()
129 vk_object_free(struct vk_device *device, in vk_object_free()
138 vk_private_data_slot_create(struct vk_device *device, in vk_private_data_slot_create()
159 vk_private_data_slot_destroy(struct vk_device *device, in vk_private_data_slot_destroy()
173 get_swapchain_private_data_locked(struct vk_device *device, in get_swapchain_private_data_locked()
[all …]
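The vk_object.h/vk_object.c matches describe the common object base shared by all vk_device-owned objects. Below is a minimal sketch of a driver-side object built on them; the drv_event name is made up, and the vk_object_zalloc()/vk_object_free() parameter lists are assumptions filled in behind the truncated declarations above.

#include "vk_object.h"

struct drv_event {
   struct vk_object_base base;      /* first, so handle casts keep working */
   uint64_t value;                  /* hypothetical driver state */
};

static struct drv_event *
drv_event_create(struct vk_device *device,
                 const VkAllocationCallbacks *alloc)
{
   /* Assumed to allocate zeroed storage and run vk_object_base_init()
    * with the given VkObjectType. */
   return vk_object_zalloc(device, alloc, sizeof(struct drv_event),
                           VK_OBJECT_TYPE_EVENT);
}

static void
drv_event_destroy(struct vk_device *device,
                  const VkAllocationCallbacks *alloc,
                  struct drv_event *event)
{
   if (event == NULL)
      return;
   /* Assumed to finish the object base and release the allocation. */
   vk_object_free(device, alloc, event);
}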
vk_sync_timeline.h
91 VkResult vk_sync_timeline_init(struct vk_device *device,
95 VkResult vk_sync_timeline_alloc_point(struct vk_device *device,
100 void vk_sync_timeline_point_free(struct vk_device *device,
103 VkResult vk_sync_timeline_point_install(struct vk_device *device,
106 VkResult vk_sync_timeline_get_point(struct vk_device *device,
111 void vk_sync_timeline_point_release(struct vk_device *device,
vk_descriptor_set_layout.h
37 void (*destroy)(struct vk_device *device,
61 void *vk_descriptor_set_layout_zalloc(struct vk_device *device, size_t size);
63 void *vk_descriptor_set_layout_multizalloc(struct vk_device *device,
66 void vk_descriptor_set_layout_destroy(struct vk_device *device,
78 vk_descriptor_set_layout_unref(struct vk_device *device, in vk_descriptor_set_layout_unref()
vk_pipeline_layout.h
72 void (*destroy)(struct vk_device *device,
80 vk_pipeline_layout_zalloc(struct vk_device *device, size_t size,
84 vk_pipeline_layout_multizalloc(struct vk_device *device,
88 void vk_pipeline_layout_destroy(struct vk_device *device,
100 vk_pipeline_layout_unref(struct vk_device *device, in vk_pipeline_layout_unref()
vk_descriptor_set_layout.c
31 vk_descriptor_set_layout_init(struct vk_device *device, in vk_descriptor_set_layout_init()
42 vk_descriptor_set_layout_zalloc(struct vk_device *device, size_t size) in vk_descriptor_set_layout_zalloc()
59 vk_descriptor_set_layout_multizalloc(struct vk_device *device, in vk_descriptor_set_layout_multizalloc()
78 vk_descriptor_set_layout_destroy(struct vk_device *device, in vk_descriptor_set_layout_destroy()
89 VK_FROM_HANDLE(vk_device, device, _device); in vk_common_DestroyDescriptorSetLayout()
vk_sync.c
81 vk_sync_init(struct vk_device *device, in vk_sync_init()
103 vk_sync_finish(struct vk_device *device, in vk_sync_finish()
110 vk_sync_create(struct vk_device *device, in vk_sync_create()
135 vk_sync_destroy(struct vk_device *device, in vk_sync_destroy()
143 vk_sync_signal(struct vk_device *device, in vk_sync_signal()
158 vk_sync_get_value(struct vk_device *device, in vk_sync_get_value()
167 vk_sync_reset(struct vk_device *device, in vk_sync_reset()
175 VkResult vk_sync_move(struct vk_device *device, in vk_sync_move()
214 __vk_sync_wait(struct vk_device *device, in __vk_sync_wait()
240 vk_sync_wait(struct vk_device *device, in vk_sync_wait()
[all …]
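The vk_sync.h/vk_sync.c matches are the runtime's device-level sync primitive, dispatching through the vk_sync_type vtable shown in the vk_sync.h block. A hedged sketch of the create/signal/wait/destroy flow follows; the full parameter lists and the VK_SYNC_WAIT_COMPLETE flag are assumptions rather than something visible in the truncated lines above.

#include <stdint.h>
#include "vk_sync.h"

static VkResult
drv_signal_then_wait(struct vk_device *device,
                     const struct vk_sync_type *type)
{
   struct vk_sync *sync;
   VkResult result;

   /* Binary sync, initially unsignaled (assumed signature:
    * device, type, flags, initial_value, out). */
   result = vk_sync_create(device, type, 0, 0, &sync);
   if (result != VK_SUCCESS)
      return result;

   result = vk_sync_signal(device, sync, 0);
   if (result == VK_SUCCESS) {
      /* The runtime forwards this to type->wait / type->wait_many. */
      result = vk_sync_wait(device, sync, 0, VK_SYNC_WAIT_COMPLETE,
                            UINT64_MAX);
   }

   vk_sync_destroy(device, sync);
   return result;
}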
vk_meta.h
38 struct vk_device;
74 VkResult vk_meta_device_init(struct vk_device *device,
76 void vk_meta_device_finish(struct vk_device *device,
91 uint64_t vk_meta_cache_object(struct vk_device *device,
141 vk_meta_create_descriptor_set_layout(struct vk_device *device,
148 vk_meta_create_pipeline_layout(struct vk_device *device,
155 vk_meta_get_pipeline_layout(struct vk_device *device,
163 vk_meta_create_graphics_pipeline(struct vk_device *device,
171 vk_meta_create_compute_pipeline(struct vk_device *device,
178 vk_meta_create_sampler(struct vk_device *device,
[all …]
vk_pipeline_layout.c
35 vk_pipeline_layout_init(struct vk_device *device, in vk_pipeline_layout_init()
61 vk_pipeline_layout_zalloc(struct vk_device *device, size_t size, in vk_pipeline_layout_zalloc()
78 vk_pipeline_layout_multizalloc(struct vk_device *device, in vk_pipeline_layout_multizalloc()
99 VK_FROM_HANDLE(vk_device, device, _device); in vk_common_CreatePipelineLayout()
113 vk_pipeline_layout_destroy(struct vk_device *device, in vk_pipeline_layout_destroy()
131 VK_FROM_HANDLE(vk_device, device, _device); in vk_common_DestroyPipelineLayout()
vk_buffer.c
32 vk_buffer_init(struct vk_device *device, in vk_buffer_init()
53 vk_buffer_create(struct vk_device *device, in vk_buffer_create()
76 vk_buffer_destroy(struct vk_device *device, in vk_buffer_destroy()
88 VK_FROM_HANDLE(vk_device, device, _device); in vk_common_GetBufferMemoryRequirements()
107 VK_FROM_HANDLE(vk_device, device, _device); in vk_common_GetBufferMemoryRequirements2()
135 VK_FROM_HANDLE(vk_device, device, _device); in vk_common_BindBufferMemory()
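Several of the vk_common_* entrypoints above open with VK_FROM_HANDLE(vk_device, device, _device); that macro relies on the casts generated by VK_DEFINE_HANDLE_CASTS() in vk_device.h (line 265 above). A small sketch of the same pattern in a hypothetical entrypoint:

#include "vk_device.h"

VKAPI_ATTR void VKAPI_CALL
drv_ExampleEntrypoint(VkDevice _device)
{
   /* Turns the opaque VkDevice handle back into the runtime's
    * struct vk_device (and, by extension, the driver struct that
    * embeds it as its first member). */
   VK_FROM_HANDLE(vk_device, device, _device);

   (void)device;   /* driver work would go here */
}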
vk_drm_syncobj.c
45 vk_drm_syncobj_init(struct vk_device *device, in vk_drm_syncobj_init()
76 vk_drm_syncobj_finish(struct vk_device *device, in vk_drm_syncobj_finish()
87 vk_drm_syncobj_signal(struct vk_device *device, in vk_drm_syncobj_signal()
108 vk_drm_syncobj_get_value(struct vk_device *device, in vk_drm_syncobj_get_value()
125 vk_drm_syncobj_reset(struct vk_device *device, in vk_drm_syncobj_reset()
141 sync_has_sync_file(struct vk_device *device, struct vk_sync *sync) in sync_has_sync_file()
170 spin_wait_for_sync_file(struct vk_device *device, in spin_wait_for_sync_file()
208 vk_drm_syncobj_wait_many(struct vk_device *device, in vk_drm_syncobj_wait_many()
294 vk_drm_syncobj_import_opaque_fd(struct vk_device *device, in vk_drm_syncobj_import_opaque_fd()
317 vk_drm_syncobj_export_opaque_fd(struct vk_device *device, in vk_drm_syncobj_export_opaque_fd()
[all …]
vk_query_pool.h
48 void vk_query_pool_init(struct vk_device *device,
52 void *vk_query_pool_create(struct vk_device *device,
56 void vk_query_pool_destroy(struct vk_device *device,
vk_sync_timeline.c
57 vk_sync_timeline_init(struct vk_device *device, in vk_sync_timeline_init()
87 vk_sync_timeline_finish(struct vk_device *device, in vk_sync_timeline_finish()
123 vk_sync_timeline_gc_locked(struct vk_device *device,
128 vk_sync_timeline_alloc_point_locked(struct vk_device *device, in vk_sync_timeline_alloc_point_locked()
181 vk_sync_timeline_alloc_point(struct vk_device *device, in vk_sync_timeline_alloc_point()
204 vk_sync_timeline_point_free(struct vk_device *device, in vk_sync_timeline_point_free()
248 vk_sync_timeline_gc_locked(struct vk_device *device, in vk_sync_timeline_gc_locked()
293 vk_sync_timeline_point_install(struct vk_device *device, in vk_sync_timeline_point_install()
318 vk_sync_timeline_get_point_locked(struct vk_device *device, in vk_sync_timeline_get_point_locked()
342 vk_sync_timeline_get_point(struct vk_device *device, in vk_sync_timeline_get_point()
[all …]
vk_sync_dummy.c
27 vk_sync_dummy_init(struct vk_device *device, in vk_sync_dummy_init()
35 vk_sync_dummy_finish(struct vk_device *device, in vk_sync_dummy_finish()
40 vk_sync_dummy_wait_many(struct vk_device *device, in vk_sync_dummy_wait_many()
vk_buffer_view.h
59 void vk_buffer_view_init(struct vk_device *device,
63 void *vk_buffer_view_create(struct vk_device *device,
67 void vk_buffer_view_destroy(struct vk_device *device,
vk_fence.h
60 VkResult vk_fence_create(struct vk_device *device,
65 void vk_fence_destroy(struct vk_device *device,
69 void vk_fence_reset_temporary(struct vk_device *device,
vk_pipeline.h
34 struct vk_device;
56 vk_pipeline_shader_stage_to_nir(struct vk_device *device,
86 vk_pipeline_robustness_state_fill(const struct vk_device *device,
vk_sync_binary.c
37 vk_sync_binary_init(struct vk_device *device, in vk_sync_binary_init()
56 vk_sync_binary_finish(struct vk_device *device, in vk_sync_binary_finish()
65 vk_sync_binary_reset(struct vk_device *device, in vk_sync_binary_reset()
76 vk_sync_binary_signal(struct vk_device *device, in vk_sync_binary_signal()
88 vk_sync_binary_wait_many(struct vk_device *device, in vk_sync_binary_wait_many()
vk_pipeline_cache.h
89 void (*destroy)(struct vk_device *device,
119 vk_pipeline_cache_object_init(struct vk_device *device, in vk_pipeline_cache_object_init()
147 vk_pipeline_cache_object_unref(struct vk_device *device,
204 vk_pipeline_cache_create(struct vk_device *device,
304 vk_raw_data_cache_object_create(struct vk_device *device,
vk_buffer.h
47 void vk_buffer_init(struct vk_device *device,
52 void *vk_buffer_create(struct vk_device *device,
56 void vk_buffer_destroy(struct vk_device *device,
vk_image.h
90 void vk_image_init(struct vk_device *device,
95 void *vk_image_create(struct vk_device *device,
99 void vk_image_destroy(struct vk_device *device,
104 vk_image_create_get_format_list(struct vk_device *device,
332 void vk_image_view_init(struct vk_device *device,
338 void *vk_image_view_create(struct vk_device *device,
343 void vk_image_view_destroy(struct vk_device *device,
/hardware/google/gfxstream/guest/mesa/src/vulkan/runtime/rmv/
vk_rmv_common.h
103 struct vk_device;
117 struct vk_device;
119 void vk_memory_trace_init(struct vk_device *device, const struct vk_rmv_device_info *device_info);
121 void vk_memory_trace_finish(struct vk_device *device);
127 void vk_rmv_log_buffer_create(struct vk_device *device, bool is_internal, VkBuffer _buffer);
128 void vk_rmv_log_cpu_map(struct vk_device *device, uint64_t va, bool is_unmap);
129 void vk_rmv_log_misc_token(struct vk_device *device, enum vk_rmv_misc_event_type type);
134 uint32_t vk_rmv_get_resource_id_locked(struct vk_device *device, uint64_t handle);
138 void vk_rmv_destroy_resource_id_locked(struct vk_device *device, uint64_t handle);
vk_rmv_common.c
29 vk_memory_trace_init(struct vk_device *device, const struct vk_rmv_device_info *device_info) in vk_memory_trace_init()
41 vk_memory_trace_finish(struct vk_device *device) in vk_memory_trace_finish()
80 vk_rmv_get_resource_id_locked(struct vk_device *device, uint64_t handle) in vk_rmv_get_resource_id_locked()
93 vk_rmv_destroy_resource_id_locked(struct vk_device *device, uint64_t handle) in vk_rmv_destroy_resource_id_locked()
99 vk_rmv_log_buffer_create(struct vk_device *device, bool is_internal, VkBuffer _buffer) in vk_rmv_log_buffer_create()
119 vk_rmv_log_cpu_map(struct vk_device *device, uint64_t va, bool is_unmap) in vk_rmv_log_cpu_map()
134 vk_rmv_log_misc_token(struct vk_device *device, enum vk_rmv_misc_event_type type) in vk_rmv_log_misc_token()
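The rmv/ matches expose the runtime's RMV memory-trace hooks hanging off struct vk_device. The vk_rmv_log_buffer_create() and vk_rmv_log_cpu_map() signatures are shown in full at vk_rmv_common.h lines 127 and 128 above, so a short usage sketch is possible; the drv_trace_buffer wrapper and the timing of the calls are illustrative only.

#include <stdbool.h>
#include <stdint.h>
#include "vk_rmv_common.h"

static void
drv_trace_buffer(struct vk_device *device, VkBuffer buffer, uint64_t map_va)
{
   /* Record the buffer creation; false marks it as an application
    * (non-internal) resource. */
   vk_rmv_log_buffer_create(device, false, buffer);

   /* Record a CPU map and the matching unmap of its backing memory. */
   vk_rmv_log_cpu_map(device, map_va, false);
   vk_rmv_log_cpu_map(device, map_va, true);
}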
