Lines Matching refs:submission

4355 radv_queue_trigger_submission(struct radv_deferred_queue_submission *submission,
4361 const struct radv_queue_submission *submission, in radv_create_deferred_submission() argument
4368 for (uint32_t i = 0; i < submission->wait_semaphore_count; ++i) { in radv_create_deferred_submission()
4369 RADV_FROM_HANDLE(radv_semaphore, semaphore, submission->wait_semaphores[i]); in radv_create_deferred_submission()
4374 size += submission->cmd_buffer_count * sizeof(VkCommandBuffer); in radv_create_deferred_submission()
4375 size += submission->buffer_bind_count * sizeof(VkSparseBufferMemoryBindInfo); in radv_create_deferred_submission()
4376 size += submission->image_opaque_bind_count * sizeof(VkSparseImageOpaqueMemoryBindInfo); in radv_create_deferred_submission()
4377 size += submission->wait_semaphore_count * sizeof(struct radv_semaphore_part *); in radv_create_deferred_submission()
4379 size += submission->signal_semaphore_count * sizeof(struct radv_semaphore_part *); in radv_create_deferred_submission()
4380 size += submission->wait_value_count * sizeof(uint64_t); in radv_create_deferred_submission()
4381 size += submission->signal_value_count * sizeof(uint64_t); in radv_create_deferred_submission()
4382 size += submission->wait_semaphore_count * sizeof(struct radv_timeline_waiter); in radv_create_deferred_submission()
4391 deferred->cmd_buffer_count = submission->cmd_buffer_count; in radv_create_deferred_submission()
4392 if (submission->cmd_buffer_count) { in radv_create_deferred_submission()
4393 memcpy(deferred->cmd_buffers, submission->cmd_buffers, in radv_create_deferred_submission()
4394 submission->cmd_buffer_count * sizeof(*deferred->cmd_buffers)); in radv_create_deferred_submission()
4397 deferred->buffer_binds = (void*)(deferred->cmd_buffers + submission->cmd_buffer_count); in radv_create_deferred_submission()
4398 deferred->buffer_bind_count = submission->buffer_bind_count; in radv_create_deferred_submission()
4399 if (submission->buffer_bind_count) { in radv_create_deferred_submission()
4400 memcpy(deferred->buffer_binds, submission->buffer_binds, in radv_create_deferred_submission()
4401 submission->buffer_bind_count * sizeof(*deferred->buffer_binds)); in radv_create_deferred_submission()
4404 deferred->image_opaque_binds = (void*)(deferred->buffer_binds + submission->buffer_bind_count); in radv_create_deferred_submission()
4405 deferred->image_opaque_bind_count = submission->image_opaque_bind_count; in radv_create_deferred_submission()
4406 if (submission->image_opaque_bind_count) { in radv_create_deferred_submission()
4407 memcpy(deferred->image_opaque_binds, submission->image_opaque_binds, in radv_create_deferred_submission()
4408 submission->image_opaque_bind_count * sizeof(*deferred->image_opaque_binds)); in radv_create_deferred_submission()
4411 deferred->flush_caches = submission->flush_caches; in radv_create_deferred_submission()
4412 deferred->wait_dst_stage_mask = submission->wait_dst_stage_mask; in radv_create_deferred_submission()
4415 deferred->wait_semaphore_count = submission->wait_semaphore_count; in radv_create_deferred_submission()
4418 deferred->signal_semaphore_count = submission->signal_semaphore_count; in radv_create_deferred_submission()
4420 deferred->fence = submission->fence; in radv_create_deferred_submission()
4426 for (uint32_t i = 0; i < submission->wait_semaphore_count; ++i) { in radv_create_deferred_submission()
4427 RADV_FROM_HANDLE(radv_semaphore, semaphore, submission->wait_semaphores[i]); in radv_create_deferred_submission()
4437 for (uint32_t i = 0; i < submission->signal_semaphore_count; ++i) { in radv_create_deferred_submission()
4438 RADV_FROM_HANDLE(radv_semaphore, semaphore, submission->signal_semaphores[i]); in radv_create_deferred_submission()
4447 if (submission->wait_value_count) { in radv_create_deferred_submission()
4448 memcpy(deferred->wait_values, submission->wait_values, submission->wait_value_count * sizeof(uint64_t)); in radv_create_deferred_submission()
4450 deferred->signal_values = deferred->wait_values + submission->wait_value_count; in radv_create_deferred_submission()
4451 if (submission->signal_value_count) { in radv_create_deferred_submission()
4452 memcpy(deferred->signal_values, submission->signal_values, submission->signal_value_count * sizeof(uint64_t)); in radv_create_deferred_submission()
4455 deferred->wait_nodes = (void*)(deferred->signal_values + submission->signal_value_count); in radv_create_deferred_submission()
4458 deferred->submission_wait_count = 1 + submission->wait_semaphore_count; in radv_create_deferred_submission()
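
The references above show radv_create_deferred_submission() sizing a single allocation for the whole deferred submission: lines 4374-4382 add up the space for the command buffers, sparse binds, semaphore parts, timeline values and waiter nodes, and lines 4391-4455 then carve each trailing array out of that one block, each pointer bumped past the previous array. A minimal standalone sketch of the same pattern follows; the names and the stubbed VkCommandBuffer type are illustrative, not the RADV structures themselves.

    #include <stdint.h>
    #include <stdlib.h>
    #include <string.h>

    /* Stub so the sketch compiles without the Vulkan headers. */
    typedef void *VkCommandBuffer;

    struct deferred_example {
       uint32_t wait_value_count;
       uint64_t *wait_values;
       uint32_t cmd_buffer_count;
       VkCommandBuffer *cmd_buffers;
    };

    static struct deferred_example *
    create_deferred_example(const uint64_t *wait_values, uint32_t wait_value_count,
                            const VkCommandBuffer *cmd_buffers, uint32_t cmd_buffer_count)
    {
       /* Pass 1: add up the struct plus every trailing array, once each. */
       size_t size = sizeof(struct deferred_example);
       size += wait_value_count * sizeof(uint64_t);
       size += cmd_buffer_count * sizeof(VkCommandBuffer);

       struct deferred_example *d = malloc(size);
       if (!d)
          return NULL;

       /* Pass 2: carve the arrays out of the block, one after the other. */
       d->wait_values = (void *)(d + 1);
       d->wait_value_count = wait_value_count;
       if (wait_value_count)
          memcpy(d->wait_values, wait_values, wait_value_count * sizeof(uint64_t));

       d->cmd_buffers = (void *)(d->wait_values + wait_value_count);
       d->cmd_buffer_count = cmd_buffer_count;
       if (cmd_buffer_count)
          memcpy(d->cmd_buffers, cmd_buffers, cmd_buffer_count * sizeof(*d->cmd_buffers));

       return d;
    }

One malloc and one free cover every array the submission owns, which is why line 4673 can release the whole thing with a single free(submission).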
4465 radv_queue_enqueue_submission(struct radv_deferred_queue_submission *submission, in radv_queue_enqueue_submission() argument
4469 struct radv_timeline_waiter *waiter = submission->wait_nodes; in radv_queue_enqueue_submission()
4470 for (uint32_t i = 0; i < submission->wait_semaphore_count; ++i) { in radv_queue_enqueue_submission()
4471 if (submission->wait_semaphores[i]->kind == RADV_SEMAPHORE_TIMELINE) { in radv_queue_enqueue_submission()
4472 pthread_mutex_lock(&submission->wait_semaphores[i]->timeline.mutex); in radv_queue_enqueue_submission()
4473 if (submission->wait_semaphores[i]->timeline.highest_submitted < submission->wait_values[i]) { in radv_queue_enqueue_submission()
4475 waiter->value = submission->wait_values[i]; in radv_queue_enqueue_submission()
4476 waiter->submission = submission; in radv_queue_enqueue_submission()
4477 list_addtail(&waiter->list, &submission->wait_semaphores[i]->timeline.waiters); in radv_queue_enqueue_submission()
4480 pthread_mutex_unlock(&submission->wait_semaphores[i]->timeline.mutex); in radv_queue_enqueue_submission()
4484 pthread_mutex_lock(&submission->queue->pending_mutex); in radv_queue_enqueue_submission()
4486 bool is_first = list_is_empty(&submission->queue->pending_submissions); in radv_queue_enqueue_submission()
4487 list_addtail(&submission->queue_pending_list, &submission->queue->pending_submissions); in radv_queue_enqueue_submission()
4489 pthread_mutex_unlock(&submission->queue->pending_mutex); in radv_queue_enqueue_submission()
4494 uint32_t decrement = submission->wait_semaphore_count - wait_cnt + (is_first ? 1 : 0); in radv_queue_enqueue_submission()
4501 return radv_queue_trigger_submission(submission, decrement, processing_list); in radv_queue_enqueue_submission()
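
Lines 4458, 4494 and 4799 together describe a countdown: the submission starts with 1 + wait_semaphore_count pending references, radv_queue_enqueue_submission() releases the ones that are already satisfied (plus the queue-order slot when it is first in line), and radv_queue_trigger_submission() atomically subtracts the rest as they complete. Only the caller whose decrement brings the counter to zero moves the submission forward. A reduced sketch of that decrement-and-test step, using C11 atomics in place of Mesa's p_atomic_add_return and illustrative names:

    #include <stdatomic.h>
    #include <stdbool.h>

    struct pending_submission {
       /* 1 for the queue-order slot plus one per unsatisfied semaphore wait,
        * matching the "1 + wait_semaphore_count" initialisation on line 4458. */
       atomic_int submission_wait_count;
    };

    /* Subtract the waits that were just satisfied; only the caller that drops
     * the counter to zero should schedule the submission.  A sketch of the
     * decrement-and-test, not the RADV function itself. */
    static bool
    submission_ready_after_decrement(struct pending_submission *s, int decrement)
    {
       int old = atomic_fetch_sub(&s->submission_wait_count, decrement);
       return old - decrement == 0;
    }

Once the count reaches zero, radv_queue_trigger_submission() (lines 4802-4828) appears to either append the submission to the caller's processing list or hand it to the per-queue submission thread; that dispatch decision is omitted from the sketch.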
4505 radv_queue_submission_update_queue(struct radv_deferred_queue_submission *submission, in radv_queue_submission_update_queue() argument
4508 pthread_mutex_lock(&submission->queue->pending_mutex); in radv_queue_submission_update_queue()
4509 list_del(&submission->queue_pending_list); in radv_queue_submission_update_queue()
4512 if (!list_is_empty(&submission->queue->pending_submissions)) { in radv_queue_submission_update_queue()
4514 list_first_entry(&submission->queue->pending_submissions, in radv_queue_submission_update_queue()
4519 pthread_mutex_unlock(&submission->queue->pending_mutex); in radv_queue_submission_update_queue()
4521 pthread_cond_broadcast(&submission->queue->device->timeline_cond); in radv_queue_submission_update_queue()
4525 radv_queue_submit_deferred(struct radv_deferred_queue_submission *submission, in radv_queue_submit_deferred() argument
4528 RADV_FROM_HANDLE(radv_fence, fence, submission->fence); in radv_queue_submit_deferred()
4529 struct radv_queue *queue = submission->queue; in radv_queue_submit_deferred()
4533 bool do_flush = submission->flush_caches || submission->wait_dst_stage_mask; in radv_queue_submit_deferred()
4559 result = radv_get_preambles(queue, submission->cmd_buffers, in radv_queue_submit_deferred()
4560 submission->cmd_buffer_count, in radv_queue_submit_deferred()
4569 submission->wait_semaphore_count, in radv_queue_submit_deferred()
4570 submission->wait_semaphores, in radv_queue_submit_deferred()
4571 submission->wait_values, in radv_queue_submit_deferred()
4572 submission->signal_semaphore_count, in radv_queue_submit_deferred()
4573 submission->signal_semaphores, in radv_queue_submit_deferred()
4574 submission->signal_values, in radv_queue_submit_deferred()
4575 submission->fence); in radv_queue_submit_deferred()
4579 for (uint32_t i = 0; i < submission->buffer_bind_count; ++i) { in radv_queue_submit_deferred()
4581 submission->buffer_binds + i); in radv_queue_submit_deferred()
4586 for (uint32_t i = 0; i < submission->image_opaque_bind_count; ++i) { in radv_queue_submit_deferred()
4588 submission->image_opaque_binds + i); in radv_queue_submit_deferred()
4593 if (!submission->cmd_buffer_count) { in radv_queue_submit_deferred()
4603 (submission->cmd_buffer_count)); in radv_queue_submit_deferred()
4605 for (uint32_t j = 0; j < submission->cmd_buffer_count; j++) { in radv_queue_submit_deferred()
4606 RADV_FROM_HANDLE(radv_cmd_buffer, cmd_buffer, submission->cmd_buffers[j]); in radv_queue_submit_deferred()
4616 for (uint32_t j = 0; j < submission->cmd_buffer_count; j += advance) { in radv_queue_submit_deferred()
4621 submission->cmd_buffer_count - j); in radv_queue_submit_deferred()
4627 sem_info.cs_emit_signal = j + advance == submission->cmd_buffer_count; in radv_queue_submit_deferred()
4658 submission->temporary_semaphore_part_count, in radv_queue_submit_deferred()
4659 submission->temporary_semaphore_parts); in radv_queue_submit_deferred()
4661 submission->wait_semaphore_count, in radv_queue_submit_deferred()
4662 submission->wait_semaphores, in radv_queue_submit_deferred()
4663 submission->wait_values, in radv_queue_submit_deferred()
4664 submission->signal_semaphore_count, in radv_queue_submit_deferred()
4665 submission->signal_semaphores, in radv_queue_submit_deferred()
4666 submission->signal_values, in radv_queue_submit_deferred()
4671 radv_queue_submission_update_queue(submission, processing_list); in radv_queue_submit_deferred()
4673 free(submission); in radv_queue_submit_deferred()
4689 submission->temporary_semaphore_part_count, in radv_queue_submit_deferred()
4690 submission->temporary_semaphore_parts); in radv_queue_submit_deferred()
4691 free(submission); in radv_queue_submit_deferred()
4699 struct radv_deferred_queue_submission *submission = in radv_process_submissions() local
4701 list_del(&submission->processing_list); in radv_process_submissions()
4703 VkResult result = radv_queue_submit_deferred(submission, processing_list); in radv_process_submissions()
4711 wait_for_submission_timelines_available(struct radv_deferred_queue_submission *submission, in wait_for_submission_timelines_available() argument
4714 struct radv_device *device = submission->queue->device; in wait_for_submission_timelines_available()
4718 for (uint32_t i = 0; i < submission->wait_semaphore_count; ++i) { in wait_for_submission_timelines_available()
4719 if (submission->wait_semaphores[i]->kind != RADV_SEMAPHORE_TIMELINE_SYNCOBJ) in wait_for_submission_timelines_available()
4722 if (submission->wait_semaphores[i]->timeline_syncobj.max_point >= submission->wait_values[i]) in wait_for_submission_timelines_available()
4736 for (uint32_t i = 0; i < submission->wait_semaphore_count; ++i) { in wait_for_submission_timelines_available()
4737 if (submission->wait_semaphores[i]->kind != RADV_SEMAPHORE_TIMELINE_SYNCOBJ) in wait_for_submission_timelines_available()
4740 if (submission->wait_semaphores[i]->timeline_syncobj.max_point >= submission->wait_values[i]) in wait_for_submission_timelines_available()
4743 syncobj[syncobj_idx] = submission->wait_semaphores[i]->syncobj; in wait_for_submission_timelines_available()
4744 points[syncobj_idx] = submission->wait_values[i]; in wait_for_submission_timelines_available()
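
wait_for_submission_timelines_available() (lines 4718-4744) scans the wait semaphores twice with the same predicate: the first loop counts the timeline-syncobj waits whose max_point has not yet reached the requested value, arrays are sized from that count, and the second loop fills in the syncobj handles and target points before the kernel wait. A generic sketch of that count-then-fill idiom, with hypothetical types in place of radv_semaphore:

    #include <stdbool.h>
    #include <stdint.h>
    #include <stdlib.h>

    struct wait_entry {
       bool is_timeline_syncobj;  /* stands in for kind == RADV_SEMAPHORE_TIMELINE_SYNCOBJ */
       uint32_t syncobj;          /* kernel syncobj handle */
       uint64_t max_point;        /* highest point already submitted to the kernel */
       uint64_t wait_value;       /* point this submission waits for */
    };

    /* Count-then-fill: both loops use the same predicate, so the arrays are
     * sized exactly.  Names and types are illustrative, not the RADV ones. */
    static int
    collect_pending_waits(const struct wait_entry *waits, uint32_t count,
                          uint32_t **out_handles, uint64_t **out_points, uint32_t *out_n)
    {
       uint32_t n = 0;
       for (uint32_t i = 0; i < count; ++i) {
          if (waits[i].is_timeline_syncobj && waits[i].max_point < waits[i].wait_value)
             n++;
       }

       *out_handles = NULL;
       *out_points = NULL;
       *out_n = 0;
       if (!n)
          return 0;

       uint32_t *handles = malloc(n * sizeof(*handles));
       uint64_t *points = malloc(n * sizeof(*points));
       if (!handles || !points) {
          free(handles);
          free(points);
          return -1;
       }

       uint32_t idx = 0;
       for (uint32_t i = 0; i < count; ++i) {
          if (waits[i].is_timeline_syncobj && waits[i].max_point < waits[i].wait_value) {
             handles[idx] = waits[i].syncobj;
             points[idx] = waits[i].wait_value;
             idx++;
          }
       }

       *out_handles = handles;
       *out_points = points;
       *out_n = n;
       return 0;
    }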
4759 struct radv_deferred_queue_submission *submission = queue->thread_submission; in radv_queue_submission_thread_run() local
4762 if (!submission) { in radv_queue_submission_thread_run()
4771 result = wait_for_submission_timelines_available(submission, in radv_queue_submission_thread_run()
4783 list_addtail(&submission->processing_list, &processing_list); in radv_queue_submission_thread_run()
4793 radv_queue_trigger_submission(struct radv_deferred_queue_submission *submission, in radv_queue_trigger_submission() argument
4797 struct radv_queue *queue = submission->queue; in radv_queue_trigger_submission()
4799 if (p_atomic_add_return(&submission->submission_wait_count, -decrement)) in radv_queue_trigger_submission()
4802 if (wait_for_submission_timelines_available(submission, radv_get_absolute_timeout(0)) == VK_SUCCESS) { in radv_queue_trigger_submission()
4803 list_addtail(&submission->processing_list, processing_list); in radv_queue_trigger_submission()
4828 queue->thread_submission = submission; in radv_queue_trigger_submission()
4836 const struct radv_queue_submission *submission) in radv_queue_submit() argument
4840 VkResult result = radv_create_deferred_submission(queue, submission, &deferred); in radv_queue_submit()
6184 radv_queue_trigger_submission(waiter->submission, 1, processing_list); in radv_timeline_trigger_waiters_locked()