Lines Matching full:fd
55 int fd, ret; in i915_reset_control() local
59 fd = open(path, O_RDWR); in i915_reset_control()
60 igt_require(fd >= 0); in i915_reset_control()
62 ret = write(fd, &"01"[enable], 1) == 1; in i915_reset_control()
63 close(fd); in i915_reset_control()
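
The fragments at lines 55-63 suggest i915_reset_control() opens a writable control file, requires the open to succeed, writes a single '0' or '1' character, and closes the descriptor. A minimal reconstruction is sketched below; the sysfs path, the bool return type and the comments are assumptions not visible in this listing (on tests of this vintage the knob is typically the i915.reset module parameter). All sketches in this document assume the usual IGT test preamble (#include "igt.h") plus the obvious libc headers (fcntl.h, unistd.h, errno.h, string.h), and later sketches build on the helpers sketched earlier.

#include <fcntl.h>
#include <unistd.h>
#include "igt.h"

/* Assumed location of the reset knob; not shown in the listing. */
#define RESET_PARAM "/sys/module/i915/parameters/reset"

static bool i915_reset_control(bool enable)
{
        int fd, ret;

        fd = open(RESET_PARAM, O_RDWR);
        igt_require(fd >= 0);           /* skip if the knob is absent */

        /* Write a single '0'/'1' character to disable/enable GPU resets. */
        ret = write(fd, &"01"[enable], 1) == 1;
        close(fd);

        return ret;
}
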
68 static void trigger_reset(int fd) in trigger_reset() argument
75 igt_force_gpu_reset(fd); in trigger_reset()
79 gem_test_engine(fd, ALL_ENGINES); in trigger_reset()
80 igt_drop_caches_set(fd, DROP_ACTIVE); in trigger_reset()
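
trigger_reset() (lines 68-80) forces a GPU reset and then checks the device is usable again. A sketch built from the three calls visible above; the original also bounds how long recovery may take, which is omitted here.

static void trigger_reset(int fd)
{
        /* Force a full GPU reset to recover from a terminally wedged state. */
        igt_force_gpu_reset(fd);

        /* The device should be usable again: submit a nop to every engine. */
        gem_test_engine(fd, ALL_ENGINES);

        /* And flush whatever that left behind. */
        igt_drop_caches_set(fd, DROP_ACTIVE);
}
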
95 static void wedge_gpu(int fd) in wedge_gpu() argument
98 gem_quiescent_gpu(fd); in wedge_gpu()
101 manual_hang(fd); in wedge_gpu()
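
wedge_gpu() (lines 95-103) idles the GPU and then injects an unrecoverable hang. manual_hang() is a helper elsewhere in the file that is not expanded in this listing; the reset_control bracketing below is an assumption, consistent with the helper sketched above.

static void wedge_gpu(int fd)
{
        /* Idle first so only our deliberate hang is outstanding. */
        gem_quiescent_gpu(fd);

        /* Presumably: disable resets so the hang wedges the GPU... */
        igt_require(i915_reset_control(false));

        /* ...inject the hang (helper defined elsewhere in the file)... */
        manual_hang(fd);

        /* ...and re-enable resets for the cleanup path. */
        igt_assert(i915_reset_control(true));
}
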
105 static int __gem_throttle(int fd) in __gem_throttle() argument
108 if (drmIoctl(fd, DRM_IOCTL_I915_GEM_THROTTLE, NULL)) in __gem_throttle()
113 static void test_throttle(int fd) in test_throttle() argument
115 wedge_gpu(fd); in test_throttle()
117 igt_assert_eq(__gem_throttle(fd), -EIO); in test_throttle()
119 trigger_reset(fd); in test_throttle()
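
__gem_throttle() (lines 105-110) wraps DRM_IOCTL_I915_GEM_THROTTLE and converts a failure into a negative errno, and test_throttle() (lines 113-119) uses it to check that the ioctl reports -EIO once the GPU is wedged. A sketch, built on the helpers above:

static int __gem_throttle(int fd)
{
        int err = 0;

        /* THROTTLE takes no argument; on failure report -errno, kernel style. */
        if (drmIoctl(fd, DRM_IOCTL_I915_GEM_THROTTLE, NULL))
                err = -errno;

        return err;
}

static void test_throttle(int fd)
{
        wedge_gpu(fd);

        /* A wedged GPU must fail the throttle ioctl with -EIO. */
        igt_assert_eq(__gem_throttle(fd), -EIO);

        trigger_reset(fd);
}
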
122 static void test_context_create(int fd) in test_context_create() argument
126 gem_require_contexts(fd); in test_context_create()
128 wedge_gpu(fd); in test_context_create()
130 igt_assert_eq(__gem_context_create(fd, &ctx), -EIO); in test_context_create()
132 trigger_reset(fd); in test_context_create()
135 static void test_execbuf(int fd) in test_execbuf() argument
144 exec.handle = gem_create(fd, 4096); in test_execbuf()
145 gem_write(fd, exec.handle, 0, tmp, sizeof(tmp)); in test_execbuf()
150 wedge_gpu(fd); in test_execbuf()
152 igt_assert_eq(__gem_execbuf(fd, &execbuf), -EIO); in test_execbuf()
153 gem_close(fd, exec.handle); in test_execbuf()
155 trigger_reset(fd); in test_execbuf()
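
test_execbuf() (lines 135-155) builds the smallest possible batch (a lone MI_BATCH_BUFFER_END), wedges the GPU, and expects submission to fail with -EIO. The buffers_ptr/buffer_count setup is assumed from the usual IGT pattern; only the marked lines appear in the listing.

static void test_execbuf(int fd)
{
        struct drm_i915_gem_execbuffer2 execbuf;
        struct drm_i915_gem_exec_object2 exec;
        uint32_t tmp[] = { MI_BATCH_BUFFER_END };

        memset(&exec, 0, sizeof(exec));
        memset(&execbuf, 0, sizeof(execbuf));

        /* A 4KiB object containing only a batch-buffer-end terminator. */
        exec.handle = gem_create(fd, 4096);
        gem_write(fd, exec.handle, 0, tmp, sizeof(tmp));

        execbuf.buffers_ptr = to_user_pointer(&exec);
        execbuf.buffer_count = 1;

        wedge_gpu(fd);

        /* Submission on a wedged GPU must be rejected with -EIO. */
        igt_assert_eq(__gem_execbuf(fd, &execbuf), -EIO);
        gem_close(fd, exec.handle);

        trigger_reset(fd);
}
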
158 static int __gem_wait(int fd, uint32_t handle, int64_t timeout) in __gem_wait() argument
167 if (drmIoctl(fd, DRM_IOCTL_I915_GEM_WAIT, &wait)) in __gem_wait()
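
__gem_wait() (lines 158-171) is a local wrapper around DRM_IOCTL_I915_GEM_WAIT that, unlike the library's gem_wait(), returns the error instead of asserting, so callers can check for success explicitly. A sketch:

static int __gem_wait(int fd, uint32_t handle, int64_t timeout)
{
        struct drm_i915_gem_wait wait = {
                .bo_handle = handle,
                .timeout_ns = timeout,
        };
        int err = 0;

        if (drmIoctl(fd, DRM_IOCTL_I915_GEM_WAIT, &wait))
                err = -errno;

        return err;
}
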
174 static igt_spin_t * __spin_poll(int fd, uint32_t ctx, unsigned long flags) in __spin_poll() argument
182 if (gem_can_store_dword(fd, opts.engine)) in __spin_poll()
185 return __igt_spin_factory(fd, &opts); in __spin_poll()
188 static void __spin_wait(int fd, igt_spin_t *spin) in __spin_wait() argument
198 static igt_spin_t * spin_sync(int fd, uint32_t ctx, unsigned long flags) in spin_sync() argument
200 igt_spin_t *spin = __spin_poll(fd, ctx, flags); in spin_sync()
202 __spin_wait(fd, spin); in spin_sync()
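
__spin_poll(), __spin_wait() and spin_sync() (lines 174-205) wrap IGT's dummy-load ("spin batch") factory so that callers get a spinner known to have reached the GPU. The factory flags and the wait strategy are assumptions: the real helper busy-waits on the spinner's poll page when the engine can store a dword, whereas this sketch falls back to a plain sleep.

static igt_spin_t *__spin_poll(int fd, uint32_t ctx, unsigned long flags)
{
        struct igt_spin_factory opts = {
                .ctx = ctx,
                .engine = flags,
        };

        /* Ask for a pollable spinner where the engine supports it (assumed). */
        if (gem_can_store_dword(fd, opts.engine))
                opts.flags |= IGT_SPIN_POLL_RUN;

        return __igt_spin_factory(fd, &opts);
}

static void __spin_wait(int fd, igt_spin_t *spin)
{
        /*
         * Simplification: the original busy-waits on the spinner's poll page
         * when IGT_SPIN_POLL_RUN was available; here we just sleep long
         * enough for the batch to start executing.
         */
        usleep(500 * 1000);
}

static igt_spin_t *spin_sync(int fd, uint32_t ctx, unsigned long flags)
{
        igt_spin_t *spin = __spin_poll(fd, ctx, flags);

        __spin_wait(fd, spin);

        return spin;
}
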
229 static void hang_after(int fd, unsigned int us, struct timespec *ts) in hang_after() argument
244 ctx->debugfs = igt_debugfs_dir(fd); in hang_after()
257 static void check_wait(int fd, uint32_t bo, unsigned int wait, igt_stats_t *st) in check_wait() argument
262 hang_after(fd, wait, &ts); in check_wait()
265 manual_hang(fd); in check_wait()
268 gem_sync(fd, bo); in check_wait()
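
check_wait() (lines 257-271) hangs the GPU either after a delay (via hang_after(), which arms a deferred hang against the debugfs dir obtained at line 244) or immediately, then waits on the given buffer and optionally records how long the wait took. A sketch with the timing bookkeeping hedged:

static void check_wait(int fd, uint32_t bo, unsigned int wait, igt_stats_t *st)
{
        struct timespec ts = {};

        if (wait) {
                /* Arm a deferred hang to fire roughly 'wait' us from now. */
                hang_after(fd, wait, &ts);
        } else {
                /* Record the start time, then wedge the GPU right away. */
                igt_nsec_elapsed(&ts);
                manual_hang(fd);
        }

        /* The wait must complete (via the error path), not hang forever. */
        gem_sync(fd, bo);

        if (st)
                igt_stats_push(st, igt_nsec_elapsed(&ts));
}
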
274 static void check_wait_elapsed(const char *prefix, int fd, igt_stats_t *st) in check_wait_elapsed() argument
294 if (intel_gen(intel_get_drm_devid(fd)) < 5) in check_wait_elapsed()
309 static void __test_banned(int fd) in __test_banned() argument
312 .handle = gem_create(fd, 4096), in __test_banned()
321 gem_write(fd, obj.handle, 0, &bbe, sizeof(bbe)); in __test_banned()
323 gem_quiescent_gpu(fd); in __test_banned()
329 if (__gem_execbuf(fd, &execbuf) == -EIO) { in __test_banned()
336 igt_assert_neq(__gem_context_create(fd, &ctx), -EIO); in __test_banned()
340 gem_execbuf(fd, &execbuf); in __test_banned()
342 gem_context_destroy(fd, ctx); in __test_banned()
348 hang = spin_sync(fd, 0, 0); in __test_banned()
349 trigger_reset(fd); in __test_banned()
350 igt_spin_free(fd, hang); in __test_banned()
360 static void test_banned(int fd) in test_banned() argument
362 fd = gem_reopen_driver(fd); in test_banned()
363 __test_banned(fd); in test_banned()
364 close(fd); in test_banned()
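
__test_banned() (lines 309-357) repeatedly gets its default context blamed for hangs until execbuf starts returning -EIO, then checks that only the context was banned: creating a fresh context must still succeed and must still be able to submit. test_banned() simply runs it on a private reopened fd. The sketch below compresses the loop; the 5s timeout, the hang counting and the messages are assumptions.

static void __test_banned(int fd)
{
        struct drm_i915_gem_exec_object2 obj = {
                .handle = gem_create(fd, 4096),
        };
        struct drm_i915_gem_execbuffer2 execbuf = {
                .buffers_ptr = to_user_pointer(&obj),
                .buffer_count = 1,
        };
        const uint32_t bbe = MI_BATCH_BUFFER_END;
        unsigned long count = 0;

        gem_write(fd, obj.handle, 0, &bbe, sizeof(bbe));
        gem_quiescent_gpu(fd);

        igt_until_timeout(5) {
                igt_spin_t *hang;

                if (__gem_execbuf(fd, &execbuf) == -EIO) {
                        uint32_t ctx = 0;

                        /* The ban is per-context: new contexts still work. */
                        igt_assert_neq(__gem_context_create(fd, &ctx), -EIO);
                        igt_assert_neq(ctx, 0);

                        execbuf.rsvd1 = ctx;
                        gem_execbuf(fd, &execbuf);
                        gem_context_destroy(fd, ctx);
                        return;
                }

                /* Not banned yet: take the blame for one more reset. */
                hang = spin_sync(fd, 0, 0);
                trigger_reset(fd);
                igt_spin_free(fd, hang);
                count++;
        }

        igt_assert_f(false, "%lu hangs without the context being banned\n",
                     count);
}

static void test_banned(int fd)
{
        fd = gem_reopen_driver(fd);
        __test_banned(fd);
        close(fd);
}
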
369 static void test_wait(int fd, unsigned int flags, unsigned int wait) in test_wait() argument
373 fd = gem_reopen_driver(fd); in test_wait()
374 igt_require_gem(fd); in test_wait()
386 hang = spin_sync(fd, 0, I915_EXEC_DEFAULT); in test_wait()
388 check_wait(fd, hang->handle, wait, NULL); in test_wait()
390 igt_spin_free(fd, hang); in test_wait()
394 trigger_reset(fd); in test_wait()
395 close(fd); in test_wait()
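
test_wait() (lines 369-395) checks that waiting on a hanging batch completes in bounded time, both when resets are allowed and when the device is deliberately left wedged (TEST_WEDGE). The reset_control branches are assumptions consistent with the rest of the file; the listed lines show the reopen, the spinner and the wait.

static void test_wait(int fd, unsigned int flags, unsigned int wait)
{
        igt_spin_t *hang;

        fd = gem_reopen_driver(fd);
        igt_require_gem(fd);

        /*
         * Assumed: with TEST_WEDGE the hang is left unrecovered so the wait
         * completes via the wedged path; otherwise the reset path is used.
         */
        if (flags & TEST_WEDGE)
                igt_require(i915_reset_control(false));
        else
                igt_require(i915_reset_control(true));

        hang = spin_sync(fd, 0, I915_EXEC_DEFAULT);

        /* Either way, the wait must return. */
        check_wait(fd, hang->handle, wait, NULL);

        igt_spin_free(fd, hang);

        igt_require(i915_reset_control(true));
        trigger_reset(fd);
        close(fd);
}
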
398 static void test_suspend(int fd, int state) in test_suspend() argument
400 fd = gem_reopen_driver(fd); in test_suspend()
401 igt_require_gem(fd); in test_suspend()
408 manual_hang(fd); in test_suspend()
413 trigger_reset(fd); in test_suspend()
414 close(fd); in test_suspend()
417 static void test_inflight(int fd, unsigned int wait) in test_inflight() argument
419 int parent_fd = fd; in test_inflight()
424 igt_require_gem(fd); in test_inflight()
425 igt_require(gem_has_exec_fence(fd)); in test_inflight()
427 max = gem_measure_ring_inflight(fd, -1, 0); in test_inflight()
437 fd = gem_reopen_driver(parent_fd); in test_inflight()
438 igt_require_gem(fd); in test_inflight()
442 obj[1].handle = gem_create(fd, 4096); in test_inflight()
443 gem_write(fd, obj[1].handle, 0, &bbe, sizeof(bbe)); in test_inflight()
445 gem_quiescent_gpu(fd); in test_inflight()
449 hang = spin_sync(fd, 0, engine); in test_inflight()
458 gem_execbuf_wr(fd, &execbuf); in test_inflight()
463 check_wait(fd, obj[1].handle, wait, NULL); in test_inflight()
470 igt_spin_free(fd, hang); in test_inflight()
472 trigger_reset(fd); in test_inflight()
474 gem_close(fd, obj[1].handle); in test_inflight()
475 close(fd); in test_inflight()
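
test_inflight() (lines 417-476) fills an engine with requests queued behind a spinner, each carrying an output fence, wedges the GPU, and then checks that a wait on the trailing batch still completes. The per-engine loop, the fence bookkeeping and the -EIO fence-status check are reconstructed from the common IGT pattern and are assumptions; only the marked lines are taken from the listing.

static void test_inflight(int fd, unsigned int wait)
{
        int parent_fd = fd;
        unsigned int engine;
        int fence[64];          /* assumed upper bound on queue depth */
        int max;

        igt_require_gem(fd);
        igt_require(gem_has_exec_fence(fd));

        /* How many requests fit in flight before execbuf would block? */
        max = gem_measure_ring_inflight(fd, -1, 0);
        igt_require(max > 1);
        if (max - 1 < ARRAY_SIZE(fence))
                max = max - 1;
        else
                max = ARRAY_SIZE(fence);

        for_each_engine(parent_fd, engine) {
                const uint32_t bbe = MI_BATCH_BUFFER_END;
                struct drm_i915_gem_exec_object2 obj[2];
                struct drm_i915_gem_execbuffer2 execbuf;
                igt_spin_t *hang;
                int n;

                /* Private fd per engine so a ban cannot leak across loops. */
                fd = gem_reopen_driver(parent_fd);
                igt_require_gem(fd);

                memset(obj, 0, sizeof(obj));
                obj[0].flags = EXEC_OBJECT_WRITE; /* serialise behind spinner */
                obj[1].handle = gem_create(fd, 4096);
                gem_write(fd, obj[1].handle, 0, &bbe, sizeof(bbe));

                gem_quiescent_gpu(fd);
                igt_require(i915_reset_control(false)); /* assumed */

                hang = spin_sync(fd, 0, engine);
                obj[0].handle = hang->handle;

                memset(&execbuf, 0, sizeof(execbuf));
                execbuf.buffers_ptr = to_user_pointer(obj);
                execbuf.buffer_count = 2;
                execbuf.flags = engine | I915_EXEC_FENCE_OUT;

                /* Queue a ring full of batches behind the spinner. */
                for (n = 0; n < max; n++) {
                        gem_execbuf_wr(fd, &execbuf);
                        fence[n] = execbuf.rsvd2 >> 32;
                        igt_assert(fence[n] != -1);
                }

                /* Wedge (immediately or after 'wait' us) and wait it out. */
                check_wait(fd, obj[1].handle, wait, NULL);

                /* All in-flight requests should have been cancelled. */
                for (n = 0; n < max; n++) {
                        igt_assert_eq(sync_fence_status(fence[n]), -EIO);
                        close(fence[n]);
                }

                igt_spin_free(fd, hang);
                igt_require(i915_reset_control(true)); /* assumed */
                trigger_reset(fd);

                gem_close(fd, obj[1].handle);
                close(fd);
        }
}
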
479 static void test_inflight_suspend(int fd) in test_inflight_suspend() argument
488 max = gem_measure_ring_inflight(fd, -1, 0); in test_inflight_suspend()
492 fd = gem_reopen_driver(fd); in test_inflight_suspend()
493 igt_require_gem(fd); in test_inflight_suspend()
494 igt_require(gem_has_exec_fence(fd)); in test_inflight_suspend()
499 obj[1].handle = gem_create(fd, 4096); in test_inflight_suspend()
500 gem_write(fd, obj[1].handle, 0, &bbe, sizeof(bbe)); in test_inflight_suspend()
502 hang = spin_sync(fd, 0, 0); in test_inflight_suspend()
511 gem_execbuf_wr(fd, &execbuf); in test_inflight_suspend()
519 check_wait(fd, obj[1].handle, 10, NULL); in test_inflight_suspend()
526 igt_spin_free(fd, hang); in test_inflight_suspend()
528 trigger_reset(fd); in test_inflight_suspend()
529 close(fd); in test_inflight_suspend()
549 static void test_inflight_contexts(int fd, unsigned int wait) in test_inflight_contexts() argument
551 int parent_fd = fd; in test_inflight_contexts()
554 igt_require_gem(fd); in test_inflight_contexts()
555 igt_require(gem_has_exec_fence(fd)); in test_inflight_contexts()
556 gem_require_contexts(fd); in test_inflight_contexts()
567 fd = gem_reopen_driver(parent_fd); in test_inflight_contexts()
568 igt_require_gem(fd); in test_inflight_contexts()
571 ctx[n] = context_create_safe(fd); in test_inflight_contexts()
573 gem_quiescent_gpu(fd); in test_inflight_contexts()
580 obj[1].handle = gem_create(fd, 4096); in test_inflight_contexts()
581 gem_write(fd, obj[1].handle, 0, &bbe, sizeof(bbe)); in test_inflight_contexts()
583 hang = spin_sync(fd, 0, engine); in test_inflight_contexts()
594 if (__gem_execbuf_wr(fd, &execbuf)) in test_inflight_contexts()
601 check_wait(fd, obj[1].handle, wait, NULL); in test_inflight_contexts()
608 igt_spin_free(fd, hang); in test_inflight_contexts()
609 gem_close(fd, obj[1].handle); in test_inflight_contexts()
611 trigger_reset(fd); in test_inflight_contexts()
614 gem_context_destroy(fd, ctx[n]); in test_inflight_contexts()
616 close(fd); in test_inflight_contexts()
620 static void test_inflight_external(int fd) in test_inflight_external() argument
630 igt_require(gem_has_exec_fence(fd)); in test_inflight_external()
632 fd = gem_reopen_driver(fd); in test_inflight_external()
633 igt_require_gem(fd); in test_inflight_external()
635 fence = igt_cork_plug(&cork, fd); in test_inflight_external()
638 hang = __spin_poll(fd, 0, 0); in test_inflight_external()
641 obj.handle = gem_create(fd, 4096); in test_inflight_external()
642 gem_write(fd, obj.handle, 0, &bbe, sizeof(bbe)); in test_inflight_external()
650 gem_execbuf_wr(fd, &execbuf); in test_inflight_external()
656 __spin_wait(fd, hang); in test_inflight_external()
657 manual_hang(fd); in test_inflight_external()
659 gem_sync(fd, hang->handle); /* wedged, with an unready batch */ in test_inflight_external()
660 igt_assert(!gem_bo_busy(fd, hang->handle)); in test_inflight_external()
661 igt_assert(gem_bo_busy(fd, obj.handle)); in test_inflight_external()
664 igt_assert_eq(__gem_wait(fd, obj.handle, -1), 0); in test_inflight_external()
668 igt_spin_free(fd, hang); in test_inflight_external()
670 trigger_reset(fd); in test_inflight_external()
671 close(fd); in test_inflight_external()
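
test_inflight_external() (lines 620-671) is the most intricate flow in the listing: a batch is queued against an external input fence (a "cork"), a spinner is started, and the GPU is wedged while that batch is still not ready to run. After the hang the spinner must be idle but the corked batch must still be busy, and once the cork is unplugged the wait on it must succeed rather than report an error. A sketch; the cork macro name and the fence plumbing through execbuf.rsvd2 are assumptions.

static void test_inflight_external(int fd)
{
        struct drm_i915_gem_execbuffer2 execbuf;
        struct drm_i915_gem_exec_object2 obj;
        const uint32_t bbe = MI_BATCH_BUFFER_END;
        igt_spin_t *hang;
        IGT_CORK_FENCE(cork);
        int fence;

        igt_require(gem_has_exec_fence(fd));

        fd = gem_reopen_driver(fd);
        igt_require_gem(fd);

        /* An input fence that will not signal until we unplug the cork. */
        fence = igt_cork_plug(&cork, fd);

        hang = __spin_poll(fd, 0, 0);

        memset(&obj, 0, sizeof(obj));
        obj.handle = gem_create(fd, 4096);
        gem_write(fd, obj.handle, 0, &bbe, sizeof(bbe));

        memset(&execbuf, 0, sizeof(execbuf));
        execbuf.buffers_ptr = to_user_pointer(&obj);
        execbuf.buffer_count = 1;
        execbuf.flags = I915_EXEC_FENCE_IN | I915_EXEC_FENCE_OUT;
        execbuf.rsvd2 = (uint32_t)fence;

        gem_execbuf_wr(fd, &execbuf);
        close(fence);
        fence = execbuf.rsvd2 >> 32;
        igt_assert(fence != -1);

        /* Wedge while the external batch is still blocked on its fence. */
        __spin_wait(fd, hang);
        manual_hang(fd);

        gem_sync(fd, hang->handle); /* wedged, with an unready batch */
        igt_assert(!gem_bo_busy(fd, hang->handle));
        igt_assert(gem_bo_busy(fd, obj.handle));

        /* Only now let the corked batch through; the wait must succeed. */
        igt_cork_unplug(&cork);
        igt_assert_eq(__gem_wait(fd, obj.handle, -1), 0);

        close(fence);
        igt_spin_free(fd, hang);
        trigger_reset(fd);
        close(fd);
}
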
674 static void test_inflight_internal(int fd, unsigned int wait) in test_inflight_internal() argument
683 igt_require(gem_has_exec_fence(fd)); in test_inflight_internal()
685 fd = gem_reopen_driver(fd); in test_inflight_internal()
686 igt_require_gem(fd); in test_inflight_internal()
689 hang = spin_sync(fd, 0, 0); in test_inflight_internal()
694 obj[1].handle = gem_create(fd, 4096); in test_inflight_internal()
695 gem_write(fd, obj[1].handle, 0, &bbe, sizeof(bbe)); in test_inflight_internal()
700 for_each_engine(fd, engine) { in test_inflight_internal()
703 gem_execbuf_wr(fd, &execbuf); in test_inflight_internal()
710 check_wait(fd, obj[1].handle, wait, NULL); in test_inflight_internal()
717 igt_spin_free(fd, hang); in test_inflight_internal()
719 trigger_reset(fd); in test_inflight_internal()
720 close(fd); in test_inflight_internal()
723 static void reset_stress(int fd, uint32_t ctx0, in reset_stress() argument
729 .handle = gem_create(fd, 4096) in reset_stress()
739 max = gem_measure_ring_inflight(fd, engine, 0); in reset_stress()
743 gem_write(fd, obj.handle, 0, &bbe, sizeof(bbe)); in reset_stress()
747 uint32_t ctx = context_create_safe(fd); in reset_stress()
751 gem_quiescent_gpu(fd); in reset_stress()
760 hang = spin_sync(fd, ctx0, engine); in reset_stress()
764 gem_execbuf(fd, &execbuf); in reset_stress()
768 gem_execbuf(fd, &execbuf); in reset_stress()
771 check_wait(fd, obj.handle, 100e3, &stats); in reset_stress()
776 trigger_reset(fd); in reset_stress()
778 gem_quiescent_gpu(fd); in reset_stress()
786 gem_execbuf(fd, &execbuf); in reset_stress()
790 gem_execbuf(fd, &execbuf); in reset_stress()
792 gem_sync(fd, obj.handle); in reset_stress()
793 igt_spin_free(fd, hang); in reset_stress()
794 gem_context_destroy(fd, ctx); in reset_stress()
796 check_wait_elapsed(name, fd, &stats); in reset_stress()
799 gem_close(fd, obj.handle); in reset_stress()
805 static void test_reset_stress(int fd, unsigned int flags) in test_reset_stress() argument
807 uint32_t ctx0 = context_create_safe(fd); in test_reset_stress()
810 for_each_engine(fd, engine) in test_reset_stress()
811 reset_stress(fd, ctx0, e__->name, engine, flags); in test_reset_stress()
813 gem_context_destroy(fd, ctx0); in test_reset_stress()
816 static int fd = -1; variable
822 igt_force_gpu_reset(fd); in exit_handler()
830 fd = drm_open_driver(DRIVER_INTEL);
831 igt_device_drop_master(fd);
833 gem_submission_print_method(fd);
834 igt_require_gem(fd);
836 igt_allow_hang(fd, 0, 0);
839 igt_force_gpu_reset(fd);
844 test_throttle(fd);
847 test_context_create(fd);
850 test_execbuf(fd);
853 test_banned(fd);
856 test_suspend(fd, SUSPEND_STATE_MEM);
859 test_suspend(fd, SUSPEND_STATE_DISK);
862 test_inflight_external(fd);
865 test_inflight_suspend(fd);
869 igt_require(gem_has_contexts(fd));
873 test_reset_stress(fd, 0);
876 test_reset_stress(fd, TEST_WEDGE);
892 test_wait(fd, 0, waits[i].wait);
895 test_wait(fd, TEST_WEDGE, waits[i].wait);
898 test_inflight(fd, waits[i].wait);
901 test_inflight_contexts(fd, waits[i].wait);
904 igt_skip_on(gem_has_semaphores(fd));
905 test_inflight_internal(fd, waits[i].wait);
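
The file-scope fd, the exit handler and the igt_main block (lines 816 onward) tie everything together: open the device once, drop DRM master, allow hangs, force a clean reset, and then register the subtests, including a table of wait timeouts used for the wait/in-flight variants. The skeleton below is heavily abridged; the subtest names, the reset_control call in the exit handler and the subtest grouping are assumptions.

static int fd = -1;

static void exit_handler(int sig)
{
        /* Best effort (assumed): re-enable resets and unwedge on exit. */
        i915_reset_control(true);
        igt_force_gpu_reset(fd);
}

igt_main
{
        igt_fixture {
                fd = drm_open_driver(DRIVER_INTEL);
                igt_device_drop_master(fd);

                gem_submission_print_method(fd);
                igt_require_gem(fd);

                igt_allow_hang(fd, 0, 0);

                igt_install_exit_handler(exit_handler);
                igt_require(i915_reset_control(true));
                igt_force_gpu_reset(fd);
        }

        /* Subtest names are illustrative; see the listing for the full set. */
        igt_subtest("throttle")
                test_throttle(fd);

        igt_subtest("execbuf")
                test_execbuf(fd);

        igt_subtest("banned")
                test_banned(fd);

        igt_subtest("suspend")
                test_suspend(fd, SUSPEND_STATE_MEM);

        igt_subtest("in-flight-external")
                test_inflight_external(fd);

        igt_subtest_group {
                igt_fixture
                        igt_require(gem_has_contexts(fd));

                igt_subtest("reset-stress")
                        test_reset_stress(fd, 0);
        }
}
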