Lines Matching full:fd

67 uint32_t __sync_read_u32(int fd, uint32_t handle, uint64_t offset) in __sync_read_u32() argument
71 gem_set_domain(fd, handle, /* No write hazard lies! */ in __sync_read_u32()
73 gem_read(fd, handle, offset, &value, sizeof(value)); in __sync_read_u32()
79 void __sync_read_u32_count(int fd, uint32_t handle, uint32_t *dst, uint64_t size) in __sync_read_u32_count() argument
81 gem_set_domain(fd, handle, /* No write hazard lies! */ in __sync_read_u32_count()
83 gem_read(fd, handle, 0, dst, size); in __sync_read_u32_count()
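
The two helpers above are the synchronous read-back primitives used by every subtest to sample the scratch buffers. Only the fd-bearing lines are listed, so the bodies below are a hedged reconstruction: the domain flags and the local value variable are assumptions filled in around the calls shown above.

#include "igt.h"

/* Hedged reconstruction of the read-back helpers: serialise against the
 * GPU with gem_set_domain() and then copy the object contents back with
 * gem_read(). The exact domain arguments are assumed, not shown above. */
uint32_t __sync_read_u32(int fd, uint32_t handle, uint64_t offset)
{
	uint32_t value;

	gem_set_domain(fd, handle, /* No write hazard lies! */
		       I915_GEM_DOMAIN_GTT, I915_GEM_DOMAIN_GTT);
	gem_read(fd, handle, offset, &value, sizeof(value));

	return value;
}

void __sync_read_u32_count(int fd, uint32_t handle, uint32_t *dst, uint64_t size)
{
	gem_set_domain(fd, handle, /* No write hazard lies! */
		       I915_GEM_DOMAIN_GTT, I915_GEM_DOMAIN_GTT);
	gem_read(fd, handle, 0, dst, size);
}
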
86 static uint32_t __store_dword(int fd, uint32_t ctx, unsigned ring, in __store_dword() argument
90 const int gen = intel_gen(intel_get_drm_devid(fd)); in __store_dword()
108 obj[2].handle = gem_create(fd, 4096); in __store_dword()
135 gem_write(fd, obj[2].handle, 0, batch, sizeof(batch)); in __store_dword()
136 gem_execbuf(fd, &execbuf); in __store_dword()
141 static void store_dword(int fd, uint32_t ctx, unsigned ring, in store_dword() argument
145 gem_close(fd, __store_dword(fd, ctx, ring, in store_dword()
150 static uint32_t create_highest_priority(int fd) in create_highest_priority() argument
152 uint32_t ctx = gem_context_create(fd); in create_highest_priority()
159 __gem_context_set_priority(fd, ctx, MAX_PRIO); in create_highest_priority()
164 static void unplug_show_queue(int fd, struct igt_cork *c, unsigned int engine) in unplug_show_queue() argument
170 if (!gem_scheduler_enabled(fd)) in unplug_show_queue()
175 .ctx = create_highest_priority(fd), in unplug_show_queue()
178 spin[n] = __igt_spin_factory(fd, &opts); in unplug_show_queue()
179 gem_context_destroy(fd, opts.ctx); in unplug_show_queue()
183 igt_debugfs_dump(fd, "i915_engine_info"); in unplug_show_queue()
186 igt_spin_free(fd, spin[n]); in unplug_show_queue()
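
create_highest_priority() and unplug_show_queue() form the common epilogue of these subtests: release the cork and, while the queue drains, keep the engine occupied with short spinners from maximum-priority contexts so the queued work shows up in i915_engine_info. A hedged sketch of both follows, assuming the same file context as above; the spinner count (MAX_ELSP_QLEN), the max = 1 fallback and the igt_cork_unplug() call are assumptions not visible in the hits.

static uint32_t create_highest_priority(int fd)
{
	uint32_t ctx = gem_context_create(fd);

	/* Best effort: without priority support every context already runs
	 * at the same (maximum user) priority, so nothing can overtake us. */
	__gem_context_set_priority(fd, ctx, MAX_PRIO);

	return ctx;
}

static void unplug_show_queue(int fd, struct igt_cork *c, unsigned int engine)
{
	igt_spin_t *spin[MAX_ELSP_QLEN]; /* assumed array bound */
	int max = MAX_ELSP_QLEN;

	/* Assumed fallback when there is no scheduler to reorder the queue. */
	if (!gem_scheduler_enabled(fd))
		max = 1;

	for (int n = 0; n < max; n++) {
		const struct igt_spin_factory opts = {
			.ctx = create_highest_priority(fd),
			.engine = engine,
		};
		spin[n] = __igt_spin_factory(fd, &opts);
		gem_context_destroy(fd, opts.ctx);
	}

	igt_cork_unplug(c); /* release the plugged batches */
	igt_debugfs_dump(fd, "i915_engine_info");

	for (int n = 0; n < max; n++)
		igt_spin_free(fd, spin[n]);
}
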
190 static void fifo(int fd, unsigned ring) in fifo() argument
196 scratch = gem_create(fd, 4096); in fifo()
198 plug = igt_cork_plug(&cork, fd); in fifo()
201 store_dword(fd, 0, ring, scratch, 0, 1, plug, 0); in fifo()
202 store_dword(fd, 0, ring, scratch, 0, 2, plug, 0); in fifo()
204 unplug_show_queue(fd, &cork, ring); in fifo()
205 gem_close(fd, plug); in fifo()
207 result = __sync_read_u32(fd, scratch, 0); in fifo()
208 gem_close(fd, scratch); in fifo()
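
fifo() is the baseline ordering check: two store_dword() batches write 1 and then 2 to the same dword while held back by the cork, and once released they must retire in submission order. The final assertion is not among the hits; the reconstruction below fills it in on that assumption, along with an assumed cork declaration.

static void fifo(int fd, unsigned ring)
{
	IGT_CORK_HANDLE(cork); /* assumed cork declaration */
	uint32_t scratch, plug, result;

	scratch = gem_create(fd, 4096);
	plug = igt_cork_plug(&cork, fd);

	/* Same priority, same engine, same target dword: strictly FIFO. */
	store_dword(fd, 0, ring, scratch, 0, 1, plug, 0);
	store_dword(fd, 0, ring, scratch, 0, 2, plug, 0);

	unplug_show_queue(fd, &cork, ring);
	gem_close(fd, plug);

	result = __sync_read_u32(fd, scratch, 0);
	gem_close(fd, scratch);

	/* Assumed check: in-order execution means the later write (2) wins. */
	igt_assert_eq_u32(result, 2);
}
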
213 static void independent(int fd, unsigned int engine) in independent() argument
223 scratch = gem_create(fd, 4096); in independent()
224 ptr = gem_mmap__gtt(fd, scratch, 4096, PROT_READ); in independent()
227 plug = igt_cork_plug(&cork, fd); in independent()
230 for_each_physical_engine(fd, other) { in independent()
234 if (!gem_can_store_dword(fd, other)) in independent()
238 spin = __igt_spin_new(fd, .engine = other); in independent()
245 gem_execbuf(fd, &eb); in independent()
248 store_dword(fd, 0, other, scratch, 0, other, plug, 0); in independent()
253 batch = __store_dword(fd, 0, engine, scratch, 0, engine, plug, 0); in independent()
255 unplug_show_queue(fd, &cork, engine); in independent()
256 gem_close(fd, plug); in independent()
258 gem_sync(fd, batch); in independent()
259 igt_assert(!gem_bo_busy(fd, batch)); in independent()
260 igt_assert(gem_bo_busy(fd, spin->handle)); in independent()
261 gem_close(fd, batch); in independent()
264 igt_assert(gem_bo_busy(fd, scratch)); in independent()
267 igt_spin_free(fd, spin); in independent()
268 gem_quiescent_gpu(fd); in independent()
271 igt_assert(!gem_bo_busy(fd, scratch)); in independent()
275 gem_close(fd, scratch); in independent()
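
independent() pins every other engine with a spinner and checks that the one engine under test can still retire its corked write. The CPU-side checks on the mapped scratch page are not among the hits; the fragment below is an assumed completion, where ptr is the gem_mmap__gtt() mapping created earlier and each batch stores its engine id.

/* Assumed checks around the hits above: while the spinners block the other
 * engines only the target engine's store can have landed; once the spinners
 * are freed and the GPU idles, another engine's store should have
 * overwritten it. */
igt_assert_eq_u32(ptr[0], engine);  /* before igt_spin_free(fd, spin) */
igt_assert_neq(ptr[0], engine);     /* after gem_quiescent_gpu(fd) */
munmap(ptr, 4096);
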
278 static void smoketest(int fd, unsigned ring, unsigned timeout) in smoketest() argument
289 for_each_physical_engine(fd, engine) in smoketest()
290 if (gem_can_store_dword(fd, engine)) in smoketest()
293 if (gem_can_store_dword(fd, ring)) in smoketest()
298 scratch = gem_create(fd, 4096); in smoketest()
305 ctx = gem_context_create(fd); in smoketest()
310 gem_context_set_priority(fd, ctx, prio); in smoketest()
313 store_dword(fd, ctx, engine, scratch, in smoketest()
317 store_dword(fd, ctx, engine, scratch, in smoketest()
321 gem_context_destroy(fd, ctx); in smoketest()
325 __sync_read_u32_count(fd, scratch, result, sizeof(result)); in smoketest()
326 gem_close(fd, scratch); in smoketest()
698 static void reorder(int fd, unsigned ring, unsigned flags) in reorder() argument
706 ctx[LO] = gem_context_create(fd); in reorder()
707 gem_context_set_priority(fd, ctx[LO], MIN_PRIO); in reorder()
709 ctx[HI] = gem_context_create(fd); in reorder()
710 gem_context_set_priority(fd, ctx[HI], flags & EQUAL ? MIN_PRIO : 0); in reorder()
712 scratch = gem_create(fd, 4096); in reorder()
713 plug = igt_cork_plug(&cork, fd); in reorder()
718 store_dword(fd, ctx[LO], ring, scratch, 0, ctx[LO], plug, 0); in reorder()
719 store_dword(fd, ctx[HI], ring, scratch, 0, ctx[HI], plug, 0); in reorder()
721 unplug_show_queue(fd, &cork, ring); in reorder()
722 gem_close(fd, plug); in reorder()
724 gem_context_destroy(fd, ctx[LO]); in reorder()
725 gem_context_destroy(fd, ctx[HI]); in reorder()
727 result = __sync_read_u32(fd, scratch, 0); in reorder()
728 gem_close(fd, scratch); in reorder()
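
reorder() queues a MIN_PRIO write and then a second write behind the same cork, each storing its own context id into the same dword. Whether the second context is boosted depends on the EQUAL flag; the expected winner is not shown in the hits, so the check below is an assumed completion consistent with that setup.

/* Assumed tail of reorder(): with equal priorities execution stays FIFO and
 * the later (HI) write lands last; with a real priority gap the HI batch is
 * expected to jump the queue, leaving the LO write as the final value. */
if (flags & EQUAL)
	igt_assert_eq_u32(result, ctx[HI]);
else
	igt_assert_eq_u32(result, ctx[LO]);
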
736 static void promotion(int fd, unsigned ring) in promotion() argument
744 ctx[LO] = gem_context_create(fd); in promotion()
745 gem_context_set_priority(fd, ctx[LO], MIN_PRIO); in promotion()
747 ctx[HI] = gem_context_create(fd); in promotion()
748 gem_context_set_priority(fd, ctx[HI], 0); in promotion()
750 ctx[NOISE] = gem_context_create(fd); in promotion()
751 gem_context_set_priority(fd, ctx[NOISE], MIN_PRIO/2); in promotion()
753 result = gem_create(fd, 4096); in promotion()
754 dep = gem_create(fd, 4096); in promotion()
756 plug = igt_cork_plug(&cork, fd); in promotion()
763 store_dword(fd, ctx[NOISE], ring, result, 0, ctx[NOISE], plug, 0); in promotion()
764 store_dword(fd, ctx[LO], ring, result, 0, ctx[LO], plug, 0); in promotion()
767 store_dword(fd, ctx[LO], ring, dep, 0, ctx[LO], 0, I915_GEM_DOMAIN_INSTRUCTION); in promotion()
768 store_dword(fd, ctx[HI], ring, dep, 0, ctx[HI], 0, 0); in promotion()
770 store_dword(fd, ctx[HI], ring, result, 0, ctx[HI], 0, 0); in promotion()
772 unplug_show_queue(fd, &cork, ring); in promotion()
773 gem_close(fd, plug); in promotion()
775 gem_context_destroy(fd, ctx[NOISE]); in promotion()
776 gem_context_destroy(fd, ctx[LO]); in promotion()
777 gem_context_destroy(fd, ctx[HI]); in promotion()
779 dep_read = __sync_read_u32(fd, dep, 0); in promotion()
780 gem_close(fd, dep); in promotion()
782 result_read = __sync_read_u32(fd, result, 0); in promotion()
783 gem_close(fd, result); in promotion()
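
promotion() checks priority inheritance: the HI context's write to dep depends on the LO context's earlier write to the same buffer, so LO must be promoted above the NOISE context for HI to make progress. The assertions are not among the hits; the pair below is an assumed completion of that reasoning.

/* Assumed checks for promotion(): dep is written by LO then HI, so HI's id
 * must be the final value there; and if LO was promoted above NOISE, the
 * NOISE write to result executes last of all, leaving NOISE's id behind. */
igt_assert_eq_u32(dep_read, ctx[HI]);
igt_assert_eq_u32(result_read, ctx[NOISE]);
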
791 static void preempt(int fd, unsigned ring, unsigned flags) in preempt() argument
793 uint32_t result = gem_create(fd, 4096); in preempt()
799 ctx[LO] = gem_context_create(fd); in preempt()
800 gem_context_set_priority(fd, ctx[LO], MIN_PRIO); in preempt()
802 ctx[HI] = gem_context_create(fd); in preempt()
803 gem_context_set_priority(fd, ctx[HI], MAX_PRIO); in preempt()
806 hang = igt_hang_ctx(fd, ctx[LO], ring, 0); in preempt()
810 gem_context_destroy(fd, ctx[LO]); in preempt()
811 ctx[LO] = gem_context_create(fd); in preempt()
812 gem_context_set_priority(fd, ctx[LO], MIN_PRIO); in preempt()
814 spin[n] = __igt_spin_new(fd, in preempt()
819 store_dword(fd, ctx[HI], ring, result, 0, n + 1, 0, I915_GEM_DOMAIN_RENDER); in preempt()
821 result_read = __sync_read_u32(fd, result, 0); in preempt()
823 igt_assert(gem_bo_busy(fd, spin[0]->handle)); in preempt()
827 igt_spin_free(fd, spin[n]); in preempt()
830 igt_post_hang_ring(fd, hang); in preempt()
832 gem_context_destroy(fd, ctx[LO]); in preempt()
833 gem_context_destroy(fd, ctx[HI]); in preempt()
835 gem_close(fd, result); in preempt()
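
preempt() repeatedly spawns a MIN_PRIO spinner and then issues a MAX_PRIO store; the store must complete while the very first spinner is still running, proving the engine was preempted rather than drained. The equality check on the stored value is not in the hits; it is assumed below.

/* Assumed in-loop check for preempt(): the MAX_PRIO store of n + 1 must be
 * visible while spin[0] (MIN_PRIO) still occupies the engine. */
result_read = __sync_read_u32(fd, result, 0);
igt_assert_eq_u32(result_read, n + 1);
igt_assert(gem_bo_busy(fd, spin[0]->handle));
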
841 static igt_spin_t *__noise(int fd, uint32_t ctx, int prio, igt_spin_t *spin) in __noise() argument
845 gem_context_set_priority(fd, ctx, prio); in __noise()
847 for_each_physical_engine(fd, other) { in __noise()
849 spin = __igt_spin_new(fd, in __noise()
859 gem_execbuf(fd, &eb); in __noise()
866 static void __preempt_other(int fd, in __preempt_other() argument
871 uint32_t result = gem_create(fd, 4096); in __preempt_other()
876 store_dword(fd, ctx[LO], primary, in __preempt_other()
882 for_each_physical_engine(fd, other) { in __preempt_other()
883 store_dword(fd, ctx[LO], other, in __preempt_other()
890 store_dword(fd, ctx[HI], target, in __preempt_other()
894 igt_debugfs_dump(fd, "i915_engine_info"); in __preempt_other()
895 gem_set_domain(fd, result, I915_GEM_DOMAIN_GTT, 0); in __preempt_other()
899 __sync_read_u32_count(fd, result, result_read, sizeof(result_read)); in __preempt_other()
903 gem_close(fd, result); in __preempt_other()
906 static void preempt_other(int fd, unsigned ring, unsigned int flags) in preempt_other() argument
922 ctx[LO] = gem_context_create(fd); in preempt_other()
923 gem_context_set_priority(fd, ctx[LO], MIN_PRIO); in preempt_other()
925 ctx[NOISE] = gem_context_create(fd); in preempt_other()
926 spin = __noise(fd, ctx[NOISE], 0, NULL); in preempt_other()
928 ctx[HI] = gem_context_create(fd); in preempt_other()
929 gem_context_set_priority(fd, ctx[HI], MAX_PRIO); in preempt_other()
931 for_each_physical_engine(fd, primary) { in preempt_other()
933 __preempt_other(fd, ctx, ring, primary, flags); in preempt_other()
937 igt_assert(gem_bo_busy(fd, spin->handle)); in preempt_other()
938 igt_spin_free(fd, spin); in preempt_other()
940 gem_context_destroy(fd, ctx[LO]); in preempt_other()
941 gem_context_destroy(fd, ctx[NOISE]); in preempt_other()
942 gem_context_destroy(fd, ctx[HI]); in preempt_other()
945 static void __preempt_queue(int fd, in __preempt_queue() argument
949 uint32_t result = gem_create(fd, 4096); in __preempt_queue()
955 gem_context_create(fd), in __preempt_queue()
956 gem_context_create(fd), in __preempt_queue()
957 gem_context_create(fd), in __preempt_queue()
962 gem_context_destroy(fd, ctx[NOISE]); in __preempt_queue()
963 ctx[NOISE] = gem_context_create(fd); in __preempt_queue()
965 above = __noise(fd, ctx[NOISE], prio--, above); in __preempt_queue()
968 gem_context_set_priority(fd, ctx[HI], prio--); in __preempt_queue()
972 gem_context_destroy(fd, ctx[NOISE]); in __preempt_queue()
973 ctx[NOISE] = gem_context_create(fd); in __preempt_queue()
975 below = __noise(fd, ctx[NOISE], prio--, below); in __preempt_queue()
978 gem_context_set_priority(fd, ctx[LO], prio--); in __preempt_queue()
981 store_dword(fd, ctx[LO], primary, in __preempt_queue()
987 for_each_physical_engine(fd, other) { in __preempt_queue()
988 store_dword(fd, ctx[LO], other, in __preempt_queue()
995 store_dword(fd, ctx[HI], target, in __preempt_queue()
999 igt_debugfs_dump(fd, "i915_engine_info"); in __preempt_queue()
1002 igt_assert(gem_bo_busy(fd, above->handle)); in __preempt_queue()
1003 igt_spin_free(fd, above); in __preempt_queue()
1006 gem_set_domain(fd, result, I915_GEM_DOMAIN_GTT, 0); in __preempt_queue()
1008 __sync_read_u32_count(fd, result, result_read, sizeof(result_read)); in __preempt_queue()
1015 igt_assert(gem_bo_busy(fd, below->handle)); in __preempt_queue()
1016 igt_spin_free(fd, below); in __preempt_queue()
1019 gem_context_destroy(fd, ctx[LO]); in __preempt_queue()
1020 gem_context_destroy(fd, ctx[NOISE]); in __preempt_queue()
1021 gem_context_destroy(fd, ctx[HI]); in __preempt_queue()
1023 gem_close(fd, result); in __preempt_queue()
1026 static void preempt_queue(int fd, unsigned ring, unsigned int flags) in preempt_queue() argument
1030 for_each_physical_engine(fd, other) { in preempt_queue()
1032 __preempt_queue(fd, ring, other, depth, flags); in preempt_queue()
1036 static void preempt_self(int fd, unsigned ring) in preempt_self() argument
1038 uint32_t result = gem_create(fd, 4096); in preempt_self()
1053 ctx[NOISE] = gem_context_create(fd); in preempt_self()
1055 ctx[HI] = gem_context_create(fd); in preempt_self()
1058 gem_context_set_priority(fd, ctx[HI], MIN_PRIO); in preempt_self()
1059 for_each_physical_engine(fd, other) { in preempt_self()
1060 spin[n] = __igt_spin_new(fd, in preempt_self()
1063 store_dword(fd, ctx[HI], other, in preempt_self()
1068 gem_context_set_priority(fd, ctx[HI], MAX_PRIO); in preempt_self()
1069 store_dword(fd, ctx[HI], ring, in preempt_self()
1073 gem_set_domain(fd, result, I915_GEM_DOMAIN_GTT, 0); in preempt_self()
1076 igt_assert(gem_bo_busy(fd, spin[i]->handle)); in preempt_self()
1077 igt_spin_free(fd, spin[i]); in preempt_self()
1080 __sync_read_u32_count(fd, result, result_read, sizeof(result_read)); in preempt_self()
1086 gem_context_destroy(fd, ctx[NOISE]); in preempt_self()
1087 gem_context_destroy(fd, ctx[HI]); in preempt_self()
1089 gem_close(fd, result); in preempt_self()
1092 static void preemptive_hang(int fd, unsigned ring) in preemptive_hang() argument
1098 ctx[HI] = gem_context_create(fd); in preemptive_hang()
1099 gem_context_set_priority(fd, ctx[HI], MAX_PRIO); in preemptive_hang()
1102 ctx[LO] = gem_context_create(fd); in preemptive_hang()
1103 gem_context_set_priority(fd, ctx[LO], MIN_PRIO); in preemptive_hang()
1105 spin[n] = __igt_spin_new(fd, in preemptive_hang()
1109 gem_context_destroy(fd, ctx[LO]); in preemptive_hang()
1112 hang = igt_hang_ctx(fd, ctx[HI], ring, 0); in preemptive_hang()
1113 igt_post_hang_ring(fd, hang); in preemptive_hang()
1120 igt_assert(gem_bo_busy(fd, spin[n]->handle)); in preemptive_hang()
1121 igt_spin_free(fd, spin[n]); in preemptive_hang()
1124 gem_context_destroy(fd, ctx[HI]); in preemptive_hang()
1127 static void deep(int fd, unsigned ring) in deep() argument
1145 ctx[n] = gem_context_create(fd); in deep()
1148 nreq = gem_measure_ring_inflight(fd, ring, 0) / (4 * XS) * MAX_CONTEXTS; in deep()
1153 result = gem_create(fd, size); in deep()
1155 dep[m] = gem_create(fd, size); in deep()
1167 obj[XS+1].handle = gem_create(fd, 4096); in deep()
1168 gem_write(fd, obj[XS+1].handle, 0, &bbe, sizeof(bbe)); in deep()
1176 gem_execbuf(fd, &execbuf); in deep()
1178 gem_close(fd, obj[XS+1].handle); in deep()
1179 gem_sync(fd, result); in deep()
1182 plug = igt_cork_plug(&cork, fd); in deep()
1186 const int gen = intel_gen(intel_get_drm_devid(fd)); in deep()
1207 obj[2].handle = gem_create(fd, 4096); in deep()
1226 gem_write(fd, obj[2].handle, 0, batch, sizeof(batch)); in deep()
1228 gem_context_set_priority(fd, eb.rsvd1, MAX_PRIO - nreq + n); in deep()
1232 gem_execbuf(fd, &eb); in deep()
1234 gem_close(fd, obj[2].handle); in deep()
1242 gem_context_set_priority(fd, context, MAX_PRIO - nreq + n); in deep()
1245 store_dword(fd, context, ring, result, 4*n, context, dep[m], 0); in deep()
1246 store_dword(fd, context, ring, result, 4*m, context, 0, I915_GEM_DOMAIN_INSTRUCTION); in deep()
1253 unplug_show_queue(fd, &cork, ring); in deep()
1254 gem_close(fd, plug); in deep()
1258 gem_context_destroy(fd, ctx[n]); in deep()
1261 __sync_read_u32_count(fd, dep[m], read_buf, sizeof(read_buf)); in deep()
1262 gem_close(fd, dep[m]); in deep()
1268 __sync_read_u32_count(fd, result, read_buf, sizeof(read_buf)); in deep()
1269 gem_close(fd, result); in deep()
1283 static int __execbuf(int fd, struct drm_i915_gem_execbuffer2 *execbuf) in __execbuf() argument
1286 if (ioctl(fd, DRM_IOCTL_I915_GEM_EXECBUFFER2, execbuf)) in __execbuf()
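
__execbuf() is the raw wrapper used by test_pi_ringfull() so that a blocking or interrupted submission is reported as an errno value instead of being retried by the library. A hedged completion of the two-line hit, assuming the usual -errno convention and the includes already pulled in by the test:

/* Assumed completion: return 0 on success, -errno on failure, so callers
 * can assert on specific errors such as -EINTR. */
static int __execbuf(int fd, struct drm_i915_gem_execbuffer2 *execbuf)
{
	int err = 0;

	if (ioctl(fd, DRM_IOCTL_I915_GEM_EXECBUFFER2, execbuf))
		err = -errno;

	return err;
}
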
1291 static void wide(int fd, unsigned ring) in wide() argument
1294 unsigned int ring_size = gem_measure_ring_inflight(fd, ring, MEASURE_RING_NEW_CTX); in wide()
1305 ctx[n] = gem_context_create(fd); in wide()
1307 result = gem_create(fd, 4*MAX_CONTEXTS); in wide()
1309 plug = igt_cork_plug(&cork, fd); in wide()
1316 store_dword(fd, ctx[n], ring, result, 4*n, ctx[n], plug, I915_GEM_DOMAIN_INSTRUCTION); in wide()
1322 unplug_show_queue(fd, &cork, ring); in wide()
1323 gem_close(fd, plug); in wide()
1326 gem_context_destroy(fd, ctx[n]); in wide()
1328 __sync_read_u32_count(fd, result, result_read, sizeof(result_read)); in wide()
1332 gem_close(fd, result); in wide()
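
wide() creates a batch of contexts, each storing its own id into its own dword of the result buffer while the cork holds everything back. The per-slot verification is not among the hits; the assumed loop below shows the intended invariant, with count standing in for the (not shown) number of contexts actually created.

/* Assumed verification for wide(): slot n of the result buffer must hold
 * the id of ctx[n]. "count" is hypothetical: the real loop bound is not
 * visible in the hits above. */
for (unsigned int n = 0; n < count; n++)
	igt_assert_eq_u32(result_read[n], ctx[n]);
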
1336 static void reorder_wide(int fd, unsigned ring) in reorder_wide() argument
1338 const int gen = intel_gen(intel_get_drm_devid(fd)); in reorder_wide()
1343 unsigned int ring_size = gem_measure_ring_inflight(fd, ring, MEASURE_RING_NEW_CTX); in reorder_wide()
1349 result = gem_create(fd, 4096); in reorder_wide()
1350 target = gem_create(fd, 4096); in reorder_wide()
1351 plug = igt_cork_plug(&cork, fd); in reorder_wide()
1353 expected = gem_mmap__cpu(fd, target, 0, 4096, PROT_WRITE); in reorder_wide()
1354 gem_set_domain(fd, target, I915_GEM_DOMAIN_CPU, I915_GEM_DOMAIN_CPU); in reorder_wide()
1380 execbuf.rsvd1 = gem_context_create(fd); in reorder_wide()
1381 gem_context_set_priority(fd, execbuf.rsvd1, n); in reorder_wide()
1383 obj[2].handle = gem_create(fd, sz); in reorder_wide()
1384 batch = gem_mmap__gtt(fd, obj[2].handle, sz, PROT_WRITE); in reorder_wide()
1385 gem_set_domain(fd, obj[2].handle, I915_GEM_DOMAIN_GTT, I915_GEM_DOMAIN_GTT); in reorder_wide()
1416 gem_execbuf(fd, &execbuf); in reorder_wide()
1420 gem_close(fd, obj[2].handle); in reorder_wide()
1421 gem_context_destroy(fd, execbuf.rsvd1); in reorder_wide()
1424 unplug_show_queue(fd, &cork, ring); in reorder_wide()
1425 gem_close(fd, plug); in reorder_wide()
1427 __sync_read_u32_count(fd, result, result_read, sizeof(result_read)); in reorder_wide()
1433 gem_close(fd, result); in reorder_wide()
1434 gem_close(fd, target); in reorder_wide()
1450 static void test_pi_ringfull(int fd, unsigned int engine) in test_pi_ringfull() argument
1468 obj[1].handle = gem_create(fd, 4096); in test_pi_ringfull()
1469 gem_write(fd, obj[1].handle, 0, &bbe, sizeof(bbe)); in test_pi_ringfull()
1476 execbuf.rsvd1 = gem_context_create(fd); in test_pi_ringfull()
1477 gem_context_set_priority(fd, execbuf.rsvd1, MAX_PRIO); in test_pi_ringfull()
1478 gem_execbuf(fd, &execbuf); in test_pi_ringfull()
1479 gem_sync(fd, obj[1].handle); in test_pi_ringfull()
1482 execbuf.rsvd1 = gem_context_create(fd); in test_pi_ringfull()
1483 gem_context_set_priority(fd, execbuf.rsvd1, MIN_PRIO); in test_pi_ringfull()
1484 gem_execbuf(fd, &execbuf); in test_pi_ringfull()
1485 gem_sync(fd, obj[1].handle); in test_pi_ringfull()
1488 obj[0].handle = igt_cork_plug(&c, fd); in test_pi_ringfull()
1503 if (__execbuf(fd, &execbuf) == 0) { in test_pi_ringfull()
1544 err = __execbuf(fd, &execbuf); in test_pi_ringfull()
1563 igt_assert_eq(__execbuf(fd, &execbuf), -EINTR); in test_pi_ringfull()
1571 gem_context_destroy(fd, execbuf.rsvd1); in test_pi_ringfull()
1572 gem_context_destroy(fd, vip); in test_pi_ringfull()
1573 gem_close(fd, obj[1].handle); in test_pi_ringfull()
1574 gem_close(fd, obj[0].handle); in test_pi_ringfull()
1639 int fd = -1; variable
1644 fd = drm_open_driver_master(DRIVER_INTEL);
1645 gem_submission_print_method(fd);
1646 gem_scheduler_print_capability(fd);
1648 igt_require_gem(fd);
1649 gem_require_mmap_wc(fd);
1650 gem_require_contexts(fd);
1652 igt_fork_hang_detector(fd);
1662 igt_require(gem_ring_has_physical_engine(fd, e->exec_id | e->flags));
1663 igt_require(gem_can_store_dword(fd, e->exec_id | e->flags));
1664 fifo(fd, e->exec_id | e->flags);
1668 igt_require(gem_ring_has_physical_engine(fd, e->exec_id | e->flags));
1669 igt_require(gem_can_store_dword(fd, e->exec_id | e->flags));
1670 independent(fd, e->exec_id | e->flags);
1677 igt_require(gem_scheduler_enabled(fd));
1678 igt_require(gem_scheduler_has_ctx_priority(fd));
1682 semaphore_userlock(fd);
1684 semaphore_codependency(fd);
1686 semaphore_resolve(fd);
1688 semaphore_noskip(fd);
1691 smoketest(fd, ALL_ENGINES, 30);
1699 igt_require(gem_ring_has_physical_engine(fd, e->exec_id | e->flags));
1700 igt_require(gem_can_store_dword(fd, e->exec_id | e->flags));
1704 reorder(fd, e->exec_id | e->flags, EQUAL);
1707 reorder(fd, e->exec_id | e->flags, 0);
1710 promotion(fd, e->exec_id | e->flags);
1714 igt_require(gem_scheduler_has_preemption(fd));
1718 preempt(fd, e->exec_id | e->flags, 0);
1721 preempt(fd, e->exec_id | e->flags, NEW_CTX);
1724 preempt_self(fd, e->exec_id | e->flags);
1727 preempt_other(fd, e->exec_id | e->flags, 0);
1730 preempt_other(fd, e->exec_id | e->flags, CHAIN);
1733 preempt_queue(fd, e->exec_id | e->flags, 0);
1736 preempt_queue(fd, e->exec_id | e->flags, CHAIN);
1738 preempt_queue(fd, e->exec_id | e->flags, CONTEXTS);
1741 preempt_queue(fd, e->exec_id | e->flags, CONTEXTS | CHAIN);
1748 hang = igt_allow_hang(fd, 0, 0);
1752 preempt(fd, e->exec_id | e->flags, NEW_CTX | HANG_LP);
1756 preemptive_hang(fd, e->exec_id | e->flags);
1759 igt_disallow_hang(fd, hang);
1760 igt_fork_hang_detector(fd);
1766 deep(fd, e->exec_id | e->flags);
1769 wide(fd, e->exec_id | e->flags);
1772 reorder_wide(fd, e->exec_id | e->flags);
1775 smoketest(fd, e->exec_id | e->flags, 5);
1782 igt_require(gem_scheduler_enabled(fd));
1783 igt_require(gem_scheduler_has_ctx_priority(fd));
1792 igt_require(gem_ring_has_physical_engine(fd, e->exec_id | e->flags));
1793 igt_require(gem_scheduler_has_preemption(fd));
1797 test_pi_ringfull(fd, e->exec_id | e->flags);
1804 igt_require(gem_scheduler_enabled(fd));
1805 igt_require(gem_scheduler_has_semaphores(fd));
1809 measure_semaphore_power(fd);
1814 close(fd);
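
The remaining hits all come from the top-level igt_main block. The sketch below shows how those fixture lines are assumed to fit together; the igt_fixture grouping, the subtest placement and the igt_stop_hang_detector() call are assumptions, since only the fd-bearing lines appear above.

igt_main
{
	int fd = -1;

	igt_fixture {
		fd = drm_open_driver_master(DRIVER_INTEL);
		gem_submission_print_method(fd);
		gem_scheduler_print_capability(fd);

		igt_require_gem(fd);
		gem_require_mmap_wc(fd);
		gem_require_contexts(fd);

		igt_fork_hang_detector(fd);
	}

	/* Per-engine subtests (fifo, independent, reorder, promotion, the
	 * preempt* family, deep, wide, reorder_wide, smoketest,
	 * test_pi_ringfull, semaphore and power measurements) are registered
	 * here, each gated on the scheduler capabilities queried above. */

	igt_fixture {
		igt_stop_hang_detector(); /* assumed counterpart of the fork above */
		close(fd);
	}
}
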