/*
 * Copyright © 2019 Intel Corporation
 *
 * Permission is hereby granted, free of charge, to any person obtaining a
 * copy of this software and associated documentation files (the "Software"),
 * to deal in the Software without restriction, including without limitation
 * the rights to use, copy, modify, merge, publish, distribute, sublicense,
 * and/or sell copies of the Software, and to permit persons to whom the
 * Software is furnished to do so, subject to the following conditions:
 *
 * The above copyright notice and this permission notice (including the next
 * paragraph) shall be included in all copies or substantial portions of the
 * Software.
 *
 * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
 * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
 * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT.  IN NO EVENT SHALL
 * THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
 * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
 * FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS
 * IN THE SOFTWARE.
 */

#include "drmtest.h"
#include "ioctl_wrappers.h"

#include "i915/gem_engine_topology.h"

/*
 * Limit what we support for simplicity, due to limitations in how much we
 * can address via execbuf2.
 */
#define SIZEOF_CTX_PARAM	offsetof(struct i915_context_param_engines, \
					 engines[GEM_MAX_ENGINES])
#define SIZEOF_QUERY		offsetof(struct drm_i915_query_engine_info, \
					 engines[GEM_MAX_ENGINES])

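/*
 * Declare an engine map struct e__ sized for N__ entries, together with a
 * drm_i915_gem_context_param p__ pre-filled to get/set
 * I915_CONTEXT_PARAM_ENGINES on context c__, with p__.value pointing at e__.
 */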
#define DEFINE_CONTEXT_ENGINES_PARAM(e__, p__, c__, N__) \
		I915_DEFINE_CONTEXT_PARAM_ENGINES(e__, N__); \
		struct drm_i915_gem_context_param p__ = { \
			.param = I915_CONTEXT_PARAM_ENGINES, \
			.ctx_id = c__, \
			.size = SIZEOF_CTX_PARAM, \
			.value = to_user_pointer(&e__), \
		}

static int __gem_query(int fd, struct drm_i915_query *q)
{
	int err = 0;

	if (igt_ioctl(fd, DRM_IOCTL_I915_QUERY, q))
		err = -errno;

	errno = 0;
	return err;
}

static void gem_query(int fd, struct drm_i915_query *q)
{
	igt_assert_eq(__gem_query(fd, q), 0);
}

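/*
 * Issue a single DRM_I915_QUERY_ENGINE_INFO item, filling the caller-supplied
 * query_engines buffer of the given length with the kernel's engine list.
 */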
static void query_engines(int fd,
			  struct drm_i915_query_engine_info *query_engines,
			  int length)
{
	struct drm_i915_query_item item = { };
	struct drm_i915_query query = { };

	item.query_id = DRM_I915_QUERY_ENGINE_INFO;
	query.items_ptr = to_user_pointer(&item);
	query.num_items = 1;
	item.length = length;

	item.data_ptr = to_user_pointer(query_engines);

	gem_query(fd, &query);
}

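/*
 * Write the class:instance pairs from ed into the i915_context_param_engines
 * struct that param->value points at, then install the resulting engine map
 * on the context with gem_context_set_param().
 */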
static void ctx_map_engines(int fd, struct intel_engine_data *ed,
			    struct drm_i915_gem_context_param *param)
{
	struct i915_context_param_engines *engines =
			from_user_pointer(param->value);
	int i = 0;

	for (typeof(engines->engines[0]) *p =
	     &engines->engines[0];
	     i < ed->nengines; i++, p++) {
		p->engine_class = ed->engines[i].class;
		p->engine_instance = ed->engines[i].instance;
	}

	param->size = offsetof(typeof(*engines), engines[i]);
	engines->extensions = 0;

	gem_context_set_param(fd, param);
}

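/*
 * Fill in an intel_execution_engine2 from a class/instance pair. The
 * INVALID class with INVALID_VIRTUAL instance denotes a virtual engine;
 * anything else is matched against the static engine table to pick up its
 * name, with unknown engines reported and marked as such.
 */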
static void init_engine(struct intel_execution_engine2 *e2,
			int class, int instance, uint64_t flags)
{
	const struct intel_execution_engine2 *__e2;
	static const char *unknown_name = "unknown",
			  *virtual_name = "virtual";

	e2->class    = class;
	e2->instance = instance;
	e2->flags    = flags;

	/* engine is a virtual engine */
	if (class == I915_ENGINE_CLASS_INVALID &&
	    instance == I915_ENGINE_CLASS_INVALID_VIRTUAL) {
		e2->name = virtual_name;
		e2->is_virtual = true;
		return;
	}

	__for_each_static_engine(__e2)
		if (__e2->class == class && __e2->instance == instance)
			break;

	if (__e2->name) {
		e2->name = __e2->name;
	} else {
		igt_warn("found unknown engine (%d, %d)\n", class, instance);
		e2->name = unknown_name;
		e2->flags = -1;
	}

	/* explicitly mark it as a physical engine */
	e2->is_virtual = false;
}

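/*
 * Populate ed from the kernel's engine query. Each entry's index is stored as
 * its execbuf flags, i.e. the slot it occupies in the context engine map set
 * up by ctx_map_engines().
 */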
static void query_engine_list(int fd, struct intel_engine_data *ed)
{
	uint8_t buff[SIZEOF_QUERY] = { };
	struct drm_i915_query_engine_info *query_engine =
			(struct drm_i915_query_engine_info *) buff;
	int i;

	query_engines(fd, query_engine, SIZEOF_QUERY);

	for (i = 0; i < query_engine->num_engines; i++)
		init_engine(&ed->engines[i],
			    query_engine->engines[i].engine.engine_class,
			    query_engine->engines[i].engine.engine_instance, i);

	ed->nengines = query_engine->num_engines;
}

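/*
 * Simple iterator over an intel_engine_data: intel_get_current_engine()
 * returns the current entry (or NULL once exhausted) and intel_next_engine()
 * advances. A minimal usage sketch, assuming fd and ctx are an open i915
 * device and a valid context id supplied by the caller:
 *
 *	struct intel_engine_data ed = intel_init_engine_list(fd, ctx);
 *	struct intel_execution_engine2 *e;
 *
 *	for (e = intel_get_current_engine(&ed); e;
 *	     intel_next_engine(&ed), e = intel_get_current_engine(&ed))
 *		igt_debug("engine: %s\n", e->name);
 */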
struct intel_execution_engine2 *
intel_get_current_engine(struct intel_engine_data *ed)
{
	if (ed->n >= ed->nengines)
		ed->current_engine = NULL;
	else if (!ed->n)
		ed->current_engine = &ed->engines[0];

	return ed->current_engine;
}

void intel_next_engine(struct intel_engine_data *ed)
{
	if (ed->n + 1 < ed->nengines) {
		ed->n++;
		ed->current_engine = &ed->engines[ed->n];
	} else {
		ed->n = ed->nengines;
		ed->current_engine = NULL;
	}
}

struct intel_execution_engine2 *
intel_get_current_physical_engine(struct intel_engine_data *ed)
{
	struct intel_execution_engine2 *e;

	/* skip over virtual engines until a physical one (or NULL) is found */
	for (e = intel_get_current_engine(ed);
	     e && e->is_virtual;
	     e = intel_get_current_engine(ed))
		intel_next_engine(ed);

	return e;
}

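/*
 * Read I915_CONTEXT_PARAM_ENGINES for the context described by *p. Returns
 * -ENODEV when only listing subtests, -1 when the param cannot be read (the
 * default engine map is in use), and 0 on success, with p->size converted
 * from a byte count to the number of engines in the map.
 */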
static int gem_topology_get_param(int fd,
				  struct drm_i915_gem_context_param *p)
{
	if (igt_only_list_subtests())
		return -ENODEV;

	if (__gem_context_get_param(fd, p))
		return -1; /* using default engine map */

	if (!p->size)
		return 0;

	/* size will store the engine count */
	p->size = (p->size - sizeof(struct i915_context_param_engines)) /
		  (offsetof(struct i915_context_param_engines,
			    engines[1]) -
		  sizeof(struct i915_context_param_engines));

	igt_assert_f(p->size <= GEM_MAX_ENGINES, "unsupported engine count\n");

	return 0;
}

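/*
 * Build the engine list for a context. If the kernel has no engine-map
 * support, fall back to the static engine table filtered by gem_has_ring().
 * If the context has no engine map yet, query the physical engines and
 * install them as the context map; otherwise convert the existing map.
 *
 * A minimal usage sketch (fd and ctx are assumed to come from the caller):
 *
 *	struct intel_engine_data ed = intel_init_engine_list(fd, ctx);
 *
 *	for (int i = 0; i < ed.nengines; i++)
 *		igt_debug("%s: class %d, instance %d\n",
 *			  ed.engines[i].name,
 *			  ed.engines[i].class,
 *			  ed.engines[i].instance);
 */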
struct intel_engine_data intel_init_engine_list(int fd, uint32_t ctx_id)
{
	DEFINE_CONTEXT_ENGINES_PARAM(engines, param, ctx_id, GEM_MAX_ENGINES);
	struct intel_engine_data engine_data = { };
	int i;

	if (gem_topology_get_param(fd, &param)) {
		/* if kernel does not support engine/context mapping */
		const struct intel_execution_engine2 *e2;

		igt_debug("using pre-allocated engine list\n");

		__for_each_static_engine(e2) {
			struct intel_execution_engine2 *__e2 =
				&engine_data.engines[engine_data.nengines];

			__e2->name       = e2->name;
			__e2->instance   = e2->instance;
			__e2->class      = e2->class;
			__e2->flags      = e2->flags;
			__e2->is_virtual = false;

			if (igt_only_list_subtests() ||
			    gem_has_ring(fd, e2->flags))
				engine_data.nengines++;
		}
		return engine_data;
	}

	if (!param.size) {
		query_engine_list(fd, &engine_data);
		ctx_map_engines(fd, &engine_data, &param);
	} else {
		/* param.size contains the engine count */
		for (i = 0; i < param.size; i++)
			init_engine(&engine_data.engines[i],
				    engines.engines[i].engine_class,
				    engines.engines[i].engine_instance,
				    i);

		engine_data.nengines = i;
	}

	return engine_data;
}

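/*
 * Translate an execbuf engine index into the class/instance stored in the
 * context's engine map. Returns -EINVAL if the context has no engine map.
 */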
int gem_context_lookup_engine(int fd, uint64_t engine, uint32_t ctx_id,
			      struct intel_execution_engine2 *e)
{
	DEFINE_CONTEXT_ENGINES_PARAM(engines, param, ctx_id, GEM_MAX_ENGINES);

	/* a bit paranoid */
	igt_assert(e);

	if (gem_topology_get_param(fd, &param) || !param.size)
		return -EINVAL;

	e->class = engines.engines[engine].engine_class;
	e->instance = engines.engines[engine].engine_instance;

	return 0;
}

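/*
 * If the context does not have an engine map yet, query the physical engines
 * and install all of them on the context; otherwise leave the context as-is.
 */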
void gem_context_set_all_engines(int fd, uint32_t ctx)
{
	DEFINE_CONTEXT_ENGINES_PARAM(engines, param, ctx, GEM_MAX_ENGINES);
	struct intel_engine_data engine_data = { };

	if (!gem_topology_get_param(fd, &param) && !param.size) {
		query_engine_list(fd, &engine_data);
		ctx_map_engines(fd, &engine_data, &param);
	}
}

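/*
 * Check whether the kernel supports I915_CONTEXT_PARAM_ENGINES, i.e. whether
 * contexts can carry an explicit engine map at all.
 */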
bool gem_has_engine_topology(int fd)
{
	struct drm_i915_gem_context_param param = {
		.param = I915_CONTEXT_PARAM_ENGINES,
	};

	return !__gem_context_get_param(fd, &param);
}

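/*
 * Map legacy execbuf ring-selector flags onto a static engine entry.
 * I915_EXEC_DEFAULT yields a synthetic "default" engine; unrecognised flags
 * yield an "invalid" placeholder with class/instance/flags set to -1.
 */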
struct intel_execution_engine2 gem_eb_flags_to_engine(unsigned int flags)
{
	/* 3 << 13 covers the BSD1/BSD2 ring-selector bits (I915_EXEC_BSD_MASK) */
	const unsigned int ring = flags & (I915_EXEC_RING_MASK | 3 << 13);
	struct intel_execution_engine2 e2__ = {
		.class = -1,
		.instance = -1,
		.flags = -1,
		.name = "invalid"
	};

	if (ring == I915_EXEC_DEFAULT) {
		e2__.flags = I915_EXEC_DEFAULT;
		e2__.name = "default";
	} else {
		const struct intel_execution_engine2 *e2;

		__for_each_static_engine(e2) {
			if (e2->flags == ring)
				return *e2;
		}
	}

	return e2__;
}

bool gem_context_has_engine_map(int fd, uint32_t ctx)
{
	struct drm_i915_gem_context_param param = {
		.param = I915_CONTEXT_PARAM_ENGINES,
		.ctx_id = ctx
	};

	/*
	 * If the kernel is too old to support PARAM_ENGINES,
	 * then naturally the context has no engine map.
	 */
	if (__gem_context_get_param(fd, &param))
		return false;

	return param.size;
}

bool gem_engine_is_equal(const struct intel_execution_engine2 *e1,
			 const struct intel_execution_engine2 *e2)
{
	return e1->class == e2->class && e1->instance == e2->instance;
}