// Copyright (C) 2023 The Android Open Source Project
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.

#include "host/magma/IntelDrmDecoder.h"

#include <i915_drm.h>
#include <magma_intel_gen_defs.h>
#include <sys/ioctl.h>
#include <unistd.h>

#include <cerrno>
#include <cstring>
#include <ctime>
#include <vector>

#include "RenderThreadInfoMagma.h"
#include "host/magma/Connection.h"
#include "host/magma/DrmDevice.h"
#include "magma/magma_common_defs.h"

namespace gfxstream {
namespace magma {

std::unique_ptr<IntelDrmDecoder> IntelDrmDecoder::create(uint32_t context_id) {
    std::unique_ptr<IntelDrmDecoder> decoder(new IntelDrmDecoder());
    decoder->mContextId = context_id;
    INFO("IntelDrmDecoder created for context %" PRIu32, context_id);
    return decoder;
}

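// Binds a magma_* entry point in the server dispatch table to the corresponding
// member function of the IntelDrmDecoder owned by the calling render thread.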
#define MAGMA_DECODER_BIND_METHOD(method)                                  \
    magma_server_context_t::method = [](auto... args) {                    \
        auto decoder = RenderThreadInfoMagma::get()->mMagmaDec.get();      \
        return static_cast<IntelDrmDecoder*>(decoder)->method(args...);    \
    }

IntelDrmDecoder::IntelDrmDecoder() : Decoder() {
    MAGMA_DECODER_BIND_METHOD(magma_device_import);
    MAGMA_DECODER_BIND_METHOD(magma_device_release);
    MAGMA_DECODER_BIND_METHOD(magma_device_query_fudge);
    MAGMA_DECODER_BIND_METHOD(magma_device_create_connection);
    MAGMA_DECODER_BIND_METHOD(magma_connection_release);
    MAGMA_DECODER_BIND_METHOD(magma_connection_create_buffer);
    MAGMA_DECODER_BIND_METHOD(magma_connection_release_buffer);
    MAGMA_DECODER_BIND_METHOD(magma_connection_create_semaphore);
    MAGMA_DECODER_BIND_METHOD(magma_connection_release_semaphore);
    MAGMA_DECODER_BIND_METHOD(magma_buffer_export);
    MAGMA_DECODER_BIND_METHOD(magma_semaphore_signal);
    MAGMA_DECODER_BIND_METHOD(magma_semaphore_reset);
    MAGMA_DECODER_BIND_METHOD(magma_poll);
    MAGMA_DECODER_BIND_METHOD(magma_connection_get_error);
    MAGMA_DECODER_BIND_METHOD(magma_connection_create_context);
    MAGMA_DECODER_BIND_METHOD(magma_connection_release_context);
    MAGMA_DECODER_BIND_METHOD(magma_connection_map_buffer);
    MAGMA_DECODER_BIND_METHOD(magma_connection_unmap_buffer);
}

// TODO(b/279936417): Make objects and their IDs orthogonal.
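// For now, an object's ID is its container handle shifted into the upper 32 bits;
// MAGMA_ID_TO_OBJECT reverses the mapping.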
#define MAGMA_OBJECT_TO_ID(x) ((x) << 32ull)
#define MAGMA_ID_TO_OBJECT(x) ((x) >> 32ull)

magma_status_t IntelDrmDecoder::magma_device_import(magma_handle_t device_channel,
                                                    magma_device_t* device_out) {
    *device_out = 0;
    auto device = DrmDevice::create();
    if (!device) {
        return MAGMA_STATUS_INTERNAL_ERROR;
    }
    *device_out = mDevices.create(std::move(*device));
    INFO("magma_device_import() -> %" PRIu64, *device_out);
    return MAGMA_STATUS_OK;
}

void IntelDrmDecoder::magma_device_release(magma_device_t device) {
    INFO("magma_device_release(%" PRIu64 ")", device);
}

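// Returns the current CLOCK_MONOTONIC (or CLOCK_MONOTONIC_RAW) time in
// nanoseconds, or 0 if the clock could not be read.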
static uint64_t GetNsMonotonic(bool raw) {
    timespec ts{};
    int result = clock_gettime(raw ? CLOCK_MONOTONIC_RAW : CLOCK_MONOTONIC, &ts);
    if (result < 0) {
        return 0;
    }
    constexpr uint64_t kNsPerSec = 1'000'000'000ull;
    return static_cast<uint64_t>(ts.tv_sec) * kNsPerSec + ts.tv_nsec;
}

// Converts a DRM topology to a Magma topology.
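// The returned buffer starts with a magma_intel_gen_topology header, followed by the
// slice mask, a subslice mask for each enabled slice, and an EU mask for each enabled
// subslice.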
static std::vector<uint8_t> MakeMagmaTopology(const drm_i915_query_topology_info* info) {
    auto read_bit = [](const uint8_t* ptr, size_t offset) -> bool {
        return (ptr[offset / 8] >> (offset % 8)) & 1;
    };

    auto append_buffer = [](std::vector<uint8_t>& buffer, const uint8_t* src, size_t len) -> void {
        auto offset = buffer.size();
        buffer.resize(offset + len);
        memcpy(buffer.data() + offset, src, len);
    };

    const auto* slice_base = info->data;
    const auto* subslice_base = info->data + info->subslice_offset;
    const auto* eu_base = info->data + info->eu_offset;

    // Start with a buffer just large enough to hold the magma struct.
    std::vector<uint8_t> buffer(sizeof(magma_intel_gen_topology));

    // Copy the slice mask.
    size_t slice_data_bytes = (info->max_slices + 7) / 8;
    append_buffer(buffer, slice_base, slice_data_bytes);

    for (uint32_t slice = 0; slice < info->max_slices; ++slice) {
        if (!read_bit(slice_base, slice)) {
            continue;
        }
        const auto* subslice_data = &subslice_base[slice * info->subslice_stride];

        // For each active slice, copy the subslice mask.
        size_t subslice_data_bytes = (info->max_subslices + 7) / 8;
        append_buffer(buffer, subslice_data, subslice_data_bytes);

        for (uint32_t subslice = 0; subslice < info->max_subslices; ++subslice) {
            if (!read_bit(subslice_data, subslice)) {
                continue;
            }
            const auto* eu_data =
                &eu_base[(slice * info->max_subslices + subslice) * info->eu_stride];

            // For each active subslice, copy the eu mask.
            size_t eu_data_bytes = (info->max_eus_per_subslice + 7) / 8;
            append_buffer(buffer, eu_data, eu_data_bytes);
        }
    }

    // Populate the base struct elements.
    auto magma_topology = reinterpret_cast<magma_intel_gen_topology*>(buffer.data());
    magma_topology->max_slice_count = info->max_slices;
    magma_topology->max_subslice_count = info->max_subslices;
    magma_topology->max_eu_count = info->max_eus_per_subslice;
    magma_topology->data_byte_count = buffer.size() - sizeof(magma_intel_gen_topology);

    return buffer;
}

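// Handles device queries. Simple queries return their value via result_out; queries
// that produce more data return it through a host-allocated buffer identified by
// *result_buffer_mapping_id_inout and *result_buffer_size_inout.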
magma_status_t IntelDrmDecoder::magma_device_query_fudge(magma_device_t device, uint64_t id,
                                                         magma_bool_t host_allocate,
                                                         uint64_t* result_buffer_mapping_id_inout,
                                                         uint64_t* result_buffer_size_inout,
                                                         uint64_t* result_out) {
    *result_out = 0;

    auto dev = mDevices.get(device);
    if (!dev) {
        return MAGMA_STATUS_INVALID_ARGS;
    }

    // TODO(b/275093891): query or standardize hard-coded values
    constexpr uint32_t kExtraPageCount = 9;
    constexpr uint64_t kIntelTimestampRegisterOffset = 0x23f8;
    switch (id) {
        case MAGMA_QUERY_VENDOR_ID: {
            *result_out = MAGMA_VENDOR_ID_INTEL;
            return MAGMA_STATUS_OK;
        }
        case MAGMA_QUERY_DEVICE_ID: {
            auto result = dev->getParam(I915_PARAM_CHIPSET_ID);
            if (!result) {
                return MAGMA_STATUS_INTERNAL_ERROR;
            }
            *result_out = *result;
            return MAGMA_STATUS_OK;
        }
        case MAGMA_QUERY_IS_TOTAL_TIME_SUPPORTED: {
            *result_out = 0;
            return MAGMA_STATUS_OK;
        }
        case kMagmaIntelGenQuerySubsliceAndEuTotal: {
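            // Pack the subslice count into the upper 32 bits and the EU count into
            // the lower 32 bits.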
            auto subslice_result = dev->getParam(I915_PARAM_SUBSLICE_TOTAL);
            auto eu_result = dev->getParam(I915_PARAM_EU_TOTAL);
            if (!subslice_result || !eu_result) {
                return MAGMA_STATUS_INTERNAL_ERROR;
            }
            *result_out = (static_cast<uint64_t>(*subslice_result) << 32) | *eu_result;
            return MAGMA_STATUS_OK;
        }
        case kMagmaIntelGenQueryGttSize: {
            *result_out = 0;
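            // Create a transient GEM context, read its GTT size parameter, then destroy it.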
            drm_i915_gem_context_create_ext create_params{};
            int create_result = dev->ioctl(DRM_IOCTL_I915_GEM_CONTEXT_CREATE_EXT, &create_params);
            if (create_result) {
                ERR("DRM_IOCTL_I915_GEM_CONTEXT_CREATE_EXT failed: %d", errno);
                return MAGMA_STATUS_INTERNAL_ERROR;
            }
            drm_i915_gem_context_param query_params{.ctx_id = create_params.ctx_id,
                                                    .param = I915_CONTEXT_PARAM_GTT_SIZE};
            int query_result = dev->ioctl(DRM_IOCTL_I915_GEM_CONTEXT_GETPARAM, &query_params);
            if (query_result) {
                ERR("DRM_IOCTL_I915_GEM_CONTEXT_GETPARAM failed: %d", errno);
                return MAGMA_STATUS_INTERNAL_ERROR;
            }
            drm_i915_gem_context_destroy destroy_params{.ctx_id = create_params.ctx_id};
            int destroy_result = dev->ioctl(DRM_IOCTL_I915_GEM_CONTEXT_DESTROY, &destroy_params);
            if (destroy_result) {
                ERR("DRM_IOCTL_I915_GEM_CONTEXT_DESTROY failed: %d", errno);
                return MAGMA_STATUS_INTERNAL_ERROR;
            }
            *result_out = query_params.value;
            ERR("GTT size %" PRIu64, *result_out);
            return MAGMA_STATUS_OK;
        }
        case kMagmaIntelGenQueryExtraPageCount: {
            *result_out = kExtraPageCount;
            return MAGMA_STATUS_OK;
        }
        case kMagmaIntelGenQueryTimestamp: {
            if (!host_allocate) {
                WARN("Guest-allocated buffers are not currently supported.");
                return MAGMA_STATUS_UNIMPLEMENTED;
            }
            auto buffer =
                DrmBuffer::create(*dev, mContextId, sizeof(magma_intel_gen_timestamp_query));
            if (!buffer) {
                return MAGMA_STATUS_MEMORY_ERROR;
            }
            auto ptr = buffer->map();
            if (!ptr) {
                return MAGMA_STATUS_MEMORY_ERROR;
            }
            auto ts = reinterpret_cast<magma_intel_gen_timestamp_query*>(ptr);
            ts->monotonic_raw_timestamp[0] = GetNsMonotonic(true);
            ts->monotonic_timestamp = GetNsMonotonic(false);
            // Attempt to read device timestamp register.
            drm_i915_reg_read params{.offset = kIntelTimestampRegisterOffset | I915_REG_READ_8B_WA};
            int result = dev->ioctl(DRM_IOCTL_I915_REG_READ, &params);
            if (result == 0) {
                ts->device_timestamp = params.val;
            } else {
                ts->device_timestamp = 0;
            }
            // The driver uses the second timestamp to determine the sampling span.
            ts->monotonic_raw_timestamp[1] = GetNsMonotonic(true);
            *result_buffer_mapping_id_inout = buffer->getId();
            *result_buffer_size_inout = buffer->size();
            // Add the buffer to the container.
            auto gem_handle = buffer->getHandle();
            auto magma_handle = mBuffers.create(std::move(*buffer));
            mGemHandleToBuffer.emplace(gem_handle, magma_handle);
            return MAGMA_STATUS_OK;
        }
        case kMagmaIntelGenQueryTopology: {
            if (!host_allocate) {
                WARN("Guest-allocated buffers are not currently supported.");
                return MAGMA_STATUS_UNIMPLEMENTED;
            }
            // Check how much space is needed to represent topology.
            drm_i915_query_item item{.query_id = DRM_I915_QUERY_TOPOLOGY_INFO};
            drm_i915_query query{.num_items = 1, .items_ptr = reinterpret_cast<uint64_t>(&item)};
            int result = dev->ioctl(DRM_IOCTL_I915_QUERY, &query);
            if (result != 0) {
                ERR("DRM_IOCTL_I915_QUERY failed: %d", errno);
                return MAGMA_STATUS_INTERNAL_ERROR;
            }
            std::vector<uint8_t> topology_buffer(item.length);
            item.data_ptr = reinterpret_cast<uint64_t>(topology_buffer.data());

            // Re-run the query with the allocated buffer.
            result = dev->ioctl(DRM_IOCTL_I915_QUERY, &query);
            if (result != 0) {
                ERR("DRM_IOCTL_I915_QUERY failed: %d", errno);
                return MAGMA_STATUS_INTERNAL_ERROR;
            }

            // Convert to the magma-compatible topology layout.
            auto magma_topology_buffer = MakeMagmaTopology(
                reinterpret_cast<drm_i915_query_topology_info*>(topology_buffer.data()));

            // Create a magma buffer and copy the layout struct to it.
            auto buffer = DrmBuffer::create(*dev, mContextId, magma_topology_buffer.size());
            if (!buffer) {
                return MAGMA_STATUS_MEMORY_ERROR;
            }
            auto ptr = buffer->map();
            if (!ptr) {
                return MAGMA_STATUS_MEMORY_ERROR;
            }
            memcpy(ptr, magma_topology_buffer.data(), magma_topology_buffer.size());
            *result_buffer_mapping_id_inout = buffer->getId();
            *result_buffer_size_inout = buffer->size();
            auto gem_handle = buffer->getHandle();
            auto magma_handle = mBuffers.create(std::move(*buffer));
            mGemHandleToBuffer.emplace(gem_handle, magma_handle);
            return MAGMA_STATUS_OK;
        }
        case kMagmaIntelGenQueryHasContextIsolation: {
            auto result = dev->getParam(I915_PARAM_HAS_CONTEXT_ISOLATION);
            if (!result) {
                return MAGMA_STATUS_INTERNAL_ERROR;
            }
            *result_out = *result;
            return MAGMA_STATUS_OK;
        }
        case kMagmaIntelGenQueryTimestampFrequency: {
            auto result = dev->getParam(I915_PARAM_CS_TIMESTAMP_FREQUENCY);
            if (!result) {
                return MAGMA_STATUS_INTERNAL_ERROR;
            }
            *result_out = *result;
            return MAGMA_STATUS_OK;
        }
        default: {
            return MAGMA_STATUS_INVALID_ARGS;
        }
    }
}

magma_status_t IntelDrmDecoder::magma_device_create_connection(magma_device_t device,
                                                               magma_connection_t* connection_out) {
    *connection_out = MAGMA_INVALID_OBJECT_ID;
    auto dev = mDevices.get(device);
    if (!dev) {
        return MAGMA_STATUS_INVALID_ARGS;
    }
    *connection_out = mConnections.create(*dev);
    return MAGMA_STATUS_OK;
}

void IntelDrmDecoder::magma_connection_release(magma_connection_t connection) {
    bool erased = mConnections.erase(connection);
    if (!erased) {
        WARN("invalid connection %" PRIu64, connection);
    }
}

magma_status_t IntelDrmDecoder::magma_connection_create_buffer(magma_connection_t connection,
                                                               uint64_t size, uint64_t* size_out,
                                                               magma_buffer_t* buffer_out,
                                                               magma_buffer_id_t* id_out) {
    *size_out = 0;
    *buffer_out = MAGMA_INVALID_OBJECT_ID;
    *id_out = MAGMA_INVALID_OBJECT_ID;
    auto con = mConnections.get(connection);
    if (!con) {
        return MAGMA_STATUS_INVALID_ARGS;
    }
    auto buffer = DrmBuffer::create(con->getDevice(), mContextId, size);
    if (!buffer) {
        return MAGMA_STATUS_MEMORY_ERROR;
    }
    auto gem_handle = buffer->getHandle();
    // Read the size before the DrmBuffer is moved into the container.
    auto buffer_size = buffer->size();
    auto magma_handle = mBuffers.create(std::move(*buffer));
    mGemHandleToBuffer.emplace(gem_handle, magma_handle);
    *size_out = buffer_size;
    *buffer_out = magma_handle;
    *id_out = MAGMA_OBJECT_TO_ID(magma_handle);
    return MAGMA_STATUS_OK;
}

void IntelDrmDecoder::magma_connection_release_buffer(magma_connection_t connection,
                                                      magma_buffer_t buffer) {
    auto con = mConnections.get(connection);
    if (!con) {
        return;
    }
    auto buf = mBuffers.get(buffer);
    if (!buf) {
        return;
    }
    mGemHandleToBuffer.erase(buf->getHandle());
    mBuffers.erase(buffer);
}

magma_status_t IntelDrmDecoder::magma_connection_create_semaphore(
    magma_connection_t magma_connection, magma_semaphore_t* semaphore_out,
    magma_semaphore_id_t* id_out) {
    *semaphore_out = MAGMA_INVALID_OBJECT_ID;
    *id_out = MAGMA_INVALID_OBJECT_ID;
    WARN("%s not implemented", __FUNCTION__);
    return MAGMA_STATUS_UNIMPLEMENTED;
}

void IntelDrmDecoder::magma_connection_release_semaphore(magma_connection_t connection,
                                                         magma_semaphore_t semaphore) {
    WARN("%s not implemented", __FUNCTION__);
}

magma_status_t IntelDrmDecoder::magma_buffer_get_info(magma_buffer_t buffer,
                                                      magma_buffer_info_t* info_out) {
    auto buf = mBuffers.get(buffer);
    if (!buf) {
        return MAGMA_STATUS_INVALID_ARGS;
    }
    info_out->size = buf->size();
    info_out->committed_byte_count = buf->size();
    return MAGMA_STATUS_OK;
}

magma_status_t IntelDrmDecoder::magma_buffer_get_handle(magma_buffer_t buffer,
                                                        magma_handle_t* handle_out) {
    auto buf = mBuffers.get(buffer);
    if (!buf) {
        return MAGMA_STATUS_INVALID_ARGS;
    }
    *handle_out = buf->getId();
    return MAGMA_STATUS_OK;
}

magma_status_t IntelDrmDecoder::magma_buffer_export(magma_buffer_t buffer,
                                                    magma_handle_t* buffer_handle_out) {
    *buffer_handle_out = MAGMA_INVALID_OBJECT_ID;
    WARN("%s not implemented", __FUNCTION__);
    return MAGMA_STATUS_UNIMPLEMENTED;
}

void IntelDrmDecoder::magma_semaphore_signal(magma_semaphore_t semaphore) {
    WARN("%s not implemented", __FUNCTION__);
}

void IntelDrmDecoder::magma_semaphore_reset(magma_semaphore_t semaphore) {
    WARN("%s not implemented", __FUNCTION__);
}

magma_status_t IntelDrmDecoder::magma_poll(magma_poll_item_t* items, uint32_t count,
                                           uint64_t timeout_ns) {
    WARN("%s not implemented", __FUNCTION__);
    return MAGMA_STATUS_UNIMPLEMENTED;
}

magma_status_t IntelDrmDecoder::magma_connection_get_error(magma_connection_t connection) {
    WARN("%s not implemented", __FUNCTION__);
    return MAGMA_STATUS_UNIMPLEMENTED;
}

magma_status_t IntelDrmDecoder::magma_connection_create_context(magma_connection_t connection,
                                                                uint32_t* context_id_out) {
    *context_id_out = MAGMA_INVALID_OBJECT_ID;
    auto con = mConnections.get(connection);
    if (!con) {
        return MAGMA_STATUS_INVALID_ARGS;
    }
    auto ctx = con->createContext();
    if (!ctx) {
        WARN("error creating context");
        return MAGMA_STATUS_INTERNAL_ERROR;
    }
    *context_id_out = ctx.value();
    return MAGMA_STATUS_OK;
}

void IntelDrmDecoder::magma_connection_release_context(magma_connection_t connection,
                                                       uint32_t context_id) {
    WARN("%s not implemented", __FUNCTION__);
}

magma_status_t IntelDrmDecoder::magma_connection_map_buffer(magma_connection_t connection,
                                                            uint64_t hw_va, magma_buffer_t buffer,
                                                            uint64_t offset, uint64_t length,
                                                            uint64_t map_flags) {
    WARN("%s not implemented", __FUNCTION__);
    return MAGMA_STATUS_UNIMPLEMENTED;
}

void IntelDrmDecoder::magma_connection_unmap_buffer(magma_connection_t connection, uint64_t hw_va,
                                                    magma_buffer_t buffer) {
    WARN("%s not implemented", __FUNCTION__);
}

} // namespace magma
} // namespace gfxstream