/* Copyright (c) 2015-2019 The Khronos Group Inc.
 * Copyright (c) 2015-2019 Valve Corporation
 * Copyright (c) 2015-2019 LunarG, Inc.
 * Copyright (C) 2015-2019 Google Inc.
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 *
 * Author: Mark Lobodzinski <mark@lunarg.com>
 * Author: Jon Ashburn <jon@lunarg.com>
 * Author: Tobin Ehlis <tobine@google.com>
 */

// shared_mutex support added in MSVC 2015 update 2
#if defined(_MSC_FULL_VER) && _MSC_FULL_VER >= 190023918 && NTDDI_VERSION > NTDDI_WIN10_RS2
#include <shared_mutex>
typedef std::shared_mutex object_lifetime_mutex_t;
typedef std::shared_lock<object_lifetime_mutex_t> read_object_lifetime_mutex_t;
typedef std::unique_lock<object_lifetime_mutex_t> write_object_lifetime_mutex_t;
#else
typedef std::mutex object_lifetime_mutex_t;
typedef std::unique_lock<object_lifetime_mutex_t> read_object_lifetime_mutex_t;
typedef std::unique_lock<object_lifetime_mutex_t> write_object_lifetime_mutex_t;
#endif

// Suppress unused warning on Linux
#if defined(__GNUC__)
#define DECORATE_UNUSED __attribute__((unused))
#else
#define DECORATE_UNUSED
#endif

// clang-format off
static const char DECORATE_UNUSED *kVUID_ObjectTracker_Info = "UNASSIGNED-ObjectTracker-Info";
static const char DECORATE_UNUSED *kVUID_ObjectTracker_InternalError = "UNASSIGNED-ObjectTracker-InternalError";
static const char DECORATE_UNUSED *kVUID_ObjectTracker_ObjectLeak =    "UNASSIGNED-ObjectTracker-ObjectLeak";
static const char DECORATE_UNUSED *kVUID_ObjectTracker_UnknownObject = "UNASSIGNED-ObjectTracker-UnknownObject";
// clang-format on

#undef DECORATE_UNUSED

extern uint64_t object_track_index;

// Object Status -- used to track state of individual objects
typedef VkFlags ObjectStatusFlags;
enum ObjectStatusFlagBits {
    OBJSTATUS_NONE = 0x00000000,                      // No status is set
    OBJSTATUS_COMMAND_BUFFER_SECONDARY = 0x00000001,  // Command Buffer is of type SECONDARY
    OBJSTATUS_CUSTOM_ALLOCATOR = 0x00000002,          // Allocated with custom allocator
};

// Object and state information structure
struct ObjTrackState {
    uint64_t handle;                                               // Object handle (new)
    VulkanObjectType object_type;                                  // Object type identifier
    ObjectStatusFlags status;                                      // Object state
    uint64_t parent_object;                                        // Parent object
    std::unique_ptr<std::unordered_set<uint64_t> > child_objects;  // Child objects (used for VkDescriptorPool only)
};

typedef vl_concurrent_unordered_map<uint64_t, std::shared_ptr<ObjTrackState>, 6> object_map_type;

class ObjectLifetimes : public ValidationObject {
   public:
    // Override chassis read/write locks for this validation object.
    // This override returns a deferred lock (i.e. the lock is not acquired);
    // this class does its own locking with a shared mutex.
    virtual std::unique_lock<std::mutex> write_lock() {
        return std::unique_lock<std::mutex>(validation_object_mutex, std::defer_lock);
    }

    object_lifetime_mutex_t object_lifetime_mutex;
    write_object_lifetime_mutex_t write_shared_lock() { return write_object_lifetime_mutex_t(object_lifetime_mutex); }
    read_object_lifetime_mutex_t read_shared_lock() { return read_object_lifetime_mutex_t(object_lifetime_mutex); }
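
    // Illustrative sketch (not part of the original header): callers that only read tracker
    // state take a shared lock, while callers that mutate it take an exclusive lock, e.g.
    //
    //     auto read_guard = read_shared_lock();    // shared: safe for concurrent lookups
    //     auto write_guard = write_shared_lock();  // exclusive: serializes modifications
    //
    // On compilers without std::shared_mutex support, both helpers fall back to the same
    // std::mutex (see the typedefs above), so the calls remain valid but readers no longer
    // run concurrently.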

    std::atomic<uint64_t> num_objects[kVulkanObjectTypeMax + 1];
    std::atomic<uint64_t> num_total_objects;
    // Array of concurrent maps, one per object type, holding ObjTrackState info
    object_map_type object_map[kVulkanObjectTypeMax + 1];
    // Special-case map for swapchain images
    object_map_type swapchainImageMap;

    // Constructor for object lifetime tracking
    ObjectLifetimes() : num_objects{}, num_total_objects(0) {}

    void InsertObject(object_map_type &map, uint64_t object_handle, VulkanObjectType object_type,
                      std::shared_ptr<ObjTrackState> pNode) {
        bool inserted = map.insert(object_handle, pNode);
        if (!inserted) {
            // The object should not already exist. If we couldn't add it to the map, there was probably
            // a race condition in the app. Report an error and move on.
            VkDebugReportObjectTypeEXT debug_object_type = get_debug_report_enum[object_type];
            log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, debug_object_type, object_handle, kVUID_ObjectTracker_Info,
                    "Couldn't insert %s Object 0x%" PRIxLEAST64
                    ", already existed. This should not happen and may indicate a "
                    "race condition in the application.",
                    object_string[object_type], object_handle);
        }
    }

    bool DeviceReportUndestroyedObjects(VkDevice device, VulkanObjectType object_type, const std::string &error_code);
    void DeviceDestroyUndestroyedObjects(VkDevice device, VulkanObjectType object_type);
    void CreateQueue(VkDevice device, VkQueue vkObj);
    void AllocateCommandBuffer(VkDevice device, const VkCommandPool command_pool, const VkCommandBuffer command_buffer,
                               VkCommandBufferLevel level);
    void AllocateDescriptorSet(VkDevice device, VkDescriptorPool descriptor_pool, VkDescriptorSet descriptor_set);
    void CreateSwapchainImageObject(VkDevice dispatchable_object, VkImage swapchain_image, VkSwapchainKHR swapchain);
    bool ReportUndestroyedObjects(VkDevice device, const std::string &error_code);
    void DestroyUndestroyedObjects(VkDevice device);
    bool ValidateDeviceObject(const VulkanTypedHandle &device_typed, const char *invalid_handle_code,
                              const char *wrong_device_code);
    void DestroyQueueDataStructures(VkDevice device);
    bool ValidateCommandBuffer(VkDevice device, VkCommandPool command_pool, VkCommandBuffer command_buffer);
    bool ValidateDescriptorSet(VkDevice device, VkDescriptorPool descriptor_pool, VkDescriptorSet descriptor_set);
    bool ValidateSamplerObjects(VkDevice device, const VkDescriptorSetLayoutCreateInfo *pCreateInfo);
    template <typename DispObj>
    bool ValidateDescriptorWrite(DispObj disp, VkWriteDescriptorSet const *desc, bool isPush);

    ObjectLifetimes *GetObjectLifetimeData(std::vector<ValidationObject *> &object_dispatch) {
        for (auto layer_object : object_dispatch) {
            if (layer_object->container_type == LayerObjectTypeObjectTracker) {
                return (reinterpret_cast<ObjectLifetimes *>(layer_object));
            }
        }
        return nullptr;
    }

    template <typename T1, typename T2>
    bool ValidateObject(T1 dispatchable_object, T2 object, VulkanObjectType object_type, bool null_allowed,
                        const char *invalid_handle_code, const char *wrong_device_code) {
        if (null_allowed && (object == VK_NULL_HANDLE)) {
            return false;
        }
        auto object_handle = HandleToUint64(object);

        if (object_type == kVulkanObjectTypeDevice) {
            return ValidateDeviceObject(VulkanTypedHandle(object, object_type), invalid_handle_code, wrong_device_code);
        }

        VkDebugReportObjectTypeEXT debug_object_type = get_debug_report_enum[object_type];

        // Look for object in object map
        if (!object_map[object_type].contains(object_handle)) {
            // If object is an image, also look for it in the swapchain image map
            if ((object_type != kVulkanObjectTypeImage) || (swapchainImageMap.find(object_handle) == swapchainImageMap.end())) {
                // Object not found, look for it in other device object maps
                for (auto other_device_data : layer_data_map) {
                    for (auto layer_object_data : other_device_data.second->object_dispatch) {
                        if (layer_object_data->container_type == LayerObjectTypeObjectTracker) {
                            auto object_lifetime_data = reinterpret_cast<ObjectLifetimes *>(layer_object_data);
                            if (object_lifetime_data && (object_lifetime_data != this)) {
                                if (object_lifetime_data->object_map[object_type].find(object_handle) !=
                                        object_lifetime_data->object_map[object_type].end() ||
                                    (object_type == kVulkanObjectTypeImage &&
                                     object_lifetime_data->swapchainImageMap.find(object_handle) !=
                                         object_lifetime_data->swapchainImageMap.end())) {
                                    // Object was found in another device's map; report an error if a wrong-device (parent) error code was provided
                                    if ((wrong_device_code != kVUIDUndefined) && (object_type != kVulkanObjectTypeSurfaceKHR)) {
                                        return log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, debug_object_type, object_handle,
                                                       wrong_device_code,
                                                       "Object 0x%" PRIxLEAST64
                                                       " was not created, allocated or retrieved from the correct device.",
                                                       object_handle);
                                    } else {
                                        return false;
                                    }
                                }
                            }
                        }
                    }
                }
                // Report an error if object was not found anywhere
                return log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, debug_object_type, object_handle, invalid_handle_code,
                               "Invalid %s Object 0x%" PRIxLEAST64 ".", object_string[object_type], object_handle);
            }
        }
        return false;
    }
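
    // Illustrative sketch (not part of the original header): a caller, such as a generated
    // pre-call intercept, might validate an incoming handle roughly like this. The handle name
    // and the VUID arguments below are placeholders, not the exact strings used by the layer.
    //
    //     skip |= ValidateObject(device, buffer, kVulkanObjectTypeBuffer, /*null_allowed=*/true,
    //                            kInvalidHandleVUID, kWrongDeviceVUID);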

    template <typename T1, typename T2>
    void CreateObject(T1 dispatchable_object, T2 object, VulkanObjectType object_type, const VkAllocationCallbacks *pAllocator) {
        uint64_t object_handle = HandleToUint64(object);
        bool custom_allocator = (pAllocator != nullptr);
        if (!object_map[object_type].contains(object_handle)) {
            auto pNewObjNode = std::make_shared<ObjTrackState>();
            pNewObjNode->object_type = object_type;
            pNewObjNode->status = custom_allocator ? OBJSTATUS_CUSTOM_ALLOCATOR : OBJSTATUS_NONE;
            pNewObjNode->handle = object_handle;

            InsertObject(object_map[object_type], object_handle, object_type, pNewObjNode);
            num_objects[object_type]++;
            num_total_objects++;

            if (object_type == kVulkanObjectTypeDescriptorPool) {
                pNewObjNode->child_objects.reset(new std::unordered_set<uint64_t>);
            }
        }
    }
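
    // Illustrative sketch (not part of the original header): a post-call record intercept might
    // register a newly created handle roughly like this (function and parameter names are examples):
    //
    //     // e.g. after vkCreateBuffer returns to the layer:
    //     if (result == VK_SUCCESS) {
    //         CreateObject(device, *pBuffer, kVulkanObjectTypeBuffer, pAllocator);
    //     }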

    template <typename T1>
    void DestroyObjectSilently(T1 object, VulkanObjectType object_type) {
        auto object_handle = HandleToUint64(object);
        assert(object_handle != VK_NULL_HANDLE);

        auto item = object_map[object_type].pop(object_handle);
        if (item == object_map[object_type].end()) {
            // We've already checked that the object exists. If we couldn't find and atomically remove it
            // from the map, there must have been a race condition in the app. Report an error and move on.
            VkDebugReportObjectTypeEXT debug_object_type = get_debug_report_enum[object_type];
            log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, debug_object_type, object_handle, kVUID_ObjectTracker_Info,
                    "Couldn't destroy %s Object 0x%" PRIxLEAST64
                    ", not found. This should not happen and may indicate a "
                    "race condition in the application.",
                    object_string[object_type], object_handle);
            return;
        }
        assert(num_total_objects > 0);

        num_total_objects--;
        assert(num_objects[item->second->object_type] > 0);

        num_objects[item->second->object_type]--;
    }

    template <typename T1, typename T2>
    void RecordDestroyObject(T1 dispatchable_object, T2 object, VulkanObjectType object_type) {
        auto object_handle = HandleToUint64(object);
        if (object_handle != VK_NULL_HANDLE) {
            if (object_map[object_type].contains(object_handle)) {
                DestroyObjectSilently(object, object_type);
            }
        }
    }

    template <typename T1, typename T2>
    bool ValidateDestroyObject(T1 dispatchable_object, T2 object, VulkanObjectType object_type,
                               const VkAllocationCallbacks *pAllocator, const char *expected_custom_allocator_code,
                               const char *expected_default_allocator_code) {
        auto object_handle = HandleToUint64(object);
        bool custom_allocator = pAllocator != nullptr;
        VkDebugReportObjectTypeEXT debug_object_type = get_debug_report_enum[object_type];
        bool skip = false;

        if ((expected_custom_allocator_code != kVUIDUndefined || expected_default_allocator_code != kVUIDUndefined) &&
            object_handle != VK_NULL_HANDLE) {
            auto item = object_map[object_type].find(object_handle);
            if (item != object_map[object_type].end()) {
                auto allocated_with_custom = (item->second->status & OBJSTATUS_CUSTOM_ALLOCATOR) ? true : false;
                if (allocated_with_custom && !custom_allocator && expected_custom_allocator_code != kVUIDUndefined) {
                    // This check only verifies that custom allocation callbacks were provided to both Create and Destroy calls;
                    // it cannot verify that these allocation callbacks are compatible with each other.
                    skip |= log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, debug_object_type, object_handle,
                                    expected_custom_allocator_code,
                                    "Custom allocator not specified while destroying %s obj 0x%" PRIxLEAST64
                                    " but specified at creation.",
                                    object_string[object_type], object_handle);
                } else if (!allocated_with_custom && custom_allocator && expected_default_allocator_code != kVUIDUndefined) {
                    skip |= log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, debug_object_type, object_handle,
                                    expected_default_allocator_code,
                                    "Custom allocator specified while destroying %s obj 0x%" PRIxLEAST64
                                    " but not specified at creation.",
                                    object_string[object_type], object_handle);
                }
            }
        }
        return skip;
    }
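
    // Illustrative sketch (not part of the original header): destruction is typically handled as a
    // validate/record pair by the calling intercepts. Handle names and VUID arguments are placeholders:
    //
    //     // Pre-call validation:
    //     skip |= ValidateDestroyObject(device, buffer, kVulkanObjectTypeBuffer, pAllocator,
    //                                   kExpectedCustomAllocatorVUID, kExpectedDefaultAllocatorVUID);
    //     // Pre-call record:
    //     RecordDestroyObject(device, buffer, kVulkanObjectTypeBuffer);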

// Generated pre/post-call intercept declarations for this validation object
#include "object_tracker.h"
};