// Copyright 2018 The SwiftShader Authors. All Rights Reserved.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
//    http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.

#include "VkBuffer.hpp"
#include "VkConfig.hpp"
#include "VkDeviceMemory.hpp"

#include <cstring>

namespace vk {

Buffer::Buffer(const VkBufferCreateInfo *pCreateInfo, void *mem)
    : flags(pCreateInfo->flags)
    , size(pCreateInfo->size)
    , usage(pCreateInfo->usage)
    , sharingMode(pCreateInfo->sharingMode)
{
	if(pCreateInfo->sharingMode == VK_SHARING_MODE_CONCURRENT)
	{
		// Concurrently shared buffers retain their list of queue family indices,
		// copied into the extra memory allocated alongside this object.
		queueFamilyIndexCount = pCreateInfo->queueFamilyIndexCount;
		queueFamilyIndices = reinterpret_cast<uint32_t *>(mem);
		memcpy(queueFamilyIndices, pCreateInfo->pQueueFamilyIndices, sizeof(uint32_t) * queueFamilyIndexCount);
	}

	// Record which external memory handle types this buffer may be bound to.
	const auto *nextInfo = reinterpret_cast<const VkBaseInStructure *>(pCreateInfo->pNext);
	for(; nextInfo != nullptr; nextInfo = nextInfo->pNext)
	{
		if(nextInfo->sType == VK_STRUCTURE_TYPE_EXTERNAL_MEMORY_BUFFER_CREATE_INFO)
		{
			const auto *externalInfo = reinterpret_cast<const VkExternalMemoryBufferCreateInfo *>(nextInfo);
			supportedExternalMemoryHandleTypes = externalInfo->handleTypes;
		}
	}
}

void Buffer::destroy(const VkAllocationCallbacks *pAllocator)
{
	// queueFamilyIndices points into the extra memory allocated for this buffer
	// (it is only set when the buffer uses concurrent sharing).
	vk::deallocate(queueFamilyIndices, pAllocator);
}

size_t Buffer::ComputeRequiredAllocationSize(const VkBufferCreateInfo *pCreateInfo)
{
	// Extra memory is only needed to store the queue family indices of a
	// concurrently shared buffer.
	return (pCreateInfo->sharingMode == VK_SHARING_MODE_CONCURRENT) ? sizeof(uint32_t) * pCreateInfo->queueFamilyIndexCount : 0;
}

const VkMemoryRequirements Buffer::getMemoryRequirements() const
{
	VkMemoryRequirements memoryRequirements = {};
	// The alignment requirement depends on how the buffer may be used: texel,
	// storage and uniform buffers must honor their minimum offset alignments.
	if(usage & (VK_BUFFER_USAGE_STORAGE_TEXEL_BUFFER_BIT | VK_BUFFER_USAGE_UNIFORM_TEXEL_BUFFER_BIT))
	{
		memoryRequirements.alignment = vk::MIN_TEXEL_BUFFER_OFFSET_ALIGNMENT;
	}
	else if(usage & VK_BUFFER_USAGE_STORAGE_BUFFER_BIT)
	{
		memoryRequirements.alignment = vk::MIN_STORAGE_BUFFER_OFFSET_ALIGNMENT;
	}
	else if(usage & VK_BUFFER_USAGE_UNIFORM_BUFFER_BIT)
	{
		memoryRequirements.alignment = vk::MIN_UNIFORM_BUFFER_OFFSET_ALIGNMENT;
	}
	else
	{
		memoryRequirements.alignment = REQUIRED_MEMORY_ALIGNMENT;
	}
	memoryRequirements.memoryTypeBits = vk::MEMORY_TYPE_GENERIC_BIT;
	memoryRequirements.size = size;  // TODO: also reserve space for a header containing
	                                 // the size of the buffer (for robust buffer access)
	return memoryRequirements;
}

bool Buffer::canBindToMemory(DeviceMemory *pDeviceMemory) const
{
	// The memory's external handle type must be compatible with the handle types
	// declared at buffer creation time.
	return pDeviceMemory->checkExternalMemoryHandleType(supportedExternalMemoryHandleTypes);
}

void Buffer::bind(DeviceMemory *pDeviceMemory, VkDeviceSize pMemoryOffset)
{
	memory = pDeviceMemory->getOffsetPointer(pMemoryOffset);
}

void Buffer::copyFrom(const void *srcMemory, VkDeviceSize pSize, VkDeviceSize pOffset)
{
	ASSERT((pSize + pOffset) <= size);

	memcpy(getOffsetPointer(pOffset), srcMemory, pSize);
}

void Buffer::copyTo(void *dstMemory, VkDeviceSize pSize, VkDeviceSize pOffset) const
{
	ASSERT((pSize + pOffset) <= size);

	memcpy(dstMemory, getOffsetPointer(pOffset), pSize);
}

void Buffer::copyTo(Buffer *dstBuffer, const VkBufferCopy &pRegion) const
{
	copyTo(dstBuffer->getOffsetPointer(pRegion.dstOffset), pRegion.size, pRegion.srcOffset);
}

void Buffer::fill(VkDeviceSize dstOffset, VkDeviceSize fillSize, uint32_t data)
{
	size_t bytes = (fillSize == VK_WHOLE_SIZE) ? (size - dstOffset) : fillSize;

	ASSERT((bytes + dstOffset) <= size);

	uint32_t *memToWrite = static_cast<uint32_t *>(getOffsetPointer(dstOffset));

	// Vulkan 1.1 spec: "If VK_WHOLE_SIZE is used and the remaining size of the buffer is
	//                   not a multiple of 4, then the nearest smaller multiple is used."
	for(; bytes >= 4; bytes -= 4, memToWrite++)
	{
		*memToWrite = data;
	}
}

void Buffer::update(VkDeviceSize dstOffset, VkDeviceSize dataSize, const void *pData)
{
	ASSERT((dataSize + dstOffset) <= size);

	memcpy(getOffsetPointer(dstOffset), pData, dataSize);
}

void *Buffer::getOffsetPointer(VkDeviceSize offset) const
{
	return reinterpret_cast<uint8_t *>(memory) + offset;
}

uint8_t *Buffer::end() const
{
	return reinterpret_cast<uint8_t *>(getOffsetPointer(size + 1));
}

}  // namespace vk