blob: f54ac903f970282ff869b46ffa96a37daf15149f [file] [log] [blame]
Alexis Hetu1424ef62019-04-05 18:03:53 -04001// Copyright 2019 The SwiftShader Authors. All Rights Reserved.
2//
3// Licensed under the Apache License, Version 2.0 (the "License");
4// you may not use this file except in compliance with the License.
5// You may obtain a copy of the License at
6//
7// http://www.apache.org/licenses/LICENSE-2.0
8//
9// Unless required by applicable law or agreed to in writing, software
10// distributed under the License is distributed on an "AS IS" BASIS,
11// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
12// See the License for the specific language governing permissions and
13// limitations under the License.
14
15#include "VkPipelineCache.hpp"
16#include <cstring>
17
Nicolas Capens157ba262019-12-10 17:49:14 -050018namespace vk {
Alexis Hetu1424ef62019-04-05 18:03:53 -040019
Ben Clayton2ed93ab2019-12-17 20:38:03 +000020PipelineCache::SpirvShaderKey::SpecializationInfo::SpecializationInfo(const VkSpecializationInfo *specializationInfo)
Alexis Hetu52edb172019-06-26 10:17:18 -040021{
22 if(specializationInfo)
23 {
Ben Clayton2ed93ab2019-12-17 20:38:03 +000024 auto ptr = reinterpret_cast<VkSpecializationInfo *>(
25 allocate(sizeof(VkSpecializationInfo), REQUIRED_MEMORY_ALIGNMENT, DEVICE_MEMORY));
Alexis Hetu52edb172019-06-26 10:17:18 -040026
Ben Claytone6092f32019-07-29 19:44:13 +010027 info = std::shared_ptr<VkSpecializationInfo>(ptr, Deleter());
28
Alexis Hetu52edb172019-06-26 10:17:18 -040029 info->mapEntryCount = specializationInfo->mapEntryCount;
30 if(specializationInfo->mapEntryCount > 0)
31 {
32 size_t entriesSize = specializationInfo->mapEntryCount * sizeof(VkSpecializationMapEntry);
Ben Clayton2ed93ab2019-12-17 20:38:03 +000033 VkSpecializationMapEntry *mapEntries = reinterpret_cast<VkSpecializationMapEntry *>(
34 allocate(entriesSize, REQUIRED_MEMORY_ALIGNMENT, DEVICE_MEMORY));
Alexis Hetu52edb172019-06-26 10:17:18 -040035 memcpy(mapEntries, specializationInfo->pMapEntries, entriesSize);
36 info->pMapEntries = mapEntries;
37 }
38
39 info->dataSize = specializationInfo->dataSize;
40 if(specializationInfo->dataSize > 0)
41 {
Ben Clayton2ed93ab2019-12-17 20:38:03 +000042 void *data = allocate(specializationInfo->dataSize, REQUIRED_MEMORY_ALIGNMENT, DEVICE_MEMORY);
Alexis Hetu52edb172019-06-26 10:17:18 -040043 memcpy(data, specializationInfo->pData, specializationInfo->dataSize);
44 info->pData = data;
45 }
Ben Claytone6092f32019-07-29 19:44:13 +010046 else
47 {
48 info->pData = nullptr;
49 }
Alexis Hetu52edb172019-06-26 10:17:18 -040050 }
51}
52
Ben Clayton2ed93ab2019-12-17 20:38:03 +000053void PipelineCache::SpirvShaderKey::SpecializationInfo::Deleter::operator()(VkSpecializationInfo *info) const
Alexis Hetu52edb172019-06-26 10:17:18 -040054{
55 if(info)
56 {
Ben Clayton2ed93ab2019-12-17 20:38:03 +000057 deallocate(const_cast<VkSpecializationMapEntry *>(info->pMapEntries), DEVICE_MEMORY);
58 deallocate(const_cast<void *>(info->pData), DEVICE_MEMORY);
Alexis Hetu52edb172019-06-26 10:17:18 -040059 deallocate(info, DEVICE_MEMORY);
60 }
61}
62
Ben Clayton2ed93ab2019-12-17 20:38:03 +000063bool PipelineCache::SpirvShaderKey::SpecializationInfo::operator<(const SpecializationInfo &specializationInfo) const
Alexis Hetu52edb172019-06-26 10:17:18 -040064{
Jamie Madill416d3742020-02-27 11:34:09 -050065 // Check that either both or neither keys have specialization info.
66 if((info.get() == nullptr) != (specializationInfo.info.get() == nullptr))
Ben Claytone6092f32019-07-29 19:44:13 +010067 {
Jamie Madill416d3742020-02-27 11:34:09 -050068 return info.get() == nullptr;
69 }
Ben Claytone6092f32019-07-29 19:44:13 +010070
Jamie Madill416d3742020-02-27 11:34:09 -050071 if(!info)
72 {
73 ASSERT(!specializationInfo.info);
74 return false;
75 }
Ben Claytone6092f32019-07-29 19:44:13 +010076
Jamie Madill416d3742020-02-27 11:34:09 -050077 if(info->mapEntryCount != specializationInfo.info->mapEntryCount)
78 {
79 return info->mapEntryCount < specializationInfo.info->mapEntryCount;
80 }
Ben Claytone6092f32019-07-29 19:44:13 +010081
Jamie Madill416d3742020-02-27 11:34:09 -050082 if(info->dataSize != specializationInfo.info->dataSize)
83 {
84 return info->dataSize < specializationInfo.info->dataSize;
85 }
86
87 if(info->mapEntryCount > 0)
88 {
89 int cmp = memcmp(info->pMapEntries, specializationInfo.info->pMapEntries, info->mapEntryCount * sizeof(VkSpecializationMapEntry));
90 if(cmp != 0)
Alexis Hetu52edb172019-06-26 10:17:18 -040091 {
Jamie Madill416d3742020-02-27 11:34:09 -050092 return cmp < 0;
93 }
94 }
95
96 if(info->dataSize > 0)
97 {
98 int cmp = memcmp(info->pData, specializationInfo.info->pData, info->dataSize);
99 if(cmp != 0)
100 {
101 return cmp < 0;
Alexis Hetu52edb172019-06-26 10:17:18 -0400102 }
103 }
104
Jamie Madill4b34ee32020-02-26 17:39:53 -0500105 return false;
Alexis Hetu52edb172019-06-26 10:17:18 -0400106}
107
// Builds a cache key identifying a compiled shader: the pipeline stage, entry
// point, SPIR-V instruction stream, render pass/subpass it targets, and its
// specialization constants (deep-copied by the SpecializationInfo member).
PipelineCache::SpirvShaderKey::SpirvShaderKey(const VkShaderStageFlagBits pipelineStage,
                                              const std::string &entryPointName,
                                              const std::vector<uint32_t> &insns,
                                              const vk::RenderPass *renderPass,
                                              const uint32_t subpassIndex,
                                              const VkSpecializationInfo *specializationInfo)
    : pipelineStage(pipelineStage)
    , entryPointName(entryPointName)
    , insns(insns)
    , renderPass(renderPass)
    , subpassIndex(subpassIndex)
    , specializationInfo(specializationInfo)
{
}
122
Ben Claytone6092f32019-07-29 19:44:13 +0100123bool PipelineCache::SpirvShaderKey::operator<(const SpirvShaderKey &other) const
124{
125 if(pipelineStage != other.pipelineStage)
126 {
127 return pipelineStage < other.pipelineStage;
128 }
129
130 if(renderPass != other.renderPass)
131 {
132 return renderPass < other.renderPass;
133 }
134
135 if(subpassIndex != other.subpassIndex)
136 {
137 return subpassIndex < other.subpassIndex;
138 }
139
140 if(insns.size() != other.insns.size())
141 {
142 return insns.size() < other.insns.size();
143 }
144
145 if(entryPointName.size() != other.entryPointName.size())
146 {
147 return entryPointName.size() < other.entryPointName.size();
148 }
149
150 int cmp = memcmp(entryPointName.c_str(), other.entryPointName.c_str(), entryPointName.size());
151 if(cmp != 0)
152 {
153 return cmp < 0;
154 }
155
156 cmp = memcmp(insns.data(), other.insns.data(), insns.size() * sizeof(uint32_t));
157 if(cmp != 0)
158 {
159 return cmp < 0;
160 }
161
162 return (specializationInfo < other.specializationInfo);
Alexis Hetu52edb172019-06-26 10:17:18 -0400163}
164
Ben Clayton2ed93ab2019-12-17 20:38:03 +0000165PipelineCache::PipelineCache(const VkPipelineCacheCreateInfo *pCreateInfo, void *mem)
166 : dataSize(ComputeRequiredAllocationSize(pCreateInfo))
167 , data(reinterpret_cast<uint8_t *>(mem))
Alexis Hetu1424ef62019-04-05 18:03:53 -0400168{
Ben Clayton2ed93ab2019-12-17 20:38:03 +0000169 CacheHeader *header = reinterpret_cast<CacheHeader *>(mem);
Alexis Hetu1424ef62019-04-05 18:03:53 -0400170 header->headerLength = sizeof(CacheHeader);
171 header->headerVersion = VK_PIPELINE_CACHE_HEADER_VERSION_ONE;
172 header->vendorID = VENDOR_ID;
173 header->deviceID = DEVICE_ID;
174 memcpy(header->pipelineCacheUUID, SWIFTSHADER_UUID, VK_UUID_SIZE);
175
176 if(pCreateInfo->pInitialData && (pCreateInfo->initialDataSize > 0))
177 {
178 memcpy(data + sizeof(CacheHeader), pCreateInfo->pInitialData, pCreateInfo->initialDataSize);
179 }
180}
181
PipelineCache::~PipelineCache()
{
	// Drop all cached entries; the shared_ptr values release the shaders and
	// compute programs once the last reference goes away.
	spirvShaders.clear();
	computePrograms.clear();
}
187
void PipelineCache::destroy(const VkAllocationCallbacks *pAllocator)
{
	// Frees the header+data allocation handed to the constructor as `mem`.
	vk::deallocate(data, pAllocator);
}
192
size_t PipelineCache::ComputeRequiredAllocationSize(const VkPipelineCacheCreateInfo *pCreateInfo)
{
	// Storage for the cache header followed by the application's initial data.
	return pCreateInfo->initialDataSize + sizeof(CacheHeader);
}
197
Ben Clayton2ed93ab2019-12-17 20:38:03 +0000198VkResult PipelineCache::getData(size_t *pDataSize, void *pData)
Alexis Hetu1424ef62019-04-05 18:03:53 -0400199{
200 if(!pData)
201 {
202 *pDataSize = dataSize;
203 return VK_SUCCESS;
204 }
205
206 if(*pDataSize != dataSize)
207 {
208 *pDataSize = 0;
209 return VK_INCOMPLETE;
210 }
211
212 if(*pDataSize > 0)
213 {
214 memcpy(pData, data, *pDataSize);
215 }
216
217 return VK_SUCCESS;
218}
219
VkResult PipelineCache::merge(uint32_t srcCacheCount, const VkPipelineCache *pSrcCaches)
{
	// Implements vkMergePipelineCaches: copies every cached SPIR-V shader and
	// compute program from each source cache into this one. Existing entries
	// with the same key are kept (map::insert does not overwrite).
	for(uint32_t i = 0; i < srcCacheCount; i++)
	{
		PipelineCache *srcCache = Cast(pSrcCaches[i]);

		{
			// Only this cache's mutex is held while inserting.
			// NOTE(review): srcCache's maps are read without taking srcCache's
			// locks — presumably the API guarantees the source caches are not
			// mutated concurrently; verify against the external-synchronization
			// requirements for vkMergePipelineCaches.
			std::unique_lock<std::mutex> lock(spirvShadersMutex);
			spirvShaders.insert(srcCache->spirvShaders.begin(), srcCache->spirvShaders.end());
		}

		{
			std::unique_lock<std::mutex> lock(computeProgramsMutex);
			computePrograms.insert(srcCache->computePrograms.begin(), srcCache->computePrograms.end());
		}
	}

	return VK_SUCCESS;
}
239
// Looks up a cached shader by key; returns a pointer to the stored shared_ptr,
// or nullptr when the key is absent.
// NOTE(review): spirvShadersMutex is not taken here, unlike merge() —
// presumably callers hold the lock around lookup+insert; confirm at call sites.
const std::shared_ptr<sw::SpirvShader> *PipelineCache::operator[](const PipelineCache::SpirvShaderKey &key) const
{
	auto it = spirvShaders.find(key);
	return (it != spirvShaders.end()) ? &(it->second) : nullptr;
}
245
// Stores (or replaces) the shader cached under the given key.
// NOTE(review): no lock is taken here — presumably callers hold
// spirvShadersMutex around lookup+insert; confirm at call sites.
void PipelineCache::insert(const PipelineCache::SpirvShaderKey &key, const std::shared_ptr<sw::SpirvShader> &shader)
{
	spirvShaders[key] = shader;
}
250
// Looks up a cached compute program by key; returns a pointer to the stored
// shared_ptr, or nullptr when the key is absent.
// NOTE(review): computeProgramsMutex is not taken here — presumably callers
// hold the lock around lookup+insert; confirm at call sites.
const std::shared_ptr<sw::ComputeProgram> *PipelineCache::operator[](const PipelineCache::ComputeProgramKey &key) const
{
	auto it = computePrograms.find(key);
	return (it != computePrograms.end()) ? &(it->second) : nullptr;
}
256
// Stores (or replaces) the compute program cached under the given key.
// NOTE(review): no lock is taken here — presumably callers hold
// computeProgramsMutex around lookup+insert; confirm at call sites.
void PipelineCache::insert(const PipelineCache::ComputeProgramKey &key, const std::shared_ptr<sw::ComputeProgram> &computeProgram)
{
	computePrograms[key] = computeProgram;
}
261
Nicolas Capens157ba262019-12-10 17:49:14 -0500262} // namespace vk