HoviTron Video Pipeline
BuffersControllerWarping.cpp
/* ----------------------
 * Copyright 2023 Université Libre de Bruxelles (ULB), Universidad Politécnica de Madrid (UPM), CREAL, Deutsches Zentrum für Luft- und Raumfahrt (DLR)
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at <http://www.apache.org/licenses/LICENSE-2.0>
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 * ---------------------- */
14
15
16#define GLM_FORCE_RADIANS
17#define GLM_FORCE_DEPTH_ZERO_TO_ONE
18#include <glm/glm.hpp>
19#include <glm/gtc/matrix_transform.hpp>
20#include <chrono>
21#include <iostream>
22
24#include"VulkanContext.h"
27#include"VulkanWrapper.h"
28
29
30#include"InputProvider.h"
31
32
33
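// Constructor: stores the context/render-pass/pipeline/input-provider/wrapper handles and
// remaps the requested view index so that deactivated cameras (wrapper->getCameraActivation())
// are skipped. It then fetches the stream parameters and the colour/depth stream images of the
// resolved view and zero-initialises the RVS synthesis uniform structs. A depth stream stored
// as R32Sfloat is flagged as linear depth (lin_depth = 1), as with EXR-style input.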
BuffersControllerWarping::BuffersControllerWarping(VulkanContext* context, VulkanRenderPass* renderpass, VulkanPipelineWarping* pipeline, InputProvider* inputP, VulkanWrapper* wraps, int inputView)
{
    this->context = context;
    this->pipeline = pipeline;
    this->inputView = inputView;
    this->wrapper = wraps;
    this->renderpass = renderpass;
    this->input = inputP;

    this->idStep = inputView;
    auto activation = wrapper->getCameraActivation();
    for (int i = 0; i <= inputView; i++) {
        if (!activation[i]) {
            this->inputView++;
            inputView++;
        }
    }

    streamParams = input->enumerateStreamsParameters()[inputView];

    textureStreamImages = input->enumerateStreamImages(inputView, false);
    depthStreamImages = input->enumerateStreamImages(inputView, true);
    attachmentSize = wrapper->multiviewSetup ? wrapper->getViewNumber() : wrapper->getAttachmentSize();

    assert(textureStreamImages.size() == depthStreamImages.size());

    memset(&rvsSynParam, 0, sizeof(rvsSynParam));
    memset(&rvsSynGeom, 0, sizeof(rvsSynGeom));
    memset(&rvsSynFrag, 0, sizeof(rvsSynFrag));

    if (streamParams.depthFormat == vk::Format::eR32Sfloat) {
        rvsSynParam.lin_depth = 1; // linear depth, as in EXR input
    }
    else {
        rvsSynParam.lin_depth = 0;
    }
}

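// Creates the GPU resources used by this warping step. The vertex and index buffers appear to
// be shared between steps, which is presumably why only the first step (idStep == 0) builds
// them; every step creates its own uniform buffers, descriptor pool and descriptor sets.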
{
    if (idStep == 0) {
        createVertexBuffer();
        createIndexBuffer();
    }
    createUniformBuffer(attachmentSize);
    createDescriptorPool();
    createDescriptorSets();
}


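// The "vertex buffer" is a dummy stream of one byte per depth-map pixel: its contents are
// never filled with meaningful data, it only gives the pipeline one vertex per pixel, and the
// actual positions are presumably reconstructed in the shaders from the depth texture.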
void BuffersControllerWarping::createVertexBuffer()
{
    auto res = streamParams.depthResolution;
    const int size = res.x * res.y;

    std::vector<uint8_t> vertices(size);

    vk::DeviceSize bufferSize = sizeof(uint8_t) * vertices.size();

    vk::Buffer stagingBuffer;
    vk::DeviceMemory stagingBufferMemory;

    createBuffer(bufferSize, vk::BufferUsageFlagBits::eTransferSrc, vk::MemoryPropertyFlagBits::eHostVisible | vk::MemoryPropertyFlagBits::eHostCoherent, stagingBuffer, stagingBufferMemory, context->device, context->physicalDevice);

    // copy the placeholder vertex data into the staging memory
    void* data;
    context->device.mapMemory(stagingBufferMemory, 0, bufferSize, {}, &data);
    memcpy(data, vertices.data(), (size_t)bufferSize);
    context->device.unmapMemory(stagingBufferMemory);

    createBuffer(bufferSize, vk::BufferUsageFlagBits::eTransferDst | vk::BufferUsageFlagBits::eVertexBuffer, vk::MemoryPropertyFlagBits::eDeviceLocal, vertexBuffer, vertexBufferMemory, context->device, context->physicalDevice);

    copyBuffer(stagingBuffer, vertexBuffer, bufferSize);

    context->device.destroyBuffer(stagingBuffer, nullptr);
    context->device.freeMemory(stagingBufferMemory, nullptr);
}

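// Builds the index buffer connecting the per-pixel vertices of the depth map:
// generate_picture_EBO() produces 32-bit indices for a res.x by res.y grid, uploaded through
// a staging buffer into device-local memory.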
void BuffersControllerWarping::createIndexBuffer()
{
    auto res = streamParams.depthResolution;
    indices = generate_picture_EBO(glm::vec2(res.x, res.y));
    indiceCount = static_cast<uint32_t>(indices.size());
    vk::DeviceSize bufferSize = sizeof(indices[0]) * indices.size();
    vk::Buffer stagingBuffer;
    vk::DeviceMemory stagingBufferMemory;
    createBuffer(bufferSize, vk::BufferUsageFlagBits::eTransferSrc, vk::MemoryPropertyFlagBits::eHostVisible | vk::MemoryPropertyFlagBits::eHostCoherent, stagingBuffer, stagingBufferMemory, context->device, context->physicalDevice);

    void* data;
    context->device.mapMemory(stagingBufferMemory, 0, bufferSize, {}, &data);
    memcpy(data, indices.data(), (size_t)bufferSize);
    context->device.unmapMemory(stagingBufferMemory);

    createBuffer(bufferSize, vk::BufferUsageFlagBits::eTransferDst | vk::BufferUsageFlagBits::eIndexBuffer, vk::MemoryPropertyFlagBits::eDeviceLocal, indexBuffer, indexBufferMemory, context->device, context->physicalDevice);

    copyBuffer(stagingBuffer, indexBuffer, bufferSize);

    context->device.destroyBuffer(stagingBuffer, nullptr);
    context->device.freeMemory(stagingBufferMemory, nullptr);
}

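// Allocates one uniform buffer per attachment (or per view in multiview mode) for each of the
// four RVS uniform blocks: camera extrinsics (RVScamExtrinsics), vertex-stage parameters
// (RvsSynParam), geometry-stage parameters (RvsSynGeom) and fragment-stage parameters
// (RvsSynFrag). They stay host-visible/coherent so they can be rewritten every frame.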
void BuffersControllerWarping::createUniformBuffer(int size)
{
    vk::DeviceSize bufferSizeCam = sizeof(RVScamExtrinsics);

    uniformBuffersCam.resize(size);
    uniformBuffersCamMemory.resize(size);

    for (size_t i = 0; i < size; i++) {
        createBuffer(bufferSizeCam, vk::BufferUsageFlagBits::eUniformBuffer, vk::MemoryPropertyFlagBits::eHostVisible | vk::MemoryPropertyFlagBits::eHostCoherent, uniformBuffersCam[i], uniformBuffersCamMemory[i], context->device, context->physicalDevice);
    }

    vk::DeviceSize bufferSize = sizeof(RvsSynParam);

    uniformBuffers.resize(size);
    uniformBuffersMemory.resize(size);

    for (size_t i = 0; i < size; i++) {
        createBuffer(bufferSize, vk::BufferUsageFlagBits::eUniformBuffer, vk::MemoryPropertyFlagBits::eHostVisible | vk::MemoryPropertyFlagBits::eHostCoherent, uniformBuffers[i], uniformBuffersMemory[i], context->device, context->physicalDevice);
    }

    vk::DeviceSize bufferSizeGeom = sizeof(RvsSynGeom);

    uniformBuffersGeom.resize(size);
    uniformBuffersGeomMemory.resize(size);

    for (size_t i = 0; i < size; i++) {
        createBuffer(bufferSizeGeom, vk::BufferUsageFlagBits::eUniformBuffer, vk::MemoryPropertyFlagBits::eHostVisible | vk::MemoryPropertyFlagBits::eHostCoherent, uniformBuffersGeom[i], uniformBuffersGeomMemory[i], context->device, context->physicalDevice);
    }

    vk::DeviceSize bufferSizeFrag = sizeof(RvsSynFrag);
    uniformBuffersFrag.resize(size);
    uniformBuffersFragMemory.resize(size);

    for (size_t i = 0; i < size; i++) {
        createBuffer(bufferSizeFrag, vk::BufferUsageFlagBits::eUniformBuffer, vk::MemoryPropertyFlagBits::eHostVisible | vk::MemoryPropertyFlagBits::eHostCoherent, uniformBuffersFrag[i], uniformBuffersFragMemory[i], context->device, context->physicalDevice);
    }
}

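// The descriptor pool is sized for one descriptor set per (attachment, input image slot)
// pair, each set holding four uniform buffers and two combined image samplers (colour and
// depth).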
void BuffersControllerWarping::createDescriptorPool()
{
    std::array<vk::DescriptorPoolSize, 6> poolSizes{};
    poolSizes[0] = vk::DescriptorPoolSize(vk::DescriptorType::eUniformBuffer, static_cast<uint32_t>(attachmentSize));
    poolSizes[1] = vk::DescriptorPoolSize(vk::DescriptorType::eUniformBuffer, static_cast<uint32_t>(attachmentSize));
    poolSizes[2] = vk::DescriptorPoolSize(vk::DescriptorType::eUniformBuffer, static_cast<uint32_t>(attachmentSize));
    poolSizes[3] = vk::DescriptorPoolSize(vk::DescriptorType::eUniformBuffer, static_cast<uint32_t>(attachmentSize));
    poolSizes[4] = vk::DescriptorPoolSize(vk::DescriptorType::eCombinedImageSampler, static_cast<uint32_t>(textureStreamImages.size()));
    poolSizes[5] = vk::DescriptorPoolSize(vk::DescriptorType::eCombinedImageSampler, static_cast<uint32_t>(depthStreamImages.size()));

    auto slotSize = textureStreamImages.size();

    vk::DescriptorPoolCreateInfo poolInfo(
        vk::DescriptorPoolCreateFlags(), //flags
        static_cast<uint32_t>(attachmentSize * slotSize), //maxSets
        static_cast<uint32_t>(poolSizes.size()), //poolSizeCount
        poolSizes.data() //pPoolSizes
    );

    descriptorPool = context->device.createDescriptorPool(poolInfo);
}

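// One descriptor set is allocated per (attachment, stream image slot) combination; sets are
// addressed attachment-major, i.e. descriptorSets[attachment + slot * attachmentSize].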
void BuffersControllerWarping::createDescriptorSets()
{
    auto slotSize = textureStreamImages.size();
    std::vector<vk::DescriptorSetLayout> layouts(attachmentSize * slotSize, pipeline->descriptorSetLayout);

    vk::DescriptorSetAllocateInfo allocInfo(descriptorPool, static_cast<uint32_t>(attachmentSize * slotSize), layouts.data());

    descriptorSets = context->device.allocateDescriptorSets(allocInfo);

    for (int slot = 0; slot < slotSize; slot++) {
        for (size_t i = 0; i < attachmentSize; i++) {
            updateDescriptorSets(i, slot);
        }
    }
}

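// Writes the resources of one (attachment, slot) pair into its descriptor set. Binding layout
// used by the warping shaders:
//   1 = depth sampler, 2 = camera extrinsics UBO, 3 = vertex-stage UBO (RvsSynParam),
//   4 = geometry-stage UBO (RvsSynGeom), 5 = fragment-stage UBO (RvsSynFrag), 6 = colour sampler.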
void BuffersControllerWarping::updateDescriptorSets(int attachment, int slot)
{
    vk::DescriptorBufferInfo bufferInfoCam(uniformBuffersCam[attachment], 0, sizeof(RVScamExtrinsics));
    vk::DescriptorBufferInfo bufferInfoVert(uniformBuffers[attachment], 0, sizeof(RvsSynParam));
    vk::DescriptorBufferInfo bufferInfoGeom(uniformBuffersGeom[attachment], 0, sizeof(RvsSynGeom));
    vk::DescriptorBufferInfo bufferInfoFrag(uniformBuffersFrag[attachment], 0, sizeof(RvsSynFrag));

    vk::DescriptorImageInfo imageInfo(streamParams.colorSampler, textureStreamImages[slot].view, vk::ImageLayout::eShaderReadOnlyOptimal);
    vk::DescriptorImageInfo depthInfo(streamParams.depthSampler, depthStreamImages[slot].view, vk::ImageLayout::eShaderReadOnlyOptimal);

    auto idDescriptorSet = attachment + slot * attachmentSize;
    std::array<vk::WriteDescriptorSet, 6> descriptorWrites{};
    descriptorWrites[0] = vk::WriteDescriptorSet(
        descriptorSets[idDescriptorSet],
        2, //dstBinding
        0, //dstArrayElement
        1, //descriptorCount
        vk::DescriptorType::eUniformBuffer, //descriptorType
        nullptr, //pImageInfo
        &bufferInfoCam, //pBufferInfo
        nullptr //pTexelBufferView
    );
    descriptorWrites[1] = vk::WriteDescriptorSet(
        descriptorSets[idDescriptorSet],
        1, //dstBinding
        0, //dstArrayElement
        1, //descriptorCount
        vk::DescriptorType::eCombinedImageSampler, //descriptorType
        &depthInfo, //pImageInfo
        nullptr, //pBufferInfo
        nullptr //pTexelBufferView
    );
    descriptorWrites[2] = vk::WriteDescriptorSet(
        descriptorSets[idDescriptorSet],
        3, //dstBinding
        0, //dstArrayElement
        1, //descriptorCount
        vk::DescriptorType::eUniformBuffer, //descriptorType
        nullptr, //pImageInfo
        &bufferInfoVert, //pBufferInfo
        nullptr //pTexelBufferView
    );
    descriptorWrites[3] = vk::WriteDescriptorSet(
        descriptorSets[idDescriptorSet],
        6, //dstBinding
        0, //dstArrayElement
        1, //descriptorCount
        vk::DescriptorType::eCombinedImageSampler, //descriptorType
        &imageInfo, //pImageInfo
        nullptr, //pBufferInfo
        nullptr //pTexelBufferView
    );
    descriptorWrites[4] = vk::WriteDescriptorSet(
        descriptorSets[idDescriptorSet],
        4, //dstBinding
        0, //dstArrayElement
        1, //descriptorCount
        vk::DescriptorType::eUniformBuffer, //descriptorType
        nullptr, //pImageInfo
        &bufferInfoGeom, //pBufferInfo
        nullptr //pTexelBufferView
    );
    descriptorWrites[5] = vk::WriteDescriptorSet(
        descriptorSets[idDescriptorSet],
        5, //dstBinding
        0, //dstArrayElement
        1, //descriptorCount
        vk::DescriptorType::eUniformBuffer, //descriptorType
        nullptr, //pImageInfo
        &bufferInfoFrag, //pBufferInfo
        nullptr //pTexelBufferView
    );

    context->device.updateDescriptorSets(static_cast<uint32_t>(descriptorWrites.size()), descriptorWrites.data(), 0, nullptr);
}

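// Releases the Vulkan resources created above. The shared vertex/index buffers are only
// destroyed by the step that created them (idStep == 0); each step frees its own uniform
// buffers and descriptor pool.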
{
    if (idStep == 0) {
        context->device.destroyBuffer(vertexBuffer);
        context->device.freeMemory(vertexBufferMemory, nullptr);

        context->device.destroyBuffer(indexBuffer);
        context->device.freeMemory(indexBufferMemory, nullptr);
    }

    for (size_t i = 0; i < uniformBuffersFrag.size(); i++) {
        context->device.destroyBuffer(uniformBuffersFrag[i], nullptr);
        context->device.freeMemory(uniformBuffersFragMemory[i], nullptr);
    }

    for (size_t i = 0; i < uniformBuffersGeom.size(); i++) {
        context->device.destroyBuffer(uniformBuffersGeom[i], nullptr);
        context->device.freeMemory(uniformBuffersGeomMemory[i], nullptr);
    }

    for (size_t i = 0; i < uniformBuffers.size(); i++) {
        context->device.destroyBuffer(uniformBuffers[i], nullptr);
        context->device.freeMemory(uniformBuffersMemory[i], nullptr);
    }

    for (size_t i = 0; i < uniformBuffersCam.size(); i++) {
        context->device.destroyBuffer(uniformBuffersCam[i], nullptr);
        context->device.freeMemory(uniformBuffersCamMemory[i], nullptr);
    }

    uniformBuffers.clear();
    uniformBuffersMemory.clear();

    uniformBuffersCam.clear();
    uniformBuffersCamMemory.clear();

    uniformBuffersGeom.clear();
    uniformBuffersGeomMemory.clear();

    uniformBuffersFrag.clear();
    uniformBuffersFragMemory.clear();

    context->device.destroyDescriptorPool(descriptorPool);
}


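// Records the vertex/index buffer bindings and the descriptor set for the current draw call.
// In multiview mode the set is selected by view index, otherwise by the swapchain image index;
// frameInfos.imageIndex picks which input stream image slot is sampled.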
void BuffersControllerWarping::bindBuffers(vk::CommandBuffer& commandBuffer, int currentSwapIndex, InputProvider::StreamFrameInfo& frameInfos, int view)
{
    int index = wrapper->multiviewSetup ? view : currentSwapIndex;
    vk::Buffer vertexBuffers[] = { vertexBuffer };
    vk::DeviceSize offsets[] = { 0 };
    commandBuffer.bindVertexBuffers(0, 1, vertexBuffers, offsets);
    commandBuffer.bindIndexBuffer(indexBuffer, 0, vk::IndexType::eUint32);

    auto idDescriptorSet = index + frameInfos.imageIndex * attachmentSize;
    commandBuffer.bindDescriptorSets(vk::PipelineBindPoint::eGraphics, pipeline->pipelineLayout, 0, 1, &descriptorSets[idDescriptorSet], 0, nullptr);
}

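// Per-frame uniform updates: updateCameraBuffer() copies the current RVScamExtrinsics into the
// mapped camera UBO; updateUniformBuffer() refreshes only the extrinsics, while
// updateAllUniformBuffer() also rewrites the vertex-, geometry- and fragment-stage blocks.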
void BuffersControllerWarping::updateCameraBuffer(uint32_t bufferId)
{
    RVScamExtrinsics uboCam = rvsCamExtrinsics;
    void* data;
    context->device.mapMemory(uniformBuffersCamMemory[bufferId], 0, sizeof(uboCam), {}, &data);
    memcpy(data, &uboCam, sizeof(uboCam));
    context->device.unmapMemory(uniformBuffersCamMemory[bufferId]);
}

void BuffersControllerWarping::updateUniformBuffer(uint32_t currentImage, int view)
{
    int index = wrapper->multiviewSetup ? view : currentImage;
    updateCameraBuffer(currentImage);
}

void BuffersControllerWarping::updateAllUniformBuffer(uint32_t currentImage, int view)
{
    //camera extrinsics !!
    int index = wrapper->multiviewSetup ? view : currentImage;
    updateCameraBuffer(index);

    RvsSynParam ubo = rvsSynParam;//rvs::SynthesizedView::getInstance().rvsSynParam;

    void* data;
    context->device.mapMemory(uniformBuffersMemory[index], 0, sizeof(ubo), {}, &data);
    memcpy(data, &ubo, sizeof(ubo));
    context->device.unmapMemory(uniformBuffersMemory[index]);

    RvsSynGeom uboGeom = rvsSynGeom;//rvs::SynthesizedView::getInstance().rvsSynGeom;
    void* dataGeom;
    context->device.mapMemory(uniformBuffersGeomMemory[index], 0, sizeof(uboGeom), {}, &dataGeom);
    memcpy(dataGeom, &uboGeom, sizeof(uboGeom));
    context->device.unmapMemory(uniformBuffersGeomMemory[index]);

    RvsSynFrag uboFrag = rvsSynFrag;//rvs::SynthesizedView::getInstance().rvsSynFrag;
    void* dataFrag;
    context->device.mapMemory(uniformBuffersFragMemory[index], 0, sizeof(uboFrag), {}, &dataFrag);
    memcpy(dataFrag, &uboFrag, sizeof(uboFrag));
    context->device.unmapMemory(uniformBuffersFragMemory[index]);
}


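// Vertex input description for the warping pipeline: a single per-vertex attribute at
// location 0, read as one 32-bit float (the dummy vertex stream created above).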
vk::VertexInputBindingDescription BuffersControllerWarping::getBindingDescription()
{
    vk::VertexInputBindingDescription bindingDescription{
        0, //binding
        sizeof(glm::vec1), //stride
        vk::VertexInputRate::eVertex //inputRate
    };
    return bindingDescription;
}

std::vector<vk::VertexInputAttributeDescription> BuffersControllerWarping::getAttributeDescription()
{
    std::vector<vk::VertexInputAttributeDescription> attributeDescriptions{
        vk::VertexInputAttributeDescription(
            0, //location
            0, //binding
            vk::Format::eR32Sfloat, //format
            0 //offset
        )};
    return attributeDescriptions;
}

{
    return indiceCount;
}

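// Copies a whole buffer into the colour aspect of an image (mip level 0, array layer 0) with a
// one-off command buffer; the image is expected to already be in eTransferDstOptimal layout.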
void BuffersControllerWarping::copyBufferToImage(vk::Buffer buffer, vk::Image image, uint32_t width, uint32_t height)
{
    vk::CommandBuffer commandBuffer = beginSingleTimeCommands(context, renderpass->commandPoolBuffer);

    vk::ImageSubresourceLayers subresource(
        vk::ImageAspectFlagBits::eColor, //aspectMask
        0, //mipLevel
        0, //baseArrayLayer
        1 //layerCount
    );
    vk::BufferImageCopy region(
        0, //bufferOffset
        0, //bufferRowLength
        0, //bufferImageHeight
        subresource, //imageSubresource
        { 0, 0, 0 }, //imageOffset
        { width, height, 1 } //imageExtent
    );
    commandBuffer.copyBufferToImage(buffer, image, vk::ImageLayout::eTransferDstOptimal, 1, &region);

    endSingleTimeCommands(context, renderpass->commandPoolBuffer, commandBuffer);
}

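// updateSettings() converts the virtual (OpenXR-style) camera pose into the OMAF frame of the
// input stream and fills the RVS extrinsics and synthesis parameters. In matrix form it computes
//   R = R_to^T * R_from            (rotation from the input view to the virtual view)
//   T = -R_to^T * (t_to - t_from)  (translation expressed in the virtual view frame)
// where R_to / t_to are the virtual-camera pose after applying the starting pose and the
// conversionToOMAF change of basis, and R_from / t_from come from the input frame extrinsics.
// It then fills the image sizes and either the focal/principal point (perspective) or the
// angular ranges (equirectangular) for both the input view and the synthesized view.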
void BuffersControllerWarping::updateSettings(
    const InputProvider::StreamFrameInfo& frameInfo,
    int view,
    const InputProvider::StreamParameters& streamParams,
    VulkanWrapper* wrapper,
    RVScamExtrinsics& rvsCamExtrinsics,
    RvsSynParam& rvsSynParam) {
    //translation to glm ?
    //Input coordinates are in the OMAF system
    //in OMAF the rotation vector is a rotation around the z axis, y axis and x axis
    auto R_from = glmEulerAnglesDegreeToRotationMatrix(frameInfo.extrinsics.rotation);
    //------------ Virtual camera parameters may not be in OMAF but can be converted to it using the conversion at wrapper->params.conversionToOMAF
    // (in general this is the OpenXR coordinate system)
    // and the rotation is defined as a rotation around the x axis, y axis, z axis
    auto R_start = glmEulerAnglesDegreeToRotationMatrixNotOMAF(wrapper->params.getStartingRotation());
    auto R_to = glmEulerAnglesDegreeToRotationMatrixNotOMAF(wrapper->params.getVirtualExtrinsics(view).rotation);
    R_to = (R_start * R_to * glm::transpose(R_start)) * R_start;
    R_to = wrapper->params.conversionToOMAF * R_to * glm::transpose(wrapper->params.conversionToOMAF);

    auto R = glm::transpose(R_to) * R_from;

    rvsCamExtrinsics.R = glm::mat4(R);

    auto t_from = frameInfo.extrinsics.position;
    auto t_start = wrapper->params.getStartingPosition();
    auto t_to = wrapper->params.getVirtualExtrinsics(view).position;
    t_to = t_start + R_start * t_to;// *glm::transpose(R_start); //

    t_to = wrapper->params.conversionToOMAF * t_to;
    auto T = -glm::transpose(R_to) * (t_to - t_from);

    rvsCamExtrinsics.t = glm::vec4(T, 1);

    rvsSynParam.w = streamParams.depthResolution.x; //float(inputView->get_depth().cols);//float(m_space_transformer->getInputParameters().getSize().width);
    //rvsSynGeom.w = streamParams.depthResolution.x; //float(inputView->get_depth().cols);//float(m_space_transformer->getInputParameters().getSize().width);
    rvsSynParam.h = streamParams.depthResolution.y; //float(inputView->get_depth().rows);//float(m_space_transformer->getInputParameters().getSize().height);
    //rvsSynGeom.h = streamParams.depthResolution.y;
    rvsSynParam.n_h = wrapper->params.virtualViewDef[view].y;
    rvsSynParam.n_w = wrapper->params.virtualViewDef[view].x;

    switch (streamParams.projectionType) {
    case InputProvider::ProjectionType::PROJECTION_PERSPECTIVE: {
        rvsSynParam.erp_in = false;
        rvsSynParam.f = std::get<InputProvider::PerspectiveIntrinsics>(frameInfo.intrinsics).focals / (float(streamParams.colorResolution.x) / streamParams.depthResolution.x);
        rvsSynParam.p = std::get<InputProvider::PerspectiveIntrinsics>(frameInfo.intrinsics).principle / (float(streamParams.colorResolution.x) / streamParams.depthResolution.x);
        break;
    }

    case InputProvider::ProjectionType::PROJECTION_EQUIRECTANGULAR: {
        rvsSynParam.erp_in = true;
        auto hor_range = std::get<InputProvider::EquirectangularIntrinsics>(frameInfo.intrinsics).horizontalRange;
        auto ver_range = std::get<InputProvider::EquirectangularIntrinsics>(frameInfo.intrinsics).verticalRange;
        auto constexpr radperdeg = 0.01745329252f;
        rvsSynParam.phi0 = radperdeg * hor_range[1];
        rvsSynParam.theta0 = radperdeg * ver_range[1];
        rvsSynParam.dphi_du = -radperdeg * (hor_range[1] - hor_range[0]) / rvsSynParam.w;
        rvsSynParam.dtheta_dv = -radperdeg * (ver_range[1] - ver_range[0]) / rvsSynParam.h;
        break;
    }
    default:
        throw std::runtime_error("Unsupported projection type for input");
        break;
    }

    //for the virtual camera
    switch (wrapper->params.projectionType) {
    case InputProvider::ProjectionType::PROJECTION_PERSPECTIVE: {
        rvsSynParam.erp_out = false;
        rvsSynParam.n_p = std::get<InputProvider::PerspectiveIntrinsics>(wrapper->params.getVirtualIntrinsics(view)).principle;
        rvsSynParam.n_f = std::get<InputProvider::PerspectiveIntrinsics>(wrapper->params.getVirtualIntrinsics(view)).focals;
        break;
    }
    case InputProvider::ProjectionType::PROJECTION_EQUIRECTANGULAR: {
        rvsSynParam.erp_out = true;
        auto ver_range = std::get<InputProvider::EquirectangularIntrinsics>(wrapper->params.getVirtualIntrinsics(view)).verticalRange;
        auto hor_range = std::get<InputProvider::EquirectangularIntrinsics>(wrapper->params.getVirtualIntrinsics(view)).horizontalRange;
        auto constexpr degperrad = 57.295779513f;
        rvsSynParam.u0 = (hor_range[0] + hor_range[1]) / (hor_range[1] - hor_range[0]);
        rvsSynParam.v0 = -(ver_range[0] + ver_range[1]) / (ver_range[1] - ver_range[0]);
        rvsSynParam.du_dphi = -2.f * degperrad / (hor_range[1] - hor_range[0]);
        rvsSynParam.dv_dtheta = +2.f * degperrad / (ver_range[1] - ver_range[0]);
        break;
    }
    default: {
        throw std::runtime_error("Unsupported projection type for the virtual camera");
        break; }
    }
    //rvsSynFrag.max_depth = streamParams.farDepth;
    //rvsSynFrag.pixelThresh = wrapper->params.pixelThreshold;
    rvsSynParam.min_depth = streamParams.nearDepth;
    rvsSynParam.max_depth = streamParams.farDepth;
    //rvsSynGeom.pixelThresh = wrapper->params.pixelThreshold;
    //rvsSynFrag.maxMul = wrapper->params.maxMult;
}

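// Per-frame update entry point: runs the common updateSettings() pass, then patches the
// geometry- and fragment-stage structs (depth range, pixel threshold, maxMul, scaleFactor).
// In the 4-view perspective layout, the pixel threshold of each odd view is rescaled by the
// ratio of the neighbouring even view's focal length to its own.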
void BuffersControllerWarping::update(std::span<InputProvider::StreamFrameInfo> infos, int view) {

    auto frameInfo = infos[inputView];

    // Do the common update
    updateSettings(frameInfo, view, streamParams, wrapper, rvsCamExtrinsics, rvsSynParam);

    // Patch the remaining structs
    rvsSynGeom.w = streamParams.depthResolution.x; //float(inputView->get_depth().cols);//float(m_space_transformer->getInputParameters().getSize().width);
    rvsSynGeom.h = streamParams.depthResolution.y;

    rvsSynFrag.max_depth = streamParams.farDepth;
    rvsSynFrag.pixelThresh = wrapper->params.pixelThreshold;
    rvsSynGeom.pixelThresh = wrapper->params.pixelThreshold;
    rvsSynFrag.maxMul = wrapper->params.maxMult;

    //scaleFactor
    rvsSynParam.scaleFactor = wrapper->params.scaleFactor;
    rvsSynGeom.scaleFactor = wrapper->params.scaleFactor;
    rvsSynFrag.scaleFactor = wrapper->params.scaleFactor;

    if (wrapper->getViewNumber() == 4 && view % 2 == 1 && wrapper->params.projectionType == InputProvider::ProjectionType::PROJECTION_PERSPECTIVE) {
        rvsSynGeom.pixelThresh *= std::get<InputProvider::PerspectiveIntrinsics>(wrapper->params.getVirtualIntrinsics(view - 1)).focals.x / rvsSynParam.n_f.x;
    }
}
