HoviTron Video Pipeline
DynamicInputProvider.cpp
1/* ----------------------
2* Copyright 2023 Université Libre de Bruxelles (ULB), Universidad Politécnica de Madrid (UPM), CREAL, Deutsches Zentrum für Luft- und Raumfahrt (DLR)
3
4* Licensed under the Apache License, Version 2.0 (the "License");
5* you may not use this file except in compliance with the License.
6* You may obtain a copy of the License at < http://www.apache.org/licenses/LICENSE-2.0>
7
8* Unless required by applicable law or agreed to in writing, software
9* distributed under the License is distributed on an "AS IS" BASIS,
10* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
11* See the License for the specific language governing permissions and
12* limitations under the License.
13---------------------- */
14
15
16
17#include "DynamicInputProvider.h"
19
#include <map>
#include <filesystem>

#ifdef WIN32
#include <libloaderapi.h>
#else
#include <errno.h>
#include <cstring>
#include <dlfcn.h>
#endif

#ifdef __ANDROID__
#include <dlfcn.h>
#endif
32static InputProvider::ProjectionType projectionHvtToOur(HvtProjectionType ptype) {
33 switch(ptype) {
34 case HvtProjectionType::HVT_PROJECTION_EQUIRECTANGULAR:
35 return InputProvider::ProjectionType::PROJECTION_EQUIRECTANGULAR;
36 case HvtProjectionType::HVT_PROJECTION_PERSPECTIVE:
37 return InputProvider::ProjectionType::PROJECTION_PERSPECTIVE;
38 default:
39 return InputProvider::ProjectionType::PROJECTION_INVALID;
40 }
41}
42
43static bool isFormatYUV(vk::Format format) {
44 return format >= vk::Format::eG8B8G8R8422Unorm && format <= vk::Format::eG16B16R163Plane444Unorm;
45}
46
// Constructs the provider by loading the streaming module from 'libraryPath',
// resolving its entry points, then creating the streaming context, streams
// and synchronisation objects (see loadLibrary() and init()).
DynamicInputProvider::DynamicInputProvider(const std::string& libraryPath, VulkanWrapper& vkw) : vkw(vkw)
{
    loadLibrary(libraryPath);
    init();
}
52
53void DynamicInputProvider::loadLibrary(const std::string& libraryPath) {
54 std::filesystem::path libPath = libraryPath;
55 libPath = std::filesystem::absolute(libPath);
56 if(!std::filesystem::exists(libPath)) {
57 throw std::runtime_error("Could not load dynamic library " + libPath.string() + " : file does not exist.");
58 }
59
60 #ifdef WIN32
61 SetErrorMode(0);
62 auto module = LoadLibraryEx(libPath.string().c_str(), NULL, LOAD_LIBRARY_SEARCH_DEFAULT_DIRS | LOAD_LIBRARY_SEARCH_DLL_LOAD_DIR);
63 #else
64 //#error TODO implement on posix-like systems
65
66 auto module = dlopen(libPath.string().c_str(),RTLD_LAZY);
67 #endif
68
69 if(module == NULL){
70 #ifdef WIN32
71 auto err = GetLastError();
72 auto message = std::to_string(err);
73 #else
74 auto err = errno;
75 auto message = strerror(err);
76 #endif
77 throw std::runtime_error("Could not load dynamic library " + libPath.string() + " ( " + message + " )");
78 }
79
80 auto fill_proc = [&]<typename Ptr>(Ptr& ptr, const char* name){
81 #ifdef WIN32
82 auto addr = GetProcAddress(module, name);
83 #else
84 auto addr = dlsym(module, name);
85 #endif
86 if(!addr) {
87 #ifdef WIN32
88 auto err = GetLastError();
89 auto message = std::to_string(err);
90 #else
91 auto err = errno;
92 auto message = strerror(err);
93 #endif
94 throw std::runtime_error(std::string("Failed to query entry point ") + name + " from module " + libraryPath + " ( " + message + " )");
95 }
96 ptr = reinterpret_cast<Ptr>(addr);
97 };
98
99 fill_proc(hvt.createStreamingContext, "hvtCreateStreamingContext");
100 fill_proc(hvt.enumerateStreamsParameters, "hvtEnumerateStreamsParameters");
101 fill_proc(hvt.exportStreamImages, "hvtExportStreamImages");
102 fill_proc(hvt.exportSemaphore, "hvtExportSemaphore");
103 fill_proc(hvt.acquireStreamsFrames, "hvtAcquireStreamsFrames");
104 fill_proc(hvt.releaseStreamsFrames, "hvtReleaseStreamsFrames");
105 fill_proc(hvt.startStreaming, "hvtStartStreaming");
106 fill_proc(hvt.stopStreaming, "hvtStopStreaming");
107 fill_proc(hvt.destroySemaphore, "hvtDestroySemaphore");
108 fill_proc(hvt.destroyStreamingContext, "hvtDestroyStreamingContext");
109
110
111}
112
113class HvtError : public std::exception {
114 public:
115
116 HvtError(HvtResult code) : code(code) {}
117 const char* what() const noexcept override{
118 auto it = err_map.find(code);
119 if(it == err_map.end()) {
120 return "INVALID_ERROR";
121 }
122 return it->second;
123 }
124 HvtResult code;
125 private:
126 static inline const auto err_map = std::map<HvtResult, const char*>{
127 {HVT_SUCESS, "HVT_SUCESS"},
128 {HVT_RESULT_TIMEOUT, "HVT_RESULT_TIMEOUT"},
129 {HVT_ERROR_UNKNOWN, "HVT_ERROR_UNKNOWN"} ,
130 {HVT_ERROR_INVALID_HANDLE, "HVT_ERROR_INVALID_HANDLE"},
131 {HVT_ERROR_WRONG_BUFFER_SIZE, "HVT_ERROR_WRONG_BUFFER_SIZE"},
132 {HVT_ERROR_NOT_FOUND, "HVT_ERROR_NOT_FOUND"},
133 {HVT_ERROR_UNSUPPORTED_FORMAT, "HVT_ERROR_UNSUPPORTED_FORMAT"},
134 {HVT_ERROR_UNSUPPORTED_MEMORY_TYPE, "HVT_ERROR_UNSUPPORTED_MEMORY_TYPE"},
135 {HVT_ERROR_UNSUPPORTED_SEMAPHORE_TYPE, "HVT_ERROR_UNSUPPORTED_SEMAPHORE_TYPE"},
136 {HVT_ERROR_HEADER_VERSION, "HVT_ERROR_HEADER_VERSION"},
137 {HVT_ERROR_CALL_ORDER, "HVT_ERROR_CALL_ORDER"}
138 };
139};
140
// Invokes an hvt* entry point and converts negative HvtResult codes into
// HvtError exceptions. Non-negative codes (success, timeout) pass through
// as the return value so callers can still inspect them.
template<typename R, typename ...Params, typename ...Args>
static inline auto hvtCheck(R(*func)(Params...), Args&&... args) -> R {
    // Forward the arguments: they are taken as forwarding references,
    // so forwarding preserves value category for the callee.
    auto err = func(std::forward<Args>(args)...);
    if(err < 0){
        throw HvtError(err);
    }
    return err;
}
149
150void DynamicInputProvider::init() {
151 vk::PhysicalDeviceIDProperties idProps;
152 vk::PhysicalDeviceProperties2 prop2;
153 prop2.pNext = &idProps;
154
155 vkw.context.physicalDevice.getProperties2(&prop2);
156
157 // Init streaming context
159 .headerVersion = HVT_HEADER_VERSION
160 };
161
162 //Add our device UUID here
163 std::copy(idProps.deviceUUID.begin(), idProps.deviceUUID.end(), cinfos.graphicsDeviceUUID);
164
165 hvtCheck(hvt.createStreamingContext, &cinfos, &context);
166
167 commandPool = vkw.context.device.createCommandPoolUnique(
168 vk::CommandPoolCreateInfo({}, vkw.context.queueFamilyIndex));
169 initStreams();
170 initSynchronisation();
171
172 hvtCheck(hvt.startStreaming, context);
173}
174
175void DynamicInputProvider::initStreams() {
176 // Do two-call to get the list from the module
177 uint32_t size = 0;
178 hvtCheck(hvt.enumerateStreamsParameters, context, &size, nullptr);
179
180 std::vector<HvtRGBDStreamParameters> params(size);
181 hvtCheck(hvt.enumerateStreamsParameters, context, &size, params.data());
182
183 // Convert the list to our format
184 for(const auto& param : params) {
185 cachedStreamParameters.push_back(
186 StreamParameters{
187 .streamName = std::string(param.name),
188 .colorResolution = {param.colorResolution.width, param.colorResolution.height},
189 .depthResolution = {param.depthResolution.width, param.depthResolution.height},
190 .nearDepth = param.nearDepth,
191 .farDepth = param.farDepth,
192 .colorFormat = (vk::Format)param.colorFormat,
193 .depthFormat = (vk::Format)param.depthFormat,
194 .projectionType = projectionHvtToOur(param.projectionType)
195 });
196 }
197
198 // Allocate the images storages
199 auto dev = vkw.context.device;
200
201 for(int i = 0; const auto& param : params){
202 auto& sparam = cachedStreamParameters.at(i);
203 auto colorRes = sparam.colorResolution;
204 auto depthRes = sparam.depthResolution;
205
206 Stream stream;
207
208 bool yuv = isFormatYUV(sparam.colorFormat);
209 if(yuv) {
210 stream.ycbcrConversion = vkw.context.device.createSamplerYcbcrConversionUnique(
211 vk::SamplerYcbcrConversionCreateInfo(
212 sparam.colorFormat,
213 vk::SamplerYcbcrModelConversion::eYcbcr709,
214 vk::SamplerYcbcrRange::eItuFull,
215 vk::ComponentMapping(),
216 vk::ChromaLocation::eMidpoint,
217 vk::ChromaLocation::eMidpoint,
218 vk::Filter::eLinear,
219 VK_FALSE
220 )
221 );
222
223 stream.colorSampler = vkw.context.device.createSamplerUnique(
224 vk::StructureChain(vk::SamplerCreateInfo(
225 {},
226 vk::Filter::eLinear,
227 vk::Filter::eLinear,
228 vk::SamplerMipmapMode::eNearest,
229 vk::SamplerAddressMode::eClampToEdge,
230 vk::SamplerAddressMode::eClampToEdge,
231 vk::SamplerAddressMode::eClampToEdge)
232 .setBorderColor(vk::BorderColor::eFloatOpaqueBlack),
233 vk::SamplerYcbcrConversionInfo(
234 *stream.ycbcrConversion
235 )).get<vk::SamplerCreateInfo>());
236 } else {
237 stream.colorSampler = vkw.context.device.createSamplerUnique(
238 vk::SamplerCreateInfo(
239 {},
240 vk::Filter::eLinear,
241 vk::Filter::eLinear,
242 vk::SamplerMipmapMode::eNearest,
243 vk::SamplerAddressMode::eClampToEdge,
244 vk::SamplerAddressMode::eClampToEdge,
245 vk::SamplerAddressMode::eClampToEdge)
246 .setBorderColor(vk::BorderColor::eFloatOpaqueBlack)
247 );
248 }
249
250 auto makeSlot = [&](vk::Format format, glm::ivec2 res, bool yuv) {
251 auto createInfo = vk::ImageCreateInfo{
252 {},
253 vk::ImageType::e2D,
254 format,
255 vk::Extent3D(res.x, res.y, 1),
256 1, //Mip
257 1, //Layers
258 vk::SampleCountFlagBits::e1,
259 vk::ImageTiling::eOptimal,
260 yuv ? vk::ImageUsageFlagBits::eSampled : vk::ImageUsageFlagBits::eSampled| vk::ImageUsageFlagBits::eStorage ,
261 vk::SharingMode::eExclusive,
262 0,
263 nullptr,
264 vk::ImageLayout::eUndefined
265 };
266
267 auto imgExportInfo = vk::ExternalMemoryImageCreateInfo{memory_handle_type};
268 auto infoChain = vk::StructureChain(createInfo, imgExportInfo);
269
270 auto image = dev.createImageUnique(infoChain.get<vk::ImageCreateInfo>());
271 auto memreqs = dev.getImageMemoryRequirements(*image);
272 auto memIndex = findMemoryType(vkw.context.physicalDevice, memreqs.memoryTypeBits, vk::MemoryPropertyFlagBits::eDeviceLocal);
273
274 auto allocInfo = vk::StructureChain(
275 vk::MemoryAllocateInfo{memreqs.size, memIndex},
276 vk::ExportMemoryAllocateInfo{memory_handle_type}
277 );
278
279 auto memory = dev.allocateMemoryUnique(allocInfo.get<vk::MemoryAllocateInfo>());
280 dev.bindImageMemory(*image, *memory, 0);
281
282 auto viewInfo = vk::ImageViewCreateInfo{
283 {}, //Flags
284 *image,
285 vk::ImageViewType::e2D,
286 format,
287 {}, //Components
288 { //Subresource range
289 vk::ImageAspectFlagBits::eColor, // TODO change for depth
290 0, //Base mip
291 1, //Mip count
292 0, //Base layer
293 1, //Layer count
294 }
295 };
296
297 vk::SamplerYcbcrConversionInfo yuvInfo(*stream.ycbcrConversion);
298 if(yuv){
299 viewInfo.setPNext(&yuvInfo);
300 }
301
302 auto imageView = dev.createImageViewUnique(viewInfo);
303
304
305 //Do image memory transition
306 transitionImageLayout(&vkw.context, *commandPool, *image, format, vk::ImageLayout::eUndefined, vk::ImageLayout::eShaderReadOnlyOptimal);
307
308 return ImageSlot{
309 .image = std::move(image),
310 .memory = std::move(memory),
311 .memoryRequirements = memreqs,
312 .imageView = std::move(imageView),
313 };
314 };
315
316 auto makeImageFromSlot = [](const ImageSlot& slot) {
317 return StreamImage{
318 .image = *slot.image,
319 .view = *slot.imageView
320 };
321 };
322
323 // Allocate slots
324 for(int j = 0; j < param.slotCount; j++) {
325 stream.colorSlots.push_back(makeSlot(sparam.colorFormat, sparam.colorResolution, yuv));
326 stream.colorImages.push_back(makeImageFromSlot(stream.colorSlots.back()));
327 stream.depthSlots.push_back(makeSlot(sparam.depthFormat, sparam.depthResolution, false));
328 stream.depthImages.push_back(makeImageFromSlot(stream.depthSlots.back()));
329 }
330
331 stream.depthSampler = vkw.context.device.createSamplerUnique(
332 vk::SamplerCreateInfo(
333 {},
334 vk::Filter::eNearest,
335 vk::Filter::eNearest,
336 vk::SamplerMipmapMode::eNearest,
337 vk::SamplerAddressMode::eClampToEdge,
338 vk::SamplerAddressMode::eClampToEdge,
339 vk::SamplerAddressMode::eClampToEdge)
340 .setBorderColor(vk::BorderColor::eFloatOpaqueBlack));
341
342 sparam.colorSampler = *stream.colorSampler;
343 sparam.depthSampler = *stream.depthSampler;
344
345 // Get memory handles and export them
346 auto exportMemoryHandles = [&](const Stream& stream, bool depth){
347 std::vector<HvtStreamImageMemoryInfo> handles;
348 const auto& imgs = depth ? stream.depthSlots : stream.colorSlots;
349 for(const auto& img : imgs){
350#ifdef WIN32
351 auto handle = dev.getMemoryWin32HandleKHR(
352 vk::MemoryGetWin32HandleInfoKHR(*img.memory, memory_handle_type));
353#else
354 auto handle = dev.getMemoryFdKHR(
355 vk::MemoryGetFdInfoKHR(*img.memory, memory_handle_type));
356#endif
357 handles.push_back(HvtStreamImageMemoryInfo{
358 .size = img.memoryRequirements.size,
359 .alignment = img.memoryRequirements.alignment,
360 .handle = handle
361 });
362 }
363
365 .streamIndex = (uint32_t)i,
366 .depth = depth,
367 .memoryType = (HvtImageMemoryType)memory_handle_type,
368 .imagesCount = (uint32_t)handles.size(),
369 .pImages = handles.data()
370 };
371 hvtCheck(hvt.exportStreamImages, context, &einfos);
372 };
373
374 exportMemoryHandles(stream, false);
375 exportMemoryHandles(stream, true);
376
377 streamStorage.push_back(std::move(stream));
378 i++;
379 }
380}
381
382void DynamicInputProvider::initSynchronisation() {
383 for(auto& sync : syncInfos) {
384 auto semInfoChain = vk::StructureChain(
385 vk::SemaphoreCreateInfo(),
386 vk::ExportSemaphoreCreateInfo(semaphore_handle_type)
387 );
388 auto semInfo = semInfoChain.get<vk::SemaphoreCreateInfo>();
389 auto acquire = vkw.context.device.createSemaphoreUnique(semInfo);
390 auto release = vkw.context.device.createSemaphoreUnique(semInfo);
391
392 auto exportSem = [&](vk::Semaphore sem, HvtSemaphore& toGet){
393 #ifdef WIN32
394 auto handle = vkw.context.device.getSemaphoreWin32HandleKHR(vk::SemaphoreGetWin32HandleInfoKHR(sem, semaphore_handle_type));
395 #else
396 auto handle = vkw.context.device.getSemaphoreFdKHR(vk::SemaphoreGetFdInfoKHR(sem, semaphore_handle_type));
397 #endif
399 .semaphore = handle,
400 .type = (HvtSemaphoreType)semaphore_handle_type
401 };
402 hvtCheck(hvt.exportSemaphore, context,
403 &einfo, &toGet);
404 };
405
406 exportSem(*acquire, sync.acquireHvt);
407 exportSem(*release, sync.releaseHvt);
408
409 sync.acquireSemaphore = std::move(acquire);
410 sync.releaseSempahore = std::move(release);
411 }
412}
413
// Returns a copy of the stream parameters cached at initStreams() time;
// the module is not re-queried.
std::vector<DynamicInputProvider::StreamParameters> DynamicInputProvider::enumerateStreamsParameters() const {
    return cachedStreamParameters;
}
417
418std::vector<DynamicInputProvider::StreamImage> DynamicInputProvider::enumerateStreamImages(uint32_t streamIndex, bool depth) const {
419 const auto& stream = streamStorage.at(streamIndex);
420 if(depth) {
421 return stream.depthImages;
422 } else {
423 return stream.colorImages;
424 }
425}
426
427void DynamicInputProvider::acquireStreamsFrames(const Extrinsics& targetViewExtrinsics, std::span<StreamFrameInfo> outFrameInfos) {
428 auto extrinsicsToHvt = [](Extrinsics ex){
429 return HvtExtrinsics{
430 .position = HvtPosition{
431 .x = ex.position.x,
432 .y = ex.position.y,
433 .z = ex.position.z
434 },
435 .rotation = HvtRotation{
436 .yaw = ex.rotation.x,
437 .pitch = ex.rotation.y,
438 .roll = ex.rotation.z
439 }
440 };
441 };
442
443 // Take next sync slot
444 currentSyncSlot = (currentSyncSlot+1) % syncInfos.size();
445 auto& currentSync = syncInfos.at(currentSyncSlot);
446
447 // Get stream frame info from the module
448 std::vector<HvtStreamFrameInfo> hvtFrameInfos(outFrameInfos.size());
450 .viewerExtrinsics = extrinsicsToHvt(targetViewExtrinsics),
451 .frameInfoCount = (uint32_t)hvtFrameInfos.size(),
452 .pStreamFrameInfos = hvtFrameInfos.data(),
453 .signalSemaphore = currentSync.acquireHvt
454 };
455 hvtCheck(hvt.acquireStreamsFrames, context, &ainfos);
456
457
458 //Fill the output frames info
459 for(int i = 0; i < outFrameInfos.size(); i++) {
460 const auto& hfi = hvtFrameInfos.at(i);
461 auto& fi = outFrameInfos[i];
462
463 auto ext = hfi.extrinsics;
464 auto intr = hfi.intrinsics;
465
466 if (cachedStreamParameters.at(i).projectionType == ProjectionType::PROJECTION_PERSPECTIVE) {
467 fi = StreamFrameInfo{
468 .imageIndex = hfi.imageIndex,
469 .extrinsics = Extrinsics{
470 .position = {ext.position.x,ext.position.y,ext.position.z},
471 .rotation = {ext.rotation.yaw, ext.rotation.pitch, ext.rotation.roll}
472 },
473 .intrinsics = Intrinsics{ //TODO handle the equirectangular case
474 PerspectiveIntrinsics{
475 .focals = {hfi.intrinsics.perspective.focalX, hfi.intrinsics.perspective.focalY},
476 .principle = {hfi.intrinsics.perspective.principlePointX, hfi.intrinsics.perspective.principlePointY}
477 }
478 }
479 };
480 }
481 else if(cachedStreamParameters.at(i).projectionType == ProjectionType::PROJECTION_EQUIRECTANGULAR) {
482 fi = StreamFrameInfo{
483 .imageIndex = hfi.imageIndex,
484 .extrinsics = Extrinsics{
485 .position = {ext.position.x,ext.position.y,ext.position.z},
486 .rotation = {ext.rotation.yaw, ext.rotation.pitch, ext.rotation.roll}
487 },
488 .intrinsics = Intrinsics{
489 EquirectangularIntrinsics{
490 .verticalRange = {hfi.intrinsics.equirectangular.verticalRange[0], hfi.intrinsics.equirectangular.verticalRange[1]},
491 .horizontalRange = {hfi.intrinsics.equirectangular.horizontalRange[0], hfi.intrinsics.equirectangular.horizontalRange[1]}
492 }
493 }
494 };
495 }
496 else {
497 throw std::runtime_error("Invalid projection type !");
498 }
499 }
500
501 //Wait on the image sempahore
502 vk::PipelineStageFlags waitStage = vk::PipelineStageFlagBits::eTopOfPipe;
503 vkw.context.graphicsQueue.submit(
504 vk::SubmitInfo(*currentSync.acquireSemaphore, waitStage));
505}
506
507void DynamicInputProvider::releaseStreamsFrames() {
508 auto& currentSync = syncInfos.at(currentSyncSlot);
509
510 // Submit a signal that we are done with the image
511 vkw.context.graphicsQueue.submit(
512 vk::SubmitInfo({},{},{},*currentSync.releaseSempahore));
513
514 hvtCheck(hvt.releaseStreamsFrames, context, currentSync.releaseHvt); //TODO pass proper semaphore
515}
516
517DynamicInputProvider::~DynamicInputProvider() {
518 hvtCheck(hvt.stopStreaming, context);
519 hvtCheck(hvt.destroyStreamingContext, context);
520}
Class that contains helper functions for Vulkan.
std::variant< PerspectiveIntrinsics, EquirectangularIntrinsics > Intrinsics
Union of possible intrinsics types data.
Definition: InputProvider.h:67
vk::PhysicalDevice physicalDevice
Definition: VulkanContext.h:85
uint32_t queueFamilyIndex
Definition: VulkanContext.h:95
vk::Device device
Definition: VulkanContext.h:87
vk::Queue graphicsQueue
Definition: VulkanContext.h:89
Class that manages the classes related to Vulkan code and act as a wrapper around them.
Definition: VulkanWrapper.h:66
VulkanContext context
Parameters for query of the current frames infos.
Information about the memory backing an image slot.
Export info for images of a stream.
Parameters for the creation of the Streaming context.