#include "DynamicInputProvider.h"

#ifdef _WIN32
#include <libloaderapi.h>
#else
#include <dlfcn.h>
#include <cerrno>
#include <cstring>
#endif
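// DynamicInputProvider adapts a dynamically loaded "HVT" streaming library to the renderer's
// InputProvider interface. At runtime it:
//   - loads the library with LoadLibraryEx / dlopen and resolves the hvt* entry points,
//   - creates a streaming context bound to the rendering GPU (identified by its device UUID),
//   - allocates exportable Vulkan images and semaphores that the library imports through
//     memory_handle_type / semaphore_handle_type (opaque Win32 handles or file descriptors),
//   - acquires and releases the per-stream frames around each rendered view.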
// Map the HVT projection enum onto the provider-side projection type.
static InputProvider::ProjectionType projectionHvtToOur(HvtProjectionType ptype) {
    switch(ptype) {
    case HvtProjectionType::HVT_PROJECTION_EQUIRECTANGULAR:
        return InputProvider::ProjectionType::PROJECTION_EQUIRECTANGULAR;
    case HvtProjectionType::HVT_PROJECTION_PERSPECTIVE:
        return InputProvider::ProjectionType::PROJECTION_PERSPECTIVE;
    default:
        return InputProvider::ProjectionType::PROJECTION_INVALID;
    }
}
// The multi-planar / subsampled YCbCr formats form a contiguous range of the vk::Format enum.
static bool isFormatYUV(vk::Format format) {
    return format >= vk::Format::eG8B8G8R8422Unorm && format <= vk::Format::eG16B16R163Plane444Unorm;
}
DynamicInputProvider::DynamicInputProvider(const std::string& libraryPath, VulkanWrapper& vkw) : vkw(vkw) {
    loadLibrary(libraryPath);
}
void DynamicInputProvider::loadLibrary(const std::string& libraryPath) {
    std::filesystem::path libPath = libraryPath;
    libPath = std::filesystem::absolute(libPath);
    if(!std::filesystem::exists(libPath)) {
        throw std::runtime_error(
            "Could not load dynamic library " + libPath.string() +
            " : file does not exist.");
    }

    // Load the streaming library with the platform's dynamic loader.
#ifdef _WIN32
    auto module = LoadLibraryEx(libPath.string().c_str(), NULL, LOAD_LIBRARY_SEARCH_DEFAULT_DIRS | LOAD_LIBRARY_SEARCH_DLL_LOAD_DIR);
#else
    auto module = dlopen(libPath.string().c_str(), RTLD_LAZY);
#endif
    if(!module) {
#ifdef _WIN32
        auto err = GetLastError();
        auto message = std::to_string(err);
#else
        auto err = errno;
        auto message = strerror(err);
#endif
        throw std::runtime_error(
            "Could not load dynamic library " + libPath.string() +
            " ( " + message + " )");
    }
    // Resolve one HVT entry point by name and store it in the dispatch table.
    auto fill_proc = [&]<typename Ptr>(Ptr& ptr, const char* name) {
#ifdef _WIN32
        auto addr = GetProcAddress(module, name);
#else
        auto addr = dlsym(module, name);
#endif
        if(!addr) {
#ifdef _WIN32
            auto err = GetLastError();
            auto message = std::to_string(err);
#else
            auto err = errno;
            auto message = strerror(err);
#endif
            throw std::runtime_error(std::string(
                "Failed to query entry point ") + name +
                " from module " + libraryPath +
                " ( " + message + " )");
        }
        ptr = reinterpret_cast<Ptr>(addr);
    };
    fill_proc(hvt.createStreamingContext, "hvtCreateStreamingContext");
    fill_proc(hvt.enumerateStreamsParameters, "hvtEnumerateStreamsParameters");
    fill_proc(hvt.exportStreamImages, "hvtExportStreamImages");
    fill_proc(hvt.exportSemaphore, "hvtExportSemaphore");
    fill_proc(hvt.acquireStreamsFrames, "hvtAcquireStreamsFrames");
    fill_proc(hvt.releaseStreamsFrames, "hvtReleaseStreamsFrames");
    fill_proc(hvt.startStreaming, "hvtStartStreaming");
    fill_proc(hvt.stopStreaming, "hvtStopStreaming");
    fill_proc(hvt.destroySemaphore, "hvtDestroySemaphore");
    fill_proc(hvt.destroyStreamingContext, "hvtDestroyStreamingContext");
}
// Exception type wrapping an HvtResult returned by the streaming library.
struct HvtError : public std::exception {
    HvtResult code;

    HvtError(HvtResult code) : code(code) {}

    const char* what() const noexcept override {
        auto it = err_map.find(code);
        if(it == err_map.end()) {
            return "INVALID_ERROR";
        }
        return it->second;
    }

    static inline const auto err_map = std::map<HvtResult, const char*>{
        {HVT_SUCESS, "HVT_SUCESS"},
        {HVT_RESULT_TIMEOUT, "HVT_RESULT_TIMEOUT"},
        {HVT_ERROR_UNKNOWN, "HVT_ERROR_UNKNOWN"},
        {HVT_ERROR_INVALID_HANDLE, "HVT_ERROR_INVALID_HANDLE"},
        {HVT_ERROR_WRONG_BUFFER_SIZE, "HVT_ERROR_WRONG_BUFFER_SIZE"},
        {HVT_ERROR_NOT_FOUND, "HVT_ERROR_NOT_FOUND"},
        {HVT_ERROR_UNSUPPORTED_FORMAT, "HVT_ERROR_UNSUPPORTED_FORMAT"},
        {HVT_ERROR_UNSUPPORTED_MEMORY_TYPE, "HVT_ERROR_UNSUPPORTED_MEMORY_TYPE"},
        {HVT_ERROR_UNSUPPORTED_SEMAPHORE_TYPE, "HVT_ERROR_UNSUPPORTED_SEMAPHORE_TYPE"},
        {HVT_ERROR_HEADER_VERSION, "HVT_ERROR_HEADER_VERSION"},
        {HVT_ERROR_CALL_ORDER, "HVT_ERROR_CALL_ORDER"}
    };
};
// Call an HVT entry point and turn any non-success result into an HvtError exception.
template<typename R, typename ...Params, typename ...Args>
static inline auto hvtCheck(R(*func)(Params...), Args&&... args) -> R {
    auto err = func(args...);
    if(err != HVT_SUCESS) {
        throw HvtError(err);
    }
    return err;
}
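// Initialization happens in three steps: create the streaming context on the rendering GPU,
// build the per-stream Vulkan resources (initStreams), and create the shared acquire/release
// semaphores (initSynchronisation) before streaming is started.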
void DynamicInputProvider::init() {
    // Query the rendering GPU's UUID so the streaming context is created for the same device.
    vk::PhysicalDeviceIDProperties idProps;
    vk::PhysicalDeviceProperties2 prop2;
    prop2.pNext = &idProps;
    vkw.context.physicalDevice.getProperties2(&prop2);

    // Streaming-context creation parameters (struct type name assumed here; the remaining field setup is elided in this excerpt).
    HvtStreamingContextCreateInfo cinfos{};
    std::copy(idProps.deviceUUID.begin(), idProps.deviceUUID.end(), cinfos.graphicsDeviceUUID);
    hvtCheck(hvt.createStreamingContext, &cinfos, &context);

    initStreams();
    initSynchronisation();
    hvtCheck(hvt.startStreaming, context);
}
void DynamicInputProvider::initStreams() {
    // Standard two-call enumeration: first query the stream count, then fetch the parameters.
    uint32_t size = 0;
    hvtCheck(hvt.enumerateStreamsParameters, context, &size, nullptr);
    std::vector<HvtRGBDStreamParameters> params(size);
    hvtCheck(hvt.enumerateStreamsParameters, context, &size, params.data());

    // Cache the stream parameters in the provider-side representation.
    for(const auto& param : params) {
        cachedStreamParameters.push_back(StreamParameters{
            .streamName = std::string(param.name),
            .colorResolution = {param.colorResolution.width, param.colorResolution.height},
            .depthResolution = {param.depthResolution.width, param.depthResolution.height},
            .nearDepth = param.nearDepth,
            .farDepth = param.farDepth,
            .colorFormat = (vk::Format)param.colorFormat,
            .depthFormat = (vk::Format)param.depthFormat,
            .projectionType = projectionHvtToOur(param.projectionType)
        });
    }
    // Create the per-stream Vulkan resources: samplers, images, memory and views.
    auto& dev = vkw.context.device;
    for(int i = 0; const auto& param : params) {
        auto& sparam = cachedStreamParameters.at(i);
        auto colorRes = sparam.colorResolution;
        auto depthRes = sparam.depthResolution;
        Stream stream;

        // YCbCr color streams need a sampler YCbCr conversion and a sampler that carries it.
        bool yuv = isFormatYUV(sparam.colorFormat);
        if(yuv) {
            stream.ycbcrConversion = vkw.context.device.createSamplerYcbcrConversionUnique(
                vk::SamplerYcbcrConversionCreateInfo(
                    sparam.colorFormat,
                    vk::SamplerYcbcrModelConversion::eYcbcr709,
                    vk::SamplerYcbcrRange::eItuFull,
                    vk::ComponentMapping(),
                    vk::ChromaLocation::eMidpoint,
                    vk::ChromaLocation::eMidpoint));

            // The color sampler references the YCbCr conversion through its pNext chain.
            stream.colorSampler = vkw.context.device.createSamplerUnique(
                vk::StructureChain(vk::SamplerCreateInfo(
                    {}, vk::Filter::eNearest, vk::Filter::eNearest,
                    vk::SamplerMipmapMode::eNearest,
                    vk::SamplerAddressMode::eClampToEdge,
                    vk::SamplerAddressMode::eClampToEdge,
                    vk::SamplerAddressMode::eClampToEdge)
                    .setBorderColor(vk::BorderColor::eFloatOpaqueBlack),
                    vk::SamplerYcbcrConversionInfo(*stream.ycbcrConversion)
                ).get<vk::SamplerCreateInfo>());
        }
        else {
            // Non-YCbCr color streams only need a plain sampler.
            stream.colorSampler = vkw.context.device.createSamplerUnique(
                vk::SamplerCreateInfo(
                    {}, vk::Filter::eNearest, vk::Filter::eNearest,
                    vk::SamplerMipmapMode::eNearest,
                    vk::SamplerAddressMode::eClampToEdge,
                    vk::SamplerAddressMode::eClampToEdge,
                    vk::SamplerAddressMode::eClampToEdge)
                    .setBorderColor(vk::BorderColor::eFloatOpaqueBlack));
        }
        // Create one image slot (exportable image + memory + view) for a given format and resolution.
        auto makeSlot = [&](vk::Format format, glm::ivec2 res, bool yuv) {
            auto createInfo = vk::ImageCreateInfo{
                {},
                vk::ImageType::e2D,
                format,
                vk::Extent3D(res.x, res.y, 1),
                1, 1,
                vk::SampleCountFlagBits::e1,
                vk::ImageTiling::eOptimal,
                yuv ? vk::ImageUsageFlagBits::eSampled : vk::ImageUsageFlagBits::eSampled | vk::ImageUsageFlagBits::eStorage,
                vk::SharingMode::eExclusive,
                0, nullptr,
                vk::ImageLayout::eUndefined
            };
            // The image memory must be exportable so the streaming library can import and fill it.
            auto imgExportInfo = vk::ExternalMemoryImageCreateInfo{memory_handle_type};
            auto infoChain = vk::StructureChain(createInfo, imgExportInfo);

            auto image = dev.createImageUnique(infoChain.get<vk::ImageCreateInfo>());
            auto memreqs = dev.getImageMemoryRequirements(*image);
            auto memIndex = findMemoryType(vkw.context.physicalDevice, memreqs.memoryTypeBits, vk::MemoryPropertyFlagBits::eDeviceLocal);

            auto allocInfo = vk::StructureChain(
                vk::MemoryAllocateInfo{memreqs.size, memIndex},
                vk::ExportMemoryAllocateInfo{memory_handle_type}
            );
            auto memory = dev.allocateMemoryUnique(allocInfo.get<vk::MemoryAllocateInfo>());
            dev.bindImageMemory(*image, *memory, 0);

            auto viewInfo = vk::ImageViewCreateInfo{
                {},
                *image,
                vk::ImageViewType::e2D,
                format,
                vk::ComponentMapping(),
                vk::ImageSubresourceRange(
                    vk::ImageAspectFlagBits::eColor,
                    0, 1, 0, 1)
            };
            // Multi-planar views must reference the YCbCr conversion through their pNext chain.
            vk::SamplerYcbcrConversionInfo yuvInfo(*stream.ycbcrConversion);
            if(yuv) {
                viewInfo.setPNext(&yuvInfo);
            }
            auto imageView = dev.createImageViewUnique(viewInfo);

            // Move the image into the layout the renderer samples from.
            transitionImageLayout(&vkw.context, *commandPool, *image, format, vk::ImageLayout::eUndefined, vk::ImageLayout::eShaderReadOnlyOptimal);

            return ImageSlot{
                .image = std::move(image),
                .memory = std::move(memory),
                .memoryRequirements = memreqs,
                .imageView = std::move(imageView),
            };
        };
        // Expose a slot to the renderer as a non-owning image/view pair.
        auto makeImageFromSlot = [](const ImageSlot& slot) {
            return StreamImage{
                .image = *slot.image,
                .view = *slot.imageView
            };
        };

        // Allocate every requested slot, for both the color and the depth images of the stream.
        for(int j = 0; j < param.slotCount; j++) {
            stream.colorSlots.push_back(makeSlot(sparam.colorFormat, sparam.colorResolution, yuv));
            stream.colorImages.push_back(makeImageFromSlot(stream.colorSlots.back()));
            stream.depthSlots.push_back(makeSlot(sparam.depthFormat, sparam.depthResolution, false));
            stream.depthImages.push_back(makeImageFromSlot(stream.depthSlots.back()));
        }
        // Depth is always sampled with a plain nearest-filtering sampler.
        stream.depthSampler = vkw.context.device.createSamplerUnique(
            vk::SamplerCreateInfo(
                {},
                vk::Filter::eNearest,
                vk::Filter::eNearest,
                vk::SamplerMipmapMode::eNearest,
                vk::SamplerAddressMode::eClampToEdge,
                vk::SamplerAddressMode::eClampToEdge,
                vk::SamplerAddressMode::eClampToEdge)
                .setBorderColor(vk::BorderColor::eFloatOpaqueBlack));

        // Publish the samplers through the cached stream parameters.
        sparam.colorSampler = *stream.colorSampler;
        sparam.depthSampler = *stream.depthSampler;
        // Export every slot's memory handle so the streaming library can import the images.
        auto exportMemoryHandles = [&](const Stream& stream, bool depth) {
            std::vector<HvtStreamImageMemoryInfo> handles;
            const auto& imgs = depth ? stream.depthSlots : stream.colorSlots;
            for(const auto& img : imgs) {
#ifdef _WIN32
                auto handle = dev.getMemoryWin32HandleKHR(
                    vk::MemoryGetWin32HandleInfoKHR(*img.memory, memory_handle_type));
#else
                auto handle = dev.getMemoryFdKHR(
                    vk::MemoryGetFdInfoKHR(*img.memory, memory_handle_type));
#endif
                handles.push_back(HvtStreamImageMemoryInfo{
                    .size = img.memoryRequirements.size,
                    .alignment = img.memoryRequirements.alignment,
                    // the exported memory handle field is elided in this excerpt
                });
            }

            // Export info for the stream's images (type name assumed from the library's Hvt* convention; stream identification fields elided).
            HvtStreamImagesExportInfo einfos{
                .memoryType = (HvtImageMemoryType)memory_handle_type,
                .imagesCount = (uint32_t)handles.size(),
                .pImages = handles.data()
            };
            hvtCheck(hvt.exportStreamImages, context, &einfos);
        };
        exportMemoryHandles(stream, false);
        exportMemoryHandles(stream, true);

        streamStorage.push_back(std::move(stream));
        i++;
    }
}
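// Synchronization uses a small ring of semaphore pairs (syncInfos): for each acquired frame set the
// library signals the "acquire" semaphore, which the graphics queue waits on before sampling, and the
// graphics queue signals the "release" semaphore, which is handed back through hvtReleaseStreamsFrames.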
void DynamicInputProvider::initSynchronisation() {
    for(auto& sync : syncInfos) {
        // Both semaphores of the pair are created exportable to the streaming library.
        auto semInfoChain = vk::StructureChain(
            vk::SemaphoreCreateInfo(),
            vk::ExportSemaphoreCreateInfo(semaphore_handle_type)
        );
        auto semInfo = semInfoChain.get<vk::SemaphoreCreateInfo>();
        auto acquire = vkw.context.device.createSemaphoreUnique(semInfo);
        auto release = vkw.context.device.createSemaphoreUnique(semInfo);

        // Export a Vulkan semaphore as an HvtSemaphore handle usable by the streaming library.
        auto exportSem = [&](vk::Semaphore sem, HvtSemaphore& toGet) {
#ifdef _WIN32
            auto handle = vkw.context.device.getSemaphoreWin32HandleKHR(
                vk::SemaphoreGetWin32HandleInfoKHR(sem, semaphore_handle_type));
#else
            auto handle = vkw.context.device.getSemaphoreFdKHR(
                vk::SemaphoreGetFdInfoKHR(sem, semaphore_handle_type));
#endif
            // Semaphore export parameters (the struct's type name and handle field are assumed; partly elided in this excerpt).
            HvtSemaphoreExportInfo einfo{
                .type = (HvtSemaphoreType)semaphore_handle_type
            };
            hvtCheck(hvt.exportSemaphore, context, &einfo, &toGet);
        };
        exportSem(*acquire, sync.acquireHvt);
        exportSem(*release, sync.releaseHvt);

        sync.acquireSemaphore = std::move(acquire);
        sync.releaseSempahore = std::move(release);
    }
}
std::vector<DynamicInputProvider::StreamParameters> DynamicInputProvider::enumerateStreamsParameters() const {
    return cachedStreamParameters;
}

std::vector<DynamicInputProvider::StreamImage> DynamicInputProvider::enumerateStreamImages(uint32_t streamIndex, bool depth) const {
    const auto& stream = streamStorage.at(streamIndex);
    if(depth) {
        return stream.depthImages;
    }
    return stream.colorImages;
}
void DynamicInputProvider::acquireStreamsFrames(const Extrinsics& targetViewExtrinsics, std::span<StreamFrameInfo> outFrameInfos) {
    // Convert our extrinsics into the HVT representation (position plus yaw/pitch/roll).
    auto extrinsicsToHvt = [](Extrinsics ex) {
        return HvtExtrinsics{
            .position = {ex.position.x, ex.position.y, ex.position.z},
            .rotation = {
                .yaw = ex.rotation.x,
                .pitch = ex.rotation.y,
                .roll = ex.rotation.z
            }
        };
    };
    // Advance round-robin to the next synchronization slot and its semaphore pair.
    currentSyncSlot = (currentSyncSlot + 1) % syncInfos.size();
    auto& currentSync = syncInfos.at(currentSyncSlot);

    std::vector<HvtStreamFrameInfo> hvtFrameInfos(outFrameInfos.size());
    // Frame acquisition parameters (the struct's type name is assumed; viewerExtrinsics matches the library's documented field).
    HvtAcquireStreamsFramesInfo ainfos{
        .viewerExtrinsics = extrinsicsToHvt(targetViewExtrinsics),
        .frameInfoCount = (uint32_t)hvtFrameInfos.size(),
        .pStreamFrameInfos = hvtFrameInfos.data(),
        .signalSemaphore = currentSync.acquireHvt
    };
    hvtCheck(hvt.acquireStreamsFrames, context, &ainfos);
    // Translate each stream's frame info into the renderer-side representation.
    for(int i = 0; i < outFrameInfos.size(); i++) {
        const auto& hfi = hvtFrameInfos.at(i);
        auto& fi = outFrameInfos[i];

        auto ext = hfi.extrinsics;
        auto intr = hfi.intrinsics;

        if(cachedStreamParameters.at(i).projectionType == ProjectionType::PROJECTION_PERSPECTIVE) {
            fi = StreamFrameInfo{
                .imageIndex = hfi.imageIndex,
                .extrinsics = Extrinsics{
                    .position = {ext.position.x, ext.position.y, ext.position.z},
                    .rotation = {ext.rotation.yaw, ext.rotation.pitch, ext.rotation.roll}
                },
                .intrinsics = PerspectiveIntrinsics{
                    .focals = {hfi.intrinsics.perspective.focalX, hfi.intrinsics.perspective.focalY},
                    .principle = {hfi.intrinsics.perspective.principlePointX, hfi.intrinsics.perspective.principlePointY}
                }
            };
        }
        else if(cachedStreamParameters.at(i).projectionType == ProjectionType::PROJECTION_EQUIRECTANGULAR) {
            fi = StreamFrameInfo{
                .imageIndex = hfi.imageIndex,
                .extrinsics = Extrinsics{
                    .position = {ext.position.x, ext.position.y, ext.position.z},
                    .rotation = {ext.rotation.yaw, ext.rotation.pitch, ext.rotation.roll}
                },
                .intrinsics = EquirectangularIntrinsics{
                    .verticalRange = {hfi.intrinsics.equirectangular.verticalRange[0], hfi.intrinsics.equirectangular.verticalRange[1]},
                    .horizontalRange = {hfi.intrinsics.equirectangular.horizontalRange[0], hfi.intrinsics.equirectangular.horizontalRange[1]}
                }
            };
        }
        else {
            throw std::runtime_error("Invalid projection type!");
        }
    }
    // Make the graphics queue wait on the acquire semaphore before the new frames are sampled
    // (the "queue" member name on the Vulkan wrapper is assumed here).
    vk::PipelineStageFlags waitStage = vk::PipelineStageFlagBits::eTopOfPipe;
    vkw.context.queue.submit(
        vk::SubmitInfo(*currentSync.acquireSemaphore, waitStage));
}
void DynamicInputProvider::releaseStreamsFrames() {
    auto& currentSync = syncInfos.at(currentSyncSlot);
    // Signal the release semaphore from the graphics queue (the "queue" member name is assumed here),
    // then hand the frames back to the streaming library.
    vkw.context.queue.submit(
        vk::SubmitInfo({}, {}, {}, *currentSync.releaseSempahore));

    hvtCheck(hvt.releaseStreamsFrames, context, currentSync.releaseHvt);
}
DynamicInputProvider::~DynamicInputProvider() {
    hvtCheck(hvt.stopStreaming, context);
    hvtCheck(hvt.destroyStreamingContext, context);
}