#include "opencvStreamer.h"
#include "opencvReading.h"
// ...

using rvs::detail::ColorSpace;
using rvs::detail::g_color_space;

VULKAN_HPP_DEFAULT_DISPATCH_LOADER_DYNAMIC_STORAGE;
static std::pair<vk::Format, size_t> vkColorFormatFromParams(
    const rvs::Parameters& params, InternalFormatClass internalFormat)
{
    if (internalFormat == InternalFormatClass::YUV) {
        // One of the three 4:2:0 formats is returned depending on the color bit
        // depth; the branch conditions are elided in this listing.
        return { vk::Format::eG16B16R163Plane420Unorm, 6 };
        // ...
        return { vk::Format::eG10X6B10X6R10X63Plane420Unorm3Pack16, 6 };
        // ...
        return { vk::Format::eG8B8R83Plane420Unorm, 3 };
    }
    // RGB path (branch conditions elided as well):
    return { vk::Format::eR16G16B16A16Unorm, 6 };
    // ...
    return { vk::Format::eR8G8B8A8Unorm, 3 };
}
static std::pair<vk::Format, size_t> vkDepthFormatFromParams(
    const rvs::Parameters& params)
{
    // Format and byte count per pixel depend on the depth bit depth; the branch
    // conditions are elided in this listing.
    return { vk::Format::eR32Sfloat, 3 };
    // ...
    return { vk::Format::eR16Unorm, 2 };
    // ...
    return { vk::Format::eR16Unorm, 2 };
    // ...
    return { vk::Format::eR8Unorm, 1 };
}
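// Note on the second pair element: for depth it is the byte count per pixel, while
// for color it is a count of *half* bytes per pixel, so that 4:2:0 subsampled frames
// can be expressed as an integer (initSettings() below computes
// colorSize = pixelCount * colorHalfBytesPerPixel / 2, e.g. 3 half bytes = 1.5 bytes
// per pixel for 8-bit YUV 4:2:0).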
int opencvStreamer::ReadStream::frameIndex(Clock::duration time) const {
    return (time / framePeriod) % frameCount;
}

bool opencvStreamer::ReadStream::nextFrameReady(Clock::duration time) const {
    return streamedFrame != frameIndex(time);
}
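// Worked example of the pacing math above (illustrative values, not from any
// config): with frameRate = 30, framePeriod = 1 s / 30, about 33.33 ms. At
// time = 1 s, time / framePeriod = 30 and, with frameCount = 25, the stream wraps
// around to frameIndex = 30 % 25 = 5; nextFrameReady() then stays true until
// streamedFrame has been updated to 5.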
opencvStreamer::opencvStreamer(std::span<const uint8_t, VK_UUID_SIZE> uuid)
{
    // ... (constructor body elided in this listing)
}
void opencvStreamer::initVk(std::span<const uint8_t, VK_UUID_SIZE> uuid) {
    VULKAN_HPP_DEFAULT_DISPATCHER.init(
        dl.getProcAddress<PFN_vkGetInstanceProcAddr>("vkGetInstanceProcAddr"));

    constexpr auto neededInstanceExts = std::array{
        VK_KHR_EXTERNAL_MEMORY_CAPABILITIES_EXTENSION_NAME,
        VK_KHR_EXTERNAL_SEMAPHORE_CAPABILITIES_EXTENSION_NAME,
    };

    vk::ApplicationInfo ainfos(
        "opencvStreamer", VK_MAKE_VERSION(0, 0, 1),
        "No Engine", VK_MAKE_VERSION(0, 0, 0), VK_API_VERSION_1_2);

    instance = vk::createInstanceUnique(
        vk::InstanceCreateInfo(
            // ... (layer/extension arguments elided in this listing)
        ));
    VULKAN_HPP_DEFAULT_DISPATCHER.init(*instance);

    constexpr auto neededDeviceExts = std::array{
        // Only one of the two pairs is compiled in, depending on platform
        // (preprocessor directives elided in this listing):
        VK_KHR_EXTERNAL_MEMORY_WIN32_EXTENSION_NAME,
        VK_KHR_EXTERNAL_SEMAPHORE_WIN32_EXTENSION_NAME,
        VK_KHR_EXTERNAL_MEMORY_FD_EXTENSION_NAME,
        VK_KHR_EXTERNAL_SEMAPHORE_FD_EXTENSION_NAME
    };

    // Pick the physical device whose UUID matches the one passed in by the caller.
    auto phyDevs = instance->enumeratePhysicalDevices();
    auto it = std::find_if(phyDevs.begin(), phyDevs.end(),
        [&](const vk::PhysicalDevice& phy) {
            vk::PhysicalDeviceIDProperties idProps;
            vk::PhysicalDeviceProperties2 prop2;
            prop2.pNext = &idProps;
            phy.getProperties2(&prop2);
            // memcmp rather than strncmp: the UUID is raw bytes, not a NUL-terminated string.
            return memcmp(idProps.deviceUUID.data(), uuid.data(), VK_UUID_SIZE) == 0;
        });
    if (it == phyDevs.end()) {
        throw std::runtime_error("Could not find physical device corresponding to UUID");
    }
    // ...

    float priority[] = { 1.f, 1.0f };
    queueFamilyIndex = 0;
    auto queuesInfos = std::array{ vk::DeviceQueueCreateInfo({}, queueFamilyIndex, 2, priority) };

    device = phyDevice.createDeviceUnique(
        vk::DeviceCreateInfo(
            // ... (queue/extension arguments elided in this listing)
        ));
    VULKAN_HPP_DEFAULT_DISPATCHER.init(*device);

    queue = device->getQueue(queueFamilyIndex, 0);
    syncQueue = device->getQueue(queueFamilyIndex, 1);
}
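// The InstanceCreateInfo/DeviceCreateInfo arguments are elided above. A minimal
// sketch of how the extension lists would typically be wired up with Vulkan-Hpp
// (an assumption for illustration, not the exact code of this file):
//
//   instance = vk::createInstanceUnique(
//       vk::InstanceCreateInfo({}, &ainfos, {}, neededInstanceExts));
//   ...
//   device = phyDevice.createDeviceUnique(
//       vk::DeviceCreateInfo({}, queuesInfos, {}, neededDeviceExts));
//
// The two-queue DeviceQueueCreateInfo above matches the getQueue(queueFamilyIndex, 0/1)
// calls: presumably one queue for the upload submissions (queue) and a second
// (syncQueue) for synchronization-only submissions.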
template<typename Closure>
void opencvStreamer::oneTimeSubmit(Closure&& func, vk::SubmitInfo submitInfo) {
    // Lazily created per-thread command pool and fence (creation guards elided in this listing).
    static thread_local vk::UniqueCommandPool commandPool;
    static thread_local vk::UniqueFence oneTimeFence;
    // ...
    commandPool = device->createCommandPoolUnique(
        vk::CommandPoolCreateInfo({}, queueFamilyIndex));
    // ...
    oneTimeFence = device->createFenceUnique(vk::FenceCreateInfo());
    // ...
    auto commandBuffers = device->allocateCommandBuffersUnique(
        vk::CommandBufferAllocateInfo(
            // ...
            vk::CommandBufferLevel::ePrimary,
            // ...
        ));
    auto& cmdbuf = *commandBuffers.front();

    cmdbuf.begin(vk::CommandBufferBeginInfo{});
    // ... (record the caller's commands via func and end the buffer; elided)
    submitInfo.setCommandBuffers(cmdbuf);
    device->resetFences(*oneTimeFence);
    {
        std::scoped_lock l(queueMutex);
        queue.submit(submitInfo, *oneTimeFence);
    }
    (void)device->waitForFences(*oneTimeFence, VK_TRUE, UINT64_MAX);
}
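// oneTimeSubmit() gives the rest of the file a synchronous helper: it records into a
// transient, thread-local command buffer, submits under queueMutex and blocks on the
// fence before returning. The real call sites are in importStreamImages() (layout
// transitions) and streamingLoop() (frame uploads); a minimal usage sketch, with a
// hypothetical barrier variable, looks like:
//
//   oneTimeSubmit([&](vk::CommandBuffer& cmd) {
//       cmd.pipelineBarrier(vk::PipelineStageFlagBits::eTransfer,
//                           vk::PipelineStageFlagBits::eTransfer,
//                           vk::DependencyFlagBits::eByRegion,
//                           {}, {}, someImageBarrier);
//   });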
void opencvStreamer::initSettings() {
    const char* jsonPath = getenv("HVT_JSON_PATH");
    if (!jsonPath) {
        std::cerr << "No HVT_JSON_PATH env variable set, cannot load settings" << std::endl;
        throw HvtResult::HVT_ERROR_NOT_FOUND;
    }
    auto path = std::filesystem::path(jsonPath);
    if (!std::filesystem::exists(path)) {
        std::cerr << "File " << path << " does not exist" << std::endl;
        throw HvtResult::HVT_ERROR_NOT_FOUND;
    }
    auto dir = std::filesystem::path(path).parent_path();
    // ... (configuration loading elided in this listing)

    size_t numViews = config->InputCameraNames.size();
    if (config->params_real.size() != numViews ||
        config->depth_names.size() != numViews ||
        config->texture_names.size() != numViews) {
        throw std::runtime_error("File and settings size mismatch");
    }

    if (g_color_space == rvs::detail::ColorSpace::RGB) {
        internalFormat = InternalFormatClass::RGB;
    }
    // ...

    using namespace std::chrono_literals;
    auto framePeriod = std::chrono::nanoseconds(1s) / frameRate;

    for (size_t i = 0; i < numViews; i++) {
        const auto& params = config->params_real.at(i);
        const auto& colorFileName = config->texture_names.at(i);
        const auto& depthFileName = config->depth_names.at(i);

        std::filesystem::path cp(colorFileName);
        auto colorFilePath = cp.is_absolute() ? cp : dir / cp;
        std::filesystem::path dp(depthFileName);
        auto depthFilePath = dp.is_absolute() ? dp : dir / dp;

        auto colorFile = std::ifstream(colorFilePath,
            std::ifstream::ate | std::ifstream::binary | std::ifstream::in);
        if (!colorFile.is_open()) {
            throw std::runtime_error(colorFileName);
        }
        auto depthFile = std::ifstream(depthFilePath,
            std::ifstream::ate | std::ifstream::binary | std::ifstream::in);
        if (!depthFile.is_open()) {
            throw std::runtime_error(depthFileName);
        }

        auto [colorFormat, colorHalfBytesPerPixel] = vkColorFormatFromParams(params, internalFormat);
        auto [depthFormat, depthBytesPerPixel] = vkDepthFormatFromParams(params);

        size_t pixelCount = params.getSize().width * params.getSize().height;
        size_t colorSize = (pixelCount * colorHalfBytesPerPixel) / 2;
        size_t depthSize = pixelCount * depthBytesPerPixel;

        // Depth files are stored as YUV 4:2:0 unless they are YUV400; only the luma
        // plane is read per frame, so the per-frame stride in the file is 1.5x the payload.
        size_t depthStride = depthSize * 3 / 2;
        if (config->params_real[i].getDepthColorFormat() == rvs::ColorFormat::YUV400) {
            depthStride = depthSize;
        }
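        // Worked example of the sizes above (illustrative numbers, not from any
        // config): a 1920x1080 view with 8-bit YUV 4:2:0 color
        // (colorHalfBytesPerPixel = 3) and 16-bit depth (depthBytesPerPixel = 2)
        // gives colorSize = 2073600 * 3 / 2 = 3,110,400 bytes and
        // depthSize = 4,147,200 bytes; depthStride is then 6,220,800 bytes,
        // or exactly depthSize when the depth file is YUV400.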
        auto colorFileSize = colorFile.tellg();
        auto depthFileSize = depthFile.tellg();
        auto frameCount = colorFileSize / colorSize;
        if (frameCount != (depthFileSize / depthStride)) {
            throw std::runtime_error("Mismatching depth and color framecount");
        }

        // ... (intrinsics/extrinsics setup elided in this listing)

        // Projection type, selected on params.getProjectionType() (branch conditions
        // elided in this listing):
        HvtProjectionType ptype;
        ptype = HvtProjectionType::HVT_PROJECTION_PERSPECTIVE;
        // ...
        ptype = HvtProjectionType::HVT_PROJECTION_EQUIRECTANGULAR;
        // ...
        throw std::runtime_error("Unknown projection type " + params.getProjectionType());
        // ...

        if (InternalFormatClass::RGB == internalFormat) {
            colorSize = 2 * colorHalfBytesPerPixel * pixelCount * 4;
        }

        readStreams.push_back(ReadStream{
            .colorFile = std::move(colorFile),
            .depthFile = std::move(depthFile),
            .colorFrameStride = colorSize,
            .colorFrameSize = colorSize,
            .depthFrameStride = depthStride,
            .depthFrameSize = depthSize,
            // ...
            .projectionType = ptype,
            .intrinsics = intrinsics,
            .extrinsics = extrinsics,
            // ...
            .colorFormat = colorFormat,
            .depthFormat = depthFormat,
            .framePeriod = framePeriod,
            .frameCount = (int)frameCount,
            .fileNameColor = colorFilePath.string(),
            .fileNameDepth = depthFilePath.string()
            // ...
        });
    }
}
void opencvStreamer::initVkResources() {
    for (ReadStream& stream : readStreams) {
        // One host-visible staging buffer per stream, laid out as [color | depth].
        auto colorSize = stream.colorFrameSize;
        auto depthSize = stream.depthFrameSize;
        auto buffer = device->createBufferUnique(
            vk::BufferCreateInfo(
                // ...
                colorSize + depthSize,
                vk::BufferUsageFlagBits::eTransferSrc,
                vk::SharingMode::eExclusive));

        auto reqs = device->getBufferMemoryRequirements(*buffer);
        auto memIndex = findMemoryType(reqs.memoryTypeBits,
            vk::MemoryPropertyFlagBits::eHostCoherent | vk::MemoryPropertyFlagBits::eHostVisible);
        auto memory = device->allocateMemoryUnique(
            vk::MemoryAllocateInfo(reqs.size, memIndex));
        device->bindBufferMemory(*buffer, *memory, 0);
        auto mappedStagging = device->mapMemory(*memory, 0, colorSize + depthSize);

        stream.stagingBuffer = std::move(buffer);
        stream.stagingMemory = std::move(memory);
        stream.stagingMapping = (char*)mappedStagging;
    }
}
uint32_t opencvStreamer::findMemoryType(uint32_t typeFilter, vk::MemoryPropertyFlags properties)
{
    vk::PhysicalDeviceMemoryProperties memProperties = phyDevice.getMemoryProperties();
    for (uint32_t i = 0; i < memProperties.memoryTypeCount; i++) {
        if ((typeFilter & (1 << i)) &&
            (memProperties.memoryTypes[i].propertyFlags & properties) == properties) {
            return i;
        }
    }
    throw std::runtime_error("failed to find suitable memory type!");
}
void opencvStreamer::enumerateStreamsParameters(uint32_t* streamsCount,
    HvtRGBDStreamParameters* parameters) const
{
    // Report the stream count (the guard allowing a count-only query is elided):
    // ...
    *streamsCount = readStreams.size();
    // ...

    if (*streamsCount != readStreams.size()) {
        throw HvtResult::HVT_ERROR_WRONG_BUFFER_SIZE;
    }
    for (int i = 0; const auto& readStream : readStreams) {
        *parameters = HvtRGBDStreamParameters{
            .colorResolution = {(uint32_t)readStream.resolution.x, (uint32_t)readStream.resolution.y},
            .depthResolution = {(uint32_t)readStream.resolution.x, (uint32_t)readStream.resolution.y},
            // ...
            .nearDepth = readStream.anear,
            .farDepth = readStream.afar,
            // ...
            .colorFormat = (HvtImageFormat)readStream.colorFormat,
            .depthFormat = (HvtImageFormat)readStream.depthFormat,
            // ...
            .slotCount = numSlots,
            .projectionType = readStream.projectionType
            // ...
        };
        snprintf(parameters->name, HVT_MAX_STREAM_NAME_LENGHT, "%s",
            config->InputCameraNames.at(i).c_str());
        // ... (advance parameters and i)
    }
}
// opencvStreamer::importStreamImages: the signature is not shown in this listing;
// it is called from hvtExportStreamImages() below with a const HvtStreamImagesExportInfo&.
    auto memtype = (vk::ExternalMemoryHandleTypeFlagBits)exportInfos.memoryType;
    // Only one of these two checks is compiled in, depending on platform
    // (preprocessor directives elided in this listing):
    if (memtype != vk::ExternalMemoryHandleTypeFlagBits::eOpaqueWin32)
    if (memtype != vk::ExternalMemoryHandleTypeFlagBits::eOpaqueFd)
    {
        throw HvtResult::HVT_ERROR_UNSUPPORTED_MEMORY_TYPE;
    }

    auto isDepth = (bool)exportInfos.depth;
    auto& stream = readStreams.at(exportInfos.streamIndex);
    auto format = isDepth ? stream.depthFormat : stream.colorFormat;
    auto& slots = isDepth ? stream.depthSlots : stream.colorSlots;
    if (/* ... slot count check elided ... */) {
        throw HvtResult::HVT_ERROR_WRONG_BUFFER_SIZE;
    }

    vk::ImageCreateInfo imgCreateInfo(
        // ...
        vk::Extent3D(stream.resolution.x, stream.resolution.y, 1),
        // ...
        vk::SampleCountFlagBits::e1,
        vk::ImageTiling::eOptimal,
        vk::ImageUsageFlagBits::eTransferDst,
        vk::SharingMode::eExclusive,
        // ...
        vk::ImageLayout::eUndefined);

    for (int i = 0; i < numSlots; i++) {
        auto memoryInfos = exportInfos.pImages[i];

        auto imgImportInfo = vk::ExternalMemoryImageCreateInfo(memtype);
        auto createChain = vk::StructureChain(imgCreateInfo, imgImportInfo);

        auto image = device->createImageUnique(createChain.get<vk::ImageCreateInfo>());
        auto reqs = device->getImageMemoryRequirements(*image);
        auto memIndex = findMemoryType(reqs.memoryTypeBits, vk::MemoryPropertyFlagBits::eDeviceLocal);

        // The locally created image must be compatible with the exported allocation.
        assert(reqs.size == memoryInfos.size);
        assert(reqs.alignment == memoryInfos.alignment);

        auto memoryAllocInfo = vk::StructureChain(
            vk::MemoryAllocateInfo(reqs.size, memIndex),
            // One of the two import structs below, selected by platform
            // (preprocessor directives elided in this listing):
            vk::ImportMemoryWin32HandleInfoKHR(memtype, memoryInfos.handle)
            vk::ImportMemoryFdInfoKHR(memtype, memoryInfos.handle)
        );
        auto memory = device->allocateMemoryUnique(memoryAllocInfo.get<vk::MemoryAllocateInfo>());
        device->bindImageMemory(*image, *memory, 0);

        // Transition the imported image to TRANSFER_DST_OPTIMAL. Color images use the
        // three plane aspects of the YUV formats, depth images are single-plane (eColor).
        oneTimeSubmit([&](vk::CommandBuffer& cmd) {
            vk::ImageMemoryBarrier colorMemoryBarrier(
                // ...
                vk::AccessFlagBits::eTransferWrite,
                vk::ImageLayout::eUndefined,
                vk::ImageLayout::eTransferDstOptimal,
                VK_QUEUE_FAMILY_IGNORED,
                VK_QUEUE_FAMILY_IGNORED,
                // ...
                vk::ImageSubresourceRange(
                    vk::ImageAspectFlagBits::ePlane0 | vk::ImageAspectFlagBits::ePlane1 | vk::ImageAspectFlagBits::ePlane2,
                    // ...
                ));
            vk::ImageMemoryBarrier depthMemoryBarrier(
                // ...
                vk::AccessFlagBits::eTransferWrite,
                vk::ImageLayout::eUndefined,
                vk::ImageLayout::eTransferDstOptimal,
                VK_QUEUE_FAMILY_IGNORED,
                VK_QUEUE_FAMILY_IGNORED,
                // ...
                vk::ImageSubresourceRange(
                    vk::ImageAspectFlagBits::eColor,
                    // ...
                ));
            cmd.pipelineBarrier(
                vk::PipelineStageFlagBits::eTransfer,
                vk::PipelineStageFlagBits::eTransfer,
                vk::DependencyFlagBits::eByRegion,
                // ...
                isDepth ? depthMemoryBarrier : colorMemoryBarrier);
        });

        slots.at(i) = ImageSlot{
            .image = std::move(image),
            .memory = std::move(memory)
        };
    }
    (isDepth ? stream.importedDepth : stream.importedColor) = true;
// opencvStreamer::importSemaphore: the signature is not shown in this listing; it is
// called from hvtExportSemaphore() below with a const HvtSemaphoreExportInfo& and
// returns the imported Semaphore wrapper.
    // Only one of these two checks is compiled in, depending on platform
    // (preprocessor directives elided in this listing):
    if ((vk::ExternalSemaphoreHandleTypeFlagBits)exportInfos.type != vk::ExternalSemaphoreHandleTypeFlagBits::eOpaqueWin32) {
    if ((vk::ExternalSemaphoreHandleTypeFlagBits)exportInfos.type != vk::ExternalSemaphoreHandleTypeFlagBits::eOpaqueFd) {
        throw HvtResult::HVT_ERROR_UNSUPPORTED_SEMAPHORE_TYPE;
    }

    auto sem = device->createSemaphoreUnique({});
    // Likewise, only one of the two import calls below is compiled in:
    device->importSemaphoreWin32HandleKHR(
        vk::ImportSemaphoreWin32HandleInfoKHR(
            // ...
            vk::ExternalSemaphoreHandleTypeFlagBits::eOpaqueWin32,
            exportInfos.semaphore));
    device->importSemaphoreFdKHR(
        vk::ImportSemaphoreFdInfoKHR(
            // ...
            vk::ExternalSemaphoreHandleTypeFlagBits::eOpaqueFd,
            exportInfos.semaphore));
    // The new handle is wrapped into a Semaphore object and returned; only this member
    // initializer is visible in the listing:
    //     .sem = std::move(sem)
void opencvStreamer::destroySemaphore(Semaphore* sem) const
{
    // ...
}
void opencvStreamer::startStreaming() {
    // Every stream must have both its color and depth images imported first.
    for (auto& stream : readStreams) {
        if (!stream.importedColor || !stream.importedDepth) {
            throw HvtResult::HVT_ERROR_CALL_ORDER;
        }
    }
    // ...
    streamingThread = std::thread([&] {
        // ... (thread body elided; see streamingLoop() below)
    });
}
// opencvStreamer::acquireStreamsFrames: the signature is not shown in this listing;
// it is called from hvtAcquireStreamsFrames() below with a const HvtAcquireStreamFramesInfo&.
    if (/* ... count check elided ... */) {
        throw HvtResult::HVT_ERROR_WRONG_BUFFER_SIZE;
    }

    swapPendingToReading();
    auto imageIndex = slotReadingIndex;
    // ... (per-stream loop filling the caller's frame infos; only fragments are
    // visible in the listing)
    auto& stream = readStreams.at(i);
    // ...
    desc.intrinsics = stream.intrinsics;
    desc.imageIndex = imageIndex;
    // ...
    std::scoped_lock l(queueMutex);
    // ... (presumably a submission signaling the caller's semaphore; elided)
void opencvStreamer::releaseStreamsFrames(Semaphore* waitSem) {
    std::scoped_lock l(queueMutex);
    // ...
    vk::PipelineStageFlags stage = vk::PipelineStageFlagBits::eTopOfPipe;
    // ...
}
void opencvStreamer::stopStreaming() {
    // ...
    if (streamingThread.joinable()) {
        streamingThread.join();
    }
}
opencvStreamer::~opencvStreamer() {
    // ...
}
void opencvStreamer::streamingLoop() {
    // ...
    auto startTime = Clock::now();
    // ... (loop head and ready-stream counter elided in this listing)
        auto now = Clock::now();
        auto time = now - startTime;
        // ...
        for (auto& stream : readStreams) {
            if (stream.nextFrameReady(time)) {
                // ... (count this stream as ready)
            }
        }
        // Upload only when every stream has a new frame due, so all views advance in lockstep.
        if (readyCount == readStreams.size()) {
            oneTimeSubmit([&](vk::CommandBuffer& cmd) {
                for (int i = 0; auto& stream : readStreams) {
                    if (stream.nextFrameReady(time)) {
                        uploadFrame(cmd, i, stream, stream.frameIndex(time));
                        stream.streamedFrame = stream.frameIndex(time);
                    }
                    // ...
                }
            });
            // ...
            swapStreamingToPending();
        }
        std::this_thread::yield();
    // ...
}
void opencvStreamer::uploadFrame(vk::CommandBuffer cmd, int streamId, ReadStream& stream, int frame) {
    // ...
    auto imageIndex = slotStreamingIndex;
    auto& dstColorSlot = stream.colorSlots.at(imageIndex);
    auto& dstDepthSlot = stream.depthSlots.at(imageIndex);

    // Depth: raw frames are read straight from the file, except 10-bit depth which
    // goes through OpenCV below.
    if (config->params_real.at(streamId).getDepthBitDepth() != 10) {
        // ...
        auto depthFileOffset = frame * stream.depthFrameStride;
        stream.depthFile.seekg(depthFileOffset, std::ios_base::beg);
        // Depth lives after the color data in the staging buffer.
        stream.depthFile.read(stream.stagingMapping + stream.colorFrameSize, stream.depthFrameSize);
        // ...
        if (!stream.depthFile) {
            std::cerr << "Failed to read more than " << stream.depthFile.gcount()
                << " bytes of data" << std::endl;
        }
        // ...
        cmd.copyBufferToImage(
            *stream.stagingBuffer,
            // ...
            vk::ImageLayout::eTransferDstOptimal,
            { vk::BufferImageCopy(
                stream.colorFrameSize,
                // ...
                vk::ImageSubresourceLayers(
                    vk::ImageAspectFlagBits::eColor,
                    // ...
                ),
                // ...
                vk::Extent3D(stream.resolution.x, stream.resolution.y, 1)
            ) });
    } else {
        // 10-bit depth: decode the frame with OpenCV and copy it through the staging memory.
        cv::Mat depth = read_depth(stream.fileNameDepth, frame, config->params_real.at(streamId));
        auto imageIndex = slotStreamingIndex;
        // ...
        VkDeviceSize imageSize = depth.total() * depth.elemSize();
        device->mapMemory(*stream.stagingMemory, stream.colorFrameSize, imageSize, {}, &data);
        memcpy(data, depth.data, imageSize);
        device->unmapMemory(*stream.stagingMemory);
        // ...
        cmd.copyBufferToImage(
            *stream.stagingBuffer,
            // ...
            vk::ImageLayout::eTransferDstOptimal,
            { vk::BufferImageCopy(
                stream.colorFrameSize,
                // ...
                vk::ImageSubresourceLayers(
                    vk::ImageAspectFlagBits::eColor,
                    // ...
                ),
                // ...
                vk::Extent3D(stream.resolution.x, stream.resolution.y, 1)
            ) });
    }
    // Color.
    if (InternalFormatClass::YUV == internalFormat) {
        // Raw planar YUV 4:2:0: read the whole frame into the start of the staging buffer.
        auto colorFileOffset = frame * stream.colorFrameStride;
        // ...
        stream.colorFile.seekg(colorFileOffset, std::ios_base::beg);
        // ...
        stream.colorFile.read(stream.stagingMapping, stream.colorFrameSize);
        // ...
        if (!stream.colorFile) {
            std::cerr << "Failed to read more than " << stream.colorFile.gcount()
                << " bytes of data" << std::endl;
        }
        // ...
        auto mainStride = (stream.colorFrameSize * 2) / 3;  // luma plane size in bytes
        auto secondStride = mainStride / 4;                 // size of each chroma plane
        auto half_res = stream.resolution / 2;
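        // Worked example of the plane layout above (illustrative numbers, not from
        // any config): for an 8-bit 1920x1080 YUV 4:2:0 frame, colorFrameSize =
        // 1920 * 1080 * 3 / 2 = 3,110,400 bytes, so mainStride = 2,073,600 bytes
        // (the luma plane) and secondStride = 518,400 bytes (each chroma plane); the
        // copies below therefore read the staging buffer at offsets 0, mainStride and
        // mainStride + secondStride.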
        // One copyBufferToImage call with one region per plane of the destination image.
        cmd.copyBufferToImage(
            *stream.stagingBuffer,
            // ...
            vk::ImageLayout::eTransferDstOptimal,
            {
                // Luma plane, full resolution, from buffer offset 0:
                // ...
                vk::ImageSubresourceLayers(
                    vk::ImageAspectFlagBits::ePlane0,
                    // ...
                ),
                // ...
                vk::Extent3D(stream.resolution.x, stream.resolution.y, 1),
                // First chroma plane, half resolution, from buffer offset mainStride:
                // ...
                vk::ImageSubresourceLayers(
                    vk::ImageAspectFlagBits::ePlane1,
                    // ...
                ),
                // ...
                vk::Extent3D(half_res.x, half_res.y, 1),
                // Second chroma plane, half resolution:
                // ...
                mainStride + secondStride,
                // ...
                vk::ImageSubresourceLayers(
                    vk::ImageAspectFlagBits::ePlane2,
                    // ...
                ),
                // ...
                vk::Extent3D(half_res.x, half_res.y, 1)
                // ...
            });
    } else {
        // RGB path: decode the frame with OpenCV and copy it through the staging memory.
        // ...
        color = read_color(stream.fileNameColor, frame, config->params_real.at(streamId));
        auto imageIndex = slotStreamingIndex;
        // ...
        VkDeviceSize imageSize = color.total() * color.elemSize();
        device->mapMemory(*stream.stagingMemory, 0, imageSize, {}, &data);
        memcpy(data, color.data, imageSize);
        device->unmapMemory(*stream.stagingMemory);
        // ...
        cmd.copyBufferToImage(
            *stream.stagingBuffer,
            // ...
            vk::ImageLayout::eTransferDstOptimal,
            { vk::BufferImageCopy(
                // ...
                vk::ImageSubresourceLayers(
                    vk::ImageAspectFlagBits::eColor,
                    // ...
                ),
                // ...
                vk::Extent3D(stream.resolution.x, stream.resolution.y, 1)
            ) });
    }
}
void opencvStreamer::swapStreamingToPending() {
    std::scoped_lock l(indicesMutex);
    std::swap(slotPendingIndex, slotStreamingIndex);
    newDataInPending = true;
}

void opencvStreamer::swapPendingToReading() {
    std::scoped_lock l(indicesMutex);
    if (newDataInPending) {
        std::swap(slotPendingIndex, slotReadingIndex);
        newDataInPending = false;
    }
}
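// The three slot indices implement a triple-buffering handoff between the streaming
// thread and the API consumer: streamingLoop() always uploads into
// slotStreamingIndex, swapStreamingToPending() publishes a finished upload into
// slotPendingIndex, and acquireStreamsFrames() calls swapPendingToReading() to move
// the newest published slot into slotReadingIndex before reporting it as imageIndex.
// Because swapPendingToReading() only swaps when newDataInPending is set, the reader
// never observes a half-written slot and the uploader never has to wait for it.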
template<typename Closure>
HvtResult exceptionFirewall(Closure&& clos) {
    try {
        clos();
    }
    catch (HvtResult res) {
        return res;
    }
    catch (const std::exception& e) {
        std::cerr << "Caught exception at C boundary: \"" << e.what() << "\"" << std::endl;
        return HvtResult::HVT_ERROR_UNKNOWN;
    }
    catch (...) {
        return HvtResult::HVT_ERROR_UNKNOWN;
    }
    return HvtResult::HVT_SUCESS;
}
template<typename T>
void checkNonNull(T ptr) {
    if (!ptr) {
        throw HvtResult::HVT_ERROR_INVALID_HANDLE;
    }
}
HVTAPI_ATTR HvtResult HVTAPI_CALL hvtCreateStreamingContext(
    const HvtStreamingContextCreateInfo* createInfo, HvtStreamingContext* outStreamingContext)
{
    return exceptionFirewall([&] {
        checkNonNull(createInfo);
        if (/* ... header version check elided ... */) {
            throw HvtResult::HVT_ERROR_HEADER_VERSION;
        }
        // ... (streaming context construction elided)
        *outStreamingContext = context->to_handle();
    });
}

HVTAPI_ATTR HvtResult HVTAPI_CALL hvtEnumerateStreamsParameters(HvtStreamingContext streamingContext,
    uint32_t* pStreamParameterCount, HvtRGBDStreamParameters* pStreamParameters)
{
    return exceptionFirewall([&] {
        auto context = opencvStreamer::check(streamingContext);
        checkNonNull(pStreamParameterCount);
        context->enumerateStreamsParameters(pStreamParameterCount, pStreamParameters);
    });
}

HVTAPI_ATTR HvtResult HVTAPI_CALL hvtExportStreamImages(HvtStreamingContext streamingContext,
    const HvtStreamImagesExportInfo* exportInfos)
{
    return exceptionFirewall([&] {
        auto context = opencvStreamer::check(streamingContext);
        checkNonNull(exportInfos);
        context->importStreamImages(*exportInfos);
    });
}

HVTAPI_ATTR HvtResult HVTAPI_CALL hvtExportSemaphore(HvtStreamingContext streamingContext,
    const HvtSemaphoreExportInfo* exportInfo, HvtSemaphore* outSemaphore)
{
    return exceptionFirewall([&] {
        auto context = opencvStreamer::check(streamingContext);
        checkNonNull(exportInfo);
        auto sem = context->importSemaphore(*exportInfo);
        *outSemaphore = sem->to_handle();
    });
}

HVTAPI_ATTR HvtResult HVTAPI_CALL hvtDestroySemaphore(HvtStreamingContext streamingContext, HvtSemaphore semaphore)
{
    return exceptionFirewall([&] {
        auto context = opencvStreamer::check(streamingContext);
        context->destroySemaphore(Semaphore::check(semaphore));
    });
}

HVTAPI_ATTR HvtResult HVTAPI_CALL hvtStartStreaming(HvtStreamingContext streamingContext)
{
    return exceptionFirewall([&] {
        auto context = opencvStreamer::check(streamingContext);
        context->startStreaming();
    });
}

HVTAPI_ATTR HvtResult HVTAPI_CALL hvtAcquireStreamsFrames(HvtStreamingContext streamingContext,
    const HvtAcquireStreamFramesInfo* infos)
{
    return exceptionFirewall([&] {
        auto context = opencvStreamer::check(streamingContext);
        context->acquireStreamsFrames(*infos);
    });
}

HVTAPI_ATTR HvtResult HVTAPI_CALL hvtReleaseStreamsFrames(HvtStreamingContext streamingContext, HvtSemaphore waitSemaphore)
{
    return exceptionFirewall([&] {
        auto context = opencvStreamer::check(streamingContext);
        context->releaseStreamsFrames(Semaphore::opt_check(waitSemaphore));
    });
}

HVTAPI_ATTR HvtResult HVTAPI_CALL hvtStopStreaming(HvtStreamingContext streamingContext)
{
    return exceptionFirewall([&] {
        auto context = opencvStreamer::check(streamingContext);
        context->stopStreaming();
    });
}

HVTAPI_ATTR HvtResult HVTAPI_CALL hvtDestroyStreamingContext(HvtStreamingContext streamingContext)
{
    return exceptionFirewall([&] {
        auto context = opencvStreamer::check(streamingContext);
        // ... (context destruction elided)
    });
}
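/*
 * Typical client-side call sequence for the C API above (a hedged sketch for
 * illustration only; the variables createInfo, imagesExportInfo, semaphoreExportInfo,
 * acquireInfo and waitSemaphore, and any struct fields not referenced in this file,
 * are assumptions):
 *
 *   HvtStreamingContext ctx{};
 *   hvtCreateStreamingContext(&createInfo, &ctx);        // the header version is checked here
 *
 *   // Two-call enumeration: query the count first, then fill the array.
 *   uint32_t count = 0;
 *   hvtEnumerateStreamsParameters(ctx, &count, nullptr);
 *   std::vector<HvtRGBDStreamParameters> params(count);
 *   hvtEnumerateStreamsParameters(ctx, &count, params.data());
 *
 *   // Export image slots (color and depth) for every stream, plus semaphores;
 *   // startStreaming() throws HVT_ERROR_CALL_ORDER if any stream is missing an import.
 *   hvtExportStreamImages(ctx, &imagesExportInfo);
 *   HvtSemaphore sem{};
 *   hvtExportSemaphore(ctx, &semaphoreExportInfo, &sem);
 *   hvtStartStreaming(ctx);
 *
 *   // Per rendered frame:
 *   hvtAcquireStreamsFrames(ctx, &acquireInfo);          // fills the per-stream frame infos
 *   // ... render from the acquired image slots ...
 *   hvtReleaseStreamsFrames(ctx, waitSemaphore);
 *
 *   hvtStopStreaming(ctx);
 *   hvtDestroyStreamingContext(ctx);
 */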