HoviTron Video Pipeline
JsonStreamer.cpp
/* ----------------------
* Copyright 2023 Université Libre de Bruxelles (ULB), Universidad Politécnica de Madrid (UPM), CREAL, Deutsches Zentrum für Luft- und Raumfahrt (DLR)

* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at <http://www.apache.org/licenses/LICENSE-2.0>

* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
---------------------- */

#include "JsonStreamer.h"

#include <array>
#include <iostream>

VULKAN_HPP_DEFAULT_DISPATCH_LOADER_DYNAMIC_STORAGE;

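// Maps the configured color bit depth to a 3-plane YUV 4:2:0 Vulkan format and the
// number of half-bytes per pixel used to size one color frame.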
static std::pair<vk::Format, size_t> vkColorFormatFromParams(const rvs::Parameters& params) {
    switch(params.getColorBitDepth()){
    case 16:
        return {vk::Format::eG16B16R163Plane420Unorm, 6};
    case 10:
        return {vk::Format::eG10X6B10X6R10X63Plane420Unorm3Pack16, 6};
    default:
        return {vk::Format::eG8B8R83Plane420Unorm, 3};
    }
}

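// Maps the configured depth bit depth to a single-plane Vulkan format and its size in
// bytes per pixel.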
static std::pair<vk::Format, size_t> vkDepthFormatFromParams(const rvs::Parameters& params) {
    switch(params.getDepthBitDepth()){
    case 16:
        return {vk::Format::eR16Unorm, 2};
    case 10:
        return {vk::Format::eR10X6UnormPack16, 2};
    default:
        return {vk::Format::eR8Unorm, 1};
    }
}

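// Frame scheduling helpers: frameIndex() returns the looping frame index for a given
// elapsed time, nextFrameReady() tells whether a newer frame than the last streamed one
// is due.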
int JsonStreamer::ReadStream::frameIndex(Clock::duration time) const {
    return (time / framePeriod) % frameCount;
}

bool JsonStreamer::ReadStream::nextFrameReady(Clock::duration time) const {
    return streamedFrame != frameIndex(time);
}

JsonStreamer::JsonStreamer(std::span<const uint8_t, VK_UUID_SIZE> uuid)
{
    initSettings();
    initVk(uuid);
    initVkResources();
}

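// Creates the Vulkan instance and logical device used for streaming, selecting the
// physical device whose UUID matches the one provided by the caller and enabling the
// external memory and external semaphore extensions used to share images and semaphores.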
void JsonStreamer::initVk(std::span<const uint8_t, VK_UUID_SIZE> uuid) {
    vk::DynamicLoader dl;
    VULKAN_HPP_DEFAULT_DISPATCHER.init(dl.getProcAddress<PFN_vkGetInstanceProcAddr>("vkGetInstanceProcAddr"));

    constexpr auto neededInstanceExts = std::array{
        VK_KHR_EXTERNAL_MEMORY_CAPABILITIES_EXTENSION_NAME,
        VK_KHR_EXTERNAL_SEMAPHORE_CAPABILITIES_EXTENSION_NAME,
    };

    vk::ApplicationInfo ainfos(
        "JsonStreamer", VK_MAKE_VERSION(0,0,1), "No Engine", VK_MAKE_VERSION(0,0,0), VK_API_VERSION_1_2
    );

    instance = vk::createInstanceUnique(
        vk::InstanceCreateInfo(
            {}, //Flags
            &ainfos,
            {},
            neededInstanceExts
        ));

    VULKAN_HPP_DEFAULT_DISPATCHER.init(*instance);

    constexpr auto neededDeviceExts = std::array{
#ifdef WIN32
        VK_KHR_EXTERNAL_MEMORY_WIN32_EXTENSION_NAME,
        VK_KHR_EXTERNAL_SEMAPHORE_WIN32_EXTENSION_NAME,
#else
        VK_KHR_EXTERNAL_MEMORY_FD_EXTENSION_NAME,
        VK_KHR_EXTERNAL_SEMAPHORE_FD_EXTENSION_NAME
#endif
    };

    auto phyDevs = instance->enumeratePhysicalDevices();

    auto it = std::find_if(phyDevs.begin(), phyDevs.end(), [&](const vk::PhysicalDevice& phy){
        auto idProps = phy.getProperties2<vk::PhysicalDeviceProperties2, vk::PhysicalDeviceIDProperties>().get<vk::PhysicalDeviceIDProperties>();
        return strncmp((const char*)idProps.deviceUUID.data(), (const char*)uuid.data(), VK_UUID_SIZE) == 0;
    });

    if(it == phyDevs.end()){
        throw std::runtime_error("Could not find physical device corresponding to UUID");
    }
    phyDevice = *it;

    auto props = phyDevice.getProperties();
    std::cout << "Loaded streaming device: " << props.deviceName << std::endl;

    float priority[] = { 1.0f, 1.0f };

    queueFamilyIndex = 0;
    auto queuesInfos = std::array{vk::DeviceQueueCreateInfo({}, queueFamilyIndex, 2, priority)};

    device = phyDevice.createDeviceUnique(
        vk::DeviceCreateInfo(
            {}, //Flags
            queuesInfos, //Queues
            {}, //Layers
            neededDeviceExts
        ));

    VULKAN_HPP_DEFAULT_DISPATCHER.init(*device);

    queue = device->getQueue(queueFamilyIndex, 0);
    syncQueue = device->getQueue(queueFamilyIndex, 1);
}

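// Records a command buffer with the given closure, submits it on the streamer's main
// queue and blocks until the GPU has finished executing it. The command pool and fence
// are thread_local so concurrent callers do not share them.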
template<typename Closure>
void JsonStreamer::oneTimeSubmit(Closure&& func, vk::SubmitInfo submitInfo) {
    static thread_local vk::UniqueCommandPool commandPool;
    static thread_local vk::UniqueFence oneTimeFence;

    if(!commandPool) {
        commandPool = device->createCommandPoolUnique(
            vk::CommandPoolCreateInfo({}, queueFamilyIndex)); //TODO choose proper queueFamilyIndex
    }

    if(!oneTimeFence) {
        oneTimeFence = device->createFenceUnique(vk::FenceCreateInfo());
    }

    auto commandBuffers = device->allocateCommandBuffersUnique(
        vk::CommandBufferAllocateInfo(
            *commandPool,
            vk::CommandBufferLevel::ePrimary,
            1));
    auto& cmdbuf = *commandBuffers.front();

    cmdbuf.begin(vk::CommandBufferBeginInfo{});
    func(cmdbuf);
    cmdbuf.end();

    submitInfo.setCommandBuffers(cmdbuf);

    device->resetFences(*oneTimeFence);
    {
        std::scoped_lock l(queueMutex);
        queue.submit(submitInfo, *oneTimeFence);
    }
    (void)device->waitForFences(*oneTimeFence, VK_TRUE, UINT64_MAX);
}

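// Loads the RVS JSON configuration pointed to by the HVT_JSON_PATH environment variable,
// opens the raw color and depth files of every input view, and fills readStreams with the
// per-view geometry, format and frame-layout information derived from the config.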
void JsonStreamer::initSettings() {
    const char* jsonPath = getenv("HVT_JSON_PATH");

    if(!jsonPath) {
        std::cerr << "No HVT_JSON_PATH env variable set, cannot load settings" << std::endl;
        throw HvtResult::HVT_ERROR_NOT_FOUND;
    }

    auto path = std::filesystem::path(jsonPath);
    if(!std::filesystem::exists(path)) {
        std::cerr << "File " << path << " does not exist" << std::endl;
        throw HvtResult::HVT_ERROR_NOT_FOUND;
    }

    auto dir = std::filesystem::path(path).parent_path();

    config = rvs::Config::loadFromFile(path.filename().string(), dir.string());

    size_t numViews = config->InputCameraNames.size();
    if(config->params_real.size() != numViews ||
       config->depth_names.size() != numViews ||
       config->texture_names.size() != numViews){
        throw std::runtime_error("File and settings size mismatch");
    }

    auto frameRate = 30; //TODO read this from somewhere....
    using namespace std::chrono_literals;
    auto framePeriod = std::chrono::nanoseconds(1s) / frameRate;

    for(size_t i = 0; i < numViews; i++) {
        const auto& params = config->params_real.at(i);
        const auto& colorFileName = config->texture_names.at(i);
        const auto& depthFileName = config->depth_names.at(i);

        std::filesystem::path cp(colorFileName);
        auto colorFilePath = cp.is_absolute() ? cp : dir / cp;

        std::filesystem::path dp(depthFileName);
        auto depthFilePath = dp.is_absolute() ? dp : dir / dp;

        auto colorFile = std::ifstream(colorFilePath, std::ifstream::ate | std::ifstream::binary | std::ifstream::in);
        if(!colorFile.is_open()) {
            throw std::runtime_error("Could not open color file " + colorFileName);
        }

        auto depthFile = std::ifstream(depthFilePath, std::ifstream::ate | std::ifstream::binary | std::ifstream::in);
        if(!depthFile.is_open()) {
            throw std::runtime_error("Could not open depth file " + depthFileName);
        }

        auto [colorFormat, colorHalfBytesPerPixel] = vkColorFormatFromParams(params);
        auto [depthFormat, depthBytesPerPixel] = vkDepthFormatFromParams(params);

        size_t pixelCount = params.getSize().width * params.getSize().height;

        size_t colorSize = (pixelCount * colorHalfBytesPerPixel) / 2;
        size_t depthSize = pixelCount * depthBytesPerPixel;

        size_t depthStride = depthSize * 3 / 2;
        if (config->params_real[i].getDepthColorFormat() == rvs::ColorFormat::YUV400) {
            depthStride = depthSize;
        }

        auto colorFileSize = colorFile.tellg();
        auto depthFileSize = depthFile.tellg();
        auto frameCount = colorFileSize / colorSize;
        if(frameCount != (depthFileSize / depthStride)) {
            throw std::runtime_error("Mismatching depth and color frame count");
        }

        HvtIntrinsics intrinsics;
        HvtProjectionType ptype;
        if(params.getProjectionType() == "Perspective"){
            ptype = HvtProjectionType::HVT_PROJECTION_PERSPECTIVE;
            intrinsics.perspective = HvtIntrinsicsPerspective{
                .focalX = params.getFocal()[0],
                .focalY = params.getFocal()[1],
                .principlePointX = params.getPrinciplePoint()[0],
                .principlePointY = params.getPrinciplePoint()[1]
            };
        } else if(params.getProjectionType() == "Equirectangular") {
            ptype = HvtProjectionType::HVT_PROJECTION_EQUIRECTANGULAR;
            intrinsics.equirectangular = HvtIntrinsicsEquirectangular{
                .verticalRange = {params.getVerRange()[0], params.getVerRange()[1]},
                .horizontalRange = {params.getHorRange()[0], params.getHorRange()[1]},
            };
        } else {
            throw std::runtime_error("Unknown projection type " + params.getProjectionType());
        }

        HvtExtrinsics extrinsics = {
            .position = {params.getPosition()[0], params.getPosition()[1], params.getPosition()[2]},
            .rotation = {params.getRotation()[0], params.getRotation()[1], params.getRotation()[2]}
        };

        readStreams.push_back(
            ReadStream{
                .colorFile = std::move(colorFile),
                .depthFile = std::move(depthFile),
                .colorFrameStride = colorSize,
                .colorFrameSize = colorSize,
                .depthFrameStride = depthStride,
                .depthFrameSize = depthSize,
                .resolution = {params.getSize().width, params.getSize().height},
                .projectionType = ptype,
                .intrinsics = intrinsics,
                .extrinsics = extrinsics,
                .anear = params.getDepthRange()[0],
                .afar = params.getDepthRange()[1],
                .colorFormat = colorFormat,
                .depthFormat = depthFormat,
                .framePeriod = framePeriod,
                .frameCount = (int)frameCount
            });
    }
}

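// Allocates one host-visible, host-coherent staging buffer per stream, large enough to
// hold one color frame followed by one depth frame, and keeps it persistently mapped.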
void JsonStreamer::initVkResources() {
    for(ReadStream& stream : readStreams) {
        auto colorSize = stream.colorFrameSize;
        auto depthSize = stream.depthFrameSize;
        auto buffer = device->createBufferUnique(
            vk::BufferCreateInfo(
                {}, //Flags
                colorSize+depthSize,
                vk::BufferUsageFlagBits::eTransferSrc,
                vk::SharingMode::eExclusive)
            );

        auto reqs = device->getBufferMemoryRequirements(*buffer);
        auto memIndex = findMemoryType(reqs.memoryTypeBits, vk::MemoryPropertyFlagBits::eHostCoherent | vk::MemoryPropertyFlagBits::eHostVisible);

        auto memory = device->allocateMemoryUnique(
            vk::MemoryAllocateInfo(reqs.size, memIndex));

        device->bindBufferMemory(*buffer, *memory, 0);
        auto mappedStaging = device->mapMemory(*memory, 0, colorSize+depthSize);

        stream.stagingBuffer = std::move(buffer);
        stream.stagingMemory = std::move(memory);
        stream.stagingMapping = (char*)mappedStaging;
    }
}

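// Returns the index of a memory type that satisfies both the resource's type filter and
// the requested property flags, or throws if none exists.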
uint32_t JsonStreamer::findMemoryType(uint32_t typeFilter, vk::MemoryPropertyFlags properties)
{
    vk::PhysicalDeviceMemoryProperties memProperties = phyDevice.getMemoryProperties();
    for (uint32_t i = 0; i < memProperties.memoryTypeCount; i++) {
        if ((typeFilter & (1 << i)) && (memProperties.memoryTypes[i].propertyFlags & properties) == properties) {
            return i;
        }
    }
    throw std::runtime_error("failed to find suitable memory type!");
}

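// Implements the usual two-call enumeration pattern: with a null parameters pointer only
// the stream count is written, otherwise the caller-provided array is filled with the
// parameters of every stream.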
void JsonStreamer::enumerateStreamsParameters(uint32_t* streamsCount, HvtRGBDStreamParameters* parameters) const {
    if(!parameters) {
        *streamsCount = readStreams.size();
        return;
    }

    if(*streamsCount != readStreams.size()){
        throw HvtResult::HVT_ERROR_WRONG_BUFFER_SIZE;
    }

    for(int i = 0; const auto& readStream : readStreams) {
        parameters[i] = HvtRGBDStreamParameters{
            .colorResolution = {(uint32_t)readStream.resolution.x, (uint32_t)readStream.resolution.y},
            .depthResolution = {(uint32_t)readStream.resolution.x, (uint32_t)readStream.resolution.y},

            .nearDepth = readStream.anear,
            .farDepth = readStream.afar,

            .colorFormat = (HvtImageFormat)readStream.colorFormat,
            .depthFormat = (HvtImageFormat)readStream.depthFormat,

            .slotCount = numSlots,
            .projectionType = readStream.projectionType
        };
        snprintf(parameters[i].name, HVT_MAX_STREAM_NAME_LENGHT, "%s", config->InputCameraNames.at(i).c_str());
        i++;
    }
}

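// Imports the color or depth image slots exported for one stream: wraps each external
// memory handle in a Vulkan image of the stream's format and transitions it to
// TRANSFER_DST layout so uploadFrame() can copy into it.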
void JsonStreamer::importStreamImages(const HvtStreamImagesExportInfo& exportInfos) {
    auto memtype = (vk::ExternalMemoryHandleTypeFlagBits)exportInfos.memoryType;

#ifdef WIN32
    if(memtype != vk::ExternalMemoryHandleTypeFlagBits::eOpaqueWin32)
#else
    if(memtype != vk::ExternalMemoryHandleTypeFlagBits::eOpaqueFd)
#endif
    {
        throw HvtResult::HVT_ERROR_UNSUPPORTED_MEMORY_TYPE;
    }

    auto isDepth = (bool)exportInfos.depth;
    auto& stream = readStreams.at(exportInfos.streamIndex);
    auto format = isDepth ? stream.depthFormat : stream.colorFormat;
    auto& slots = isDepth ? stream.depthSlots : stream.colorSlots;

    if(exportInfos.imagesCount != numSlots) {
        throw HvtResult::HVT_ERROR_WRONG_BUFFER_SIZE;
    }

    vk::ImageCreateInfo imgCreateInfo(
        {}, //Flags
        vk::ImageType::e2D,
        format,
        vk::Extent3D(stream.resolution.x, stream.resolution.y, 1),
        1, //Mip count
        1, //Array layers
        vk::SampleCountFlagBits::e1,
        vk::ImageTiling::eOptimal,
        vk::ImageUsageFlagBits::eTransferDst,
        vk::SharingMode::eExclusive,
        {}, //Queue families
        vk::ImageLayout::eUndefined
    );

    for(int i = 0; i < numSlots; i++) {
        auto memoryInfos = exportInfos.pImages[i];

        auto imgImportInfo = vk::ExternalMemoryImageCreateInfo(memtype);
        auto createChain = vk::StructureChain(imgCreateInfo, imgImportInfo);

        auto image = device->createImageUnique(createChain.get<vk::ImageCreateInfo>());
        auto reqs = device->getImageMemoryRequirements(*image);

        auto memIndex = findMemoryType(reqs.memoryTypeBits, vk::MemoryPropertyFlagBits::eDeviceLocal);

        assert(reqs.size == memoryInfos.size);
        assert(reqs.alignment == memoryInfos.alignment);

        auto memoryAllocInfo = vk::StructureChain(
            vk::MemoryAllocateInfo(reqs.size, memIndex),
#ifdef WIN32
            vk::ImportMemoryWin32HandleInfoKHR(memtype, memoryInfos.handle)
#else
            vk::ImportMemoryFdInfoKHR(memtype, memoryInfos.handle)
#endif
        );

        auto memory = device->allocateMemoryUnique(memoryAllocInfo.get<vk::MemoryAllocateInfo>());

        device->bindImageMemory(*image, *memory, 0);

        // Transition the image from UNDEFINED to TRANSFER_DST_OPTIMAL layout
        oneTimeSubmit([&](vk::CommandBuffer& cmd){
            vk::ImageMemoryBarrier colorMemoryBarrier(
                {},
                vk::AccessFlagBits::eTransferWrite,
                vk::ImageLayout::eUndefined, //Don't care about what was there previously
                vk::ImageLayout::eTransferDstOptimal,
                VK_QUEUE_FAMILY_IGNORED, // No queue ownership transfer
                VK_QUEUE_FAMILY_IGNORED,
                *image,
                vk::ImageSubresourceRange(
                    vk::ImageAspectFlagBits::ePlane0 | vk::ImageAspectFlagBits::ePlane1 | vk::ImageAspectFlagBits::ePlane2,
                    0, 1,
                    0, 1));

            vk::ImageMemoryBarrier depthMemoryBarrier(
                {},
                vk::AccessFlagBits::eTransferWrite,
                vk::ImageLayout::eUndefined,
                vk::ImageLayout::eTransferDstOptimal,
                VK_QUEUE_FAMILY_IGNORED,
                VK_QUEUE_FAMILY_IGNORED,
                *image,
                vk::ImageSubresourceRange(
                    vk::ImageAspectFlagBits::eColor,
                    0, 1,
                    0, 1));

            cmd.pipelineBarrier(
                vk::PipelineStageFlagBits::eTransfer,
                vk::PipelineStageFlagBits::eTransfer,
                vk::DependencyFlagBits::eByRegion,
                {},
                {},
                isDepth ? depthMemoryBarrier : colorMemoryBarrier );
        });

        slots.at(i) = ImageSlot{
            .image = std::move(image),
            .memory = std::move(memory)
        };
    }
    (isDepth ? stream.importedDepth : stream.importedColor) = true;
}

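// Imports an external semaphore handle (opaque fd or Win32 handle) into the streamer's
// Vulkan device and wraps it in a Semaphore object.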
Semaphore* JsonStreamer::importSemaphore(const HvtSemaphoreExportInfo& exportInfos) {
#ifdef WIN32
    if((vk::ExternalSemaphoreHandleTypeFlagBits)exportInfos.type != vk::ExternalSemaphoreHandleTypeFlagBits::eOpaqueWin32){
#else
    if((vk::ExternalSemaphoreHandleTypeFlagBits)exportInfos.type != vk::ExternalSemaphoreHandleTypeFlagBits::eOpaqueFd){
#endif
        throw HvtResult::HVT_ERROR_UNSUPPORTED_SEMAPHORE_TYPE;
    }

    auto sem = device->createSemaphoreUnique({});
#ifdef WIN32
    device->importSemaphoreWin32HandleKHR(
        vk::ImportSemaphoreWin32HandleInfoKHR(
            *sem,
            {},
            vk::ExternalSemaphoreHandleTypeFlagBits::eOpaqueWin32,
            exportInfos.semaphore
        ));
#else
    device->importSemaphoreFdKHR(
        vk::ImportSemaphoreFdInfoKHR(
            *sem,
            {},
            vk::ExternalSemaphoreHandleTypeFlagBits::eOpaqueFd,
            exportInfos.semaphore
        ));
#endif
    return new Semaphore{
        .sem = std::move(sem)
    };
}

void JsonStreamer::destroySemaphore(Semaphore* sem) const {
    delete sem;
}

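// Checks that color and depth images have been imported for every stream, then launches
// the background thread that runs streamingLoop().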
void JsonStreamer::startStreaming() {
    // Assert that we have everything ready for streaming
    for(auto& stream : readStreams) {
        if(!stream.importedColor || !stream.importedDepth) {
            throw HvtResult::HVT_ERROR_CALL_ORDER;
        }
    }

    // Start worker
    running = true;
    streamingThread = std::thread([&]{
        streamingLoop();
    });
}

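// Fills the caller's per-stream frame infos (extrinsics, intrinsics and the slot index to
// read from), making the latest pending slot the reading slot, and optionally submits a
// signal of the given semaphore on the sync queue.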
void JsonStreamer::acquireStreamsFrames(const HvtAcquireStreamFramesInfo& infos) {
    if(infos.frameInfoCount != readStreams.size()) {
        throw HvtResult::HVT_ERROR_WRONG_BUFFER_SIZE;
    }

    swapPendingToReading();

    auto imageIndex = slotReadingIndex;

    for(int i = 0; i < infos.frameInfoCount; i++) {
        auto& stream = readStreams.at(i);
        auto& desc = infos.pStreamFrameInfos[i];
        desc.extrinsics = stream.extrinsics;
        desc.intrinsics = stream.intrinsics;
        desc.imageIndex = imageIndex;
    }

    if(infos.signalSemaphore){
        auto sem = Semaphore::check(infos.signalSemaphore);
        std::scoped_lock l(queueMutex);
        syncQueue.submit(
            vk::SubmitInfo(
                {},
                {},
                {},
                *sem->sem));
    }
}

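// Submits a wait on the caller's semaphore (if any) on the sync queue when the frames of
// the current reading slot are released.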
void JsonStreamer::releaseStreamsFrames(Semaphore* waitSem) {
    std::scoped_lock l(queueMutex);

    if(waitSem) {
        vk::PipelineStageFlags stage = vk::PipelineStageFlagBits::eTopOfPipe;
        syncQueue.submit(
            vk::SubmitInfo(
                *waitSem->sem,
                stage,
                {},
                {}));
    }
}

void JsonStreamer::stopStreaming() {
    running = false;
    if(streamingThread.joinable()) {
        streamingThread.join();
    }
}

JsonStreamer::~JsonStreamer() {
    stopStreaming();
}

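// Worker loop: once every stream has a new frame due according to its frame period, reads
// and uploads those frames in a single one-time submit, then swaps the streaming slot to
// pending so acquireStreamsFrames() can pick it up.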
void JsonStreamer::streamingLoop() {

    auto startTime = Clock::now();

    while(running) {

        auto now = Clock::now();
        auto time = now - startTime;

        int readyCount = 0;
        for(auto& stream : readStreams){
            if(stream.nextFrameReady(time)){
                readyCount++;
            }
        }

        if(readyCount == readStreams.size()) {

            oneTimeSubmit([&](vk::CommandBuffer& cmd){
                for(int i = 0; auto& stream : readStreams){
                    if(stream.nextFrameReady(time)){
                        uploadFrame(cmd, i, stream, stream.frameIndex(time));
                        stream.streamedFrame = stream.frameIndex(time);
                    }
                    i++;
                }
            });

            // Commit
            swapStreamingToPending();
        }
        std::this_thread::yield();
    }
}

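// Reads one color frame and one depth frame from disk into the stream's staging buffer,
// then records copies from the staging buffer into the three YUV planes of the color slot
// and into the depth slot at the index currently being streamed.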
void JsonStreamer::uploadFrame(vk::CommandBuffer cmd, int streamId, ReadStream &stream, int frame) {
    auto colorFileOffset = frame * stream.colorFrameStride;
    auto depthFileOffset = frame * stream.depthFrameStride;

    stream.colorFile.seekg(colorFileOffset, std::ios_base::beg);
    stream.depthFile.seekg(depthFileOffset, std::ios_base::beg);

    // These calls might block for a long time on slow HDDs...
    stream.colorFile.read(stream.stagingMapping, stream.colorFrameSize);
    stream.depthFile.read(stream.stagingMapping+stream.colorFrameSize, stream.depthFrameSize);

    if(!stream.colorFile) {
        std::cerr << "Failed to read more than " << stream.colorFile.gcount() << " bytes of color data" << std::endl;
    }

    if(!stream.depthFile) {
        std::cerr << "Failed to read more than " << stream.depthFile.gcount() << " bytes of depth data" << std::endl;
    }

    auto imageIndex = slotStreamingIndex;

    auto& dstColorSlot = stream.colorSlots.at(imageIndex);
    auto& dstDepthSlot = stream.depthSlots.at(imageIndex);

    {
        auto mainStride = (stream.colorFrameSize*2) / 3;
        auto secondStride = mainStride / 4;
        auto half_res = stream.resolution / 2;

        //Copy to color //TODO CHECK YUV PLANES
        cmd.copyBufferToImage(
            *stream.stagingBuffer,
            *dstColorSlot.image,
            vk::ImageLayout::eTransferDstOptimal,
            {
                vk::BufferImageCopy(
                    0,
                    stream.resolution.x,
                    stream.resolution.y,
                    vk::ImageSubresourceLayers(
                        vk::ImageAspectFlagBits::ePlane0,
                        0, 0, 1
                    ),
                    vk::Offset3D(0,0,0),
                    vk::Extent3D(stream.resolution.x, stream.resolution.y, 1)
                ),
                vk::BufferImageCopy(
                    mainStride,
                    half_res.x,
                    half_res.y,
                    vk::ImageSubresourceLayers(
                        vk::ImageAspectFlagBits::ePlane1,
                        0, 0, 1
                    ),
                    vk::Offset3D(0,0,0),
                    vk::Extent3D(half_res.x, half_res.y, 1)
                ),
                vk::BufferImageCopy(
                    mainStride+secondStride,
                    half_res.x,
                    half_res.y,
                    vk::ImageSubresourceLayers(
                        vk::ImageAspectFlagBits::ePlane2,
                        0, 0, 1
                    ),
                    vk::Offset3D(0,0,0),
                    vk::Extent3D(half_res.x, half_res.y, 1)
                )
            });
    }

    //Copy to depth
    cmd.copyBufferToImage(
        *stream.stagingBuffer,
        *dstDepthSlot.image,
        vk::ImageLayout::eTransferDstOptimal,
        {vk::BufferImageCopy(
            stream.colorFrameSize,
            stream.resolution.x,
            stream.resolution.y,
            vk::ImageSubresourceLayers(
                vk::ImageAspectFlagBits::eColor,
                0, 0, 1
            ),
            vk::Offset3D(0,0,0),
            vk::Extent3D(stream.resolution.x, stream.resolution.y, 1)
        )});
}

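// Slot indices are rotated between three roles: streaming (being written by the worker
// thread), pending (latest complete frame) and reading (currently handed to the caller).
// The two swaps below exchange those roles under the indices mutex.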
void JsonStreamer::swapStreamingToPending() {
    std::scoped_lock l(indicesMutex);
    std::swap(slotPendingIndex, slotStreamingIndex);
    newDataInPending = true;
    //printf("Swapped %d to pending\n", slotPendingIndex);
}

void JsonStreamer::swapPendingToReading() {
    std::scoped_lock l(indicesMutex);
    if(newDataInPending) {
        std::swap(slotPendingIndex, slotReadingIndex);
        newDataInPending = false;
        //printf("Swapped %d to reading\n", slotReadingIndex);
    }
}

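// Translates C++ exceptions into HvtResult codes at the C ABI boundary: thrown HvtResult
// values are returned as-is, anything else becomes HVT_ERROR_UNKNOWN.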
template<typename Closure>
HvtResult exceptionFirewall(Closure&& clos) {
    try {
        clos();
    } catch(HvtResult res) {
        return res;
    } catch(const std::exception& e) {
        std::cerr << "Caught exception at C boundary: \"" << e.what() << "\"" << std::endl;
        return HvtResult::HVT_ERROR_UNKNOWN;
    } catch (...) {
        return HvtResult::HVT_ERROR_UNKNOWN;
    }
    return HvtResult::HVT_SUCESS;
}

template<typename T>
void checkNonNull(T ptr){
    if(!ptr){
        throw HvtResult::HVT_ERROR_INVALID_HANDLE;
    }
}

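/*
 * Expected call order of the C entry points below, as a rough illustrative sketch only.
 * Struct setup, image/semaphore export details and error handling are omitted, and the
 * caller-side names (createInfo, acquireInfo, waitSemaphore, rendering) are hypothetical:
 *
 *   HvtStreamingContext ctx;
 *   hvtCreateStreamingContext(&createInfo, &ctx);
 *
 *   uint32_t count = 0;
 *   hvtEnumerateStreamsParameters(ctx, &count, nullptr);
 *   std::vector<HvtRGBDStreamParameters> params(count);
 *   hvtEnumerateStreamsParameters(ctx, &count, params.data());
 *
 *   // hvtExportStreamImages() for each stream (color and depth), hvtExportSemaphore()
 *   // as needed, then:
 *   hvtStartStreaming(ctx);
 *   while (rendering) {
 *       hvtAcquireStreamsFrames(ctx, &acquireInfo);
 *       // ... render with the acquired slots ...
 *       hvtReleaseStreamsFrames(ctx, waitSemaphore);
 *   }
 *   hvtStopStreaming(ctx);
 *   hvtDestroyStreamingContext(ctx);
 */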
extern "C" {

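// Creates a streaming context backed by a JsonStreamer instance bound to the graphics
// device identified by the UUID in the create info.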
HVTAPI_ATTR HvtResult HVTAPI_CALL hvtCreateStreamingContext(const HvtStreamingContextCreateInfo* createInfo, HvtStreamingContext* outStreamingContext) {
    return exceptionFirewall([&]{
        checkNonNull(createInfo);

        if(createInfo->headerVersion != HVT_HEADER_VERSION) {
            throw HvtResult::HVT_ERROR_HEADER_VERSION;
        }

        auto context = new JsonStreamer(createInfo->graphicsDeviceUUID);
        *outStreamingContext = context->to_handle();
    });
}

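// Two-call enumeration of the available RGBD streams and their parameters.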
HVTAPI_ATTR HvtResult HVTAPI_CALL hvtEnumerateStreamsParameters(HvtStreamingContext streamingContext, uint32_t* pStreamParameterCount, HvtRGBDStreamParameters* pStreamParameters) {
    return exceptionFirewall([&]{
        auto context = JsonStreamer::check(streamingContext);
        checkNonNull(pStreamParameterCount);
        context->enumerateStreamsParameters(pStreamParameterCount, pStreamParameters);
    });
}

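// Hands the exported image slots of one stream over to the streamer, which imports them
// as Vulkan images.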
HVTAPI_ATTR HvtResult HVTAPI_CALL hvtExportStreamImages(HvtStreamingContext streamingContext, const HvtStreamImagesExportInfo* exportInfos) {
    return exceptionFirewall([&]{
        auto context = JsonStreamer::check(streamingContext);
        checkNonNull(exportInfos);
        context->importStreamImages(*exportInfos);
    });
}

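// Imports an externally exported semaphore and returns an opaque handle to it.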
HVTAPI_ATTR HvtResult HVTAPI_CALL hvtExportSemaphore(HvtStreamingContext streamingContext, const HvtSemaphoreExportInfo* exportInfo, HvtSemaphore* outSemaphore) {
    return exceptionFirewall([&]{
        auto context = JsonStreamer::check(streamingContext);
        checkNonNull(exportInfo);
        auto sem = context->importSemaphore(*exportInfo);
        *outSemaphore = sem->to_handle();
    });
}

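// Destroys a semaphore previously imported through hvtExportSemaphore().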
HVTAPI_ATTR HvtResult HVTAPI_CALL hvtDestroySemaphore(HvtStreamingContext streamingContext, HvtSemaphore semaphore) {
    return exceptionFirewall([&]{
        auto context = JsonStreamer::check(streamingContext);
        context->destroySemaphore(Semaphore::check(semaphore));
    });
}

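// Starts the background streaming thread once all stream images have been exported.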
HVTAPI_ATTR HvtResult HVTAPI_CALL hvtStartStreaming(HvtStreamingContext streamingContext) {
    return exceptionFirewall([&]{
        auto context = JsonStreamer::check(streamingContext);
        context->startStreaming();
    });
}

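// Acquires the latest frames of all streams and fills the caller's frame info array.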
HVTAPI_ATTR HvtResult HVTAPI_CALL hvtAcquireStreamsFrames(HvtStreamingContext streamingContext, const HvtAcquireStreamFramesInfo* infos) {
    return exceptionFirewall([&]{
        auto context = JsonStreamer::check(streamingContext);
        context->acquireStreamsFrames(*infos);
    });
}

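// Releases previously acquired frames, optionally waiting on the given semaphore.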
HVTAPI_ATTR HvtResult HVTAPI_CALL hvtReleaseStreamsFrames(HvtStreamingContext streamingContext, HvtSemaphore waitSemaphore) {
    return exceptionFirewall([&]{
        auto context = JsonStreamer::check(streamingContext);
        context->releaseStreamsFrames(Semaphore::opt_check(waitSemaphore));
    });
}

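// Stops the background streaming thread.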
HVTAPI_ATTR HvtResult HVTAPI_CALL hvtStopStreaming(HvtStreamingContext streamingContext) {
    return exceptionFirewall([&]{
        auto context = JsonStreamer::check(streamingContext);
        context->stopStreaming();
    });
}

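// Destroys the streaming context and frees all associated resources.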
HVTAPI_ATTR HvtResult HVTAPI_CALL hvtDestroyStreamingContext(HvtStreamingContext streamingContext) {
    return exceptionFirewall([&]{
        auto context = JsonStreamer::check(streamingContext);
        delete context;
    });
}

}