HoviTron Video Pipeline
opencvStreamer.cpp
1/* ----------------------
* Copyright 2023 Université Libre de Bruxelles (ULB), Universidad Politécnica de Madrid (UPM), CREAL, Deutsches Zentrum für Luft- und Raumfahrt (DLR)
3
4* Licensed under the Apache License, Version 2.0 (the "License");
5* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at <http://www.apache.org/licenses/LICENSE-2.0>
7
8* Unless required by applicable law or agreed to in writing, software
9* distributed under the License is distributed on an "AS IS" BASIS,
10* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
12* limitations under the License.
13---------------------- */
14
15
16#include "opencvStreamer.h"
17
18#include <array>
19#include <iostream>
20
21#include"opencvReading.h"
22
23using rvs::detail::ColorSpace;
24using rvs::detail::g_color_space;
25
26 VULKAN_HPP_DEFAULT_DISPATCH_LOADER_DYNAMIC_STORAGE;
27
28 // Returns format and half bytes per pixels
29 static std::pair<vk::Format, size_t> vkColorFormatFromParams(const rvs::Parameters& params, InternalFormatClass internalFormat) {
30 if (internalFormat == InternalFormatClass::YUV) {
31 switch (params.getColorBitDepth()) {
32 case 16:
33 return { vk::Format::eG16B16R163Plane420Unorm, 6 };
34 case 10:
35 return { vk::Format::eG10X6B10X6R10X63Plane420Unorm3Pack16, 6 };
36 default:
37 return { vk::Format::eG8B8R83Plane420Unorm, 3 };
38 }
39 }else {
40 if (params.getColorBitDepth() > 8 && params.getColorBitDepth() < 16) {
41 return { vk::Format::eR16G16B16A16Unorm, 6 };
43 }
44 else {
45 return { vk::Format::eR8G8B8A8Unorm, 3 };
46 }
47 }
48 }
49
50 // Returns format and byte per pixels
51 static std::pair<vk::Format, size_t> vkDepthFormatFromParams(const rvs::Parameters& params) {
52 switch (params.getDepthBitDepth()) {
53 case 32:
54 return { vk::Format::eR32Sfloat, 3};
55 case 16:
56 return { vk::Format::eR16Unorm, 2 };
57 case 10:
58 //TODO find a better solution
59 return { vk::Format::eR16Unorm, 2 };
60 default:
61 return { vk::Format::eR8Unorm, 1 };
62 }
63 }
64
65 int opencvStreamer::ReadStream::frameIndex(Clock::duration time) const {
66 return (time / framePeriod) % frameCount;
67 }
68
69 bool opencvStreamer::ReadStream::nextFrameReady(Clock::duration time) const {
70
71 return streamedFrame != frameIndex(time);
72 }
73
// Builds a streamer bound to the GPU identified by `uuid` (the client
// renderer's device UUID). Order matters: the configuration must be parsed
// before Vulkan resources sized from it can be created.
opencvStreamer::opencvStreamer(std::span<const uint8_t, VK_UUID_SIZE> uuid)
{
    initSettings();    // parse HVT_JSON_PATH config, open input files
    initVk(uuid);      // instance + logical device on the matching GPU
    initVkResources(); // host-visible staging buffer per stream
}
80
// Creates the Vulkan instance and logical device used for uploads.
// The physical device is selected by matching `uuid` against each device's
// reported UUID, so the uploads happen on the same GPU as the client
// renderer. Enables the external-memory / external-semaphore extensions
// required to share images and semaphores across API instances.
// Throws std::runtime_error if no device matches the UUID.
void opencvStreamer::initVk(std::span<const uint8_t, VK_UUID_SIZE> uuid) {
    // Bootstrap the default dispatcher from the loader entry point.
    vk::DynamicLoader dl;
    VULKAN_HPP_DEFAULT_DISPATCHER.init(dl.getProcAddress<PFN_vkGetInstanceProcAddr>("vkGetInstanceProcAddr"));

    constexpr auto neededInstanceExts = std::array{
        VK_KHR_EXTERNAL_MEMORY_CAPABILITIES_EXTENSION_NAME,
        VK_KHR_EXTERNAL_SEMAPHORE_CAPABILITIES_EXTENSION_NAME,
    };

    vk::ApplicationInfo ainfos(
        "opencvStreamer", VK_MAKE_VERSION(0, 0, 1), "No Engine", VK_MAKE_VERSION(0, 0, 0), VK_API_VERSION_1_2
    );

    instance = vk::createInstanceUnique(
        vk::InstanceCreateInfo(
            {}, //Flags
            &ainfos,
            {}, //Layers
            neededInstanceExts
        ));

    // Re-init the dispatcher with instance-level function pointers.
    VULKAN_HPP_DEFAULT_DISPATCHER.init(*instance);

    // Platform-specific opaque-handle interop extensions.
    constexpr auto neededDeviceExts = std::array{
#ifdef WIN32
        VK_KHR_EXTERNAL_MEMORY_WIN32_EXTENSION_NAME,
        VK_KHR_EXTERNAL_SEMAPHORE_WIN32_EXTENSION_NAME,
#else
        VK_KHR_EXTERNAL_MEMORY_FD_EXTENSION_NAME,
        VK_KHR_EXTERNAL_SEMAPHORE_FD_EXTENSION_NAME
#endif
    };

    auto phyDevs = instance->enumeratePhysicalDevices();

    // Pick the physical device whose UUID matches the client's device.
    auto it = std::find_if(phyDevs.begin(), phyDevs.end(), [&](const vk::PhysicalDevice& phy) {
        vk::PhysicalDeviceIDProperties idProps;
        vk::PhysicalDeviceProperties2 prop2;
        prop2.pNext = &idProps;
        phy.getProperties2(&prop2);
        return strncmp((const char*)idProps.deviceUUID.data(), (const char*)uuid.data(), VK_UUID_SIZE) == 0;
    });

    if (it == phyDevs.end()) {
        throw std::runtime_error("Could not find physical device corresponding to UUID");
    }
    phyDevice = *it;

    // Two queues from one family: queue for transfer submits, syncQueue for
    // the client-synchronization submits.
    float priority[] = { 1.f,1.0f };

    // NOTE(review): hard-codes family 0 — assumes it supports transfer and
    // exposes at least two queues; TODO select the family properly.
    queueFamilyIndex = 0;
    auto queuesInfos = std::array{ vk::DeviceQueueCreateInfo({}, queueFamilyIndex, 2, priority) };

    device = phyDevice.createDeviceUnique(
        vk::DeviceCreateInfo(
            {}, //Flags
            queuesInfos, //Queues
            {}, //Layers
            neededDeviceExts
        ));

    VULKAN_HPP_DEFAULT_DISPATCHER.init(*device);

    queue = device->getQueue(queueFamilyIndex, 0);
    syncQueue = device->getQueue(queueFamilyIndex, 1);
}
150
// Records commands produced by `func` into a freshly allocated primary
// command buffer, submits it on the transfer queue (additional waits or
// signals can be passed through `submitInfo`) and blocks until the GPU has
// finished. Pool and fence are thread_local so concurrent callers never
// share them; only the queue submit itself is serialized by queueMutex.
template<typename Closure>
void opencvStreamer::oneTimeSubmit(Closure&& func, vk::SubmitInfo submitInfo) {
    static thread_local vk::UniqueCommandPool commandPool;
    static thread_local vk::UniqueFence oneTimeFence;

    // Lazily create the per-thread pool and fence on first use.
    if (!commandPool) {
        commandPool = device->createCommandPoolUnique(
            vk::CommandPoolCreateInfo({}, queueFamilyIndex)); //TODO choose proper queueFamilyIndex
    }

    if (!oneTimeFence) {
        oneTimeFence = device->createFenceUnique(vk::FenceCreateInfo());
    }

    auto commandBuffers = device->allocateCommandBuffersUnique(
        vk::CommandBufferAllocateInfo(
            *commandPool,
            vk::CommandBufferLevel::ePrimary,
            1));
    auto& cmdbuf = *commandBuffers.front();

    // Let the caller record its commands between begin/end.
    cmdbuf.begin(vk::CommandBufferBeginInfo{});
    func(cmdbuf);
    cmdbuf.end();

    submitInfo.setCommandBuffers(cmdbuf);

    device->resetFences(*oneTimeFence);
    {
        // vk::Queue is externally synchronized — guard against submits from
        // the streaming thread and the API threads racing.
        std::scoped_lock l(queueMutex);
        queue.submit(submitInfo, *oneTimeFence);
    }
    // Block until the GPU is done; the wait result is intentionally ignored
    // (infinite timeout, fence was just reset).
    (void)device->waitForFences(*oneTimeFence, VK_TRUE, UINT64_MAX);
}
185
// Loads the RVS configuration referenced by the HVT_JSON_PATH environment
// variable, validates it, opens every color/depth input file and fills
// `readStreams` with one ReadStream per input camera.
// Throws HvtResult::HVT_ERROR_NOT_FOUND when the variable or file is
// missing, std::runtime_error on inconsistent configuration or files.
void opencvStreamer::initSettings() {
    const char* jsonPath = getenv("HVT_JSON_PATH");

    if (!jsonPath) {
        std::cerr << "No HVT_JSON_PATH env variable set, cannot load settings" << std::endl;
        throw HvtResult::HVT_ERROR_NOT_FOUND;
    }

    auto path = std::filesystem::path(jsonPath);
    if (!std::filesystem::exists(path)) {
        std::cerr << "File " << path << " does not exist" << std::endl;
        throw HvtResult::HVT_ERROR_NOT_FOUND;
    }

    // Relative file names inside the config are resolved against the
    // directory containing the JSON file.
    auto dir = std::filesystem::path(path).parent_path();

    config = rvs::Config::loadFromFile(path.filename().string(), dir.string());

    // Every per-view list must have exactly one entry per input camera.
    size_t numViews = config->InputCameraNames.size();
    if (config->params_real.size() != numViews ||
        config->depth_names.size() != numViews ||
        config->texture_names.size() != numViews) {
        throw std::runtime_error("File and settings size mismatch");
    }

    // The global RVS color space decides whether frames are streamed as
    // planar YUV (default) or interleaved RGB.
    if (g_color_space == rvs::detail::ColorSpace::RGB) {
        internalFormat = InternalFormatClass::RGB;
    }

    auto frameRate = 3; //TODO read this from somewhere....
    using namespace std::chrono_literals;
    auto framePeriod = std::chrono::nanoseconds(1s) / frameRate;

    for (size_t i = 0; i < numViews; i++) {
        const auto& params = config->params_real.at(i);
        const auto& colorFileName = config->texture_names.at(i);
        const auto& depthFileName = config->depth_names.at(i);

        std::filesystem::path cp(colorFileName);
        auto colorFilePath = cp.is_absolute() ? cp : dir / cp;

        std::filesystem::path dp(depthFileName);
        auto depthFilePath = dp.is_absolute() ? dp : dir / dp;

        // Open at end (`ate`) so tellg() below directly yields the file size.
        auto colorFile = std::ifstream(colorFilePath, std::ifstream::ate | std::ifstream::binary | std::ifstream::in);
        if (!colorFile.is_open()) {
            throw std::runtime_error(colorFileName); //TODO better error message
        }

        auto depthFile = std::ifstream(depthFilePath, std::ifstream::ate | std::ifstream::binary | std::ifstream::in);
        if (!depthFile.is_open()) {
            throw std::runtime_error(depthFileName);
        }

        auto [colorFormat, colorHalfBytesPerPixel] = vkColorFormatFromParams(params, internalFormat);
        auto [depthFormat, depthBytesPerPixel] = vkDepthFormatFromParams(params);

        size_t pixelCount = params.getSize().width * params.getSize().height;

        // Color size is carried in half bytes per pixel so the fractional
        // 4:2:0 sizes (1.5 bytes/pixel at 8 bit) stay integral.
        size_t colorSize = (pixelCount * colorHalfBytesPerPixel) / 2;
        size_t depthSize = pixelCount * depthBytesPerPixel;

        // Depth files are stored as 4:2:0 (stride = 1.5x the luma plane)
        // unless the config says they are luma-only (YUV400).
        size_t depthStride = depthSize * 3 / 2;
        if (config->params_real[i].getDepthColorFormat() == rvs::ColorFormat::YUV400) {
            depthStride = depthSize;
        }

        // Derive the clip length from the file sizes and check that color
        // and depth contain the same number of frames.
        auto colorFileSize = colorFile.tellg();
        auto depthFileSize = depthFile.tellg();
        auto frameCount = colorFileSize / colorSize;
        if (frameCount != (depthFileSize / depthStride)) {
            throw std::runtime_error("Mismatching depth and color framecount");
        }

        // Translate the RVS camera model into the Hvt API structures.
        HvtIntrinsics intrinsics;
        HvtProjectionType ptype;
        if (params.getProjectionType() == "Perspective") {
            ptype = HvtProjectionType::HVT_PROJECTION_PERSPECTIVE;
            intrinsics.perspective = HvtIntrinsicsPerspective{
                .focalX = params.getFocal()[0],
                .focalY = params.getFocal()[1],
                .principlePointX = params.getPrinciplePoint()[0],
                .principlePointY = params.getPrinciplePoint()[1]
            };
        }
        else if (params.getProjectionType() == "Equirectangular") {
            ptype = HvtProjectionType::HVT_PROJECTION_EQUIRECTANGULAR;
            intrinsics.equirectangular = HvtIntrinsicsEquirectangular{
                .verticalRange = {params.getVerRange()[0], params.getVerRange()[1]},
                .horizontalRange = {params.getHorRange()[0], params.getHorRange()[1]},
            };
        }
        else {
            throw std::runtime_error("Unknown projection type " + params.getProjectionType());
        }

        HvtExtrinsics extrinsics = {
            .position = {params.getPosition()[0], params.getPosition()[1], params.getPosition()[2]},
            .rotation = {params.getRotation()[0], params.getRotation()[1], params.getRotation()[2]}
        };

        if (InternalFormatClass::RGB == internalFormat) {
            // NOTE(review): 2 * halfBytesPerPixel * 4 channels looks like it
            // may over-allocate vs the RGBA formats above — confirm the
            // intended size convention here.
            colorSize = 2*colorHalfBytesPerPixel * pixelCount * 4;
        }

        readStreams.push_back(
            ReadStream{
                .colorFile = std::move(colorFile),
                .depthFile = std::move(depthFile),
                .colorFrameStride = colorSize,
                .colorFrameSize = colorSize,
                .depthFrameStride = depthStride,
                .depthFrameSize = depthSize,
                .resolution = {params.getSize().width, params.getSize().height},
                .projectionType = ptype,
                .intrinsics = intrinsics,
                .extrinsics = extrinsics,
                .anear = params.getDepthRange()[0],
                .afar = params.getDepthRange()[1],
                .colorFormat = colorFormat,
                .depthFormat = depthFormat,
                .framePeriod = framePeriod,
                .frameCount = (int)frameCount,
                .fileNameColor = colorFilePath.string(),
                .fileNameDepth = depthFilePath.string()
            });
    }
}
318
// Creates, for every stream, one host-visible staging buffer large enough
// for a color frame followed by a depth frame, and maps it persistently.
// uploadFrame() writes file data through this mapping and records copies
// from the buffer into the imported device-local images.
void opencvStreamer::initVkResources() {
    for (ReadStream& stream : readStreams) {
        auto colorSize = stream.colorFrameSize;
        auto depthSize = stream.depthFrameSize;
        // Single transfer-source buffer: [color frame][depth frame].
        auto buffer = device->createBufferUnique(
            vk::BufferCreateInfo(
                {}, //Flags,
                colorSize + depthSize,
                vk::BufferUsageFlagBits::eTransferSrc,
                vk::SharingMode::eExclusive)
        );

        auto reqs = device->getBufferMemoryRequirements(*buffer);
        // Host-coherent so CPU writes are visible without explicit flushes.
        auto memIndex = findMemoryType(reqs.memoryTypeBits, vk::MemoryPropertyFlagBits::eHostCoherent | vk::MemoryPropertyFlagBits::eHostVisible);

        auto memory = device->allocateMemoryUnique(
            vk::MemoryAllocateInfo(reqs.size, memIndex));

        device->bindBufferMemory(*buffer, *memory, 0);
        // Persistent mapping kept for the lifetime of the stream — this
        // memory must never be mapped again elsewhere.
        auto mappedStagging = device->mapMemory(*memory, 0, colorSize + depthSize);

        stream.stagingBuffer = std::move(buffer);
        stream.stagingMemory = std::move(memory);
        stream.stagingMapping = (char*)mappedStagging;
    }
}
346
347 uint32_t opencvStreamer::findMemoryType(uint32_t typeFilter, vk::MemoryPropertyFlags properties)
348 {
349 vk::PhysicalDeviceMemoryProperties memProperties = phyDevice.getMemoryProperties();
350 for (uint32_t i = 0; i < memProperties.memoryTypeCount; i++) {
351 if ((typeFilter & (1 << i)) && (memProperties.memoryTypes[i].propertyFlags & properties) == properties) {
352 return i;
353 }
354 }
355 throw std::runtime_error("failed to find suitable memory type!");
356 }
357
358 void opencvStreamer::enumerateStreamsParameters(uint32_t* streamsCount, HvtRGBDStreamParameters* parameters) const {
359 if (!parameters) {
360 *streamsCount = readStreams.size();
361 return;
362 }
363
364 if (*streamsCount != readStreams.size()) {
365 throw HvtResult::HVT_ERROR_WRONG_BUFFER_SIZE;
366 }
367
368 for (int i = 0; const auto & readStream : readStreams) {
369 parameters[i] = HvtRGBDStreamParameters{
370 .colorResolution = {(uint32_t)readStream.resolution.x, (uint32_t)readStream.resolution.y},
371 .depthResolution = {(uint32_t)readStream.resolution.x, (uint32_t)readStream.resolution.y},
372
373 .nearDepth = readStream.anear,
374 .farDepth = readStream.afar,
375
376 .colorFormat = (HvtImageFormat)readStream.colorFormat,
377 .depthFormat = (HvtImageFormat)readStream.depthFormat,
378
379 .slotCount = numSlots,
380 .projectionType = readStream.projectionType
381 };
382 snprintf(parameters->name, HVT_MAX_STREAM_NAME_LENGHT, "%s", config->InputCameraNames.at(i).c_str());
383 i++;
384 }
385 }
386
// Imports the client-exported image slots of one stream (color or depth,
// selected by exportInfos.depth): each external memory handle is wrapped in
// a VkImage/VkDeviceMemory pair and transitioned to TransferDstOptimal so
// uploadFrame() can copy into it. Marks the stream side as imported, which
// startStreaming() requires for every stream.
void opencvStreamer::importStreamImages(const HvtStreamImagesExportInfo& exportInfos) {
    auto memtype = (vk::ExternalMemoryHandleTypeFlagBits)exportInfos.memoryType;

    // Only the platform's native opaque handle type is supported.
#ifdef WIN32
    if (memtype != vk::ExternalMemoryHandleTypeFlagBits::eOpaqueWin32)
#else
    if (memtype != vk::ExternalMemoryHandleTypeFlagBits::eOpaqueFd)
#endif
    {
        throw HvtResult::HVT_ERROR_UNSUPPORTED_MEMORY_TYPE;
    }

    auto isDepth = (bool)exportInfos.depth;
    auto& stream = readStreams.at(exportInfos.streamIndex);
    auto format = isDepth ? stream.depthFormat : stream.colorFormat;
    auto& slots = isDepth ? stream.depthSlots : stream.colorSlots;

    // Exactly one image per slot must be provided.
    if (exportInfos.imagesCount != numSlots) {
        throw HvtResult::HVT_ERROR_WRONG_BUFFER_SIZE;
    }

    vk::ImageCreateInfo imgCreateInfo(
        {}, //Flags,
        vk::ImageType::e2D,
        format,
        vk::Extent3D(stream.resolution.x, stream.resolution.y, 1),
        1, //Mip count,
        1, //Array layers,
        vk::SampleCountFlagBits::e1,
        vk::ImageTiling::eOptimal,
        vk::ImageUsageFlagBits::eTransferDst,
        vk::SharingMode::eExclusive,
        {}, //Queue families
        vk::ImageLayout::eUndefined
    );

    for (int i = 0; i < numSlots; i++) {
        auto memoryInfos = exportInfos.pImages[i];

        // Chain the external-memory info so the image may be bound to
        // imported memory.
        auto imgImportInfo = vk::ExternalMemoryImageCreateInfo(memtype);
        auto createChain = vk::StructureChain(imgCreateInfo, imgImportInfo);

        auto image = device->createImageUnique(createChain.get<vk::ImageCreateInfo>());
        auto reqs = device->getImageMemoryRequirements(*image);

        auto memIndex = findMemoryType(reqs.memoryTypeBits, vk::MemoryPropertyFlagBits::eDeviceLocal);

        // The exporter must have allocated with a compatible layout.
        assert(reqs.size == memoryInfos.size);
        assert(reqs.alignment == memoryInfos.alignment);

        auto memoryAllocInfo = vk::StructureChain(
            vk::MemoryAllocateInfo(reqs.size, memIndex),
#ifdef WIN32
            vk::ImportMemoryWin32HandleInfoKHR(memtype, memoryInfos.handle)
#else
            vk::ImportMemoryFdInfoKHR(memtype, memoryInfos.handle)
#endif
        );

        auto memory = device->allocateMemoryUnique(memoryAllocInfo.get<vk::MemoryAllocateInfo>());

        device->bindImageMemory(*image, *memory, 0);

        // Transition images to their unique layout
        oneTimeSubmit([&](vk::CommandBuffer& cmd) {
            // Barrier covering the three planes of a multi-planar YUV image.
            vk::ImageMemoryBarrier colorMemoryBarrier(
                {},
                vk::AccessFlagBits::eTransferWrite,
                vk::ImageLayout::eUndefined, //Don't care about what was there previously
                vk::ImageLayout::eTransferDstOptimal,
                VK_QUEUE_FAMILY_IGNORED, // No queue ownership transfer
                VK_QUEUE_FAMILY_IGNORED,
                *image,
                vk::ImageSubresourceRange(
                    vk::ImageAspectFlagBits::ePlane0 | vk::ImageAspectFlagBits::ePlane1 | vk::ImageAspectFlagBits::ePlane2,
                    0, 1,
                    0, 1));

            // Barrier for single-plane images (the depth formats).
            vk::ImageMemoryBarrier depthMemoryBarrier(
                {},
                vk::AccessFlagBits::eTransferWrite,
                vk::ImageLayout::eUndefined,
                vk::ImageLayout::eTransferDstOptimal,
                VK_QUEUE_FAMILY_IGNORED,
                VK_QUEUE_FAMILY_IGNORED,
                *image,
                vk::ImageSubresourceRange(
                    vk::ImageAspectFlagBits::eColor,
                    0, 1,
                    0, 1));

            cmd.pipelineBarrier(
                vk::PipelineStageFlagBits::eTransfer,
                vk::PipelineStageFlagBits::eTransfer,
                vk::DependencyFlagBits::eByRegion,
                {},
                {},
                isDepth ? depthMemoryBarrier : colorMemoryBarrier);
        });

        slots.at(i) = ImageSlot{
            .image = std::move(image),
            .memory = std::move(memory)
        };
    }
    // Remember that this side (color or depth) of the stream is ready.
    (isDepth ? stream.importedDepth : stream.importedColor) = true;
}
494
// Imports a client-exported semaphore (opaque FD on Linux, opaque Win32
// handle on Windows) into our Vulkan device. Returns a heap-allocated
// wrapper the caller releases via destroySemaphore().
// Throws HVT_ERROR_UNSUPPORTED_SEMAPHORE_TYPE for any other handle type.
Semaphore* opencvStreamer::importSemaphore(const HvtSemaphoreExportInfo& exportInfos) {
#ifdef WIN32
    if ((vk::ExternalSemaphoreHandleTypeFlagBits)exportInfos.type != vk::ExternalSemaphoreHandleTypeFlagBits::eOpaqueWin32) {
#else
    if ((vk::ExternalSemaphoreHandleTypeFlagBits)exportInfos.type != vk::ExternalSemaphoreHandleTypeFlagBits::eOpaqueFd) {
#endif
        throw HvtResult::HVT_ERROR_UNSUPPORTED_SEMAPHORE_TYPE;
    }

    // Create a fresh semaphore, then replace its payload with the imported one.
    auto sem = device->createSemaphoreUnique({});
#ifdef WIN32
    device->importSemaphoreWin32HandleKHR(
        vk::ImportSemaphoreWin32HandleInfoKHR(
            *sem,
            {},
            vk::ExternalSemaphoreHandleTypeFlagBits::eOpaqueWin32,
            exportInfos.semaphore
        ));
#else
    device->importSemaphoreFdKHR(
        vk::ImportSemaphoreFdInfoKHR(
            *sem,
            {},
            vk::ExternalSemaphoreHandleTypeFlagBits::eOpaqueFd,
            exportInfos.semaphore
        ));
#endif
    return new Semaphore{
        .sem = std::move(sem)
    };
}
526
// Frees a semaphore created by importSemaphore(); the wrapped Vulkan
// semaphore is released by the unique handle's destructor. Passing null is
// a no-op (`delete nullptr` is well defined).
void opencvStreamer::destroySemaphore(Semaphore * sem) const {
    delete sem;
}
530
531 void opencvStreamer::startStreaming() {
532 //assert that we have everything ready for streaming
533 for (auto& stream : readStreams) {
534 if (!stream.importedColor || !stream.importedDepth) {
535 throw HvtResult::HVT_ERROR_CALL_ORDER;
536 }
537 }
538
539 // Start worker
540 running = true;
541 streamingThread = std::thread([&] {
542 streamingLoop();
543 });
544 }
545
// Client-facing acquire: promotes the freshest pending slot to the reading
// position (no-op when nothing new arrived) and reports, per stream, the
// pose/intrinsics and the image index the client should sample. If the
// caller provided a semaphore it is signalled on the sync queue so client
// GPU work can wait on it.
void opencvStreamer::acquireStreamsFrames(const HvtAcquireStreamFramesInfo & infos) {
    if (infos.frameInfoCount != readStreams.size()) {
        throw HvtResult::HVT_ERROR_WRONG_BUFFER_SIZE;
    }

    swapPendingToReading();

    auto imageIndex = slotReadingIndex;

    for (int i = 0; i < infos.frameInfoCount; i++) {
        auto& stream = readStreams.at(i);
        auto& desc = infos.pStreamFrameInfos[i];
        desc.extrinsics = stream.extrinsics;
        desc.intrinsics = stream.intrinsics;
        desc.imageIndex = imageIndex;
    }

    if (infos.signalSemaphore) {
        // Empty submit whose only purpose is to signal the client semaphore.
        auto sem = Semaphore::check(infos.signalSemaphore);
        std::scoped_lock l(queueMutex);
        syncQueue.submit(
            vk::SubmitInfo(
                {},
                {},
                {},
                *sem->sem));
    }
}
574
// Client-facing release: when the client passes a semaphore, queue an empty
// submit that waits on it so subsequent work on the sync queue cannot run
// ahead of the client's GPU reads. A null `waitSem` releases immediately.
void opencvStreamer::releaseStreamsFrames(Semaphore * waitSem) {
    std::scoped_lock l(queueMutex);

    if (waitSem) {
        // Wait-only submit: no commands, no signals.
        vk::PipelineStageFlags stage = vk::PipelineStageFlagBits::eTopOfPipe;
        syncQueue.submit(
            vk::SubmitInfo(
                *waitSem->sem,
                stage,
                {},
                {}));
    }
}
588
589 void opencvStreamer::stopStreaming() {
590 running = false;
591 if (streamingThread.joinable()) {
592 streamingThread.join();
593 }
594 }
595
// Joins the streaming thread before members are destroyed so the worker can
// never touch Vulkan resources that are being torn down.
opencvStreamer::~opencvStreamer() {
    stopStreaming();
}
599
605 void opencvStreamer::streamingLoop() {
606
607 auto startTime = Clock::now();
608
609 while (running) {
610
611 auto now = Clock::now();
612 auto time = now - startTime;
613
614 int readyCount = 0;
615 for (auto& stream : readStreams) {
616 if (stream.nextFrameReady(time)) {
617 readyCount++;
618 }
619 }
620
621 if (readyCount == readStreams.size()) {
622
623 oneTimeSubmit([&](vk::CommandBuffer& cmd) {
624 for (int i = 0; auto & stream : readStreams) {
625 if (stream.nextFrameReady(time)) {
626 uploadFrame(cmd, i, stream, stream.frameIndex(time));
627 stream.streamedFrame = stream.frameIndex(time);
628 }
629 i++;
630 }
631 });
632
633 // Commit
634 swapStreamingToPending();
635 }
636 std::this_thread::yield();
637 }
638 }
639
650 void opencvStreamer::uploadFrame(vk::CommandBuffer cmd, int streamId, ReadStream & stream, int frame) {
651
652 auto imageIndex = slotStreamingIndex;
653
654 auto& dstColorSlot = stream.colorSlots.at(imageIndex);
655 auto& dstDepthSlot = stream.depthSlots.at(imageIndex);
656
657 if (config->params_real.at(streamId).getDepthBitDepth() != 10) {
658
659 auto depthFileOffset = frame * stream.depthFrameStride;
660 stream.depthFile.seekg(depthFileOffset, std::ios_base::beg);
661 stream.depthFile.read(stream.stagingMapping + stream.colorFrameSize, stream.depthFrameSize);
662
663 if (!stream.depthFile) {
664 std::cerr << "Failed to read more than " << stream.depthFile.gcount() << " bytes of data" << std::endl;
665 }
666
667 //Copy to depth
668 cmd.copyBufferToImage(
669 *stream.stagingBuffer,
670 *dstDepthSlot.image,
671 vk::ImageLayout::eTransferDstOptimal,
672 { vk::BufferImageCopy(
673 stream.colorFrameSize,
674 stream.resolution.x,
675 stream.resolution.y,
676 vk::ImageSubresourceLayers(
677 vk::ImageAspectFlagBits::eColor,
678 0, 0, 1
679 ),
680 vk::Offset3D(0,0,0),
681 vk::Extent3D(stream.resolution.x, stream.resolution.y, 1)
682 ) });
683
684 }
685 else {
686 cv::Mat depth = read_depth(stream.fileNameDepth, frame, config->params_real.at(streamId));
687 auto imageIndex = slotStreamingIndex;
688
689 //auto mainStride = (stream.depthFrameSize * 2) / 3;
690 //auto secondStride = mainStride / 4;
691 //auto half_res = stream.resolution / 2;
692
693 void* data;
694 VkDeviceSize imageSize = depth.total() * depth.elemSize();
695 device->mapMemory(*stream.stagingMemory, stream.colorFrameSize, imageSize, {}, &data);
696 memcpy(data, depth.data, imageSize);
697 device->unmapMemory(*stream.stagingMemory);
698
699 //Copy to color //TODO CHECK YUV PLANES
700 cmd.copyBufferToImage(
701 *stream.stagingBuffer,
702 *dstDepthSlot.image,
703 vk::ImageLayout::eTransferDstOptimal, { vk::BufferImageCopy(
704 stream.colorFrameSize,
705 stream.resolution.x,
706 stream.resolution.y,
707 vk::ImageSubresourceLayers(
708 vk::ImageAspectFlagBits::eColor,
709 0, 0, 1
710 ),
711 vk::Offset3D(0,0,0),
712 vk::Extent3D(stream.resolution.x, stream.resolution.y, 1)
713 ) });
714 }
715
716
717 if (InternalFormatClass::YUV == internalFormat) {
718 auto colorFileOffset = frame * stream.colorFrameStride;
719
720
721 stream.colorFile.seekg(colorFileOffset, std::ios_base::beg);
722
723
724 // Those call might potentially block a lot on slow hdds....
725 stream.colorFile.read(stream.stagingMapping, stream.colorFrameSize);
726
727
728 if (!stream.colorFile) {
729 std::cerr << "Failed to read more than " << stream.colorFile.gcount() << " bytes of data" << std::endl;
730 }
731
732 {
733 auto mainStride = (stream.colorFrameSize * 2) / 3;
734 auto secondStride = mainStride / 4;
735 auto half_res = stream.resolution / 2;
736
737 //Copy to color //TODO CHECK YUV PLANES
738 cmd.copyBufferToImage(
739 *stream.stagingBuffer,
740 *dstColorSlot.image,
741 vk::ImageLayout::eTransferDstOptimal,
742 {
743 vk::BufferImageCopy(
744 0,
745 stream.resolution.x,
746 stream.resolution.y,
747 vk::ImageSubresourceLayers(
748 vk::ImageAspectFlagBits::ePlane0,
749 0, 0, 1
750 ),
751 vk::Offset3D(0,0,0),
752 vk::Extent3D(stream.resolution.x, stream.resolution.y, 1)
753 ),
754 vk::BufferImageCopy(
755 mainStride,
756 half_res.x,
757 half_res.y,
758 vk::ImageSubresourceLayers(
759 vk::ImageAspectFlagBits::ePlane1,
760 0, 0, 1
761 ),
762 vk::Offset3D(0,0,0),
763 vk::Extent3D(half_res.x, half_res.y, 1)
764 ),
765 vk::BufferImageCopy(
766 mainStride + secondStride,
767 half_res.x,
768 half_res.y,
769 vk::ImageSubresourceLayers(
770 vk::ImageAspectFlagBits::ePlane2,
771 0, 0, 1
772 ),
773 vk::Offset3D(0,0,0),
774 vk::Extent3D(half_res.x, half_res.y, 1)
775 )
776 });
777 }
778 }
779 else {
780 cv::Mat color;
781 color = read_color(stream.fileNameColor, frame, config->params_real.at(streamId));
782 auto imageIndex = slotStreamingIndex;
783
784 //auto mainStride = (stream.colorFrameSize * 2) / 3;
785 //auto secondStride = mainStride / 4;
786 //auto half_res = stream.resolution / 2;
787
788 void* data;
789 VkDeviceSize imageSize = color.total() * color.elemSize();
790 device->mapMemory(*stream.stagingMemory, 0, imageSize, {}, &data);
791 memcpy(data, color.data, imageSize);
792 device->unmapMemory(*stream.stagingMemory);
793
794 //Copy to color //TODO CHECK YUV PLANES
795 cmd.copyBufferToImage(
796 *stream.stagingBuffer,
797 *dstColorSlot.image,
798 vk::ImageLayout::eTransferDstOptimal, { vk::BufferImageCopy(
799 0,
800 stream.resolution.x,
801 stream.resolution.y,
802 vk::ImageSubresourceLayers(
803 vk::ImageAspectFlagBits::eColor,
804 0, 0, 1
805 ),
806 vk::Offset3D(0,0,0),
807 vk::Extent3D(stream.resolution.x, stream.resolution.y, 1)
808 ) });
809 }
810 }
811
812 void opencvStreamer::swapStreamingToPending() {
813 std::scoped_lock l(indicesMutex);
814 std::swap(slotPendingIndex, slotStreamingIndex);
815 newDataInPending = true;
816 //printf("Swapped %d to pending\n", slotPendingIndex);
817 }
818
819 void opencvStreamer::swapPendingToReading() {
820 std::scoped_lock l(indicesMutex);
821 if (newDataInPending) {
822 std::swap(slotPendingIndex, slotReadingIndex);
823 newDataInPending = false;
824 //printf("Swapped %d to reading\n", slotReadingIndex);
825 }
826 }
827
829
830 template<typename Closure>
831 HvtResult exceptionFirewall(Closure && clos) {
832 try {
833 clos();
834 }
835 catch (HvtResult res) {
836 return res;
837 }
838 catch (const std::exception& e) {
839 std::cerr << "Catched exception at C boundary : \"" << e.what() << "\"" << std::endl;
840 return HvtResult::HVT_ERROR_UNKNOWN;
841 }
842 catch (...) {
843 return HvtResult::HVT_ERROR_UNKNOWN;
844 }
845 return HvtResult::HVT_SUCESS;
846 }
847
848 template<typename T>
849 void checkNonNull(T ptr) {
850 if (!ptr) {
851 throw HvtResult::HVT_ERROR_INVALID_HANDLE;
852 }
853 }
854
856
857 extern "C" {
858
865 HVTAPI_ATTR HvtResult HVTAPI_CALL hvtCreateStreamingContext(const HvtStreamingContextCreateInfo* createInfo, HvtStreamingContext* outStreamingContext) {
866 return exceptionFirewall([&] {
867 checkNonNull(createInfo);
868
869 if (createInfo->headerVersion != HVT_HEADER_VERSION) {
870 throw HvtResult::HVT_ERROR_HEADER_VERSION;
871 }
872
873 auto context = new opencvStreamer(createInfo->graphicsDeviceUUID);
874 *outStreamingContext = context->to_handle();
875 });
876 }
877
// C ABI: two-call enumeration of the per-stream parameters (null
// `pStreamParameters` queries the count); errors are translated by
// exceptionFirewall.
HVTAPI_ATTR HvtResult HVTAPI_CALL hvtEnumerateStreamsParameters(HvtStreamingContext streamingContext, uint32_t* pStreamParameterCount, HvtRGBDStreamParameters* pStreamParameters) {
    return exceptionFirewall([&] {
        auto context = opencvStreamer::check(streamingContext);
        checkNonNull(pStreamParameterCount);
        context->enumerateStreamsParameters(pStreamParameterCount, pStreamParameters);
    });
}
892
// C ABI: hands the client's exported image memory handles to the streamer,
// which imports them as the destination slots for one stream.
HVTAPI_ATTR HvtResult HVTAPI_CALL hvtExportStreamImages(HvtStreamingContext streamingContext, const HvtStreamImagesExportInfo* exportInfos) {
    return exceptionFirewall([&] {
        auto context = opencvStreamer::check(streamingContext);
        checkNonNull(exportInfos);
        context->importStreamImages(*exportInfos);
    });
}
906
914 HVTAPI_ATTR HvtResult HVTAPI_CALL hvtExportSemaphore(HvtStreamingContext streamingContext, const HvtSemaphoreExportInfo* exportInfo, HvtSemaphore* outSemaphore) {
915 return exceptionFirewall([&] {
916 auto context = opencvStreamer::check(streamingContext);
917 checkNonNull(exportInfo);
918 auto sem = context->importSemaphore(*exportInfo);
919 *outSemaphore = sem->to_handle();
920 });
921 }
922
// C ABI: destroys a semaphore previously returned by hvtExportSemaphore.
HVTAPI_ATTR HvtResult HVTAPI_CALL hvtDestroySemaphore(HvtStreamingContext streamingContext, HvtSemaphore semaphore) {
    return exceptionFirewall([&] {
        auto context = opencvStreamer::check(streamingContext);
        context->destroySemaphore(Semaphore::check(semaphore));
    });
}
935
936
// C ABI: starts the background streaming worker; fails with
// HVT_ERROR_CALL_ORDER when image slots were not all imported yet.
HVTAPI_ATTR HvtResult HVTAPI_CALL hvtStartStreaming(HvtStreamingContext streamingContext) {
    return exceptionFirewall([&] {
        auto context = opencvStreamer::check(streamingContext);
        context->startStreaming();
    });
}
948
955 HVTAPI_ATTR HvtResult HVTAPI_CALL hvtAcquireStreamsFrames(HvtStreamingContext streamingContext, const HvtAcquireStreamFramesInfo* infos) {
956 return exceptionFirewall([&] {
957 auto context = opencvStreamer::check(streamingContext);
958 context->acquireStreamsFrames(*infos);
959 });
960 }
961
// C ABI: releases the frames acquired by hvtAcquireStreamsFrames; the
// optional semaphore defers release until the client's GPU reads finish.
HVTAPI_ATTR HvtResult HVTAPI_CALL hvtReleaseStreamsFrames(HvtStreamingContext streamingContext, HvtSemaphore waitSemaphore) {
    return exceptionFirewall([&] {
        auto context = opencvStreamer::check(streamingContext);
        context->releaseStreamsFrames(Semaphore::opt_check(waitSemaphore));
    });
}
977
// C ABI: stops the background streaming worker (idempotent).
HVTAPI_ATTR HvtResult HVTAPI_CALL hvtStopStreaming(HvtStreamingContext streamingContext) {
    return exceptionFirewall([&] {
        auto context = opencvStreamer::check(streamingContext);
        context->stopStreaming();
    });
}
989
// C ABI: destroys the streaming context; the destructor joins the worker
// thread before releasing resources.
HVTAPI_ATTR HvtResult HVTAPI_CALL hvtDestroyStreamingContext(HvtStreamingContext streamingContext) {
    return exceptionFirewall([&] {
        auto context = opencvStreamer::check(streamingContext);
        delete context;
    });
}
1001
1002 }
1003
static Config loadFromFile(std::string const &filename, std::string const &dir)
Definition: Config.cpp:72
int getColorBitDepth() const
Definition: Parameters.cpp:148
std::string const & getProjectionType() const
Definition: Parameters.cpp:91
cv::Size getSize() const
Definition: Parameters.cpp:138
cv::Vec3f getRotation() const
Definition: Parameters.cpp:96
cv::Vec2f getDepthRange() const
Definition: Parameters.cpp:123
cv::Vec2f getPrinciplePoint() const
Definition: Parameters.cpp:191
cv::Vec2f getHorRange() const
Definition: Parameters.cpp:168
cv::Vec2f getVerRange() const
Definition: Parameters.cpp:174
cv::Vec3f getPosition() const
Definition: Parameters.cpp:113
int getDepthBitDepth() const
Definition: Parameters.cpp:153
cv::Vec2f getFocal() const
Definition: Parameters.cpp:185
Parameters for query of the current frames infos.
Intrinsics parameters of an equirectangular projection.
Intrinsics parameters of a perspective projection.
Description of an RGBD stream.
char name[HVT_MAX_STREAM_NAME_LENGHT]
Export info for images of a stream.
Parameters for the creation of the Streaming context.
Union of possible intrinsics types data.