HoviTron Video Pipeline
raytrixdll-v5/RaytrixStreamer.cpp
1#include "RaytrixStreamer.h"
2
3/* ---------------------------
4
5 Static functions for Raytrix callback functions
6
7*/
8
10// Copyright (c) 2019 Raytrix GmbH. All rights reserved.
12//
23void OnImageCaptured(const Rx::CRxImage& xImage, unsigned uCamIdx, Rx::LFR::CImageQueue& camBuffer)
24{
25 try
26 {
27 // Make a copy of the provided image. We don't know if this image is reused by the camera SDK or by another handler
28 Rx::CRxImage xCapturedImage;
29 xCapturedImage.Create(xImage);
30
31 /************************************************************************/
32 /* Write into buffer */
33 /************************************************************************/
34 if (!camBuffer.MoveIn(std::move(xCapturedImage)))
35 {
36 // Buffer is full and overwrite is disabled
37 // This is a lost frame
38 return;
39 }
40 }
41 catch (Rx::CRxException& ex)
42 {
43 printf("Exception occured:\n%s\n\n", ex.ToString(true).ToCString());
44 printf("Press any key to end program...\n");
45 //(void)_getch();
46 }
47}
57static void ImageCaptured(const Rx::CRxImage& xImage, unsigned uCamIdx, void* pvContext)
58{
59 RaytrixStreamer* context = (RaytrixStreamer*)pvContext;
60 OnImageCaptured(xImage, uCamIdx, context->getCamBuffer(uCamIdx));
61}
62/*
63
64 Static functions for Raytrix callback functions
65
66 --------------------------- */
67
68
69 /* ----------------------
70 * Copyright 2023 Université Libre de Bruxelles (ULB), Universidad Politécnica de Madrid (UPM), CREAL, Deutsches Zentrum für Luft- und Raumfahrt (DLR)
71
72 * Licensed under the Apache License, Version 2.0 (the "License");
73 * you may not use this file except in compliance with the License.
74 * You may obtain a copy of the License at <http://www.apache.org/licenses/LICENSE-2.0>
75
76 * Unless required by applicable law or agreed to in writing, software
77 * distributed under the License is distributed on an "AS IS" BASIS,
78 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
79 * See the License for the specific language governing permissions and
80 * limitations under the License.
81 ---------------------- */
82
83/* ----------------------------
84 The Raytrix DLL was written by Armand Losfeld (armand.losfeld@ulb.be or armand-losfeld@hotmail.com) with the help of Martin Lingenauber.
85 This work was largely inspired by the work done for the Kinect DLL (AcqKiRT), written by Jaime Sancho.
86 ---------------------------- */
87
88/* ------------------------------------
89
90 RaytrixStreamer class functions
91
92*/
93
94/*------- Init Raytrix DLL functions */
95
96void RaytrixStreamer::findVulkanGPU(const uint8_t uuid[VK_UUID_SIZE]) {
97 // Find vulkan GPU (synthesis GPU)
98 int cudaDeviceCount;
99 cudaGetDeviceCount(&cudaDeviceCount);
100
101 vulkanGPU = NULL;
102 for (int cudaDevice = 0; cudaDevice < cudaDeviceCount; cudaDevice++)
103 {
104 cudaDeviceProp deviceProp;
105 cudaGetDeviceProperties(&deviceProp, cudaDevice);
106
107 std::cout << "GPU n: " << cudaDevice << std::endl;
108 const unsigned char* p = reinterpret_cast<uchar*>(&deviceProp.uuid);
109 for (size_t i = 0; i < 16; i++) {
110 printf("%02hhx", p[i]);
111 }
112 putchar('\n');
113
114 if (!memcmp(&deviceProp.uuid, uuid, VK_UUID_SIZE))
115 {
116 vulkanGPU = cudaDevice;
117 }
118 }
119 std::cout << std::endl;
120 std::cout << "Vulkan GPU: " << std::endl;
121 for (size_t i = 0; i < 16; ++i) {
122 printf("%02hhx", uuid[i]);
123 }
124 putchar('\n');
125
126 std::cout << std::endl;
127
128
129 gpuErrchk(cudaSetDevice(vulkanGPU));
130 std::cout << "Available GPUs: " << cudaDeviceCount << " Vulkan GPU:" << vulkanGPU << std::endl;
131}
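/* For context: findVulkanGPU() selects the CUDA device whose UUID matches the Vulkan device UUID
   handed over by the renderer. A minimal sketch of how such a UUID is typically obtained on the
   Vulkan side is shown here for orientation; the variable names are illustrative and this snippet
   is not part of this file:

       VkPhysicalDeviceIDProperties idProps{};
       idProps.sType = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_ID_PROPERTIES;
       VkPhysicalDeviceProperties2 props2{};
       props2.sType = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_PROPERTIES_2;
       props2.pNext = &idProps;
       vkGetPhysicalDeviceProperties2(physicalDevice, &props2);
       // idProps.deviceUUID now holds the VK_UUID_SIZE bytes that findVulkanGPU() compares
       // against cudaDeviceProp::uuid for each CUDA device.
*/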
132
133void RaytrixStreamer::initRxCams() {
134 // Init Rx Cameras
135 std::cout << "Initializing Rx cams" << std::endl;
136
137 // Start to find cameras. This is a synchronous call: we wait here until the find process has finished
138 xCamServer.FindCameras();
139
140 // Quit the application if there is no camera
141 if (xCamServer.GetCameraCount() == 0)
142 {
143 printf("No camera found\n");
144 throw HVT_ERROR_NOT_FOUND;
145 }
146
147 printf("Number of cameras available: %d\n", xCamServer.GetCameraCount());
148
149 for (int i = 0; i < infoJSON.numCams; ++i) {
150 // Get the camera from the camera server
151 auto& xCam = xCamServer.GetCamera(infoJSON.uIDs[i]);
152
153 // Open the camera
154 printf("Opening camera %i...", infoJSON.uIDs[i]);
155 xCam.Open();
156 printf("done.\n");
157
158 // Add an image-captured callback. This method is called for every captured camera image; more details are given there
159 xCam.AddImageCapturedCallback(ImageCaptured, (void*) this);
160
161 printf("______________________________________________\n");
162 printf("Camera %d:\n", infoJSON.uIDs[i]);
163
164 // Write the camera type to the console; you can find it on the label of your camera.
165 // E.g. R26-C-A-U3-A028-RS-A
166 // The type is given by the 3rd entry: R26-C- ->A<- -U3-A028-RS-A
167 // This does not necessarily describe the connection to your current device, but for this example camera it is USB 3.0, indicated by U3
168 printf(">> Type: %s\n", xCam.GetDriverName().ToCString());
169
170 // This is the camera serial printed on your camera label
171 printf(">> ID : %s\n", xCam.GetInternalSerial().ToCString());
172
173 printf(">> Buffersize: %u\n", uBufferSize);
174
175 printf(">> Overwrite : %s\n", bOverwrite ? "Yes" : "No");
176
177 printf("\n\n");
178
179 // Create buffer within the given size and with the given overwrite flag
180 camBuffers[i].Initialize(uBufferSize, bOverwrite);
181
182 if(xCam.IsPropertyAvailable(Rx::Interop::Runtime30::Camera::EProperty::Exposure)) xCam.SetProperty(Rx::Interop::Runtime30::Camera::EProperty::Exposure, infoJSON.exposureCams[i]);
183
184 Rx::CRxArrayString GUIDs;
185 Rx::LFR::CCalibrationManager::GetCalibrationGUIDs(GUIDs);
186 int idxInGUIDs = -1;
187 for (size_t j = 0; infoJSON.specificCal && j < GUIDs.Length(); ++j) {
188 if (GUIDs.GetDataPtr()[j] == infoJSON.GUIDsCalib[i]) {
189 std::cout << "Find metric calibration wanted: " << infoJSON.GUIDsCalib[i].ToCString() << std::endl;
190 idxInGUIDs = j;
191 }
192 }
193
194 if (idxInGUIDs >= 0) {
195 Rx::LFR::CCalibrationManager::LoadCalibration(*(camCalibrations[i]), GUIDs.GetDataPtr()[idxInGUIDs], true);
196 }
197 else {
198 // Load the default calibration of the camera (and load the gray image too)
199 if (!Rx::LFR::CCalibrationManager::HasDatabase(xCam))
200 {
201 std::cerr << "The first camera in your system does not have a database." << std::endl;
202 std::cerr << "Either use the installer for your camera settings to install the calibration on your system or create a calibration manualy via RxLive for example." << std::endl;
203 throw HVT_ERROR_NOT_FOUND;
204 }
205 Rx::LFR::CCalibrationManager::LoadDefaultCalibration(*(camCalibrations[i]), xCam, true);
206 }
207 }
208}
209
210void RaytrixStreamer::initStreamParameters() {
211
212 synthesisSem = new Semaphore();
213 if (synthesisSem == NULL) {
214 std::cerr << "Impossible to allocate data for semaphore ..." << std::endl;
215 throw HVT_ERROR_UNKNOWN;
216 }
217
218 // Init Creal params
219 auto frameRate = 30;
220 using namespace std::chrono_literals;
221 auto framePeriod = std::chrono::nanoseconds(1s) / frameRate;
222
223 //Stream for each cam
224 for (size_t i = 0; i < infoJSON.numCams; ++i)
225 {
226 HvtIntrinsics intrinsics;
227 HvtProjectionType ptype;
228
229 // Always perspective projection
230 ptype = HvtProjectionType::HVT_PROJECTION_PERSPECTIVE;
231 intrinsics.perspective = getRxIntrinsicParams(i);
232
233 HvtExtrinsics extrinsics = getRxExtrinsicParams(i);
234
235 ReadStream RS;
236 RS.projectionType = ptype;
237 RS.width_color = infoJSON.width;
238 RS.height_color = infoJSON.height;
239 RS.width_depth = infoJSON.widthDepth;
240 RS.height_depth = infoJSON.heightDepth;
241 RS.afar = getMaxDepth();
242 RS.anear = getMinDepth();
243 RS.colorFrameSize = infoJSON.width * infoJSON.height * 4; // RGBA
244 RS.colorFrameStride = infoJSON.width * 4;
245 RS.depthFrameSize = infoJSON.widthDepth * infoJSON.heightDepth * sizeof(float); // F32
246 RS.depthFrameStride = infoJSON.widthDepth * sizeof(float);
247 RS.colorFormat = HvtImageFormat::HVT_FORMAT_R8G8B8A8_UNORM; // HVT_FORMAT_G8_B8_R8_3PLANE_420_UNORM
248 RS.depthFormat = HvtImageFormat::HVT_FORMAT_R32_SFLOAT;
249 RS.intrinsics = intrinsics;
250 RS.extrinsics = extrinsics;
251 RS.framePeriod = framePeriod;
252 RS.frameCount = (int) numFrames;
253 readStreams.push_back(RS);
254 }
255}
256
257void RaytrixStreamer::initRxCudaCompute() {
258 std::cout << "Initialize RxCUDACompute instances" << std::endl;
259 xCudaComputes = new Rx::LFR::CCudaCompute[infoJSON.numCams];
260 // Enumerate all CUDA devices at the beginning
261 Rx::LFR::CCuda::EnumerateCudaDevices();
262 Rx::CRxMetaData metaData;
263 double imgDivisor;
264 for (size_t i = 0; i < infoJSON.numCams; ++i) {
265 try {
266 xCudaComputes[i].SetCudaDevice(Rx::LFR::CCuda::GetDevice(vulkanGPU));
267 // Apply the calibration to the compute instance and set the DataType for the processed images
268 xCudaComputes[i].ApplyCalibration(*(camCalibrations[i]), true);
269
270 if (infoJSON.loadFromRawFile) {
271 seqReaders[i].GetMetaData((Rx::CRxMetaData&)metaData);
272 xCudaComputes[i].GetComputeParams().ImportFromMetaData(metaData);
273
274 rayImages[i].GetSize((int&)widthDefault, (int&)heightDefault);
275 }
276 else {
277 xCudaComputes[i].GetComputeParams().ImportParameterFromFile(infoJSON.filenameComputeParameters);
278
279 xCamServer.GetCamera(i).GetProperty(Rx::Interop::Runtime30::Camera::EProperty::Width, (int&)widthDefault);
280 xCamServer.GetCamera(i).GetProperty(Rx::Interop::Runtime30::Camera::EProperty::Height, (int&)heightDefault);
281 }
282
283 xCudaComputes[i].GetComputeParams().SetValue(Rx::LFR::Params::ECudaCompute::PreProc_DataType, (unsigned)Rx::Interop::Runtime28::EDataType::UByte);
284
285 if (infoJSON.forceRGBResolution) {
286 if (widthDefault / infoJSON.width != heightDefault / infoJSON.height) {
287 std::cout << "The focus image divisor should be the same for the rows and the the columns..." << std::endl;
288 }
289 imgDivisor = (double)widthDefault / (double)infoJSON.width;
290 xCudaComputes[i].GetComputeParams().SetValue(Rx::LFR::Params::ECudaCompute::Focus_ImageDivisor, imgDivisor);
291 }
292 else {
293 xCudaComputes[i].GetComputeParams().GetValue(Rx::LFR::Params::ECudaCompute::Focus_ImageDivisor,(double&) imgDivisor);
294 infoJSON.width = widthDefault/imgDivisor;
295 infoJSON.height = heightDefault/imgDivisor;
296 }
297
298 if (infoJSON.forceDepthResolution) {
299 if (widthDefault / infoJSON.widthDepth != heightDefault / infoJSON.heightDepth) {
300 std::cout << "The depth image divisor should be the same for the rows and the the columns..." << std::endl;
301 }
302 imgDivisor = (double)widthDefault / (double)infoJSON.widthDepth;
303 xCudaComputes[i].GetComputeParams().SetValue(Rx::LFR::Params::ECudaCompute::Depth_ImageDivisor, imgDivisor);
304 }
305 else {
306 xCudaComputes[i].GetComputeParams().GetValue(Rx::LFR::Params::ECudaCompute::Depth_ImageDivisor, (double&)imgDivisor);
307 infoJSON.widthDepth = widthDefault / imgDivisor;
308 infoJSON.heightDepth = heightDefault / imgDivisor;
309 }
310 }
311 catch (Rx::CRxException e) {
312 std::cerr << "Failed to initialized cuda compute... Check Raytrix error code: " << e.ToString().ToCString() << std::endl;
313 throw HVT_ERROR_UNKNOWN;
314 }
315 }
316
317}
318
319void RaytrixStreamer::initRxSDK() {
320 // Authenticate the Light Field Runtime
321 printf("Authenticate LFR...\n");
322 try {
323 Rx::LFR::CLightFieldRuntime::Authenticate();
324 }
325 catch (Rx::CRxException e) {
326 std::cerr << "Impossible to authenticate. Check RaytrixSDK error code: " << e.ToString().ToCString() << std::endl;
327 throw HVT_ERROR_UNKNOWN;
328 }
329 if (!Rx::LFR::CLightFieldRuntime::IsFeatureSupported(Rx::Dongle::ERuntimeFeature::SDK)) {
330 std::cerr << "Your license does not allow you to use the SDK properly. Upgrade it or buy another license..." << std::endl;
331 throw HVT_ERROR_INVALID_HANDLE;
332 }
333
334 camCalibrations = (Rx::LFR::CCalibration**)malloc(sizeof(Rx::LFR::CCalibration*) * infoJSON.numCams);
335 if (camCalibrations == NULL) {
336 std::cerr << "Impossible to allocate memory for default calibrations" << std::endl;
337 throw HVT_ERROR_UNKNOWN;
338 }
339
340 if (infoJSON.loadFromRawFile) {
341 rayImages = new Rx::LFR::CRayImage[infoJSON.numCams];
342 seqReaders = new Rx::LFR::CSeqFileReader[infoJSON.numCams];
343 }
344 else {
345 camBuffers = new Rx::LFR::CImageQueue[infoJSON.numCams];
346 capturedImages = new Rx::CRxImage[infoJSON.numCams];
347 for (size_t i = 0; i < infoJSON.numCams; ++i) {
348 camCalibrations[i] = new Rx::LFR::CCalibration();
349 }
350 }
351 imgFormatRGB = new Rx::CRxImageFormat[infoJSON.numCams];
352 imgFormatDepth = new Rx::CRxImageFormat[infoJSON.numCams];
353
354 pitchInCudaRGB = new size_t[infoJSON.numCams];
355 pitchInCudaDepth = new size_t[infoJSON.numCams];
356
357 referencePlaneToCameraPlane = new float[infoJSON.numCams];
358}
359
360void RaytrixStreamer::initArrayCuda() {
361 std::cout << "Init CUDA array" << std::endl;
362 imgRGB = (unsigned short**)malloc(sizeof(unsigned short*) * infoJSON.numCams);
363 imgDepth = (unsigned short**)malloc(sizeof(unsigned short*) * infoJSON.numCams);
364 finalRGB = (unsigned char**)malloc(sizeof(unsigned char*) * infoJSON.numCams);
365 finalDepth = (float**)malloc(sizeof(float*) * infoJSON.numCams);
366 imgFloatDepth = (float**)malloc(sizeof(float*) * infoJSON.numCams);
367 prevFinalDepth = (float**)malloc(sizeof(float*) * infoJSON.numCams);
368 imgUByteRGB = (uchar**)malloc(sizeof(uchar*) * infoJSON.numCams);
369
370 if (imgRGB == NULL || imgDepth == NULL || finalRGB == NULL || finalDepth == NULL) {
371 std::cerr << "Impossible to allocate data for img vectors..." << std::endl;
372 throw HVT_ERROR_UNKNOWN;
373 }
374
375 gpuErrchk(cudaSetDevice(vulkanGPU));
376 cudaStreams = (cudaStream_t*)malloc(sizeof(cudaStream_t) * infoJSON.numCams);
377 cudaEvents = (cudaEvent_t*)malloc(sizeof(cudaEvent_t) * infoJSON.numCams);
378
379 for (size_t i = 0; i < infoJSON.numCams; ++i) {
380 gpuErrchk(cudaMalloc(finalRGB + i, sizeof(uchar) * infoJSON.width * infoJSON.height * 4));
381 gpuErrchk(cudaMalloc(finalDepth + i, sizeof(float) * infoJSON.widthDepth * infoJSON.heightDepth));
382 gpuErrchk(cudaMalloc(prevFinalDepth + i, sizeof(float) * infoJSON.widthDepth * infoJSON.heightDepth));
383
384 gpuErrchk(cudaStreamCreate(&(cudaStreams[i])));
385 gpuErrchk(cudaEventCreateWithFlags(&(cudaEvents[i]), cudaEventDisableTiming));
386 }
387}
388
389void RaytrixStreamer::readInfoFromJSON() {
390 char* jsonPath;
391 size_t len;
392 errno_t err = _dupenv_s(&jsonPath, &len, ENV_NAME);
393 if (err != 0 || jsonPath == NULL) {
394 std::cerr << "No " << ENV_NAME << " environment variable set, cannot load settings" << std::endl;
395 throw HvtResult::HVT_ERROR_NOT_FOUND;
396 }
397
398 // read json file
399 std::cout << "Start reading JSON file..." << std::endl;
400 std::ifstream jsonRead{ jsonPath, std::ifstream::binary };
401 if (!jsonRead.good()) {
402 std::cout << "Impossible to read JSON file, check path... Path given: " << jsonPath << std::endl;
403 throw HVT_ERROR_NOT_FOUND;
404 }
405 nlohmann::json jsonContent;
406 jsonRead >> jsonContent;
407 // Num of cams
408 infoJSON.numCams = jsonContent["numCams"];
409 // Specific RGB res
410 infoJSON.forceRGBResolution = jsonContent["forceUseOfSpecificRBGResolution"];
411 if (infoJSON.forceRGBResolution) {
412 // Size total focus
413 infoJSON.width = jsonContent["width"];
414 infoJSON.height = jsonContent["height"];
415 }
416 // Specific depth res
417 infoJSON.forceDepthResolution = jsonContent["forceUseOfSpecificDepthResolution"];
418 if (infoJSON.forceDepthResolution) {
419 // Size depth
420 infoJSON.widthDepth = jsonContent["width_depth"];
421 infoJSON.heightDepth = jsonContent["height_depth"];
422 }
423 // ID cams
424 infoJSON.uIDs = new unsigned int[infoJSON.numCams];
425 for (size_t i = 0; i < infoJSON.numCams; ++i) {
426 infoJSON.uIDs[i] = jsonContent["idCams"][std::to_string(i).c_str()];
427 }
428 // Compute params
429 std::string tmpStdString = jsonContent["RxComputeParametersFile"];
430 infoJSON.filenameComputeParameters = Rx::CRxString(tmpStdString.c_str());
431 // Sequence file
432 infoJSON.loadFromRawFile = jsonContent["fromRawFile"];
433 if (infoJSON.loadFromRawFile) {
434 // Load seq file path
435 infoJSON.filenameRawFiles = new Rx::CRxString[infoJSON.numCams];
436 for (size_t i = 0; i < infoJSON.numCams; ++i) {
437 tmpStdString = jsonContent["pathRawFile"][std::to_string(i).c_str()];
438 infoJSON.filenameRawFiles[i] = Rx::CRxString(tmpStdString.c_str());
439 }
440 }
441 //Calibration
442 infoJSON.specificCal = jsonContent["specificCalibration"];
443 if (infoJSON.specificCal) {
444 // Load calib GUID
445 infoJSON.GUIDsCalib = new Rx::CRxString[infoJSON.numCams];
446 for (size_t i = 0; i < infoJSON.numCams; ++i) {
447 tmpStdString = jsonContent["GUIDsForCalibration"][std::to_string(i).c_str()];
448 infoJSON.GUIDsCalib[i] = Rx::CRxString(tmpStdString.c_str());
449 }
450 }
451 if (!infoJSON.loadFromRawFile) {
452 //Exposure of cameras
453 infoJSON.exposureCams = new float[infoJSON.numCams];
454 for (size_t i = 0; i < infoJSON.numCams; ++i) {
455 infoJSON.exposureCams[i] = jsonContent["exposureCams"][std::to_string(i).c_str()];
456 }
457 }
458 // Extrinsic params path
459 infoJSON.extrinsicParamsPath = jsonContent["ExtrinsicParamsFile"];
460 std::cout << "Json path read" << std::endl;
461}
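/* For reference, a minimal example of the settings file read above (its path is taken from the
   ENV_NAME environment variable). The keys match the accesses in readInfoFromJSON(); all values,
   including the file names, are illustrative placeholders:

       {
         "numCams": 2,
         "forceUseOfSpecificRBGResolution": true,
         "width": 1920, "height": 1080,
         "forceUseOfSpecificDepthResolution": true,
         "width_depth": 960, "height_depth": 540,
         "idCams": { "0": 0, "1": 1 },
         "RxComputeParametersFile": "computeParams.xml",
         "fromRawFile": false,
         "pathRawFile": { "0": "cam0.rays", "1": "cam1.rays" },
         "specificCalibration": false,
         "GUIDsForCalibration": { "0": "<GUID-0>", "1": "<GUID-1>" },
         "exposureCams": { "0": 8.0, "1": 8.0 },
         "ExtrinsicParamsFile": "extrinsics.json"
       }
*/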
462
463void RaytrixStreamer::initMutex() {
464 loaderMtxs = new std::mutex[infoJSON.numCams];
465 loaderCVs = new std::condition_variable[infoJSON.numCams];
466 canLoadImgs = new bool[infoJSON.numCams];
467 memset(canLoadImgs, true, infoJSON.numCams * sizeof(bool));
468}
469
470void RaytrixStreamer::initRxRayFile() {
471 std::cout << "Read RayFile: " << std::endl;
472 try {
473 for (size_t i = 0; i < infoJSON.numCams; ++i) {
474 std::cout << "Seq " << i << ": " << infoJSON.filenameRawFiles[i].ToCString() << std::endl;
475 seqReaders[i].Open(infoJSON.filenameRawFiles[i]);
476 seqReaders[i].ReadFrame(rayImages[i], true, false);
477
478 Rx::CRxArrayString GUIDs;
479 Rx::LFR::CCalibrationManager::GetCalibrationGUIDs(GUIDs);
480 int idxInGUIDs = -1;
481 for (size_t j = 0; infoJSON.specificCal && j < GUIDs.Length(); ++j) {
482 if (GUIDs.GetDataPtr()[j] == infoJSON.GUIDsCalib[i]) {
483 std::cout << "Find metric calibration wanted: " << infoJSON.GUIDsCalib[i].ToCString() << std::endl;
484 idxInGUIDs = j;
485 }
486 }
487
488 if (idxInGUIDs >= 0) {
489 camCalibrations[i] = new Rx::LFR::CCalibration();
490 Rx::LFR::CCalibrationManager::LoadCalibration(*(camCalibrations[i]), GUIDs.GetDataPtr()[idxInGUIDs], true);
491 }
492 else {
493 camCalibrations[i] = (Rx::LFR::CCalibration*)&(rayImages[i].GetCalibration());
494 }
495 }
496 }
497 catch (Rx::CRxException e) {
498 std::cout << "Impossible to read Rays file... Check error code: " << e.ToString().ToCString() << std::endl;
499 throw HVT_ERROR_UNKNOWN;
500 }
501}
502
503/*---------------------------------*/
504
505/*------- Print fps(functions)*/
506
507void RaytrixStreamer::addMsgToPrint(const char* title, long long int averageTime, float frameNumber)
508{
509 if (printMsg == NULL) {
510 whereToAddMsg = 0;
511 printMsg = (char*)malloc(sizeof(char) * SIZE_MSG);
512 if (printMsg == NULL)
513 return;
514 }
515
516 // sprintf returns the number of characters written (excluding the terminating '\0')
517 int written = sprintf(printMsg + whereToAddMsg, "%s: %f ...", title, frameNumber * 1000.f / averageTime);
518 if (written > 0) whereToAddMsg += written;
519}
520
521void RaytrixStreamer::printFPS() {
522 std::cout << '\r' << printMsg << std::flush;
523}
524
525void RaytrixStreamer::resetPrintMsg() {
526 memset(printMsg, '\0', sizeof(char) * SIZE_MSG);
527 whereToAddMsg = 0;
528}
529/*----------------------------*/
530
531/*--------- Getter*/
532
533float RaytrixStreamer::getMaxDepth() {
534 const float ZFar = 1.3f; // -> !!! MAGIC NUMBER !!! from AcquKirt -> not used here
535 return ZFar;
536}
537
538float RaytrixStreamer::getMinDepth() {
539 const float ZNear = 0.4f; // -> !!! MAGIC NUMBER !!! from AcquKirt -> not used here
540 return ZNear;
541}
542
543HvtIntrinsicsPerspective RaytrixStreamer::getRxIntrinsicParams(size_t idxCam) {
544 Rx::LFR::CParameters<Rx::LFR::Params::ECalib::ID>& paramsCal = camCalibrations[idxCam]->GetParams();
545 double f, sizePixel;
546 paramsCal.GetValue(Rx::LFR::Params::ECalib::MainLensThick_NominalFocalLengthMM, f);
547 paramsCal.GetValue(Rx::LFR::Params::ECalib::Sensor_PhysicalPixelSizeMM, sizePixel);
548
549 HvtIntrinsicsPerspective intrinsicParams;
550 intrinsicParams.focalX = (float) f / (((float)widthDefault / (float)infoJSON.widthDepth) * sizePixel);
551 intrinsicParams.focalY = (float) f / (((float)heightDefault / (float)infoJSON.heightDepth) * sizePixel);
552 intrinsicParams.principlePointX = ((float)infoJSON.width) / 2.0f;
553 intrinsicParams.principlePointY = ((float)infoJSON.height) / 2.0f;
554
555 return intrinsicParams;
556}
557
558HvtExtrinsics RaytrixStreamer::getRxExtrinsicParams(size_t idxCam) {
559 HvtPosition pos;
560 HvtRotation rot;
561 unsigned isExCalib;
562 Rx::LFR::CParameters<Rx::LFR::Params::ECalib::ID>& paramsCal = camCalibrations[idxCam]->GetParams();
563 paramsCal.GetValue(Rx::LFR::Params::ECalib::IsExtrinsicCalibrated, isExCalib);
564 if (isExCalib != 1) {
565 std::cerr << "Camera " << idxCam << " is not extrinsincly calibrated... Reconstruction must be wrong." << std::endl;
566 }
567 Rx::CRxArrayDouble trans;
568 paramsCal.GetValue(Rx::LFR::Params::ECalib::Translation_Global_Sensor, trans);
569 double* dataPtr = (double*)trans.GetPointer();
570 size_t sizeData = trans.Length();
571 bool checkExParams = true;
572 if (sizeData < 1)
573 checkExParams = false;
574
575 std::ifstream jsonRead{ infoJSON.extrinsicParamsPath, std::ifstream::binary };
576 if (!jsonRead.good()) {
577 std::cout << "Impossible to read JSON extrinsic file, check path... Path given: " << infoJSON.extrinsicParamsPath << std::endl;
578 throw HVT_ERROR_NOT_FOUND;
579 }
580 nlohmann::json jsonContent;
581 jsonRead >> jsonContent;
582
583 if (checkExParams) {
584 pos.x = (float)dataPtr[2] + (double) jsonContent["cameras"][idxCam]["Position"][0]; // x is forward/backward
585 pos.y = (float)dataPtr[0] + (double) jsonContent["cameras"][idxCam]["Position"][1]; // y is left/right
586 pos.z = (float)dataPtr[1] + (double) jsonContent["cameras"][idxCam]["Position"][2]; // z is up/down
587 referencePlaneToCameraPlane[idxCam] = pos.x;
588 double data;
589 paramsCal.GetValue(Rx::LFR::Params::ECalib::Rotation_Global_Sensor_Pitch, data);
590 rot.pitch = (float)data + (double) jsonContent["cameras"][idxCam]["Rotation"][0];
591 paramsCal.GetValue(Rx::LFR::Params::ECalib::Rotation_Global_Sensor_Roll, data);
592 rot.roll = (float)data + (double) jsonContent["cameras"][idxCam]["Rotation"][1];
593 paramsCal.GetValue(Rx::LFR::Params::ECalib::Rotation_Global_Sensor_Yaw, data);
594 rot.yaw = (float)data + (double) jsonContent["cameras"][idxCam]["Rotation"][2];
595 }
596 else {
597 pos.x = 0.0f;
598 pos.y = 0.0f;
599 pos.z = 0.0f;
600 referencePlaneToCameraPlane[idxCam] = 1.0f;
601 rot.pitch = 0.0f;
602 rot.roll = 0.0f;
603 rot.yaw = 0.0f;
604 }
605 jsonRead.close();
606
607
608 HvtExtrinsics extrinsics;
609 extrinsics.position = pos;
610 extrinsics.rotation = rot;
611 return extrinsics;
612}
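/* For reference, the extrinsic JSON file read above (infoJSON.extrinsicParamsPath) is expected to
   provide, per camera, a "Position" and a "Rotation" triple that are added to the calibration
   values. A minimal illustrative example (values are placeholders):

       {
         "cameras": [
           { "Position": [0.0, 0.0, 0.0], "Rotation": [0.0, 0.0, 0.0] },
           { "Position": [0.0, 0.1, 0.0], "Rotation": [0.0, 0.0, 0.0] }
         ]
       }
*/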
613
614Rx::LFR::CImageQueue& RaytrixStreamer::getCamBuffer(size_t idxCam)
615{
616 return camBuffers[idxCam];
617}
618/*---------------*/
619
620/*--------------- RaytrixDLL functions for RGBD acquire and refine tasks*/
621
622void RaytrixStreamer::getRGBDFormats() {
623 for (size_t i = 0; i < infoJSON.numCams; ++i) {
624 try{
625 imgFormatRGB[i] = xCudaComputes[i].GetImageFormat(idRGB);
626 }
627 catch (Rx::CRxException e) {
628 std::cerr << "Impossible to get image format... Check error code: "<< e.ToString().ToCString() << std::endl;
629 throw HVT_ERROR_UNKNOWN;
630 }
631 if (imgFormatRGB[i].m_iWidth != infoJSON.width || imgFormatRGB[i].m_iHeight != infoJSON.height) {
632 std::cerr << "RGB image formats do not correspond..." << std::endl;
633 throw HVT_ERROR_UNKNOWN;
634 }
635 try {
636 imgFormatDepth[i] = xCudaComputes[i].GetImageFormat(idDepth);
637 }
638 catch (Rx::CRxException e) {
639 std::cerr << "Impossible to get image format... Check error code: " << e.ToString().ToCString() << std::endl;
640 throw HVT_ERROR_UNKNOWN;
641 }
642 if (imgFormatDepth[i].m_iWidth != infoJSON.widthDepth || imgFormatDepth[i].m_iHeight != infoJSON.heightDepth) {
643 std::cerr << "Depth image formats do not correspond..." << std::endl;
644 throw HVT_ERROR_UNKNOWN;
645 }
646
647 pitchInCudaRGB[i] = xCudaComputes[i].GetDevicePointerPitch(idRGB);
648 pitchInCudaDepth[i] = xCudaComputes[i].GetDevicePointerPitch(idDepth);
649
650 std::cout << "RGB cam/file " << i << " :" << imgFormatRGB[i].ToString().ToCString() << std::endl;
651 std::cout << "Depth cam/file "<< i <<" :" << imgFormatDepth[i].ToString().ToCString() << std::endl;
652 }
653 isFormatLoaded = true;
654}
655
656void RaytrixStreamer::launchAsyncLoader(bool needToWaitThreadToFinish) {
657 std::thread t;
658 if (infoJSON.loadFromRawFile) {
659 t = std::thread(&RaytrixStreamer::asyncLoaderImageFromSeqFiles, this);
660 }
661 else {
662 t = std::thread(&RaytrixStreamer::asyncLoaderImage, this);
663 }
664 if (needToWaitThreadToFinish) t.join();
665 else t.detach();
666}
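// Synchronization between the loader and the compute path: the loader threads wait until imgsLoaded
// is false (the previous frame has been consumed), fill capturedImages / rayImages, upload them to
// the CCudaCompute instances and set imgsLoaded back to true. streamingLoop() waits on the same flag
// through syncCV before calling acquireRGBD(), then clears it again, which lets the next
// launchAsyncLoader() round start while the current frame is refined and uploaded to the renderer.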
667
668void RaytrixStreamer::asyncLoaderImageFromOneSeqFile(size_t idxCam) {
669 try {
670 // Upload the image as the new ray image of all further CUDA computations
671 if (seqReaders[idxCam].GetFrameCount() == seqReaders[idxCam].GetLastFrameIndex() + 1) {
672 seqReaders[idxCam].SetNextFrameIndex(0);
673 }
674 seqReaders[idxCam].ReadFrame(rayImages[idxCam], false);
675
676 xCudaComputes[idxCam].UploadRawImage(rayImages[idxCam]);
677 }
678 catch (Rx::CRxException e) {
679 std::cerr << "Impossible to read or upload ray image to the cuda compute ... Check error code: " << e.ToString().ToCString() << std::endl;
680 throw HVT_ERROR_UNKNOWN;
681 }
682}
683
684void RaytrixStreamer::asyncLoaderImageFromSeqFiles() {
685 bool* tmpSync = &imgsLoaded;
686 std::unique_lock<std::mutex> lckSync(syncMtx);
687 syncCV.wait(lckSync, [tmpSync] {return !(*tmpSync); });
688 std::thread* threads = new std::thread[infoJSON.numCams];
689 for (size_t i = 0; i < infoJSON.numCams; ++i) {
690 threads[i] = std::thread(&RaytrixStreamer::asyncLoaderImageFromOneSeqFile, this, i);
691 }
692 for (size_t i = 0; i < infoJSON.numCams; ++i) {
693 threads[i].join();
694 }
695 imgsLoaded = true;
696 lckSync.unlock();
697 syncCV.notify_all();
698 delete[] threads;
699}
700
701void RaytrixStreamer::asyncLoaderImage() {
702 size_t idxRefCam = 0;
703 bool loadImgSuccess = false;
704
705 bool* tmpSync = &imgsLoaded;
706 std::unique_lock<std::mutex> lckSync(syncMtx);
707 syncCV.wait(lckSync, [tmpSync] {return !(*tmpSync); });
708 while(!loadImgSuccess){
709 for (size_t i = 0; i < infoJSON.numCams; ++i) {
710 std::thread(&RaytrixStreamer::asyncLoaderImageOneCam, this, i).detach();
711 }
712 if (infoJSON.numCams == 1) {
713 bool* tmp = canLoadImgs;
714 std::unique_lock<std::mutex> lck(loaderMtxs[0]);
715 loaderCVs[0].wait(lck, [tmp] {return !(*tmp); });
716 canLoadImgs[0] = true;
717 lck.unlock();
718 loaderCVs[0].notify_all();
719 break;
720 }
721
722 bool* tmpLoaderRefCam = canLoadImgs + idxRefCam;
723 std::unique_lock<std::mutex> lckLoaderRefCam(loaderMtxs[idxRefCam]);
724 loaderCVs[idxRefCam].wait(lckLoaderRefCam, [tmpLoaderRefCam] {return !(*tmpLoaderRefCam); });
725 for (size_t i = 0; i < infoJSON.numCams; ++i) {
726 if (i == idxRefCam) continue;
727
728 bool* tmp = canLoadImgs+i;
729 std::unique_lock<std::mutex> lckLoader(loaderMtxs[i]);
730 loaderCVs[i].wait(lckLoader, [tmp] {return !(*tmp) ; });
731
732 loadImgSuccess = (abs(capturedImages[i].GetTimestamp() - capturedImages[idxRefCam].GetTimestamp()) < MAX_TIME_BETWEEN_CAMS);
733
734 canLoadImgs[i] = true;
735 lckLoader.unlock();
736 loaderCVs[i].notify_all();
737 }
738 canLoadImgs[idxRefCam] = true;
739 lckLoaderRefCam.unlock();
740 loaderCVs[idxRefCam].notify_all();
741 }
742 try{
743 for (size_t i = 0; i < infoJSON.numCams; ++i) {
744 xCudaComputes[i].UploadRawImage(capturedImages[i]);
745 }
746 }
747 catch (Rx::CRxException e) {
748 std::cerr << "Impossible to upload raw image to the cuda compute ... Check error code: " << e.ToString().ToCString() << std::endl;
749 throw HVT_ERROR_UNKNOWN;
750 }
751 imgsLoaded = true;
752 lckSync.unlock();
753 syncCV.notify_all();
754}
755
756void RaytrixStreamer::asyncLoaderImageOneCam(size_t idxCam) {
757 bool* tmp = canLoadImgs + idxCam;
758 std::unique_lock<std::mutex> lck(loaderMtxs[idxCam]);
759 loaderCVs[idxCam].wait(lck, [tmp] {return *tmp; });
760
761 bool imgLoadSucess = false;
762 while (!imgLoadSucess) {
763 if (camCaptureMode == Rx::Interop::Runtime30::Camera::ETriggerMode::Software_SnapShot) {
764 // Trigger the camera
765 auto& cam = xCamServer.GetCamera(idxCam);
766 cam.Trigger();
767 }
768
769 // Wait for the image buffer to be not empty
770 if(!camBuffers[idxCam].WaitForNotEmpty(MAX_TIME_WAIT_BUFFER)) continue;
771
772 // Try to move the captured image out of the buffer
773 imgLoadSucess = camBuffers[idxCam].MoveOut(capturedImages[idxCam]);
774 }
775 canLoadImgs[idxCam] = false;
776 lck.unlock();
777 loaderCVs[idxCam].notify_all();
778}
779
780void RaytrixStreamer::acquireRGBD(){
781 try {
782 for (size_t i = 0; i < infoJSON.numCams; ++i) {
783 if (!xCudaComputes[i].Compute_TotalFocus(idSpaceRGB)) {
784 std::cerr << "Impossible to compute total focus image and depth ... Camera: " << i << std::endl;
785 throw HVT_ERROR_UNKNOWN;
786 }
787 if (!xCudaComputes[i].Compute_Depth3D(idSpaceDepth3D[0], idSpaceDepth3D[1])) {
788 std::cerr << "Impossible to compute depth ... Camera: " << i << std::endl;
789 throw HVT_ERROR_UNKNOWN;
790 }
791 }
792 }
793 catch (Rx::CRxException e) {
794 std::cerr << "Impossible to read or compute... Check error code: " << e.ToString().ToCString() << std::endl;
795 throw HVT_ERROR_UNKNOWN;
796 }
797 if (!isFormatLoaded) {
798 getRGBDFormats();
799 }
800 for (size_t i = 0; i < infoJSON.numCams; ++i) {
801 switch (imgFormatRGB[i].m_eDataType) {
802 case Rx::Interop::Runtime28::EDataType::UByte:
803 imgUByteRGB[i] = (uchar*)xCudaComputes[i].GetImageDevicePointer(idRGB);
804 break;
805 case Rx::Interop::Runtime28::EDataType::UShort:
806 imgRGB[i] = (unsigned short*)xCudaComputes[i].GetImageDevicePointer(idRGB);
807 break;
808 default:
809 std::cerr << "Not known RGBA format... Check format: " << imgFormatRGB[i].ToString().ToCString() << std::endl;
810 throw HVT_ERROR_UNKNOWN;
811 }
812
813 switch (imgFormatDepth[i].m_eDataType) {
814 case Rx::Interop::Runtime28::EDataType::UShort:
815 imgDepth[i] = (unsigned short*)xCudaComputes[i].GetImageDevicePointer(idDepth);
816 break;
817 case Rx::Interop::Runtime28::EDataType::Float:
818 imgFloatDepth[i] = (float*)xCudaComputes[i].GetImageDevicePointer(idDepth);
819 break;
820 default:
821 std::cerr << "Not known depth format... Check format: " << imgFormatDepth[i].ToString().ToCString() << std::endl;
822 throw HVT_ERROR_UNKNOWN;
823 }
824 //gpuErrchk(cudaDeviceSynchronize());
825 }
826}
827
828void RaytrixStreamer::refineRGBD() {
829 for (size_t i = 0; i < infoJSON.numCams; ++i) {
830 switch (imgFormatRGB[i].m_eDataType) {
831 case Rx::Interop::Runtime28::EDataType::UByte:
832 if (pitchInCudaRGB[i] != sizeof(uchar) * 4 * infoJSON.width) {
833 removePitch<uchar>(imgUByteRGB[i], finalRGB[i], infoJSON.width, infoJSON.height, pitchInCudaRGB[i] / (sizeof(uchar) * 4), 4, cudaStreams[i]);
834 }
835 else {
836 gpuErrchk(cudaMemcpyAsync(finalRGB[i], imgUByteRGB[i], sizeof(uchar) * 4 * infoJSON.width * infoJSON.height, cudaMemcpyDeviceToDevice, cudaStreams[i]));
837 }
838 break;
839 case Rx::Interop::Runtime28::EDataType::UShort:
840 uShort2uChar(imgRGB[i], finalRGB[i], infoJSON.width, infoJSON.height, pitchInCudaRGB[i] / (sizeof(USHORT) * 4), 4, cudaStreams[i]);
841 break;
842 default:
843 std::cerr << "Not known RGBA format... Check format: " << imgFormatRGB[i].ToString().ToCString() << std::endl;
844 throw HVT_ERROR_UNKNOWN;
845 }
846
847 switch (imgFormatDepth[i].m_eDataType) {
848 case Rx::Interop::Runtime28::EDataType::UShort:
849 break;
850 case Rx::Interop::Runtime28::EDataType::Float:
851 scaleAddDataArrayUChannel<float>(imgFloatDepth[i], finalDepth[i], infoJSON.widthDepth, infoJSON.heightDepth, pitchInCudaDepth[i] / (sizeof(float) * 4), 4, channelDepth, -1 / 1000.0f, -referencePlaneToCameraPlane[i], cudaStreams[i]);
852 break;
853 default:
854 std::cerr << "Not known depth format... Check format: " << imgFormatDepth[i].ToString().ToCString() << std::endl;
855 throw HVT_ERROR_UNKNOWN;
856 }
857 temporalConsistencyAdjustement<float>(prevFinalDepth[i], finalDepth[i], infoJSON.widthDepth, infoJSON.heightDepth, 5, 0.05, 0.5, cudaStreams[i]);
858 }
859}
860
861/*----------------------------------------------------------------------*/
862
863/*--------- Hovitron API functions*/
864
865void RaytrixStreamer::uploadFrame(size_t streamId, ReadStream& stream)
866{
867 auto imageIndex = slotStreamingIndex;
868
869 auto& dstColorSlot = stream.colorSlots.at(imageIndex);
870 auto& dstDepthSlot = stream.depthSlots.at(imageIndex);
871
872 //gpuErrchk(cudaEventSynchronize(cudaEvents[streamId]));
873 copyColor(dstColorSlot.cuda_ptr_surf,finalRGB[streamId],stream.width_color,stream.height_color, cudaStreams[streamId]);
874 copyDepth(dstDepthSlot.cuda_ptr_surf, finalDepth[streamId], stream.width_depth, stream.height_depth, cudaStreams[streamId]);
875 gpuErrchk(cudaEventRecord(cudaEvents[streamId],cudaStreams[streamId]));
876}
877
878void RaytrixStreamer::streamingLoop() {
879 gpuErrchk(cudaSetDevice(vulkanGPU));
880 for (size_t i = 0; i < infoJSON.numCams; ++i) {
881 if (!infoJSON.loadFromRawFile) {
882 // Start cameras
883 std::cout << "Start camera " << i << std::endl;
884 xCamServer.GetCamera(i).Start(camCaptureMode);
885 }
886 }
887
888 beginAvgStreaming = std::chrono::steady_clock::now();
889 std::chrono::steady_clock::time_point begin;
890
891 launchAsyncLoader(true);
892
893 std::unique_lock<std::mutex> lck(syncMtx, std::defer_lock);
894 bool* cond = &imgsLoaded;
895 while (running) {
896 begin = std::chrono::steady_clock::now();
897
898 lck.lock();
899 syncCV.wait(lck, [cond] {return *cond; });
900
901 acquireRGBD();
902
903 *cond = false;
904 lck.unlock();
905 syncCV.notify_all();
906
907 launchAsyncLoader();
908
909 refineRGBD();
910
911 for (size_t i = 0; i < infoJSON.numCams; ++i) {
912 uploadFrame(i, readStreams[i]);
913
914 readStreams[i].streamedFrame = readStreams[i].frameIndex(begin - beginAvgStreaming);
915 }
916 // Commit
917 swapStreamingToPending();
918 numFrames++;
919
920 endStreaming = std::chrono::steady_clock::now();
921
922 if ((int)numFrames % 100 == 0) {
923 addMsgToPrint("Streaming Loop fps", std::chrono::duration_cast<std::chrono::milliseconds>(endStreaming - beginAvgStreaming).count(), numFrames);
924 printFPS();
925 resetPrintMsg();
926 }
927 }
928 std::this_thread::yield();
929}
930
931void RaytrixStreamer::swapStreamingToPending() {
932 std::lock_guard<std::mutex> l(indicesMutex); // JS
933 std::swap(slotPendingIndex, slotStreamingIndex);
934 newDataInPending = true;
935};
936
937void RaytrixStreamer::swapPendingToReading() {
938 std::lock_guard<std::mutex> l(indicesMutex);
939 if (newDataInPending)
940 {
941 std::swap(slotPendingIndex, slotReadingIndex);
942 newDataInPending = false;
943 // printf("Swapped %d to reading\n", slotReadingIndex);
944 }
945};
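// Note on the slot scheme: three image-slot indices are cycled. The streaming thread writes into
// slotStreamingIndex (see uploadFrame()) and publishes it with swapStreamingToPending(); the
// renderer-facing acquireStreamsFrames() picks up the most recently published slot through
// swapPendingToReading(). Both swaps are guarded by indicesMutex, so the writer and the reader
// never use the same slot index at the same time.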
946/*--------------------------------*/
947
948/*---------- Debug functions*/
949
950template<typename T> void RaytrixStreamer::writeInFileWindow(T* dataCorrect, T* dataDebug, size_t width, size_t height, size_t sizeWindow, int numCam) {
951 if (width / 2 + sizeWindow > width || height / 2 + sizeWindow > height) {
952 std::cerr << "Specify a smaller window size..." << std::endl;
953 throw HVT_ERROR_UNKNOWN;
954 }
955 if (dataCorrect == NULL || dataDebug == NULL) {
956 std::cerr << "Specify allocated array ... " << std::endl;
957 throw HVT_ERROR_UNKNOWN;
958 }
959
960 const char outCorrect[] = "../RaytrixStreamer/dataCorrect.txt";
961 const char outDebug[] = "../RaytrixStreamer/dataDebug.txt";
962
963 fs::path pathCorrect = outCorrect;
964 std::ofstream correctDataStream{ pathCorrect, std::ofstream::out | std::ofstream::trunc };
965 if (!correctDataStream.good()) {
966 std::cerr << "Impossible to open data correct file, check path... Path given: " << pathCorrect << std::endl;
967 throw HVT_ERROR_NOT_FOUND;
968 }
969 fs::path pathDebug = outDebug;
970 std::ofstream debugDataStream{ pathDebug, std::ofstream::out | std::ofstream::trunc };
971 if (!debugDataStream.good()) {
972 std::cerr << "Impossible to open data debug file, check path... Path given: " << pathDebug << std::endl;
973 throw HVT_ERROR_NOT_FOUND;
974 }
975
976 for (size_t i = 0; i < sizeWindow; ++i) {
977 for (size_t j = 0; j < sizeWindow; ++j) {
978 correctDataStream << dataCorrect[width * ((height / 2) + i) + (width / 2) + j] << ",";
979 debugDataStream << dataDebug[width * ((height / 2) + i) + (width / 2) + j] << ",";
980 }
981 correctDataStream << std::endl;
982 debugDataStream << std::endl;
983 }
984
985 correctDataStream.close();
986 debugDataStream.close();
987}
988
989void RaytrixStreamer::debugFloatDepth() {
990 Rx::CRxImage imgTmp;
991 float* imgTmp2 = new float[infoJSON.widthDepth * infoJSON.heightDepth];
992 int width, height;
993 gpuErrchk(cudaDeviceSynchronize());
994 for (size_t i = 0; i < infoJSON.numCams; ++i) {
995 try {
996 xCudaComputes[i].Download(Rx::LFR::EImage::DepthMap_View_Virtual, imgTmp);
997 imgTmp.GetSize(width, height);
998 }
999 catch (Rx::CRxException e) {
1000 std::cerr << "Error while debugging depth... Check error code: " << e.ToString().ToCString() << std::endl;
1001 throw HVT_ERROR_UNKNOWN;
1002 }
1003 std::cout << "Downloaded img " << i << ": Width: " << width << " Height: " << height << " Nb of bytes: " << imgTmp.GetByteCount() << " Format: " << imgTmp.GetFormat().ToString().ToCString() << std::endl;
1004 gpuErrchk(cudaMemcpy(imgTmp2, finalDepth[i], infoJSON.widthDepth * infoJSON.heightDepth * sizeof(float), cudaMemcpyDeviceToHost));
1005 std::cout << "CUDA img " << i << ": Width: " << infoJSON.widthDepth << " Height : " << infoJSON.heightDepth << " Nb of bytes : " << infoJSON.widthDepth * infoJSON.heightDepth * sizeof(float) << " Format: " << imgFormatDepth[i].ToString().ToCString() << std::endl;
1006 gpuErrchk(cudaDeviceSynchronize());
1007 writeInFileWindow<float>((float*)imgTmp.GetDataPtr(), imgTmp2, infoJSON.widthDepth, infoJSON.heightDepth, 100, i);
1008 }
1009 delete[] imgTmp2;
1010}
1011/*--------------------------*/
1012
1013/*----------- ReadStream function*/
1014
1015int RaytrixStreamer::ReadStream::frameIndex(Clock::duration time) const
1016{
1017 return (time / framePeriod);
1018}
1019
1020bool RaytrixStreamer::ReadStream::nextFrameReady(Clock::duration time) const
1021{
1022 return streamedFrame != frameIndex(time);
1023}
1024/*-------------------------------*/
1025
1026/*
1027
1028 RaytrixStreamer class functions
1029
1030 ------------------------------------ */
1031
1032
1033
1034/* ---------------------------
1035
1036 Functions called by the C entry points. Every Hovitron streamer DLL exposes the same entry points; below we implement what this particular DLL needs for each of them. A typical call order is sketched right after this comment block.
1037
1038*/
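/* A typical call sequence from the host renderer, sketched for orientation only (simplified, error
   handling omitted, variable names illustrative; only entry points defined in this file are used):

       HvtStreamingContext ctx;
       hvtCreateStreamingContext(&createInfo, &ctx);          // constructs a RaytrixStreamer
       uint32_t count = 0;
       hvtEnumerateStreamsParameters(ctx, &count, nullptr);   // query the stream count
       std::vector<HvtRGBDStreamParameters> params(count);
       hvtEnumerateStreamsParameters(ctx, &count, params.data());
       hvtExportStreamImages(ctx, &imagesExportInfo);         // once per stream, for color and depth
       hvtExportSemaphore(ctx, &semaphoreExportInfo, &semaphore);
       hvtStartStreaming(ctx);                                // spawns streamingLoop()
       while (rendering) hvtAcquireStreamsFrames(ctx, &frameInfos);
       hvtStopStreaming(ctx);
       hvtDestroyStreamingContext(ctx);
*/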
1039
1040/*-------------- Constructor/Destructor */
1041
1042RaytrixStreamer::RaytrixStreamer(const uint8_t uuid[VK_UUID_SIZE])
1043{
1044 findVulkanGPU(uuid);
1045 readInfoFromJSON();
1046 initRxSDK();
1047 if (infoJSON.loadFromRawFile) {
1048 initRxRayFile();
1049 }
1050 else {
1051 initRxCams();
1052 }
1053 initRxCudaCompute();
1054 initStreamParameters();
1055 initArrayCuda();
1056 initMutex();
1057}
1058
1059RaytrixStreamer::~RaytrixStreamer()
1060{
1061 std::cout << "---- Destroy RaytrixStreamer dll ----" << std::endl;
1062 if (synthesisSem != NULL)
1063 delete synthesisSem;
1064 for (size_t i = 0; i < infoJSON.numCams; ++i) {
1065 xCamServer.GetCamera(i).Close();
1066 }
1067 if (infoJSON.loadFromRawFile) {
1068 delete[] seqReaders;
1069 delete[] rayImages;
1070 }
1071 Rx::LFR::CLightFieldRuntime::End();
1072}
1073/*--------------------------------------*/
1074
1075void RaytrixStreamer::enumerateStreamsParameters(uint32_t* streamsCount, HvtRGBDStreamParameters* parameters) const
1076{
1077 cudaSetDevice(vulkanGPU);
1078 if (!parameters)
1079 {
1080 *streamsCount = readStreams.size();
1081 return;
1082 }
1083
1084 if (*streamsCount != readStreams.size())
1085 {
1086 throw HvtResult::HVT_ERROR_WRONG_BUFFER_SIZE;
1087 }
1088
1089 int i = 0;
1090 for (const auto& readStream : readStreams)
1091 {
1092 HvtRGBDStreamParameters SPdata{}; HvtRGBDStreamParameters* SP = &SPdata; // local parameter block, copied into parameters[i] below
1093 SP->colorResolution = { (uint32_t)readStream.width_color, (uint32_t)readStream.height_color };
1094 SP->depthResolution = { (uint32_t)readStream.width_depth, (uint32_t)readStream.height_depth };
1095 SP->nearDepth = readStream.anear;
1096 SP->farDepth = readStream.afar;
1097 SP->colorFormat = (HvtImageFormat)readStream.colorFormat;
1098 SP->depthFormat = (HvtImageFormat)readStream.depthFormat;
1099 SP->slotCount = numSlots;
1100 SP->projectionType = readStream.projectionType;
1101
1102 parameters[i] = *SP;
1103 //snprintf(parameters->name, HVT_MAX_STREAM_NAME_LENGHT, "%s", deviceNames.at(i).c_str());
1104 ++i;
1105 }
1106}
1107
1108void RaytrixStreamer::importStreamImages(const HvtStreamImagesExportInfo& exportInfos)
1109{
1110 cudaSetDevice(vulkanGPU);
1111 auto isDepth = (bool)exportInfos.depth;
1112 auto& stream = readStreams.at(exportInfos.streamIndex);
1113 //auto format = isDepth ? stream.depthFormat : stream.colorFormat;
1114 // auto size = isDepth ? stream.depthFrameSize : stream.colorFrameSize;
1115 auto& slots = isDepth ? stream.depthSlots : stream.colorSlots;
1116 auto mipLevels = 1;
1117
1118 // std::cout << "Init memory buffers CUDA" << std::endl;
1119
1120 if (exportInfos.imagesCount != numSlots)
1121 {
1122 throw HvtResult::HVT_ERROR_WRONG_BUFFER_SIZE;
1123 }
1124
1125 for (int i = 0; i < numSlots; ++i)
1126 {
1127 auto mem = exportInfos.pImages[i];
1128 // std::cout << "External Memory loop" << std::endl;
1129
1130 cudaExternalMemory_t cudaExtMemImageBuffer;
1131 cudaMipmappedArray_t cudaMipmappedImageArray;
1132
1133 cudaExternalMemoryHandleDesc cudaExtMemHandleDesc;
1134 memset(&cudaExtMemHandleDesc, 0, sizeof(cudaExtMemHandleDesc));
1135
1136 cudaExtMemHandleDesc.type = cudaExternalMemoryHandleTypeOpaqueWin32;
1137 cudaExtMemHandleDesc.handle.win32.handle = (HANDLE)mem.handle;
1138 cudaExtMemHandleDesc.size = mem.size;
1139
1140 gpuErrchk(cudaImportExternalMemory(&cudaExtMemImageBuffer, &cudaExtMemHandleDesc));
1141
1142 cudaExternalMemoryMipmappedArrayDesc externalMemoryMipmappedArrayDesc;
1143 memset(&externalMemoryMipmappedArrayDesc, 0, sizeof(externalMemoryMipmappedArrayDesc));
1144
1145 cudaExtent extent = isDepth ? make_cudaExtent(stream.width_depth, stream.height_depth, 0) : make_cudaExtent(stream.width_color, stream.height_color, 0);
1146 cudaChannelFormatDesc formatDesc;
1147 formatDesc.x = isDepth ? 32 : 8;
1148 formatDesc.y = isDepth ? 0 : 8;
1149 formatDesc.z = isDepth ? 0 : 8;
1150 formatDesc.w = isDepth ? 0 : 8;
1151 formatDesc.f = isDepth ? cudaChannelFormatKindFloat : cudaChannelFormatKindUnsigned;
1152
1153 externalMemoryMipmappedArrayDesc.offset = 0;
1154 externalMemoryMipmappedArrayDesc.formatDesc = formatDesc;
1155 externalMemoryMipmappedArrayDesc.extent = extent;
1156 externalMemoryMipmappedArrayDesc.flags = 0;
1157 externalMemoryMipmappedArrayDesc.numLevels = mipLevels;
1158
1159 gpuErrchk(cudaExternalMemoryGetMappedMipmappedArray(&cudaMipmappedImageArray, cudaExtMemImageBuffer, &externalMemoryMipmappedArrayDesc));
1160
1161 cudaArray_t cudaMipLevelArray;
1162 cudaResourceDesc resourceDesc;
1163
1164 gpuErrchk(cudaGetMipmappedArrayLevel(&cudaMipLevelArray, cudaMipmappedImageArray, 0));
1165 // cudaMemcpy2DArrayToArray(cudaMipLevelArray, 0, 0, cudaMipLevelArray, 0, 0, stream.width * sizeof(uchar4), stream.height, cudaMemcpyDeviceToDevice);
1166
1167 memset(&resourceDesc, 0, sizeof(resourceDesc));
1168 resourceDesc.resType = cudaResourceTypeArray;
1169 resourceDesc.res.array.array = cudaMipLevelArray;
1170
1171 cudaSurfaceObject_t surfaceObject;
1172 gpuErrchk(cudaCreateSurfaceObject(&surfaceObject, &resourceDesc));
1173
1174 ImageSlot IS;
1175 IS.size = mem.size;
1176 IS.vk_handle = &mem.handle;
1177 IS.cuda_ptr_surf = surfaceObject;
1178
1179 slots.at(i) = IS;
1180 }
1181 (isDepth ? stream.importedDepth : stream.importedColor) = true;
1182}
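// In short, importStreamImages() maps each Vulkan-exported opaque Win32 memory handle into a CUDA
// external memory object, views it as a single-level mipmapped array (32-bit float for depth,
// 8-bit RGBA for color), and wraps level 0 in a cudaSurfaceObject_t. Those surface objects are
// what copyColor()/copyDepth() write into in uploadFrame().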
1183
1184void RaytrixStreamer::importSemaphore(const HvtSemaphoreExportInfo& exportInfos)
1185{
1186 cudaSetDevice(vulkanGPU);
1187 if (exportInfos.semaphore == NULL)
1188 {
1189 synthesisSem->isValid = false;
1190 synthesisSem->sem = NULL;
1191 return;
1192 }
1193
1194 cudaExternalSemaphoreHandleDesc externalSemaphoreHandleDesc = {};
1195
1196 if (exportInfos.type & HvtSemaphoreType::HVT_SEMAPHORE_TYPE_OPAQUE_WIN32_BIT)
1197 {
1198 externalSemaphoreHandleDesc.type = cudaExternalSemaphoreHandleTypeOpaqueWin32;
1199 }
1200 else if (exportInfos.type & HvtSemaphoreType::HVT_SEMAPHORE_TYPE_OPAQUE_WIN32_KMT_BIT)
1201 {
1202 externalSemaphoreHandleDesc.type = cudaExternalSemaphoreHandleTypeOpaqueWin32Kmt;
1203 }
1204 else if (exportInfos.type & HvtSemaphoreType::HVT_SEMAPHORE_TYPE_SYNC_FD_BIT)
1205 {
1206 externalSemaphoreHandleDesc.type = cudaExternalSemaphoreHandleTypeOpaqueFd;
1207 }
1208
1209 externalSemaphoreHandleDesc.handle.win32.handle = (HANDLE)exportInfos.semaphore;
1210 externalSemaphoreHandleDesc.flags = 0;
1211
1212 cudaImportExternalSemaphore(&synthesisSem->sem, &externalSemaphoreHandleDesc);
1213 synthesisSem->isValid = true;
1214 // cudaWaitExternalSemaphoresAsync();
1215}
1216
1217void RaytrixStreamer::destroySemaphore(Semaphore* sem) const
1218{
1219 cudaSetDevice(vulkanGPU);
1220 delete sem;
1221}
1222
1223void RaytrixStreamer::startStreaming()
1224{
1225 //cudaSetDevice(vulkanGPU);
1226 // assert that we have everything ready for streaming
1227 for (auto& stream : readStreams)
1228 {
1229 if (!stream.importedColor || !stream.importedDepth)
1230 {
1231 throw HvtResult::HVT_ERROR_CALL_ORDER;
1232 }
1233 }
1234
1235 // Start worker
1236 running = true;
1237 std::cout << "Starting streaming" << std::endl;
1238 streamingThread = std::thread([&]
1239 { streamingLoop(); });
1240}
1241
1242void RaytrixStreamer::acquireStreamsFrames(const HvtAcquireStreamFramesInfo& infos)
1243{
1244 gpuErrchk(cudaSetDevice(vulkanGPU));
1245 if (infos.frameInfoCount != readStreams.size())
1246 {
1247 throw HvtResult::HVT_ERROR_WRONG_BUFFER_SIZE;
1248 }
1249
1250 swapPendingToReading();
1251
1252 auto imageIndex = slotReadingIndex;
1253
1254 for (uint32_t i = 0; i < infos.frameInfoCount; ++i)
1255 {
1256 gpuErrchk(cudaEventSynchronize(cudaEvents[i]));
1257
1258 auto& stream = readStreams.at(i);
1259 auto& desc = infos.pStreamFrameInfos[i];
1260 desc.extrinsics = stream.extrinsics;
1261 desc.intrinsics = stream.intrinsics;
1262 desc.imageIndex = imageIndex;
1263 }
1264}
1265
1266void RaytrixStreamer::releaseStreamsFrames(Semaphore* waitSem)
1267{
1268 std::cout << "release frames" << std::endl;
1269}
1270
1271void RaytrixStreamer::stopStreaming()
1272{
1273 cudaSetDevice(vulkanGPU);
1274 running = false;
1275 if (streamingThread.joinable())
1276 {
1277 streamingThread.join();
1278 }
1279}
1280
1281
1282
1284
1285template <typename Closure>
1286HvtResult exceptionFirewall(Closure&& clos)
1287{
1288 try
1289 {
1290 clos();
1291 }
1292 catch (HvtResult res)
1293 {
1294 return res;
1295 }
1296 catch (const std::exception& e)
1297 {
1298 std::cerr << "Catched exception at C boundary : \"" << e.what() << "\"" << std::endl;
1299 return HvtResult::HVT_ERROR_UNKNOWN;
1300 }
1301 catch (...)
1302 {
1303 return HvtResult::HVT_ERROR_UNKNOWN;
1304 }
1305 return HvtResult::HVT_SUCESS;
1306}
1307
1308template <typename T>
1309void checkNonNull(T ptr)
1310{
1311 if (!ptr)
1312 {
1313 throw HvtResult::HVT_ERROR_INVALID_HANDLE;
1314 }
1315}
1316
1318
1319extern "C"
1320{
1321
1328 HVTAPI_ATTR HvtResult HVTAPI_CALL hvtCreateStreamingContext(const HvtStreamingContextCreateInfo* createInfo, HvtStreamingContext* outStreamingContext)
1329 {
1330 //std::cout << "hvtCreateStreamingContext" << std::endl;
1331 return exceptionFirewall([&]
1332 {
1333 checkNonNull(createInfo);
1334
1335 if (createInfo->headerVersion != HVT_HEADER_VERSION) {
1336 throw HvtResult::HVT_ERROR_HEADER_VERSION;
1337 }
1338
1339 auto context = new RaytrixStreamer(createInfo->graphicsDeviceUUID);
1340 *outStreamingContext = context->to_handle(); });
1341 }
1342
1350 HVTAPI_ATTR HvtResult HVTAPI_CALL hvtEnumerateStreamsParameters(HvtStreamingContext streamingContext, uint32_t* pStreamParameterCount, HvtRGBDStreamParameters* pStreamParameters)
1351 {
1352 //std::cout << "hvtEnumerateStreamsParameters" << std::endl;
1353 return exceptionFirewall([&]
1354 {
1355 auto context = RaytrixStreamer::check(streamingContext);
1356 checkNonNull(pStreamParameterCount);
1357 context->enumerateStreamsParameters(pStreamParameterCount, pStreamParameters); });
1358 }
1359
1366 HVTAPI_ATTR HvtResult HVTAPI_CALL hvtExportStreamImages(HvtStreamingContext streamingContext, const HvtStreamImagesExportInfo* exportInfos)
1367 {
1368 //std::cout << "hvtExportStreamImages" << std::endl;
1369 return exceptionFirewall([&]
1370 {
1371 auto context = RaytrixStreamer::check(streamingContext);
1372 checkNonNull(exportInfos);
1373 context->importStreamImages(*exportInfos); });
1374 }
1375
1383 HVTAPI_ATTR HvtResult HVTAPI_CALL hvtExportSemaphore(HvtStreamingContext streamingContext, const HvtSemaphoreExportInfo* exportInfo, HvtSemaphore* outSemaphore)
1384 {
1385 //std::cout << "hvtExportSemaphore" << std::endl;
1386 return exceptionFirewall([&]
1387 {
1388 auto context = RaytrixStreamer::check(streamingContext);
1389 checkNonNull(exportInfo);
1390 context->importSemaphore(*exportInfo); });
1391 }
1392
1399 HVTAPI_ATTR HvtResult HVTAPI_CALL hvtDestroySemaphore(HvtStreamingContext streamingContext, HvtSemaphore semaphore)
1400 {
1401 //std::cout << "hvtDestroySemaphore" << std::endl;
1402 return exceptionFirewall([&]
1403 {
1404 auto context = RaytrixStreamer::check(streamingContext);
1405 context->destroySemaphore(Semaphore::check(semaphore)); });
1406 }
1407
1413 HVTAPI_ATTR HvtResult HVTAPI_CALL hvtStartStreaming(HvtStreamingContext streamingContext)
1414 {
1415 //std::cout << "hvtStartStreaming" << std::endl;
1416 return exceptionFirewall([&]
1417 {
1418 auto context = RaytrixStreamer::check(streamingContext);
1419 context->startStreaming(); });
1420 }
1421
1428 HVTAPI_ATTR HvtResult HVTAPI_CALL hvtAcquireStreamsFrames(HvtStreamingContext streamingContext, const HvtAcquireStreamFramesInfo* infos)
1429 {
1430 // std::cout << "hvtAcquireStreamsFrames" << std::endl;
1431 return exceptionFirewall([&]
1432 {
1433 auto context = RaytrixStreamer::check(streamingContext);
1434 context->acquireStreamsFrames(*infos); });
1435 }
1436
1446 HVTAPI_ATTR HvtResult HVTAPI_CALL hvtReleaseStreamsFrames(HvtStreamingContext streamingContext, HvtSemaphore waitSemaphore)
1447 {
1448 // std::cout << "hvtReleaseStreamsFrames" << std::endl;
1449 return exceptionFirewall([&]
1450 {
1451 //auto context = AcqKiRT::check(streamingContext);
1452 // context->releaseStreamsFrames(Semaphore::opt_check(waitSemaphore));
1453 });
1454 }
1455
1461 HVTAPI_ATTR HvtResult HVTAPI_CALL hvtStopStreaming(HvtStreamingContext streamingContext)
1462 {
1463 //std::cout << "hvtStopStreaming" << std::endl;
1464 return exceptionFirewall([&]
1465 {
1466 auto context = RaytrixStreamer::check(streamingContext);
1467 context->stopStreaming();
1468 });
1469 }
1470
1476 HVTAPI_ATTR HvtResult HVTAPI_CALL hvtDestroyStreamingContext(HvtStreamingContext streamingContext)
1477 {
1478 //std::cout << "hvtDestroyStreamingContext" << std::endl;
1479 return exceptionFirewall([&]
1480 {
1481 auto context = RaytrixStreamer::check(streamingContext);
1482 delete context; });
1483 }
1484}