HoviTron Video Pipeline
raytrixdll-v6/RaytrixStreamer.cpp
1#include "RaytrixStreamer.h"
2
3/* ---------------------------
4
5 Static functions for Raytrix callback functions
6
7*/
8
10// Copyright (c) 2019 Raytrix GmbH. All rights reserved.
12//
23void OnImageCaptured(const Rx::CRxImage& xImage, unsigned uCamIdx, Rx::LFR::CImageQueue& camBuffer)
24{
25 try
26 {
27 // Make a copy of the provided image. We don't know if this image is reused by the camera SDK or by another handler
28 Rx::CRxImage xCapturedImage;
29 xCapturedImage.Create(xImage);
30
31 /************************************************************************/
32 /* Write into buffer */
33 /************************************************************************/
34 if (!camBuffer.MoveIn(std::move(xCapturedImage)))
35 {
36 // Buffer is full and overwrite is disabled
37 // This is a lost frame
38 return;
39 }
40 }
41 catch (Rx::CRxException& ex)
42 {
43 printf("Exception occurred:\n%s\n\n", ex.ToString(true).ToCString());
44 printf("Press any key to end program...\n");
45 //(void)_getch();
46 }
47}
57static void ImageCaptured(const Rx::CRxImage& xImage, unsigned uCamIdx, void* pvContext)
58{
59 RaytrixStreamer* context = (RaytrixStreamer*)pvContext;
60 OnImageCaptured(xImage, uCamIdx, context->getCamBuffer(uCamIdx));
61}
62/*
63
64 Static functions for Raytrix callback functions
65
66 --------------------------- */
67
68
69 /* ----------------------
70 * Copyright 2023 Université Libre de Bruxelles (ULB), Universidad Politécnica de Madrid (UPM), CREAL, Deutsches Zentrum für Luft- und Raumfahrt (DLR)
71
72 * Licensed under the Apache License, Version 2.0 (the "License");
73 * you may not use this file except in compliance with the License.
74 * You may obtain a copy of the License at <http://www.apache.org/licenses/LICENSE-2.0>
75
76 * Unless required by applicable law or agreed to in writing, software
77 * distributed under the License is distributed on an "AS IS" BASIS,
78 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
79 * See the License for the specific language governing permissions and
80 * limitations under the License.
81 ---------------------- */
82
83 /* ----------------------------
84 The Raytrix DLL has been written by Armand Losfeld (armand.losfeld@ulb.be or armand-losfeld@hotmail.com) with the help of Martin Lingenauber.
85 This work was highly inspired by the work done for the Kinect DLL (AcqKiRT), written by Jaime Sancho.
86 ---------------------------- */
87
88 /* ------------------------------------
89
90 RaytrixStreamer class functions
91
92 */
93
94 /*------- Init Raytrix DLL functions */
95
96void RaytrixStreamer::findVulkanGPU(const uint8_t uuid[VK_UUID_SIZE]) {
97 // Find vulkan GPU (synthesis GPU)
98 int cudaDeviceCount;
99 cudaGetDeviceCount(&cudaDeviceCount);
100
101 vulkanGPU = NULL;
102 for (int cudaDevice = 0; cudaDevice < cudaDeviceCount; cudaDevice++)
103 {
104 cudaDeviceProp deviceProp;
105 cudaGetDeviceProperties(&deviceProp, cudaDevice);
106
107 std::cout << "GPU n: " << cudaDevice << std::endl;
108 const unsigned char* p = reinterpret_cast<uchar*>(&deviceProp.uuid);
109 for (size_t i = 0; i < 16; i++) {
110 printf("%02hhx", p[i]);
111 }
112 putchar('\n');
113
114 if (!memcmp(&deviceProp.uuid, uuid, VK_UUID_SIZE))
115 {
116 vulkanGPU = cudaDevice;
117 }
118 }
119 std::cout << std::endl;
120 std::cout << "Vulkan GPU: " << std::endl;
121 for (size_t i = 0; i < 16; ++i) {
122 printf("%02hhx", uuid[i]);
123 }
124 putchar('\n');
125
126 std::cout << std::endl;
127
128
129 gpuErrchk(cudaSetDevice(vulkanGPU));
130 std::cout << "Available GPUs: " << cudaDeviceCount << " Vulkan GPU:" << vulkanGPU << std::endl;
131}
132
133void RaytrixStreamer::initRxCams() {
134 // Init Rx Cameras
135 std::cout << "Initializing Rx cams" << std::endl;
136
137 // Start to find cameras. This is a synchronous call and we wait here until the find process has finished
138 xCamServer.FindCameras();
139
140 // Quit the application if there is no camera
141 if (xCamServer.GetCameraCount() == 0)
142 {
143 printf("No camera found\n");
144 throw HVT_ERROR_NOT_FOUND;
145 }
146
147 printf("Number of cameras available: %d\n", xCamServer.GetCameraCount());
148
149 for (int i = 0; i < infoJSON.numCams; ++i) {
150 // Get the camera from the camera server
151 auto& xCam = xCamServer.GetCamera(infoJSON.uIDs[i]);
152
153 // Open the camera
154 printf("Opening camera %i...", infoJSON.uIDs[i]);
155 xCam.Open();
156 printf("done.\n");
157
158 // Add an image captured callback. This method gets called for every captured camera image; more details are given there
159 xCam.AddImageCapturedCallback(ImageCaptured, (void*)this);
160
161 printf("______________________________________________\n");
162 printf("Camera %d:\n", infoJSON.uIDs[i]);
163
164 // Write the camera type to the console; you can find your camera type on the label of your camera.
165 // E.g. R26-C-A-U3-A028-RS-A
166 // The type is given by the 3rd entry: R26-C- ->A<- -U3-A028-RS-A
167 // This does not represent the camera connection to your current device; for this example camera it is USB 3.0, indicated by the U3
168 printf(">> Type: %s\n", xCam.GetDriverName().ToCString());
169
170 // This is the camera serial printed on your camera label
171 printf(">> ID : %s\n", xCam.GetInternalSerial().ToCString());
172
173 printf(">> Buffersize: %u\n", uBufferSize);
174
175 printf(">> Overwrite : %s\n", bOverwrite ? "Yes" : "No");
176
177 printf("\n\n");
178
179 // Create buffer within the given size and with the given overwrite flag
180 camBuffers[i].Initialize(uBufferSize, bOverwrite);
181
182 if (xCam.IsPropertyAvailable(Rx::ECamProperty::Exposure)) xCam.SetProperty(Rx::ECamProperty::Exposure, infoJSON.exposureCams[i]);
183
184 Rx::CRxArrayString GUIDs;
185 Rx::LFR::CCalibrationManager::GetCalibrationGUIDs(GUIDs);
186 int idxInGUIDs = -1;
187 for (size_t j = 0; infoJSON.specificCal && j < GUIDs.Length(); ++j) {
188 if (GUIDs.At(j) == infoJSON.GUIDsCalib[i]) {
189 std::cout << "Found requested metric calibration: " << infoJSON.GUIDsCalib[i].ToCString() << std::endl;
190 idxInGUIDs = j;
191 }
192 }
193
194 if (idxInGUIDs >= 0) {
195 Rx::LFR::CCalibrationManager::LoadCalibration(*(camCalibrations[i]), GUIDs.At(idxInGUIDs), true);
196 }
197 else {
198 // Load the default calibration of the camera (and load the gray image too)
199 if (!Rx::LFR::CCalibrationManager::HasDatabase(xCam))
200 {
201 std::cerr << "This camera does not have a calibration database." << std::endl;
202 std::cerr << "Either use the installer for your camera settings to install the calibration on your system, or create a calibration manually, for example via RxLive." << std::endl;
203 throw HVT_ERROR_NOT_FOUND;
204 }
205 Rx::LFR::CCalibrationManager::LoadDefaultCalibration(*(camCalibrations[i]), xCam, true);
206 }
207 }
208}
209
210void RaytrixStreamer::initStreamParameters() {
211
212 synthesisSem = new Semaphore();
213 if (synthesisSem == NULL) {
214 std::cerr << "Impossible to allocate data for semaphore ..." << std::endl;
215 throw HVT_ERROR_UNKNOWN;
216 }
217
218 // Init Creal params
219 auto frameRate = 30;
220 using namespace std::chrono_literals;
221 auto framePeriod = std::chrono::nanoseconds(1s) / frameRate;
222
223 //Stream for each cam
224 for (size_t i = 0; i < infoJSON.numCams; ++i)
225 {
226 HvtIntrinsics intrinsics;
227 HvtProjectionType ptype;
228
229 // Always perspective projection
230 ptype = HvtProjectionType::HVT_PROJECTION_PERSPECTIVE;
231 intrinsics.perspective = getRxIntrinsicParams(i);
232
233 HvtExtrinsics extrinsics = getRxExtrinsicParams(i);
234
235 ReadStream RS;
236 RS.projectionType = ptype;
237 RS.width_color = infoJSON.width;
238 RS.height_color = infoJSON.height;
239 RS.width_depth = infoJSON.widthDepth;
240 RS.height_depth = infoJSON.heightDepth;
241 RS.afar = getMaxDepth();
242 RS.anear = getMinDepth();
243 RS.colorFrameSize = infoJSON.width * infoJSON.height * 4; // RGBA
244 RS.colorFrameStride = infoJSON.width * 4;
245 RS.depthFrameSize = infoJSON.widthDepth * infoJSON.heightDepth * sizeof(float); // F32
246 RS.depthFrameStride = infoJSON.widthDepth * sizeof(float);
247 RS.colorFormat = HvtImageFormat::HVT_FORMAT_R8G8B8A8_UNORM; // HVT_FORMAT_G8_B8_R8_3PLANE_420_UNORM
248 RS.depthFormat = HvtImageFormat::HVT_FORMAT_R32_SFLOAT;
249 RS.intrinsics = intrinsics;
250 RS.extrinsics = extrinsics;
251 RS.framePeriod = framePeriod;
252 RS.frameCount = (int)numFrames;
253 readStreams.push_back(RS);
254 }
255}
256
257void RaytrixStreamer::initRxCudaCompute() {
258 std::cout << "Initialize RxCUDACompute instances" << std::endl;
259 xCudaComputes = new Rx::LFR::CCudaCompute[infoJSON.numCams];
260 // Enumerate all CUDA devices at the beginning
261 Rx::LFR::CCuda::EnumerateCudaDevices();
262 Rx::CRxMetaData metaData;
263 double imgDivisor;
264 for (size_t i = 0; i < infoJSON.numCams; ++i) {
265 try {
266 xCudaComputes[i].SetCudaDevice(Rx::LFR::CCuda::GetDevice(vulkanGPU));
267 // Apply the calibration to the compute instance and set the DataType for the processed images
268 xCudaComputes[i].ApplyCalibration(*(camCalibrations[i]), true);
269 if (infoJSON.loadFromRawFile) {
270 seqReaders[i].GetMetaData((Rx::CRxMetaData&)metaData);
271 xCudaComputes[i].GetComputeParams().ImportFromMetaData(metaData);
272
273 rayImages[i].GetSize((int&)widthDefault, (int&)heightDefault);
274 }
275 else {
276 xCudaComputes[i].GetComputeParams().ImportParameterFromFile(infoJSON.filenameComputeParameters);
277
278 xCamServer.GetCamera(i).GetProperty(Rx::ECamProperty::Width, (int&)widthDefault);
279 xCamServer.GetCamera(i).GetProperty(Rx::ECamProperty::Height, (int&)heightDefault);
280 }
281
282 xCudaComputes[i].GetComputeParams().SetValue(Rx::LFR::ECudaComputeParam::PreProc_DataType, (unsigned) Rx::EDataType::UByte);
283
284 if (infoJSON.forceRGBResolution) {
285 if (widthDefault / infoJSON.width != heightDefault / infoJSON.height) {
286 std::cout << "The focus image divisor should be the same for the rows and the columns..." << std::endl;
287 }
288 imgDivisor = (double)widthDefault / (double)infoJSON.width;
289 xCudaComputes[i].GetComputeParams().SetValue(Rx::LFR::ECudaComputeParam::Focus_ImageDivisor, imgDivisor);
290 }
291 else {
292 xCudaComputes[i].GetComputeParams().GetValue(Rx::LFR::ECudaComputeParam::Focus_ImageDivisor, (double&)imgDivisor);
293 infoJSON.width = widthDefault / imgDivisor;
294 infoJSON.height = heightDefault / imgDivisor;
295 }
296
297 if (infoJSON.forceDepthResolution) {
298 if (widthDefault / infoJSON.widthDepth != heightDefault / infoJSON.heightDepth) {
299 std::cout << "The depth image divisor should be the same for the rows and the columns..." << std::endl;
300 }
301 imgDivisor = (double)widthDefault / (double)infoJSON.widthDepth;
302 xCudaComputes[i].GetComputeParams().SetValue(Rx::LFR::ECudaComputeParam::Depth_ImageDivisor, imgDivisor);
303 }
304 else {
305 xCudaComputes[i].GetComputeParams().GetValue(Rx::LFR::ECudaComputeParam::Depth_ImageDivisor, (double&)imgDivisor);
306 infoJSON.widthDepth = widthDefault / imgDivisor;
307 infoJSON.heightDepth = heightDefault / imgDivisor;
308 }
309 }
310 catch (Rx::CRxException e) {
311 std::cerr << "Failed to initialize CUDA compute... Check Raytrix error code: " << e.ToString().ToCString() << std::endl;
312 throw HVT_ERROR_UNKNOWN;
313 }
314 }
315
316}
317
318void RaytrixStreamer::initRxSDK() {
319 // Authenticate the Light Field Runtime
320 printf("Authenticate LFR...\n");
321 try {
322 Rx::LFR::CLightFieldRuntime::Authenticate();
323 }
324 catch (Rx::CRxException e) {
325 std::cerr << "Impossible to authenticate. Check RaytrixSDK error code: " << e.ToString().ToCString() << std::endl;
326 throw HVT_ERROR_UNKNOWN;
327 }
328 if (!Rx::LFR::CLightFieldRuntime::IsFeatureSupported(Rx::ERuntimeFeature::SDK)) {
329 std::cerr << "Your license does not allow you to use the SDK properly. Upgrade it or buy another license..." << std::endl;
330 throw HVT_ERROR_INVALID_HANDLE;
331 }
332 camCalibrations = (Rx::LFR::CCalibration**)malloc(sizeof(Rx::LFR::CCalibration*) * infoJSON.numCams);
333 if (camCalibrations == NULL) {
334 std::cerr << "Impossible to allocate memory for default calibrations" << std::endl;
335 throw HVT_ERROR_UNKNOWN;
336 }
337
338 if (infoJSON.loadFromRawFile) {
339 rayImages = new Rx::LFR::CRayImage[infoJSON.numCams];
340 seqReaders = new Rx::LFR::CSeqFileReader[infoJSON.numCams];
341 }
342 else {
343 camBuffers = new Rx::LFR::CImageQueue[infoJSON.numCams];
344 capturedImages = new Rx::CRxImage[infoJSON.numCams];
345 for (size_t i = 0; i < infoJSON.numCams; ++i) {
346 camCalibrations[i] = new Rx::LFR::CCalibration();
347 }
348 }
349 imgFormatRGB = new Rx::CRxImageFormat[infoJSON.numCams];
350 imgFormatDepth = new Rx::CRxImageFormat[infoJSON.numCams];
351
352 pitchInCudaRGB = new size_t[infoJSON.numCams];
353 pitchInCudaDepth = new size_t[infoJSON.numCams];
354
355 referencePlaneToCameraPlane = new float[infoJSON.numCams];
356
357 cuImgs = new Rx::LFR::CudaImage[infoJSON.numCams];
358 cuDepths = new Rx::LFR::CudaImage[infoJSON.numCams];
359}
360
361void RaytrixStreamer::initArrayCuda() {
362 std::cout << "Init CUDA array" << std::endl;
363 imgRGB = (unsigned short**)malloc(sizeof(unsigned short*) * infoJSON.numCams);
364 imgDepth = (unsigned short**)malloc(sizeof(unsigned short*) * infoJSON.numCams);
365 finalRGB = (unsigned char**)malloc(sizeof(unsigned char*) * infoJSON.numCams);
366 finalDepth = (float**)malloc(sizeof(float*) * infoJSON.numCams);
367 imgFloatDepth = (float**)malloc(sizeof(float*) * infoJSON.numCams);
368 prevFinalDepth = (float**)malloc(sizeof(float*) * infoJSON.numCams);
369 imgUByteRGB = (uchar**)malloc(sizeof(uchar*) * infoJSON.numCams);
370
371 if (imgRGB == NULL || imgDepth == NULL || finalRGB == NULL || finalDepth == NULL || imgFloatDepth == NULL || prevFinalDepth == NULL || imgUByteRGB == NULL) {
372 std::cerr << "Impossible to allocate data for img vectors..." << std::endl;
373 throw HVT_ERROR_UNKNOWN;
374 }
375
376 gpuErrchk(cudaSetDevice(vulkanGPU));
377 cudaStreams = (cudaStream_t*)malloc(sizeof(cudaStream_t) * infoJSON.numCams);
378 cudaEvents = (cudaEvent_t*)malloc(sizeof(cudaEvent_t) * infoJSON.numCams);
379
380 for (size_t i = 0; i < infoJSON.numCams; ++i) {
381 gpuErrchk(cudaMalloc(finalRGB + i, sizeof(uchar) * infoJSON.width * infoJSON.height * 4));
382 gpuErrchk(cudaMalloc(finalDepth + i, sizeof(float) * infoJSON.widthDepth * infoJSON.heightDepth));
383 gpuErrchk(cudaMalloc(prevFinalDepth + i, sizeof(float) * infoJSON.widthDepth * infoJSON.heightDepth));
384
385 gpuErrchk(cudaStreamCreate(&(cudaStreams[i])));
386 gpuErrchk(cudaEventCreateWithFlags(&(cudaEvents[i]), cudaEventDisableTiming));
387 }
388}
389
390void RaytrixStreamer::readInfoFromJSON() {
391 char* jsonPath;
392 size_t len;
393 errno_t err = _dupenv_s(&jsonPath, &len, ENV_NAME);
394 if (err == -1) {
395 std::cerr << "No " << ENV_NAME << " env variable set, cannot load settings" << std::endl;
396 throw HvtResult::HVT_ERROR_NOT_FOUND;
397 }
398
399 // read json file
400 std::cout << "Start reading JSON file..." << std::endl;
401 std::ifstream jsonRead{ jsonPath, std::ifstream::binary };
402 if (!jsonRead.good()) {
403 std::cout << "Impossible to read JSON file, check path... Path given: " << jsonPath << std::endl;
404 throw HVT_ERROR_NOT_FOUND;
405 }
406 nlohmann::json jsonContent;
407 jsonRead >> jsonContent;
408 // Num of cams
409 infoJSON.numCams = jsonContent["numCams"];
410 // Specific RGB res
411 infoJSON.forceRGBResolution = jsonContent["forceUseOfSpecificRBGResolution"];
412 if (infoJSON.forceRGBResolution) {
413 // Size total focus
414 infoJSON.width = jsonContent["width"];
415 infoJSON.height = jsonContent["height"];
416 }
417 // Specific depth res
418 infoJSON.forceDepthResolution = jsonContent["forceUseOfSpecificDepthResolution"];
419 if (infoJSON.forceDepthResolution) {
420 // Size depth
421 infoJSON.widthDepth = jsonContent["width_depth"];
422 infoJSON.heightDepth = jsonContent["height_depth"];
423 }
424 // ID cams
425 infoJSON.uIDs = new unsigned int[infoJSON.numCams];
426 for (size_t i = 0; i < infoJSON.numCams; ++i) {
427 infoJSON.uIDs[i] = jsonContent["idCams"][std::to_string(i).c_str()];
428 }
429 // Compute params
430 std::string tmpStdString = jsonContent["RxComputeParametersFile"];
431 infoJSON.filenameComputeParameters = Rx::CRxString(tmpStdString.c_str());
432 // Sequence file
433 infoJSON.loadFromRawFile = jsonContent["fromRawFile"];
434 if (infoJSON.loadFromRawFile) {
435 // Load seq file path
436 infoJSON.filenameRawFiles = new Rx::CRxString[infoJSON.numCams];
437 for (size_t i = 0; i < infoJSON.numCams; ++i) {
438 tmpStdString = jsonContent["pathRawFile"][std::to_string(i).c_str()];
439 infoJSON.filenameRawFiles[i] = Rx::CRxString(tmpStdString.c_str());
440 }
441 }
442 //Calibration
443 infoJSON.specificCal = jsonContent["specificCalibration"];
444 if (infoJSON.specificCal) {
445 // Load calib GUID
446 infoJSON.GUIDsCalib = new Rx::CRxString[infoJSON.numCams];
447 for (size_t i = 0; i < infoJSON.numCams; ++i) {
448 tmpStdString = jsonContent["GUIDsForCalibration"][std::to_string(i).c_str()];
449 infoJSON.GUIDsCalib[i] = Rx::CRxString(tmpStdString.c_str());
450 }
451 }
452 if (!infoJSON.loadFromRawFile) {
453 //Exposure of cameras
454 infoJSON.exposureCams = new float[infoJSON.numCams];
455 for (size_t i = 0; i < infoJSON.numCams; ++i) {
456 infoJSON.exposureCams[i] = jsonContent["exposureCams"][std::to_string(i).c_str()];
457 }
458 }
459 // Extrinsic params path
460 infoJSON.extrinsicParamsPath = jsonContent["ExtrinsicParamsFile"];
461 std::cout << "JSON file read" << std::endl;
462}
463
464void RaytrixStreamer::initMutex() {
465 loaderMtxs = new std::mutex[infoJSON.numCams];
466 loaderCVs = new std::condition_variable[infoJSON.numCams];
467 canLoadImgs = new bool[infoJSON.numCams];
468 memset(canLoadImgs, true, infoJSON.numCams * sizeof(bool));
469}
470
471void RaytrixStreamer::initRxRayFile() {
472 std::cout << "Read RayFile: " << std::endl;
473 try {
474 for (size_t i = 0; i < infoJSON.numCams; ++i) {
475 std::cout << "Seq " << i << ": " << infoJSON.filenameRawFiles[i].ToCString() << std::endl;
476 seqReaders[i].Open(infoJSON.filenameRawFiles[i]);
477 seqReaders[i].ReadFrame(rayImages[i], true, false);
478
479 Rx::CRxArrayString GUIDs;
480 Rx::LFR::CCalibrationManager::GetCalibrationGUIDs(GUIDs);
481 int idxInGUIDs = -1;
482 for (size_t j = 0; infoJSON.specificCal && j < GUIDs.Length(); ++j) {
483 if (GUIDs.At(j) == infoJSON.GUIDsCalib[i]) {
484 std::cout << "Found requested metric calibration: " << infoJSON.GUIDsCalib[i].ToCString() << std::endl;
485 idxInGUIDs = j;
486 }
487 }
488
489 if (idxInGUIDs >= 0) {
490 camCalibrations[i] = new Rx::LFR::CCalibration();
491 Rx::LFR::CCalibrationManager::LoadCalibration(*(camCalibrations[i]), GUIDs.At(idxInGUIDs), true);
492 }
493 else {
494 camCalibrations[i] = (Rx::LFR::CCalibration*)&(rayImages[i].GetCalibration());
495 }
496 }
497 }
498 catch (Rx::CRxException e) {
499 std::cout << "Impossible to read Rays file... Check error code: " << e.ToString().ToCString() << std::endl;
500 throw HVT_ERROR_UNKNOWN;
501 }
502}
503
504/*---------------------------------*/
505
506/*------- Print fps(functions)*/
507
508void RaytrixStreamer::addMsgToPrint(const char* title, long long int averageTime, float frameNumber)
509{
510 if (printMsg == NULL) {
511 whereToAddMsg = 0;
512 printMsg = (char*)malloc(sizeof(char) * SIZE_MSG);
513 if (printMsg == NULL)
514 return;
515 }
516
517 // Advance the write position by the number of characters actually written, so messages never overlap
518 int written = sprintf(printMsg + whereToAddMsg, "%s: %f ...", title, frameNumber * 1000.f / averageTime);
519 whereToAddMsg += (written > 0) ? written : 0;
520}
521
522void RaytrixStreamer::printFPS() {
523 std::cout << '\r' << printMsg << std::flush;
524}
525
526void RaytrixStreamer::resetPrintMsg() {
527 memset(printMsg, '\0', sizeof(char) * SIZE_MSG);
528 whereToAddMsg = 0;
529}
530/*----------------------------*/
531
532/*--------- Getter*/
533
534float RaytrixStreamer::getMaxDepth() {
535 const float ZFar = 1.3f; // -> !!! MAGIC NUMBER !!! from AcquKirt -> not used here
536 return ZFar;
537}
538
539float RaytrixStreamer::getMinDepth() {
540 const float ZNear = 0.4f; // -> !!! MAGIC NUMBER !!! from AcquKirt -> not used here
541 return ZNear;
542}
543
544HvtIntrinsicsPerspective RaytrixStreamer::getRxIntrinsicParams(size_t idxCam) {
545 Rx::LFR::CParameters<Rx::LFR::ECalibrationParam>& paramsCal = camCalibrations[idxCam]->GetParams();
546 double f, sizePixel;
547 paramsCal.GetValue(Rx::LFR::ECalibrationParam::MainLensThick_NominalFocalLengthMM, f);
548 paramsCal.GetValue(Rx::LFR::ECalibrationParam::Sensor_PhysicalPixelSizeMM, sizePixel);
549
550 HvtIntrinsicsPerspective intrinsicParams;
551 intrinsicParams.focalX = (float)f / (((float)widthDefault / (float)infoJSON.widthDepth) * sizePixel);
552 intrinsicParams.focalY = (float)f / (((float)heightDefault / (float)infoJSON.heightDepth) * sizePixel);
553 intrinsicParams.principlePointX = ((float)infoJSON.width) / 2.0f;
554 intrinsicParams.principlePointY = ((float)infoJSON.height) / 2.0f;
555
556 return intrinsicParams;
557}
558
559HvtExtrinsics RaytrixStreamer::getRxExtrinsicParams(size_t idxCam) {
560 HvtPosition pos;
561 HvtRotation rot;
562 unsigned isExCalib;
563 Rx::LFR::CParameters<Rx::LFR::ECalibrationParam>& paramsCal = camCalibrations[idxCam]->GetParams();
564 paramsCal.GetValue(Rx::LFR::ECalibrationParam::IsExtrinsicCalibrated, isExCalib);
565 if (isExCalib != 1) {
566 std::cerr << "Camera " << idxCam << " is not extrinsically calibrated... Reconstruction will likely be wrong." << std::endl;
567 }
568 Rx::CRxArrayDouble trans;
569 paramsCal.GetValue(Rx::LFR::ECalibrationParam::Translation_Global_Sensor, trans);
570 double* dataPtr = (double*)trans.GetPointer();
571 size_t sizeData = trans.Length();
572 bool checkExParams = true;
573 if (sizeData < 1)
574 checkExParams = false;
575
576 std::ifstream jsonRead{ infoJSON.extrinsicParamsPath, std::ifstream::binary };
577 if (!jsonRead.good()) {
578 std::cout << "Impossible to read JSON extrinsic file, check path... Path given: " << infoJSON.extrinsicParamsPath << std::endl;
579 throw HVT_ERROR_NOT_FOUND;
580 }
581 nlohmann::json jsonContent;
582 jsonRead >> jsonContent;
583
584 if (checkExParams) {
585 pos.x = (float)dataPtr[2] + (double)jsonContent["cameras"][idxCam]["Position"][0]; // x is forward/backward
586 pos.y = (float)dataPtr[0] + (double)jsonContent["cameras"][idxCam]["Position"][1]; // y is left/right
587 pos.z = (float)dataPtr[1] + (double)jsonContent["cameras"][idxCam]["Position"][2]; // z is up/down
588 referencePlaneToCameraPlane[idxCam] = pos.x;
589 double data;
590 paramsCal.GetValue(Rx::LFR::ECalibrationParam::Rotation_Global_Sensor_Pitch, data);
591 rot.pitch = (float)data + (double)jsonContent["cameras"][idxCam]["Rotation"][0];
592 paramsCal.GetValue(Rx::LFR::ECalibrationParam::Rotation_Global_Sensor_Roll, data);
593 rot.roll = (float)data + (double)jsonContent["cameras"][idxCam]["Rotation"][1];
594 paramsCal.GetValue(Rx::LFR::ECalibrationParam::Rotation_Global_Sensor_Yaw, data);
595 rot.yaw = (float)data + (double)jsonContent["cameras"][idxCam]["Rotation"][2];
596 }
597 else {
598 pos.x = 0.0f;
599 pos.y = 0.0f;
600 pos.z = 0.0f;
601 referencePlaneToCameraPlane[idxCam] = 1.0f;
602 rot.pitch = 0.0f;
603 rot.roll = 0.0f;
604 rot.yaw = 0.0f;
605 }
606 jsonRead.close();
607
608
609 HvtExtrinsics extrinsics;
610 extrinsics.position = pos;
611 extrinsics.rotation = rot;
612 return extrinsics;
613}
614
615Rx::LFR::CImageQueue& RaytrixStreamer::getCamBuffer(size_t idxCam)
616{
617 return camBuffers[idxCam];
618}
619/*---------------*/
620
621/*--------------- RaytrixDLL functions for RGBD acquire and refine tasks*/
622
623void RaytrixStreamer::getRGBDFormats() {
624 for (size_t i = 0; i < infoJSON.numCams; ++i) {
625 try {
626 imgFormatRGB[i] = xCudaComputes[i].GetImageFormat(idRGB);
627 }
628 catch (Rx::CRxException e) {
629 std::cerr << "Impossible to get image format... Check error code: " << e.ToString().ToCString() << std::endl;
630 throw HVT_ERROR_UNKNOWN;
631 }
632 if (imgFormatRGB[i].m_iWidth != infoJSON.width || imgFormatRGB[i].m_iHeight != infoJSON.height) {
633 std::cerr << "RGB image formats do not correspond..." << std::endl;
634 throw HVT_ERROR_UNKNOWN;
635 }
636 try {
637 imgFormatDepth[i] = xCudaComputes[i].GetImageFormat(idDepth);
638 }
639 catch (Rx::CRxException e) {
640 std::cerr << "Impossible to get image format... Check error code: " << e.ToString().ToCString() << std::endl;
641 throw HVT_ERROR_UNKNOWN;
642 }
643 if (imgFormatDepth[i].m_iWidth != infoJSON.widthDepth || imgFormatDepth[i].m_iHeight != infoJSON.heightDepth) {
644 std::cerr << "Depth image formats do not correspond..." << std::endl;
645 throw HVT_ERROR_UNKNOWN;
646 }
647
648 pitchInCudaRGB[i] = cuImgs[i].GetPitch();
649 pitchInCudaDepth[i] = cuDepths[i].GetPitch();
650
651 std::cout << "RGB cam/file " << i << " :" << imgFormatRGB[i].ToString().ToCString() << std::endl;
652 std::cout << "Depth cam/file " << i << " :" << imgFormatDepth[i].ToString().ToCString() << std::endl;
653 }
654 isFormatLoaded = true;
655}
656
657void RaytrixStreamer::launchAsyncLoader(bool needToWaitThreadToFinish) {
658 std::thread t;
659 if (infoJSON.loadFromRawFile) {
660 t = std::thread(&RaytrixStreamer::asyncLoaderImageFromSeqFiles, this);
661 }
662 else {
663 t = std::thread(&RaytrixStreamer::asyncLoaderImage, this);
664 }
665 if (needToWaitThreadToFinish) t.join();
666 else t.detach();
667}
668
669void RaytrixStreamer::asyncLoaderImageFromOneSeqFile(size_t idxCam) {
670 try {
671 // Upload the image as the new ray image of all further CUDA computations
672 if (seqReaders[idxCam].GetFrameCount() == seqReaders[idxCam].GetLastFrameIndex() + 1) {
673 seqReaders[idxCam].SetNextFrameIndex(0);
674 }
675 seqReaders[idxCam].ReadFrame(rayImages[idxCam], false);
676
677 xCudaComputes[idxCam].UploadRawImage(rayImages[idxCam]);
678 }
679 catch (Rx::CRxException e) {
680 std::cerr << "Impossible to read or upload ray image to the cuda compute ... Check error code: " << e.ToString().ToCString() << std::endl;
681 throw HVT_ERROR_UNKNOWN;
682 }
683}
684
685void RaytrixStreamer::asyncLoaderImageFromSeqFiles() {
686 bool* tmpSync = &imgsLoaded;
687 std::unique_lock<std::mutex> lckSync(syncMtx);
688 syncCV.wait(lckSync, [tmpSync] {return !(*tmpSync); });
689 std::thread* threads = new std::thread[infoJSON.numCams];
690 for (size_t i = 0; i < infoJSON.numCams; ++i) {
691 threads[i] = std::thread(&RaytrixStreamer::asyncLoaderImageFromOneSeqFile, this, i);
692 }
693 for (size_t i = 0; i < infoJSON.numCams; ++i) {
694 threads[i].join();
695 }
696 imgsLoaded = true;
697 lckSync.unlock();
698 syncCV.notify_all();
699 delete[] threads;
700}
701
702
703void RaytrixStreamer::asyncLoaderImage() {
704 size_t idxRefCam = 0;
705 bool loadImgSuccess = false;
706
707 bool* tmpSync = &imgsLoaded;
708 std::unique_lock<std::mutex> lckSync(syncMtx);
709 syncCV.wait(lckSync, [tmpSync] {return !(*tmpSync); });
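	// Keep re-triggering the per-camera loader threads until the captured images of the cameras
	// lie within MAX_TIME_BETWEEN_CAMS of the reference camera's timestamp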
710 while (!loadImgSuccess) {
711 for (size_t i = 0; i < infoJSON.numCams; ++i) {
712 std::thread(&RaytrixStreamer::asyncLoaderImageOneCam, this, i).detach();
713 }
714 if (infoJSON.numCams == 1) {
715 bool* tmp = canLoadImgs;
716 std::unique_lock<std::mutex> lck(loaderMtxs[0]);
717 loaderCVs[0].wait(lck, [tmp] {return !(*tmp); });
718 canLoadImgs[0] = true;
719 lck.unlock();
720 loaderCVs[0].notify_all();
721 break;
722 }
723
724 bool* tmpLoaderRefCam = canLoadImgs + idxRefCam;
725 std::unique_lock<std::mutex> lckLoaderRefCam(loaderMtxs[idxRefCam]);
726 loaderCVs[idxRefCam].wait(lckLoaderRefCam, [tmpLoaderRefCam] {return !(*tmpLoaderRefCam); });
727 for (size_t i = 0; i < infoJSON.numCams; ++i) {
728 if (i == idxRefCam) continue;
729
730 bool* tmp = canLoadImgs + i;
731 std::unique_lock<std::mutex> lckLoader(loaderMtxs[i]);
732 loaderCVs[i].wait(lckLoader, [tmp] {return !(*tmp); });
733
734 loadImgSuccess = (abs(capturedImages[i].GetTimestamp() - capturedImages[idxRefCam].GetTimestamp()) < MAX_TIME_BETWEEN_CAMS);
735
736 canLoadImgs[i] = true;
737 lckLoader.unlock();
738 loaderCVs[i].notify_all();
739 }
740 canLoadImgs[idxRefCam] = true;
741 lckLoaderRefCam.unlock();
742 loaderCVs[idxRefCam].notify_all();
743 }
744 try {
745 for (size_t i = 0; i < infoJSON.numCams; ++i) {
746 xCudaComputes[i].UploadRawImage(capturedImages[i]);
747 }
748 }
749 catch (Rx::CRxException e) {
750 std::cerr << "Impossible to upload raw image to the cuda compute ... Check error code: " << e.ToString().ToCString() << std::endl;
751 throw HVT_ERROR_UNKNOWN;
752 }
753 imgsLoaded = true;
754 lckSync.unlock();
755 syncCV.notify_all();
756}
757
758void RaytrixStreamer::asyncLoaderImageOneCam(size_t idxCam) {
759 bool* tmp = canLoadImgs + idxCam;
760 std::unique_lock<std::mutex> lck(loaderMtxs[idxCam]);
761 loaderCVs[idxCam].wait(lck, [tmp] { return *tmp; });
762
763 bool imgLoadSucess = false;
764 while (!imgLoadSucess) {
765 if(camCaptureMode == Rx::ECamTriggerMode::Software_SnapShot){
766 // Trigger the camera
767 auto& cam = xCamServer.GetCamera(idxCam);
768 cam.Trigger();
769 }
770 // Wait for the image buffer to be not empty
771 if (!camBuffers[idxCam].WaitForNotEmpty(MAX_TIME_WAIT_BUFFER)) continue;
772
773 // Try to move the captured image out of the buffer
774 imgLoadSucess = camBuffers[idxCam].MoveOut(capturedImages[idxCam]);
775 }
776 canLoadImgs[idxCam] = false;
777 lck.unlock();
778 loaderCVs[idxCam].notify_all();
779}
780
781void RaytrixStreamer::acquireRGBD() {
782 try {
783 for (size_t i = 0; i < infoJSON.numCams; ++i) {
784 if (!xCudaComputes[i].Compute_TotalFocus(idSpaceRGB)) {
785 std::cerr << "Impossible to compute total focus image and depth ... Camera: " << i << std::endl;
786 throw HVT_ERROR_UNKNOWN;
787 }
788 if (!xCudaComputes[i].Compute_Depth3D(idSpaceDepth3D[0], idSpaceDepth3D[1])) {
789 std::cerr << "Impossible to compute depth ... Camera: " << i << std::endl;
790 throw HVT_ERROR_UNKNOWN;
791 }
792 }
793 }
794 catch (Rx::CRxException e) {
795 std::cerr << "Impossible to read or compute... Check error code: " << e.ToString().ToCString() << std::endl;
796 throw HVT_ERROR_UNKNOWN;
797 }
798 for (size_t i = 0; i < infoJSON.numCams; ++i) {
799 cuImgs[i] = xCudaComputes[i].GetCudaImage(idRGB);
800 cuDepths[i] = xCudaComputes[i].GetCudaImage(idDepth);
801 }
802 if (!isFormatLoaded) {
803 getRGBDFormats();
804 }
805 for (size_t i = 0; i < infoJSON.numCams; ++i) {
806 switch (imgFormatRGB[i].m_eDataType) {
807 case Rx::EDataType::UByte:
808 imgUByteRGB[i] = (uchar*)cuImgs[i].GetData();
809 break;
810 case Rx::EDataType::UShort:
811 imgRGB[i] = (unsigned short*)cuImgs[i].GetData();
812 break;
813 default:
814 std::cerr << "Unknown RGBA format... Check format: " << imgFormatRGB[i].ToString().ToCString() << std::endl;
815 throw HVT_ERROR_UNKNOWN;
816 }
817
818 switch (imgFormatDepth[i].m_eDataType) {
819 case Rx::EDataType::UShort:
820 imgDepth[i] = (unsigned short*)cuDepths[i].GetData();
821 break;
822 case Rx::EDataType::Float:
823 imgFloatDepth[i] = (float*)cuDepths[i].GetData();
824 break;
825 default:
826 std::cerr << "Unknown depth format... Check format: " << imgFormatDepth[i].ToString().ToCString() << std::endl;
827 throw HVT_ERROR_UNKNOWN;
828 }
829 }
830}
831
832void RaytrixStreamer::refineRGBD() {
833 for (size_t i = 0; i < infoJSON.numCams; ++i) {
834 switch (imgFormatRGB[i].m_eDataType) {
835 case Rx::EDataType::UByte:
836 if (pitchInCudaRGB[i] != sizeof(uchar) * 4 * infoJSON.width) {
837 removePitch<uchar>(imgUByteRGB[i], finalRGB[i], infoJSON.width, infoJSON.height, pitchInCudaRGB[i] / (sizeof(uchar) * 4), 4, cudaStreams[i]);
838 }
839 else {
840 gpuErrchk(cudaMemcpyAsync(finalRGB[i], imgUByteRGB[i], sizeof(uchar) * 4 * infoJSON.width * infoJSON.height, cudaMemcpyDeviceToDevice, cudaStreams[i]));
841 }
842 break;
843 case Rx::EDataType::UShort:
844 uShort2uChar(imgRGB[i], finalRGB[i], infoJSON.width, infoJSON.height, pitchInCudaRGB[i] / (sizeof(USHORT) * 4), 4, cudaStreams[i]);
845 break;
846 default:
847 std::cerr << "Unknown RGBA format... Check format: " << imgFormatRGB[i].ToString().ToCString() << std::endl;
848 throw HVT_ERROR_UNKNOWN;
849 }
850
851 switch (imgFormatDepth[i].m_eDataType) {
852 case Rx::EDataType::UShort:
853 break;
854 case Rx::EDataType::Float:
855 scaleAddDataArrayUChannel<float>(imgFloatDepth[i], finalDepth[i], infoJSON.widthDepth, infoJSON.heightDepth, pitchInCudaDepth[i] / (sizeof(float) * 4), 4, channelDepth, -1 / 1000.0f, -referencePlaneToCameraPlane[i], cudaStreams[i]);
856 break;
857 default:
858 std::cerr << "Unknown depth format... Check format: " << imgFormatDepth[i].ToString().ToCString() << std::endl;
859 throw HVT_ERROR_UNKNOWN;
860 }
861 temporalConsistencyAdjustement<float>(prevFinalDepth[i], finalDepth[i], infoJSON.widthDepth, infoJSON.heightDepth, 5, 0.05, 0.5, cudaStreams[i]);
862 }
863}
864
865/*----------------------------------------------------------------------*/
866
867/*--------- Hovitron API functions*/
868
869void RaytrixStreamer::uploadFrame(size_t streamId, ReadStream& stream)
870{
871 auto imageIndex = slotStreamingIndex;
872
873 auto& dstColorSlot = stream.colorSlots.at(imageIndex);
874 auto& dstDepthSlot = stream.depthSlots.at(imageIndex);
875
876 //gpuErrchk(cudaEventSynchronize(cudaEvents[streamId]));
877 copyColor(dstColorSlot.cuda_ptr_surf, finalRGB[streamId], stream.width_color, stream.height_color, cudaStreams[streamId]);
878 copyDepth(dstDepthSlot.cuda_ptr_surf, finalDepth[streamId], stream.width_depth, stream.height_depth, cudaStreams[streamId]);
879 gpuErrchk(cudaEventRecord(cudaEvents[streamId], cudaStreams[streamId]));
880}
881
882void RaytrixStreamer::streamingLoop() {
883 gpuErrchk(cudaSetDevice(vulkanGPU));
884 for (size_t i = 0; i < infoJSON.numCams; ++i) {
885 if (!infoJSON.loadFromRawFile) {
886 // Start cameras
887 std::cout << "Start camera " << i << std::endl;
888 xCamServer.GetCamera(i).Start(camCaptureMode);
889 }
890 }
891
892 beginAvgStreaming = std::chrono::steady_clock::now();
893 std::chrono::steady_clock::time_point begin;
894
895 launchAsyncLoader(true);
896
897 std::unique_lock<std::mutex> lck(syncMtx, std::defer_lock);
898 bool* cond = &imgsLoaded;
899 while (running) {
900 begin = std::chrono::steady_clock::now();
901
902 lck.lock();
903 syncCV.wait(lck, [cond] { return *cond; });
904
905 acquireRGBD();
906
907 *cond = false;
908 lck.unlock();
909 syncCV.notify_all();
910
911 launchAsyncLoader();
912
913 refineRGBD();
914
915 for (size_t i = 0; i < infoJSON.numCams; ++i) {
916 uploadFrame(i, readStreams[i]);
917
918 readStreams[i].streamedFrame = readStreams[i].frameIndex(begin - beginAvgStreaming);
919 }
920 // Commit
921 swapStreamingToPending();
922 numFrames++;
923
924 endStreaming = std::chrono::steady_clock::now();
925
926 if ((int)numFrames % 100 == 0) {
927 addMsgToPrint("Streaming Loop fps", std::chrono::duration_cast<std::chrono::milliseconds>(endStreaming - beginAvgStreaming).count(), numFrames);
928 printFPS();
929 resetPrintMsg();
930 }
931 }
932 std::this_thread::yield();
933}
934
935void RaytrixStreamer::swapStreamingToPending() {
936 std::lock_guard<std::mutex> l(indicesMutex); // JS
937 std::swap(slotPendingIndex, slotStreamingIndex);
938 newDataInPending = true;
939};
940
941void RaytrixStreamer::swapPendingToReading() {
942 std::lock_guard<std::mutex> l(indicesMutex);
943 if (newDataInPending)
944 {
945 std::swap(slotPendingIndex, slotReadingIndex);
946 newDataInPending = false;
947 // printf("Swapped %d to reading\n", slotReadingIndex);
948 }
949};
950/*--------------------------------*/
951
952/*---------- Debug functions*/
953
954template<typename T> void RaytrixStreamer::writeInFileWindow(T* dataCorrect, T* dataDebug, size_t width, size_t height, size_t sizeWindow, int numCam) {
955 if (width / 2 + sizeWindow > width || height / 2 + sizeWindow > height) {
956 std::cerr << "Specify a smaller window size..." << std::endl;
957 throw HVT_ERROR_UNKNOWN;
958 }
959 if (dataCorrect == NULL || dataDebug == NULL) {
960 std::cerr << "Specify allocated array ... " << std::endl;
961 throw HVT_ERROR_UNKNOWN;
962 }
963
964 const char outCorrect[] = "../RaytrixStreamer/dataCorrect.txt";
965 const char outDebug[] = "../RaytrixStreamer/dataDebug.txt";
966
967 fs::path pathCorrect = outCorrect;
968 std::ofstream correctDataStream{ pathCorrect, std::ofstream::out | std::ofstream::trunc };
969 if (!correctDataStream.good()) {
970 std::cerr << "Impossible to open data correct file, check path... Path given: " << pathCorrect << std::endl;
971 throw HVT_ERROR_NOT_FOUND;
972 }
973 fs::path pathDebug = outDebug;
974 std::ofstream debugDataStream{ pathDebug, std::ofstream::out | std::ofstream::trunc };
975 if (!debugDataStream.good()) {
976 std::cerr << "Impossible to open data debug file, check path... Path given: " << pathDebug << std::endl;
977 throw HVT_ERROR_NOT_FOUND;
978 }
979
980 for (size_t i = 0; i < sizeWindow; ++i) {
981 for (size_t j = 0; j < sizeWindow; ++j) {
982 correctDataStream << dataCorrect[(width / 2) * ((height / 2) + i) + j] << ",";
983 debugDataStream << dataDebug[(width / 2) * ((height / 2) + i) + j] << ",";
984 }
985 correctDataStream << std::endl;
986 debugDataStream << std::endl;
987 }
988
989 correctDataStream.close();
990 debugDataStream.close();
991}
992
993void RaytrixStreamer::debugFloatDepth() {
994 Rx::CRxImage imgTmp;
995 float* imgTmp2 = new float[infoJSON.widthDepth * infoJSON.heightDepth];
996 int width, height;
997 gpuErrchk(cudaDeviceSynchronize());
998 for (size_t i = 0; i < infoJSON.numCams; ++i) {
999 try {
1000 xCudaComputes[i].Download(Rx::LFR::EImage::DepthMap_View_Virtual, imgTmp);
1001 imgTmp.GetSize(width, height);
1002 }
1003 catch (Rx::CRxException e) {
1004 std::cerr << "Error while debugging depth... Check error code: " << e.ToString().ToCString() << std::endl;
1005 throw HVT_ERROR_UNKNOWN;
1006 }
1007 std::cout << "Downloaded img " << i << ": Width: " << width << " Height: " << height << " Nb of bytes: " << imgTmp.GetByteCount() << " Format: " << imgTmp.GetFormat().ToString().ToCString() << std::endl;
1008 gpuErrchk(cudaMemcpy(imgTmp2, finalDepth[i], infoJSON.widthDepth * infoJSON.heightDepth * sizeof(float), cudaMemcpyDeviceToHost));
1009 std::cout << "CUDA img " << i << ": Width: " << infoJSON.widthDepth << " Height : " << infoJSON.heightDepth << " Nb of bytes : " << infoJSON.widthDepth * infoJSON.heightDepth * sizeof(float) << " Format: " << imgFormatDepth[i].ToString().ToCString() << std::endl;
1010 gpuErrchk(cudaDeviceSynchronize());
1011 writeInFileWindow<float>((float*)imgTmp.GetDataPtr(), imgTmp2, infoJSON.widthDepth, infoJSON.heightDepth, 100, i);
1012 }
1013 delete[] imgTmp2;
1014}
1015/*--------------------------*/
1016
1017/*----------- ReadStream function*/
1018
1019int RaytrixStreamer::ReadStream::frameIndex(Clock::duration time) const
1020{
1021 return (time / framePeriod);
1022}
1023
1024bool RaytrixStreamer::ReadStream::nextFrameReady(Clock::duration time) const
1025{
1026 return streamedFrame != frameIndex(time);
1027}
1028/*-------------------------------*/
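/* Worked example (illustrative): initStreamParameters() sets framePeriod to 1 s / 30 ≈ 33,333,333 ns.
   For an elapsed time of 1 s, frameIndex() returns 1,000,000,000 ns / 33,333,333 ns = 30 (integer duration
   division), and nextFrameReady() stays true until the streaming loop has recorded streamedFrame = 30. */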
1029
1030/*
1031
1032 RaytrixStreamer class functions
1033
1034 ------------------------------------ */
1035
1036
1037
1038 /* ---------------------------
1039
1040 Functions called by the C entry points. Each Hovitron streamer DLL exposes the same entry points; in the functions below we define what is needed for this particular DLL.
1041
1042 */
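/* Illustrative sketch (an assumption, not code from this DLL): the call order a host application
   might follow against these entry points, inferred from the checks below (stream images must be
   exported before hvtStartStreaming, which otherwise throws HVT_ERROR_CALL_ORDER). Helper variables
   such as imagesExportInfo, semaphoreExportInfo, semaphore, acquireInfo and waitSemaphore are
   hypothetical and would be prepared by the host renderer. */
#if 0
HvtStreamingContextCreateInfo createInfo{};        // graphicsDeviceUUID comes from the host's Vulkan device
createInfo.headerVersion = HVT_HEADER_VERSION;
HvtStreamingContext ctx;
hvtCreateStreamingContext(&createInfo, &ctx);

uint32_t streamCount = 0;
hvtEnumerateStreamsParameters(ctx, &streamCount, nullptr);       // first call: query the stream count
std::vector<HvtRGBDStreamParameters> params(streamCount);
hvtEnumerateStreamsParameters(ctx, &streamCount, params.data()); // second call: fill the parameters

// For every stream: export its color and depth image slots, then the synthesis semaphore.
// hvtExportStreamImages(ctx, &imagesExportInfo);
// hvtExportSemaphore(ctx, &semaphoreExportInfo, &semaphore);

hvtStartStreaming(ctx);
// Per rendered frame:
//   hvtAcquireStreamsFrames(ctx, &acquireInfo);
//   hvtReleaseStreamsFrames(ctx, waitSemaphore);
hvtStopStreaming(ctx);
hvtDestroyStreamingContext(ctx);
#endif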
1043
1044 /*-------------- Constructor/Destructor */
1045
1046RaytrixStreamer::RaytrixStreamer(const uint8_t uuid[VK_UUID_SIZE])
1047{
1048 findVulkanGPU(uuid);
1049 readInfoFromJSON();
1050 initRxSDK();
1051 if (infoJSON.loadFromRawFile) {
1052 initRxRayFile();
1053 }
1054 else {
1055 initRxCams();
1056 }
1057 initRxCudaCompute();
1058 initStreamParameters();
1059 initArrayCuda();
1060 initMutex();
1061}
1062
1063RaytrixStreamer::~RaytrixStreamer()
1064{
1065 std::cout << "---- Destroy RaytrixStreamer dll ----" << std::endl;
1066 if (synthesisSem != NULL)
1067 delete synthesisSem;
1068 for (size_t i = 0; i < infoJSON.numCams; ++i) {
1069 xCamServer.GetCamera(i).Close();
1070 }
1071 if (infoJSON.loadFromRawFile) {
1072 delete[] seqReaders;
1073 delete[] rayImages;
1074 }
1075 Rx::LFR::CLightFieldRuntime::End();
1076}
1077/*--------------------------------------*/
1078
1079void RaytrixStreamer::enumerateStreamsParameters(uint32_t* streamsCount, HvtRGBDStreamParameters* parameters) const
1080{
1081 cudaSetDevice(vulkanGPU);
1082 if (!parameters)
1083 {
1084 *streamsCount = readStreams.size();
1085 return;
1086 }
1087
1088 if (*streamsCount != readStreams.size())
1089 {
1090 throw HvtResult::HVT_ERROR_WRONG_BUFFER_SIZE;
1091 }
1092
1093 int i = 0;
1094 for (const auto& readStream : readStreams)
1095 {
1096 HvtRGBDStreamParameters streamParams{}; HvtRGBDStreamParameters* SP = &streamParams; // assumption: SP points to a locally filled parameters struct
1097 SP->colorResolution = { (uint32_t)readStream.width_color, (uint32_t)readStream.height_color };
1098 SP->depthResolution = { (uint32_t)readStream.width_depth, (uint32_t)readStream.height_depth };
1099 SP->nearDepth = readStream.anear;
1100 SP->farDepth = readStream.afar;
1101 SP->colorFormat = (HvtImageFormat)readStream.colorFormat;
1102 SP->depthFormat = (HvtImageFormat)readStream.depthFormat;
1103 SP->slotCount = numSlots;
1104 SP->projectionType = readStream.projectionType;
1105
1106 parameters[i] = *SP;
1107 //snprintf(parameters->name, HVT_MAX_STREAM_NAME_LENGHT, "%s", deviceNames.at(i).c_str());
1108 ++i;
1109 }
1110}
1111
1112void RaytrixStreamer::importStreamImages(const HvtStreamImagesExportInfo& exportInfos)
1113{
1114 cudaSetDevice(vulkanGPU);
1115 auto isDepth = (bool)exportInfos.depth;
1116 auto& stream = readStreams.at(exportInfos.streamIndex);
1117 //auto format = isDepth ? stream.depthFormat : stream.colorFormat;
1118 // auto size = isDepth ? stream.depthFrameSize : stream.colorFrameSize;
1119 auto& slots = isDepth ? stream.depthSlots : stream.colorSlots;
1120 auto mipLevels = 1;
1121
1122 // std::cout << "Init memory buffers CUDA" << std::endl;
1123
1124 if (exportInfos.imagesCount != numSlots)
1125 {
1126 throw HvtResult::HVT_ERROR_WRONG_BUFFER_SIZE;
1127 }
1128
1129 for (int i = 0; i < numSlots; ++i)
1130 {
1131 auto mem = exportInfos.pImages[i];
1132 // std::cout << "External Memory loop" << std::endl;
1133
1134 cudaExternalMemory_t cudaExtMemImageBuffer;
1135 cudaMipmappedArray_t cudaMipmappedImageArray;
1136
1137 cudaExternalMemoryHandleDesc cudaExtMemHandleDesc;
1138 memset(&cudaExtMemHandleDesc, 0, sizeof(cudaExtMemHandleDesc));
1139
1140 cudaExtMemHandleDesc.type = cudaExternalMemoryHandleTypeOpaqueWin32;
1141 cudaExtMemHandleDesc.handle.win32.handle = (HANDLE)mem.handle;
1142 cudaExtMemHandleDesc.size = mem.size;
1143
1144 gpuErrchk(cudaImportExternalMemory(&cudaExtMemImageBuffer, &cudaExtMemHandleDesc));
1145
1146 cudaExternalMemoryMipmappedArrayDesc externalMemoryMipmappedArrayDesc;
1147 memset(&externalMemoryMipmappedArrayDesc, 0, sizeof(externalMemoryMipmappedArrayDesc));
1148
1149 cudaExtent extent = isDepth ? make_cudaExtent(stream.width_depth, stream.height_depth, 0) : make_cudaExtent(stream.width_color, stream.height_color, 0);
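	// Channel description of the imported Vulkan image: one 32-bit float channel for depth, four 8-bit unsigned (RGBA) channels for color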
1150 cudaChannelFormatDesc formatDesc;
1151 formatDesc.x = isDepth ? 32 : 8;
1152 formatDesc.y = isDepth ? 0 : 8;
1153 formatDesc.z = isDepth ? 0 : 8;
1154 formatDesc.w = isDepth ? 0 : 8;
1155 formatDesc.f = isDepth ? cudaChannelFormatKindFloat : cudaChannelFormatKindUnsigned;
1156
1157 externalMemoryMipmappedArrayDesc.offset = 0;
1158 externalMemoryMipmappedArrayDesc.formatDesc = formatDesc;
1159 externalMemoryMipmappedArrayDesc.extent = extent;
1160 externalMemoryMipmappedArrayDesc.flags = 0;
1161 externalMemoryMipmappedArrayDesc.numLevels = mipLevels;
1162
1163 gpuErrchk(cudaExternalMemoryGetMappedMipmappedArray(&cudaMipmappedImageArray, cudaExtMemImageBuffer, &externalMemoryMipmappedArrayDesc));
1164
1165 cudaArray_t cudaMipLevelArray;
1166 cudaResourceDesc resourceDesc;
1167
1168 gpuErrchk(cudaGetMipmappedArrayLevel(&cudaMipLevelArray, cudaMipmappedImageArray, 0));
1169 // cudaMemcpy2DArrayToArray(cudaMipLevelArray, 0, 0, cudaMipLevelArray, 0, 0, stream.width * sizeof(uchar4), stream.height, cudaMemcpyDeviceToDevice);
1170
1171 memset(&resourceDesc, 0, sizeof(resourceDesc));
1172 resourceDesc.resType = cudaResourceTypeArray;
1173 resourceDesc.res.array.array = cudaMipLevelArray;
1174
1175 cudaSurfaceObject_t surfaceObject;
1176 gpuErrchk(cudaCreateSurfaceObject(&surfaceObject, &resourceDesc));
1177
1178 ImageSlot IS;
1179 IS.size = mem.size;
1180 IS.vk_handle = &mem.handle;
1181 IS.cuda_ptr_surf = surfaceObject;
1182
1183 slots.at(i) = IS;
1184 }
1185 (isDepth ? stream.importedDepth : stream.importedColor) = true;
1186}
1187
1188void RaytrixStreamer::importSemaphore(const HvtSemaphoreExportInfo& exportInfos)
1189{
1190 cudaSetDevice(vulkanGPU);
1191 if (exportInfos.semaphore == NULL)
1192 {
1193 synthesisSem->isValid = false;
1194 synthesisSem->sem = NULL;
1195 return;
1196 }
1197
1198 cudaExternalSemaphoreHandleDesc externalSemaphoreHandleDesc = {};
1199
1200 if (exportInfos.type & HvtSemaphoreType::HVT_SEMAPHORE_TYPE_OPAQUE_WIN32_BIT)
1201 {
1202 externalSemaphoreHandleDesc.type = cudaExternalSemaphoreHandleTypeOpaqueWin32;
1203 }
1204 else if (exportInfos.type & HvtSemaphoreType::HVT_SEMAPHORE_TYPE_OPAQUE_WIN32_KMT_BIT)
1205 {
1206 externalSemaphoreHandleDesc.type = cudaExternalSemaphoreHandleTypeOpaqueWin32Kmt;
1207 }
1208 else if (exportInfos.type & HvtSemaphoreType::HVT_SEMAPHORE_TYPE_SYNC_FD_BIT)
1209 {
1210 externalSemaphoreHandleDesc.type = cudaExternalSemaphoreHandleTypeOpaqueFd;
1211 }
1212
1213 externalSemaphoreHandleDesc.handle.win32.handle = (HANDLE)exportInfos.semaphore;
1214 externalSemaphoreHandleDesc.flags = 0;
1215
1216 cudaImportExternalSemaphore(&synthesisSem->sem, &externalSemaphoreHandleDesc);
1217 synthesisSem->isValid = true;
1218 // cudaWaitExternalSemaphoresAsync();
1219}
1220
1221void RaytrixStreamer::destroySemaphore(Semaphore* sem) const
1222{
1223 cudaSetDevice(vulkanGPU);
1224 delete sem;
1225}
1226
1227void RaytrixStreamer::startStreaming()
1228{
1229 //cudaSetDevice(vulkanGPU);
1230 // assert that we have everything ready for streaming
1231 for (auto& stream : readStreams)
1232 {
1233 if (!stream.importedColor || !stream.importedDepth)
1234 {
1235 throw HvtResult::HVT_ERROR_CALL_ORDER;
1236 }
1237 }
1238
1239 // Start worker
1240 running = true;
1241 std::cout << "Starting streaming" << std::endl;
1242 streamingThread = std::thread([&]
1243 { streamingLoop(); });
1244}
1245
1246void RaytrixStreamer::acquireStreamsFrames(const HvtAcquireStreamFramesInfo& infos)
1247{
1248 cudaSetDevice(vulkanGPU);
1249 if (infos.frameInfoCount != readStreams.size())
1250 {
1251 throw HvtResult::HVT_ERROR_WRONG_BUFFER_SIZE;
1252 }
1253
1254 swapPendingToReading();
1255
1256 auto imageIndex = slotReadingIndex;
1257
1258 for (int i = 0; i < infos.frameInfoCount; ++i)
1259 {
1260 gpuErrchk(cudaEventSynchronize(cudaEvents[i]));
1261
1262 auto& stream = readStreams.at(i);
1263 auto& desc = infos.pStreamFrameInfos[i];
1264 desc.extrinsics = stream.extrinsics;
1265 desc.intrinsics = stream.intrinsics;
1266 desc.imageIndex = imageIndex;
1267 }
1268}
1269
1270void RaytrixStreamer::releaseStreamsFrames(Semaphore* waitSem)
1271{
1272 std::cout << "release frames" << std::endl;
1273}
1274
1275void RaytrixStreamer::stopStreaming()
1276{
1277 cudaSetDevice(vulkanGPU);
1278 running = false;
1279 if (streamingThread.joinable())
1280 {
1281 streamingThread.join();
1282 }
1283}
1284
1285
1286
1288
1289template <typename Closure>
1290HvtResult exceptionFirewall(Closure&& clos)
1291{
1292 try
1293 {
1294 clos();
1295 }
1296 catch (HvtResult res)
1297 {
1298 return res;
1299 }
1300 catch (const std::exception& e)
1301 {
1302 std::cerr << "Caught exception at C boundary: \"" << e.what() << "\"" << std::endl;
1303 return HvtResult::HVT_ERROR_UNKNOWN;
1304 }
1305 catch (...)
1306 {
1307 return HvtResult::HVT_ERROR_UNKNOWN;
1308 }
1309 return HvtResult::HVT_SUCESS;
1310}
1311
1312template <typename T>
1313void checkNonNull(T ptr)
1314{
1315 if (!ptr)
1316 {
1317 throw HvtResult::HVT_ERROR_INVALID_HANDLE;
1318 }
1319}
1320
1322
1323extern "C"
1324{
1325
1332 HVTAPI_ATTR HvtResult HVTAPI_CALL hvtCreateStreamingContext(const HvtStreamingContextCreateInfo* createInfo, HvtStreamingContext* outStreamingContext)
1333 {
1334 //std::cout << "hvtCreateStreamingContext" << std::endl;
1335 return exceptionFirewall([&]
1336 {
1337 checkNonNull(createInfo);
1338
1339 if (createInfo->headerVersion != HVT_HEADER_VERSION) {
1340 throw HvtResult::HVT_ERROR_HEADER_VERSION;
1341 }
1342
1343 auto context = new RaytrixStreamer(createInfo->graphicsDeviceUUID);
1344 *outStreamingContext = context->to_handle(); });
1345 }
1346
1354 HVTAPI_ATTR HvtResult HVTAPI_CALL hvtEnumerateStreamsParameters(HvtStreamingContext streamingContext, uint32_t* pStreamParameterCount, HvtRGBDStreamParameters* pStreamParameters)
1355 {
1356 //std::cout << "hvtEnumerateStreamsParameters" << std::endl;
1357 return exceptionFirewall([&]
1358 {
1359 auto context = RaytrixStreamer::check(streamingContext);
1360 checkNonNull(pStreamParameterCount);
1361 context->enumerateStreamsParameters(pStreamParameterCount, pStreamParameters); });
1362 }
1363
1370 HVTAPI_ATTR HvtResult HVTAPI_CALL hvtExportStreamImages(HvtStreamingContext streamingContext, const HvtStreamImagesExportInfo* exportInfos)
1371 {
1372 //std::cout << "hvtExportStreamImages" << std::endl;
1373 return exceptionFirewall([&]
1374 {
1375 auto context = RaytrixStreamer::check(streamingContext);
1376 checkNonNull(exportInfos);
1377 context->importStreamImages(*exportInfos); });
1378 }
1379
1387 HVTAPI_ATTR HvtResult HVTAPI_CALL hvtExportSemaphore(HvtStreamingContext streamingContext, const HvtSemaphoreExportInfo* exportInfo, HvtSemaphore* outSemaphore)
1388 {
1389 //std::cout << "hvtExportSemaphore" << std::endl;
1390 return exceptionFirewall([&]
1391 {
1392 auto context = RaytrixStreamer::check(streamingContext);
1393 checkNonNull(exportInfo);
1394 context->importSemaphore(*exportInfo); });
1395 }
1396
1403 HVTAPI_ATTR HvtResult HVTAPI_CALL hvtDestroySemaphore(HvtStreamingContext streamingContext, HvtSemaphore semaphore)
1404 {
1405 //std::cout << "hvtDestroySemaphore" << std::endl;
1406 return exceptionFirewall([&]
1407 {
1408 auto context = RaytrixStreamer::check(streamingContext);
1409 context->destroySemaphore(Semaphore::check(semaphore)); });
1410 }
1411
1417 HVTAPI_ATTR HvtResult HVTAPI_CALL hvtStartStreaming(HvtStreamingContext streamingContext)
1418 {
1419 //std::cout << "hvtStartStreaming" << std::endl;
1420 return exceptionFirewall([&]
1421 {
1422 auto context = RaytrixStreamer::check(streamingContext);
1423 context->startStreaming(); });
1424 }
1425
1432 HVTAPI_ATTR HvtResult HVTAPI_CALL hvtAcquireStreamsFrames(HvtStreamingContext streamingContext, const HvtAcquireStreamFramesInfo* infos)
1433 {
1434 // std::cout << "hvtAcquireStreamsFrames" << std::endl;
1435 return exceptionFirewall([&]
1436 {
1437 auto context = RaytrixStreamer::check(streamingContext);
1438 context->acquireStreamsFrames(*infos); });
1439 }
1440
1450 HVTAPI_ATTR HvtResult HVTAPI_CALL hvtReleaseStreamsFrames(HvtStreamingContext streamingContext, HvtSemaphore waitSemaphore)
1451 {
1452 // std::cout << "hvtReleaseStreamsFrames" << std::endl;
1453 return exceptionFirewall([&]
1454 {
1455 //auto context = AcqKiRT::check(streamingContext);
1456 // context->releaseStreamsFrames(Semaphore::opt_check(waitSemaphore));
1457 });
1458 }
1459
1465 HVTAPI_ATTR HvtResult HVTAPI_CALL hvtStopStreaming(HvtStreamingContext streamingContext)
1466 {
1467 //std::cout << "hvtStopStreaming" << std::endl;
1468 return exceptionFirewall([&]
1469 {
1470 auto context = RaytrixStreamer::check(streamingContext);
1471 context->stopStreaming();
1472 });
1473 }
1474
1480 HVTAPI_ATTR HvtResult HVTAPI_CALL hvtDestroyStreamingContext(HvtStreamingContext streamingContext)
1481 {
1482 //std::cout << "hvtDestroyStreamingContext" << std::endl;
1483 return exceptionFirewall([&]
1484 {
1485 auto context = RaytrixStreamer::check(streamingContext);
1486 delete context; });
1487 }
1488}