/** \file dab_tracker_azure_tools.cpp
*/
#include "dab_tracker_azure_tools.h"
#include "dab_tracker_azure_camera.h"
#include "ofxCvGrayscaleImage.h"
#include "ofxCvColorImage.h"
#include "ofxCvShortImage.h"
#include "dab_tracker_pointcloud.h"
#include <k4arecord/types.h>
#include <algorithm>
#include <chrono>
#include <thread>
using namespace dab;
using namespace dab::tracker;
AzureTools::AzureTools()
{
init();
}
AzureTools::~AzureTools()
{}
void
AzureTools::init()
{
initCameras();
}
void
AzureTools::initCameras()
{
// look for available devices and store serial numbers
int camCount = k4a_device_get_installed_count();
for (int cI = 0; cI < camCount; ++cI)
{
try
{
k4a::device _camera = k4a::device::open(static_cast<uint32_t>(cI));
std::string _serial = _camera.get_serialnum();
std::cout << "cam " << cI << " serial " << _serial << "\n";
mAvailabeDevices.push_back(_serial);
_camera.close();
}
catch (const k4a::error& e)
{
continue;
}
}
}
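// Note: serial numbers can only be queried from an opened device, which is
// why each device is briefly opened and closed here; the cached serials are
// later used by openCamera() to map a requested serial number back to a
// device index.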
void
AzureTools::capture(std::shared_ptr<AzureCamera> pCamera) throw (dab::Exception)
{
bool captureSuccess;
if (pCamera->mMoviePlaying == false)
{
// a zero timeout polls without blocking; get_capture() returns false when no new frame is ready yet
captureSuccess = pCamera->mNativeDevice.get_capture(&pCamera->mNativeCapture, std::chrono::milliseconds(0));
}
else
{
captureSuccess = pCamera->mNativeMoviePlayer.get_next_capture(&pCamera->mNativeCapture);
}
if (captureSuccess == false) return;
if (pCamera->mColorMode == true && pCamera->mDepthMode == false)
{
const k4a::image colorImage = pCamera->mNativeCapture.get_color_image();
if(colorImage.is_valid() == true)
{
pCamera->mNativeColorImage = colorImage;
try
{
updateColorFrame(pCamera, colorImage);
if (pCamera->mMovieRecording == true) pCamera->mNativeMovieRecorder.write_capture(pCamera->mNativeCapture);
}
catch (dab::Exception& e)
{
e += dab::Exception("TRACKER ERROR: failed to acquire color frame", __FILE__, __FUNCTION__, __LINE__);
throw e;
}
}
}
if (pCamera->mDepthMode == true && pCamera->mColorMode == false)
{
const k4a::image depthImage = pCamera->mNativeCapture.get_depth_image();
if (depthImage.is_valid() == true)
{
pCamera->mNativeDepthImage = depthImage;
try
{
updateDepthFrame(pCamera, depthImage);
if (pCamera->mMovieRecording == true) pCamera->mNativeMovieRecorder.write_capture(pCamera->mNativeCapture);
}
catch (dab::Exception& e)
{
e += dab::Exception("TRACKER ERROR: failed to acquire depth frame", __FILE__, __FUNCTION__, __LINE__);
throw e;
}
}
}
if (pCamera->mColorMode == true && pCamera->mDepthMode == true)
{
const k4a::image colorImage = pCamera->mNativeCapture.get_color_image();
const k4a::image depthImage = pCamera->mNativeCapture.get_depth_image();
if (colorImage.is_valid() == true && depthImage.is_valid() == true)
{
pCamera->mNativeColorImage = colorImage;
pCamera->mNativeDepthImage = depthImage;
pCamera->mDepthColorTransformation.depth_image_to_color_camera(depthImage, &pCamera->mNativeDepthColorImage);
try
{
updateColorFrame(pCamera, colorImage);
updateDepthFrame(pCamera, pCamera->mNativeDepthColorImage);
if (pCamera->mPointCloudMode == true)
{
pCamera->mDepthColorTransformation.depth_image_to_point_cloud(pCamera->mNativeDepthColorImage, K4A_CALIBRATION_TYPE_COLOR, &(pCamera->mNativePointCloudImage));
updatePointCloud(pCamera, pCamera->mNativePointCloudImage);
}
if (pCamera->mMovieRecording == true) pCamera->mNativeMovieRecorder.write_capture(pCamera->mNativeCapture);
}
catch (dab::Exception& e)
{
e += dab::Exception("TRACKER ERROR: failed to acquire depth frame", __FILE__, __FUNCTION__, __LINE__);
throw e;
}
}
}
}
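// A minimal per-frame usage sketch (hypothetical setup; the camera is assumed
// to have been initialized via initCamera() further below):
//
//   tools->capture(camera);
//   ofxCvColorImage* colorFrame = camera->mColorFrame; // valid in color mode
//   ofxCvShortImage* depthFrame = camera->mDepthFrame; // valid in depth mode
//
// capture() simply returns when no new frame is available, so it is safe to
// call it from a polling loop.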
void
AzureTools::updateColorFrame(std::shared_ptr<AzureCamera> pCamera, const k4a::image& pNativeColorFrame) throw (dab::Exception)
{
k4a_image_format_t colorImageFormat = pNativeColorFrame.get_format();
//std::cout << "colorImageFormat " << colorImageFormat << "\n";
std::array<unsigned int, 2> nativeFrameSize = { static_cast<unsigned int>(pNativeColorFrame.get_width_pixels()), static_cast<unsigned int>(pNativeColorFrame.get_height_pixels()) };
uint8_t* nativeData = const_cast<uint8_t*>(pNativeColorFrame.get_buffer());
std::array<unsigned int, 2> imageSize = { static_cast<unsigned int>(pCamera->mColorFrame->getWidth()), static_cast<unsigned int>(pCamera->mColorFrame->getHeight()) };
unsigned char* imageData = (unsigned char*)pCamera->mColorFrame->getCvImage()->imageData;
if (imageSize[0] != nativeFrameSize[0] || imageSize[1] != nativeFrameSize[1]) throw dab::Exception("TRACKER ERROR: Wrong Color Frame Size, expected: " + std::to_string(imageSize[0]) + " " + std::to_string(imageSize[1]) + " received " + std::to_string(nativeFrameSize[0]) + " " + std::to_string(nativeFrameSize[1]), __FILE__, __FUNCTION__, __LINE__);
int pixelCount = imageSize[0] * imageSize[1];
if (colorImageFormat == K4A_IMAGE_FORMAT_COLOR_BGRA32)
{
cv::Mat bgra = cv::Mat(nativeFrameSize[1], nativeFrameSize[0], CV_8UC4, nativeData);
cv::Mat mat;
cv::cvtColor(bgra, mat, cv::COLOR_BGRA2RGB);
memcpy(imageData, mat.data, pixelCount * 3);
pCamera->mColorFrame->flagImageChanged();
}
else if (colorImageFormat == K4A_IMAGE_FORMAT_COLOR_MJPG)
{
// based on code from https://gist.dreamtobe.cn/UnaNancyOwen/9f16ce7ea4c2673fe08b4ce4804fc209
// note: decoding of the jpeg formatted data has been observed to fail here,
// with cv::imdecode() returning an empty cv::Mat
std::vector<uint8_t> buffer(pNativeColorFrame.get_buffer(), pNativeColorFrame.get_buffer() + pNativeColorFrame.get_size());
cv::Mat mat = cv::imdecode(buffer, cv::IMREAD_COLOR);
if (mat.empty() == true) throw dab::Exception("TRACKER ERROR: failed to decode MJPG color frame", __FILE__, __FUNCTION__, __LINE__);
cv::cvtColor(mat, mat, cv::COLOR_BGR2RGB);
// copy the decoded pixel data; get_size() would be the compressed jpeg size
memcpy(imageData, mat.data, pixelCount * 3);
pCamera->mColorFrame->flagImageChanged();
}
else if (colorImageFormat == K4A_IMAGE_FORMAT_COLOR_NV12)
{
cv::Mat nv12 = cv::Mat(nativeFrameSize[1] + nativeFrameSize[1] / 2, nativeFrameSize[0], CV_8UC1, nativeData);
cv::Mat mat;
cv::cvtColor(nv12, mat, cv::COLOR_YUV2RGB_NV12);
memcpy(imageData, mat.data, pixelCount * 3);
pCamera->mColorFrame->flagImageChanged();
}
else if (colorImageFormat == K4A_IMAGE_FORMAT_COLOR_YUY2)
{
cv::Mat yuy2 = cv::Mat(nativeFrameSize[1], nativeFrameSize[0], CV_8UC2, nativeData);
cv::Mat mat;
cv::cvtColor(yuy2, mat, cv::COLOR_YUV2RGB_YUY2);
memcpy(imageData, mat.data, pixelCount * 3);
pCamera->mColorFrame->flagImageChanged();
}
else
{
throw dab::Exception("TRACKER ERROR: unsupported color format", __FILE__, __FUNCTION__, __LINE__);
}
}
void
AzureTools::updateDepthFrame(std::shared_ptr<AzureCamera> pCamera, const k4a::image& pNativeDepthFrame) throw (dab::Exception)
{
k4a_image_format_t depthImageFormat = pNativeDepthFrame.get_format();
//std::cout << "depthImageFormat " << depthImageFormat << "\n";
if (depthImageFormat == K4A_IMAGE_FORMAT_DEPTH16)
{
int frameWidth = pCamera->mDepthFrame->getWidth();
int frameHeight = pCamera->mDepthFrame->getHeight();
int pixelCount = frameWidth * frameHeight;
//unsigned char* buffer = (unsigned char*)pCamera->mDepthFrame->getCvImage()->imageData;
unsigned short* buffer = (unsigned short*)pCamera->mDepthFrame->getCvImage()->imageData;
std::array<unsigned int, 2> nativeFrameSize = { static_cast<unsigned int>(pNativeDepthFrame.get_width_pixels()), static_cast<unsigned int>(pNativeDepthFrame.get_height_pixels()) };
const uint16_t* nativeData = reinterpret_cast<const uint16_t *>(pNativeDepthFrame.get_buffer());
if (frameWidth != nativeFrameSize[0] || frameHeight != nativeFrameSize[1]) throw dab::Exception("TRACKER ERROR: Wrong Depth Frame Size, expected: " + std::to_string(frameWidth) + " " + std::to_string(frameHeight) + " received " + std::to_string(nativeFrameSize[0]) + " " + std::to_string(nativeFrameSize[1]), __FILE__, __FUNCTION__, __LINE__);
float depthScaling = 1.0f / (pCamera->mMaxDepth - pCamera->mMinDepth);
float normDepth;
float shrt_max = static_cast<float>(SHRT_MAX);
for (int pI = 0; pI < pixelCount; ++pI)
{
// normalize the raw depth reading (millimeters) to [0, 1] within the configured depth range
normDepth = (static_cast<float>(nativeData[pI]) - pCamera->mMinDepth) * depthScaling;
normDepth = std::max(std::min(normDepth, 1.0f), 0.0f);
//buffer[pI] = nativeData[pI];
buffer[pI] = static_cast<unsigned short>(normDepth * shrt_max);
}
pCamera->mDepthFrame->flagImageChanged();
}
else
{
throw dab::Exception("TRACKER ERROR: unsupported depth format", __FILE__, __FUNCTION__, __LINE__);
}
}
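// A worked example of the normalization above (range values assumed for
// illustration): with mMinDepth = 500 and mMaxDepth = 4000 (millimeters), a
// raw reading of 2250 mm maps to normDepth = (2250 - 500) / 3500 = 0.5 and is
// stored as 0.5 * SHRT_MAX = 16383 in the 16 bit depth frame; readings
// outside the range are clamped to 0 or SHRT_MAX.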
void
AzureTools::updatePointCloud(std::shared_ptr<AzureCamera> pCamera, const k4a::image& pNativePointCloudFrame) throw (dab::Exception)
{
PointCloud* pointCloud = pCamera->mPointCloud;
int frameWidth = pNativePointCloudFrame.get_width_pixels();
int frameHeight = pNativePointCloudFrame.get_height_pixels();
unsigned int pointCount = frameWidth * frameHeight;
if (pointCloud->size()[0] != pointCount) pointCloud->setSize({ pointCount });
std::vector<glm::vec3>& pointCloudPoints = pointCloud->points();
// the native point cloud image stores interleaved XYZ triplets as int16_t millimeter values
const int16_t *pointCloudImageData = reinterpret_cast<const int16_t *>(pNativePointCloudFrame.get_buffer());
for (unsigned int pI = 0; pI < pointCount; ++pI)
{
glm::vec3 point;
// convert from millimeters to meters
point.x = pointCloudImageData[3 * pI + 0] / 1000.0f;
point.y = pointCloudImageData[3 * pI + 1] / 1000.0f;
point.z = pointCloudImageData[3 * pI + 2] / 1000.0f;
//if (point.z == 0)
//{
// continue;
//}
pointCloudPoints[pI] = point;
}
}
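// Note: pixels without a valid depth reading come through as (0, 0, 0); the
// disabled skip above is kept commented out so that point indices stay
// aligned with the depth image pixels. A consumer that only wants valid
// points can filter them out, e.g.:
//
//   for (const glm::vec3& point : pointCloud->points())
//   {
//       if (point.z == 0.0f) continue; // no depth reading for this pixel
//       // ... use point ...
//   }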
void
AzureTools::startMovieRecording(std::shared_ptr<AzureCamera> pCamera, const std::string& pFileName) throw (dab::Exception)
{
try
{
// create recorder
pCamera->mNativeMovieRecorder = k4a::record::create(pFileName.c_str(), pCamera->mNativeDevice, pCamera->mNativeConfig);
if (pCamera->mColorMode == true)
{
std::string trackName = "COLORTRACK";
std::string codecId = "V_MPEG4/ISO/ASP"; // for all ids see https://www.matroska.org/technical/codec_specs.html
const k4a::image colorImage = pCamera->mNativeCapture.get_color_image();
std::array<unsigned int, 2> nativeFrameSize = { static_cast<unsigned int>(colorImage.get_width_pixels()), static_cast<unsigned int>(colorImage.get_height_pixels()) };
// camera_fps is a k4a_fps_t enum (K4A_FRAMES_PER_SECOND_5/15/30), not a numeric frame rate
int frameRate = pCamera->mNativeConfig.camera_fps == K4A_FRAMES_PER_SECOND_5 ? 5 : (pCamera->mNativeConfig.camera_fps == K4A_FRAMES_PER_SECOND_15 ? 15 : 30);
AzureMovieHeader codec_header;
createMovieHeader(nativeFrameSize, &codec_header);
k4a_record_video_settings_t video_settings;
video_settings.width = nativeFrameSize[0];
video_settings.height = nativeFrameSize[1];
video_settings.frame_rate = frameRate;
pCamera->mNativeMovieRecorder.add_custom_video_track(trackName.c_str(),
codecId.c_str(),
(uint8_t *)(&codec_header),
sizeof(codec_header),
&video_settings);
}
if (pCamera->mDepthMode == true)
{
std::string trackName = "DEPTHTRACK";
std::string codecId = "V_MPEG4/ISO/ASP"; // for all ids see https://www.matroska.org/technical/codec_specs.html
const k4a::image depthImage = pCamera->mNativeCapture.get_depth_image();
std::array<unsigned int, 2> nativeFrameSize = { static_cast<unsigned int>(depthImage.get_width_pixels()), static_cast<unsigned int>(depthImage.get_height_pixels()) };
// camera_fps is a k4a_fps_t enum (K4A_FRAMES_PER_SECOND_5/15/30), not a numeric frame rate
int frameRate = pCamera->mNativeConfig.camera_fps == K4A_FRAMES_PER_SECOND_5 ? 5 : (pCamera->mNativeConfig.camera_fps == K4A_FRAMES_PER_SECOND_15 ? 15 : 30);
AzureMovieHeader codec_header;
createMovieHeader(nativeFrameSize, &codec_header);
k4a_record_video_settings_t video_settings;
video_settings.width = nativeFrameSize[0];
video_settings.height = nativeFrameSize[1];
video_settings.frame_rate = frameRate;
pCamera->mNativeMovieRecorder.add_custom_video_track(trackName.c_str(),
codecId.c_str(),
(uint8_t *)(&codec_header),
sizeof(codec_header),
&video_settings);
}
pCamera->mNativeMovieRecorder.write_header();
}
catch (const std::exception& e)
{
std::string error = e.what();
throw dab::Exception("Camera Error: " + error, __FILE__, __FUNCTION__, __LINE__);
}
}
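// A minimal recording sketch (assumes the caller toggles mMovieRecording,
// which makes capture() pass each frame to write_capture()):
//
//   tools->startMovieRecording(camera, "recording.mkv");
//   camera->mMovieRecording = true;
//   // ... run the capture loop for the desired duration ...
//   camera->mMovieRecording = false;
//   tools->stopMovieRecording(camera);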
void
AzureTools::stopMovieRecording(std::shared_ptr<AzureCamera> pCamera) throw (dab::Exception)
{
try
{
pCamera->mNativeMovieRecorder.flush();
pCamera->mNativeMovieRecorder.close();
}
catch (const std::exception& e)
{
std::string error = e.what();
throw dab::Exception("Camera Error: " + error, __FILE__, __FUNCTION__, __LINE__);
}
}
bool
AzureTools::checkCameraAvailable(const std::string& pSerialNr)
{
return std::find(mAvailabeDevices.begin(), mAvailabeDevices.end(), pSerialNr) != mAvailabeDevices.end();
}
bool
AzureTools::checkCameraOpen(const std::string& pSerialNr)
{
return mCameras.find(pSerialNr) != mCameras.end();
}
void
AzureTools::openCamera(std::shared_ptr<AzureCamera> pCamera) throw (dab::Exception)
{
if (mAvailabeDevices.size() == 0) throw dab::Exception("Camera Error: No Azure Kinect Cameras available", __FILE__, __FUNCTION__, __LINE__);
if (mAvailabeDevices.size() == mCameras.size()) throw dab::Exception("Camera Error: All Azure Kinect Cameras already open", __FILE__, __FUNCTION__, __LINE__);
// look for suitable camera number to open
int openCameraNr = -1;
if (pCamera->mSerialNr.empty() == false)
{
if (checkCameraAvailable(pCamera->mSerialNr) == false) throw dab::Exception("Camera Error: Azure Kinect Camera with Serial Nr " + pCamera->mSerialNr + " not available", __FILE__, __FUNCTION__, __LINE__);
if (checkCameraOpen(pCamera->mSerialNr) == true) throw dab::Exception("Camera Error: Azure Kinect Camera with Serial Nr " + pCamera->mSerialNr + " already open", __FILE__, __FUNCTION__, __LINE__);
pCamera->mDeviceNr = std::find(mAvailabeDevices.begin(), mAvailabeDevices.end(), pCamera->mSerialNr) - mAvailabeDevices.begin();
}
else if (pCamera->mDeviceNr >= 0)
{
if (pCamera->mDeviceNr >= static_cast<int>(mAvailabeDevices.size())) throw dab::Exception("Camera Error: device nr " + std::to_string(pCamera->mDeviceNr) + " exceeds number of available Kinect Azure Cameras: " + std::to_string(mAvailabeDevices.size()), __FILE__, __FUNCTION__, __LINE__);
pCamera->mSerialNr = mAvailabeDevices[pCamera->mDeviceNr];
if (checkCameraOpen(pCamera->mSerialNr) == true) throw dab::Exception("Camera Error: Azure Kinect Camera with Serial Nr " + pCamera->mSerialNr + " already open", __FILE__, __FUNCTION__, __LINE__);
}
else
{
for (openCameraNr = 0; openCameraNr < mAvailabeDevices.size(); ++openCameraNr)
{
if (checkCameraOpen(mAvailabeDevices[openCameraNr]) == false)
{
pCamera->mSerialNr = mAvailabeDevices[openCameraNr];
pCamera->mDeviceNr = openCameraNr;
break;
}
}
}
try
{
pCamera->mNativeDevice = k4a::device::open(pCamera->mDeviceNr);
mCameras[pCamera->mSerialNr] = pCamera;
}
catch (const std::exception&)
{
throw dab::Exception("Camera Error: Failed to open Azure Kinect Device Nr " + std::to_string(pCamera->mDeviceNr) + " Serial Nr " + pCamera->mSerialNr, __FILE__, __FUNCTION__, __LINE__);
}
}
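// A minimal opening sketch (the serial number shown is a placeholder): a
// camera can be selected by serial number, by device index, or left
// unspecified to take the first free device:
//
//   auto camera = std::make_shared<AzureCamera>();
//   camera->mSerialNr = "000123456789"; // select by serial number
//   // or: camera->mDeviceNr = 0;       // select by device index
//   tools->openCamera(camera);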
void
AzureTools::configureCamera(std::shared_ptr<AzureCamera> pCamera) throw (dab::Exception)
{
// K4A_DEVICE_CONFIG_INIT_DISABLE_ALL holds the default initialized config
// values, which serve here as "not set by the user" sentinels
// fps settings
if (pCamera->mNativeConfig.camera_fps == K4A_DEVICE_CONFIG_INIT_DISABLE_ALL.camera_fps)
{
pCamera->mNativeConfig.camera_fps = K4A_FRAMES_PER_SECOND_30;
}
// color settings
if (pCamera->mColorMode == true)
{
if (pCamera->mNativeConfig.color_resolution == K4A_DEVICE_CONFIG_INIT_DISABLE_ALL.color_resolution)
{
//pCamera->mNativeConfig.color_resolution = K4A_COLOR_RESOLUTION_1080P;
pCamera->mNativeConfig.color_resolution = K4A_COLOR_RESOLUTION_720P;
}
if (pCamera->mNativeConfig.color_format == K4A_DEVICE_CONFIG_INIT_DISABLE_ALL.color_format)
{
pCamera->mNativeConfig.color_format = K4A_IMAGE_FORMAT_COLOR_BGRA32;
//pCamera->mNativeConfig.color_format = K4A_IMAGE_FORMAT_COLOR_MJPG;
//pCamera->mNativeConfig.color_format = K4A_IMAGE_FORMAT_COLOR_NV12;
//pCamera->mNativeConfig.color_format = K4A_IMAGE_FORMAT_COLOR_YUY2;
}
}
else if (pCamera->mNativeConfig.color_resolution != K4A_DEVICE_CONFIG_INIT_DISABLE_ALL.color_resolution)
{
pCamera->mColorMode = true;
if (pCamera->mNativeConfig.color_format == K4A_DEVICE_CONFIG_INIT_DISABLE_ALL.color_format)
{
pCamera->mNativeConfig.color_format = K4A_IMAGE_FORMAT_COLOR_BGRA32;
}
}
// depth settings
if (pCamera->mDepthMode == true)
{
if (pCamera->mNativeConfig.depth_mode == K4A_DEVICE_CONFIG_INIT_DISABLE_ALL.depth_mode)
{
pCamera->mNativeConfig.depth_mode = K4A_DEPTH_MODE_NFOV_UNBINNED;
}
}
else if (pCamera->mNativeConfig.depth_mode != K4A_DEVICE_CONFIG_INIT_DISABLE_ALL.depth_mode)
{
pCamera->mDepthMode = true;
}
if (pCamera->mNativeConfig.synchronized_images_only == K4A_DEVICE_CONFIG_INIT_DISABLE_ALL.synchronized_images_only)
{
if (pCamera->mColorMode == true && pCamera->mDepthMode == true)
{
pCamera->mNativeConfig.synchronized_images_only = true;
}
else
{
pCamera->mNativeConfig.synchronized_images_only = false;
}
}
// master / slave mode
if (pCamera->mWiredMode == AzureCamera::Slave)
{
pCamera->mNativeConfig.wired_sync_mode = K4A_WIRED_SYNC_MODE_SUBORDINATE;
}
else if (pCamera->mWiredMode == AzureCamera::Master)
{
pCamera->mNativeConfig.wired_sync_mode = K4A_WIRED_SYNC_MODE_MASTER;
}
try
{
pCamera->mNativeDevice.start_cameras(&pCamera->mNativeConfig);
}
catch (const std::exception&)
{
throw dab::Exception("Camera Error: Failed to configure Azure Kinect Device Nr " + std::to_string(pCamera->mDeviceNr) + " Serial Nr " + pCamera->mSerialNr, __FILE__, __FUNCTION__, __LINE__);
}
}
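// A minimal configuration sketch (values chosen for illustration): fields of
// mNativeConfig that the caller sets before initCamera() are kept, while
// fields still at their K4A_DEVICE_CONFIG_INIT_DISABLE_ALL defaults receive
// the fallback values chosen above:
//
//   camera->mColorMode = true;
//   camera->mDepthMode = true;
//   camera->mNativeConfig.color_resolution = K4A_COLOR_RESOLUTION_1080P;
//   camera->mNativeConfig.camera_fps = K4A_FRAMES_PER_SECOND_15;
//   tools->initCamera(camera); // opens the device, then applies this configuration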
void
AzureTools::openMovie(std::shared_ptr<AzureCamera> pCamera) throw (dab::Exception)
{
try
{
// try to open record file
pCamera->mNativeMoviePlayer = k4a::playback::open(pCamera->mMoviePlayFileName.c_str());
}
catch (const std::exception&)
{
throw dab::Exception("Camera Error: Failed to load playback file " + pCamera->mMoviePlayFileName, __FILE__, __FUNCTION__, __LINE__);
}
}
void
AzureTools::createCameraFrames(std::shared_ptr<AzureCamera> pCamera) throw (dab::Exception)
{
// a frame type that is not requested counts as already created
bool colorFrameSuccess = !pCamera->mColorMode;
bool depthFrameSuccess = !pCamera->mDepthMode;
while (colorFrameSuccess == false || depthFrameSuccess == false)
{
bool captureSuccess;
if (pCamera->mMoviePlaying == false)
{
captureSuccess = pCamera->mNativeDevice.get_capture(&pCamera->mNativeCapture, std::chrono::milliseconds(10000));
}
else
{
captureSuccess = pCamera->mNativeMoviePlayer.get_next_capture(&pCamera->mNativeCapture);
}
if (captureSuccess == false)
{
std::this_thread::sleep_for(std::chrono::microseconds(10));
continue;
}
if (pCamera->mColorMode == true && colorFrameSuccess == false)
{
const k4a::image colorFrame = pCamera->mNativeCapture.get_color_image();
colorFrameSuccess = colorFrame.is_valid();
if (colorFrameSuccess == true)
{
std::array<unsigned int, 2> frameSize;
frameSize[0] = static_cast<unsigned int>(colorFrame.get_width_pixels());
frameSize[1] = static_cast<unsigned int>(colorFrame.get_height_pixels());
pCamera->mColorFrame = new ofxCvColorImage();
pCamera->mColorFrame->allocate(frameSize[0], frameSize[1]);
pCamera->mChannelCount = 3;
pCamera->mFrame = pCamera->mColorFrame;
pCamera->mFrameSize = frameSize;
}
}
if (pCamera->mDepthMode == true && depthFrameSuccess == false)
{
std::array<unsigned int, 2> frameSize;
if (pCamera->mColorMode == false)
{
const k4a::image depthFrame = pCamera->mNativeCapture.get_depth_image();
depthFrameSuccess = depthFrame.is_valid();
if(depthFrameSuccess == true)
{
frameSize[0] = static_cast<unsigned int>(depthFrame.get_width_pixels());
frameSize[1] = static_cast<unsigned int>(depthFrame.get_height_pixels());
pCamera->mDepthFrame = new ofxCvShortImage();
pCamera->mDepthFrame->allocate(frameSize[0], frameSize[1]);
pCamera->mFrameSize = frameSize;
}
}
else if(colorFrameSuccess == true)
{
frameSize[0] = pCamera->mColorFrame->getWidth();
frameSize[1] = pCamera->mColorFrame->getHeight();
// prepare transformation from depth into color image
if (pCamera->mMoviePlaying == false)
{
pCamera->mDepthColorCalibration = pCamera->mNativeDevice.get_calibration(pCamera->mNativeConfig.depth_mode, pCamera->mNativeConfig.color_resolution);
}
else
{
pCamera->mDepthColorCalibration = pCamera->mNativeMoviePlayer.get_calibration();
}
pCamera->mDepthColorTransformation = k4a::transformation(pCamera->mDepthColorCalibration);
pCamera->mNativeDepthColorImage = k4a::image::create(
K4A_IMAGE_FORMAT_DEPTH16,
frameSize[0],
frameSize[1],
frameSize[0] * (int)sizeof(uint16_t));
pCamera->mDepthFrame = new ofxCvShortImage();
pCamera->mDepthFrame->allocate(frameSize[0], frameSize[1]);
pCamera->mFrameSize = frameSize;
if (pCamera->mPointCloudMode == true)
{
std::cout << "create point cloud\n";
pCamera->mNativePointCloudImage = k4a::image::create(
K4A_IMAGE_FORMAT_CUSTOM,
frameSize[0],
frameSize[1],
frameSize[0] * 3 * (int)sizeof(int16_t));
pCamera->mPointCloud = new PointCloud({ frameSize[0] * frameSize[1] });
}
depthFrameSuccess = true;
}
}
}
}
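// Note: when color and depth are both active, the depth frame and the point
// cloud are allocated at the color frame resolution, because capture()
// re-projects the depth image into the color camera geometry via
// depth_image_to_color_camera() before updateDepthFrame() runs.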
void
AzureTools::createMovieHeader(const std::array<uint32_t, 2>& pFrameSize, AzureMovieHeader *pHeader)
{
pHeader->biSize = sizeof(AzureMovieHeader);
pHeader->biWidth = pFrameSize[0];
pHeader->biHeight = pFrameSize[1];
pHeader->biPlanes = 1;
pHeader->biBitCount = 16;
pHeader->biCompression = AzureFOURCC("YUY2");
pHeader->biSizeImage = sizeof(uint16_t) * pFrameSize[0] * pFrameSize[1];
pHeader->biXPelsPerMeter = 0;
pHeader->biYPelsPerMeter = 0;
pHeader->biClrUsed = 0;
pHeader->biClrImportant = 0;
}
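// Note: AzureMovieHeader mirrors the layout of a Windows BITMAPINFOHEADER
// (biSize through biClrImportant), the structure Matroska expects as codec
// private data for "V_MS/VFW/FOURCC" tracks; the FOURCC "YUY2" together with
// biBitCount = 16 describes a 16 bit per pixel YUY2 layout.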
void
AzureTools::initCamera(std::shared_ptr<AzureCamera> pCamera) throw (dab::Exception)
{
try
{
std::cout << "initCamera begin\n";
if (pCamera->mMoviePlaying == false)
{
openCamera(pCamera);
configureCamera(pCamera);
}
else
{
openMovie(pCamera);
}
createCameraFrames(pCamera);
std::cout << "initCamera end\n";
}
catch (dab::Exception& e)
{
e += dab::Exception("Camera Error: failed to initialize azure kinect", __FILE__, __FUNCTION__, __LINE__);
throw e;
}
}
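// A minimal end-to-end sketch (hypothetical setup code; the way the
// application obtains its AzureTools instance is not shown here):
//
//   auto camera = std::make_shared<AzureCamera>();
//   camera->mColorMode = true;
//   camera->mDepthMode = true;
//   camera->mPointCloudMode = true;
//   tools->initCamera(camera);
//   tools->initControls(camera);
//   while (running) tools->capture(camera); // non-blocking polling loop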
void
AzureTools::initControls(std::shared_ptr<AzureCamera> pCamera) throw (dab::Exception)
{
if (pCamera->mDepthMode == true)
{
// min depth value
int minDepth = 0;
Parameter<int>* minDepthControl = new Parameter<int>("mindepth", minDepth, 0, SHRT_MAX); // TODO: verify the valid min and max depth values
minDepthControl->registerParameterListener(pCamera);
pCamera->mControls["mindepth"] = minDepthControl;
// max depth value
int maxDepth = SHRT_MAX;
Parameter<int>* maxDepthControl = new Parameter<int>("maxdepth", maxDepth, 0, SHRT_MAX); // TODO: verify the valid min and max depth values
maxDepthControl->registerParameterListener(pCamera);
pCamera->mControls["maxdepth"] = maxDepthControl;
}
// Color Camera Controls
if (pCamera->mColorMode == true && pCamera->mMoviePlaying == false)
{
const k4a::image colorImage = pCamera->mNativeCapture.get_color_image();
if (colorImage.is_valid() == true)
{
// brightness
int brightness_value;
k4a_color_control_mode_t brightness_mode;
pCamera->mNativeDevice.get_color_control(K4A_COLOR_CONTROL_BRIGHTNESS, &brightness_mode, &brightness_value);
Parameter<int>* brightnessControl = new Parameter<int>("brightness", brightness_value, 0, 200);
brightnessControl->registerParameterListener(pCamera);
pCamera->mControls["brightness"] = brightnessControl;
// exposure
int exposure_value;
k4a_color_control_mode_t exposure_mode;
pCamera->mNativeDevice.get_color_control(K4A_COLOR_CONTROL_EXPOSURE_TIME_ABSOLUTE, &exposure_mode, &exposure_value);
Parameter<int>* exposureControl = new Parameter<int>("exposure", exposure_value, 0, 33330);
exposureControl->registerParameterListener(pCamera);
pCamera->mControls["exposure"] = exposureControl;
}
}
}
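// Note: K4A_COLOR_CONTROL_EXPOSURE_TIME_ABSOLUTE is specified in
// microseconds, so the upper bound of 33330 used above corresponds to the
// frame time at 30 fps (1/30 s ≈ 33333 µs).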
void
AzureTools::setControlValue(const std::string& pControlName, const AbstractValue& pValue, std::shared_ptr<AzureCamera> pCamera) throw (dab::Exception)
{
try
{
if (pControlName == "mindepth")
{
int minDepth = pValue.get<int>();
pCamera->mMinDepth = minDepth;
}
else if (pControlName == "maxdepth")
{
int maxdepth = pValue.get<int>();
pCamera->mMaxDepth = maxdepth;
}
else if (pControlName == "brightness")
{
int brightness_value = pValue.get<int>();
k4a_color_control_mode_t brightness_mode = K4A_COLOR_CONTROL_MODE_MANUAL;
std::cout << "brightness " << brightness_value << "\n";
pCamera->mNativeDevice.set_color_control(K4A_COLOR_CONTROL_BRIGHTNESS, brightness_mode, static_cast<int32_t>(brightness_value));
}
else if (pControlName == "exposure")
{
int exposure_value = pValue.get<int>();
k4a_color_control_mode_t exposure_mode = K4A_COLOR_CONTROL_MODE_MANUAL;
std::cout << "exposure " << exposure_value << "\n";
pCamera->mNativeDevice.set_color_control(K4A_COLOR_CONTROL_EXPOSURE_TIME_ABSOLUTE, exposure_mode, static_cast<int32_t>(exposure_value));
}
}
catch (Exception&)
{
throw;
}
}
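// A minimal control update sketch (dab::Value<int> is an assumption for the
// concrete AbstractValue type, suggested by the pValue.get<int>() calls
// above):
//
//   tools->setControlValue("exposure", dab::Value<int>(16660), camera);
//   tools->setControlValue("mindepth", dab::Value<int>(500), camera);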