Fieol

KinectV1Device.cpp

Mar 31st, 2020
100
0
Never
Not a member of Pastebin yet? Sign Up, it unlocks many cool features!
C++ 25.54 KB | None | 0 0
  1. //
  2. // Copyright 2018 Adam Horvath - MIRROR.UPLUGINS.COM - info@uplugins.com - All Rights Reserved.
  3. //
  4.  
#include "VirtualMirrorPluginPrivatePCH.h"
#include "KinectV1Device.h"


#include <iostream>
#include <sstream>
#include <string>
#include <vector>

#include <direct.h>
#define GetCurrentDir _getcwd
  14.  
  15. //General Log
  16. DEFINE_LOG_CATEGORY(KinectV1);
  17.  
// Quickfix for MSVC 2015+ linker errors (libjpeg.lib):
// VS2015 removed the legacy __iob_func symbol that older prebuilt static
// libraries still reference. Provide a shim that hands back the standard
// stdin/stdout/stderr FILE objects in the layout the old CRT expected.
#if (_MSC_VER >= 1900)
FILE _iob[] = { *stdin, *stdout, *stderr };

extern "C" FILE * __cdecl __iob_func(void)
{
    return _iob;
}
#endif
  27.  
  28. #include "Core.h"
  29.  
  30.  
  31. FKinectV1Device::FKinectV1Device()
  32. {
  33.     initiated = false;
  34.     QuadRGB = NULL;
  35.     QuadDEPTH = NULL;
  36.     QuadUSER = NULL;
  37.  
  38.     UserTracker = NULL;
  39.  
  40.     SensorOffset = FVector(0, 0, 0);
  41.    
  42.     UE_LOG(KinectV1, Log, TEXT("Device start"));
  43. }
  44.  
// Destructor: releases all buffers and shuts the sensor down via Cleanup().
FKinectV1Device::~FKinectV1Device()
{
    UE_LOG(KinectV1, Log, TEXT("Device shutdown"));
    Cleanup();
}
  50.  
// Engine hook called when the device module starts. Real initialization is
// deferred to Init(), so there is nothing to do here; always succeeds.
bool FKinectV1Device::StartupDevice()
{
    return true;
}
  55.  
  56. UTexture2D* FKinectV1Device::GetTextureRGB(){
  57.     UE_LOG(KinectV1, Log, TEXT("GetTextureRGB"));
  58.     if (this->initiated) {
  59.        
  60.         return this->TextureRGB;
  61.        
  62.     } else {
  63.         if (GEngine) {
  64.             GEngine->AddOnScreenDebugMessage(-1, 5.f, FColor::Red, FString::Printf(TEXT("KinectV1 is not initialized properly. (RGB)")));
  65.         }
  66.         return this->DummyTexture;
  67.     }
  68. }
  69.  
  70.  
  71.  
  72. UTexture2D* FKinectV1Device::GetTextureDEPTH() {
  73.     if (this->initiated) {
  74.         return this->TextureDEPTH;
  75.     } else {
  76.         if (GEngine) {
  77.             GEngine->AddOnScreenDebugMessage(-1, 5.f, FColor::Red, FString::Printf(TEXT("KinectV1 is not initialized properly. (DEPTH)")));
  78.         }
  79.         return this->DummyTexture;
  80.     }
  81. }
  82.  
  83. UTexture2D* FKinectV1Device::GetTextureUSER() {
  84.     if (this->initiated) {
  85.         return this->TextureUSER;
  86.     }
  87.     else {
  88.         if (GEngine) {
  89.             GEngine->AddOnScreenDebugMessage(-1, 5.f, FColor::Red, FString::Printf(TEXT("KinectV1 is not initialized properly. (USER)")));
  90.         }
  91.         return this->DummyTexture;
  92.     }
  93. }
  94.  
  95.  
// Engine hook called when the device module shuts down; releases all
// resources via Cleanup().
void FKinectV1Device::ShutdownDevice()
{
    Cleanup();
}
  100.  
  101. void FKinectV1Device::UpdateDevice(float DeltaTime){
  102.     if (!this->initiated) return;
  103.    
  104.     //Check for camera refresh rate and update accordingly
  105.     RefreshTimer += DeltaTime;
  106.     if (RefreshTimer >= 1.0f / 30) //30FPS
  107.     {
  108.         RefreshTimer = 0; //Reset timer
  109.        
  110.         //Update texures
  111.         UpdateTextureRGB();
  112.        
  113.         UpdateTextureDEPTH();
  114.        
  115.         //Update user tracker
  116.         UpdateUserTracker();
  117.  
  118.         //User map
  119.         UpdateTextureUSER();
  120.     }
  121. }
  122.  
  123.  
  124.  
  125. void FKinectV1Device::UpdateTextureRGB(){
  126.    
  127.     ColorStream.readFrame(&ColorFrame);
  128.    
  129.     if (ColorFrame.isValid()) {
  130.  
  131.         const openni::RGB888Pixel* TextureMap = (const openni::RGB888Pixel*)ColorFrame.getData();
  132.  
  133.         int u = 0;
  134.         for (int i = 0; i < (640 * 480); i += 1) {
  135.  
  136.             QuadRGB[u].rgbRed = TextureMap[i].r;
  137.             QuadRGB[u].rgbGreen = TextureMap[i].g;
  138.             QuadRGB[u].rgbBlue = TextureMap[i].b;
  139.             QuadRGB[u].rgbReserved = 255;
  140.             u++;
  141.         }
  142.        
  143.         const size_t SizeQuadRGB = 640 * 480* sizeof(RGBQUAD);
  144.         uint8* Dest = (uint8*)TextureRGB->PlatformData->Mips[0].BulkData.Lock(LOCK_READ_WRITE);
  145.         FMemory::Memcpy(Dest, (uint8*)QuadRGB, SizeQuadRGB);
  146.  
  147.         TextureRGB->PlatformData->Mips[0].BulkData.Unlock();
  148.         TextureRGB->UpdateResource();
  149.        
  150.     }
  151.  
  152. }
  153.  
  154. void FKinectV1Device::UpdateTextureDEPTH() {
  155.    
  156.     DepthStream.readFrame(&DepthFrame);
  157.    
  158.    
  159.  
  160.     if (DepthFrame.isValid()) {
  161.         //Hands on depth map
  162.         FVector2D HandLeftPosition2D(0, 0);
  163.         FVector2D HandRightPosition2D(0, 0);
  164.         EJointType ClosestBodyJointLeftHand = EJointType::SpineBase;
  165.         EJointType ClosestBodyJointRightHand = EJointType::SpineBase;
  166.  
  167.         float ClosestBodyJointLeftHandDepth = 450;
  168.         float ClosestBodyJointRightHandDepth = 450;
  169.         float HandLeftDepth = 450;
  170.         float HandRightDepth = 450;
  171.         float HandLeftRadius = 1024;
  172.         float HandRightRadius = 1024;
  173.  
  174.         if (IsTracking()) {
  175.             int MinimumHandRadius = 25;
  176.  
  177.  
  178.             //Hand left
  179.            
  180.  
  181.                 HandLeftDepth = FMath::Abs(GetBonePosition(EJointType::HandLeft).Z);
  182.                 HandLeftPosition2D = GetBonePosition2D(EJointType::HandLeft);
  183.                 HandLeftRadius = (GetBonePosition2D(EJointType::WristLeft) - GetBonePosition2D(EJointType::HandTipLeft)).Size()*HandOcclusionMultiplier;
  184.                 if (HandLeftRadius < MinimumHandRadius) HandLeftRadius = MinimumHandRadius;
  185.  
  186.                 ClosestBodyJointLeftHand = GetClosestBodyJoint(EJointType::HandLeft);
  187.                 ClosestBodyJointLeftHandDepth = FMath::Abs(GetBonePosition(ClosestBodyJointLeftHand).Z);
  188.  
  189.                
  190.             //Hand right
  191.            
  192.                 HandRightDepth = FMath::Abs(GetBonePosition(EJointType::HandRight).Z);
  193.                 HandRightPosition2D = GetBonePosition2D(EJointType::HandRight);
  194.                 HandRightRadius = (GetBonePosition2D(EJointType::WristRight) - GetBonePosition2D(EJointType::HandTipRight)).Size()*HandOcclusionMultiplier;
  195.                 if (HandRightRadius < MinimumHandRadius) HandRightRadius = MinimumHandRadius;
  196.  
  197.                 ClosestBodyJointRightHand = GetClosestBodyJoint(EJointType::HandRight);
  198.                 ClosestBodyJointRightHandDepth = FMath::Abs(GetBonePosition(ClosestBodyJointRightHand).Z);
  199.            
  200.         }
  201.  
  202.        
  203.         openni::RGB888Pixel TextureMap[640*480];
  204.        
  205.         const openni::DepthPixel* DepthRow = (const openni::DepthPixel*)DepthFrame.getData();
  206.         openni::RGB888Pixel* TexRow = TextureMap +  640;
  207.         int rowSize = DepthFrame.getStrideInBytes() / sizeof(openni::DepthPixel);
  208.  
  209.         for (int y = 0; y < 480; ++y)
  210.         {
  211.             const openni::DepthPixel* Depth = DepthRow;
  212.            
  213.             openni::RGB888Pixel* Tex = TexRow;
  214.  
  215.             for (int x = 0; x < 640; ++x, ++Depth, ++Tex)
  216.             {
  217.                 if (*Depth != 0)
  218.                 {
  219.                     if (
  220.                         ((FVector2D(x, y) - HandLeftPosition2D).Size() < HandLeftRadius) && (*Depth < ClosestBodyJointLeftHandDepth - 150)
  221.                         || ((FVector2D(x, y) - HandRightPosition2D).Size() < HandRightRadius) && (*Depth < ClosestBodyJointRightHandDepth - 150)
  222.                         ) {
  223.  
  224.                         int32 red, green, blue;
  225.  
  226.                         UKinectV1FunctionLibrary::IntToRGB(*Depth, red, green, blue);
  227.                         Tex->r = (BYTE)red;
  228.                         Tex->g = (BYTE)green;
  229.                         Tex->b = (BYTE)blue;
  230.                     }
  231.                     else {
  232.                         Tex->r = 255;
  233.                         Tex->g = 255;
  234.                         Tex->b = 255;
  235.                     }
  236.                 }
  237.                
  238.             }
  239.  
  240.             DepthRow += rowSize;
  241.             TexRow += DepthFrame.getWidth();
  242.         }
  243.  
  244.        
  245.         for (int i = 0; i < (640*480); i += 1) {
  246.                 QuadDEPTH[i].rgbRed = TextureMap[i].r;
  247.                 QuadDEPTH[i].rgbGreen = TextureMap[i].g;
  248.                 QuadDEPTH[i].rgbBlue = TextureMap[i].b;
  249.                 QuadDEPTH[i].rgbReserved = 255;
  250.         }
  251.  
  252.  
  253.         const size_t SizeQuadDEPTH = 640 * 480 * sizeof(RGBQUAD);
  254.         uint8* Dest = (uint8*)TextureDEPTH->PlatformData->Mips[0].BulkData.Lock(LOCK_READ_WRITE);
  255.         FMemory::Memcpy(Dest, (uint8*)QuadDEPTH, SizeQuadDEPTH);
  256.  
  257.         TextureDEPTH->PlatformData->Mips[0].BulkData.Unlock();
  258.         TextureDEPTH->UpdateResource();
  259.  
  260.     }
  261.  
  262. }
  263.  
  264. void FKinectV1Device::UpdateTextureUSER()
  265. {
  266.     if (!UserTrackerFrame.isValid()) return;
  267.    
  268.     if (UserTrackerFrame.getUsers().getSize() == 0) {
  269.         //No users, make it full transparent
  270.         uint8* pDest = (uint8*)TextureUSER->PlatformData->Mips[0].BulkData.Lock(LOCK_READ_WRITE);
  271.         for (int y = 0; y < 480; ++y)
  272.         {
  273.             for (int x = 0; x < 640; ++x)
  274.             {
  275.                 pDest[(y * 640 + x)] = 0;   pDest++;
  276.                 pDest[(y * 640 + x)] = 0;   pDest++;
  277.                 pDest[(y * 640 + x)] = 0;   pDest++;
  278.                 pDest[(y * 640 + x)] = 0;
  279.             }
  280.         }
  281.         TextureUSER->PlatformData->Mips[0].BulkData.Unlock();
  282.         TextureUSER->UpdateResource();
  283.  
  284.         return;
  285.  
  286.     }
  287.  
  288.  
  289.     const nite::UserMap& userLabels = UserTrackerFrame.getUserMap();
  290.    
  291.  
  292.     float factor[3] = { 1, 1, 1 };
  293.     float Colors[][3] = { { 0, 0, 1 },{ 0, 0, 1 },{ 0, 0, 1 },{ 0, 0, 1 } };
  294.     int colorCount = 3;
  295.  
  296.    
  297.         // update dynamic texture
  298.        
  299.         const size_t Size = 640 * 480 * sizeof(RGBQUAD);
  300.        
  301.         uint8* pDest = (uint8*)TextureUSER->PlatformData->Mips[0].BulkData.Lock(LOCK_READ_WRITE);
  302.        
  303.        
  304.  
  305.    
  306.     //Nite specific
  307.  
  308.     const nite::UserId* pLabels = userLabels.getPixels();
  309.  
  310.    
  311.  
  312.    
  313.     for (int y = 0; y < 480; ++y)
  314.     {
  315.         for (int x = 0; x < 640; ++x, ++pLabels)
  316.         {
  317.  
  318.             if (*pLabels == 0)
  319.             {
  320.                 factor[0] = factor[1] = factor[2] = 0;
  321.             }
  322.             else
  323.             {
  324.                 factor[0] = Colors[*pLabels % colorCount][0];
  325.                 factor[1] = Colors[*pLabels % colorCount][1];
  326.                 factor[2] = Colors[*pLabels % colorCount][2];
  327.             }
  328.            
  329.             // write to output buffer
  330.            
  331.             pDest[(y * 640 + x)] = 255 * factor[0]; pDest++;
  332.             pDest[(y * 640 + x)] = 255 * factor[1]; pDest++;
  333.             pDest[(y * 640 + x)] = 255 * factor[2]; pDest++;
  334.            
  335.             if ((factor[0] == factor[1]) && (factor[1] == factor[2]) && (factor[2] == 0)) {
  336.                 pDest[(y*640 + x)] = 0;
  337.             }
  338.             else {
  339.                 pDest[(y*640 + x)] = 255;
  340.             }
  341.  
  342.  
  343.             factor[0] = factor[1] = factor[2] = 1;
  344.         }
  345.     }
  346.  
  347.     TextureUSER->PlatformData->Mips[0].BulkData.Unlock();
  348.     TextureUSER->UpdateResource();
  349.  
  350.    
  351. }
  352.  
  353.  
  354.  
// Reads one NiTE user-tracker frame and advances the tracking state machine:
// updates floor plane / sensor height+angle estimates, starts PSI-pose
// detection on new users, begins skeleton tracking when the single tracked
// user strikes the PSI pose, and re-arms pose detection when the tracked
// user is lost or calibration fails. Only one user (TrackedUserID) is
// tracked at a time; 0 means "nobody tracked".
void FKinectV1Device::UpdateUserTracker() {
    //Check if tracker is valid
    if (!UserTracker->isValid()) return;

    //Read the user frame
    nite::Status rc = UserTracker->readFrame(&UserTrackerFrame);

    if (rc != nite::STATUS_OK)
    {
        UE_LOG(KinectV1, Error, TEXT("Error reading user frame"));
        return;
    }

    //Floor: accept the detected floor plane only with >0.5 confidence.
    if (this->UserTrackerFrame.getFloorConfidence()>0.5) {
        this->SceneFloor = this->UserTrackerFrame.getFloor();
        this->FloorFound = true;
    }
    else {
        this->FloorFound = false;
    }

    //Sensor Height - Angle: derive the sensor's tilt from the angle between
    //the floor normal and world up, then estimate mounting height from the
    //floor point corrected by that tilt.
    if (this->FloorFound) {
        FVector floor = FVector(SceneFloor.point.x, SceneFloor.point.y, SceneFloor.point.z);
        FVector floorNormal = FVector(SceneFloor.normal.x, SceneFloor.normal.y, SceneFloor.normal.z);

        // cos(angle) = (n . up) / (|n| * |up|), clamped for Acos safety.
        float lenProduct = floorNormal.Size()* FVector::UpVector.Size();
        float f = FVector::DotProduct(floorNormal, FVector::UpVector) / lenProduct;
        f = FMath::Clamp(f, (float)-1.0, (float)1.0);

        float tilt = 90 - FMath::RadiansToDegrees(FMath::Acos(f));
        SensorAngle = tilt;

        // Project the floor point's depth through the tilt to correct the
        // raw vertical offset into a mounting height.
        float sensorHeightD = (floor.Z)*FMath::Sin(FMath::DegreesToRadians(tilt));
        float sensorHeight = FMath::Abs(floor.Y) - sensorHeightD;

        SensorHeight = sensorHeight;
    }

    const nite::Array<nite::UserData>& users = UserTrackerFrame.getUsers();

    this->NumberOfUsers = users.getSize();

    //If tracked user id is set but no users are visible, drop the tracked id.
    if (this->TrackedUserID != 0 && this->NumberOfUsers == 0) this->TrackedUserID = 0;

    //New user - Lost user state machine, evaluated per visible user.
    for (int i = 0; i < this->NumberOfUsers; ++i)
    {
        const nite::UserData& user = users[i];

        if (user.isNew())
        {
            // Newly seen user: wait for them to strike the PSI pose.
            UserTracker->startPoseDetection(user.getId(), nite::POSE_PSI);
            UE_LOG(KinectV1, Log, TEXT("Pose detection started on user ID: %d"), user.getId());
        }
        else if (user.getPose(nite::POSE_PSI).isEntered())
        {
            // PSI pose struck: adopt this user if nobody is tracked yet.
            if (this->TrackedUserID == 0)
            {
                //if (this->FloorFound == true && this->SensorHeight>1300 && this->SensorHeight<2300) {
                    UE_LOG(KinectV1,Log,TEXT("PSI pose detected!!!!"));
                    this->Calibrating = true;
                    UserTracker->stopPoseDetection(user.getId(), nite::POSE_PSI);
                    UserTracker->startSkeletonTracking(user.getId());
                    this->TrackedUserID = user.getId();
                    UE_LOG(KinectV1,Log,TEXT("Tracking started on user ID : %d"), user.getId());
                //}
            }
        }
        else if (user.isLost() || !user.isVisible())
        {
            // Tracked user left the frame: stop tracking and re-arm pose
            // detection so they (or someone else) can re-acquire.
            if (this->TrackedUserID != 0 && user.getId() == this->TrackedUserID)
            {
                UserTracker->stopSkeletonTracking((user.getId()));
                UE_LOG(KinectV1, Log, TEXT("Tracking stopped on user ID:%d"), user.getId());
                UserTracker->startPoseDetection(this->TrackedUserID, nite::POSE_PSI);
                UE_LOG(KinectV1, Log, TEXT("Pose detection started on user ID : %d"), user.getId());

                this->TrackedUserID = 0;
                this->Calibrating = false;
            }
        }
        else if (user.getId() == this->TrackedUserID && this->TrackedUserID != 0 && user.getSkeleton().getState() == nite::SkeletonState::SKELETON_TRACKED)
        {
            // Steady state: skeleton fully tracked.
            this->Calibrating = false;

            //Center of Mass for Height computation
            nite::Point3f com = user.getCenterOfMass();
            this->CoM = FVector(com.x, com.y, com.z);

        }
        else if (user.getId() == this->TrackedUserID && this->TrackedUserID != 0 && user.getSkeleton().getState() == nite::SkeletonState::SKELETON_CALIBRATING) {
            //UE_LOG(KinectV1, Log, TEXT("Calibration: %d"), user.getId());
            this->Calibrating = true;
        }
        else if (user.getId() == this->TrackedUserID && this->TrackedUserID != 0 && (
            user.getSkeleton().getState() == nite::SkeletonState::SKELETON_CALIBRATION_ERROR_HANDS ||
            user.getSkeleton().getState() == nite::SkeletonState::SKELETON_CALIBRATION_ERROR_HEAD ||
            user.getSkeleton().getState() == nite::SkeletonState::SKELETON_CALIBRATION_ERROR_LEGS ||
            user.getSkeleton().getState() == nite::SkeletonState::SKELETON_CALIBRATION_ERROR_TORSO ||
            user.getSkeleton().getState() == nite::SkeletonState::SKELETON_CALIBRATION_ERROR_NOT_IN_POSE ||
            user.getSkeleton().getState() == nite::SkeletonState::SKELETON_NONE
            )

            ) {
            // Calibration failed: abandon this attempt and go back to
            // waiting for a PSI pose from this user.
            UE_LOG(KinectV1, Warning, TEXT("Calibration error : %d"), static_cast<int32>(user.getSkeleton().getState()));
            UserTracker->stopSkeletonTracking((user.getId()));
            this->Calibrating = false;

            UE_LOG(KinectV1, Log, TEXT("Tracking stopped on user ID:%d"), user.getId());
            UserTracker->startPoseDetection(user.getId(), nite::POSE_PSI);

            UE_LOG(KinectV1, Log, TEXT("Pose detection started on user ID:%d"), user.getId());
            this->TrackedUserID = 0;

        }


    }

}
  483.  
  484.  
  485. bool FKinectV1Device::Init(bool playOni){
  486.     RefreshTimer = 0;
  487.  
  488.     //Defaults 
  489.     TrackedUserID = 0;
  490.     FloorFound = false;
  491.     Calibrating = false;
  492.     SensorAngle = 0;
  493.     SensorHeight = 0;
  494.    
  495.     //Color texture
  496.     TextureRGB = UTexture2D::CreateTransient(640,480);
  497.     TextureRGB->SRGB = 1;
  498.     TextureRGB->UpdateResource();
  499.  
  500.     //Depth texture
  501.     TextureDEPTH = UTexture2D::CreateTransient(640, 480);
  502.     TextureDEPTH->SRGB = 0;
  503.     TextureDEPTH->UpdateResource();
  504.  
  505.     //Texture user
  506.     TextureUSER = UTexture2D::CreateTransient(640, 480);
  507.     TextureUSER->SRGB = 1;
  508.     TextureUSER->UpdateResource();
  509.  
  510.    
  511.     //Return this if WebcamTexture cannot be created by any reason.
  512.     DummyTexture = UTexture2D::CreateTransient(640,480);
  513.     DummyTexture->SRGB = 1;
  514.     DummyTexture->UpdateResource();
  515.  
  516.     //Create rgbquads for holding texture data
  517.     QuadRGB = new RGBQUAD[640*480];
  518.     QuadDEPTH = new RGBQUAD[640*480];
  519.     QuadUSER = new RGBQUAD[640*480];
  520.    
  521.     //Create NiTE user tracker
  522.     UserTracker = new nite::UserTracker;
  523.  
  524.     //Init sensor
  525.     if (!SensorInit(playOni)) {
  526.         return false;
  527.     }
  528.  
  529.    
  530.     initiated = 1;
  531.     UE_LOG(KinectV1, Log, TEXT("Init complete !!\n"));
  532.  
  533.     return 1; //Succesfull init
  534. }
  535.  
  536. bool FKinectV1Device::SensorInit(bool playOni) {
  537.    
  538.     //Init OpenNI
  539.     const char* deviceURI = openni::ANY_DEVICE;
  540.     openni::Status rc = openni::OpenNI::initialize();
  541.  
  542.     if (rc != openni::STATUS_OK) {
  543.         UE_LOG(KinectV1, Error, TEXT("Failed to initialize OpenNI -  %s "), *FString(UTF8_TO_TCHAR(openni::OpenNI::getExtendedError())));
  544.         return 0;
  545.     }
  546.     if (!playOni) {
  547.         //Open device
  548.         rc = Device.open(deviceURI);
  549.        
  550.  
  551.     }else{
  552.         //Open ONI file
  553.         //rc = Device.open("e:/UnrealProjects/KinectV1/Plugins/KinectV1Plugin/Binaries/Win64/adam.oni");
  554.         rc = Device.open(deviceURI);
  555.     }
  556.  
  557.     DeviceName = Device.getDeviceInfo().getName();
  558.     UE_LOG(KinectV1, Log, TEXT("Device found: %s "), *FString(UTF8_TO_TCHAR(Device.getDeviceInfo().getName())));
  559.    
  560.  
  561.     if (rc != openni::STATUS_OK)
  562.     {
  563.         UE_LOG(KinectV1, Error, TEXT("Failed to open device -   %s "), *FString(UTF8_TO_TCHAR(openni::OpenNI::getExtendedError())));
  564.         return 0;
  565.     }
  566.  
  567.     //Create depth stream
  568.     UE_LOG(KinectV1, Log, TEXT("Creating depth stream."));
  569.     rc = DepthStream.create(Device, openni::SENSOR_DEPTH);
  570.     if (rc == openni::STATUS_OK)
  571.     {
  572.         const openni::Array<openni::VideoMode>& vm = Device.getSensorInfo(openni::SensorType::SENSOR_DEPTH)->getSupportedVideoModes();
  573.  
  574.         for (int i = 0; i<vm.getSize(); i++) {
  575.             if (vm[i].getResolutionX() == 640 && vm[i].getResolutionY() == 480 && vm[i].getFps() == 30) {
  576.                 this->DepthStream.setVideoMode(vm[i]);
  577.                 break;
  578.             }
  579.         }
  580.  
  581.        
  582.         UE_LOG(KinectV1, Log, TEXT("Starting depth stream."));
  583.         rc = DepthStream.start();
  584.         if (rc != openni::STATUS_OK)
  585.         {
  586.             UE_LOG(KinectV1, Error, TEXT("Couldn't start depth stream - %s "), *FString(UTF8_TO_TCHAR(openni::OpenNI::getExtendedError())));
  587.             DepthStream.destroy();
  588.             return 0;
  589.         }
  590.        
  591.  
  592.  
  593.     }
  594.     else
  595.     {
  596.         UE_LOG(KinectV1, Error, TEXT("Couldn't find depth stream: - %s "), *FString(UTF8_TO_TCHAR(openni::OpenNI::getExtendedError())));
  597.         return 0;
  598.     }
  599.  
  600.  
  601.     //Create color stream
  602.     UE_LOG(KinectV1, Log, TEXT("Creating color stream."));
  603.     rc = ColorStream.create(Device, openni::SENSOR_COLOR);
  604.  
  605.     if (rc == openni::STATUS_OK)
  606.     {
  607.         if (this->DeviceName != "Kinect") {
  608.             const openni::Array<openni::VideoMode>& vm = Device.getSensorInfo(openni::SensorType::SENSOR_COLOR)->getSupportedVideoModes();
  609.             for (int i = 0; i<vm.getSize(); i++) {
  610.                 if (vm[i].getResolutionX() == 640 && vm[i].getResolutionY() == 480 && vm[i].getFps() == 30) {
  611.                     ColorStream.setVideoMode(vm[i]);
  612.                     break;
  613.                 }
  614.             }
  615.         }
  616.         UE_LOG(KinectV1, Log, TEXT("Starting color stream."));
  617.         ColorStream.start();
  618.  
  619.         if (rc != openni::STATUS_OK)
  620.         {
  621.             UE_LOG(KinectV1, Error, TEXT("Couldn't start color stream - %s "), *FString(UTF8_TO_TCHAR(openni::OpenNI::getExtendedError())));
  622.             ColorStream.destroy();
  623.             return 0;
  624.         }
  625.     }
  626.     else
  627.     {
  628.         UE_LOG(KinectV1, Error, TEXT("Couldn't find color stream: - %s "), *FString(UTF8_TO_TCHAR(openni::OpenNI::getExtendedError())));
  629.         return nite::Status::STATUS_ERROR;
  630.     }
  631.  
  632.     //Check streams
  633.     if (!ColorStream.isValid())
  634.     {
  635.         UE_LOG(KinectV1, Error, TEXT("No valid streams"));
  636.         openni::OpenNI::shutdown();
  637.         return 0;
  638.     }
  639.  
  640.     //Nite
  641.    
  642.     if (nite::NiTE::initialize() != nite::Status::STATUS_OK)
  643.     {
  644.         UE_LOG(KinectV1, Error, TEXT("NITE - Loading error"));
  645.         return 0;
  646.     }
  647.  
  648.     //Color to Depth registration
  649.     UE_LOG(KinectV1, Log, TEXT("Registration: %d"), Device.isImageRegistrationModeSupported(openni::IMAGE_REGISTRATION_DEPTH_TO_COLOR));
  650.    
  651.     //Device.setDepthColorSyncEnabled(true);
  652.    
  653.     openni::Status res;
  654.     res = Device.setImageRegistrationMode(openni::IMAGE_REGISTRATION_DEPTH_TO_COLOR);
  655.  
  656.     if (res==openni::Status::STATUS_OK) {
  657.         UE_LOG(KinectV1, Log, TEXT("Registration successfull!"));
  658.     } else {
  659.         UE_LOG(KinectV1, Log, TEXT("Registration failed!"));
  660.     }
  661.  
  662.     FString NiteDataPath = FPaths::ConvertRelativePathToFull(FPaths::ProjectDir() + "/Binaries/Win64/");
  663.  
  664.     //Store current directory
  665.     char buff[FILENAME_MAX];
  666.     GetCurrentDir(buff, FILENAME_MAX);
  667.     std::string current_working_dir(buff);
  668.    
  669.    
  670.    
  671.     //Change current directory to load NiTE data
  672.  
  673.     std::string s = std::string(TCHAR_TO_UTF8(*NiteDataPath));
  674.  
  675.     int len;
  676.     int slength = (int)s.length() + 1;
  677.     len = MultiByteToWideChar(CP_ACP, 0, s.c_str(), slength, 0, 0);
  678.     wchar_t* buf = new wchar_t[len];
  679.     MultiByteToWideChar(CP_ACP, 0, s.c_str(), slength, buf, len);
  680.     std::wstring r(buf);
  681.     delete[] buf;
  682.  
  683.     UE_LOG(KinectV1, Log, TEXT("Current directory set to: - %s "), *FString(UTF8_TO_TCHAR(s.c_str())));
  684.     SetCurrentDirectory(r.c_str());
  685.  
  686.     UE_LOG(KinectV1, Log, TEXT("Creating user tracker"));
  687.    
  688.    
  689.     if (UserTracker->create(&Device) != nite::STATUS_OK)
  690.     {
  691.        
  692.         UE_LOG(KinectV1, Error, TEXT("User tracker error - %s "), *FString(UTF8_TO_TCHAR(openni::OpenNI::getExtendedError())));
  693.         return 0;
  694.     }
  695.    
  696.     //Set current directory back
  697.     UE_LOG(KinectV1, Log, TEXT("Current directory set back to: - %s "), *FString(UTF8_TO_TCHAR(current_working_dir.c_str())));
  698.     SetCurrentDirectoryA(current_working_dir.c_str());
  699.     return 1;
  700. }
  701.  
  702. void FKinectV1Device::AbortTracking() {
  703.     if (!UserTrackerFrame.isValid()) return;
  704.     if (UserTrackerFrame.getUsers().getSize()>0) {
  705.         for (int i = 0; i<UserTrackerFrame.getUsers().getSize(); i++) {
  706.             UserTracker->stopSkeletonTracking((UserTrackerFrame.getUsers()[i].getId()));
  707.             UserTracker->startPoseDetection(UserTrackerFrame.getUsers()[i].getId(), nite::POSE_PSI);
  708.             this->TrackedUserID = 0;
  709.             this->Calibrating = false;
  710.         }
  711.     }
  712. }
  713.  
  714. FString FKinectV1Device::GetDeviceName() {
  715.     return this->DeviceName;
  716. }
  717.  
  718. bool  FKinectV1Device::IsTracking()
  719. {
  720.     if (!UserTrackerFrame.isValid()) return false;
  721.     if (UserTrackerFrame.getUsers().getSize()>0) {
  722.         for (int i = 0; i<UserTrackerFrame.getUsers().getSize(); i++) {
  723.             if (UserTrackerFrame.getUsers()[i].getSkeleton().getState() == nite::SkeletonState::SKELETON_TRACKED) {
  724.                 if (this->Calibrating == false) {
  725.                     if(GetBonePosition(EJointType::SpineBase)!=FVector::ZeroVector){
  726.                         return true;
  727.                     }
  728.                 }
  729.             }
  730.         }
  731.     }
  732.     return false;
  733. }
  734.  
  735. FRotator FKinectV1Device::GetBoneRotation(EJointType skelJoint, bool flip)
  736. {
  737.     nite::JointType skelJointNite = ConvertJoint(skelJoint);
  738.  
  739.     if (!UserTrackerFrame.isValid() || this->TrackedUserID == 0) return FRotator::ZeroRotator;
  740.     FQuat newQ = FRotator::ZeroRotator.Quaternion();
  741.     nite::Quaternion jointOri;
  742.     nite::SkeletonJoint joint;
  743.     joint = this->UserTrackerFrame.getUserById(this->TrackedUserID)->getSkeleton().getJoint(skelJointNite);
  744.     jointOri = joint.getOrientation();
  745.  
  746.     if (joint.getOrientationConfidence() > 0)
  747.     {
  748.        
  749.         newQ.X = jointOri.x;
  750.         newQ.Y = -jointOri.z;
  751.         newQ.Z = jointOri.y;
  752.         newQ.W = jointOri.w;
  753.        
  754.  
  755.         newQ = newQ*FRotator(90, 0, 0).Quaternion();
  756.        
  757.     }
  758.  
  759.     //Try to return valid rotation only if not found check previous data
  760.     if (newQ != FRotator::ZeroRotator.Quaternion()) {
  761.         JointRotationsValid[skelJoint] = newQ.Rotator();
  762.     }
  763.     else {
  764.         if (JointRotationsValid.find(skelJoint) == JointRotationsValid.end()) {
  765.             // not found
  766.             return FRotator(newQ); //No valid data
  767.         }
  768.         else {
  769.             // found
  770.             return JointRotationsValid.find(skelJoint)->second;
  771.         }
  772.     }
  773.  
  774.  
  775.     return FRotator(newQ);
  776. }
  777.  
  778. FVector2D FKinectV1Device::GetBonePosition2D(EJointType skelJoint)
  779. {
  780.     FVector2D JointPosition2D(0,0);
  781.     FVector JointPostion3D = GetBonePosition(skelJoint);
  782.     UserTracker->convertJointCoordinatesToDepth(JointPostion3D.X, JointPostion3D.Y, JointPostion3D.Z, &JointPosition2D.X, &JointPosition2D.Y);
  783.     return JointPosition2D;
  784. }
  785.  
  786. FVector FKinectV1Device::GetBonePosition(EJointType skelJoint, bool flip)
  787. {
  788.     nite::JointType skelJointNite = ConvertJoint(skelJoint);
  789.     if (!UserTrackerFrame.isValid() || this->TrackedUserID == 0) return  FVector::ZeroVector;
  790.     FVector newPos = FVector::ZeroVector;
  791.     nite::Point3f jointPos;
  792.     nite::SkeletonJoint joint;
  793.     joint = UserTrackerFrame.getUserById(TrackedUserID)->getSkeleton().getJoint(skelJointNite);
  794.  
  795.     if (joint.getPositionConfidence()>0) {
  796.         jointPos = joint.getPosition();
  797.         newPos = FVector(jointPos.x, jointPos.y, jointPos.z);
  798.     }
  799.  
  800.     //Try to return valid rotation only if not found check previous data
  801.     if (newPos != FVector::ZeroVector) {
  802.         JointPositionsValid[skelJoint] = newPos;
  803.     } else {
  804.         if (JointPositionsValid.find(skelJoint) == JointPositionsValid.end()) {
  805.             // not found
  806.             return newPos; //No valid data
  807.         }
  808.         else {
  809.             // found
  810.             return JointPositionsValid.find(skelJoint)->second;
  811.         }
  812.     }
  813.  
  814.     return newPos;
  815. }
  816.  
  817.  
// Tears down the sensor stack in dependency order: frames/streams first,
// then the NiTE tracker, then the device, and finally the OpenNI and NiTE
// runtimes. Always returns true. (Order is deliberate — the tracker must be
// destroyed while the runtimes are still up.)
bool FKinectV1Device::SensorShutdown() {

    // Release the color pipeline.
    ColorFrame.release();
    ColorStream.stop();
    ColorStream.destroy();

    // Stop depth before tearing down the tracker that consumes it.
    DepthStream.stop();

    // Destroy the NiTE tracker only if it was created and is still valid.
    if(UserTracker){
        if (UserTracker->isValid()) {
            UserTrackerFrame.release();
            UserTracker->destroy();
        }
    }

    DepthStream.destroy();
    Device.close();
    openni::OpenNI::shutdown();
    nite::NiTE::shutdown();

    return true;
}
  841.  
  842.  
  843.  
  844. void FKinectV1Device::Cleanup(void){
  845.     //Delete RGB quad
  846.     if (QuadRGB)
  847.     {
  848.         delete[] QuadRGB;
  849.         QuadRGB = NULL;
  850.     }
  851.  
  852.     //Delete DEPTH quad
  853.     if (QuadDEPTH)
  854.     {
  855.         delete[] QuadRGB;
  856.         QuadRGB = NULL;
  857.     }
  858.  
  859.     //Delete USER quad
  860.     if (QuadUSER)
  861.     {
  862.         delete[] QuadUSER;
  863.         QuadUSER = NULL;
  864.     }
  865.  
  866.    
  867.  
  868.     //Shutdown sensor
  869.     SensorShutdown();
  870.  
  871.     initiated = false;
  872.  
  873.     UE_LOG(KinectV1, Log, TEXT("Cleanup ready !!\n"));
  874. }
  875.  
  876. nite::JointType FKinectV1Device::ConvertJoint(EJointType Joint) {
  877.     switch(Joint){
  878.        
  879.         //Center
  880.         case EJointType::Head: return nite::JOINT_HEAD; break;
  881.         case EJointType::Neck: return nite::JOINT_NECK; break;
  882.         case EJointType::SpineBase: return nite::JOINT_TORSO; break;
  883.  
  884.         //Arms
  885.         case EJointType::ShoulderLeft: return nite::JOINT_LEFT_SHOULDER; break;
  886.         case EJointType::ShoulderRight: return nite::JOINT_RIGHT_SHOULDER; break;
  887.         case EJointType::ElbowLeft: return nite::JOINT_LEFT_ELBOW; break;
  888.         case EJointType::ElbowRight: return nite::JOINT_RIGHT_ELBOW; break;
  889.         case EJointType::HandLeft: return nite::JOINT_LEFT_HAND; break;
  890.         case EJointType::HandRight: return nite::JOINT_RIGHT_HAND; break;
  891.        
  892.         //Legs
  893.         case EJointType::HipLeft: return nite::JOINT_LEFT_HIP; break;
  894.         case EJointType::HipRight: return nite::JOINT_RIGHT_HIP; break;
  895.         case EJointType::KneeLeft: return nite::JOINT_LEFT_KNEE; break;
  896.         case EJointType::KneeRight: return nite::JOINT_RIGHT_KNEE; break;
  897.         case EJointType::FootLeft: return nite::JOINT_LEFT_FOOT; break;
  898.         case EJointType::FootRight: return nite::JOINT_RIGHT_FOOT; break;
  899.     }
  900.     return nite::JOINT_TORSO;
  901. }
  902.  
  903. EJointType FKinectV1Device::GetClosestBodyJoint(EJointType HandJoint) {
  904.     FVector HandPos = GetBonePosition(HandJoint);
  905.     float distanceMinimum = 10000;
  906.     float distance = 0;
  907.     EJointType ResultJoint = EJointType::SpineBase;
  908.  
  909.     //Joints to check
  910.     FVector SpineBase = GetBonePosition(EJointType::SpineBase);
  911.     FVector Neck = GetBonePosition(EJointType::Neck);
  912.     FVector LeftThigh = GetBonePosition(EJointType::HipLeft);
  913.     FVector RightThigh = GetBonePosition(EJointType::HipRight);
  914.  
  915.     //Check distance of each from the hand joint
  916.     if ((HandPos - SpineBase).Size() < distanceMinimum) { distanceMinimum = (HandPos - SpineBase).Size();  ResultJoint = EJointType::SpineBase; }
  917.     if ((HandPos - Neck).Size() < distanceMinimum) { distanceMinimum = (HandPos - Neck).Size();  ResultJoint = EJointType::Neck; }
  918.     if ((HandPos - LeftThigh).Size() < distanceMinimum) { distanceMinimum = (HandPos - LeftThigh).Size();  ResultJoint = EJointType::HipLeft; }
  919.     if ((HandPos - RightThigh).Size() < distanceMinimum) { distanceMinimum = (HandPos - RightThigh).Size();  ResultJoint = EJointType::HipRight; }
  920.  
  921.     return ResultJoint;
  922. }
Add Comment
Please, Sign In to add comment