Hi
I posted a topic here some time ago about how to get multiple PS3Eye cameras working with my OpenCvSharp project in Unity3d.
Several things were not working for me in the OpenCvSharp project, so I decided to switch over and build upon the C++ multicam example that comes with the SDK, and then find a way to send the data I need from the C++ app to Unity3d (probably through some socket programming or OSC).
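For the Unity end I was leaning towards plain UDP rather than a full OSC library. Something like this little sender is roughly what I had in mind (untested; the port number and the simple “x;y;colorTag” text format are just placeholders I made up, and it isn’t wired into the capture code yet):
// Minimal UDP sender sketch (Winsock) - untested, port and message format are placeholders
#include <winsock2.h>
#include <ws2tcpip.h>
#include <stdio.h>
#include <string.h>
#pragma comment(lib, "ws2_32.lib")

// Sends one "x;y;colorTag" datagram to a Unity listener on localhost:7000 (made-up port)
bool SendBlobMessage(float x, float y, const char *colorTag)
{
    WSADATA wsa;
    if(WSAStartup(MAKEWORD(2,2), &wsa) != 0) return false;
    SOCKET s = socket(AF_INET, SOCK_DGRAM, IPPROTO_UDP);
    if(s == INVALID_SOCKET) { WSACleanup(); return false; }
    sockaddr_in dest = {0};
    dest.sin_family = AF_INET;
    dest.sin_port = htons(7000);                    // placeholder port
    dest.sin_addr.s_addr = inet_addr("127.0.0.1");  // Unity running on the same machine
    char msg[128];
    sprintf_s(msg, "%f;%f;%s", x, y, colorTag);     // plain text, not real OSC
    int sent = sendto(s, msg, (int)strlen(msg), 0, (sockaddr*)&dest, (int)sizeof(dest));
    closesocket(s);
    WSACleanup();
    return sent != SOCKET_ERROR;
}
In the real thing I would of course create the socket once and keep it open, rather than doing WSAStartup/WSACleanup for every message.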
Anyway, I have spent quite some time messing around in the multicam example, but my skills in Visual Studio, multithreaded programming and C++ in general are not great…
Here is what I need:
- Two PS3Eye cameras capture frames at 30 fps, each in its own capture thread
- The captured images are converted to HSV colorspace
- A third thread waits for the other threads to grab and convert images, then grabs the two converted images and stacks them into a “wider” image
- The wide image is thresholded twice into two wide binary images (one for blue and one for green objects)
- The binary images are analyzed for blobs (I think I will just use cvFindContours, as I got that working OK in OpenCvSharp; see the rough sketch after this list)
- The blob coordinates, along with a “color tag”, are sent to Unity3D in a continuous stream.
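For reference, here is the contour/blob step as I imagine it in the C API (roughly a translation of what I had working in OpenCvSharp; pStackedBlueThres is the wide blue binary image from the list above, and the minimum area of 50 is just a number I made up):
// Rough blob sketch with cvFindContours (not wired into the stitching thread yet)
CvMemStorage *storage = cvCreateMemStorage(0);
CvSeq *contours = NULL;
// cvFindContours modifies its input, so work on a copy of the binary image
IplImage *pScratch = cvCloneImage(pStackedBlueThres);
cvFindContours(pScratch, storage, &contours, sizeof(CvContour),
               CV_RETR_EXTERNAL, CV_CHAIN_APPROX_SIMPLE, cvPoint(0, 0));
for(CvSeq *c = contours; c != NULL; c = c->h_next)
{
    double area = fabs(cvContourArea(c, CV_WHOLE_SEQ));
    if(area < 50) continue;                        // skip tiny noise blobs (made-up threshold)
    CvRect r = cvBoundingRect(c, 0);
    float cx = r.x + r.width * 0.5f;               // blob centre in the wide image
    float cy = r.y + r.height * 0.5f;
    printf("blue blob at (%.1f, %.1f)\n", cx, cy); // later: send cx, cy and "blue" to Unity
}
cvReleaseImage(&pScratch);
cvReleaseMemStorage(&storage);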
As per the multicam example, I have the two threads grabbing images and converting them to HSV. I have also created a StitchingClass with a third thread, and set up events to let that thread know when it can start stitching/stacking… I am unsure whether I have done these things entirely correctly; as I said, I’m no great C++ programmer…
There are a few places where I’m really stuck now:
- How do I grab the two HSV images in my StitchingThread?
- How do I end the StitchingThread if the wait times out?
- How should I declare the frame dimensions for my stacked image? (I have an idea that I can use CLEyeCameraGetFrameDimensions() to get w and h, once I figure out how to access the HSV images from the stitching thread.)
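To make that last question a bit more concrete, below is a rough sketch of the constructor change I had in mind, so the stitching class can reach the two capture objects and read the frame size straight off the IplImage header. It assumes StitchingClass gets moved below CLEyeCameraCapture in the file, and InitStackedImages() is a helper that doesn’t exist yet and would only be safe to call after both cameras have produced their first frame. For the first two questions I have put my best guesses as comments inside the stitching Run() in the code below.
// Sketch only: hand the two capture objects to the stitching class from main()
class StitchingClass
{
    CLEyeCameraCapture *_pCapture[2];   // the two capture objects, passed in from main()
    int w, h;                           // single-camera frame size
public:
    StitchingClass(CLEyeCameraCapture *cap0, CLEyeCameraCapture *cap1)
    {
        _pCapture[0] = cap0;
        _pCapture[1] = cap1;
    }
    void InitStackedImages()
    {
        // Once the first frames exist, the size can be read from the IplImage header
        // instead of calling CLEyeCameraGetFrameDimensions() a second time
        IplImage *pLeft = _pCapture[0]->GetImage();
        w = pLeft->width;
        h = pLeft->height;
        // then: pStackedHSV = cvCreateImage(cvSize(w * 2, h), IPL_DEPTH_8U, 3); etc.
    }
};
In main() that would become stitchInstance = new StitchingClass(cam[0], cam[1]); after both captures have started. Does that look like a sane way to share the images between the threads, or is there a better pattern?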
I have included my code below. Any comments and/or help are greatly appreciated:
#include "stdafx.h"
CvScalar HSVMinBlue = cvScalar(91, 100, 135);
CvScalar HSVMaxBlue = cvScalar(150, 255, 256);
CvScalar HSVMinGreen = cvScalar(55, 100, 135);
CvScalar HSVMaxGreen = cvScalar(90, 256, 256);
IplConvKernel* kernel = cvCreateStructuringElementEx(5,5,3,3, CV_SHAPE_ELLIPSE, 0);
// Events used to signal that each camera's HSV image is ready for stitching
HANDLE hEvents[2];
//Stitching class
class StitchingClass
{
bool _running;
HANDLE _hThread;
int w, h;
public:
StitchingClass()
{
//CLEyeCameraGetFrameDimensions(cam, w, h);
//printf("Capture width: ", w, " Capture height: ", h);
}
bool StartStitching()
{
_running = true;
cvNamedWindow("Stitched blue binary", CV_WINDOW_AUTOSIZE);
cvNamedWindow("Stitched green binary", CV_WINDOW_AUTOSIZE);
// Start thread for stitching
_hThread = CreateThread(NULL, 0, &StitchingClass::StitchingThread, this, 0, 0);
if(_hThread == NULL)
{
MessageBox(NULL,"Could not create stitching thread","CLEyeMulticamTest", MB_ICONEXCLAMATION);
return false;
}
return true;
}
void Run(){
//Create IplImages
IplImage *pStackedHSV;
IplImage *pStackedBlueThres;
IplImage *pStackedGreenThres;
IplImage *pTempImage;
//Create the appropriate OpenCV images
//pStackedHSV = cvCreateImage(cvSize((w*2), h), IPL_DEPTH_8U, 3);
//pStackedBlueThres = cvCreateImage(cvSize((w*2), h), IPL_DEPTH_8U, 1);
//pStackedGreenThres = cvCreateImage(cvSize((w*2), h), IPL_DEPTH_8U, 1);
//pTempImage = cvCreateImage(cvSize((w*2), h), IPL_DEPTH_8U, 1);
while(_running){
// In the while loop inside the Run() function we wait for all of the events with timeout of 2000ms
if(WaitForMultipleObjects(2, hEvents, TRUE, 2000) == WAIT_TIMEOUT)
{
// we timed out, so something went wrong
printf("Event timeout!");
//HOW DO I EXIT THE THREAD HERE?
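//My best guess so far (please correct me if this is wrong): just leave the loop and let
//Run() return, which makes StitchingThread() return and ends the thread:
//_running = false;
//break;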
}
else{
printf("Events triggered!");
// We got all images. Stitch images _pHSVImage 1 and 2 together here
//HOW DO I ACCESS THE TWO IMAGES? (I.E. VIA THE "GetImage()" FUNCTION?)
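//My guess: if the stitching class holds pointers to the two capture objects (see the
//sketch further up in the post), call GetImage() on each and copy them side by side
//into pStackedHSV using ROIs:
//IplImage *pLeft  = _pCapture[0]->GetImage();
//IplImage *pRight = _pCapture[1]->GetImage();
//cvSetImageROI(pStackedHSV, cvRect(0, 0, w, h));
//cvCopy(pLeft, pStackedHSV, NULL);
//cvSetImageROI(pStackedHSV, cvRect(w, 0, w, h));
//cvCopy(pRight, pStackedHSV, NULL);
//cvResetImageROI(pStackedHSV);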
//Threshold the stitched image for blue and green objects using InRange
//cvInRangeS(pStackedHSV, HSVMinBlue, HSVMaxBlue, pStackedBlueThres);
//cvInRangeS(pStackedHSV, HSVMinGreen, HSVMaxGreen, pStackedGreenThres);
//Do some noise reduction using CV_MOP_CLOSE (dilation followed by erosion) with a custom 5x5 elliptical kernel
//cvMorphologyEx(pStackedBlueThres, pStackedBlueThres, pTempImage, kernel, CV_MOP_CLOSE, 1);
//cvMorphologyEx(pStackedGreenThres, pStackedGreenThres, pTempImage, kernel, CV_MOP_CLOSE, 1);
//Then use CV_MOP_OPEN (erosion followed by dilation) with the same 5x5 elliptical kernel to remove small leftover speckles
//cvMorphologyEx(pStackedBlueThres, pStackedBlueThres, pTempImage, kernel, CV_MOP_OPEN, 1);
//cvMorphologyEx(pStackedGreenThres, pStackedGreenThres, pTempImage, kernel, CV_MOP_OPEN, 1);
//Reset the events
ResetEvent(hEvents[0]);
ResetEvent(hEvents[1]);
}
}
}
static DWORD WINAPI StitchingThread(LPVOID instance)
{
// seed the rng with current tick count and thread id
srand(GetTickCount() + GetCurrentThreadId());
// forward thread to stitching function
StitchingClass *pThis = (StitchingClass *)instance;
pThis->Run();
return 0;
}
};
// Sample camera capture class
class CLEyeCameraCapture
{
IplImage *pHSVImage;
CHAR _windowName[256];
CHAR _HSVwindowName[256];
GUID _cameraGUID;
CLEyeCameraInstance _cam;
CLEyeCameraColorMode _mode;
CLEyeCameraResolution _resolution;
float _fps;
bool _running;
HANDLE _hThread;
int _indexID;
public:
CLEyeCameraCapture(LPSTR windowName, LPSTR HSVwindowName, GUID cameraGUID, CLEyeCameraColorMode mode, CLEyeCameraResolution resolution, float fps, int indexID) :
_cameraGUID(cameraGUID), _cam(NULL), _mode(mode), _resolution(resolution), _fps(fps), _running(false), _indexID(indexID)
{
// This event will be used to signal that our image is ready for stitching
hEvents[_indexID] = CreateEvent(NULL, FALSE, FALSE, NULL);
strcpy(_windowName, windowName);
strcpy(_HSVwindowName, HSVwindowName);
}
IplImage *GetImage()
{
return pHSVImage;
}
bool StartCapture()
{
_running = true;
cvNamedWindow(_windowName, CV_WINDOW_AUTOSIZE);
cvNamedWindow(_HSVwindowName, CV_WINDOW_AUTOSIZE);
// Start CLEye image capture thread
_hThread = CreateThread(NULL, 0, &CLEyeCameraCapture::CaptureThread, this, 0, 0);
if(_hThread == NULL)
{
MessageBox(NULL,"Could not create stitching thread","CLEyeMulticamTest", MB_ICONEXCLAMATION);
return false;
}
return true;
}
void StopCapture()
{
if(!_running) return;
_running = false;
WaitForSingleObject(_hThread, 1000);
cvDestroyWindow(_windowName);
cvDestroyWindow(_HSVwindowName);
}
void IncrementCameraParameter(int param)
{
if(!_cam) return;
printf("CLEyeGetCameraParameter %d\n", CLEyeGetCameraParameter(_cam, (CLEyeCameraParameter)param));
CLEyeSetCameraParameter(_cam, (CLEyeCameraParameter)param, CLEyeGetCameraParameter(_cam, (CLEyeCameraParameter)param)+10);
}
void DecrementCameraParameter(int param)
{
if(!_cam) return;
printf("CLEyeGetCameraParameter %d\n", CLEyeGetCameraParameter(_cam, (CLEyeCameraParameter)param));
CLEyeSetCameraParameter(_cam, (CLEyeCameraParameter)param, CLEyeGetCameraParameter(_cam, (CLEyeCameraParameter)param)-10);
}
void Run()
{
//Create some IplImages
int w, h;
IplImage *pCapImage;
PBYTE pCapBuffer = NULL;
// Create camera instance
_cam = CLEyeCreateCamera(_cameraGUID, _mode, _resolution, _fps);
if(_cam == NULL) return;
// Get camera frame dimensions
CLEyeCameraGetFrameDimensions(_cam, w, h);
//Create the appropriate OpenCV images
pCapImage = cvCreateImage(cvSize(w, h), IPL_DEPTH_8U, 4);
pHSVImage = cvCreateImage(cvSize(w, h), IPL_DEPTH_8U, 3);
// Set some camera parameters
CLEyeSetCameraParameter(_cam, CLEYE_GAIN, 0);
CLEyeSetCameraParameter(_cam, CLEYE_AUTO_GAIN, false);
CLEyeSetCameraParameter(_cam, CLEYE_AUTO_EXPOSURE, false);
CLEyeSetCameraParameter(_cam, CLEYE_EXPOSURE, 511);
CLEyeSetCameraParameter(_cam, CLEYE_AUTO_WHITEBALANCE, false);
CLEyeSetCameraParameter(_cam, CLEYE_WHITEBALANCE_RED, 127);
CLEyeSetCameraParameter(_cam, CLEYE_WHITEBALANCE_GREEN, 255);
CLEyeSetCameraParameter(_cam, CLEYE_WHITEBALANCE_BLUE, 255);
// Start capturing
CLEyeCameraStart(_cam);
cvGetImageRawData(pCapImage, &pCapBuffer);
// image capturing loop
while(_running)
{
CLEyeCameraGetFrame(_cam, pCapBuffer);
//Convert to HSV colorspace
cvCvtColor(pCapImage, pHSVImage, CV_BGR2HSV);
//After the image has been fully processed, set the event
SetEvent(hEvents[_indexID]);
cvShowImage(_windowName, pCapImage);
//cvShowImage("Stacked input", pStackedInput);
cvShowImage(_HSVwindowName, pHSVImage);
}
// Stop camera capture
CLEyeCameraStop(_cam);
// Destroy camera object
CLEyeDestroyCamera(_cam);
// Destroy the allocated OpenCV image
cvReleaseImage(&pHSVImage);
cvReleaseImage(&pCapImage);
//cvReleaseImage(&pStackedInput);
_cam = NULL;
}
static DWORD WINAPI CaptureThread(LPVOID instance)
{
// seed the rng with current tick count and thread id
srand(GetTickCount() + GetCurrentThreadId());
// forward thread to Capture function
CLEyeCameraCapture *pThis = (CLEyeCameraCapture *)instance;
pThis->Run();
return 0;
}
};
// Main program entry point
int _tmain(int argc, _TCHAR* argv[])
{
srand(GetTickCount());
// Query for number of connected cameras
int numCams = CLEyeGetCameraCount();
if(numCams == 0)
{
printf("No PS3Eye cameras detected\n");
return -1;
}
// This program only handles up to two cameras
if(numCams > 2) numCams = 2;
CLEyeCameraCapture* cam[2] = { NULL };
printf("Found %d cameras\n", numCams);
for(int i = 0; i < numCams; i++)
{
char windowName[64];
char HSVwindowName[64];
// Query unique camera uuid
GUID guid = CLEyeGetCameraUUID(i);
printf("Camera %d GUID: [x-x-x-xx-xxxxxx]\n",
i+1, guid.Data1, guid.Data2, guid.Data3,
guid.Data4[0], guid.Data4[1], guid.Data4[2],
guid.Data4[3], guid.Data4[4], guid.Data4[5],
guid.Data4[6], guid.Data4[7]);
sprintf(windowName, "Camera Window %d", i+1);
sprintf(HSVwindowName, "HSV Camera Window %d", i+1);
// Create camera capture object
// Choose color mode and resolution
cam[i] = new CLEyeCameraCapture(windowName, HSVwindowName, guid, CLEYE_COLOR_RAW, CLEYE_VGA, 30, i);
printf("Starting capture on camera %d\n", i+1);
cam[i]->StartCapture();
}
if(numCams >= 2){
//Start a stitching object
StitchingClass *stitchInstance;
stitchInstance = new StitchingClass();
stitchInstance->StartStitching();
}
printf("Use the following keys to change camera parameters:\n"
"\t'1' - select camera 1\n"
"\t'2' - select camera 2\n"
"\t'g' - select gain parameter\n"
"\t'e' - select exposure parameter\n"
"\t'z' - select zoom parameter\n"
"\t'r' - select rotation parameter\n"
"\t'+' - increment selected parameter\n"
"\t'-' - decrement selected parameter\n");
// The <ESC> key will exit the program
CLEyeCameraCapture *pCam = NULL;
int param = -1, key;
while((key = cvWaitKey(0)) != 0x1b)
{
switch(key)
{
case 'g': case 'G': printf("Parameter Gain\n"); param = CLEYE_GAIN; break;
case 'e': case 'E': printf("Parameter Exposure\n"); param = CLEYE_EXPOSURE; break;
case 'z': case 'Z': printf("Parameter Zoom\n"); param = CLEYE_ZOOM; break;
case 'r': case 'R': printf("Parameter Rotation\n"); param = CLEYE_ROTATION; break;
case '1': printf("Selected camera 1\n"); pCam = cam[0]; break;
case '2': printf("Selected camera 2\n"); pCam = cam[1]; break;
case '+': if(pCam) pCam->IncrementCameraParameter(param); break;
case '-': if(pCam) pCam->DecrementCameraParameter(param); break;
}
}
for(int i = 0; i < numCams; i++)
{
printf("Stopping capture on camera %d\n", i+1);
cam[i]->StopCapture();
delete cam[i];
}
return 0;
}