Hi,
I am working on a project where the camera needs to capture frames at exactly 20 ms intervals (or another interval, but never mind). The problem is that I could not figure out what was wrong for a long time, so I created these two measurement programs, which in my opinion show that the Eye's hardware clock is not as accurate as I need. In this topic http://codelaboratories.com/forums/viewthread/221/ Alex, you wrote:
the camera hardware runs and captures at the perfectly fixed clock
I pasted in a few of my classes — Clock and Camera — just in case you would like to look at them. The actual experiment with the strange results starts at the line "Two test programs:".
The code:
Standard clock (not very important)
#include "Stdafx.h"
#include "Clock.h"
// Default constructor: nothing to set up; the profiling state is created
// lazily in ProfileMSStart().
Clock::Clock(void)
{
}
// NOTE(review): the destructor does not free the LARGE_INTEGER allocated by
// ProfileMSStart(); callers must pair each start with ProfileMSEnd() to
// avoid leaking it — TODO confirm against the class header.
Clock::~Clock(void)
{
}
void Clock::ProfileMSStart()
{
    // Begin a profiling session: sample the performance-counter frequency
    // and stash the start counter on the heap so that ProfileMSMiddle() /
    // ProfileMSEnd() can retrieve it later through the opaque member `p`.
    LARGE_INTEGER* begin = new LARGE_INTEGER;
    QueryPerformanceFrequency(&_frequencyPT);
    QueryPerformanceCounter(begin);
    p = static_cast<PVOID>(begin);
}
double Clock::ProfileMSMiddle()
{
    // Return the elapsed time in milliseconds since ProfileMSStart(),
    // without releasing the stored start counter (safe to call repeatedly).
    LARGE_INTEGER stop, diff;
    PLARGE_INTEGER start = (PLARGE_INTEGER)p;
    // Fixed: the original "QueryPerformanceCounter(&stop;);" carried a stray
    // semicolon (an HTML-escaping artifact from the forum paste) and did not compile.
    QueryPerformanceCounter(&stop);
    diff.QuadPart = stop.QuadPart - start->QuadPart;
    // Convert counter ticks to milliseconds using the frequency sampled in
    // ProfileMSStart().
    double timeMs = 1000.0 * ((double)diff.QuadPart / (double)_frequencyPT.QuadPart);
    return timeMs;
}
double Clock::ProfileMSEnd()
{
    // Return the elapsed time in milliseconds since ProfileMSStart() and
    // release the heap-allocated start counter, ending the session.
    LARGE_INTEGER stop, diff;
    PLARGE_INTEGER start = (PLARGE_INTEGER)p;
    // Fixed: "&stop;" paste artifact (stray semicolon) — did not compile.
    QueryPerformanceCounter(&stop);
    diff.QuadPart = stop.QuadPart - start->QuadPart;
    double timeMs = 1000.0 * ((double)diff.QuadPart / (double)_frequencyPT.QuadPart);
    // Fixed: the original did "delete start; delete p;" which freed the SAME
    // allocation twice (start and p alias one LARGE_INTEGER) — undefined
    // behavior. Free it once and null the member to guard against reuse.
    delete start;
    p = NULL;
    return timeMs;
}
Standard camera class
#include "stdafx.h"
#include "Camera.h"
// Default constructor: the device itself is opened later via initCamera().
Camera::Camera(void)
{
}
// NOTE(review): the destructor does not stop/destroy the CLEye camera or
// release pCapImage; those resources live until process exit — TODO confirm
// whether cleanup belongs here.
Camera::~Camera(void)
{
}
bool Camera::initCamera(int id)
{
CLEyeCameraColorMode _mode;
CLEyeCameraResolution _resolution;
bool _isColor;
GUID _cameraGUID;
int _fps;
int w, h;
/* int formats[] = { CLEYE_MONO_PROCESSED, CLEYE_COLOR_PROCESSED, CLEYE_MONO_RAW, CLEYE_COLOR_RAW, CLEYE_BAYER_RAW };
// list of QVGA frame rates to test
float ratesQvga[] = { 15, 20, 30, 40, 50, 60, 75, 90, 100, 120, 150, 187 };*/
_resolution = CLEYE_VGA;
_mode = CLEYE_COLOR_RAW;
_fps = FPS;
_cameraGUID = CLEyeGetCameraUUID(id);
if(_mode == CLEYE_COLOR_PROCESSED || _mode == CLEYE_COLOR_RAW)
_isColor = true;
else
_isColor = false;
if(!CLEyeGetCameraCount()) return false;
// Create camera instances
_cam = CLEyeCreateCamera(_cameraGUID, _mode, _resolution, (float)_fps);
if(_cam == NULL) return 1;
// Get camera frame dimensions
CLEyeCameraGetFrameDimensions(_cam, w, h);
// Create the OpenCV images
pCapImage = cvCreateImage(cvSize(w, h), IPL_DEPTH_8U, _isColor ? 4 : 1);
// Set some camera parameters
#ifdef DEBUG
cout << "Gain: "<< CLEyeGetCameraParameter(_cam, CLEYE_GAIN) << endl;
cout << "Exposure: "<< CLEyeGetCameraParameter(_cam, CLEYE_EXPOSURE) << endl;
#endif
CLEyeSetCameraParameter(_cam, CLEYE_GAIN, 0);
CLEyeSetCameraParameter(_cam, CLEYE_EXPOSURE, START_EXPOSURE);
// Start capturing
CLEyeCameraStart(_cam);
Sleep(1000);
cvGetRawData(pCapImage, &pCapBuffer;, &step;, &size;);
return true;
}
cv::Mat Camera::getFrame()
{
	// Block until the driver delivers the next frame into the raw capture
	// buffer. NOTE(review): the return code of CLEyeCameraGetFrame is
	// discarded, matching the original behavior.
	CLEyeCameraGetFrame(_cam, pCapBuffer);
	// Deep-copy the IplImage into a cv::Mat (copyData=true, allowND=true,
	// coiMode=1) so the caller owns an independent frame.
	return cv::cvarrToMat(pCapImage, true, true, 1);
}
// Frame dimensions as filled in by cvGetRawData() during initCamera().
CvSize Camera::getSize()
{
	return size;
}
// Forward a new CLEYE_EXPOSURE value to the driver while capturing.
void Camera::setExposure(int exposure)
{
	CLEyeSetCameraParameter(_cam, CLEYE_EXPOSURE, exposure);
}
Constants.h
#define FPS 50.0            // capture frame rate, frames per second (NOTE(review): assigned to an int _fps in initCamera — the fraction is truncated)
#define START_EXPOSURE 15   // initial CLEYE_EXPOSURE value set at startup
Two test programs:
#include "stdafx.h"
Camera* cam;            // camera under test (created in _tmain)
Clock* clockC;          // QueryPerformanceCounter-based stopwatch
int framesCount = 0;    // index of the frame being measured in the loop
// Drift test: compare the wall-clock time at which each frame is grabbed
// against the ideal schedule (frame index * 1000/FPS ms) and print the
// difference. A steadily growing value means the camera's frame clock
// drifts relative to the PC's performance counter.
int _tmain(int argc, _TCHAR* argv[])
{
	cam = new Camera();
	clockC = new Clock();
	cam->initCamera(0);
	// Let the camera settle before starting the measurement.
	Sleep(2000);
	clockC->ProfileMSStart();
	while(true)
	{
		// elapsed-ms minus expected-ms for this frame index; printed BEFORE
		// the blocking getFrame() call for the same index.
		cout << clockC->ProfileMSMiddle() - ((double)framesCount++ * (1000.0/FPS)) << endl;
		cam->getFrame();
	}
	return 0;
}
The result, for different frame rates and QVGA/VGA modes, is a printed value that slowly but constantly rises or falls.
Something like:
1.02
1.15
1.23
1.34
1.56
1.78
and so on
Second program, which uses Windows API function - Timer fires callback function every 40ms
#include "stdafx.h"
Clock* clockC;          // stopwatch — NOTE(review): created below but never used
Camera* camera;         // camera under test (created in _tmain)
int frameCounter = -1;  // NOTE(review): declared but never used in this program
int n = 0, m = 0;       // n = callback entries, m = completed getFrame() calls
cv::Mat* mat;           // destination for each grabbed frame
// Timer-queue callback: increments n on entry and m only after the
// (blocking) frame grab completes, so comparing n and m shows whether
// callbacks are piling up inside getFrame(). NOTE(review): n and m are
// plain ints updated from timer-queue threads with no synchronization, so
// concurrent callbacks can race on them — the printed counts may be skewed.
void callback(PVOID param, BOOLEAN waitOrTimerFired)
{
	n++;
	*mat = camera->getFrame();
	m++;
}
int _tmain(int argc, _TCHAR* argv[])
{
clockC = new Clock();
camera = new Camera();
camera->initCamera(0);
Sleep(2000);
mat = new cv::Mat(camera->getSize().height, camera->getSize().width, CV_8UC3);
HANDLE handle;
CreateTimerQueueTimer(&handle;, NULL, (WAITORTIMERCALLBACK)callback, NULL, 100, (1000.0 / FPS), WT_EXECUTEDEFAULT);
while(cvWaitKey(10000))
{
cout << " " << n << " " << m << endl;
}
return 0;
}
The result for some camera settings (I don't remember exactly which ones — I tried only one or two setups) was something like:
n = 450 m = 460
n = 902 m = 923
n = 1357 m = 1385
and so on — it looks like the threads are stopping inside the getFrame() method.
Is this what it should look like? I'm confused, because I completely don't understand why the results are what they are. Please help, if possible.
Is there any framerate/resolution mode which time interval is perfect?
Sorry if I made some English mistakes.