how to free the camera - opencv

How can I release the camera while the application keeps running? The camera stays switched on even after the release call. Here is the code; I don't know how to release it properly.
#include <cv.h>
#include <highgui.h>

int main( int argc, char* argv[] )
{
    int i = 1;
    CvCapture* capture = NULL;
    capture = cvCreateCameraCapture( 0 );
    IplImage *frames = cvQueryFrame( capture );
    while(1)
    {
        if (i == 20)
            cvReleaseCapture( &capture );
        char c = cvWaitKey(33);
        if( c == 27 ) break;
        i++;
    }
    return 0;
}

Your code isn't totally clear so I hope I'm understanding correctly, but I think what you want is something more like this...
#include <cv.h>
#include <highgui.h>

int main( int argc, char* argv[] )
{
    int i = 1;
    CvCapture* capture = NULL;
    capture = cvCreateCameraCapture( 0 );
    IplImage *frame = cvQueryFrame( capture );
    while(1)
    {
        // if we are on the 20th frame, release the capture and quit.
        if (i == 20)
        {
            cvReleaseCapture( &capture );
            break;
        }
        // if the user presses the key with code 27 (Esc), release the capture and quit.
        char c = cvWaitKey(33);
        if( c == 27 )
        {
            cvReleaseCapture( &capture );
            break;
        }
        // grab the next frame here.
        frame = cvQueryFrame( capture );
        i++;
    }
    return 0;
}
Your problem is that you don't break out of the loop after releasing the capture, so you keep looping with a released camera. Also, you had IplImage *frames instead of IplImage *frame; the pointer only ever refers to a single frame at a time, so I renamed it to avoid confusion.
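If you are not tied to the old C API, the camera lifetime is easier to manage with cv::VideoCapture: it is released automatically in its destructor and can also be released explicitly. Below is a minimal sketch of the same 20-frames-or-Esc loop, assuming OpenCV 2.x and the default camera at index 0:
#include <opencv2/highgui/highgui.hpp>

int main()
{
    cv::VideoCapture cap(0);              // open the default camera
    if( !cap.isOpened() )
        return -1;

    cv::Mat frame;
    for( int i = 1; ; i++ )
    {
        cap >> frame;                     // grab the next frame
        if( frame.empty() )
            break;
        cv::imshow("camera", frame);

        // after 20 frames, or when Esc is pressed, free the camera and quit
        if( i == 20 || cv::waitKey(33) == 27 )
        {
            cap.release();                // switches the camera off explicitly
            break;
        }
    }
    return 0;                             // the destructor would also release it
}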

Related

‘capture’ was not declared in this scope: capture = cvCaptureFromCAM( 0 ); how to fix it?

I was following a tutorial on the web that uses OpenCV to detect eyes. When I compile it, this error appears, and I haven't been able to fix it:
main.cpp:59:1: error: ‘capture’ was not declared in this scope
capture = cvCaptureFromCAM( 0 );
The code is long; I am only posting the part that I think is causing this error. Full code:
#include <opencv2/objdetect/objdetect.hpp>
#include <opencv2/highgui/highgui.hpp>
#include <opencv2/imgproc/imgproc.hpp>
#include <iostream>
#include <queue>
#include <stdio.h>
#include <math.h>
#include "constants.h"
#include "findEyeCenter.h"
#include "findEyeCorner.h"

/** Function Headers */
void detectAndDisplay( cv::Mat frame );

int main( )
{
    cv::Mat frame;
    if( !face_cascade.load( face_cascade_name ) ){ printf("--(!)Error loading face cascade, please change face_cascade_name in source code.\n"); return -1; };
    createCornerKernels();
    ellipse(skinCrCbHist, cv::Point(113, 155.6), cv::Size(23.4, 15.2),
            43.0, 0.0, 360.0, cv::Scalar(255, 255, 255), -1);
    capture = cvCaptureFromCAM( 0 );
    if( capture )
    {
        while( true )
        {
            frame = cvQueryFrame( capture );
            // mirror it
            imshow("Video", frame);
            cv::flip(frame, frame, 1);
            frame.copyTo(debugImage);
            // Apply the classifier to the frame
            if( !frame.empty() ) {
                detectAndDisplay( frame );
            }
            else {
                printf(" --(!) No captured frame -- Break!");
                break;
            }
            imshow(main_window_name, debugImage);
            int c = cv::waitKey(10);
            if( (char)c == 'c' ) { break; }
            if( (char)c == 'f' ) {
                imwrite("frame.png", frame);
            }
        }
    }
    releaseCornerKernels();
    return 0;
}
Try to use the C++ API: cv::VideoCapture capture(0); writing it this way both declares and opens the camera, which fixes the 'was not declared in this scope' error.
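A minimal sketch of how the capture section could look with cv::VideoCapture; detectAndDisplay, debugImage and main_window_name are assumed to come from the rest of the tutorial code, exactly as in the snippet above:
cv::VideoCapture capture(0);                // declares and opens the default camera
if( capture.isOpened() )
{
    cv::Mat frame;
    while( true )
    {
        capture >> frame;                   // replaces cvQueryFrame
        if( frame.empty() )
        {
            printf(" --(!) No captured frame -- Break!");
            break;
        }
        cv::flip(frame, frame, 1);          // mirror it
        frame.copyTo(debugImage);
        detectAndDisplay( frame );          // apply the classifier to the frame
        imshow(main_window_name, debugImage);

        int c = cv::waitKey(10);
        if( (char)c == 'c' ) { break; }
        if( (char)c == 'f' ) { imwrite("frame.png", frame); }
    }
}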

how to save only two frames from video in opencv

I want to save the current frame and the previous frame using OpenCV and C++. I got this code from the internet; after editing it, it saves both images as the current frame.
int main()
{
    IplImage* currFrame = 0;
    IplImage* prevFrame = 0;
    CvCapture* cap = cvCaptureFromAVI("how.mp4");
    currFrame = cvQueryFrame( cap );
    char s[20];
    prevFrame = cvCloneImage( currFrame );
    while( (currFrame = cvQueryFrame( cap )) )
    {
        int num = 1;
        cvShowImage( "DisplayVideo", currFrame );
        sprintf(s, "pics/frame%d.jpg", num);
        cvSaveImage(s, currFrame);
        cvNamedWindow("image1");
        cvShowImage("image1", currFrame);
        cvCopy( currFrame, prevFrame );
        num = 2;
        sprintf(s, "pics/frame%d.jpg", num);
        cvSaveImage(s, prevFrame);
        cvNamedWindow("image2");
        cvShowImage("image2", prevFrame);
        char c = cvWaitKey(500);
        if( c == 27 ) break;
    }
    cvReleaseCapture( &cap );
}
I got the answer; here is the code:
#include <opencv2/opencv.hpp>
using namespace cv;

int main()
{
    VideoCapture cap("how.mp4");
    if( !cap.isOpened() ) return -1;

    Mat curr, prev;
    cap >> curr;
    curr.copyTo(prev);
    while(1)
    {
        cap >> curr;
        if( curr.empty() ) break;          // end of video
        imwrite("pics2/current.jpg", curr);
        imwrite("pics2/previous.jpg", prev);
        imshow("image1", curr);
        imshow("image2", prev);
        curr.copyTo(prev);
        if( waitKey(500) == 27 ) break;    // Esc pressed
    }
    return 0;
}
Thanks to all for your answers and support.
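If you actually want to keep every consecutive pair on disk (in the spirit of the pics/frame%d.jpg names from the original code) instead of overwriting the same two files, a possible variation is below; the numbered file names are just an assumption about what you want:
#include <opencv2/opencv.hpp>
#include <cstdio>
using namespace cv;

int main()
{
    VideoCapture cap("how.mp4");
    if( !cap.isOpened() ) return -1;

    Mat curr, prev;
    cap >> curr;
    curr.copyTo(prev);

    char name[64];
    for( int n = 1; ; n++ )
    {
        cap >> curr;
        if( curr.empty() ) break;                   // end of video

        sprintf(name, "pics2/current_%d.jpg", n);   // hypothetical file names
        imwrite(name, curr);
        sprintf(name, "pics2/previous_%d.jpg", n);
        imwrite(name, prev);

        imshow("image1", curr);
        imshow("image2", prev);
        curr.copyTo(prev);
        if( waitKey(500) == 27 ) break;             // Esc pressed
    }
    return 0;
}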

OpenCV cvCvtColor not linked?

I'm new to openCV, and I finally got it to open a window and show the webcam output.
I'm now trying to convert frames to greyscale before displaying them. Unfortunately, I get the following error every time I try to compile:
error: ‘cvCvtColor’ was not declared in this scope
I'm using the following line to compile:
g++ main.cpp -lopencv_core -lopencv_imgproc -lopencv_highgui -lopencv_objdetect -o camera
Am I linking my code incorrectly? Here's the full code listing. Sorry for my formatting issues!
#include "opencv2/objdetect/objdetect.hpp"
#include "opencv2/highgui/highgui.hpp"
#include "opencv2/imgproc/imgproc.hpp"
#include "opencv2/core/core.hpp"
#include <stdio.h>
#include <unistd.h>

IplImage* getCameraFrame(CvCapture* &camera)
{
    IplImage *frame;
    int w, h;
    // If the camera hasn't been initialized, then open it.
    if (!camera) {
        printf("Acessing the camera ...\n");
        camera = cvCreateCameraCapture( 0 );
        if (!camera) {
            printf("Couldn't access the camera.\n");
            exit(1);
        }
        // Try to set the camera resolution to 320 x 240.
        cvSetCaptureProperty(camera, CV_CAP_PROP_FRAME_WIDTH, 320);
        cvSetCaptureProperty(camera, CV_CAP_PROP_FRAME_HEIGHT, 240);
        // Get the first frame, to make sure the camera is initialized.
        frame = cvQueryFrame( camera );
        if (frame) {
            w = frame->width;
            h = frame->height;
            printf("Got the camera at %dx%d resolution.\n", w, h);
        }
        // Wait a little, so that the camera can auto-adjust its brightness.
        sleep(1); // sleep() takes seconds, not milliseconds
    }
    // Wait until the next camera frame is ready, then grab it.
    frame = cvQueryFrame( camera );
    if (!frame) {
        printf("Couldn't grab a camera frame.\n");
        exit(1);
    }
    return frame;
}

int main(int argc, char* argv[])
{
    cvNamedWindow( "Example2", CV_WINDOW_AUTOSIZE );
    CvCapture* capture = cvCreateCameraCapture( 0 );
    IplImage* frame;
    while(1) {
        frame = cvQueryFrame( capture );
        if( !frame ) break;
        cvCvtColor(frame, frame, CV_RGB2GRAY);
        cvShowImage( "Example2", frame );
        char c = cvWaitKey(33);
        if( c == 27 ) break;
    }
    cvReleaseCapture( &capture );
    cvDestroyWindow( "Example2" );
    return 0;
}
You're using the headers for the C++ API, but code from the deprecated C API (please don't!). Use the C++ interface instead:
cv::VideoCapture cap(0);
while( cap.isOpened() ) {
    cv::Mat f, g;
    if ( ! cap.read(f) ) break;
    cv::cvtColor(f, g, CV_RGB2GRAY);
    cv::imshow("lalala", g);
    if ( cv::waitKey(10) == 27 ) break;
}
return 0;
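For reference, here is a complete, self-contained version of that idea; it should build with the compile line from the question. Camera frames come out of OpenCV in BGR order, so CV_BGR2GRAY is used here. Treat this as a sketch rather than the exact code from the answer:
#include "opencv2/core/core.hpp"
#include "opencv2/imgproc/imgproc.hpp"
#include "opencv2/highgui/highgui.hpp"

int main()
{
    cv::VideoCapture cap(0);                    // open the default camera
    if( !cap.isOpened() )
        return 1;

    cv::Mat frame, gray;
    while( true ) {
        if( !cap.read(frame) )                  // grab the next frame
            break;
        cv::cvtColor(frame, gray, CV_BGR2GRAY); // convert to greyscale
        cv::imshow("Example2", gray);
        if( cv::waitKey(33) == 27 )             // Esc quits
            break;
    }
    return 0;
}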

Converting rgb video to grayscale in OpenCV

I have this program that should convert an RGB video to grayscale:
CvCapture* capture = 0;
capture = cvCreateFileCapture( "sample.avi" );
if( !capture )
{
    return -1;
}
IplImage *bgr_frame = cvQueryFrame( capture );   // Init the video read
double fps = cvGetCaptureProperty( capture, CV_CAP_PROP_FPS );
CvSize size = cvSize( (int)cvGetCaptureProperty( capture, CV_CAP_PROP_FRAME_WIDTH ),
                      (int)cvGetCaptureProperty( capture, CV_CAP_PROP_FRAME_HEIGHT ) );
CvVideoWriter *writer = cvCreateVideoWriter( "izlaz.avi", CV_FOURCC_DEFAULT, fps, size );
IplImage *grayScaleImage = cvCreateImage( size, IPL_DEPTH_8U, 1 );
while( (bgr_frame = cvQueryFrame( capture )) != NULL )
{
    cvCvtColor( bgr_frame, grayScaleImage, CV_BGR2GRAY );
    cvWriteFrame( writer, grayScaleImage );
}
cvReleaseVideoWriter( &writer );
cvReleaseCapture( &capture );
But that is not what it does. Can anyone help me modify it so that it works? Thank you :)
I think you will need to choose a different fourcc for your codec; CV_FOURCC_DEFAULT didn't work for me. I tried CV_FOURCC('P','I','M','1') and CV_FOURCC('M','J','P','G'), and they both worked for me.
Here is the sample code that I tried:
#include <opencv2/opencv.hpp>
#include <iostream>
using namespace cv;

int main(int argc, char* argv[])
{
    const char *my_file = "C:\\vid_an2\\Wildlife.wmv";
    std::cout << "Video File " << my_file << std::endl;
    cv::VideoCapture input_video;
    if( input_video.open(my_file) )
    {
        std::cout << "Video file open " << std::endl;
    }
    else
    {
        std::cout << "Not able to Video file open " << std::endl;
    }
    int fps = input_video.get(CV_CAP_PROP_FPS);
    int frameCount = input_video.get(CV_CAP_PROP_FRAME_COUNT);
    double fheight = input_video.get(CV_CAP_PROP_FRAME_HEIGHT);
    double fwidth = input_video.get(CV_CAP_PROP_FRAME_WIDTH);
    CvSize fsize;
    fsize.width = fwidth;
    fsize.height = fheight;
    CvVideoWriter *new_writer = cvCreateVideoWriter("brg_file.avi", CV_FOURCC('M','J','P','G'), fps, fsize, 0);
    std::cout << "Video Frame Rate " << fps << std::endl;
    std::cout << "Video Frame Count " << frameCount << std::endl;
    Mat cap_img;
    Mat gry_img;
    IplImage new_img;
    while( input_video.grab() )
    {
        if( input_video.retrieve(cap_img) )
        {
            cvtColor(cap_img, gry_img, CV_RGB2GRAY);
            new_img = gry_img.operator IplImage();
            int ret = cvWriteFrame(new_writer, (const IplImage*)&new_img);
            std::cout << "Wrote Frame " << ret << std::endl;
        }
    }
    cvReleaseVideoWriter(&new_writer);
    return 0;
}
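If mixing cv::Mat with the old IplImage/cvWriteFrame calls feels awkward, the same thing can be written entirely with the C++ API using cv::VideoWriter. A minimal sketch, assuming the MJPG fourcc that worked above; whether isColor=false (grayscale output) is honoured depends on the codec and backend:
#include <opencv2/opencv.hpp>

int main()
{
    cv::VideoCapture in("sample.avi");
    if( !in.isOpened() )
        return -1;

    double fps = in.get(CV_CAP_PROP_FPS);
    cv::Size size((int)in.get(CV_CAP_PROP_FRAME_WIDTH),
                  (int)in.get(CV_CAP_PROP_FRAME_HEIGHT));

    // isColor = false, so the writer expects single-channel frames
    cv::VideoWriter out("izlaz.avi", CV_FOURCC('M','J','P','G'), fps, size, false);
    if( !out.isOpened() )
        return -1;

    cv::Mat frame, gray;
    while( in.read(frame) )
    {
        cv::cvtColor(frame, gray, CV_BGR2GRAY);
        out << gray;                      // write the grayscale frame
    }
    return 0;                             // both objects release in their destructors
}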

HIDAPI in two threads

According to https://github.com/signal11/hidapi/issues/72 HIDAPI ought to be thread safe on Linux machines. However, I can't get it working at all. This is what I do:
#ifdef WIN32
#include <windows.h>
#endif
#include <stdio.h>
#include <stdlib.h>
#include <string.h>
#include <pthread.h>
#include <assert.h>
#include "hidapi.h"

hid_device *handle;

static void *TaskCode(void *argument)
{
    int res;
    //hid_device *handle;
    unsigned char buf[64];

    // res = hid_init();
    // if( res == -1 )
    // {
    //     return (void*)1;
    // }
    //
    // handle = hid_open(0x0911, 0x251c, NULL);
    // if( handle == NULL )
    // {
    //     return (void*)2;
    // }

    printf( "while 2\n");
    while( 1 )
    {
        memset( buf, 64, 0 );
        res = hid_read(handle, buf, 0);
        if( res == -1 )
        {
            return (void*)3;
        }
        printf( "received %d bytes\n", res);
        for (int i = 0; i < res; i++)
            printf("Byte %d: %02x ", i+1, buf[i]);
        //printf( "%02x ", buf[0]);
        fflush(stdout);
    }
    return (void*)0;
}

int main(int argc, char* argv[])
{
    int res;
    //hid_device *handle;
    unsigned char buf[65];

    res = hid_init();
    if( res == -1 )
    {
        return 1;
    }

    handle = hid_open(0x0911, 0x251c, NULL);
    if( handle == NULL )
    {
        return 2;
    }
    hid_set_nonblocking( handle, 0 );

    pthread_t thread;
    int rc = pthread_create(&thread, NULL, TaskCode, NULL);

    printf( "while 1\n");
    while(1)
    {
        int a = getchar();
        if( a == 'a')
        {
            // Get Device Type (cmd 0x82). The first byte is the report number (0x0).
            buf[0] = 0x0;
            buf[1] = 0x82;
            res = hid_write(handle, buf, 65);
            if( res != -1 )
                printf( "write ok, transferred %d bytes\n", res );
            else
            {
                printf( "write error\n" );
                char* str = hid_error(handle);
                printf( "error: %s\n", str );
                return 1;
            }
        }
        else if( a == 'b')
            break;
    }

    void* trc;
    rc = pthread_join(thread, &trc);
    printf( "rc code: %d\n", (int)trc );

    // Finalize the hidapi library
    res = hid_exit();
    return 0;
}
If I don't use the global handle, I get 'write error' every time. If I do, as in the example, everything formally works, but hid_read always returns 0 bytes... Of course, if I just do a simple hid_write() followed by hid_read(), I get the correct reply to command 0x82 as intended. I'm really lost here; am I overlooking something?
EDIT: to clarify, zero bytes are returned for everything, including button presses on a mouse, etc. So it seems to work, but the read always returns zero bytes.
Shame on me, a dumb mistake. The code should be:
memset( buf, 0, 64 );
res = hid_read(handle, buf, 64);
and then it works. Should sleep more and write less!
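For completeness, here is how the read loop in TaskCode looks with the corrected arguments (memset takes the fill value before the length, and hid_read needs the real buffer size). It still assumes the global handle opened in main, as in the question:
static void *TaskCode(void *argument)
{
    unsigned char buf[64];
    int res;

    while( 1 )
    {
        memset( buf, 0, sizeof(buf) );               // fill value first, then length
        res = hid_read(handle, buf, sizeof(buf));    // pass the real buffer size
        if( res == -1 )
            return (void*)3;

        printf( "received %d bytes\n", res);
        for (int i = 0; i < res; i++)
            printf("Byte %d: %02x ", i+1, buf[i]);
        fflush(stdout);
    }
    return (void*)0;
}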
