webcam_capture.cpp (gist mik30s/6dd4eb42b2ec906e064d)
#include <iostream>
#include <stdio.h>
#include <stdlib.h>
#include <linux/ioctl.h>
#include <linux/types.h>
#include <linux/v4l2-common.h>
#include <linux/v4l2-controls.h>
#include <linux/videodev2.h>
#include <fcntl.h>
#include <unistd.h>
#include <sys/ioctl.h>
#include <sys/mman.h>
#include <string.h>
#include <fstream>
#include <string>

using namespace std;

int main() {
    // 1. Open the device
    int fd; // A file descriptor to the video device
    fd = open("/dev/video0", O_RDWR);
    if(fd < 0){
        perror("Failed to open device, OPEN");
        return 1;
    }

    // 2. Ask the device if it can capture frames
    v4l2_capability capability;
    if(ioctl(fd, VIDIOC_QUERYCAP, &capability) < 0){
        // something went wrong... exit
        perror("Failed to get device capabilities, VIDIOC_QUERYCAP");
        return 1;
    }

    // 3. Set Image format
    v4l2_format imageFormat;
    imageFormat.type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
    imageFormat.fmt.pix.width = 1024;
    imageFormat.fmt.pix.height = 1024;
    imageFormat.fmt.pix.pixelformat = V4L2_PIX_FMT_MJPEG;
    imageFormat.fmt.pix.field = V4L2_FIELD_NONE;
    // tell the device you are using this format
    if(ioctl(fd, VIDIOC_S_FMT, &imageFormat) < 0){
        perror("Device could not set format, VIDIOC_S_FMT");
        return 1;
    }
    // 4. Request Buffers from the device
    v4l2_requestbuffers requestBuffer = {0};
    requestBuffer.count = 1; // one request buffer
    requestBuffer.type = V4L2_BUF_TYPE_VIDEO_CAPTURE; // request a buffer which we can use for capturing frames
    requestBuffer.memory = V4L2_MEMORY_MMAP;

    if(ioctl(fd, VIDIOC_REQBUFS, &requestBuffer) < 0){
        perror("Could not request buffer from device, VIDIOC_REQBUFS");
        return 1;
    }

    // 5. Query the buffer to get raw data, i.e. ask for the buffer you requested
    // and allocate memory for it
    v4l2_buffer queryBuffer = {0};
    queryBuffer.type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
    queryBuffer.memory = V4L2_MEMORY_MMAP;
    queryBuffer.index = 0;
    if(ioctl(fd, VIDIOC_QUERYBUF, &queryBuffer) < 0){
        perror("Device did not return the buffer information, VIDIOC_QUERYBUF");
        return 1;
    }

    // use a pointer to point to the newly created buffer
    // mmap() will map the memory address of the device to
    // an address in memory
    char* buffer = (char*)mmap(NULL, queryBuffer.length, PROT_READ | PROT_WRITE, MAP_SHARED,
                               fd, queryBuffer.m.offset);
    if(buffer == MAP_FAILED){
        perror("Could not map the device buffer, MMAP");
        return 1;
    }
    memset(buffer, 0, queryBuffer.length);

    // 6. Get a frame
    // Create a new buffer type so the device knows which buffer we are talking about
    v4l2_buffer bufferinfo;
    memset(&bufferinfo, 0, sizeof(bufferinfo));
    bufferinfo.type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
    bufferinfo.memory = V4L2_MEMORY_MMAP;
    bufferinfo.index = 0;

    // Activate streaming
    int type = bufferinfo.type;
    if(ioctl(fd, VIDIOC_STREAMON, &type) < 0){
        perror("Could not start streaming, VIDIOC_STREAMON");
        return 1;
    }
    /***************************** Begin looping here *********************/
    // Queue the buffer
    if(ioctl(fd, VIDIOC_QBUF, &bufferinfo) < 0){
        perror("Could not queue buffer, VIDIOC_QBUF");
        return 1;
    }

    // Dequeue the buffer
    if(ioctl(fd, VIDIOC_DQBUF, &bufferinfo) < 0){
        perror("Could not dequeue the buffer, VIDIOC_DQBUF");
        return 1;
    }

    // Frames get written after dequeuing the buffer
    cout << "Buffer has: " << (double)bufferinfo.bytesused / 1024
         << " KBytes of data" << endl;
    // Write the data out to file
    ofstream outFile;
    outFile.open("webcam_output.jpeg", ios::binary | ios::app);

    int bufPos = 0, outFileMemBlockSize = 0; // the position in the buffer and the amount to copy from
                                             // the buffer
    int remainingBufferSize = bufferinfo.bytesused; // the remaining buffer size, decremented by
                                                    // outFileMemBlockSize on each loop so we do not overrun the buffer
    char* outFileMemBlock = NULL; // a pointer to a new memory block
    int itr = 0; // counts the number of iterations
    while(remainingBufferSize > 0) {
        bufPos += outFileMemBlockSize; // increment the buffer position on each loop;
                                       // it starts at 0 so the first iteration begins
                                       // at the beginning of the buffer
        outFileMemBlockSize = 1024;    // set the output block size to a preferable size. 1024 :)
        // if the block size is greater than the remaining amount of
        // data we have to copy, only copy what is left
        if(outFileMemBlockSize > remainingBufferSize)
            outFileMemBlockSize = remainingBufferSize;

        // copy outFileMemBlockSize bytes of data starting from buffer+bufPos
        outFileMemBlock = new char[outFileMemBlockSize];
        memcpy(outFileMemBlock, buffer + bufPos, outFileMemBlockSize);
        outFile.write(outFileMemBlock, outFileMemBlockSize);

        // subtract the amount of data we just copied
        // from the remaining buffer size
        remainingBufferSize -= outFileMemBlockSize;

        // display the remaining buffer size
        cout << itr++ << " Remaining bytes: " << remainingBufferSize << endl;

        delete[] outFileMemBlock; // matches new[]; without this the block leaks
    }
    // Close the file
    outFile.close();
    /******************************** end looping here **********************/

    // end streaming
    if(ioctl(fd, VIDIOC_STREAMOFF, &type) < 0){
        perror("Could not end streaming, VIDIOC_STREAMOFF");
        return 1;
    }

    close(fd);
    return 0;
}
Interesting, I've compiled it in reverse order and it worked...
WORKS (EXEC)
$ g++ -Wall -o webcam_capture.bin webcam_capture.cpp
FAILS (EXEC)
$ g++ -Wall -g -c webcam_capture.cpp -o webcam_capture.bin
Why memcpy? It leaks, btw. This line does the same job:
outFile.write(buffer+bufPos,outFileMemBlockSize);
Why use the while loop? This looks so much better:
// Open the file
ofstream outFile;
outFile.open("webcam_output.jpeg", ios::binary| ios::app);
// Write the data out to file
outFile.write(buffer, (double)bufferinfo.bytesused);
// Close the file
outFile.close();
@gcclinux: with the -c option the output is an object file, such as main.o or my_code.o, not an executable. Without -c, gcc compiles the code directly from C/C++ source to an executable (no intermediate object files).
That's why you get strange "reverse order" results.
We normally do:
- Compile: g++ -Wall -o webcam_capture.o -c webcam_capture.cpp (reverse ok), and
- Link: g++ -Wall -o webcam_capture.bin webcam_capture.o
And normally we don't even use a .bin extension for executables; executables typically have no extension on Unix/Linux.
Thanks for the code. I found a memory leak in the while loop: the memory allocated at line 130 is never freed.
You can patch the leak by adding a delete at the end of the while loop:
line 145: delete outFileMemBlock;
@antoinnneee @freanux thanks for spotting that! I've added the fix.
@malekva Thanks! The while loop was a huge oversight on my part. Thanks for catching that.
I had a problem with a black image from the webcam, same as in this fork comment https://gist.github.com/sammy17/b391c68a91f381aad0d149e325e6a87e#gistcomment-3102045
The fix in my case was to add a 50 millisecond sleep after activating the stream:
if(ioctl(fd, VIDIOC_STREAMON, &type) < 0){
perror("Could not start streaming, VIDIOC_STREAMON");
return 1;
}
this_thread::sleep_for(chrono::milliseconds(50));
I use the C++11 thread and chrono libraries for sleeping:
#include <chrono>
#include <thread>
How can I use this code to take a video, i.e. capture multiple frames?
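A minimal sketch of one way to do this, by repeating the queue/dequeue step between the "Begin looping here" and "end looping here" markers above. It assumes the device has already been opened, formatted, mmap'ed and streaming started exactly as in the gist; captureFrames, frameCount and the frame_N.jpeg file names are hypothetical, not part of the original code:
#include <cstdio>
#include <fstream>
#include <string>
#include <sys/ioctl.h>
#include <linux/videodev2.h>

// Re-queue the same mmap'ed buffer once per frame and write each
// dequeued frame to its own file (frame_0.jpeg, frame_1.jpeg, ...).
bool captureFrames(int fd, char* buffer, v4l2_buffer bufferinfo, int frameCount) {
    for (int i = 0; i < frameCount; ++i) {
        // Queue the buffer so the driver can fill it with the next frame
        if (ioctl(fd, VIDIOC_QBUF, &bufferinfo) < 0) {
            perror("Could not queue buffer, VIDIOC_QBUF");
            return false;
        }
        // Dequeue the buffer once the frame has been captured
        if (ioctl(fd, VIDIOC_DQBUF, &bufferinfo) < 0) {
            perror("Could not dequeue the buffer, VIDIOC_DQBUF");
            return false;
        }
        // bytesused tells us how much of the buffer this frame occupies
        std::ofstream outFile("frame_" + std::to_string(i) + ".jpeg", std::ios::binary);
        outFile.write(buffer, bufferinfo.bytesused);
    }
    return true;
}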
Can anyone show me how to get a raw image into the buffer? Let's say I want a 640x360 RGB24 image without compression, so I set
imageFormat.fmt.pix.pixelformat = V4L2_PIX_FMT_RGB24;
and in theory that should give me 640 * 360 * 3 = 691200 bytes. But instead I get 706560 bytes. I spent almost 2 hours reading the Linux documentation and set every little parameter, but nothing worked.
The extra lines I added (which didn't help):
imageFormat.fmt.pix.sizeimage = width * height * 3 * sizeof(char);
imageFormat.fmt.pix.bytesperline = 0;
imageFormat.fmt.pix.colorspace = V4L2_COLORSPACE_RAW;
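One thing worth checking: VIDIOC_S_FMT may adjust the requested format, and the driver writes the values it actually chose (including any row padding) back into the struct it was passed, so the negotiated bytesperline and sizeimage describe what the buffer will really contain. A minimal sketch of inspecting them right after the VIDIOC_S_FMT call in the gist; printNegotiatedFormat is a hypothetical helper name:
#include <iostream>
#include <linux/videodev2.h>

// Print the format the driver actually negotiated after VIDIOC_S_FMT.
void printNegotiatedFormat(const v4l2_format& imageFormat) {
    std::cout << "width:        " << imageFormat.fmt.pix.width        << "\n"
              << "height:       " << imageFormat.fmt.pix.height       << "\n"
              << "bytesperline: " << imageFormat.fmt.pix.bytesperline << "\n"
              << "sizeimage:    " << imageFormat.fmt.pix.sizeimage    << "\n";
    // For RGB24, if bytesperline > width * 3 each row is padded, and row y
    // starts at offset y * bytesperline in the mapped buffer.
}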
Hey @mike168m. Just wanted to say that I've been struggling to work out how the V4L2 kernel API works; the documentation is simultaneously way too specific and surprisingly vague.
This has cleared up the questions I have surrounding a few aspects and I just wanted to say thanks for uploading your work for us all to learn from!
Exactly the kind of simple camera capture code, minor glitches aside, that I was looking for! Before compiling, linking, and executing the code I checked
/dev/audio* and /dev/video in bash and they don't seem to exist! My integrated camera didn't work on Windows for some reason, so I added an external Logitech camera that was sitting in my drawer, and that one seemed to work on Windows. But on Ubuntu Linux /dev/video* doesn't seem to pick it up! Are there any libs I have to install to experience the thrill of a camera rendition? Any help definitely appreciated!
LoveCoder625
@DbDibyendu did you find out how to capture multiple frames?
Hello, Please advise what I am doing wrong...