External Webcam Tutorial

A Creative Live Cam was used for the project and although the camera supports 720p, it was scaled back to 240p to speed up the response time of the robot as the extra data points were not necessary. To take a look at the results, look under the Videos tab.

Connect the webcam that you have to the IFC6410 board and run dmesg. If your webcam is supported by the UVC drivers under Linux, it will show "uvcvideo: Found UVC 1.00 device" or something similar with the model name of your webcam. When it does, that means you can write a classic V4L2 capture program to grab the video feed. This will reside within the native code of your Android app. A reference manual to the V4L2 API is located here: http://linuxtv.org/downloads/v4l-dvb-apis/.
To automatically chmod 777 the video file descriptor and kick off your Activity for your app on bootup, append to the file /system/etc/init.qcom.post_boot.sh with the necessary commands.

The code below shows you how to open the device driver.

// The struct stat gives information about the file,
// in this case, the driver since linux is a file-based OS
struct stat st;

// Checks if it exists
if (stat(dev_name, &st) == -1) {
LOGI(LOG_TAG, "Cannot identify '%s': %d, %s\n",
dev_name, errno, strerror (errno));

// Checks if it is a character device driver
if (!S_ISCHR(st.st_mode)) {
LOGI(LOG_TAG, "%s is no device\n", dev_name);

// Open the device driver
fd = open (dev_name, O_RDWR /* required */ | O_NONBLOCK, 0);

// Check if sucessfully opened
if (fd == -1) {
LOGI(LOG_TAG, "Cannot open '%s': %d, %s\n",
dev_name, errno, strerror (errno));
LOGI(LOG_TAG, "%s: sucessfully opened %s\n", __func__, dev_name);

The code below shows you how to initialize the video stream for a mmap read. It runs through the typical V4L2 checks and then configures the device for a video stream. Something important here is that the code sets the desired height and width of the image via SET_WIDTH and SET_HEIGHT. Only certain sizes are supported. Here, the size is set to 320×240. All the images are passed through a frame buffer. The pixelformat field is also important: if you don't set a color space that the camera supports, the code will fail. You can modify the code to loop through all of the color space enumerations offered by V4L2 if you want.

struct v4l2_capability cap;
struct v4l2_cropcap cropcap;    // video cropping and scaling abilities
struct v4l2_crop crop;            // get/set cropping rectangle
struct v4l2_format fmt;
unsigned int min;

// Note that all V4L2 ioctl/xioctl calls are in the format of:
// int ioctl(int fd, int request, struct v4l2_capability *argp);

// Checks if the device driver is a V4L2 device
if (xioctl(fd, VIDIOC_QUERYCAP, &cap) == -1) {
if (EINVAL == errno) {
printf("%s is no V4L2 device\n", dev_name);
} else {
errno_exit ("VIDIOC_QUERYCAP");

// Checks if there are video capture capabilities
if (!(cap.capabilities & V4L2_CAP_VIDEO_CAPTURE)) {
printf("%s is no video capture device\n", dev_name);

// Checks for R/W or streaming capabilities
if (!(cap.capabilities & V4L2_CAP_STREAMING)) {
printf("%s does not support streaming i/o\n", dev_name);

/* Select video input, video standard and tune here. */
cropcap.type = V4L2_BUF_TYPE_VIDEO_CAPTURE;

if (xioctl(fd, VIDIOC_CROPCAP, &cropcap) == 0) {
crop.c = cropcap.defrect; /* reset to default */

if (xioctl (fd, VIDIOC_S_CROP, &crop) == -1) {
switch (errno) {
case EINVAL:
/* Cropping not supported. */
/* Errors ignored. */
} else {
/* Errors ignored. */


// VIDIOC_G_FMT gets current format to find out what is supported
// this is done as a good practice
if (xioctl (fd, VIDIOC_G_FMT, &fmt) == -1) {
CLEAR (fmt);

#if 1
// These parameters detail the output formats!!
fmt.type                = V4L2_BUF_TYPE_VIDEO_CAPTURE;
fmt.fmt.pix.width       = SET_WIDTH;
fmt.fmt.pix.height      = SET_HEIGHT;
fmt.fmt.pix.pixelformat = V4L2_PIX_FMT_YUYV;
fmt.fmt.pix.field       = V4L2_FIELD_NONE;

// VIDIOC_S_FMT sets the format
if (xioctl (fd, VIDIOC_S_FMT, &fmt) == -1)
errno_exit ("VIDIOC_S_FMT");

// The actual set height and width
width = fmt.fmt.pix.width;
height = fmt.fmt.pix.height;

/* Note VIDIOC_S_FMT may change width and height. */
LOGI(LOG_TAG, "%s: Set size image %i\n", __func__, fmt.fmt.pix.sizeimage);
LOGI(LOG_TAG, "%s: Set bytesperline %i\n", __func__, fmt.fmt.pix.bytesperline);
LOGI(LOG_TAG, "%s: Set width %i height %i\n", __func__, fmt.fmt.pix.width, fmt.fmt.pix.height);
LOGI(LOG_TAG, "%s: Set colour space %i\n", __func__, fmt.fmt.pix.colorspace);

// Allocate the output buffer
outbuf = (char*) calloc (fmt.fmt.pix.sizeimage * 3, sizeof(*outbuf));


// Open frame buffer and power state device drivers for drawing
cursor_control.set = FB_CUR_SETIMAGE | FB_CUR_SETPOS;
fb_dev = open("/dev/fb0", O_RDWR);
pw_state_fd = open("/sys/power/state", O_RDWR);
if (fb_dev < 0) {
fb_dev = open("/dev/graphics/fb0", O_RDWR);
if (fb_dev < 0)
printf("ERROR: Framebuffer device not found!\n");

Below is the init_mmap() function that is called in the routine above. This function initializes the circular queue of frame buffers to be exchanged between kernel space and user space through the device driver.

struct v4l2_requestbuffers req;

CLEAR (req);

req.count    = 4;
req.type    = V4L2_BUF_TYPE_VIDEO_CAPTURE;
req.memory    = V4L2_MEMORY_MMAP;

// Checks if MMAP is supported
if (xioctl (fd, VIDIOC_REQBUFS, &req) == -1) {
if (EINVAL == errno) {
printf("%s does not support "
"memory mapping\n", dev_name);
} else {
errno_exit ("VIDIOC_REQBUFS");

// Checks if sufficient buffer memory exists
if (req.count < 2) {
printf("Insufficient buffer memory on %s\n",

// Allocates memory
buffers = (buffer*) calloc(req.count, sizeof(*buffers));
if (!buffers) {
printf("Out of memory\n");

// Buffer management
for (n_buffers = 0; n_buffers < req.count; ++n_buffers) {
struct v4l2_buffer buf;

CLEAR (buf);
buf.type        = V4L2_BUF_TYPE_VIDEO_CAPTURE;
buf.memory      = V4L2_MEMORY_MMAP;
buf.index       = n_buffers;

// queries/gets the status of buffers in kernel
if (xioctl (fd, VIDIOC_QUERYBUF, &buf) == -1)
errno_exit ("VIDIOC_QUERYBUF");

// copies it over to buffers[] in userspace
buffers[n_buffers].length = buf.length;
buffers[n_buffers].start =
mmap (NULL /* start anywhere */,
PROT_READ | PROT_WRITE /* required */,
MAP_SHARED /* recommended */,
fd, buf.m.offset);

if (MAP_FAILED == buffers[n_buffers].start)
errno_exit ("mmap");

LOGI(LOG_TAG, "%s: sucessfully initialized mmap\n", __func__);

The code below starts the capturing from the camera feed to dump the frames into the buffers that were initialized in the previous routine.

unsigned int i;
enum v4l2_buf_type type;

// enqueues the as many empty buffers as the driver has before the stream starts
for (i = 0; i < n_buffers; ++i) {
struct v4l2_buffer buf;

CLEAR (buf);

buf.type        = V4L2_BUF_TYPE_VIDEO_CAPTURE;
buf.memory      = V4L2_MEMORY_MMAP;
buf.index       = i;

// Exchanges a buffer with the driver from kernel by enqueueing the driver
if (xioctl (fd, VIDIOC_QBUF, &buf) == -1)
errno_exit ("VIDIOC_QBUF");


// starts streaming from the driver
if (xioctl (fd, VIDIOC_STREAMON, &type) == -1)
errno_exit ("VIDIOC_STREAMON");

LOGI(LOG_TAG, "%s: starting to capture from device\n", __func__);