Part 1: V4L2 Programming
V4L2 is the Linux video-capture programming framework used with driver-free (UVC) USB devices, mainly to capture from USB cameras. The programming model is as follows: after opening the video device, you may optionally set device properties such as cropping and scaling. In Linux programming, the ioctl function is generally used to manage a device's I/O channels:
extern int ioctl (int __fd, unsigned long int __request, ...) __THROW;
__fd: the device file descriptor, e.g. the cameraFd returned by open() when the video device was opened;
__request: the specific command identifier.
In V4L2 development, the following command identifiers are commonly used:
VIDIOC_REQBUFS: request frame buffers from the driver
VIDIOC_QUERYBUF: query the buffers allocated by VIDIOC_REQBUFS, obtaining the offset and length needed to map them
VIDIOC_QUERYCAP: query the driver's capabilities
VIDIOC_ENUM_FMT: enumerate the video formats the driver supports
VIDIOC_S_FMT: set the driver's current video capture format
VIDIOC_G_FMT: read the driver's current video capture format
VIDIOC_TRY_FMT: validate a capture format without changing the driver's state
VIDIOC_CROPCAP: query the driver's cropping capabilities
VIDIOC_S_CROP: set the crop rectangle of the video signal
VIDIOC_G_CROP: read the crop rectangle of the video signal
VIDIOC_QBUF: put a buffer back into the buffer queue
VIDIOC_DQBUF: take a buffer with captured data out of the queue
VIDIOC_STREAMON: start video capture (streaming)
VIDIOC_STREAMOFF: stop video capture (streaming)
VIDIOC_QUERYSTD: check which video standards the device supports, e.g. PAL or NTSC.
Some of these ioctl calls are mandatory, others optional.
1. Open the device file.
int fd = open("/dev/video0", O_RDWR);
2. Get the device's capabilities to see what it supports, e.g. whether it has video capture, audio input/output, and so on. VIDIOC_QUERYCAP, struct v4l2_capability
v4l2_std_id std;
int ret;
do {
    ret = ioctl(fd, VIDIOC_QUERYSTD, &std);
} while (ret == -1 && errno == EAGAIN);
switch (std) {
case V4L2_STD_NTSC:
    // ...
    break;
case V4L2_STD_PAL:
    // ...
    break;
}
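Note that the snippet above queries the video standard rather than the capabilities named in step 2. A minimal sketch of the capability query itself, using the fields of struct v4l2_capability, might look like this:
struct v4l2_capability cap;
memset(&cap, 0, sizeof(cap));
if (ioctl(fd, VIDIOC_QUERYCAP, &cap) < 0) {
    perror("VIDIOC_QUERYCAP error");
} else {
    printf("driver: %s, card: %s\n", cap.driver, cap.card);
    if (!(cap.capabilities & V4L2_CAP_VIDEO_CAPTURE))
        printf("device does not support video capture\n");
}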
3. Select the video input; a video device can have multiple inputs. VIDIOC_S_INPUT, struct v4l2_input (optional)
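The original gives no sample for this optional step; a minimal sketch, assuming input index 0 is the one wanted, could be:
int input_index = 0; /* assumption: select the first video input */
if (ioctl(fd, VIDIOC_S_INPUT, &input_index) < 0) {
    perror("VIDIOC_S_INPUT error");
}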
4. Set the video standard and frame format; the standard is PAL or NTSC, and the frame format includes the width, height, etc.
VIDIOC_S_STD, VIDIOC_S_FMT, v4l2_std_id, struct v4l2_format
struct v4l2_format fmt;
/*
The v4l2_format structure is as follows:
struct v4l2_format
{
enum v4l2_buf_type type; // stream type; for capture this must always be V4L2_BUF_TYPE_VIDEO_CAPTURE
union
{
struct v4l2_pix_format pix;
struct v4l2_window win;
struct v4l2_vbi_format vbi;
__u8 raw_data[200];
} fmt;
};
struct v4l2_pix_format
{
__u32 width; // width, must be a multiple of 16
__u32 height; // height, must be a multiple of 16
__u32 pixelformat; // pixel storage format, e.g. YUV 4:2:2 or RGB
enum v4l2_field field;
__u32 bytesperline;
__u32 sizeimage;
enum v4l2_colorspace colorspace;
__u32 priv;
};
*/
Example:
memset ( &fmt, 0, sizeof(fmt) );
fmt.type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
fmt.fmt.pix.width = 320;
fmt.fmt.pix.height = 240;
fmt.fmt.pix.pixelformat = V4L2_PIX_FMT_JPEG;
if (ioctl(fd, VIDIOC_S_FMT, &fmt) < 0)
{
printf("set format failed\n");
//return 0;
}
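Step 4 also names VIDIOC_S_STD, which the example above does not call (and which many UVC cameras do not support). A hedged sketch of selecting the PAL standard would be:
v4l2_std_id std_id = V4L2_STD_PAL; /* assumption: PAL standard */
if (ioctl(fd, VIDIOC_S_STD, &std_id) < 0) {
    perror("VIDIOC_S_STD error"); /* often fails on UVC cameras, which have no analog standard */
}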
5. Request frame buffers from the driver, generally no more than 5. struct v4l2_requestbuffers
struct v4l2_requestbuffers
{
__u32 count; // number of buffers, i.e. how many frames are kept in the buffer queue
enum v4l2_buf_type type; // stream type; for capture this must always be V4L2_BUF_TYPE_VIDEO_CAPTURE
enum v4l2_memory memory; // V4L2_MEMORY_MMAP or V4L2_MEMORY_USERPTR
__u32 reserved[2];
};
Example:
struct v4l2_requestbuffers req;
memset(&req, 0, sizeof (req));
req.count = 4;
req.type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
req.memory = V4L2_MEMORY_MMAP;
if (ioctl(fd,VIDIOC_REQBUFS,&req) == -1)
{
perror("VIDIOC_REQBUFS error");
//return -1;
}
6. Map the buffers into user-space memory.
Map the requested frame buffers into user space so the captured frames can be accessed directly without copying, then queue all of the mapped buffers so they can receive captured data. VIDIOC_QBUF, struct v4l2_buffer
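The snippet below uses a VideoBuffer type that the original never defines; judging from the fields it accesses, it is presumably something like this sketch:
typedef struct {
    unsigned char *start; /* mmap'ed address of the buffer */
    size_t length;        /* buffer length reported by the driver */
    size_t offset;        /* offset used for the mmap call */
} VideoBuffer;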
VideoBuffer* buffers = calloc( req.count, sizeof(VideoBuffer) );
printf("sizeof(VideoBuffer) is %d\n", sizeof(VideoBuffer));
struct v4l2_buffer buf;
for (numBufs = 0; numBufs < req.count; numBufs++)
{
memset( &buf, 0, sizeof(buf) );
buf.type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
buf.memory = V4L2_MEMORY_MMAP;
buf.index = numBufs;
if (ioctl(fd, VIDIOC_QUERYBUF, &buf) < 0)
{
printf("VIDIOC_QUERYBUF error\n");
//return -1;
}
printf("buf len is %d\n", sizeof(buf));
// memory-map the buffer into user space
buffers[numBufs].length = buf.length;
buffers[numBufs].offset = (size_t) buf.m.offset;
buffers[numBufs].start = mmap (NULL, buf.length,PROT_READ | PROT_WRITE, MAP_SHARED, fd, buf.m.offset);
printf("buffers.length = %d, buffers.offset = %d, buffers.start[0] = %d\n", buffers[numBufs].length, buffers[numBufs].offset, buffers[numBufs].start[0]);
printf("buf2 len is %d\n", sizeof(buffers[numBufs].start));
if (buffers[numBufs].start == MAP_FAILED)
{
perror("buffers error");
//return -1;
}
if (ioctl (fd, VIDIOC_QBUF, &buf) < 0)
{
printf("VIDIOC_QBUF error\n");
//return -1;
}
}
7. Start video capture.
enum v4l2_buf_type type;
type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
if (ioctl (fd, VIDIOC_STREAMON, &type) < 0)
{
printf("VIDIOC_STREAMON error\n");
// return -1;
}
8. Dequeue a buffer to obtain the captured raw frame data (VIDIOC_DQBUF), then put the buffer back at the tail of the queue so capture can run in a loop (VIDIOC_QBUF; a sketch of the re-queue follows the snippet below).
if (ioctl(fd, VIDIOC_DQBUF, &buf) < 0)
{
perror("VIDIOC_DQBUF failed.");
//return -1;
}
buf.type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
buf.memory = V4L2_MEMORY_MMAP;
unsigned char *ptcur = buffers[numBufs].start;
DEBUG("buf.bytesused = %d\n", buf.bytesused);
int i1;
for(i1=0; i1<buf.bytesused; i1++)
{
if((buffers[numBufs].start[i1] == 0x000000FF) && (buffers[numBufs].start[i1+1] == 0x000000C4))
{
DEBUG("huffman table found!\nbuf.bytesused = %d\nFFC4 = %d\n", buf.bytesused, i1);
break;
}
}
if (i1 == buf.bytesused) printf("huffman table doesn't exist!\n");
int i;
for(i=0; i<buf.bytesused; i++)
{
if((buffers[numBufs].start[i] == 0x000000FF) && (buffers[numBufs].start[i+1] == 0x000000D8)) break;
ptcur++;
}
DEBUG("i=%d, FF=%02x, D8=%02x\n", i, buffers[numBufs].start[i], buffers[numBufs].start[i+1]);
int imagesize =buf.bytesused - i;
DEBUG("buf.bytesused = %d\n", buf.bytesused);
DEBUG("imagesize = %d\n", imagesize);
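The re-queue mentioned in step 8 is not shown in the snippet above; once the frame has been processed, the same buffer is handed back to the driver, roughly like this:
/* put the processed buffer back on the queue so the driver can refill it */
if (ioctl(fd, VIDIOC_QBUF, &buf) < 0) {
    perror("VIDIOC_QBUF error");
}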
9. Stop video capture. VIDIOC_STREAMOFF
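The original gives no sample for this step; a minimal sketch, mirroring the VIDIOC_STREAMON call in step 7, would be:
enum v4l2_buf_type type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
if (ioctl(fd, VIDIOC_STREAMOFF, &type) < 0) {
    printf("VIDIOC_STREAMOFF error\n");
}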
10. Close the video device: close(fd);
Part 2: How an Android App Communicates with a UVC Camera
Let's look at how the UVC camera is operated. We implemented a dedicated service for detecting UVC cameras, the UVCCameraService class. Its main code is as follows:
Monitoring
mUSBMonitor = new USBMonitor(this, new USBMonitor.OnDeviceConnectListener() {
    @Override
    public void onAttach(final UsbDevice device) {
        Log.v(TAG, "onAttach:" + device);
        mUSBMonitor.requestPermission(device);
    }

    @Override
    public void onConnect(final UsbDevice device, final USBMonitor.UsbControlBlock ctrlBlock, final boolean createNew) {
        releaseCamera();
        if (BuildConfig.DEBUG) Log.v(TAG, "onConnect:");
        try {
            final UVCCamera camera = new MyUVCCamera();
            camera.open(ctrlBlock);
            camera.setStatusCallback(new IStatusCallback() {
                // ... UVC camera connected successfully (callback body omitted)
            });
            Toast.makeText(UVCCameraService.this, "UVCCamera connected!", Toast.LENGTH_SHORT).show();
            if (device != null)
                cameras.append(device.getDeviceId(), camera);
        } catch (Exception ex) {
            ex.printStackTrace();
        }
    }

    @Override
    public void onDisconnect(final UsbDevice device, final USBMonitor.UsbControlBlock ctrlBlock) {
        // ... UVC camera disconnected
        if (device != null) {
            UVCCamera camera = cameras.get(device.getDeviceId());
            if (mUVCCamera == camera) {
                mUVCCamera = null;
                Toast.makeText(UVCCameraService.this, "UVCCamera disconnected!", Toast.LENGTH_SHORT).show();
                liveData.postValue(null);
            }
            cameras.remove(device.getDeviceId());
        } else {
            Toast.makeText(UVCCameraService.this, "UVCCamera disconnected!", Toast.LENGTH_SHORT).show();
            mUVCCamera = null;
            liveData.postValue(null);
        }
    }

    @Override
    public void onCancel(UsbDevice usbDevice) {
        releaseCamera();
    }

    @Override
    public void onDettach(final UsbDevice device) {
        Log.v(TAG, "onDettach:");
        releaseCamera(); // AppContext.getInstance().bus.post(new UVCCameraDisconnect());
    }
});
This class handles monitoring for UVC cameras, connecting to them, releasing them, and unregistering the monitor. When a UVC camera connects successfully, an mUVCCamera object is created.
Then, in MediaStream, we reworked switchCamera: passing 2 switches to the UVCCamera (0 and 1 switch to the rear and front cameras, respectively).
Creation
When creating the camera, if a UVC camera is requested, the mUVCCamera instance previously created in the service is fetched directly:
if (mCameraId == 2) {
    UVCCamera value = UVCCameraService.liveData.getValue();
    if (value != null) { // uvc camera.
        uvcCamera = value;
        value.setPreviewSize(width, height, 1, 30, UVCCamera.PIXEL_FORMAT_YUV420SP, 1.0f);
        return;
        // value.startPreview();
    } else {
        Log.i(TAG, "NO UVCCamera");
        uvcError = new Exception("no uvccamera connected!");
        return;
    }
    // mCameraId = 0;
}
Preview
During preview, if the UVC camera has already been created, the UVC camera's preview is started:
UVCCamera value = uvcCamera;
if (value != null) {
    SurfaceTexture holder = mSurfaceHolderRef.get();
    if (holder != null) {
        value.setPreviewTexture(holder);
    }
    try {
        value.setFrameCallback(uvcFrameCallback, UVCCamera.PIXEL_FORMAT_YUV420SP/*UVCCamera.PIXEL_FORMAT_NV21*/);
        value.startPreview();
        cameraPreviewResolution.postValue(new int[]{width, height});
    } catch (Throwable e) {
        uvcError = e;
    }
}
The colorFormat chosen here, PIXEL_FORMAT_YUV420SP, is equivalent to the NV21 format of a standard camera.
Stopping the preview
Likewise, closing invokes the UVC camera's stop call.
UVCCamera value = uvcCamera;
if (value != null) {
    value.stopPreview();
}
Destruction
Because nothing was actually created here, destruction simply sets the instance to null.
UVCCamera value = uvcCamera;
if (value != null) { // value.destroy();
    uvcCamera = null;
}
With these operations in place, let's look at how the upper layer calls them.
First, several entries must be added to the Manifest; see the UVCCamera project documentation for details. For example:
<activity android:name=".UVCActivity" android:launchMode="singleInstance">
<intent-filter>
<action android:name="android.intent.action.MAIN" />
<category android:name="android.intent.category.LAUNCHER" />
</intent-filter>
<intent-filter>
<action android:name="android.hardware.usb.action.USB_DEVICE_ATTACHED" />
</intent-filter>
<intent-filter>
<action android:name="android.hardware.usb.action.USB_DEVICE_DETACHED" />
</intent-filter>
<meta-data android:name="android.hardware.usb.action.USB_DEVICE_ATTACHED"
android:resource="@xml/device_filter" />
</activity>
The calling code is in UVCActivity, which can be found in the myapplication project on the library branch.
Starting or stopping the UVC camera push (streaming):
public void onPush(View view) {
    // Asynchronously obtain the MediaStream object.
    getMediaStream().subscribe(new Consumer<MediaStream>() {
        @Override
        public void accept(final MediaStream mediaStream) throws Exception {
            // Check the current pushing state.
            MediaStream.PushingState state = mediaStream.getPushingState();
            if (state != null && state.state > 0) {
                // Currently pushing, so stop pushing and preview.
                mediaStream.stopStream();
                mediaStream.closeCameraPreview();
            } else {
                // switchCamera: 0 = rear camera, 1 = front camera, 2 = UVC camera
                // Asynchronously open the UVC camera.
                RxHelper.single(mediaStream.switchCamera(2), null).subscribe(new Consumer<Object>() {
                    @Override
                    public void accept(Object o) throws Exception {
                        // Opened successfully; start pushing.
                        // ...
                        mediaStream.startStream("cloud.easydarwin.org", "554", id);
                    }
                }, new Consumer<Throwable>() {
                    @Override
                    public void accept(final Throwable t) throws Exception {
                        // Oops... failed to open; show a message.
                        t.printStackTrace();
                        runOnUiThread(new Runnable() {
                            @Override
                            public void run() {
                                Toast.makeText(UVCActivity.this, "Failed to start UVC camera: " + t.getMessage(), Toast.LENGTH_SHORT).show();
                            }
                        });
                    }
                });
            }
        }
    });
}
With that, the whole push flow is complete. If everything goes well, you should be able to play the UVC camera's video in VLC.
Let's also look at recording, which is just as simple...
public void onRecord(View view) {
    // Start or stop recording.
    final TextView txt = (TextView) view;
    getMediaStream().subscribe(new Consumer<MediaStream>() {
        @Override
        public void accept(MediaStream mediaStream) throws Exception {
            if (mediaStream.isRecording()) {
                // Already recording, so stop.
                mediaStream.stopRecord();
                txt.setText("Record");
            } else {
                // Not recording; start recording...
                // The maximum clip length is 30 seconds; if recording has not stopped by then,
                // a new file is created, and so on.
                // Files are named test_uvc_0.mp4, test_uvc_1.mp4, test_uvc_2.mp4, test_uvc_3.mp4
                String path = getExternalFilesDir(Environment.DIRECTORY_MOVIES) + "/test_uvc.mp4";
                mediaStream.startRecord(path, 30000);
                final TextView pushingStateText = findViewById(R.id.pushing_state);
                pushingStateText.append("\nRecording path: " + path);
                txt.setText("Stop");
            }
        }
    });
}
The UVC camera also supports background pushing, i.e. pushing without preview, and then switching back to the foreground to resume preview. This only requires one call, as follows:
@Override
public void onSurfaceTextureAvailable(SurfaceTexture surfaceTexture, int i, int i1) {
    ms.setSurfaceTexture(surfaceTexture); // set the SurfaceTexture to preview on
}

@Override
public boolean onSurfaceTextureDestroyed(SurfaceTexture surfaceTexture) {
    ms.setSurfaceTexture(null); // set the preview window to null, i.e. disable preview
    return true;
}
To completely quit the UVC camera's preview and push, simply stop the service as well.
public void onQuit(View view) {
    // Quit.
    finish();
    // Stop the service...
    Intent intent = new Intent(this, MediaStream.class);
    stopService(intent);
}
## Get More Information ##