Hands-On Mini Project: Embedded Linux Image Capture and Transmission

Project Overview

     This hands-on project is built around the embedded Linux V4L2 capture framework and consists of the following parts:

  1. V4L2 video capture
  2. IPU color-space conversion
  3. framebuffer display
  4. transmission over a simple custom UDP protocol
  5. host software (Qt)

  First, the capture part.
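
  The listings below all pull their headers and a few project-specific definitions from includes.h, which the post does not show. As a rough sketch of what it must contain for the capture code to compile (USB_VIDEO, CLEAR, and struct cam_buffer are the names the code relies on; the device path and everything else here is my assumption, not the original file):

/* includes.h -- hypothetical reconstruction, not the original project header */
#ifndef INCLUDES_H
#define INCLUDES_H

#include <stdio.h>
#include <stdlib.h>
#include <string.h>
#include <errno.h>
#include <fcntl.h>
#include <unistd.h>
#include <sys/ioctl.h>
#include <sys/mman.h>
#include <sys/time.h>
#include <linux/videodev2.h>

#define USB_VIDEO "/dev/video0"  /* capture device node -- adjust for your board */
#define CLEAR(x)  memset(&(x), 0, sizeof(x))

struct cam_buffer {
    void   *start;   /* mmap'ed start address of one frame buffer */
    size_t  length;  /* length of the buffer in bytes */
};

#endif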

#include "includes.h"

int fd_cam;
struct cam_buffer *buffers=NULL;
unsigned int n_buffers=0;
int frameIndex=-1; // -1 means no frame is currently dequeued

void initVideo()
{
    int ret;
    struct v4l2_capability cam_cap;     // device capability information
    struct v4l2_cropcap    cam_cropcap; // the camera's cropping capability
    struct v4l2_fmtdesc    cam_fmtdesc; // enumerate supported formats: VIDIOC_ENUM_FMT
    struct v4l2_crop       cam_crop;    // image cropping/scaling window
    struct v4l2_format     cam_format;  // video standard, frame format, etc.

    /* Open the device */
    fd_cam = open(USB_VIDEO, O_RDWR);
    if (fd_cam < 0)
        printf("Can't open video device\n");

    /* VIDIOC_QUERYCAP: query the camera's basic information */
    ret = ioctl(fd_cam, VIDIOC_QUERYCAP, &cam_cap);
    if (ret < 0) {
        printf("Can't get device information: VIDIOC_QUERYCAP\n");
    }
    printf("Driver Name:%s\nCard Name:%s\nBus info:%s\nDriver Version:%u.%u.%u\n",
            cam_cap.driver,cam_cap.card,cam_cap.bus_info,(cam_cap.version>>16)&0XFF,
            (cam_cap.version>>8)&0XFF,cam_cap.version&0XFF);

    /* VIDIOC_ENUM_FMT: enumerate all formats the camera supports */
    cam_fmtdesc.index=0;
    cam_fmtdesc.type=V4L2_BUF_TYPE_VIDEO_CAPTURE;
    printf("Support format:\n");
    while(ioctl(fd_cam, VIDIOC_ENUM_FMT, &cam_fmtdesc) != -1)
    {
        printf("\t%d.%s\n",cam_fmtdesc.index+1,cam_fmtdesc.description);
        cam_fmtdesc.index++;
    }

    /* VIDIOC_CROPCAP: query the camera's cropping capability */
    cam_cropcap.type=V4L2_BUF_TYPE_VIDEO_CAPTURE;
    if(0 == ioctl(fd_cam, VIDIOC_CROPCAP, &cam_cropcap)){
        printf("Default rec:\n\tleft:%d\n\ttop:%d\n\twidth:%d\n\theight:%d\n",
                cam_cropcap.defrect.left,cam_cropcap.defrect.top,
                cam_cropcap.defrect.width,cam_cropcap.defrect.height);
        /* VIDIOC_S_CROP: set the capture window to the default rectangle */
        cam_crop.type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
        cam_crop.c = cam_cropcap.defrect; // default capture window
        if(-1 == ioctl(fd_cam, VIDIOC_S_CROP, &cam_crop)){
            //printf("Can't set crop para\n");
        }
    }
    else{
        printf("Can't get cropcap para\n");
    }

    /* VIDIOC_S_FMT: set the frame size and pixel format */
    cam_format.type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
    cam_format.fmt.pix.width = 640;
    cam_format.fmt.pix.height = 480;
    cam_format.fmt.pix.pixelformat = V4L2_PIX_FMT_YUYV; // must match a format the camera supports
    cam_format.fmt.pix.field = V4L2_FIELD_INTERLACED;
    ret=ioctl(fd_cam, VIDIOC_S_FMT, &cam_format);
    if(ret<0){
            printf("Can‘t set frame information\n");
    }
    /* VIDIOC_G_FMT: read back the frame settings actually in effect */
    cam_format.type=V4L2_BUF_TYPE_VIDEO_CAPTURE;
    ret=ioctl(fd_cam, VIDIOC_G_FMT, &cam_format);
    if(ret<0){
        printf("Can‘t get frame information\n");
    }
    printf("Current data format information:\n\twidth:%d\n\theight:%d\n",
            cam_format.fmt.pix.width,cam_format.fmt.pix.height);
    ret=initBuffers();
    if(ret<0){
        printf("Buffers init error\n");
        //exit(-1);
    }
}

void closeVideo()
{
    //stopCapture();
    //freeBuffers();
    close(fd_cam);
}

int initBuffers()
{
    int ret;
    /* VIDIOC_REQBUFS: request frame buffers from the driver */
    struct v4l2_requestbuffers req;
    CLEAR(req);
    req.count=4;
    req.type=V4L2_BUF_TYPE_VIDEO_CAPTURE;
    req.memory = V4L2_MEMORY_MMAP;
    ret=ioctl(fd_cam, VIDIOC_REQBUFS, &req);
    if(ret<0){
        printf("Request frame buffers failed\n");
        return -1;
    }
    if(req.count<2){
        printf("Request frame buffers while insufficient buffer memory\n");
        return -1;
    }
    buffers = (struct cam_buffer*)calloc(req.count, sizeof(*buffers));
    if(!buffers){
        printf("Out of memory\n");
        return -1;
    }
    for(n_buffers = 0; n_buffers < req.count; n_buffers++){
        struct v4l2_buffer buf;
        CLEAR(buf);
        // VIDIOC_QUERYBUF: get the length and mmap offset of buffer n_buffers
        buf.type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
        buf.memory = V4L2_MEMORY_MMAP;
        buf.index = n_buffers;
        ret=ioctl(fd_cam, VIDIOC_QUERYBUF, &buf);
        if(ret<0 )
        {
            printf("VIDIOC_QUERYBUF %d failed\n",n_buffers);
            return -1;
        }
        buffers[n_buffers].length = buf.length;
        // Map the buffer into user space
        buffers[n_buffers].start =
         mmap(NULL, // start anywhere
              buf.length,
              PROT_READ | PROT_WRITE,
              MAP_SHARED,
              fd_cam, buf.m.offset);
        if(MAP_FAILED == buffers[n_buffers].start)
        {
            printf("mmap buffer%d failed\n",n_buffers);
            return -1;
        }

    }
    return 0;
}
int startCapture()
{
    unsigned int i;
    for(i=0;i<n_buffers;i++){
        struct v4l2_buffer buf;
        CLEAR(buf);
        buf.type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
        buf.memory =V4L2_MEMORY_MMAP;
        buf.index = i;
        if(-1 == ioctl(fd_cam, VIDIOC_QBUF, &buf))    {
            printf("VIDIOC_QBUF buffer%d failed\n",i);
            return -1;
        }
    }
    enum v4l2_buf_type type;
    type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
    if(-1 == ioctl(fd_cam, VIDIOC_STREAMON, &type)){
         printf("VIDIOC_STREAMON error");
         return -1;
    }
    return 0;
}
int stopCapture()
{
    enum v4l2_buf_type type;
    type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
    if(-1 == ioctl(fd_cam, VIDIOC_STREAMOFF, &type)){
        printf("VIDIOC_STREAMOFF error\n");
        return -1;
    }
    return 0;
}
int freeBuffers()
{
    unsigned int i;
    for(i = 0; i < n_buffers; ++i){
        if(-1 == munmap(buffers[i].start, buffers[i].length)){
            printf("munmap buffer%d failed\n",i);
            return -1;
        }
    }
    free(buffers);
    return 0;
}
int getFrame(void **frame_buf, size_t* len)
{
    struct v4l2_buffer queue_buf;
    CLEAR(queue_buf);
    queue_buf.type=V4L2_BUF_TYPE_VIDEO_CAPTURE;
    queue_buf.memory = V4L2_MEMORY_MMAP;
    if(-1 == ioctl(fd_cam, VIDIOC_DQBUF, &queue_buf)){
        printf("VIDIOC_DQBUF error\n");
        return -1;
    }
    printf("queue_buf.index=%d\n",queue_buf.index);
    //pthread_rwlock_wrlock(&rwlock);
    *frame_buf = buffers[queue_buf.index].start;
    *len = buffers[queue_buf.index].length;
    frameIndex = queue_buf.index;
    //pthread_rwlock_unlock(&rwlock);
    return 0;
}
int backFrame()
{
    if(frameIndex != -1){
        struct v4l2_buffer queue_buf;
        CLEAR(queue_buf);
        queue_buf.type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
        queue_buf.memory = V4L2_MEMORY_MMAP;
        queue_buf.index = frameIndex;
        if(-1 == ioctl(fd_cam, VIDIOC_QBUF, &queue_buf)){
            printf("VIDIOC_QBUF error\n");
            return -1;
        }
        return 0;
    }
    return -1;
}

/* Convert a packed YUYV frame to RGB24 in software */
int convert_yuv_to_rgb_buffer(unsigned char *yuv, unsigned char *rgb, unsigned int width, unsigned int height)
{
    unsigned int in, out = 0;
    unsigned int pixel_16;
    unsigned char pixel_24[3];
    unsigned int pixel32;
    int y0, u, y1, v;
    struct timeval starttime,endtime;
    gettimeofday(&starttime,0);
    for(in = 0; in < width * height * 2; in += 4) {
        pixel_16 =
        yuv[in + 3] << 24 |
        yuv[in + 2] << 16 |
        yuv[in + 1] <<  8 |
        yuv[in + 0];
        y0 = (pixel_16 & 0x000000ff);
        u  = (pixel_16 & 0x0000ff00) >>  8;
        y1 = (pixel_16 & 0x00ff0000) >> 16;
        v  = (pixel_16 & 0xff000000) >> 24;
        pixel32 = convert_yuv_to_rgb_pixel(y0, u, v);
        pixel_24[0] = (pixel32 & 0x000000ff);
        pixel_24[1] = (pixel32 & 0x0000ff00) >> 8;
        pixel_24[2] = (pixel32 & 0x00ff0000) >> 16;
        //pthread_rwlock_wrlock(&rwlock);
        rgb[out++] = pixel_24[0];
        rgb[out++] = pixel_24[1];
        rgb[out++] = pixel_24[2];
        //pthread_rwlock_unlock(&rwlock);
        pixel32 = convert_yuv_to_rgb_pixel(y1, u, v);
        pixel_24[0] = (pixel32 & 0x000000ff);
        pixel_24[1] = (pixel32 & 0x0000ff00) >> 8;
        pixel_24[2] = (pixel32 & 0x00ff0000) >> 16;
        //pthread_rwlock_wrlock(&rwlock);
        rgb[out++] = pixel_24[0];
        rgb[out++] = pixel_24[1];
        rgb[out++] = pixel_24[2];
        //pthread_rwlock_unlock(&rwlock);
    }
     gettimeofday(&endtime,0);
     double timeuse = 1000000*(endtime.tv_sec - starttime.tv_sec)+endtime.tv_usec-starttime.tv_usec;
          timeuse /=1000;//除以1000则进行毫秒计时,如果除以1000000则进行秒级别计时,如果除以1则进行微妙级别计时
     printf("yuv2rgb use %f ms\n",timeuse);
    return 0;
}
int convert_yuv_to_rgb_pixel(int y, int u, int v)
{
    unsigned int pixel32 = 0;
    unsigned char *pixel = (unsigned char *)&pixel32;
    int r, g, b;
    r = y + (1.370705 * (v-128));
    g = y - (0.698001 * (v-128)) - (0.337633 * (u-128));
    b = y + (1.732446 * (u-128));
    if(r > 255) r = 255;
    if(g > 255) g = 255;
    if(b > 255) b = 255;
    if(r < 0) r = 0;
    if(g < 0) g = 0;
    if(b < 0) b = 0;
    pixel[0] = r * 220 / 256;
    pixel[1] = g * 220 / 256;
    pixel[2] = b * 220 / 256;
    return pixel32;
}
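
  To tie the capture half together, here is a minimal driver loop of my own (a sketch, not code from the original post) that grabs frames, converts them in software, and re-queues each buffer:

/* Hypothetical demo loop for the capture + software-conversion path above */
unsigned char rgb_frame[640 * 480 * 3];

int main(void)
{
    void *yuv_frame = NULL;
    size_t yuv_len = 0;
    int i;

    initVideo();
    if (startCapture() < 0)
        return -1;

    for (i = 0; i < 100; i++) {  /* grab 100 frames as a demo */
        if (getFrame(&yuv_frame, &yuv_len) < 0)
            break;
        convert_yuv_to_rgb_buffer((unsigned char *)yuv_frame, rgb_frame, 640, 480);
        /* ... display or transmit rgb_frame here ... */
        backFrame();  /* hand the buffer back to the driver */
    }

    stopCapture();
    freeBuffers();
    closeVideo();
    return 0;
}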

  Next, the IPU part.

#include "includes.h"

int fd_ipu=0;
struct ipu_task taskCam;
struct timeval begintime, endtime;
unsigned int ipuOutputSize=0,ipuInputSize=0;
void *inbuf=NULL;
void *outbuf=NULL;
/* IPU-based color conversion: the hardware counterpart of the software path above */
void initIPU()
{
    int ret;
    CLEAR(taskCam);
    // Input image size and format
    taskCam.input.width    = 640;
    taskCam.input.height   = 480;
    taskCam.input.format   = v4l2_fourcc('Y', 'U', 'Y', 'V');
//
//    taskCam.input.crop.pos.x = 0;
//    taskCam.input.crop.pos.y = 0;
//    taskCam.input.crop.w = 0;
//    taskCam.input.crop.h = 0;

    // Output image size and format
    taskCam.output.width   = 640;
    taskCam.output.height  = 480;
    taskCam.output.format  = v4l2_fourcc('B', 'G', 'R', '3');

//    taskCam.output.crop.pos.x = 300;
//    taskCam.output.crop.pos.y = 300;
//    taskCam.output.crop.w = 300;
//    taskCam.output.crop.h = 300;
    // Open IPU device
    fd_ipu = open(IPUDEV, O_RDWR, 0);
    if (fd_ipu < 0) {
        printf("open ipu dev fail\n");
    }
    // IPU_ALLOC convention: the buffer size goes into paddr, and the driver
    // returns the physical address of the allocated buffer in the same field
    ipuOutputSize = taskCam.output.paddr = taskCam.output.width * taskCam.output.height
            * fmt_to_bpp(taskCam.output.format)/8;
    printf("ipuOutputSize=%d\n", ipuOutputSize);
    ret = ioctl(fd_ipu, IPU_ALLOC, &taskCam.output.paddr);
    if (ret < 0) {
        printf("ioctl IPU_ALLOC fail\n");
    }
    outbuf = mmap(0, ipuOutputSize, PROT_READ | PROT_WRITE,
            MAP_SHARED, fd_ipu, taskCam.output.paddr);
    if (outbuf == MAP_FAILED) {
        printf("mmap ipu output image fail\n");
    }
    // Same IPU_ALLOC convention for the input buffer
    ipuInputSize = taskCam.input.paddr = taskCam.input.width * taskCam.input.height
                * fmt_to_bpp(taskCam.input.format)/8;
    printf("ipuInputSize=%d\n", ipuInputSize);
    ret = ioctl(fd_ipu, IPU_ALLOC, &taskCam.input.paddr);
    if (ret < 0) {
        printf("ioctl IPU_ALLOC fail: (errno = %d)\n", errno);
    }
    inbuf = mmap(0, ipuInputSize, PROT_READ | PROT_WRITE,
                MAP_SHARED, fd_ipu, taskCam.input.paddr);
    if (inbuf == MAP_FAILED) {
        printf("mmap ipu input image fail\n");
    }
}
void IPUConvent(void *in,void *out)
{
    int ret;
    memcpy(inbuf, in, ipuInputSize);
    gettimeofday(&begintime, NULL);
    // Perform color space conversion
    ret = ioctl(fd_ipu, IPU_QUEUE_TASK, &taskCam);
    if (ret < 0) {
        printf("ioctl IPU_QUEUE_TASK fail %x\n", ret);
    }
    gettimeofday(&endtime, NULL);
    double timeuse = 1000000*(endtime.tv_sec - begintime.tv_sec) + endtime.tv_usec - begintime.tv_usec;
    timeuse /= 1000; // divide by 1000 for ms, by 1000000 for s, by 1 for us
    printf("IPU yuv2rgb use %f ms\n", timeuse);
    memcpy(out,outbuf,ipuOutputSize);
}
void closeIPU()
{
    if (outbuf) munmap(outbuf, ipuOutputSize);
    if (inbuf) munmap(inbuf, ipuInputSize);
    if (taskCam.output.paddr)
        ioctl(fd_ipu, IPU_FREE, &taskCam.output.paddr);
    if (taskCam.input.paddr)
        ioctl(fd_ipu, IPU_FREE, &taskCam.input.paddr);
    close(fd_ipu);
}
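
  The IPU code calls fmt_to_bpp(), a helper from Freescale's i.MX IPU sample code that the post never shows; IPUDEV likewise comes from includes.h (typically "/dev/mxc_ipu" on i.MX boards). A minimal stand-in covering only the two formats this project uses (the real helper handles many more) could look like:

/* Hypothetical stand-in for fmt_to_bpp() from the i.MX IPU samples */
static unsigned int fmt_to_bpp(unsigned int pixelformat)
{
    switch (pixelformat) {
    case v4l2_fourcc('Y', 'U', 'Y', 'V'):  /* packed YUV 4:2:2 */
        return 16;
    case v4l2_fourcc('B', 'G', 'R', '3'):  /* 24-bit BGR */
        return 24;
    default:
        return 16;  /* conservative fallback */
    }
}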

  Then the framebuffer display.

#include "includes.h"

int fd_fb0;
long int screensize = 0;
char *fb_buf = 0;
struct fb_var_screeninfo vinfo;
struct fb_fix_screeninfo finfo;

void InitDisOnFrameBuffer()
{
    // Open the file for reading and writing
    fd_fb0 = open(DISON_FB0, O_RDWR);
    if (fd_fb0 < 0) {
        printf("Error: cannot open framebuffer device.\n");
        exit(1);
    }
    printf("The framebuffer device was opened successfully.\n");

    // Get fixed screen information
    if (ioctl(fd_fb0, FBIOGET_FSCREENINFO, &finfo)) {
        printf("Error reading fixed information.\n");
        exit(2);
    }

    // Get variable screen information
    if (ioctl(fd_fb0, FBIOGET_VSCREENINFO, &vinfo)) {
        printf("Error reading variable information.\n");
        exit(3);
    }
    printf("%dx%d, %dbpp\n", vinfo.xres, vinfo.yres, vinfo.bits_per_pixel );

    // Figure out the size of the screen in bytes
    screensize = vinfo.xres * vinfo.yres * vinfo.bits_per_pixel / 8;
    printf("screensize=%d\n",screensize);

    // Map the device to memory
    fb_buf = (char *)mmap(0, screensize, PROT_READ | PROT_WRITE, MAP_SHARED,
                   fd_fb0, 0);
    if (fb_buf == MAP_FAILED) {
        printf("Error: failed to map framebuffer device to memory.\n");
        exit(4);
    }
    printf("The framebuffer device was mapped to memory successfully.\n");
}
void DisOnFrameBuffer(unsigned char *frame)
{
    //memcpy(fb_buf,frame,640* 480* 3 * sizeof(char));
    int x = 0, y = 0;
    long int location = 0;
    // Figure out where in memory to put the pixel
    for ( y = 0; y < 480; y++ )
        for ( x = 0; x < 640; x++ ) {
            location = (x+vinfo.xoffset) * (vinfo.bits_per_pixel/8) +
                       (y+vinfo.yoffset) * finfo.line_length;
            if ( vinfo.bits_per_pixel == 32 ) {
                // The framebuffer is laid out BGRA; the IPU's BGR3 output
                // copies through in order (the software converter above emits
                // RGB, so red and blue will appear swapped on that path)
                *(fb_buf + location )    = *frame; frame++; // blue
                *(fb_buf + location + 1) = *frame; frame++; // green
                *(fb_buf + location + 2) = *frame; frame++; // red
                *(fb_buf + location + 3) = 0;               // alpha: opaque
            }
            else { // assume 16bpp: a placeholder gradient from the classic
                   // framebuffer example -- the frame data is ignored here
                int b = 10;
                int g = (x-100)/6;
                int r = 31-(y-100)/16;
                unsigned short int t = r<<11 | g << 5 | b;
                *((unsigned short int*)(fb_buf + location)) = t;
            }
        }
}
void CloseDisOnFrameBuffer()
{
    munmap(fb_buf, screensize);
    close(fd_fb0);
}
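
  Chaining the pieces for a local preview (again my sketch under the same assumptions: 640x480 input, IPU output in BGR3, 32bpp framebuffer):

/* Hypothetical local-preview loop: capture -> IPU conversion -> framebuffer */
static unsigned char bgr_frame[640 * 480 * 3];

void previewLoop(void)
{
    void *yuv_frame = NULL;
    size_t yuv_len = 0;

    initVideo();
    initIPU();
    InitDisOnFrameBuffer();
    startCapture();

    for (;;) {
        if (getFrame(&yuv_frame, &yuv_len) < 0)
            break;
        IPUConvent(yuv_frame, bgr_frame);  /* hardware YUYV -> BGR3 */
        DisOnFrameBuffer(bgr_frame);
        backFrame();
    }

    stopCapture();
    freeBuffers();
    closeVideo();
    closeIPU();
    CloseDisOnFrameBuffer();
}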

  The UDP part.

#include "includes.h"

struct sockaddr_in serveraddr;
int confd;

char udpRecbuf[MAXLINE];

void initUDPTrans()
{
    // 1. Create a UDP socket
    confd = socket(AF_INET, SOCK_DGRAM, 0);
    // 2. Initialize the server address
    bzero(&serveraddr, sizeof(serveraddr));
    serveraddr.sin_family = AF_INET;
    // Convert the dotted-quad server IP to network byte order
    inet_pton(AF_INET, SEVER_IP, &serveraddr.sin_addr.s_addr);
    serveraddr.sin_port = htons(SERVER_PORT);
}
void sendUDPdata(void *datas, unsigned int size)
{
    ssize_t len;
    unsigned int i;
    char tempflag;
    struct udptransbuf data;
    // Split the frame into 24 blocks of BLOCKSIZE bytes, each tagged with a
    // sequence flag ('a', 'b', ...) so the host can reassemble the frame
    for (i = 0; i < 24; i++) {
        memcpy(data.buf, (char *)datas + i * BLOCKSIZE, BLOCKSIZE);
        if (i == 0) {
            tempflag = 'a';
            data.flag = tempflag;
        }
        else {
            tempflag++;
            data.flag = tempflag;
        }
        // 3. Send one block to the server
        len = sendto(confd, (void *)&data, sizeof(data), 0,
                (struct sockaddr *)&serveraddr, sizeof(serveraddr));
        if (len < 0)
            printf("UDP send failed\n");
    }
}
void recUDPdata(char *udpRecbuf) // the caller must supply a buffer of at least MAXLINE bytes
{
    ssize_t len;
    len = recvfrom(confd, udpRecbuf, MAXLINE, 0, NULL, 0);
    if (len > 0)
        write(STDOUT_FILENO, udpRecbuf, len);
}
void closeUDPtrans()
{
    close(confd);
}
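
  The sender depends on struct udptransbuf, BLOCKSIZE, MAXLINE, SEVER_IP, and SERVER_PORT, all from includes.h. Since a 640x480 BGR24 frame is 640*480*3 = 921600 bytes and the loop sends 24 blocks, each block must carry 38400 bytes. A guess at the protocol definitions, consistent with that arithmetic (the IP and port are placeholders):

/* Hypothetical protocol definitions matching the sender above.
 * One frame = 24 blocks; flag runs 'a'..'x' and gives the block sequence. */
#define BLOCKSIZE   38400            /* 640*480*3 / 24 */
#define MAXLINE     1024
#define SEVER_IP    "192.168.1.100"  /* host PC address -- placeholder */
#define SERVER_PORT 8888

struct udptransbuf {
    char flag;            /* 'a' + block index, so the host can reorder blocks */
    char buf[BLOCKSIZE];  /* one slice of the BGR frame */
};

  On the Qt host, the receiver can copy each datagram's payload to offset (flag - 'a') * BLOCKSIZE in its frame buffer and display the frame once all 24 blocks have arrived.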

Complete project

https://github.com/tla001/CapTrans

Host software (Qt)

The Qt host application that receives and displays the UDP stream is included in the complete project linked above.
