Let's go straight to the code.

#include <opencv.hpp>
#include <Windows.h>
#include <mutex>
#include <queue>
#include <memory>
#include <thread>
#include <iostream>   // cout
#include <ctime>      // time(), localtime_s(), strftime(), clock()
#include <cstdarg>    // va_list
#include "atlstr.h"   // CA2W

extern "C"
{
#include "x264.h"
#include "x264_config.h"
};

using namespace cv;
using namespace std;

const int iWidth  = 640;
const int iHeigth = 480;

mutex g_i_mutex;
queue<shared_ptr<unsigned char>> YUVFrame;
FILE* fp_out = nullptr;
bool bStop = false;   // set by the main thread to stop the encoding thread (shared across threads; ideally std::atomic<bool>)
int i_pts = 0;
x264_t* m_x264Encoder = nullptr;
shared_ptr<x264_picture_t> m_pictureIn = make_shared<x264_picture_t>();
x264_param_t m_x264Param;
shared_ptr<unsigned char> sps = nullptr;
shared_ptr<unsigned char> pps = nullptr;
int spslen = 0;
int ppslen = 0;
const unsigned char pHeader[4] = { '\0', '\0', '\0', '\1' };
int iSize = 0;
bool bSave = false;   // when true, captured frames are queued for encoding (intended to keep roughly 30 frames per second)

shared_ptr<unsigned char> GetFrame()
{
    shared_ptr<unsigned char> yuvFrame(nullptr);
    {
        lock_guard<mutex> lock(g_i_mutex);
        if (!YUVFrame.empty())
        {
            yuvFrame = YUVFrame.front();
            YUVFrame.pop();
        }
    }
    return yuvFrame;
}

void OutputDebugPrintf(const char* strOutputString, ...)
{
    char strBuffer[4096] = { 0 };
    va_list vlArgs;
    va_start(vlArgs, strOutputString);
    _vsnprintf_s(strBuffer, sizeof(strBuffer) - 1, strOutputString, vlArgs);
    va_end(vlArgs);
    OutputDebugString(CA2W(strBuffer));
}

void EncodeImage()
{
    int pos = 0;
    while (!bStop)
    {
        shared_ptr<unsigned char> yuvFrame = GetFrame();
        if (yuvFrame)
        {
            // encode one frame
            m_pictureIn.get()->i_pts = i_pts++;
            m_pictureIn.get()->img.plane[0] = yuvFrame.get();
            m_pictureIn.get()->img.plane[1] = yuvFrame.get() + m_x264Param.i_height * m_x264Param.i_width;
            m_pictureIn.get()->img.plane[2] = yuvFrame.get() + m_x264Param.i_height * m_x264Param.i_width * 5 / 4;
            m_pictureIn.get()->img.i_stride[0] = m_x264Param.i_width;
            m_pictureIn.get()->img.i_stride[1] = m_x264Param.i_width >> 1;
            m_pictureIn.get()->img.i_stride[2] = m_x264Param.i_width >> 1;

            x264_picture_t picOut;
            int nalNum;
            x264_nal_t* nalOut;
            clock_t tick1 = clock();
            int len = x264_encoder_encode(m_x264Encoder, &nalOut, &nalNum, m_pictureIn.get(), &picOut);
            clock_t tick2 = clock();
            double dt = (double)(tick2 - tick1);
            OutputDebugPrintf("dt = %lf ms\n", dt);
            if (len < 0)
            {
                cout << "x264 encode failed" << endl;
                return;
            }
            if (IS_X264_TYPE_I(picOut.i_type))
            {
                cout << "I" << endl;
                // write SPS and PPS before every I frame
                fwrite(sps.get(), 1, spslen, fp_out);
                fwrite(pps.get(), 1, ppslen, fp_out);
            }
            if (nalNum <= 0)
            {
                cout << "frame delayed in encoder." << endl;
                return;
            }
            // With b_repeat_headers = 0 the encoder only emits IDR / slice (P or B) NALs here,
            // so SPS and PPS must be obtained with x264_encoder_headers() before encoding any frame.
            int firstNalFlag = 0;
            fwrite(pHeader, 1, 4, fp_out);   // write one 00 00 00 01 start code per access unit
            for (int j = 0; j < nalNum; ++j)
            {
                if (NAL_SEI == nalOut[j].i_type)
                {
                    continue;
                }
                // skip the Annex-B start code (00 00 01 or 00 00 00 01) that x264 prepends to each NAL
                pos = 0;
                if (nalOut[j].p_payload[0] == 0x00 && nalOut[j].p_payload[1] == 0x00)
                {
                    if (nalOut[j].p_payload[2] == 0x01)
                    {
                        pos = 3;
                    }
                    else if (nalOut[j].p_payload[2] == 0x00 && nalOut[j].p_payload[3] == 0x01)
                    {
                        pos = 4;
                    }
                }
                if (firstNalFlag == 0)
                {
                    fwrite(nalOut[j].p_payload + pos, 1, nalOut[j].i_payload - pos, fp_out);
                    firstNalFlag = 1;
                }
                else
                {
                    fwrite(nalOut[j].p_payload + pos + 1, 1, nalOut[j].i_payload - pos - 1, fp_out);
                }
            }
        }
        else
        {
            this_thread::sleep_for(chrono::milliseconds(2));
        }
    }
}

void ClearFrame()
{
    lock_guard<mutex> lock(g_i_mutex);
    while (!YUVFrame.empty())
    {
        YUVFrame.pop();
    }
}

int main(int argc, char* argv[])
{
    VideoCapture cap(0);                    // open the default camera
    if (!cap.isOpened())                    // check if we succeeded
        return -1;
    cap.set(CV_CAP_PROP_FRAME_WIDTH, iWidth);
    cap.set(CV_CAP_PROP_FRAME_HEIGHT, iHeigth);
    cap.set(CV_CAP_PROP_FPS, 25.0);         // has no effect on this camera
    double w = cap.get(CV_CAP_PROP_FRAME_WIDTH);
    double h = cap.get(CV_CAP_PROP_FRAME_HEIGHT);
    double count = cap.get(CV_CAP_PROP_FPS);
    Mat edges;
    namedWindow("edges", CV_WINDOW_NORMAL | CV_WINDOW_KEEPRATIO | CV_GUI_EXPANDED);
    cv::Point p = cv::Point(iWidth - 340, 20);          // anchor point of the timestamp overlay

    struct tm t;            // broken-down local time
    time_t now;             // current calendar time
    char ch[64];
    clock_t tick1, tick2;
    tick1 = clock();
    double dt;
    int yuvLen = iWidth * iHeigth * 3 / 2;

    errno_t err = fopen_s(&fp_out, "test.h264", "wb");
    if (!fp_out)
    {
        cout << "Could not open output 264 file." << endl;
        return -1;
    }

    // initialize the encoder
    x264_param_default_preset(&m_x264Param, "ultrafast", "zerolatency");
    x264_param_apply_profile(&m_x264Param, x264_profile_names[0]);
    int maxBitRate = 600;                               // fixed for now
    m_x264Param.rc.i_bitrate = maxBitRate;
    m_x264Param.rc.i_vbv_max_bitrate = maxBitRate;      // vbv-maxrate
    m_x264Param.rc.i_vbv_buffer_size = maxBitRate;      // vbv-bufsize
    m_x264Param.i_nal_hrd = X264_NAL_HRD_CBR;
    m_x264Param.rc.i_rc_method = X264_RC_ABR;
    m_x264Param.rc.f_rf_constant = 0.0f;
    m_x264Param.b_vfr_input = 1;
    m_x264Param.i_keyint_max = count;
    m_x264Param.i_width = iWidth;
    m_x264Param.i_height = iHeigth;
    m_x264Param.b_repeat_headers = 0;                   // SPS/PPS are written manually before each I frame
    /*m_x264Param.b_annexb = 0;*/
    m_x264Param.vui.b_fullrange = 0;
    m_x264Param.i_threads = 1;
    int ticks_per_frame = 1;
    m_x264Param.i_timebase_num = 1;
    m_x264Param.i_timebase_den = count;
    m_x264Param.i_fps_num = m_x264Param.i_timebase_den;
    m_x264Param.i_fps_den = m_x264Param.i_timebase_num * ticks_per_frame;
    m_x264Encoder = x264_encoder_open(&m_x264Param);

    // fetch SPS/PPS once, before encoding any frame
    x264_nal_t* nalOut = NULL;
    int nalNum = 0;
    x264_encoder_headers(m_x264Encoder, &nalOut, &nalNum);
    for (int i = 0; i < nalNum; ++i)
    {
        x264_nal_t& nal = nalOut[i];
        if (nal.i_type == NAL_SPS)
        {
            sps = shared_ptr<unsigned char>(new unsigned char[nal.i_payload + 1], [](unsigned char* p){ delete[] p; });
            memcpy(sps.get(), nal.p_payload, nal.i_payload);
            sps.get()[nal.i_payload] = '\0';
            spslen = nal.i_payload;
            // the PPS always comes after the SPS
            x264_nal_t& ppsnal = nalOut[++i];
            pps = shared_ptr<unsigned char>(new unsigned char[ppsnal.i_payload + 1], [](unsigned char* p){ delete[] p; });
            memcpy(pps.get(), ppsnal.p_payload, ppsnal.i_payload);
            pps.get()[ppsnal.i_payload] = '\0';
            ppslen = ppsnal.i_payload;
        }
    }

    x264_picture_alloc(m_pictureIn.get(), X264_CSP_I420, m_x264Param.i_width, m_x264Param.i_height);
    m_pictureIn.get()->img.i_csp = X264_CSP_I420;
    m_pictureIn.get()->img.i_plane = 3;
    m_pictureIn.get()->i_type = X264_TYPE_AUTO;
    m_pictureIn.get()->i_qpplus1 = 0;
    m_pictureIn.get()->i_pts = 0;

    // start the encoding thread
    thread t2(EncodeImage);
    t2.detach();

    for (;;)
    {
        Mat frame;
        cap >> frame;                       // get a new frame from camera

        // overlay the current date and time (YYYY-MM-DD HH:MM:SS) on the frame
        time(&now);                         // get the system time
        localtime_s(&t, &now);              // convert to local time
        int len = strftime(ch, sizeof(ch), "%Y-%m-%d %H:%M:%S", &t);
        ch[len] = '\0';
        putText(frame, ch, p, FONT_HERSHEY_SIMPLEX, 0.8, Scalar(255, 200, 200), 1, CV_AA);

        tick2 = tick1;
        tick1 = clock();                    // ms
        dt = (double)(tick1 - tick2);
        cout << dt << "    " << iSize++ << endl;
        SYSTEMTIME lt;
        GetLocalTime(&lt);
        cout << lt.wHour << ":" << lt.wMinute << ":" << lt.wSecond << ":" << lt.wMilliseconds << "   " << iSize++ << endl;
        if (lt.wMilliseconds <= 40)
        {
            bSave = true;
        }

        cv::Mat yuvImg;
        cv::cvtColor(frame, yuvImg, CV_BGR2YUV_I420);   // convert BGR to YUV I420

        // push the frame into the queue
        {
            if (bSave)
            {
                shared_ptr<unsigned char> yuvBuffer = shared_ptr<unsigned char>(new unsigned char[yuvLen], [](unsigned char* p){ delete[] p; });
                memcpy(yuvBuffer.get(), yuvImg.data, yuvLen * sizeof(unsigned char));   // copy the YUV data into yuvBuffer
                lock_guard<mutex> lock(g_i_mutex);
                YUVFrame.push(yuvBuffer);
                if (YUVFrame.size() > 30)
                {
                    cout << "queue size has exceeded 30, current size is " << YUVFrame.size() << endl;
                }
            }
        }

        imshow("edges", frame);
        if (waitKey(1) >= 0) break;
    }

    bStop = true;
    this_thread::sleep_for(chrono::milliseconds(100));  // give the encoding thread time to finish
    fclose(fp_out);
    ClearFrame();
    x264_encoder_close(m_x264Encoder);
    bSave = false;
    // the camera will be deinitialized automatically in the VideoCapture destructor
    return 0;
}
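
A note on the NAL write-out above: because the listing sets b_repeat_headers = 0 and strips the start codes that x264 has already prepended, it has to store the SPS/PPS itself and re-insert a 00 00 00 01 header for each frame. If you keep x264's defaults (b_repeat_headers = 1 and b_annexb = 1), every NAL already carries an Annex-B start code and the NAL payloads returned by a single x264_encoder_encode() call are laid out contiguously in memory, so the whole access unit can be written in one call. The snippet below is only a minimal alternative sketch that reuses the m_x264Encoder, m_pictureIn and fp_out globals from the listing; it is not the method used in this post.

// Alternative write-out sketch, assuming the encoder was opened with the x264 defaults
// b_repeat_headers = 1 and b_annexb = 1 (the listing above uses b_repeat_headers = 0 instead).
x264_picture_t picOut;
x264_nal_t* nalOut = nullptr;
int nalNum = 0;
int frameSize = x264_encoder_encode(m_x264Encoder, &nalOut, &nalNum, m_pictureIn.get(), &picOut);
if (frameSize > 0)
{
    // The returned NAL payloads are sequential in memory and each one already begins
    // with an Annex-B start code, so the whole access unit is written in a single call.
    fwrite(nalOut[0].p_payload, 1, frameSize, fp_out);
}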

Finally, we can open the generated H.264 file in Elecard StreamEye to inspect the stream.

Project download link: https://download.csdn.net/download/zzqgtt/10582109
