前言:最近 公司项目需要在电视上 播放摄像头视频,而且可以随时切换流,延时要求在500ms以内,网上试过了 各种开源库 ,都不能达到要求。于是自己找到公司C++开发人员请教,最后终于完成,在此记录,也给有需求的小伙伴提供方案。

第一步,添加ffmpeg的so库,网上有很多版本,后面我会在demo里面提供,然后这是我的CmakeList文件:

 
# For more information about using CMake with Android Studio, read the
# documentation: https://d.android.com/studio/projects/add-native-code.html

# Sets the minimum version of CMake required to build the native library.
cmake_minimum_required(VERSION 3.4.1)

# Creates and names a library, sets it as either STATIC or SHARED, and
# provides the relative paths to its source code. You can define multiple
# libraries, and CMake builds them for you. Gradle automatically packages
# shared libraries with your APK.
add_library( # Sets the name of the library.
             native-lib
             # Sets the library as a shared library.
             SHARED
             # Provides a relative path to your source file(s).
             src/main/cpp/native-lib.cpp )

# Declares one prebuilt FFmpeg .so under src/main/jniLibs/<ABI>/ as an
# IMPORTED shared library target, so the same two-line boilerplate is not
# repeated for each of the eight libraries.
macro(add_prebuilt_library libname)
    add_library(${libname} SHARED IMPORTED)
    set_target_properties(${libname} PROPERTIES IMPORTED_LOCATION
        ${CMAKE_SOURCE_DIR}/src/main/jniLibs/${ANDROID_ABI}/lib${libname}.so)
endmacro()

add_prebuilt_library(avcodec-57)
add_prebuilt_library(avdevice-57)
add_prebuilt_library(avfilter-6)
add_prebuilt_library(avformat-57)
add_prebuilt_library(avutil-55)
add_prebuilt_library(gperf)
add_prebuilt_library(swresample-2)
add_prebuilt_library(swscale-4)

# FFmpeg headers. Use ${CMAKE_SOURCE_DIR} (the module directory) instead of
# a hardcoded absolute path such as F:/androidProject/..., which breaks the
# build on every other machine/checkout.
include_directories(${CMAKE_SOURCE_DIR}/src/main/cpp/include/)

# Searches for a specified prebuilt library and stores the path as a
# variable. Because CMake includes system libraries in the search path by
# default, you only need to specify the name of the public NDK library
# you want to add. CMake verifies that the library exists before
# completing its build.
find_library( # Sets the name of the path variable.
              log-lib
              # Specifies the name of the NDK library that
              # you want CMake to locate.
              log )

# Specifies libraries CMake should link to your target library. You
# can link multiple libraries, such as libraries you define in this
# build script, prebuilt third-party libraries, or system libraries.
target_link_libraries( # Specifies the target library.
                       native-lib
                       android
                       OpenSLES
                       jnigraphics
                       avformat-57
                       avdevice-57
                       avcodec-57
                       avutil-55
                       avfilter-6
                       gperf
                       swresample-2
                       swscale-4
                       ${log-lib} )

头文件可以放到你想放置的位置,这里需要用到ANativeWindow,用于把解码后的视频画面渲染到Surface上。然后native-lib.cpp文件也比较简单:

#include <jni.h>
#include <string>
#include <cstring>
#include <cstdio>
#include <atomic>
#include <opencv2/opencv.hpp>
#include <android/bitmap.h>
#include <android/native_window.h>
#include <android/native_window_jni.h>

using namespace cv;

// FFmpeg is a C library: its headers must be wrapped in extern "C" or the
// C++ compiler mangles the symbol names and linking fails.
extern "C" {
#include <libavcodec/avcodec.h>
#include <libswscale/swscale.h>
#include <libavformat/avformat.h>
#include <libavutil/avutil.h>
#include <libavutil/frame.h>
#include <libavdevice/avdevice.h>
#include <libavfilter/avfilter.h>
#include "libswresample/swresample.h"
#include "libavutil/opt.h"
#include "libavutil/imgutils.h"
}

extern "C"
JNIEXPORT jstring JNICALL
Java_com_nazhi_testlive555_MainActivity_stringFromJNI(JNIEnv *env, jobject /* this */) {
    std::string hello = "Hello from C++";
    return env->NewStringUTF(hello.c_str());
}

// Decoder state shared between openVideo() and stop().
static AVPacket *pPacket;
static AVFrame *pAvFrame, *pFrameBGR;
static AVCodecContext *pCodecCtx;
struct SwsContext *pImgConvertCtx;
static AVFormatContext *pFormatCtx;
ANativeWindow *nativeWindow;
ANativeWindow_Buffer windowBuffer;
uint8_t *v_out_buffer;
// Set from the Java side (another thread) to break the decode loop when
// switching streams; atomic because it crosses threads.
static std::atomic<bool> stop{false};

/**
 * Opens the stream at `url` (RTSP/RTMP/local file), decodes video frames and
 * renders them into `surface` until stop() is called.
 *
 * @return 1  when the loop ended and all resources were released
 *         -1 on setup failure (no surface, no video stream, decoder error)
 *         1  is also returned by the early avformat_open_input failure path,
 *            matching the original contract the Java side polls on.
 */
extern "C"
JNIEXPORT jint JNICALL
Java_com_nazhi_testlive555_FfmpegUtils_openVideo(JNIEnv *env, jclass type, jstring url,
                                                jobject surface) {
    stop = false;

#if LIBAVCODEC_VERSION_INT < AV_VERSION_INT(55, 28, 1)
#define av_frame_alloc  avcodec_alloc_frame
#endif
    pAvFrame = av_frame_alloc();
    pFrameBGR = av_frame_alloc();

    // Copy the URL into a local buffer and release the JNI chars right away:
    // the original leaked the GetStringUTFChars() allocation and used
    // sprintf(), which could overflow on a long URL.
    char input_str[500] = {0};
    const char *utf_url = env->GetStringUTFChars(url, NULL);
    snprintf(input_str, sizeof(input_str), "%s", utf_url);
    env->ReleaseStringUTFChars(url, utf_url);

    nativeWindow = ANativeWindow_fromSurface(env, surface);
    if (NULL == nativeWindow) {
        return -1;
    }

    // Register all demuxers/decoders and initialise networking (RTSP/RTMP).
    avcodec_register_all();
    av_register_all();
    avformat_network_init();
    avdevice_register_all();

    pFormatCtx = avformat_alloc_context();
    if (avformat_open_input(&pFormatCtx, input_str, NULL, NULL) < 0)
        return 1;
    avformat_find_stream_info(pFormatCtx, NULL);

    // Find the first video stream in the container.
    int videoIndex = -1;
    for (unsigned int i = 0; i < pFormatCtx->nb_streams; i++) {
        if (pFormatCtx->streams[i]->codec->codec_type == AVMEDIA_TYPE_VIDEO) {
            videoIndex = i;
            break;
        }
    }
    if (videoIndex == -1) {
        // No video stream: the original indexed streams[-1] here.
        avformat_close_input(&pFormatCtx);
        ANativeWindow_release(nativeWindow);
        return -1;
    }

    pCodecCtx = pFormatCtx->streams[videoIndex]->codec;
    AVCodec *pCodec = avcodec_find_decoder(pCodecCtx->codec_id);
    if (pCodec == NULL || avcodec_open2(pCodecCtx, pCodec, NULL) < 0) {
        // Decoder missing/failed: the original ignored this and decoded anyway.
        avformat_close_input(&pFormatCtx);
        ANativeWindow_release(nativeWindow);
        return -1;
    }

    int width = pCodecCtx->width;
    int height = pCodecCtx->height;

    // RGBA destination buffer that pFrameBGR's data pointers are wired into.
    int numBytes = av_image_get_buffer_size(AV_PIX_FMT_RGBA, width, height, 1);
    v_out_buffer = (uint8_t *) av_malloc(numBytes * sizeof(uint8_t));
    av_image_fill_arrays(pFrameBGR->data, pFrameBGR->linesize, v_out_buffer,
                         AV_PIX_FMT_RGBA, width, height, 1);

    // Scaler: decoder pixel format -> RGBA, same resolution. On the choice of
    // SWS_BICUBIC see http://www.cnblogs.com/mmix2009/p/3585524.html
    pImgConvertCtx = sws_getContext(width, height, pCodecCtx->pix_fmt,
                                    width, height, AV_PIX_FMT_RGBA,
                                    SWS_BICUBIC, NULL, NULL, NULL);

    if (0 > ANativeWindow_setBuffersGeometry(nativeWindow, width, height,
                                             WINDOW_FORMAT_RGBA_8888)) {
        ANativeWindow_release(nativeWindow);
        return -1;
    }

    pPacket = (AVPacket *) av_malloc(sizeof(AVPacket));

    // Blocking decode loop; av_read_frame() waits for the next packet.
    while (!stop) {
        if (av_read_frame(pFormatCtx, pPacket) >= 0) {
            if (pPacket->stream_index != videoIndex) {
                // Wrong stream. The original `continue`d without unreffing
                // and leaked every non-video packet's payload.
                av_packet_unref(pPacket);
                continue;
            }
            int gotPicCount = 0;
            avcodec_decode_video2(pCodecCtx, pAvFrame, &gotPicCount, pPacket);
            if (gotPicCount != 0) {
                sws_scale(pImgConvertCtx,
                          (const uint8_t *const *) pAvFrame->data, pAvFrame->linesize,
                          0, height,
                          pFrameBGR->data, pFrameBGR->linesize);
                // (A cv::Mat of size width x height, CV_8UC3, could be built
                // here for OpenCV processing of the frame.)
                if (ANativeWindow_lock(nativeWindow, &windowBuffer, NULL) >= 0) {
                    // Copy row by row: the window stride is in pixels and may
                    // be wider than the frame's linesize (bytes).
                    uint8_t *dst = (uint8_t *) windowBuffer.bits;
                    for (int h = 0; h < height; h++) {
                        memcpy(dst + h * windowBuffer.stride * 4,
                               v_out_buffer + h * pFrameBGR->linesize[0],
                               pFrameBGR->linesize[0]);
                    }
                    ANativeWindow_unlockAndPost(nativeWindow);
                }
            }
        }
        av_packet_unref(pPacket);
    }

    // Teardown. The original leaked pAvFrame, v_out_buffer and the native
    // window, and used av_free() where av_frame_free() is required.
    sws_freeContext(pImgConvertCtx);
    av_free(pPacket);
    av_frame_free(&pFrameBGR);
    av_frame_free(&pAvFrame);
    av_free(v_out_buffer);
    avcodec_close(pCodecCtx);
    avformat_close_input(&pFormatCtx);
    ANativeWindow_release(nativeWindow);
    return 1;
}

// Requests the decode loop in openVideo() to exit (stream switching).
extern "C"
JNIEXPORT void JNICALL
Java_com_nazhi_testlive555_FfmpegUtils_stop(JNIEnv *env, jclass type) {
    stop = true;
}

这里需要注意的是 Ffmpeg的头文件需要包含在extern “C”内部。流程注释写的很清楚,主要就是初始化,然后通过

avformat_open_input()

函数打开视频地址,这个地址可以是RTSP,RTMP或者本地视频。打开成功返回0,错误返回负数,

av_read_frame()

是个阻塞函数,会一直读取视频帧数据,需要注意的是,代码中注释的cv::Mat,是可以将视频帧数据转化为openCv的Mat类型。后面就是用ANativeWindow渲染了。这里有个全局变量stop是用来控制切换摄像头流的。

最后是java代码 布局只有一个Button和一个Framelayout

package com.nazhi.testlive555

import android.annotation.SuppressLint
import android.app.Activity
import android.support.v7.app.AppCompatActivity
import android.os.Bundle
import android.os.Handler
import android.os.Message
import android.util.Log
import kotlinx.android.synthetic.main.activity_main.*
import android.view.*
import android.widget.FrameLayout
import java.lang.ref.WeakReference

class MainActivity : AppCompatActivity() {

    private val mhand = MyHandler(this@MainActivity)
    private var canSelectCamera = false // whether switching cameras is currently allowed

    @SuppressLint("HandlerLeak")
    class MyHandler constructor(activity: AppCompatActivity) : Handler() {
        // WeakReference so the handler does not leak the Activity.
        private var mActivity: WeakReference<AppCompatActivity> =
            WeakReference<AppCompatActivity>(activity)

        override fun handleMessage(msg: Message) {
            super.handleMessage(msg)
            when (msg.what) {
                else -> {
                }
            }
        }
    }

    // Result of FfmpegUtils.openVideo(): 1 once the native decode loop has
    // exited and released its resources. Written from a worker thread and
    // polled from the UI thread, hence @Volatile (the original had no
    // synchronization at all).
    @Volatile
    var i = 0

    override fun onCreate(savedInstanceState: Bundle?) {
        super.onCreate(savedInstanceState)
        window.addFlags(WindowManager.LayoutParams.FLAG_KEEP_SCREEN_ON)
        window.addFlags(WindowManager.LayoutParams.FLAG_FULLSCREEN)
        setContentView(R.layout.activity_main)

        // Build a fresh full-screen SurfaceView inside the FrameLayout.
        frame.removeAllViews()
        val surfaceView = SurfaceView(this)
        val lp = FrameLayout.LayoutParams(
            ViewGroup.LayoutParams.MATCH_PARENT,
            ViewGroup.LayoutParams.MATCH_PARENT
        )
        lp.gravity = Gravity.LEFT or Gravity.TOP
        surfaceView.layoutParams = lp
        frame.addView(surfaceView)

        val holder = surfaceView.holder
        val inputurl = "rtsp://admin:admin123@192.168.2.68:554/h265/ch1/main/av_stream"
        holder.addCallback(object : SurfaceHolder.Callback {
            override fun surfaceCreated(holder: SurfaceHolder) {
                Thread(Runnable {
                    // openVideo blocks until stop() is called; log its result
                    // when it actually returns (the original logged i right
                    // after start(), which always printed the stale value).
                    i = FfmpegUtils.openVideo(inputurl, holder.surface)
                    Log.e("i", "i" + i)
                }).start()
                canSelectCamera = true
                Log.e("MainActivity", "canSelectCamera = true")
            }

            override fun surfaceChanged(
                holder: SurfaceHolder, format: Int, width: Int, height: Int
            ) {
            }

            override fun surfaceDestroyed(holder: SurfaceHolder) {}
        })

        btn.setOnClickListener {
            mhand.postDelayed(runnable, 50)
        }
    }

    var inputurl: String = ""

    // NOTE(review): isone is never toggled anywhere visible, so the runnable
    // below always picks the second URL — confirm the intended switch logic.
    var isone = false

    // Polls until the previous openVideo() call has returned 1 (native
    // resources released), then rebuilds the surface and opens the next stream.
    var runnable: Runnable = object : Runnable {
        override fun run() {
            if (i == 1) {
                Log.e("MainActivity", "i == 1")
                frame.removeAllViews()
                val surfaceView = SurfaceView(this@MainActivity)
                val lp = FrameLayout.LayoutParams(
                    ViewGroup.LayoutParams.MATCH_PARENT,
                    ViewGroup.LayoutParams.MATCH_PARENT
                )
                lp.gravity = Gravity.LEFT or Gravity.TOP
                surfaceView.layoutParams = lp
                frame.addView(surfaceView)

                val holder = surfaceView.holder
                holder.addCallback(object : SurfaceHolder.Callback {
                    override fun surfaceCreated(holder: SurfaceHolder) {
                        // Reset the flag before the blocking call so the next
                        // poll cycle waits for this stream to finish.
                        i = 0
                        Thread(Runnable {
                            Log.e("MainActivity", inputurl)
                            if (isone) {
                                inputurl = "rtsp1"
                            } else {
                                inputurl = "rtsp2"
                            }
                            i = FfmpegUtils.openVideo(inputurl, holder.surface)
                            Log.e("i", "i" + i)
                        }).start()
                    }

                    override fun surfaceChanged(
                        holder: SurfaceHolder, format: Int, width: Int, height: Int
                    ) {
                    }

                    override fun surfaceDestroyed(holder: SurfaceHolder) {}
                })
                canSelectCamera = true
            } else {
                canSelectCamera = false
                Log.e("MainActivity", "i != 1")
                mhand.postDelayed(this, 50)
            }
        }
    }
}

调用stop后一直检测直到openVideo返回1,内存成功释放后再重新打开。

至此,整个app就完成了。想深入的话还可以对图像的Mat进行人脸检测和图像处理。

硬解码和openGl ES渲染版本请移步  Android Ffmpeg硬解码、openGL渲染

android使用Ffmpeg JNI实时播放RTSP、RTMP等视频(主码流,子码流均能流畅播放)相关推荐

  1. 【开源技术分享】无需流媒体服务,让浏览器直接播放rtsp/rtmp的神器:EasyMedia

    不同于市面上其他需要各种转发到流媒体服务的中间件来说,EasyMedia不需要依赖任何nginx-rtmp,srs,zlmediakit等等第三方流媒体服务,只需要你有rtsp或者rtmp等等协议的视 ...

  2. Windows远程桌面实现之五(FFMPEG实现桌面屏幕RTSP,RTMP推流及本地保存)

    by fanxiushu 2018-07-10 转载或引用请注明原始作者. 前面文章分别阐述了,如何抓取电脑屏幕数据,如何采集电脑声音, 如何实现在现代浏览器中通过HTML5和WebSocket直接进 ...

  3. ffmpeg c++代码推流RTSP/RTMP(命令行推流)

    由于ffmpeg推出的rtsp或者rtmp端口需要Listen,所以需要开启端口TCP/UDP,之后采用ffmpeg向端口推送数据 第一,安装rtsp-simple-server release下载地 ...

  4. pyqt+vlc 播放rtsp/rtmp地址并嵌入任意frame中

    先上效果图: 代码: import os, platform# 设置VLC库路径,需在import vlc之前 os.environ['PYTHON_VLC_MODULE_PATH'] = " ...

  5. EasyNVR是怎么做到Web浏览器播放RTSP摄像机直播视频延时控制在一秒内的

    背景说明 由于互联网的飞速发展,传统安防摄像头的视频监控直播与互联网直播相结合也是大势所趋.传统安防的直播大多在一个局域网内,在播放的客户端上也是有所限制,一般都是都需要OCX Web插件进行直播.对 ...

  6. Windows/Android/IOS平台如何在Unity3d播放RTSP/RTMP流

    如果基于Unity3d完全重新开发一个播放器,代价大,周期长,不适合快速出产品,最好的方式就是集成现有Native平台上成熟稳定播放器. 集成Navtive播放器可行性分析: 安卓: Unity3d可 ...

  7. JavaCV音视频开发宝典:使用JavaCV和springBoot实现http-flv直播服务,无需流媒体服务,浏览器网页flv.js转封装方式播放rtsp,rtmp和桌面投屏实时画面

    <JavaCV音视频开发宝典>专栏目录导航 <JavaCV音视频开发宝典>专栏介绍和目录 ​ 前言 自从2021年初开始,各个浏览器开始禁用flash插件,以前直播中经常使用的 ...

  8. H5Stream杂记-HTML中引入RTSP/RTMP源视频初学

    最近遇到项目中视频监控的问题,老大的要求是在前端引入RTSP流媒体视频.为此,一个菜鸟怀着好奇的心情去琢磨了许久. 首先根据老大提出的建议,试一试vlc插件.为此踩了一天vlc的坑.不可否认vlc是一 ...

  9. IPTV机顶盒使用MediaPlayer 播放rtsp协议的视频

    废话不多说直接上代码.如下: <%@ page contentType="text/html; charset=utf-8" language="java" ...

最新文章

  1. c语言 python rsa库_Python遇到性能瓶颈怎么办?
  2. 《算法竞赛进阶指南》打卡-基本算法-AcWing 90. 64位整数乘法:位运算
  3. Android Studio 代码混淆(你真的会混淆吗)
  4. OpenCV使用不同的决策树decision trees的实例(附完整代码)
  5. Nginx技术研究系列7-Azure环境中Nginx高可用性和部署架构设计
  6. Python基础01-变量及数据类型
  7. 极路由2 root及校园802.1x拨号经验帖(西南校园)
  8. html和css可以用在ssh里面么,在网站中使用SSH
  9. 制作了一个WMI 查询分析工具
  10. 光储充一体化充电站_【储能项目】国家电网与浙江瓯江口新区共建光储充一体化充电站项目...
  11. 金融综合(网课+读书笔记)
  12. ubuntu11.04下如何发送文件到U盘如何添加发送文件到U盘的右键菜单
  13. Java8新特性之双冒号(::)
  14. u盘文件意外删除怎么恢复,不小心删了u盘的文件怎么办
  15. 30天不动如山计划(重要)
  16. 【Linux系列文章】系统管理
  17. 硬盘 SMART 检测参数详解
  18. VMware ESXi 更换 CA 证书
  19. 视频教程-从零开始自然语言处理-NLP
  20. 【软工视频】--需求分析

热门文章

  1. Linux系列之SSH建立互信
  2. Linux系统USB驱动目录,Linux那些事儿之我是USB 目录
  3. 游侠我的世界开服务器端口被占用,我的世界怎么开服?游侠我的世界开服教程...
  4. 大淘客 服务器繁忙请稍后再试,解决手机端首页卡慢问题
  5. 新型勒索病毒已经出现
  6. html 代码 path,path是什么意思
  7. 2023武汉中级工程师职称申报流程一览 秋禾火
  8. 解决:photoswipe的data-size问题-自适应宽高,终于可以不用传data-size了!
  9. Design System Application - Chapter 1 网格系统 Grid System
  10. Kubernetes 二进制方式集群部署(单节点多节点)