前几天遇到一个问题:得到摄像头数据后,如何能够直接显示?

吾已解决。需要的朋友可下载吾开源项目:

https://github.com/quantum6?tab=repositories

  • 最简单的办法,就是转换为RGB。这个有现成的代码,吾亦提供了一个:

https://blog.csdn.net/quantum7/article/details/105720150

  性能较差。有的手机会特别慢。

  • 不转换,直接显示。NATIVE方式。

网上有人提供了NATIVE方式。吾本来要试一下,一看要装CYGWIN,放弃。

  • 使用GLSurfaceView。

为什么使用这个?因为在doubango中,就是这样做的。具体步骤:

  1. 配置

在AndroidManifest.xml中增加一句:

<uses-feature    android:glEsVersion="0x00020000" android:required="false"  />
  2. 格式转换

NV21转换为YUV420P(I420)。注意:NV21本身就是YUV420SP(半平面)格式;下文的着色器按Y、U、V三个独立平面分别采样,因此需要转换成平面格式I420。

https://quantum6.blog.csdn.net/article/details/105744636

  3. 继承GLSurfaceView

package net.quantum6.kit;import java.nio.ByteBuffer;
import java.nio.ByteOrder;
import java.nio.FloatBuffer;
import java.nio.ShortBuffer;
import java.util.Collections;
import java.util.LinkedList;
import java.util.List;import javax.microedition.khronos.egl.EGLConfig;
import javax.microedition.khronos.opengles.GL10;import android.content.Context;
import android.graphics.PixelFormat;
import android.opengl.GLES20;
import android.opengl.GLSurfaceView;
import android.util.Log;
import android.view.SurfaceHolder;/*** OpenGL Surface view*/
public class VideoRendererView extends GLSurfaceView implements GLSurfaceView.Renderer
{private final static String TAG = VideoRendererView.class.getCanonicalName();int mBufferWidthY, mBufferHeightY,  mBufferWidthUV, mBufferHeightUV;ByteBuffer mBuffer;int mBufferPositionY, mBufferPositionU, mBufferPositionV;private static final int FLOAT_SIZE_BYTES = 4;private static final int SHORT_SIZE_BYTES = 2;private static final int TRIANGLE_VERTICES_DATA_STRIDE_BYTES = 5 * FLOAT_SIZE_BYTES;private static final int TRIANGLE_VERTICES_DATA_POS_OFFSET   = 0;private static final int TRIANGLE_VERTICES_DATA_UV_OFFSET    = 3;private final float[] TRIANFLE_VERTICES_DATA = {1, -1, 0, 1, 1,1,  1, 0, 1, 0,-1,  1, 0, 0, 0,-1, -1, 0, 0, 1};private final short[] INDICES_DATA = {0, 1, 2,2, 3, 0};private FloatBuffer mTriangleVertices;private ShortBuffer mIndices;private static final String VERTEX_SHADER_SOURCE ="attribute vec4 aPosition;\n" +"attribute vec2 aTextureCoord;\n" +"varying vec2 vTextureCoord;\n" +"void main() {\n" +"  gl_Position = aPosition;\n" +"  vTextureCoord = aTextureCoord;\n" +"}\n";private static final String FRAGMENT_SHADER_SOURCE = "precision mediump float;" +"varying vec2 vTextureCoord;" +"" +"uniform sampler2D SamplerY; " +"uniform sampler2D SamplerU;" +"uniform sampler2D SamplerV;" +"" +"const mat3 yuv2rgb = mat3(1, 0, 1.2802,1, -0.214821, -0.380589,1, 2.127982, 0);" +"" +"void main() {    " +"    vec3 yuv = vec3(1.1643 * (texture2D(SamplerY, vTextureCoord).r - 0.0625)," +"                    texture2D(SamplerU, vTextureCoord).r - 0.5," +"                    texture2D(SamplerV, vTextureCoord).r - 0.5);" +"    vec3 rgb = yuv * yuv2rgb;    " +"    gl_FragColor = vec4(rgb, 1.0);" +"} ";private int mProgram;private int maPositionHandle;private int maTextureHandle;private int muSamplerYHandle;private int muSamplerUHandle;private int muSamplerVHandle;private int[] mTextureY = new int[1];private int[] mTextureU = new int[1];private int[] mTextureV = new int[1];private boolean mSurfaceCreated;private boolean mSurfaceDestroyed;@SuppressWarnings("unused")private 
Context mContext;private int mViewWidth, mViewHeight, mViewX, mViewY;private boolean mFullScreenRequired;public VideoRendererView(Context context) {super(context);setEGLContextClientVersion(2);setEGLConfigChooser(8, 8, 8, 8, 16, 0);   setRenderer(this);getHolder().setFormat(PixelFormat.TRANSLUCENT);getHolder().setType(SurfaceHolder.SURFACE_TYPE_GPU);setRenderMode(GLSurfaceView.RENDERMODE_WHEN_DIRTY);mContext = context;mTriangleVertices = ByteBuffer.allocateDirect(TRIANFLE_VERTICES_DATA.length* FLOAT_SIZE_BYTES).order(ByteOrder.nativeOrder()).asFloatBuffer();mTriangleVertices.put(TRIANFLE_VERTICES_DATA).position(0);mIndices = ByteBuffer.allocateDirect(INDICES_DATA.length* SHORT_SIZE_BYTES).order(ByteOrder.nativeOrder()).asShortBuffer();mIndices.put(INDICES_DATA).position(0);}public void setParams(boolean fullScreenRequired, ByteBuffer buffer, int bufferWidth, int bufferHeight, int fps){mFullScreenRequired = fullScreenRequired;setBuffer(buffer, bufferWidth, bufferHeight);}public void setBuffer(ByteBuffer buffer, int bufferWidth, int bufferHeight){mValidDataList.clear();mEmptyDataList.clear();mBuffer          = buffer;mBufferWidthY    = bufferWidth;mBufferHeightY   = bufferHeight;mBufferWidthUV   = (mBufferWidthY >> 1);mBufferHeightUV  = (mBufferHeightY >> 1);mBufferPositionY = 0;mBufferPositionU = (mBufferWidthY * mBufferHeightY);mBufferPositionV = (mBufferPositionU + (mBufferWidthUV * mBufferHeightUV));}public boolean isReady(){return (mSurfaceCreated && !mSurfaceDestroyed);}public boolean isDestroyed(){return mSurfaceDestroyed;}@Overridepublic void surfaceDestroyed(SurfaceHolder holder) {mSurfaceCreated   = false;mSurfaceDestroyed = true;super.surfaceDestroyed(holder);}@Overridepublic void onDrawFrame(GL10 glUnused) {GLES20.glViewport(mViewX, mViewY, mViewWidth, mViewHeight);GLES20.glClear(GLES20.GL_COLOR_BUFFER_BIT | GLES20.GL_DEPTH_BUFFER_BIT);GLES20.glUseProgram(mProgram);checkGlError("glUseProgram");if (mValidDataList.size() < BUFFER_COUNT_MIN){return;}if 
(mBuffer == null){mBuffer = ByteBuffer.allocateDirect(mBufferWidthY*mBufferHeightY*3/2);}byte[] newData = mValidDataList.get(0);mBuffer.rewind();mBuffer.put(newData);if(mBuffer != null){synchronized(this){             GLES20.glActiveTexture(GLES20.GL_TEXTURE0);GLES20.glBindTexture(  GLES20.GL_TEXTURE_2D, mTextureY[0]);GLES20.glTexImage2D(   GLES20.GL_TEXTURE_2D, 0, GLES20.GL_LUMINANCE, mBufferWidthY, mBufferHeightY, 0, GLES20.GL_LUMINANCE, GLES20.GL_UNSIGNED_BYTE, mBuffer.position(mBufferPositionY));GLES20.glUniform1i(muSamplerYHandle, 0);GLES20.glActiveTexture(GLES20.GL_TEXTURE1);GLES20.glBindTexture(  GLES20.GL_TEXTURE_2D, mTextureU[0]);GLES20.glTexImage2D(   GLES20.GL_TEXTURE_2D, 0, GLES20.GL_LUMINANCE, mBufferWidthUV, mBufferHeightUV, 0, GLES20.GL_LUMINANCE, GLES20.GL_UNSIGNED_BYTE, mBuffer.position(mBufferPositionU));GLES20.glUniform1i(muSamplerUHandle, 1);GLES20.glActiveTexture(GLES20.GL_TEXTURE2);GLES20.glBindTexture(  GLES20.GL_TEXTURE_2D, mTextureV[0]);GLES20.glTexImage2D(   GLES20.GL_TEXTURE_2D, 0, GLES20.GL_LUMINANCE, mBufferWidthUV, mBufferHeightUV, 0, GLES20.GL_LUMINANCE, GLES20.GL_UNSIGNED_BYTE, mBuffer.position(mBufferPositionV));GLES20.glUniform1i(muSamplerVHandle, 2);}}GLES20.glDrawElements(GLES20.GL_TRIANGLES, INDICES_DATA.length, GLES20.GL_UNSIGNED_SHORT, mIndices);this.mEmptyDataList.add(newData);}public void onSurfaceChanged(GL10 glUnused, int width, int height) {GLES20.glViewport(0, 0, width, height);setViewport(width, height);// GLU.gluPerspective(glUnused, 45.0f, (float)width/(float)height, 0.1f, 100.0f);}public void onSurfaceCreated(GL10 glUnused, EGLConfig config) {GLES20.glEnable( GLES20.GL_BLEND);GLES20.glDisable(GLES20.GL_DEPTH_TEST);GLES20.glDisable(GLES20.GL_DITHER);GLES20.glDisable(GLES20.GL_STENCIL_TEST);GLES20.glDisable(GL10.GL_DITHER);String extensions = GLES20.glGetString(GL10.GL_EXTENSIONS);Log.d(TAG, "OpenGL extensions=" +extensions);// Ignore the passed-in GL10 interface, and use the GLES20// class's static methods 
instead.mProgram = createProgram(VERTEX_SHADER_SOURCE, FRAGMENT_SHADER_SOURCE);if (mProgram == 0) {return;}maPositionHandle = GLES20.glGetAttribLocation(mProgram, "aPosition");checkGlError("glGetAttribLocation aPosition");if (maPositionHandle == -1) {throw new RuntimeException("Could not get attrib location for aPosition");}maTextureHandle = GLES20.glGetAttribLocation(mProgram, "aTextureCoord");checkGlError("glGetAttribLocation aTextureCoord");if (maTextureHandle == -1) {throw new RuntimeException("Could not get attrib location for aTextureCoord");}muSamplerYHandle = GLES20.glGetUniformLocation(mProgram, "SamplerY");if (muSamplerYHandle == -1) {throw new RuntimeException("Could not get uniform location for SamplerY");}muSamplerUHandle = GLES20.glGetUniformLocation(mProgram, "SamplerU");if (muSamplerUHandle == -1) {throw new RuntimeException("Could not get uniform location for SamplerU");}muSamplerVHandle = GLES20.glGetUniformLocation(mProgram, "SamplerV");if (muSamplerVHandle == -1) {throw new RuntimeException("Could not get uniform location for SamplerV");}mTriangleVertices.position(TRIANGLE_VERTICES_DATA_POS_OFFSET);GLES20.glVertexAttribPointer(maPositionHandle, 3, GLES20.GL_FLOAT, false, TRIANGLE_VERTICES_DATA_STRIDE_BYTES, mTriangleVertices);checkGlError("glVertexAttribPointer maPosition");mTriangleVertices.position(TRIANGLE_VERTICES_DATA_UV_OFFSET);GLES20.glEnableVertexAttribArray(maPositionHandle);checkGlError("glEnableVertexAttribArray maPositionHandle");GLES20.glVertexAttribPointer(maTextureHandle, 2, GLES20.GL_FLOAT, false, TRIANGLE_VERTICES_DATA_STRIDE_BYTES, mTriangleVertices);checkGlError("glVertexAttribPointer maTextureHandle");GLES20.glEnableVertexAttribArray(maTextureHandle);checkGlError("glEnableVertexAttribArray maTextureHandle");GLES20.glGenTextures(1, mTextureY, 0);GLES20.glBindTexture(  GLES20.GL_TEXTURE_2D, mTextureY[0]);GLES20.glTexParameteri(GLES20.GL_TEXTURE_2D, GLES20.GL_TEXTURE_MIN_FILTER, 
GLES20.GL_LINEAR);GLES20.glTexParameteri(GLES20.GL_TEXTURE_2D, GLES20.GL_TEXTURE_MAG_FILTER, GLES20.GL_LINEAR);GLES20.glTexParameteri(GLES20.GL_TEXTURE_2D, GLES20.GL_TEXTURE_WRAP_S,     GLES20.GL_CLAMP_TO_EDGE);GLES20.glTexParameteri(GLES20.GL_TEXTURE_2D, GLES20.GL_TEXTURE_WRAP_T,     GLES20.GL_CLAMP_TO_EDGE);GLES20.glGenTextures(1, mTextureU, 0);GLES20.glBindTexture(  GLES20.GL_TEXTURE_2D, mTextureU[0]);GLES20.glTexParameteri(GLES20.GL_TEXTURE_2D, GLES20.GL_TEXTURE_MIN_FILTER, GLES20.GL_LINEAR);GLES20.glTexParameteri(GLES20.GL_TEXTURE_2D, GLES20.GL_TEXTURE_MAG_FILTER, GLES20.GL_LINEAR);GLES20.glTexParameteri(GLES20.GL_TEXTURE_2D, GLES20.GL_TEXTURE_WRAP_S,     GLES20.GL_CLAMP_TO_EDGE);GLES20.glTexParameteri(GLES20.GL_TEXTURE_2D, GLES20.GL_TEXTURE_WRAP_T,     GLES20.GL_CLAMP_TO_EDGE);GLES20.glGenTextures(1, mTextureV, 0);GLES20.glBindTexture(  GLES20.GL_TEXTURE_2D, mTextureV[0]);GLES20.glTexParameteri(GLES20.GL_TEXTURE_2D, GLES20.GL_TEXTURE_MIN_FILTER, GLES20.GL_LINEAR);GLES20.glTexParameteri(GLES20.GL_TEXTURE_2D, GLES20.GL_TEXTURE_MAG_FILTER, GLES20.GL_LINEAR);GLES20.glTexParameteri(GLES20.GL_TEXTURE_2D, GLES20.GL_TEXTURE_WRAP_S,     GLES20.GL_CLAMP_TO_EDGE);GLES20.glTexParameteri(GLES20.GL_TEXTURE_2D, GLES20.GL_TEXTURE_WRAP_T,     GLES20.GL_CLAMP_TO_EDGE);mSurfaceCreated = true;setViewport(getWidth(), getHeight());}private int loadShader(int shaderType, String source) {int shader = GLES20.glCreateShader(shaderType);if (shader != 0) {GLES20.glShaderSource(shader, source);GLES20.glCompileShader(shader);int[] compiled = new int[1];GLES20.glGetShaderiv(shader, GLES20.GL_COMPILE_STATUS, compiled, 0);if (compiled[0] == 0) {Log.e(TAG, "Could not compile shader " + shaderType + ":");Log.e(TAG, GLES20.glGetShaderInfoLog(shader));GLES20.glDeleteShader(shader);shader = 0;}}return shader;}private int createProgram(String vertexSource, String fragmentSource) {int vertexShader = loadShader(GLES20.GL_VERTEX_SHADER, vertexSource);if (vertexShader == 0) {return 0;}int pixelShader 
= loadShader(GLES20.GL_FRAGMENT_SHADER, fragmentSource);if (pixelShader == 0) {return 0;}int program = GLES20.glCreateProgram();if (program != 0) {GLES20.glAttachShader(program, vertexShader);checkGlError("glAttachShader");GLES20.glAttachShader(program, pixelShader);checkGlError("glAttachShader");GLES20.glLinkProgram(program);int[] linkStatus = new int[1];GLES20.glGetProgramiv(program, GLES20.GL_LINK_STATUS, linkStatus, 0);if (linkStatus[0] != GLES20.GL_TRUE) {Log.e(TAG, "Could not link program: ");Log.e(TAG, GLES20.glGetProgramInfoLog(program));GLES20.glDeleteProgram(program);program = 0;}}return program;}private void setViewport(int width, int height){if(mFullScreenRequired){mViewWidth = width;mViewHeight = height;mViewX = mViewY = 0;}else{float fRatio = ((float) mBufferWidthY / (float) mBufferHeightY);mViewWidth = (int) ((float) width / fRatio) > height ? (int) ((float) height * fRatio) : width;mViewHeight = (int) (mViewWidth / fRatio) > height ? height : (int) (mViewWidth / fRatio);mViewX = ((width - mViewWidth) >> 1);mViewY = ((height - mViewHeight) >> 1);}}private void checkGlError(String op) {int error;while ((error = GLES20.glGetError()) != GLES20.GL_NO_ERROR) {Log.e(TAG, op + ": glError " + error);throw new RuntimeException(op + ": glError " + error);}}private final static int BUFFER_COUNT_MIN   = 2;private List<byte[]>  mValidDataList = Collections.synchronizedList(new LinkedList<byte[]>());private List<byte[]>  mEmptyDataList = Collections.synchronizedList(new LinkedList<byte[]>());public void newDataArrived(final byte[] data){byte[] newData;if (mEmptyDataList.size() > 0){newData = mEmptyDataList.remove(0);}else{newData = new byte[data.length];}System.arraycopy(data, 0, newData, 0, data.length);mValidDataList.add(newData);this.requestRender();}}

全网首发:安卓GLSurfaceView绘制显示YUV(NV21)相关推荐

  1. [Python从零到壹] 十四.机器学习之分类算法五万字总结全网首发(决策树、KNN、SVM、分类对比实验)

    欢迎大家来到"Python从零到壹",在这里我将分享约200篇Python系列文章,带大家一起去学习和玩耍,看看Python这个有趣的世界.所有文章都将结合案例.代码和作者的经验讲 ...

  2. [Python从零到壹] 十三.机器学习之聚类算法四万字总结全网首发(K-Means、BIRCH、树状聚类、MeanShift)

    欢迎大家来到"Python从零到壹",在这里我将分享约200篇Python系列文章,带大家一起去学习和玩耍,看看Python这个有趣的世界.所有文章都将结合案例.代码和作者的经验讲 ...

  3. 全网首发!老大众奥迪碟盒通信协议破解,可以模拟数码碟盒,外接AUX蓝牙U盘等音频设备

    文章目录 前言 什么是碟盒? 碟盒接口定义 车机与碟盒之间的通信协议 主机向碟盒发送数据协议 数据帧格式 逻辑分析仪真机抓包分析 起始信号波形: 数据0信号波形: 数据1信号波形: 逻辑分析仪抓包分析 ...

  4. 安卓使用opengles显示深度点云图或深度3D效果图

    前言:因为想在安卓设备上显示深度图的3D效果画面,经过查找资料,发现使用opengles比较方便.本文基于opengles在安卓设备实现3D点云效果图显示,而且深度图上点的颜色由近及远,从红-黄-绿- ...

  5. 全网首发,大众奥迪领驭帕萨特DIY数码碟盒增加USB和蓝牙播放音乐功能使用原车接口无损改装

    文章目录 全网首发!大众奥迪DIY数码碟盒增加USB和蓝牙播放音乐功能 前言 1.设计指标 2.方案设计 2.1.外壳选择 2.2.蓝牙方案 2.3.MCU方案 3.原理图设计 4.PCB设计 5.程 ...

  6. Android 基于ffmpeg开发简易播放器 - EGL和OpenGLESGLES显示YUV视频

    EGL和OpenGLESGLES显示YUV视频 1.EGL EGL是OpenGL ES与系统原始窗口的适配层: Display:用于与原生窗口建立连接. Surface:用于渲染的区域. Contex ...

  7. 软件测试面试必看,资深测试工程师面试题集锦 全网首发

    全网首发,最牛的全套软件测试  ,自动化测试面试题,这些面试题都在文章末尾,有需要的可以自取 废话少说直接开始咱们今天的整体内容, 1.自我介绍? 我是谁.工作几年.你上家公司做什么.负责什么.你的优 ...

  8. [转载]玩转Android Camera开发(三):国内首发---使用GLSurfaceView预览Camera 基础拍照demo...

    GLSurfaceView 是OpenGL中的一个类,也是可以预览Camera的,而且在预览Camera上有其独到之处.独到之处在哪?当使用Surfaceview无能为力. 痛不欲生时就只有使用GLS ...

  9. Android上使用OpenGLES2.0显示YUV数据

    在Android上用OpenGLES来显示YUV图像,之所以这样做,是因为: 1.android本身也不能直接显示YUV图像,YUV转成RGB还是必要的: 2.YUV手动转RGB会占用大量的CPU资源 ...

  10. [Python从零到壹] 十二.机器学习之回归分析万字总结全网首发(线性回归、多项式回归、逻辑回归)

    欢迎大家来到"Python从零到壹",在这里我将分享约200篇Python系列文章,带大家一起去学习和玩耍,看看Python这个有趣的世界.所有文章都将结合案例.代码和作者的经验讲 ...

最新文章

  1. 如何使用SpringBoot AOP 记录操作日志、异常日志?
  2. 命令行获取docker远程仓库镜像列表
  3. 论文浅尝 | 利用多语言 wordnet 上随机游走实现双语 embeddings
  4. (4.28)for xml path 在合并拆分上的作用演示
  5. [shell基础]——uniq命令
  6. python下载url链接_使用Python从url地址下载所有pdf文件
  7. 图书管理系统活动,时序图
  8. 拓端tecdat|R语言样条曲线、分段线性回归模型piecewise regression估计个股beta值分析收益率数据
  9. 威纶触摸屏数值元件格式设置_在威纶触摸屏当中,数值输入元件那里的设备类型的字母都表示什么意思啊...
  10. python搭建网盘网站_搭建nextcloud私有云存储网盘
  11. kotlin 开发桌面应用_2020-21年Kotlin应用开发十大公司
  12. Excel删除重复行单不删除单元格
  13. 走向.NET架构设计—第三章—分层设计,初涉架构(后篇)
  14. 开篇──纪念调零的百合
  15. 链表操作eeeeeeeeee
  16. Dcim.exe病毒文件感染相机SD卡处理方案
  17. linux 开机速度 固态,Windows/Linux系统开机OCZ胜出
  18. 人工智能基础知识:介绍人工智能的历史,基本概念和应用领域
  19. 利用计算机画统计图教学设计,《折线统计图》教学设计及反思
  20. 乔布斯给中国CEO的三堂课

热门文章

  1. android图片布局填冲满,Android的布局图像填充宽度
  2. ajax后台重定向会返回什么_处理jquery ajax重定向
  3. git flow使用
  4. nginx 配置参数说明
  5. es分词查询与模糊查询
  6. CCS5.5.0编译问题之” XDAIS_CG_ROOT ”
  7. 20180316交换排序-快速排序
  8. 全面解读新中产:有房有车有贷、半数决策看老婆
  9. 线程池与Threadlocal
  10. Atitit 表达式原理 语法分析 原理与实践 解析java的dsl  递归下降是现阶段主流的语法分析方法...