Playing video is a common requirement, and the full-screen and notched displays of recent years have brought a wide variety of screen aspect ratios. Below is a way to adapt video playback to any screen ratio, similar to ImageView's scaleType="centerCrop": the video is scaled to fill the view and the overflow is cropped evenly.
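
Before walking through the code, the centerCrop math itself is simple: compare the video's aspect ratio with the view's, keep the axis that fits, and crop the overflow evenly from both sides of the other axis. The sketch below is a hypothetical standalone helper (not part of the class that follows) that computes the fraction cropped from each side; the same arithmetic appears in onSurfaceChanged() further down.

final class CropMath {
    /**
     * Returns the fraction of the video texture to discard from EACH side
     * of the axis that overflows the view (0 when the ratios already match).
     */
    static float cropFraction(float videoRatio, float viewRatio) {
        if (viewRatio < videoRatio) {
            // View is narrower than the video: crop left and right.
            return (1f - viewRatio / videoRatio) / 2f;
        }
        if (viewRatio > videoRatio) {
            // View is wider than the video: crop top and bottom.
            return (1f - videoRatio / viewRatio) / 2f;
        }
        return 0f;
    }
}

For example, a 16:9 video shown in a 9:16 portrait view gives viewRatio / videoRatio ≈ 0.316, so roughly 34% of the texture is cropped from each of the left and right edges and the remaining center strip fills the screen.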

public class ClipVideoView extends GLSurfaceView {

    /** MediaPlayer used for playback */
    private MediaPlayer mediaPlayer;
    /** Video path */
    private String videoPath;
    /** Prepared listener */
    private MediaPlayer.OnPreparedListener mOnPreparedListener;

    public ClipVideoView(Context context) {
        this(context, null);
    }

    public ClipVideoView(Context context, AttributeSet attrs) {
        super(context, attrs);
        init();
    }

    private void init() {
        Logger.d("ClipVideoView init");
        setEGLContextClientVersion(2);
        setPreserveEGLContextOnPause(true);
        setRenderer(new VideoRenderer(getContext()));
        setRenderMode(GLSurfaceView.RENDERMODE_CONTINUOUSLY);
    }

    /** Sets the prepared listener */
    public void setPreparedListener(MediaPlayer.OnPreparedListener onPreparedListener) {
        mOnPreparedListener = onPreparedListener;
    }

    /** Starts playing the given video */
    public void start(String path) {
        videoPath = path;
        if (mediaPlayer == null) {
            // The GL surface is not ready yet; the path is kept and playback
            // starts from onSurfaceCreated().
            Logger.d("start video mediaPlayer is null, path:" + path);
            return;
        }
        try {
            Logger.d("start video path:" + path);
            mediaPlayer.reset();
            mediaPlayer.setLooping(true);
            mediaPlayer.setDataSource(path);
            mediaPlayer.prepare();
            mediaPlayer.start();
            if (mOnPreparedListener != null) {
                mOnPreparedListener.onPrepared(mediaPlayer);
            }
        } catch (Exception e) {
            Logger.e("play video error:" + e + "  video path:" + path);
        }
    }

    @Override
    public void onResume() {
        super.onResume();
        if (mediaPlayer != null && videoPath != null && !videoPath.isEmpty()) {
            Logger.d("video restart");
            mediaPlayer.start();
        }
    }

    @Override
    public void onPause() {
        super.onPause();
        if (mediaPlayer != null && mediaPlayer.isPlaying()) {
            Logger.d("video pause");
            mediaPlayer.pause();
        }
    }

    /** Releases the player */
    public void release() {
        if (mediaPlayer != null) {
            Logger.d("video release");
            mediaPlayer.stop();
            mediaPlayer.release();
            mediaPlayer = null;
        }
    }

    private class VideoRenderer implements Renderer {

        private static final String TAG = "VideoRenderer";
        /** Bytes per float */
        private static final int FLOAT_SIZE = 4;

        private Context context;
        /** OES texture id that receives the video frames */
        int textureId = -1;
        /** Components per vertex position */
        private final int COORDS_PER_VERTEX = 3;
        /** Components per texture coordinate */
        private final int TEXCOORDS_PER_VERTEX = 2;

        /** Vertex positions of the full-screen quad */
        private final float[] QUAD_COORDS = {
                -1.0f, 1.0f, 0.0f,   // top left
                -1.0f, -1.0f, 0.0f,  // bottom left
                1.0f, -1.0f, 0.0f,   // bottom right
                1.0f, 1.0f, 0.0f     // top right
        };
        /** Default texture coordinates (no cropping) */
        private float[] quadTexCoords = {
                0.0f, 1.0f,
                1.0f, 1.0f,
                1.0f, 0.0f,
                0.0f, 0.0f
        };
        /** Draw order */
        private final short[] index = {0, 1, 2, 0, 2, 3};

        /** Vertex buffer */
        private FloatBuffer quadVertices;
        /** Texture coordinate buffer */
        private FloatBuffer quadTexCoord;
        /** Index buffer */
        private ShortBuffer shortBuffer;

        /** Shader program */
        private int quadProgram = -1;
        /** Location of a_Position */
        private int quadPositionParam = -1;
        /** Location of a_TexCoordinate */
        private int quadTexCoordParam = -1;
        /** Location of the OES sampler u_Texture */
        private int uTextureSamplerLocation = -1;

        /** Whether a new video frame is available */
        private boolean updateSurface = false;
        private SurfaceTexture surfaceTexture;
        private final Object lock = new Object();

        public VideoRenderer(Context context) {
            this.context = context;
        }

        @Override
        public void onSurfaceCreated(GL10 gl, EGLConfig config) {
            Logger.d("onSurfaceCreated");
            // Create the external OES texture that the MediaPlayer will render into.
            int[] textures = new int[1];
            GLES20.glGenTextures(1, textures, 0);
            textureId = textures[0];
            int textureTarget = GLES11Ext.GL_TEXTURE_EXTERNAL_OES;
            GLES20.glBindTexture(textureTarget, textureId);
            GLES20.glTexParameterf(GLES11Ext.GL_TEXTURE_EXTERNAL_OES,
                    GLES20.GL_TEXTURE_MIN_FILTER, GLES20.GL_LINEAR);
            GLES20.glTexParameterf(GLES11Ext.GL_TEXTURE_EXTERNAL_OES,
                    GLES20.GL_TEXTURE_MAG_FILTER, GLES20.GL_LINEAR);
            GLES20.glTexParameteri(GLES11Ext.GL_TEXTURE_EXTERNAL_OES,
                    GLES20.GL_TEXTURE_WRAP_S, GLES20.GL_CLAMP_TO_EDGE);
            GLES20.glTexParameteri(GLES11Ext.GL_TEXTURE_EXTERNAL_OES,
                    GLES20.GL_TEXTURE_WRAP_T, GLES20.GL_CLAMP_TO_EDGE);

            // Compile and link the shaders.
            int vertexShader =
                    ShaderUtil.loadGLShader(TAG, context, GLES20.GL_VERTEX_SHADER, R.raw.base_vertex);
            int fragmentShader =
                    ShaderUtil.loadGLShader(TAG, context, GLES20.GL_FRAGMENT_SHADER, R.raw.base_fragment_oes);
            quadProgram = GLES20.glCreateProgram();
            GLES20.glAttachShader(quadProgram, vertexShader);
            GLES20.glAttachShader(quadProgram, fragmentShader);
            GLES20.glLinkProgram(quadProgram);
            GLES20.glUseProgram(quadProgram);
            ShaderUtil.checkGLError(TAG, "Program creation");

            quadPositionParam = GLES20.glGetAttribLocation(quadProgram, "a_Position");
            quadTexCoordParam = GLES20.glGetAttribLocation(quadProgram, "a_TexCoordinate");
            uTextureSamplerLocation = GLES20.glGetUniformLocation(quadProgram, "u_Texture");
            ShaderUtil.checkGLError(TAG, "Program parameters");

            // Wire the MediaPlayer to the OES texture through a SurfaceTexture.
            mediaPlayer = new MediaPlayer();
            mediaPlayer.setAudioStreamType(AudioManager.STREAM_MUSIC);
            mediaPlayer.setLooping(true);
            surfaceTexture = new SurfaceTexture(textureId);
            Surface surface = new Surface(surfaceTexture);
            surfaceTexture.setOnFrameAvailableListener(new SurfaceTexture.OnFrameAvailableListener() {
                @Override
                public void onFrameAvailable(SurfaceTexture surfaceTexture) {
                    synchronized (lock) {
                        updateSurface = true;
                    }
                }
            });
            mediaPlayer.setSurface(surface);

            // If start() was called before the surface existed, start playback now.
            if (videoPath != null && !videoPath.isEmpty() && !mediaPlayer.isPlaying()) {
                start(videoPath);
            }
        }

        @Override
        public void onSurfaceChanged(GL10 gl, int width, int height) {
            Logger.d("onSurfaceChanged");
            GLES20.glViewport(0, 0, width, height);
            GLES20.glClearColor(1.0f, 1.0f, 1.0f, 1.0f);

            quadVertices = ByteBuffer.allocateDirect(QUAD_COORDS.length * FLOAT_SIZE)
                    .order(ByteOrder.nativeOrder()).asFloatBuffer();
            quadVertices.put(QUAD_COORDS);
            quadVertices.position(0);

            // centerCrop: shrink the sampled texture region on the axis that overflows.
            // The video ratio is hard-coded to 16:9 here; replace it with the real
            // video dimensions if they differ.
            float videoRatio = 16.0F / 9;
            Log.d(TAG, "width:" + width + "  height:" + height);
            float viewRatio = ((float) width) / height;
            Log.d(TAG, "videoRatio:" + videoRatio + "  viewRatio:" + viewRatio);
            if (viewRatio < videoRatio) {
                // View is narrower than the video: crop left and right.
                float s = (1 - (9 / 16.F * viewRatio)) / 2.0F;
                float[] texCoord = {
                        0.0f + s, 1.0f,
                        1.0f - s, 1.0f,
                        1.0f - s, 0.0f,
                        0.0f + s, 0.0f
                };
                Log.d(TAG, Arrays.toString(texCoord));
                quadTexCoord = ByteBuffer.allocateDirect(texCoord.length * FLOAT_SIZE)
                        .order(ByteOrder.nativeOrder()).asFloatBuffer();
                quadTexCoord.put(texCoord);
                quadTexCoord.position(0);
            } else if (viewRatio > videoRatio) {
                // View is wider than the video: crop top and bottom.
                float s = (1 - (16 / (9 * viewRatio))) / 2.0F;
                float[] texCoord = {
                        0.0f, 1.0f - s,
                        1.0f, 1.0f - s,
                        1.0f, 0.0f + s,
                        0.0f, 0.0f + s
                };
                Log.d(TAG, Arrays.toString(texCoord));
                quadTexCoord = ByteBuffer.allocateDirect(texCoord.length * FLOAT_SIZE)
                        .order(ByteOrder.nativeOrder()).asFloatBuffer();
                quadTexCoord.put(texCoord);
                quadTexCoord.position(0);
            } else {
                // Ratios match: use the full texture.
                quadTexCoord = ByteBuffer.allocateDirect(quadTexCoords.length * FLOAT_SIZE)
                        .order(ByteOrder.nativeOrder()).asFloatBuffer();
                quadTexCoord.put(quadTexCoords);
                quadTexCoord.position(0);
            }

            shortBuffer = ByteBuffer.allocateDirect(index.length * 2)
                    .order(ByteOrder.nativeOrder()).asShortBuffer();
            shortBuffer.put(index);
            shortBuffer.position(0);
        }

        @Override
        public void onDrawFrame(GL10 gl) {
            // Pull the latest video frame into the OES texture if one is available.
            synchronized (lock) {
                if (updateSurface) {
                    surfaceTexture.updateTexImage();
                    updateSurface = false;
                }
            }
            GLES20.glUseProgram(quadProgram);

            // Set the vertex positions.
            quadVertices.position(0);
            GLES20.glEnableVertexAttribArray(quadPositionParam);
            GLES20.glVertexAttribPointer(quadPositionParam, COORDS_PER_VERTEX,
                    GLES20.GL_FLOAT, false, 0, quadVertices);

            // Set the texture coordinates.
            quadTexCoord.position(0);
            GLES20.glEnableVertexAttribArray(quadTexCoordParam);
            GLES20.glVertexAttribPointer(quadTexCoordParam, TEXCOORDS_PER_VERTEX,
                    GLES20.GL_FLOAT, false, 0, quadTexCoord);

            GLES20.glActiveTexture(GLES20.GL_TEXTURE0);
            GLES20.glBindTexture(GLES11Ext.GL_TEXTURE_EXTERNAL_OES, textureId);
            GLES20.glUniform1i(uTextureSamplerLocation, 0);
            GLES20.glDrawElements(GLES20.GL_TRIANGLES, index.length,
                    GLES20.GL_UNSIGNED_SHORT, shortBuffer);

            // Disable vertex arrays.
            GLES20.glDisableVertexAttribArray(quadPositionParam);
            GLES20.glDisableVertexAttribArray(quadTexCoordParam);
        }
    }
}
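
Below is a minimal usage sketch, assuming ClipVideoView has been added to the Activity's layout; the layout resource, view id, and video path are hypothetical placeholders.

import android.app.Activity;
import android.os.Bundle;

public class VideoActivity extends Activity {

    private ClipVideoView clipVideoView;

    @Override
    protected void onCreate(Bundle savedInstanceState) {
        super.onCreate(savedInstanceState);
        setContentView(R.layout.activity_video);                             // hypothetical layout
        clipVideoView = (ClipVideoView) findViewById(R.id.clip_video_view);  // hypothetical id
        // If the GL surface is not ready yet, start() just remembers the path
        // and playback begins from onSurfaceCreated().
        clipVideoView.start("/sdcard/demo.mp4");                             // hypothetical path
    }

    @Override
    protected void onResume() {
        super.onResume();
        clipVideoView.onResume();   // resumes the GL thread and playback
    }

    @Override
    protected void onPause() {
        clipVideoView.onPause();    // pauses playback and the GL thread
        super.onPause();
    }

    @Override
    protected void onDestroy() {
        clipVideoView.release();    // stops and releases the MediaPlayer
        super.onDestroy();
    }
}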

base_vertex.glsl file:

attribute vec4 a_Position;
attribute vec2 a_TexCoordinate;
varying vec2 v_TexCoord;

void main()
{
    v_TexCoord = a_TexCoordinate;
    gl_Position = a_Position;
}

base_fragment_oes.glsl file (loaded in the renderer as R.raw.base_fragment_oes):

#extension GL_OES_EGL_image_external : require
precision mediump float;
uniform samplerExternalOES u_Texture;
varying vec2 v_TexCoord;

void main()
{
    gl_FragColor = texture2D(u_Texture, v_TexCoord);
}

Both .glsl files go in the res/raw directory.

ShaderUtil:

/*
 * Copyright 2017 Google Inc. All Rights Reserved.
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *   http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package com.jd.lib.armart.arcore.arcore.rendering;

import android.content.Context;
import android.opengl.GLES20;
import android.util.Log;

import java.io.BufferedReader;
import java.io.IOException;
import java.io.InputStream;
import java.io.InputStreamReader;

/**
 * Shader utility class.
 *
 * @author mengqingdong
 * @date 2019-08-14 09:52
 */
public class ShaderUtil {

    /**
     * Converts a raw text file, saved as a resource, into an OpenGL ES shader.
     *
     * @param type  The type of shader we will be creating.
     * @param resId The resource ID of the raw text file about to be turned into a shader.
     * @return The shader object handler.
     */
    public static int loadGLShader(String tag, Context context, int type, int resId) {
        String code = readRawTextFile(context, resId);
        int shader = GLES20.glCreateShader(type);
        GLES20.glShaderSource(shader, code);
        GLES20.glCompileShader(shader);

        // Get the compilation status.
        final int[] compileStatus = new int[1];
        GLES20.glGetShaderiv(shader, GLES20.GL_COMPILE_STATUS, compileStatus, 0);

        // If the compilation failed, delete the shader.
        if (compileStatus[0] == 0) {
            Log.e(tag, "Error compiling shader: " + GLES20.glGetShaderInfoLog(shader));
            GLES20.glDeleteShader(shader);
            shader = 0;
        }
        if (shader == 0) {
            throw new RuntimeException("Error creating shader.");
        }
        return shader;
    }

    /**
     * Checks if we've had an error inside of OpenGL ES, and if so what that error is.
     *
     * @param label Label to report in case of error.
     * @throws RuntimeException If an OpenGL error is detected.
     */
    public static void checkGLError(String tag, String label) {
        int lastError = GLES20.GL_NO_ERROR;
        // Drain the queue of all errors.
        int error;
        while ((error = GLES20.glGetError()) != GLES20.GL_NO_ERROR) {
            Log.e(tag, label + ": glError " + error);
            lastError = error;
        }
        if (lastError != GLES20.GL_NO_ERROR) {
            throw new RuntimeException(label + ": glError " + lastError);
        }
    }

    /**
     * Converts a raw text file into a string.
     *
     * @param resId The resource ID of the raw text file about to be turned into a shader.
     * @return The contents of the text file, or null in case of error.
     */
    private static String readRawTextFile(Context context, int resId) {
        InputStream inputStream = context.getResources().openRawResource(resId);
        try {
            BufferedReader reader = new BufferedReader(new InputStreamReader(inputStream));
            StringBuilder sb = new StringBuilder();
            String line;
            while ((line = reader.readLine()) != null) {
                sb.append(line).append("\n");
            }
            reader.close();
            return sb.toString();
        } catch (IOException e) {
            e.printStackTrace();
        }
        return null;
    }
}

Also add the following to the AndroidManifest.xml file:

<uses-feature android:glEsVersion="0x00020000" android:required="true" />
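
The uses-feature declaration lets app stores filter out devices without OpenGL ES 2.0 at install time. If you also want a runtime guard before creating the view, a minimal sketch looks like this (supportsGlEs2 is a hypothetical helper; ActivityManager and ConfigurationInfo are standard Android APIs):

private boolean supportsGlEs2(Context context) {
    // Query the GL ES version reported by the device.
    ActivityManager activityManager =
            (ActivityManager) context.getSystemService(Context.ACTIVITY_SERVICE);
    ConfigurationInfo configurationInfo = activityManager.getDeviceConfigurationInfo();
    return configurationInfo.reqGlEsVersion >= 0x20000;
}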

Playback here uses the system MediaPlayer; if APK size is not a constraint, ijkplayer is recommended instead.
