看来我在应用程序中使用的MediaPlayer已经开始每16或17帧挂起。

我正在使用GLSurfaceView渲染MediaPlayer解码出的帧。一切原本都以较高的帧率流畅地显示。该应用程序以前运行良好,但从几天前开始,我发现视频每播放16或17帧就会挂起至少500毫秒。

程序代码如下,我使用的设备是Xperia Z1。为了确保不是代码回归导致的问题,我基于教程从头重写了一遍,这种卡顿行为仍然存在。

无论我是否使用lock(即C#中相当于Java的synchronized),也无论是否启用Rendermode.WhenDirty,回放行为都完全没有变化。

该程序只是一个活动和具有此自定义视图的布局。不涉及其他代码。 (顺便说一句,该演示不使用C#标准进行重构,因为这是一个临时代码段,请不要讨论重构。)

// Custom GLSurfaceView that renders MediaPlayer video frames through an
// OpenGL ES 2.0 external (OES) texture fed by a SurfaceTexture.
public class CustomVideoView : GLSurfaceView {

    VideoRender mRenderer;                   // renderer that owns the GL pipeline and the player
    private MediaPlayer mMediaPlayer = null; // NOTE(review): never assigned in the visible code — the renderer creates its own instance in OnSurfaceCreated
    private string filePath = "";            // absolute path of the video file to play
    private Uri uri = null;                  // unused in the visible code
    private Context _context;                // Android context, forwarded to the renderer

    // XML-inflation constructor (used when the view is declared in a layout).
    public CustomVideoView(Context context, IAttributeSet attrs) : base(context, attrs) {
        _context = context;
        init ();
    }

    // NOTE(review): defStyle is accepted but never used — it is silently
    // dropped because only (context, attrs) is forwarded to the base; verify
    // whether that is intentional.
    public CustomVideoView(Context context, IAttributeSet attrs, int defStyle) : base(context, attrs) {
        _context = context;
        init ();
    }

    // Programmatic-creation constructor (no attribute set).
    public CustomVideoView(Context context) : base(context, null) {
        _context = context;
        init ();
    }


    // One-time setup shared by all constructors: configures the EGL surface
    // (GLES 2.0, translucent RGBA8888 with a 16-bit depth buffer) and
    // installs the video renderer.
    public void init() {
        SetEGLContextClientVersion (2);

        Holder.SetFormat (Format.Translucent);
        SetEGLConfigChooser (8, 8, 8, 8, 16, 0);
        // NOTE(review): hard-coded device path; if the file is missing the
        // renderer skips SetDataSource in OnSurfaceCreated.
        filePath = "/storage/sdcard1/download/cat3.mp4";
        // mMediaPlayer is still null here; the renderer creates the actual
        // player later on the GL thread.
        mRenderer = new VideoRender (_context, mMediaPlayer, filePath, false, this);
        SetRenderer (mRenderer);
        //RenderMode = Rendermode.WhenDirty;
    }

    // Pass-through lifecycle overrides; kept only as explicit hook points.
    public override void OnResume() {
        base.OnResume ();
    }

    public override void OnPause() {
        base.OnPause ();
    }

    // View is being removed from the window: stop and release the player.
    protected override void OnDetachedFromWindow() {
        // TODO Auto-generated method stub
        base.OnDetachedFromWindow ();

        // NOTE(review): mMediaPlayer on this class is never assigned in the
        // visible code (the renderer creates its own instance in
        // OnSurfaceCreated), so this branch appears to never run and the
        // player may leak — verify that the renderer publishes its player
        // back to this field.
        if (mMediaPlayer != null) {
            mMediaPlayer.Stop ();
            mMediaPlayer.Release ();
        }
    }

    // Renderer: owns the GL program, the external (OES) texture the decoder
    // writes into, and the MediaPlayer itself. Implements the frame-available
    // callback so new video frames trigger a texture update on the GL thread.
    private class VideoRender : Java.Lang.Object, GLSurfaceView.IRenderer, SurfaceTexture.IOnFrameAvailableListener {

        private string TAG = "VideoRender";
        private const int FLOAT_SIZE_BYTES = 4;
        // Vertex layouts: positions are tightly packed xyz floats, UVs are packed st floats.
        private const int TRIANGLE_VERTICES_DATA_STRIDE_BYTES = 3 * FLOAT_SIZE_BYTES;
        private const int TEXTURE_VERTICES_DATA_STRIDE_BYTES = 2 * FLOAT_SIZE_BYTES;
        private const int TRIANGLE_VERTICES_DATA_POS_OFFSET = 0;
        private const int TRIANGLE_VERTICES_DATA_UV_OFFSET = 0;
        // Full-screen quad drawn as a 4-vertex triangle strip (xyz per corner).
        private float[] mTriangleVerticesData = { -1.0f, -1.0f, 0, 1.0f,
            -1.0f, 0, -1.0f, 1.0f, 0, 1.0f, 1.0f, 0, };

        // Texture coordinates for the quad's four corners.
        private float[] mTextureVerticesData = { 0.0f, 0.0f, 1.0f, 0.0f, 0.0f, 1.0f, 1.0f, 1.0f };

        private FloatBuffer mTriangleVertices;   // direct buffer handed to glVertexAttribPointer

        // extra
        private FloatBuffer mTextureVertices;    // direct buffer for the UVs

        // Vertex shader: applies the MVP matrix to positions and the
        // SurfaceTexture transform (uSTMatrix) to texture coordinates.
        private string mVertexShader = "uniform mat4 uMVPMatrix;\n"
            + "uniform mat4 uSTMatrix;\n" + "attribute vec4 aPosition;\n"
            + "attribute vec4 aTextureCoord;\n"
            + "varying vec2 vTextureCoord;\n" + "void main() {\n"
            + "  gl_Position = uMVPMatrix * aPosition;\n"
            + "  vTextureCoord = (uSTMatrix * aTextureCoord).xy;\n" + "}\n";

        // Fragment shader: samples the external OES texture the decoder
        // renders video frames into.
        private string mFragmentShader = "#extension GL_OES_EGL_image_external : require\n"
            + "precision mediump float;\n"
            + "varying vec2 vTextureCoord;\n"
            + "uniform samplerExternalOES sTexture;\n"
            + "void main() {\n"
            + "  gl_FragColor = texture2D(sTexture, vTextureCoord);\n"
            + "}\n";

        private float[] mMVPMatrix = new float[16];       // model-view-projection; reset to identity every frame
        private float[] mSTMatrix = new float[16];        // SurfaceTexture's per-frame texture transform
        private float[] projectionMatrix = new float[16]; // written in OnSurfaceChanged but never read

        // GL handles, all resolved in OnSurfaceCreated.
        private int mProgram;
        private int mTextureID;
        private int muMVPMatrixHandle;
        private int muSTMatrixHandle;
        private int maPositionHandle;
        private int maTextureHandle;

        private SurfaceTexture mSurface;          // receives decoded frames from the MediaPlayer
        private bool updateSurface = false;       // set by OnFrameAvailable, consumed by OnDrawFrame (guarded by syncLock)
        private MediaPlayer mMediaPlayer;         // replaced by a fresh instance in OnSurfaceCreated
        private string _filePath;
        private bool _isStreaming = false;        // streaming path is not implemented
        private Context _context;
        private CustomVideoView _customVideoView; // owner view, used to request renders

        // Texture target value of GL_TEXTURE_EXTERNAL_OES as defined by the
        // OES_EGL_image_external extension.
        private int GL_TEXTURE_EXTERNAL_OES = 0x8D65;

        // Captures configuration and pre-builds the direct, native-byte-order
        // vertex/UV buffers. No GL or MediaPlayer work happens here — that
        // must wait for OnSurfaceCreated on the GL thread.
        public VideoRender(Context context, MediaPlayer mediaPlayer, string filePath, bool isStreaming, CustomVideoView customVideoView) {
            _customVideoView = customVideoView;
            _filePath = filePath;
            _isStreaming = isStreaming;
            _context = context;
            mMediaPlayer = mediaPlayer;

            // GLES requires vertex data in direct buffers using the
            // platform's native byte order.
            mTriangleVertices = ByteBuffer
                .AllocateDirect(
                    mTriangleVerticesData.Length * FLOAT_SIZE_BYTES)
                .Order(ByteOrder.NativeOrder()).AsFloatBuffer();
            mTriangleVertices.Put(mTriangleVerticesData).Position(0);

            // extra
            mTextureVertices = ByteBuffer
                .AllocateDirect(mTextureVerticesData.Length * FLOAT_SIZE_BYTES)
                .Order(ByteOrder.NativeOrder()).AsFloatBuffer();

            mTextureVertices.Put(mTextureVerticesData).Position(0);

            // Identity texture transform until the first decoded frame
            // supplies a real one via GetTransformMatrix.
            Android.Opengl.Matrix.SetIdentityM(mSTMatrix, 0);
        }

        // GL-thread per-frame callback: latches the newest decoded video
        // frame into the external texture (if one arrived since last draw)
        // and renders a full-screen textured quad with it.
        public void OnDrawFrame(Javax.Microedition.Khronos.Opengles.IGL10 glUnused) {

            lock (syncLock) {
                if (updateSurface) {
                    // Pull the most recent frame from the decoder and fetch
                    // the matching texture-coordinate transform.
                    mSurface.UpdateTexImage ();
                    mSurface.GetTransformMatrix (mSTMatrix);
                    updateSurface = false;
                }
            }

            // FIX: clear-color components are clamped to [0, 1], so the
            // original 255.0f values were clamped to 1.0f anyway; use the
            // canonical value (opaque white).
            GLES20.GlClearColor (1.0f, 1.0f, 1.0f, 1.0f);
            GLES20.GlClear (GLES20.GlDepthBufferBit
                | GLES20.GlColorBufferBit);

            GLES20.GlUseProgram (mProgram);
            checkGlError ("glUseProgram");

            GLES20.GlActiveTexture (GLES20.GlTexture0);
            GLES20.GlBindTexture (GL_TEXTURE_EXTERNAL_OES, mTextureID);

            // Position attribute: 3 floats per vertex, tightly packed.
            mTriangleVertices.Position (TRIANGLE_VERTICES_DATA_POS_OFFSET);
            GLES20.GlVertexAttribPointer (maPositionHandle, 3, GLES20.GlFloat,
                false, TRIANGLE_VERTICES_DATA_STRIDE_BYTES,
                mTriangleVertices);
            checkGlError ("glVertexAttribPointer maPosition");
            GLES20.GlEnableVertexAttribArray (maPositionHandle);
            checkGlError ("glEnableVertexAttribArray maPositionHandle");

            // Texture-coordinate attribute: 2 floats per vertex.
            mTextureVertices.Position (TRIANGLE_VERTICES_DATA_UV_OFFSET);
            GLES20.GlVertexAttribPointer (maTextureHandle, 2, GLES20.GlFloat,
                false, TEXTURE_VERTICES_DATA_STRIDE_BYTES, mTextureVertices);

            checkGlError ("glVertexAttribPointer maTextureHandle");
            GLES20.GlEnableVertexAttribArray (maTextureHandle);
            checkGlError ("glEnableVertexAttribArray maTextureHandle");

            // Identity MVP: the quad already spans clip space.
            Android.Opengl.Matrix.SetIdentityM (mMVPMatrix, 0);

            GLES20.GlUniformMatrix4fv (muMVPMatrixHandle, 1, false, mMVPMatrix,
                0);
            GLES20.GlUniformMatrix4fv (muSTMatrixHandle, 1, false, mSTMatrix, 0);

            GLES20.GlDrawArrays (GLES20.GlTriangleStrip, 0, 4);
            checkGlError ("glDrawArrays");

            // FIX: removed the trailing GlFinish(). glFinish blocks this
            // thread until the GPU has fully completed rendering, which
            // periodically stalls playback; GLSurfaceView already calls
            // eglSwapBuffers() after OnDrawFrame returns, which provides all
            // the synchronization needed here.
        }

        // Surface dimensions changed: update the GL viewport.
        public void OnSurfaceChanged(Javax.Microedition.Khronos.Opengles.IGL10 glUnused, int width, int height) {

            GLES20.GlViewport (0, 0, width, height);

            // NOTE(review): projectionMatrix is computed here but never read
            // anywhere in this class — OnDrawFrame uses an identity MVP.
            Android.Opengl.Matrix.FrustumM (projectionMatrix, 0, -1.0f, 1.0f, -1.0f, 1.0f,
                1.0f, 10.0f);

        }


        // GL context (re)created: compile and link the shader program,
        // resolve attribute/uniform handles, create the external texture and
        // the SurfaceTexture the decoder renders into, then set up and start
        // the MediaPlayer.
        public void OnSurfaceCreated(Javax.Microedition.Khronos.Opengles.IGL10 gl,Javax.Microedition.Khronos.Egl.EGLConfig config) {

            mProgram = createProgram (mVertexShader, mFragmentShader);
            if (mProgram == 0) {
                return;
            }
            maPositionHandle = GLES20
                .GlGetAttribLocation (mProgram, "aPosition");
            checkGlError ("glGetAttribLocation aPosition");
            if (maPositionHandle == -1) {
                throw new RuntimeException (
                    "Could not get attrib location for aPosition");
            }
            maTextureHandle = GLES20.GlGetAttribLocation (mProgram,
                "aTextureCoord");
            checkGlError ("glGetAttribLocation aTextureCoord");
            if (maTextureHandle == -1) {
                throw new RuntimeException (
                    "Could not get attrib location for aTextureCoord");
            }

            muMVPMatrixHandle = GLES20.GlGetUniformLocation (mProgram,
                "uMVPMatrix");
            checkGlError ("glGetUniformLocation uMVPMatrix");
            if (muMVPMatrixHandle == -1) {
                throw new RuntimeException (
                    "Could not get attrib location for uMVPMatrix");
            }

            muSTMatrixHandle = GLES20.GlGetUniformLocation (mProgram,
                "uSTMatrix");
            checkGlError ("glGetUniformLocation uSTMatrix");
            if (muSTMatrixHandle == -1) {
                throw new RuntimeException (
                    "Could not get attrib location for uSTMatrix");
            }

            // External (OES) texture that receives decoded video frames.
            int[] textures = new int[1];
            GLES20.GlGenTextures (1, textures, 0);

            mTextureID = textures [0];
            GLES20.GlBindTexture (GL_TEXTURE_EXTERNAL_OES, mTextureID);
            checkGlError ("glBindTexture mTextureID");

            GLES20.GlTexParameterf (GL_TEXTURE_EXTERNAL_OES,
                GLES20.GlTextureMinFilter, GLES20.GlNearest);
            GLES20.GlTexParameterf (GL_TEXTURE_EXTERNAL_OES,
                GLES20.GlTextureMagFilter, GLES20.GlLinear);

            mSurface = new SurfaceTexture (mTextureID);
            // Subscribe via the Xamarin event instead of the listener
            // interface; forwards to OnFrameAvailable.
            mSurface.FrameAvailable += (object sender, SurfaceTexture.FrameAvailableEventArgs e) => {
                OnFrameAvailable(e.SurfaceTexture);
            };
            Surface surface = new Surface (mSurface);

            mMediaPlayer = new MediaPlayer ();
            // FIX: publish the player to the owning view so its
            // OnDetachedFromWindow cleanup (Stop/Release) sees a non-null
            // player; previously the view's field stayed null forever and the
            // player leaked.
            _customVideoView.mMediaPlayer = mMediaPlayer;

            // Track whether a data source was actually set so we never call
            // Prepare/Start on an unconfigured player.
            bool sourceSet = false;
            if (System.IO.File.Exists(_filePath)) {
                try {
                    if (!_isStreaming) {
                        mMediaPlayer.SetDataSource (_filePath);
                        sourceSet = true;
                    } else {
                        throw new System.NotImplementedException();
                        //mMediaPlayer.SetDataSource (_context, new Uri.Builder().AppendPath(_filePath));
                    }

                } catch (IllegalArgumentException e) {
                    e.PrintStackTrace ();
                } catch (SecurityException e) {
                    e.PrintStackTrace ();
                } catch (IllegalStateException e) {
                    e.PrintStackTrace ();
                } catch (IOException e) {
                    e.PrintStackTrace ();
                }
            }

            mMediaPlayer.SetSurface (surface);
            // The Java Surface wrapper can be released once handed to the
            // player; the player keeps its own reference.
            surface.Release ();

            // FIX: Prepare()/Start() previously ran unconditionally, which
            // throws IllegalStateException when the file is missing or
            // SetDataSource failed. Also catch IllegalStateException from
            // Prepare, not just IOException.
            bool prepared = false;
            if (sourceSet) {
                try {
                    mMediaPlayer.Prepare ();
                    prepared = true;
                } catch (IOException) {
                    Log.Error (TAG, "media player prepare failed");
                } catch (IllegalStateException) {
                    Log.Error (TAG, "media player prepare failed");
                }
            }

            // Reset the frame flag: any FrameAvailable signal from a previous
            // context is stale now.
            lock (syncLock) {
                updateSurface = false;
            }

            if (prepared) {
                mMediaPlayer.Start ();
            }

        }

        // Guards updateSurface, which is written from the decoder's callback
        // thread and read on the GL thread.
        private readonly object syncLock = new object ();

        // Decoder callback: a new video frame is ready. Flag it for
        // OnDrawFrame and ask the view to schedule a render pass (required
        // when the view runs in WhenDirty render mode; harmless in the
        // default continuous mode).
        public void OnFrameAvailable(SurfaceTexture surface) {
            lock (syncLock) {
                updateSurface = true;
            }

            _customVideoView.RequestRender ();
        }

        // Compiles a single shader of the given type from source.
        // Returns the GL shader handle, or 0 if creation or compilation failed
        // (the failure is logged and the shader object deleted).
        private int loadShader(int shaderType, string source) {
            int handle = GLES20.GlCreateShader (shaderType);
            if (handle == 0) {
                return 0;
            }

            GLES20.GlShaderSource (handle, source);
            GLES20.GlCompileShader (handle);

            int[] status = new int[1];
            GLES20.GlGetShaderiv (handle, GLES20.GlCompileStatus, status, 0);
            if (status [0] != 0) {
                return handle;
            }

            // Compilation failed: report the driver's info log and clean up.
            Log.Error (TAG, "Could not compile shader " + shaderType + ":");
            Log.Error (TAG, GLES20.GlGetShaderInfoLog (handle));
            GLES20.GlDeleteShader (handle);
            return 0;
        }

        // Builds and links a GLES program from the given shader sources.
        // Returns the program handle, or 0 when compiling either shader or
        // linking the program fails (failures are logged via loadShader /
        // the link info log).
        private int createProgram(string vertexSource, string fragmentSource) {
            int vs = loadShader (GLES20.GlVertexShader, vertexSource);
            if (vs == 0) {
                return 0;
            }

            int fs = loadShader (GLES20.GlFragmentShader,
                fragmentSource);
            if (fs == 0) {
                return 0;
            }

            int prog = GLES20.GlCreateProgram ();
            if (prog == 0) {
                return 0;
            }

            GLES20.GlAttachShader (prog, vs);
            checkGlError ("glAttachShader");
            GLES20.GlAttachShader (prog, fs);
            checkGlError ("glAttachShader");
            GLES20.GlLinkProgram (prog);

            int[] linked = new int[1];
            GLES20.GlGetProgramiv (prog, GLES20.GlLinkStatus,
                linked, 0);
            if (linked [0] == GLES20.GlTrue) {
                return prog;
            }

            // Link failed: report the info log and release the program object.
            Log.Error (TAG, "Could not link program: ");
            Log.Error (TAG, GLES20.GlGetProgramInfoLog (prog));
            GLES20.GlDeleteProgram (prog);
            return 0;
        }

        // Polls the GL error state after the operation named by `op`; logs
        // and throws on the first error so failures surface immediately at
        // the offending call site.
        private void checkGlError(string op) {
            for (int err = GLES20.GlGetError (); err != GLES20.GlNoError; err = GLES20.GlGetError ()) {
                Log.Error (TAG, op + ": glError " + err);
                throw new RuntimeException (op + ": glError " + err);
            }
        }
    }
}


调试时,下面是计算出的整体FPS、按该帧率播放时一帧理论上应持续的时长(norm),以及实际持续的时长(cur)。可以看到卡顿的长度,最终大约为320毫秒。

[fps] 15.0627 - norm=66 - cur=44.712
[fps] 15.09347 - norm=66 - cur=45.017
[fps] 15.12472 - norm=66 - cur=44.437
[fps] 15.17346 - norm=65 - cur=32.413
[fps] 15.20476 - norm=65 - cur=44.01
[fps] 15.2337 - norm=65 - cur=45.506
[fps] 15.26154 - norm=65 - cur=46.177
[fps] 14.8815 - norm=67 - cur=334.503
[fps] 14.93206 - norm=66 - cur=29.971
[fps] 14.96286 - norm=66 - cur=44.071
[fps] 14.99153 - norm=66 - cur=45.445
[fps] 15.03538 - norm=66 - cur=34.213
[fps] 15.0695 - norm=66 - cur=41.142
[fps] 15.09754 - norm=66 - cur=44.468
[fps] 15.12501 - norm=66 - cur=45.628
[fps] 15.17139 - norm=65 - cur=31.558
[fps] 15.20057 - norm=65 - cur=44.01
[fps] 15.22785 - norm=65 - cur=45.231
[fps] 15.25471 - norm=65 - cur=45.384
[fps] 15.30203 - norm=65 - cur=30.093
[fps] 15.32664 - norm=65 - cur=46.636
[fps] 15.35203 - norm=65 - cur=45.933
[fps] 15.37996 - norm=65 - cur=44.041
[fps] 15.42686 - norm=64 - cur=29.3
[fps] 15.47278 - norm=64 - cur=30.001
[fps] 15.49799 - norm=64 - cur=45.384


[编辑]

重新启动手机可以解决此问题。因此,要么是手机本身RAM或存储空间不足,要么是我的示例代码写得有问题,导致CPU负载过高。

最佳答案

使用普通的SurfaceView并自己处理EGL设置和线程管理来完成此操作可能会更容易。如果只是把视频帧直接绘制(blit)到屏幕上,那么专用的渲染线程几乎没有什么价值。(有关示例,请参见Grafika。)

如果您坚持使用GLSurfaceView,则既不需要也不应该在onDrawFrame()末尾调用glFinish()。这是一个同步调用,会让您的线程一直阻塞到GLES完成全部绘制为止。onDrawFrame()返回后,GLSurfaceView会自动调用eglSwapBuffers()。

无论如何,GLES不可能导致300ms的失速。 MediaPlayer停滞了,或者系统中的其他东西正在唤醒并消耗了所有可用的CPU资源。如果要进一步调试,可以尝试systrace

关于android - 在GlSurfaceView上播放的视频每16或17帧挂起500毫秒,我们在Stack Overflow上找到一个类似的问题:https://stackoverflow.com/questions/31103927/

10-12 02:36