VideoDumpView.java revision ea0bad0574451212591841ba84f477ecc216003a
/*
 * Copyright (C) 2011 The Android Open Source Project
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

package com.android.mediadump;

import java.io.IOException;
import java.io.BufferedOutputStream;
import java.io.BufferedWriter;
import java.io.File;
import java.io.FileWriter;
import java.io.FilenameFilter;
import java.io.FileOutputStream;

import java.lang.Integer;
import java.lang.Math;
import java.nio.ByteBuffer;
import java.nio.ByteOrder;
import java.nio.FloatBuffer;
import java.nio.channels.FileChannel;
import java.nio.IntBuffer;
import java.util.Properties;

import javax.microedition.khronos.egl.EGLConfig;
import javax.microedition.khronos.opengles.GL10;

import android.app.Activity;
import android.content.Context;
import android.content.pm.ActivityInfo;
import android.graphics.SurfaceTexture;
import android.media.MediaPlayer;
import android.opengl.GLES20;
import android.opengl.GLSurfaceView;
import android.opengl.GLUtils;
import android.opengl.Matrix;
import android.os.Bundle;
import android.util.Log;
import android.view.MotionEvent;
import android.view.SurfaceHolder;
import android.view.View;
import android.widget.MediaController;
import android.widget.MediaController.MediaPlayerControl;

/**
 * A view to play a video, specified by VideoDumpConfig.VIDEO_URI, and dump the screen
 * into raw RGB files.
 * It uses a renderer to display each video frame on a surface texture, read the pixels
 * back, and write them into an RGB file on the sdcard.
 * Those raw RGB files are used to measure the quality distortion against the original
 * video. They can be viewed with the RgbPlayer app for debugging.
 */
class VideoDumpView extends GLSurfaceView implements MediaPlayerControl {
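    // Illustrative usage sketch (assumed, not part of this file): a host Activity would
    // typically create the view, attach a MediaController, and make it the content view:
    //     VideoDumpView view = new VideoDumpView(activity);
    //     view.setMediaController(new MediaController(activity));
    //     activity.setContentView(view);
    // Playback setup and teardown then follow the Activity through onResume()/onPause().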
    private static final String TAG = "VideoDumpView";
    VideoDumpRenderer mRenderer;
    private MediaController mMediaController;
    private boolean mMediaControllerAttached = false;
    private MediaPlayer mMediaPlayer = null;
    private BufferedWriter mImageListWriter = null;

    // A series of configuration constants.
    class VideoDumpConfig {
        // Currently we are running with a local copy of the video.
        // It should work with a "http://" style streaming url as well.
        public static final String VIDEO_URI = "/sdcard/mediadump/sample.mp4";
        public static final String ROOT_DIR = "/sdcard/mediadump/";
        public static final String IMAGES_LIST = "images.lst";
        public static final String IMAGE_PREFIX = "img";
        public static final String IMAGE_SUFFIX = ".rgb";
        public static final String PROPERTY_FILE = "prop.xml";

        // So far, glReadPixels only supports two (format, type) combinations:
        //     GL_RGB  GL_UNSIGNED_SHORT_5_6_5   16 bits per pixel (default)
        //     GL_RGBA GL_UNSIGNED_BYTE          32 bits per pixel
        public static final int PIXEL_FORMAT = GLES20.GL_RGB;
        public static final int PIXEL_TYPE = PIXEL_FORMAT == GLES20.GL_RGBA
                ? GLES20.GL_UNSIGNED_BYTE : GLES20.GL_UNSIGNED_SHORT_5_6_5;
        public static final int BYTES_PER_PIXEL =
                PIXEL_FORMAT == GLES20.GL_RGBA ? 4 : 2;
        public static final boolean SET_CHOOSER = PIXEL_FORMAT == GLES20.GL_RGBA;

        // On Motorola Xoom, it takes 100ms to read pixels and 180ms to write to a file
        // to dump a complete 720p (1280*720) video frame. That is much slower than the
        // frame playback interval (40ms), so we only dump a center block, which should
        // still catch all the end-to-end distortion. A reasonable block size is 256x256,
        // which takes 4ms to read pixels and 25ms to write to a file.
        public static final int MAX_DUMP_WIDTH = 256;
        public static final int MAX_DUMP_HEIGHT = 256;
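        // With the defaults above (GL_RGB / GL_UNSIGNED_SHORT_5_6_5, 2 bytes per pixel),
        // each dumped 256x256 block is 256 * 256 * 2 = 131072 bytes (128 KB), i.e. about
        // 3 MB/s of raw output at the 25 fps frame rate assumed below.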

        // TODO: MediaPlayer doesn't report the video frame rate, so we'll need to figure
        // it out by dividing the total number of frames by the duration.
        public static final int FRAME_RATE = 25;
    }

    public VideoDumpView(Context context) {
        super(context);
        setEGLContextClientVersion(2);
        // GLSurfaceView uses RGB_5_6_5 by default.
        if (VideoDumpConfig.SET_CHOOSER) {
            setEGLConfigChooser(8, 8, 8, 8, 8, 8);
        }
        mRenderer = new VideoDumpRenderer(context);
        setRenderer(mRenderer);
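        // The default render mode (RENDERMODE_CONTINUOUSLY) is kept, so onDrawFrame()
        // is called repeatedly; the renderer only dumps pixels when a new video frame
        // has actually arrived (see updateSurface in VideoDumpRenderer).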
    }

    @Override
    public void onPause() {
        stopPlayback();
        super.onPause();
    }

    @Override
    public void onResume() {
        Log.d(TAG, "onResume");

        mMediaPlayer = new MediaPlayer();
        try {
            mMediaPlayer.setDataSource(VideoDumpConfig.VIDEO_URI);

            // Purge the dump files left over from a previous run.
            class RGBFilter implements FilenameFilter {
                public boolean accept(File dir, String name) {
                    return (name.endsWith(VideoDumpConfig.IMAGE_SUFFIX));
                }
            }
            File dump_dir = new File(VideoDumpConfig.ROOT_DIR);
            File[] dump_files = dump_dir.listFiles(new RGBFilter());
            // listFiles() returns null if the directory is missing; skip the purge then.
            if (dump_files != null) {
                for (File dump_file : dump_files) {
                    dump_file.delete();
                }
            }

            File image_list = new File(VideoDumpConfig.ROOT_DIR
                                       + VideoDumpConfig.IMAGES_LIST);
            image_list.delete();
            mImageListWriter = new BufferedWriter(new FileWriter(image_list));
        } catch (java.io.IOException e) {
            Log.e(TAG, e.getMessage(), e);
        }

        queueEvent(new Runnable() {
                public void run() {
                    mRenderer.setMediaPlayer(mMediaPlayer);
                    mRenderer.setImageListWriter(mImageListWriter);
                }});

        super.onResume();
    }

    public void start() {
        mMediaPlayer.start();
    }

    public void pause() {
        mMediaPlayer.pause();
        try {
            mImageListWriter.flush();
        } catch (java.io.IOException e) {
            Log.e(TAG, e.getMessage(), e);
        }
    }

    public void stopPlayback() {
        Log.d(TAG, "stopPlayback");

        if (mMediaPlayer != null) {
            mMediaPlayer.stop();
            mMediaPlayer.release();
            mMediaPlayer = null;
        }
        if (mImageListWriter != null) {
            try {
                mImageListWriter.flush();
                mImageListWriter.close();
            } catch (java.io.IOException e) {
                Log.e(TAG, e.getMessage(), e);
            }
        } else {
            Log.d(TAG, "image list file was not written successfully.");
        }
    }

    public void setMediaController(MediaController controller) {
        if (mMediaController != null) {
            mMediaController.hide();
        }
        mMediaController = controller;
    }

    private void attachMediaController() {
        if (mMediaPlayer != null && mMediaController != null) {
            if (!mMediaControllerAttached) {
                mMediaController.setMediaPlayer(this);
                View anchorView = this.getParent() instanceof View ?
                        (View)this.getParent() : this;
                mMediaController.setAnchorView(anchorView);
                mMediaController.setEnabled(true);
                mMediaControllerAttached = true;
            }
            mMediaController.show();
        }
    }

    private boolean isInPlaybackState() {
        return (mMediaPlayer != null && mMediaPlayer.isPlaying());
    }

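    // MediaPlayerControl implementation; these callbacks back the MediaController
    // widget attached in attachMediaController().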
    public boolean canPause() {
        return true;
    }

    public boolean canSeekBackward() {
        return true;
    }

    public boolean canSeekForward() {
        return true;
    }

    public int getBufferPercentage() {
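        // Buffering progress is not tracked by this test tool; return a nominal value.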
        return 1;
    }

    public int getCurrentPosition() {
        if (isInPlaybackState()) {
            return mMediaPlayer.getCurrentPosition();
        }
        return 0;
    }

    public int getDuration() {
        return mMediaPlayer.getDuration();
    }

    public boolean isPlaying() {
        return isInPlaybackState() && mMediaPlayer.isPlaying();
    }

    public void seekTo(int pos) {
        mMediaPlayer.seekTo(pos);
    }

    @Override
    public boolean onTouchEvent(MotionEvent ev) {
        attachMediaController();
        return true;
    }

    /**
     * A renderer that reads each video frame from a media player, draws it onto a surface
     * texture, dumps the on-screen pixels into a buffer, and then writes those pixels into
     * an RGB file on the sdcard.
     */
    private static class VideoDumpRenderer
        implements GLSurfaceView.Renderer, SurfaceTexture.OnFrameAvailableListener {
        private static String TAG = "VideoDumpRenderer";

        /* All GL related fields from
         * http://developer.android.com/resources/samples/ApiDemos/src/com/example
         * /android/apis/graphics/GLES20TriangleRenderer.html
         */
        private static final int FLOAT_SIZE_BYTES = 4;
        private static final int TRIANGLE_VERTICES_DATA_STRIDE_BYTES = 5 * FLOAT_SIZE_BYTES;
        private static final int TRIANGLE_VERTICES_DATA_POS_OFFSET = 0;
        private static final int TRIANGLE_VERTICES_DATA_UV_OFFSET = 3;
        private final float[] mTriangleVerticesData = {
            // X, Y, Z, U, V
            -1.0f, -1.0f, 0, 0.f, 0.f,
            1.0f, -1.0f, 0, 1.f, 0.f,
            -1.0f,  1.0f, 0, 0.f, 1.f,
            1.0f,  1.0f, 0, 1.f, 1.f,
        };
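        // Four vertices of a full-screen quad (drawn as a triangle strip), with the
        // position (X, Y, Z) and texture coordinate (U, V) interleaved per vertex.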

        private FloatBuffer mTriangleVertices;

        private final String mVertexShader =
                "uniform mat4 uMVPMatrix;\n" +
                "uniform mat4 uSTMatrix;\n" +
                "attribute vec4 aPosition;\n" +
                "attribute vec4 aTextureCoord;\n" +
                "varying vec2 vTextureCoord;\n" +
                "void main() {\n" +
                "  gl_Position = uMVPMatrix * aPosition;\n" +
                "  vTextureCoord = (uSTMatrix * aTextureCoord).xy;\n" +
                "}\n";

        private final String mFragmentShader =
                "#extension GL_OES_EGL_image_external : require\n" +
                "precision mediump float;\n" +
                "varying vec2 vTextureCoord;\n" +
                "uniform samplerExternalOES sTexture;\n" +
                "void main() {\n" +
                "  gl_FragColor = texture2D(sTexture, vTextureCoord);\n" +
                "}\n";

        private float[] mMVPMatrix = new float[16];
        private float[] mSTMatrix = new float[16];

        private int mProgram;
        private int mTextureID;
        private int muMVPMatrixHandle;
        private int muSTMatrixHandle;
        private int maPositionHandle;
        private int maTextureHandle;

        private SurfaceTexture mSurface;
        private boolean updateSurface = false;

        // GL_TEXTURE_EXTERNAL_OES, the texture target used for SurfaceTexture frames;
        // it is not exposed by the GLES20 class, so the constant is defined here.
        private static int GL_TEXTURE_EXTERNAL_OES = 0x8D65;


        /**
         * Fields that track the video source and the dump output.
         */
        // The media player that loads and decodes the video.
        // Not owned by this class.
        private MediaPlayer mMediaPlayer;
        // The frame number from the media player.
        private int mFrameNumber = 0;
        // The frame number that is being drawn on screen.
        private int mDrawNumber = 0;
        // The width and height of the dumping block.
        private int mWidth = 0;
        private int mHeight = 0;
        // The offset of the dumping block.
        private int mStartX = 0;
        private int mStartY = 0;
        // A buffer to hold the dumped pixels.
        private ByteBuffer mBuffer = null;
        // A file writer to write the filenames of the images.
        private BufferedWriter mImageListWriter;

        public VideoDumpRenderer(Context context) {
            mTriangleVertices = ByteBuffer.allocateDirect(
                mTriangleVerticesData.length * FLOAT_SIZE_BYTES)
                    .order(ByteOrder.nativeOrder()).asFloatBuffer();
            mTriangleVertices.put(mTriangleVerticesData).position(0);

            Matrix.setIdentityM(mSTMatrix, 0);
        }

        public void setMediaPlayer(MediaPlayer player) {
            mMediaPlayer = player;
        }

        public void setImageListWriter(BufferedWriter imageListWriter) {
            mImageListWriter = imageListWriter;
        }

        /**
         * Called to draw the current frame. Renders the latest video frame as a texture
         * on a full-screen quad and, when a new frame has arrived, dumps a block of the
         * rendered pixels to a file.
         */
        public void onDrawFrame(GL10 glUnused) {
            boolean isNewFrame = false;
            int frameNumber = 0;

            synchronized(this) {
                if (updateSurface) {
                    isNewFrame = true;
                    frameNumber = mFrameNumber;
                    mSurface.updateTexImage();
                    mSurface.getTransformMatrix(mSTMatrix);
                    updateSurface = false;
                }
            }

            // Initial clear.
            GLES20.glClearColor(0.0f, 1.0f, 0.0f, 1.0f);
            GLES20.glClear(GLES20.GL_DEPTH_BUFFER_BIT | GLES20.GL_COLOR_BUFFER_BIT);

            // Load the program, which contains the basic rules for drawing the vertices
            // and textures.
            GLES20.glUseProgram(mProgram);
            checkGlError("glUseProgram");

            // Activate the texture.
            GLES20.glActiveTexture(GLES20.GL_TEXTURE0);
            GLES20.glBindTexture(GL_TEXTURE_EXTERNAL_OES, mTextureID);

            // Load the vertex coordinates. Simple here, since it only draws a rectangle
            // that fits the whole screen.
            mTriangleVertices.position(TRIANGLE_VERTICES_DATA_POS_OFFSET);
            GLES20.glVertexAttribPointer(maPositionHandle, 3, GLES20.GL_FLOAT, false,
                TRIANGLE_VERTICES_DATA_STRIDE_BYTES, mTriangleVertices);
            checkGlError("glVertexAttribPointer maPosition");
            GLES20.glEnableVertexAttribArray(maPositionHandle);
            checkGlError("glEnableVertexAttribArray maPositionHandle");

            // Load the texture coordinates, which map a rectangle covering the whole
            // video frame. Each texture coordinate is two floats (U, V).
            mTriangleVertices.position(TRIANGLE_VERTICES_DATA_UV_OFFSET);
            GLES20.glVertexAttribPointer(maTextureHandle, 2, GLES20.GL_FLOAT, false,
                TRIANGLE_VERTICES_DATA_STRIDE_BYTES, mTriangleVertices);
            checkGlError("glVertexAttribPointer maTextureHandle");
            GLES20.glEnableVertexAttribArray(maTextureHandle);
            checkGlError("glEnableVertexAttribArray maTextureHandle");

            // Set up the GL matrices.
            Matrix.setIdentityM(mMVPMatrix, 0);
            GLES20.glUniformMatrix4fv(muMVPMatrixHandle, 1, false, mMVPMatrix, 0);
            GLES20.glUniformMatrix4fv(muSTMatrixHandle, 1, false, mSTMatrix, 0);

            // Draw a rectangle and render the video frame as a texture on it.
            GLES20.glDrawArrays(GLES20.GL_TRIANGLE_STRIP, 0, 4);
            checkGlError("glDrawArrays");
            GLES20.glFinish();
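            // glFinish() makes sure rendering has completed before the pixels are read
            // back below.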

            if (isNewFrame) {  // avoid duplicates.
                Log.d(TAG, mDrawNumber + "/" + frameNumber + " before dumping "
                      + System.currentTimeMillis());
                dumpToFile(frameNumber);
                Log.d(TAG, mDrawNumber + "/" + frameNumber + " after  dumping "
                      + System.currentTimeMillis());

                mDrawNumber++;
            }
        }

        // Call the GL function that dumps the screen into a buffer, then write it to a file.
        private void dumpToFile(int frameNumber) {
            GLES20.glReadPixels(mStartX, mStartY, mWidth, mHeight,
                                VideoDumpConfig.PIXEL_FORMAT,
                                VideoDumpConfig.PIXEL_TYPE,
                                mBuffer);
            checkGlError("glReadPixels");

            Log.d(TAG, mDrawNumber + "/" + frameNumber + " after  glReadPixels "
                  + System.currentTimeMillis());

            String filename = VideoDumpConfig.ROOT_DIR + VideoDumpConfig.IMAGE_PREFIX
                    + frameNumber + VideoDumpConfig.IMAGE_SUFFIX;
            try {
                mImageListWriter.write(filename);
                mImageListWriter.newLine();
                FileOutputStream fos = new FileOutputStream(filename);
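                // mBuffer was created with ByteBuffer.allocate(), so it is backed by an
                // accessible byte[]; array() returns the pixel data just read back.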
                fos.write(mBuffer.array());
                fos.close();
            } catch (java.io.IOException e) {
                Log.e(TAG, e.getMessage(), e);
            }
        }

        /**
         * Called when the surface changed size.
         * Called after the surface is created and whenever the OpenGL surface size changes.
         */
        public void onSurfaceChanged(GL10 glUnused, int width, int height) {
            Log.d(TAG, "Surface size: " + width + "x" + height);

            int video_width = mMediaPlayer.getVideoWidth();
            int video_height = mMediaPlayer.getVideoHeight();
            Log.d(TAG, "Video size: " + video_width
                  + "x" + video_height);

            // TODO: adjust video_width and video_height with the surface size.
            GLES20.glViewport(0, 0, video_width, video_height);

            mWidth = Math.min(VideoDumpConfig.MAX_DUMP_WIDTH, video_width);
            mHeight = Math.min(VideoDumpConfig.MAX_DUMP_HEIGHT, video_height);
            mStartX = video_width / mWidth / 2 * mWidth;
            mStartY = video_height / mHeight / 2 * mHeight;
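            // Align the dump block to a multiple of its own size so it sits roughly in
            // the center of the frame. For example, for a 1280x720 video and a 256x256
            // block: mStartX = 1280 / 256 / 2 * 256 = 512, mStartY = 720 / 256 / 2 * 256 = 256.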

            Log.d(TAG, "dumping block start at (" + mStartX + "," + mStartY + ") "
                  + "size " + mWidth + "x" + mHeight);

            int image_size = mWidth * mHeight * VideoDumpConfig.BYTES_PER_PIXEL;
            mBuffer = ByteBuffer.allocate(image_size);

            int[] bpp = new int[3];
            GLES20.glGetIntegerv(GLES20.GL_RED_BITS, bpp, 0);
            GLES20.glGetIntegerv(GLES20.GL_GREEN_BITS, bpp, 1);
            GLES20.glGetIntegerv(GLES20.GL_BLUE_BITS, bpp, 2);
            Log.d(TAG, "rgb bits: " + bpp[0] + "-" + bpp[1] + "-" + bpp[2]);

            // Save the properties into an xml file
            // so the RgbPlayer can understand the output format.
            Properties prop = new Properties();
            prop.setProperty("width", Integer.toString(mWidth));
            prop.setProperty("height", Integer.toString(mHeight));
            prop.setProperty("startX", Integer.toString(mStartX));
            prop.setProperty("startY", Integer.toString(mStartY));
            prop.setProperty("bytesPerPixel",
                             Integer.toString(VideoDumpConfig.BYTES_PER_PIXEL));
            prop.setProperty("frameRate", Integer.toString(VideoDumpConfig.FRAME_RATE));
            try {
                prop.storeToXML(new FileOutputStream(VideoDumpConfig.ROOT_DIR
                                                     + VideoDumpConfig.PROPERTY_FILE), "");
            } catch (java.io.IOException e) {
                Log.e(TAG, e.getMessage(), e);
            }
        }

        /**
         * Called when the surface is created or recreated.
         * Called when the rendering thread starts and whenever the EGL context is lost.
         * A place to put code that creates resources which need to be created when the
         * rendering starts, and recreated when the EGL context is lost, e.g. textures.
         * Note that when the EGL context is lost, all OpenGL resources associated with
         * that context will be automatically deleted.
         */
        public void onSurfaceCreated(GL10 glUnused, EGLConfig config) {
            Log.d(TAG, "onSurfaceCreated");

            /* Set up shaders and handles to their variables */
            mProgram = createProgram(mVertexShader, mFragmentShader);
            if (mProgram == 0) {
                return;
            }
            maPositionHandle = GLES20.glGetAttribLocation(mProgram, "aPosition");
            checkGlError("glGetAttribLocation aPosition");
            if (maPositionHandle == -1) {
                throw new RuntimeException("Could not get attrib location for aPosition");
            }
            maTextureHandle = GLES20.glGetAttribLocation(mProgram, "aTextureCoord");
            checkGlError("glGetAttribLocation aTextureCoord");
            if (maTextureHandle == -1) {
                throw new RuntimeException("Could not get attrib location for aTextureCoord");
            }

            muMVPMatrixHandle = GLES20.glGetUniformLocation(mProgram, "uMVPMatrix");
            checkGlError("glGetUniformLocation uMVPMatrix");
            if (muMVPMatrixHandle == -1) {
                throw new RuntimeException("Could not get uniform location for uMVPMatrix");
            }

            muSTMatrixHandle = GLES20.glGetUniformLocation(mProgram, "uSTMatrix");
            checkGlError("glGetUniformLocation uSTMatrix");
            if (muSTMatrixHandle == -1) {
                throw new RuntimeException("Could not get uniform location for uSTMatrix");
            }

            // Create our texture. This has to be done each time the surface is created.
            int[] textures = new int[1];
            GLES20.glGenTextures(1, textures, 0);

            mTextureID = textures[0];
            GLES20.glBindTexture(GL_TEXTURE_EXTERNAL_OES, mTextureID);
            checkGlError("glBindTexture mTextureID");

            // Can't do mipmapping with a mediaplayer source.
            GLES20.glTexParameterf(GL_TEXTURE_EXTERNAL_OES, GLES20.GL_TEXTURE_MIN_FILTER,
                                   GLES20.GL_NEAREST);
            GLES20.glTexParameterf(GL_TEXTURE_EXTERNAL_OES, GLES20.GL_TEXTURE_MAG_FILTER,
                                   GLES20.GL_LINEAR);
            // Clamp to edge is the only option.
            GLES20.glTexParameteri(GL_TEXTURE_EXTERNAL_OES, GLES20.GL_TEXTURE_WRAP_S,
                                   GLES20.GL_CLAMP_TO_EDGE);
            GLES20.glTexParameteri(GL_TEXTURE_EXTERNAL_OES, GLES20.GL_TEXTURE_WRAP_T,
                                   GLES20.GL_CLAMP_TO_EDGE);
            checkGlError("glTexParameteri mTextureID");

            /*
             * Create the SurfaceTexture that will feed this textureID,
             * and pass it to the MediaPlayer
             */
            mSurface = new SurfaceTexture(mTextureID);
            mSurface.setOnFrameAvailableListener(this);

            mMediaPlayer.setTexture(mSurface);
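            // Note: setTexture(SurfaceTexture) is the platform API at this revision;
            // later Android releases route SurfaceTexture output through
            // MediaPlayer.setSurface(new Surface(surfaceTexture)) instead.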

            try {
                mMediaPlayer.prepare();
            } catch (IOException e) {
                Log.e(TAG, "media player prepare failed", e);
            }

            synchronized(this) {
                updateSurface = false;
            }
        }

        public synchronized void onFrameAvailable(SurfaceTexture surface) {
            /* For simplicity, SurfaceTexture calls here when it has new
             * data available. The call may come in from some random thread,
             * so let's be safe and use synchronization. No OpenGL calls can be done here.
             */
            mFrameNumber++;
            updateSurface = true;
        }

        private int loadShader(int shaderType, String source) {
            int shader = GLES20.glCreateShader(shaderType);
            if (shader != 0) {
                GLES20.glShaderSource(shader, source);
                GLES20.glCompileShader(shader);
                int[] compiled = new int[1];
                GLES20.glGetShaderiv(shader, GLES20.GL_COMPILE_STATUS, compiled, 0);
                if (compiled[0] == 0) {
                    Log.e(TAG, "Could not compile shader " + shaderType + ":");
                    Log.e(TAG, GLES20.glGetShaderInfoLog(shader));
                    GLES20.glDeleteShader(shader);
                    shader = 0;
                }
            }
            return shader;
        }

        private int createProgram(String vertexSource, String fragmentSource) {
            int vertexShader = loadShader(GLES20.GL_VERTEX_SHADER, vertexSource);
            if (vertexShader == 0) {
                return 0;
            }
            int pixelShader = loadShader(GLES20.GL_FRAGMENT_SHADER, fragmentSource);
            if (pixelShader == 0) {
                return 0;
            }

            int program = GLES20.glCreateProgram();
            if (program != 0) {
                GLES20.glAttachShader(program, vertexShader);
                checkGlError("glAttachShader");
                GLES20.glAttachShader(program, pixelShader);
                checkGlError("glAttachShader");
                GLES20.glLinkProgram(program);
                int[] linkStatus = new int[1];
                GLES20.glGetProgramiv(program, GLES20.GL_LINK_STATUS, linkStatus, 0);
                if (linkStatus[0] != GLES20.GL_TRUE) {
                    Log.e(TAG, "Could not link program: ");
                    Log.e(TAG, GLES20.glGetProgramInfoLog(program));
                    GLES20.glDeleteProgram(program);
                    program = 0;
                }
            }
            return program;
        }

        private void checkGlError(String op) {
            int error;
            while ((error = GLES20.glGetError()) != GLES20.GL_NO_ERROR) {
                Log.e(TAG, op + ": glError " + error);
                throw new RuntimeException(op + ": glError " + error);
            }
        }

    }  // End of class VideoDumpRenderer.

}  // End of class VideoDumpView.