/*
 * Copyright 2013 The Android Open Source Project
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package android.media.cts;

import android.graphics.SurfaceTexture;
import android.hardware.Camera;
import android.media.MediaCodec;
import android.media.MediaCodecInfo;
import android.media.MediaFormat;
import android.media.MediaMuxer;
import android.opengl.EGL14;
import android.opengl.EGLConfig;
import android.opengl.EGLContext;
import android.opengl.EGLDisplay;
import android.opengl.EGLExt;
import android.opengl.EGLSurface;
import android.opengl.GLES11Ext;
import android.opengl.GLES20;
import android.opengl.Matrix;
import android.os.Environment;
import android.test.AndroidTestCase;
import android.util.Log;
import android.view.Surface;

import java.io.File;
import java.io.IOException;
import java.nio.ByteBuffer;
import java.nio.ByteOrder;
import java.nio.FloatBuffer;

//20131106: removed unnecessary glFinish(), removed hard-coded "/sdcard"
//20131205: added alpha to EGLConfig
//20131210: demonstrate un-bind and re-bind of texture, for apps with shared EGL contexts
//20140123: correct error checks on glGet*Location() and program creation (they don't set error)
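
// One way to run this test (hypothetical invocation; assumes the class is built into
// an instrumentation APK that uses the stock test runner -- substitute the real
// test package for <test.package>):
//
//   adb shell am instrument -w -e class android.media.cts.CameraToMpegTest \
//       <test.package>/android.test.InstrumentationTestRunner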

/**
 * Record video from the camera preview and encode it as an MP4 file. Demonstrates the use
 * of MediaMuxer and MediaCodec with Camera input. Does not record audio.
 * <p>
 * Generally speaking, it's better to use MediaRecorder for this sort of thing. This example
 * demonstrates one possible advantage: editing of video as it's being encoded. A GLES 2.0
 * fragment shader is used to perform a silly color tweak every 15 frames.
 * <p>
 * This uses various features first available in Android "Jelly Bean" 4.3 (API 18). There is
 * no equivalent functionality in previous releases. (You can send the Camera preview to a
 * byte buffer with a fully-specified format, but MediaCodec encoders want different input
 * formats on different devices, and this use case wasn't well exercised in CTS pre-4.3.)
 * <p>
 * The output file will be something like "/sdcard/test.640x480.mp4".
 * <p>
 * (This was derived from bits and pieces of CTS tests, and is packaged as such, but is not
 * currently part of CTS.)
 */
public class CameraToMpegTest extends AndroidTestCase {
    private static final String TAG = "CameraToMpegTest";
    private static final boolean VERBOSE = false;           // lots of logging

    // where to put the output file (note: /sdcard requires WRITE_EXTERNAL_STORAGE permission)
    private static final File OUTPUT_DIR = Environment.getExternalStorageDirectory();

    // parameters for the encoder
    private static final String MIME_TYPE = "video/avc";    // H.264 Advanced Video Coding
    private static final int FRAME_RATE = 30;               // 30fps
    private static final int IFRAME_INTERVAL = 5;           // 5 seconds between I-frames
    private static final long DURATION_SEC = 8;             // 8 seconds of video

    // Fragment shader that swaps color channels around.
    private static final String SWAPPED_FRAGMENT_SHADER =
            "#extension GL_OES_EGL_image_external : require\n" +
            "precision mediump float;\n" +
            "varying vec2 vTextureCoord;\n" +
            "uniform samplerExternalOES sTexture;\n" +
            "void main() {\n" +
            "  gl_FragColor = texture2D(sTexture, vTextureCoord).gbra;\n" +
            "}\n";

    // encoder / muxer state
    private MediaCodec mEncoder;
    private CodecInputSurface mInputSurface;
    private MediaMuxer mMuxer;
    private int mTrackIndex;
    private boolean mMuxerStarted;

    // camera state
    private Camera mCamera;
    private SurfaceTextureManager mStManager;

    // allocate one of these up front so we don't need to do it every time
    private MediaCodec.BufferInfo mBufferInfo;

    /** test entry point */
    public void testEncodeCameraToMp4() throws Throwable {
        CameraToMpegWrapper.runTest(this);
    }

    /**
     * Wraps encodeCameraToMpeg(). This is necessary because SurfaceTexture will try to use
     * the looper in the current thread if one exists, and the CTS tests create one on the
     * test thread.
     *
     * The wrapper propagates exceptions thrown by the worker thread back to the caller.
     */
    private static class CameraToMpegWrapper implements Runnable {
        private Throwable mThrowable;
        private CameraToMpegTest mTest;

        private CameraToMpegWrapper(CameraToMpegTest test) {
            mTest = test;
        }

        @Override
        public void run() {
            try {
                mTest.encodeCameraToMpeg();
            } catch (Throwable th) {
                mThrowable = th;
            }
        }

        /** Entry point. */
        public static void runTest(CameraToMpegTest obj) throws Throwable {
            CameraToMpegWrapper wrapper = new CameraToMpegWrapper(obj);
            Thread th = new Thread(wrapper, "codec test");
            th.start();
            th.join();
            if (wrapper.mThrowable != null) {
                throw wrapper.mThrowable;
            }
        }
    }

    /**
     * Tests encoding of AVC video from Camera input. The output is saved as an MP4 file.
     */
    private void encodeCameraToMpeg() {
        // arbitrary but popular values
        int encWidth = 640;
        int encHeight = 480;
        int encBitRate = 6000000;       // bits per second (6 Mbps)

        Log.d(TAG, MIME_TYPE + " output " + encWidth + "x" + encHeight + " @" + encBitRate);

        try {
            prepareCamera(encWidth, encHeight);
            prepareEncoder(encWidth, encHeight, encBitRate);
            mInputSurface.makeCurrent();
            prepareSurfaceTexture();

            mCamera.startPreview();

            long startWhen = System.nanoTime();
            long desiredEnd = startWhen + DURATION_SEC * 1000000000L;
            SurfaceTexture st = mStManager.getSurfaceTexture();
            int frameCount = 0;

            while (System.nanoTime() < desiredEnd) {
                // Feed any pending encoder output into the muxer.
                drainEncoder(false);

                // Switch up the colors every 15 frames. Besides demonstrating the use of
                // fragment shaders for video editing, this provides a visual indication of
                // the frame rate: if the camera is capturing at 15fps, the colors will change
                // once per second.
                if ((frameCount % 15) == 0) {
                    String fragmentShader = null;
                    if ((frameCount & 0x01) != 0) {
                        fragmentShader = SWAPPED_FRAGMENT_SHADER;
                    }
                    mStManager.changeFragmentShader(fragmentShader);
                }
                frameCount++;

                // Acquire a new frame of input, and render it to the Surface. If we had a
                // GLSurfaceView we could switch EGL contexts and call drawImage() a second
                // time to render it on screen. The texture can be shared between contexts by
                // passing the GLSurfaceView's EGLContext as eglCreateContext()'s share_context
                // argument.
                mStManager.awaitNewImage();
                mStManager.drawImage();

                // Set the presentation time stamp from the SurfaceTexture's time stamp. This
                // will be used by MediaMuxer to set the PTS in the video.
                if (VERBOSE) {
                    Log.d(TAG, "present: " +
                            ((st.getTimestamp() - startWhen) / 1000000.0) + "ms");
                }
                mInputSurface.setPresentationTime(st.getTimestamp());

                // Submit it to the encoder. The eglSwapBuffers call will block if the input
                // is full, which would be bad if it stayed full until we dequeued an output
                // buffer (which we can't do, since we're stuck here). So long as we fully drain
                // the encoder before supplying additional input, the system guarantees that we
                // can supply another frame without blocking.
                if (VERBOSE) Log.d(TAG, "sending frame to encoder");
                mInputSurface.swapBuffers();
            }

            // send end-of-stream to encoder, and drain remaining output
            drainEncoder(true);
        } finally {
            // release everything we grabbed
            releaseCamera();
            releaseEncoder();
            releaseSurfaceTexture();
        }
    }

    /**
     * Configures Camera for video capture. Sets mCamera.
     * <p>
     * Opens a Camera and sets parameters. Does not start preview.
     */
    private void prepareCamera(int encWidth, int encHeight) {
        if (mCamera != null) {
            throw new RuntimeException("camera already initialized");
        }

        Camera.CameraInfo info = new Camera.CameraInfo();

        // Try to find a front-facing camera (e.g. for videoconferencing).
        int numCameras = Camera.getNumberOfCameras();
        for (int i = 0; i < numCameras; i++) {
            Camera.getCameraInfo(i, info);
            if (info.facing == Camera.CameraInfo.CAMERA_FACING_FRONT) {
                mCamera = Camera.open(i);
                break;
            }
        }
        if (mCamera == null) {
            Log.d(TAG, "No front-facing camera found; opening default");
            mCamera = Camera.open();    // opens first back-facing camera
        }
        if (mCamera == null) {
            throw new RuntimeException("Unable to open camera");
        }

        Camera.Parameters parms = mCamera.getParameters();

        choosePreviewSize(parms, encWidth, encHeight);
        // leave the frame rate set to default
        mCamera.setParameters(parms);

        Camera.Size size = parms.getPreviewSize();
        Log.d(TAG, "Camera preview size is " + size.width + "x" + size.height);
    }

    /**
     * Attempts to find a preview size that matches the provided width and height (which
     * specify the dimensions of the encoded video). If it fails to find a match it just
     * uses the default preview size.
     * <p>
     * TODO: should do a best-fit match.
     */
    private static void choosePreviewSize(Camera.Parameters parms, int width, int height) {
        // We should make sure that the requested MPEG size is less than the preferred
        // size, and has the same aspect ratio.
        Camera.Size ppsfv = parms.getPreferredPreviewSizeForVideo();
        if (VERBOSE && ppsfv != null) {
            Log.d(TAG, "Camera preferred preview size for video is " +
                    ppsfv.width + "x" + ppsfv.height);
        }

        for (Camera.Size size : parms.getSupportedPreviewSizes()) {
            if (size.width == width && size.height == height) {
                parms.setPreviewSize(width, height);
                return;
            }
        }

        Log.w(TAG, "Unable to set preview size to " + width + "x" + height);
        if (ppsfv != null) {
            parms.setPreviewSize(ppsfv.width, ppsfv.height);
        }
    }
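
    // The TODO above could be addressed with something like this rough sketch
    // (hypothetical, untested; picks the supported size closest in pixel count
    // to the requested one):
    //
    //   Camera.Size best = null;
    //   int bestDelta = Integer.MAX_VALUE;
    //   for (Camera.Size size : parms.getSupportedPreviewSizes()) {
    //       int delta = Math.abs(size.width * size.height - width * height);
    //       if (delta < bestDelta) {
    //           best = size;
    //           bestDelta = delta;
    //       }
    //   }
    //   if (best != null) {
    //       parms.setPreviewSize(best.width, best.height);
    //   }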

    /**
     * Stops camera preview, and releases the camera to the system.
     */
    private void releaseCamera() {
        if (VERBOSE) Log.d(TAG, "releasing camera");
        if (mCamera != null) {
            mCamera.stopPreview();
            mCamera.release();
            mCamera = null;
        }
    }

    /**
     * Configures SurfaceTexture for camera preview. Initializes mStManager, and sets the
     * associated SurfaceTexture as the Camera's "preview texture".
     * <p>
     * Configure the EGL surface that will be used for output before calling here.
     */
    private void prepareSurfaceTexture() {
        mStManager = new SurfaceTextureManager();
        SurfaceTexture st = mStManager.getSurfaceTexture();
        try {
            mCamera.setPreviewTexture(st);
        } catch (IOException ioe) {
            throw new RuntimeException("setPreviewTexture failed", ioe);
        }
    }

    /**
     * Releases the SurfaceTexture.
     */
    private void releaseSurfaceTexture() {
        if (mStManager != null) {
            mStManager.release();
            mStManager = null;
        }
    }

    /**
     * Configures encoder and muxer state, and prepares the input Surface. Initializes
     * mEncoder, mMuxer, mInputSurface, mBufferInfo, mTrackIndex, and mMuxerStarted.
     */
    private void prepareEncoder(int width, int height, int bitRate) {
        mBufferInfo = new MediaCodec.BufferInfo();

        MediaFormat format = MediaFormat.createVideoFormat(MIME_TYPE, width, height);

        // Set some properties. Failing to specify some of these can cause the MediaCodec
        // configure() call to throw an unhelpful exception.
        format.setInteger(MediaFormat.KEY_COLOR_FORMAT,
                MediaCodecInfo.CodecCapabilities.COLOR_FormatSurface);
        format.setInteger(MediaFormat.KEY_BIT_RATE, bitRate);
        format.setInteger(MediaFormat.KEY_FRAME_RATE, FRAME_RATE);
        format.setInteger(MediaFormat.KEY_I_FRAME_INTERVAL, IFRAME_INTERVAL);
        if (VERBOSE) Log.d(TAG, "format: " + format);

        // Create a MediaCodec encoder, and configure it with our format. Get a Surface
        // we can use for input and wrap it with a class that handles the EGL work.
        //
        // If you want to have two EGL contexts -- one for display, one for recording --
        // you will likely want to defer instantiation of CodecInputSurface until after the
        // "display" EGL context is created, then modify the eglCreateContext call to
        // take eglGetCurrentContext() as the share_context argument.
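        //
        // A rough sketch of that variant (hypothetical, untested; assumes the "display"
        // EGL context is current on this thread when eglSetup() runs):
        //
        //   EGLContext shareContext = EGL14.eglGetCurrentContext();
        //   mEGLContext = EGL14.eglCreateContext(mEGLDisplay, configs[0], shareContext,
        //           attrib_list, 0);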
        mEncoder = MediaCodec.createEncoderByType(MIME_TYPE);
        mEncoder.configure(format, null, null, MediaCodec.CONFIGURE_FLAG_ENCODE);
        mInputSurface = new CodecInputSurface(mEncoder.createInputSurface());
        mEncoder.start();

        // Output filename. Ideally this would use Context.getFilesDir() rather than a
        // hard-coded output directory.
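        //
        // For example (sketch only; AndroidTestCase provides a Context via getContext()):
        //
        //   File outputFile = new File(getContext().getFilesDir(),
        //           "test." + width + "x" + height + ".mp4");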
        String outputPath = new File(OUTPUT_DIR,
                "test." + width + "x" + height + ".mp4").toString();
        Log.i(TAG, "Output file is " + outputPath);

        // Create a MediaMuxer. We can't add the video track and start() the muxer here,
        // because our MediaFormat doesn't have the Magic Goodies. These can only be
        // obtained from the encoder after it has started processing data.
        //
        // We're not actually interested in multiplexing audio. We just want to convert
        // the raw H.264 elementary stream we get from MediaCodec into a .mp4 file.
        try {
            mMuxer = new MediaMuxer(outputPath, MediaMuxer.OutputFormat.MUXER_OUTPUT_MPEG_4);
        } catch (IOException ioe) {
            throw new RuntimeException("MediaMuxer creation failed", ioe);
        }

        mTrackIndex = -1;
        mMuxerStarted = false;
    }

    /**
     * Releases encoder resources.
     */
    private void releaseEncoder() {
        if (VERBOSE) Log.d(TAG, "releasing encoder objects");
        if (mEncoder != null) {
            mEncoder.stop();
            mEncoder.release();
            mEncoder = null;
        }
        if (mInputSurface != null) {
            mInputSurface.release();
            mInputSurface = null;
        }
        if (mMuxer != null) {
            mMuxer.stop();
            mMuxer.release();
            mMuxer = null;
        }
    }

    /**
     * Extracts all pending data from the encoder and forwards it to the muxer.
     * <p>
     * If endOfStream is not set, this returns when there is no more data to drain. If it
     * is set, we send EOS to the encoder, and then iterate until we see EOS on the output.
     * Calling this with endOfStream set should be done once, right before stopping the muxer.
     * <p>
     * We're just using the muxer to get a .mp4 file (instead of a raw H.264 stream). We're
     * not recording audio.
     */
    private void drainEncoder(boolean endOfStream) {
        final int TIMEOUT_USEC = 10000;
        if (VERBOSE) Log.d(TAG, "drainEncoder(" + endOfStream + ")");

        if (endOfStream) {
            if (VERBOSE) Log.d(TAG, "sending EOS to encoder");
            mEncoder.signalEndOfInputStream();
        }

        ByteBuffer[] encoderOutputBuffers = mEncoder.getOutputBuffers();
        while (true) {
            int encoderStatus = mEncoder.dequeueOutputBuffer(mBufferInfo, TIMEOUT_USEC);
            if (encoderStatus == MediaCodec.INFO_TRY_AGAIN_LATER) {
                // no output available yet
                if (!endOfStream) {
                    break;      // out of while
                } else {
                    if (VERBOSE) Log.d(TAG, "no output available, spinning to await EOS");
                }
            } else if (encoderStatus == MediaCodec.INFO_OUTPUT_BUFFERS_CHANGED) {
                // not expected for an encoder
                encoderOutputBuffers = mEncoder.getOutputBuffers();
            } else if (encoderStatus == MediaCodec.INFO_OUTPUT_FORMAT_CHANGED) {
                // should happen before receiving buffers, and should only happen once
                if (mMuxerStarted) {
                    throw new RuntimeException("format changed twice");
                }
                MediaFormat newFormat = mEncoder.getOutputFormat();
                Log.d(TAG, "encoder output format changed: " + newFormat);

                // now that we have the Magic Goodies, start the muxer
                mTrackIndex = mMuxer.addTrack(newFormat);
                mMuxer.start();
                mMuxerStarted = true;
            } else if (encoderStatus < 0) {
                Log.w(TAG, "unexpected result from encoder.dequeueOutputBuffer: " +
                        encoderStatus);
                // let's ignore it
            } else {
                ByteBuffer encodedData = encoderOutputBuffers[encoderStatus];
                if (encodedData == null) {
                    throw new RuntimeException("encoderOutputBuffer " + encoderStatus +
                            " was null");
                }

                if ((mBufferInfo.flags & MediaCodec.BUFFER_FLAG_CODEC_CONFIG) != 0) {
                    // The codec config data was pulled out and fed to the muxer when we got
                    // the INFO_OUTPUT_FORMAT_CHANGED status. Ignore it.
                    if (VERBOSE) Log.d(TAG, "ignoring BUFFER_FLAG_CODEC_CONFIG");
                    mBufferInfo.size = 0;
                }

                if (mBufferInfo.size != 0) {
                    if (!mMuxerStarted) {
                        throw new RuntimeException("muxer hasn't started");
                    }

                    // adjust the ByteBuffer values to match BufferInfo (not needed?)
                    encodedData.position(mBufferInfo.offset);
                    encodedData.limit(mBufferInfo.offset + mBufferInfo.size);

                    mMuxer.writeSampleData(mTrackIndex, encodedData, mBufferInfo);
                    if (VERBOSE) Log.d(TAG, "sent " + mBufferInfo.size + " bytes to muxer");
                }

                mEncoder.releaseOutputBuffer(encoderStatus, false);

                if ((mBufferInfo.flags & MediaCodec.BUFFER_FLAG_END_OF_STREAM) != 0) {
                    if (!endOfStream) {
                        Log.w(TAG, "reached end of stream unexpectedly");
                    } else {
                        if (VERBOSE) Log.d(TAG, "end of stream reached");
                    }
                    break;      // out of while
                }
            }
        }
    }

    /**
     * Holds state associated with a Surface used for MediaCodec encoder input.
     * <p>
     * The constructor takes a Surface obtained from MediaCodec.createInputSurface(), and uses
     * that to create an EGL window surface. Calls to eglSwapBuffers() cause a frame of data to
     * be sent to the video encoder.
     * <p>
     * This object owns the Surface -- releasing this will release the Surface too.
     */
    private static class CodecInputSurface {
        private static final int EGL_RECORDABLE_ANDROID = 0x3142;

        private EGLDisplay mEGLDisplay = EGL14.EGL_NO_DISPLAY;
        private EGLContext mEGLContext = EGL14.EGL_NO_CONTEXT;
        private EGLSurface mEGLSurface = EGL14.EGL_NO_SURFACE;

        private Surface mSurface;

        /**
         * Creates a CodecInputSurface from a Surface.
         */
        public CodecInputSurface(Surface surface) {
            if (surface == null) {
                throw new NullPointerException();
            }
            mSurface = surface;

            eglSetup();
        }

        /**
         * Prepares EGL. We want a GLES 2.0 context and a surface that supports recording.
         */
        private void eglSetup() {
            mEGLDisplay = EGL14.eglGetDisplay(EGL14.EGL_DEFAULT_DISPLAY);
            if (mEGLDisplay == EGL14.EGL_NO_DISPLAY) {
                throw new RuntimeException("unable to get EGL14 display");
            }
            int[] version = new int[2];
            if (!EGL14.eglInitialize(mEGLDisplay, version, 0, version, 1)) {
                throw new RuntimeException("unable to initialize EGL14");
            }

            // Configure EGL for recording and OpenGL ES 2.0.
            int[] attribList = {
                    EGL14.EGL_RED_SIZE, 8,
                    EGL14.EGL_GREEN_SIZE, 8,
                    EGL14.EGL_BLUE_SIZE, 8,
                    EGL14.EGL_ALPHA_SIZE, 8,
                    EGL14.EGL_RENDERABLE_TYPE, EGL14.EGL_OPENGL_ES2_BIT,
                    EGL_RECORDABLE_ANDROID, 1,
                    EGL14.EGL_NONE
            };
            EGLConfig[] configs = new EGLConfig[1];
            int[] numConfigs = new int[1];
            EGL14.eglChooseConfig(mEGLDisplay, attribList, 0, configs, 0, configs.length,
                    numConfigs, 0);
checkEglError ("eglCreateContext RGB888+recordable ES2" );

            // Configure context for OpenGL ES 2.0.
            int[] attrib_list = {
                    EGL14.EGL_CONTEXT_CLIENT_VERSION, 2,
                    EGL14.EGL_NONE
            };
            mEGLContext = EGL14.eglCreateContext(mEGLDisplay, configs[0], EGL14.EGL_NO_CONTEXT,
                    attrib_list, 0);
            checkEglError("eglCreateContext");

            // Create a window surface, and attach it to the Surface we received.
            int[] surfaceAttribs = {
                    EGL14.EGL_NONE
            };
            mEGLSurface = EGL14.eglCreateWindowSurface(mEGLDisplay, configs[0], mSurface,
                    surfaceAttribs, 0);
            checkEglError("eglCreateWindowSurface");
        }

        /**
         * Discards all resources held by this class, notably the EGL context. Also releases the
         * Surface that was passed to our constructor.
         */
        public void release() {
            if (mEGLDisplay != EGL14.EGL_NO_DISPLAY) {
                EGL14.eglMakeCurrent(mEGLDisplay, EGL14.EGL_NO_SURFACE, EGL14.EGL_NO_SURFACE,
                        EGL14.EGL_NO_CONTEXT);
                EGL14.eglDestroySurface(mEGLDisplay, mEGLSurface);
                EGL14.eglDestroyContext(mEGLDisplay, mEGLContext);
                EGL14.eglReleaseThread();
                EGL14.eglTerminate(mEGLDisplay);
            }
            mSurface.release();

            mEGLDisplay = EGL14.EGL_NO_DISPLAY;
            mEGLContext = EGL14.EGL_NO_CONTEXT;
            mEGLSurface = EGL14.EGL_NO_SURFACE;

            mSurface = null;
        }

        /**
         * Makes our EGL context and surface current.
         */
        public void makeCurrent() {
            EGL14.eglMakeCurrent(mEGLDisplay, mEGLSurface, mEGLSurface, mEGLContext);
            checkEglError("eglMakeCurrent");
        }

        /**
         * Calls eglSwapBuffers. Use this to "publish" the current frame.
         */
        public boolean swapBuffers() {
            boolean result = EGL14.eglSwapBuffers(mEGLDisplay, mEGLSurface);
            checkEglError("eglSwapBuffers");
            return result;
        }

        /**
         * Sends the presentation time stamp to EGL. Time is expressed in nanoseconds.
         */
        public void setPresentationTime(long nsecs) {
            EGLExt.eglPresentationTimeANDROID(mEGLDisplay, mEGLSurface, nsecs);
            checkEglError("eglPresentationTimeANDROID");
        }

        /**
         * Checks for EGL errors. Throws an exception if one is found.
         */
        private void checkEglError(String msg) {
            int error;
            if ((error = EGL14.eglGetError()) != EGL14.EGL_SUCCESS) {
                throw new RuntimeException(msg + ": EGL error: 0x" + Integer.toHexString(error));
            }
        }
    }

    /**
     * Manages a SurfaceTexture. Creates SurfaceTexture and TextureRender objects, and provides
     * functions that wait for frames and render them to the current EGL surface.
     * <p>
     * The SurfaceTexture can be passed to Camera.setPreviewTexture() to receive camera output.
     */
    private static class SurfaceTextureManager
            implements SurfaceTexture.OnFrameAvailableListener {
        private SurfaceTexture mSurfaceTexture;
        private CameraToMpegTest.STextureRender mTextureRender;

        private Object mFrameSyncObject = new Object();     // guards mFrameAvailable
        private boolean mFrameAvailable;

        /**
         * Creates instances of TextureRender and SurfaceTexture.
         */
        public SurfaceTextureManager() {
            mTextureRender = new CameraToMpegTest.STextureRender();
            mTextureRender.surfaceCreated();

            if (VERBOSE) Log.d(TAG, "textureID=" + mTextureRender.getTextureId());
            mSurfaceTexture = new SurfaceTexture(mTextureRender.getTextureId());

            // This doesn't work if this object is created on the thread that CTS started for
            // these test cases.
            //
            // The CTS-created thread has a Looper, and the SurfaceTexture constructor will
            // create a Handler that uses it. The "frame available" message is delivered
            // there, but since we're not a Looper-based thread we'll never see it. For
            // this to do anything useful, SurfaceTextureManager must be created on a thread
            // without a Looper, so that SurfaceTexture uses the main application Looper instead.
            //
            // Java language note: passing "this" out of a constructor is generally unwise,
            // but we should be able to get away with it here.
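            //
            // (On API 21+, a hypothetical alternative would be the
            // setOnFrameAvailableListener(listener, handler) overload, which lets the
            // caller pick the Handler explicitly instead of relying on this thread's
            // Looper state.)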
            mSurfaceTexture.setOnFrameAvailableListener(this);
        }

        public void release() {
            // this causes a bunch of warnings that appear harmless but might confuse someone:
            //  W BufferQueue: [unnamed-3997-2] cancelBuffer: BufferQueue has been abandoned!
            //mSurfaceTexture.release();

            mTextureRender = null;
            mSurfaceTexture = null;
        }

        /**
         * Returns the SurfaceTexture.
         */
        public SurfaceTexture getSurfaceTexture() {
            return mSurfaceTexture;
        }

        /**
         * Replaces the fragment shader.
         */
        public void changeFragmentShader(String fragmentShader) {
            mTextureRender.changeFragmentShader(fragmentShader);
        }

        /**
         * Latches the next buffer into the texture. Must be called from the thread that created
         * the SurfaceTextureManager object.
         */
        public void awaitNewImage() {
            final int TIMEOUT_MS = 2500;

            synchronized (mFrameSyncObject) {
                while (!mFrameAvailable) {
                    try {
                        // Wait for onFrameAvailable() to signal us. Use a timeout to avoid
                        // stalling the test if it doesn't arrive.
                        mFrameSyncObject.wait(TIMEOUT_MS);
                        if (!mFrameAvailable) {
                            // TODO: if "spurious wakeup", continue while loop
                            throw new RuntimeException("Camera frame wait timed out");
                        }
                    } catch (InterruptedException ie) {
                        // shouldn't happen
                        throw new RuntimeException(ie);
                    }
                }
                mFrameAvailable = false;
            }

            // Latch the data.
            mTextureRender.checkGlError("before updateTexImage");
            mSurfaceTexture.updateTexImage();
        }

        /**
         * Draws the data from SurfaceTexture onto the current EGL surface.
         */
        public void drawImage() {
            mTextureRender.drawFrame(mSurfaceTexture);
        }

        @Override
        public void onFrameAvailable(SurfaceTexture st) {
            if (VERBOSE) Log.d(TAG, "new frame available");
            synchronized (mFrameSyncObject) {
                if (mFrameAvailable) {
                    throw new RuntimeException("mFrameAvailable already set, frame could be dropped");
                }
                mFrameAvailable = true;
                mFrameSyncObject.notifyAll();
            }
        }
    }

    /**
     * Code for rendering a texture onto a surface using OpenGL ES 2.0.
     */
    private static class STextureRender {
        private static final int FLOAT_SIZE_BYTES = 4;
        private static final int TRIANGLE_VERTICES_DATA_STRIDE_BYTES = 5 * FLOAT_SIZE_BYTES;
        private static final int TRIANGLE_VERTICES_DATA_POS_OFFSET = 0;
        private static final int TRIANGLE_VERTICES_DATA_UV_OFFSET = 3;
        private final float[] mTriangleVerticesData = {
                // X, Y, Z, U, V
                -1.0f, -1.0f, 0, 0.f, 0.f,
                 1.0f, -1.0f, 0, 1.f, 0.f,
                -1.0f,  1.0f, 0, 0.f, 1.f,
                 1.0f,  1.0f, 0, 1.f, 1.f,
        };

        private FloatBuffer mTriangleVertices;

        private static final String VERTEX_SHADER =
                "uniform mat4 uMVPMatrix;\n" +
                "uniform mat4 uSTMatrix;\n" +
                "attribute vec4 aPosition;\n" +
                "attribute vec4 aTextureCoord;\n" +
                "varying vec2 vTextureCoord;\n" +
                "void main() {\n" +
                "  gl_Position = uMVPMatrix * aPosition;\n" +
                "  vTextureCoord = (uSTMatrix * aTextureCoord).xy;\n" +
                "}\n";

        private static final String FRAGMENT_SHADER =
                "#extension GL_OES_EGL_image_external : require\n" +
                "precision mediump float;\n" +      // highp here doesn't seem to matter
                "varying vec2 vTextureCoord;\n" +
                "uniform samplerExternalOES sTexture;\n" +
                "void main() {\n" +
                "  gl_FragColor = texture2D(sTexture, vTextureCoord);\n" +
                "}\n";

        private float[] mMVPMatrix = new float[16];
        private float[] mSTMatrix = new float[16];

        private int mProgram;
        private int mTextureID = -12345;
        private int muMVPMatrixHandle;
        private int muSTMatrixHandle;
        private int maPositionHandle;
        private int maTextureHandle;

        public STextureRender() {
            mTriangleVertices = ByteBuffer.allocateDirect(
                    mTriangleVerticesData.length * FLOAT_SIZE_BYTES)
                    .order(ByteOrder.nativeOrder()).asFloatBuffer();
            mTriangleVertices.put(mTriangleVerticesData).position(0);

            Matrix.setIdentityM(mSTMatrix, 0);
        }

        public int getTextureId() {
            return mTextureID;
        }

        public void drawFrame(SurfaceTexture st) {
            checkGlError("onDrawFrame start");
            st.getTransformMatrix(mSTMatrix);

            // (optional) clear to green so we can see if we're failing to set pixels
            GLES20.glClearColor(0.0f, 1.0f, 0.0f, 1.0f);
            GLES20.glClear(GLES20.GL_DEPTH_BUFFER_BIT | GLES20.GL_COLOR_BUFFER_BIT);

            GLES20.glUseProgram(mProgram);
            checkGlError("glUseProgram");

            GLES20.glActiveTexture(GLES20.GL_TEXTURE0);
            GLES20.glBindTexture(GLES11Ext.GL_TEXTURE_EXTERNAL_OES, mTextureID);

            mTriangleVertices.position(TRIANGLE_VERTICES_DATA_POS_OFFSET);
            GLES20.glVertexAttribPointer(maPositionHandle, 3, GLES20.GL_FLOAT, false,
                    TRIANGLE_VERTICES_DATA_STRIDE_BYTES, mTriangleVertices);
            checkGlError("glVertexAttribPointer maPosition");
            GLES20.glEnableVertexAttribArray(maPositionHandle);
            checkGlError("glEnableVertexAttribArray maPositionHandle");

            mTriangleVertices.position(TRIANGLE_VERTICES_DATA_UV_OFFSET);
            GLES20.glVertexAttribPointer(maTextureHandle, 2, GLES20.GL_FLOAT, false,
                    TRIANGLE_VERTICES_DATA_STRIDE_BYTES, mTriangleVertices);
            checkGlError("glVertexAttribPointer maTextureHandle");
            GLES20.glEnableVertexAttribArray(maTextureHandle);
            checkGlError("glEnableVertexAttribArray maTextureHandle");

            Matrix.setIdentityM(mMVPMatrix, 0);
            GLES20.glUniformMatrix4fv(muMVPMatrixHandle, 1, false, mMVPMatrix, 0);
            GLES20.glUniformMatrix4fv(muSTMatrixHandle, 1, false, mSTMatrix, 0);

            GLES20.glDrawArrays(GLES20.GL_TRIANGLE_STRIP, 0, 4);
            checkGlError("glDrawArrays");

            // IMPORTANT: on some devices, if you are sharing the external texture between two
            // contexts, one context may not see updates to the texture unless you un-bind and
            // re-bind it. If you're not using shared EGL contexts, you don't need to bind
            // texture 0 here.
            GLES20.glBindTexture(GLES11Ext.GL_TEXTURE_EXTERNAL_OES, 0);
        }

        /**
         * Initializes GL state. Call this after the EGL surface has been created and made current.
         */
        public void surfaceCreated() {
            mProgram = createProgram(VERTEX_SHADER, FRAGMENT_SHADER);
            if (mProgram == 0) {
                throw new RuntimeException("failed creating program");
            }
            maPositionHandle = GLES20.glGetAttribLocation(mProgram, "aPosition");
            checkLocation(maPositionHandle, "aPosition");
            maTextureHandle = GLES20.glGetAttribLocation(mProgram, "aTextureCoord");
            checkLocation(maTextureHandle, "aTextureCoord");

            muMVPMatrixHandle = GLES20.glGetUniformLocation(mProgram, "uMVPMatrix");
            checkLocation(muMVPMatrixHandle, "uMVPMatrix");
            muSTMatrixHandle = GLES20.glGetUniformLocation(mProgram, "uSTMatrix");
            checkLocation(muSTMatrixHandle, "uSTMatrix");

            int[] textures = new int[1];
            GLES20.glGenTextures(1, textures, 0);

            mTextureID = textures[0];
            GLES20.glBindTexture(GLES11Ext.GL_TEXTURE_EXTERNAL_OES, mTextureID);
            checkGlError("glBindTexture mTextureID");

            GLES20.glTexParameterf(GLES11Ext.GL_TEXTURE_EXTERNAL_OES, GLES20.GL_TEXTURE_MIN_FILTER,
                    GLES20.GL_NEAREST);
            GLES20.glTexParameterf(GLES11Ext.GL_TEXTURE_EXTERNAL_OES, GLES20.GL_TEXTURE_MAG_FILTER,
                    GLES20.GL_LINEAR);
            GLES20.glTexParameteri(GLES11Ext.GL_TEXTURE_EXTERNAL_OES, GLES20.GL_TEXTURE_WRAP_S,
                    GLES20.GL_CLAMP_TO_EDGE);
            GLES20.glTexParameteri(GLES11Ext.GL_TEXTURE_EXTERNAL_OES, GLES20.GL_TEXTURE_WRAP_T,
                    GLES20.GL_CLAMP_TO_EDGE);
            checkGlError("glTexParameter");
        }

        /**
         * Replaces the fragment shader. Pass in null to reset to default.
         */
        public void changeFragmentShader(String fragmentShader) {
            if (fragmentShader == null) {
                fragmentShader = FRAGMENT_SHADER;
            }
            GLES20.glDeleteProgram(mProgram);
            mProgram = createProgram(VERTEX_SHADER, fragmentShader);
            if (mProgram == 0) {
                throw new RuntimeException("failed creating program");
            }
        }

        private int loadShader(int shaderType, String source) {
            int shader = GLES20.glCreateShader(shaderType);
            checkGlError("glCreateShader type=" + shaderType);
            GLES20.glShaderSource(shader, source);
            GLES20.glCompileShader(shader);
            int[] compiled = new int[1];
            GLES20.glGetShaderiv(shader, GLES20.GL_COMPILE_STATUS, compiled, 0);
            if (compiled[0] == 0) {
                Log.e(TAG, "Could not compile shader " + shaderType + ":");
                Log.e(TAG, " " + GLES20.glGetShaderInfoLog(shader));
                GLES20.glDeleteShader(shader);
                shader = 0;
            }
            return shader;
        }

        private int createProgram(String vertexSource, String fragmentSource) {
            int vertexShader = loadShader(GLES20.GL_VERTEX_SHADER, vertexSource);
            if (vertexShader == 0) {
                return 0;
            }
            int pixelShader = loadShader(GLES20.GL_FRAGMENT_SHADER, fragmentSource);
            if (pixelShader == 0) {
                return 0;
            }

            int program = GLES20.glCreateProgram();
            if (program == 0) {
                Log.e(TAG, "Could not create program");
            }
            GLES20.glAttachShader(program, vertexShader);
            checkGlError("glAttachShader");
            GLES20.glAttachShader(program, pixelShader);
            checkGlError("glAttachShader");
            GLES20.glLinkProgram(program);
            int[] linkStatus = new int[1];
            GLES20.glGetProgramiv(program, GLES20.GL_LINK_STATUS, linkStatus, 0);
            if (linkStatus[0] != GLES20.GL_TRUE) {
                Log.e(TAG, "Could not link program: ");
                Log.e(TAG, GLES20.glGetProgramInfoLog(program));
                GLES20.glDeleteProgram(program);
                program = 0;
            }
            return program;
        }

        public void checkGlError(String op) {
            int error;
            while ((error = GLES20.glGetError()) != GLES20.GL_NO_ERROR) {
                Log.e(TAG, op + ": glError " + error);
                throw new RuntimeException(op + ": glError " + error);
            }
        }

        public static void checkLocation(int location, String label) {
            if (location < 0) {
                throw new RuntimeException("Unable to locate '" + label + "' in program");
            }
        }
    }
}