本篇文章主要介绍了"Android MediaCodec实现摄像头数据硬编解码全过程",对于Android开发感兴趣的同学可以参考一下:
MediaCodec 实现h264硬编解码全过程,视频数据从摄像头读出 yv12格式,转换为I420(同yuv420p),投递给encoder,再从encoder取出编码后的h264数据投递给decoder后显示到surfaceView。
MediaCodec 实现h264硬编解码全过程,视频数据从摄像头读出 yv12格式,转换为I420(同yuv420p),投递给encoder,再从encoder取出编码后的h264数据投递给decoder后显示到surfaceView;
代码中实现了利用udp将h264数据发送到指定主机,可通过vlc播放(时延有些高,尚未处理),另外实现了可以读取本地264文件流投递给解码器播放;
import java.io.File;
import java.io.FileInputStream;
import java.io.FileNotFoundException;
import java.io.IOException;
import java.net.DatagramPacket;
import java.net.DatagramSocket;
import java.net.InetAddress;
import java.net.SocketException;
import java.net.UnknownHostException;
import java.nio.ByteBuffer;
import java.util.ArrayList;
import java.util.List;
import android.support.v7.app.ActionBarActivity;
import android.annotation.SuppressLint;
import android.graphics.ImageFormat;
import android.graphics.Paint;
import android.hardware.Camera;
import android.hardware.Camera.Size;
import android.media.MediaCodec;
import android.media.MediaCodecInfo;
import android.media.MediaCodecList;
import android.media.MediaFormat;
import android.os.Bundle;
import android.util.Log;
import android.view.Surface;
import android.view.SurfaceHolder;
import android.view.SurfaceView;
import android.view.View;
import android.view.Window;
import android.view.View.OnClickListener;
@SuppressLint("NewApi")
publicclassCameraActivityextendsActionBarActivityimplementsSurfaceHolder.Callback,Camera.PreviewCallback,OnClickListener{privatestaticfinal String TAG = "StudyCamera";
privatestaticfinalint FRAME_RATE = 15;
privatestaticfinal String REMOTE_HOST= "192.168.1.105";
privatestaticfinalshort REMOTE_HOST_PORT = 5000;
privatestaticfinal String H264FILE = "/sdcard/test.h264";
privateboolean bOpening = false;
private SurfaceView surfaceView;
private SurfaceHolder surfaceHolder;
private Surface mSurface;
private Camera mCamera;
privateint mCameraWidth,mCameraHeight;
privateint mSurfaceWidth,mSurfaceHeight;
private MediaCodec mMediaEncoder;
private MediaCodec mMediaDecoder;
private Paint paint;
private InetAddress address;
private DatagramSocket socket;
private UdpSendTask netSendTask;
private H264FileTask h264FileTask;
privateint mFrameIndex = 0;
privatebyte[] mEncoderH264Buf;
privatebyte[] mMediaHead = null;
privatebyte[] mYuvBuffer = newbyte[1280*720*3/2];
@OverrideprotectedvoidonCreate(Bundle savedInstanceState) {
super.onCreate(savedInstanceState);
requestWindowFeature(Window.FEATURE_NO_TITLE);
setContentView(R.layout.activity_camera);
findViewById(R.id.btnOpen).setOnClickListener(this);
findViewById(R.id.btnClose).setOnClickListener(this);
mCameraWidth =640;
mCameraHeight = 480;
if(!setupView()){
Log.e(TAG, "failed to setupView");
return;
}
paint = new Paint();
mMediaEncoder = null;
mMediaDecoder = null;
mSurface = null;
mEncoderH264Buf = newbyte[10240];
netSendTask = new UdpSendTask();
netSendTask.init();
netSendTask.start();
}
@OverridepublicvoidonClick(View view)
{
switch(view.getId())
{
case R.id.btnOpen:
{
if(!setupCamera()){
Log.e(TAG, "failed to setupCamera");
return;
}
if(!startCamera()){
Log.e(TAG, "failed to openCamera");
return;
}
//play h264 file test//startPlayH264File(); }
break;
case R.id.btnClose:
{
if(!stopCamera()){
Log.e(TAG, "failed to stopCamera");
return;
}
}
break;
}
}
@OverridepublicvoidsurfaceCreated(SurfaceHolder holder) {
Log.i(TAG, "surfaceCreated.");
}
@OverridepublicvoidsurfaceChanged(SurfaceHolder holder, int format, int width,
int height) {
Log.i(TAG,"surfaceChanged w:"+width+" h:"+height);
mSurface = surfaceHolder.getSurface();
mSurfaceWidth = width;
mSurfaceHeight = height;
/*
* get canvas from surface holder with draw something
* */// Surface surface = holder.getSurface();// if(!setupDecoder(surface,"video/avc",mCameraWidth,mCameraHeight)){// releaseCamera();// Log.e(TAG, "failed to setupEncoder");// return;// }// Canvas c = holder.lockCanvas();// paint.setColor(Color.WHITE);// paint.setStrokeWidth(4);// c.drawRect(0, 0, width, height, paint);// paint.setColor(Color.BLACK);// paint.setTextSize(30);// paint.setStrokeWidth(1); // c.drawText("http://blog.csdn.net/", 100, 100, paint);// Bitmap bitmap = null;// bitmap = BitmapFactory.decodeResource(getResources(), R.drawable.waiting);// if (bitmap != null) {// float left=(c.getWidth()-bitmap.getWidth())/2;// float top=(c.getHeight()-bitmap.getHeight())/2;// c.drawBitmap(bitmap, left, top, paint);// }//// holder.unlockCanvasAndPost(c);
}
@OverridepublicvoidsurfaceDestroyed(SurfaceHolder holder) {
Log.i(TAG,"surfaceDestroyed");
mSurface = null;
releaseCamera();
}
@OverridepublicvoidonPreviewFrame(byte[] rawData, Camera camera)
{
int w = camera.getParameters().getPreviewSize().width;
int h = camera.getParameters().getPreviewSize().height;
int format = camera.getParameters().getPreviewFormat();
Log.d(TAG,"preview frame format:"+format+" size:"+rawData.length+" w:"+w+" h:"+h);
if(mMediaEncoder == null){
if(!setupEncoder("video/avc",w,h)){
releaseCamera();
Log.e(TAG, "failed to setupEncoder");
return;
}
}
assert(mSurface != null);
if(mMediaDecoder == null){
if(!setupDecoder(mSurface,"video/avc",w,h)){
releaseCamera();
Log.e(TAG, "failed to setupDecoder");
return;
}
}
//convert yv12 to i420
swapYV12toI420(rawData, mYuvBuffer, w, h);
System.arraycopy(mYuvBuffer, 0, rawData,0,rawData.length);
//set h264 buffer to zero.for(int i=0;i0;
int encoderRet = offerEncoder(rawData,mEncoderH264Buf);
if(encoderRet > 0){
Log.d(TAG,"encoder output h264 buffer len:"+encoderRet);
/**
* send to VLC client by udp://@port;
*/
netSendTask.pushBuf(mEncoderH264Buf,encoderRet);
/**
* push data to decoder
*/
offerDecoder(mEncoderH264Buf,encoderRet);
}
//reset buff to camera.
camera.addCallbackBuffer(rawData);
}
/**
*
*/privatebooleansetupView()
{
Log.d(TAG,"fall in setupView");
if (null != surfaceHolder) {
surfaceHolder.removeCallback(this);
surfaceView = null;
}
if (null != surfaceView) {
surfaceView = null;
}
surfaceView = (SurfaceView)findViewById(R.id.surfaceView);
surfaceHolder = surfaceView.getHolder();
surfaceHolder.setType(SurfaceHolder.SURFACE_TYPE_PUSH_BUFFERS);
surfaceHolder.addCallback(this);
returntrue;
}
privatebooleansetupCamera()
{
Log.d(TAG,"fall in setupCamera");
if (null != mCamera) {
mCamera.release();
mCamera = null;
}
mCamera = Camera.open(); // Turn on the camera Camera.Parameters parameters = mCamera.getParameters(); // Camera parameters to obtain
List listSize = parameters.getSupportedVideoSizes();
for(int i=0;listSize != null && i"supportedSize:"+size.width+"-"+size.height);
}
int width = mCameraWidth,height = mCameraHeight;
List listFormats = parameters.getSupportedPreviewFormats();
for(int i=0;i"supportedFormat:"+format);
}
parameters.setFlashMode("off"); // 无闪光灯
parameters.setWhiteBalance(Camera.Parameters.WHITE_BALANCE_AUTO);
parameters.setSceneMode(Camera.Parameters.SCENE_MODE_AUTO);
parameters.setFocusMode(Camera.Parameters.FOCUS_MODE_AUTO);
//parameters.setPreviewFormat(ImageFormat.NV21);
parameters.setPreviewFormat(ImageFormat.YV12);
parameters.setPreviewSize(width, height);
parameters.setPictureSize(width, height);
Log.d(TAG,"setup Camera w:"+width+" h:"+height);
mCamera.setParameters(parameters); // Setting camera parameters//alloc buffer for camera callback data with once.byte[] rawBuf = newbyte[mCameraWidth * mCameraHeight * 3 / 2];
mCamera.addCallbackBuffer(rawBuf);
/*set the buffer must be use setPreviewCallbackWithBuffer*///mCamera.setPreviewCallback(this);
mCamera.setPreviewCallbackWithBuffer(this);
//unused preview display to surface view by first;低调看直播体育app软件下载/*
try {
mCamera.setPreviewDisplay(surfaceHolder); // Set Preview
mCamera.setDisplayOrientation(90);
} catch (IOException e) {
Log.e(TAG,"failed to setPreviewDisplay");
mCamera.release();// release camera
mCamera = null;
return false;
}
*/returntrue;
}
privatebooleanstartCamera(){
Log.d(TAG,"fall in startCamera");
if(bOpening)returnfalse;
mCamera.startPreview(); // Start Preview
bOpening = true;
returntrue;
}
privatebooleanstopCamera(){
Log.d(TAG,"fall in stop Camera");
if(!bOpening)returnfalse;
mCamera.stopPreview();// stop preview
bOpening = false;
returntrue;
}
privatebooleanreleaseCamera()
{
Log.d(TAG,"fall in release Camera");
mCamera.stopPreview();
mCamera.release(); // Release camera resources
mCamera = null;
returntrue;
}
privatebooleansetupEncoder(String mime, int width, int height)
{
int colorFormat = selectColorFormat(selectCodec(mime), mime);
Log.d(TAG,"setupEncoder "+mime+" colorFormat:"+colorFormat+" w:"+width+" h:"+height);
mMediaEncoder = MediaCodec.createEncoderByType(mime);
MediaFormat mediaFormat = MediaFormat.createVideoFormat(mime, width, height);
mediaFormat.setInteger(MediaFormat.KEY_BIT_RATE, 125000);
mediaFormat.setInteger(MediaFormat.KEY_FRAME_RATE, FRAME_RATE);
//mediaFormat.setInteger(MediaFormat.KEY_COLOR_FORMAT, MediaCodecInfo.CodecCapabilities.COLOR_FormatYUV420Planar);
mediaFormat.setInteger(MediaFormat.KEY_COLOR_FORMAT, colorFormat);
mediaFormat.setInteger(MediaFormat.KEY_I_FRAME_INTERVAL, 5);
mMediaEncoder.configure(mediaFormat, null, null, MediaCodec.CONFIGURE_FLAG_ENCODE);
mMediaEncoder.start();
returntrue;
}
privatebooleansetupDecoder(Surface surface,String mime,int width, int height){
Log.d(TAG,"setupDecoder surface:"+surface+" mime:"+mime+" w:"+width+" h:"+height);
MediaFormat mediaFormat = MediaFormat.createVideoFormat(mime,width,height);
mMediaDecoder = MediaCodec.createDecoderByType(mime);
if (mMediaDecoder == null) {
Log.e("DecodeActivity", "createDecoderByType fail!");
returnfalse;
}
/*
int codecCount = MediaCodecList.getCodecCount();
for(int i=0;i mMediaDecoder.configure(mediaFormat, surface, null, 0);
mMediaDecoder.start();
returntrue;
}
privateintofferEncoder(byte[] input,byte[] output) {
int pos = 0;
try {
ByteBuffer[] inputBuffers = mMediaEncoder.getInputBuffers();
ByteBuffer[] outputBuffers = mMediaEncoder.getOutputBuffers();
int inputBufferIndex = mMediaEncoder.dequeueInputBuffer(-1);
if (inputBufferIndex >= 0) {
ByteBuffer inputBuffer = inputBuffers[inputBufferIndex];
Log.d(TAG,"offerEncoder InputBufSize: " +inputBuffer.capacity()+" inputSize: "+input.length + " bytes");
inputBuffer.clear();
inputBuffer.put(input);
mMediaEncoder.queueInputBuffer(inputBufferIndex, 0, input.length, 0, 0);
}
MediaCodec.BufferInfo bufferInfo = new MediaCodec.BufferInfo();
int outputBufferIndex = mMediaEncoder.dequeueOutputBuffer(bufferInfo,0);
while (outputBufferIndex >= 0) {
ByteBuffer outputBuffer = outputBuffers[outputBufferIndex];
byte[] data = newbyte[bufferInfo.size];
outputBuffer.get(data);
Log.d(TAG,"offerEncoder InputBufSize:"+outputBuffer.capacity()+" outputSize:"+ data.length + " bytes written");
if(mMediaHead != null)
{
System.arraycopy(data, 0, output, pos, data.length);
pos += data.length;
} else// 保存pps sps 只有开始时 第一个帧里有, 保存起来后面用
{
Log.d(TAG, "offer Encoder save sps head,len:"+data.length);
ByteBuffer spsPpsBuffer = ByteBuffer.wrap(data);
if (spsPpsBuffer.getInt() == 0x00000001) {
mMediaHead = newbyte[data.length];
System.arraycopy(data, 0, mMediaHead, 0, data.length);
} else {
Log.e(TAG,"not found media head.");
return -1;
}
}
mMediaEncoder.releaseOutputBuffer(outputBufferIndex, false);
outputBufferIndex = mMediaEncoder.dequeueOutputBuffer(bufferInfo, 0);
}
if(output[4] == 0x65) //key frame 编码器生成关键帧时只有 00 00 00 01 65 没有pps sps, 要加上
{
System.arraycopy(output, 0, input, 0, pos);
System.arraycopy(mMediaHead, 0, output, 0, mMediaHead.length);
System.arraycopy(input, 0, output, mMediaHead.length, pos);
pos += mMediaHead.length;
}
} catch (Throwable t) {
t.printStackTrace();
}
return pos;
}
privatevoidofferDecoder(byte[] input,int length) {
try {
ByteBuffer[] inputBuffers = mMediaDecoder.getInputBuffers();
int inputBufferIndex = mMediaDecoder.dequeueInputBuffer(-1);
if (inputBufferIndex >= 0) {
ByteBuffer inputBuffer = inputBuffers[inputBufferIndex];
long timestamp = mFrameIndex++ * 1000000 / FRAME_RATE;
Log.d(TAG,"offerDecoder timestamp: " +timestamp+" inputSize: "+length + " bytes");
inputBuffer.clear();
inputBuffer.put(input,0,length);
mMediaDecoder.queueInputBuffer(inputBufferIndex, 0, length, timestamp, 0);
}
MediaCodec.BufferInfo bufferInfo = new MediaCodec.BufferInfo();
int outputBufferIndex = mMediaDecoder.dequeueOutputBuffer(bufferInfo,0);
while (outputBufferIndex >= 0) {
Log.d(TAG,"offerDecoder OutputBufSize:"+bufferInfo.size+ " bytes written");
//If a valid surface was specified when configuring the codec, //passing true renders this output buffer to the surface.
mMediaDecoder.releaseOutputBuffer(outputBufferIndex, true);
outputBufferIndex = mMediaDecoder.dequeueOutputBuffer(bufferInfo, 0);
}
} catch (Throwable t) {
t.printStackTrace();
}
}
privatevoidstartPlayH264File()
{
assert(mSurface != null);
if(mMediaDecoder == null){
if(!setupDecoder(mSurface,"video/avc",mSurfaceWidth,mSurfaceHeight)){
Log.e(TAG, "failed to setupDecoder");
return;
}
}
h264FileTask = new H264FileTask();
h264FileTask.start();
}
privatevoidswapYV12toI420(byte[] yv12bytes, byte[] i420bytes, int width, int height)
{
System.arraycopy(yv12bytes, 0, i420bytes, 0,width*height);
System.arraycopy(yv12bytes, width*height+width*height/4, i420bytes, width*height,width*height/4);
System.arraycopy(yv12bytes, width*height, i420bytes, width*height+width*height/4,width*height/4);
}
privatevoidreadH264FromFile(){
File file = new File(H264FILE);
if(!file.exists() || !file.canRead()){
Log.e(TAG,"failed to open h264 file.");
return;
}
try {
int len = 0;
FileInputStream fis = new FileInputStream(file);
byte[] buf = newbyte[1024];
while ((len = fis.read(buf)) > 0){
offerDecoder(buf, len);
}
} catch (FileNotFoundException e) {
e.printStackTrace();
} catch(IOException e){
e.printStackTrace();
}
return;
}
/**
* Returns the first codec capable of encoding the specified MIME type, or null if no
* match was found.
*/privatestatic MediaCodecInfo selectCodec(String mimeType) {
int numCodecs = MediaCodecList.getCodecCount();
for (int i = 0; i < numCodecs; i++) {
MediaCodecInfo codecInfo = MediaCodecList.getCodecInfoAt(i);
if (!codecInfo.isEncoder()) {
continue;
}
String[] types = codecInfo.getSupportedTypes();
for (int j = 0; j < types.length; j++) {
if (types[j].equalsIgnoreCase(mimeType)) {
return codecInfo;
}
}
}
returnnull;
}
/**
* Returns a color format that is supported by the codec and by this test code. If no
* match is found, this throws a test failure -- the set of formats known to the test
* should be expanded for new platforms.
*/privatestaticintselectColorFormat(MediaCodecInfo codecInfo, String mimeType) {
MediaCodecInfo.CodecCapabilities capabilities = codecInfo.getCapabilitiesForType(mimeType);
for (int i = 0; i < capabilities.colorFormats.length; i++) {
int colorFormat = capabilities.colorFormats[i];
if (isRecognizedFormat(colorFormat)) {
return colorFormat;
}
}
Log.e(TAG,"couldn't find a good color format for " + codecInfo.getName() + " / " + mimeType);
return0; // not reached
}
/**
* Returns true if this is a color format that this test code understands (i.e. we know how
* to read and generate frames in this format).
*/privatestaticbooleanisRecognizedFormat(int colorFormat) {
switch (colorFormat) {
// these are the formats we know how to handle for this testcase MediaCodecInfo.CodecCapabilities.COLOR_FormatYUV420Planar:
case MediaCodecInfo.CodecCapabilities.COLOR_FormatYUV420PackedPlanar:
case MediaCodecInfo.CodecCapabilities.COLOR_FormatYUV420SemiPlanar:
case MediaCodecInfo.CodecCapabilities.COLOR_FormatYUV420PackedSemiPlanar:
case MediaCodecInfo.CodecCapabilities.COLOR_TI_FormatYUV420PackedSemiPlanar:
returntrue;
default:
returnfalse;
}
}
class H264FileTask extends Thread{
@Overridepublicvoidrun() {
Log.d(TAG,"fall in H264File Read thread");
readH264FromFile();
}
}
class UdpSendTask extends Thread{
private ArrayList mList;
publicvoidinit()
{
try {
socket = new DatagramSocket();
address = InetAddress.getByName(REMOTE_HOST);
} catch (SocketException e) {
e.printStackTrace();
} catch (UnknownHostException e) {
e.printStackTrace();
}
mList = new ArrayList();
}
publicvoidpushBuf(byte[] buf,int len)
{
ByteBuffer buffer = ByteBuffer.allocate(len);
buffer.put(buf,0,len);
mList.add(buffer);
}
@Overridepublicvoidrun() {
Log.d(TAG,"fall in udp send thread");
while(true){
if(mList.size() <= 0){
try {
Thread.sleep(100);
} catch (InterruptedException e) {
e.printStackTrace();
}
}
while(mList.size() > 0){
ByteBuffer sendBuf = mList.get(0);
try {
Log.d(TAG,"send udp packet len:"+sendBuf.capacity());
DatagramPacket packet=new DatagramPacket(sendBuf.array(),sendBuf.capacity(), address,REMOTE_HOST_PORT);
socket.send(packet);
} catch (Throwable t) {
t.printStackTrace();
}
mList.remove(0);
}
}
}
}
}
以上就介绍了Android MediaCodec实现摄像头数据硬编解码全过程,包括了方面的内容,希望对Android开发有兴趣的朋友有所帮助。
本文网址链接:http://www.codes51.com/article/detail_138004.html