问题描述
我一直在尝试用 Android 的 MediaCodec 解码一个 H264 编码的视频文件，并把解码器的输出显示到 Surface 上，但运行应用时只显示黑色画面，DDMS 的 logcat 中显示解码器超时。
我先把文件解析成有效的帧［先读 4 个字节，表示下一帧的长度，然后读取该长度的字节作为帧数据，再读 4 个字节获取下一帧的长度，依此类推］，然后在循环中把这些帧传给解码器。配置解码器时，我把从编码文件中硬编码得到的 SPS 和 PPS 直接放进 mediaFormat［这些值是用 hexedit 打开文件获得的］。我没有设置 presentationTimeUS，直接使用 0。现在解码器的 dequeueInputBuffer() 方法返回 >=0 的值，但 dequeueOutputBuffer() 只返回 MediaCodec.INFO_TRY_AGAIN_LATER，这最终意味着解码器超时。
请看看我的代码，并请帮帮我。
先谢谢了。
下面是文件网址 -
下面是我的code -
进口的java.io.File;
进口java.io.FileInputStream中;
进口java.io.FileNotFoundException;
进口java.io.IOException异常;
进口的java.io.InputStream;
进口java.nio.ByteBuffer中;
进口的java.util.ArrayList;进口android.app.Activity;
进口android.media.Media codeC;
进口android.media.Media codec.BufferInfo;
进口android.media.MediaFormat;
进口android.os.Bundle;
进口android.os.Environment;
进口android.os.Handler;
进口android.util.Log;
进口android.view.Surface;
进口android.view.SurfaceHolder;
进口android.view.SurfaceView;
进口android.widget.Toast;公共类MainActivity扩展活动实现SurfaceHolder.Callback
{ 私有静态最后弦乐文件路径= Environment.getExternalStorageDirectory()+/H264Data1.264 // +/video_en$c$cd.263\";//\"/video_en$c$cd.264
私人PlayerThread MPLAYER = NULL;
处理程序处理程序= NULL;
公共静态的byte [] SPS = NULL;
公共静态的byte [] PPS = NULL;
公共静态的ArrayList<车架和GT;帧= NULL;
公共静态INT frameID = 0;
公共静态布尔incompleteLastFrame = FALSE;
文件连接codedFile =新的文件(文件路径);
InputStream为; 私有静态类框架
{
公众诠释身份证;
公众的byte [] frameData; 公共帧(INT ID)
{
this.id = ID;
}
} @覆盖
保护无效的onCreate(捆绑savedInstanceState)
{
super.onCreate(savedInstanceState);
尝试
{
是=新的FileInputStream(EN codedFile);
字节[]数据=新的字节[(INT)连接codedFile.length()]; 的System.out.println(文件总大小:恩+ codedFile.length());
frameID = 0;
帧=新的ArrayList<车架和GT;(); 尝试{
如果((is.read(数据,0,(int)的EN codedFile.length()))!= -1)
{
getFramesFromData(数据);
Toast.makeText(getApplicationContext(),。帧处理完的帧的数目:+ frames.size(),Toast.LENGTH_SHORT).show(); SurfaceView SV =新SurfaceView(本);
处理器=新的处理程序();
。sv.getHolder()的addCallback(本);
的setContentView(SV);
} }赶上(IOException异常五){
e.printStackTrace();
}
}赶上(FileNotFoundException异常五){
e.printStackTrace();
}
}
公共静态无效getFramesFromData(字节[]数据)
{
INT DATALENGTH = data.length;
INT帧长= 0;
frameID = 0; 如果(data.length< = 0)回报; //在这个循环每次迭代指示世代新的一帧
的for(int i = 0;)
{
如果(ⅰ+3 = DATALENGTH)返回; 帧长=((数据[1] - 安培;为0xFF)LT;< 24)
+((数据第[i + 1]&安培; 0xff的)下;&下; 16)
+((数据第[i + 2]&放大器; 0xff的)下;&下; 8)
+(数据[1 + 3]安培; 0xFF的); 我+ = 4; 如果(帧长大于0)
{
如果(1 +帧长-1> = DATALENGTH)回报;
帧一帧=新的帧(frameID);
frame.frameData =新的字节[帧长]
System.arraycopy(数据,我,frame.frameData,0,帧长);
frames.add(架);
frameID ++;
我+ =帧长;
} }
} @覆盖
公共无效surfaceCreated(SurfaceHolder持有人)
{
Log.d(德codeActivity,在surfaceCreated);
} @覆盖
公共无效surfaceChanged(SurfaceHolder架,INT格式,诠释的宽度,高度INT)
{
Log.d(德codeActivity,在surfaceChanged);
如果(MPLAYER == NULL)
{
Toast.makeText(getApplicationContext(),在surfaceChanged创造playerthread,Toast.LENGTH_SHORT).show();
MPLAYER =新PlayerThread(holder.getSurface());
mPlayer.start();
}
} @覆盖
公共无效surfaceDestroyed(SurfaceHolder持有人)
{
如果(MPLAYER!= NULL)
{
mPlayer.interrupt();
}
} 私有类PlayerThread继承Thread
{
//私人MediaExtractor提取;
私营媒体codeC德codeR;
私人表面的表面; 公共PlayerThread(表面表面)
{
this.surface =表面;
} @覆盖
公共无效的run()
{
handler.post(新的Runnable()
{ @覆盖
公共无效的run()
{ 德codeR =媒体codec.createDe coderByType(视频/ AVC);
MediaFormat mediaFormat = MediaFormat.createVideoFormat(视频/ AVC,320,240); 字节[] header_sps = {0×00,0×00,0×00,0×01,0x67,0x42后,(字节)0x80的,的0x0C(字节)0xE4,0X40(字节)0XA0,(字节)是0xFD,0×00,(字节)0xDA,0x14的,0×26,(字节)0XA0};
字节[] = header_pps {0×00,0×00,0×00,0×01,为0x68(字节)0xCE,0x38(字节)0x80的};
mediaFormat.setByteBuffer(CSD-0,ByteBuffer.wrap(header_sps));
mediaFormat.setByteBuffer(CSD-1,ByteBuffer.wrap(header_pps)); 德coder.configure(mediaFormat,表面/ * *表面/,空/ * *加密/ 0 / * *标志/); 如果(德codeR == NULL)
{
Log.e(德codeActivity,无法找到视频信息!);
返回;
} 德coder.start();
Log.d(德codeActivity,德coder.start()被称为); ByteBuffer的[] inputBuffers =去coder.getInputBuffers();
ByteBuffer的[] outputBuffers =去coder.getOutputBuffers();
长startMs = System.currentTimeMillis的();
INT I = 0;
而(!Thread.interrupted())
{
如果(ⅰ&GT = frames.size())
打破;
字节[]数据=新的字节[frames.get(我).frameData.length]
System.arraycopy(frames.get㈠.frameData,0,数据,0,frames.get(ⅰ).frameData.length);
Log.d(德codeActivity,我=+ I +DATALENGTH =+ frames.get(我).frameData.length); INT inIndex = 0;
而((inIndex =去coder.dequeueInputBuffer(1))小于0)
; 如果(inIndex> = 0)
{
ByteBuffer的缓冲= inputBuffers [inIndex]
buffer.clear();
INT采样大小= data.length;
如果(采样大小℃,)
{
Log.d(德codeActivity,INPUTBUFFER BUFFER_FLAG_END_OF_STREAM);
德coder.queueInputBuffer(inIndex,0,0,0,媒体codec.BUFFER_FLAG_END_OF_STREAM);
打破;
}
其他
{
Log.d(德codeActivity,试样尺寸:+采样大小); 缓冲= ByteBuffer.allocate(data.length);
buffer.put(数据);
德coder.queueInputBuffer(inIndex,0,采样大小,0,0);
}
BufferInfo信息=新BufferInfo();
INT outIndex =去coder.dequeueOutputBuffer(资讯,100000); 开关(outIndex)
{
案例媒体codec.INFO_OUTPUT_BUFFERS_CHANGED:
Log.d(德codeActivity,INFO_OUTPUT_BUFFERS_CHANGED);
outputBuffers =去coder.getOutputBuffers();
打破;
案例媒体codec.INFO_OUTPUT_FORMAT_CHANGED:
Log.d(德codeActivity,新格式+德coder.getOutputFormat()); 打破;
案例媒体codec.INFO_TRY_AGAIN_LATER:
Log.d(德codeActivity,dequeueOutputBuffer超时!);
尝试{
睡眠(100);
}赶上(InterruptedException的E){
// TODO自动生成catch块
e.printStackTrace();
}
打破;
默认:
ByteBuffer的outbuffer = outputBuffers [outIndex] Log.d(德codeActivity,我们不能用这个缓冲区,但使它由于API的限制,+ outbuffer); / *而(资讯presentationTimeUs / 1000方式> System.currentTimeMillis的() - startMs)
{
尝试
{
睡眠(10);
}赶上(InterruptedException的E){
e.printStackTrace();
打破;
}
} * / 德coder.releaseOutputBuffer(outIndex,真);
打破;
}
我++;
//所有德codeD架已经呈现,我们现在可以停止播放
/ *如果((info.flags&安培;媒体codec.BUFFER_FLAG_END_OF_STREAM)= 0)
{
Log.d(德codeActivity,OutputBuffer中BUFFER_FLAG_END_OF_STREAM);
打破;
} * / }
} 德coder.stop();
德coder.release();
}
});
}
}
}
这部分看起来错误的:
ByteBuffer的缓冲= inputBuffers [inIndex]
[...]
缓冲= ByteBuffer.allocate(data.length);
buffer.put(数据);
德coder.queueInputBuffer(inIndex,0,采样大小,0,0);
你得到输入缓冲区,然后有利于你自己分配一个缓冲区的忽略它。替换)的
。 ByteBuffer.allocate()
通话buffer.clear(
你在做什么类似于支票传中的,但后者只是抱着整个事情在内存中,而不是它的序列化到磁盘。看看 checkVideoData()
。
您可能会想采用序列化块标志出的数据的测试的做法。如果你这样做,你就不需要专门治疗SPS / PPS头 - 只是把它写像其他块流(它恰好有 codeC_CONFIG
标记集)。这也将是一个好主意,序列化的时间戳,除非输入视频是保证有一个已知的,不变化的帧速率。
I have been trying to decode a video file which is encoded via H264 encoding with Android's MediaCodec and tried to put the output of the decoder to a surface, but when I run the app it shows a black surface and in DDMS logcat I see that decoder timed out.
I have parsed the file into valid frames first [reading 4 bytes first which indicates the length of the upcoming frame and then read length amount bytes which indicates the frame, then again reading 4 bytes for the length of the next frame and so on.] then passed the frames to the decoder in a loop. When configuring the decoder, I passed sps & pps in the mediaFormat by hardcoding values directly from the encoded file [I got those values by opening that file with hexedit]. I did not set any presentationTimeUS and used 0 for it. Now the decoder's dequeueInputBuffer() method returns >=0 value but dequeueOutputBuffer() only returns MediaCodec.INFO_TRY_AGAIN_LATER which eventually means the decoder is being timed out.
Please have a look at my code and please help.
Thanks in advance.
Here is the file url - https://drive.google.com/file/d/0B39qOyEnXlR8Z3FSb2lzTWlORUU/edit?usp=sharing
Here is my code -
import java.io.File;
import java.io.FileInputStream;
import java.io.FileNotFoundException;
import java.io.IOException;
import java.io.InputStream;
import java.nio.ByteBuffer;
import java.util.ArrayList;
import android.app.Activity;
import android.media.MediaCodec;
import android.media.MediaCodec.BufferInfo;
import android.media.MediaFormat;
import android.os.Bundle;
import android.os.Environment;
import android.os.Handler;
import android.util.Log;
import android.view.Surface;
import android.view.SurfaceHolder;
import android.view.SurfaceView;
import android.widget.Toast;
public class MainActivity extends Activity implements SurfaceHolder.Callback
{
private static final String filePath = Environment.getExternalStorageDirectory()+ "/H264Data1.264"; // + "/video_encoded.263";//"/video_encoded.264";
private PlayerThread mPlayer = null;
Handler handler = null;
public static byte[] SPS = null;
public static byte[] PPS = null;
public static ArrayList<Frame> frames = null;
public static int frameID = 0;
public static boolean incompleteLastFrame = false;
File encodedFile = new File(filePath);
InputStream is;
private static class Frame
{
public int id;
public byte[] frameData;
public Frame(int id)
{
this.id = id;
}
}
@Override
protected void onCreate(Bundle savedInstanceState)
{
super.onCreate(savedInstanceState);
try
{
is = new FileInputStream(encodedFile);
byte[] data = new byte[(int)encodedFile.length()];
System.out.println("Total file size : " + encodedFile.length());
frameID = 0;
frames = new ArrayList<Frame>();
try {
if ((is.read(data, 0, (int)encodedFile.length())) != -1)
{
getFramesFromData(data);
Toast.makeText(getApplicationContext(), "frames processing finished. number of frames : " + frames.size(), Toast.LENGTH_SHORT).show();
SurfaceView sv = new SurfaceView(this);
handler = new Handler();
sv.getHolder().addCallback(this);
setContentView(sv);
}
} catch (IOException e) {
e.printStackTrace();
}
} catch (FileNotFoundException e) {
e.printStackTrace();
}
}
public static void getFramesFromData(byte[] data)
{
int dataLength = data.length;
int frameLength = 0;
frameID = 0;
if(data.length <= 0) return;
// each iteration in this loop indicates generation of a new frame
for(int i = 0; ; )
{
if(i+3 >= dataLength) return;
frameLength = ((data[i] & 0xff) << 24)
+ ((data[i + 1] & 0xff) << 16)
+ ((data[i + 2] & 0xff) << 8)
+ (data[i + 3] & 0xff);
i += 4;
if(frameLength > 0)
{
if(i+frameLength-1 >= dataLength) return;
Frame frame = new Frame(frameID);
frame.frameData = new byte[frameLength];
System.arraycopy(data, i, frame.frameData, 0, frameLength);
frames.add(frame);
frameID++;
i += frameLength;
}
}
}
@Override
public void surfaceCreated(SurfaceHolder holder)
{
Log.d("DecodeActivity", "in surfaceCreated");
}
@Override
public void surfaceChanged(SurfaceHolder holder, int format, int width, int height)
{
Log.d("DecodeActivity", "in surfaceChanged");
if (mPlayer == null)
{
Toast.makeText(getApplicationContext(), "in surfaceChanged. creating playerthread", Toast.LENGTH_SHORT).show();
mPlayer = new PlayerThread(holder.getSurface());
mPlayer.start();
}
}
@Override
public void surfaceDestroyed(SurfaceHolder holder)
{
if (mPlayer != null)
{
mPlayer.interrupt();
}
}
private class PlayerThread extends Thread
{
//private MediaExtractor extractor;
private MediaCodec decoder;
private Surface surface;
public PlayerThread(Surface surface)
{
this.surface = surface;
}
@Override
public void run()
{
handler.post(new Runnable()
{
@Override
public void run()
{
decoder = MediaCodec.createDecoderByType("video/avc");
MediaFormat mediaFormat = MediaFormat.createVideoFormat("video/avc", 320, 240);
byte[] header_sps = { 0x00, 0x00, 0x00, 0x01, 0x67, 0x42, (byte)0x80, 0x0C, (byte)0xE4, 0x40, (byte)0xA0, (byte)0xFD, 0x00, (byte)0xDA, 0x14, 0x26, (byte)0xA0 };
byte[] header_pps = {0x00, 0x00, 0x00, 0x01, 0x68, (byte)0xCE, 0x38, (byte)0x80 };
mediaFormat.setByteBuffer("csd-0", ByteBuffer.wrap(header_sps));
mediaFormat.setByteBuffer("csd-1", ByteBuffer.wrap(header_pps));
decoder.configure(mediaFormat, surface /* surface */, null /* crypto */, 0 /* flags */);
if (decoder == null)
{
Log.e("DecodeActivity", "Can't find video info!");
return;
}
decoder.start();
Log.d("DecodeActivity", "decoder.start() called");
ByteBuffer[] inputBuffers = decoder.getInputBuffers();
ByteBuffer[] outputBuffers = decoder.getOutputBuffers();
long startMs = System.currentTimeMillis();
int i = 0;
while(!Thread.interrupted())
{
if(i >= frames.size())
break;
byte[] data = new byte[frames.get(i).frameData.length];
System.arraycopy(frames.get(i).frameData, 0, data, 0, frames.get(i).frameData.length);
Log.d("DecodeActivity", "i = " + i + " dataLength = " + frames.get(i).frameData.length);
int inIndex = 0;
while ((inIndex = decoder.dequeueInputBuffer(1)) < 0)
;
if (inIndex >= 0)
{
ByteBuffer buffer = inputBuffers[inIndex];
buffer.clear();
int sampleSize = data.length;
if (sampleSize < 0)
{
Log.d("DecodeActivity", "InputBuffer BUFFER_FLAG_END_OF_STREAM");
decoder.queueInputBuffer(inIndex, 0, 0, 0, MediaCodec.BUFFER_FLAG_END_OF_STREAM);
break;
}
else
{
Log.d("DecodeActivity", "sample size: " + sampleSize);
buffer = ByteBuffer.allocate(data.length);
buffer.put(data);
decoder.queueInputBuffer(inIndex, 0, sampleSize, 0, 0);
}
BufferInfo info = new BufferInfo();
int outIndex = decoder.dequeueOutputBuffer(info, 100000);
switch (outIndex)
{
case MediaCodec.INFO_OUTPUT_BUFFERS_CHANGED:
Log.d("DecodeActivity", "INFO_OUTPUT_BUFFERS_CHANGED");
outputBuffers = decoder.getOutputBuffers();
break;
case MediaCodec.INFO_OUTPUT_FORMAT_CHANGED:
Log.d("DecodeActivity", "New format " + decoder.getOutputFormat());
break;
case MediaCodec.INFO_TRY_AGAIN_LATER:
Log.d("DecodeActivity", "dequeueOutputBuffer timed out!");
try {
sleep(100);
} catch (InterruptedException e) {
// TODO Auto-generated catch block
e.printStackTrace();
}
break;
default:
ByteBuffer outbuffer = outputBuffers[outIndex];
Log.d("DecodeActivity", "We can't use this buffer but render it due to the API limit, " + outbuffer);
/*while (info.presentationTimeUs / 1000 > System.currentTimeMillis() - startMs)
{
try
{
sleep(10);
} catch (InterruptedException e) {
e.printStackTrace();
break;
}
}*/
decoder.releaseOutputBuffer(outIndex, true);
break;
}
i++;
// All decoded frames have been rendered, we can stop playing now
/*if ((info.flags & MediaCodec.BUFFER_FLAG_END_OF_STREAM) != 0)
{
Log.d("DecodeActivity", "OutputBuffer BUFFER_FLAG_END_OF_STREAM");
break;
}*/
}
}
decoder.stop();
decoder.release();
}
});
}
}
}
This part looks wrong:
ByteBuffer buffer = inputBuffers[inIndex];
[...]
buffer = ByteBuffer.allocate(data.length);
buffer.put(data);
decoder.queueInputBuffer(inIndex, 0, sampleSize, 0, 0);
You're getting the input buffer, and then ignoring it in favor of a buffer you're allocating yourself. Replace the ByteBuffer.allocate()
call with buffer.clear()
.
What you're doing is pretty similar to the check pass in DecodeEditEncodeTest, except that the latter is just holding the whole thing in memory instead of serializing it to disk. Take a look at checkVideoData()
.
You may want to adopt the test's approach of serializing the chunk flags out with the data. If you did that, you wouldn't need to treat the SPS/PPS header specially -- just write it to the stream like any other chunk (it just happens to have the CODEC_CONFIG
flag set). It would also be a good idea to serialize the time stamp unless the input video is guaranteed to have a known, non-varying frame rate.
这篇关于 MediaCodec 解码器在解码 H264 文件时总是超时的文章就介绍到这了，希望我们推荐的答案对大家有所帮助，也希望大家多多支持！