Fixing the sheared ("sliding screen") output problem with ffmpeg on Android

This article describes how a sheared/sliding picture produced by ffmpeg rendering on Android was diagnosed and fixed.
On some devices, the ffmpeg-rendered output comes out corrupted.

[Image from the original post]


The original code was as follows:

while (av_read_frame(formatCtx, &packet) >= 0 && !_stop && NULL != window && bInit) {
    // Is this a packet from the video stream?
    if (packet.stream_index == videoStream) {
        // Decode video frame
        avcodec_decode_video2(codecCtx, decodedFrame, &frameFinished, &packet);
        // Did we get a video frame?
        if (frameFinished) {
            // Convert the image from its native format to RGBA
            sws_scale(sws_ctx,
                      (uint8_t const * const *)decodedFrame->data, decodedFrame->linesize,
                      0, codecCtx->height,
                      frameRGBA->data, frameRGBA->linesize);

            if (packet.dts == AV_NOPTS_VALUE && decodedFrame->opaque
                    && *(uint64_t *)decodedFrame->opaque != AV_NOPTS_VALUE) {
                pts = *(uint64_t *)decodedFrame->opaque;
                LOGD("pst1: %d", pts);
            } else if (packet.dts != AV_NOPTS_VALUE) {
                pts = packet.dts;
                LOGD("pst2: %d", pts);
            } else {
                pts = 0;
                LOGD("pst3: %d", pts);
            }
            //pts = av_q2d(codecCtx->time_base) * 1000000.0 * i * 2;
            pts *= 1000;
            //LOGD("debug %d,%d,%f", pts, (long)(av_q2d(codecCtx->time_base) * 1000000.0 * i * 2), av_q2d(codecCtx->time_base));

            if (0 == pts || 0 == baseTime) {
                baseTime = av_gettime() - pts;
                LOGD("BASETIME: %d", baseTime);
            } else {
                waitTime = (baseTime + pts) - av_gettime();
                LOGD("WAITTIME: %d, %d", waitTime, pts);
            }
            //waitTime = (av_q2d(codecCtx->time_base) * 1000.0 - 0.0) * 1000;
            if (waitTime > 0)
                usleep(waitTime);

            if (!_stop) {
                synchronized(lockWindow) {
                    if (!_stop && NULL != window) {
                        // lock the window buffer
                        if (ANativeWindow_lock(pWin, &windowBuffer, NULL) < 0) {
                            LOGE("cannot lock window");
                        } else {
                            // draw the frame on buffer
                            //LOGD("copy buffer %d:%d:%d", width, height, width*height*RGB_SIZE);
                            //LOGD("window buffer: %d:%d:%d", windowBuffer.width, windowBuffer.height, windowBuffer.stride);
                            memcpy(windowBuffer.bits, buffer, width * height * RGB_SIZE);
                            // unlock the window buffer and post it to display
                            ANativeWindow_unlockAndPost(pWin);
                            // count number of frames
                            ++i;
                        }
                    }
                }
            }
        }
    }
}

A closer look showed that some resolutions display correctly while others do not, which suggested the rows were being written with the wrong width. Results of testing different resolutions (O = displays correctly, F = fails):
ORG:   176  x 144    F
X2:    352  x 288    O
X3:    528  x 432    F
X4:    704  x 576    O
X6:    1056 x *      O

X1.1:  193 x 158     F
X1.2:  211 x 172     F
X1.5:  264 x 216     F

X0.5:  88 x 72       F

X2?:   352 x 290     O
X2?:   352 x 600     O
X2?:   352 x 720     O
X4?:   704 x 720     O
X6?:   1056 x 720    O

1280 --- 1312
1056
1184
1248 ok
Looking at the results, the widths that display correctly (352, 704, 1056, 1248) all follow a %64+32 alignment pattern, i.e. they are multiples of 32 pixels, while the failing widths are not. That pointed to a memory-alignment (row-padding) issue. Checking the definition of ANativeWindow_Buffer:

typedef struct ANativeWindow_Buffer {
    // The number of pixels that are shown horizontally.
    int32_t width;
    // The number of pixels that are shown vertically.
    int32_t height;
    // The number of *pixels* that a line in the buffer takes in
    // memory. This may be >= width.
    int32_t stride;
    // The format of the buffer. One of WINDOW_FORMAT_*
    int32_t format;
    // The actual bits.
    void* bits;
    // Do not touch.
    uint32_t reserved[6];
} ANativeWindow_Buffer;
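To confirm this on an affected device, one can lock the window once and log width against stride. A minimal sketch of such a check, reusing the pWin / windowBuffer names from the loop above (LOGD is assumed to wrap __android_log_print):

// One-off probe: lock the window and log width vs. stride.
// If stride > width, each row carries (stride - width) pixels of padding
// that a single flat memcpy would smear across the picture.
if (ANativeWindow_lock(pWin, &windowBuffer, NULL) == 0) {
    LOGD("window: width=%d height=%d stride=%d",
         windowBuffer.width, windowBuffer.height, windowBuffer.stride);
    // nothing was drawn; just release the lock again
    ANativeWindow_unlockAndPost(pWin);
}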



Logging stride and width showed that the picture displays correctly exactly when stride == width. As the comment on stride says, it may be larger than width, so each row in the window buffer can carry padding at the end. The rendering code was adjusted accordingly:

if (packet.stream_index == videoStream) {
    // Decode video frame
    avcodec_decode_video2(codecCtx, decodedFrame, &frameFinished, &packet);
    // Did we get a video frame?
    if (frameFinished) {
        // Convert the image from its native format to RGBA
        sws_scale(sws_ctx,
                  (uint8_t const * const *)decodedFrame->data, decodedFrame->linesize,
                  0, codecCtx->height,
                  frameRGBA->data, frameRGBA->linesize);

        if (packet.dts == AV_NOPTS_VALUE && decodedFrame->opaque
                && *(uint64_t *)decodedFrame->opaque != AV_NOPTS_VALUE) {
            pts = *(uint64_t *)decodedFrame->opaque;
            LOGD("pst1: %d", pts);
        } else if (packet.dts != AV_NOPTS_VALUE) {
            pts = packet.dts;
            LOGD("pst2: %d", pts);
        } else {
            pts = 0;
            LOGD("pst3: %d", pts);
        }
        //pts = av_q2d(codecCtx->time_base) * 1000000.0 * i * 2;
        pts *= 1000;
        //LOGD("debug %d,%d,%f", pts, (long)(av_q2d(codecCtx->time_base) * 1000000.0 * i * 2), av_q2d(codecCtx->time_base));

        if (0 == pts || 0 == baseTime) {
            baseTime = av_gettime() - pts;
            LOGD("BASETIME: %d", baseTime);
        } else {
            waitTime = (baseTime + pts) - av_gettime();
            LOGD("WAITTIME: %d, %d", waitTime, pts);
        }
        //waitTime = (av_q2d(codecCtx->time_base) * 1000.0 - 0.0) * 1000;
        if (waitTime > 0)
            usleep(waitTime);

        if (!_stop) {
            synchronized(lockWindow) {
                if (!_stop && NULL != window) {
                    // lock the window buffer
                    if (ANativeWindow_lock(pWin, &windowBuffer, NULL) < 0) {
                        LOGE("cannot lock window");
                    } else {
                        // draw the frame on buffer
                        //memcpy(windowBuffer.bits, buffer, width * height * RGB_SIZE);
                        if (windowBuffer.width >= windowBuffer.stride) {
                            // no row padding: copy the whole frame in one go
                            memcpy(windowBuffer.bits, buffer, width * height * RGB_SIZE);
                        } else {
                            // stride > width: copy row by row, skipping the
                            // (stride - width) padding pixels at the end of
                            // each destination row
                            for (int i = 0; i < height; ++i)
                                memcpy(windowBuffer.bits + windowBuffer.stride * i * RGB_SIZE,
                                       buffer + width * i * RGB_SIZE,
                                       width * RGB_SIZE);
                        }
                        // unlock the window buffer and post it to display
                        ANativeWindow_unlockAndPost(pWin);
                        // count number of frames
                        ++i;
                    }
                }
            }
        }
    }
}


Copying the frame line by line, and skipping the padding bytes at the end of each destination row, fixes the sliding/sheared output.
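A possible follow-up, not part of the original fix: the intermediate row-by-row copy can be avoided by letting sws_scale write straight into the locked window buffer, passing the window's stride as the destination linesize. A minimal sketch, assuming sws_ctx converts to AV_PIX_FMT_RGBA, the window format is WINDOW_FORMAT_RGBA_8888 and RGB_SIZE is 4:

// Sketch: convert directly into the window buffer, honouring its stride.
if (ANativeWindow_lock(pWin, &windowBuffer, NULL) == 0) {
    uint8_t *dstData[4]     = { (uint8_t *)windowBuffer.bits, NULL, NULL, NULL };
    int      dstLinesize[4] = { windowBuffer.stride * RGB_SIZE, 0, 0, 0 };  // stride is in pixels

    sws_scale(sws_ctx,
              (uint8_t const * const *)decodedFrame->data, decodedFrame->linesize,
              0, codecCtx->height,
              dstData, dstLinesize);

    ANativeWindow_unlockAndPost(pWin);
}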
[Image from the original post]