
From here on we get into deep water: compiling C/C++ libraries with the NDK, writing native code, and building the app is genuinely tiring work. The main content of this article is using FFmpeg to decode a video file and display the playback on Android.

Compiling FFmpeg

For compilation, refer to the article on porting FFmpeg to the Android platform; the points that need attention are the FFmpeg version and the build script.

Writing the Android code

A local video is picked from a spinner; the play button calls mPlay(), which runs the playback on a child thread so the UI thread is not blocked.

player = new YoungPlayer();

public void mPlay(View btn) {
    String video = sp_video.getSelectedItem().toString();
    final String input = new File(Environment.getExternalStorageDirectory(), video).getAbsolutePath();
    // The Surface is passed into the native function, which draws onto it
    final Surface surface = videoView.getHolder().getSurface();
    new Thread(new Runnable() {
        public void run() {
            player.render(input, surface);
        }
    }).start();
}

The videoView that is passed in is a custom SurfaceView, defined as follows:

public class VideoView extends SurfaceView {

    public VideoView(Context context) {
        super(context);
        init();
    }

    public VideoView(Context context, AttributeSet attrs) {
        super(context, attrs);
        init();
    }

    public VideoView(Context context, AttributeSet attrs, int defStyle) {
        super(context, attrs, defStyle);
        init();
    }

    private void init() {
        // Initialize the pixel format the SurfaceView draws with
        SurfaceHolder holder = getHolder();
        holder.setFormat(PixelFormat.RGBA_8888);
    }
}

A proxy class declares the related native actions:

public class YoungPlayer {
    public native void render(String input, Surface surface);
    public native void sound(String input, String output);
    public native void play(String input, Surface surface);

    static {
        System.loadLibrary("avutil-54");
        System.loadLibrary("swresample-1");
        System.loadLibrary("avcodec-56");
        System.loadLibrary("avformat-56");
        System.loadLibrary("swscale-3");
        System.loadLibrary("postproc-53");
        System.loadLibrary("avfilter-5");
        System.loadLibrary("avdevice-56");
        System.loadLibrary("yuv");
        System.loadLibrary("myffmpeg");
    }
}

At this point only the native method declarations exist in the proxy class YoungPlayer; the corresponding JNI header file is generated with the javah command (its usage, and whether it ships at all, differs between JDK versions).
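For reference, here is a minimal sketch of what the generated header should contain, assuming the package is com.yang.ffmpegDemo, as the #include in the C file below suggests:

/* Sketch of com_yang_ffmpegDemo_YoungPlayer.h as javah would generate it
   (package name assumed from the #include in the C file). */
#include <jni.h>

#ifndef _Included_com_yang_ffmpegDemo_YoungPlayer
#define _Included_com_yang_ffmpegDemo_YoungPlayer
#ifdef __cplusplus
extern "C" {
#endif

JNIEXPORT void JNICALL Java_com_yang_ffmpegDemo_YoungPlayer_render
  (JNIEnv *, jobject, jstring, jobject);

JNIEXPORT void JNICALL Java_com_yang_ffmpegDemo_YoungPlayer_sound
  (JNIEnv *, jobject, jstring, jstring);

JNIEXPORT void JNICALL Java_com_yang_ffmpegDemo_YoungPlayer_play
  (JNIEnv *, jobject, jstring, jobject);

#ifdef __cplusplus
}
#endif
#endif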

Writing the C/C++ decoding implementation

video_player.c decodes the video file step by step (the LOG output can be ignored); the typical decoding flow is the following:
(figure: typical FFmpeg decoding flow: register components, open the input, read stream info, find and open the decoder, then read, decode, convert, and draw each frame)

#include"com_yang_ffmpegDemo_YoungPlayer.h"#include <stdlib.h>#include <stdio.h>#include <unistd.h>#include <android/log.h>#include <android/native_window_jni.h>#include <android/native_window.h>#define LOGI(FORMAT,...) __android_log_print(ANDROID_LOG_INFO,"yang",FORMAT,##__VA_ARGS__);#define LOGE(FORMAT,...) __android_log_print(ANDROID_LOG_ERROR,"yang",FORMAT,##__VA_ARGS__);#include"libyuv.h"//封装格式#include"libavformat/avformat.h"//解码#include"libavcodec/avcodec.h"//缩放#include"libswscale/swscale.h"JNIEXPORT void JNICALL Java_com_td_youngplayer_YoungPlayer_render
(JNIEnv *env, jobject jobj, jstring input_jstr, jobject surface){
    const char* input_cstr = (*env)->GetStringUTFChars(env, input_jstr, NULL);
    // 1. Register all components
    av_register_all();
    // Container format context
    AVFormatContext *pFormatCtx = avformat_alloc_context();
    // 2. Open the input video file
    if (avformat_open_input(&pFormatCtx, input_cstr, NULL, NULL) != 0) {
        LOGE("%s", "Failed to open the input video file");
        return;
    }
    // 3. Get the stream information
    if (avformat_find_stream_info(pFormatCtx, NULL) < 0) {
        LOGE("%s", "Failed to get the stream information");
        return;
    }
    int64_t duration = pFormatCtx->duration;
    int bit_rate = pFormatCtx->bit_rate;
    LOGI("duration %lld:%lld", duration / 1000000 / 60, duration / 1000000 % 60);
    LOGI("bit rate %d kbps", bit_rate / 1000);
    // To decode, find the index of the video AVStream in pFormatCtx->streams
    int video_stream_idx = -1;
    int i = 0;
    for (; i < pFormatCtx->nb_streams; i++) {
        // Check the codec type to find the video stream
        if (pFormatCtx->streams[i]->codec->codec_type == AVMEDIA_TYPE_VIDEO) {
            video_stream_idx = i;
            break;
        }
    }
    // 4. Get the video decoder
    AVCodecContext *pCodeCtx = pFormatCtx->streams[video_stream_idx]->codec;
    AVStream *avstream = pFormatCtx->streams[video_stream_idx];
    AVCodec *pCodec = avcodec_find_decoder(pCodeCtx->codec_id);
    if (pCodec == NULL) {
        LOGE("%s", "Decoder not found");
        return;
    }
    // 5. Open the decoder
    if (avcodec_open2(pCodeCtx, pCodec, NULL) < 0) {
        LOGE("%s", "Failed to open the decoder");
        return;
    }
    // Compressed (encoded) data
    AVPacket *packet = (AVPacket *)av_malloc(sizeof(AVPacket));
    // Pixel data (decoded frames)
    AVFrame *yuv_frame = av_frame_alloc();
    AVFrame *rgb_frame = av_frame_alloc();
    // Native drawing: the window
    ANativeWindow *nativeWindow = ANativeWindow_fromSurface(env, surface);
    // Buffer used while drawing
    ANativeWindow_Buffer outBuffer;
    AVRational framerate = pCodeCtx->framerate;
    AVRational time_base = pCodeCtx->time_base;
    // Sample aspect ratio
    AVRational sample_aspect_ratio = pCodeCtx->sample_aspect_ratio;
    // Width and height of the player view
    int32_t width = ANativeWindow_getWidth(nativeWindow);
    int32_t height = ANativeWindow_getHeight(nativeWindow);
    int showWidth = pCodeCtx->width;
    int showHeight = pCodeCtx->height;
    int a = sample_aspect_ratio.num;
    int b = sample_aspect_ratio.den;
    //if(width < height * showWidth / showHeight){
    //    showHeight = width * showHeight / showWidth;
    //}else{
    //    showHeight = height;
    //}
    //showHeight = 300;
    //showWidth = width;
    //showWidth = 640;
    //showHeight = 360;
    LOGI("showWidth %d", showWidth);
    LOGI("showHeight %d", showHeight);
    int len, got_frame, framecount = 0;
    // 6. Read the compressed video data (AVPacket) frame by frame
    while (av_read_frame(pFormatCtx, packet) >= 0) {
        if (packet->stream_index == video_stream_idx) {
            // Decode AVPacket -> AVFrame
            len = avcodec_decode_video2(pCodeCtx, yuv_frame, &got_frame, packet);
            // got_frame is zero if no frame could be decompressed, non-zero otherwise
            if (got_frame) {
                LOGI("packet size %d", packet->size);
                LOGI("packet pts %lld", packet->pts);
                LOGI("width %d", yuv_frame->width);
                LOGI("height %d", yuv_frame->height);
                LOGI("view width %d", width);
                LOGI("view height %d", height);
                LOGI("packet display time (ms) %lld", packet->pts * 1000 * (avstream->time_base.num) / (avstream->time_base.den));
                LOGI("avstream->time_base.den %d", avstream->time_base.den);
                LOGI("avstream->time_base.num %d", avstream->time_base.num);
                LOGI("decoded pixel format %d", yuv_frame->format);
                LOGI("key frame %d", yuv_frame->key_frame);
                LOGI("sample aspect ratio den %d", yuv_frame->sample_aspect_ratio.den);
                LOGI("coded picture number %d", yuv_frame->coded_picture_number);
                LOGI("display picture number %d", yuv_frame->display_picture_number);
                LOGI("packet dts %lld", packet->dts / 1000);
                LOGI("decoding frame %d: start", framecount);
                LOGI("framerate num %d", framerate.num);
                LOGI("time_base num %d", time_base.num);
                LOGI("time_base den %d", time_base.den);
                // lock: set the buffer geometry (width, height, pixel format)
                ANativeWindow_setBuffersGeometry(nativeWindow, showWidth, showHeight, WINDOW_FORMAT_RGBA_8888);
                ANativeWindow_lock(nativeWindow, &outBuffer, NULL);
                // Bind rgb_frame's properties (pixel format, width, height) to the buffer:
                // rgb_frame's buffer and outBuffer.bits are the same block of memory
                avpicture_fill((AVPicture *)rgb_frame, outBuffer.bits, AV_PIX_FMT_RGBA, showWidth, showHeight);
                // YUV -> RGBA_8888
                I420ToARGB(yuv_frame->data[0], yuv_frame->linesize[0],
                        yuv_frame->data[2], yuv_frame->linesize[2],
                        yuv_frame->data[1], yuv_frame->linesize[1],
                        rgb_frame->data[0], rgb_frame->linesize[0],
                        showWidth, showHeight);
                // unlock
                ANativeWindow_unlockAndPost(nativeWindow);
                LOGI("decoding frame %d: done", framecount++);
                //usleep(1000 * 10);
            }
        }
        av_free_packet(packet);
    }
    ANativeWindow_release(nativeWindow);
    av_free(packet);
    av_frame_free(&yuv_frame);
    av_frame_free(&rgb_frame);
    avcodec_close(pCodeCtx);
    avformat_close_input(&pFormatCtx); // closes the input and frees the context
    (*env)->ReleaseStringUTFChars(env, input_jstr, input_cstr);
}

Here yuv_frame is converted to rgb_frame with libyuv's I420ToARGB rather than FFmpeg's own sws_scale; on ARM the NEON-optimized libyuv converters are generally faster than the following approach:

sws_scale(sws_ctx, (uint8_t const * const *) pFrame->data, pFrame->linesize, 0, pCodecCtx->height,
 pFrameRGBA->data, pFrameRGBA->linesize);
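For completeness, here is a minimal sketch (not part of the original demo) of creating the SwsContext and destination frame that sws_scale() expects; pCodecCtx and pFrameRGBA are the names used in the snippet above:

// Conversion context: source size and pixel format to the same size in RGBA
struct SwsContext *sws_ctx = sws_getContext(
        pCodecCtx->width, pCodecCtx->height, pCodecCtx->pix_fmt,
        pCodecCtx->width, pCodecCtx->height, AV_PIX_FMT_RGBA,
        SWS_BILINEAR, NULL, NULL, NULL);
AVFrame *pFrameRGBA = av_frame_alloc();
// Bind pFrameRGBA to outBuffer.bits with avpicture_fill(), exactly as
// rgb_frame is bound in render(); free with sws_freeContext() when done.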

The following call also saves a copy step from the RGB frame's own memory into outBuffer: avpicture_fill() points rgb_frame's data at outBuffer.bits, so both use the same block of memory and the conversion writes directly into the window buffer.

avpicture_fill((AVPicture *)rgb_frame, outBuffer.bits, AV_PIX_FMT_RGBA, showWidth, showHeight);
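For contrast, here is a rough sketch of the copy that this trick eliminates: giving rgb_frame its own buffer and copying the converted pixels into the window buffer afterwards. rgb_buf is a hypothetical name, and a real copy would have to go row by row, since outBuffer.stride is often wider than showWidth:

// Hypothetical separate-buffer version: allocate, bind, convert, then copy.
int rgb_size = avpicture_get_size(AV_PIX_FMT_RGBA, showWidth, showHeight);
uint8_t *rgb_buf = (uint8_t *)av_malloc(rgb_size);
avpicture_fill((AVPicture *)rgb_frame, rgb_buf, AV_PIX_FMT_RGBA, showWidth, showHeight);
// ... I420ToARGB(...) into rgb_frame as before ...
memcpy(outBuffer.bits, rgb_buf, rgb_size); // needs <string.h>; assumes stride == width
av_free(rgb_buf);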

Finally, complete the Android.mk so that the wrapper library is built and linked against the prebuilt FFmpeg and libyuv libraries, then compile and run.
(figure: Android.mk configuration)

demo



