FFmpeg Android Decoder

❶ How do you decode video frames with FFmpeg on Android?
Solution: start by defining the player's data structures. The header of SDL_Lesson.c below declares a PacketQueue for demuxed packets, a VideoPicture for decoded images, and a VideoState that carries the whole player state:
/*
 * SDL_Lesson.c
 *
 * Created on: Aug 12, 2014
 * Author: clarck
 */
#include <jni.h>
#include <android/native_window_jni.h>
#include "SDL.h"
#include "SDL_thread.h"
#include "SDL_events.h"
#include "../include/logger.h"
#include "../ffmpeg/include/libavcodec/avcodec.h"
#include "../ffmpeg/include/libavformat/avformat.h"
#include "../ffmpeg/include/libavutil/pixfmt.h"
#include "../ffmpeg/include/libswscale/swscale.h"
#include "../ffmpeg/include/libswresample/swresample.h"

#define SDL_AUDIO_BUFFER_SIZE 1024
#define MAX_AUDIO_SIZE (5 * 16 * 1024)
#define MAX_VIDEO_SIZE (5 * 256 * 1024)
#define FF_ALLOC_EVENT (SDL_USEREVENT)
#define FF_REFRESH_EVENT (SDL_USEREVENT + 1)
#define FF_QUIT_EVENT (SDL_USEREVENT + 2)
#define VIDEO_PICTURE_QUEUE_SIZE 1
#define AVCODEC_MAX_AUDIO_FRAME_SIZE 192000 /* 1 second of 48khz 32bit audio */

typedef struct PacketQueue {
    AVPacketList *first_pkt, *last_pkt;
    int nb_packets;
    int size;
    SDL_mutex *mutex;
    SDL_cond *cond;
} PacketQueue;

typedef struct VideoPicture {
    SDL_Window *screen;
    SDL_Renderer *renderer;
    SDL_Texture *bmp;
    AVFrame *rawdata;
    int width, height; /* source height & width */
    int allocated;
} VideoPicture;

typedef struct VideoState {
    char filename[1024];
    AVFormatContext *ic;
    AVIOContext *io_ctx;
    int videoStream, audioStream;

    AVStream *audio_st;
    AVFrame *audio_frame;
    PacketQueue audioq;
    unsigned int audio_buf_size;
    unsigned int audio_buf_index;
    AVPacket audio_pkt;
    uint8_t *audio_pkt_data;
    int audio_pkt_size;
    uint8_t *audio_buf;
    DECLARE_ALIGNED(16, uint8_t, audio_buf2)[AVCODEC_MAX_AUDIO_FRAME_SIZE * 4];
    enum AVSampleFormat audio_src_fmt;
    enum AVSampleFormat audio_tgt_fmt;
    int audio_src_channels;
    int audio_tgt_channels;
    int64_t audio_src_channel_layout;
    int64_t audio_tgt_channel_layout;
    int audio_src_freq;
    int audio_tgt_freq;
    struct SwrContext *swr_ctx;

    AVStream *video_st;
    PacketQueue videoq;
    VideoPicture pictq[VIDEO_PICTURE_QUEUE_SIZE];
    int pictq_size, pictq_rindex, pictq_windex;
    SDL_mutex *pictq_mutex;
    SDL_cond *pictq_cond;
    struct SwsContext *sws_ctx;

    SDL_Thread *parse_tid;
    SDL_Thread *audio_tid;
    SDL_Thread *video_tid;

    int quit;
} VideoState;

VideoState *global_video_state;
❷ How do you decode video frames with FFmpeg on Android?
Create a VideoPicture struct to hold the decoded images.
1. Build SDL2 as a shared library in the project; the Android.mk in the SDL directory:
LOCAL_PATH := $(call my-dir)
###########################
#
# SDL shared library
#
###########################
include $(CLEAR_VARS)
LOCAL_MODULE := SDL2
LOCAL_C_INCLUDES := $(LOCAL_PATH)/include
LOCAL_EXPORT_C_INCLUDES := $(LOCAL_C_INCLUDES)
LOCAL_SRC_FILES := \
$(subst $(LOCAL_PATH)/,, \
$(wildcard $(LOCAL_PATH)/src/*.c) \
$(wildcard $(LOCAL_PATH)/src/audio/*.c) \
$(wildcard $(LOCAL_PATH)/src/audio/android/*.c) \
$(wildcard $(LOCAL_PATH)/src/audio/dummy/*.c) \
$(LOCAL_PATH)/src/atomic/SDL_atomic.c \
$(LOCAL_PATH)/src/atomic/SDL_spinlock.c.arm \
$(wildcard $(LOCAL_PATH)/src/core/android/*.c) \
$(wildcard $(LOCAL_PATH)/src/cpuinfo/*.c) \
$(wildcard $(LOCAL_PATH)/src/dynapi/*.c) \
$(wildcard $(LOCAL_PATH)/src/events/*.c) \
$(wildcard $(LOCAL_PATH)/src/file/*.c) \
$(wildcard $(LOCAL_PATH)/src/haptic/*.c) \
$(wildcard $(LOCAL_PATH)/src/haptic/dummy/*.c) \
$(wildcard $(LOCAL_PATH)/src/joystick/*.c) \
$(wildcard $(LOCAL_PATH)/src/joystick/android/*.c) \
$(wildcard $(LOCAL_PATH)/src/loadso/dlopen/*.c) \
$(wildcard $(LOCAL_PATH)/src/power/*.c) \
$(wildcard $(LOCAL_PATH)/src/power/android/*.c) \
$(wildcard $(LOCAL_PATH)/src/filesystem/dummy/*.c) \
$(wildcard $(LOCAL_PATH)/src/render/*.c) \
$(wildcard $(LOCAL_PATH)/src/render/*/*.c) \
$(wildcard $(LOCAL_PATH)/src/stdlib/*.c) \
$(wildcard $(LOCAL_PATH)/src/thread/*.c) \
$(wildcard $(LOCAL_PATH)/src/thread/pthread/*.c) \
$(wildcard $(LOCAL_PATH)/src/timer/*.c) \
$(wildcard $(LOCAL_PATH)/src/timer/unix/*.c) \
$(wildcard $(LOCAL_PATH)/src/video/*.c) \
$(wildcard $(LOCAL_PATH)/src/video/android/*.c) \
$(wildcard $(LOCAL_PATH)/src/test/*.c))
LOCAL_CFLAGS += -DGL_GLEXT_PROTOTYPES
LOCAL_LDLIBS := -ldl -lGLESv1_CM -lGLESv2 -llog -landroid
include $(BUILD_SHARED_LIBRARY)
###########################
#
# SDL static library
#
###########################
#LOCAL_MODULE := SDL2_static
#LOCAL_MODULE_FILENAME := libSDL2
#LOCAL_SRC_FILES += $(LOCAL_PATH)/src/main/android/SDL_android_main.c
#LOCAL_LDLIBS :=
#LOCAL_EXPORT_LDLIBS := -Wl,--undefined=Java_org_libsdl_app_SDLActivity_nativeInit -ldl -lGLESv1_CM -lGLESv2 -llog -landroid
#include $(BUILD_STATIC_LIBRARY)
2. Following the earlier post "How to decode video frames with FFmpeg on Android", create an ffmpeg directory in the project and include the FFmpeg-related headers. The contents of the Android.mk under the ffmpeg directory:
LOCAL_PATH := $(call my-dir)
include $(CLEAR_VARS)
LOCAL_MODULE := ffmpeg
LOCAL_SRC_FILES := /path/to/build/ffmpeg/libffmpeg.so
include $(PREBUILT_SHARED_LIBRARY)
3. Create a player directory to hold the decoding and display code. The contents of player.c:
/*
* SDL_Lesson.c
*
* Created on: Aug 12, 2014
* Author: clarck
*/
#include <jni.h>
#include <android/native_window_jni.h>
#include "SDL.h"
#include "SDL_thread.h"
#include "SDL_events.h"
#include "../include/logger.h"
#include "../ffmpeg/include/libavcodec/avcodec.h"
#include "../ffmpeg/include/libavformat/avformat.h"
#include "../ffmpeg/include/libavutil/pixfmt.h"
#include "../ffmpeg/include/libswscale/swscale.h"
int main(int argc, char *argv[]) {
char *file_path = argv[1];
LOGI("file_path:%s", file_path);
AVFormatContext *pFormatCtx;
AVCodecContext *pCodecCtx;
AVCodec *pCodec;
AVFrame *pFrame, *pFrameYUV;
AVPacket *packet;
uint8_t *out_buffer;
SDL_Texture *bmp = NULL;
SDL_Window *screen = NULL;
SDL_Rect rect;
SDL_Event event;
static struct SwsContext *img_convert_ctx;
int videoStream, i, numBytes;
int ret, got_picture;
av_register_all();
pFormatCtx = avformat_alloc_context();
if (SDL_Init(SDL_INIT_VIDEO | SDL_INIT_AUDIO | SDL_INIT_TIMER)) {
LOGE("Could not initialize SDL - %s. \n", SDL_GetError());
exit(1);
}
if (avformat_open_input(&pFormatCtx, file_path, NULL, NULL) != 0) {
LOGE("can't open the file. \n");
return -1;
}
if (avformat_find_stream_info(pFormatCtx, NULL) < 0) {
LOGE("Could't find stream infomation.\n");
return -1;
}
videoStream = -1;
for (i = 0; i < pFormatCtx->nb_streams; i++) {
if (pFormatCtx->streams[i]->codec->codec_type == AVMEDIA_TYPE_VIDEO) {
videoStream = i;
}
}
LOGI("videoStream:%d", videoStream);
if (videoStream == -1) {
LOGE("Didn't find a video stream.\n");
return -1;
}
pCodecCtx = pFormatCtx->streams[videoStream]->codec;
pCodec = avcodec_find_decoder(pCodecCtx->codec_id);
if (pCodec == NULL) {
LOGE("Codec not found.\n");
return -1;
}
if (avcodec_open2(pCodecCtx, pCodec, NULL) < 0) {
LOGE("Could not open codec.\n");
return -1;
}
pFrame = av_frame_alloc();
pFrameYUV = av_frame_alloc();
//---------------------------init sdl---------------------------//
screen = SDL_CreateWindow("My Player Window", SDL_WINDOWPOS_UNDEFINED,
SDL_WINDOWPOS_UNDEFINED, pCodecCtx->width, pCodecCtx->height,
SDL_WINDOW_FULLSCREEN | SDL_WINDOW_OPENGL);
SDL_Renderer *renderer = SDL_CreateRenderer(screen, -1, 0);
bmp = SDL_CreateTexture(renderer, SDL_PIXELFORMAT_YV12,
SDL_TEXTUREACCESS_STREAMING, pCodecCtx->width, pCodecCtx->height);
//-------------------------------------------------------------//
img_convert_ctx = sws_getContext(pCodecCtx->width, pCodecCtx->height,
pCodecCtx->pix_fmt, pCodecCtx->width, pCodecCtx->height,
AV_PIX_FMT_YUV420P, SWS_BICUBIC, NULL, NULL, NULL);
numBytes = avpicture_get_size(AV_PIX_FMT_YUV420P, pCodecCtx->width,
pCodecCtx->height);
out_buffer = (uint8_t *) av_malloc(numBytes * sizeof(uint8_t));
avpicture_fill((AVPicture *) pFrameYUV, out_buffer, AV_PIX_FMT_YUV420P,
pCodecCtx->width, pCodecCtx->height);
rect.x = 0;
rect.y = 0;
rect.w = pCodecCtx->width;
rect.h = pCodecCtx->height;
int y_size = pCodecCtx->width * pCodecCtx->height;
packet = (AVPacket *) malloc(sizeof(AVPacket));
av_new_packet(packet, y_size);
av_dump_format(pFormatCtx, 0, file_path, 0);
while (av_read_frame(pFormatCtx, packet) >= 0) {
if (packet->stream_index == videoStream) {
ret = avcodec_decode_video2(pCodecCtx, pFrame, &got_picture,
packet);
if (ret < 0) {
LOGE("decode error.\n");
return -1;
}
LOGI("got_picture:%d", got_picture);
if (got_picture) {
sws_scale(img_convert_ctx,
(uint8_t const * const *) pFrame->data,
pFrame->linesize, 0, pCodecCtx->height, pFrameYUV->data,
pFrameYUV->linesize);
// pitch: the number of bytes in one row of YUV data
SDL_UpdateTexture(bmp, &rect, pFrameYUV->data[0], pFrameYUV->linesize[0]);
SDL_RenderClear(renderer);
SDL_RenderCopy(renderer, bmp, &rect, &rect);
SDL_RenderPresent(renderer);
}
SDL_Delay(50);
}
av_free_packet(packet);
SDL_PollEvent(&event);
switch (event.type) {
case SDL_QUIT:
SDL_Quit();
exit(0);
break;
default:
break;
}
}
return 0;
}
❸ How do you decode audio with FFmpeg + SDL2.0 on Android?
1. Create a VideoPicture struct to hold the decoded images;
2. Add functions to initialize the packet queue and to put packets in and take them out;
3. audio_decode_frame(): decode the audio;
4. audio_callback(): the callback that fills SDL's audio buffer (a sketch follows this list);
5. Create the refresh-related functions;
6. Add the display function;
7. Allocate the memory for the display output;
8. The decode thread: open the decoders, start the audio thread, and save the key state into the data structure;
9. Write the main function that starts the decode thread.
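A minimal sketch of step 4 under SDL's pull-model audio: SDL invokes the callback whenever the device needs data, and the callback drains is->audio_buf, refilling it by decoding. Here audio_decode_frame() is assumed to decode one frame into is->audio_buf and return its size in bytes; it is not shown in the original answer:

int audio_decode_frame(VideoState *is); /* assumed: fills is->audio_buf, returns bytes */

void audio_callback(void *userdata, Uint8 *stream, int len) {
    VideoState *is = (VideoState *) userdata;
    int len1, audio_size;
    while (len > 0) {
        if (is->audio_buf_index >= is->audio_buf_size) {
            /* Buffered data fully consumed: decode the next frame. */
            audio_size = audio_decode_frame(is);
            if (audio_size < 0) {
                /* Decode error: output silence. */
                is->audio_buf_size = SDL_AUDIO_BUFFER_SIZE;
                memset(is->audio_buf, 0, is->audio_buf_size);
            } else {
                is->audio_buf_size = audio_size;
            }
            is->audio_buf_index = 0;
        }
        len1 = is->audio_buf_size - is->audio_buf_index;
        if (len1 > len)
            len1 = len;
        memcpy(stream, is->audio_buf + is->audio_buf_index, len1);
        len -= len1;
        stream += len1;
        is->audio_buf_index += len1;
    }
}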
Extended background:
FFmpeg is an open-source, cross-platform audio and video streaming solution. It is free software, licensed under the LGPL or GPL (depending on which components you choose), and provides a complete pipeline for recording, converting, and streaming audio and video. It includes libavcodec, a highly advanced audio/video codec library; to guarantee portability and codec quality, many of the codecs in libavcodec were developed from scratch. FFmpeg is developed on Linux, but it can be compiled and run on other operating systems as well.
SDL2.0 (Simple DirectMedia Layer) is an open-source, cross-platform multimedia development library written in C. SDL is widely used for games, emulators, media players, and other multimedia applications. It has built-in functions for calling OpenGL, and it provides facilities for controlling graphics, sound, and input/output, so developers can build applications that run across multiple platforms (Linux, Windows, Mac OS X, and so on) from the same or very similar code.
❹ How do you decode video frames with FFmpeg on Android?
This involves two problems: decoding and display. For decoding, first cross-compile FFmpeg for Android, then follow the decoding flow below:
avcodec_register_all();
/*
########################################
[1]
########################################
*/
av_register_all();
/*
// Open video file
########################################
[2]
########################################
*/
pFormatCtx = avformat_alloc_context();
if(avformat_open_input(&pFormatCtx, filename, NULL, NULL)!=0)
return -1; // Couldn't open file
// Retrieve stream information
/*
########################################
[3]
########################################
*/
if(avformat_find_stream_info(pFormatCtx,NULL)<0)
return -1; // Couldn't find stream information
// Dump information about file onto standard error
// av_dump_format(pFormatCtx, 0, argv[1], 0);
// Find the first video stream
videoStream=-1;
for(i=0; i<pFormatCtx->nb_streams; i++)
if(pFormatCtx->streams[i]->codec->codec_type==AVMEDIA_TYPE_VIDEO) {
videoStream=i;
break;
}
if(videoStream==-1)
return -1; // Didn't find a video stream
// Get a pointer to the codec context for the video stream
pCodecCtx=pFormatCtx->streams[videoStream]->codec;
// Find the decoder for the video stream
pCodec=avcodec_find_decoder(pCodecCtx->codec_id);
if(pCodec==NULL) {
fprintf(stderr, "Unsupported codec!\n");
return -1; // Codec not found
}
// Open codec
if(avcodec_open2(pCodecCtx, pCodec,NULL)<0)
return -1; // Could not open codec
// Allocate video frame
pFrame=avcodec_alloc_frame();
// Allocate an picture structure
avpicture_alloc(&picture, PIX_FMT_RGB24, pCodecCtx->width, pCodecCtx->height);
// Determine required buffer size and allocate buffer
img_convert_ctx = sws_getContext(pCodecCtx->width, pCodecCtx->height, pCodecCtx->pix_fmt, pCodecCtx->width, pCodecCtx->height, PIX_FMT_RGB24, SWS_BICUBIC, NULL, NULL, NULL);
if(img_convert_ctx == NULL)
{
fprintf(stderr, "Cannot initialize the conversion context!\n");
exit(1);
}
// Read frames and save first five frames to disk
/*
########################################
[4]
########################################
*/
i=0;
while(av_read_frame(pFormatCtx, &packet)>=0) {
// Is this a packet from the video stream?
if(packet.stream_index==videoStream) {
// Decode video frame
avcodec_decode_video2(pCodecCtx, pFrame, &frameFinished,
&packet);
// Did we get a video frame?
if(frameFinished) {
// Convert the image from its native format to RGB
sws_scale(img_convert_ctx,
pFrame->data, pFrame->linesize, 0, pCodecCtx->height,
picture.data, picture.linesize);
}
}
// Free the packet that was allocated by av_read_frame
av_free_packet(&packet);
}
pFrame now holds the decoded data; draw it onto the canvas and the FFmpeg decoding path is complete.
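For the display half, one option on Android is to write the converted pixels into an ANativeWindow. A hedged sketch, assuming the sws_scale target above is changed from PIX_FMT_RGB24 to PIX_FMT_RGBA so each pixel is 4 bytes (matching the RGBA_8888 window buffer below); the function name is illustrative:

#include <android/native_window.h>

void draw_frame(ANativeWindow *window, AVPicture *picture, int width, int height) {
    ANativeWindow_Buffer buffer;
    int y;
    ANativeWindow_setBuffersGeometry(window, width, height,
            WINDOW_FORMAT_RGBA_8888);
    if (ANativeWindow_lock(window, &buffer, NULL) < 0)
        return;
    /* Copy row by row: buffer.stride is in pixels and may exceed width. */
    for (y = 0; y < height; y++) {
        memcpy((uint8_t *) buffer.bits + y * buffer.stride * 4,
               picture->data[0] + y * picture->linesize[0],
               width * 4);
    }
    ANativeWindow_unlockAndPost(window);
}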
❺ (Part 4) Decoding audio with FFmpeg on Android
Audio decoding follows roughly the same flow as video decoding; the only difference is in how the frame data is handled: video frames get a pixel-format conversion and are displayed, while audio frames are resampled and played.
Because of this, when designing the architecture later, the logic shared by the audio and video paths can live in a common parent class, with each subclass handling its own specifics such as video display or sound playback.
After that, the resampled data is handed to OpenSL ES for playback.
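A rough sketch of that resampling step with libswresample, converting a decoded frame to 44.1 kHz stereo S16 before it is queued for playback; the function name, target parameters, and the caller-provided out_buf are illustrative assumptions:

#include "libswresample/swresample.h"

/* Returns samples written per channel, or a negative value on error. */
int resample_to_s16(SwrContext **pswr, AVFrame *frame,
                    uint8_t *out_buf, int max_samples) {
    if (*pswr == NULL) {
        /* Lazily build the converter from the first frame's parameters. */
        *pswr = swr_alloc_set_opts(NULL,
                AV_CH_LAYOUT_STEREO, AV_SAMPLE_FMT_S16, 44100,
                frame->channel_layout,
                (enum AVSampleFormat) frame->format,
                frame->sample_rate, 0, NULL);
        if (!*pswr || swr_init(*pswr) < 0)
            return -1;
    }
    return swr_convert(*pswr, &out_buf, max_samples,
                       (const uint8_t **) frame->data, frame->nb_samples);
}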
OpenSL ES (Open Sound Library for Embedded Systems) is an open, hardware-accelerated audio library for embedded systems. It supports audio capture and playback, provides high-performance, low-latency audio, and deploys audio capability across platforms in both software and hardware, greatly reducing the difficulty of developing audio applications at the upper layers.
Object and Interface are the two fundamental concepts in OpenSL ES, analogous to objects and interfaces in Java. Each Object can expose a set of Interfaces, and every object supports a set of basic operations such as Realize, GetState, and Destroy.
One important point: you can only use an Object's functionality after obtaining its Interface through the GetInterface method, as the sketch below shows.
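A minimal sketch of that Object/Interface pattern: create the engine Object, Realize it synchronously, then GetInterface to obtain the SLEngineItf that creates everything else. Result-code checks are omitted for brevity:

#include <SLES/OpenSLES.h>

SLObjectItf engineObject = NULL;
SLEngineItf engineEngine = NULL;

void create_engine(void) {
    /* Create and realize the engine object (SL_BOOLEAN_FALSE = synchronous). */
    slCreateEngine(&engineObject, 0, NULL, 0, NULL, NULL);
    (*engineObject)->Realize(engineObject, SL_BOOLEAN_FALSE);
    /* Only after GetInterface can the engine's functionality be used. */
    (*engineObject)->GetInterface(engineObject, SL_IID_ENGINE, &engineEngine);
}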
The example here plays a video file stored on the phone, so only the OpenSL ES Audio Player playback flow is covered.
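Continuing the sketch, an Audio Player fed from an Android simple buffer queue. The PCM format (44.1 kHz stereo S16) is an assumption matching the resampling sketch above, and bqPlayerCallback is a hypothetical callback that enqueues the next resampled buffer:

#include <SLES/OpenSLES_Android.h>

SLObjectItf outputMixObject = NULL, playerObject = NULL;
SLPlayItf playerPlay = NULL;
SLAndroidSimpleBufferQueueItf bufferQueue = NULL;

void bqPlayerCallback(SLAndroidSimpleBufferQueueItf bq, void *ctx); /* hypothetical */

void create_audio_player(void) {
    /* Output mix: the sink the player renders into. */
    (*engineEngine)->CreateOutputMix(engineEngine, &outputMixObject, 0, NULL, NULL);
    (*outputMixObject)->Realize(outputMixObject, SL_BOOLEAN_FALSE);

    /* Source: a 2-buffer queue delivering 44.1 kHz stereo S16 PCM. */
    SLDataLocator_AndroidSimpleBufferQueue loc_bufq =
            { SL_DATALOCATOR_ANDROIDSIMPLEBUFFERQUEUE, 2 };
    SLDataFormat_PCM format_pcm = { SL_DATAFORMAT_PCM, 2, SL_SAMPLINGRATE_44_1,
            SL_PCMSAMPLEFORMAT_FIXED_16, SL_PCMSAMPLEFORMAT_FIXED_16,
            SL_SPEAKER_FRONT_LEFT | SL_SPEAKER_FRONT_RIGHT,
            SL_BYTEORDER_LITTLEENDIAN };
    SLDataSource audioSrc = { &loc_bufq, &format_pcm };
    SLDataLocator_OutputMix loc_outmix = { SL_DATALOCATOR_OUTPUTMIX, outputMixObject };
    SLDataSink audioSnk = { &loc_outmix, NULL };

    const SLInterfaceID ids[1] = { SL_IID_BUFFERQUEUE };
    const SLboolean req[1] = { SL_BOOLEAN_TRUE };
    (*engineEngine)->CreateAudioPlayer(engineEngine, &playerObject,
            &audioSrc, &audioSnk, 1, ids, req);
    (*playerObject)->Realize(playerObject, SL_BOOLEAN_FALSE);
    (*playerObject)->GetInterface(playerObject, SL_IID_PLAY, &playerPlay);
    (*playerObject)->GetInterface(playerObject, SL_IID_BUFFERQUEUE, &bufferQueue);

    /* The callback refills the queue with resampled PCM as buffers drain. */
    (*bufferQueue)->RegisterCallback(bufferQueue, bqPlayerCallback, NULL);
    (*playerPlay)->SetPlayState(playerPlay, SL_PLAYSTATE_PLAYING);
}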
That is the rough flow of audio playback. There is also audio capture functionality, which will be covered later. Audio decoding mostly matches the video decoding flow; once you are familiar with the handful of OpenSL ES APIs and the overall flow, playback should basically just work.