How to decode images with FFmpeg on Android

As the title says.

1. Cross-compile FFmpeg for Android with the NDK. A successful build and install ends with output like this:

CC libavcodec/log2_tab.o
CC libavutil/log2_tab.o
CC libswresample/log2_tab.o
AR libavcodec/libavcodec.a
LD libavutil/libavutil.so.52
AR libavutil/libavutil.a
AR libswresample/libswresample.a
LD libavcodec/libavcodec.so.55
LD libswresample/libswresample.so.0
LD libswscale/libswscale.so.2
LD libavformat/libavformat.so.55
INSTALL libavformat/libavformat.a
INSTALL libavformat/libavformat.so
STRIP install-libavformat-shared
INSTALL libavcodec/libavcodec.a
INSTALL libavcodec/libavcodec.so
STRIP install-libavcodec-shared
INSTALL libswresample/libswresample.a
INSTALL libswresample/libswresample.so
STRIP install-libswresample-shared
INSTALL libswscale/libswscale.a
INSTALL libswscale/libswscale.so
STRIP install-libswscale-shared
INSTALL libavutil/libavutil.a
INSTALL libavutil/libavutil.so
STRIP install-libavutil-shared
INSTALL libavformat/avformat.h
INSTALL libavformat/avio.h
INSTALL libavformat/version.h
INSTALL libavformat/libavformat.pc
INSTALL libavcodec/avcodec.h
INSTALL libavcodec/avfft.h
INSTALL libavcodec/dxva2.h
INSTALL libavcodec/old_codec_ids.h
INSTALL libavcodec/vaapi.h
INSTALL libavcodec/vda.h
INSTALL libavcodec/vdpau.h
INSTALL libavcodec/version.h
INSTALL libavcodec/xvmc.h
INSTALL libavcodec/libavcodec.pc
INSTALL libswresample/swresample.h
INSTALL libswresample/version.h
INSTALL libswresample/libswresample.pc
INSTALL libswscale/swscale.h
INSTALL libswscale/version.h
INSTALL libswscale/libswscale.pc
INSTALL libavutil/adler32.h
INSTALL libavutil/aes.h
INSTALL libavutil/attributes.h
INSTALL libavutil/audio_fifo.h
INSTALL libavutil/audioconvert.h
INSTALL libavutil/avassert.h
INSTALL libavutil/avstring.h
INSTALL libavutil/avutil.h
INSTALL libavutil/base64.h
INSTALL libavutil/blowfish.h
INSTALL libavutil/bprint.h
INSTALL libavutil/bswap.h
INSTALL libavutil/buffer.h
INSTALL libavutil/channel_layout.h
INSTALL libavutil/common.h
INSTALL libavutil/cpu.h
INSTALL libavutil/crc.h
INSTALL libavutil/error.h
INSTALL libavutil/eval.h
INSTALL libavutil/fifo.h
INSTALL libavutil/file.h
INSTALL libavutil/frame.h
INSTALL libavutil/hmac.h
INSTALL libavutil/imgutils.h
INSTALL libavutil/intfloat.h
INSTALL libavutil/intfloat_readwrite.h
INSTALL libavutil/intreadwrite.h
INSTALL libavutil/lfg.h
INSTALL libavutil/log.h
INSTALL libavutil/mathematics.h
INSTALL libavutil/md5.h
INSTALL libavutil/mem.h
INSTALL libavutil/murmur3.h
INSTALL libavutil/dict.h
INSTALL libavutil/old_pix_fmts.h
INSTALL libavutil/opt.h
INSTALL libavutil/parseutils.h
INSTALL libavutil/pixdesc.h
INSTALL libavutil/pixfmt.h
INSTALL libavutil/random_seed.h
INSTALL libavutil/rational.h
INSTALL libavutil/ripemd.h
INSTALL libavutil/samplefmt.h
INSTALL libavutil/sha.h
INSTALL libavutil/sha512.h
INSTALL libavutil/time.h
INSTALL libavutil/timecode.h
INSTALL libavutil/timestamp.h
INSTALL libavutil/version.h
INSTALL libavutil/xtea.h
INSTALL libavutil/lzo.h
INSTALL libavutil/avconfig.h
INSTALL libavutil/libavutil.pc

Then link the build output into a single libffmpeg.so; this is the prebuilt library referenced in step 2 below.

2. Create a new Android project. Under the project directory create a jni folder, and inside it an ffmpeg folder to hold the FFmpeg headers. In the ffmpeg folder create an Android.mk that loads the prebuilt FFmpeg shared library. The Android.mk content is:

LOCAL_PATH := $(call my-dir)

include $(CLEAR_VARS)
LOCAL_MODULE := ffmpeg
LOCAL_SRC_FILES := /path/to/build/output/libffmpeg.so
include $(PREBUILT_SHARED_LIBRARY)

3. Under jni, create an Android.mk and an Application.mk to specify the build order, the target platform, and the CPU ABI.

Application.mk:

APP_ABI := armeabi
APP_PLATFORM := android-9

Android.mk:

include $(call all-subdir-makefiles)

4. Write the JNI glue file that binds the Java methods to the C functions. Its content is:

/*
 * ffmpeg_jni.c
 *
 *  Created on: Sep 1, 2014
 *      Author: clarck
 */
#include <stdlib.h>
#include <string.h>
#include <stdio.h>
#include <jni.h>

#include "../include/ffmpeg_logger.h"
#include "../include/ffmpeg.h"

// The fully qualified Java class whose native methods are registered here
#define JNIREG_CLASS "com/clarck/android/ffmpeg/MainActivity"

JNIEXPORT void JNICALL native_setDataSource(JNIEnv *env, jobject thiz, jstring path) {
    char *filepath = ffmpeg_jstringTostr(env, path);
    ffmpeg_setDataSource(filepath);
    free(filepath);
}

// Mapping between the Java method and its JNI implementation
static JNINativeMethod method_table[] = {
    { "setDataSource", "(Ljava/lang/String;)V", (void *) native_setDataSource }
};

// Register the native methods with the given Java class
static int registerNativeMethods(JNIEnv *env, const char *className,
        JNINativeMethod *gMethods, int numMethods) {
    jclass clazz = (*env)->FindClass(env, className);
    if (clazz == NULL) {
        return JNI_FALSE;
    }

    if ((*env)->RegisterNatives(env, clazz, gMethods, numMethods) < 0) {
        return JNI_FALSE;
    }

    return JNI_TRUE;
}

// Called from JNI_OnLoad to perform the registration
int register_ndk_load(JNIEnv *env) {
    return registerNativeMethods(env, JNIREG_CLASS, method_table,
            (int) (sizeof(method_table) / sizeof(method_table[0])));
}

JNIEXPORT jint JNICALL JNI_OnLoad(JavaVM *vm, void *reserved) {
    JNIEnv *env = NULL;

    if ((*vm)->GetEnv(vm, (void **) &env, JNI_VERSION_1_6) != JNI_OK) {
        return -1;
    }

    register_ndk_load(env);

    // Report the JNI version this library requires
    return JNI_VERSION_1_6;
}
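
The two project headers included above, ../include/ffmpeg.h and ../include/ffmpeg_logger.h, are not shown in the original answer. Below is a minimal sketch of what they would need to declare, assuming the LOGI/LOGE macros simply wrap Android's __android_log_print (liblog is linked via LOCAL_LDLIBS := -llog in step 6):

/*
 * ffmpeg_logger.h -- hypothetical sketch, not from the original post
 */
#ifndef FFMPEG_LOGGER_H_
#define FFMPEG_LOGGER_H_

#include <android/log.h>

#define LOG_TAG "ffmpeg_player"
#define LOGI(...) __android_log_print(ANDROID_LOG_INFO, LOG_TAG, __VA_ARGS__)
#define LOGE(...) __android_log_print(ANDROID_LOG_ERROR, LOG_TAG, __VA_ARGS__)

#endif /* FFMPEG_LOGGER_H_ */

/*
 * ffmpeg.h -- declarations used by ffmpeg_jni.c (hypothetical sketch)
 */
#ifndef FFMPEG_H_
#define FFMPEG_H_

#include <jni.h>

// Converts a Java String to a newly malloc'd UTF-8 C string (caller frees it)
char* ffmpeg_jstringTostr(JNIEnv *env, jstring jstr);

// Opens the given file with FFmpeg and decodes its video stream
void ffmpeg_setDataSource(char *file_path);

#endif /* FFMPEG_H_ */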

5. Write the C code that actually calls FFmpeg. Its content is:

/*
* ffmpeg.c
*
* Created on: Sep 1, 2014
* Author: clarck
*/
#include <jni.h>
#include <android/native_window_jni.h>
#include "../include/ffmpeg.h"
#include "../include/ffmpeg_logger.h"
#include "../ffmpeg/include/libavcodec/avcodec.h"
#include "../ffmpeg/include/libavformat/avformat.h"
#include "../ffmpeg/include/libavutil/pixfmt.h"
#include "../ffmpeg/include/libswscale/swscale.h"

char* ffmpeg_jstringTostr(JNIEnv *env, jstring jstr) {
    char *pStr = NULL;

    jclass jstrObj = (*env)->FindClass(env, "java/lang/String");
    jstring encode = (*env)->NewStringUTF(env, "utf-8");
    jmethodID methodId = (*env)->GetMethodID(env, jstrObj, "getBytes",
            "(Ljava/lang/String;)[B");
    jbyteArray byteArray = (jbyteArray) (*env)->CallObjectMethod(env, jstr,
            methodId, encode);
    jsize strLen = (*env)->GetArrayLength(env, byteArray);
    jbyte *jBuf = (*env)->GetByteArrayElements(env, byteArray, NULL);

    if (jBuf != NULL) {
        pStr = (char *) malloc(strLen + 1);

        if (pStr != NULL) {
            memcpy(pStr, jBuf, strLen);
            pStr[strLen] = 0;
        }
    }

    (*env)->ReleaseByteArrayElements(env, byteArray, jBuf, 0);

    return pStr;
}

void ffmpeg_setDataSource(char *file_path) {
    LOGI("ffmpeg_setDataSource:%s", file_path);

    AVFormatContext *pFormatCtx;
    AVCodecContext *pCodecCtx;
    AVCodec *pCodec;
    AVFrame *pFrame, *pFrameYUV;
    AVPacket *packet;
    uint8_t *out_buffer;

    static struct SwsContext *img_convert_ctx;

    int videoStream, i, numBytes;
    int ret, got_picture;

    av_register_all();
    pFormatCtx = avformat_alloc_context();

    if (avformat_open_input(&pFormatCtx, file_path, NULL, NULL) != 0) {
        LOGE("can't open the file.\n");
        return;
    }

    if (avformat_find_stream_info(pFormatCtx, NULL) < 0) {
        LOGE("Couldn't find stream information.\n");
        return;
    }

    videoStream = -1;
    for (i = 0; i < pFormatCtx->nb_streams; i++) {
        if (pFormatCtx->streams[i]->codec->codec_type == AVMEDIA_TYPE_VIDEO) {
            videoStream = i;
        }
    }

    if (videoStream == -1) {
        LOGE("Didn't find a video stream.\n");
        return;
    }

    pCodecCtx = pFormatCtx->streams[videoStream]->codec;
    pCodec = avcodec_find_decoder(pCodecCtx->codec_id);

    if (pCodec == NULL) {
        LOGE("Codec not found.\n");
        return;
    }

    if (avcodec_open2(pCodecCtx, pCodec, NULL) < 0) {
        LOGE("Could not open codec.\n");
        return;
    }

    pFrame = av_frame_alloc();
    pFrameYUV = av_frame_alloc();

    numBytes = avpicture_get_size(AV_PIX_FMT_YUV420P, pCodecCtx->width,
            pCodecCtx->height);
    out_buffer = (uint8_t *) av_malloc(numBytes * sizeof(uint8_t));
    avpicture_fill((AVPicture *) pFrameYUV, out_buffer, AV_PIX_FMT_YUV420P,
            pCodecCtx->width, pCodecCtx->height);

    int y_size = pCodecCtx->width * pCodecCtx->height;

    packet = (AVPacket *) malloc(sizeof(AVPacket));
    av_new_packet(packet, y_size);

    av_dump_format(pFormatCtx, 0, file_path, 0);

    while (av_read_frame(pFormatCtx, packet) >= 0) {
        if (packet->stream_index == videoStream) {
            ret = avcodec_decode_video2(pCodecCtx, pFrame, &got_picture,
                    packet);

            LOGI("avcodec_decode_video2 ret:%d", ret);

            if (ret < 0) {
                LOGE("decode error.\n");
                return;
            }

            if (got_picture) {
                // TODO: the decoded frame (pFrame) can be converted and saved here.
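                // Illustrative sketch, not part of the original answer: convert the
                // decoded frame to YUV420P with libswscale so it can be written out.
                if (img_convert_ctx == NULL) {
                    img_convert_ctx = sws_getContext(pCodecCtx->width,
                            pCodecCtx->height, pCodecCtx->pix_fmt,
                            pCodecCtx->width, pCodecCtx->height,
                            AV_PIX_FMT_YUV420P, SWS_BICUBIC, NULL, NULL, NULL);
                }
                sws_scale(img_convert_ctx,
                        (const uint8_t * const *) pFrame->data, pFrame->linesize,
                        0, pCodecCtx->height,
                        pFrameYUV->data, pFrameYUV->linesize);
                // pFrameYUV->data[0..2] now hold the Y/U/V planes (y_size,
                // y_size/4 and y_size/4 bytes); they can be fwritten to a .yuv
                // file if you want to keep them (that also needs <stdio.h>).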
            }
        }
        av_free_packet(packet);
    }

    av_free(out_buffer);
    av_free(pFrameYUV);
    av_free(pFrame);
    avcodec_close(pCodecCtx);
    avformat_close_input(&pFormatCtx);
}

6. Write the Android.mk that compiles the .c files above. Its content is:

LOCAL_PATH := $(call my-dir)

include $(CLEAR_VARS)

FFMPEG_PATH := ../ffmpeg
LOCAL_C_INCLUDES := $(LOCAL_PATH)/$(FFMPEG_PATH)/include

LOCAL_MODULE := ffmpeg_player
LOCAL_SRC_FILES += ffmpeg_jni.c
LOCAL_SRC_FILES += ffmpeg.c

LOCAL_SHARED_LIBRARIES := ffmpeg
LOCAL_LDLIBS := -llog

include $(BUILD_SHARED_LIBRARY)

7. Write the Java code that loads the libraries and calls the native method:

package com.clarck.android.ffmpeg;

import android.app.Activity;
import android.os.Bundle;

public class MainActivity extends Activity {

    @Override
    protected void onCreate(Bundle savedInstanceState) {
        super.onCreate(savedInstanceState);
        setContentView(R.layout.activity_main);

        setDataSource("/sdcard/a.mp4");
    }

    public native void setDataSource(String path);

    static {
        System.loadLibrary("ffmpeg");
        System.loadLibrary("ffmpeg_player");
    }
}
Reply 1 (2016-03-03):
Create a VideoPicture structure to hold the decoded images.

/*
* SDL_Lesson.c
*
* Created on: Aug 12, 2014
* Author: clarck
*/
#include <jni.h>
#include <android/native_window_jni.h>
#include "SDL.h"
#include "SDL_thread.h"
#include "SDL_events.h"
#include "../include/logger.h"
#include "../ffmpeg/include/libavcodec/avcodec.h"
#include "../ffmpeg/include/libavformat/avformat.h"
#include "../ffmpeg/include/libavutil/pixfmt.h"
#include "../ffmpeg/include/libswscale/swscale.h"
#include "../ffmpeg/include/libswresample/swresample.h"

#define SDL_AUDIO_BUFFER_SIZE 1024

#define MAX_AUDIO_SIZE (5 * 16 * 1024)
#define MAX_VIDEO_SIZE (5 * 256 * 1024)

#define FF_ALLOC_EVENT (SDL_USEREVENT)
#define FF_REFRESH_EVENT (SDL_USEREVENT + 1)
#define FF_QUIT_EVENT (SDL_USEREVENT + 2)

#define VIDEO_PICTURE_QUEUE_SIZE 1
#define AVCODEC_MAX_AUDIO_FRAME_SIZE 192000 // 1 second of 48khz 32bit audio

typedef struct PacketQueue {
    AVPacketList *first_pkt, *last_pkt;
    int nb_packets;
    int size;
    SDL_mutex *mutex;
    SDL_cond *cond;
} PacketQueue;

typedef struct VideoPicture {
    SDL_Window *screen;
    SDL_Renderer *renderer;
    SDL_Texture *bmp;

    AVFrame *rawdata;
    int width, height; /* source height & width */
    int allocated;
} VideoPicture;

typedef struct VideoState {
    char filename[1024];
    AVFormatContext *ic;
    int videoStream, audioStream;
    AVStream *audio_st;
    AVFrame *audio_frame;
    PacketQueue audioq;
    unsigned int audio_buf_size;
    unsigned int audio_buf_index;
    AVPacket audio_pkt;
    uint8_t *audio_pkt_data;
    int audio_pkt_size;
    uint8_t *audio_buf;
    DECLARE_ALIGNED(16, uint8_t, audio_buf2)[AVCODEC_MAX_AUDIO_FRAME_SIZE * 4];
    enum AVSampleFormat audio_src_fmt;
    enum AVSampleFormat audio_tgt_fmt;
    int audio_src_channels;
    int audio_tgt_channels;
    int64_t audio_src_channel_layout;
    int64_t audio_tgt_channel_layout;
    int audio_src_freq;
    int audio_tgt_freq;
    struct SwrContext *swr_ctx;

    AVStream *video_st;
    PacketQueue videoq;

    VideoPicture pictq[VIDEO_PICTURE_QUEUE_SIZE];
    int pictq_size, pictq_rindex, pictq_windex;
    SDL_mutex *pictq_mutex;
    SDL_cond *pictq_cond;

    SDL_Thread *parse_tid;
    SDL_Thread *audio_tid;
    SDL_Thread *video_tid;

    AVIOContext *io_ctx;
    struct SwsContext *sws_ctx;

    int quit;
} VideoState;

VideoState *global_video_state;
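
The reply stops at the data-structure definitions. As an illustration only (my assumption, following the common FFmpeg/SDL player pattern these structs come from), the PacketQueue would be initialized and filled roughly like this:

static void packet_queue_init(PacketQueue *q) {
    // Start with an empty list and create the SDL synchronization primitives
    q->first_pkt = q->last_pkt = NULL;
    q->nb_packets = 0;
    q->size = 0;
    q->mutex = SDL_CreateMutex();
    q->cond = SDL_CreateCond();
}

static int packet_queue_put(PacketQueue *q, AVPacket *pkt) {
    AVPacketList *pkt1;

    // Make sure the packet owns its own buffer before it is queued
    if (av_dup_packet(pkt) < 0)
        return -1;

    pkt1 = av_malloc(sizeof(AVPacketList));
    if (!pkt1)
        return -1;
    pkt1->pkt = *pkt;
    pkt1->next = NULL;

    SDL_LockMutex(q->mutex);
    if (!q->last_pkt)
        q->first_pkt = pkt1;
    else
        q->last_pkt->next = pkt1;
    q->last_pkt = pkt1;
    q->nb_packets++;
    q->size += pkt1->pkt.size;
    SDL_CondSignal(q->cond);    // wake up a consumer thread waiting on the queue
    SDL_UnlockMutex(q->mutex);
    return 0;
}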
