我们首先需要到 FFmpeg 官网下载源码并进行编译,生成相关文件,我们会有以下几个文件
我们这时候会发现源文件中出现很多报错,这是因为我们没有指定头文件路径,也没有链接额外的 FFmpeg 编译出的 .so 文件,需要修改 CMakeLists.txt 文件
# For more information about using CMake with Android Studio, read the
# documentation: https://d.android.com/studio/projects/add-native-code.html

# Sets the minimum version of CMake required to build the native library.
cmake_minimum_required(VERSION 3.4.1)

#set(CMAKE_CXX_FLAGS "${CMAKE_CXX_FLAGS} -std=gnu++11")
# If the compiler is gcc/g++, add C++11 support to the compile options.
if(CMAKE_COMPILER_IS_GNUCXX)
    set(CMAKE_CXX_FLAGS "-std=c++11 ${CMAKE_CXX_FLAGS}")
    message(STATUS "optional:-std=c++11")
endif(CMAKE_COMPILER_IS_GNUCXX)

# FFmpeg header directories; relative paths here are interpreted relative to
# this CMakeLists.txt.
include_directories(src/main/jniLibs/include)
include_directories(src/main/jniLibs/other)

# Directory holding the prebuilt FFmpeg .so files.
# NOTE(fix): IMPORTED_LOCATION must be a full path.  The original script set
# distribution_DIR but never used it, and handed set_target_properties
# relative "../../../../" paths that were resolved against the Gradle build
# directory and only worked by accident.  Anchor on CMAKE_SOURCE_DIR instead
# (the include_directories lines above prove src/main/jniLibs lives next to
# this file).
set(distribution_DIR ${CMAKE_SOURCE_DIR}/src/main/jniLibs)

# Codec library (the most important one).
add_library(avcodec SHARED IMPORTED)
set_target_properties(avcodec
        PROPERTIES IMPORTED_LOCATION
        ${distribution_DIR}/armeabi/libavcodec-56.so)
# Device handling.
add_library(avdevice SHARED IMPORTED)
set_target_properties(avdevice
        PROPERTIES IMPORTED_LOCATION
        ${distribution_DIR}/armeabi/libavdevice-56.so)
# Filter / effect processing.
add_library(avfilter SHARED IMPORTED)
set_target_properties(avfilter
        PROPERTIES IMPORTED_LOCATION
        ${distribution_DIR}/armeabi/libavfilter-5.so)
# Container (mux/demux) handling.
add_library(avformat SHARED IMPORTED)
set_target_properties(avformat
        PROPERTIES IMPORTED_LOCATION
        ${distribution_DIR}/armeabi/libavformat-56.so)
# Utility library (most of the other libraries depend on it).
add_library(avutil SHARED IMPORTED)
set_target_properties(avutil
        PROPERTIES IMPORTED_LOCATION
        ${distribution_DIR}/armeabi/libavutil-54.so)
# Post-processing.
add_library(postproc SHARED IMPORTED)
set_target_properties(postproc
        PROPERTIES IMPORTED_LOCATION
        ${distribution_DIR}/armeabi/libpostproc-53.so)
# Audio sample format conversion / resampling.
add_library(swresample SHARED IMPORTED)
set_target_properties(swresample
        PROPERTIES IMPORTED_LOCATION
        ${distribution_DIR}/armeabi/libswresample-1.so)
# Video pixel format conversion / scaling.
add_library(swscale SHARED IMPORTED)
set_target_properties(swscale
        PROPERTIES IMPORTED_LOCATION
        ${distribution_DIR}/armeabi/libswscale-3.so)

# Our own JNI library, plus the extra C sources that wrap the ffmpeg
# command-line front end.
add_library( # Sets the name of the library.
        native-lib
        # Sets the library as a shared library.
        SHARED
        # Provides a relative path to your source file(s).
        src/main/cpp/native-lib.cpp
        # Extra C files compiled into the same library.
        src/main/cpp/ffmpeg_filter.c
        src/main/cpp/ffmpeg_mod.c
        src/main/cpp/ffmpeg_opt.c
        src/main/cpp/cmdutils.c
        )

# Locate the NDK log library; CMake verifies it exists before completing the
# build and stores the path in ${log-lib}.
find_library(log-lib log)

# Link native-lib against the imported FFmpeg libraries and the NDK log
# library.
target_link_libraries(native-lib
        avcodec avdevice avfilter avformat avutil postproc swresample swscale
        ${log-lib})
在 build.gradle 的 defaultConfig 中需要添加以下代码
// Default Android application configuration (excerpt of build.gradle).
defaultConfig {
applicationId "com.peakmain.ndk"
minSdkVersion 15
targetSdkVersion 27
versionCode 1
versionName "1.0"
testInstrumentationRunner "android.support.test.runner.AndroidJUnitRunner"
externalNativeBuild {
cmake {
cppFlags ""
// Only build the "armeabi" ABI — the prebuilt FFmpeg .so files in
// jniLibs/armeabi exist for that ABI only.  (The original comment's
// "aremabi" was a typo for "armeabi".)
abiFilters "armeabi"
}
}
}
注意:这里有些人在程序编译的时候会报ABIs [armeabi] are not supported for platform. Supported ABIs are [armeabi-v7a, arm64-v8a, x86, x86_64].的错误,解决办法NDK降级到v16 替换AS里NDK或者修改ndk-bundle路径即可,具体的大家可以看这个博客http://blog.51cto.com/4789781/2116935
添加依赖
implementation 'com.tbruyelle.rxpermissions2:rxpermissions:0.9.5@aar'
implementation 'io.reactivex.rxjava2:rxandroid:2.0.1'
VideoCompress视频压缩java类
/**
 * JNI bridge to the native FFmpeg-based video compressor (native-lib).
 */
public class VideoCompress {
// Load the native library once, when this class is first used.
static {
System.loadLibrary("native-lib");
// Only native-lib needs an explicit load here; per the original note the
// other .so files do not all need to be loaded explicitly.
}
// Native FFmpeg video compression.  compressCommand is a full ffmpeg command
// split into tokens, e.g. {"ffmpeg","-i",in,"-b:v","1024k",out}; callback
// receives progress reported back from native code.
public native void compressVideo(String[] compressCommand,CompressCallback callback);
// Progress callback invoked from native code: current = frames processed so
// far, total = total frame count of the input.
public interface CompressCallback{
public void onCompress(int current,int total);
}
}
MainActivity的使用
/**
 * Demo activity: requests storage permissions, then compresses a video with
 * the native FFmpeg wrapper on a background thread, logging progress.
 */
public class MainActivity extends AppCompatActivity {
// Input video to compress ("视频" is a folder name on external storage).
private File mInFile = new File(Environment.getExternalStorageDirectory()+"/视频/", "test.mp4");
// Destination file for the compressed output.
private File mOutFile = new File(Environment.getExternalStorageDirectory()+"/视频/", "out.mp4");
@Override
protected void onCreate(Bundle savedInstanceState) {
super.onCreate(savedInstanceState);
setContentView(R.layout.activity_main);
Log.e("TAG",mInFile.exists()+"");
TextView tv = (TextView) findViewById(R.id.sample_text);
// Equivalent command line: ffmpeg -i test.mp4 -b:v 1024k out.mp4
// -b:v is the video bitrate: the higher the bitrate, the clearer (and
// larger) the video.  1M = 1024K.
// test.mp4 : path of the video to compress
// out.mp4  : path of the compressed result
}
// Click handler — presumably wired via android:onClick in the layout; confirm
// there.  Requests read/write storage permissions before compressing.
public void compressVideo(View view) {
RxPermissions rxPermissions = new RxPermissions(this);
rxPermissions.request(Manifest.permission.READ_EXTERNAL_STORAGE, Manifest.permission.WRITE_EXTERNAL_STORAGE)
.subscribe(new Consumer<Boolean>() {
@Override
public void accept(Boolean aBoolean) throws Exception {
if (aBoolean) {
compressVideo();
}
}
});
}
// Runs the native compression on the RxJava io scheduler and observes the
// result on the main thread.
private void compressVideo() {
String[] compressCommand = {"ffmpeg", "-i", mInFile.getAbsolutePath(), "-b:v", "1024k", mOutFile.getAbsolutePath()};
Observable.just(compressCommand)
.map(new Function<String[], File>() {
@Override
public File apply(String[] compressCommand) throws Exception {
// Compression is time-consuming, so it executes here on the io thread.
VideoCompress videoCompress = new VideoCompress();
videoCompress.compressVideo(compressCommand, new VideoCompress.CompressCallback() {
@Override
public void onCompress(int current, int total) {
Log.e("TAG","压缩进度:"+current+"/"+total);
}
});
return mOutFile;
}
}).subscribeOn(Schedulers.io())
.observeOn(AndroidSchedulers.mainThread())
.subscribe(new Consumer<File>() {
@Override
public void accept(File file) throws Exception {
// Compression finished — this runs back on the main thread.
Log.e("TAG","压缩完成");
}
});
}
}
native-lib的代码实现
#include <jni.h>
#include <string>
#include <malloc.h>
#include <android/log.h>
#define TAG "JNI_TAG"
#define LOGE(...) __android_log_print(ANDROID_LOG_ERROR,TAG,__VA_ARGS__)
// C linkage so the JNI runtime and the plain-C ffmpeg sources can resolve
// these symbols without C++ name mangling.
extern "C" {
JNIEXPORT void JNICALL
Java_com_peakmain_ndk_VideoCompress_compressVideo(JNIEnv *env, jclass type,
jobjectArray
compressCommand, jobject callback);
// Entry point implemented in ffmpeg_mod.c (command based — like running
// "ffmpeg ..." on a desktop).
// argc: number of command tokens
// argv: the command tokens themselves
int ffmpegmain(int argc, char **argv, void(call_back)(int, int));
}
// Global reference to the Java CompressCallback so call_back() can reach it;
// set and released in compressVideo below.
static jobject call_back_jobj;
// Cached JNIEnv of the calling thread.  NOTE(review): a JNIEnv* is only valid
// on the thread it belongs to and only while compressVideo is executing —
// confirm call_back is never invoked from another thread.
static JNIEnv *mEnv;
//回调函数
void call_back(int current, int total) {
//LOGE("压缩进度:%d/%d",current,total);
// 把进度回调出去 对象是 jobject callback
if(call_back_jobj != NULL && mEnv != NULL){
jclass j_clazz = mEnv->GetObjectClass(call_back_jobj);
// javap 命令也能打印
jmethodID j_mid = mEnv->GetMethodID(j_clazz,"onCompress","(II)V");
mEnv->CallVoidMethod(call_back_jobj,j_mid,current,total);
}
};
// JNI entry: runs the wrapped ffmpeg command to compress a video, reporting
// progress through `callback`.
// compressCommand: ffmpeg command split into tokens, e.g.
//                  {"ffmpeg","-i",in,"-b:v","1024k",out}
// callback:        Java object whose onCompress(int,int) receives progress.
JNIEXPORT void JNICALL
Java_com_peakmain_ndk_VideoCompress_compressVideo(JNIEnv *env, jclass type,
                                                  jobjectArray compressCommand, jobject callback) {
    // Keep a global ref so call_back() can reach the Java callback while the
    // long-running native compression executes on this thread.
    call_back_jobj = env->NewGlobalRef(callback);
    mEnv = env;
    // 1. Number of command tokens.
    int argc = env->GetArrayLength(compressCommand);
    // 2. Build char **argv from the Java string array.  Keep the jstring for
    //    each element so its UTF buffer can be released correctly afterwards.
    char **argv = (char **) malloc(sizeof(char *) * argc);
    jstring *j_strs = (jstring *) malloc(sizeof(jstring) * argc);
    for (int i = 0; i < argc; i++) {
        j_strs[i] = (jstring) (env->GetObjectArrayElement(compressCommand, i));
        argv[i] = (char *) (env->GetStringUTFChars(j_strs[i], NULL));
        LOGE("参数:%s", argv[i]);
    }
    // 3. Run the (renamed) ffmpeg main with the progress callback.
    ffmpegmain(argc, argv, call_back);
    // 4. Release memory.
    //    FIX: buffers returned by GetStringUTFChars must be released with
    //    ReleaseStringUTFChars, never free() — the original free(argv[i])
    //    handed JVM-owned memory to the C allocator.  Also drop the
    //    per-element local refs.
    for (int i = 0; i < argc; ++i) {
        env->ReleaseStringUTFChars(j_strs[i], argv[i]);
        env->DeleteLocalRef(j_strs[i]);
    }
    free(j_strs);
    free(argv);
    env->DeleteGlobalRef(call_back_jobj);
    // FIX: clear the globals so a stale JNIEnv*/jobject can never be used by
    // a later stray call_back invocation.
    call_back_jobj = NULL;
    mEnv = NULL;
}
注意:ffmpeg 源码中是没有回调方法的,这时候我们需要修改一下 ffmpeg_mod.c 源码
// Abbreviated excerpt of the modified ffmpeg_mod.c: ffmpeg's main() is renamed
// to ffmpegmain() and gains a progress-callback parameter, which is threaded
// through transcode() down to print_report().  Only the changed lines are
// shown; the rest of the original function body is omitted here.
// NOTE(review): as excerpted, the function falls off the end without a return
// value on success — presumably the omitted original code returns 0; confirm
// in the full source.
int ffmpegmain(int argc, char **argv,void(call_back)(int,int))
{
if (transcode(call_back) < 0){
ffmpeg_cleanup(1); return 1;}
}
// Abbreviated excerpt: transcode() now accepts the progress callback and
// forwards it to print_report(); all other statements of the real function
// are omitted here.
static int transcode(void(call_back)(int,int)){
print_report(0, timer_start, cur_time,call_back);
}
// Abbreviated excerpt: print_report() gains the callback parameter and invokes
// it with (current frame, total frames) so the Java layer receives progress.
static void print_report(int is_last_report, int64_t timer_start, int64_t cur_time,void(call_back)(int,int))
{
// __android_log_print(ANDROID_LOG_ERROR,"TAG","当前帧:%d",frame_number);
// Total frames of the first stream of the first input file — used as the
// progress denominator.  NOTE(review): nb_frames may be 0 for containers
// that don't record it; confirm before relying on it.
int total_frame_number = input_files[0]->ctx->streams[0]->nb_frames;
call_back(frame_number,total_frame_number);
}
为了方便知道在哪里添加这行,我截了图