ffmpeg测试以及代码保护

ffmpeg测试

https://juejin.cn/post/6844904101365432327

Native

#include <jni.h>
#include <android/native_window.h>
#include <android/native_window_jni.h>
#include <android/log.h>
#include <string>

extern "C"{
#include "libavformat/avformat.h"
#include "libavcodec/avcodec.h"
#include "libswscale/swscale.h"
#include "libavutil/imgutils.h"
}

extern "C" JNIEXPORT jstring JNICALL
Java_com_example_ffmpeg_1demo_MainActivity_stringFromJNI(
        JNIEnv* env,
        jobject /* this */) {
    // Smoke-test entry point: hands a constant greeting back to Java
    // as a freshly allocated UTF-8 jstring.
    return env->NewStringUTF("Hello from C++");
}
// Error logging to logcat under the "FFPlayer" tag.
// Fix: the expansion must NOT end with a semicolon — call sites already write
// `LOGE(...);`, and a trailing `;` in the macro produced an empty statement
// that breaks unbraced `if (...) LOGE(...); else ...` constructs.
#define LOGE(FORMAT, ...) __android_log_print(ANDROID_LOG_ERROR, "FFPlayer", FORMAT, ##__VA_ARGS__)

extern "C"
JNIEXPORT jstring JNICALL
Java_com_example_ffmpeg_1demo_MainActivity_getConfiguration(JNIEnv *env, jobject thiz) {
    // Expose FFmpeg's build-time configure flags to Java — handy for
    // verifying which components were compiled into the bundled .so.
    const char *config = avcodec_configuration();
    return env->NewStringUTF(config);
}
extern "C"
JNIEXPORT void JNICALL
Java_com_example_ffmpeg_1demo_MainActivity_play(JNIEnv *env, jobject type, jstring source, jobject surface) {
    // Decode the video file at `source` with FFmpeg, convert each frame to
    // RGBA and blit it onto the given Android Surface. Runs synchronously on
    // the calling thread until the file is exhausted.
    const char *path = env->GetStringUTFChars(source, NULL);
    if (path == NULL) {
        LOGE("Player Error : Can not read source path");
        return;
    }

    // Every resource is declared before the first `goto` so a single
    // `cleanup` label can release whatever was acquired. The previous version
    // leaked the format context, codec context, window, frames, out_buffer
    // and the UTF chars on each early return, used the deprecated
    // avcodec_close(), and freed packet/frame with plain av_free().
    AVFormatContext *format_context = avformat_alloc_context();
    AVCodecContext *video_codec_context = NULL;
    const AVCodec *video_codec = NULL;
    ANativeWindow *native_window = NULL;
    ANativeWindow_Buffer window_buffer;
    AVPacket *packet = NULL;
    AVFrame *frame = NULL;
    AVFrame *rgba_frame = NULL;
    uint8_t *out_buffer = NULL;
    SwsContext *data_convert_context = NULL;
    int video_stream_index = -1;
    int videoWidth = 0;
    int videoHeight = 0;
    int buffer_size = 0;
    int result;

    // Open the container (local file; no network protocol handling here)
    // and read stream metadata. av_register_all() is no longer needed.
    result = avformat_open_input(&format_context, path, NULL, NULL);
    if (result < 0) {
        LOGE("Player Error : Can not open video file");
        goto cleanup;
    }
    result = avformat_find_stream_info(format_context, NULL);
    if (result < 0) {
        LOGE("Player Error : Can not find video file stream info");
        goto cleanup;
    }

    // Locate the FIRST video stream and stop scanning (the old loop kept
    // going and silently picked the last one). nb_streams is unsigned.
    for (unsigned int i = 0; i < format_context->nb_streams; i++) {
        if (format_context->streams[i]->codecpar->codec_type == AVMEDIA_TYPE_VIDEO) {
            video_stream_index = (int) i;
            break;
        }
    }
    if (video_stream_index == -1) {
        LOGE("Player Error : Can not find video stream");
        goto cleanup;
    }

    // Build the decoder context from the stream's codec parameters.
    video_codec_context = avcodec_alloc_context3(NULL);
    if (video_codec_context == NULL) {
        LOGE("Player Error : Can not alloc codec context");
        goto cleanup;
    }
    result = avcodec_parameters_to_context(video_codec_context,
                                           format_context->streams[video_stream_index]->codecpar);
    if (result < 0) {
        LOGE("Player Error : Can not copy codec parameters");
        goto cleanup;
    }
    video_codec = avcodec_find_decoder(video_codec_context->codec_id);
    if (video_codec == NULL) {
        LOGE("Player Error : Can not find video codec");
        goto cleanup;
    }
    result = avcodec_open2(video_codec_context, video_codec, NULL);
    if (result < 0) {
        // Was a copy-pasted "Can not find video stream" message.
        LOGE("Player Error : Can not open video codec");
        goto cleanup;
    }
    videoWidth = video_codec_context->width;
    videoHeight = video_codec_context->height;

    // Size the window buffer to the VIDEO dimensions, not the screen's;
    // the compositor scales the buffer to the physical display, so a
    // mismatch only affects sampling quality, not geometry here.
    native_window = ANativeWindow_fromSurface(env, surface);
    if (native_window == NULL) {
        LOGE("Player Error : Can not create native window");
        goto cleanup;
    }
    result = ANativeWindow_setBuffersGeometry(native_window, videoWidth, videoHeight,
                                              WINDOW_FORMAT_RGBA_8888);
    if (result < 0) {
        LOGE("Player Error : Can not set native window buffer");
        goto cleanup;
    }

    // Three data containers: encoded packet, decoded (likely YUV) frame,
    // and the RGBA frame that wraps out_buffer for display.
    packet = av_packet_alloc();
    frame = av_frame_alloc();
    rgba_frame = av_frame_alloc();
    buffer_size = av_image_get_buffer_size(AV_PIX_FMT_RGBA, videoWidth, videoHeight, 1);
    out_buffer = (uint8_t *) av_malloc((size_t) buffer_size);
    if (packet == NULL || frame == NULL || rgba_frame == NULL || out_buffer == NULL) {
        LOGE("Player Error : Out of memory");
        goto cleanup;
    }
    av_image_fill_arrays(rgba_frame->data, rgba_frame->linesize, out_buffer, AV_PIX_FMT_RGBA,
                         videoWidth, videoHeight, 1);

    // Pixel-format conversion context: decoder format -> RGBA.
    data_convert_context = sws_getContext(
            videoWidth, videoHeight, video_codec_context->pix_fmt,
            videoWidth, videoHeight, AV_PIX_FMT_RGBA,
            SWS_BICUBIC, NULL, NULL, NULL);
    if (data_convert_context == NULL) {
        LOGE("Player Error : Can not create sws context");
        goto cleanup;
    }

    // Demux/decode loop. One packet may yield SEVERAL frames, so drain the
    // decoder with avcodec_receive_frame() until it stops returning 0
    // (the old code fetched at most one frame per packet).
    while (av_read_frame(format_context, packet) >= 0) {
        if (packet->stream_index == video_stream_index) {
            result = avcodec_send_packet(video_codec_context, packet);
            if (result < 0 && result != AVERROR(EAGAIN) && result != AVERROR_EOF) {
                av_packet_unref(packet);
                continue;
            }
            while (avcodec_receive_frame(video_codec_context, frame) == 0) {
                // Convert decoded pixels into out_buffer (via rgba_frame).
                sws_scale(data_convert_context, (const uint8_t *const *) frame->data,
                          frame->linesize, 0, videoHeight,
                          rgba_frame->data, rgba_frame->linesize);
                if (ANativeWindow_lock(native_window, &window_buffer, NULL) < 0) {
                    continue;
                }
                // The window row stride (in pixels) can exceed the frame
                // width, so copy row by row; a single memcpy would shear
                // the image ("花屏").
                uint8_t *dst = (uint8_t *) window_buffer.bits;
                for (int h = 0; h < videoHeight; h++) {
                    memcpy(dst + h * window_buffer.stride * 4,
                           out_buffer + h * rgba_frame->linesize[0],
                           (size_t) rgba_frame->linesize[0]);
                }
                ANativeWindow_unlockAndPost(native_window);
            }
        }
        av_packet_unref(packet);
    }

cleanup:
    // All FFmpeg free/close helpers below are NULL-safe.
    sws_freeContext(data_convert_context);
    av_free(out_buffer);
    av_frame_free(&rgba_frame);
    av_frame_free(&frame);
    av_packet_free(&packet);
    if (native_window != NULL) {
        ANativeWindow_release(native_window);
    }
    avcodec_free_context(&video_codec_context);
    avformat_close_input(&format_context);
    env->ReleaseStringUTFChars(source, path);
}

Cmake

target_link_libraries(
        # The target .so produced by this build
        ffmpegNdkCustom
        # Libraries the target links against (NDK + FFmpeg components)
        android
        avcodec
        avdevice
        avfilter
        avformat
        avutil
        swresample
        swscale
        ${log-lib}
)

MainActivity

package com.example.ffmpeg_demo;

import androidx.appcompat.app.AppCompatActivity;

import android.content.Context;
import android.content.res.AssetFileDescriptor;
import android.content.res.Resources;
import android.os.Bundle;
import android.os.Environment;
import android.view.Surface;
import android.view.SurfaceHolder;
import android.view.SurfaceView;
import android.view.View;
import android.widget.TextView;

import com.example.ffmpeg_demo.databinding.ActivityMainBinding;

import java.io.File;
import java.io.FileDescriptor;
import java.io.FileOutputStream;
import java.io.IOException;
import java.io.InputStream;

public class MainActivity extends AppCompatActivity {

    // Load the native library implementing the native methods below.
    static {
        System.loadLibrary("ffmpegNdkCustom");
    }

    private SurfaceView surfaceView;
    private SurfaceHolder surfaceHolder;

    private ActivityMainBinding binding;

    @Override
    protected void onCreate(Bundle savedInstanceState) {
        super.onCreate(savedInstanceState);

        binding = ActivityMainBinding.inflate(getLayoutInflater());
        setContentView(binding.getRoot());
        surfaceView = (SurfaceView) findViewById(R.id.surface_view);
        surfaceHolder = surfaceView.getHolder();
        // Fix: the previous version stored this Activity in a static Context
        // field, which leaks the Activity across config changes. The Activity
        // instance is used directly where a Context is needed instead.

        // Example of a call to a native method
        TextView tv = binding.sampleText;
//        tv.setText(getConfiguration());
    }

    /** Demo native method from the project template. */
    public native String stringFromJNI();

    /** Returns FFmpeg's build-time configure flags. */
    public native String getConfiguration();

    /** Decodes the video at {@code url} and renders it onto {@code surface}. */
    public native void play(String url, Surface surface);

    /**
     * onClick handler: copies the bundled raw video into the cache directory
     * (the native player needs a plain filesystem path, not a resource) and
     * hands its absolute path plus the SurfaceView's Surface to native code.
     *
     * @throws IOException if copying the raw resource fails
     */
    public void play(View view) throws IOException {
        int resid = R.raw.test;
        InputStream inputStream = getResources().openRawResource(resid);
        File videoPath = new File(getCacheDir(), "test.mp4");
        writeInputStreamToFile(inputStream, videoPath);

        System.out.println(videoPath);
        if (videoPath.exists()) {
            System.out.println("视频文件存在:" + videoPath);
            play(videoPath.getAbsolutePath(), surfaceHolder.getSurface());
        } else {
            System.out.println("视频文件不存在:" + videoPath);
        }
    }

    /**
     * Copies {@code inputStream} to {@code file}. Uses try-with-resources so
     * both streams are closed on every path; close() failures now propagate
     * as (possibly suppressed) IOExceptions instead of being swallowed by
     * printStackTrace in a finally block.
     */
    private static void writeInputStreamToFile(InputStream inputStream, File file) throws IOException {
        try (InputStream in = inputStream;
             FileOutputStream out = new FileOutputStream(file)) {
            byte[] buffer = new byte[8192];
            int length;
            while ((length = in.read(buffer)) != -1) {
                out.write(buffer, 0, length);
            }
        }
    }
}

代码保护

BlackObfuscator Android Studio 插件地址:

https://github.com/CodingGay/BlackObfuscator-ASPlugin?tab=readme-ov-file

build.gradle插件配置

buildscript {
    repositories {
        maven { url 'https://jitpack.io' }
    }
    dependencies {
        classpath "com.github.CodingGay:BlackObfuscator-ASPlugin:3.9"

    }
}
plugins {
    id 'com.android.application'
    id 'top.niunaijun.blackobfuscator'
}
// Obfuscation configuration for the BlackObfuscator Gradle plugin
BlackObfuscator {
    // Whether obfuscation is enabled
    enabled true
    // Obfuscation depth
    depth 2
    // Packages or classes to obfuscate (matched by prefix)
    obfClass = ["com.example", "com.example.protection.Dog"]
    // Packages or classes listed in blackClass are excluded from obfuscation (matched by prefix)
    blackClass = ["com.example.protection.MainActivity"]
}