android调用ffmpeg解析rtsp协议的视频流
文章目录
- 一、背景
- 二、解析rtsp数据
- 1、C层功能代码
- 2、jni层的定义
- 3、app层的调用
- 三、源码下载
一、背景
本demo主要介绍android调用ffmpeg中的接口解析rtsp协议的视频流(不解析音频),得到yuv数据,把yuv转bitmap在android设备上显示,涉及到打开视频、解封装、解码、回调yuv数据。学习记录帖,C语言小白,不足的地方请指正,多谢!
二、解析rtsp数据
1、C层功能代码
Decoder.h
#ifndef DECODERTSP_DECODER_H
#define DECODERTSP_DECODER_H

#include <thread>
#include <functional>  // C++ header: must NOT be inside the extern "C" block below
#include "include/jniLog.h"

// FFmpeg is a C library; wrap its headers so the C++ compiler uses C linkage
// for their symbols. Only C headers may appear in here — putting a C++
// template header (like <functional>) inside extern "C" is ill-formed.
extern "C"
{
#include <libavutil/time.h>
#include <libavcodec/avcodec.h>
#include <libavcodec/packet.h>
#include <libavutil/imgutils.h>
#include <libswscale/swscale.h>
#include <libavformat/avformat.h>
#include <libavutil/opt.h>
};
using namespace std;  // NOTE(review): discouraged in headers; kept for compatibility with existing includers

// Per-frame callback: receives one tightly packed YUV420P frame and its size
// in bytes. The buffer is only valid for the duration of the call — copy it
// if you need to keep it.
typedef std::function<void(uint8_t *buf, int size)> Callback;

// Opens the RTSP url, decodes its video stream (audio is ignored) and invokes
// `callback` once per decoded frame. Blocks the calling thread until the
// stream ends, an error occurs, or DoStop() is called.
// Returns 0 on success, a negative error code otherwise.
int ReadFrameAndDecoder(const char *url, Callback callback);

// Requests the decode loop to stop; safe to call from another thread.
void DoStop();
#endif //DECODERTSP_DECODER_H
Decoder.cpp
#include "include/Decoder.h"
#include "include/jniLog.h"

#include <atomic>
bool isStop = false;
int ReadFrameAndDecoder(const char* m_Url,Callback callback){
char url[100] = {0};
strcpy(url,m_Url);
AVFormatContext *pFormatCtx = avformat_alloc_context();
AVDictionary *options = NULL;
av_dict_set(&options, "buffer_size", "1024000", 0);// 设置缓冲区大小
av_dict_set(&options, "stimeout", "20000000", 0);
av_dict_set(&options, "max_delay", "30000000", 0);
// av_dict_set(&options, "rtsp_transport", "tcp", 0); //使用 TCP 传输
LOGI("ReadFrameAndDecoder:url = %s",url);
if (avformat_open_input(&pFormatCtx, url, NULL, NULL) != 0) // 打开视频文件
return -1;
if (avformat_find_stream_info(pFormatCtx, NULL) < 0) // 查找视频流
return -2;
//视频解码,需要找到视频对应的AVStream所在pFormatCtx->streams的索引位置
int video_stream_idx = -1;
for (int i = 0; i < pFormatCtx->nb_streams; i++) {
//根据类型判断,是否是视频流
if (pFormatCtx->streams[i]->codecpar->codec_type == AVMEDIA_TYPE_VIDEO) {
video_stream_idx = i;
break;
}
}
//只有知道视频的编码方式,才能够根据编码方式去找到解码器
//获取视频流中的编解码上下文
AVCodecContext *pCodecCtx = avcodec_alloc_context3(NULL);;
avcodec_parameters_to_context(pCodecCtx, pFormatCtx->streams[video_stream_idx]->codecpar);
// AVDISCARD_NONKEY; // 丢弃非关键帧(如B帧)
// AVDISCARD_NONINTRA; // 丢弃所有非帧内编码的帧(这个参数不会灰屏)
// AVDISCARD_NONREF // 丢弃所有非参考帧
// AVDISCARD_BIDIR // 丢弃所有双向预测帧
// AVDISCARD_DEFAULT
// pCodecCtx->skip_frame = AVDISCARD_NONKEY;
//4.根据编解码上下文中的编码id查找对应的解码
//AVCodec *pCodec = const_cast<AVCodec *>(avcodec_find_decoder(pCodecCtx->codec_id));
AVCodec *pCodec = const_cast<AVCodec *>(avcodec_find_decoder(AV_CODEC_ID_HEVC));
if (pCodec == NULL)
return -3;
// 打开解码器
if (avcodec_open2(pCodecCtx, pCodec, NULL) < 0)
return -4;
// 设置参数(不缓冲,低延时)
// av_opt_set(pCodecCtx->priv_data, "tune", "zerolatency", 0);
// pCodecCtx->flags |= AV_CODEC_FLAG_LOW_DELAY;
// 分配视频帧(装载解码后的数据)
AVFrame *pFrame = av_frame_alloc();
AVPacket packet; // 读取视频帧(解码前的数据包)
while (av_read_frame(pFormatCtx, &packet) >= 0 && !isStop) {
if (packet.stream_index == video_stream_idx) {
int got_picture;
// 向解码器输入数据包
got_picture = avcodec_send_packet(pCodecCtx, &packet);
if (got_picture < 0) {
LOGI("got_picture = %d", got_picture);
continue;
}
while (got_picture == 0) {
// 从解码器获取帧 返回值 -11:数据包不足?
got_picture = avcodec_receive_frame(pCodecCtx, pFrame);
//此时,pFrame中的数据就是YUV数据
if(got_picture == 0){
// 计算 frame 的数据大小
int data_size = av_image_get_buffer_size(AV_PIX_FMT_YUV420P, pFrame->width, pFrame->height, 1);
if (data_size < 0) {
LOGE("Could not calculate buffer size");
return -5;
}
// 分配内存来存储 byte[]
uint8_t *buffer = (uint8_t *)av_malloc(data_size * sizeof(uint8_t));
if (!buffer) {
LOGE("Could not allocate memory for buffer");
return -6;
}
// 将 AVFrame 的数据复制到 buffer 中
int ret = av_image_copy_to_buffer(buffer, data_size,
(const uint8_t * const *)pFrame->data, pFrame->linesize,
AV_PIX_FMT_YUV420P, pFrame->width, pFrame->height, 1);
if (ret < 0) {
LOGE("Could not copy image data to buffer");
av_free(buffer);
return -7;
}
if (callback){
callback(buffer,data_size * sizeof(uint8_t));
}
// 释放 buffer
av_free(buffer);
}else{
LOGI("avcodec_receive_frame # got_picture = %d", got_picture);
}
}
}
av_packet_unref(&packet);
}
// 释放资源
av_frame_free(&pFrame);
avcodec_close(pCodecCtx);
avformat_close_input(&pFormatCtx);
return 0;
}
void DoStop(){
isStop = true;
}
2、jni层的定义
native-lib.cpp
#include <jni.h>
#include <string>
#include <include/jniLog.h>
#include "include/Decoder.h"
extern "C"
JNIEXPORT jint JNICALL
Java_com_hisign_decodertsp_DecodeLib_native_1readAndDecode(JNIEnv *env, jobject thiz,
                                                           jstring rtsp_url) {
    // Bridge: hand the url to the C decoder and forward each decoded frame to
    // the Java method DecodeLib.packetEventCallback(byte[]).
    const char *url = env->GetStringUTFChars(rtsp_url, nullptr);
    if (url == nullptr) return -1;  // OOM: GetStringUTFChars failed
    LOGI("url = %s", url);
    // Look up the Java-side callback; release the class local ref right away
    // (the original leaked it).
    jclass clazz = env->GetObjectClass(thiz);
    jmethodID mid = env->GetMethodID(clazz, "packetEventCallback", "([B)V");
    env->DeleteLocalRef(clazz);
    if (!mid) {
        // GetMethodID leaves a pending NoSuchMethodError; clear it so later
        // JNI calls on this thread are legal.
        env->ExceptionClear();
        LOGI("StartRestAndDecodePackage, mid is null");
    }
    // Capture by value: ReadFrameAndDecoder runs synchronously on this thread,
    // so env/thiz stay valid; capturing locals by reference was unnecessary.
    Callback dataCallback = [env, mid, thiz](uint8_t *buf, int size) {
        if (mid != nullptr) {
            jbyteArray array1 = env->NewByteArray(size);
            if (array1 == nullptr) {  // allocation failed — skip this frame
                env->ExceptionClear();
                return;
            }
            env->SetByteArrayRegion(array1, 0, size, (const jbyte *) buf);
            env->CallVoidMethod(thiz, mid, array1);
            if (env->ExceptionCheck()) {
                // Don't let a Java exception cross back into native code.
                env->ExceptionDescribe();
                env->ExceptionClear();
            }
            env->DeleteLocalRef(array1);
        }
    };
    int ret = ReadFrameAndDecoder(url, dataCallback);
    LOGI("ReadFrameAndDecoder ret = %d", ret);
    env->ReleaseStringUTFChars(rtsp_url, url);
    // Original bug: always returned 0, hiding decode failures from Java.
    return ret;
}
extern "C"
JNIEXPORT jint JNICALL
Java_com_hisign_decodertsp_DecodeLib_native_1Stop(JNIEnv *env, jobject thiz) {
// Thin JNI wrapper: asks the C-layer decode loop to exit (see DoStop in
// Decoder.cpp). The loop itself runs on the thread that called
// native_readAndDecode. Always returns 0.
DoStop();
return 0;
}
3、app层的调用
/**
 * Java facade over the native RTSP decoder library.
 * start() blocks until the stream ends, so call it from a background thread;
 * decoded YUV frames arrive via the registered {@link EventCallback}.
 */
public class DecodeLib {
    static {
        System.loadLibrary("decodertsp");
    }

    // Written from the app thread (addEventCallback) and read from the decode
    // thread (packetEventCallback): volatile makes updates visible across
    // threads — the original non-volatile field had no such guarantee.
    private volatile EventCallback mEventCallback = null;

    /** Starts reading and decoding; blocks the calling thread until the
     *  stream ends or {@link #stop()} is invoked. */
    public void start(String url) {
        native_readAndDecode(url);
    }

    /** Requests the native decode loop to exit; safe from any thread. */
    public void stop() {
        native_Stop();
    }

    /** Registers the consumer of decoded YUV frames. */
    public void addEventCallback(EventCallback callback) {
        mEventCallback = callback;
    }

    // Invoked from native code with one packed YUV frame.
    private void packetEventCallback(byte[] data) {
        // Snapshot the field once: the original re-read it after the null
        // check, which could NPE if the callback were cleared concurrently.
        EventCallback cb = mEventCallback;
        if (cb != null) {
            cb.onReceiveData(data);
        }
    }

    // Reads frames from the RTSP stream and decodes them in native code.
    private native int native_readAndDecode(String rtsp_url);
    private native int native_Stop();

    /**
     * Receives decoded YUV frame data.
     */
    public interface EventCallback {
        void onReceiveData(byte[] yuv);
    }
}
MainActivity.java
// Demo UI: takes a camera IP, builds an RTSP url, starts native decoding on a
// worker thread and renders each decoded YUV frame as a Bitmap in an ImageView.
public class MainActivity extends AppCompatActivity {
// SharedPreferences key used to remember the last entered camera IP.
private final static String KEY_IP = "KEY_IP";
// NOTE(review): frame size is hard-coded to 1080p; if the camera streams a
// different resolution, i420ToBitmap below will produce a corrupt image —
// confirm against the actual stream (the C layer knows the real frame size).
private static int width = 1920;
private static int height = 1080;
private ActivityMainBinding binding;
private DecodeLib decodeLib;
@Override
protected void onCreate(Bundle savedInstanceState) {
super.onCreate(savedInstanceState);
binding = ActivityMainBinding.inflate(getLayoutInflater());
setContentView(binding.getRoot());
decodeLib = new DecodeLib();
// Restore the previously used IP so the user does not have to retype it.
String ip = SPUtils.getInstance().getString(KEY_IP);
binding.etIp.setText(ip);
binding.btnStart.setOnClickListener(new View.OnClickListener() {
@Override
public void onClick(View view) {
String ip = binding.etIp.getText().toString();
if(TextUtils.isEmpty(ip)){
Toast.makeText(MainActivity.this, "please input ip!", Toast.LENGTH_SHORT).show();
return;
}
SPUtils.getInstance().put(KEY_IP,ip);
String url = "rtsp://"+ip+":554/livestream/0";
// startRtsp blocks until the stream stops, so run it off the UI thread.
new Thread(){
public void run(){
startRtsp(url);
}
}.start();
KeyboardUtils.hideSoftInput(MainActivity.this);
}
});
binding.btnStop.setOnClickListener(new View.OnClickListener() {
@Override
public void onClick(View view) {
// Signals the native decode loop to exit; the worker thread then finishes.
decodeLib.stop();
}
});
}
// Registers the frame callback and starts decoding (blocks the calling thread).
private void startRtsp(String url){
decodeLib.addEventCallback(new DecodeLib.EventCallback() {
@Override
public void onReceiveData(byte[] yuv) {
// Called on the decode thread: hop to the UI thread to display the frame.
runOnUiThread(new Runnable() {
@Override
public void run() {
Bitmap bmp = YuvToBitmapConverter.i420ToBitmap(yuv, width, height);
if(bmp != null){
binding.img.setImageBitmap(bmp);
}
}
});
}
});
decodeLib.start(url);
}
}
三、源码下载
https://download.csdn.net/download/dami_lixm/90408495?spm=1001.2014.3001.5503