
A First Look at Android Audio/Video Development [3] (A Simple Player)

The previous post finished a simple stream pusher; this time we implement a simple player that pulls the stream and plays it. It is essentially the pusher's pipeline run in reverse:

  1. Connect to the streaming server and pull the stream
  2. Decode the bitstream into raw frames
  3. Convert the resulting YUV data to RGB and draw it on screen

JNI Binding

Java side

class LyjPlayer {

    companion object {
        // State constants used in callbacks
        const val STATE_CONNECTED = 0
        const val STATE_START = STATE_CONNECTED + 1
        const val STATE_STOP = STATE_START + 1

        const val ERROR_CONNECT_TIMEOUT = 0
        const val ERROR_STREAM = ERROR_CONNECT_TIMEOUT + 1
        const val NONE_VIDEO_STREAM = ERROR_STREAM + 1
        const val UNKNOW = NONE_VIDEO_STREAM + 1

        init {
            LibLoader.loadLib("lyjplayer")
        }
    }

    init {
        initPlayer()
    }

    external fun initPlayer()
    // Set the Surface used to display video frames
    external fun setSurface(surface: Surface)

    external fun setVideoCallBack(callback: VideoCallBack)

    external fun startPlay(url: String): Int

    external fun stopPlay(): Int

    external fun release()
}

Native side

Here a map holds the player instances: the string returned by the Java object's toString method (by default the class name plus an identity hash code) is used as the key identifying the corresponding native object, giving a one-to-one mapping.

The native layer also needs an ANativeWindow (think of it as the native-side Surface) to display frames, so the player_set_surface method creates one from the Surface passed down from the Java layer.

register_jni.cpp

// Element count of a fixed-size array
template<class T>
int arrayLen(T &array) {
    return (sizeof(array) / sizeof(array[0]));
}

#ifdef __cplusplus
extern "C" {
#endif

const char *cls_player = "com/lyj/learnffmpeg/LyjPlayer";

// Instance map
map<string, LyjPlayer *> player_map;

// Generate a key from the Java object to identify the native object;
// the key is the object's default toString (class name + identity hash)
const string getKey(JNIEnv *env, jobject obj) {
    jclass cls = env->GetObjectClass(obj);
    jmethodID mid = env->GetMethodID(cls, "toString", "()Ljava/lang/String;");
    jstring jstr = static_cast<jstring>(env->CallObjectMethod(obj, mid));
    const char *chars = env->GetStringUTFChars(jstr, nullptr);
    string key(chars);
    // Release the UTF chars so the JNI string copy isn't leaked
    env->ReleaseStringUTFChars(jstr, chars);
    return key;
}

void player_init_play(JNIEnv *env, jobject obj) {
    string key = getKey(env, obj);
    LyjPlayer *player = new LyjPlayer();
    // Save the JavaVM so native threads can call back into Java
    env->GetJavaVM(&player->vm);
    player_map[key] = player;
    player->init();
}

// Set the Surface
void player_set_surface(JNIEnv *env, jobject obj, jobject surface) {
    string key = getKey(env, obj);
    LyjPlayer *player = player_map[key];
    if (player) {
        // Create the ANativeWindow backing the given Surface; frames are drawn into it
        ANativeWindow *window = ANativeWindow_fromSurface(env, surface);
        if (!window) {
            LOGE("window null");
        } else {
            player->window = window;
        }
    }
}

// Set the callback
void player_set_callback(JNIEnv *env, jobject obj, jobject callback) {
    string key = getKey(env, obj);
    LyjPlayer *player = player_map[key];
    if (player) {
        if (player->callback) {
            env->DeleteGlobalRef(player->callback);
        }
        player->callback = env->NewGlobalRef(callback);
    }
}

// Start playback
int player_start_play(JNIEnv *env, jobject obj, jstring url) {
    const char *path = nullptr;
    path = env->GetStringUTFChars(url, nullptr);
    string key = getKey(env, obj);
    LyjPlayer *player = player_map[key];
    if (player) {
        player->stopPlay();
        player->startPlay(path);
    } else {
        LOGE("cant not find player");
    }
    env->ReleaseStringUTFChars(url, path);
    return 0;
}

// Stop playback and release resources
int player_stop_play(JNIEnv *env, jobject obj) {
    string key = getKey(env, obj);
    LyjPlayer *player = player_map[key];
    if (player) {
        player->stopPlay();
    }
    return 0;
}

void player_release(JNIEnv *env, jobject obj) {
    string key = getKey(env, obj);
    LyjPlayer *player = player_map[key];
    if (player) {
        if (player->callback) {
            env->DeleteGlobalRef(player->callback);
        }
        player->release();
        player_map.erase(key);
        delete player;
    }
}

// Method mapping table
JNINativeMethod player_methods[] = {
        {"initPlayer",       "()V",                                    (void *) player_init_play},
        {"setSurface",       "(Landroid/view/Surface;)V",              (void *) player_set_surface},
        {"setVideoCallBack", "(Lcom/lyj/learnffmpeg/VideoCallBack;)V", (void *) player_set_callback},
        {"startPlay",        "(Ljava/lang/String;)I",                  (void *) player_start_play},
        {"stopPlay",         "()I",                                    (void *) player_stop_play},
        {"release",          "()V",                                    (void *) player_release}
};
// JNI registration
int jniRegisterNativeMethods(JNIEnv *env, const char *className, const JNINativeMethod *methods,
                             int count) {
    int res = -1;
    jclass cls = env->FindClass(className);
    if (cls != nullptr) {
        int ret = env->RegisterNatives(cls, methods, count);
        // RegisterNatives returns JNI_OK (0) on success
        if (ret == JNI_OK) {
            res = 0;
        }
    }
    env->DeleteLocalRef(cls);
    return res;
}

JNIEXPORT jint JNI_OnLoad(JavaVM *vm, void *reserved) {
    JNIEnv *env = nullptr;
    jint result = -1;
    if (vm->GetEnv((void **) (&env), JNI_VERSION_1_6) != JNI_OK) {
        return result;
    }
    jniRegisterNativeMethods(env, cls_player, player_methods, arrayLen(player_methods));
    return JNI_VERSION_1_6;
}

JNIEXPORT void JNI_OnUnload(JavaVM *jvm, void *reserved) {}

#ifdef __cplusplus
}
#endif

Player Core Logic

Basic approach

  1. Start a thread that keeps reading the stream, decodes the bitstream, and pushes the decoded frames into a queue (a minimal sketch of such a queue follows this list)
  2. Compute each frame's duration from the stream info, and run a periodic task that keeps popping RGB data from the queue and drawing it
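
The LinkedBlockingQueue helper used by the player isn't listed in this post; as a rough idea, here is a minimal sketch of such a queue, assuming a plain mutex-plus-condition-variable implementation with a blocking pop() (the real helper in the demo may differ):

#include <mutex>
#include <queue>
#include <condition_variable>

template<class T>
class LinkedBlockingQueue {
private:
    std::queue<T> items;
    std::mutex mtx;
    std::condition_variable cond;
public:
    // Producer side: called from the decode thread
    void push(T item) {
        {
            std::lock_guard<std::mutex> lock(mtx);
            items.push(std::move(item));
        }
        cond.notify_one();
    }

    // Consumer side: blocks until a frame is available (called by the render timer)
    T pop() {
        std::unique_lock<std::mutex> lock(mtx);
        cond.wait(lock, [this] { return !items.empty(); });
        T item = std::move(items.front());
        items.pop();
        return item;
    }
};

The blocking queue decouples the two speeds involved: the decode thread fills it as fast as packets arrive, while the render timer drains it at a steady frame cadence.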

Implementation

lyjplayer.h

#ifndef LYJPLAYER_H
#define LYJPLAYER_H

#include <atomic>
#include <string>
#include <thread>
#include <jni.h>
#include <android/native_window.h>
// The project's own headers declaring LinkedBlockingQueue, Timer,
// PlayState and PlayError are omitted here

#ifdef __cplusplus
extern "C" {
#endif

#include <libavformat/avformat.h>
#include <libswscale/swscale.h>

#ifdef __cplusplus
}
#endif

using namespace std;

struct FrameData {
    AVFrame *frame;
    // The frame's actual pixel data
    uint8_t *buffer;
};

class LyjPlayer {
private:
    // Stored as std::string: the JNI layer releases the original chars
    // right after startPlay returns
    string url;
    int width = 0;
    int height = 0;
    atomic_bool playing = {false};
    // AVFormatContext demuxes containers such as flv, avi, rmvb, mp4
    AVFormatContext *formatContext = nullptr;
    AVCodecContext *codecContext = nullptr;
    int buffer_size;
    AVFrame *frame = nullptr, *temp = nullptr;
    AVPacket *packet = nullptr;
    SwsContext *sws_context = nullptr;
    uint8_t *buffer = nullptr;
    ANativeWindow_Buffer windowBuffer;
    thread task;
    // Frame counter
    int index = 0;
    // Buffer between the network/decode thread and the render timer
    LinkedBlockingQueue<FrameData> queue;
    // Timer driving the render loop
    Timer timer;

    int decodeFrame();

    int render();

    int destroyPlay();

    void callbackState(JNIEnv *env, PlayState state);

    void callbackError(JNIEnv *env, PlayError error);
public:
    JavaVM *vm = nullptr;
    jobject callback = nullptr;
    ANativeWindow *window = nullptr;

    LyjPlayer();

    int init();

    void startPlay(const char *url);

    int stopPlay();

    void release();

    virtual ~LyjPlayer();

};
#endif // LYJPLAYER_H

lyjplayer.cpp

Initialization

int LyjPlayer::init() {
    // MediaCodec hardware decoding goes through JNI, so FFmpeg needs the JavaVM
    av_jni_set_java_vm(vm, nullptr);
    return 0;
}

Start playback

Timer is a simple periodic task: it draws one frame every 40 ms so the frame rate stays steady. The 40 ms figure is derived from the frame rate and the video stream's time base. Here the stream's time base is 1/1000, i.e. one second is divided into 1000 ticks, and one frame spans 40 ticks, i.e. 40 milliseconds (at 25 fps: 1000 ticks per second / 25 frames per second = 40 ticks per frame).
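
The Timer helper isn't listed in the post either; a minimal sketch, assuming a dedicated thread that fires the callback at a fixed interval (the demo's real helper may differ), could look like this:

#include <atomic>
#include <chrono>
#include <functional>
#include <thread>

class Timer {
private:
    std::atomic_bool active{false};
    std::thread worker;
public:
    // Fire `callback` every `intervalMs` milliseconds until clear() is called
    void setInterval(std::function<void()> callback, int intervalMs) {
        active = true;
        worker = std::thread([this, callback, intervalMs] {
            while (active) {
                std::this_thread::sleep_for(std::chrono::milliseconds(intervalMs));
                if (active) {
                    callback();
                }
            }
        });
    }

    void clear() {
        active = false;
        if (worker.joinable()) {
            worker.join();
        }
    }

    ~Timer() { clear(); }
};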

Using MediaCodec hardware decoding only requires selecting the matching decoder.

void LyjPlayer::startPlay(const char *url) {
    // Copy the URL into the std::string member; the JNI layer releases
    // the original chars right after this call returns
    this->url = url;
    // Join any previous decode thread before starting a new one
    if (task.joinable()) {
        task.join();
    }
    playing = true;
    // Stream-reading / decoding thread
    task = thread([this] {
        JNIEnv *env = nullptr;
        int ret = vm->AttachCurrentThread(&env, nullptr);
        avformat_network_init();
        formatContext = avformat_alloc_context();
        // Open the input, i.e. connect to the stream server
        LOGE("connecting");
        ret = avformat_open_input(&formatContext, this->url.c_str(), nullptr, nullptr);
        if (ret < 0) {
            LOGE("打开文件失败code:%d msg:%s", ret, av_err2str(ret));
            callbackError(env, PlayError::CONNECT_TIMEOUT);
            vm->DetachCurrentThread();
            destroyPlay();
            return ret;
        }
        callbackState(env, PlayState::CONNECTED);
        LOGE("连接到流媒体成功");
        ret = avformat_find_stream_info(formatContext, nullptr);
        if (ret < 0) {
            LOGE("查找流失败 %s", av_err2str(ret));
            callbackError(env, PlayError::ERROR_STREAM);
            vm->DetachCurrentThread();
            destroyPlay();
            return ret;
        }
        int index = -1;
        for (int i = 0; i < formatContext->nb_streams; i++) {
            // Find the video stream; with audio there is more than one stream, hence the search
            if (formatContext->streams[i]->codecpar->codec_type == AVMEDIA_TYPE_VIDEO) {
                index = i;
                break;
            }
        }
        if (index == -1) {
            LOGE("没有视频流");
            callbackError(env, PlayError::NONE_VIDEO_STREAM);
            vm->DetachCurrentThread();
            destroyPlay();
            return -1;
        }
        AVStream *videoStream = formatContext->streams[index];
        AVCodecParameters *params = videoStream->codecpar;
        LOGE("AVCodecParameters id:%d, width:%d, height%d", params->codec_id, params->width,
             params->height);
        // Find a decoder
        AVCodecID codecId = videoStream->codecpar->codec_id;
        AVCodec *codec = nullptr;
        // Use H.264 hardware decoding
        if (codecId == AV_CODEC_ID_H264) {
            codec = avcodec_find_decoder_by_name("h264_mediacodec");
            if (codec == nullptr) {
                LOGE("can not find mediacodec");
                codec = avcodec_find_decoder(codecId);
            } else {
                LOGE("使用硬解");
            }
        }
        if (codec == nullptr) {
            LOGE("找不到解码器");
            callbackError(env, PlayError::UNKNOW);
            vm->DetachCurrentThread();
            destroyPlay();
            return -1;
        }
        codecContext = avcodec_alloc_context3(codec);
        // Copy the stream's codec parameters into the decoder context
        avcodec_parameters_to_context(codecContext, videoStream->codecpar);
        ret = avcodec_open2(codecContext, codec, nullptr);
        if (ret < 0) {
            LOGE("初始化解码器失败:%s", av_err2str(ret));
            callbackError(env, PlayError::UNKNOW);
            vm->DetachCurrentThread();
            destroyPlay();
            return -1;
        }
        this->width = codecContext->width;
        this->height = codecContext->height;
        buffer_size = av_image_get_buffer_size(AV_PIX_FMT_RGBA, width, height, 1);
        temp = av_frame_alloc();
        packet = av_packet_alloc();
        // Create the conversion context used to turn YUV frames into RGBA
        sws_context = sws_getContext(width, height, codecContext->pix_fmt, width,
                                     height, AV_PIX_FMT_RGBA, SWS_BICUBIC,
                                     nullptr, nullptr, nullptr);
        // Configure the window buffer geometry
        if (ANativeWindow_setBuffersGeometry(window, width, height, WINDOW_FORMAT_RGBA_8888) < 0) {
            callbackError(env, PlayError::UNKNOW);
            vm->DetachCurrentThread();
            destroyPlay();
            LOGE("初始化播放窗口失败");
            return -1;
        }
        // Get the frame rate
        double fps = av_q2d(videoStream->avg_frame_rate);
        AVRational timebase = videoStream->time_base;
        // Per-frame duration in milliseconds: ticks per frame divided by ticks per millisecond
        int duration = static_cast<int>(timebase.den / timebase.num / fps / (timebase.den / 1000));
        LOGE("videoStream FPS %lf, duration %d", fps, duration);
        // Draw on a timer to keep the frame rate steady
        timer.setInterval([this] {
            // Draw one frame
            render();
        }, duration);
        while (playing) {
            // Read one packet from the stream
            ret = av_read_frame(formatContext, packet);
            if (ret < 0) {
                // Nothing to read right now; keep polling the stream
                continue;
            }
            if (packet->stream_index == index) {
                // Decode one packet
                decodeFrame();
            }
            av_packet_unref(packet);
        }
        vm->DetachCurrentThread();
        return 0;
    });
}

decodeFrame: decoding the data

int LyjPlayer::decodeFrame() {
    int ret = avcodec_send_packet(codecContext, packet);
    if (ret == AVERROR(EAGAIN)) {
        ret = 0;
    } else if (ret < 0) {
        LOGE("avcodec_send_packet err code: %d, msg:%s", ret, av_err2str(ret));
        // Stop the read loop instead of freeing the packet here: the caller's
        // loop still unrefs the packet after this returns
        playing = false;
        return -1;
    }
    LOGE("send a packet");
    while (ret >= 0) {
        ret = avcodec_receive_frame(codecContext, temp);
        if (ret == AVERROR(EAGAIN) || ret == AVERROR_EOF) {
            // This packet has been fully consumed
            return 0;
        } else if (ret < 0) {
            LOGE("avcodec_receive_frame error %s", av_err2str(ret));
            // Same as above: stop the loop rather than freeing mid-iteration
            playing = false;
            return -1;
        }
        AVFrame *frame = av_frame_alloc();
        uint8_t *buffer = static_cast<uint8_t *>(av_malloc(buffer_size));
        av_image_fill_arrays(frame->data, frame->linesize, buffer, AV_PIX_FMT_RGBA, width, height,
                             1);
        // Convert the frame data to RGBA
        sws_scale(sws_context, temp->data, temp->linesize, 0, codecContext->height,
                  frame->data, frame->linesize);
        FrameData frameData = {frame, buffer};
        queue.push(frameData);
    }
    return ret;
}
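
destroyPlay, used in the error paths of startPlay, isn't listed in the post either. A plausible sketch, assuming it runs once the decode loop and render timer have stopped (clear() as the Timer stop method is an assumption from the sketch above), releases the FFmpeg objects created in startPlay:

int LyjPlayer::destroyPlay() {
    // Stop the render timer first so nothing pops the queue mid-teardown
    timer.clear();
    if (packet) {
        av_packet_free(&packet);
    }
    if (temp) {
        av_frame_free(&temp);
    }
    if (sws_context) {
        sws_freeContext(sws_context);
        sws_context = nullptr;
    }
    if (codecContext) {
        avcodec_free_context(&codecContext);
    }
    if (formatContext) {
        avformat_close_input(&formatContext);
    }
    return 0;
}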

Displaying each frame

int LyjPlayer::render() {
    int ret = 0;
    JNIEnv *env = nullptr;
    FrameData frameData = queue.pop();
    AVFrame *frame = frameData.frame;
    uint8_t *buffer = frameData.buffer;
    // Callback fired when the first frame is about to be drawn
    if (index == 0) {
        ret = vm->AttachCurrentThread(&env, nullptr);
        callbackState(env, PlayState::START);
    }
    index++;
    ret = ANativeWindow_lock(window, &windowBuffer, nullptr);
    if (ret < 0) {
        LOGE("cannot lock window");
    } else {
        uint8_t *bufferBits = (uint8_t *) windowBuffer.bits;
        // Copy row by row: rendering is just copying the RGBA data line by line into the ANativeWindow byte buffer
        for (int h = 0; h < height; h++) {
            // RGBA has four channels, 4 bytes per pixel, hence stride * 4
            memcpy(bufferBits + h * windowBuffer.stride * 4,
                   buffer + h * frame->linesize[0],
                   static_cast<size_t>(frame->linesize[0]));
        }
        ANativeWindow_unlockAndPost(window);
    }
    av_free(buffer);
    av_frame_free(&frame);
    if (env) {
        vm->DetachCurrentThread();
    }
    return ret;
}
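
The callbackState and callbackError bodies aren't listed in the post. A minimal sketch follows; it assumes the Java VideoCallBack interface declares onState(int) and onError(int) — those method names are guesses here, and the real interface may differ:

void LyjPlayer::callbackState(JNIEnv *env, PlayState state) {
    if (env == nullptr || callback == nullptr) {
        return;
    }
    jclass cls = env->GetObjectClass(callback);
    // "onState" is an assumed method name on VideoCallBack
    jmethodID mid = env->GetMethodID(cls, "onState", "(I)V");
    if (mid != nullptr) {
        env->CallVoidMethod(callback, mid, static_cast<jint>(state));
    } else {
        env->ExceptionClear();
    }
    env->DeleteLocalRef(cls);
}

void LyjPlayer::callbackError(JNIEnv *env, PlayError error) {
    if (env == nullptr || callback == nullptr) {
        return;
    }
    jclass cls = env->GetObjectClass(callback);
    // "onError" is an assumed method name on VideoCallBack
    jmethodID mid = env->GetMethodID(cls, "onError", "(I)V");
    if (mid != nullptr) {
        env->CallVoidMethod(callback, mid, static_cast<jint>(error));
    } else {
        env->ExceptionClear();
    }
    env->DeleteLocalRef(cls);
}

The int passed up presumably lines up with the STATE_* / ERROR_* constants defined in LyjPlayer's companion object.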

That's basically the whole flow, and the code above is the core of pulling and playing a stream. The comments cover the details, so I won't repeat them here; the rest of the code is in the demo. You may notice that pushing and pulling use largely the same APIs; what changes is mostly the order of the calls.

Original article: https://juejin.im/post/5ef30064f265da22fe240256