/**
 * Hand one decoded frame over to the Java layer.
 *
 * Video frames (YUV420P / NV12 / RGBA) are copied into fresh Java byte
 * arrays and delivered through mPlayerJni.onVideoFrameArrived; hardware
 * (MediaCodec) frames are released straight to the output surface.
 * Audio frames (FLTP) forward the PCM buffer prepared by mAudioDecoder
 * through mPlayerJni.onAudioFrameArrived.
 *
 * NOTE(review): avFrame->format is compared against both AVPixelFormat
 * and AVSampleFormat constants; their integer ranges overlap (e.g.
 * AV_SAMPLE_FMT_FLTP aliases a low pixel-format value), so this relies
 * on the caller never routing a video frame with that pixel format
 * here — confirm against the decode loop.
 *
 * @param env     JNI environment of the calling thread.
 * @param avFrame decoded frame owned by the caller; not freed here.
 */
void FFMpegPlayer::doRender(JNIEnv *env, AVFrame *avFrame) {
    // Copy one plane into a Java byte array honoring FFmpeg row padding:
    // avFrame->linesize[i] may be wider than the visible row for alignment,
    // and a single bulk copy of width*height bytes would smear the padding
    // into the middle of the output (sheared/garbled picture).
    auto copyPlane = [env](auto dst, const uint8_t *src,
                           int rowBytes, int rows, int stride) {
        if (stride == rowBytes) {
            // Rows are contiguous in the source: one bulk copy.
            env->SetByteArrayRegion(dst, 0, rowBytes * rows,
                                    reinterpret_cast<const jbyte *>(src));
        } else {
            // Padded rows: copy row by row, skipping the padding bytes.
            for (int row = 0; row < rows; row++) {
                env->SetByteArrayRegion(dst, row * rowBytes, rowBytes,
                                        reinterpret_cast<const jbyte *>(src + (size_t) row * stride));
            }
        }
    };
    if (avFrame->format == AV_PIX_FMT_YUV420P) {
        if (!avFrame->data[0] || !avFrame->data[1] || !avFrame->data[2]) {
            LOGE("doRender failed, no yuv buffer")
            return;
        }
        int ySize = avFrame->width * avFrame->height;
        auto y = env->NewByteArray(ySize);
        copyPlane(y, avFrame->data[0], avFrame->width, avFrame->height, avFrame->linesize[0]);
        // Chroma planes are subsampled 2x2 in YUV420P.
        auto u = env->NewByteArray(ySize / 4);
        copyPlane(u, avFrame->data[1], avFrame->width / 2, avFrame->height / 2, avFrame->linesize[1]);
        auto v = env->NewByteArray(ySize / 4);
        copyPlane(v, avFrame->data[2], avFrame->width / 2, avFrame->height / 2, avFrame->linesize[2]);
        if (mPlayerJni.isValid()) {
            env->CallVoidMethod(mPlayerJni.instance, mPlayerJni.onVideoFrameArrived,
                                avFrame->width, avFrame->height, FMT_VIDEO_YUV420, y, u, v);
        }
        // Release local refs promptly: this runs once per frame, far more
        // often than the JVM trims the local reference table.
        env->DeleteLocalRef(y);
        env->DeleteLocalRef(u);
        env->DeleteLocalRef(v);
    } else if (avFrame->format == AV_PIX_FMT_NV12) {
        if (!avFrame->data[0] || !avFrame->data[1]) {
            LOGE("doRender failed, no nv12 buffer")
            return;
        }
        int ySize = avFrame->width * avFrame->height;
        auto y = env->NewByteArray(ySize);
        copyPlane(y, avFrame->data[0], avFrame->width, avFrame->height, avFrame->linesize[0]);
        // NV12: one interleaved UV plane, subsampled 2x2 — a visible row is
        // (width / 2) samples * 2 bytes == width bytes, height / 2 rows.
        auto uv = env->NewByteArray(ySize / 2);
        copyPlane(uv, avFrame->data[1], avFrame->width, avFrame->height / 2, avFrame->linesize[1]);
        if (mPlayerJni.isValid()) {
            env->CallVoidMethod(mPlayerJni.instance, mPlayerJni.onVideoFrameArrived,
                                avFrame->width, avFrame->height, FMT_VIDEO_NV12, y, uv, nullptr);
        }
        env->DeleteLocalRef(y);
        env->DeleteLocalRef(uv);
    } else if (avFrame->format == AV_PIX_FMT_RGBA) {
        if (!avFrame->data[0]) {
            LOGE("doRender failed, no rgba buffer")
            return;
        }
        // Packed RGBA: single plane, 4 bytes per pixel.
        int size = avFrame->width * avFrame->height * 4;
        auto rgba = env->NewByteArray(size);
        copyPlane(rgba, avFrame->data[0], avFrame->width * 4, avFrame->height, avFrame->linesize[0]);
        if (mPlayerJni.isValid()) {
            env->CallVoidMethod(mPlayerJni.instance, mPlayerJni.onVideoFrameArrived,
                                avFrame->width, avFrame->height, FMT_VIDEO_RGBA, rgba, nullptr, nullptr);
        }
        env->DeleteLocalRef(rgba);
    } else if (avFrame->format == AV_PIX_FMT_MEDIACODEC) {
        // Hardware path: render flag 1 releases the buffer to the surface.
        av_mediacodec_release_buffer((AVMediaCodecBuffer *) avFrame->data[3], 1);
    } else if (avFrame->format == AV_SAMPLE_FMT_FLTP) {
        // Audio path: the PCM buffer and its size were prepared by the
        // audio decoder before this call.
        int dataSize = mAudioDecoder->mDataSize;
        bool flushRender = mAudioDecoder->mNeedFlushRender;
        if (dataSize > 0) {
            uint8_t *audioBuffer = mAudioDecoder->mAudioBuffer;
            if (mIsMute) {
                // Mute by zeroing the samples instead of skipping delivery,
                // so the render clock keeps advancing.
                memset(audioBuffer, 0, dataSize);
            }
            auto jByteArray = env->NewByteArray(dataSize);
            env->SetByteArrayRegion(jByteArray, 0, dataSize, reinterpret_cast<const jbyte *>(audioBuffer));
            if (mPlayerJni.isValid()) {
                env->CallVoidMethod(mPlayerJni.instance, mPlayerJni.onAudioFrameArrived, jByteArray, dataSize, flushRender);
            }
            env->DeleteLocalRef(jByteArray);
        }
    }
}
/**
 * Copy a tightly-packed image plane out of a (possibly padded) source.
 *
 * Decoders usually align each source row to line_stride bytes, which can
 * exceed the visible row length (image_width * pixel_stride). The
 * destination is always written contiguously, row after row, with the
 * alignment padding stripped.
 *
 * @param dst          destination, at least image_width * image_height * pixel_stride bytes
 * @param src          source laid out in rows of line_stride bytes
 * @param image_width  visible width in pixels (samples) per row
 * @param image_height number of rows
 * @param line_stride  bytes between the starts of consecutive source rows
 * @param pixel_stride bytes per sample (1 = planar, 2 = interleaved UV, 4 = RGBA)
 */
void copyFrameData(uint8_t * dst, uint8_t * src, int image_width, int image_height, int line_stride, int pixel_stride) {
    if (image_width <= 0 || image_height <= 0) {
        return; // nothing to copy; also guards the size math below
    }
    int image_line_len = image_width * pixel_stride;
    if (image_line_len == line_stride) {
        // Rows are exactly contiguous in the source: one bulk copy.
        // (Was `>=`: a stride smaller than the row length would have
        // bulk-copied misaligned bytes; the row loop handles that case.)
        memcpy(dst, src, (size_t) image_line_len * image_height);
    } else {
        // Padded (or unusual) stride: copy each visible row, skip the rest.
        uint8_t *dst_offset = dst;
        uint8_t *src_offset = src;
        for (int i = 0; i < image_height; i++) {
            memcpy(dst_offset, src_offset, image_line_len);
            dst_offset += image_line_len;
            src_offset += line_stride;
        }
    }
}
// ...
// yuv420p
// Pull the three planes of a YUV420P frame into the destination buffers.
// copyFrameData() strips the decoder's per-row alignment padding
// (frame->linesize[i] may exceed the visible row width).
uint8_t *yBuffer = videoBuffer->yBuffer;
uint8_t *uBuffer = videoBuffer->uBuffer;
uint8_t *vBuffer = videoBuffer->vBuffer;
// Full-resolution luma plane, 1 byte per pixel.
copyFrameData(yBuffer, frame->data[0], w, h, frame->linesize[0], 1);
// Chroma planes are subsampled 2x2 in YUV420P.
copyFrameData(uBuffer, frame->data[1], w / 2, h / 2, frame->linesize[1], 1);
copyFrameData(vBuffer, frame->data[2], w / 2, h / 2, frame->linesize[2], 1);
//...
// ...
// yuv420sp
// Semi-planar layout: full-resolution Y plane plus one interleaved UV plane.
uint8_t *yBuffer = videoBuffer->yBuffer;
uint8_t *uvBuffer = videoBuffer->uvBuffer;
copyFrameData(yBuffer, frame->data[0], w, h, frame->linesize[0], 1);
// UV is subsampled 2x2 but interleaved (pixel_stride 2), so a copied row
// is (w / 2) * 2 == w bytes across h / 2 rows.
copyFrameData(uvBuffer, frame->data[1], w / 2, h / 2, frame->linesize[1], 2);
// ...
// ...
// rgba
// Packed RGBA: a single plane, 4 bytes per pixel per row.
uint8_t *rgbaBuffer = videoBuffer->rgbaBuffer;
copyFrameData(rgbaBuffer, frame->data[0], w, h, frame->linesize[0], 4);