Android FFmpeg: mpegvideo encoding error when encoding camera data to MPEG-TS format

I have integrated FFmpeg on Android to encode camera data into MPEG-TS format, but the video encoding fails. The initialization is done first; each camera frame is then passed to the JNI callback below.

extern "C"
JNIEXPORT jint JNICALL
Java_com_deerlive_jni_ffmpeg_FFmpegHandle_onFrameCallback(JNIEnv *env, jobject instance,
                                                                jbyteArray buffer_) {

    startTime = av_gettime();
    jbyte *in = env->GetByteArrayElements(buffer_, NULL);

    int ret = 0;
    // The camera delivers NV21 but the encoder expects AV_PIX_FMT_YUV420P,
    // so convert NV21 -> YUV420P here.
    // NV21 layout: bytes 0 .. width*height hold the Y plane,
    // bytes width*height .. width*height*3/2 hold interleaved V/U pairs.

    // copy the Y plane
    memcpy(pFrameYUV->data[0], in, y_length);

    for (int i = 0; i < uv_length; i++) {
        // V plane
        *(pFrameYUV->data[2] + i) = *(in + y_length + i * 2);
        // U plane
        *(pFrameYUV->data[1] + i) = *(in + y_length + i * 2 + 1);
    }

    pFrameYUV->format = pCodecCtx->pix_fmt;
    pFrameYUV->width = yuv_width;
    pFrameYUV->height = yuv_height;
    pFrameYUV->pts = count;
    //pFrameYUV->pts = (1.0 / 30) * 90 * count;

    // For H.264, the data of one AVPacket usually corresponds to one NAL unit.
    // Prepare the output AVPacket.
    enc_pkt.data = NULL;
    enc_pkt.size = 0;
    av_init_packet(&enc_pkt);
//    __android_log_print(ANDROID_LOG_WARN, "eric", ":%lld",
//                        (long long) ((av_gettime() - startTime) / 1000));

    /* send the frame to the encoder */

    ret = avcodec_send_frame(pCodecCtx, pFrameYUV);
    if (ret < 0) {
        logi("Error sending a frame for encoding\n");
    }

    ret = avcodec_receive_packet(pCodecCtx, &enc_pkt);
    if (ret == AVERROR(EAGAIN) || ret == AVERROR_EOF) {
        logi("avcodec_receive_packet returned EAGAIN/EOF (no packet available)\n");
    } else if (ret < 0) {
        logi("Error during encoding\n");
    }


    //av_frame_free(&pFrameYUV);

    if (ret != 0 || enc_pkt.size <= 0) {
        loge("avcodec_receive_packet error");
        return -2;
    }
    enc_pkt.stream_index = video_st->index;

    AVRational time_base = ofmt_ctx->streams[0]->time_base;
    AVRational r_frame_rate1 = pCodecCtx->framerate;
    AVRational time_base_q = {1, AV_TIME_BASE};
    int64_t calc_duration = (double)(AV_TIME_BASE) * (1 / av_q2d(r_frame_rate1));
    //enc_pkt.pts = count * (video_st->time_base.den) / ((video_st->time_base.num) * fps);
    enc_pkt.pts = av_rescale_q(count * calc_duration, time_base_q, time_base);
    enc_pkt.dts = enc_pkt.pts;
    //enc_pkt.duration = (video_st->time_base.den) / ((video_st->time_base.num) * fps);
    enc_pkt.duration = av_rescale_q(calc_duration, time_base_q, time_base);

    enc_pkt.pos = -1;

    ret = av_interleaved_write_frame(ofmt_ctx, &enc_pkt);
    if (ret != 0) {
        loge("av_interleaved_write_frame failed");
    }
    count++;
    env->ReleaseByteArrayElements(buffer_, in, 0);
    return 0;

}

Error messages:
avcodec_send_frame send error

avcodec_receive_packet error
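
For reference, avcodec_receive_packet() returning AVERROR(EAGAIN) only means the encoder needs more input frames before it can produce a packet, which is normal for the first few frames of a stream; the documented pattern is to drain packets in a loop rather than treating EAGAIN as a failure. A minimal sketch using the same variable names as the callback above (the cleanup details are assumptions, not code from the question):

    ret = avcodec_send_frame(pCodecCtx, pFrameYUV);
    if (ret < 0) {
        loge("avcodec_send_frame failed");
        return -1;
    }
    while (ret >= 0) {
        ret = avcodec_receive_packet(pCodecCtx, &enc_pkt);
        if (ret == AVERROR(EAGAIN) || ret == AVERROR_EOF)
            break;                      // not an error: encoder needs more frames
        if (ret < 0) {
            loge("avcodec_receive_packet failed");
            return -2;
        }
        enc_pkt.stream_index = video_st->index;
        // ... set pts/dts/duration as in the code above ...
        av_interleaved_write_frame(ofmt_ctx, &enc_pkt);
        av_packet_unref(&enc_pkt);
    }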

Mar.28,2021

I would also like to ask: what do you need to learn first in order to get started with FFmpeg?


First of all, your encoder is wrong: x264 corresponds to AV_CODEC_ID_H264.
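
For example (a sketch; whether the libx264 wrapper is available depends on how your FFmpeg was built, which is an assumption here):

    AVCodec *codec = avcodec_find_encoder(AV_CODEC_ID_H264);
    // or, to request the x264 wrapper explicitly:
    // AVCodec *codec = avcodec_find_encoder_by_name("libx264");
    if (codec == NULL) {
        loge("H.264 encoder not found (FFmpeg built without --enable-libx264?)");
        return -1;
    }
    pCodecCtx = avcodec_alloc_context3(codec);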

Second, FFmpeg has its own AV_PIX_FMT_NV21 pixel format.
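
One way to use it (a sketch; sws_ctx and the linesize values are my assumptions, and it needs libswscale/swscale.h) is to let sws_scale() do the NV21 -> YUV420P conversion instead of the manual loop in the question:

    // one-time setup, e.g. during initialization
    struct SwsContext *sws_ctx = sws_getContext(
            yuv_width, yuv_height, AV_PIX_FMT_NV21,       // source: camera NV21
            yuv_width, yuv_height, AV_PIX_FMT_YUV420P,    // destination: encoder input
            SWS_BILINEAR, NULL, NULL, NULL);

    // per frame: wrap the camera buffer and convert into pFrameYUV
    const uint8_t *src_data[4] = { (const uint8_t *) in,             // Y plane
                                   (const uint8_t *) in + y_length,  // interleaved V/U plane
                                   NULL, NULL };
    int src_linesize[4] = { yuv_width, yuv_width, 0, 0 };
    sws_scale(sws_ctx, src_data, src_linesize, 0, yuv_height,
              pFrameYUV->data, pFrameYUV->linesize);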

Third, the first frame should be set as a keyframe, and after that a keyframe should be produced once every GOP.
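
For example (a sketch; the GOP length of 30 is an arbitrary assumption):

    // during initialization: one keyframe every gop_size frames
    pCodecCtx->gop_size = 30;

    // per frame, before avcodec_send_frame(): force the very first frame to be a keyframe
    if (count == 0)
        pFrameYUV->pict_type = AV_PICTURE_TYPE_I;
    else
        pFrameYUV->pict_type = AV_PICTURE_TYPE_NONE;   // let the encoder decide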

Finally, you might as well start with the simplest JPEG (AV_CODEC_ID_MJPEG) encoding to learn the basics of FFmpeg, and then move on to video, which is really just a long sequence of pictures strung together.
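
A minimal sketch of that exercise, encoding one frame to a single JPEG image in memory (frame is assumed to be an AVFrame already filled with full-range YUVJ420P data; error checks omitted):

    AVCodec *jpeg_codec = avcodec_find_encoder(AV_CODEC_ID_MJPEG);
    AVCodecContext *jpeg_ctx = avcodec_alloc_context3(jpeg_codec);
    jpeg_ctx->pix_fmt   = AV_PIX_FMT_YUVJ420P;       // MJPEG uses full-range YUV
    jpeg_ctx->width     = yuv_width;
    jpeg_ctx->height    = yuv_height;
    jpeg_ctx->time_base = {1, 25};
    avcodec_open2(jpeg_ctx, jpeg_codec, NULL);

    AVPacket jpeg_pkt;
    av_init_packet(&jpeg_pkt);
    jpeg_pkt.data = NULL;
    jpeg_pkt.size = 0;

    if (avcodec_send_frame(jpeg_ctx, frame) >= 0 &&
        avcodec_receive_packet(jpeg_ctx, &jpeg_pkt) >= 0) {
        // jpeg_pkt.data / jpeg_pkt.size now hold one complete JPEG image,
        // which can be written straight to a .jpg file
        av_packet_unref(&jpeg_pkt);
    }
    avcodec_free_context(&jpeg_ctx);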
