FFMPEG 保存实时流到本地文件

发布于:2025-02-11 ⋅ 阅读:(82) ⋅ 点赞:(0)

最近项目上需要把网络实时流保存到本地文件,我正好用 FFmpeg 封装了一个库,直接上代码:

/// Start recording a live network stream into a local file.
/// Runs the full setup pipeline: input demuxer, output muxer, decoder,
/// output stream declaration, encoder, then activates the record loop.
/// @param inputName  URL of the live stream (e.g. an rtsp:// address).
/// @param outputName path of the local file to write.
/// @return true when every setup stage succeeded and recording started.
bool VideoRecordModule::StartRecord(std::string inputName, std::string outputName)
{
	// Tear down any previous session and reset the presentation clock.
	StopRecord();
	m_u64NextPts = 0;

	// Stages 1-3: open the input, allocate the output, open the decoder.
	// Short-circuit evaluation preserves the original stage ordering.
	const bool contextsReady = InitInputFomat(inputName)
		&& InitOutputFormat(outputName)
		&& OpenDecoder();
	if (!contextsReady)
	{
		return false;
	}

	// Stage 4: declare the encoded video stream on the output context.
	AddStream();

	// Stage 5: open the encoder and write the container header.
	if (!OpenEncoder())
	{
		return false;
	}

	// Stage 6: start pushing packets to the output file.
	ActivateRecord();
	m_bVideoRecord = true;
	return true;
}


bool VideoRecordModule::InitInputFomat(std::string inputName)
{
	AVFormatContext *infmtCtx = nullptr;
	AVDictionary *opt = nullptr;
	AVCodec *pCodec = nullptr;
	AVCodecContext* pCodecCtx = nullptr;
	int ret = 0;
	av_dict_set(&opt, "buffer_size", "1024000", 0);        // 缓冲区大小 单位字节 解决高画质模糊的问题
	av_dict_set(&opt, "max_delay", "100", 0);              // 最大延时 单位微妙
	av_dict_set(&opt, "stimeout", "3000000", 0);           // 设置超时断开连接时间 3s 单位微妙
	av_dict_set(&opt, "rtsp_transport", "tcp", 0);         // 以tcp方式打开,如果以udp方式打开将tcp替换为udp
	av_dict_set(&opt, "fflags", "nobuffer", 0);
	av_dict_set(&opt, "rtbufsize", "6", 0);
	av_dict_set(&opt, "start_time_realtime", 0, 0);

	if ((ret = avformat_open_input(&infmtCtx, inputName.data(), nullptr, &opt)) != 0)
	{
		return false;
	}

	m_spInputFormatContext = std::shared_ptr<AVFormatContext>(infmtCtx, [](AVFormatContext* ctx)
	{
		avformat_close_input(&ctx);
	});

	m_spInputFormatContext->probesize = 1000 * 2; //探测大小 单位字节
	m_spInputFormatContext->max_analyze_duration = 2 * AV_TIME_BASE; // 最大码流分析时间

	if (avformat_find_stream_info(m_spInputFormatContext.get(), NULL) < 0)
	{
		return false;
	}

	if ((ret = av_find_best_stream(m_spInputFormatContext.get(), AVMEDIA_TYPE_VIDEO, -1, -1, &pCodec, 0)) >= 0)
	{
		m_inputVideoStream = m_spInputFormatContext->streams[ret];
	}
	pCodecCtx = avcodec_alloc_context3(pCodec);
	if (pCodecCtx == NULL)
	{
		return false;
	}
	m_spDeCodecContext = std::shared_ptr<AVCodecContext>(pCodecCtx, [](AVCodecContext* ctx)
	{
		avcodec_free_context(&ctx);
	});

	if (avcodec_parameters_to_context(m_spDeCodecContext.get(), m_inputVideoStream->codecpar) < 0)
	{
		return false;
	}

	/*if ((ret = av_find_best_stream(infmtCtx, AVMEDIA_TYPE_AUDIO, -1, -1, &pCodec, 0)) >= 0)
	{
	m_inputAudioStream = infmtCtx->streams[ret];
	}*/

	m_spDeCodecContext->flags2 |= AV_CODEC_FLAG2_FAST;    // 允许不符合规范的加速技巧。
	m_spDeCodecContext->thread_count = 8;                 // 使用8线程解码
	return true;
}

bool VideoRecordModule::InitOutputFormat(std::string outputName)
{
	AVFormatContext *outfmtCtx = nullptr;
	int ret = 0;
	/// 分配并初始化 输出上下文
	if((ret = avformat_alloc_output_context2(&outfmtCtx, NULL, NULL, outputName.c_str())) < 0)
	{
		return false;
	}
	m_spOutputFormatContext = std::shared_ptr<AVFormatContext>(outfmtCtx, [](AVFormatContext* ctx)
	{
		avformat_close_input(&ctx);
	});

	if (!m_spOutputFormatContext)
	{
		return false;
	}
	return true;
}

bool VideoRecordModule::OpenDecoder()
{
	if (!m_spDeCodecContext)
	{
		return false;
	}
	/// 打开解码器
	if (avcodec_open2(m_spDeCodecContext.get(), m_spDeCodecContext->codec, NULL) < 0)
	{
		return false;
	}
	return true;
}

void VideoRecordModule::AddStream()
{
	AVCodecContext *codeCtx = nullptr;
	if (!m_spOutputFormatContext)
	{
		return;
	}

	AVCodec *codec = avcodec_find_encoder(m_spOutputFormatContext->oformat->video_codec);
	if (!codec)
	{
		return;
	}

	m_outputVideoStream = avformat_new_stream(m_spOutputFormatContext.get(), NULL);
	if (!m_outputVideoStream)
	{
		return;
	}
	m_outputVideoStream->id = m_spOutputFormatContext->nb_streams - 1;
	codeCtx = avcodec_alloc_context3(codec);
	if (!codeCtx)
		return;
	m_spEnCodecContext = std::shared_ptr<AVCodecContext>(codeCtx, [](AVCodecContext* ctx)
	{
		avcodec_free_context(&ctx);
	});

	switch (codec->type)
	{
	case AVMEDIA_TYPE_AUDIO:
	{
		break;
	}
	case AVMEDIA_TYPE_VIDEO:
	{
		m_spEnCodecContext->codec_id = m_spOutputFormatContext->oformat->video_codec;
		///平均比特率,代码默认值是400000
		m_spEnCodecContext->bit_rate = 500000;
		/// 分辨率必须是2的倍数 (放开)
		m_spEnCodecContext->width = m_nVideoWidth;
		m_spEnCodecContext->height = m_nVideoHeight;
		/// 时基:这是基本的时间单位(以秒为单位)
		/// 表示其中的帧时间戳。 对于固定fps内容,(放开)
		AVRational avrational{ 1, STREAM_FRAME_RATE };
		m_outputVideoStream->time_base = avrational;
		m_spEnCodecContext->time_base = m_outputVideoStream->time_base;
		/// 最多每十二帧发射一帧内帧
		m_spEnCodecContext->gop_size = 12;
		m_spEnCodecContext->pix_fmt = STREAM_PIX_FMT;
		if (m_spEnCodecContext->codec_id == AV_CODEC_ID_MPEG2VIDEO)
		{
			///添加了B帧
			m_spEnCodecContext->max_b_frames = 2;
		}
		if (m_spEnCodecContext->codec_id == AV_CODEC_ID_MPEG1VIDEO)
		{
			/// 需要避免使用其中一些系数溢出的宏块
			/// 普通视频不会发生这种情况,因为色度平面的运动与亮度平面不匹配
			m_spEnCodecContext->mb_decision = 2;
		}
		break;
	}
	default:
		break;
	}

	//某些格式希望流头分开
	if (m_spOutputFormatContext->oformat->flags & AVFMT_GLOBALHEADER)
	{
		m_spEnCodecContext->flags |= AV_CODEC_FLAG_GLOBAL_HEADER;
	}

}

bool VideoRecordModule::OpenEncoder()
{
	int ret = 0;
	AVDictionary *opt = nullptr;
	av_dict_set(&opt, "buffer_size", "1024000", 0);        // 缓冲区大小 单位字节 解决高画质模糊的问题
	av_dict_set(&opt, "max_delay", "100", 0);              // 最大延时 单位微妙
	av_dict_set(&opt, "stimeout", "3000000", 0);           // 设置超时断开连接时间 3s 单位微妙
	av_dict_set(&opt, "rtsp_transport", "tcp", 0);         // 以tcp方式打开,如果以udp方式打开将tcp替换为udp
	av_dict_set(&opt, "fflags", "nobuffer", 0);
	av_dict_set(&opt, "rtbufsize", "6", 0);
	av_dict_set(&opt, "start_time_realtime", 0, 0);

	if (!m_spEnCodecContext)
	{
		return false;
	}

	/// 打开编码器
	ret = avcodec_open2(m_spEnCodecContext.get(), m_spEnCodecContext->codec, &opt);
	//av_dict_free(&opt);
	if (ret < 0)
	{
		return false;
	}

	/// 分配并初始化可重用框架
	m_spFrame = std::shared_ptr<AVFrame>(AllocPicture(m_spEnCodecContext->pix_fmt, m_spEnCodecContext->width, m_spEnCodecContext->height), [](AVFrame* p)
	{
		av_frame_free(&p);
	});
	if (!m_spFrame)
	{
		return false;
	}

	if (av_frame_make_writable(m_spFrame.get()) < 0)
	{
		return false;
	}

	/// 如果输出格式不是YUV420P,则为临时YUV420P 
	if (m_spEnCodecContext->pix_fmt != AV_PIX_FMT_YUV420P)
	{
		m_spTempFrame = std::shared_ptr<AVFrame>(AllocPicture(AV_PIX_FMT_YUV420P, m_spEnCodecContext->width, m_spEnCodecContext->height), [](AVFrame* p)
		{
			av_frame_free(&p);
		});

		if (!m_spTempFrame)
		{
			return false;
		}
	}
	if (!m_outputVideoStream)
	{
		return false;
	}
	/// 将流参数复制到多路复用器
	avcodec_parameters_from_context(m_outputVideoStream->codecpar, m_spEnCodecContext.get());

	if (!m_spOutputFormatContext)
	{
		return false;
	}

	if (!(m_spOutputFormatContext->oformat->flags & AVFMT_NOFILE))
	{
		ret = avio_open(&m_spOutputFormatContext->pb, m_spOutputFormatContext->url, AVIO_FLAG_WRITE);
		if (ret < 0)
		{
			return false;
		}
	}

	/// 编写流头
	ret = avformat_write_header(m_spOutputFormatContext.get(), &opt);
	if (ret < 0)
	{
		return false;
	}
	return true;
}

后续我会把完整代码上传到 CSDN 上。


网站公告

今日签到

点亮在社区的每一天
去签到