如何使用ffmpeg实现h264流传输+H264实现RTP传输数据
Posted
tags:
篇首语:本文由小常识网(cha138.com)小编为大家整理,主要介绍了如何使用ffmpeg实现h264流传输+H264实现RTP传输数据相关的知识,希望对你有一定的参考价值。
参考技术A H264编码后,就可以逐个NAL做RTP打包,接收端RTP解包,然后H264解码,渲染了。FFMPEG应该可以实现。本回答被提问者采纳
ffmpeg 探测流
// Decoder.cpp : Defines the exported functions for the DLL application.
//
#include "stdafx.h"
#include "Decoder.h"
#include "DeException.h"
#include <direct.h>
#include <string>
#include <io.h>
#include "Demuxer.h"
//////////////////////////////////////////////////////////////////////////
///decoder class
/////////////////////////////
// FLV tag type identifiers (per the FLV container spec); used by
// GetVideoExtraData when scanning the probe buffer for the first video tag.
enum {
FLV_TAG_TYPE_AUDIO = 0x08,
FLV_TAG_TYPE_VIDEO = 0x09,
FLV_TAG_TYPE_META = 0x12,
};
// Construct the decoder: zero all playback state, capture the working
// directory (used to build default capture/record paths), prepare the FLUSH
// sentinel packet and create the audio/video decodec workers.
CDecoder::CDecoder()
    : IDecoderStream()
    , m_TimeStampe(nullptr)
    , m_pContext(nullptr)
    , m_eStatus(eUndefine)
    , m_bAudio(false)
    , m_nStep(0)
    , av_sync_type(0)
    , seek_req(0)
    , seek_flags(0)
    , seek_pos(0)
    , m_ePlayType(eFile)
    , m_pAvioBuf(nullptr)
{
    m_fPlaySpeed = 1.0;
    m_pDemuxer = nullptr;
    // FIX: the '\0' literal used typographic quotes (bad paste) and would not compile.
    m_szPlayFile[0] = '\0';
    ZeroMemory(&m_FlushPkt, sizeof(AVPacket));
    av_init_packet(&m_FlushPkt);
    // Sentinel: packet queues treat a packet whose data is "FLUSH" as a flush marker.
    m_FlushPkt.data = (unsigned char *) "FLUSH";
    m_strCaptureFile = "";
    m_strPictureFolder = "";
    m_strRecodFolder = "";
    m_strModelPath = "";
    //get model path.
    char buffer[512] = {0};
    _getcwd(buffer, 512);
    m_strModelPath = buffer;
    m_pVideoc = new VideoDecodec(this);
    m_pAudioc = new AudioDecodec(this);
}
// Release the worker decodecs and any active record demuxer.
CDecoder::~CDecoder()
{
    delete m_pVideoc;
    delete m_pAudioc;
    // delete on nullptr is a no-op, so no guard is needed.
    delete m_pDemuxer;
}
// Wall-clock time in seconds (av_gettime returns microseconds).
double CDecoder::GetExternalClock()
{
    const double kMicrosPerSecond = 1000000.0;
    return av_gettime() / kMicrosPerSecond;
}
// Current video clock in seconds. While paused the clock is frozen at the
// last decoded pts; otherwise it is that pts extrapolated by the wall-clock
// time elapsed since it was recorded.
double CDecoder::GetVideoClock()
{
    if (m_bPaused)
        return m_pVideoc->GetCurPts();
    const double elapsed = (av_gettime() - m_pVideoc->video_current_pts_time) / 1000000.0;
    return m_pVideoc->GetCurPts() + elapsed;
}
// Current audio clock in seconds; 0 when there is no audio stream.
// NOTE(review): when paused AND an audio stream exists, the paused pts is
// immediately overwritten by GetAudioClock() below -- this looks like a
// missing `else`/early-return; confirm intent before changing.
double CDecoder::GetAudioClock()
{
double pts = 0;
if (m_bPaused)
{
pts = m_pAudioc->GetCurPts();
}
if (m_pAudioc->audioStream >= 0)
{
pts = m_pAudioc->GetAudioClock();
}
return pts;
}
// The clock that drives A/V synchronisation: video, audio, or the external
// (wall) clock, selected by av_sync_type.
double CDecoder::GetMasterClock()
{
    switch (av_sync_type)
    {
    case AV_SYNC_VIDEO_MASTER:
        return GetVideoClock();
    case AV_SYNC_AUDIO_MASTER:
        return GetAudioClock();
    default:
        return GetExternalClock();
    }
}
/* pause or resume the video */
void CDecoder::StreamTogglePause()
{
if (m_bPaused)
{
// Resuming: re-base the frame timer so the time spent paused is not
// counted. (The `+ -` spelling is original; it is simply adding a
// negated pts.)
m_pVideoc->frame_timer += av_gettime() / 1000000.0 + - m_pVideoc->GetCurPts();
// if (is->read_pause_return != AVERROR(ENOSYS))
// {
// m_pVideoc->video_current_pts = m_pVideoc->video_current_pts_drift + av_gettime() / 1000000.0;
// }
// m_pVideoc->video_current_pts_drift = m_pVideoc->video_current_pts - av_gettime() / 1000000.0;
}
m_bPaused = !m_bPaused;
}
// SDL timer callback: post a refresh event to the event loop; the decoder
// instance travels through user.data1. Returning 0 makes the timer one-shot.
Uint32 CDecoder::SdlRefreshTimerCb(Uint32 interval, void *opaque)
{
    SDL_Event refreshEvt;
    refreshEvt.type = FF_REFRESH_EVENT;
    refreshEvt.user.data1 = opaque;
    SDL_PushEvent(&refreshEvt);
    return 0;
}
// Arm a one-shot SDL timer that fires SdlRefreshTimerCb after nDelay ms,
// which in turn posts FF_REFRESH_EVENT for the video refresh loop.
void CDecoder::ScheduleRefresh(int nDelay)
{
SDL_AddTimer(nDelay, SdlRefreshTimerCb, this);
}
// True for every mode except local-file playback (i.e. the stream modes
// that render directly).
bool CDecoder::DirectShow()
{
    return m_ePlayType != eFile;
}
// avio interrupt callback. Currently always returns 0 (never interrupts);
// the quit-flag check remains commented out.
int CDecoder::DecodeInterruptCb(void *opaque)
{
//return (global_video_state && global_video_state->quit);
return 0;
}
// Demux thread for local-file playback. Reads packets with av_read_frame and
// routes them to the audio/video packet queues until EOF, read error or stop.
// Also services asynchronous seek requests (seek_req set by StreamSeek) and
// feeds the optional record demuxer. Returns 0 on normal exit, 1 when a
// DeException escapes the loop.
int CDecoder::DecodeFileThread(void *arg)
{
CDecoder *pDecoder = (CDecoder *) arg;
AVFormatContext *pFormatCtx = pDecoder->m_pVideoc->ic;
AVPacket pkt1 = {0};
AVPacket* packet = &pkt1;
unsigned int i = 0;
VideoDecodec* pVideo = pDecoder->m_pVideoc;
AudioDecodec* pAudioc = pDecoder->m_pAudioc;
try
{
//main decode loop
/* main read loop: av_read_frame keeps pulling packets from the file */
while(!pDecoder->IsStop())
{
// Service a pending asynchronous seek request.
if (pDecoder->seek_req)
{
int stream_index = -1;
// seek_pos appears to be in seconds; *100 combined with the {1,100}
// rational below rescales it to the stream time_base. TODO confirm units.
int64_t seek_target = pDecoder->seek_pos * 100;
if (pVideo->videoStream >= 0)
stream_index = pVideo->videoStream;
else if (pAudioc->audioStream >= 0)
stream_index = pAudioc->audioStream;
if (stream_index >= 0)
{
AVRational bq = {1, 100};
seek_target = av_rescale_q(seek_target, bq, pFormatCtx->streams[stream_index]->time_base);
}
if (av_seek_frame(pVideo->ic, stream_index, seek_target, pDecoder->seek_flags) < 0)
{
_TCHAR szData[256] = {0};
wsprintf(szData, _T("Can not find pos: %d"), seek_target/AV_TIME_BASE);
OutputDebugString(szData);
}
else
{
// Seek succeeded: drop all queued packets and inject the FLUSH
// sentinel so the decoders reset their codec state.
if (pAudioc && pAudioc->audioStream >= 0)
{
pAudioc->audioq.PacketQueueFlush();
pAudioc->audioq.PacketQueuePut(&pDecoder->m_FlushPkt);//memory leak??? (original author note)
}
if (pVideo->videoStream >= 0)
{
pVideo->videoq.PacketQueueFlush();
pVideo->videoq.PacketQueuePut(&pDecoder->m_FlushPkt);//memory leak??? (original author note)
}
}
pDecoder->seek_req = 0;
}
//seek stuff goes here
/* audioq.size / videoq.size are total bytes of queued payload, not packet counts */
if (pAudioc->audioq.size > MAX_AUDIOQ_SIZE || pVideo->videoq.size > MAX_VIDEOQ_SIZE)
{
// Queues full: throttle the reader and let the decoders drain.
SDL_Delay(10);
continue;
}
if (av_read_frame(pVideo->ic, packet) < 0)
{
if (pVideo->ic->pb->error == 0)
{
// Clean EOF: mark stopped and notify the host that playback ended.
pDecoder->m_eStatus = eStop;
DeException exMsg(ePlayEnd, pDecoder);
OUTPUT_DECODER("[%ld]<%d>%s", exMsg.m_szMsg, pDecoder, ePlayEnd, exMsg.GetCurString());
exMsg.Execute();
break;
//continue;/* no error; wait for user input, should be stop in the file mode */
}
else
{
break;
}
}
//record data.
if (pDecoder->m_pDemuxer)
{
pDecoder->m_pDemuxer->Demuxer(pFormatCtx->streams[packet->stream_index], *packet);
}
if (pDecoder->IsStop())
break;
// Is this a packet from the video stream?
if (packet->stream_index == pVideo->videoStream)
{
pVideo->videoq.PacketQueuePut(packet);//memory leak??? (original author note)
}
else if (packet->stream_index == pAudioc->audioStream)
{
pAudioc->audioq.PacketQueuePut(packet);//memory leak??? (original author note)
}
else
{
// Packet belongs to a stream we do not play: release it.
av_packet_unref(packet);
}
}
}
catch(DeException ex)
{
pDecoder->parse_tid = 0;
return 1;
}
pDecoder->parse_tid = 0;
//av_packet_unref(packet);
return 0;
}
int CDecoder::ReadStreamData(void *pContext, uint8_t *buf, int buf_size)
{
if (pContext)
{
PRAME frame = {0};
frame.pFrame = buf;
frame.FrameLen = buf_size;
int nLen = 0;
CDecoder* pDecoder = (CDecoder*)pContext;
nLen = pDecoder->GetOneCompletesFrame(&frame);
return nLen;
}
return 0;
}
// Locate the video and audio stream indices in the format context; each out
// parameter is -1 when no such stream exists. If several streams of one type
// exist the last one wins (original behaviour preserved). Always returns 0.
int CDecoder::FindStreamIndex(AVFormatContext *pformat_ctx, int& video_stream, int& audio_stream)
{
    video_stream = -1;
    audio_stream = -1;
    // FIX: loop index is now unsigned to match nb_streams (was a
    // signed/unsigned comparison).
    for (unsigned int i = 0; i < pformat_ctx->nb_streams; i++)
    {
        const AVMediaType type = pformat_ctx->streams[i]->codecpar->codec_type;
        if (type == AVMEDIA_TYPE_VIDEO)
            video_stream = (int)i;
        else if (type == AVMEDIA_TYPE_AUDIO)
            audio_stream = (int)i;
    }
    return 0;
}
// Demux thread for live/playback streams (custom avio input). Like
// DecodeFileThread but: honours pause by spinning, never treats a clean read
// failure as end-of-stream (waits instead), and only queues audio when the
// audio decodec is actually playing. Returns 0 on normal exit, 1 on exception.
int CDecoder::DecodeStreamThread(void *arg)
{
CDecoder* pDecoder = (CDecoder *) arg;
VideoDecodec* pVideo = pDecoder->m_pVideoc;
AudioDecodec* pAudioc = pDecoder->m_pAudioc;
AVFormatContext *pFormatCtx = pVideo->ic;
AVPacket pkt1, *packet = &pkt1;
unsigned int i = 0;
try
{
//main decode loop
/* main read loop: av_read_frame keeps pulling packets from the stream */
while(!pDecoder->IsStop())
{
if(pDecoder->m_bPaused)// 2 stop
{
SDL_Delay(2);
continue;
}
//seek stuff goes here
/* audioq.size / videoq.size are total bytes of queued payload, not packet counts */
if (pAudioc->audioq.size > MAX_AUDIOQ_SIZE || pVideo->videoq.size > MAX_VIDEOQ_SIZE)
{
SDL_Delay(10);
continue;
}
if (av_read_frame(pVideo->ic, packet) < 0)
{
if (pVideo->ic->pb->error == 0)
{
SDL_Delay(10); /* no error; wait for user input */
continue;
}
else
{
break;
}
}
//record data.
if (pDecoder->m_pDemuxer)
{
pDecoder->m_pDemuxer->Demuxer(pFormatCtx->streams[packet->stream_index], *packet);
}
// Is this a packet from the video stream?
if (packet->stream_index == pVideo->videoStream)
{
pVideo->videoq.PacketQueuePut(packet);//memory leak??? (original author note)
}
else if (packet->stream_index == pAudioc->audioStream)
{
// Only queue audio while audio playback is enabled.
if (pAudioc->IsPlay())
{
pAudioc->audioq.PacketQueuePut(packet);//memory leak??? (original author note)
}
}
else
{
// Packet belongs to a stream we do not play: release it.
av_packet_unref(packet);
}
}
//av_free(packet);
/*all done - wait for it*/
// while (true)
// {
// SDL_Delay(100);
// }
//av_free(buf);
}
catch(DeException ex)
{
pDecoder->parse_tid = 0;
return 1;
}
pDecoder->parse_tid = 0;
return 0;
}
// Push a time-stamp update (current/total seconds) to the registered host
// callback, if one is set and a video stream exists.
// NOTE(review): ic->duration is normally in AV_TIME_BASE units, not the
// stream's time_base, so dTotalTime may be scaled wrongly -- confirm.
void CDecoder::UpdatePteCall()
{
if (m_TimeStampe != nullptr && m_pVideoc->video_st)
{
stTimeStampe time = {0};
//this may not correct.
time.dCurrentTime = GetCutPts();
//time.dCurrentTime = packet->dts * av_q2d(pVideo->video_st->time_base);
time.dTotalTime = m_pVideoc->ic->duration * av_q2d(m_pVideoc->video_st->time_base);
m_TimeStampe((PLAYHANDLE)this, m_pContext, time);
}
}
// Current presentation time according to whichever clock is master.
// External-clock mode yields 0 here (original behaviour).
double CDecoder::GetCutPts()
{
    switch (av_sync_type)
    {
    case AV_SYNC_VIDEO_MASTER:
        return m_pVideoc->GetCurPts();
    case AV_SYNC_AUDIO_MASTER:
        return m_pAudioc->GetCurPts();
    default:
        return 0;
    }
}
// Defer decoder initialization to the SDL event loop: post a start-decoder
// event carrying this instance. Always returns 0.
int CDecoder::InitialzieDecodeParas(DecoderParas* _Out_ paras)
{
    SDL_Event startEvt;
    startEvt.type = FF_START_DECODER_EVENT;
    startEvt.user.data1 = this;
    SDL_PushEvent(&startEvt);
    return 0;
}
// Fetch one complete frame from the base-class stream buffer (this is the
// data source behind the ReadStreamData avio callback). Pure delegation.
int CDecoder::GetOneCompletesFrame(PRAME* _Out_ pFrame)
{
return __super::GetOneCompletesFrame(pFrame);
}
//for download, playback.
// Called when the receive side reports no more data for pFileName. In
// download mode the host is notified; in all modes an FF_STOP_RTSP_REV event
// is posted so the RTSP session is torn down on the event loop.
int CDecoder::NoDataRcvEvent(char* pFileName)
{
    //TODO: how about playback.
    if (m_ePlayType == eDownload)
    {
        // FIX: this statement was garbled in the pasted source ("eDw this" /
        // "eDw exMsg" -- commas missing). Restored to the notification pattern
        // used everywhere else in this file.
        // NOTE(review): the error-code identifier was truncated to "eDw" in
        // the paste; confirm the real enumerator name against DeException.h.
        DeException exMsg(eDw, this, false);
        MODIFY_MSG(exMsg.m_szTemp, exMsg.GetCurString(), pFileName);
        OUTPUT_DECODER("[%ld]<%d>%s", exMsg.m_szMsg, this, eDw, exMsg.m_szTemp);
        exMsg.Execute();
    }
    SDL_Event event;
    event.type = FF_STOP_RTSP_REV;
    event.user.data1 = this;
    SDL_PushEvent(&event);
    return 0;
}
int CDecoder::OnNotDataRcv()
{
DeException exMsg(eOnNoDataRev, this, false);
OUTPUT_DECODER("[%ld]<%d>%s", exMsg.m_szMsg, this, eOnNoDataRev, exMsg.GetCurString());
exMsg.Execute();
return 0;
}
void CDecoder::StopRtsp()
{
if (m_pRtsp)
{
m_pRtsp->StopPlay();
delete m_pRtsp;
m_pRtsp = nullptr;
}
DeException exMsg(eNoDataReceive, this, false);
OUTPUT_DECODER("[%ld]<%d>%s", exMsg.m_szMsg, this, eNoDataReceive, exMsg.GetCurString());
exMsg.Execute();
SDL_Event event;
event.type = FF_QUIT_EVENT;
event.user.data1 = this;
SDL_PushEvent(&event);
}
// Report the current recorded-file size to the host via the eRecordSize
// notification (called while local recording is active).
int CDecoder::RecordInfo(stFileInfo fileInfo)
{
DeException exMsg(eRecordSize, this, false);
MODIFY_MSG(exMsg.m_szTemp, exMsg.GetCurString(), fileInfo.ulFileSize);
OUTPUT_DECODER("[%ld]<%d>%s", exMsg.m_szMsg, this, eRecordSize, exMsg.m_szTemp);
exMsg.Execute();
return 0;
}
///////////////////////public interface///////////////////////////////////
/////////////////////////export interface.//////////////////////////////
// Entry point for every play mode: initialize both decodecs, remember the
// mode, then dispatch to the mode-specific starter. Returns that starter's
// result, or 0 for unknown modes.
int CDecoder::StartPlay(const stPlay &play)
{
    m_pAudioc->Initialize();
    m_pVideoc->Initialize();
    m_ePlayType = play.type;
    // FIX: removed the unreachable `break` statements that followed each `return`.
    switch (play.type)
    {
    case eFile:
        return StartFilePlay(play);
    case eRealPlay:
    case ePlayback:
        return StartStreamPlay(play);
    case eDownload:
        return StartDownload(play);
    default:
        break;
    }
    return 0;
}
int CDecoder::StartFilePlay(const stPlay &play)
{
AVIOInterruptCB interupt_cb = {0};
try
{
if (play.url[0] == ‘\0‘ || _access(play.url, 0))
{
DeException exMsg(eFileNoExist, this);
OUTPUT_DECODER("[%ld]<%d>%s", exMsg.m_szMsg, this, eFileNoExist, exMsg.GetCurString());
exMsg.Execute();
}
if (!IsWindow(play.wndPlay))
{
DeException exMsg(eWndInvalid, this);
OUTPUT_DECODER("[%ld]<%d>%s", exMsg.m_szMsg, this, eWndInvalid, exMsg.GetCurString());
exMsg.Execute();
}
strcpy_s(m_szPlayFile, sizeof(m_szPlayFile), play.url);
m_pVideoc->pictq->wndPlay = play.wndPlay;
m_bAudio = play.bAudio;
ScheduleRefresh(10);
av_sync_type = DEFAULT_AV_SYNC_TYPE;
m_pVideoc->videoStream = -1;
m_pAudioc->audioStream = -1;
// will interrupt blocking functions if we quit!
interupt_cb.callback = DecodeInterruptCb;
interupt_cb.opaque = this;
if (avio_open2(&m_pVideoc->io_ctx, m_szPlayFile, 0, &interupt_cb, NULL))
{
DeException exMsg(eOpenIOError, this);
MODIFY_MSG(exMsg.m_szTemp, exMsg.GetCurString(), m_szPlayFile);
OUTPUT_DECODER("[%ld]<%d>%s", exMsg.m_szMsg, this, eOpenIOError, exMsg.m_szTemp);
exMsg.Execute();
}
//Open video file
if (avformat_open_input(&m_pVideoc->ic, m_szPlayFile, NULL, NULL) != 0)
{
DeException exMsg(eOpenFileError, this);
OUTPUT_DECODER("[%ld]<%d>%s", exMsg.m_szMsg, this, eOpenFileError, exMsg.GetCurString());
exMsg.Execute();
}
//Retrieve stream information
if (avformat_find_stream_info(m_pVideoc->ic, NULL) < 0)
{
DeException exMsg(eRetrieveStreamInformation, this);
OUTPUT_DECODER("[%ld]<%d>%s", exMsg.m_szMsg, this, eRetrieveStreamInformation, exMsg.GetCurString());
exMsg.Execute();
}
int nRetCtx = -1;
//Dump information about file onto standard error
av_dump_format(m_pVideoc->ic, 0, m_szPlayFile, 0);
//Find the first video stream
FindStreamIndex(m_pVideoc->ic, m_pVideoc->videoStream, m_pAudioc->audioStream);
if (m_pAudioc->audioStream >= 0 && m_bAudio)
{
m_pAudioc->m_codecCtx = avcodec_alloc_context3(NULL);
if (m_pAudioc->m_codecCtx == nullptr)
{
DeException exMsg(eAvcodecAllocContextFailed, this);
OUTPUT_DECODER("[%ld]<%d>%s", exMsg.m_szMsg, this, eAvcodecAllocContextFailed, exMsg.GetCurString());
exMsg.Execute();
}
nRetCtx = avcodec_parameters_to_context(m_pAudioc->m_codecCtx, m_pVideoc->ic->streams[m_pAudioc->audioStream]->codecpar);
/* 所有设置SDL音频流信息的步骤都在这个函数里完成 */
int nError = m_pAudioc->AudioStreamComponentOpen(m_pVideoc->ic);
}
else
{
m_pAudioc->audioStream = -1;
}
if (m_pVideoc->videoStream >= 0)
{
m_pVideoc->m_codecCtx = avcodec_alloc_context3(NULL);
if (m_pVideoc->m_codecCtx == nullptr)
{
DeException exMsg(eAvcodecAllocContextFailed, this);
OUTPUT_DECODER("[%ld]<%d>%s", exMsg.m_szMsg, this, eAvcodecAllocContextFailed, exMsg.GetCurString());
exMsg.Execute();
}
nRetCtx = avcodec_parameters_to_context(m_pVideoc->m_codecCtx, m_pVideoc->ic->streams[m_pVideoc->videoStream]->codecpar);
m_pVideoc->VideoStreamComponentOpen();
}
//to do if
if (m_pVideoc->videoStream < 0 && m_pAudioc->audioStream < 0)
{
DeException exMsg(eOpenCodecFailed, this);
MODIFY_MSG(exMsg.m_szTemp, exMsg.GetCurString(), m_szPlayFile);
OUTPUT_DECODER("[%ld]<%d>%s", exMsg.m_szMsg, this, eOpenCodecFailed, exMsg.m_szTemp);
exMsg.Execute();
}
//内存分配失败??
parse_tid = SDL_CreateThread(DecodeFileThread, "parse_thread", this);
if (!parse_tid)
{
DeException exMsg(cReateDecodeThreadFailed, this);
OUTPUT_DECODER("[%ld]<%d>%s", exMsg.m_szMsg, this, cReateDecodeThreadFailed, exMsg.GetCurString());
exMsg.Execute();
}
}
catch(DeException ex)
{
return ex.m_nCode;
}
return 0;
}
// Start a network (real-play or playback) session: allocate the avio buffer
// later consumed by DeCodeStream, fill the RTSP request parameters and start
// the RTSP client. Returns 0 on success or a DeException/eRtspError code.
int CDecoder::StartStreamPlay(const stPlay &play)
{
m_pVideoc->pictq->wndPlay= play.wndPlay;
m_bAudio = play.bAudio;
m_pVideoc->videoStream = -1;
m_pAudioc->audioStream = -1;
// Buffer handed to avio_alloc_context in DeCodeStream once data arrives.
m_pAvioBuf = (uint8_t*)av_mallocz(sizeof(uint8_t)*BUF_SIZE);
AVIOInterruptCB interupt_cb = {0};
// will interrupt blocking functions if we quit!
interupt_cb.callback = nullptr;
interupt_cb.opaque = this;
//Create rtsp stream
StreamPara para = {0};
para.pDecoder = this;
para.eType = (play.type == eRealPlay) ? eRtspRealPlay : eRtspPlayback;
para.eTransType = (play.eTranType == TransmissionType::eUDP) ? RtspTrans::eRtspUDP: RtspTrans::eRtspTCP;
strcpy_s(para.szUrl, 260, play.url);
strcpy_s(para.szIP, 16, "127.0.0.1");
strcpy_s(para.szUser, 64, play.szUser);
strcpy_s(para.szPassword, 64, play.szPassward);
//for playback and download
strcpy_s(para.szFileName, 512, play.szFileName);
try
{
if (!IsWindow(play.wndPlay))
{
DeException exMsg(eWndInvalid, this);
OUTPUT_DECODER("[%ld]<%d>%s", exMsg.m_szMsg, this, eWndInvalid, exMsg.GetCurString());
exMsg.Execute();
}
m_pRtsp = new RtspClient();
int nRet = m_pRtsp->StartPlay(para);
if (nRet != 0)
{
// RTSP setup failed: tear the client down and raise the error.
m_pRtsp->StopPlay();
delete m_pRtsp;
m_pRtsp = nullptr;
DeException exMsg(eRtspError, this);
MODIFY_MSG(exMsg.m_szTemp, exMsg.GetCurString(), nRet);
OUTPUT_DECODER("[%ld]<%d>%s", exMsg.m_szMsg, this, eRtspError, exMsg.m_szTemp);
exMsg.Execute();
}
}
catch(...)
{
return eRtspError;
}
return 0;
}
// Start an RTSP download session: build the request parameters, pick an
// output file name (caller-supplied, or timestamped under .\Download), start
// the RTSP client and raise the transfer speed. Returns 0 on success.
int CDecoder::StartDownload(const stPlay &play)
{
    //Create rtsp stream
    StreamPara para = {0};
    para.pDecoder = this;
    para.eType = RequestType::eRtspDownLoad;
    para.eTransType = (play.eTranType == TransmissionType::eUDP) ? RtspTrans::eRtspUDP: RtspTrans::eRtspTCP;
    char szFileName[512] = {0};
    strcpy_s(para.szUrl, 260, play.url);
    strcpy_s(para.szIP, 16, "127.0.0.1");
    strcpy_s(para.szUser, 64, play.szUser);
    strcpy_s(para.szPassword, 64, play.szPassward);
    //for playback and download
    const char* pFolder = "Download";  // FIX: string literal bound to const pointer
    _mkdir(pFolder);
    //set the file name.
    // FIX: the '\0' literal used typographic quotes (bad paste) and would not compile.
    if (nullptr == play.szFileName || '\0' == play.szFileName[0])
    {
        time_t m_Time;
        struct tm timeinfo = {0};
        time ( &m_Time );
        localtime_s(&timeinfo, &m_Time);
        sprintf_s(szFileName, "%04d%02d%02d_%02d%02d%02d.mpg", timeinfo.tm_year+1900, timeinfo.tm_mon+1, timeinfo.tm_mday, timeinfo.tm_hour, timeinfo.tm_min, timeinfo.tm_sec);
        sprintf_s(para.szFileName, 512, "%s\\%s\\%s", m_strModelPath.c_str(), pFolder, szFileName);
    }
    else
    {
        // FIX: the user-supplied name was passed as the *format* string
        // (sprintf_s(dst, 512, play.szFileName)); a '%' in the name would
        // corrupt the output or crash. Copy it literally instead.
        sprintf_s(para.szFileName, 512, "%s", play.szFileName);
    }
    try
    {
        m_pRtsp = new RtspClient();
        int nRet = m_pRtsp->StartPlay(para);
        if (nRet != 0)
        {
            m_pRtsp->StopPlay();
            delete m_pRtsp;
            m_pRtsp = nullptr;
            DeException exMsg(eRtspError, this);
            MODIFY_MSG(exMsg.m_szTemp, exMsg.GetCurString(), nRet);
            OUTPUT_DECODER("[%ld]<%d>%s", exMsg.m_szMsg, this, eRtspError, exMsg.m_szTemp);
            exMsg.Execute();
        }
        else
        {
            // Download mode: SetSpeed(1) doubles the current scale, so 4 -> 8x.
            m_fPlaySpeed = 4;
            SetSpeed(1);
        }
    }
    catch(...)
    {
        return eRtspError;
    }
    return 0;
}
void CDecoder::DeCodeStream()
{
AVInputFormat *piFmt = NULL;
AVFormatContext *pFormatCtx = nullptr;
try
{
m_pVideoc->io_ctx = avio_alloc_context(m_pAvioBuf, BUF_SIZE, 0, this, ReadStreamData, NULL, NULL);
if (!m_pVideoc->io_ctx)
{
DeException exMsg(eIOAllocError, this);
OUTPUT_DECODER("[%ld]<%d>%s", exMsg.m_szMsg, this, eIOAllocError, exMsg.GetCurString());
exMsg.Execute();
}
if (av_probe_input_buffer(m_pVideoc->io_ctx, &piFmt, "", NULL, 0, 0) < 0)
{
av_free(m_pVideoc->io_ctx);
DeException exMsg(eProbeError, this);
OUTPUT_DECODER("[%ld]<%d>%s", exMsg.m_szMsg, this, eProbeError, exMsg.GetCurString());
exMsg.Execute();
}
else
{
fprintf(stdout, "probe success!\n");
fprintf(stdout, "format: %s[%s]\n", piFmt->name, piFmt->long_name);
}
pFormatCtx = avformat_alloc_context();
pFormatCtx->pb = m_pVideoc->io_ctx;
if (avformat_open_input(&pFormatCtx, "", piFmt, NULL) < 0)
{
av_free(m_pVideoc->io_ctx);
avformat_free_context(m_pVideoc->ic);
DeException exMsg(eAvformatOpenError, this);
OUTPUT_DECODER("[%ld]<%d>%s", exMsg.m_szMsg, this, eAvformatOpenError, exMsg.GetCurString());
exMsg.Execute();
}
else
{
fprintf(stdout, "open stream success!\n");
}
m_pVideoc->ic = pFormatCtx;
//m_pCodec =
//TODO:must set the capabilities
//AVCodec *codec;
//AVCodecContext *c;
// if(m_pVideoc->io_ctx->capabilities&CODEC_CAP_TRUNCATED)
// c->flags|= CODEC_FLAG_TRUNCATED;
//fixed find stream info to slow.
AVDictionary* pOptions = NULL;
pFormatCtx->probesize = 200 *1024;
pFormatCtx->max_analyze_duration = 3 * AV_TIME_BASE;
//Retrieve stream information
// if (avformat_find_stream_info(pFormatCtx, &pOptions) < 0)
// {
// DeException exMsg(eStreamInforError, this);
// OUTPUT_DECODER("[%ld]<%d>%s", exMsg.m_szMsg, this, eStreamInforError, exMsg.GetCurString());
// exMsg.Execute();
// //return -1; // Couldn‘t find stream information
// }
InitDecode(pFormatCtx);
//Dump information about file onto standard error
av_dump_format(pFormatCtx, 0, "", 0);
int nRetCtx = -1;
//Find the first video stream
FindStreamIndex(pFormatCtx, m_pVideoc->videoStream, m_pAudioc->audioStream);
if (m_pVideoc->videoStream >= 0)
{
m_pVideoc->m_codecCtx = avcodec_alloc_context3(NULL);
if (m_pVideoc->m_codecCtx == nullptr)
{
DeException exMsg(eAvcodecAllocContextFailed, this);
OUTPUT_DECODER("[%ld]<%d>%s", exMsg.m_szMsg, this, eAvcodecAllocContextFailed, exMsg.GetCurString());
exMsg.Execute();
}
/* 所有设置SDL音频流信息的步骤都在这个函数里完成 */
nRetCtx = avcodec_parameters_to_context(m_pVideoc->m_codecCtx, pFormatCtx->streams[m_pVideoc->videoStream]->codecpar);
m_pVideoc->VideoStreamComponentOpen();
}
if (m_bAudio && m_pAudioc->audioStream >= 0)
{
m_pAudioc->m_codecCtx = avcodec_alloc_context3(NULL);
if (m_pAudioc->m_codecCtx == nullptr)
{
DeException exMsg(eAvcodecAllocContextFailed, this);
OUTPUT_DECODER("[%ld]<%d>%s", exMsg.m_szMsg, this, eAvcodecAllocContextFailed, exMsg.GetCurString());
exMsg.Execute();
}
nRetCtx = avcodec_parameters_to_context(m_pAudioc->m_codecCtx, pFormatCtx->streams[m_pAudioc->audioStream]->codecpar);
m_pAudioc->AudioStreamComponentOpen(pFormatCtx);
}
else
{
m_pAudioc->audioStream = -1;
}
if (m_pVideoc->videoStream < 0 && m_pAudioc->audioStream < 0)
{
DeException exMsg(eOpenCodecFailed, this);
OUTPUT_DECODER("[%ld]<%d>%s", exMsg.m_szMsg, this, eOpenCodecFailed, exMsg.GetCurString());
exMsg.Execute();
}
m_szPlayFile[0] = ‘\0‘;
ScheduleRefresh(10);
m_eStatus = ePlay;
av_sync_type = DEFAULT_AV_SYNC_TYPE;
parse_tid = SDL_CreateThread(DecodeStreamThread, "parse_thread", this);
if (!parse_tid)
{
DeException exMsg(cReateDecodeThreadFailed, this);
OUTPUT_DECODER("[%ld]<%d>%s", exMsg.m_szMsg, this, cReateDecodeThreadFailed, exMsg.GetCurString());
exMsg.Execute();
}
}
catch(DeException ex)
{
//play false
if (m_pRtsp)
m_pRtsp->StopPlay();
ex;
//return ex.m_nCode;
}
}
// Append a new stream of the given media type to the format context.
// Returns the new stream, or NULL if allocation fails.
AVStream* CDecoder::CreateStream(AVFormatContext* pFormatContext, int nCodecType)
{
    AVStream *pStream = avformat_new_stream(pFormatContext, NULL);
    if (pStream != NULL)
        pStream->codecpar->codec_type = (AVMediaType)nCodecType;
    return pStream;
}
// Scan FLV tags from the probe buffer until the first video tag and copy its
// payload (the H264 sequence header with sps/pps) into the video stream's
// extradata. Returns 0 on success, -1 on any failure.
// Tag layout read here: type(1) size(3) dts(3)+dts_ext(1) stream_id(3),
// then the payload, then a 4-byte previous-tag-size skipped by the for-step.
int CDecoder::GetVideoExtraData(AVFormatContext* pFormatContext, int nVideoIndex)
{
    int ret = -1;
    if (!pFormatContext || nVideoIndex < 0 || nVideoIndex > 2)
        return ret;
    bool got_extradata = false;
    for (;; avio_skip(pFormatContext->pb, 4)) {
        // FIX: removed the unused locals (flags, stream_type, pos, dts); the
        // byte reads are kept because they advance the stream position.
        int type = avio_r8(pFormatContext->pb);
        int size = avio_rb24(pFormatContext->pb);
        avio_rb24(pFormatContext->pb);    // dts low 24 bits (value unused)
        avio_r8(pFormatContext->pb);      // dts extension byte (value unused)
        avio_skip(pFormatContext->pb, 3); // stream id
        if (0 == size)
            break;
        if (FLV_TAG_TYPE_AUDIO == type || FLV_TAG_TYPE_META == type) {
            /*if audio or meta tags, skip them.*/
            avio_seek(pFormatContext->pb, size, SEEK_CUR);
        }
        else if (type == FLV_TAG_TYPE_VIDEO) {
            /*if the first video tag, read the sps/pps info from it. then break.*/
            size -= 5;
            pFormatContext->streams[nVideoIndex]->codecpar->extradata = (uint8_t*)av_malloc(size + FF_INPUT_BUFFER_PADDING_SIZE);
            if (NULL == pFormatContext->streams[nVideoIndex]->codecpar->extradata)
                break;
            memset(pFormatContext->streams[nVideoIndex]->codecpar->extradata, 0, size + FF_INPUT_BUFFER_PADDING_SIZE);
            memcpy(pFormatContext->streams[nVideoIndex]->codecpar->extradata, pFormatContext->pb->buf_ptr + 5, size);
            pFormatContext->streams[nVideoIndex]->codecpar->extradata_size = size;
            ret = 0;
            got_extradata = true;
        }
        else {
            /*The type unknown,something wrong.*/
            break;
        }
        if (got_extradata)
            break;
    }
    return ret;
}
// Hand-rolled replacement for avformat_find_stream_info on live streams:
// make sure a video and an audio stream exist (creating missing ones), then
// force-fill their codec parameters for H264 video + AAC audio, pull sps/pps
// out of the first FLV video tag, and drop the stream count to video-only.
// Returns GetVideoExtraData's result, or -1 when no video stream is found.
// NOTE(review): the video parameters (1920x1080, profile 66, level 42,
// format 12) are hard-coded -- streams with other properties will be
// mis-described. r_frame_rate is set to den=25/num=1 while avg_frame_rate is
// num=25/den=1; the two look swapped relative to each other -- confirm.
int CDecoder::InitDecode(AVFormatContext *pFormatContext)
{
int video_index = -1;
int audio_index = -1;
int ret = -1;
if (!pFormatContext)
return ret;
/*
Get video stream index, if no video stream then create it.
And audio so on.
*/
if (0 == pFormatContext->nb_streams) {
CreateStream(pFormatContext, AVMEDIA_TYPE_VIDEO);
CreateStream(pFormatContext, AVMEDIA_TYPE_AUDIO);
video_index = 0;
audio_index = 1;
}
else if (1 == pFormatContext->nb_streams) {
if (AVMEDIA_TYPE_VIDEO == pFormatContext->streams[0]->codecpar->codec_type) {
CreateStream(pFormatContext, AVMEDIA_TYPE_AUDIO);
video_index = 0;
audio_index = 1;
}
else if (AVMEDIA_TYPE_AUDIO == pFormatContext->streams[0]->codecpar->codec_type) {
CreateStream(pFormatContext, AVMEDIA_TYPE_VIDEO);
video_index = 1;
audio_index = 0;
}
}
else if (2 == pFormatContext->nb_streams) {
if (AVMEDIA_TYPE_VIDEO == pFormatContext->streams[0]->codecpar->codec_type) {
video_index = 0;
audio_index = 1;
}
else if (AVMEDIA_TYPE_VIDEO == pFormatContext->streams[1]->codecpar->codec_type) {
video_index = 1;
audio_index = 0;
}
}
/*Error: no video stream could be located or created.*/
if (video_index != 0 && video_index != 1)
return ret;
//Init the audio codec(AAC): 44.1 kHz, 16-bit, stereo.
pFormatContext->streams[audio_index]->codecpar->codec_id = AV_CODEC_ID_AAC;
pFormatContext->streams[audio_index]->codecpar->sample_rate = 44100;
// pFormatContext->streams[audio_index]->codec->time_base.den = 44100;
// pFormatContext->streams[audio_index]->codec->time_base.num = 1;
pFormatContext->streams[audio_index]->codecpar->bits_per_coded_sample = 16;
pFormatContext->streams[audio_index]->codecpar->channels = 2;
pFormatContext->streams[audio_index]->codecpar->channel_layout = 3;
pFormatContext->streams[audio_index]->pts_wrap_bits = 32;
pFormatContext->streams[audio_index]->time_base.den = 1000;
pFormatContext->streams[audio_index]->time_base.num = 1;
//Init the video codec(H264) with fixed parameters (see NOTE above).
pFormatContext->streams[video_index]->codecpar->codec_type = AVMEDIA_TYPE_VIDEO;
pFormatContext->streams[video_index]->codecpar->codec_id = AV_CODEC_ID_H264;
pFormatContext->streams[video_index]->codecpar->format = 12;
pFormatContext->streams[video_index]->codecpar->bits_per_raw_sample = 8;
pFormatContext->streams[video_index]->codecpar->profile = 66;
pFormatContext->streams[video_index]->codecpar->level = 42;
pFormatContext->streams[video_index]->codecpar->width = 1920;
pFormatContext->streams[video_index]->codecpar->height = 1080;
pFormatContext->streams[video_index]->codecpar->sample_aspect_ratio.num = 0;
pFormatContext->streams[video_index]->codecpar->sample_aspect_ratio.den = 1;
// pFormatContext->streams[video_index]->codecpar->ticks_per_frame = 2;
// pFormatContext->streams[video_index]->codecpar->pix_fmt = 0;
pFormatContext->streams[video_index]->pts_wrap_bits = 64;
pFormatContext->streams[video_index]->time_base.den = 1200000;
pFormatContext->streams[video_index]->time_base.num = 1;
pFormatContext->streams[video_index]->avg_frame_rate.den = 1;
pFormatContext->streams[video_index]->avg_frame_rate.num = 25;
/*Needs revisiting: different sources have different frame rates. 'r_frame_rate' is new in ffmpeg 2.3.3*/
pFormatContext->streams[video_index]->r_frame_rate.den = 25;
pFormatContext->streams[video_index]->r_frame_rate.num = 1;
/* H264 need sps/pps for decoding, so read it from the first video tag.*/
ret = GetVideoExtraData(pFormatContext, video_index);
/*Update the AVFormatContext Info*/
pFormatContext->nb_streams = 1;
/*empty the buffer.*/
pFormatContext->pb->buf_ptr = pFormatContext->pb->buf_end;
/*
something wrong here originally:
TODO: find out what 'pos' means, then set it.
*/
// pFormatContext->pb->pos = (int64_t)pFormatContext->pb->buf_end;
// pFormatContext->pb->pos = pFormatContext->pb->buffer_size;
return ret;
}
// Forward the periodic refresh tick (FF_REFRESH_EVENT) to the video decodec.
void CDecoder::VideoRefreshTimer()
{
m_pVideoc->VideoRefreshTimer();
}
// Forward the picture-buffer allocation request to the video decodec.
void CDecoder::AllocPicture()
{
m_pVideoc->AllocPicture();
}
// Flush both elementary-stream decoders, tolerating partially-built state.
void CDecoder::FlushData()
{
    if (m_pVideoc != nullptr)
        m_pVideoc->FlushData();
    if (m_pAudioc != nullptr)
        m_pAudioc->FlushData();
}
// Final cleanup after playback stops: wait for the demux thread to exit,
// clear both decodec buffers, release the flush packet and the RTSP client.
// The play window is hidden during cleanup and re-shown afterwards.
void CDecoder::ClearBuffer()
{
int nRet = 0;
if (0 != parse_tid)//stream is end before this.
SDL_WaitThread(parse_tid, &nRet);
m_bStop = true;
ShowWindow(m_pVideoc->pictq->wndPlay, SW_HIDE);
// av_free(m_pAvioBuf);
m_pAudioc->CleanupBuffer();
m_pVideoc->CleanupBuffer();
av_packet_unref(&m_FlushPkt);
if (m_pRtsp)
{
delete m_pRtsp;
m_pRtsp = nullptr;
}
ShowWindow(m_pVideoc->pictq->wndPlay, SW_SHOW);
}
// Stop playback: stop the RTSP session (if any), flush the decoders, set the
// stop flags and wake any thread blocked on the decodec signals so the
// decode loops can observe eStop. Always returns 0.
int CDecoder::StopPlay()
{
//TODO: exiting by waiting on the worker threads (WaitForObject) would be safer here.
if (m_pRtsp)
m_pRtsp->StopPlay();
m_pVideoc->FlushData();
m_eStatus = eStop;
m_bStop = true;
m_pVideoc->SetSignal();//sometimes can not stop play.
if (m_bAudio)
{
m_pAudioc->FlushData();
m_pAudioc->SetSignal();
}
return 0;
}
// Register the host callback (and its user context) that receives
// current/total time updates during playback (see UpdatePteCall).
void CDecoder::SetTimeStampeCallBack(fTimeStampe pFunction, LPVOID pContext)
{
    m_pContext = pContext;
    m_TimeStampe = pFunction;
}
// True once playback has been stopped (status == eStop).
bool CDecoder::IsStop()
{
    return eStop == m_eStatus;
}
// Total duration in seconds of the opened file; 0 for stream modes.
// NOTE(review): this busy-waits until the format context exists, so it can
// block the caller indefinitely if opening failed; and it reads streams[0],
// which is not necessarily the video stream. Confirm before relying on it.
double CDecoder::GetTotalTime()
{
if ( m_ePlayType == eFile)
{
while (m_pVideoc->ic == nullptr)
SDL_Delay(10);
AVStream* pStream = m_pVideoc->ic->streams[0];
return pStream->duration * av_q2d(pStream->time_base);
}
return 0;
}
// Current playback state (ePlay/ePause/eStop/...).
PlayStatus CDecoder::GetPlayStatus()
{
return m_eStatus;
}
// Runtime control dispatcher: play/pause, fast forward/backward, single-step,
// start/stop local recording, start/stop audio, stop playback.
// Returns 0 on success, 1 when handling throws.
int CDecoder::PlayConctrol(PlayStatus status)
{
    m_eStatus = status;
    try
    {
        switch (status)
        {
        case ePlay:
        {
            if (m_pAudioc)
            {
                m_pAudioc->PauseAudio(0);
            }
            // Resume the RTSP session too for network sources.
            if (m_pRtsp && (eRealPlay == m_ePlayType || ePlayback == m_ePlayType))
            {
                stPlayContrl ctrl;
                ctrl.eControlType = eRtspContrlPlay;
                ZeroMemory(ctrl.szStartTime, sizeof(ctrl.szStartTime));
                m_pRtsp->PlayControl(ctrl);
            }
            m_bPaused = false;
        }
        break;
        case ePause:
        {
            if (m_pAudioc)
            {
                m_pAudioc->PauseAudio(1);
                m_nStep = 0;//for play by step, now did not implement.
            }
            //
            if (m_pRtsp && (eRealPlay == m_ePlayType || ePlayback == m_ePlayType))
            {
                stPlayContrl ctrl;
                ctrl.eControlType = eRtspContrlPause;
                ZeroMemory(ctrl.szStartTime, sizeof(ctrl.szStartTime));
                m_pRtsp->PlayControl(ctrl);
            }
            m_bPaused = true;
        }
        break;
        case eFastForward:
            switch (m_ePlayType)
            {
            case eFile:
                StreamSeek(60);      // jump 60 units forward in the file
                break;
            case eRealPlay:
                break;
            case ePlayback:
                SetSpeed(1);         // double the RTSP playback scale
                break;
            case eDownload:
                break;
            default:
                break;
            }
            break;
        case eFastBackward:
            switch (m_ePlayType)
            {
            case eFile:
                StreamSeek(-60, -1); // jump back with AVSEEK_FLAG_BACKWARD
                break;
            case eRealPlay:
                break;
            case ePlayback:
                SetSpeed(-1);        // halve the RTSP playback scale
                break;
            case eDownload:
                break;
            default:
                break;
            }
            break;
        case eStep:
        {
            // FIX: was `m_bPaused ? StreamTogglePause() : 0;` -- a conditional
            // expression mixing void and int, which does not compile.
            if (m_bPaused)
                StreamTogglePause();
            m_nStep = 1;
        }
        // FIX: added the missing break -- the original fell through into
        // eRecord, so single-stepping would also have started a recording.
        break;
        case eRecord:
        {
            // Restart recording: drop any existing demuxer first.
            if (m_pDemuxer != nullptr)
            {
                delete m_pDemuxer;
                m_pDemuxer = nullptr;
                SDL_Delay(10);
            }
            std::string strFullPath;
            if (m_strRecodFolder.empty())
            {
                // No folder configured: default to <module>\Record\<timestamp>.mpg
                time_t m_Time;
                char szFileName[512]= {0};
                struct tm timeinfo = {0};
                time ( &m_Time );
                _mkdir("Record");
                localtime_s(&timeinfo, &m_Time);
                sprintf_s(szFileName, "Record/%04d%02d%02d_%02d%02d%02d.mpg", timeinfo.tm_year+1900, timeinfo.tm_mon+1, timeinfo.tm_mday, timeinfo.tm_hour, timeinfo.tm_min, timeinfo.tm_sec);
                strFullPath = m_strModelPath;
                strFullPath += "\\";
                strFullPath += szFileName;
            }
            else
            {
                strFullPath = m_strRecodFolder;
            }
            FILE* pfRecord = nullptr;
            std::string strResult;
            if(fopen_s(&pfRecord, strFullPath.c_str(), "wb+") == 0)
            {
                m_pDemuxer = new CDemuxer();
                m_pDemuxer->InitData(pfRecord);
                strResult = strFullPath;
            }
            else
            {
                strResult = "open file error,";
            }
            // Report the resolved path (or the error text) to the host.
            DeException exMsg(eRecordFilePath, this, false);
            OUTPUT_DECODER("[%ld]<%d>%s", exMsg.m_szMsg, this, eRecordFilePath, strResult.c_str());
            exMsg.Execute();
        }
        break;
        case eStopRecord:
        {
            if (m_pDemuxer)
            {
                delete m_pDemuxer;
                m_pDemuxer = nullptr;
                SDL_Delay(10);
            }
        }
        break;
        case eStop:
            StopPlay();
            break;
        case ePlayAudio:
            if (m_pAudioc)
            {
                m_pAudioc->SetPlayAudio(true);
            }
            break;
        case eStopAudio:
            if (m_pAudioc)
            {
                m_pAudioc->SetPlayAudio(false);
            }
            break;
        default:
            break;
        }
    }
    catch (...)
    {
        return 1;
    }
    return 0;
}
// Forward a volume change to the audio decodec.
// Returns the decodec's result, or -1 when no audio decodec exists.
// FIX: removed the unreachable trailing `return -1` (both branches of the
// original if/else already returned).
int CDecoder::SetSoundVolume(int nVolume)
{
    if (nullptr == m_pAudioc)
    {
        return -1;
    }
    return m_pAudioc->SetSoundVolume(nVolume);
}
// Ask the RTSP session to halve (uSpeed == -1) or double (uSpeed == +1) the
// playback scale, within the limits checked against the current speed.
// No-op without an RTSP session or for any other uSpeed value.
void CDecoder::SetSpeed(short uSpeed)
{
    if (!m_pRtsp)
        return;
    stPlayContrl ctrl;
    ctrl.eControlType = eRtspContrlScale;
    ZeroMemory(ctrl.szStartTime, sizeof(ctrl.szStartTime));
    if (uSpeed == -1 && m_fPlaySpeed > 0.125)
    {
        ctrl.fScale = m_fPlaySpeed * 0.5;
        m_pRtsp->PlayControl(ctrl);
    }
    else if (uSpeed == 1 && m_fPlaySpeed < 8)
    {
        ctrl.fScale = m_fPlaySpeed * 2;
        m_pRtsp->PlayControl(ctrl);
    }
}
// Translate the public playback-control struct into the RTSP client's control
// struct and forward it. Returns 0 (no-op) when there is no RTSP session.
int CDecoder::PlaybackControl(stPlayback ctrl)
{
    if (!m_pRtsp)
        return 0;
    stPlayContrl fwd;
    fwd.eControlType = (EnumControl)ctrl.eControlType;
    fwd.fScale = ctrl.fScale;
    strcpy_s(fwd.szStartTime, 16, ctrl.szStartTime);
    return m_pRtsp->PlayControl(fwd);
}
// Resolve the snapshot target path into m_strCaptureFile and report it via
// the eCapturePicturePath notification; the actual capture is performed
// elsewhere once the member is set. Relative names go under the configured
// picture folder; empty/null names get a tick-count default; with nothing
// configured at all the path defaults to <module>\CapturePicture\<ticks>.jpg.
int CDecoder::CapturePicture(const char* pFileName)
{
    // FIX: assigning a null const char* to std::string is undefined behaviour;
    // nullptr is explicitly handled below, so map it to an empty string here.
    m_strCaptureFile = (pFileName != nullptr) ? pFileName : "";
    std::string strTickCount;
    if (-1 == m_strCaptureFile.find(":") && !m_strPictureFolder.empty())
    {
        // FIX: the '\0' literal used typographic quotes (bad paste).
        if (pFileName == nullptr || pFileName[0] == '\0')
        {
            m_strCaptureFile = m_strPictureFolder;
            m_strCaptureFile += "\\";
            m_strCaptureFile += std::to_string(GetTickCount());
            m_strCaptureFile += ".jpg";
        }
        else
        {
            m_strCaptureFile = m_strPictureFolder;
            m_strCaptureFile += "\\";
            m_strCaptureFile += pFileName;
        }
    }
    if (m_strCaptureFile.empty())
    {
        //create folder and file.---default file name.
        const char* pFolder = "CapturePicture";  // FIX: string literal bound to const pointer
        _mkdir(pFolder);
        m_strCaptureFile += m_strModelPath;
        m_strCaptureFile += "\\";
        m_strCaptureFile += pFolder;
        m_strCaptureFile += "\\";
        m_strCaptureFile += std::to_string(GetTickCount());
        m_strCaptureFile += ".jpg";
    }
    DeException exMsg(eCapturePicturePath, this, false);
    OUTPUT_DECODER("[%ld]<%d>%s", exMsg.m_szMsg, this, eCapturePicturePath, m_strCaptureFile.c_str());
    exMsg.Execute();
    return 0;
}
// Start (or restart) local recording to pFileName. Relative names go under
// the configured record folder; empty/null names get a timestamped default;
// with nothing configured the file lands in <module>\Record\<timestamp>.mpg.
// Any existing recording is stopped first. The resolved path (or an error
// text) is reported via the eRecordFilePath notification. Always returns 0.
int CDecoder::LocalRecord(const char* pFileName)
{
    // FIX: assigning a null const char* to std::string is undefined behaviour;
    // nullptr is explicitly handled below, so map it to an empty string here.
    std::string strFileName = (pFileName != nullptr) ? pFileName : "";
    if (-1 == strFileName.find(":") && !m_strRecodFolder.empty())
    {
        // FIX: the '\0' literal used typographic quotes (bad paste).
        if (pFileName == nullptr || pFileName[0] == '\0')
        {
            time_t m_Time;
            char szFileName[512]= {0};
            struct tm timeinfo = {0};
            time ( &m_Time );
            localtime_s(&timeinfo, &m_Time);
            sprintf_s(szFileName, "%04d%02d%02d_%02d%02d%02d.mpg", timeinfo.tm_year+1900, timeinfo.tm_mon+1, timeinfo.tm_mday, timeinfo.tm_hour, timeinfo.tm_min, timeinfo.tm_sec);
            strFileName = m_strRecodFolder;
            strFileName += "\\";
            strFileName += szFileName;
        }
        else
        {
            strFileName = m_strRecodFolder;
            strFileName += "\\";
            strFileName += pFileName;
        }
    }
    // Stop any recording already in progress before switching files.
    if (m_pDemuxer != nullptr)
    {
        delete m_pDemuxer;
        m_pDemuxer = nullptr;
        SDL_Delay(10);
    }
    if (strFileName.empty())
    {
        //create folder and file.---default file name.
        const char* pFolder = "Record";  // FIX: string literal bound to const pointer
        _mkdir(pFolder);
        time_t m_Time;
        char szFileName[512]= {0};
        struct tm timeinfo = {0};
        time ( &m_Time );
        localtime_s(&timeinfo, &m_Time);
        sprintf_s(szFileName, "%04d%02d%02d_%02d%02d%02d.mpg", timeinfo.tm_year+1900, timeinfo.tm_mon+1, timeinfo.tm_mday, timeinfo.tm_hour, timeinfo.tm_min, timeinfo.tm_sec);
        strFileName += m_strModelPath;
        strFileName += "\\";
        strFileName += pFolder;
        strFileName += "\\";
        strFileName += szFileName;
    }
    FILE* pfRecord = nullptr;
    if(fopen_s(&pfRecord, strFileName.c_str(), "wb+") == 0)
    {
        m_pDemuxer = new CDemuxer();
        m_pDemuxer->InitData(pfRecord);
    }
    else
    {
        strFileName = "open file error,";
    }
    DeException exMsg(eRecordFilePath, this, false);
    OUTPUT_DECODER("[%ld]<%d>%s", exMsg.m_szMsg, this, eRecordFilePath, strFileName.c_str());
    exMsg.Execute();
    return 0;
}
// Stop local recording by destroying the demuxer. The short delay after the
// delete is kept from the original (presumably to let an in-flight Demuxer()
// call settle -- TODO confirm). Always returns 0.
int CDecoder::StopLoadRecord()
{
    if (nullptr != m_pDemuxer)
    {
        delete m_pDemuxer;
        m_pDemuxer = nullptr;
        SDL_Delay(10);
    }
    return 0;
}
// Remember the folder used for local recordings; a null pointer leaves the
// current setting unchanged. Always returns 0.
int CDecoder::SetRecordPath(const char* pFolder)
{
    if (pFolder != nullptr)
        m_strRecodFolder = pFolder;
    return 0;
}
// Remember the folder used for captured pictures; a null pointer leaves the
// current setting unchanged. Always returns 0.
int CDecoder::SetPicturePath(const char* pFolder)
{
    if (pFolder != nullptr)
        m_strPictureFolder = pFolder;
    return 0;
}
// Request an asynchronous seek relative to the current position; the demux
// thread (DecodeFileThread) performs it. A pending request is never
// overwritten. rel < 0 selects AVSEEK_FLAG_BACKWARD. Always returns 0.
int CDecoder::StreamSeek(int64_t nIncrease, int rel)
{
    if (seek_req)
        return 0;
    seek_pos = (int64_t)(GetCutPts() + nIncrease) /** AV_TIME_BASE*/;
    seek_flags = (rel < 0) ? AVSEEK_FLAG_BACKWARD : 0;
    seek_req = 1;
    return 0;
}
// Is this decoder currently rendering into the given window?
bool CDecoder::IsWindowInUse(const HWND& wnd)
{
    const bool hasPictureQueue = (m_pVideoc != nullptr) && (m_pVideoc->pictq != nullptr);
    return hasPictureQueue && (wnd == m_pVideoc->pictq->wndPlay);
}
以上是关于如何使用ffmpeg实现h264流传输+H264实现RTP传输数据的主要内容,如果未能解决你的问题,请参考以下文章