Using Live555 to Stream Live Video from an IP Camera Connected to an H264 Encoder

Posted: 2015-02-01 10:55:29

Question:

I am using a custom board based on the Texas Instruments OMAP-L138, which essentially consists of an ARM9-based SoC plus a DSP processor, connected to a camera lens. What I am trying to do is capture the live video stream, which is sent to the DSP processor for H264 encoding and delivered over uPP in packets of 8192 bytes. I want to use the testH264VideoStreamer supplied with Live555 to stream the H264-encoded video live over RTSP. My modified code is shown below:

#include <liveMedia.hh>
#include <BasicUsageEnvironment.hh>
#include <GroupsockHelper.hh>
#include <stdio.h>
#include <stdint.h> // for uint8_t
#include <stdlib.h>
#include <fcntl.h>
#include <string.h>
#include <errno.h>
#include <unistd.h> // for read()




UsageEnvironment* env;
H264VideoStreamFramer* videoSource;
RTPSink* videoSink;


//-------------------------------------------------------------------------------
/* Open a file descriptor for the uPP device */
int stream = open("/dev/upp", O_RDONLY);
/* A static array of 8192 unsigned 8-bit integers; static so it keeps its contents between invocations of play() */
static uint8_t buf[8192];
//------------------------------------------------------------------------------


//------------------------------------------------------------------------------
// Forward declaration of play(), defined below
//------------------------------------------------------------------------------
void play(); // forward


//------------------------------------------------------------------------------
// MAIN FUNCTION / ENTRY POINT 
//------------------------------------------------------------------------------
int main(int argc, char** argv)
{
    // Begin by setting up our live555 usage environment:
    TaskScheduler* scheduler = BasicTaskScheduler::createNew();
    env = BasicUsageEnvironment::createNew(*scheduler);

    // Create 'groupsocks' for RTP and RTCP:
    struct in_addr destinationAddress;
    destinationAddress.s_addr = chooseRandomIPv4SSMAddress(*env);
    // Note: This is a multicast address.  If you wish instead to stream
    // using unicast, then you should use the "testOnDemandRTSPServer"
    // test program - not this test program - as a model.

    const unsigned short rtpPortNum = 18888;
    const unsigned short rtcpPortNum = rtpPortNum+1;
    const unsigned char ttl = 255;

    const Port rtpPort(rtpPortNum);
    const Port rtcpPort(rtcpPortNum);

    Groupsock rtpGroupsock(*env, destinationAddress, rtpPort, ttl);
    rtpGroupsock.multicastSendOnly(); // we're a SSM source
    Groupsock rtcpGroupsock(*env, destinationAddress, rtcpPort, ttl);
    rtcpGroupsock.multicastSendOnly(); // we're a SSM source

    // Create a 'H264 Video RTP' sink from the RTP 'groupsock':
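    // OutPacketBuffer::maxSize bounds the size of a single outgoing frame;
    // frames larger than this get truncated, so it is raised well above the
    // library default here to accommodate large H.264 access units: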
    OutPacketBuffer::maxSize = 1000000;
    videoSink = H264VideoRTPSink::createNew(*env, &rtpGroupsock, 96);

    // Create (and start) a 'RTCP instance' for this RTP sink:
    const unsigned estimatedSessionBandwidth = 500; // in kbps; for RTCP b/w share
    const unsigned maxCNAMElen = 100;
    unsigned char CNAME[maxCNAMElen+1];
    gethostname((char*)CNAME, maxCNAMElen);
    CNAME[maxCNAMElen] = '\0'; // just in case
    RTCPInstance* rtcp
    = RTCPInstance::createNew(*env, &rtcpGroupsock,
                estimatedSessionBandwidth, CNAME,
                videoSink, NULL /* we're a server */,
                True /* we're a SSM source */);
    // Note: This starts RTCP running automatically

    /* Create the RTSP server: */
    RTSPServer* rtspServer = RTSPServer::createNew(*env, 8554);
    if (rtspServer == NULL)
    {
        *env << "Failed to create RTSP server: " << env->getResultMsg() << "\n";
        exit(1);
    }
    ServerMediaSession* sms
        = ServerMediaSession::createNew(*env, "IPCAM @ TeReSol", "UPP Buffer",
                                        "Session streamed by \"testH264VideoStreamer\"",
                                        True /*SSM*/);
    sms->addSubsession(PassiveServerMediaSubsession::createNew(*videoSink, rtcp));
    rtspServer->addServerMediaSession(sms);

    char* url = rtspServer->rtspURL(sms);
    *env << "Play this stream using the URL \"" << url << "\"\n";
    delete[] url;

    // Start the streaming:
    *env << "Beginning streaming...\n";
    play();

    env->taskScheduler().doEventLoop(); // does not return

    return 0; // only to prevent compiler warning
}


//----------------------------------------------------------------------------------
// afterPlaying() -> Defines what to do once a buffer is streamed
//----------------------------------------------------------------------------------
void afterPlaying(void* /*clientData*/)
{
    *env << "...done reading from upp buffer\n";
    //videoSink->stopPlaying();
    //Medium::close(videoSource);
    // Note that this also closes the input file that this source read from.

    // Start playing once again to get the next stream      
    play();

    /* We don't need to close the device while we're still reading from it. If we did, we would use: close(stream); */
}



//----------------------------------------------------------------------------------------------
// play() Method -> Defines how to read and what to make of the input stream 
//----------------------------------------------------------------------------------------------
void play()
{

    /* Read sizeof(buf) bytes from the file descriptor 'stream' into buf (return value unchecked here) */
    read(stream, buf, sizeof buf);
    printf("Reading from UPP into buffer\n");

    /* Wrap the buffer as a 'byte-stream memory buffer source': */
    ByteStreamMemoryBufferSource* buffSource
        = ByteStreamMemoryBufferSource::createNew(*env, buf, sizeof buf, False /*deleteBufferOnClose*/);
    /* Passing False here tells the source not to delete[] the buffer when it is
       closed, which matters because buf is a static array, not heap memory */

    if (buffSource == NULL)
    {
        *env << "Unable to read from \"Buffer\" as a byte-stream source\n";
        exit(1);
    }

    FramedSource* videoES = buffSource;
    // Create a framer for the Video Elementary Stream:
    videoSource = H264VideoStreamFramer::createNew(*env, videoES, False);
    // Finally, start playing:
    *env << "Beginning to read from UPP...\n";
    videoSink->startPlaying(*videoSource, afterPlaying, videoSink);
}

The problem is that although the code compiles successfully, I am unable to get the desired output. The RTSP stream shows as playing in VLC player, but I cannot see any video. I would appreciate any help in this matter. My description may be somewhat vague, but I am happy to explain any part of it further if needed.
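One thing worth verifying first, since H264VideoStreamFramer only understands a raw Annex-B H.264 elementary stream, is that the 8192-byte packets read from /dev/upp actually contain NAL start codes. A small check along these lines can rule that out (containsStartCode() is an illustrative helper, not part of the original code):

// Illustrative sanity check: scan a captured uPP packet for an H.264
// Annex-B NAL start code (00 00 01 or 00 00 00 01), which
// H264VideoStreamFramer requires in its input byte stream.
#include <stddef.h>
#include <stdint.h>

static bool containsStartCode(const uint8_t* p, size_t n) {
  for (size_t i = 0; i + 3 < n; ++i) {
    if (p[i] == 0 && p[i + 1] == 0 &&
        (p[i + 2] == 1 || (p[i + 2] == 0 && p[i + 3] == 1)))
      return true;
  }
  return false;
}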

Comments:

Answer 1:

OK, so I figured out what needed to be done and am writing it up for the benefit of anyone who might face a similar problem. What I needed to do was modify my testH264VideoStreamer.cpp and DeviceSource.cpp files so that they read data directly from the device (in my case the custom am1808 board), store it in a buffer, and stream it. The changes I made were:

testH264VideoStreamer.cpp

#include <liveMedia.hh>
#include <BasicUsageEnvironment.hh>
#include <GroupsockHelper.hh>
#include <stdio.h>
#include <stdlib.h>
#include <fcntl.h>
#include <string.h>
#include <errno.h>
#include <unistd.h> // for read()




UsageEnvironment* env;

H264VideoStreamFramer* videoSource;
RTPSink* videoSink;

void play(); // forward
//-------------------------------------------------------------------------
//Entry Point -> Main FUNCTION  
//-------------------------------------------------------------------------

int main(int argc, char** argv)
{
  // Begin by setting up our usage environment:
  TaskScheduler* scheduler = BasicTaskScheduler::createNew();
  env = BasicUsageEnvironment::createNew(*scheduler);

  // Create 'groupsocks' for RTP and RTCP:
  struct in_addr destinationAddress;
  destinationAddress.s_addr = chooseRandomIPv4SSMAddress(*env);
  // Note: This is a multicast address.  If you wish instead to stream
  // using unicast, then you should use the "testOnDemandRTSPServer"
  // test program - not this test program - as a model.

  const unsigned short rtpPortNum = 18888;
  const unsigned short rtcpPortNum = rtpPortNum+1;
  const unsigned char ttl = 255;

  const Port rtpPort(rtpPortNum);
  const Port rtcpPort(rtcpPortNum);

  Groupsock rtpGroupsock(*env, destinationAddress, rtpPort, ttl);
  rtpGroupsock.multicastSendOnly(); // we're a SSM source
  Groupsock rtcpGroupsock(*env, destinationAddress, rtcpPort, ttl);
  rtcpGroupsock.multicastSendOnly(); // we're a SSM source

  // Create a 'H264 Video RTP' sink from the RTP 'groupsock':
  OutPacketBuffer::maxSize = 600000;
  videoSink = H264VideoRTPSink::createNew(*env, &rtpGroupsock, 96);

  // Create (and start) a 'RTCP instance' for this RTP sink:
  const unsigned estimatedSessionBandwidth = 1024; // in kbps; for RTCP b/w share
  const unsigned maxCNAMElen = 100;
  unsigned char CNAME[maxCNAMElen+1];
  gethostname((char*)CNAME, maxCNAMElen);
  CNAME[maxCNAMElen] = '\0'; // just in case
  RTCPInstance* rtcp
  = RTCPInstance::createNew(*env, &rtcpGroupsock,
                estimatedSessionBandwidth, CNAME,
                videoSink, NULL /* we're a server */,
                True /* we're a SSM source */);
  // Note: This starts RTCP running automatically

  RTSPServer* rtspServer = RTSPServer::createNew(*env, 8554);
  if (rtspServer == NULL)
  {
    *env << "Failed to create RTSP server: " << env->getResultMsg() << "\n";
    exit(1);
  }

  ServerMediaSession* sms
    = ServerMediaSession::createNew(*env, "ipcamera", "UPP Buffer",
                                    "Session streamed by \"testH264VideoStreamer\"",
                                    True /*SSM*/);
  sms->addSubsession(PassiveServerMediaSubsession::createNew(*videoSink, rtcp));
  rtspServer->addServerMediaSession(sms);

  char* url = rtspServer->rtspURL(sms);
  *env << "Play this stream using the URL \"" << url << "\"\n";
  delete[] url;

  // Start the streaming:
  *env << "Beginning streaming...\n";
  play();

  env->taskScheduler().doEventLoop(); // does not return

  return 0; // only to prevent compiler warning
}

//----------------------------------------------------------------------
// afterPlaying() -> restart play() to stream the next buffer
//----------------------------------------------------------------------
void afterPlaying(void* /*clientData*/)
{
    play();
}

//------------------------------------------------------------------------
// play() -> reads from the device source and starts streaming
//------------------------------------------------------------------------
void play()
{
    // Open the input, with the device as the source:
    DeviceSource* devSource
        = DeviceSource::createNew(*env);
    if (devSource == NULL)
    {
        *env << "Unable to read from \"Buffer\" as a byte-stream source\n";
        exit(1);
    }

    FramedSource* videoES = devSource;

    // Create a framer for the Video Elementary Stream:
    videoSource = H264VideoStreamFramer::createNew(*env, videoES, False);

    // Finally, start playing:
    *env << "Beginning to read from UPP...\n";
    videoSink->startPlaying(*videoSource, afterPlaying, videoSink);
}

DeviceSource.cpp

#include "DeviceSource.hh"
#include <GroupsockHelper.hh> // for "gettimeofday()"
#include <stdio.h>
#include <unistd.h>
#include <stdlib.h>
#include <fcntl.h>
#include <string.h>
#include <errno.h>
#include <string.h>
#include <unistd.h>

//static uint8_t *buf = (uint8_t*)malloc(102400);
static uint8_t buf[8192];
int upp_stream;
//static uint8_t *bufPtr = buf;

DeviceSource*
DeviceSource::createNew(UsageEnvironment& env)
{
  return new DeviceSource(env);
}

EventTriggerId DeviceSource::eventTriggerId = 0;

unsigned DeviceSource::referenceCount = 0;

DeviceSource::DeviceSource(UsageEnvironment& env): FramedSource(env)
{
  if (referenceCount == 0)
  {
    // Open the uPP device once, when the first instance is created:
    upp_stream = open("/dev/upp", O_RDWR);
  }
  ++referenceCount;

  if (eventTriggerId == 0)
  {
    // Register deliverFrame0() as an event handler that can later be
    // triggered on the event loop (safely, even from another thread):
    eventTriggerId = envir().taskScheduler().createEventTrigger(deliverFrame0);
  }
}

DeviceSource::~DeviceSource(void)
{
  --referenceCount;
  envir().taskScheduler().deleteEventTrigger(eventTriggerId);
  eventTriggerId = 0;

  if (referenceCount == 0)
  {
    // Any global (device-wide) teardown, e.g. close(upp_stream), would go here.
  }
}

int loop_count;

void DeviceSource::doGetNextFrame()
{
    // NOTE: this read() blocks the event loop until a full uPP packet arrives.
    //for (loop_count = 0; loop_count < 13; loop_count++)
    //{
        read(upp_stream, buf, 8192);
        //bufPtr += 8192;
    //}
    deliverFrame();
}

void DeviceSource::deliverFrame0(void* clientData)
{
  ((DeviceSource*)clientData)->deliverFrame();
}

void DeviceSource::deliverFrame()
{
  if (!isCurrentlyAwaitingData()) return; // we're not ready for the data yet

  u_int8_t* newFrameDataStart = (u_int8_t*) buf;
  unsigned newFrameSize = sizeof(buf);

  // Deliver the data here:
  if (newFrameSize > fMaxSize)
  {
    fFrameSize = fMaxSize;
    fNumTruncatedBytes = newFrameSize - fMaxSize;
  }
  else
  {
    fFrameSize = newFrameSize;
  }
  gettimeofday(&fPresentationTime, NULL);
  memmove(fTo, newFrameDataStart, fFrameSize);
  FramedSource::afterGetting(this);
}

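For completeness, DeviceSource.hh is not shown in the post. A minimal header matching the modified source above might look roughly like the sketch below; note that this is reconstructed from the .cpp file, and that the stock Live555 DeviceSource template additionally passes a DeviceParameters argument to createNew() and the constructor, which the code above has dropped:

#ifndef _DEVICE_SOURCE_HH
#define _DEVICE_SOURCE_HH

#include "FramedSource.hh"

class DeviceSource: public FramedSource {
public:
  static DeviceSource* createNew(UsageEnvironment& env);
  static EventTriggerId eventTriggerId;

protected:
  DeviceSource(UsageEnvironment& env);
  virtual ~DeviceSource();

private:
  virtual void doGetNextFrame();
  static void deliverFrame0(void* clientData);
  void deliverFrame();

  static unsigned referenceCount; // number of live DeviceSource instances
};

#endif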
After compiling the code with these modifications, I was able to receive the video stream in VLC player.
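One caveat worth noting for anyone adapting this code: the read() inside doGetNextFrame() blocks Live555's single-threaded event loop until a full 8192-byte uPP packet arrives. The eventTriggerId / deliverFrame0() machinery that the DeviceSource template provides exists precisely so that a separate thread can do the blocking read and then signal the event loop; TaskScheduler::triggerEvent() is the one Live555 call that is safe to invoke from another thread. A minimal sketch of that pattern, assuming it lives in DeviceSource.cpp alongside the code above (the uppReaderLoop() function and g_source pointer are illustrative names, not part of the original post):

#include <pthread.h>

static DeviceSource* g_source = NULL; // set once, after DeviceSource::createNew()

// Hypothetical reader thread: performs the blocking read off the event-loop
// thread, then wakes the scheduler, which runs deliverFrame0() -> deliverFrame():
static void* uppReaderLoop(void*) {
  for (;;) {
    ssize_t n = read(upp_stream, buf, sizeof buf); // blocks until a packet arrives
    if (n <= 0) continue;
    if (g_source != NULL) {
      g_source->envir().taskScheduler()
              .triggerEvent(DeviceSource::eventTriggerId, g_source);
    }
  }
  return NULL;
}

// Started once at setup time, e.g. right after DeviceSource::createNew():
//   pthread_t tid; pthread_create(&tid, NULL, uppReaderLoop, NULL);

With this in place, doGetNextFrame() would simply return when no data is buffered yet and wait for the trigger, instead of calling read() itself.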

Comments:
