Using Live555 to stream live video from an IP camera connected to an H264 encoder

I am using a custom board based on the Texas Instruments OMAP-L138, which essentially consists of an ARM9-based SoC plus a DSP core, and is connected to a camera lens. What I am trying to do is capture the live video stream, which is sent to the DSP for H264 encoding and then delivered over uPP in 8192-byte packets. I want to use the testH264VideoStreamer application supplied with Live555 to stream the H264-encoded video over RTSP. My modified code is shown below:

#include <liveMedia.hh>
#include <BasicUsageEnvironment.hh>
#include <GroupsockHelper.hh>
#include <stdio.h>
#include <stdlib.h>
#include <stdint.h>  // uint8_t
#include <fcntl.h>   // open()
#include <unistd.h>  // read(), close()
#include <string.h>
#include <errno.h>




UsageEnvironment* env;
H264VideoStreamFramer* videoSource;
RTPSink* videoSink;


//-------------------------------------------------------------------------------
/* Open the uPP device as a file descriptor */
int stream = open("/dev/upp", O_RDONLY);
/* Static 8192-byte buffer of unsigned 8-bit integers; it keeps its contents between invocations of play() */
static uint8_t buf[8192];
//------------------------------------------------------------------------------


//------------------------------------------------------------------------------
// Forward declaration of play()
//------------------------------------------------------------------------------
void play(); // forward


//------------------------------------------------------------------------------
// MAIN FUNCTION / ENTRY POINT 
//------------------------------------------------------------------------------
int main(int argc, char** argv) 
{
    // Begin by setting up our live555 usage environment:
    TaskScheduler* scheduler = BasicTaskScheduler::createNew();
    env = BasicUsageEnvironment::createNew(*scheduler);

    // Create 'groupsocks' for RTP and RTCP:
    struct in_addr destinationAddress;
    destinationAddress.s_addr = chooseRandomIPv4SSMAddress(*env);
    // Note: This is a multicast address.  If you wish instead to stream
    // using unicast, then you should use the "testOnDemandRTSPServer"
    // test program - not this test program - as a model.

    const unsigned short rtpPortNum = 18888;
    const unsigned short rtcpPortNum = rtpPortNum+1;
    const unsigned char ttl = 255;

    const Port rtpPort(rtpPortNum);
    const Port rtcpPort(rtcpPortNum);

    Groupsock rtpGroupsock(*env, destinationAddress, rtpPort, ttl);
    rtpGroupsock.multicastSendOnly(); // we're a SSM source
    Groupsock rtcpGroupsock(*env, destinationAddress, rtcpPort, ttl);
    rtcpGroupsock.multicastSendOnly(); // we're a SSM source

    // Create a 'H264 Video RTP' sink from the RTP 'groupsock':
    OutPacketBuffer::maxSize = 1000000;
    videoSink = H264VideoRTPSink::createNew(*env, &rtpGroupsock, 96);

    // Create (and start) a 'RTCP instance' for this RTP sink:
    const unsigned estimatedSessionBandwidth = 500; // in kbps; for RTCP b/w share
    const unsigned maxCNAMElen = 100;
    unsigned char CNAME[maxCNAMElen+1];
    gethostname((char*)CNAME, maxCNAMElen);
    CNAME[maxCNAMElen] = '\0'; // just in case
    RTCPInstance* rtcp
    = RTCPInstance::createNew(*env, &rtcpGroupsock,
                estimatedSessionBandwidth, CNAME,
                videoSink, NULL /* we're a server */,
                True /* we're a SSM source */);
    // Note: This starts RTCP running automatically

    /*Create RTSP SERVER*/
    RTSPServer* rtspServer = RTSPServer::createNew(*env, 8554);
    if (rtspServer == NULL) 
    {
         *env << "Failed to create RTSP server: " << env->getResultMsg() << "\n";
         exit(1);
    }
    ServerMediaSession* sms
        = ServerMediaSession::createNew(*env, "IPCAM @ TeReSol", "UPP Buffer",
                                        "Session streamed by \"testH264VideoStreamer\"",
                                        True /*SSM*/);
    sms->addSubsession(PassiveServerMediaSubsession::createNew(*videoSink, rtcp));
    rtspServer->addServerMediaSession(sms);

    char* url = rtspServer->rtspURL(sms);
    *env << "Play this stream using the URL \"" << url << "\"\n";
    delete[] url;

    // Start the streaming:
    *env << "Beginning streaming...\n";
    play();

    env->taskScheduler().doEventLoop(); // does not return

    return 0; // only to prevent compiler warning
}



//----------------------------------------------------------------------------------
// afterPlaying() -> Defines what to do once a buffer is streamed
//----------------------------------------------------------------------------------
void afterPlaying(void* /*clientData*/) 
{
    *env << "...done reading from upp buffer\n";
    //videoSink->stopPlaying();
    //Medium::close(videoSource);
    // Note that this also closes the input file that this source read from.

    // Start playing once again to get the next stream      
    play();

    /* We don't need to close the device while we keep reading from it; if we did, we would call close(stream). */

}



//----------------------------------------------------------------------------------------------
// play() Method -> Defines how to read and what to make of the input stream 
//----------------------------------------------------------------------------------------------
void play()
{



    /* Read sizeof buf (8192) bytes from the uPP file descriptor into buf */
    read(stream, buf, sizeof buf);
    printf("Reading from UPP into buffer\n");

    /* Wrap the buffer in a 'byte-stream memory buffer source': */
    ByteStreamMemoryBufferSource* buffSource
        = ByteStreamMemoryBufferSource::createNew(*env, buf, sizeof buf, False /*deleteBufferOnClose*/);
    /* Passing False as the fourth argument tells the source not to delete buf when it is closed */

    if (buffSource == NULL)
    {
        *env << "Unable to read from \"Buffer\" as a byte-stream source\n";
        exit(1);
    }

    FramedSource* videoES = buffSource;
    // Create a framer for the Video Elementary Stream:
    videoSource = H264VideoStreamFramer::createNew(*env, videoES, False);
    // Finally, start playing:
    *env << "Beginning to read from UPP...\n";
    videoSink->startPlaying(*videoSource, afterPlaying, videoSink);
}

The problem is that the code compiles successfully, but I cannot get the desired result: the RTSP stream in VLC is shown as playing, yet no video appears. I would appreciate any help with this. My description may be a little vague, but I am happy to explain any part in more detail.
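For reference, here is a minimal, untested sketch of the alternative I am considering, modelled on Live555's DeviceSource.cpp template: a custom FramedSource that hands each uPP packet directly to the H264 framer, instead of re-creating a ByteStreamMemoryBufferSource on every read. The class name UppSource, the blocking read() and the fixed 8192-byte packet size are my own assumptions, not Live555 API; it also assumes <sys/time.h> is included for gettimeofday().

// --- Sketch only: a custom FramedSource in the style of Live555's DeviceSource.cpp ---
class UppSource: public FramedSource
{
public:
    static UppSource* createNew(UsageEnvironment& env, int fd)
    {
        return new UppSource(env, fd);
    }

protected:
    UppSource(UsageEnvironment& env, int fd)
        : FramedSource(env), fFd(fd) {}

private:
    virtual void doGetNextFrame()
    {
        // Deliver one uPP packet straight into the downstream buffer (fTo).
        // A real implementation should hook the fd into the task scheduler
        // instead of doing a blocking read() inside the event loop.
        unsigned max = fMaxSize < 8192 ? fMaxSize : 8192;
        ssize_t n = read(fFd, fTo, max);
        if (n <= 0) { handleClosure(this); return; }

        fFrameSize = (unsigned)n;
        fNumTruncatedBytes = 0;
        gettimeofday(&fPresentationTime, NULL);

        // Tell the H264 framer / RTP sink that a new chunk is available:
        FramedSource::afterGetting(this);
    }

    int fFd;
};

// Hypothetical replacement for the body of play() above:
//     UppSource* uppSource = UppSource::createNew(*env, stream);
//     videoSource = H264VideoStreamFramer::createNew(*env, uppSource, False);
//     videoSink->startPlaying(*videoSource, afterPlaying, videoSink);

With this structure, afterPlaying() would only be called when the device stops delivering data, rather than after every single 8192-byte read, so the sink keeps pulling frames continuously.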
