Live555: X264 stream source based on "testOnDemandRTSPServer"

I am trying to create an RTSP server that streams the OpenGL output of my program. I looked at "How to write a Live555 FramedSource to allow me to stream H.264 live", but I need the stream to be unicast, so I looked at testOnDemandRTSPServer. Using the same code fails. As far as I understand it, I have to provide memory in which I store my H.264 frames so that the OnDemandServer can read them on demand.
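For context, m_queue used in FramedSource.cpp below is a small thread-safe queue of my own, not a Live555 class. A minimal sketch of it (assuming C++11) looks like this:

#include <queue>
#include <mutex>
#include <condition_variable>

// Thread-safe FIFO: the render thread pushes encoded NAL units,
// the Live555 event loop pops them on demand.
template <typename T>
class ConcurrentQueue {
public:
    void push(T const& value) {
        {
            std::lock_guard<std::mutex> lock(fMutex);
            fQueue.push(value);
        }
        fCondition.notify_one();
    }

    // Block until an element is available, then pop it into result:
    void wait_and_pop(T& result) {
        std::unique_lock<std::mutex> lock(fMutex);
        fCondition.wait(lock, [this] { return !fQueue.empty(); });
        result = fQueue.front();
        fQueue.pop();
    }

    bool empty() const {
        std::lock_guard<std::mutex> lock(fMutex);
        return fQueue.empty();
    }

private:
    mutable std::mutex fMutex;
    std::condition_variable fCondition;
    std::queue<T> fQueue;
};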

H264VideoStreamServerMediaSubsession.cpp

H264VideoStreamServerMediaSubsession*
H264VideoStreamServerMediaSubsession::createNew(UsageEnvironment& env,
                          Boolean reuseFirstSource) {
  return new H264VideoStreamServerMediaSubsession(env, reuseFirstSource);
}

H264VideoStreamServerMediaSubsession::H264VideoStreamServerMediaSubsession(UsageEnvironment& env, Boolean reuseFirstSource)
  : OnDemandServerMediaSubsession(env, reuseFirstSource), fAuxSDPLine(NULL), fDoneFlag(0), fDummyRTPSink(NULL) {
}

H264VideoStreamServerMediaSubsession::~H264VideoStreamServerMediaSubsession() {
  delete[] fAuxSDPLine;
}

static void afterPlayingDummy(void* clientData) {
  H264VideoStreamServerMediaSubsession* subsess = (H264VideoStreamServerMediaSubsession*)clientData;
  subsess->afterPlayingDummy1();
}

void H264VideoStreamServerMediaSubsession::afterPlayingDummy1() {
  // Unschedule any pending 'checking' task:
  envir().taskScheduler().unscheduleDelayedTask(nextTask());
  // Signal the event loop that we're done:
  setDoneFlag();
}

static void checkForAuxSDPLine(void* clientData) {
  H264VideoStreamServerMediaSubsession* subsess = (H264VideoStreamServerMediaSubsession*)clientData;
  subsess->checkForAuxSDPLine1();
}

void H264VideoStreamServerMediaSubsession::checkForAuxSDPLine1() {
  char const* dasl;

  if (fAuxSDPLine != NULL) {
    // Signal the event loop that we're done:
    setDoneFlag();
  } else if (fDummyRTPSink != NULL && (dasl = fDummyRTPSink->auxSDPLine()) != NULL) {
    fAuxSDPLine = strDup(dasl);
    fDummyRTPSink = NULL;

    // Signal the event loop that we're done:
    setDoneFlag();
  } else {
    // try again after a brief delay:
    int uSecsToDelay = 100000; // 100 ms
    nextTask() = envir().taskScheduler().scheduleDelayedTask(uSecsToDelay,
                  (TaskFunc*)checkForAuxSDPLine, this);
  }
}

char const* H264VideoStreamServerMediaSubsession::getAuxSDPLine(RTPSink* rtpSink, FramedSource* inputSource) {
  if (fAuxSDPLine != NULL) return fAuxSDPLine; // it's already been set up (for a previous client)

  if (fDummyRTPSink == NULL) { // we're not already setting it up for another, concurrent stream
    // Note: For H264 video files, the 'config' information ("profile-level-id" and "sprop-parameter-sets") isn't known
    // until we start reading the file.  This means that "rtpSink"s "auxSDPLine()" will be NULL initially,
    // and we need to start reading data from our file until this changes.
    fDummyRTPSink = rtpSink;

    // Start reading the file:
    fDummyRTPSink->startPlaying(*inputSource, afterPlayingDummy, this);

    // Check whether the sink's 'auxSDPLine()' is ready:
    checkForAuxSDPLine(this);
  }

  envir().taskScheduler().doEventLoop(&fDoneFlag);

  return fAuxSDPLine;
}

FramedSource* H264VideoStreamServerMediaSubsession::createNewStreamSource(unsigned /*clientSessionId*/, unsigned& estBitrate) {
  estBitrate = 500; // kbps, estimate
  megamol::remotecontrol::View3D_MRC *parent = (megamol::remotecontrol::View3D_MRC*)this->parent;
  return H264VideoStreamFramer::createNew(envir(), parent->h264FramedSource);
}

RTPSink* H264VideoStreamServerMediaSubsession::createNewRTPSink(Groupsock* rtpGroupsock, unsigned char rtpPayloadTypeIfDynamic, FramedSource* /*inputSource*/) {
  return H264VideoRTPSink::createNew(envir(), rtpGroupsock, rtpPayloadTypeIfDynamic);
}
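
The corresponding class declaration, sketched here for completeness (modeled on live555's H264VideoFileServerMediaSubsession, plus my parent back-pointer used in createNewStreamSource):

#include <OnDemandServerMediaSubsession.hh>

class H264VideoStreamServerMediaSubsession : public OnDemandServerMediaSubsession {
public:
  static H264VideoStreamServerMediaSubsession*
  createNew(UsageEnvironment& env, Boolean reuseFirstSource);

  // Used while setting up the SDP "aux" line:
  void checkForAuxSDPLine1();
  void afterPlayingDummy1();

  void* parent; // set by the server thread to the owning View3D_MRC

protected:
  H264VideoStreamServerMediaSubsession(UsageEnvironment& env, Boolean reuseFirstSource);
  virtual ~H264VideoStreamServerMediaSubsession();

  void setDoneFlag() { fDoneFlag = ~0; }

  virtual char const* getAuxSDPLine(RTPSink* rtpSink, FramedSource* inputSource);
  virtual FramedSource* createNewStreamSource(unsigned clientSessionId, unsigned& estBitrate);
  virtual RTPSink* createNewRTPSink(Groupsock* rtpGroupsock,
                                    unsigned char rtpPayloadTypeIfDynamic,
                                    FramedSource* inputSource);

private:
  char* fAuxSDPLine;
  char fDoneFlag;          // used while setting up fAuxSDPLine
  RTPSink* fDummyRTPSink;  // ditto
};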

FramedSource.cpp

H264FramedSource* H264FramedSource::createNew(UsageEnvironment& env,
                                          unsigned preferredFrameSize,
                                          unsigned playTimePerFrame)
{
    return new H264FramedSource(env, preferredFrameSize, playTimePerFrame);
}

H264FramedSource::H264FramedSource(UsageEnvironment& env,
                               unsigned preferredFrameSize,
                               unsigned playTimePerFrame)
    : FramedSource(env),
    fPreferredFrameSize(preferredFrameSize),
    fPlayTimePerFrame(playTimePerFrame),
    fLastPlayTime(0),
    fCurIndex(0)
{

    x264_param_default_preset(&param, "veryfast", "zerolatency");
    param.i_threads = 1;
    param.i_width = 1024;
    param.i_height = 768;
    param.i_fps_num = 30;
    param.i_fps_den = 1;
    // Intra refresh:
    param.i_keyint_max = 60;
    param.b_intra_refresh = 1;
    //Rate control:
    param.rc.i_rc_method = X264_RC_CRF;
    param.rc.f_rf_constant = 25;
    param.rc.f_rf_constant_max = 35;
    param.i_sps_id = 7;
    //For streaming:
    param.b_repeat_headers = 1;
    param.b_annexb = 1;
    x264_param_apply_profile(&param, "baseline");

    param.i_log_level = X264_LOG_ERROR;

    encoder = x264_encoder_open(&param);

    // x264_picture_alloc() (re)initializes the picture, so allocate first
    // and set the per-frame fields afterwards:
    x264_picture_alloc(&pic_in, X264_CSP_I420, 1024, 768);
    pic_in.i_type    = X264_TYPE_AUTO;
    pic_in.i_qpplus1 = 0;

    convertCtx = sws_getContext(1024, 768, AV_PIX_FMT_RGBA, 1024, 768, AV_PIX_FMT_YUV420P, SWS_FAST_BILINEAR, NULL, NULL, NULL);
    eventTriggerId = envir().taskScheduler().createEventTrigger(deliverFrame0);
}

H264FramedSource::~H264FramedSource()
{
    envir().taskScheduler().deleteEventTrigger(eventTriggerId);
    eventTriggerId = 0;
}

void H264FramedSource::AddToBuffer(uint8_t* buf, int surfaceSizeInBytes)
{
    // Convert the RGBA surface straight into pic_in's I420 planes:
    int srcstride = 1024*4; // bytes per row of the RGBA source
    const uint8_t* src[1] = { buf };
    sws_scale(convertCtx, src, &srcstride, 0, 768, pic_in.img.plane, pic_in.img.i_stride);

    x264_nal_t* nals = NULL;
    int i_nals = 0;
    int frame_size = x264_encoder_encode(encoder, &nals, &i_nals, &pic_in, &pic_out);

    if (frame_size >= 0)
    {
        // b_repeat_headers = 1 already makes x264 emit SPS/PPS in front of
        // every keyframe, so no separate x264_encoder_headers() call is
        // needed here (it would also overwrite nals before they are queued).
        //
        // Caution: the NAL payloads point into an encoder-internal buffer
        // that is only valid until the next x264_encoder_encode() call, so
        // they should be deep-copied if the queue is drained asynchronously.
        for (int i = 0; i < i_nals; ++i)
        {
            m_queue.push(nals[i]);
        }
    }

    envir().taskScheduler().triggerEvent(eventTriggerId, this);
}

void H264FramedSource::doGetNextFrame()
{
    deliverFrame();
}

void H264FramedSource::deliverFrame0(void* clientData)
{
    ((H264FramedSource*)clientData)->deliverFrame();
}

void H264FramedSource::deliverFrame()
{
    if (!isCurrentlyAwaitingData()) return; // we're not ready for data yet

    x264_nal_t nalToDeliver;

    if (fPlayTimePerFrame > 0 && fPreferredFrameSize > 0) {
        if (fPresentationTime.tv_sec == 0 && fPresentationTime.tv_usec == 0) {
            // This is the first frame, so use the current time:
            gettimeofday(&fPresentationTime, NULL);
        } else {
            // Increment by the play time of the previous data:
            unsigned uSeconds = fPresentationTime.tv_usec + fLastPlayTime;
            fPresentationTime.tv_sec += uSeconds/1000000;
            fPresentationTime.tv_usec = uSeconds%1000000;
        }

        // Remember the play time of this data:
        fLastPlayTime = (fPlayTimePerFrame*fFrameSize)/fPreferredFrameSize;
        fDurationInMicroseconds = fLastPlayTime;
    } else {
        // We don't know a specific play time duration for this data,
        // so just record the current time as being the 'presentation time':
        gettimeofday(&fPresentationTime, NULL);
    }

    if (!m_queue.empty())
    {
        m_queue.wait_and_pop(nalToDeliver);

        unsigned newFrameSize = nalToDeliver.i_payload;

        // Deliver the data, truncating if it exceeds the sink's buffer:
        if (newFrameSize > fMaxSize) {
            fFrameSize = fMaxSize;
            fNumTruncatedBytes = newFrameSize - fMaxSize;
        } else {
            fFrameSize = newFrameSize;
        }

        // Copy only fFrameSize bytes; copying i_payload could overrun fTo:
        memcpy(fTo, nalToDeliver.p_payload, fFrameSize);

        FramedSource::afterGetting(this);
    }
}
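
And the matching declaration for the source, again a sketch (the x264/swscale members are the ones the constructor above uses; m_queue is the ConcurrentQueue sketched near the top):

#include <stdint.h>
extern "C" {
#include <x264.h>
#include <libswscale/swscale.h>
}
#include <FramedSource.hh>

class H264FramedSource : public FramedSource {
public:
    static H264FramedSource* createNew(UsageEnvironment& env,
                                       unsigned preferredFrameSize,
                                       unsigned playTimePerFrame);

    // Called from the render thread with a raw RGBA surface:
    void AddToBuffer(uint8_t* buf, int surfaceSizeInBytes);

protected:
    H264FramedSource(UsageEnvironment& env,
                     unsigned preferredFrameSize,
                     unsigned playTimePerFrame);
    virtual ~H264FramedSource();

private:
    virtual void doGetNextFrame();
    static void deliverFrame0(void* clientData);
    void deliverFrame();

    unsigned fPreferredFrameSize;
    unsigned fPlayTimePerFrame;
    unsigned fLastPlayTime;
    unsigned fCurIndex;
    EventTriggerId eventTriggerId;

    x264_param_t param;
    x264_t* encoder;
    x264_picture_t pic_in, pic_out;
    SwsContext* convertCtx;

    ConcurrentQueue<x264_nal_t> m_queue;
};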

The relevant part of the RTSP server thread:

  RTSPServer* rtspServer = RTSPServer::createNew(*(parent->env), 8554, NULL);
  if (rtspServer == NULL) {
    *(parent->env) << "Failed to create RTSP server: " << (parent->env)->getResultMsg() << "\n";
    exit(1);
  }
  char const* streamName = "Stream";
  ServerMediaSession* sms = ServerMediaSession::createNew(*(parent->env), streamName, streamName, "x264 live stream");
  parent->h264FramedSource = H264FramedSource::createNew(*(parent->env), 0, 0);
  H264VideoStreamServerMediaSubsession *h264VideoStreamServerMediaSubsession = H264VideoStreamServerMediaSubsession::createNew(*(parent->env), true);
  h264VideoStreamServerMediaSubsession->parent = parent;
  sms->addSubsession(h264VideoStreamServerMediaSubsession);
  rtspServer->addServerMediaSession(sms);

  parent->env->taskScheduler().doEventLoop(); // does not return

Once a connection has been established, the render loop calls

h264FramedSource->AddToBuffer(videoData, 1024*768*4);
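
Here videoData is the RGBA readback of the frame that was just rendered, roughly like this (function name and buffer are illustrative):

#include <GL/gl.h>
#include <cstdint>

// Read the framebuffer back as RGBA and feed the encoder source.
// Note: glReadPixels returns rows bottom-up, so the image arrives
// vertically flipped.
static uint8_t videoData[1024 * 768 * 4];

void publishRenderedFrame(H264FramedSource* h264FramedSource)
{
    glReadPixels(0, 0, 1024, 768, GL_RGBA, GL_UNSIGNED_BYTE, videoData);
    h264FramedSource->AddToBuffer(videoData, 1024 * 768 * 4);
}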
