Live555: X264 live streaming source based on "testOnDemandRTSPServer"

I am trying to create an RTSP server that streams the OpenGL output of my program. I looked at "How to write a Live555 FramedSource to allow me to stream H.264 live", but I need the stream to be unicast. So I took a look at testOnDemandRTSPServer. Using the same code fails. As I understand it, I need to expose the memory in which I store my H.264 frames so that the OnDemandServer can read them on demand.

H264VideoStreamServerMediaSubsession.cpp

H264VideoStreamServerMediaSubsession*
H264VideoStreamServerMediaSubsession::createNew(UsageEnvironment& env,
                          Boolean reuseFirstSource) {
  return new H264VideoStreamServerMediaSubsession(env, reuseFirstSource);
}

H264VideoStreamServerMediaSubsession::H264VideoStreamServerMediaSubsession(UsageEnvironment& env, Boolean reuseFirstSource)
  : OnDemandServerMediaSubsession(env, reuseFirstSource), fAuxSDPLine(NULL), fDoneFlag(0), fDummyRTPSink(NULL) {
}

H264VideoStreamServerMediaSubsession::~H264VideoStreamServerMediaSubsession() {
  delete[] fAuxSDPLine;
}

static void afterPlayingDummy(void* clientData) {
  H264VideoStreamServerMediaSubsession* subsess = (H264VideoStreamServerMediaSubsession*)clientData;
  subsess->afterPlayingDummy1();
}

void H264VideoStreamServerMediaSubsession::afterPlayingDummy1() {
  // Unschedule any pending 'checking' task:
  envir().taskScheduler().unscheduleDelayedTask(nextTask());
  // Signal the event loop that we're done:
  setDoneFlag();
}

static void checkForAuxSDPLine(void* clientData) {
  H264VideoStreamServerMediaSubsession* subsess = (H264VideoStreamServerMediaSubsession*)clientData;
  subsess->checkForAuxSDPLine1();
}

void H264VideoStreamServerMediaSubsession::checkForAuxSDPLine1() {
  char const* dasl;

  if (fAuxSDPLine != NULL) {
    // Signal the event loop that we're done:
    setDoneFlag();
  } else if (fDummyRTPSink != NULL && (dasl = fDummyRTPSink->auxSDPLine()) != NULL) {
    fAuxSDPLine = strDup(dasl);
    fDummyRTPSink = NULL;

    // Signal the event loop that we're done:
    setDoneFlag();
  } else {
    // try again after a brief delay:
    int uSecsToDelay = 100000; // 100 ms
    nextTask() = envir().taskScheduler().scheduleDelayedTask(uSecsToDelay,
                  (TaskFunc*)checkForAuxSDPLine, this);
  }
}

char const* H264VideoStreamServerMediaSubsession::getAuxSDPLine(RTPSink* rtpSink, FramedSource* inputSource) {
  if (fAuxSDPLine != NULL) return fAuxSDPLine; // it's already been set up (for a previous client)

  if (fDummyRTPSink == NULL) { // we're not already setting it up for another, concurrent stream
    // Note: For H264 video files, the 'config' information ("profile-level-id" and "sprop-parameter-sets") isn't known
    // until we start reading the file.  This means that "rtpSink"s "auxSDPLine()" will be NULL initially,
    // and we need to start reading data from our file until this changes.
    fDummyRTPSink = rtpSink;

    // Start reading the file:
    fDummyRTPSink->startPlaying(*inputSource, afterPlayingDummy, this);

    // Check whether the sink's 'auxSDPLine()' is ready:
    checkForAuxSDPLine(this);
  }

  envir().taskScheduler().doEventLoop(&fDoneFlag);

  return fAuxSDPLine;
}

FramedSource* H264VideoStreamServerMediaSubsession::createNewStreamSource(unsigned /*clientSessionId*/, unsigned& estBitrate) {
  estBitrate = 500; // kbps
  megamol::remotecontrol::View3D_MRC *parent = (megamol::remotecontrol::View3D_MRC*)this->parent;
  return H264VideoStreamFramer::createNew(envir(), parent->h264FramedSource);
}

RTPSink* H264VideoStreamServerMediaSubsession::createNewRTPSink(Groupsock* rtpGroupsock, unsigned char rtpPayloadTypeIfDynamic, FramedSource* /*inputSource*/) {
  return H264VideoRTPSink::createNew(envir(), rtpGroupsock, rtpPayloadTypeIfDynamic);
}
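
The matching class declaration is not shown above; a minimal header sketch, assuming the usual layout of the live555 file-subsession classes (the setDoneFlag() body and the exact type of the parent member are assumptions, only the members referenced in the .cpp are known), would be:

// H264VideoStreamServerMediaSubsession.hh -- minimal sketch, not the original header.
#ifndef _H264_VIDEO_STREAM_SERVER_MEDIA_SUBSESSION_HH
#define _H264_VIDEO_STREAM_SERVER_MEDIA_SUBSESSION_HH

#include "OnDemandServerMediaSubsession.hh"

class H264VideoStreamServerMediaSubsession : public OnDemandServerMediaSubsession {
public:
  static H264VideoStreamServerMediaSubsession* createNew(UsageEnvironment& env, Boolean reuseFirstSource);

  void checkForAuxSDPLine1();   // called by the polling task until the SDP line is ready
  void afterPlayingDummy1();    // called when the dummy sink stops playing

  void* parent;                 // back-pointer set by the server thread (cast to View3D_MRC* in the .cpp)

protected:
  H264VideoStreamServerMediaSubsession(UsageEnvironment& env, Boolean reuseFirstSource);
  virtual ~H264VideoStreamServerMediaSubsession();

  void setDoneFlag() { fDoneFlag = ~0; }  // unblocks doEventLoop(&fDoneFlag)

  virtual char const* getAuxSDPLine(RTPSink* rtpSink, FramedSource* inputSource);
  virtual FramedSource* createNewStreamSource(unsigned clientSessionId, unsigned& estBitrate);
  virtual RTPSink* createNewRTPSink(Groupsock* rtpGroupsock, unsigned char rtpPayloadTypeIfDynamic,
                                    FramedSource* inputSource);

private:
  char* fAuxSDPLine;
  char fDoneFlag;          // watch variable for the event loop in getAuxSDPLine()
  RTPSink* fDummyRTPSink;
};

#endif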

FramedSource.cpp

H264FramedSource* H264FramedSource::createNew(UsageEnvironment& env,
                                          unsigned preferredFrameSize,
                                          unsigned playTimePerFrame)
{
    return new H264FramedSource(env, preferredFrameSize, playTimePerFrame);
}

H264FramedSource::H264FramedSource(UsageEnvironment& env,
                               unsigned preferredFrameSize,
                               unsigned playTimePerFrame)
    : FramedSource(env),
    fPreferredFrameSize(preferredFrameSize),
    fPlayTimePerFrame(playTimePerFrame),
    fLastPlayTime(0),
    fCurIndex(0)
{

    x264_param_default_preset(&param, "veryfast", "zerolatency");
    param.i_threads = 1;
    param.i_width = 1024;
    param.i_height = 768;
    param.i_fps_num = 30;
    param.i_fps_den = 1;
    // Intra refres:
    param.i_keyint_max = 60;
    param.b_intra_refresh = 1;
    //Rate control:
    param.rc.i_rc_method = X264_RC_CRF;
    param.rc.f_rf_constant = 25;
    param.rc.f_rf_constant_max = 35;
    param.i_sps_id = 7;
    //For streaming:
    param.b_repeat_headers = 1;
    param.b_annexb = 1;
    x264_param_apply_profile(&param, "baseline");

    param.i_log_level = X264_LOG_ERROR;

    encoder = x264_encoder_open(&param);
    pic_in.i_type            = X264_TYPE_AUTO;
    pic_in.i_qpplus1         = 0;
    pic_in.img.i_csp         = X264_CSP_I420;
    pic_in.img.i_plane       = 3;


    x264_picture_alloc(&pic_in, X264_CSP_I420, 1024, 768);

    convertCtx = sws_getContext(1024, 768, PIX_FMT_RGBA, 1024, 768, PIX_FMT_YUV420P, SWS_FAST_BILINEAR, NULL, NULL, NULL);
    eventTriggerId = envir().taskScheduler().createEventTrigger(deliverFrame0);
}

H264FramedSource::~H264FramedSource()
{
    envir().taskScheduler().deleteEventTrigger(eventTriggerId);
    eventTriggerId = 0;
}

void H264FramedSource::AddToBuffer(uint8_t* buf, int surfaceSizeInBytes)
{
    uint8_t* surfaceData = (new uint8_t[surfaceSizeInBytes]);

    memcpy(surfaceData, buf, surfaceSizeInBytes);

    int srcstride = 1024*4;
    sws_scale(convertCtx, &surfaceData, &srcstride,0, 768, pic_in.img.plane, pic_in.img.i_stride);
    x264_nal_t* nals = NULL;
    int i_nals = 0;
    int frame_size = -1;


    frame_size = x264_encoder_encode(encoder, &nals, &i_nals, &pic_in, &pic_out);

    static bool finished = false;

    if (frame_size >= 0)
    {
        static bool alreadydone = false;
        if (!alreadydone)
        {
            x264_encoder_headers(encoder, &nals, &i_nals);
            alreadydone = true;
        }
        for (int i = 0; i < i_nals; ++i)
        {
            m_queue.push(nals[i]);
        }
    }
    delete[] surfaceData;
    surfaceData = nullptr;

    envir().taskScheduler().triggerEvent(eventTriggerId, this);
}

void H264FramedSource::doGetNextFrame()
{
    deliverFrame();
}

void H264FramedSource::deliverFrame0(void* clientData)
{
    ((H264FramedSource*)clientData)->deliverFrame();
}

void H264FramedSource::deliverFrame()
{
    x264_nal_t nalToDeliver;

    if (fPlayTimePerFrame > 0 && fPreferredFrameSize > 0) {
        if (fPresentationTime.tv_sec == 0 && fPresentationTime.tv_usec == 0) {
            // This is the first frame, so use the current time:
            gettimeofday(&fPresentationTime, NULL);
        } else {
            // Increment by the play time of the previous data:
            unsigned uSeconds = fPresentationTime.tv_usec + fLastPlayTime;
            fPresentationTime.tv_sec += uSeconds/1000000;
            fPresentationTime.tv_usec = uSeconds%1000000;
        }

        // Remember the play time of this data:
        fLastPlayTime = (fPlayTimePerFrame*fFrameSize)/fPreferredFrameSize;
        fDurationInMicroseconds = fLastPlayTime;
    } else {
        // We don't know a specific play time duration for this data,
        // so just record the current time as being the 'presentation time':
        gettimeofday(&fPresentationTime, NULL);
    }

    if (!m_queue.empty())
    {
        m_queue.wait_and_pop(nalToDeliver);

        uint8_t* newFrameDataStart = (uint8_t*)(nalToDeliver.p_payload);
        unsigned newFrameSize = nalToDeliver.i_payload;

        // Deliver the data here, truncating if it does not fit into the sink's buffer:
        if (newFrameSize > fMaxSize) {
            fFrameSize = fMaxSize;
            fNumTruncatedBytes = newFrameSize - fMaxSize;
        } else {
            fFrameSize = newFrameSize;
        }

        memcpy(fTo, newFrameDataStart, fFrameSize);

        FramedSource::afterGetting(this);
    }
}
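
m_queue (used above with push(), empty() and wait_and_pop()) is not shown; a minimal sketch of such a mutex-protected queue, assuming exactly that interface, could look like this:

// concurrent_queue.hh -- sketch of the thread-safe queue assumed by H264FramedSource::m_queue.
#include <queue>
#include <mutex>
#include <condition_variable>

template <typename T>
class concurrent_queue {
public:
    void push(T const& item) {
        {
            std::lock_guard<std::mutex> lock(m_mutex);
            m_data.push(item);
        }
        m_cond.notify_one();  // wake a consumer blocked in wait_and_pop()
    }

    bool empty() const {
        std::lock_guard<std::mutex> lock(m_mutex);
        return m_data.empty();
    }

    void wait_and_pop(T& item) {
        std::unique_lock<std::mutex> lock(m_mutex);
        m_cond.wait(lock, [this] { return !m_data.empty(); });  // block until data is available
        item = m_data.front();
        m_data.pop();
    }

private:
    mutable std::mutex m_mutex;
    std::condition_variable m_cond;
    std::queue<T> m_data;
};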

The relevant part of the RTSP server thread

  RTSPServer* rtspServer = RTSPServer::createNew(*(parent->env), 8554, NULL);
  if (rtspServer == NULL) {
    *(parent->env) << "Failed to create RTSP server: " << (parent->env)->getResultMsg() << "\n";
    exit(1);
  }
  char const* streamName = "Stream";
  parent->h264FramedSource = H264FramedSource::createNew(*(parent->env), 0, 0);
  H264VideoStreamServerMediaSubsession *h264VideoStreamServerMediaSubsession = H264VideoStreamServerMediaSubsession::createNew(*(parent->env), true);
  h264VideoStreamServerMediaSubsession->parent = parent;
  ServerMediaSession* sms = ServerMediaSession::createNew(*(parent->env), streamName, streamName); // session that clients request under "streamName"
  sms->addSubsession(h264VideoStreamServerMediaSubsession);
  rtspServer->addServerMediaSession(sms);

  parent->env->taskScheduler().doEventLoop(); // does not return
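
For reference, testOnDemandRTSPServer also prints the per-stream URL after the session is added; the same could be done here, before the call to doEventLoop() since that call never returns (a sketch, not part of the original code):

  // Announce the stream's URL, e.g. rtsp://<host>:8554/Stream
  char* url = rtspServer->rtspURL(sms);
  *(parent->env) << "Play this stream using the URL \"" << url << "\"\n";
  delete[] url;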

Once a connection exists, the render loop calls

h264FramedSource->AddToBuffer(videoData, 1024*768*4);
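
where videoData holds the 1024x768 RGBA framebuffer contents. A hypothetical read-back in the render loop might look like the following; the buffer handling and GL calls are assumptions, only AddToBuffer() and the frame size come from the code above:

// Hypothetical render-loop snippet: read the back buffer and hand the RGBA pixels to the live source.
static uint8_t videoData[1024 * 768 * 4];

glReadBuffer(GL_BACK);
glPixelStorei(GL_PACK_ALIGNMENT, 1);                                  // rows are tightly packed
glReadPixels(0, 0, 1024, 768, GL_RGBA, GL_UNSIGNED_BYTE, videoData);  // note: rows come back bottom-up

h264FramedSource->AddToBuffer(videoData, 1024 * 768 * 4);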
