
WHIP: Fix bug for converting WHIP to RTMP/HLS. v5.0.208 (#3920)

1. When converting RTC to RTMP, the audio and video timestamps must be
synchronized. Whenever the synchronization status changes, whether the
stream becomes unsynchronized or synchronized, print a log to make
troubleshooting such issues easier.
2. Chrome uses STAP-A packets, so a single RTP packet carries the SPS/PPS
information. OBS WHIP, on the other hand, sends the SPS and PPS in two
separate RTP packets, so SRS needs to cache both packets until the pair
is complete (see the sketch after this list).
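Below is a minimal, self-contained sketch of the caching idea in item 2 (hypothetical types, not SRS code): remember the last SPS and the last PPS seen in separate packets, emit the AVC sequence header only once both are available, then reset so the next pair is again handled only when complete.

```cpp
// Hypothetical sketch, not SRS code: cache SPS/PPS that arrive in separate RTP packets.
#include <cstdint>
#include <cstdio>
#include <optional>
#include <vector>

struct Nalu { int type; std::vector<uint8_t> bytes; }; // H.264 NALU types: 7 = SPS, 8 = PPS

struct SpsPpsCache {
    std::optional<std::vector<uint8_t>> sps, pps;

    // Cache the NALU; return true once both SPS and PPS have been seen,
    // which is the point where an AVC sequence header can be generated.
    bool on_nalu(const Nalu& n) {
        if (n.type == 7) sps = n.bytes;       // latest SPS wins
        else if (n.type == 8) pps = n.bytes;  // latest PPS wins
        return sps.has_value() && pps.has_value();
    }

    // After emitting the sequence header, clear the cache so the next
    // SPS/PPS pair is only handled when both have been received again.
    void reset() { sps.reset(); pps.reset(); }
};

int main() {
    SpsPpsCache cache;
    std::printf("after SPS only: %d\n", cache.on_nalu({7, {0x67, 0x42}})); // 0: still waiting for PPS
    std::printf("after PPS too:  %d\n", cache.on_nalu({8, {0x68, 0xCE}})); // 1: pair is complete
    cache.reset();
    return 0;
}
```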

---------

Co-authored-by: john <hondaxiao@tencent.com>
john 2024-02-06 14:22:33 +08:00
parent ba150beb0f
commit 732569f792
7 changed files with 96 additions and 31 deletions


@@ -430,6 +430,10 @@ rtc_server {
# * Retrieve server IP automatically, from all network interfaces.
# $CANDIDATE Read the IP from ENV variable, use * if not set.
# x.x.x.x A specified IP address or DNS name, use * if 0.0.0.0.
# You can also set the candidate by the query string eip; note that you can also set the UDP port,
# for example:
#     http://localhost:1985/rtc/v1/whip/?app=live&stream=livestream&eip=192.168.3.11
#     http://localhost:1985/rtc/v1/whip/?app=live&stream=livestream&eip=192.168.3.11:18000
# @remark For Firefox, the candidate MUST be IP, MUST NOT be DNS name, see https://bugzilla.mozilla.org/show_bug.cgi?id=1239006
# @see https://ossrs.net/lts/zh-cn/docs/v4/doc/webrtc#config-candidate
# Overwrite by env SRS_RTC_SERVER_CANDIDATE
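For reference, a minimal sketch of the related configuration (the address 192.168.3.11 and port 8000 are example values, not defaults introduced by this commit); the eip query string shown above overrides the candidate per request:

```
rtc_server {
    enabled on;
    listen 8000;
    # The candidate address exposed to WebRTC clients; can be overridden
    # per request by the eip query string, e.g. ?eip=192.168.3.11:18000
    candidate 192.168.3.11;
}
```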


@@ -7,6 +7,7 @@ The changelog for SRS.
<a name="v5-changes"></a>
## SRS 5.0 Changelog
* v5.0, 2024-02-06, Merge [#3920](https://github.com/ossrs/srs/pull/3920): WHIP: Fix bug for converting WHIP to RTMP/HLS. v5.0.208 (#3920)
* v5.0, 2024-02-05, Merge [#3925](https://github.com/ossrs/srs/pull/3925): RTC: Fix video and audio track pt_ is not change in player before publisher. v5.0.207 (#3925)
* v5.0, 2024-02-05, Merge [#3923](https://github.com/ossrs/srs/pull/3923): Configure: print enabled/disable sanitizer. v5.0.206 (#3923)
* v5.0, 2023-12-30, Merge [#3916](https://github.com/ossrs/srs/pull/3916): Enhancing the compatibility of options.sh. v5.0.204 (#3916)


@@ -1332,6 +1332,9 @@ SrsRtmpFromRtcBridge::SrsRtmpFromRtcBridge(SrsLiveSource *src)
rtp_key_frame_ts_ = -1;
header_sn_ = 0;
memset(cache_video_pkts_, 0, sizeof(cache_video_pkts_));
rtp_key_frame_ts_ = -1;
sync_state_ = -1;
obs_whip_sps_ = obs_whip_pps_ = NULL;
}
SrsRtmpFromRtcBridge::~SrsRtmpFromRtcBridge()
@@ -1339,6 +1342,8 @@ SrsRtmpFromRtcBridge::~SrsRtmpFromRtcBridge()
srs_freep(codec_);
srs_freep(format);
clear_cached_video();
srs_freep(obs_whip_sps_);
srs_freep(obs_whip_pps_);
}
srs_error_t SrsRtmpFromRtcBridge::initialize(SrsRequest* r)
@@ -1392,8 +1397,18 @@ srs_error_t SrsRtmpFromRtcBridge::on_rtp(SrsRtpPacket *pkt)
// Have not received any sender report, so we can't calculate avsync_time;
// discard the packet to avoid timestamp problems in the live source
const SrsRtpHeader& h = pkt->header;
if (pkt->get_avsync_time() <= 0) {
if (sync_state_ < 0) {
srs_trace("RTC: Discard no-sync %s, ssrc=%u, seq=%u, ts=%u, state=%d", pkt->is_audio() ? "Audio" : "Video",
h.get_ssrc(), h.get_sequence(), h.get_timestamp(), sync_state_);
sync_state_ = 0;
}
return err;
} else if (sync_state_ < 1) {
srs_trace("RTC: Accept sync %s, ssrc=%u, seq=%u, ts=%u, state=%d", pkt->is_audio() ? "Audio" : "Video",
h.get_ssrc(), h.get_sequence(), h.get_timestamp(), sync_state_);
sync_state_ = 2;
}
if (pkt->is_audio()) {
@@ -1521,41 +1536,70 @@ srs_error_t SrsRtmpFromRtcBridge::packet_video_key_frame(SrsRtpPacket* pkt)
{
srs_error_t err = srs_success;
// TODO: handle sps and pps in 2 rtp packets
// For OBS WHIP, SPS/PPS/IDR frames are sent as raw RTP packets. Note that not all
// raw payloads are SPS/PPS.
bool has_sps_pps_in_raw_payload = false;
SrsRtpRawPayload* raw_payload = dynamic_cast<SrsRtpRawPayload*>(pkt->payload());
if (raw_payload) {
if (pkt->nalu_type == SrsAvcNaluTypeSPS) {
has_sps_pps_in_raw_payload = true;
srs_freep(obs_whip_sps_);
obs_whip_sps_ = pkt->copy();
} else if (pkt->nalu_type == SrsAvcNaluTypePPS) {
has_sps_pps_in_raw_payload = true;
srs_freep(obs_whip_pps_);
obs_whip_pps_ = pkt->copy();
}
// Ignore if one of OBS WHIP SPS/PPS is not ready.
if (has_sps_pps_in_raw_payload && (!obs_whip_sps_ || !obs_whip_pps_)) {
return err;
}
}
// Generally, there will be SPS+PPS+IDR in a STAP-A packet.
SrsRtpSTAPPayload* stap_payload = dynamic_cast<SrsRtpSTAPPayload*>(pkt->payload());
if (stap_payload) {
SrsSample* sps = stap_payload->get_sps();
SrsSample* pps = stap_payload->get_pps();
if (NULL == sps || NULL == pps) {
// Handle SPS/PPS in cache or STAP-A packet.
if (stap_payload || has_sps_pps_in_raw_payload) {
// Get the SPS/PPS from cache or STAP-A packet.
SrsSample* sps = stap_payload ? stap_payload->get_sps() : NULL;
if (!sps && obs_whip_sps_) sps = dynamic_cast<SrsRtpRawPayload*>(obs_whip_sps_->payload())->sample_;
SrsSample* pps = stap_payload ? stap_payload->get_pps() : NULL;
if (!pps && obs_whip_pps_) pps = dynamic_cast<SrsRtpRawPayload*>(obs_whip_pps_->payload())->sample_;
if (!sps || !pps) {
return srs_error_new(ERROR_RTC_RTP_MUXER, "no sps or pps in stap-a rtp. sps: %p, pps:%p", sps, pps);
} else {
// h264 raw to h264 packet.
std::string sh;
SrsRawH264Stream* avc = new SrsRawH264Stream();
SrsAutoFree(SrsRawH264Stream, avc);
}
if ((err = avc->mux_sequence_header(string(sps->bytes, sps->size), string(pps->bytes, pps->size), sh)) != srs_success) {
return srs_error_wrap(err, "mux sequence header");
}
// Reset SPS/PPS cache, ensuring that the next SPS/PPS will be handled when both are received.
SrsAutoFree(SrsRtpPacket, obs_whip_sps_);
SrsAutoFree(SrsRtpPacket, obs_whip_pps_);
// h264 packet to flv packet.
char* flv = NULL;
int nb_flv = 0;
if ((err = avc->mux_avc2flv(sh, SrsVideoAvcFrameTypeKeyFrame, SrsVideoAvcFrameTraitSequenceHeader, pkt->get_avsync_time(),
pkt->get_avsync_time(), &flv, &nb_flv)) != srs_success) {
return srs_error_wrap(err, "avc to flv");
}
// h264 raw to h264 packet.
std::string sh;
SrsRawH264Stream* avc = new SrsRawH264Stream();
SrsAutoFree(SrsRawH264Stream, avc);
SrsMessageHeader header;
header.initialize_video(nb_flv, pkt->get_avsync_time(), 1);
SrsCommonMessage rtmp;
if ((err = rtmp.create(&header, flv, nb_flv)) != srs_success) {
return srs_error_wrap(err, "create rtmp");
}
if ((err = avc->mux_sequence_header(string(sps->bytes, sps->size), string(pps->bytes, pps->size), sh)) != srs_success) {
return srs_error_wrap(err, "mux sequence header");
}
if ((err = source_->on_video(&rtmp)) != srs_success) {
return err;
}
// h264 packet to flv packet.
char* flv = NULL;
int nb_flv = 0;
if ((err = avc->mux_avc2flv(sh, SrsVideoAvcFrameTypeKeyFrame, SrsVideoAvcFrameTraitSequenceHeader, pkt->get_avsync_time(),
pkt->get_avsync_time(), &flv, &nb_flv)) != srs_success) {
return srs_error_wrap(err, "avc to flv");
}
SrsMessageHeader header;
header.initialize_video(nb_flv, pkt->get_avsync_time(), 1);
SrsCommonMessage rtmp;
if ((err = rtmp.create(&header, flv, nb_flv)) != srs_success) {
return srs_error_wrap(err, "create rtmp");
}
if ((err = source_->on_video(&rtmp)) != srs_success) {
return err;
}
}
@@ -1600,7 +1644,7 @@ srs_error_t SrsRtmpFromRtcBridge::packet_video_key_frame(SrsRtpPacket* pkt)
if (-1 == sn) {
if (check_frame_complete(header_sn_, tail_sn)) {
if ((err = packet_video_rtmp(header_sn_, tail_sn)) != srs_success) {
err = srs_error_wrap(err, "fail to packet key frame");
err = srs_error_wrap(err, "fail to packet frame");
}
}
} else if (-2 == sn) {
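For context on the two code paths above, here is a minimal sketch (hypothetical helper, not SRS code) of the RFC 6184 payload layouts involved: OBS WHIP sends the SPS and PPS as single-NAL-unit payloads in separate packets, while Chrome aggregates them into one STAP-A payload (NAL type 24) of 16-bit size-prefixed NALUs.

```cpp
// Hypothetical sketch, not SRS code: classify an H.264 RTP payload per RFC 6184.
#include <cstdint>
#include <cstdio>
#include <vector>

// Print the NALU types carried by one RTP payload.
static void classify_payload(const uint8_t* p, size_t n) {
    if (n == 0) return;
    int type = p[0] & 0x1F; // low 5 bits of the first byte
    if (type != 24) {       // single NAL unit packet, e.g. a lone SPS (7) or PPS (8)
        std::printf("single NALU, type=%d\n", type);
        return;
    }
    // STAP-A: skip the indicator byte, then walk the size-prefixed NALUs.
    size_t i = 1;
    while (i + 2 <= n) {
        size_t size = (size_t(p[i]) << 8) | p[i + 1];
        i += 2;
        if (size == 0 || i + size > n) break;
        std::printf("STAP-A NALU, type=%d, size=%zu\n", p[i] & 0x1F, size);
        i += size;
    }
}

int main() {
    // OBS WHIP style: SPS alone in one payload, PPS alone in the next.
    std::vector<uint8_t> sps = {0x67, 0x42, 0x00, 0x1F};
    std::vector<uint8_t> pps = {0x68, 0xCE, 0x3C, 0x80};
    classify_payload(sps.data(), sps.size());
    classify_payload(pps.data(), pps.size());

    // Chrome style: one STAP-A payload aggregating SPS and PPS.
    std::vector<uint8_t> stap = {0x78, 0x00, 0x04, 0x67, 0x42, 0x00, 0x1F,
                                 0x00, 0x04, 0x68, 0xCE, 0x3C, 0x80};
    classify_payload(stap.data(), stap.size());
    return 0;
}
```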


@@ -313,6 +313,13 @@ private:
uint16_t header_sn_;
uint16_t lost_sn_;
int64_t rtp_key_frame_ts_;
private:
// The timestamp sync state: -1 init, 0 not synced (discard logged), 2 synced (accept logged).
int sync_state_;
private:
// For OBS WHIP, the SPS and PPS are sent in dedicated RTP packets.
SrsRtpPacket* obs_whip_sps_;
SrsRtpPacket* obs_whip_pps_;
public:
SrsRtmpFromRtcBridge(SrsLiveSource *src);
virtual ~SrsRtmpFromRtcBridge();


@@ -9,6 +9,6 @@
#define VERSION_MAJOR 5
#define VERSION_MINOR 0
#define VERSION_REVISION 207
#define VERSION_REVISION 208
#endif


@@ -970,12 +970,14 @@ SrsRtpRawPayload::SrsRtpRawPayload()
{
payload = NULL;
nn_payload = 0;
sample_ = new SrsSample();
++_srs_pps_objs_rraw->sugar;
}
SrsRtpRawPayload::~SrsRtpRawPayload()
{
srs_freep(sample_);
}
uint64_t SrsRtpRawPayload::nb_bytes()
@@ -1007,6 +1009,9 @@ srs_error_t SrsRtpRawPayload::decode(SrsBuffer* buf)
payload = buf->head();
nn_payload = buf->left();
sample_->bytes = payload;
sample_->size = nn_payload;
return srs_success;
}
@@ -1016,6 +1021,7 @@ ISrsRtpPayloader* SrsRtpRawPayload::copy()
cp->payload = payload;
cp->nn_payload = nn_payload;
cp->sample_ = sample_->copy();
return cp;
}


@@ -347,6 +347,9 @@ public:
// @remark We only refer to the memory, user must free its bytes.
char* payload;
int nn_payload;
public:
// Use the whole RAW RTP payload as a sample.
SrsSample* sample_;
public:
SrsRtpRawPayload();
virtual ~SrsRtpRawPayload();