1
0
Fork 0
mirror of https://github.com/ossrs/srs.git synced 2025-03-09 15:49:59 +00:00
This commit is contained in:
Haibo Chen 2025-03-04 08:02:59 +00:00 committed by GitHub
commit ea01064c44
No known key found for this signature in database
GPG key ID: B5690EEEBB952194
14 changed files with 1004 additions and 146 deletions

View file

@ -1068,7 +1068,7 @@ srs_error_t SrsHlsController::write_video(SrsVideoFrame* frame, int64_t dts)
// Refresh the codec ASAP.
if (muxer->latest_vcodec() != frame->vcodec()->id) {
srs_trace("HLS: Switch video codec %d(%s) to %d(%s)", muxer->latest_acodec(), srs_video_codec_id2str(muxer->latest_vcodec()).c_str(),
srs_trace("HLS: Switch video codec %d(%s) to %d(%s)", muxer->latest_vcodec(), srs_video_codec_id2str(muxer->latest_vcodec()).c_str(),
frame->vcodec()->id, srs_video_codec_id2str(frame->vcodec()->id).c_str());
muxer->set_latest_vcodec(frame->vcodec()->id);
}

View file

@ -2598,6 +2598,51 @@ bool srs_sdp_has_h264_profile(const SrsSdp& sdp, const string& profile)
return false;
}
// Whether the payload's fmtp line declares the given H.265 profile-id.
// Returns false for a missing or malformed fmtp (the parse error is discarded).
bool srs_sdp_has_h265_profile(const SrsMediaPayloadType& payload_type, const string& profile)
{
    // No fmtp line means no profile information at all.
    if (payload_type.format_specific_param_.empty()) {
        return false;
    }

    // A malformed fmtp is treated as "profile not present", not as a failure.
    H265SpecificParam h265_param;
    srs_error_t err = srs_parse_h265_fmtp(payload_type.format_specific_param_, h265_param);
    if (err != srs_success) {
        srs_error_reset(err);
        return false;
    }

    return h265_param.profile_id == profile;
}
bool srs_sdp_has_h265_profile(const SrsSdp& sdp, const string& profile)
{
for (size_t i = 0; i < sdp.media_descs_.size(); ++i) {
const SrsMediaDesc& desc = sdp.media_descs_[i];
if (!desc.is_video()) {
continue;
}
std::vector<SrsMediaPayloadType> payloads = desc.find_media_with_encoding_name("H265");
if (payloads.empty()) {
continue;
}
for (std::vector<SrsMediaPayloadType>::iterator it = payloads.begin(); it != payloads.end(); ++it) {
const SrsMediaPayloadType& payload_type = *it;
if (srs_sdp_has_h265_profile(payload_type, profile)) {
return true;
}
}
}
return false;
}
srs_error_t SrsRtcConnection::negotiate_publish_capability(SrsRtcUserConfig* ruc, SrsRtcSourceDescription* stream_desc)
{
srs_error_t err = srs_success;
@ -3041,8 +3086,6 @@ srs_error_t SrsRtcConnection::negotiate_play_capability(SrsRtcUserConfig* ruc, s
bool nack_enabled = _srs_config->get_rtc_nack_enabled(req->vhost);
bool twcc_enabled = _srs_config->get_rtc_twcc_enabled(req->vhost);
// TODO: FIME: Should check packetization-mode=1 also.
bool has_42e01f = srs_sdp_has_h264_profile(remote_sdp, "42e01f");
SrsSharedPtr<SrsRtcSource> source;
if ((err = _srs_rtc_sources->fetch_or_create(req, source)) != srs_success) {
@ -3083,56 +3126,87 @@ srs_error_t SrsRtcConnection::negotiate_play_capability(SrsRtcUserConfig* ruc, s
remote_payload = payloads.at(0);
track_descs = source->get_track_desc("audio", "opus");
} else if (remote_media_desc.is_video() && ruc->codec_ == "av1") {
std::vector<SrsMediaPayloadType> payloads = remote_media_desc.find_media_with_encoding_name("AV1");
if (payloads.empty()) {
// Be compatible with the Chrome M96, still check the AV1X encoding name
// @see https://bugs.chromium.org/p/webrtc/issues/detail?id=13166
payloads = remote_media_desc.find_media_with_encoding_name("AV1X");
}
if (payloads.empty()) {
return srs_error_new(ERROR_RTC_SDP_EXCHANGE, "no found valid AV1 payload type");
}
remote_payload = payloads.at(0);
track_descs = source->get_track_desc("video", "AV1");
if (track_descs.empty()) {
// Be compatible with the Chrome M96, still check the AV1X encoding name
// @see https://bugs.chromium.org/p/webrtc/issues/detail?id=13166
track_descs = source->get_track_desc("video", "AV1X");
}
} else if (remote_media_desc.is_video() && ruc->codec_ == "hevc") {
std::vector<SrsMediaPayloadType> payloads = remote_media_desc.find_media_with_encoding_name("H265");
if (payloads.empty()) {
return srs_error_new(ERROR_RTC_SDP_EXCHANGE, "no valid found h265 payload type");
}
remote_payload = payloads.at(0);
// TODO: FIXME: pick up a profile for HEVC.
// @see https://www.rfc-editor.org/rfc/rfc7798#section-7.2.1
track_descs = source->get_track_desc("video", "H265");
} else if (remote_media_desc.is_video()) {
// TODO: check opus format specific param
vector<SrsMediaPayloadType> payloads = remote_media_desc.find_media_with_encoding_name("H264");
if (payloads.empty()) {
return srs_error_new(ERROR_RTC_SDP_EXCHANGE, "no valid found h264 payload type");
}
remote_payload = payloads.at(0);
for (int j = 0; j < (int)payloads.size(); j++) {
const SrsMediaPayloadType& payload = payloads.at(j);
// If exists 42e01f profile, choose it; otherwise, use the first payload.
// TODO: FIME: Should check packetization-mode=1 also.
if (!has_42e01f || srs_sdp_has_h264_profile(payload, "42e01f")) {
remote_payload = payload;
break;
std::string prefer_codec = ruc->codec_;
if (prefer_codec.empty()) {
// Get the source codec if not specified.
std::vector<SrsRtcTrackDescription*> track_descs = source->get_track_desc("video", "");
if (!track_descs.empty()) {
std::string codec_name = track_descs.at(0)->media_->name_;
std::transform(codec_name.begin(), codec_name.end(), codec_name.begin(), ::tolower);
if (codec_name == "h265") {
prefer_codec = "hevc";
} else {
prefer_codec = codec_name;
}
} else {
return srs_error_new(ERROR_RTC_SDP_EXCHANGE, "no video track in source");
}
}
track_descs = source->get_track_desc("video", "H264");
if (prefer_codec == "av1") {
std::vector<SrsMediaPayloadType> payloads = remote_media_desc.find_media_with_encoding_name("AV1");
if (payloads.empty()) {
// Be compatible with the Chrome M96, still check the AV1X encoding name
// @see https://bugs.chromium.org/p/webrtc/issues/detail?id=13166
payloads = remote_media_desc.find_media_with_encoding_name("AV1X");
}
if (payloads.empty()) {
return srs_error_new(ERROR_RTC_SDP_EXCHANGE, "no found valid AV1 payload type");
}
remote_payload = payloads.at(0);
track_descs = source->get_track_desc("video", "AV1");
if (track_descs.empty()) {
// Be compatible with the Chrome M96, still check the AV1X encoding name
// @see https://bugs.chromium.org/p/webrtc/issues/detail?id=13166
track_descs = source->get_track_desc("video", "AV1X");
}
} else if (prefer_codec == "hevc") {
std::vector<SrsMediaPayloadType> payloads = remote_media_desc.find_media_with_encoding_name("H265");
if (payloads.empty()) {
return srs_error_new(ERROR_RTC_SDP_EXCHANGE, "no valid found h265 payload type");
}
// @see https://www.rfc-editor.org/rfc/rfc7798#section-7.2.1
bool has_main_profile = srs_sdp_has_h265_profile(remote_sdp, "1");
remote_payload = payloads.at(0);
for (int j = 0; j < (int)payloads.size(); j++) {
const SrsMediaPayloadType& payload = payloads.at(j);
// For H.265, we only check if profile-id=1 (Main Profile)
// Format example: level-id=180;profile-id=1;tier-flag=0;tx-mode=SRST
if (!has_main_profile || srs_sdp_has_h265_profile(payload, "1")) {
remote_payload = payload;
break;
}
}
track_descs = source->get_track_desc("video", "H265");
} else {
vector<SrsMediaPayloadType> payloads = remote_media_desc.find_media_with_encoding_name("H264");
if (payloads.empty()) {
return srs_error_new(ERROR_RTC_SDP_EXCHANGE, "no valid found h264 payload type");
}
// TODO: FIME: Should check packetization-mode=1 also.
bool has_42e01f = srs_sdp_has_h264_profile(remote_sdp, "42e01f");
remote_payload = payloads.at(0);
for (int j = 0; j < (int)payloads.size(); j++) {
const SrsMediaPayloadType& payload = payloads.at(j);
// If exists 42e01f profile, choose it; otherwise, use the first payload.
// TODO: FIME: Should check packetization-mode=1 also.
if (!has_42e01f || srs_sdp_has_h264_profile(payload, "42e01f")) {
remote_payload = payload;
break;
}
}
track_descs = source->get_track_desc("video", "H264");
}
}
for (int j = 0; j < (int)track_descs.size(); ++j) {
@ -3238,7 +3312,11 @@ void video_track_generate_play_offer(SrsRtcTrackDescription* track, string mid,
SrsVideoPayload* payload = (SrsVideoPayload*)track->media_;
local_media_desc.payload_types_.push_back(payload->generate_media_payload_type());
if (payload->name_ == "H265") {
local_media_desc.payload_types_.push_back(payload->generate_media_payload_type_h265());
} else {
local_media_desc.payload_types_.push_back(payload->generate_media_payload_type());
}
if (track->red_) {
SrsRedPayload* red_payload = (SrsRedPayload*)track->red_;

View file

@ -92,6 +92,42 @@ srs_error_t srs_parse_h264_fmtp(const std::string& fmtp, H264SpecificParam& h264
return err;
}
// Parse an H.265 fmtp attribute line into its specific parameters.
// The fmtp is a semicolon-separated key=value list, for example:
//      level-id=180;profile-id=1;tier-flag=0;tx-mode=SRST
// All four parameters are required; a missing one yields ERROR_RTC_SDP_DECODE.
// @see https://www.rfc-editor.org/rfc/rfc7798#section-7.2.1
srs_error_t srs_parse_h265_fmtp(const std::string& fmtp, H265SpecificParam& h265_param)
{
    srs_error_t err = srs_success;

    std::vector<std::string> attrs = srs_string_split(fmtp, ";");
    for (size_t i = 0; i < attrs.size(); ++i) {
        // Ignore fragments that are not a simple key=value pair.
        std::vector<std::string> kv = srs_string_split(attrs[i], "=");
        if (kv.size() != 2) continue;

        const std::string& key = kv[0];
        const std::string& value = kv[1];
        if (key == "level-id") {
            h265_param.level_id = value;
        } else if (key == "profile-id") {
            h265_param.profile_id = value;
        } else if (key == "tier-flag") {
            h265_param.tier_flag = value;
        } else if (key == "tx-mode") {
            h265_param.tx_mode = value;
        }
    }

    // Every parameter is mandatory; report the first one missing.
    if (h265_param.level_id.empty()) {
        return srs_error_new(ERROR_RTC_SDP_DECODE, "no h265 param: level-id");
    }
    if (h265_param.profile_id.empty()) {
        return srs_error_new(ERROR_RTC_SDP_DECODE, "no h265 param: profile-id");
    }
    if (h265_param.tier_flag.empty()) {
        return srs_error_new(ERROR_RTC_SDP_DECODE, "no h265 param: tier-flag");
    }
    if (h265_param.tx_mode.empty()) {
        return srs_error_new(ERROR_RTC_SDP_DECODE, "no h265 param: tx-mode");
    }

    return err;
}
SrsSessionInfo::SrsSessionInfo()
{
}

View file

@ -97,8 +97,16 @@ struct H264SpecificParam
std::string level_asymmerty_allow;
};
extern srs_error_t srs_parse_h264_fmtp(const std::string& fmtp, H264SpecificParam& h264_param);
// The H.265 (HEVC) format specific parameters carried on the SDP fmtp line,
// for example: level-id=180;profile-id=1;tier-flag=0;tx-mode=SRST
// @see https://www.rfc-editor.org/rfc/rfc7798#section-7.2.1
struct H265SpecificParam
{
    std::string level_id;   // The level-id fmtp attribute, e.g. "180".
    std::string profile_id; // The profile-id fmtp attribute, e.g. "1" for Main.
    std::string tier_flag;  // The tier-flag fmtp attribute, e.g. "0".
    std::string tx_mode;    // The tx-mode fmtp attribute, e.g. "SRST".
};
extern srs_error_t srs_parse_h264_fmtp(const std::string& fmtp, H264SpecificParam& h264_param);
extern srs_error_t srs_parse_h265_fmtp(const std::string& fmtp, H265SpecificParam& h265_param);
class SrsMediaPayloadType
{
public:

View file

@ -785,7 +785,15 @@ std::vector<SrsRtcTrackDescription*> SrsRtcSource::get_track_desc(std::string ty
if (type == "video") {
std::vector<SrsRtcTrackDescription*>::iterator it = stream_desc_->video_track_descs_.begin();
while (it != stream_desc_->video_track_descs_.end() ){
track_descs.push_back(*it);
if (media_name.empty()) {
track_descs.push_back(*it);
} else {
string name = (*it)->media_->name_;
std::transform(name.begin(), name.end(), name.begin(), static_cast<int(*)(int)>(std::toupper));
if (name == media_name) {
track_descs.push_back(*it);
}
}
++it;
}
}
@ -1045,6 +1053,13 @@ srs_error_t SrsRtcRtpBuilder::package_opus(SrsAudioFrame* audio, SrsRtpPacket* p
return err;
}
// Free every RTP packet in the vector and leave the vector empty.
void cleanup_packets(vector<SrsRtpPacket*>& packets) {
    for (SrsRtpPacket*& packet : packets) {
        srs_freep(packet);
    }
    packets.clear();
}
srs_error_t SrsRtcRtpBuilder::on_video(SrsSharedPtrMessage* msg)
{
srs_error_t err = srs_success;
@ -1065,11 +1080,18 @@ srs_error_t SrsRtcRtpBuilder::on_video(SrsSharedPtrMessage* msg)
return err;
}
// WebRTC does NOT support HEVC.
#ifdef SRS_H265
if (format->vcodec->id == SrsVideoCodecIdHEVC) {
// support video codec: h264/h265
SrsVideoCodecId vcodec = format->vcodec->id;
if (vcodec != SrsVideoCodecIdAVC && vcodec != SrsVideoCodecIdHEVC) {
return err;
}
#ifdef SRS_H265
if (vcodec == SrsVideoCodecIdHEVC) {
if ((err = bridge_->update_codec(vcodec)) != srs_success) {
return srs_error_wrap(err, "update codec");
}
}
#endif
bool has_idr = false;
@ -1096,6 +1118,7 @@ srs_error_t SrsRtcRtpBuilder::on_video(SrsSharedPtrMessage* msg)
vector<SrsRtpPacket*> pkts;
if (merge_nalus && nn_samples > 1) {
if ((err = package_nalus(msg, samples, pkts)) != srs_success) {
cleanup_packets(pkts);
return srs_error_wrap(err, "package nalus as one");
}
} else {
@ -1105,10 +1128,12 @@ srs_error_t SrsRtcRtpBuilder::on_video(SrsSharedPtrMessage* msg)
if (sample->size <= kRtpMaxPayloadSize) {
if ((err = package_single_nalu(msg, sample, pkts)) != srs_success) {
cleanup_packets(pkts);
return srs_error_wrap(err, "package single nalu");
}
} else {
if ((err = package_fu_a(msg, sample, kRtpMaxPayloadSize, pkts)) != srs_success) {
cleanup_packets(pkts);
return srs_error_wrap(err, "package fu-a");
}
}
@ -1149,10 +1174,16 @@ srs_error_t SrsRtcRtpBuilder::filter(SrsSharedPtrMessage* msg, SrsFormat* format
// Because RTC does not support B-frame, so we will drop them.
// TODO: Drop B-frame in better way, which not cause picture corruption.
if (!keep_bframe && format->vcodec->id == SrsVideoCodecIdAVC) {
bool is_b_frame;
if ((err = SrsVideoFrame::parse_avc_b_frame(sample, is_b_frame)) != srs_success) {
return srs_error_wrap(err, "parse bframe");
if (!keep_bframe) {
bool is_b_frame = false;
if (format->vcodec->id == SrsVideoCodecIdAVC) {
if ((err = SrsVideoFrame::parse_avc_b_frame(sample, is_b_frame)) != srs_success) {
return srs_error_wrap(err, "parse bframe");
}
} else if (format->vcodec->id == SrsVideoCodecIdHEVC) {
if ((err = SrsVideoFrame::parse_hevc_b_frame(sample, format, is_b_frame)) != srs_success) {
return srs_error_wrap(err, "parse bframe");
}
}
if (is_b_frame) {
continue;
@ -1174,53 +1205,59 @@ srs_error_t SrsRtcRtpBuilder::package_stap_a(SrsSharedPtrMessage* msg, SrsRtpPac
return err;
}
// Note that the sps/pps may change, so we should copy it.
const vector<char>& sps = format->vcodec->sequenceParameterSetNALUnit;
const vector<char>& pps = format->vcodec->pictureParameterSetNALUnit;
if (sps.empty() || pps.empty()) {
return srs_error_new(ERROR_RTC_RTP_MUXER, "sps/pps empty");
}
pkt->header.set_payload_type(video_payload_type_);
pkt->header.set_ssrc(video_ssrc_);
pkt->frame_type = SrsFrameTypeVideo;
pkt->nalu_type = (SrsAvcNaluType)kStapA;
pkt->header.set_marker(false);
pkt->header.set_sequence(video_sequence++);
pkt->header.set_timestamp(msg->timestamp * 90);
SrsRtpSTAPPayload* stap = new SrsRtpSTAPPayload();
pkt->set_payload(stap, SrsRtspPacketPayloadTypeSTAP);
ISrsRtpPayloader* stap = NULL;
vector<vector<char>*> params;
int size = 0;
if (format->vcodec->id == SrsVideoCodecIdHEVC) {
for (int i = 0; i < format->vcodec->hevc_dec_conf_record_.nalu_vec.size(); i++) {
if (format->vcodec->hevc_dec_conf_record_.nalu_vec[i].nal_unit_type == SrsHevcNaluType_VPS
|| format->vcodec->hevc_dec_conf_record_.nalu_vec[i].nal_unit_type == SrsHevcNaluType_SPS
|| format->vcodec->hevc_dec_conf_record_.nalu_vec[i].nal_unit_type == SrsHevcNaluType_PPS) {
vector<char>& nalu = (vector<char>&)format->vcodec->hevc_dec_conf_record_.nalu_vec[i].nal_data_vec[0].nal_unit_data;
params.push_back(&nalu);
size += format->vcodec->hevc_dec_conf_record_.nalu_vec[i].nal_data_vec[0].nal_unit_length;
}
}
uint8_t header = sps[0];
stap->nri = (SrsAvcNaluType)header;
stap = new SrsRtpSTAPPayloadHevc();
pkt->set_payload(stap, SrsRtspPacketPayloadTypeSTAPHevc);
pkt->nalu_type = kStapHevc;
} else if (format->vcodec->id == SrsVideoCodecIdAVC) {
params.push_back(&format->vcodec->sequenceParameterSetNALUnit);
params.push_back(&format->vcodec->pictureParameterSetNALUnit);
size = format->vcodec->sequenceParameterSetNALUnit.size() + format->vcodec->pictureParameterSetNALUnit.size();
// Copy the SPS/PPS bytes, because it may change.
int size = (int)(sps.size() + pps.size());
stap = new SrsRtpSTAPPayload();
pkt->set_payload(stap, SrsRtspPacketPayloadTypeSTAP);
pkt->nalu_type = kStapA;
}
if (size == 0) {
return srs_error_new(ERROR_RTC_RTP_MUXER, "vps/sps/pps empty");
}
char* payload = pkt->wrap(size);
if (true) {
for (vector<char>* param : params) {
SrsSample* sample = new SrsSample();
sample->bytes = payload;
sample->size = (int)sps.size();
stap->nalus.push_back(sample);
sample->size = param->size();
if (format->vcodec->id == SrsVideoCodecIdHEVC) {
static_cast<SrsRtpSTAPPayloadHevc*>(stap)->nalus.push_back(sample);
} else {
static_cast<SrsRtpSTAPPayload*>(stap)->nalus.push_back(sample);
}
memcpy(payload, (char*)&sps[0], sps.size());
payload += (int)sps.size();
memcpy(payload, (char*)param->data(), param->size());
payload += (int)param->size();
}
if (true) {
SrsSample* sample = new SrsSample();
sample->bytes = payload;
sample->size = (int)pps.size();
stap->nalus.push_back(sample);
memcpy(payload, (char*)&pps[0], pps.size());
payload += (int)pps.size();
}
srs_info("RTC STAP-A seq=%u, sps %d, pps %d bytes", pkt->header.get_sequence(), sps.size(), pps.size());
return err;
}
@ -1228,8 +1265,14 @@ srs_error_t SrsRtcRtpBuilder::package_nalus(SrsSharedPtrMessage* msg, const vect
{
srs_error_t err = srs_success;
SrsFormat* format = meta->vsh_format();
if (!format || !format->vcodec) {
return err;
}
bool is_hevc = format->vcodec->id == SrsVideoCodecIdHEVC;
SrsRtpRawNALUs* raw_raw = new SrsRtpRawNALUs();
SrsAvcNaluType first_nalu_type = SrsAvcNaluTypeReserved;
uint8_t first_nalu_type = 0;
for (int i = 0; i < (int)samples.size(); i++) {
SrsSample* sample = samples[i];
@ -1238,8 +1281,8 @@ srs_error_t SrsRtcRtpBuilder::package_nalus(SrsSharedPtrMessage* msg, const vect
continue;
}
if (first_nalu_type == SrsAvcNaluTypeReserved) {
first_nalu_type = SrsAvcNaluType((uint8_t)(sample->bytes[0] & kNalTypeMask));
if (first_nalu_type == 0) {
first_nalu_type = is_hevc ? SrsHevcNaluTypeParse(sample->bytes[0]) : SrsAvcNaluTypeParse(sample->bytes[0]);
}
raw_raw->push_back(sample->copy());
@ -1260,7 +1303,7 @@ srs_error_t SrsRtcRtpBuilder::package_nalus(SrsSharedPtrMessage* msg, const vect
pkt->header.set_payload_type(video_payload_type_);
pkt->header.set_ssrc(video_ssrc_);
pkt->frame_type = SrsFrameTypeVideo;
pkt->nalu_type = (SrsAvcNaluType)first_nalu_type;
pkt->nalu_type = first_nalu_type;
pkt->header.set_sequence(video_sequence++);
pkt->header.set_timestamp(msg->timestamp * 90);
pkt->set_payload(raw_raw, SrsRtspPacketPayloadTypeNALU);
@ -1270,40 +1313,54 @@ srs_error_t SrsRtcRtpBuilder::package_nalus(SrsSharedPtrMessage* msg, const vect
// because more than one RTP packet will refer to it.
SrsUniquePtr<SrsRtpRawNALUs> raw(raw_raw);
int header_size = is_hevc ? SrsHevcNaluHeaderSize : SrsAvcNaluHeaderSize;
// Package NALUs in FU-A RTP packets.
int fu_payload_size = kRtpMaxPayloadSize;
// The first byte is store in FU-A header.
uint8_t header = raw->skip_first_byte();
uint8_t nal_type = header & kNalTypeMask;
int nb_left = nn_bytes - 1;
uint8_t header = raw->skip_bytes(header_size);
int nb_left = nn_bytes - header_size;
int num_of_packet = 1 + (nn_bytes - 1) / fu_payload_size;
for (int i = 0; i < num_of_packet; ++i) {
int packet_size = srs_min(nb_left, fu_payload_size);
SrsRtpFUAPayload* fua = new SrsRtpFUAPayload();
if ((err = raw->read_samples(fua->nalus, packet_size)) != srs_success) {
srs_freep(fua);
return srs_error_wrap(err, "read samples %d bytes, left %d, total %d", packet_size, nb_left, nn_bytes);
}
SrsRtpPacket* pkt = new SrsRtpPacket();
pkts.push_back(pkt);
pkt->header.set_payload_type(video_payload_type_);
pkt->header.set_ssrc(video_ssrc_);
pkt->frame_type = SrsFrameTypeVideo;
pkt->nalu_type = (SrsAvcNaluType)kFuA;
pkt->nalu_type = kFuA;
pkt->header.set_sequence(video_sequence++);
pkt->header.set_timestamp(msg->timestamp * 90);
fua->nri = (SrsAvcNaluType)header;
fua->nalu_type = (SrsAvcNaluType)nal_type;
fua->start = bool(i == 0);
fua->end = bool(i == num_of_packet - 1);
if (is_hevc) {
SrsRtpFUAPayloadHevc* fua = new SrsRtpFUAPayloadHevc();
if ((err = raw->read_samples(fua->nalus, packet_size)) != srs_success) {
srs_freep(fua);
return srs_error_wrap(err, "read hevc samples %d bytes, left %d, total %d", packet_size, nb_left, nn_bytes);
}
fua->nalu_type = SrsHevcNaluTypeParse(header);
fua->start = bool(i == 0);
fua->end = bool(i == num_of_packet - 1);
pkt->set_payload(fua, SrsRtspPacketPayloadTypeFUAHevc);
} else {
SrsRtpFUAPayload* fua = new SrsRtpFUAPayload();
if ((err = raw->read_samples(fua->nalus, packet_size)) != srs_success) {
srs_freep(fua);
return srs_error_wrap(err, "read samples %d bytes, left %d, total %d", packet_size, nb_left, nn_bytes);
}
fua->nalu_type = SrsAvcNaluTypeParse(header);
fua->start = bool(i == 0);
fua->end = bool(i == num_of_packet - 1);
pkt->set_payload(fua, SrsRtspPacketPayloadTypeFUA);
}
pkt->set_payload(fua, SrsRtspPacketPayloadTypeFUA);
pkt->wrap(msg);
nb_left -= packet_size;
@ -1342,11 +1399,19 @@ srs_error_t SrsRtcRtpBuilder::package_fu_a(SrsSharedPtrMessage* msg, SrsSample*
{
srs_error_t err = srs_success;
char* p = sample->bytes + 1;
int nb_left = sample->size - 1;
uint8_t header = sample->bytes[0];
uint8_t nal_type = header & kNalTypeMask;
SrsFormat* format = meta->vsh_format();
if (!format || !format->vcodec) {
return err;
}
bool is_hevc = format->vcodec->id == SrsVideoCodecIdHEVC;
int header_size = is_hevc ? SrsHevcNaluHeaderSize : SrsAvcNaluHeaderSize;
srs_assert(sample->size >= header_size);
char* p = sample->bytes + header_size;
int nb_left = sample->size - header_size;
uint8_t header = sample->bytes[0];
int num_of_packet = 1 + (nb_left - 1) / fu_payload_size;
for (int i = 0; i < num_of_packet; ++i) {
int packet_size = srs_min(nb_left, fu_payload_size);
@ -1359,17 +1424,32 @@ srs_error_t SrsRtcRtpBuilder::package_fu_a(SrsSharedPtrMessage* msg, SrsSample*
pkt->frame_type = SrsFrameTypeVideo;
pkt->header.set_sequence(video_sequence++);
pkt->header.set_timestamp(msg->timestamp * 90);
pkt->nalu_type = is_hevc ? kFuHevc : kFuA;
SrsRtpFUAPayload2* fua = new SrsRtpFUAPayload2();
pkt->set_payload(fua, SrsRtspPacketPayloadTypeFUA2);
if (is_hevc) {
// H265 FU-A header
SrsRtpFUAPayloadHevc2* fua = new SrsRtpFUAPayloadHevc2();
pkt->set_payload(fua, SrsRtspPacketPayloadTypeFUAHevc);
fua->nri = (SrsAvcNaluType)header;
fua->nalu_type = (SrsAvcNaluType)nal_type;
fua->start = bool(i == 0);
fua->end = bool(i == num_of_packet - 1);
fua->nalu_type = SrsHevcNaluTypeParse(header);
fua->start = bool(i == 0);
fua->end = bool(i == num_of_packet - 1);
fua->payload = p;
fua->size = packet_size;
fua->payload = p;
fua->size = packet_size;
} else {
// H264 FU-A header
SrsRtpFUAPayload2* fua = new SrsRtpFUAPayload2();
pkt->set_payload(fua, SrsRtspPacketPayloadTypeFUA2);
fua->nri = (SrsAvcNaluType)header;
fua->nalu_type = SrsAvcNaluTypeParse(header);
fua->start = bool(i == 0);
fua->end = bool(i == num_of_packet - 1);
fua->payload = p;
fua->size = packet_size;
}
pkt->wrap(msg);
@ -2042,6 +2122,7 @@ SrsVideoPayload* SrsVideoPayload::copy()
cp->sample_ = sample_;
cp->rtcp_fbs_ = rtcp_fbs_;
cp->h264_param_ = h264_param_;
cp->h265_param_ = h265_param_;
return cp;
}
@ -2055,14 +2136,55 @@ SrsMediaPayloadType SrsVideoPayload::generate_media_payload_type()
media_payload_type.rtcp_fb_ = rtcp_fbs_;
std::ostringstream format_specific_param;
bool has_param = false;
if (!h264_param_.level_asymmerty_allow.empty()) {
format_specific_param << "level-asymmetry-allowed=" << h264_param_.level_asymmerty_allow;
has_param = true;
}
if (!h264_param_.packetization_mode.empty()) {
format_specific_param << ";packetization-mode=" << h264_param_.packetization_mode;
if (has_param) format_specific_param << ";";
format_specific_param << "packetization-mode=" << h264_param_.packetization_mode;
has_param = true;
}
if (!h264_param_.profile_level_id.empty()) {
format_specific_param << ";profile-level-id=" << h264_param_.profile_level_id;
if (has_param) format_specific_param << ";";
format_specific_param << "profile-level-id=" << h264_param_.profile_level_id;
}
media_payload_type.format_specific_param_ = format_specific_param.str();
return media_payload_type;
}
SrsMediaPayloadType SrsVideoPayload::generate_media_payload_type_h265()
{
SrsMediaPayloadType media_payload_type(pt_);
media_payload_type.encoding_name_ = name_;
media_payload_type.clock_rate_ = sample_;
media_payload_type.rtcp_fb_ = rtcp_fbs_;
std::ostringstream format_specific_param;
bool has_param = false;
if (!h265_param_.level_id.empty()) {
format_specific_param << "level-id=" << h265_param_.level_id;
has_param = true;
}
if (!h265_param_.profile_id.empty()) {
if (has_param) format_specific_param << ";";
format_specific_param << "profile-id=" << h265_param_.profile_id;
has_param = true;
}
if (!h265_param_.tier_flag.empty()) {
if (has_param) format_specific_param << ";";
format_specific_param << "tier-flag=" << h265_param_.tier_flag;
has_param = true;
}
if (!h265_param_.tx_mode.empty()) {
if (has_param) format_specific_param << ";";
format_specific_param << "tx-mode=" << h265_param_.tx_mode;
}
media_payload_type.format_specific_param_ = format_specific_param.str();
@ -2107,6 +2229,31 @@ srs_error_t SrsVideoPayload::set_h264_param_desc(std::string fmtp)
return err;
}
// Parse the H.265 fmtp description into h265_param_, for example:
//      level-id=180;profile-id=1;tier-flag=0;tx-mode=SRST
// Unlike srs_parse_h265_fmtp, any unknown key or malformed pair is an error.
srs_error_t SrsVideoPayload::set_h265_param_desc(std::string fmtp)
{
    std::vector<std::string> attributes = split_str(fmtp, ";");

    for (size_t i = 0; i < attributes.size(); ++i) {
        const std::string& attribute = attributes.at(i);

        std::vector<std::string> kv = split_str(attribute, "=");
        if (kv.size() != 2) {
            return srs_error_new(ERROR_RTC_SDP_DECODE, "invalid h265 param=%s", attribute.c_str());
        }

        const std::string& key = kv[0];
        const std::string& value = kv[1];
        if (key == "level-id") {
            h265_param_.level_id = value;
        } else if (key == "profile-id") {
            h265_param_.profile_id = value;
        } else if (key == "tier-flag") {
            h265_param_.tier_flag = value;
        } else if (key == "tx-mode") {
            h265_param_.tx_mode = value;
        } else {
            return srs_error_new(ERROR_RTC_SDP_DECODE, "invalid h265 param=%s", key.c_str());
        }
    }

    return srs_success;
}
SrsAudioPayload::SrsAudioPayload()
{
channel_ = 0;
@ -2699,7 +2846,7 @@ void SrsRtcVideoRecvTrack::on_before_decode_payload(SrsRtpPacket* pkt, SrsBuffer
}
uint8_t v = (uint8_t)(buf->head()[0] & kNalTypeMask);
pkt->nalu_type = SrsAvcNaluType(v);
pkt->nalu_type = v;
if (v == kStapA) {
*ppayload = new SrsRtpSTAPPayload();

View file

@ -394,6 +394,7 @@ class SrsVideoPayload : public SrsCodecPayload
{
public:
H264SpecificParam h264_param_;
H265SpecificParam h265_param_;
public:
SrsVideoPayload();
@ -402,8 +403,10 @@ public:
public:
virtual SrsVideoPayload* copy();
virtual SrsMediaPayloadType generate_media_payload_type();
virtual SrsMediaPayloadType generate_media_payload_type_h265();
public:
srs_error_t set_h264_param_desc(std::string fmtp);
srs_error_t set_h265_param_desc(std::string fmtp);
};
// TODO: FIXME: Rename it.

View file

@ -85,7 +85,7 @@ SrsFrameToRtcBridge::SrsFrameToRtcBridge(SrsSharedPtr<SrsRtcSource> source)
// video track ssrc
if (true) {
std::vector<SrsRtcTrackDescription*> descs = source->get_track_desc("video", "H264");
std::vector<SrsRtcTrackDescription*> descs = source->get_track_desc("video", "");
if (!descs.empty()) {
video_ssrc = descs.at(0)->ssrc_;
}
@ -95,6 +95,8 @@ SrsFrameToRtcBridge::SrsFrameToRtcBridge(SrsSharedPtr<SrsRtcSource> source)
rtp_builder_ = new SrsRtcRtpBuilder(this, audio_ssrc, audio_payload_type, video_ssrc, video_payload_type);
#endif
codec_switched_ = false;
}
SrsFrameToRtcBridge::~SrsFrameToRtcBridge()
@ -155,6 +157,45 @@ srs_error_t SrsFrameToRtcBridge::on_rtp(SrsRtpPacket* pkt)
{
return source_->on_rtp(pkt);
}
// Switch the bridged video track to H.265 when the stream's codec is HEVC.
// Idempotent: does nothing once switched, or if an H265 track already exists.
// @param id The video codec id of the incoming stream; ignored unless HEVC.
// @return srs_success, or ERROR_RTC_NO_TRACK if no H264 track can be converted,
//         or a wrapped error if applying the H.265 fmtp fails.
srs_error_t SrsFrameToRtcBridge::update_codec(SrsVideoCodecId id)
{
    srs_error_t err = srs_success;

    // Only handle H.265/HEVC codec switch.
    if (id != SrsVideoCodecIdHEVC) {
        return err;
    }

    // Already converted, nothing more to do.
    if (codec_switched_) {
        return err;
    }

    // Check if H.265 track description exists.
    if (!source_->get_track_desc("video", "H265").empty()) {
        return err;
    }

    // Try to convert H.264 track to H.265.
    std::vector<SrsRtcTrackDescription*> video_track_descs = source_->get_track_desc("video", "H264");
    if (video_track_descs.empty()) {
        return srs_error_new(ERROR_RTC_NO_TRACK, "no H264 track found for conversion");
    }

    SrsRtcTrackDescription* video_track_desc = video_track_descs.at(0);
    SrsVideoPayload* video_payload = (SrsVideoPayload*)video_track_desc->media_;

    // Apply the default Main profile fmtp first, so the track is left untouched
    // on failure; the original code discarded this srs_error_t, leaking it.
    // @see https://www.rfc-editor.org/rfc/rfc7798#section-7.2.1
    if ((err = video_payload->set_h265_param_desc("level-id=180;profile-id=1;tier-flag=0;tx-mode=SRST")) != srs_success) {
        return srs_error_wrap(err, "set h265 param desc");
    }
    video_payload->name_ = "H265";

    codec_switched_ = true;
    srs_trace("RTC: Switch video codec %d(%s) to %d(%s)",
        SrsVideoCodecIdAVC, srs_video_codec_id2str(SrsVideoCodecIdAVC).c_str(),
        id, srs_video_codec_id2str(id).c_str());

    return err;
}
#endif
SrsCompositeBridge::SrsCompositeBridge()

View file

@ -65,6 +65,8 @@ private:
#if defined(SRS_FFMPEG_FIT)
SrsRtcRtpBuilder* rtp_builder_;
#endif
private:
bool codec_switched_;
public:
SrsFrameToRtcBridge(SrsSharedPtr<SrsRtcSource> source);
virtual ~SrsFrameToRtcBridge();
@ -74,6 +76,7 @@ public:
virtual void on_unpublish();
virtual srs_error_t on_frame(SrsSharedPtrMessage* frame);
srs_error_t on_rtp(SrsRtpPacket* pkt);
srs_error_t update_codec(SrsVideoCodecId id);
};
#endif

View file

@ -676,7 +676,7 @@ srs_error_t SrsVideoFrame::add_sample(char* bytes, int size)
// By default, use AVC(H.264) to parse NALU.
// For video, parse the nalu type, set the IDR flag.
SrsAvcNaluType nal_unit_type = (SrsAvcNaluType)(bytes[0] & 0x1f);
SrsAvcNaluType nal_unit_type = SrsAvcNaluTypeParse(bytes[0]);
if (nal_unit_type == SrsAvcNaluTypeIDR) {
has_idr = true;
@ -703,11 +703,11 @@ srs_error_t SrsVideoFrame::parse_avc_nalu_type(const SrsSample* sample, SrsAvcNa
srs_error_t err = srs_success;
if (sample == NULL || sample->size < 1) {
return srs_error_new(ERROR_AVC_NALU_EMPTY, "empty nalu");
return srs_error_new(ERROR_NALU_EMPTY, "empty nalu");
}
uint8_t header = sample->bytes[0];
avc_nalu_type = (SrsAvcNaluType)(header & kNalTypeMask);
avc_nalu_type = SrsAvcNaluTypeParse(header);
return err;
}
@ -716,10 +716,6 @@ srs_error_t SrsVideoFrame::parse_avc_b_frame(const SrsSample* sample, bool& is_b
{
srs_error_t err = srs_success;
if (sample == NULL || sample->size < 1) {
return srs_error_new(ERROR_AVC_NALU_EMPTY, "empty nalu");
}
SrsAvcNaluType nalu_type;
if ((err = parse_avc_nalu_type(sample, nalu_type)) != srs_success) {
return srs_error_wrap(err, "parse avc nalu type error");
@ -755,6 +751,84 @@ srs_error_t SrsVideoFrame::parse_avc_b_frame(const SrsSample* sample, bool& is_b
return err;
}
// Extract the HEVC NALU type from the first byte of the sample.
// @param sample The NALU sample; must contain at least one byte.
// @param hevc_nalu_type Output, the parsed NALU type.
// @return ERROR_NALU_EMPTY for a null or empty sample.
srs_error_t SrsVideoFrame::parse_hevc_nalu_type(const SrsSample* sample, SrsHevcNaluType& hevc_nalu_type)
{
    srs_error_t err = srs_success;

    // Need at least the first byte of the two-byte HEVC NALU header.
    if (sample == NULL || sample->size < 1) {
        return srs_error_new(ERROR_NALU_EMPTY, "empty hevc nalu");
    }

    // The NALU type lives in bits 1..6 of the first header byte.
    uint8_t first_byte = sample->bytes[0];
    hevc_nalu_type = SrsHevcNaluTypeParse(first_byte);

    return err;
}
// Detect whether an HEVC NALU is a B slice by parsing its slice segment header.
// Reads only up to slice_type; the rest of the header is not evaluated.
// @param sample The NALU sample, starting at the two-byte HEVC NALU header.
// @param format Provides the decoded PPS table (hevc_dec_conf_record_) that the
//               slice header parsing depends on.
// @param is_b_frame Output, true when slice_type is SrsHevcSliceTypeB.
// @return An error on truncated headers, out-of-range PPS id, or a dependent
//         slice segment (which this parser does not support).
srs_error_t SrsVideoFrame::parse_hevc_b_frame(const SrsSample* sample, SrsFormat *format, bool& is_b_frame)
{
    srs_error_t err = srs_success;

    SrsHevcNaluType nalu_type;
    if ((err = parse_hevc_nalu_type(sample, nalu_type)) != srs_success) {
        return srs_error_wrap(err, "parse hevc nalu type error");
    }

    // NALU types in (BLA, RESERVED_23) are treated as never-B and skipped;
    // presumably this is the IRAP range of H.265 — TODO confirm the bounds.
    if (nalu_type > SrsHevcNaluType_CODED_SLICE_BLA && nalu_type < SrsHevcNaluType_RESERVED_23) {
        is_b_frame = false;
        return err;
    }

    SrsUniquePtr<SrsBuffer> stream(new SrsBuffer(sample->bytes, sample->size));
    // Skip the two-byte HEVC NALU header; bit parsing starts at the slice header.
    stream->skip(2);

    // @see 7.3.6.1 General slice segment header syntax
    // @doc ITU-T-H.265-2021.pdf, page 66.
    SrsBitBuffer bs(stream.get());

    uint8_t first_slice_segment_in_pic_flag = bs.read_bit();

    uint32_t slice_pic_parameter_set_id;
    if ((err = bs.read_bits_ue(slice_pic_parameter_set_id)) != srs_success) {
        return srs_error_wrap(err, "read slice pic parameter set id");
    }
    if (slice_pic_parameter_set_id >= SrsHevcMax_PPS_COUNT) {
        return srs_error_new(ERROR_HEVC_DECODE_ERROR, "slice pic parameter set id out of range: %d", slice_pic_parameter_set_id);
    }

    // Look up the referenced PPS; slice header layout depends on its flags.
    SrsHevcRbspPps *pps = &(format->vcodec->hevc_dec_conf_record_.pps_table[slice_pic_parameter_set_id]);
    uint8_t dependent_slice_segment_flag = 0;
    if (!first_slice_segment_in_pic_flag) {
        if (pps->dependent_slice_segments_enabled_flag) {
            dependent_slice_segment_flag = bs.read_bit();
        }
    }

    // Dependent slice segments inherit their slice_type from a previous
    // segment, which this parser cannot resolve, so reject them.
    if (dependent_slice_segment_flag) {
        return srs_error_new(ERROR_HEVC_DECODE_ERROR, "dependent slice segment flag is not supported");
    }

    // Skip the PPS-declared extra slice header bits preceding slice_type.
    for (int i = 0; i < pps->num_extra_slice_header_bits; i++) {
        bs.skip_bits(1);
    }

    uint32_t slice_type;
    if ((err = bs.read_bits_ue(slice_type)) != srs_success) {
        return srs_error_wrap(err, "read slice type");
    }

    is_b_frame = slice_type == SrsHevcSliceTypeB;
    if (is_b_frame) {
        srs_verbose("nalu_type=%d, slice type=%d", nalu_type, slice_type);
    }

    // no need to evaluate the rest
    return err;
}
SrsFormat::SrsFormat()
{
acodec = NULL;
@ -2263,7 +2337,7 @@ srs_error_t SrsFormat::avc_demux_sps()
// 7.4.1 NAL unit semantics
// ISO_IEC_14496-10-AVC-2012.pdf, page 61.
// nal_unit_type specifies the type of RBSP data structure contained in the NAL unit as specified in Table 7-1.
SrsAvcNaluType nal_unit_type = (SrsAvcNaluType)(nutv & 0x1f);
SrsAvcNaluType nal_unit_type = SrsAvcNaluTypeParse(nutv);
if (nal_unit_type != 7) {
return srs_error_new(ERROR_HLS_DECODE_ERROR, "for sps, nal_unit_type shall be equal to 7");
}

View file

@ -14,6 +14,12 @@
class SrsBuffer;
class SrsBitBuffer;
class SrsFormat;
// @see: https://datatracker.ietf.org/doc/html/rfc6184#section-1.3
const int SrsAvcNaluHeaderSize = 1;
// @see: https://datatracker.ietf.org/doc/html/rfc7798#section-1.1.4
const int SrsHevcNaluHeaderSize = 2;
/**
* The video codec id.
@ -421,6 +427,8 @@ enum SrsAvcNaluType
// Coded slice extension slice_layer_extension_rbsp( )
SrsAvcNaluTypeCodedSliceExt = 20,
};
// Parse the NALU type from the first byte of an AVC NALU (the low 5 bits).
// @see https://datatracker.ietf.org/doc/html/rfc6184#section-1.3
// Note: the parameter is parenthesized so expression arguments (e.g. a|b) parse correctly.
#define SrsAvcNaluTypeParse(code) (SrsAvcNaluType)((code) & 0x1F)
std::string srs_avc_nalu2str(SrsAvcNaluType nalu_type);
#ifdef SRS_H265
@ -496,8 +504,19 @@ enum SrsHevcNaluType {
SrsHevcNaluType_UNSPECIFIED_63,
SrsHevcNaluType_INVALID,
};
// Parse the NALU type from the first byte of an HEVC NALU (bits 1..6 of the header).
// @see https://datatracker.ietf.org/doc/html/rfc7798#section-1.1.4
// Note: the parameter is parenthesized so expression arguments (e.g. a|b) parse correctly.
#define SrsHevcNaluTypeParse(code) (SrsHevcNaluType)(((code) & 0x7E) >> 1)
/**
 * The HEVC slice type, parsed from the slice segment header.
 * @see Table 7-7 Name association to slice_type
 * @doc ITU-T-H.265-2021.pdf, page 96.
 */
enum SrsHevcSliceType {
    // B slice (bi-predictive).
    SrsHevcSliceTypeB = 0,
    // P slice (predictive).
    SrsHevcSliceTypeP = 1,
    // I slice (intra).
    SrsHevcSliceTypeI = 2,
};
struct SrsHevcNalData {
uint16_t nal_unit_length;
std::vector<uint8_t> nal_unit_data;
@ -1321,6 +1340,9 @@ public:
public:
static srs_error_t parse_avc_nalu_type(const SrsSample* sample, SrsAvcNaluType& avc_nalu_type);
static srs_error_t parse_avc_b_frame(const SrsSample* sample, bool& is_b_frame);
static srs_error_t parse_hevc_nalu_type(const SrsSample* sample, SrsHevcNaluType& hevc_nalu_type);
static srs_error_t parse_hevc_b_frame(const SrsSample* sample, SrsFormat* format, bool& is_b_frame);
};
/**

View file

@ -279,7 +279,7 @@
XX(ERROR_HEVC_DECODE_ERROR , 3099, "HevcDecode", "HEVC decode av stream failed") \
XX(ERROR_MP4_HVCC_CHANGE , 3100, "Mp4HvcCChange", "MP4 does not support video HvcC change") \
XX(ERROR_HEVC_API_NO_PREFIXED , 3101, "HevcAnnexbPrefix", "No annexb prefix for HEVC decoder") \
XX(ERROR_AVC_NALU_EMPTY , 3102, "AvcNaluEmpty", "AVC NALU is empty")
XX(ERROR_NALU_EMPTY , 3102, "NaluEmpty", "NALU is empty")
/**************************************************/
/* HTTP/StreamConverter protocol error. */

View file

@ -758,7 +758,7 @@ SrsRtpPacket::SrsRtpPacket()
shared_buffer_ = NULL;
actual_buffer_size_ = 0;
nalu_type = SrsAvcNaluTypeReserved;
nalu_type = 0;
frame_type = SrsFrameTypeReserved;
cached_payload_size = 0;
decode_handler = NULL;
@ -961,6 +961,23 @@ bool SrsRtpPacket::is_keyframe()
if((SrsAvcNaluTypeIDR == nalu_type) || (SrsAvcNaluTypeSPS == nalu_type) || (SrsAvcNaluTypePPS == nalu_type)) {
return true;
}
#ifdef SRS_H265
if(nalu_type == kStapHevc) {
SrsRtpSTAPPayloadHevc* stap_payload = dynamic_cast<SrsRtpSTAPPayloadHevc*>(payload_);
if(NULL != stap_payload->get_vps() || NULL != stap_payload->get_sps() || NULL != stap_payload->get_pps()) {
return true;
}
} else if(nalu_type == kFuHevc) {
SrsRtpFUAPayloadHevc2* fua_payload = dynamic_cast<SrsRtpFUAPayloadHevc2*>(payload_);
if(fua_payload->nalu_type >= SrsHevcNaluType_CODED_SLICE_BLA && fua_payload->nalu_type <= SrsHevcNaluType_RESERVED_23) {
return true;
}
} else {
if((SrsHevcNaluType_VPS == nalu_type) || (SrsHevcNaluType_SPS == nalu_type) || (SrsHevcNaluType_PPS == nalu_type)) {
return true;
}
}
#endif
}
return false;
@ -1061,10 +1078,10 @@ void SrsRtpRawNALUs::push_back(SrsSample* sample)
nalus.push_back(sample);
}
uint8_t SrsRtpRawNALUs::skip_first_byte()
uint8_t SrsRtpRawNALUs::skip_bytes(int count)
{
srs_assert (cursor >= 0 && nn_bytes > 0 && cursor < nn_bytes);
cursor++;
srs_assert (cursor >= 0 && nn_bytes > 0 && cursor + count < nn_bytes);
cursor += count;
return uint8_t(nalus[0]->bytes[0]);
}
@ -1191,7 +1208,7 @@ SrsSample* SrsRtpSTAPPayload::get_sps()
continue;
}
SrsAvcNaluType nalu_type = (SrsAvcNaluType)(p->bytes[0] & kNalTypeMask);
SrsAvcNaluType nalu_type = SrsAvcNaluTypeParse(p->bytes[0]);
if (nalu_type == SrsAvcNaluTypeSPS) {
return p;
}
@ -1209,7 +1226,7 @@ SrsSample* SrsRtpSTAPPayload::get_pps()
continue;
}
SrsAvcNaluType nalu_type = (SrsAvcNaluType)(p->bytes[0] & kNalTypeMask);
SrsAvcNaluType nalu_type = SrsAvcNaluTypeParse(p->bytes[0]);
if (nalu_type == SrsAvcNaluTypePPS) {
return p;
}
@ -1395,7 +1412,7 @@ srs_error_t SrsRtpFUAPayload::decode(SrsBuffer* buf)
v = buf->read_1bytes();
start = v & kStart;
end = v & kEnd;
nalu_type = SrsAvcNaluType(v & kNalTypeMask);
nalu_type = SrsAvcNaluTypeParse(v);
if (!buf->require(1)) {
return srs_error_new(ERROR_RTC_RTP_MUXER, "requires %d bytes", 1);
@ -1496,7 +1513,7 @@ srs_error_t SrsRtpFUAPayload2::decode(SrsBuffer* buf)
v = buf->read_1bytes();
start = v & kStart;
end = v & kEnd;
nalu_type = SrsAvcNaluType(v & kNalTypeMask);
nalu_type = SrsAvcNaluTypeParse(v);
if (!buf->require(1)) {
return srs_error_new(ERROR_RTC_RTP_MUXER, "requires %d bytes", 1);
@ -1522,3 +1539,362 @@ ISrsRtpPayloader* SrsRtpFUAPayload2::copy()
return cp;
}
SrsRtpSTAPPayloadHevc::SrsRtpSTAPPayloadHevc()
{
    // Stat: count the objects created of this kind.
    ++_srs_pps_objs_rothers->sugar;
}
SrsRtpSTAPPayloadHevc::~SrsRtpSTAPPayloadHevc()
{
    // Free every owned NALU sample; the referenced bytes belong to the caller.
    std::vector<SrsSample*>::iterator it;
    for (it = nalus.begin(); it != nalus.end(); ++it) {
        SrsSample* sample = *it;
        srs_freep(sample);
    }
}
// Find the first VPS NALU among the aggregated samples, or NULL if absent.
SrsSample* SrsRtpSTAPPayloadHevc::get_vps()
{
    std::vector<SrsSample*>::iterator it;
    for (it = nalus.begin(); it != nalus.end(); ++it) {
        SrsSample* sample = *it;
        if (sample == NULL || sample->size == 0) {
            continue;
        }

        if (SrsHevcNaluTypeParse(sample->bytes[0]) == SrsHevcNaluType_VPS) {
            return sample;
        }
    }

    return NULL;
}
// Find the first SPS NALU among the aggregated samples, or NULL if absent.
SrsSample* SrsRtpSTAPPayloadHevc::get_sps()
{
    std::vector<SrsSample*>::iterator it;
    for (it = nalus.begin(); it != nalus.end(); ++it) {
        SrsSample* sample = *it;
        if (sample == NULL || sample->size == 0) {
            continue;
        }

        if (SrsHevcNaluTypeParse(sample->bytes[0]) == SrsHevcNaluType_SPS) {
            return sample;
        }
    }

    return NULL;
}
// Find the first PPS NALU among the aggregated samples, or NULL if absent.
SrsSample* SrsRtpSTAPPayloadHevc::get_pps()
{
    std::vector<SrsSample*>::iterator it;
    for (it = nalus.begin(); it != nalus.end(); ++it) {
        SrsSample* sample = *it;
        if (sample == NULL || sample->size == 0) {
            continue;
        }

        if (SrsHevcNaluTypeParse(sample->bytes[0]) == SrsHevcNaluType_PPS) {
            return sample;
        }
    }

    return NULL;
}
uint64_t SrsRtpSTAPPayloadHevc::nb_bytes()
{
int size = 2;
int nn_nalus = (int)nalus.size();
for (int i = 0; i < nn_nalus; i++) {
SrsSample* p = nalus[i];
size += 2 + p->size;
}
return size;
}
// Encode the aggregation packet (AP): 2-byte PayloadHdr, then each NALU
// prefixed by its 16-bit size.
// @see https://datatracker.ietf.org/doc/html/rfc7798#section-4.4.2
srs_error_t SrsRtpSTAPPayloadHevc::encode(SrsBuffer* buf)
{
    if (!buf->require(2)) {
        return srs_error_new(ERROR_RTC_RTP_MUXER, "requires %d bytes", 2);
    }

    // PayloadHdr: F=0, Type=48(AP), LayerId=0, TID=1.
    buf->write_1bytes(kStapHevc << 1);
    buf->write_1bytes(1);

    // Write each NALU with its 2-byte size prefix.
    std::vector<SrsSample*>::iterator it;
    for (it = nalus.begin(); it != nalus.end(); ++it) {
        SrsSample* sample = *it;
        if (!buf->require(2 + sample->size)) {
            return srs_error_new(ERROR_RTC_RTP_MUXER, "requires %d bytes", 2 + sample->size);
        }

        buf->write_2bytes(sample->size);
        buf->write_bytes(sample->bytes, sample->size);
    }

    return srs_success;
}
// Decode an aggregation packet (AP) into a list of NALU samples.
// @remark Samples only reference memory inside buf; they do not own it.
// @see https://datatracker.ietf.org/doc/html/rfc7798#section-4.4.2
srs_error_t SrsRtpSTAPPayloadHevc::decode(SrsBuffer* buf)
{
    if (!buf->require(2)) {
        return srs_error_new(ERROR_RTC_RTP_MUXER, "requires %d bytes", 2);
    }

    // Consume the 2-byte PayloadHdr; only the first byte is inspected.
    uint8_t first = buf->read_1bytes();
    buf->skip(1);

    // The forbidden_zero_bit should be zero.
    // @see https://datatracker.ietf.org/doc/html/rfc7798#section-4.4.2
    if ((first & 0x80) == 0x80) {
        return srs_error_new(ERROR_RTC_RTP_MUXER, "forbidden_zero_bit should be zero");
    }

    // Parse each size-prefixed NALU until the buffer is exhausted.
    while (!buf->empty()) {
        if (!buf->require(2)) {
            return srs_error_new(ERROR_RTC_RTP_MUXER, "requires %d bytes", 2);
        }

        int nalu_size = buf->read_2bytes();
        if (!buf->require(nalu_size)) {
            return srs_error_new(ERROR_RTC_RTP_MUXER, "requires %d bytes", nalu_size);
        }

        SrsSample* sample = new SrsSample();
        sample->bytes = buf->head();
        sample->size = nalu_size;
        buf->skip(nalu_size);

        nalus.push_back(sample);
    }

    return srs_success;
}
// Deep copy: each sample is copied, so the clone owns its own sample objects.
ISrsRtpPayloader* SrsRtpSTAPPayloadHevc::copy()
{
    SrsRtpSTAPPayloadHevc* cloned = new SrsRtpSTAPPayloadHevc();

    std::vector<SrsSample*>::iterator it;
    for (it = nalus.begin(); it != nalus.end(); ++it) {
        cloned->nalus.push_back((*it)->copy());
    }

    return cloned;
}
SrsRtpFUAPayloadHevc::SrsRtpFUAPayloadHevc()
{
    // Neither the first nor the last fragment, with an unknown NALU type.
    start = end = false;
    nalu_type = (SrsHevcNaluType)0;
    // Stat: count the objects created of this kind.
    ++_srs_pps_objs_rothers->sugar;
}
SrsRtpFUAPayloadHevc::~SrsRtpFUAPayloadHevc()
{
    // Free every owned fragment sample; the referenced bytes belong to the caller.
    std::vector<SrsSample*>::iterator it;
    for (it = nalus.begin(); it != nalus.end(); ++it) {
        SrsSample* sample = *it;
        srs_freep(sample);
    }
}
uint64_t SrsRtpFUAPayloadHevc::nb_bytes()
{
int size = 3;
int nn_nalus = (int)nalus.size();
for (int i = 0; i < nn_nalus; i++) {
SrsSample* p = nalus[i];
size += p->size;
}
return size;
}
// Encode the fragmentation unit: PayloadHdr(2) + FU header(1) + fragments.
// @see https://datatracker.ietf.org/doc/html/rfc7798#section-4.4.3
srs_error_t SrsRtpFUAPayloadHevc::encode(SrsBuffer* buf)
{
    if (!buf->require(3)) {
        return srs_error_new(ERROR_RTC_RTP_MUXER, "requires %d bytes", 3);
    }

    // PayloadHdr: F=0, Type=49(FU), LayerId=0, TID=1.
    buf->write_1bytes(kFuHevc << 1);
    buf->write_1bytes(1);

    // FU header: S/E flags plus the 6-bit type of the fragmented NALU.
    uint8_t fu_header = nalu_type;
    if (start) {
        fu_header |= kStart;
    }
    if (end) {
        fu_header |= kEnd;
    }
    buf->write_1bytes(fu_header);

    // Append every fragment payload.
    std::vector<SrsSample*>::iterator it;
    for (it = nalus.begin(); it != nalus.end(); ++it) {
        SrsSample* sample = *it;
        if (!buf->require(sample->size)) {
            return srs_error_new(ERROR_RTC_RTP_MUXER, "requires %d bytes", sample->size);
        }

        buf->write_bytes(sample->bytes, sample->size);
    }

    return srs_success;
}
// Decode a fragmentation unit into one fragment sample.
// @remark The sample only references memory inside buf; it does not own it.
// @see https://datatracker.ietf.org/doc/html/rfc7798#section-4.4.3
srs_error_t SrsRtpFUAPayloadHevc::decode(SrsBuffer* buf)
{
    if (!buf->require(3)) {
        return srs_error_new(ERROR_RTC_RTP_MUXER, "requires %d bytes", 3);
    }

    // The 2-byte PayloadHdr carries type FU(49); nothing else is needed from it.
    buf->skip(2);

    // FU header: start/end flags and the original NALU type in the low 6 bits.
    uint8_t fu_header = buf->read_1bytes();
    start = (fu_header & kStart) != 0;
    end = (fu_header & kEnd) != 0;
    nalu_type = SrsHevcNaluType(fu_header & 0x3F);

    // An FU must carry at least one byte of fragment payload.
    if (!buf->require(1)) {
        return srs_error_new(ERROR_RTC_RTP_MUXER, "requires %d bytes", 1);
    }

    SrsSample* sample = new SrsSample();
    sample->bytes = buf->head();
    sample->size = buf->left();
    buf->skip(sample->size);

    nalus.push_back(sample);

    return srs_success;
}
// Deep copy: flags are copied and each fragment sample is duplicated.
ISrsRtpPayloader* SrsRtpFUAPayloadHevc::copy()
{
    SrsRtpFUAPayloadHevc* cloned = new SrsRtpFUAPayloadHevc();
    cloned->start = start;
    cloned->end = end;
    cloned->nalu_type = nalu_type;

    std::vector<SrsSample*>::iterator it;
    for (it = nalus.begin(); it != nalus.end(); ++it) {
        cloned->nalus.push_back((*it)->copy());
    }

    return cloned;
}
SrsRtpFUAPayloadHevc2::SrsRtpFUAPayloadHevc2()
{
    // Neither the first nor the last fragment, with an unknown NALU type.
    start = end = false;
    nalu_type = (SrsHevcNaluType)0;
    // The payload is a borrowed reference into an external buffer; not owned.
    payload = NULL;
    size = 0;
    // Stat: count the objects created of this kind.
    ++_srs_pps_objs_rfua->sugar;
}
SrsRtpFUAPayloadHevc2::~SrsRtpFUAPayloadHevc2()
{
    // Nothing to free: the payload pointer is not owned by this object.
}
// Size of the encoded packet: headers plus the single payload.
uint64_t SrsRtpFUAPayloadHevc2::nb_bytes()
{
    // PayloadHdr(2) + FU header(1)
    return 3 + size;
}
// Encode the fragmentation unit with a single payload.
// @see https://datatracker.ietf.org/doc/html/rfc7798#section-4.4.3
srs_error_t SrsRtpFUAPayloadHevc2::encode(SrsBuffer* buf)
{
    int nn = 3 + size;
    if (!buf->require(nn)) {
        return srs_error_new(ERROR_RTC_RTP_MUXER, "requires %d bytes", nn);
    }

    // Fast encoding: write directly into the underlying buffer, then consume it.
    char* p = buf->head();

    /*
     * create the HEVC payload header and transmit the buffer as fragmentation units (FU)
     *
     *    0                   1
     *    0 1 2 3 4 5 6 7 8 9 0 1 2 3 4 5
     *   +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+
     *   |F|   Type    |  LayerId  | TID |
     *   +-------------+-----------------+
     *
     *      F       = 0
     *      Type    = 49 (fragmentation unit (FU))
     *      LayerId = 0
     *      TID     = 1
     */
    p[0] = kFuHevc << 1;
    p[1] = 1;

    // FU header: S/E flags plus the 6-bit type of the fragmented NALU.
    // @see https://datatracker.ietf.org/doc/html/rfc7798#section-4.4.3
    uint8_t fu_header = nalu_type;
    if (start) {
        fu_header |= kStart;
    }
    if (end) {
        fu_header |= kEnd;
    }
    p[2] = fu_header;

    // The single payload follows the 3 header bytes.
    memcpy(p + 3, payload, size);

    // Consume bytes.
    buf->skip(nn);

    return srs_success;
}
// Decode a fragmentation unit, keeping a single reference to the payload.
// @remark The payload only references memory inside buf; it is not owned.
// @see https://datatracker.ietf.org/doc/html/rfc7798#section-4.4.3
srs_error_t SrsRtpFUAPayloadHevc2::decode(SrsBuffer* buf)
{
    if (!buf->require(3)) {
        return srs_error_new(ERROR_RTC_RTP_MUXER, "requires 3 bytes");
    }

    // skip PayloadHdr, 2 bytes
    buf->skip(2);

    // FU header: start/end flags and the original NALU type in the low 6 bits.
    uint8_t fu_header = buf->read_1bytes();
    start = fu_header & kStart;
    end = fu_header & kEnd;
    nalu_type = SrsHevcNaluType(fu_header & 0x3F);

    // An FU must carry at least one byte of fragment payload, consistent with
    // SrsRtpFUAPayloadHevc::decode and the AVC SrsRtpFUAPayload2::decode.
    if (!buf->require(1)) {
        return srs_error_new(ERROR_RTC_RTP_MUXER, "requires %d bytes", 1);
    }

    payload = buf->head();
    size = buf->left();
    buf->skip(size);

    return srs_success;
}
// Shallow copy: the payload pointer is shared with the original, not duplicated.
ISrsRtpPayloader* SrsRtpFUAPayloadHevc2::copy()
{
    SrsRtpFUAPayloadHevc2* cloned = new SrsRtpFUAPayloadHevc2();

    cloned->start = start;
    cloned->end = end;
    cloned->nalu_type = nalu_type;
    cloned->payload = payload;
    cloned->size = size;

    return cloned;
}

View file

@ -29,10 +29,14 @@ const uint8_t kNalTypeMask = 0x1F;
// @see: https://tools.ietf.org/html/rfc6184#section-5.2
const uint8_t kStapA = 24;
// @see: https://tools.ietf.org/html/rfc6184#section-5.2
const uint8_t kFuA = 28;
// @see: https://datatracker.ietf.org/doc/html/rfc7798#section-4.4.2
const uint8_t kStapHevc = 48;
// @see: https://datatracker.ietf.org/doc/html/rfc7798#section-4.4.3
const uint8_t kFuHevc = 49;
// @see: https://tools.ietf.org/html/rfc6184#section-5.8
const uint8_t kStart = 0x80; // Fu-header start bit
const uint8_t kEnd = 0x40; // Fu-header end bit
@ -254,8 +258,10 @@ enum SrsRtspPacketPayloadType
SrsRtspPacketPayloadTypeRaw,
SrsRtspPacketPayloadTypeFUA2,
SrsRtspPacketPayloadTypeFUA,
SrsRtspPacketPayloadTypeFUAHevc,
SrsRtspPacketPayloadTypeNALU,
SrsRtspPacketPayloadTypeSTAP,
SrsRtspPacketPayloadTypeSTAPHevc,
SrsRtspPacketPayloadTypeUnknown,
};
@ -289,7 +295,7 @@ private:
// Helper fields.
public:
// The first byte as nalu type, for video decoder only.
SrsAvcNaluType nalu_type;
uint8_t nalu_type;
// The frame type, for RTMP bridge or SFU source.
SrsFrameType frame_type;
// Fast cache for performance.
@ -376,7 +382,7 @@ public:
public:
void push_back(SrsSample* sample);
public:
uint8_t skip_first_byte();
uint8_t skip_bytes(int count);
// We will manage the returned samples, if user want to manage it, please copy it.
srs_error_t read_samples(std::vector<SrsSample*>& samples, int packet_size);
// interface ISrsRtpPayloader
@ -460,4 +466,68 @@ public:
virtual ISrsRtpPayloader* copy();
};
// STAP, for multiple NALUs aggregated in a single RTP packet, for HEVC.
// @see https://datatracker.ietf.org/doc/html/rfc7798#section-4.4.2
class SrsRtpSTAPPayloadHevc : public ISrsRtpPayloader
{
public:
    // The NALU samples, we will manage the samples.
    // @remark We only refer to the memory, user must free its bytes.
    std::vector<SrsSample*> nalus;
public:
    SrsRtpSTAPPayloadHevc();
    virtual ~SrsRtpSTAPPayloadHevc();
public:
    // Find the first NALU of the given parameter-set type, or NULL if absent.
    SrsSample* get_vps();
    SrsSample* get_sps();
    SrsSample* get_pps();
// interface ISrsRtpPayloader
public:
    virtual uint64_t nb_bytes();
    virtual srs_error_t encode(SrsBuffer* buf);
    virtual srs_error_t decode(SrsBuffer* buf);
    virtual ISrsRtpPayloader* copy();
};
// FU, for one NALU with multiple fragments.
// With more than one payload for HEVC.
// @see https://datatracker.ietf.org/doc/html/rfc7798#section-4.4.3
class SrsRtpFUAPayloadHevc : public ISrsRtpPayloader
{
public:
    // The FUA header.
    bool start;
    bool end;
    // The type of the fragmented NALU, from the FU header.
    SrsHevcNaluType nalu_type;
    // The NALU samples, we manage the samples.
    // @remark We only refer to the memory, user must free its bytes.
    std::vector<SrsSample*> nalus;
public:
    SrsRtpFUAPayloadHevc();
    virtual ~SrsRtpFUAPayloadHevc();
// interface ISrsRtpPayloader
public:
    virtual uint64_t nb_bytes();
    virtual srs_error_t encode(SrsBuffer* buf);
    virtual srs_error_t decode(SrsBuffer* buf);
    virtual ISrsRtpPayloader* copy();
};
// FU, for one NALU with multiple fragments.
// With only one payload for HEVC.
class SrsRtpFUAPayloadHevc2 : public ISrsRtpPayloader
{
public:
    // The FUA header.
    bool start;
    bool end;
    // The type of the fragmented NALU, from the FU header.
    SrsHevcNaluType nalu_type;
    // The single payload; borrowed memory, not owned by this object.
    char* payload;
    int size;
public:
    SrsRtpFUAPayloadHevc2();
    virtual ~SrsRtpFUAPayloadHevc2();
// interface ISrsRtpPayloader
public:
    virtual uint64_t nb_bytes();
    virtual srs_error_t encode(SrsBuffer* buf);
    virtual srs_error_t decode(SrsBuffer* buf);
    virtual ISrsRtpPayloader* copy();
};

View file

@ -62,7 +62,7 @@ VOID TEST(KernelRTCTest, RtpSTAPPayloadException)
SrsAvcNaluType nalu_type = SrsAvcNaluTypeReserved;
// Try to parse the NALU type for video decoder.
if (!buf.empty()) {
nalu_type = SrsAvcNaluType((uint8_t)(buf.head()[0] & kNalTypeMask));
nalu_type = SrsAvcNaluTypeParse(buf.head()[0]);
}
EXPECT_TRUE(nalu_type == kStapA);