1
0
Fork 0
mirror of https://github.com/ossrs/srs.git synced 2025-03-09 15:49:59 +00:00

Support composited bridges for 1:N protocols converting. v6.0.41 (#3392)

Co-authored-by: john <hondaxiao@tencent.com>
Co-authored-by: chundonglinlin <chundonglinlin@163.com>
This commit is contained in:
Winlin 2023-04-01 21:34:59 +08:00 committed by GitHub
parent 771ae0a1a6
commit dcd02fe69c
No known key found for this signature in database
GPG key ID: 4AEE18F83AFDEB23
19 changed files with 770 additions and 400 deletions

View file

@ -416,6 +416,8 @@ jobs:
tag: ${{ github.ref }} tag: ${{ github.ref }}
name: Release ${{ env.SRS_TAG }} name: Release ${{ env.SRS_TAG }}
body: | body: |
If you would like to support SRS, please consider contributing to our [OpenCollective](https://opencollective.com/srs-server).
[${{ github.sha }}](https://github.com/ossrs/srs/commit/${{ github.sha }}) [${{ github.sha }}](https://github.com/ossrs/srs/commit/${{ github.sha }})
${{ github.event.head_commit.message }} ${{ github.event.head_commit.message }}

View file

@ -0,0 +1,21 @@
listen 1935;
max_connections 1000;
daemon off;
srs_log_tank console;
http_api {
enabled on;
listen 1985;
auth {
enabled on;
username admin;
password admin;
}
}
http_server {
enabled on;
listen 8080;
dir ./objs/nginx/html;
}
vhost __defaultVhost__ {
}

2
trunk/configure vendored
View file

@ -294,7 +294,7 @@ if [[ $SRS_FFMPEG_FIT == YES ]]; then
fi fi
MODULE_FILES=("srs_app_server" "srs_app_conn" "srs_app_rtmp_conn" "srs_app_source" MODULE_FILES=("srs_app_server" "srs_app_conn" "srs_app_rtmp_conn" "srs_app_source"
"srs_app_refer" "srs_app_hls" "srs_app_forward" "srs_app_encoder" "srs_app_http_stream" "srs_app_refer" "srs_app_hls" "srs_app_forward" "srs_app_encoder" "srs_app_http_stream"
"srs_app_st" "srs_app_log" "srs_app_config" "srs_app_st" "srs_app_log" "srs_app_config" "srs_app_stream_bridge"
"srs_app_pithy_print" "srs_app_reload" "srs_app_http_api" "srs_app_http_conn" "srs_app_http_hooks" "srs_app_pithy_print" "srs_app_reload" "srs_app_http_api" "srs_app_http_conn" "srs_app_http_hooks"
"srs_app_ingest" "srs_app_ffmpeg" "srs_app_utility" "srs_app_edge" "srs_app_ingest" "srs_app_ffmpeg" "srs_app_utility" "srs_app_edge"
"srs_app_heartbeat" "srs_app_empty" "srs_app_http_client" "srs_app_http_static" "srs_app_heartbeat" "srs_app_empty" "srs_app_http_client" "srs_app_http_static"

View file

@ -8,9 +8,10 @@ The changelog for SRS.
## SRS 6.0 Changelog ## SRS 6.0 Changelog
* v5.0, 2023-04-01, Merge [#3392](https://github.com/ossrs/srs/pull/3392): Support composited bridges for 1:N protocols converting. v6.0.41 (#3392)
* v5.0, 2023-04-01, Merge [#3458](https://github.com/ossrs/srs/pull/3450): API: Support HTTP basic authentication for API. v6.0.40 (#3458) * v5.0, 2023-04-01, Merge [#3458](https://github.com/ossrs/srs/pull/3450): API: Support HTTP basic authentication for API. v6.0.40 (#3458)
* v6.0, 2023-03-27, Merge [#3450](https://github.com/ossrs/srs/pull/3450): WebRTC: Error message carries the SDP when failed. v6.0.39 (#3450) * v6.0, 2023-03-27, Merge [#3450](https://github.com/ossrs/srs/pull/3450): WebRTC: Error message carries the SDP when failed. v6.0.39 (#3450)
* v6.0, 2023-03-25, Merge [#3477](https://github.com/ossrs/srs/pull/3477): Remove unneccessary NULL check in srs_freep. v6.0.38 (#3477) * v6.0, 2023-03-25, Merge [#3477](https://github.com/ossrs/srs/pull/3477): Remove unnecessary NULL check in srs_freep. v6.0.38 (#3477)
* v6.0, 2023-03-25, Merge [#3455](https://github.com/ossrs/srs/pull/3455): RTC: Call on_play before create session, for it might be freed for timeout. v6.0.37 (#3455) * v6.0, 2023-03-25, Merge [#3455](https://github.com/ossrs/srs/pull/3455): RTC: Call on_play before create session, for it might be freed for timeout. v6.0.37 (#3455)
* v6.0, 2023-03-22, Merge [#3427](https://github.com/ossrs/srs/pull/3427): WHIP: Support DELETE resource for Larix Broadcaster. v6.0.36 (#3427) * v6.0, 2023-03-22, Merge [#3427](https://github.com/ossrs/srs/pull/3427): WHIP: Support DELETE resource for Larix Broadcaster. v6.0.36 (#3427)
* v6.0, 2023-03-20, Merge [#3460](https://github.com/ossrs/srs/pull/3460): WebRTC: Support WHIP/WHEP players. v6.0.35 (#3460) * v6.0, 2023-03-20, Merge [#3460](https://github.com/ossrs/srs/pull/3460): WebRTC: Support WHIP/WHEP players. v6.0.35 (#3460)

View file

@ -692,6 +692,7 @@ public:
private: private:
SrsConfDirective* get_srt(std::string vhost); SrsConfDirective* get_srt(std::string vhost);
public: public:
// TODO: FIXME: Rename to get_vhost_srt_enabled.
bool get_srt_enabled(std::string vhost); bool get_srt_enabled(std::string vhost);
bool get_srt_to_rtmp(std::string vhost); bool get_srt_to_rtmp(std::string vhost);

View file

@ -48,6 +48,7 @@ using namespace std;
#include <srs_protocol_kbps.hpp> #include <srs_protocol_kbps.hpp>
#include <srs_kernel_kbps.hpp> #include <srs_kernel_kbps.hpp>
#include <srs_app_rtc_network.hpp> #include <srs_app_rtc_network.hpp>
#include <srs_app_srt_source.hpp>
SrsPps* _srs_pps_sstuns = NULL; SrsPps* _srs_pps_sstuns = NULL;
SrsPps* _srs_pps_srtcps = NULL; SrsPps* _srs_pps_srtcps = NULL;
@ -1185,6 +1186,22 @@ srs_error_t SrsRtcPublishStream::initialize(SrsRequest* r, SrsRtcSourceDescripti
return srs_error_new(ERROR_SYSTEM_STREAM_BUSY, "rtmp stream %s busy", r->get_stream_url().c_str()); return srs_error_new(ERROR_SYSTEM_STREAM_BUSY, "rtmp stream %s busy", r->get_stream_url().c_str());
} }
// Check whether SRT stream is busy.
#ifdef SRS_SRT
SrsSrtSource* srt = NULL;
bool srt_server_enabled = _srs_config->get_srt_enabled();
bool srt_enabled = _srs_config->get_srt_enabled(r->vhost);
if (srt_server_enabled && srt_enabled) {
if ((err = _srs_srt_sources->fetch_or_create(r, &srt)) != srs_success) {
return srs_error_wrap(err, "create source");
}
if (!srt->can_publish()) {
return srs_error_new(ERROR_SYSTEM_STREAM_BUSY, "srt stream %s busy", r->get_stream_url().c_str());
}
}
#endif
// Bridge to rtmp // Bridge to rtmp
#if defined(SRS_RTC) && defined(SRS_FFMPEG_FIT) #if defined(SRS_RTC) && defined(SRS_FFMPEG_FIT)
bool rtc_to_rtmp = _srs_config->get_rtc_to_rtmp(req_->vhost); bool rtc_to_rtmp = _srs_config->get_rtc_to_rtmp(req_->vhost);
@ -1197,7 +1214,9 @@ srs_error_t SrsRtcPublishStream::initialize(SrsRequest* r, SrsRtcSourceDescripti
// especially for stream merging. // especially for stream merging.
rtmp->set_cache(false); rtmp->set_cache(false);
SrsRtmpFromRtcBridge *bridge = new SrsRtmpFromRtcBridge(rtmp); SrsCompositeBridge* bridge = new SrsCompositeBridge();
bridge->append(new SrsFrameToRtmpBridge(rtmp));
if ((err = bridge->initialize(r)) != srs_success) { if ((err = bridge->initialize(r)) != srs_success) {
srs_freep(bridge); srs_freep(bridge);
return srs_error_wrap(err, "create bridge"); return srs_error_wrap(err, "create bridge");

View file

@ -52,15 +52,13 @@ SrsPps* _srs_pps_rmnack = NULL;
extern SrsPps* _srs_pps_aloss2; extern SrsPps* _srs_pps_aloss2;
// Firefox defaults as 109, Chrome is 111.
const int kAudioPayloadType = 111;
const int kAudioChannel = 2; const int kAudioChannel = 2;
const int kAudioSamplerate = 48000; const int kAudioSamplerate = 48000;
// Firefox defaults as 126, Chrome is 102.
const int kVideoPayloadType = 102;
const int kVideoSamplerate = 90000; const int kVideoSamplerate = 90000;
using namespace std;
// The RTP payload max size, reserved some paddings for SRTP as such: // The RTP payload max size, reserved some paddings for SRTP as such:
// kRtpPacketSize = kRtpMaxPayloadSize + paddings // kRtpPacketSize = kRtpMaxPayloadSize + paddings
// For example, if kRtpPacketSize is 1500, recommend to set kRtpMaxPayloadSize to 1400, // For example, if kRtpPacketSize is 1500, recommend to set kRtpMaxPayloadSize to 1400,
@ -71,8 +69,6 @@ const int kVideoSamplerate = 90000;
// see @doc https://groups.google.com/g/discuss-webrtc/c/gH5ysR3SoZI // see @doc https://groups.google.com/g/discuss-webrtc/c/gH5ysR3SoZI
const int kRtpMaxPayloadSize = kRtpPacketSize - 300; const int kRtpMaxPayloadSize = kRtpPacketSize - 300;
using namespace std;
// TODO: Add this function into SrsRtpMux class. // TODO: Add this function into SrsRtpMux class.
srs_error_t aac_raw_append_adts_header(SrsSharedPtrMessage* shared_audio, SrsFormat* format, char** pbuf, int* pnn_buf) srs_error_t aac_raw_append_adts_header(SrsSharedPtrMessage* shared_audio, SrsFormat* format, char** pbuf, int* pnn_buf)
{ {
@ -323,14 +319,6 @@ ISrsRtcSourceEventHandler::~ISrsRtcSourceEventHandler()
{ {
} }
ISrsRtcSourceBridge::ISrsRtcSourceBridge()
{
}
ISrsRtcSourceBridge::~ISrsRtcSourceBridge()
{
}
SrsRtcSource::SrsRtcSource() SrsRtcSource::SrsRtcSource()
{ {
is_created_ = false; is_created_ = false;
@ -341,6 +329,9 @@ SrsRtcSource::SrsRtcSource()
req = NULL; req = NULL;
bridge_ = NULL; bridge_ = NULL;
#ifdef SRS_FFMPEG_FIT
frame_builder_ = NULL;
#endif
pli_for_rtmp_ = pli_elapsed_ = 0; pli_for_rtmp_ = pli_elapsed_ = 0;
} }
@ -351,6 +342,9 @@ SrsRtcSource::~SrsRtcSource()
// for all consumers are auto free. // for all consumers are auto free.
consumers.clear(); consumers.clear();
#ifdef SRS_FFMPEG_FIT
srs_freep(frame_builder_);
#endif
srs_freep(bridge_); srs_freep(bridge_);
srs_freep(req); srs_freep(req);
srs_freep(stream_desc_); srs_freep(stream_desc_);
@ -464,10 +458,15 @@ SrsContextId SrsRtcSource::pre_source_id()
return _pre_source_id; return _pre_source_id;
} }
void SrsRtcSource::set_bridge(ISrsRtcSourceBridge *bridge) void SrsRtcSource::set_bridge(ISrsStreamBridge* bridge)
{ {
srs_freep(bridge_); srs_freep(bridge_);
bridge_ = bridge; bridge_ = bridge;
#ifdef SRS_FFMPEG_FIT
srs_freep(frame_builder_);
frame_builder_ = new SrsRtcFrameBuilder(bridge);
#endif
} }
srs_error_t SrsRtcSource::create_consumer(SrsRtcConsumer*& consumer) srs_error_t SrsRtcSource::create_consumer(SrsRtcConsumer*& consumer)
@ -541,6 +540,16 @@ srs_error_t SrsRtcSource::on_publish()
// If bridge to other source, handle event and start timer to request PLI. // If bridge to other source, handle event and start timer to request PLI.
if (bridge_) { if (bridge_) {
#ifdef SRS_FFMPEG_FIT
if ((err = frame_builder_->initialize(req)) != srs_success) {
return srs_error_wrap(err, "frame builder initialize");
}
if ((err = frame_builder_->on_publish()) != srs_success) {
return srs_error_wrap(err, "frame builder on publish");
}
#endif
if ((err = bridge_->on_publish()) != srs_success) { if ((err = bridge_->on_publish()) != srs_success) {
return srs_error_wrap(err, "bridge on publish"); return srs_error_wrap(err, "bridge on publish");
} }
@ -585,6 +594,11 @@ void SrsRtcSource::on_unpublish()
// For SrsRtcSource::on_timer() // For SrsRtcSource::on_timer()
_srs_hybrid->timer100ms()->unsubscribe(this); _srs_hybrid->timer100ms()->unsubscribe(this);
#ifdef SRS_FFMPEG_FIT
frame_builder_->on_unpublish();
srs_freep(frame_builder_);
#endif
bridge_->on_unpublish(); bridge_->on_unpublish();
srs_freep(bridge_); srs_freep(bridge_);
} }
@ -636,9 +650,11 @@ srs_error_t SrsRtcSource::on_rtp(SrsRtpPacket* pkt)
} }
} }
if (bridge_ && (err = bridge_->on_rtp(pkt)) != srs_success) { #ifdef SRS_FFMPEG_FIT
return srs_error_wrap(err, "bridge consume message"); if (frame_builder_ && (err = frame_builder_->on_rtp(pkt)) != srs_success) {
return srs_error_wrap(err, "frame builder consume packet");
} }
#endif
return err; return err;
} }
@ -712,85 +728,56 @@ srs_error_t SrsRtcSource::on_timer(srs_utime_t interval)
#ifdef SRS_FFMPEG_FIT #ifdef SRS_FFMPEG_FIT
SrsRtcFromRtmpBridge::SrsRtcFromRtmpBridge(SrsRtcSource* source) SrsRtcRtpBuilder::SrsRtcRtpBuilder(SrsFrameToRtcBridge* bridge, uint32_t assrc, uint8_t apt, uint32_t vssrc, uint8_t vpt)
{ {
req = NULL; req = NULL;
source_ = source; bridge_ = bridge;
format = new SrsRtmpFormat(); format = new SrsRtmpFormat();
codec_ = new SrsAudioTranscoder(); codec_ = new SrsAudioTranscoder();
latest_codec_ = SrsAudioCodecIdForbidden; latest_codec_ = SrsAudioCodecIdForbidden;
rtmp_to_rtc = false;
keep_bframe = false; keep_bframe = false;
merge_nalus = false; merge_nalus = false;
meta = new SrsMetaCache(); meta = new SrsMetaCache();
audio_sequence = 0; audio_sequence = 0;
video_sequence = 0; video_sequence = 0;
// audio track ssrc audio_ssrc_ = assrc;
if (true) { audio_payload_type_ = apt;
std::vector<SrsRtcTrackDescription*> descs = source->get_track_desc("audio", "opus"); video_ssrc_ = vssrc;
if (!descs.empty()) { video_payload_type_ = vpt;
audio_ssrc = descs.at(0)->ssrc_;
}
// Note we must use the PT of source, see https://github.com/ossrs/srs/pull/3079
audio_payload_type_ = descs.empty() ? kAudioPayloadType : descs.front()->media_->pt_;
}
// video track ssrc
if (true) {
std::vector<SrsRtcTrackDescription*> descs = source->get_track_desc("video", "H264");
if (!descs.empty()) {
video_ssrc = descs.at(0)->ssrc_;
}
// Note we must use the PT of source, see https://github.com/ossrs/srs/pull/3079
video_payload_type_ = descs.empty() ? kVideoPayloadType : descs.front()->media_->pt_;
}
} }
SrsRtcFromRtmpBridge::~SrsRtcFromRtmpBridge() SrsRtcRtpBuilder::~SrsRtcRtpBuilder()
{ {
srs_freep(format); srs_freep(format);
srs_freep(codec_); srs_freep(codec_);
srs_freep(meta); srs_freep(meta);
} }
srs_error_t SrsRtcFromRtmpBridge::initialize(SrsRequest* r) srs_error_t SrsRtcRtpBuilder::initialize(SrsRequest* r)
{ {
srs_error_t err = srs_success; srs_error_t err = srs_success;
req = r; req = r;
rtmp_to_rtc = _srs_config->get_rtc_from_rtmp(req->vhost);
if (rtmp_to_rtc) {
if ((err = format->initialize()) != srs_success) { if ((err = format->initialize()) != srs_success) {
return srs_error_wrap(err, "format initialize"); return srs_error_wrap(err, "format initialize");
} }
// Setup the SPS/PPS parsing strategy. // Setup the SPS/PPS parsing strategy.
format->try_annexb_first = _srs_config->try_annexb_first(r->vhost); format->try_annexb_first = _srs_config->try_annexb_first(r->vhost);
}
keep_bframe = _srs_config->get_rtc_keep_bframe(req->vhost); keep_bframe = _srs_config->get_rtc_keep_bframe(req->vhost);
merge_nalus = _srs_config->get_rtc_server_merge_nalus(); merge_nalus = _srs_config->get_rtc_server_merge_nalus();
srs_trace("RTC bridge from RTMP, rtmp2rtc=%d, keep_bframe=%d, merge_nalus=%d", srs_trace("RTC bridge from RTMP, keep_bframe=%d, merge_nalus=%d", keep_bframe, merge_nalus);
rtmp_to_rtc, keep_bframe, merge_nalus);
return err; return err;
} }
srs_error_t SrsRtcFromRtmpBridge::on_publish() srs_error_t SrsRtcRtpBuilder::on_publish()
{ {
srs_error_t err = srs_success; srs_error_t err = srs_success;
if (!rtmp_to_rtc) {
return err;
}
// TODO: FIXME: Should sync with bridge?
if ((err = source_->on_publish()) != srs_success) {
return srs_error_wrap(err, "source publish");
}
// Reset the metadata cache, to make VLC happy when disable/enable stream. // Reset the metadata cache, to make VLC happy when disable/enable stream.
// @see https://github.com/ossrs/srs/issues/1630#issuecomment-597979448 // @see https://github.com/ossrs/srs/issues/1630#issuecomment-597979448
meta->clear(); meta->clear();
@ -798,30 +785,28 @@ srs_error_t SrsRtcFromRtmpBridge::on_publish()
return err; return err;
} }
void SrsRtcFromRtmpBridge::on_unpublish() void SrsRtcRtpBuilder::on_unpublish()
{ {
if (!rtmp_to_rtc) {
return;
}
// Reset the metadata cache, to make VLC happy when disable/enable stream. // Reset the metadata cache, to make VLC happy when disable/enable stream.
// @see https://github.com/ossrs/srs/issues/1630#issuecomment-597979448 // @see https://github.com/ossrs/srs/issues/1630#issuecomment-597979448
meta->update_previous_vsh(); meta->update_previous_vsh();
meta->update_previous_ash(); meta->update_previous_ash();
// @remark This bridge might be disposed here, so never use it.
// TODO: FIXME: Should sync with bridge?
source_->on_unpublish();
} }
srs_error_t SrsRtcFromRtmpBridge::on_audio(SrsSharedPtrMessage* msg) srs_error_t SrsRtcRtpBuilder::on_frame(SrsSharedPtrMessage* frame)
{
if (frame->is_audio()) {
return on_audio(frame);
} else if (frame->is_video()) {
return on_video(frame);
}
return srs_success;
}
srs_error_t SrsRtcRtpBuilder::on_audio(SrsSharedPtrMessage* msg)
{ {
srs_error_t err = srs_success; srs_error_t err = srs_success;
if (!rtmp_to_rtc) {
return err;
}
// TODO: FIXME: Support parsing OPUS for RTC. // TODO: FIXME: Support parsing OPUS for RTC.
if ((err = format->on_audio(msg)) != srs_success) { if ((err = format->on_audio(msg)) != srs_success) {
return srs_error_wrap(err, "format consume audio"); return srs_error_wrap(err, "format consume audio");
@ -880,7 +865,7 @@ srs_error_t SrsRtcFromRtmpBridge::on_audio(SrsSharedPtrMessage* msg)
return err; return err;
} }
srs_error_t SrsRtcFromRtmpBridge::init_codec(SrsAudioCodecId codec) srs_error_t SrsRtcRtpBuilder::init_codec(SrsAudioCodecId codec)
{ {
srs_error_t err = srs_success; srs_error_t err = srs_success;
@ -909,11 +894,11 @@ srs_error_t SrsRtcFromRtmpBridge::init_codec(SrsAudioCodecId codec)
return err; return err;
} }
srs_error_t SrsRtcFromRtmpBridge::transcode(SrsAudioFrame* audio) srs_error_t SrsRtcRtpBuilder::transcode(SrsAudioFrame* audio)
{ {
srs_error_t err = srs_success; srs_error_t err = srs_success;
std::vector<SrsAudioFrame *> out_audios; std::vector<SrsAudioFrame*> out_audios;
if ((err = codec_->transcode(audio, out_audios)) != srs_success) { if ((err = codec_->transcode(audio, out_audios)) != srs_success) {
return srs_error_wrap(err, "recode error"); return srs_error_wrap(err, "recode error");
} }
@ -934,7 +919,7 @@ srs_error_t SrsRtcFromRtmpBridge::transcode(SrsAudioFrame* audio)
break; break;
} }
if ((err = source_->on_rtp(pkt)) != srs_success) { if ((err = bridge_->on_rtp(pkt)) != srs_success) {
err = srs_error_wrap(err, "consume opus"); err = srs_error_wrap(err, "consume opus");
break; break;
} }
@ -945,12 +930,12 @@ srs_error_t SrsRtcFromRtmpBridge::transcode(SrsAudioFrame* audio)
return err; return err;
} }
srs_error_t SrsRtcFromRtmpBridge::package_opus(SrsAudioFrame* audio, SrsRtpPacket* pkt) srs_error_t SrsRtcRtpBuilder::package_opus(SrsAudioFrame* audio, SrsRtpPacket* pkt)
{ {
srs_error_t err = srs_success; srs_error_t err = srs_success;
pkt->header.set_payload_type(audio_payload_type_); pkt->header.set_payload_type(audio_payload_type_);
pkt->header.set_ssrc(audio_ssrc); pkt->header.set_ssrc(audio_ssrc_);
pkt->frame_type = SrsFrameTypeAudio; pkt->frame_type = SrsFrameTypeAudio;
pkt->header.set_marker(true); pkt->header.set_marker(true);
pkt->header.set_sequence(audio_sequence++); pkt->header.set_sequence(audio_sequence++);
@ -966,21 +951,10 @@ srs_error_t SrsRtcFromRtmpBridge::package_opus(SrsAudioFrame* audio, SrsRtpPacke
return err; return err;
} }
srs_error_t SrsRtcFromRtmpBridge::on_video(SrsSharedPtrMessage* msg) srs_error_t SrsRtcRtpBuilder::on_video(SrsSharedPtrMessage* msg)
{ {
srs_error_t err = srs_success; srs_error_t err = srs_success;
if (!rtmp_to_rtc) {
return err;
}
// WebRTC NOT support HEVC.
#ifdef SRS_H265
if (format->vcodec->id == SrsVideoCodecIdHEVC) {
return err;
}
#endif
// cache the sequence header if h264 // cache the sequence header if h264
bool is_sequence_header = SrsFlvVideo::sh(msg->payload, msg->size); bool is_sequence_header = SrsFlvVideo::sh(msg->payload, msg->size);
if (is_sequence_header && (err = meta->update_vsh(msg)) != srs_success) { if (is_sequence_header && (err = meta->update_vsh(msg)) != srs_success) {
@ -997,6 +971,13 @@ srs_error_t SrsRtcFromRtmpBridge::on_video(SrsSharedPtrMessage* msg)
return err; return err;
} }
// WebRTC does NOT support HEVC.
#ifdef SRS_H265
if (format->vcodec->id == SrsVideoCodecIdHEVC) {
return err;
}
#endif
bool has_idr = false; bool has_idr = false;
vector<SrsSample*> samples; vector<SrsSample*> samples;
if ((err = filter(msg, format, has_idr, samples)) != srs_success) { if ((err = filter(msg, format, has_idr, samples)) != srs_success) {
@ -1009,11 +990,11 @@ srs_error_t SrsRtcFromRtmpBridge::on_video(SrsSharedPtrMessage* msg)
SrsRtpPacket* pkt = new SrsRtpPacket(); SrsRtpPacket* pkt = new SrsRtpPacket();
SrsAutoFree(SrsRtpPacket, pkt); SrsAutoFree(SrsRtpPacket, pkt);
if ((err = package_stap_a(source_, msg, pkt)) != srs_success) { if ((err = package_stap_a(msg, pkt)) != srs_success) {
return srs_error_wrap(err, "package stap-a"); return srs_error_wrap(err, "package stap-a");
} }
if ((err = source_->on_rtp(pkt)) != srs_success) { if ((err = bridge_->on_rtp(pkt)) != srs_success) {
return srs_error_wrap(err, "consume sps/pps"); return srs_error_wrap(err, "consume sps/pps");
} }
} }
@ -1054,7 +1035,7 @@ srs_error_t SrsRtcFromRtmpBridge::on_video(SrsSharedPtrMessage* msg)
return consume_packets(pkts); return consume_packets(pkts);
} }
srs_error_t SrsRtcFromRtmpBridge::filter(SrsSharedPtrMessage* msg, SrsFormat* format, bool& has_idr, vector<SrsSample*>& samples) srs_error_t SrsRtcRtpBuilder::filter(SrsSharedPtrMessage* msg, SrsFormat* format, bool& has_idr, vector<SrsSample*>& samples)
{ {
srs_error_t err = srs_success; srs_error_t err = srs_success;
@ -1084,7 +1065,7 @@ srs_error_t SrsRtcFromRtmpBridge::filter(SrsSharedPtrMessage* msg, SrsFormat* fo
return err; return err;
} }
srs_error_t SrsRtcFromRtmpBridge::package_stap_a(SrsRtcSource* source, SrsSharedPtrMessage* msg, SrsRtpPacket* pkt) srs_error_t SrsRtcRtpBuilder::package_stap_a(SrsSharedPtrMessage* msg, SrsRtpPacket* pkt)
{ {
srs_error_t err = srs_success; srs_error_t err = srs_success;
@ -1101,7 +1082,7 @@ srs_error_t SrsRtcFromRtmpBridge::package_stap_a(SrsRtcSource* source, SrsShared
} }
pkt->header.set_payload_type(video_payload_type_); pkt->header.set_payload_type(video_payload_type_);
pkt->header.set_ssrc(video_ssrc); pkt->header.set_ssrc(video_ssrc_);
pkt->frame_type = SrsFrameTypeVideo; pkt->frame_type = SrsFrameTypeVideo;
pkt->nalu_type = (SrsAvcNaluType)kStapA; pkt->nalu_type = (SrsAvcNaluType)kStapA;
pkt->header.set_marker(false); pkt->header.set_marker(false);
@ -1143,7 +1124,7 @@ srs_error_t SrsRtcFromRtmpBridge::package_stap_a(SrsRtcSource* source, SrsShared
return err; return err;
} }
srs_error_t SrsRtcFromRtmpBridge::package_nalus(SrsSharedPtrMessage* msg, const vector<SrsSample*>& samples, vector<SrsRtpPacket*>& pkts) srs_error_t SrsRtcRtpBuilder::package_nalus(SrsSharedPtrMessage* msg, const vector<SrsSample*>& samples, vector<SrsRtpPacket*>& pkts)
{ {
srs_error_t err = srs_success; srs_error_t err = srs_success;
@ -1183,7 +1164,7 @@ srs_error_t SrsRtcFromRtmpBridge::package_nalus(SrsSharedPtrMessage* msg, const
pkts.push_back(pkt); pkts.push_back(pkt);
pkt->header.set_payload_type(video_payload_type_); pkt->header.set_payload_type(video_payload_type_);
pkt->header.set_ssrc(video_ssrc); pkt->header.set_ssrc(video_ssrc_);
pkt->frame_type = SrsFrameTypeVideo; pkt->frame_type = SrsFrameTypeVideo;
pkt->nalu_type = (SrsAvcNaluType)first_nalu_type; pkt->nalu_type = (SrsAvcNaluType)first_nalu_type;
pkt->header.set_sequence(video_sequence++); pkt->header.set_sequence(video_sequence++);
@ -1217,7 +1198,7 @@ srs_error_t SrsRtcFromRtmpBridge::package_nalus(SrsSharedPtrMessage* msg, const
pkts.push_back(pkt); pkts.push_back(pkt);
pkt->header.set_payload_type(video_payload_type_); pkt->header.set_payload_type(video_payload_type_);
pkt->header.set_ssrc(video_ssrc); pkt->header.set_ssrc(video_ssrc_);
pkt->frame_type = SrsFrameTypeVideo; pkt->frame_type = SrsFrameTypeVideo;
pkt->nalu_type = (SrsAvcNaluType)kFuA; pkt->nalu_type = (SrsAvcNaluType)kFuA;
pkt->header.set_sequence(video_sequence++); pkt->header.set_sequence(video_sequence++);
@ -1239,7 +1220,7 @@ srs_error_t SrsRtcFromRtmpBridge::package_nalus(SrsSharedPtrMessage* msg, const
} }
// Single NAL Unit Packet @see https://tools.ietf.org/html/rfc6184#section-5.6 // Single NAL Unit Packet @see https://tools.ietf.org/html/rfc6184#section-5.6
srs_error_t SrsRtcFromRtmpBridge::package_single_nalu(SrsSharedPtrMessage* msg, SrsSample* sample, vector<SrsRtpPacket*>& pkts) srs_error_t SrsRtcRtpBuilder::package_single_nalu(SrsSharedPtrMessage* msg, SrsSample* sample, vector<SrsRtpPacket*>& pkts)
{ {
srs_error_t err = srs_success; srs_error_t err = srs_success;
@ -1247,7 +1228,7 @@ srs_error_t SrsRtcFromRtmpBridge::package_single_nalu(SrsSharedPtrMessage* msg,
pkts.push_back(pkt); pkts.push_back(pkt);
pkt->header.set_payload_type(video_payload_type_); pkt->header.set_payload_type(video_payload_type_);
pkt->header.set_ssrc(video_ssrc); pkt->header.set_ssrc(video_ssrc_);
pkt->frame_type = SrsFrameTypeVideo; pkt->frame_type = SrsFrameTypeVideo;
pkt->header.set_sequence(video_sequence++); pkt->header.set_sequence(video_sequence++);
pkt->header.set_timestamp(msg->timestamp * 90); pkt->header.set_timestamp(msg->timestamp * 90);
@ -1263,7 +1244,7 @@ srs_error_t SrsRtcFromRtmpBridge::package_single_nalu(SrsSharedPtrMessage* msg,
return err; return err;
} }
srs_error_t SrsRtcFromRtmpBridge::package_fu_a(SrsSharedPtrMessage* msg, SrsSample* sample, int fu_payload_size, vector<SrsRtpPacket*>& pkts) srs_error_t SrsRtcRtpBuilder::package_fu_a(SrsSharedPtrMessage* msg, SrsSample* sample, int fu_payload_size, vector<SrsRtpPacket*>& pkts)
{ {
srs_error_t err = srs_success; srs_error_t err = srs_success;
@ -1280,7 +1261,7 @@ srs_error_t SrsRtcFromRtmpBridge::package_fu_a(SrsSharedPtrMessage* msg, SrsSamp
pkts.push_back(pkt); pkts.push_back(pkt);
pkt->header.set_payload_type(video_payload_type_); pkt->header.set_payload_type(video_payload_type_);
pkt->header.set_ssrc(video_ssrc); pkt->header.set_ssrc(video_ssrc_);
pkt->frame_type = SrsFrameTypeVideo; pkt->frame_type = SrsFrameTypeVideo;
pkt->header.set_sequence(video_sequence++); pkt->header.set_sequence(video_sequence++);
pkt->header.set_timestamp(msg->timestamp * 90); pkt->header.set_timestamp(msg->timestamp * 90);
@ -1305,14 +1286,14 @@ srs_error_t SrsRtcFromRtmpBridge::package_fu_a(SrsSharedPtrMessage* msg, SrsSamp
return err; return err;
} }
srs_error_t SrsRtcFromRtmpBridge::consume_packets(vector<SrsRtpPacket*>& pkts) srs_error_t SrsRtcRtpBuilder::consume_packets(vector<SrsRtpPacket*>& pkts)
{ {
srs_error_t err = srs_success; srs_error_t err = srs_success;
// TODO: FIXME: Consume a range of packets. // TODO: FIXME: Consume a range of packets.
for (int i = 0; i < (int)pkts.size(); i++) { for (int i = 0; i < (int)pkts.size(); i++) {
SrsRtpPacket* pkt = pkts[i]; SrsRtpPacket* pkt = pkts[i];
if ((err = source_->on_rtp(pkt)) != srs_success) { if ((err = bridge_->on_rtp(pkt)) != srs_success) {
err = srs_error_wrap(err, "consume sps/pps"); err = srs_error_wrap(err, "consume sps/pps");
break; break;
} }
@ -1326,31 +1307,28 @@ srs_error_t SrsRtcFromRtmpBridge::consume_packets(vector<SrsRtpPacket*>& pkts)
return err; return err;
} }
SrsRtmpFromRtcBridge::SrsRtmpFromRtcBridge(SrsLiveSource *src) SrsRtcFrameBuilder::SrsRtcFrameBuilder(ISrsStreamBridge* bridge)
{ {
source_ = src; bridge_ = bridge;
is_first_audio_ = true;
codec_ = NULL; codec_ = NULL;
is_first_audio = true;
is_first_video = true;
format = NULL;
rtp_key_frame_ts_ = -1;
header_sn_ = 0; header_sn_ = 0;
memset(cache_video_pkts_, 0, sizeof(cache_video_pkts_)); memset(cache_video_pkts_, 0, sizeof(cache_video_pkts_));
rtp_key_frame_ts_ = -1;
} }
SrsRtmpFromRtcBridge::~SrsRtmpFromRtcBridge() SrsRtcFrameBuilder::~SrsRtcFrameBuilder()
{ {
srs_freep(codec_); srs_freep(codec_);
srs_freep(format);
clear_cached_video(); clear_cached_video();
} }
srs_error_t SrsRtmpFromRtcBridge::initialize(SrsRequest* r) srs_error_t SrsRtcFrameBuilder::initialize(SrsRequest* r)
{ {
srs_error_t err = srs_success; srs_error_t err = srs_success;
srs_freep(codec_);
codec_ = new SrsAudioTranscoder(); codec_ = new SrsAudioTranscoder();
format = new SrsRtmpFormat();
SrsAudioCodecId from = SrsAudioCodecIdOpus; // TODO: From SDP? SrsAudioCodecId from = SrsAudioCodecIdOpus; // TODO: From SDP?
SrsAudioCodecId to = SrsAudioCodecIdAAC; // The output audio codec. SrsAudioCodecId to = SrsAudioCodecIdAAC; // The output audio codec.
@ -1361,32 +1339,21 @@ srs_error_t SrsRtmpFromRtcBridge::initialize(SrsRequest* r)
return srs_error_wrap(err, "bridge initialize"); return srs_error_wrap(err, "bridge initialize");
} }
if ((err = format->initialize()) != srs_success) {
return srs_error_wrap(err, "format initialize");
}
// Setup the SPS/PPS parsing strategy.
format->try_annexb_first = _srs_config->try_annexb_first(r->vhost);
return err; return err;
} }
srs_error_t SrsRtmpFromRtcBridge::on_publish() srs_error_t SrsRtcFrameBuilder::on_publish()
{ {
srs_error_t err = srs_success; is_first_audio_ = true;
is_first_audio = true; return srs_success;
is_first_video = true;
// TODO: FIXME: Should sync with bridge?
if ((err = source_->on_publish()) != srs_success) {
return srs_error_wrap(err, "source publish");
}
return err;
} }
srs_error_t SrsRtmpFromRtcBridge::on_rtp(SrsRtpPacket *pkt) void SrsRtcFrameBuilder::on_unpublish()
{
}
srs_error_t SrsRtcFrameBuilder::on_rtp(SrsRtpPacket *pkt)
{ {
srs_error_t err = srs_success; srs_error_t err = srs_success;
@ -1409,35 +1376,34 @@ srs_error_t SrsRtmpFromRtcBridge::on_rtp(SrsRtpPacket *pkt)
return err; return err;
} }
void SrsRtmpFromRtcBridge::on_unpublish() srs_error_t SrsRtcFrameBuilder::transcode_audio(SrsRtpPacket *pkt)
{
// TODO: FIXME: Should sync with bridge?
source_->on_unpublish();
}
srs_error_t SrsRtmpFromRtcBridge::transcode_audio(SrsRtpPacket *pkt)
{ {
srs_error_t err = srs_success; srs_error_t err = srs_success;
// to common message. // to common message.
uint32_t ts = pkt->get_avsync_time(); uint32_t ts = pkt->get_avsync_time();
if (is_first_audio) { if (is_first_audio_) {
int header_len = 0; int header_len = 0;
uint8_t* header = NULL; uint8_t* header = NULL;
codec_->aac_codec_header(&header, &header_len); codec_->aac_codec_header(&header, &header_len);
SrsCommonMessage out_rtmp; SrsCommonMessage out_rtmp;
packet_aac(&out_rtmp, (char *)header, header_len, ts, is_first_audio); packet_aac(&out_rtmp, (char *)header, header_len, ts, is_first_audio_);
if ((err = source_->on_audio(&out_rtmp)) != srs_success) { SrsSharedPtrMessage msg;
if ((err = msg.create(&out_rtmp)) != srs_success) {
return srs_error_wrap(err, "create message");
}
if ((err = bridge_->on_frame(&msg)) != srs_success) {
return srs_error_wrap(err, "source on audio"); return srs_error_wrap(err, "source on audio");
} }
is_first_audio = false; is_first_audio_ = false;
} }
std::vector<SrsAudioFrame *> out_pkts; std::vector<SrsAudioFrame*> out_pkts;
SrsRtpRawPayload *payload = dynamic_cast<SrsRtpRawPayload *>(pkt->payload()); SrsRtpRawPayload *payload = dynamic_cast<SrsRtpRawPayload*>(pkt->payload());
SrsAudioFrame frame; SrsAudioFrame frame;
frame.add_sample(payload->payload, payload->nn_payload); frame.add_sample(payload->payload, payload->nn_payload);
@ -1449,12 +1415,17 @@ srs_error_t SrsRtmpFromRtcBridge::transcode_audio(SrsRtpPacket *pkt)
return err; return err;
} }
for (std::vector<SrsAudioFrame *>::iterator it = out_pkts.begin(); it != out_pkts.end(); ++it) { for (std::vector<SrsAudioFrame*>::iterator it = out_pkts.begin(); it != out_pkts.end(); ++it) {
SrsCommonMessage out_rtmp; SrsCommonMessage out_rtmp;
out_rtmp.header.timestamp = (*it)->dts; out_rtmp.header.timestamp = (*it)->dts;
packet_aac(&out_rtmp, (*it)->samples[0].bytes, (*it)->samples[0].size, ts, is_first_audio); packet_aac(&out_rtmp, (*it)->samples[0].bytes, (*it)->samples[0].size, ts, is_first_audio_);
if ((err = source_->on_audio(&out_rtmp)) != srs_success) { SrsSharedPtrMessage msg;
if ((err = msg.create(&out_rtmp)) != srs_success) {
return srs_error_wrap(err, "create message");
}
if ((err = bridge_->on_frame(&msg)) != srs_success) {
err = srs_error_wrap(err, "source on audio"); err = srs_error_wrap(err, "source on audio");
break; break;
} }
@ -1464,7 +1435,7 @@ srs_error_t SrsRtmpFromRtcBridge::transcode_audio(SrsRtpPacket *pkt)
return err; return err;
} }
void SrsRtmpFromRtcBridge::packet_aac(SrsCommonMessage* audio, char* data, int len, uint32_t pts, bool is_header) void SrsRtcFrameBuilder::packet_aac(SrsCommonMessage* audio, char* data, int len, uint32_t pts, bool is_header)
{ {
int rtmp_len = len + 2; int rtmp_len = len + 2;
audio->header.initialize_audio(rtmp_len, pts, 1); audio->header.initialize_audio(rtmp_len, pts, 1);
@ -1481,7 +1452,7 @@ void SrsRtmpFromRtcBridge::packet_aac(SrsCommonMessage* audio, char* data, int l
audio->size = rtmp_len; audio->size = rtmp_len;
} }
srs_error_t SrsRtmpFromRtcBridge::packet_video(SrsRtpPacket* src) srs_error_t SrsRtcFrameBuilder::packet_video(SrsRtpPacket* src)
{ {
srs_error_t err = srs_success; srs_error_t err = srs_success;
@ -1521,7 +1492,7 @@ srs_error_t SrsRtmpFromRtcBridge::packet_video(SrsRtpPacket* src)
return err; return err;
} }
srs_error_t SrsRtmpFromRtcBridge::packet_video_key_frame(SrsRtpPacket* pkt) srs_error_t SrsRtcFrameBuilder::packet_video_key_frame(SrsRtpPacket* pkt)
{ {
srs_error_t err = srs_success; srs_error_t err = srs_success;
@ -1557,7 +1528,12 @@ srs_error_t SrsRtmpFromRtcBridge::packet_video_key_frame(SrsRtpPacket* pkt)
return srs_error_wrap(err, "create rtmp"); return srs_error_wrap(err, "create rtmp");
} }
if ((err = source_->on_video(&rtmp)) != srs_success) { SrsSharedPtrMessage msg;
if ((err = msg.create(&rtmp)) != srs_success) {
return srs_error_wrap(err, "create message");
}
if ((err = bridge_->on_frame(&msg)) != srs_success) {
return err; return err;
} }
} }
@ -1616,7 +1592,7 @@ srs_error_t SrsRtmpFromRtcBridge::packet_video_key_frame(SrsRtpPacket* pkt)
return err; return err;
} }
srs_error_t SrsRtmpFromRtcBridge::packet_video_rtmp(const uint16_t start, const uint16_t end) srs_error_t SrsRtcFrameBuilder::packet_video_rtmp(const uint16_t start, const uint16_t end)
{ {
srs_error_t err = srs_success; srs_error_t err = srs_success;
@ -1745,7 +1721,12 @@ srs_error_t SrsRtmpFromRtcBridge::packet_video_rtmp(const uint16_t start, const
srs_freep(pkt); srs_freep(pkt);
} }
if ((err = source_->on_video(&rtmp)) != srs_success) { SrsSharedPtrMessage msg;
if ((err = msg.create(&rtmp)) != srs_success) {
return srs_error_wrap(err, "create message");
}
if ((err = bridge_->on_frame(&msg)) != srs_success) {
srs_warn("fail to pack video frame"); srs_warn("fail to pack video frame");
} }
@ -1765,7 +1746,7 @@ srs_error_t SrsRtmpFromRtcBridge::packet_video_rtmp(const uint16_t start, const
return err; return err;
} }
int32_t SrsRtmpFromRtcBridge::find_next_lost_sn(uint16_t current_sn, uint16_t& end_sn) int32_t SrsRtcFrameBuilder::find_next_lost_sn(uint16_t current_sn, uint16_t& end_sn)
{ {
uint32_t last_rtp_ts = cache_video_pkts_[cache_index(header_sn_)].rtp_ts; uint32_t last_rtp_ts = cache_video_pkts_[cache_index(header_sn_)].rtp_ts;
for (int i = 0; i < s_cache_size; ++i) { for (int i = 0; i < s_cache_size; ++i) {
@ -1791,7 +1772,7 @@ int32_t SrsRtmpFromRtcBridge::find_next_lost_sn(uint16_t current_sn, uint16_t& e
return -2; return -2;
} }
void SrsRtmpFromRtcBridge::clear_cached_video() void SrsRtcFrameBuilder::clear_cached_video()
{ {
for (size_t i = 0; i < s_cache_size; i++) for (size_t i = 0; i < s_cache_size; i++)
{ {
@ -1805,7 +1786,7 @@ void SrsRtmpFromRtcBridge::clear_cached_video()
} }
} }
bool SrsRtmpFromRtcBridge::check_frame_complete(const uint16_t start, const uint16_t end) bool SrsRtcFrameBuilder::check_frame_complete(const uint16_t start, const uint16_t end)
{ {
int16_t cnt = srs_rtp_seq_distance(start, end) + 1; int16_t cnt = srs_rtp_seq_distance(start, end) + 1;
srs_assert(cnt >= 1); srs_assert(cnt >= 1);
@ -1833,6 +1814,7 @@ bool SrsRtmpFromRtcBridge::check_frame_complete(const uint16_t start, const uint
return fu_s_c == fu_e_c; return fu_s_c == fu_e_c;
} }
#endif #endif
SrsCodecPayload::SrsCodecPayload() SrsCodecPayload::SrsCodecPayload()

View file

@ -18,8 +18,10 @@
#include <srs_app_rtc_sdp.hpp> #include <srs_app_rtc_sdp.hpp>
#include <srs_protocol_st.hpp> #include <srs_protocol_st.hpp>
#include <srs_app_source.hpp>
#include <srs_kernel_rtc_rtp.hpp> #include <srs_kernel_rtc_rtp.hpp>
#include <srs_app_hourglass.hpp>
#include <srs_protocol_format.hpp>
#include <srs_app_stream_bridge.hpp>
class SrsRequest; class SrsRequest;
class SrsMetaCache; class SrsMetaCache;
@ -27,7 +29,7 @@ class SrsSharedPtrMessage;
class SrsCommonMessage; class SrsCommonMessage;
class SrsMessageArray; class SrsMessageArray;
class SrsRtcSource; class SrsRtcSource;
class SrsRtcFromRtmpBridge; class SrsFrameToRtcBridge;
class SrsAudioTranscoder; class SrsAudioTranscoder;
class SrsRtpPacket; class SrsRtpPacket;
class SrsSample; class SrsSample;
@ -38,6 +40,13 @@ class SrsRtpRingBuffer;
class SrsRtpNackForReceiver; class SrsRtpNackForReceiver;
class SrsJsonObject; class SrsJsonObject;
class SrsErrorPithyPrint; class SrsErrorPithyPrint;
class SrsRtcFrameBuilder;
class SrsLiveSource;
// Firefox defaults as 109, Chrome is 111.
const int kAudioPayloadType = 111;
// Firefox defaults as 126, Chrome is 102.
const int kVideoPayloadType = 102;
class SrsNtp class SrsNtp
{ {
@ -145,18 +154,6 @@ public:
virtual void on_consumers_finished() = 0; virtual void on_consumers_finished() = 0;
}; };
// SrsRtcSource bridge to SrsLiveSource
class ISrsRtcSourceBridge
{
public:
ISrsRtcSourceBridge();
virtual ~ISrsRtcSourceBridge();
public:
virtual srs_error_t on_publish() = 0;
virtual srs_error_t on_rtp(SrsRtpPacket *pkt) = 0;
virtual void on_unpublish() = 0;
};
// A Source is a stream, to publish and to play with, binding to SrsRtcPublishStream and SrsRtcPlayStream. // A Source is a stream, to publish and to play with, binding to SrsRtcPublishStream and SrsRtcPlayStream.
class SrsRtcSource : public ISrsFastTimer class SrsRtcSource : public ISrsFastTimer
{ {
@ -172,8 +169,13 @@ private:
ISrsRtcPublishStream* publish_stream_; ISrsRtcPublishStream* publish_stream_;
// Steam description for this steam. // Steam description for this steam.
SrsRtcSourceDescription* stream_desc_; SrsRtcSourceDescription* stream_desc_;
private:
#ifdef SRS_FFMPEG_FIT
// Collect and build WebRTC RTP packets to AV frames.
SrsRtcFrameBuilder* frame_builder_;
#endif
// The Source bridge, bridge stream to other source. // The Source bridge, bridge stream to other source.
ISrsRtcSourceBridge* bridge_; ISrsStreamBridge* bridge_;
private: private:
// To delivery stream to clients. // To delivery stream to clients.
std::vector<SrsRtcConsumer*> consumers; std::vector<SrsRtcConsumer*> consumers;
@ -205,7 +207,7 @@ public:
virtual SrsContextId source_id(); virtual SrsContextId source_id();
virtual SrsContextId pre_source_id(); virtual SrsContextId pre_source_id();
public: public:
void set_bridge(ISrsRtcSourceBridge *bridge); void set_bridge(ISrsStreamBridge* bridge);
public: public:
// Create consumer // Create consumer
// @param consumer, output the create consumer. // @param consumer, output the create consumer.
@ -245,60 +247,64 @@ private:
}; };
#ifdef SRS_FFMPEG_FIT #ifdef SRS_FFMPEG_FIT
class SrsRtcFromRtmpBridge : public ISrsLiveSourceBridge
// Convert AV frame to RTC RTP packets.
class SrsRtcRtpBuilder
{ {
private: private:
SrsRequest* req; SrsRequest* req;
SrsRtcSource* source_; SrsFrameToRtcBridge* bridge_;
// The format, codec information. // The format, codec information.
SrsRtmpFormat* format; SrsRtmpFormat* format;
// The metadata cache. // The metadata cache.
SrsMetaCache* meta; SrsMetaCache* meta;
private: private:
bool rtmp_to_rtc;
SrsAudioCodecId latest_codec_; SrsAudioCodecId latest_codec_;
SrsAudioTranscoder* codec_; SrsAudioTranscoder* codec_;
bool keep_bframe; bool keep_bframe;
bool merge_nalus; bool merge_nalus;
uint16_t audio_sequence; uint16_t audio_sequence;
uint16_t video_sequence; uint16_t video_sequence;
uint32_t audio_ssrc; private:
uint32_t video_ssrc; uint32_t audio_ssrc_;
uint32_t video_ssrc_;
uint8_t audio_payload_type_; uint8_t audio_payload_type_;
uint8_t video_payload_type_; uint8_t video_payload_type_;
public: public:
SrsRtcFromRtmpBridge(SrsRtcSource* source); SrsRtcRtpBuilder(SrsFrameToRtcBridge* bridge, uint32_t assrc, uint8_t apt, uint32_t vssrc, uint8_t vpt);
virtual ~SrsRtcFromRtmpBridge(); virtual ~SrsRtcRtpBuilder();
public: public:
virtual srs_error_t initialize(SrsRequest* r); virtual srs_error_t initialize(SrsRequest* r);
virtual srs_error_t on_publish(); virtual srs_error_t on_publish();
virtual void on_unpublish(); virtual void on_unpublish();
virtual srs_error_t on_frame(SrsSharedPtrMessage* frame);
private:
virtual srs_error_t on_audio(SrsSharedPtrMessage* msg); virtual srs_error_t on_audio(SrsSharedPtrMessage* msg);
private: private:
srs_error_t init_codec(SrsAudioCodecId codec); srs_error_t init_codec(SrsAudioCodecId codec);
srs_error_t transcode(SrsAudioFrame* audio); srs_error_t transcode(SrsAudioFrame* audio);
srs_error_t package_opus(SrsAudioFrame* audio, SrsRtpPacket* pkt); srs_error_t package_opus(SrsAudioFrame* audio, SrsRtpPacket* pkt);
public: private:
virtual srs_error_t on_video(SrsSharedPtrMessage* msg); virtual srs_error_t on_video(SrsSharedPtrMessage* msg);
private: private:
srs_error_t filter(SrsSharedPtrMessage* msg, SrsFormat* format, bool& has_idr, std::vector<SrsSample*>& samples); srs_error_t filter(SrsSharedPtrMessage* msg, SrsFormat* format, bool& has_idr, std::vector<SrsSample*>& samples);
srs_error_t package_stap_a(SrsRtcSource* source, SrsSharedPtrMessage* msg, SrsRtpPacket* pkt); srs_error_t package_stap_a(SrsSharedPtrMessage* msg, SrsRtpPacket* pkt);
srs_error_t package_nalus(SrsSharedPtrMessage* msg, const std::vector<SrsSample*>& samples, std::vector<SrsRtpPacket*>& pkts); srs_error_t package_nalus(SrsSharedPtrMessage* msg, const std::vector<SrsSample*>& samples, std::vector<SrsRtpPacket*>& pkts);
srs_error_t package_single_nalu(SrsSharedPtrMessage* msg, SrsSample* sample, std::vector<SrsRtpPacket*>& pkts); srs_error_t package_single_nalu(SrsSharedPtrMessage* msg, SrsSample* sample, std::vector<SrsRtpPacket*>& pkts);
srs_error_t package_fu_a(SrsSharedPtrMessage* msg, SrsSample* sample, int fu_payload_size, std::vector<SrsRtpPacket*>& pkts); srs_error_t package_fu_a(SrsSharedPtrMessage* msg, SrsSample* sample, int fu_payload_size, std::vector<SrsRtpPacket*>& pkts);
srs_error_t consume_packets(std::vector<SrsRtpPacket*>& pkts); srs_error_t consume_packets(std::vector<SrsRtpPacket*>& pkts);
}; };
class SrsRtmpFromRtcBridge : public ISrsRtcSourceBridge // Collect and build WebRTC RTP packets to AV frames.
class SrsRtcFrameBuilder
{ {
private: private:
SrsLiveSource *source_; ISrsStreamBridge* bridge_;
private:
bool is_first_audio_;
SrsAudioTranscoder *codec_; SrsAudioTranscoder *codec_;
bool is_first_audio; private:
bool is_first_video; const static uint16_t s_cache_size = 512;
// The format, codec information.
SrsRtmpFormat* format;
//TODO:use SrsRtpRingBuffer //TODO:use SrsRtpRingBuffer
//TODO:jitter buffer class //TODO:jitter buffer class
struct RtcPacketCache { struct RtcPacketCache {
@ -308,33 +314,33 @@ private:
uint32_t rtp_ts; uint32_t rtp_ts;
SrsRtpPacket* pkt; SrsRtpPacket* pkt;
}; };
const static uint16_t s_cache_size = 512;
RtcPacketCache cache_video_pkts_[s_cache_size]; RtcPacketCache cache_video_pkts_[s_cache_size];
uint16_t header_sn_; uint16_t header_sn_;
uint16_t lost_sn_; uint16_t lost_sn_;
int64_t rtp_key_frame_ts_; int64_t rtp_key_frame_ts_;
public: public:
SrsRtmpFromRtcBridge(SrsLiveSource *src); SrsRtcFrameBuilder(ISrsStreamBridge* bridge);
virtual ~SrsRtmpFromRtcBridge(); virtual ~SrsRtcFrameBuilder();
public: public:
srs_error_t initialize(SrsRequest* r); srs_error_t initialize(SrsRequest* r);
public:
virtual srs_error_t on_publish(); virtual srs_error_t on_publish();
virtual srs_error_t on_rtp(SrsRtpPacket *pkt);
virtual void on_unpublish(); virtual void on_unpublish();
virtual srs_error_t on_rtp(SrsRtpPacket *pkt);
private: private:
srs_error_t transcode_audio(SrsRtpPacket *pkt); srs_error_t transcode_audio(SrsRtpPacket *pkt);
void packet_aac(SrsCommonMessage* audio, char* data, int len, uint32_t pts, bool is_header); void packet_aac(SrsCommonMessage* audio, char* data, int len, uint32_t pts, bool is_header);
private:
srs_error_t packet_video(SrsRtpPacket* pkt); srs_error_t packet_video(SrsRtpPacket* pkt);
srs_error_t packet_video_key_frame(SrsRtpPacket* pkt); srs_error_t packet_video_key_frame(SrsRtpPacket* pkt);
srs_error_t packet_video_rtmp(const uint16_t start, const uint16_t end);
int32_t find_next_lost_sn(uint16_t current_sn, uint16_t& end_sn);
void clear_cached_video();
inline uint16_t cache_index(uint16_t current_sn) { inline uint16_t cache_index(uint16_t current_sn) {
return current_sn % s_cache_size; return current_sn % s_cache_size;
} }
int32_t find_next_lost_sn(uint16_t current_sn, uint16_t& end_sn);
bool check_frame_complete(const uint16_t start, const uint16_t end); bool check_frame_complete(const uint16_t start, const uint16_t end);
srs_error_t packet_video_rtmp(const uint16_t start, const uint16_t end);
void clear_cached_video();
}; };
#endif #endif
// TODO: FIXME: Rename it. // TODO: FIXME: Rename it.

View file

@ -39,6 +39,7 @@ using namespace std;
#include <srs_protocol_json.hpp> #include <srs_protocol_json.hpp>
#include <srs_app_rtc_source.hpp> #include <srs_app_rtc_source.hpp>
#include <srs_app_tencentcloud.hpp> #include <srs_app_tencentcloud.hpp>
#include <srs_app_srt_source.hpp>
// the timeout in srs_utime_t to wait encoder to republish // the timeout in srs_utime_t to wait encoder to republish
// if timeout, close the connection. // if timeout, close the connection.
@ -1081,7 +1082,7 @@ srs_error_t SrsRtmpConn::acquire_publish(SrsLiveSource* source)
// Check whether RTC stream is busy. // Check whether RTC stream is busy.
#ifdef SRS_RTC #ifdef SRS_RTC
SrsRtcSource *rtc = NULL; SrsRtcSource* rtc = NULL;
bool rtc_server_enabled = _srs_config->get_rtc_server_enabled(); bool rtc_server_enabled = _srs_config->get_rtc_server_enabled();
bool rtc_enabled = _srs_config->get_rtc_enabled(req->vhost); bool rtc_enabled = _srs_config->get_rtc_enabled(req->vhost);
if (rtc_server_enabled && rtc_enabled && !info->edge) { if (rtc_server_enabled && rtc_enabled && !info->edge) {
@ -1095,10 +1096,28 @@ srs_error_t SrsRtmpConn::acquire_publish(SrsLiveSource* source)
} }
#endif #endif
// Check whether SRT stream is busy.
#ifdef SRS_SRT
SrsSrtSource* srt = NULL;
bool srt_server_enabled = _srs_config->get_srt_enabled();
bool srt_enabled = _srs_config->get_srt_enabled(req->vhost);
if (srt_server_enabled && srt_enabled && !info->edge) {
if ((err = _srs_srt_sources->fetch_or_create(req, &srt)) != srs_success) {
return srs_error_wrap(err, "create source");
}
if (!srt->can_publish()) {
return srs_error_new(ERROR_SYSTEM_STREAM_BUSY, "srt stream %s busy", req->get_stream_url().c_str());
}
}
#endif
// Bridge to RTC streaming. // Bridge to RTC streaming.
#if defined(SRS_RTC) && defined(SRS_FFMPEG_FIT) #if defined(SRS_RTC) && defined(SRS_FFMPEG_FIT)
if (rtc) { if (rtc && _srs_config->get_rtc_from_rtmp(req->vhost)) {
SrsRtcFromRtmpBridge *bridge = new SrsRtcFromRtmpBridge(rtc); SrsCompositeBridge* bridge = new SrsCompositeBridge();
bridge->append(new SrsFrameToRtcBridge(rtc));
if ((err = bridge->initialize(req)) != srs_success) { if ((err = bridge->initialize(req)) != srs_success) {
srs_freep(bridge); srs_freep(bridge);
return srs_error_wrap(err, "bridge init"); return srs_error_wrap(err, "bridge init");

View file

@ -1910,14 +1910,6 @@ void SrsLiveSourceManager::destroy()
pool.clear(); pool.clear();
} }
ISrsLiveSourceBridge::ISrsLiveSourceBridge()
{
}
ISrsLiveSourceBridge::~ISrsLiveSourceBridge()
{
}
SrsLiveSource::SrsLiveSource() SrsLiveSource::SrsLiveSource()
{ {
req = NULL; req = NULL;
@ -2060,7 +2052,7 @@ srs_error_t SrsLiveSource::initialize(SrsRequest* r, ISrsLiveSourceHandler* h)
return err; return err;
} }
void SrsLiveSource::set_bridge(ISrsLiveSourceBridge* v) void SrsLiveSource::set_bridge(ISrsStreamBridge* v)
{ {
srs_freep(bridge_); srs_freep(bridge_);
bridge_ = v; bridge_ = v;
@ -2275,13 +2267,24 @@ srs_error_t SrsLiveSource::on_audio(SrsCommonMessage* shared_audio)
return srs_error_wrap(err, "create message"); return srs_error_wrap(err, "create message");
} }
return on_frame(&msg);
}
srs_error_t SrsLiveSource::on_frame(SrsSharedPtrMessage* msg)
{
srs_error_t err = srs_success;
// directly process the audio message. // directly process the audio message.
if (!mix_correct) { if (!mix_correct) {
return on_audio_imp(&msg); if (msg->is_audio()) {
return on_audio_imp(msg);
} else {
return on_video_imp(msg);
}
} }
// insert msg to the queue. // insert msg to the queue.
mix_queue->push(msg.copy()); mix_queue->push(msg->copy());
// fetch someone from mix queue. // fetch someone from mix queue.
SrsSharedPtrMessage* m = mix_queue->pop(); SrsSharedPtrMessage* m = mix_queue->pop();
@ -2333,7 +2336,7 @@ srs_error_t SrsLiveSource::on_audio_imp(SrsSharedPtrMessage* msg)
} }
// For bridge to consume the message. // For bridge to consume the message.
if (bridge_ && (err = bridge_->on_audio(msg)) != srs_success) { if (bridge_ && (err = bridge_->on_frame(msg)) != srs_success) {
return srs_error_wrap(err, "bridge consume audio"); return srs_error_wrap(err, "bridge consume audio");
} }
@ -2410,29 +2413,7 @@ srs_error_t SrsLiveSource::on_video(SrsCommonMessage* shared_video)
return srs_error_wrap(err, "create message"); return srs_error_wrap(err, "create message");
} }
// directly process the video message. return on_frame(&msg);
if (!mix_correct) {
return on_video_imp(&msg);
}
// insert msg to the queue.
mix_queue->push(msg.copy());
// fetch someone from mix queue.
SrsSharedPtrMessage* m = mix_queue->pop();
if (!m) {
return err;
}
// consume the monotonically increase message.
if (m->is_audio()) {
err = on_audio_imp(m);
} else {
err = on_video_imp(m);
}
srs_freep(m);
return err;
} }
srs_error_t SrsLiveSource::on_video_imp(SrsSharedPtrMessage* msg) srs_error_t SrsLiveSource::on_video_imp(SrsSharedPtrMessage* msg)
@ -2478,7 +2459,7 @@ srs_error_t SrsLiveSource::on_video_imp(SrsSharedPtrMessage* msg)
} }
// For bridge to consume the message. // For bridge to consume the message.
if (bridge_ && (err = bridge_->on_video(msg)) != srs_success) { if (bridge_ && (err = bridge_->on_frame(msg)) != srs_success) {
return srs_error_wrap(err, "bridge consume video"); return srs_error_wrap(err, "bridge consume video");
} }

View file

@ -18,6 +18,7 @@
#include <srs_core_performance.hpp> #include <srs_core_performance.hpp>
#include <srs_protocol_st.hpp> #include <srs_protocol_st.hpp>
#include <srs_app_hourglass.hpp> #include <srs_app_hourglass.hpp>
#include <srs_app_stream_bridge.hpp>
class SrsFormat; class SrsFormat;
class SrsRtmpFormat; class SrsRtmpFormat;
@ -471,19 +472,6 @@ public:
// Global singleton instance. // Global singleton instance.
extern SrsLiveSourceManager* _srs_sources; extern SrsLiveSourceManager* _srs_sources;
// For RTMP2RTC, bridge SrsLiveSource to SrsRtcSource
class ISrsLiveSourceBridge
{
public:
ISrsLiveSourceBridge();
virtual ~ISrsLiveSourceBridge();
public:
virtual srs_error_t on_publish() = 0;
virtual srs_error_t on_audio(SrsSharedPtrMessage* audio) = 0;
virtual srs_error_t on_video(SrsSharedPtrMessage* video) = 0;
virtual void on_unpublish() = 0;
};
// The live streaming source. // The live streaming source.
class SrsLiveSource : public ISrsReloadHandler class SrsLiveSource : public ISrsReloadHandler
{ {
@ -518,7 +506,7 @@ private:
// The event handler. // The event handler.
ISrsLiveSourceHandler* handler; ISrsLiveSourceHandler* handler;
// The source bridge for other source. // The source bridge for other source.
ISrsLiveSourceBridge* bridge_; ISrsStreamBridge* bridge_;
// The edge control service // The edge control service
SrsPlayEdge* play_edge; SrsPlayEdge* play_edge;
SrsPublishEdge* publish_edge; SrsPublishEdge* publish_edge;
@ -551,7 +539,7 @@ public:
// Initialize the hls with handlers. // Initialize the hls with handlers.
virtual srs_error_t initialize(SrsRequest* r, ISrsLiveSourceHandler* h); virtual srs_error_t initialize(SrsRequest* r, ISrsLiveSourceHandler* h);
// Bridge to other source, forward packets to it. // Bridge to other source, forward packets to it.
void set_bridge(ISrsLiveSourceBridge* v); void set_bridge(ISrsStreamBridge* v);
// Interface ISrsReloadHandler // Interface ISrsReloadHandler
public: public:
virtual srs_error_t on_reload_vhost_play(std::string vhost); virtual srs_error_t on_reload_vhost_play(std::string vhost);
@ -572,6 +560,7 @@ public:
public: public:
// TODO: FIXME: Use SrsSharedPtrMessage instead. // TODO: FIXME: Use SrsSharedPtrMessage instead.
virtual srs_error_t on_audio(SrsCommonMessage* audio); virtual srs_error_t on_audio(SrsCommonMessage* audio);
srs_error_t on_frame(SrsSharedPtrMessage* msg);
private: private:
virtual srs_error_t on_audio_imp(SrsSharedPtrMessage* audio); virtual srs_error_t on_audio_imp(SrsSharedPtrMessage* audio);
public: public:

View file

@ -357,7 +357,6 @@ srs_error_t SrsMpegtsSrtConn::acquire_publish()
return srs_error_new(ERROR_SRT_SOURCE_BUSY, "srt stream %s busy", req_->get_stream_url().c_str()); return srs_error_new(ERROR_SRT_SOURCE_BUSY, "srt stream %s busy", req_->get_stream_url().c_str());
} }
if (_srs_config->get_srt_to_rtmp(req_->vhost)) {
// Check rtmp stream is busy. // Check rtmp stream is busy.
SrsLiveSource *live_source = _srs_sources->fetch(req_); SrsLiveSource *live_source = _srs_sources->fetch(req_);
if (live_source && !live_source->can_publish(false)) { if (live_source && !live_source->can_publish(false)) {
@ -380,7 +379,7 @@ srs_error_t SrsMpegtsSrtConn::acquire_publish()
// Check whether RTC stream is busy. // Check whether RTC stream is busy.
#ifdef SRS_RTC #ifdef SRS_RTC
SrsRtcSource *rtc = NULL; SrsRtcSource* rtc = NULL;
bool rtc_server_enabled = _srs_config->get_rtc_server_enabled(); bool rtc_server_enabled = _srs_config->get_rtc_server_enabled();
bool rtc_enabled = _srs_config->get_rtc_enabled(req_->vhost); bool rtc_enabled = _srs_config->get_rtc_enabled(req_->vhost);
bool edge = _srs_config->get_vhost_is_edge(req_->vhost); bool edge = _srs_config->get_vhost_is_edge(req_->vhost);
@ -395,26 +394,23 @@ srs_error_t SrsMpegtsSrtConn::acquire_publish()
} }
#endif #endif
// Bridge to RTC streaming. if (_srs_config->get_srt_to_rtmp(req_->vhost)) {
#if defined(SRS_RTC) && defined(SRS_FFMPEG_FIT) // Bridge to RTMP and RTC streaming.
if (rtc) { SrsCompositeBridge* bridge = new SrsCompositeBridge();
SrsRtcFromRtmpBridge *bridge = new SrsRtcFromRtmpBridge(rtc); bridge->append(new SrsFrameToRtmpBridge(live_source));
if ((err = bridge->initialize(req_)) != srs_success) {
srs_freep(bridge);
return srs_error_wrap(err, "bridge init");
}
live_source->set_bridge(bridge); #if defined(SRS_RTC) && defined(SRS_FFMPEG_FIT)
if (rtc && _srs_config->get_rtc_from_rtmp(req_->vhost)) {
bridge->append(new SrsFrameToRtcBridge(rtc));
} }
#endif #endif
SrsRtmpFromSrtBridge *bridger = new SrsRtmpFromSrtBridge(live_source); if ((err = bridge->initialize(req_)) != srs_success) {
if ((err = bridger->initialize(req_)) != srs_success) { srs_freep(bridge);
srs_freep(bridger); return srs_error_wrap(err, "create bridge");
return srs_error_wrap(err, "create bridger");
} }
srt_source_->set_bridge(bridger); srt_source_->set_bridge(bridge);
} }
if ((err = srt_source_->on_publish()) != srs_success) { if ((err = srt_source_->on_publish()) != srs_success) {

View file

@ -235,15 +235,7 @@ void SrsSrtConsumer::wait(int nb_msgs, srs_utime_t timeout)
srs_cond_timedwait(mw_wait, timeout); srs_cond_timedwait(mw_wait, timeout);
} }
ISrsSrtSourceBridge::ISrsSrtSourceBridge() SrsSrtFrameBuilder::SrsSrtFrameBuilder(ISrsStreamBridge* bridge)
{
}
ISrsSrtSourceBridge::~ISrsSrtSourceBridge()
{
}
SrsRtmpFromSrtBridge::SrsRtmpFromSrtBridge(SrsLiveSource* source) : ISrsSrtSourceBridge()
{ {
ts_ctx_ = new SrsTsContext(); ts_ctx_ = new SrsTsContext();
@ -252,7 +244,7 @@ SrsRtmpFromSrtBridge::SrsRtmpFromSrtBridge(SrsLiveSource* source) : ISrsSrtSourc
pps_ = ""; pps_ = "";
req_ = NULL; req_ = NULL;
live_source_ = source; bridge_ = bridge;
video_streamid_ = 1; video_streamid_ = 1;
audio_streamid_ = 2; audio_streamid_ = 2;
@ -260,7 +252,7 @@ SrsRtmpFromSrtBridge::SrsRtmpFromSrtBridge(SrsLiveSource* source) : ISrsSrtSourc
pp_audio_duration_ = new SrsAlonePithyPrint(); pp_audio_duration_ = new SrsAlonePithyPrint();
} }
SrsRtmpFromSrtBridge::~SrsRtmpFromSrtBridge() SrsSrtFrameBuilder::~SrsSrtFrameBuilder()
{ {
srs_freep(ts_ctx_); srs_freep(ts_ctx_);
srs_freep(req_); srs_freep(req_);
@ -268,18 +260,12 @@ SrsRtmpFromSrtBridge::~SrsRtmpFromSrtBridge()
srs_freep(pp_audio_duration_); srs_freep(pp_audio_duration_);
} }
srs_error_t SrsRtmpFromSrtBridge::on_publish() srs_error_t SrsSrtFrameBuilder::on_publish()
{ {
srs_error_t err = srs_success; return srs_success;
if ((err = live_source_->on_publish()) != srs_success) {
return srs_error_wrap(err, "on publish");
}
return err;
} }
srs_error_t SrsRtmpFromSrtBridge::on_packet(SrsSrtPacket *pkt) srs_error_t SrsSrtFrameBuilder::on_packet(SrsSrtPacket *pkt)
{ {
srs_error_t err = srs_success; srs_error_t err = srs_success;
@ -306,12 +292,11 @@ srs_error_t SrsRtmpFromSrtBridge::on_packet(SrsSrtPacket *pkt)
return err; return err;
} }
void SrsRtmpFromSrtBridge::on_unpublish() void SrsSrtFrameBuilder::on_unpublish()
{ {
live_source_->on_unpublish();
} }
srs_error_t SrsRtmpFromSrtBridge::initialize(SrsRequest* req) srs_error_t SrsSrtFrameBuilder::initialize(SrsRequest* req)
{ {
srs_error_t err = srs_success; srs_error_t err = srs_success;
@ -321,7 +306,7 @@ srs_error_t SrsRtmpFromSrtBridge::initialize(SrsRequest* req)
return err; return err;
} }
srs_error_t SrsRtmpFromSrtBridge::on_ts_message(SrsTsMessage* msg) srs_error_t SrsSrtFrameBuilder::on_ts_message(SrsTsMessage* msg)
{ {
srs_error_t err = srs_success; srs_error_t err = srs_success;
@ -369,7 +354,7 @@ srs_error_t SrsRtmpFromSrtBridge::on_ts_message(SrsTsMessage* msg)
return err; return err;
} }
srs_error_t SrsRtmpFromSrtBridge::on_ts_video_avc(SrsTsMessage* msg, SrsBuffer* avs) srs_error_t SrsSrtFrameBuilder::on_ts_video_avc(SrsTsMessage* msg, SrsBuffer* avs)
{ {
srs_error_t err = srs_success; srs_error_t err = srs_success;
@ -430,7 +415,7 @@ srs_error_t SrsRtmpFromSrtBridge::on_ts_video_avc(SrsTsMessage* msg, SrsBuffer*
return on_h264_frame(msg, ipb_frames); return on_h264_frame(msg, ipb_frames);
} }
srs_error_t SrsRtmpFromSrtBridge::check_sps_pps_change(SrsTsMessage* msg) srs_error_t SrsSrtFrameBuilder::check_sps_pps_change(SrsTsMessage* msg)
{ {
srs_error_t err = srs_success; srs_error_t err = srs_success;
@ -470,14 +455,19 @@ srs_error_t SrsRtmpFromSrtBridge::check_sps_pps_change(SrsTsMessage* msg)
return srs_error_wrap(err, "create rtmp"); return srs_error_wrap(err, "create rtmp");
} }
if ((err = live_source_->on_video(&rtmp)) != srs_success) { SrsSharedPtrMessage frame;
if ((err = frame.create(&rtmp)) != srs_success) {
return srs_error_wrap(err, "create frame");
}
if ((err = bridge_->on_frame(&frame)) != srs_success) {
return srs_error_wrap(err, "srt to rtmp sps/pps"); return srs_error_wrap(err, "srt to rtmp sps/pps");
} }
return err; return err;
} }
srs_error_t SrsRtmpFromSrtBridge::on_h264_frame(SrsTsMessage* msg, vector<pair<char*, int> >& ipb_frames) srs_error_t SrsSrtFrameBuilder::on_h264_frame(SrsTsMessage* msg, vector<pair<char*, int> >& ipb_frames)
{ {
srs_error_t err = srs_success; srs_error_t err = srs_success;
@ -526,7 +516,12 @@ srs_error_t SrsRtmpFromSrtBridge::on_h264_frame(SrsTsMessage* msg, vector<pair<c
payload.write_bytes(nal, nal_size); payload.write_bytes(nal, nal_size);
} }
if ((err = live_source_->on_video(&rtmp)) != srs_success) { SrsSharedPtrMessage frame;
if ((err = frame.create(&rtmp)) != srs_success) {
return srs_error_wrap(err, "create frame");
}
if ((err = bridge_->on_frame(&frame)) != srs_success) {
return srs_error_wrap(err ,"srt ts video to rtmp"); return srs_error_wrap(err ,"srt ts video to rtmp");
} }
@ -534,7 +529,7 @@ srs_error_t SrsRtmpFromSrtBridge::on_h264_frame(SrsTsMessage* msg, vector<pair<c
} }
#ifdef SRS_H265 #ifdef SRS_H265
srs_error_t SrsRtmpFromSrtBridge::on_ts_video_hevc(SrsTsMessage *msg, SrsBuffer *avs) srs_error_t SrsSrtFrameBuilder::on_ts_video_hevc(SrsTsMessage *msg, SrsBuffer *avs)
{ {
srs_error_t err = srs_success; srs_error_t err = srs_success;
@ -610,7 +605,7 @@ srs_error_t SrsRtmpFromSrtBridge::on_ts_video_hevc(SrsTsMessage *msg, SrsBuffer
return on_hevc_frame(msg, ipb_frames); return on_hevc_frame(msg, ipb_frames);
} }
srs_error_t SrsRtmpFromSrtBridge::check_vps_sps_pps_change(SrsTsMessage* msg) srs_error_t SrsSrtFrameBuilder::check_vps_sps_pps_change(SrsTsMessage* msg)
{ {
srs_error_t err = srs_success; srs_error_t err = srs_success;
@ -650,14 +645,19 @@ srs_error_t SrsRtmpFromSrtBridge::check_vps_sps_pps_change(SrsTsMessage* msg)
return srs_error_wrap(err, "create rtmp"); return srs_error_wrap(err, "create rtmp");
} }
if ((err = live_source_->on_video(&rtmp)) != srs_success) { SrsSharedPtrMessage frame;
if ((err = frame.create(&rtmp)) != srs_success) {
return srs_error_wrap(err, "create frame");
}
if ((err = bridge_->on_frame(&frame)) != srs_success) {
return srs_error_wrap(err, "srt to rtmp vps/sps/pps"); return srs_error_wrap(err, "srt to rtmp vps/sps/pps");
} }
return err; return err;
} }
srs_error_t SrsRtmpFromSrtBridge::on_hevc_frame(SrsTsMessage* msg, vector<pair<char*, int> >& ipb_frames) srs_error_t SrsSrtFrameBuilder::on_hevc_frame(SrsTsMessage* msg, vector<pair<char*, int> >& ipb_frames)
{ {
srs_error_t err = srs_success; srs_error_t err = srs_success;
@ -713,7 +713,12 @@ srs_error_t SrsRtmpFromSrtBridge::on_hevc_frame(SrsTsMessage* msg, vector<pair<c
payload.write_bytes(nal, nal_size); payload.write_bytes(nal, nal_size);
} }
if ((err = live_source_->on_video(&rtmp)) != srs_success) { SrsSharedPtrMessage frame;
if ((err = frame.create(&rtmp)) != srs_success) {
return srs_error_wrap(err, "create frame");
}
if ((err = bridge_->on_frame(&frame)) != srs_success) {
return srs_error_wrap(err ,"srt ts hevc video to rtmp"); return srs_error_wrap(err ,"srt ts hevc video to rtmp");
} }
@ -721,7 +726,7 @@ srs_error_t SrsRtmpFromSrtBridge::on_hevc_frame(SrsTsMessage* msg, vector<pair<c
} }
#endif #endif
srs_error_t SrsRtmpFromSrtBridge::on_ts_audio(SrsTsMessage* msg, SrsBuffer* avs) srs_error_t SrsSrtFrameBuilder::on_ts_audio(SrsTsMessage* msg, SrsBuffer* avs)
{ {
srs_error_t err = srs_success; srs_error_t err = srs_success;
@ -794,7 +799,7 @@ srs_error_t SrsRtmpFromSrtBridge::on_ts_audio(SrsTsMessage* msg, SrsBuffer* avs)
return err; return err;
} }
srs_error_t SrsRtmpFromSrtBridge::check_audio_sh_change(SrsTsMessage* msg, uint32_t pts) srs_error_t SrsSrtFrameBuilder::check_audio_sh_change(SrsTsMessage* msg, uint32_t pts)
{ {
srs_error_t err = srs_success; srs_error_t err = srs_success;
@ -818,18 +823,23 @@ srs_error_t SrsRtmpFromSrtBridge::check_audio_sh_change(SrsTsMessage* msg, uint3
stream.write_1bytes(0); stream.write_1bytes(0);
stream.write_bytes((char*)audio_sh_.data(), audio_sh_.size()); stream.write_bytes((char*)audio_sh_.data(), audio_sh_.size());
if ((err = live_source_->on_audio(&rtmp)) != srs_success) { SrsSharedPtrMessage frame;
if ((err = frame.create(&rtmp)) != srs_success) {
return srs_error_wrap(err, "create frame");
}
if ((err = bridge_->on_frame(&frame)) != srs_success) {
return srs_error_wrap(err, "srt to rtmp audio sh"); return srs_error_wrap(err, "srt to rtmp audio sh");
} }
return err; return err;
} }
srs_error_t SrsRtmpFromSrtBridge::on_aac_frame(SrsTsMessage* msg, uint32_t pts, char* frame, int frame_size) srs_error_t SrsSrtFrameBuilder::on_aac_frame(SrsTsMessage* msg, uint32_t pts, char* data, int data_size)
{ {
srs_error_t err = srs_success; srs_error_t err = srs_success;
int rtmp_len = frame_size + 2/* 2 bytes of flv audio tag header*/; int rtmp_len = data_size + 2/* 2 bytes of flv audio tag header*/;
SrsCommonMessage rtmp; SrsCommonMessage rtmp;
rtmp.header.initialize_audio(rtmp_len, pts, audio_streamid_); rtmp.header.initialize_audio(rtmp_len, pts, audio_streamid_);
@ -842,9 +852,14 @@ srs_error_t SrsRtmpFromSrtBridge::on_aac_frame(SrsTsMessage* msg, uint32_t pts,
stream.write_1bytes(aac_flag); stream.write_1bytes(aac_flag);
stream.write_1bytes(1); stream.write_1bytes(1);
// Write audio frame. // Write audio frame.
stream.write_bytes(frame, frame_size); stream.write_bytes(data, data_size);
if ((err = live_source_->on_audio(&rtmp)) != srs_success) { SrsSharedPtrMessage frame;
if ((err = frame.create(&rtmp)) != srs_success) {
return srs_error_wrap(err, "create frame");
}
if ((err = bridge_->on_frame(&frame)) != srs_success) {
return srs_error_wrap(err, "srt to rtmp audio sh"); return srs_error_wrap(err, "srt to rtmp audio sh");
} }
@ -855,6 +870,7 @@ SrsSrtSource::SrsSrtSource()
{ {
req = NULL; req = NULL;
can_publish_ = true; can_publish_ = true;
frame_builder_ = NULL;
bridge_ = NULL; bridge_ = NULL;
} }
@ -864,6 +880,7 @@ SrsSrtSource::~SrsSrtSource()
// for all consumers are auto free. // for all consumers are auto free.
consumers.clear(); consumers.clear();
srs_freep(frame_builder_);
srs_freep(bridge_); srs_freep(bridge_);
srs_freep(req); srs_freep(req);
} }
@ -915,10 +932,13 @@ void SrsSrtSource::update_auth(SrsRequest* r)
req->update_auth(r); req->update_auth(r);
} }
void SrsSrtSource::set_bridge(ISrsSrtSourceBridge* bridge) void SrsSrtSource::set_bridge(ISrsStreamBridge* bridge)
{ {
srs_freep(bridge_); srs_freep(bridge_);
bridge_ = bridge; bridge_ = bridge;
srs_freep(frame_builder_);
frame_builder_ = new SrsSrtFrameBuilder(bridge);
} }
srs_error_t SrsSrtSource::create_consumer(SrsSrtConsumer*& consumer) srs_error_t SrsSrtSource::create_consumer(SrsSrtConsumer*& consumer)
@ -965,9 +985,19 @@ srs_error_t SrsSrtSource::on_publish()
return srs_error_wrap(err, "source id change"); return srs_error_wrap(err, "source id change");
} }
if (bridge_ && (err = bridge_->on_publish()) != srs_success) { if (bridge_) {
if ((err = frame_builder_->initialize(req)) != srs_success) {
return srs_error_wrap(err, "frame builder initialize");
}
if ((err = frame_builder_->on_publish()) != srs_success) {
return srs_error_wrap(err, "frame builder on publish");
}
if ((err = bridge_->on_publish()) != srs_success) {
return srs_error_wrap(err, "bridge on publish"); return srs_error_wrap(err, "bridge on publish");
} }
}
SrsStatistic* stat = SrsStatistic::instance(); SrsStatistic* stat = SrsStatistic::instance();
stat->on_stream_publish(req, _source_id.c_str()); stat->on_stream_publish(req, _source_id.c_str());
@ -985,9 +1015,12 @@ void SrsSrtSource::on_unpublish()
can_publish_ = true; can_publish_ = true;
if (bridge_) { if (bridge_) {
frame_builder_->on_unpublish();
srs_freep(frame_builder_);
bridge_->on_unpublish(); bridge_->on_unpublish();
}
srs_freep(bridge_); srs_freep(bridge_);
}
} }
srs_error_t SrsSrtSource::on_packet(SrsSrtPacket* packet) srs_error_t SrsSrtSource::on_packet(SrsSrtPacket* packet)
@ -1001,7 +1034,7 @@ srs_error_t SrsSrtSource::on_packet(SrsSrtPacket* packet)
} }
} }
if (bridge_ && (err = bridge_->on_packet(packet)) != srs_success) { if (frame_builder_ && (err = frame_builder_->on_packet(packet)) != srs_success) {
return srs_error_wrap(err, "bridge consume message"); return srs_error_wrap(err, "bridge consume message");
} }

View file

@ -14,13 +14,14 @@
#include <srs_kernel_ts.hpp> #include <srs_kernel_ts.hpp>
#include <srs_protocol_st.hpp> #include <srs_protocol_st.hpp>
#include <srs_app_source.hpp> #include <srs_app_stream_bridge.hpp>
class SrsSharedPtrMessage; class SrsSharedPtrMessage;
class SrsRequest; class SrsRequest;
class SrsLiveSource; class SrsLiveSource;
class SrsSrtSource; class SrsSrtSource;
class SrsAlonePithyPrint; class SrsAlonePithyPrint;
class SrsSrtFrameBuilder;
// The SRT packet with shared message. // The SRT packet with shared message.
class SrsSrtPacket class SrsSrtPacket
@ -91,28 +92,18 @@ public:
virtual void wait(int nb_msgs, srs_utime_t timeout); virtual void wait(int nb_msgs, srs_utime_t timeout);
}; };
class ISrsSrtSourceBridge // Collect and build SRT TS packet to AV frames.
class SrsSrtFrameBuilder : public ISrsTsHandler
{ {
public: public:
ISrsSrtSourceBridge(); SrsSrtFrameBuilder(ISrsStreamBridge* bridge);
virtual ~ISrsSrtSourceBridge(); virtual ~SrsSrtFrameBuilder();
public: public:
virtual srs_error_t on_publish() = 0; srs_error_t initialize(SrsRequest* r);
virtual srs_error_t on_packet(SrsSrtPacket *pkt) = 0;
virtual void on_unpublish() = 0;
};
class SrsRtmpFromSrtBridge : public ISrsSrtSourceBridge, public ISrsTsHandler
{
public:
SrsRtmpFromSrtBridge(SrsLiveSource* source);
virtual ~SrsRtmpFromSrtBridge();
public: public:
virtual srs_error_t on_publish(); virtual srs_error_t on_publish();
virtual srs_error_t on_packet(SrsSrtPacket *pkt); virtual srs_error_t on_packet(SrsSrtPacket* pkt);
virtual void on_unpublish(); virtual void on_unpublish();
public:
srs_error_t initialize(SrsRequest* req);
// Interface ISrsTsHandler // Interface ISrsTsHandler
public: public:
virtual srs_error_t on_ts_message(SrsTsMessage* msg); virtual srs_error_t on_ts_message(SrsTsMessage* msg);
@ -123,35 +114,31 @@ private:
srs_error_t on_h264_frame(SrsTsMessage* msg, std::vector<std::pair<char*, int> >& ipb_frames); srs_error_t on_h264_frame(SrsTsMessage* msg, std::vector<std::pair<char*, int> >& ipb_frames);
srs_error_t check_audio_sh_change(SrsTsMessage* msg, uint32_t pts); srs_error_t check_audio_sh_change(SrsTsMessage* msg, uint32_t pts);
srs_error_t on_aac_frame(SrsTsMessage* msg, uint32_t pts, char* frame, int frame_size); srs_error_t on_aac_frame(SrsTsMessage* msg, uint32_t pts, char* frame, int frame_size);
#ifdef SRS_H265 #ifdef SRS_H265
srs_error_t on_ts_video_hevc(SrsTsMessage *msg, SrsBuffer *avs); srs_error_t on_ts_video_hevc(SrsTsMessage *msg, SrsBuffer *avs);
srs_error_t check_vps_sps_pps_change(SrsTsMessage *msg); srs_error_t check_vps_sps_pps_change(SrsTsMessage *msg);
srs_error_t on_hevc_frame(SrsTsMessage *msg, std::vector<std::pair<char *, int>> &ipb_frames); srs_error_t on_hevc_frame(SrsTsMessage *msg, std::vector<std::pair<char *, int>> &ipb_frames);
#endif #endif
private:
ISrsStreamBridge* bridge_;
private: private:
SrsTsContext* ts_ctx_; SrsTsContext* ts_ctx_;
// Record sps/pps had changed, if change, need to generate new video sh frame. // Record sps/pps had changed, if change, need to generate new video sh frame.
bool sps_pps_change_; bool sps_pps_change_;
std::string sps_; std::string sps_;
std::string pps_; std::string pps_;
#ifdef SRS_H265 #ifdef SRS_H265
bool vps_sps_pps_change_; bool vps_sps_pps_change_;
std::string hevc_vps_; std::string hevc_vps_;
std::string hevc_sps_; std::string hevc_sps_;
std::string hevc_pps_; std::string hevc_pps_;
#endif #endif
// Record audio sepcific config had changed, if change, need to generate new audio sh frame. // Record audio sepcific config had changed, if change, need to generate new audio sh frame.
bool audio_sh_change_; bool audio_sh_change_;
std::string audio_sh_; std::string audio_sh_;
private:
SrsRequest* req_; SrsRequest* req_;
SrsLiveSource* live_source_; private:
// SRT to rtmp, video stream id. // SRT to rtmp, video stream id.
int video_streamid_; int video_streamid_;
// SRT to rtmp, audio stream id. // SRT to rtmp, audio stream id.
@ -176,7 +163,7 @@ public:
// Update the authentication information in request. // Update the authentication information in request.
virtual void update_auth(SrsRequest* r); virtual void update_auth(SrsRequest* r);
public: public:
void set_bridge(ISrsSrtSourceBridge *bridger); void set_bridge(ISrsStreamBridge* bridge);
public: public:
// Create consumer // Create consumer
// @param consumer, output the create consumer. // @param consumer, output the create consumer.
@ -201,7 +188,9 @@ private:
// To delivery packets to clients. // To delivery packets to clients.
std::vector<SrsSrtConsumer*> consumers; std::vector<SrsSrtConsumer*> consumers;
bool can_publish_; bool can_publish_;
ISrsSrtSourceBridge* bridge_; private:
SrsSrtFrameBuilder* frame_builder_;
ISrsStreamBridge* bridge_;
}; };
#endif #endif

View file

@ -178,7 +178,7 @@ srs_error_t SrsStatisticStream::dumps(SrsJsonObject* obj)
void SrsStatisticStream::publish(std::string id) void SrsStatisticStream::publish(std::string id)
{ {
// To prevent duplicated publish event by bridger. // To prevent duplicated publish event by bridge.
if (active) { if (active) {
return; return;
} }
@ -490,14 +490,14 @@ void SrsStatistic::cleanup_stream(SrsStatisticStream* stream)
// Do cleanup streams. // Do cleanup streams.
if (true) { if (true) {
std::map<std::string, SrsStatisticStream *>::iterator it; std::map<std::string, SrsStatisticStream*>::iterator it;
if ((it = streams.find(stream->id)) != streams.end()) { if ((it = streams.find(stream->id)) != streams.end()) {
streams.erase(it); streams.erase(it);
} }
} }
if (true) { if (true) {
std::map<std::string, SrsStatisticStream *>::iterator it; std::map<std::string, SrsStatisticStream*>::iterator it;
if ((it = rstreams.find(stream->url)) != rstreams.end()) { if ((it = rstreams.find(stream->url)) != rstreams.end()) {
rstreams.erase(it); rstreams.erase(it);
} }

View file

@ -0,0 +1,236 @@
//
// Copyright (c) 2013-2023 The SRS Authors
//
// SPDX-License-Identifier: MIT or MulanPSL-2.0
//
#include <srs_app_stream_bridge.hpp>
#include <srs_app_source.hpp>
#include <srs_protocol_format.hpp>
#include <srs_app_rtc_codec.hpp>
#include <srs_app_rtc_source.hpp>
#include <srs_app_config.hpp>
#include <srs_protocol_rtmp_stack.hpp>
#include <srs_kernel_rtc_rtp.hpp>
#include <srs_core_autofree.hpp>
#include <vector>
using namespace std;
// Interface base class constructor: nothing to set up, the interface owns no state.
ISrsStreamBridge::ISrsStreamBridge()
{
}

// Out-of-line virtual destructor for the interface; no resources to release here.
ISrsStreamBridge::~ISrsStreamBridge()
{
}
// Keep a borrowed pointer to the RTMP live source; this bridge does not own it
// and therefore must not free it.
SrsFrameToRtmpBridge::SrsFrameToRtmpBridge(SrsLiveSource* src) : source_(src)
{
}

SrsFrameToRtmpBridge::~SrsFrameToRtmpBridge()
{
}
// Nothing to initialize for the RTMP bridge; the request r is intentionally unused.
srs_error_t SrsFrameToRtmpBridge::initialize(SrsRequest* r)
{
    return srs_success;
}
// Forward the publish event to the RTMP live source.
srs_error_t SrsFrameToRtmpBridge::on_publish()
{
    // TODO: FIXME: Should sync with bridge?
    srs_error_t err = source_->on_publish();
    if (err != srs_success) {
        return srs_error_wrap(err, "source publish");
    }

    return srs_success;
}
// Forward the unpublish event to the RTMP live source.
void SrsFrameToRtmpBridge::on_unpublish()
{
    // TODO: FIXME: Should sync with bridge?
    source_->on_unpublish();
}
// Deliver one AV frame to the RTMP live source, which fans it out to consumers.
srs_error_t SrsFrameToRtmpBridge::on_frame(SrsSharedPtrMessage* frame)
{
    return source_->on_frame(frame);
}
// Build a bridge that converts AV frames to RTP packets for a WebRTC source.
// The SSRC and payload type of the audio/video tracks are taken from the
// source's track descriptions when available, otherwise defaults are used.
SrsFrameToRtcBridge::SrsFrameToRtcBridge(SrsRtcSource* source)
{
    // Always NULL-initialize the members first: the conditional blocks below
    // may be compiled out (SRS_RTC and/or SRS_FFMPEG_FIT disabled), and the
    // destructor and initialize() would otherwise touch wild pointers.
    source_ = NULL;
    rtp_builder_ = NULL;

#ifdef SRS_RTC
    source_ = source;
#endif

#if defined(SRS_RTC) && defined(SRS_FFMPEG_FIT)
    uint32_t audio_ssrc = 0;
    uint8_t audio_payload_type = 0;
    uint32_t video_ssrc = 0;
    uint8_t video_payload_type = 0;

    // audio track ssrc
    if (true) {
        std::vector<SrsRtcTrackDescription*> descs = source->get_track_desc("audio", "opus");
        if (!descs.empty()) {
            audio_ssrc = descs.at(0)->ssrc_;
        }
        // Note we must use the PT of source, see https://github.com/ossrs/srs/pull/3079
        audio_payload_type = descs.empty() ? kAudioPayloadType : descs.front()->media_->pt_;
    }

    // video track ssrc
    if (true) {
        std::vector<SrsRtcTrackDescription*> descs = source->get_track_desc("video", "H264");
        if (!descs.empty()) {
            video_ssrc = descs.at(0)->ssrc_;
        }
        // Note we must use the PT of source, see https://github.com/ossrs/srs/pull/3079
        video_payload_type = descs.empty() ? kVideoPayloadType : descs.front()->media_->pt_;
    }

    rtp_builder_ = new SrsRtcRtpBuilder(this, audio_ssrc, audio_payload_type, video_ssrc, video_payload_type);
#endif
}
// Release the owned RTP builder. The source_ pointer is not freed here;
// presumably the RTC source is owned elsewhere — TODO confirm with the source manager.
SrsFrameToRtcBridge::~SrsFrameToRtcBridge()
{
#ifdef SRS_FFMPEG_FIT
    srs_freep(rtp_builder_);
#endif
}
// Initialize the RTP builder with the stream request. Without FFMPEG_FIT there
// is no frame-to-RTP conversion, so there is nothing to initialize.
srs_error_t SrsFrameToRtcBridge::initialize(SrsRequest* r)
{
#ifdef SRS_FFMPEG_FIT
    return rtp_builder_->initialize(r);
#else
    return srs_success;
#endif
}
// Forward the publish event: first to the RTC source, then to the RTP builder.
srs_error_t SrsFrameToRtcBridge::on_publish()
{
    srs_error_t err = srs_success;

#ifdef SRS_RTC
    // TODO: FIXME: Should sync with bridge?
    err = source_->on_publish();
    if (err != srs_success) {
        return srs_error_wrap(err, "source publish");
    }
#endif

#ifdef SRS_FFMPEG_FIT
    err = rtp_builder_->on_publish();
    if (err != srs_success) {
        return srs_error_wrap(err, "rtp builder publish");
    }
#endif

    return err;
}
// Forward the unpublish event, builder first, then the source.
void SrsFrameToRtcBridge::on_unpublish()
{
#ifdef SRS_FFMPEG_FIT
    rtp_builder_->on_unpublish();
#endif

#ifdef SRS_RTC
    // @remark This bridge might be disposed here, so never use it.
    // TODO: FIXME: Should sync with bridge?
    source_->on_unpublish();
#endif
}
// Convert one AV frame to RTP packets via the builder; a no-op without FFMPEG_FIT.
srs_error_t SrsFrameToRtcBridge::on_frame(SrsSharedPtrMessage* frame)
{
#ifdef SRS_FFMPEG_FIT
    return rtp_builder_->on_frame(frame);
#else
    return srs_success;
#endif
}
// Deliver one RTP packet (produced by the builder) to the RTC source; a no-op without RTC.
srs_error_t SrsFrameToRtcBridge::on_rtp(SrsRtpPacket* pkt)
{
#ifdef SRS_RTC
    return source_->on_rtp(pkt);
#else
    return srs_success;
#endif
}
SrsCompositeBridge::SrsCompositeBridge()
{
}

// The composite owns every appended bridge; release them all on destruction.
SrsCompositeBridge::~SrsCompositeBridge()
{
    for (size_t i = 0; i < bridges_.size(); ++i) {
        ISrsStreamBridge* bridge = bridges_[i];
        srs_freep(bridge);
    }
}
// Initialize each sub-bridge with the same request, stopping at the first error.
srs_error_t SrsCompositeBridge::initialize(SrsRequest* r)
{
    srs_error_t err = srs_success;

    for (size_t i = 0; i < bridges_.size(); ++i) {
        if ((err = bridges_[i]->initialize(r)) != srs_success) {
            return err;
        }
    }

    return err;
}
// Notify each sub-bridge that publishing starts, stopping at the first error.
srs_error_t SrsCompositeBridge::on_publish()
{
    srs_error_t err = srs_success;

    for (size_t i = 0; i < bridges_.size(); ++i) {
        if ((err = bridges_[i]->on_publish()) != srs_success) {
            return err;
        }
    }

    return err;
}
// Fan out the unpublish event to every sub-bridge.
void SrsCompositeBridge::on_unpublish()
{
    for (size_t i = 0; i < bridges_.size(); ++i) {
        bridges_[i]->on_unpublish();
    }
}
// Deliver the frame to every sub-bridge in append order, stopping at the first error.
srs_error_t SrsCompositeBridge::on_frame(SrsSharedPtrMessage* frame)
{
    srs_error_t err = srs_success;

    for (size_t i = 0; i < bridges_.size(); ++i) {
        if ((err = bridges_[i]->on_frame(frame)) != srs_success) {
            return err;
        }
    }

    return err;
}
// Append a sub-bridge; the composite takes ownership and frees it in the
// destructor. Returns this, so calls can be chained fluently.
SrsCompositeBridge* SrsCompositeBridge::append(ISrsStreamBridge* bridge)
{
    bridges_.push_back(bridge);
    return this;
}

View file

@ -0,0 +1,95 @@
//
// Copyright (c) 2013-2023 The SRS Authors
//
// SPDX-License-Identifier: MIT or MulanPSL-2.0
//
#ifndef SRS_APP_STREAM_BRIDGE_HPP
#define SRS_APP_STREAM_BRIDGE_HPP
#include <srs_core.hpp>
#include <srs_kernel_codec.hpp>
#include <vector>
class SrsRequest;
class SrsSharedPtrMessage;
class SrsLiveSource;
class SrsRtcSource;
class SrsRtmpFormat;
class SrsMetaCache;
class SrsAudioTranscoder;
class SrsRtpPacket;
class SrsRtcRtpBuilder;
// A stream bridge is used to convert stream via different protocols, such as bridge for RTMP and RTC. Generally, we use
// frame as message for bridge. A frame is a audio or video frame, such as an I/B/P frame, a general frame for decoder.
// So you must assemble RTP or TS packets to a video frame if WebRTC or SRT.
class ISrsStreamBridge
{
public:
    ISrsStreamBridge();
    virtual ~ISrsStreamBridge();
public:
    // Setup the bridge with the stream request, before any publish event.
    virtual srs_error_t initialize(SrsRequest* r) = 0;
    // Notify the bridge that the stream starts publishing.
    virtual srs_error_t on_publish() = 0;
    // Deliver one assembled AV frame to the bridge.
    virtual srs_error_t on_frame(SrsSharedPtrMessage* frame) = 0;
    // Notify the bridge that the stream stops publishing.
    virtual void on_unpublish() = 0;
};
// A bridge to feed AV frame to RTMP stream.
class SrsFrameToRtmpBridge : public ISrsStreamBridge
{
private:
    // The RTMP live source to feed frames to; borrowed, not owned.
    SrsLiveSource *source_;
public:
    SrsFrameToRtmpBridge(SrsLiveSource *src);
    virtual ~SrsFrameToRtmpBridge();
public:
    srs_error_t initialize(SrsRequest* r);
public:
    virtual srs_error_t on_publish();
    virtual void on_unpublish();
public:
    virtual srs_error_t on_frame(SrsSharedPtrMessage* frame);
};
// A bridge to convert AV frame to WebRTC stream.
class SrsFrameToRtcBridge : public ISrsStreamBridge
{
private:
    // The RTC source to feed RTP packets to; borrowed, not owned.
    SrsRtcSource* source_;
    // Converts AV frames to RTP packets; owned, freed in the destructor.
    SrsRtcRtpBuilder* rtp_builder_;
public:
    SrsFrameToRtcBridge(SrsRtcSource* source);
    virtual ~SrsFrameToRtcBridge();
public:
    virtual srs_error_t initialize(SrsRequest* r);
    virtual srs_error_t on_publish();
    virtual void on_unpublish();
    virtual srs_error_t on_frame(SrsSharedPtrMessage* frame);
    // Callback for the RTP builder: deliver one RTP packet to the RTC source.
    srs_error_t on_rtp(SrsRtpPacket* pkt);
};
// A bridge chain, a set of bridges. Fans every event and frame out to all
// appended sub-bridges, enabling 1:N protocol conversion from one stream.
class SrsCompositeBridge : public ISrsStreamBridge
{
public:
    SrsCompositeBridge();
    virtual ~SrsCompositeBridge();
public:
    srs_error_t initialize(SrsRequest* r);
public:
    virtual srs_error_t on_publish();
    virtual void on_unpublish();
public:
    virtual srs_error_t on_frame(SrsSharedPtrMessage* frame);
public:
    // Append a sub-bridge; the composite takes ownership. Returns this for chaining.
    SrsCompositeBridge* append(ISrsStreamBridge* bridge);
private:
    // All owned sub-bridges, freed in the destructor.
    std::vector<ISrsStreamBridge*> bridges_;
};
#endif

View file

@ -9,6 +9,6 @@
#define VERSION_MAJOR 6 #define VERSION_MAJOR 6
#define VERSION_MINOR 0 #define VERSION_MINOR 0
#define VERSION_REVISION 40 #define VERSION_REVISION 41
#endif #endif

View file

@ -362,7 +362,7 @@
XX(ERROR_RTC_DISABLED , 5021, "RtcDisabled", "RTC is disabled by configuration") \ XX(ERROR_RTC_DISABLED , 5021, "RtcDisabled", "RTC is disabled by configuration") \
XX(ERROR_RTC_NO_SESSION , 5022, "RtcNoSession", "Invalid packet for no RTC session matched") \ XX(ERROR_RTC_NO_SESSION , 5022, "RtcNoSession", "Invalid packet for no RTC session matched") \
XX(ERROR_RTC_INVALID_PARAMS , 5023, "RtcInvalidParams", "Invalid API parameters for RTC") \ XX(ERROR_RTC_INVALID_PARAMS , 5023, "RtcInvalidParams", "Invalid API parameters for RTC") \
XX(ERROR_RTC_DUMMY_BRIDGER , 5024, "RtcDummyBridger", "RTC dummy bridger error") \ XX(ERROR_RTC_DUMMY_BRIDGE , 5024, "RtcDummyBridge", "RTC dummy bridge error") \
XX(ERROR_RTC_STREM_STARTED , 5025, "RtcStreamStarted", "RTC stream already started") \ XX(ERROR_RTC_STREM_STARTED , 5025, "RtcStreamStarted", "RTC stream already started") \
XX(ERROR_RTC_TRACK_CODEC , 5026, "RtcTrackCodec", "RTC track codec error") \ XX(ERROR_RTC_TRACK_CODEC , 5026, "RtcTrackCodec", "RTC track codec error") \
XX(ERROR_RTC_NO_PLAYER , 5027, "RtcNoPlayer", "RTC player not found") \ XX(ERROR_RTC_NO_PLAYER , 5027, "RtcNoPlayer", "RTC player not found") \