1
0
Fork 0
mirror of https://github.com/ossrs/srs.git synced 2025-03-09 15:49:59 +00:00

For #2200, Enable RTC and FLV for GB28181

This commit is contained in:
xialixin@kanzhun.com 2021-02-18 21:51:49 +08:00 committed by winlin
parent adb6f723c7
commit 4df6fa540f
12 changed files with 1289 additions and 845 deletions

View file

@ -269,6 +269,15 @@ function OSX_prepare()
echo "Please install pkg-config"; exit -1;
fi
if [[ $SRS_GB28181 == YES ]]; then
if [[ ! -f /usr/local/opt/libiconv/lib/libiconv.a ]]; then
echo "install libiconv"
echo "brew install libiconv"
brew install libiconv; ret=$?; if [[ 0 -ne $ret ]]; then return $ret; fi
echo "install libiconv success"
fi
fi
echo "OSX install tools success"
return 0
}

View file

@ -10,6 +10,11 @@ http_api {
listen 1985;
}
http_server {
enabled on;
listen 8080;
}
stats {
network 0;
}
@ -29,7 +34,7 @@ stream_caster {
listen 9000;
# 多路复用端口类型,on为tcp,off为udp
# 默认off
tcp_enable on;
tcp_enable off;
# rtp接收监听端口范围最小值
rtp_port_min 58200;
@ -111,5 +116,27 @@ stream_caster {
query_catalog_interval 60;
}
}
vhost __defaultVhost__ {
rtc_server {
enabled on;
# Listen at udp://8000
listen 8000;
#
# The $CANDIDATE means fetch from env; if not configured, use * as default.
#
# The * means retrieving server IP automatically, from all network interfaces,
# @see https://github.com/ossrs/srs/issues/307#issuecomment-599028124
candidate $CANDIDATE;
}
vhost __defaultVhost__ {
rtc {
enabled on;
bframe discard;
}
http_remux {
enabled on;
mount [vhost]/[app]/[stream].flv;
}
}

21
trunk/configure vendored
View file

@ -169,6 +169,11 @@ if [[ $SRS_SRT == YES ]]; then
if [[ $SRS_SHARED_SRT == YES ]]; then LibSRTfile="-lsrt"; fi
fi
# For iconv on macOS only, CentOS seems ok.
if [[ $SRS_GB28181 == YES && $SRS_OSX == YES ]]; then
LibIconvRoot="/usr/local/opt/libiconv/include"; LibIconvfile="/usr/local/opt/libiconv/lib/libiconv.a"
fi
# the link options, always use static link
SrsLinkOptions="-ldl -lpthread";
if [[ $SRS_SSL == YES && $SRS_USE_SYS_SSL == YES ]]; then
@ -231,9 +236,6 @@ if [[ $SRS_RTC == YES ]]; then
MODULE_FILES+=("srs_rtc_stun_stack")
ModuleLibIncs+=(${LibSrtpRoot})
fi
if [[ $SRS_GB28181 == YES ]]; then
MODULE_FILES+=("srs_sip_stack")
fi
if [[ $SRS_FFMPEG_FIT == YES ]]; then
ModuleLibIncs+=("${LibFfmpegRoot[*]}")
fi
@ -283,8 +285,16 @@ if [[ $SRS_FFMPEG_FIT == YES ]]; then
MODULE_FILES+=("srs_app_rtc_codec")
fi
if [[ $SRS_GB28181 == YES ]]; then
MODULE_FILES+=("srs_app_gb28181" "srs_app_gb28181_sip" "srs_app_gb28181_jitbuffer")
MODULE_FILES+=("srs_app_gb28181" "srs_app_gb28181_sip")
fi
if [[ $SRS_GB28181 == YES || $SRS_RTC == YES ]]; then
MODULE_FILES+=("srs_app_rtc_jitbuffer")
fi
if [[ $SRS_GB28181 == YES ]]; then
MODULE_FILES+=("srs_app_sip")
ModuleLibIncs+=(${LibIconvRoot})
fi
DEFINES=""
# add each modules for app
for SRS_MODULE in ${SRS_MODULES[*]}; do
@ -358,6 +368,9 @@ fi
if [[ $SRS_SRT == YES ]]; then
ModuleLibFiles+=("${LibSRTfile[*]}")
fi
if [[ $SRS_GB28181 == YES ]]; then
ModuleLibFiles+=("${LibIconvfile[*]}")
fi
# all depends objects
MODULE_OBJS="${CORE_OBJS[@]} ${KERNEL_OBJS[@]} ${PROTOCOL_OBJS[@]} ${APP_OBJS[@]} ${SERVER_OBJS[@]}"
ModuleLibIncs=(${SRS_OBJS_DIR} ${LibSTRoot} ${LibGperfRoot} ${LibSSLRoot})

View file

@ -122,7 +122,7 @@
当前通道:<a id="gb28181ChannelId"></a>
<div>
<textarea class="span6" id="txt_rtmp_url" rows="2"></textarea>
<button class="btn btn-primary" id="btn_play">RTMP播放</button>
<button class="btn btn-primary" id="btn_play">FLV播放</button>
</div>
<div>
@ -141,187 +141,17 @@
</div>
<div id="main_content" class="hide">
<div id="link_modal" class="modal hide fade">
<div class="modal-header">
<button type="button" class="close" data-dismiss="modal" aria-hidden="true">×</button>
<h3><a href="https://github.com/ossrs/srs">SRS Link Generator</a></h3>
</div>
<div class="modal-body">
<div class="form-horizontal">
<div class="control-group">
<label class="control-label" for="link_server">服务器地址</label>
<div class="controls">
<span id="link_server" class="span4 uneditable-input"></span>
</div>
</div>
<div class="control-group">
<label class="control-label" for="link_port">服务器端口</label>
<div class="controls">
<span id="link_port" class="span2 uneditable-input"></span>
</div>
</div>
<div class="control-group">
<label class="control-label" for="link_vhost">RTMP Vhost</label>
<div class="controls">
<span id="link_vhost" class="span4 uneditable-input"></span>
</div>
</div>
<div class="control-group">
<label class="control-label" for="link_app">RTMP App</label>
<div class="controls">
<span id="link_app" class="span4 uneditable-input"></span>
</div>
</div>
<div class="control-group">
<label class="control-label" for="link_stream">RTMP Stream</label>
<div class="controls">
<span id="link_stream" class="span4 uneditable-input"></span>
</div>
</div>
<div class="control-group">
<label class="control-label" for="link_rtmp">RTMP地址</label>
<div class="controls">
<span id="link_rtmp" class="span4 uneditable-input"></span>
</div>
</div>
<div class="control-group">
<label class="control-label" for="link_url">播放链接地址</label>
<div class="controls">
<div style="margin-top:5px;"><a href="#" id="link_url" target="_blank">请右键拷贝此链接地址.</a></div>
</div>
</div>
</div>
</div>
<div class="modal-footer"></div>
</div>
<div id="main_modal" class="modal hide fade">
<div class="modal-header">
<button type="button" class="close" data-dismiss="modal" aria-hidden="true">×</button>
<h3><a href="https://github.com/ossrs/srs">SrsPlayer</a></h3>
<h3><a href="https://github.com/ossrs/srs">SrsFlvPlayer</a></h3>
</div>
<div class="modal-body">
<div id="player"></div>
<div class="progress progress-striped active" id="pb_buffer_bg">
<div class="bar" style="width: 0%;" id="pb_buffer"></div>
<div>
<video id="video_player" width="98%" autoplay controls></video>
</div>
</div>
<div class="modal-footer" id="my_modal_footer">
<div>
<div class="btn-group dropup">
<button class="btn dropdown-toggle" data-toggle="dropdown">
全屏比例大小<span class="caret"></span>
</button>
<ul class="dropdown-menu">
<li><a id="btn_fs_size_screen_100" href="#">屏幕大小(100%)</a></li>
<li><a id="btn_fs_size_screen_75" href="#">屏幕大小(75%)</a></li>
<li><a id="btn_fs_size_screen_50" href="#">屏幕大小(50%)</a></li>
<li><a id="btn_fs_size_video_100" href="#">视频大小(100%)</a></li>
<li><a id="btn_fs_size_video_75" href="#">视频大小(75%)</a></li>
<li><a id="btn_fs_size_video_50" href="#">视频大小(50%)</a></li>
</ul>
</div>
<div class="btn-group dropup">
<button class="btn dropdown-toggle" data-toggle="dropdown">显示比例<span class="caret"></span></button>
<ul class="dropdown-menu">
<li><a id="btn_dar_original" href="#">视频原始比例</a></li>
<li><a id="btn_dar_21_9" href="#">宽屏影院(21:9)</a></li>
<li><a id="btn_dar_16_9" href="#">宽屏电视(16:9)</a></li>
<li><a id="btn_dar_4_3" href="#">窄屏(4:3)</a></li>
<li><a id="btn_dar_fill" href="#">填充(容器比例)</a></li>
</ul>
</div>
<div class="btn-group dropup">
<button class="btn dropdown-toggle" data-toggle="dropdown">缓冲区大小<span class="caret"></span></button>
<ul class="dropdown-menu">
<li><a id="btn_bt_0_1" href="#">0.1秒(实时)</a></li>
<li><a id="btn_bt_0_2" href="#">0.2秒(实时)</a></li>
<li><a id="btn_bt_0_3" href="#">0.3秒(实时)</a></li>
<li><a id="btn_bt_0_5" href="#">0.5秒(实时)</a></li>
<li><a id="btn_bt_0_8" href="#">0.8秒(会议)</a></li>
<li><a id="btn_bt_1_0" href="#">1秒(低延迟)</a></li>
<li><a id="btn_bt_2_0" href="#">2秒(较低延时)</a></li>
<li><a id="btn_bt_3_0" href="#">3秒(流畅播放)</a></li>
<li><a id="btn_bt_4_0" href="#">4秒(流畅播放)</a></li>
<li><a id="btn_bt_5_0" href="#">5秒(网速较低)</a></li>
<li><a id="btn_bt_6_0" href="#">6秒(网速较低)</a></li>
<li><a id="btn_bt_8_0" href="#">8秒(网速较低)</a></li>
<li><a id="btn_bt_10_0" href="#">10秒(无所谓延迟)</a></li>
<li><a id="btn_bt_15_0" href="#">15秒(无所谓延迟)</a></li>
<li><a id="btn_bt_20_0" href="#">20秒(无所谓延迟)</a></li>
<li><a id="btn_bt_30_0" href="#">30秒(流畅第一)</a></li>
</ul>
</div>
<div class="btn-group dropup">
<button class="btn dropdown-toggle" data-toggle="dropdown">最大缓冲区<span class="caret"></span></button>
<ul class="dropdown-menu">
<li><a id="btn_mbt_0_6" href="#">0.6秒(实时)</a></li>
<li><a id="btn_mbt_0_9" href="#">0.9秒(实时)</a></li>
<li><a id="btn_mbt_1_2" href="#">1.2秒(实时)</a></li>
<li><a id="btn_mbt_1_5" href="#">1.5秒(实时)</a></li>
<li><a id="btn_mbt_2_4" href="#">2.4秒(会议)</a></li>
<li><a id="btn_mbt_3_0" href="#">3秒(低延迟)</a></li>
<li><a id="btn_mbt_6_0" href="#">6秒(较低延时)</a></li>
<li><a id="btn_mbt_9_0" href="#">9秒(流畅播放)</a></li>
<li><a id="btn_mbt_12_0" href="#">12秒(流畅播放)</a></li>
<li><a id="btn_mbt_15_0" href="#">15秒(网速较低)</a></li>
<li><a id="btn_mbt_18_0" href="#">18秒(网速较低)</a></li>
<li><a id="btn_mbt_24_0" href="#">24秒(网速较低)</a></li>
<li><a id="btn_mbt_30_0" href="#">30秒(无所谓延迟)</a></li>
<li><a id="btn_mbt_45_0" href="#">45秒(无所谓延迟)</a></li>
<li><a id="btn_mbt_60_0" href="#">60秒(无所谓延迟)</a></li>
<li><a id="btn_mbt_90_0" href="#">90秒(流畅第一)</a></li>
</ul>
</div>
<div class="btn-group dropup">
<a id="btn_fullscreen" class="btn">全屏</a>
</div>
<div class="btn-group dropup">
<button id="btn_pause" class="btn">暂停播放</button>
<button id="btn_resume" class="btn hide">继续播放</button>
</div>
<div class="btn-group dropup">
<button class="btn btn-primary" data-dismiss="modal" aria-hidden="true">关闭播放器</button>
</div>
</div>
<div class="hide" id="fullscreen_tips">
<font color="red">点击视频</font>进入全屏模式~<br/>
由于安全原因,Flash全屏无法使用JS触发
</div>
<div>
<div class="input-prepend div_play_time" title="BufferLength/BufferTime/MaxBufferTime">
<span class="add-on">@B</span>
<input class="span2" style="width:80px" id="txt_buffer" type="text" placeholder="0/0/0s">
</div>
<div class="input-prepend div_play_time" title="视频的播放流畅度">
<span class="add-on">@F</span>
<input class="span2" style="width:57px" id="txt_fluency" type="text" placeholder="100%">
</div>
<div class="input-prepend div_play_time" title="视频总共卡顿次数">
<span class="add-on">@E</span>
<input class="span2" style="width:45px" id="txt_empty_count" type="text" placeholder="0">
</div>
<div class="input-prepend div_play_time" title="视频当前的帧率FPS">
<span class="add-on">@F</span>
<input class="span2" style="width:55px" id="txt_fps" type="text" placeholder="fps">
</div>
<div class="input-prepend div_play_time" title="视频当前的码率(视频+音频)单位Kbps">
<span class="add-on">@B</span>
<input class="span2" style="width:55px" id="txt_bitrate" type="text" placeholder="kbps">
</div>
<div class="input-prepend div_play_time" title="播放时长,格式:天 时:分:秒">
<span class="add-on">@T</span>
<input class="span2" style="width:85px" id="txt_time" type="text" placeholder="天 时:分:秒">
</div>
</div>
<div style="margin-top:-12px;">
<span id="debug_info"></span>
URL: <a href="#" id="player_url"></a>
<div class="input-prepend div_play_time" title="当前时间:年-月-日 时:分:秒">
<span class="add-on">@N</span>
<input class="span2" style="width:135px" id="player_clock" type="text" placeholder="年-月-日 时:分:秒">
</div>
</div>
<div>
<div class="btn-group dropup">
<button class="btn btn-primary" id="btn_ptz_up"> 上↑ </button>
@ -332,13 +162,8 @@
<button class="btn btn-primary" id="btn_ptz_zoomout"> 缩小- </button>
</div>
[注意] !!! 云台控制需要启用内部sip功能
<div class="input-prepend" title="首播时间,点播放到开始播放的时间,秒">
<span class="add-on">@PST</span>
<input class="span1" style="width:60px" id="txt_pst" type="text" placeholder="N秒">
</div>
</div>
</div>
</div>
<div id="rtc_player_modal" class="modal hide fade">
@ -410,54 +235,17 @@
</body>
<script type="text/javascript" src="js/jquery-1.10.2.min.js"></script>
<script type="text/javascript" src="js/bootstrap.min.js"></script>
<script type="text/javascript" src="js/swfobject.js"></script>
<script type="text/javascript" src="js/json2.js"></script>
<script type="text/javascript" src="js/srs.page.js"></script>
<script type="text/javascript" src="js/srs.log.js"></script>
<script type="text/javascript" src="js/srs.player.js"></script>
<script type="text/javascript" src="js/srs.publisher.js"></script>
<script type="text/javascript" src="js/srs.utility.js"></script>
<script type="text/javascript" src="js/winlin.utility.js"></script>
<script type="text/javascript" src="js/flv-1.5.0.min.js"></script>
<script type="text/javascript" src="js/hls-0.14.17.min.js"></script>
<script type="text/javascript">
var __on_flash_ready = null;
$(function(){
// 探测Flash是否正常启用。
$('#main_flash_hdr').html(
'\
<object classid="clsid:d27cdb6e-ae6d-11cf-96b8-444553540000" width="100%" height="100%"> \
<param name="movie" value="srs_player/release/srs_player.swf"> \
<param name="quality" value="autohigh"> \
<param name="swliveconnect" value="true"> \
<param name="allowScriptAccess" value="always"> \
<param name="bgcolor" value="#0"> \
<param name="allowFullScreen" value="true"> \
<param name="wmode" value="opaque"> \
<param name="FlashVars" value="log=1"> \
<param name="flashvars" value="id=1&on_player_ready=__on_flash_ready"> \
<embed src="srs_player/release/srs_player.swf" width="100%" height="100%" \
quality="autohigh" bgcolor="#0" align="middle" allowfullscreen="true" allowscriptaccess="always" \
type="application/x-shockwave-flash" swliveconnect="true" wmode="opaque" \
flashvars="id=1&on_player_ready=__on_flash_ready" \
pluginspage="http://www.macromedia.com/go/getflashplayer"> \
</object> \
'
);
$('#main_flash_hdr').show();
var showFlashHdr = setTimeout(function(){
$('#main_flash_alert').show();
}, 300);
__on_flash_ready = function (id) {
clearTimeout(showFlashHdr);
$('#main_flash_alert').hide();
$('#main_flash_hdr').hide();
$('#main_content').show();
autoLoadPage();
};
});
</script>
<script type="text/javascript">
@ -471,7 +259,6 @@
} else {
$("#txt_api_url").val("http://" + query.host + ":1985");
}
var __active_dar = null;
function select_dar(dar_id, num, den) {
srs_player.set_dar(num, den);
@ -631,13 +418,13 @@
$('#gb28181ChannelMessage').html(syntaxHighlight(ret));
if (ret.code == 0 && ret.data != undefined ) {
$("#txt_rtmp_url").val(ret.data.channels[0].rtmp_url);
$("#txt_rtmp_url").val(ret.data.channels[0].flv_url);
ret.data.channels[0].rtmp_url.split("//")
ret.data.channels[0].flv_url.split("//")
var urlObject = parse_rtmp_url(ret.data.channels[0].rtmp_url);
var werbrtc_url = "webrtc://"+ urlObject.server + "/" + urlObject.app + "/" + urlObject.stream;
$("#txt_rtc_url").val(werbrtc_url);
//var urlObject = parse_rtmp_url(ret.data.channels[0].rtmp_url);
//var werbrtc_url = "webrtc://"+ urlObject.server + "/" + urlObject.app + "/" + urlObject.stream;
$("#txt_rtc_url").val(ret.data.channels[0].webrtc_url);
}
}
@ -659,6 +446,292 @@
}
// Async-await-promise based SRS RTC Player.
function SrsRtcPlayerAsync() {
var self = {};
// @see https://github.com/rtcdn/rtcdn-draft
// @url The WebRTC url to play with, for example:
// webrtc://r.ossrs.net/live/livestream
// or specifies the API port:
// webrtc://r.ossrs.net:11985/live/livestream
// or autostart the play:
// webrtc://r.ossrs.net/live/livestream?autostart=true
// or change the app from live to myapp:
// webrtc://r.ossrs.net:11985/myapp/livestream
// or change the stream from livestream to mystream:
// webrtc://r.ossrs.net:11985/live/mystream
// or set the api server to myapi.domain.com:
// webrtc://myapi.domain.com/live/livestream
// or set the candidate(ip) of answer:
// webrtc://r.ossrs.net/live/livestream?eip=39.107.238.185
// or force to access https API:
// webrtc://r.ossrs.net/live/livestream?schema=https
// or use plaintext, without SRTP:
// webrtc://r.ossrs.net/live/livestream?encrypt=false
// or any other information, will pass-by in the query:
// webrtc://r.ossrs.net/live/livestream?vhost=xxx
// webrtc://r.ossrs.net/live/livestream?token=xxx
self.play = async function(url) {
var conf = self.__internal.prepareUrl(url);
self.pc.addTransceiver("audio", {direction: "recvonly"});
self.pc.addTransceiver("video", {direction: "recvonly"});
var offer = await self.pc.createOffer();
await self.pc.setLocalDescription(offer);
var session = await new Promise(function(resolve, reject) {
// @see https://github.com/rtcdn/rtcdn-draft
var data = {
api: conf.apiUrl, streamurl: conf.streamUrl, clientip: null, sdp: offer.sdp
};
console.log("Generated offer: ", data);
$.ajax({
type: "POST", url: conf.apiUrl, data: JSON.stringify(data),
contentType:'application/json', dataType: 'json'
}).done(function(data) {
console.log("Got answer: ", data);
if (data.code) {
reject(data); return;
}
resolve(data);
}).fail(function(reason){
reject(reason);
});
});
await self.pc.setRemoteDescription(
new RTCSessionDescription({type: 'answer', sdp: session.sdp})
);
return session;
};
// Close the publisher.
self.close = function() {
self.pc.close();
};
// The callback when got remote stream.
self.onaddstream = function (event) {};
// Internal APIs.
self.__internal = {
defaultPath: '/rtc/v1/play/',
prepareUrl: function (webrtcUrl) {
var urlObject = self.__internal.parse(webrtcUrl);
// If user specifies the schema, use it as API schema.
var schema = urlObject.user_query.schema;
schema = schema ? schema + ':' : window.location.protocol;
var port = urlObject.port || 1985;
if (schema === 'https:') {
port = urlObject.port || 443;
}
// @see https://github.com/rtcdn/rtcdn-draft
var api = urlObject.user_query.play || self.__internal.defaultPath;
if (api.lastIndexOf('/') !== api.length - 1) {
api += '/';
}
apiUrl = schema + '//' + urlObject.server + ':' + port + api;
for (var key in urlObject.user_query) {
if (key !== 'api' && key !== 'play') {
apiUrl += '&' + key + '=' + urlObject.user_query[key];
}
}
// Replace /rtc/v1/play/&k=v to /rtc/v1/play/?k=v
var apiUrl = apiUrl.replace(api + '&', api + '?');
var streamUrl = urlObject.url;
return {apiUrl: apiUrl, streamUrl: streamUrl, schema: schema, urlObject: urlObject, port: port};
},
parse: function (url) {
// @see: http://stackoverflow.com/questions/10469575/how-to-use-location-object-to-parse-url-without-redirecting-the-page-in-javascri
var a = document.createElement("a");
a.href = url.replace("rtmp://", "http://")
.replace("webrtc://", "http://")
.replace("rtc://", "http://");
var vhost = a.hostname;
var app = a.pathname.substr(1, a.pathname.lastIndexOf("/") - 1);
var stream = a.pathname.substr(a.pathname.lastIndexOf("/") + 1);
// parse the vhost in the params of app, that srs supports.
app = app.replace("...vhost...", "?vhost=");
if (app.indexOf("?") >= 0) {
var params = app.substr(app.indexOf("?"));
app = app.substr(0, app.indexOf("?"));
if (params.indexOf("vhost=") > 0) {
vhost = params.substr(params.indexOf("vhost=") + "vhost=".length);
if (vhost.indexOf("&") > 0) {
vhost = vhost.substr(0, vhost.indexOf("&"));
}
}
}
// when vhost equals to server, and server is ip,
// the vhost is __defaultVhost__
if (a.hostname === vhost) {
var re = /^(\d+)\.(\d+)\.(\d+)\.(\d+)$/;
if (re.test(a.hostname)) {
vhost = "__defaultVhost__";
}
}
// parse the schema
var schema = "rtmp";
if (url.indexOf("://") > 0) {
schema = url.substr(0, url.indexOf("://"));
}
var port = a.port;
if (!port) {
if (schema === 'http') {
port = 80;
} else if (schema === 'https') {
port = 443;
} else if (schema === 'rtmp') {
port = 1935;
}
}
var ret = {
url: url,
schema: schema,
server: a.hostname, port: port,
vhost: vhost, app: app, stream: stream
};
self.__internal.fill_query(a.search, ret);
// For webrtc API, we use 443 if page is https, or schema specified it.
if (!ret.port) {
if (schema === 'webrtc' || schema === 'rtc') {
if (ret.user_query.schema === 'https') {
ret.port = 443;
} else if (window.location.href.indexOf('https://') === 0) {
ret.port = 443;
} else {
// For WebRTC, SRS use 1985 as default API port.
ret.port = 1985;
}
}
}
return ret;
},
fill_query: function (query_string, obj) {
// pure user query object.
obj.user_query = {};
if (query_string.length === 0) {
return;
}
// split again for angularjs.
if (query_string.indexOf("?") >= 0) {
query_string = query_string.split("?")[1];
}
var queries = query_string.split("&");
for (var i = 0; i < queries.length; i++) {
var elem = queries[i];
var query = elem.split("=");
obj[query[0]] = query[1];
obj.user_query[query[0]] = query[1];
}
// alias domain for vhost.
if (obj.domain) {
obj.vhost = obj.domain;
}
}
};
self.pc = new RTCPeerConnection(null);
self.pc.onaddstream = function (event) {
if (self.onaddstream) {
self.onaddstream(event);
}
};
return self;
}
var flvPlayer = null;
var hlsPlayer = null;
var stopPlayers = function () {
if (flvPlayer) {
flvPlayer.destroy();
flvPlayer = null;
}
if (hlsPlayer) {
hlsPlayer.destroy();
hlsPlayer = null;
}
};
var hide_for_error = function () {
$('#main_flash_alert').show();
$('#main_info').hide();
$('#main_tips').hide();
$('#video_player').hide();
//$('#btn_play').hide();
stopPlayers();
};
var show_for_ok = function () {
$('#main_flash_alert').hide();
$('#main_info').show();
$('#main_tips').show();
$('#video_player').show();
//$('#btn_play').show();
};
var start_play_live = function (r) {
stopPlayers();
if (!r) return;
// Start play HTTP-FLV.
if (r.stream.indexOf('.flv') > 0) {
if (!flvjs.isSupported()) {
hide_for_error();
return;
}
show_for_ok();
flvPlayer = flvjs.createPlayer({type: 'flv', url: r.url});
flvPlayer.attachMediaElement(document.getElementById('video_player'));
flvPlayer.load();
flvPlayer.play();
return;
}
// Start play HLS.
if (r.stream.indexOf('.m3u8') > 0) {
if (!Hls.isSupported()) {
hide_for_error();
return;
}
show_for_ok();
hlsPlayer = new Hls();
hlsPlayer.loadSource(r.url);
hlsPlayer.attachMedia(document.getElementById('video_player'));
return;
}
console.error('不支持的URL', r.url, r);
$('#video_player').hide();
};
/****
@ -693,154 +766,11 @@
// the play startup time.
var pst = new Date();
var pc = null; // Global handler to do cleanup when replaying.
$("#main_modal").on("show", function(){
if (srs_player) {
return;
}
$("#div_container").remove();
$("#debug_info").text("");
var div_container = $("<div/>");
$(div_container).attr("id", "div_container");
$("#player").append(div_container);
var player = $("<div/>");
$(player).attr("id", "player_id");
$(div_container).append(player);
srs_player = new SrsPlayer("player_id", srs_get_player_width(), srs_get_player_height());
srs_player.on_player_ready = function() {
var buffer_time = 0.5;
if (url.indexOf('.m3u8') > 0) {
buffer_time = 2;
}
if (query.buffer) {
for (var i = 0; i < bts.length - 1; i++) {
var cur = bts[i];
var next = bts[i+1];
if (Number(query.buffer) >= cur && Number(query.buffer) < next) {
buffer_time = cur;
break;
}
}
}
select_buffer(buffer_time);
this.play(url);
pst = new Date();
};
srs_player.on_player_status = function(code, desc) {
//console.log("[播放器状态] code=" + code + ", desc=" + desc);
};
srs_player.on_player_metadata = function(metadata) {
$("#btn_dar_original").text("视频原始比例" + "(" + metadata.width + ":" + metadata.height + ")");
if (metadata.ip && metadata.pid && metadata.cid) {
$("#debug_info").text("ID:" + metadata.ip + '/' + metadata.pid + '/' + metadata.cid + '');
}
select_dar("#btn_dar_original", 0, 0);
select_fs_size("#btn_fs_size_screen_100", "screen", 100);
};
srs_player.on_player_timer = function(time, buffer_length, kbps, fps, rtime) {
if (time > 0 && pst) {
var diff = (new Date().getTime() - pst.getTime()) / 1000.0;
$("#txt_pst").val(Number(diff).toFixed(2) + "秒");
pst = null;
}
var buffer = buffer_length / this.max_buffer_time * 100;
$("#pb_buffer").width(Number(buffer).toFixed(1) + "%");
$("#pb_buffer_bg").attr("title",
"缓冲区:" + buffer_length.toFixed(1) + "秒, 最大缓冲区:"
+ this.max_buffer_time.toFixed(1) + "秒, 当前:"
+ buffer.toFixed(1) + "%");
var bts = this.buffer_time >= 1? this.buffer_time.toFixed(0) : this.buffer_time.toFixed(1);
var mbts = this.buffer_time >= 1? this.max_buffer_time.toFixed(0) : this.max_buffer_time.toFixed(1);
$("#txt_buffer").val(buffer_length.toFixed(1) + "/" + bts + "/" + mbts + "s");
$("#txt_bitrate").val(kbps.toFixed(0) + "kbps");
$("#txt_fps").val(fps.toFixed(1) + "fps");
$("#txt_empty_count").val(srs_player.empty_count() + "次");
$("#txt_fluency").val(srs_player.fluency().toFixed(2) + "%");
var time_str = "";
// day
time_str = padding(parseInt(time / 24 / 3600), 2, '0') + " ";
// hour
time = time % (24 * 3600);
time_str += padding(parseInt(time / 3600), 2, '0') + ":";
// minute
time = time % (3600);
time_str += padding(parseInt(time / 60), 2, '0') + ":";
// seconds
time = time % (60);
time_str += padding(parseInt(time), 2, '0');
// show
$("#txt_time").val(time_str);
var clock = new Date().getTime() / 1000;
$("#player_clock").val(absolute_seconds_to_YYYYmmdd(clock) + " " + absolute_seconds_to_HHMMSS(clock));
};
srs_player.start();
});
$("#main_modal").on("hide", function(){
if (srs_player) {
// report the log to backend.
//console.log(srs_player.dump_log());
srs_player.stop();
srs_player = null;
}
});
var apply_url_change = function() {
var rtmp = parse_rtmp_url($("#txt_url").val());
var url = "http://" + query.host + query.pathname + "?"
+ "app=" + rtmp.app + "&stream=" + rtmp.stream
+ "&server=" + rtmp.server + "&port=" + rtmp.port
+ "&autostart=true";
if (query.shp_identify) {
url += "&shp_identify=" + query.shp_identify;
}
if (rtmp.vhost == "__defaultVhost__") {
url += "&vhost=" + rtmp.server;
} else {
url += "&vhost=" + rtmp.vhost;
}
if (rtmp.schema == "http") {
url += "&schema=http";
}
if (query.buffer) {
url += "&buffer=" + query.buffer;
}
if (query.api_port) {
url += "&api_port=" + query.api_port;
}
var queries = user_extra_params(query);
if (queries && queries.length) {
url += '&' + queries.join('&');
}
$("#player_url").text($("#txt_url").val()).attr("href", url);
$("#link_server").text(rtmp.server);
$("#link_port").text(rtmp.port);
$("#link_vhost").text(rtmp.vhost);
$("#link_app").text(rtmp.app);
$("#link_stream").text(rtmp.stream);
$("#link_rtmp").text($("#txt_url").val());
$("#link_url").attr("href", url);
};
$("#txt_url").change(function(){
apply_url_change();
});
$("#btn_generate_link").click(function(){
@ -848,91 +778,11 @@
});
$("#btn_play").click(function(){
url = $("#txt_rtmp_url").val();
$("#main_modal").modal({show:true, keyboard:true});
var r = parse_rtmp_url($("#txt_rtmp_url").val());
start_play_live(r);
});
$("#btn_fullscreen").click(function(){
$("#fullscreen_tips").toggle();
});
$("#btn_pause").click(function() {
$("#btn_resume").toggle();
$("#btn_pause").toggle();
srs_player.pause();
});
$("#btn_resume").click(function(){
$("#btn_resume").toggle();
$("#btn_pause").toggle();
srs_player.resume();
});
if (true) {
$("#srs_publish").click(function () {
url = $("#srs_publish").text();
$("#main_modal").modal({show: true, keyboard: false});
});
$("#srs_publish_ld").click(function () {
url = $("#srs_publish_ld").text();
$("#main_modal").modal({show: true, keyboard: false});
});
$("#srs_publish_sd").click(function () {
url = $("#srs_publish_sd").text();
$("#main_modal").modal({show: true, keyboard: false});
});
$("#srs_publish_fw").click(function () {
url = $("#srs_publish_fw").text();
$("#main_modal").modal({show: true, keyboard: false});
});
$("#srs_publish_fw_ld").click(function () {
url = $("#srs_publish_fw_ld").text();
$("#main_modal").modal({show: true, keyboard: false});
});
$("#srs_publish_fw_sd").click(function () {
url = $("#srs_publish_fw_sd").text();
$("#main_modal").modal({show: true, keyboard: false});
});
}
if (true) {
$("#btn_dar_original").click(function(){
select_dar("#btn_dar_original", 0, 0);
});
$("#btn_dar_21_9").click(function(){
select_dar("#btn_dar_21_9", 21, 9);
});
$("#btn_dar_16_9").click(function(){
select_dar("#btn_dar_16_9", 16, 9);
});
$("#btn_dar_4_3").click(function(){
select_dar("#btn_dar_4_3", 4, 3);
});
$("#btn_dar_fill").click(function(){
select_dar("#btn_dar_fill", -1, -1);
});
}
if (true) {
$("#btn_fs_size_video_100").click(function(){
select_fs_size("#btn_fs_size_video_100", "video", 100);
});
$("#btn_fs_size_video_75").click(function(){
select_fs_size("#btn_fs_size_video_75", "video", 75);
});
$("#btn_fs_size_video_50").click(function(){
select_fs_size("#btn_fs_size_video_50", "video", 50);
});
$("#btn_fs_size_screen_100").click(function(){
select_fs_size("#btn_fs_size_screen_100", "screen", 100);
});
$("#btn_fs_size_screen_75").click(function(){
select_fs_size("#btn_fs_size_screen_75", "screen", 75);
});
$("#btn_fs_size_screen_50").click(function(){
select_fs_size("#btn_fs_size_screen_50", "screen", 50);
});
}
if (true) {
for (var i = 0; i < bts.length; i++) {
var bt = bts[i];
@ -1160,92 +1010,50 @@
$("#create_channel_modal").modal('hide');
});
var sdk = null; // Global handler to do cleanup when replaying.
var startPlay = function() {
$('#rtc_media_player').show();
// Close PC when user replay.
if (sdk) {
sdk.close();
}
sdk = new SrsRtcPlayerAsync();
sdk.onaddstream = function (event) {
console.log('Start play, event: ', event);
$('#rtc_media_player').prop('srcObject', event.stream);
};
// For example:
// webrtc://r.ossrs.net/live/livestream
var url = $("#txt_rtc_url").val();
sdk.play(url).then(function(session){
$('#sessionid').html(session.sessionid);
$('#simulator-drop').attr('href', session.simulator + '?drop=1&username=' + session.sessionid);
}).catch(function (reason) {
sdk.close();
$('#rtc_media_player').hide();
console.error(reason);
});
};
$("#btn_rtc_play").click(function(){
$('#rtc_media_player').width(srs_get_player_width);
$('#rtc_media_player').height(srs_get_player_height);
$("#rtc_player_modal").modal({show: true, keyboard: false});
startPlay($("#txt_rtc_url").val());
startPlay();
});
$("#rtc_player_modal").on("hide", function(){
if (pc) {
pc.close();
if (sdk) {
sdk.close();
}
});
var startPlay = function(url) {
$('#rtc_media_player').show();
var urlObject = parse_rtmp_url(url);
var schema = window.location.protocol;
// Close PC when user replay.
if (pc) {
pc.close();
}
pc = new RTCPeerConnection(null);
pc.onaddstream = function (event) {
console.debug(event.stream);
$('#rtc_media_player').prop('srcObject', event.stream);
};
new Promise(function(resolve, reject) {
pc.addTransceiver("audio", {direction: "recvonly"});
pc.addTransceiver("video", {direction: "recvonly"});
pc.createOffer(function(offer){
resolve(offer);
},function(reason){
reject(reason);
});
}).then(function(offer) {
return pc.setLocalDescription(offer).then(function(){ return offer; });
}).then(function(offer) {
return new Promise(function(resolve, reject) {
// var port = urlObject.port || 1985;
var port = 1985;
// @see https://github.com/rtcdn/rtcdn-draft
var api = urlObject.user_query.play || '/rtc/v1/play/';
if (api.lastIndexOf('/') != api.length - 1) {
api += '/';
}
var url = schema + '//' + urlObject.server + ':' + port + api;
for (var key in urlObject.user_query) {
if (key != 'api' && key != 'play') {
url += '&' + key + '=' + urlObject.user_query[key];
}
}
// Replace /rtc/v1/play/&k=v to /rtc/v1/play/?k=v
url = url.replace(api + '&', api + '?');
// @see https://github.com/rtcdn/rtcdn-draft
var data = {
api: url, streamurl: urlObject.url, clientip: null, sdp: offer.sdp
};
console.log("offer: " + JSON.stringify(data));
$.ajax({
type: "POST", url: url, data: JSON.stringify(data),
contentType:'application/json', dataType: 'json'
}).done(function(data) {
console.log("answer: " + JSON.stringify(data));
resolve(data.sdp);
}).fail(function(reason){
reject(reason);
});
});
}).then(function(answer) {
return pc.setRemoteDescription(new RTCSessionDescription({type: 'answer', sdp: answer}));
}).catch(function(reason) {
throw reason;
});
};
}
apply_url_change();
};
</script>
</html>

View file

@ -50,7 +50,7 @@ using namespace std;
#include <srs_app_rtmp_conn.hpp>
#include <srs_protocol_utility.hpp>
#include <srs_protocol_format.hpp>
#include <srs_sip_stack.hpp>
#include <srs_app_sip.hpp>
//#define W_PS_FILE
//#define W_VIDEO_FILE
@ -405,7 +405,6 @@ srs_error_t SrsGb28181PsRtpProcessor::rtmpmuxer_enqueue_data(SrsGb28181RtmpMuxer
srs_error_t SrsGb28181PsRtpProcessor::on_rtp_packet_jitter(const sockaddr* from, const int fromlen, char* buf, int nb_buf)
{
srs_error_t err = srs_success;
bool completed = false;
pprint->elapse();
@ -739,13 +738,12 @@ srs_error_t SrsGb28181TcpPsRtpProcessor::rtmpmuxer_enqueue_data(SrsGb28181RtmpMu
srs_error_t SrsGb28181TcpPsRtpProcessor::on_rtp_packet_jitter(char* buf, int nb_buf, std::string ip, int port)
{
srs_error_t err = srs_success;
bool completed = false;
pprint->elapse();
char address_string[64] = {0};
char port_string[16] = {0};
/*if (getnameinfo(from, fromlen,
/*char port_string[16] = {0};
if (getnameinfo(from, fromlen,
(char*)&address_string, sizeof(address_string),
(char*)&port_string, sizeof(port_string),
NI_NUMERICHOST | NI_NUMERICSERV)) {
@ -1109,8 +1107,8 @@ srs_error_t SrsPsStreamDemixer::on_ps_stream(char* ps_data, int ps_size, uint32_
uint8_t p1 = (uint8_t)(next_ps_pack[0]);
uint8_t p2 = (uint8_t)(next_ps_pack[1]);
uint8_t p3 = (uint8_t)(next_ps_pack[2]);
uint8_t p4 = (uint8_t)(next_ps_pack[3]);
//uint8_t p3 = (uint8_t)(next_ps_pack[2]);
//uint8_t p4 = (uint8_t)(next_ps_pack[3]);
if (audio_enable && audio_es_type != STREAM_TYPE_AUDIO_AAC &&
(p1 & 0xFF) == 0xFF && (p2 & 0xF0) == 0xF0) {
@ -1260,8 +1258,8 @@ SrsGb28181RtmpMuxer::SrsGb28181RtmpMuxer(SrsGb28181Manger* c, std::string id, bo
((SrsSTCoroutine*)trd)->set_stack_size(1 << 18);
sdk = NULL;
vjitter = new SrsRtspJitter();
ajitter = new SrsRtspJitter();
vjitter = new SrsRtpTimeJitter();
ajitter = new SrsRtpTimeJitter();
avc = new SrsRawH264Stream();
aac = new SrsRawAacStream();
@ -1284,8 +1282,8 @@ SrsGb28181RtmpMuxer::SrsGb28181RtmpMuxer(SrsGb28181Manger* c, std::string id, bo
source = NULL;
source_publish = true;
jitter_buffer = new SrsPsJitterBuffer(id);
jitter_buffer_audio = new SrsPsJitterBuffer(id);
jitter_buffer = new SrsRtpJitterBuffer(id);
jitter_buffer_audio = new SrsRtpJitterBuffer(id);
ps_buflen = 0;
ps_buffer = NULL;
@ -1414,7 +1412,7 @@ srs_error_t SrsGb28181RtmpMuxer::initialize(SrsServer *s, SrsRequest* r)
srs_error_t err = srs_success;
if (!jitter_buffer) {
jitter_buffer = new SrsPsJitterBuffer(channel_id);
jitter_buffer = new SrsRtpJitterBuffer(channel_id);
}
jitter_buffer->SetDecodeErrorMode(kSelectiveErrors);
@ -1422,7 +1420,7 @@ srs_error_t SrsGb28181RtmpMuxer::initialize(SrsServer *s, SrsRequest* r)
jitter_buffer->SetNackSettings(250, 450, 0);
if (!jitter_buffer_audio) {
jitter_buffer_audio = new SrsPsJitterBuffer(channel_id);
jitter_buffer_audio = new SrsRtpJitterBuffer(channel_id);
}
jitter_buffer_audio->SetDecodeErrorMode(kSelectiveErrors);
@ -1458,6 +1456,7 @@ srs_error_t SrsGb28181RtmpMuxer::do_cycle()
send_rtmp_stream_time = srs_get_system_time();
uint32_t cur_timestamp = 0;
int buffer_size = 0;
bool keyframe = false;
//consume ps stream, and check status
while (true) {
@ -1472,7 +1471,7 @@ srs_error_t SrsGb28181RtmpMuxer::do_cycle()
if (config.jitterbuffer_enable){
if(jitter_buffer->FoundFrame(cur_timestamp)){
jitter_buffer->GetPsFrame(&ps_buffer, ps_buflen, buffer_size, cur_timestamp);
jitter_buffer->GetFrame(&ps_buffer, ps_buflen, buffer_size, keyframe, cur_timestamp);
if (buffer_size > 0){
if ((err = ps_demixer->on_ps_stream(ps_buffer, buffer_size, cur_timestamp, 0)) != srs_success){
@ -1483,7 +1482,7 @@ srs_error_t SrsGb28181RtmpMuxer::do_cycle()
}
if(jitter_buffer_audio->FoundFrame(cur_timestamp)){
jitter_buffer_audio->GetPsFrame(&ps_buffer_audio, ps_buflen_auido, buffer_size, cur_timestamp);
jitter_buffer_audio->GetFrame(&ps_buffer_audio, ps_buflen_auido, buffer_size, keyframe, cur_timestamp);
if (buffer_size > 0){
if ((err = ps_demixer->on_ps_stream(ps_buffer_audio, buffer_size, cur_timestamp, 0)) != srs_success){
@ -1600,10 +1599,12 @@ void SrsGb28181RtmpMuxer::insert_jitterbuffer(SrsPsRtpPacket *pkt)
//otherwise audio uses jitter_buffer_audio, and video uses jitter_buffer
if (av_same_ts){
pkt->marker = false;
jitter_buffer->InsertPacket(*pkt, pkt->payload->bytes(), pkt->payload->length(), NULL);
jitter_buffer->InsertPacket(pkt->sequence_number, pkt->timestamp, pkt->marker,
pkt->payload->bytes(), pkt->payload->length(), NULL);
ps_rtp_video_ts = pkt->timestamp;
}else {
jitter_buffer_audio->InsertPacket(*pkt, pkt->payload->bytes(), pkt->payload->length(), NULL);
jitter_buffer_audio->InsertPacket(pkt->sequence_number, pkt->timestamp, pkt->marker,
pkt->payload->bytes(), pkt->payload->length(), NULL);
}
//srs_cond_signal(wait_ps_queue);
@ -1771,13 +1772,7 @@ srs_error_t SrsGb28181RtmpMuxer::write_h264_ipb_frame2(char *frame, int frame_si
//0001xxxxxxxxxx
//xxxx0001xxxxxxx
uint32_t naluLen = size - cur_pos;
char *p = (char*)&naluLen;
video_data[cur_pos] = p[3];
video_data[cur_pos+1] = p[2];
video_data[cur_pos+2] = p[1];
video_data[cur_pos+3] = p[0];
uint32_t naluLen = size - cur_pos - 4;
char *frame = video_data + cur_pos + 4;
int frame_size = naluLen;
@ -1797,12 +1792,6 @@ srs_error_t SrsGb28181RtmpMuxer::write_h264_ipb_frame2(char *frame, int frame_si
//0001xxxxxxxx0001xxxxxxxx0001xxxxxxxxx
//xxxxxxxxxxxx0001xxxxxxxx0001xxxxxxxxx
uint32_t naluLen = cur_pos - pre_pos - 4;
char *p = (char*)&naluLen;
video_data[pre_pos] = p[3];
video_data[pre_pos+1] = p[2];
video_data[pre_pos+2] = p[1];
video_data[pre_pos+3] = p[0];
char *frame = video_data + pre_pos + 4;
int frame_size = naluLen;
@ -1816,12 +1805,6 @@ srs_error_t SrsGb28181RtmpMuxer::write_h264_ipb_frame2(char *frame, int frame_si
if (first_pos != pre_pos){
uint32_t naluLen = size - pre_pos - 4;
char *p = (char*)&naluLen;
video_data[pre_pos] = p[3];
video_data[pre_pos+1] = p[2];
video_data[pre_pos+2] = p[1];
video_data[pre_pos+3] = p[0];
char *frame = video_data + pre_pos + 4;
int frame_size = naluLen;
@ -2167,6 +2150,9 @@ SrsGb28181StreamChannel::SrsGb28181StreamChannel(){
rtp_peer_port = 0;
rtp_peer_ip = "";
rtmp_url = "";
flv_url = "";
hls_url = "";
webrtc_url = "";
recv_time = 0;
recv_time_str = "";
}
@ -2191,6 +2177,9 @@ void SrsGb28181StreamChannel::copy(const SrsGb28181StreamChannel *s){
rtp_peer_port = s->get_rtp_peer_port();
rtmp_url = s->get_rtmp_url();
flv_url = s->get_flv_url();
hls_url = s->get_hls_url();
webrtc_url = s->get_webrtc_url();
recv_time_str = s->get_recv_time_str();
recv_time = s->get_recv_time();
@ -2205,6 +2194,9 @@ void SrsGb28181StreamChannel::dumps(SrsJsonObject* obj)
obj->set("app", SrsJsonAny::str(app.c_str()));
obj->set("stream", SrsJsonAny::str(stream.c_str()));
obj->set("rtmp_url", SrsJsonAny::str(rtmp_url.c_str()));
obj->set("flv_url", SrsJsonAny::str(flv_url.c_str()));
obj->set("hls_url", SrsJsonAny::str(hls_url.c_str()));
obj->set("webrtc_url", SrsJsonAny::str(webrtc_url.c_str()));
obj->set("ssrc", SrsJsonAny::integer(ssrc));
obj->set("rtp_port", SrsJsonAny::integer(rtp_port));
@ -2298,7 +2290,7 @@ uint32_t SrsGb28181Manger::generate_ssrc(std::string id)
//gb28181 live ssrc max value 0999999999(3B9AC9FF)
//gb28181 vod ssrc max value 1999999999(773593FF)
uint8_t index = uint8_t(rand() % (0x0F - 0x01 + 1) + 0x01);
uint32_t ssrc = 0x2FFFF00 & (hash_code(id) << 8) | index;
uint32_t ssrc = ((0x2FFFF00) & (hash_code(id) << 8)) | index;
//uint32_t ssrc = 0x00FFFFFF & (hash_code(id));
srs_trace("gb28181: generate ssrc id=%s, ssrc=%u", id.c_str(), ssrc);
return ssrc;
@ -2599,7 +2591,21 @@ srs_error_t SrsGb28181Manger::create_stream_channel(SrsGb28181StreamChannel *cha
channel->set_rtmp_port(rtmp_port);
channel->set_ip(config->host);
std::string play_url = srs_generate_rtmp_url(config->host, rtmp_port, "", "", app, stream_name, "");
std::string flv_url = srs_string_replace(play_url, "rtmp://", "http://");
std::stringstream port;
port << ":" << rtmp_port;
flv_url = srs_string_replace(flv_url, port.str(), ":"+_srs_config->get_http_stream_listen());
std::string hls_url = flv_url + ".m3u8";
flv_url = flv_url + ".flv";
std::string webrtc_url = srs_string_replace(play_url, "rtmp://", "webrtc://");
webrtc_url = srs_string_replace(webrtc_url, port.str(), ":"+_srs_config->get_http_api_listen());
channel->set_rtmp_url(play_url);
channel->set_flv_url(flv_url);
channel->set_hls_url(hls_url);
channel->set_webrtc_url(webrtc_url);
request.app = app;
request.stream = stream_name;
@ -2876,7 +2882,6 @@ srs_error_t SrsGb28181Conn::do_cycle()
nb_read = nb_read + leftDataLength;
length;
pp = (char*)&length;
p = &(mbuffer[0]);
pp[1] = *p++;

View file

@ -34,13 +34,12 @@
#include <srs_app_st.hpp>
#include <srs_app_listener.hpp>
#include <srs_rtsp_stack.hpp>
#include <srs_kernel_stream.hpp>
#include <srs_app_log.hpp>
#include <srs_kernel_file.hpp>
#include <srs_protocol_json.hpp>
#include <srs_app_gb28181_sip.hpp>
#include <srs_app_gb28181_jitbuffer.hpp>
#include <srs_app_rtc_jitbuffer.hpp>
#include <srs_rtmp_stack.hpp>
#include <srs_app_source.hpp>
#include <srs_service_conn.hpp>
@ -90,7 +89,7 @@ class SrsPithyPrint;
class SrsSimpleRtmpClient;
class SrsSipStack;
class SrsGb28181Manger;
class SrsRtspJitter;
class SrsRtpTimeJitter;
class SrsSipRequest;
class SrsGb28181RtmpMuxer;
class SrsGb28181Config;
@ -99,7 +98,7 @@ class SrsGb28181TcpPsRtpProcessor;
class SrsGb28181SipService;
class SrsGb28181StreamChannel;
class SrsGb28181SipSession;
class SrsPsJitterBuffer;
class SrsRtpJitterBuffer;
class SrsServer;
class SrsSource;
class SrsRequest;
@ -316,8 +315,8 @@ private:
srs_cond_t wait_ps_queue;
SrsSimpleRtmpClient* sdk;
SrsRtspJitter* vjitter;
SrsRtspJitter* ajitter;
SrsRtpTimeJitter* vjitter;
SrsRtpTimeJitter* ajitter;
SrsRawH264Stream* avc;
std::string h264_sps;
@ -330,8 +329,8 @@ private:
SrsSource* source;
SrsServer* server;
SrsPsJitterBuffer *jitter_buffer;
SrsPsJitterBuffer *jitter_buffer_audio;
SrsRtpJitterBuffer *jitter_buffer;
SrsRtpJitterBuffer *jitter_buffer_audio;
char *ps_buffer;
char *ps_buffer_audio;
@ -340,7 +339,6 @@ private:
int ps_buflen_auido;
uint32_t ps_rtp_video_ts;
uint32_t ps_rtp_audio_ts;
bool source_publish;
@ -444,6 +442,9 @@ private:
std::string app;
std::string stream;
std::string rtmp_url;
std::string flv_url;
std::string hls_url;
std::string webrtc_url;
std::string ip;
int rtp_port;
@ -472,6 +473,9 @@ public:
uint32_t get_rtp_peer_port() const { return rtp_peer_port; }
std::string get_rtp_peer_ip() const { return rtp_peer_ip; }
std::string get_rtmp_url() const { return rtmp_url; }
std::string get_flv_url() const { return flv_url; }
std::string get_hls_url() const { return hls_url; }
std::string get_webrtc_url() const { return webrtc_url; }
srs_utime_t get_recv_time() const { return recv_time; }
std::string get_recv_time_str() const { return recv_time_str; }
@ -486,6 +490,9 @@ public:
void set_rtp_peer_ip( const std::string &p) { rtp_peer_ip = p; }
void set_rtp_peer_port( const int &s) { rtp_peer_port = s;}
void set_rtmp_url( const std::string &u) { rtmp_url = u; }
void set_flv_url( const std::string &u) { flv_url = u; }
void set_hls_url( const std::string &u) { hls_url = u; }
void set_webrtc_url( const std::string &u) { webrtc_url = u; }
void set_recv_time( const srs_utime_t &u) { recv_time = u; }
void set_recv_time_str( const std::string &u) { recv_time_str = u; }

View file

@ -43,7 +43,7 @@ using namespace std;
#include <srs_kernel_utility.hpp>
#include <srs_kernel_codec.hpp>
#include <srs_app_pithy_print.hpp>
#include <srs_sip_stack.hpp>
#include <srs_app_sip.hpp>
#include <srs_app_gb28181.hpp>

View file

@ -31,7 +31,7 @@
#include <map>
#include <srs_app_log.hpp>
#include <srs_sip_stack.hpp>
#include <srs_app_sip.hpp>
#include <srs_app_gb28181.hpp>
#include <srs_app_pithy_print.hpp>
#include <srs_service_conn.hpp>

View file

@ -21,8 +21,8 @@
* CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
*/
#ifndef SRS_APP_GB28181_JITBUFFER_HPP
#define SRS_APP_GB28181_JITBUFFER_HPP
#ifndef SRS_APP_RTC_RTP_JITBUFFER_HPP
#define SRS_APP_RTC_RTP_JITBUFFER_HPP
#include <srs_core.hpp>
@ -36,13 +36,15 @@
#include <srs_app_log.hpp>
#include <srs_kernel_utility.hpp>
#include <srs_app_gb28181.hpp>
#include <srs_kernel_rtc_rtp.hpp>
#include <srs_kernel_flv.hpp>
class SrsPsRtpPacket;
class SrsPsFrameBuffer;
class PsDecodingState;
class SrsRtpFrameBuffer;
class SrsRtpDecodingState;
class SrsGb28181RtmpMuxer;
class VCMPacket;
class SrsRtpPacket2;
///jittbuffer
@ -57,7 +59,7 @@ enum FrameType {
};
// Used to indicate which decode with errors mode should be used.
enum PsDecodeErrorMode {
enum SrsRtpDecodeErrorMode {
kNoErrors, // Never decode with errors. Video will freeze
// if nack is disabled.
kSelectiveErrors, // Frames that are determined decodable in
@ -79,7 +81,7 @@ enum { kMaxVideoDelayMs = 10000 };
enum { kPacketsPerFrameMultiplier = 5 };
enum { kFastConvergeThreshold = 5};
enum PsJitterBufferEnum {
enum SrsRtpJitterBufferEnum {
kMaxConsecutiveOldFrames = 60,
kMaxConsecutiveOldPackets = 300,
kMaxPacketsInSession = 800,
@ -87,7 +89,7 @@ enum PsJitterBufferEnum {
kMaxJBFrameSizeBytes = 4000000 // sanity don't go above 4Mbyte.
};
enum PsFrameBufferEnum {
enum SrsRtpFrameBufferEnum {
kOutOfBoundsPacket = -7,
kNotInitialized = -6,
kOldPacket = -5,
@ -102,18 +104,65 @@ enum PsFrameBufferEnum {
kDuplicatePacket = 5 // We're receiving a duplicate packet.
};
enum PsFrameBufferStateEnum {
enum SrsRtpFrameBufferStateEnum {
kStateEmpty, // frame popped by the RTP receiver
kStateIncomplete, // frame that have one or more packet(s) stored
kStateComplete, // frame that have all packets
kStateDecodable // Hybrid mode - frame can be decoded
};
enum PsNackMode {
enum SrsRtpNackMode {
kNack,
kNoNack
};
// Used to indicate if a received packet contain a complete NALU (or equivalent)
enum VCMNaluCompleteness {
kNaluUnset = 0, // Packet has not been filled.
kNaluComplete = 1, // Packet can be decoded as is.
kNaluStart, // Packet contain beginning of NALU
kNaluIncomplete, // Packet is not beginning or end of NALU
kNaluEnd, // Packet is the end of a NALU
};
enum RtpVideoCodecTypes {
kRtpVideoNone,
kRtpVideoGeneric,
kRtpVideoVp8,
kRtpVideoVp9,
kRtpVideoH264,
kRtpVideoPS
};
// Video codec types
enum VideoCodecType {
kVideoCodecVP8,
kVideoCodecVP9,
kVideoCodecH264,
kVideoCodecH264SVC,
kVideoCodecI420,
kVideoCodecRED,
kVideoCodecULPFEC,
kVideoCodecGeneric,
kVideoCodecH264PS,
kVideoCodecUnknown
};
// The packetization types that we support: single, aggregated, and fragmented.
enum H264PacketizationTypes {
kH264SingleNalu, // This packet contains a single NAL unit.
kH264StapA, // This packet contains STAP-A (single time
// aggregation) packets. If this packet has an
// associated NAL unit type, it'll be for the
// first such aggregated packet.
kH264FuA, // This packet contains a FU-A (fragmentation
// unit) packet, meaning it is a part of a frame
// that was too large to fit into a single packet.
};
enum { kH264StartCodeLengthBytes = 4};
// Used to pass data from jitter buffer to session info.
// This data is then used in determining whether a frame is decodable.
struct FrameData {
@ -147,7 +196,7 @@ inline uint32_t LatestTimestamp(uint32_t timestamp1, uint32_t timestamp2)
return IsNewerTimestamp(timestamp1, timestamp2) ? timestamp1 : timestamp2;
}
typedef std::list<SrsPsFrameBuffer*> UnorderedFrameList;
typedef std::list<SrsRtpFrameBuffer*> UnorderedFrameList;
class TimestampLessThan {
public:
@ -159,16 +208,16 @@ public:
};
class FrameList
: public std::map<uint32_t, SrsPsFrameBuffer*, TimestampLessThan> {
: public std::map<uint32_t, SrsRtpFrameBuffer*, TimestampLessThan> {
public:
void InsertFrame(SrsPsFrameBuffer* frame);
SrsPsFrameBuffer* PopFrame(uint32_t timestamp);
SrsPsFrameBuffer* Front() const;
SrsPsFrameBuffer* FrontNext() const;
SrsPsFrameBuffer* Back() const;
void InsertFrame(SrsRtpFrameBuffer* frame);
SrsRtpFrameBuffer* PopFrame(uint32_t timestamp);
SrsRtpFrameBuffer* Front() const;
SrsRtpFrameBuffer* FrontNext() const;
SrsRtpFrameBuffer* Back() const;
int RecycleFramesUntilKeyFrame(FrameList::iterator* key_frame_it,
UnorderedFrameList* free_frames);
void CleanUpOldOrEmptyFrames(PsDecodingState* decoding_state, UnorderedFrameList* free_frames);
void CleanUpOldOrEmptyFrames(SrsRtpDecodingState* decoding_state, UnorderedFrameList* free_frames);
void Reset(UnorderedFrameList* free_frames);
};
@ -180,7 +229,13 @@ public:
size_t size,
uint16_t seqNum,
uint32_t timestamp,
bool markerBit);
bool markerBit,
H264PacketizationTypes type,
RtpVideoCodecTypes rtpType,
bool singlenual,
bool isfirst,
FrameType ftype
);
void Reset();
@ -194,24 +249,32 @@ public:
bool markerBit;
FrameType frameType;
//cloopenwebrtc::VideoCodecType codec;
VideoCodecType codec;
bool isFirstPacket; // Is this first packet in a frame.
//VCMNaluCompleteness completeNALU; // Default is kNaluIncomplete.
VCMNaluCompleteness completeNALU; // Default is kNaluIncomplete.
bool insertStartCode; // True if a start code should be inserted before this
// packet.
int width;
int height;
//RTPVideoHeader codecSpecificHeader;
//H264 header
H264PacketizationTypes h264packetizationType;
bool h264singleNalu;
public:
void CopyCodecSpecifics(RtpVideoCodecTypes codecType, bool H264single_nalu, bool firstPacket);
};
class SrsPsFrameBuffer {
class SrsRtpFrameBuffer {
public:
SrsPsFrameBuffer();
virtual ~SrsPsFrameBuffer();
SrsRtpFrameBuffer();
virtual ~SrsRtpFrameBuffer();
public:
PsFrameBufferEnum InsertPacket(const VCMPacket& packet, const FrameData& frame_data);
SrsRtpFrameBufferEnum InsertPacket(const VCMPacket& packet, const FrameData& frame_data);
void UpdateCompleteSession();
void UpdateDecodableSession(const FrameData& frame_data);
bool HaveFirstPacket() const;
@ -220,7 +283,7 @@ public:
uint32_t GetTimeStamp() const;
FrameType GetFrameType() const;
PsFrameBufferStateEnum GetState() const;
SrsRtpFrameBufferStateEnum GetState() const;
int32_t GetHighSeqNum() const;
int32_t GetLowSeqNum() const;
@ -233,7 +296,6 @@ public:
bool complete() const;
bool decodable() const;
bool GetPsPlayload(SrsSimpleStream **ps_data, int &count);
bool DeletePacket(int &count);
void PrepareForDecode(bool continuous);
@ -248,7 +310,7 @@ private:
const PacketIterator& prev_packet_it);
size_t InsertBuffer(uint8_t* frame_buffer, PacketIterator packet_it);
size_t Insert(const uint8_t* buffer, size_t length, uint8_t* frame_buffer);
size_t Insert(const uint8_t* buffer, size_t length, bool insert_start_code, uint8_t* frame_buffer);
void ShiftSubsequentPackets(PacketIterator it, int steps_to_shift);
void VerifyAndAllocate(const uint32_t minimumSize);
void UpdateDataPointers(const uint8_t* old_base_ptr, const uint8_t* new_base_ptr);
@ -269,11 +331,11 @@ private:
uint32_t timeStamp_;
FrameType frame_type_;
PsDecodeErrorMode decode_error_mode_;
PsFrameBufferStateEnum state_;
SrsRtpDecodeErrorMode decode_error_mode_;
SrsRtpFrameBufferStateEnum state_;
uint16_t nackCount_;
int64_t latestPacketTimeMs_;
//uint16_t nackCount_;
//int64_t latestPacketTimeMs_;
// The payload.
uint8_t* _buffer;
@ -281,20 +343,20 @@ private:
size_t _length;
};
class PsDecodingState {
class SrsRtpDecodingState {
public:
PsDecodingState();
~PsDecodingState();
SrsRtpDecodingState();
~SrsRtpDecodingState();
// Check for old frame
bool IsOldFrame(const SrsPsFrameBuffer* frame) const;
bool IsOldFrame(const SrsRtpFrameBuffer* frame) const;
// Check for old packet
bool IsOldPacket(const VCMPacket* packet);
// Check for frame continuity based on current decoded state. Use best method
// possible, i.e. temporal info, picture ID or sequence number.
bool ContinuousFrame(const SrsPsFrameBuffer* frame) const;
void SetState(const SrsPsFrameBuffer* frame);
void CopyFrom(const PsDecodingState& state);
bool UpdateEmptyFrame(const SrsPsFrameBuffer* frame);
bool ContinuousFrame(const SrsRtpFrameBuffer* frame) const;
void SetState(const SrsRtpFrameBuffer* frame);
void CopyFrom(const SrsRtpDecodingState& state);
bool UpdateEmptyFrame(const SrsRtpFrameBuffer* frame);
// Update the sequence number if the timestamp matches current state and the
// sequence number is higher than the current one. This accounts for packets
// arriving late.
@ -309,64 +371,81 @@ public:
bool full_sync() const;
private:
void UpdateSyncState(const SrsPsFrameBuffer* frame);
void UpdateSyncState(const SrsRtpFrameBuffer* frame);
// Designated continuity functions
//bool ContinuousPictureId(int picture_id) const;
bool ContinuousSeqNum(uint16_t seq_num) const;
//bool ContinuousLayer(int temporal_id, int tl0_pic_id) const;
//bool UsingPictureId(const SrsPsFrameBuffer* frame) const;
//bool UsingPictureId(const SrsRtpFrameBuffer* frame) const;
// Keep state of last decoded frame.
// TODO(mikhal/stefan): create designated classes to handle these types.
uint16_t sequence_num_;
uint32_t time_stamp_;
int picture_id_;
int temporal_id_;
int tl0_pic_id_;
bool full_sync_; // Sync flag when temporal layers are used.
bool in_initial_state_;
bool m_firstPacket;
};
class SrsPsJitterBuffer
// The time jitter correct for rtp.
class SrsRtpTimeJitter
{
private:
int64_t previous_timestamp;
int64_t pts;
int delta;
public:
SrsRtpTimeJitter();
virtual ~SrsRtpTimeJitter();
public:
int64_t timestamp();
srs_error_t correct(int64_t& ts);
void reset();
};
class SrsRtpJitterBuffer
{
public:
SrsPsJitterBuffer(std::string key);
virtual ~SrsPsJitterBuffer();
SrsRtpJitterBuffer(std::string key);
virtual ~SrsRtpJitterBuffer();
public:
srs_error_t start();
void Reset();
PsFrameBufferEnum InsertPacket(const SrsPsRtpPacket &packet, char *buf, int size, bool* retransmitted);
void ReleaseFrame(SrsPsFrameBuffer* frame);
SrsRtpFrameBufferEnum InsertPacket2(const SrsRtpPacket2 &pkt, bool* retransmitted);
SrsRtpFrameBufferEnum InsertPacket(uint16_t seq, uint32_t ts, bool maker, char *buf, int size,
bool* retransmitted);
void ReleaseFrame(SrsRtpFrameBuffer* frame);
bool FoundFrame(uint32_t& time_stamp);
bool GetPsFrame(char **buffer, int &buf_len, int &size, const uint32_t time_stamp);
void SetDecodeErrorMode(PsDecodeErrorMode error_mode);
void SetNackMode(PsNackMode mode,int64_t low_rtt_nack_threshold_ms,
bool GetFrame(char **buffer, int &buf_len, int &size, bool &keyframe, const uint32_t time_stamp);
void SetDecodeErrorMode(SrsRtpDecodeErrorMode error_mode);
void SetNackMode(SrsRtpNackMode mode,int64_t low_rtt_nack_threshold_ms,
int64_t high_rtt_nack_threshold_ms);
void SetNackSettings(size_t max_nack_list_size,int max_packet_age_to_nack,
int max_incomplete_time_ms);
uint16_t* GetNackList(uint16_t* nack_list_size, bool* request_key_frame);
void Flush();
void ResetJittter();
bool isFirstKeyFrame;
private:
PsFrameBufferEnum GetFrame(const VCMPacket& packet, SrsPsFrameBuffer** frame,
SrsRtpFrameBufferEnum GetFrameByRtpPacket(const VCMPacket& packet, SrsRtpFrameBuffer** frame,
FrameList** frame_list);
SrsPsFrameBuffer* GetEmptyFrame();
SrsRtpFrameBuffer* GetEmptyFrame();
bool NextCompleteTimestamp(uint32_t max_wait_time_ms, uint32_t* timestamp);
bool NextMaybeIncompleteTimestamp(uint32_t* timestamp);
SrsPsFrameBuffer* ExtractAndSetDecode(uint32_t timestamp);
SrsPsFrameBuffer* NextFrame() const;
SrsRtpFrameBuffer* ExtractAndSetDecode(uint32_t timestamp);
SrsRtpFrameBuffer* NextFrame() const;
bool TryToIncreaseJitterBufferSize();
bool RecycleFramesUntilKeyFrame();
bool IsContinuous(const SrsPsFrameBuffer& frame) const;
bool IsContinuousInState(const SrsPsFrameBuffer& frame,
const PsDecodingState& decoding_state) const;
void FindAndInsertContinuousFrames(const SrsPsFrameBuffer& new_frame);
bool IsContinuous(const SrsRtpFrameBuffer& frame) const;
bool IsContinuousInState(const SrsRtpFrameBuffer& frame,
const SrsRtpDecodingState& decoding_state) const;
void FindAndInsertContinuousFrames(const SrsRtpFrameBuffer& new_frame);
void CleanUpOldOrEmptyFrames();
//nack
@ -376,11 +455,14 @@ private:
bool MissingTooOldPacket(uint16_t latest_sequence_number) const;
bool HandleTooOldPackets(uint16_t latest_sequence_number);
void DropPacketsFromNackList(uint16_t last_decoded_sequence_number);
PsNackMode nack_mode() const;
SrsRtpNackMode nack_mode() const;
int NonContinuousOrIncompleteDuration();
uint16_t EstimatedLowSequenceNumber(const SrsPsFrameBuffer& frame) const;
uint16_t EstimatedLowSequenceNumber(const SrsRtpFrameBuffer& frame) const;
bool WaitForRetransmissions();
bool IsPacketInOrder(uint16_t sequence_number);
bool IsFirstPacketInFrame(uint32_t ts, uint16_t seq);
private:
class SequenceNumberLessThan {
public:
@ -402,7 +484,7 @@ private:
UnorderedFrameList free_frames_;
FrameList decodable_frames_;
FrameList incomplete_frames_;
PsDecodingState last_decoded_state_;
SrsRtpDecodingState last_decoded_state_;
bool first_packet_since_reset_;
// Statistics.
@ -438,7 +520,7 @@ private:
int64_t rtt_ms_;
// NACK and retransmissions.
PsNackMode nack_mode_;
SrsRtpNackMode nack_mode_;
int64_t low_rtt_nack_threshold_ms_;
int64_t high_rtt_nack_threshold_ms_;
// Holds the internal NACK list (the missing sequence numbers).
@ -449,12 +531,17 @@ private:
int max_packet_age_to_nack_; // Measured in sequence numbers.
int max_incomplete_time_ms_;
PsDecodeErrorMode decode_error_mode_;
SrsRtpDecodeErrorMode decode_error_mode_;
// Estimated rolling average of packets per frame
float average_packets_per_frame_;
// average_packets_per_frame converges fast if we have fewer than this many
// frames.
int frame_counter_;
uint32_t last_received_timestamp_;
uint16_t last_received_sequence_number_;
bool first_packet_;
};
#endif

View file

@ -21,7 +21,7 @@
* CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
*/
#include <srs_sip_stack.hpp>
#include <srs_app_sip.hpp>
#if !defined(SRS_EXPORT_LIBRTMP)